pax_global_header00006660000000000000000000000064141340331140014504gustar00rootroot0000000000000052 comment=b38572030f3d968c945675efaccd053d123f158b nipype-1.7.0/000077500000000000000000000000001413403311400130155ustar00rootroot00000000000000nipype-1.7.0/.circleci/000077500000000000000000000000001413403311400146505ustar00rootroot00000000000000nipype-1.7.0/.circleci/config.yml000066400000000000000000000216271413403311400166500ustar00rootroot00000000000000_machine_kwds: &machine_kwds image: ubuntu-2004:202107-02 _store_artifacts_kwds: &store_artifacts_kwds path: /home/circleci/work/tests _test_environment: &test_environment WORKDIR: /home/circleci/work DOCKER_IMAGE: "nipype/nipype" _set_pr_number: &set_pr_number name: Set PR number command: | echo 'export CIRCLE_PR_NUMBER="${CIRCLE_PR_NUMBER:-${CIRCLE_PULL_REQUEST##*/}}"' >> $BASH_ENV source $BASH_ENV echo $CIRCLE_PR_NUMBER _generate_dockerfiles: &generate_dockerfiles name: Generate Dockerfiles command: | make gen-dockerfiles _modify_nipype_version: &modify_nipype_version name: Modify Nipype version if necessary command: | if [ "$CIRCLE_TAG" != "" ]; then sed -i -E "s/(__version__ = )'[A-Za-z0-9.-]+'/\1'$CIRCLE_TAG'/" nipype/info.py fi _get_base_image: &get_base_image name: Get base image (pull or build) no_output_timeout: 60m command: | source /tmp/docker/get_base_image.sh if [ "$GET_BASE" == "PULL" ]; then echo "Pulling base image ..." 
docker pull nipype/nipype:base elif [ "$GET_BASE" == "BUILD" ]; then tools/retry_cmd.sh -n 5 -s 15 \ docker build -t nipype/nipype:base - < docker/Dockerfile.base else echo "Error: method to get base image not understood" exit 1 fi _build_main_image_py38: &build_main_image_py38 name: Build main image (py38) no_output_timeout: 60m command: | tools/retry_cmd.sh -n 5 -s 15 \ docker build \ --rm=false \ --tag nipype/nipype:latest \ --tag nipype/nipype:py38 \ $(test -z "${CIRCLE_TAG}" || echo --tag nipype/nipype:"${CIRCLE_TAG}") \ --build-arg BUILD_DATE="$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \ --build-arg VCS_REF="$(git rev-parse --short HEAD)" \ --build-arg VERSION="${CIRCLE_TAG}" /home/circleci/nipype _download_test_data: &_download_test_data name: Download test data no_output_timeout: 20m working_directory: /home/circleci/examples environment: OSF_NIPYPE_URL: "https://files.osf.io/v1/resources/nefdp/providers/osfstorage" command: | export DATA_NIPYPE_TUTORIAL_URL="${OSF_NIPYPE_URL}/57f4739cb83f6901ed94bf21" curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_TUTORIAL_URL" | tar xj export DATA_NIPYPE_FSL_COURSE="${OSF_NIPYPE_URL}/57f472cf9ad5a101f977ecfe" curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_FSL_COURSE" | tar xz export DATA_NIPYPE_FSL_FEEDS="${OSF_NIPYPE_URL}/57f473066c613b01f113e7af" curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_FSL_FEEDS" | tar xz _prepare_working_directory: &prepare_working_directory name: Prepare working directory environment: *test_environment command: | mkdir -p "$WORKDIR" chmod -R 777 "$WORKDIR" _get_codecov: &_get_codecov name: Get codecov command: | pip install --no-cache-dir codecov _run_codecov_coverage: &_run_codecov_coverage name: Run codecov (coverage) environment: *test_environment command: | codecov --file $WORKDIR/tests/coverage*.xml --root "$HOME/nipype" --flags unittests -e CIRCLE_JOB _run_codecov_smoke: &_run_codecov_smoke name: Run codecov (smoke tests) environment: *test_environment command: | codecov 
--file $WORKDIR/tests/smoketest*.xml --root "$HOME/nipype" --flags smoketests -e CIRCLE_JOB version: 2 jobs: compare_base_dockerfiles: docker: - image: docker:17.10.0-ce-git steps: - checkout: path: /home/circleci/nipype - setup_remote_docker - run: name: Generate and prune base Dockerfile in preparation for cache check working_directory: /home/circleci/nipype/docker command: | mkdir -p /tmp/docker ash ./generate_dockerfiles.sh -b # Use the sha256 sum of the pruned Dockerfile as the cache key. ash prune_dockerfile.sh Dockerfile.base > /tmp/docker/Dockerfile.base-pruned - restore_cache: key: dockerfile-cache-v1-master-{{ checksum "/tmp/docker/Dockerfile.base-pruned" }} - run: name: Determine how to get base image command: | if [ -f /tmp/docker/cache/Dockerfile.base-pruned ]; then echo "Cache found. Will pull base image." echo 'export GET_BASE=PULL' > /tmp/docker/get_base_image.sh else echo "Cache not found. Will build base image." echo 'export GET_BASE=BUILD' > /tmp/docker/get_base_image.sh fi - persist_to_workspace: root: /tmp paths: - docker/Dockerfile.base-pruned - docker/get_base_image.sh test_pytest: machine: *machine_kwds working_directory: /home/circleci/nipype steps: - checkout: path: /home/circleci/nipype - attach_workspace: at: /tmp - run: *set_pr_number - run: *generate_dockerfiles - run: *modify_nipype_version - run: *get_base_image - run: *build_main_image_py38 - run: *_get_codecov - run: *_download_test_data - run: *prepare_working_directory - run: name: Run pytests no_output_timeout: 30m environment: *test_environment command: bash -ux /home/circleci/nipype/.circleci/test_pytest.sh - run: *_run_codecov_coverage - store_artifacts: *store_artifacts_kwds - store_test_results: *store_artifacts_kwds - run: name: Save Docker images to workspace if on master no_output_timeout: 60m command: | if [ "$CIRCLE_BRANCH" = "master" -a -z "$CIRCLE_PULL_REQUEST" ]; then docker save nipype/nipype:base \ nipype/nipype:latest \ nipype/nipype:py38 | gzip -1 > 
/tmp/docker/nipype-base-latest-py38.tar.gz \ && du -h /tmp/docker/nipype-base-latest-py38.tar.gz fi - persist_to_workspace: root: /tmp paths: - docker deploy_dockerhub: docker: - image: docker:17.10.0-ce-git steps: - setup_remote_docker - attach_workspace: at: /tmp - run: name: Load saved Docker images. no_output_timeout: 60m command: | docker load < /tmp/docker/nipype-base-latest-py38.tar.gz - run: name: Push to DockerHub no_output_timeout: 120m command: | echo "$DOCKER_PASS" | docker login -u "$DOCKER_USER" --password-stdin docker push nipype/nipype:base docker push nipype/nipype:latest docker push nipype/nipype:py38 test -z "${CIRCLE_TAG}" || docker push nipype/nipype:"${CIRCLE_TAG}" - run: name: Move pruned Dockerfile to /tmp/docker/cache directory command: | mkdir -p /tmp/docker/cache/ mv /tmp/docker/Dockerfile.base-pruned /tmp/docker/cache/Dockerfile.base-pruned - save_cache: paths: - /tmp/docker/cache/Dockerfile.base-pruned key: dockerfile-cache-v1-{{ .Branch }}-{{ checksum "/tmp/docker/cache/Dockerfile.base-pruned" }} update_feedstock: machine: *machine_kwds working_directory: /home/circleci/nipype steps: - checkout: path: /home/circleci/nipype - run: name: Install hub command: | curl -sSL https://github.com/github/hub/releases/download/v2.2.9/hub-linux-amd64-2.2.9.tgz | \ tar zxv -C /tmp sudo /tmp/hub-linux-amd64-2.2.9/install - run: name: Expand SSH Key command: | (echo "-----BEGIN OPENSSH PRIVATE KEY-----"; echo $GITHUB_ED25519 | fold -w 71; echo "-----END OPENSSH PRIVATE KEY-----") > ~/.ssh/id_ed25519 chmod go-rwx ~/.ssh/id_ed25519 ssh-keygen -y -f ~/.ssh/id_ed25519 > ~/.ssh/id_ed25519.pub - run: name: Set git identity command: | git config --global user.name "nipybot" git config --global user.email "nipybot@gmail.com" - run: name: Update feedstock command: | ssh-add -D ssh-add ~/.ssh/id_ed25519 /home/circleci/nipype/tools/feedstock.sh workflows: version: 2 build_test_deploy: jobs: - compare_base_dockerfiles: filters: branches: ignore: - /docs?\/.*/ 
tags: only: /.*/ - test_pytest: filters: branches: ignore: - /docs?\/.*/ tags: only: /.*/ requires: - compare_base_dockerfiles - deploy_dockerhub: filters: branches: only: master tags: only: /.*/ requires: - test_pytest - update_feedstock: context: nipybot filters: branches: only: /rel\/\d.*/ tags: only: /.*/ nipype-1.7.0/.circleci/test_pytest.sh000066400000000000000000000003031413403311400175670ustar00rootroot00000000000000#!/bin/bash docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work -e CI_SKIP_TEST=1 -e NIPYPE_RESOURCE_MONITOR=1 "${DOCKER_IMAGE}:py38" /usr/bin/run_pytests.sh nipype-1.7.0/.coveragerc000066400000000000000000000000241413403311400151320ustar00rootroot00000000000000[run] branch = True nipype-1.7.0/.dockerignore000066400000000000000000000005351413403311400154740ustar00rootroot00000000000000# python cache __pycache__ **/__pycache__ **/*.pyc *.pyc # python distribution build/**/* build dist/**/* dist nipype.egg-info/**/* nipype.egg-info .eggs/**/* .eggs src/**/* src/ # git .gitignore .git/**/* .git # other doc/**/* doc/ .cache/ .circle/**/* .circle/ circle.yml Vagrantfile .travis.yml .mailmap # Previous coverage results .coverage nipype-1.7.0/.et000066400000000000000000000001341413403311400134240ustar00rootroot00000000000000{ "bad_versions" : [ "1.2.1", "1.2.3", "1.3.0"] } nipype-1.7.0/.git-blame-ignore-revs000066400000000000000000000014331413403311400171160ustar00rootroot00000000000000# Commits with messages like "STY: black" or "run black" 12deb959cccc431fb8222cc5854f1c92a0080021 f64bf338f630a9ee5cbe7a3ec98c68292897e720 83358d7f17aac07cb90d0330f11ea2322e2974d8 faef7d0f93013a700c882f709e98fb3cd36ebb03 d50c1858564c0b3073fb23c54886a0454cb66afa 417b8897a116fcded5000e21e2b6ccbe29452a52 aaf677a87f64c485f3e305799e4a5dc73b69e5fb f763008442d88d8ce00ec266698268389415f8d6 b1eccafd4edc8503b02d715f5b5f6f783520fdf9 70db90349598cc7f26a4a513779529fba7d0a797 6c1d91d71f6f0db0e985bd2adc34206442b0653d 
97bdbd5f48ab242de5288ba4715192a27619a803 78fa360f5b785224349b8b85b07e510d2233bb63 7f85f43a34de8bff8e634232c939b17cee8e8fc5 9c50b5daa797def5672dd057155b0e2c658853e2 47194993ae14aceeec436cfb3769def667196668 75653feadc6667d5313d83e9c62a5d5819771a9c 497b44d680eee0892fa59c6aaaae22a17d70a536 nipype-1.7.0/.gitattributes000066400000000000000000000000441413403311400157060ustar00rootroot00000000000000nipype/COMMIT_INFO.txt export-subst nipype-1.7.0/.github/000077500000000000000000000000001413403311400143555ustar00rootroot00000000000000nipype-1.7.0/.github/ISSUE_TEMPLATE.md000066400000000000000000000010601413403311400170570ustar00rootroot00000000000000### Summary ### Actual behavior ### Expected behavior ### How to replicate the behavior ### Script/Workflow details Please put URL to code or code here (if not too long). ### Platform details: ``` ``` ### Execution environment Choose one - Container [Tag: ???] - My python environment inside container [Base Tag: ???] - My python environment outside container nipype-1.7.0/.github/PULL_REQUEST_TEMPLATE.md000066400000000000000000000015461413403311400201640ustar00rootroot00000000000000 ## Summary Fixes # . ## List of changes proposed in this PR (pull-request) ## Acknowledgment - [ ] \(Mandatory\) I acknowledge that this contribution will be available under the Apache 2 license. 
nipype-1.7.0/.github/release-drafter.yml000066400000000000000000000000541413403311400201440ustar00rootroot00000000000000template: | ## What's Changed $CHANGES nipype-1.7.0/.github/workflows/000077500000000000000000000000001413403311400164125ustar00rootroot00000000000000nipype-1.7.0/.github/workflows/contrib.yml000066400000000000000000000040751413403311400206030ustar00rootroot00000000000000name: Contribution checks # This checks validate contributions meet baseline checks # # * specs - Ensure make on: push: branches: - master - maint/* pull_request: branches: - master - maint/* defaults: run: shell: bash jobs: stable: # Check each OS, all supported Python, minimum versions and latest releases runs-on: ${{ matrix.os }} strategy: matrix: os: ['ubuntu-18.04'] python-version: [3.8] nipype-extras: ['dev'] check: ['specs', 'style'] env: DEPENDS: "" CHECK_TYPE: ${{ matrix.check }} NIPYPE_EXTRAS: ${{ matrix.nipype-extras }} EXTRA_PIP_FLAGS: "" INSTALL_DEB_DEPENDENCIES: false INSTALL_TYPE: pip CI_SKIP_TEST: 1 steps: - uses: actions/checkout@v2 with: submodules: recursive fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - name: Display Python version run: python -c "import sys; print(sys.version)" - name: Create virtual environment run: tools/ci/create_venv.sh - name: Build archive run: | source tools/ci/build_archive.sh echo "ARCHIVE=$ARCHIVE" >> $GITHUB_ENV - name: Install Debian dependencies run: tools/ci/install_deb_dependencies.sh if: ${{ matrix.os == 'ubuntu-18.04' }} - name: Install dependencies run: tools/ci/install_dependencies.sh - name: Install Nipype run: tools/ci/install.sh - name: Run tests run: tools/ci/check.sh if: ${{ matrix.check != 'skiptests' }} - uses: codecov/codecov-action@v1 with: file: coverage.xml if: ${{ always() }} - name: Upload pytest test results uses: actions/upload-artifact@v2 with: name: pytest-results-${{ matrix.os }}-${{ matrix.python-version 
}} path: test-results.xml if: ${{ always() && matrix.check == 'test' }} nipype-1.7.0/.github/workflows/package.yml000066400000000000000000000032131413403311400205270ustar00rootroot00000000000000name: Packaging on: push: branches: - master - maint/* - rel/* tags: - '*' defaults: run: shell: bash jobs: package: # Build packages and upload runs-on: ${{ matrix.os }} strategy: matrix: include: - os: ubuntu-latest python-version: 3.8 steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - name: Display Python version run: python -c "import sys; print(sys.version)" - name: Create virtual environment run: tools/ci/create_venv.sh - name: Build sdist run: tools/ci/build_archive.sh env: INSTALL_TYPE: sdist - name: Build wheel run: tools/ci/build_archive.sh env: INSTALL_TYPE: wheel ### Temporary - name: Check packages with twine run: | pip install twine twine check dist/* ### Switch back to this if we figure out who has permissions on test.pypi.org # - name: Test PyPI upload # uses: pypa/gh-action-pypi-publish@master # with: # user: __token__ # password: ${{ secrets.TEST_PYPI_API_TOKEN }} # repository_url: https://test.pypi.org/legacy/ # skip_existing: true - name: Upload to PyPI (on tags) if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') uses: pypa/gh-action-pypi-publish@master with: user: __token__ password: ${{ secrets.PYPI_API_TOKEN }} nipype-1.7.0/.github/workflows/tests.yml000066400000000000000000000051461413403311400203050ustar00rootroot00000000000000name: Stable tests # This file tests the claimed support range of nipype including # # * Operating systems: Linux, OSX # * Dependencies: minimum requirements, optional requirements # * Installation methods: setup.py, sdist, wheel, archive on: push: branches: - master - maint/* pull_request: branches: - master - maint/* defaults: run: shell: bash jobs: stable: # Check each OS, all supported 
Python, minimum versions and latest releases runs-on: ${{ matrix.os }} strategy: matrix: os: ['ubuntu-18.04'] python-version: [3.6, 3.7, 3.8, 3.9] check: ['test'] pip-flags: [''] depends: ['REQUIREMENTS'] deb-depends: [false] nipype-extras: ['doc,tests,profiler'] include: - os: ubuntu-18.04 python-version: 3.8 check: test pip-flags: '' depends: REQUIREMENTS deb-depends: true nipype-extras: doc,tests,nipy,profiler,duecredit,ssh env: DEPENDS: ${{ matrix.depends }} CHECK_TYPE: ${{ matrix.check }} EXTRA_PIP_FLAGS: ${{ matrix.pip-flags }} INSTALL_DEB_DEPENDENCIES: ${{ matrix.deb-depends }} NIPYPE_EXTRAS: ${{ matrix.nipype-extras }} INSTALL_TYPE: pip CI_SKIP_TEST: 1 steps: - uses: actions/checkout@v2 with: submodules: recursive fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - name: Display Python version run: python -c "import sys; print(sys.version)" - name: Create virtual environment run: tools/ci/create_venv.sh - name: Build archive run: | source tools/ci/build_archive.sh echo "ARCHIVE=$ARCHIVE" >> $GITHUB_ENV - name: Install Debian dependencies run: tools/ci/install_deb_dependencies.sh if: ${{ matrix.os == 'ubuntu-18.04' }} - name: Install dependencies run: tools/ci/install_dependencies.sh - name: Install Nipype run: tools/ci/install.sh - name: Run tests run: tools/ci/check.sh if: ${{ matrix.check != 'skiptests' }} - uses: codecov/codecov-action@v1 with: file: coverage.xml if: ${{ always() }} - name: Upload pytest test results uses: actions/upload-artifact@v2 with: name: pytest-results-${{ matrix.os }}-${{ matrix.python-version }} path: test-results.xml if: ${{ always() && matrix.check == 'test' }} nipype-1.7.0/.gitignore000066400000000000000000000007031413403311400150050ustar00rootroot00000000000000/build /dist /nipype.egg-info /MANIFEST /nipype/build /nipype/nipype.egg-info /doc/_build /doc/preproc /doc/users/examples /doc/api/generated *.pyc *.so .project .settings 
.pydevproject .eggs .idea/ /documentation.zip .DS_Store nipype/testing/data/von-ray_errmap.nii.gz nipype/testing/data/von_errmap.nii.gz nipype/testing/data/.proc* crash*.pklz .coverage htmlcov/ __pycache__/ *~ .*.swp .ipynb_checkpoints/ .ruby-version .pytest_cache .vscode/ venv/nipype-1.7.0/.mailmap000066400000000000000000000250551413403311400144450ustar00rootroot00000000000000Abel A. González Orozco Aimi Watanabe Aimi Watanabe stymy Alejandro Tabas Alejandro Tabas Alejandro de la Vega Alejandro de la Vega Alexander Schaefer Alexander Schaefer Alexander Schaefer Alexandre M. Savio Alexandre M. Savio Andrea Dell'Orco Andrew Floren Andrey Chetverikov Andrey Chetverikov Anibal Sólon Heinsfeld Anisha Keshavan Anisha Keshavan Anna Doll <45283972+AnnaD15@users.noreply.github.com> Ariel Rokem Ariel Rokem Arman Eshaghi Ashely Gillman Basille Pinsard Basille Pinsard Ben Cipollini Benjamin Acland Benjamin Meyers <34044274+BenjaminMey@users.noreply.github.com> Benjamin Meyers <34044274+BenjaminMey@users.noreply.github.com> BenjaminMey Benjamin Yvernault Benjamin Yvernault Blake Dewey Blake Dewey Blake Dewey Brendan Moloney Caroline Froehlich Christopher J. Markiewicz Christopher J. Markiewicz Christopher J. Markiewicz Christopher John Steele Cindee Madison Colin Buchanan Colin Buchanan Colin Buchanan Daniel Brenner Daniel Clark Daniel Geisler Daniel Geisler Daniel Geisler <3453485+daniel-ge@users.noreply.github.com> Daniel Ginsburg Daniel McNamee David Ellis David Ellis David Mordom David Welch Dimitri Papadopoulos Orfanos Dmytro Belevtsoff Dorian Vogel Dylan M. Nielson Dylan M. 
Nielson Eduard Ort Elizabeth DuPre Erik Ziegler Erik Ziegler Erik Ziegler Feilong Ma Fernando Pérez-García Franz Liem Franz Liem Fred Loney Gael Varoquaux Gavin Cooper Gilles de Hollander Gio Piantoni Guillaume Flandin Hans Johnson Henry Jones Horea Christian Hrvoje Stojic Isaac Schwabacher Jakub Kaczmarzyk James Kent James Kent Fred Mertz Janosch Linkersdörfer Jason Wong Jason Wong Jens Kleesiek Jessica Forbes Jérémy Guillon Joerg Stadler Joerg Stadler Joerg Stadler John A. Lee John A. Lee Joke Durnez Jordi Huguet Josh Warner Junhao WEN Kai Schlamp Katherine Bottenhorn Kesshi Jordan Kesshi Jordan Kesshi Jordan Kesshi Jordan Kesshi Jordan Kesshi Jordan Kevin Sitek Kevin Sitek Sin Kim Sin Kim Kornelius Podranski Krzysztof J. Gorgolewski Krzysztof J. Gorgolewski Krzysztof J. Gorgolewski Krzysztof J. Gorgolewski Krzysztof J. Gorgolewski Krzysztof J. Gorgolewski Krzysztof J. Gorgolewski Kshitij Chawla Leonie Lampe Lukas Snoek Marcel Falkiewicz Martin Perez-Guevara Mathias Goncalves Mathias Goncalves Mathieu Dubois Mathieu Dubois Matteo Mancini Matteo Visconti di Oleggio Castello Matteo Visconti di Oleggio Castello Matthew Cieslak Michael Clark Michael Dayan Michael Dayan Michael Dayan mick-d Michael Dayan Michael Joseph Michael Joseph Michael Philipp Notter Michael Philipp Notter Michael Waskom Michael Waskom Michael Waskom Miguel Molina-Romero Murat Bilgel Nat Lee Ole Numssen Oliver Contier Olivia Stanley Oscar Esteban Oscar Esteban Pablo Polosecki Pablo Polosecki Paul Sharp Ranjit Khanuja Rastko Ćirić Rastko Ćirić Rastko Ćirić Raunak Jalan Raunak Jalan <41023976+RaunakJalan@users.noreply.github.com> Ross Markello Russell Poldrack Russell Poldrack Salma Bougacha Sami Kristian Andberg Satrajit Ghosh Sebastian Urchs Serge Koudoro Sharad Sikka Shariq Iqbal Shariq Iqbal Shoshana Berleant Shoshana Berleant Shoshana Berleant Shoshana Berleant Ubuntu Simon Rothmei Simon Rothmei Siqi Liu Steven Giavasis Steven Giavasis Steven Giavasis Steven Tilley Sulantha Mathotaarachchi 
Tim Robert-Fitzgerald Tom Close Tom Close Tristan Glatard Victor Férat Victor Férat Victor Férat Victor Saase Weijie Huang William Triplett Wolfgang Pauli Xiangzhen Kong Yaroslav Halchenko nipype-1.7.0/.pre-commit-config.yaml000066400000000000000000000006211413403311400172750ustar00rootroot00000000000000# See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v3.4.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - id: check-yaml - id: check-added-large-files - repo: https://github.com/psf/black rev: 20.8b1 hooks: - id: black nipype-1.7.0/.readthedocs.yml000066400000000000000000000010551413403311400161040ustar00rootroot00000000000000# Read the Docs configuration file # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details # Required version: 2 # Build documentation in the docs/ directory with Sphinx sphinx: configuration: doc/conf.py # Optionally build your docs in additional formats such as PDF and ePub formats: - htmlzip # Optionally set the version of Python and requirements required to build your docs python: version: 3.7 install: - requirements: doc/requirements.txt - method: pip path: . 
extra_requirements: - doc nipype-1.7.0/.zenodo.json000066400000000000000000000556461413403311400153040ustar00rootroot00000000000000{ "creators": [ { "affiliation": "Department of Psychology, Stanford University", "name": "Esteban, Oscar", "orcid": "0000-0001-8435-6191" }, { "affiliation": "Stanford University", "name": "Markiewicz, Christopher J.", "orcid": "0000-0002-6533-164X" }, { "name": "Burns, Christopher" }, { "affiliation": "MIT", "name": "Goncalves, Mathias", "orcid": "0000-0002-7252-7771" }, { "affiliation": "MIT", "name": "Jarecka, Dorota", "orcid": "0000-0001-8282-2988" }, { "affiliation": "Independent", "name": "Ziegler, Erik", "orcid": "0000-0003-1857-8129" }, { "name": "Berleant, Shoshana" }, { "affiliation": "The University of Iowa", "name": "Ellis, David Gage", "orcid": "0000-0002-3718-6836" }, { "name": "Pinsard, Basile" }, { "name": "Madison, Cindee" }, { "affiliation": "Department of Psychology, Stanford University", "name": "Waskom, Michael" }, { "affiliation": "The Laboratory for Investigative Neurophysiology (The LINE), Department of Radiology and Department of Clinical Neurosciences, Lausanne, Switzerland; Center for Biomedical Imaging (CIBM), Lausanne, Switzerland", "name": "Notter, Michael Philipp", "orcid": "0000-0002-5866-047X" }, { "affiliation": "Developer", "name": "Clark, Daniel", "orcid": "0000-0002-8121-8954" }, { "affiliation": "Klinikum rechts der Isar, TUM. 
ACPySS", "name": "Manh\u00e3es-Savio, Alexandre", "orcid": "0000-0002-6608-6885" }, { "affiliation": "UC Berkeley", "name": "Clark, Dav", "orcid": "0000-0002-3982-4416" }, { "affiliation": "University of California, San Francisco", "name": "Jordan, Kesshi", "orcid": "0000-0001-6313-0580" }, { "affiliation": "Mayo Clinic, Neurology, Rochester, MN, USA", "name": "Dayan, Michael", "orcid": "0000-0002-2666-0969" }, { "affiliation": "Dartmouth College: Hanover, NH, United States", "name": "Halchenko, Yaroslav O.", "orcid": "0000-0003-3456-2493" }, { "name": "Loney, Fred" }, { "affiliation": "Florida International University", "name": "Salo, Taylor", "orcid": "0000-0001-9813-3167" }, { "affiliation": "Department of Electrical and Computer Engineering, Johns Hopkins University", "name": "Dewey, Blake E", "orcid": "0000-0003-4554-5058" }, { "affiliation": "University of Iowa", "name": "Johnson, Hans", "orcid": "0000-0001-9513-2660" }, { "affiliation": "Molecular Imaging Research Center, CEA, France", "name": "Bougacha, Salma" }, { "affiliation": "UC Berkeley - UCSF Graduate Program in Bioengineering", "name": "Keshavan, Anisha", "orcid": "0000-0003-3554-043X" }, { "name": "Yvernault, Benjamin" }, { "name": "Hamalainen, Carlo", "orcid": "0000-0001-7655-3830" }, { "affiliation": "Institute for Biomedical Engineering, ETH and University of Zurich", "name": "Christian, Horea", "orcid": "0000-0001-7037-2449" }, { "affiliation": "Stanford University", "name": "\u0106iri\u0107 , Rastko", "orcid": "0000-0001-6347-7939" }, { "name": "Dubois, Mathieu" }, { "affiliation": "The Centre for Addiction and Mental Health", "name": "Joseph, Michael", "orcid": "0000-0002-0068-230X" }, { "affiliation": "UC San Diego", "name": "Cipollini, Ben", "orcid": "0000-0002-7782-0790" }, { "affiliation": "Holland Bloorview Kids Rehabilitation Hospital", "name": "Tilley II, Steven", "orcid": "0000-0003-4853-5082" }, { "affiliation": "Dartmouth College", "name": "Visconti di Oleggio Castello, Matteo", 
"orcid": "0000-0001-7931-5272" }, { "affiliation": "University of Texas at Austin", "name": "De La Vega, Alejandro", "orcid": "0000-0001-9062-3778" }, { "affiliation": "Shattuck Lab, UCLA Brain Mapping Center", "name": "Wong, Jason" }, { "affiliation": "MIT", "name": "Kaczmarzyk, Jakub", "orcid": "0000-0002-5544-7577" }, { "affiliation": "Research Group Neuroanatomy and Connectivity, Max Planck Institute for Human Cognitive and Brain Sciences, Leipzig, Germany", "name": "Huntenburg, Julia M.", "orcid": "0000-0003-0579-9811" }, { "affiliation": "National Institutes of Health", "name": "Clark, Michael G. " }, { "affiliation": "Concordia University", "name": "Benderoff, Erin" }, { "name": "Erickson, Drew" }, { "affiliation": "Neuroscience Program, University of Iowa", "name": "Kent, James D.", "orcid": "0000-0002-4892-2659" }, { "affiliation": "Otto-von-Guericke-University Magdeburg, Germany", "name": "Hanke, Michael", "orcid": "0000-0001-6398-6370" }, { "affiliation": "Child Mind Institute", "name": "Giavasis, Steven" }, { "name": "Moloney, Brendan" }, { "affiliation": "SRI International", "name": "Nichols, B. 
Nolan", "orcid": "0000-0003-1099-3328" }, { "name": "Tungaraza, Rosalia" }, { "affiliation": "Child Mind Institute", "name": "Frohlich, Caroline" }, { "affiliation": "Athena EPI, Inria Sophia-Antipolis", "name": "Wassermann, Demian", "orcid": "0000-0001-5194-6056" }, { "affiliation": "Vrije Universiteit, Amsterdam", "name": "de Hollander, Gilles", "orcid": "0000-0003-1988-5091" }, { "affiliation": "Charit\u00e8 Universit\u00e4tsmedizin Berlin, Germany", "name": "Dell'Orco, Andrea", "orcid": "0000-0002-3964-8360" }, { "affiliation": "University College London", "name": "Eshaghi, Arman", "orcid": "0000-0002-6652-3512" }, { "name": "Millman, Jarrod" }, { "affiliation": "University College London", "name": "Mancini, Matteo", "orcid": "0000-0001-7194-4568" }, { "affiliation": "University of Sydney", "name": "Close, Thomas", "orcid": "0000-0002-4160-2134" }, { "affiliation": "National Institute of Mental Health", "name": "Nielson, Dylan M.", "orcid": "0000-0003-4613-6643" }, { "affiliation": "INRIA", "name": "Varoquaux, Gael", "orcid": "0000-0003-1076-5122" }, { "name": "Watanabe, Aimi" }, { "name": "Mordom, David" }, { "affiliation": "Charite Universitatsmedizin Berlin, Germany", "name": "Waller, Lea", "orcid": "0000-0002-3239-6957" }, { "affiliation": "CIBIT, UC", "name": "Machado, F\u00e1tima", "orcid": "0000-0001-8878-1750" }, { "affiliation": "ARAMIS LAB, Brain and Spine Institute (ICM), Paris, France.", "name": "Guillon, Je\u0301re\u0301my", "orcid": "0000-0002-2672-7510" }, { "affiliation": "Indiana University, IN, USA", "name": "Koudoro, Serge" }, { "affiliation": "Penn Statistics in Imaging and Visualization Endeavor, University of Pennsylvania", "name": "Robert-Fitzgerald, Timothy", "orcid": "0000-0001-8303-8001" }, { "affiliation": "Donders Institute for Brain, Cognition and Behavior, Center for Cognitive Neuroimaging", "name": "Chetverikov, Andrey", "orcid": "0000-0003-2767-6310" }, { "affiliation": "The University of Washington eScience Institute", "name": 
"Rokem, Ariel", "orcid": "0000-0003-0679-1985" }, { "affiliation": "Washington University in St Louis", "name": "Acland, Benjamin", "orcid": "0000-0001-6392-6634" }, { "name": "Forbes, Jessica" }, { "affiliation": "Montreal Neurological Institute and Hospital", "name": "Markello, Ross", "orcid": "0000-0003-1057-1336" }, { "affiliation": "Australian eHealth Research Centre, Commonwealth Scientific and Industrial Research Organisation; University of Queensland", "name": "Gillman, Ashley", "orcid": "0000-0001-9130-1092" }, { "affiliation": "State Key Laboratory of Cognitive Neuroscience and Learning & IDG/McGovern Institute for Brain Research, Beijing Normal University, Beijing, China; Max Planck Institute for Psycholinguistics, Nijmegen, the Netherlands", "name": "Kong, Xiang-Zhen", "orcid": "0000-0002-0805-1350" }, { "affiliation": "Division of Psychological and Social Medicine and Developmental Neuroscience, Faculty of Medicine, Technische Universit\u00e4t Dresden, Dresden, Germany", "name": "Geisler, Daniel", "orcid": "0000-0003-2076-5329" }, { "name": "Salvatore, John" }, { "affiliation": "CNRS LTCI, Telecom ParisTech, Universit\u00e9 Paris-Saclay", "name": "Gramfort, Alexandre", "orcid": "0000-0001-9791-4404" }, { "affiliation": "Department of Psychology, University of Bielefeld, Bielefeld, Germany.", "name": "Doll, Anna", "orcid": "0000-0002-0799-0831" }, { "name": "Buchanan, Colin" }, { "affiliation": "Montreal Neurological Institute and Hospital", "name": "DuPre, Elizabeth", "orcid": "0000-0003-1358-196X" }, { "affiliation": "The University of Sydney", "name": "Liu, Siqi" }, { "affiliation": "National University Singapore", "name": "Schaefer, Alexander", "orcid": "0000-0001-6488-4739" }, { "affiliation": "UniversityHospital Heidelberg, Germany", "name": "Kleesiek, Jens" }, { "affiliation": "Nathan s Kline institute for psychiatric research", "name": "Sikka, Sharad" }, { "name": "Schwartz, Yannick" }, { "affiliation": "The University of Iowa", "name": 
"Ghayoor, Ali", "orcid": "0000-0002-8858-1254" }, { "affiliation": "NIMH IRP", "name": "Lee, John A.", "orcid": "0000-0001-5884-4247" }, { "name": "Mattfeld, Aaron" }, { "affiliation": "University of Washington", "name": "Richie-Halford, Adam", "orcid": "0000-0001-9276-9084" }, { "affiliation": "University of Zurich", "name": "Liem, Franz", "orcid": "0000-0003-0646-4810" }, { "affiliation": "Neurospin/Unicog/Inserm/CEA", "name": "Perez-Guevara, Martin Felipe", "orcid": "0000-0003-4497-861X" }, { "name": "Heinsfeld, Anibal S\u00f3lon", "orcid": "0000-0002-2050-0614" }, { "name": "Haselgrove, Christian" }, { "affiliation": "Department of Psychology, Stanford University; Parietal, INRIA", "name": "Durnez, Joke", "orcid": "0000-0001-9030-2202" }, { "affiliation": "MPI CBS Leipzig, Germany", "name": "Lampe, Leonie" }, { "name": "Poldrack, Russell" }, { "affiliation": "1 McGill Centre for Integrative Neuroscience (MCIN), Ludmer Centre for Neuroinformatics and Mental Health, Montreal Neurological Institute (MNI), McGill University, Montr\u00e9al, 3801 University Street, WB-208, H3A 2B4, Qu\u00e9bec, Canada. 
2 University of Lyon, CNRS, INSERM, CREATIS., Villeurbanne, 7, avenue Jean Capelle, 69621, France.", "name": "Glatard, Tristan", "orcid": "0000-0003-2620-5883" }, { "affiliation": "Max Planck Institute for Human Cognitive and Brain Sciences, Leipzig, Germany.", "name": "Tabas, Alejandro", "orcid": "0000-0002-8643-1543" }, { "name": "Cumba, Chad" }, { "affiliation": "University College London", "name": "P\u00e9rez-Garc\u00eda, Fernando", "orcid": "0000-0001-9090-3024" }, { "name": "Blair, Ross" }, { "affiliation": "Duke University", "name": "Iqbal, Shariq", "orcid": "0000-0003-2766-8425" }, { "affiliation": "Sagol School of Neuroscience, Tel Aviv University", "name": "Baratz, Zvi" }, { "affiliation": "University of Iowa", "name": "Welch, David" }, { "affiliation": "Max Planck Institute for Human Cognitive and Brain Sciences", "name": "Contier, Oliver", "orcid": "0000-0002-2983-4709" }, { "affiliation": "Department of Psychology, Stanford University", "name": "Triplett, William", "orcid": "0000-0002-9546-1306" }, { "affiliation": "Child Mind Institute", "name": "Craddock, R. 
Cameron", "orcid": "0000-0002-4950-1303" }, { "name": "Correa, Carlos" }, { "affiliation": "CEA", "name": "Papadopoulos Orfanos, Dimitri", "orcid": "0000-0002-1242-8990" }, { "affiliation": "Leibniz Institute for Neurobiology", "name": "Stadler, J\u00f6rg", "orcid": "0000-0003-4313-129X" }, { "affiliation": "Mayo Clinic", "name": "Warner, Joshua", "orcid": "0000-0003-3579-4835" }, { "affiliation": "Yale University; New Haven, CT, United States", "name": "Sisk, Lucinda M.", "orcid": "0000-0003-4900-9770" }, { "name": "Falkiewicz, Marcel" }, { "affiliation": "University of Illinois Urbana Champaign", "name": "Sharp, Paul" }, { "name": "Rothmei, Simon" }, { "affiliation": "Korea Advanced Institute of Science and Technology", "name": "Kim, Sin", "orcid": "0000-0003-4652-3758" }, { "name": "Weinstein, Alejandro" }, { "affiliation": "University of Pennsylvania", "name": "Kahn, Ari E.", "orcid": "0000-0002-2127-0507" }, { "affiliation": "Harvard University - Psychology", "name": "Kastman, Erik", "orcid": "0000-0001-7221-9042" }, { "affiliation": "Florida International University", "name": "Bottenhorn, Katherine", "orcid": "0000-0002-7796-8795" }, { "affiliation": "GIGA Institute", "name": "Grignard, Martin", "orcid": "0000-0001-5549-1861" }, { "affiliation": "Boston University", "name": "Perkins, L. 
Nathan" }, { "name": "Zhou, Dale" }, { "name": "Bielievtsov, Dmytro", "orcid": "0000-0003-3846-7696" }, { "affiliation": "Sagol School of Neuroscience, Tel Aviv University", "name": "Ben-Zvi, Gal" }, { "affiliation": "University of Newcastle, Australia", "name": "Cooper, Gavin", "orcid": "0000-0002-7186-5293" }, { "affiliation": "Max Planck UCL Centre for Computational Psychiatry and Ageing Research, University College London", "name": "Stojic, Hrvoje", "orcid": "0000-0002-9699-9052" }, { "affiliation": "German Institute for International Educational Research", "name": "Linkersd\u00f6rfer, Janosch", "orcid": "0000-0002-1577-1233" }, { "name": "Renfro, Mandy" }, { "name": "Hinds, Oliver" }, { "affiliation": "Dept of Medical Biophysics, Univeristy of Western Ontario", "name": "Stanley, Olivia" }, { "name": "K\u00fcttner, Ren\u00e9" }, { "affiliation": "California Institute of Technology", "name": "Pauli, Wolfgang M.", "orcid": "0000-0002-0966-0254" }, { "affiliation": "NIMH, Scientific and Statistical Computing Core", "name": "Glen, Daniel", "orcid": "0000-0001-8456-5647" }, { "affiliation": "Florida International University", "name": "Kimbler, Adam", "orcid": "0000-0001-5885-9596" }, { "affiliation": "University of Pittsburgh", "name": "Meyers, Benjamin", "orcid": "0000-0001-9137-4363" }, { "name": "Tarbert, Claire" }, { "name": "Ginsburg, Daniel" }, { "name": "Haehn, Daniel" }, { "affiliation": "Max Planck Research Group for Neuroanatomy & Connectivity, Max Planck Institute for Human Cognitive and Brain Sciences, Leipzig, Germany", "name": "Margulies, Daniel S.", "orcid": "0000-0002-8880-9204" }, { "affiliation": "CNRS, UMS3552 IRMaGe", "name": "Condamine, Eric", "orcid": "0000-0002-9533-3769" }, { "affiliation": "Dartmouth College", "name": "Ma, Feilong", "orcid": "0000-0002-6838-3971" }, { "affiliation": "University College London", "name": "Malone, Ian B.", "orcid": "0000-0001-7512-7856" }, { "affiliation": "University of Amsterdam", "name": "Snoek, Lukas", 
"orcid": "0000-0001-8972-204X" }, { "name": "Brett, Matthew" }, { "affiliation": "Department of Neuropsychiatry, University of Pennsylvania", "name": "Cieslak, Matthew", "orcid": "0000-0002-1931-4734" }, { "name": "Hallquist, Michael" }, { "affiliation": "Technical University Munich", "name": "Molina-Romero, Miguel", "orcid": "0000-0001-8054-0426" }, { "affiliation": "National Institute on Aging, Baltimore, MD, USA", "name": "Bilgel, Murat", "orcid": "0000-0001-5042-7422" }, { "name": "Lee, Nat", "orcid": "0000-0001-9308-9988" }, { "name": "Jalan, Raunak" }, { "name": "Inati, Souheil" }, { "affiliation": "Institute of Neuroinformatics, ETH/University of Zurich", "name": "Gerhard, Stephan", "orcid": "0000-0003-4454-6171" }, { "affiliation": "Enigma Biomedical Group", "name": "Mathotaarachchi, Sulantha" }, { "name": "Saase, Victor" }, { "affiliation": "Washington University in St Louis", "name": "Van, Andrew", "orcid": "0000-0002-8787-0943" }, { "affiliation": "MPI-CBS; McGill University", "name": "Steele, Christopher John", "orcid": "0000-0003-1656-7928" }, { "affiliation": "Vrije Universiteit Amsterdam", "name": "Ort, Eduard" }, { "affiliation": "Stanford University", "name": "Lerma-Usabiaga, Garikoitz", "orcid": "0000-0001-9800-4816" }, { "name": "Schwabacher, Isaac" }, { "name": "Arias, Jaime" }, { "name": "Lai, Jeff" }, { "affiliation": "Child Mind Institute / Nathan Kline Institute", "name": "Pellman, John", "orcid": "0000-0001-6810-4461" }, { "affiliation": "BarcelonaBeta Brain Research Center", "name": "Huguet, Jordi", "orcid": "0000-0001-8420-4833" }, { "affiliation": "University of Pennsylvania", "name": "Junhao WEN", "orcid": "0000-0003-2077-3070" }, { "affiliation": "TIB \u2013 Leibniz Information Centre for Science and Technology and University Library, Hannover, Germany", "name": "Leinweber, Katrin", "orcid": "0000-0001-5135-5758" }, { "affiliation": "INRIA-Saclay, Team Parietal", "name": "Chawla, Kshitij", "orcid": "0000-0002-7517-6321" }, { 
"affiliation": "Institute of Imaging & Computer Vision, RWTH Aachen University, Germany", "name": "Weninger, Leon" }, { "name": "Modat, Marc" }, { "name": "Harms, Robbert" }, { "affiliation": "University of Helsinki", "name": "Andberg, Sami Kristian", "orcid": "0000-0002-5650-3964" }, { "name": "Matsubara, K" }, { "affiliation": "Universidad de Guadalajara", "name": "Gonz\u00e1lez Orozco, Abel A." }, { "affiliation": "ARAMIS Lab", "name": "Routier, Alexandre", "orcid": "0000-0003-1603-8049" }, { "name": "Marina, Ana" }, { "name": "Davison, Andrew" }, { "affiliation": "The University of Texas at Austin", "name": "Floren, Andrew", "orcid": "0000-0003-3618-2056" }, { "name": "Park, Anne" }, { "name": "Cheung, Brian" }, { "name": "McDermottroe, Conor" }, { "affiliation": "University of Cambridge", "name": "McNamee, Daniel", "orcid": "0000-0001-9928-4960" }, { "name": "Shachnev, Dmitry" }, { "affiliation": "University of Applied Sciences and Arts Northwestern Switzerland", "name": "Vogel, Dorian", "orcid": "0000-0003-3445-576X" }, { "name": "Flandin, Guillaume" }, { "affiliation": "Stanford University and the University of Chicago", "name": "Jones, Henry", "orcid": "0000-0001-7719-3646" }, { "affiliation": "Athinoula A. 
Martinos Center for Biomedical Imaging, Department of Radiology, Massachusetts General Hospital, Charlestown, MA, USA", "name": "Gonzalez, Ivan", "orcid": "0000-0002-6451-6909" }, { "name": "Varada, Jan" }, { "name": "Schlamp, Kai" }, { "name": "Podranski, Kornelius" }, { "affiliation": "State Key Laboratory of Cognitive Neuroscience and Learning & IDG/McGovern Institute for Brain Research, Beijing Normal University, Beijing, China", "name": "Huang, Lijie", "orcid": "0000-0002-9910-5069" }, { "name": "Noel, Maxime" }, { "affiliation": "Medical Imaging & Biomarkers, Bioclinica, Newark, CA, USA.", "name": "Pannetier, Nicolas", "orcid": "0000-0002-0744-5155" }, { "affiliation": "Max Planck Institute for Human Cognitive and Brain Sciences", "name": "Numssen, Ole", "orcid": "0000-0001-7164-2682" }, { "name": "Khanuja, Ranjeet" }, { "affiliation": "University of Waterloo", "name": "Mukhometzianov, Rinat", "orcid": "0000-0003-1274-4827" }, { "name": "Urchs, Sebastian" }, { "name": "Nickson, Thomas" }, { "affiliation": "State Key Laboratory of Cognitive Neuroscience and Learning & IDG/McGovern Institute for Brain Research, Beijing Normal University, Beijing, China", "name": "Huang, Lijie", "orcid": "0000-0002-9910-5069" }, { "affiliation": "Duke University", "name": "Broderick, William", "orcid": "0000-0002-8999-9003" }, { "affiliation": "Weill Cornell Medicine", "name": "Xie, Xihe", "orcid": "0000-0001-6595-2473" }, { "name": "Tambini, Arielle" }, { "affiliation": "Max Planck Institute for Human Cognitive and Brain Sciences, Leipzig, Germany.", "name": "Mihai, Paul Glad", "orcid": "0000-0001-5715-6442" }, { "affiliation": "Department of Psychology, Stanford University", "name": "Gorgolewski, Krzysztof J.", "orcid": "0000-0003-3321-7583" }, { "affiliation": "MIT, HMS", "name": "Ghosh, Satrajit", "orcid": "0000-0002-5312-6729" } ], "keywords": [ "neuroimaging", "workflow", "pipeline" ], "license": "Apache-2.0", "upload_type": "software" } 
nipype-1.7.0/CODE_OF_CONDUCT.md000066400000000000000000000062761413403311400156270ustar00rootroot00000000000000# Contributor Covenant Code of Conduct ## Our Pledge In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. ## Our Standards Examples of behavior that contributes to creating a positive environment include: * Using welcoming and inclusive language * Being respectful of differing viewpoints and experiences * Gracefully accepting constructive criticism * Focusing on what is best for the community * Showing empathy towards other community members Examples of unacceptable behavior by participants include: * The use of sexualized language or imagery and unwelcome sexual attention or advances * Trolling, insulting/derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or electronic address, without explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Our Responsibilities Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 
## Scope This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting project leaders Satrajit Ghosh <> or Chris Gorgolewski <>. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project leaders is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html [homepage]: https://www.contributor-covenant.org nipype-1.7.0/CONTRIBUTING.md000066400000000000000000000227551413403311400152610ustar00rootroot00000000000000# Contributing to Nipype Welcome to the Nipype repository! We're excited you're here and want to contribute. These guidelines are designed to make it as easy as possible to get involved. If you have any questions that aren't discussed below, please let us know by opening an [issue][link_issues]! Before you start you'll need to set up a free [GitHub][link_github] account and sign in. Here are some [instructions][link_signupinstructions]. 
If you are not familiar with version control systems such as git, we recommend the [VCS module](http://www.reproducibleimaging.org/module-reproducible-basics/02-vcs/) available from [ReproNim](http://www.reproducibleimaging.org/). Already know what you're looking for in this guide? Jump to the following sections: * [Understanding issue labels](#issue-labels) * [Making a change](#making-a-change) * [How to tag pull requests](#tagging-pull-requests) * [Notes for new code](#notes-for-new-code) * [Recognizing contributions](#recognizing-contributions) ## Issue labels The current list of issue labels are [here][link_labels] and include: * [![Bugs](https://img.shields.io/badge/-bugs-fc2929.svg)][link_bugs] *These issues point to problems in the project.* If you find a new bug, please provide as much information as possible to recreate the error. The [issue template][link_issue_template] will automatically populate any new issue you open, and contains information we've found to be helpful in addressing bug reports. Please fill it out to the best of your ability! If you experience the same bug as one already listed in an open issue, please add any additional information that you have as a comment. * [![Help Wanted](https://img.shields.io/badge/-help%20wanted-c2e0c6.svg)][link_helpwanted] *These issues contain a task that a member of the team has determined we need additional help with.* If you feel that you can contribute to one of these issues, we especially encourage you to do so! Issues that are also labelled as [good-first-issue][link_good_first_issue] are a great place to start if you're looking to make your first contribution. * [![Enhancement](https://img.shields.io/badge/-enhancement-00FF09.svg)][link_enhancement] *These issues are asking for new features to be added to the project.* Please try to make sure that your requested enhancement is distinct from any others that have already been requested or implemented. 
If you find one that's similar but there are subtle differences, please reference the other request in your issue. * [![Orphaned](https://img.shields.io/badge/-orphaned-9baddd.svg)][link_orphaned] *These pull requests have been closed for inactivity.* Before proposing a new pull request, browse through the "orphaned" pull requests. You may find that someone has already made significant progress toward your goal, and you can re-use their unfinished work. An adopted PR should be updated to merge or rebase the current master, and a new PR should be created (see below) that references the original PR. ## Making a change We appreciate all contributions to Nipype, but those accepted fastest will follow a workflow similar to the following: **1. Comment on an existing issue or open a new issue referencing your addition.** This allows other members of the Nipype development team to confirm that you aren't overlapping with work that's currently underway and that everyone is on the same page with the goal of the work you're going to carry out. [This blog][link_pushpullblog] is a nice explanation of why putting this work in up front is so useful to everyone involved. **2. [Fork][link_fork] the [Nipype repository][link_nipype] to your profile.** This is now your own unique copy of the Nipype repository. Changes here won't affect anyone else's work, so it's a safe space to explore edits to the code! You can clone your Nipype repository in order to create a local copy of the code on your machine. To install your version of Nipype, and the dependencies needed for development, in your Python environment, run `pip install -e ".[dev]"` from your local Nipype directory. Make sure to keep your fork up to date with the original Nipype repository. One way to do this is to [configure a new remote named "upstream"](https://help.github.com/articles/configuring-a-remote-for-a-fork/) and to [sync your fork with the upstream repository][link_updateupstreamwiki]. **3. 
Make the changes you've discussed.** If you're adding a new tool from an existing neuroimaging toolkit (e.g., 3dDeconvolve from AFNI), check out the [guide for adding new interfaces to Nipype][link_new_interfaces]. When you are working on your changes, test frequently to ensure you are not breaking the existing code. For more on testing, please see [the testing section of Nipype documentation](http://nipype.readthedocs.io/en/latest/devel/testing_nipype.html). Before pushing your changes to GitHub, run `make check-before-commit`. This will remove trailing spaces, create new auto tests, test the entire package, and build the documentation. If you get no errors, you're ready to submit your changes! It's a good practice to create [a new branch](https://help.github.com/articles/about-branches/) of the repository for a new set of changes. For Python 2.7-compatible fixes, the branch should start from the `maint/1.3.x` branch on the upstream repository. **4. Submit a [pull request][link_pullrequest].** A new pull request for your changes should be created from your fork of the repository. When opening a pull request, please use one of the following prefixes: * **[ENH]** for enhancements * **[FIX]** for bug fixes * **[TST]** for new or updated tests * **[DOC]** for new or updated documentation * **[STY]** for stylistic changes * **[REF]** for refactoring existing code **5. Install pre-commit.** [pre-commit](https://pre-commit.com/) is a git hook for running operations at commit time. To use it in your environment, do `pip install pre-commit` following by `pre-commit install` inside your source directory.
Pull requests should be submitted early and often (please don't mix too many unrelated changes within one PR)! If your pull request is not yet ready to be merged, please also include the **[WIP]** prefix (you can remove it once your PR is ready to be merged). This tells the development team that your pull request is a "work-in-progress", and that you plan to continue working on it. Review and discussion on new code can begin well before the work is complete, and the more discussion the better! The development team may prefer a different path than you've outlined, so it's better to discuss it and get approval at the early stage of your work. One your PR is ready a member of the development team will review your changes to confirm that they can be merged into the main codebase. ## Notes for New Code #### Catching exceptions In general, do not catch exceptions without good reason. For non-fatal exceptions, log the exception as a warning and add more information about what may have caused the error. If you do need to catch an exception, raise a new exception using ``raise NewException("message") from oldException)``. Do not log this, as it creates redundant/confusing logs. #### Testing New code should be tested, whenever feasible. Bug fixes should include an example that exposes the issue. Any new features should have tests that show at least a minimal example. If you're not sure what this means for your code, please ask in your pull request. ## Recognizing contributions We welcome and recognize all contributions from documentation to testing to code development. The development team member who accepts/merges your pull request will update the CHANGES file to reference your contribution. You can see a list of current contributors in our [zenodo file][link_zenodo]. If you are new to the project, don't forget to add your name and affiliation there! ## Thank you! You're awesome. :wave::smiley:
*— Based on contributing guidelines from the [STEMMRoleModels][link_stemmrolemodels] project.* [link_github]: https://github.com/ [link_nipype]: https://github.com/nipy/nipype [link_signupinstructions]: https://help.github.com/articles/signing-up-for-a-new-github-account [link_react]: https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comments [link_issues]: https://github.com/nipy/nipype/issues [link_labels]: https://github.com/nipy/nipype/labels [link_discussingissues]: https://help.github.com/articles/discussing-projects-in-issues-and-pull-requests [link_bugs]: https://github.com/nipy/nipype/labels/bug [link_issue_template]: https://github.com/nipy/nipype/blob/master/.github/ISSUE_TEMPLATE.md [link_helpwanted]: https://github.com/nipy/nipype/labels/help-wanted [link_good_first_issue]: https://github.com/nipy/nipype/issues?q=is%3Aopen+is%3Aissue+label%3Agood-first-issue [link_enhancement]: https://github.com/nipy/nipype/labels/enhancement [link_orphaned]: https://github.com/nipy/nipype/pulls?q=is%3Apr+label%3Aorphaned+is%3Aclosed [link_pullrequest]: https://help.github.com/articles/creating-a-pull-request-from-a-fork/ [link_fork]: https://help.github.com/articles/fork-a-repo/ [link_pushpullblog]: https://www.igvita.com/2011/12/19/dont-push-your-pull-requests/ [link_updateupstreamwiki]: https://help.github.com/articles/syncing-a-fork/ [link_new_interfaces]: http://nipype.readthedocs.io/en/latest/devel/interface_specs.html [link_cloning]: https://help.github.com/articles/cloning-a-repository/ [link_stemmrolemodels]: https://github.com/KirstieJane/STEMMRoleModels [link_zenodo]: https://github.com/nipy/nipype/blob/master/.zenodo.json nipype-1.7.0/INSTALL000066400000000000000000000002441413403311400140460ustar00rootroot00000000000000.. -*- rst -*- rest mode for emacs .. 
_development-quickstart: For installation instructions see documentation: http://nipy.org/nipype/ or doc/users/install.rst nipype-1.7.0/LICENSE000066400000000000000000000011711413403311400140220ustar00rootroot00000000000000Copyright (c) 2009-2016, Nipype developers Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Prior to release 0.12, Nipype was licensed under a BSD license. nipype-1.7.0/MANIFEST.in000066400000000000000000000003501413403311400145510ustar00rootroot00000000000000include INSTALL include LICENSE include MANIFEST.in include README include THANKS include Makefile include setup_egg.py include doc/documentation.zip include nipype/COMMIT_INFO.txt recursive-include doc * recursive-include tools * nipype-1.7.0/Makefile000066400000000000000000000045451413403311400144650ustar00rootroot00000000000000# Makefile for building distributions of nipype. # Files are then pushed to sourceforge using rsync with a command like this: # rsync -e ssh nipype-0.1-py2.5.egg cburns,nipy@frs.sourceforge.net:/home/frs/project/n/ni/nipy/nipype/nipype-0.1/ PYTHON ?= python .PHONY: zipdoc sdist egg upload_to_pypi trailing-spaces clean-pyc clean-so clean-build clean-ctags clean in inplace test-code test-coverage test html specs check-before-commit check gen-base-dockerfile gen-main-dockerfile gen-dockerfiles zipdoc: html zip documentation.zip doc/_build/html sdist: zipdoc @echo "Building source distribution..." $(PYTHON) setup.py sdist @echo "Done building source distribution." 
# XXX copy documentation.zip to dist directory. egg: zipdoc @echo "Building egg..." $(PYTHON) setup.py bdist_egg @echo "Done building egg." upload_to_pypi: zipdoc @echo "Uploading to PyPi..." $(PYTHON) setup.py sdist --formats=zip,gztar upload trailing-spaces: find . -name "*[.py|.rst]" -type f | xargs perl -pi -e 's/[ \t]*$$//' @echo "Reverting test_docparse" git checkout nipype/utils/tests/test_docparse.py clean-pyc: find . -name "*.pyc" | xargs rm -f find . -name "__pycache__" -type d | xargs rm -rf clean-so: find . -name "*.so" | xargs rm -f find . -name "*.pyd" | xargs rm -f clean-build: rm -rf build clean-ctags: rm -f tags clean-doc: rm -rf doc/_build clean-tests: rm -f .coverage clean: clean-build clean-pyc clean-so clean-ctags clean-doc clean-tests in: inplace # just a shortcut inplace: $(PYTHON) setup.py build_ext -i test-code: in $(PYTHON) -m pytest --doctest-modules nipype test-coverage: clean-tests in $(PYTHON) -m pytest --doctest-modules --cov-config .coveragerc --cov=nipype nipype test: tests # just another name tests: clean test-code html: @echo "building docs" make -C doc clean htmlonly specs: @echo "Checking specs and autogenerating spec tests" env PYTHONPATH=".:$(PYTHONPATH)" $(PYTHON) tools/checkspecs.py check: check-before-commit # just a shortcut check-before-commit: specs trailing-spaces html test @echo "removed spaces" @echo "built docs" @echo "ran test" @echo "generated spec tests" gen-base-dockerfile: @echo "Generating base Dockerfile" bash docker/generate_dockerfiles.sh -b gen-main-dockerfile: @echo "Generating main Dockerfile" bash docker/generate_dockerfiles.sh -m gen-dockerfiles: gen-base-dockerfile gen-main-dockerfile nipype-1.7.0/README.rst000066400000000000000000000116711413403311400145120ustar00rootroot00000000000000======================================================== NIPYPE: Neuroimaging in Python: Pipelines and Interfaces ======================================================== .. 
image:: https://travis-ci.org/nipy/nipype.svg?branch=master :target: https://travis-ci.org/nipy/nipype .. image:: https://circleci.com/gh/nipy/nipype/tree/master.svg?style=svg :target: https://circleci.com/gh/nipy/nipype/tree/master .. image:: https://codecov.io/gh/nipy/nipype/branch/master/graph/badge.svg :target: https://codecov.io/gh/nipy/nipype .. image:: https://api.codacy.com/project/badge/Grade/452bfc0d4de342c99b177d2c29abda7b :target: https://www.codacy.com/app/nipype/nipype?utm_source=github.com&utm_medium=referral&utm_content=nipy/nipype&utm_campaign=Badge_Grade .. image:: https://img.shields.io/pypi/v/nipype.svg :target: https://pypi.python.org/pypi/nipype/ :alt: Latest Version .. image:: https://img.shields.io/pypi/pyversions/nipype.svg :target: https://pypi.python.org/pypi/nipype/ :alt: Supported Python versions .. image:: https://img.shields.io/pypi/status/nipype.svg :target: https://pypi.python.org/pypi/nipype/ :alt: Development Status .. image:: https://img.shields.io/pypi/l/nipype.svg :target: https://pypi.python.org/pypi/nipype/ :alt: License .. image:: https://img.shields.io/badge/gitter-join%20chat%20%E2%86%92-brightgreen.svg?style=flat :target: http://gitter.im/nipy/nipype :alt: Chat .. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.596855.svg :target: https://doi.org/10.5281/zenodo.596855 :alt: Citable DOI Current neuroimaging software offer users an incredible opportunity to analyze data using a variety of different algorithms. However, this has resulted in a heterogeneous collection of specialized applications without transparent interoperability or a uniform operating interface. *Nipype*, an open-source, community-developed initiative under the umbrella of NiPy, is a Python project that provides a uniform interface to existing neuroimaging software and facilitates interaction between these packages within a single workflow. 
Nipype provides an environment that encourages interactive exploration of algorithms from different packages (e.g., SPM, FSL, FreeSurfer, AFNI, Slicer, ANTS), eases the design of workflows within and between packages, and reduces the learning curve necessary to use different packages. Nipype is creating a collaborative platform for neuroimaging software development in a high-level language and addressing limitations of existing pipeline systems. *Nipype* allows you to: * easily interact with tools from different software packages * combine processing steps from different software packages * develop new workflows faster by reusing common steps from old ones * process data faster by running it in parallel on many cores/machines * make your research easily reproducible * share your processing workflows with the community Documentation ------------- Please see the ``doc/README.txt`` document for information on our documentation. Website ------- Information specific to Nipype is located here:: http://nipy.org/nipype Python 2 Statement ------------------ Python 2.7 reaches its end-of-life in January 2020, which means it will *no longer be maintained* by Python developers. `Many projects `__ are removing support in advance of this deadline, which will make it increasingly untenable to try to support Python 2, even if we wanted to. The final series with 2.7 support is 1.3.x. If you have a package using Python 2 and are unable or unwilling to upgrade to Python 3, then you should use the following `dependency `__ for Nipype:: nipype<1.4 Bug fixes will be accepted against the ``maint/1.3.x`` branch. Support and Communication ------------------------- If you have a problem or would like to ask a question about how to do something in Nipype please open an issue to `NeuroStars.org `_ with a *nipype* tag. `NeuroStars.org `_ is a platform similar to StackOverflow but dedicated to neuroinformatics. 
To participate in the Nipype development related discussions please use the following mailing list:: http://mail.python.org/mailman/listinfo/neuroimaging Please add *[nipype]* to the subject line when posting on the mailing list. You can even hangout with the Nipype developers in their `Gitter `_ channel or in the BrainHack `Slack `_ channel. (Click `here `_ to join the Slack workspace.) Contributing to the project --------------------------- If you'd like to contribute to the project please read our `guidelines `_. Please also read through our `code of conduct `_. nipype-1.7.0/THANKS.rst000066400000000000000000000031001413403311400145310ustar00rootroot00000000000000.. -*- mode: rst -*- Code contributors ----------------- See `Github contributors list `__. Funding ------- Nipype is currently supported by `1R01EB020740-01A1 Nipype: Dataflows for Reproducible Biomedical Research `__. Satrajit Ghosh work on this project was partially funded by NIBIB `R03EB008673 `__ and by the `INCF `__ through a contract with TankThink Labs, LLC. Chris Burns was supported by NIMH grant `5R01MH081909-02 Continued Development and Maintenance of the Neuroimaging in Python Project `__. Hans Jonson was supported by `2 U54 EB005149 - 06 Core 2b Huntington's Disease - Driving Biological Project `__, `S10 RR023392 Enterprise Storage In A Collaborative Neuroimaging Environment `__, `R01 NS040068 Neurobiological Predictors of Huntington's Disease `__, and `UL1 TR000442 University of Iowa Clinical and Translational Science Program `__. We would also like to thank `JetBrains `__ for providing `Pycharm `__ licenses. nipype-1.7.0/Vagrantfile000066400000000000000000000046641413403311400152140ustar00rootroot00000000000000VAGRANTFILE_API_VERSION = "2" $script = < {% endblock %} nipype-1.7.0/doc/_templates/indexsidebar.html000066400000000000000000000023521413403311400212500ustar00rootroot00000000000000{% block nipypelinks %}

{{ _('Links') }}

{% endblock %} nipype-1.7.0/doc/_templates/layout.html000066400000000000000000000036351413403311400201310ustar00rootroot00000000000000{% extends "!layout.html" %} {% set title = 'Neuroimaging in Python - Pipelines and Interfaces' %} {% set short_title = 'Nipype' %} {% block extrahead %} {{ super() }} {% endblock %} {% block header %} {% endblock %} {% block relbar1 %}{% endblock %} {% block relbar2 %}{% endblock %} {% block sidebar1 %}{{ sidebar() }}{% endblock %} {% block sidebar2 %}{% endblock %} {% block footer %} {{ super() }} {% endblock %} nipype-1.7.0/doc/_templates/navbar.html000066400000000000000000000012711413403311400200570ustar00rootroot00000000000000 Home · Quickstart · User Guide and Examples · Interfaces Index · Developers · About · Nipy nipype-1.7.0/doc/_templates/sidebar_versions.html000066400000000000000000000022671413403311400221550ustar00rootroot00000000000000{% block versions %}

{{ _('Versions') }}

ReleaseDevel
{{ release }}{{ version }}
Download Github
{% endblock %} nipype-1.7.0/doc/about.rst000066400000000000000000000035731413403311400154360ustar00rootroot00000000000000:orphan: .. _about: ===== About ===== Citation -------- Each Nipype release is archived on `Zenodo `__, which provides a :abbr:`DOI (Digital Object Identifier)` for the project and each release, separately. The project DOI (`10.5281/zenodo.596855 `__) will redirect to the latest release archive, which contains all information needed to cite the release. If you are a Nipype contributor and your name is not mentioned in the latest release, please submit a Pull Request modifying the `.zenodo.json `__ file. When publishing results obtained using Nipype we strongly encourage citing the latest Zenodo archive to give credit to all Nipype contributors. However, if for some reason the journal you are publishing with does not allow you do cite software this way you can use the initial paper published in 2011 (see below). .. admonition:: Reference Gorgolewski K, Burns CD, Madison C, Clark D, Halchenko YO, Waskom ML, Ghosh SS. (2011). Nipype: a flexible, lightweight and extensible neuroimaging data processing framework in Python. Front. Neuroimform. 5:13. `Download Paper`__ __ paper_ :: @article { Gorgolewski2011, title = "Nipype: a flexible, lightweight and extensible neuroimaging data processing framework in python.", year = "2011", author = "Krzysztof Gorgolewski and Christopher D Burns and Cindee Madison and Dav Clark and Yaroslav O Halchenko and Michael L Waskom and Satrajit S Ghosh", journal = "Front Neuroinform", volume = "5", month = "08", doi = "10.3389/fninf.2011.00013", pubmed = "21897815", url = "https://doi.org/10.3389/fninf.2011.00013", issn = "1662-5196"} .. include:: links_names.txt .. 
include:: ../THANKS.rst nipype-1.7.0/doc/changelog/000077500000000000000000000000001413403311400155115ustar00rootroot00000000000000nipype-1.7.0/doc/changelog/0.X.X-changelog.rst000066400000000000000000001263461413403311400207570ustar00rootroot000000000000000.14.0 (November 29, 2017) ========================== (`Full changelog `__) * FIX+MAINT: Revision of the resource monitor (https://github.com/nipy/nipype/pull/2285) * FIX: MultiProc mishandling crashes (https://github.com/nipy/nipype/pull/2301) * MAINT: Revise use of `subprocess.Popen` (https://github.com/nipy/nipype/pull/2289) * ENH: Memorize version checks (https://github.com/nipy/nipype/pull/2274, https://github.com/nipy/nipype/pull/2295) 0.14.0rc1 (November 21, 2017) ----------------------------- * ENH: Generate Dockerfiles with neurodocker (https://github.com/nipy/nipype/pull/2202) * ENH: FLAIR options for recon-all (https://github.com/nipy/nipype/pull/2279) * ENH: Config option for setting maxtasksperchild when multiprocessing (https://github.com/nipy/nipype/pull/2284) * FIX: Testing maintainance and improvements (https://github.com/nipy/nipype/pull/2252) * ENH: Add elapsed_time and final metric_value to ants.Registration (https://github.com/nipy/nipype/pull/1985) * ENH: Improve terminal_output feature (https://github.com/nipy/nipype/pull/2209) * ENH: Simple interface to FSL std2imgcoords (https://github.com/nipy/nipype/pull/2209, prev #1398) * ENH: Centralize virtual/physical $DISPLAYs (https://github.com/nipy/nipype/pull/2203, https://github.com/nipy/nipype/pull/2211) * ENH: New ResourceMonitor - replaces resource profiler (https://github.com/nipy/nipype/pull/2200) * ENH: Quickshear interface (https://github.com/nipy/nipype/pull/2047) * MAINT: updated deprecated HasTraits method (https://github.com/nipy/nipype/pull/2048) * ENH: CLI versioning (https://github.com/nipy/nipype/pull/2054) * ENH: Dual Regression interface (https://github.com/nipy/nipype/pull/2057) * ENH: Additional args to ANTs registration 
(https://github.com/nipy/nipype/pull/2062, https://github.com/nipy/nipype/pull/2078) * FIX: Mp2rage interfaces updated for new parameter names in cbstools 3 (https://github.com/nipy/nipype/pull/2065) * MAINT: Removed automatic nipype folder creation in HOME (https://github.com/nipy/nipype/pull/2076) * MAINT: Additional Windows support (https://github.com/nipy/nipype/pull/2085) * ENH: Output realignment matrices from TOPUP (https://github.com/nipy/nipype/pull/2084) * ENH: Additional AFNI interfaces: 3dZcat, 3dZeropad, 3dedge3, 3dDeconvolve, 3dQwarp, 1dCat, 3dNwarpApply, 3daxialize, 3dREMLfit, 3dUndump, 3dCM, 3dSynthesize + more (https://github.com/nipy/nipype/pull/2087, https://github.com/nipy/nipype/pull/2090, https://github.com/nipy/nipype/pull/2095, https://github.com/nipy/nipype/pull/2099, https://github.com/nipy/nipype/pull/2103, https://github.com/nipy/nipype/pull/2114, https://github.com/nipy/nipype/pull/2135, https://github.com/nipy/nipype/pull/2186, https://github.com/nipy/nipype/pull/2201, https://github.com/nipy/nipype/pull/2210) * MAINT: cleanup and update AFNI's Allineate (https://github.com/nipy/nipype/pull/2098) * ENH: Add cosine-basis high-pass-filter to CompCor, allow skip of initial volumes (https://github.com/nipy/nipype/pull/2107, https://github.com/nipy/nipype/pull/#2122) * FIX: Catch more dcm2niix DTI conversions (https://github.com/nipy/nipype/pull/2110) * FIX: Retrieve aseg + wmparc stats properly (https://github.com/nipy/nipype/pull/2117) * ENH: ANTs MeasureImageSimilarity Inteface (https://github.com/nipy/nipype/pull/2128) * FIX: CompCor filter_basis of correct size, pre-filter column headers (https://github.com/nipy/nipype/pull/2136, https://github.com/nipy/nipype/pull/2138) * ENH: FreeSurfer lta_convert and mri_coreg interfaces (https://github.com/nipy/nipype/pull/2140, https://github.com/nipy/nipype/pull/2172) * ENH: Speed up S3DataGrabber (https://github.com/nipy/nipype/pull/2143) * FIX: Allow S3DataGrabber to grab single file 
(https://github.com/nipy/nipype/pull/2147) * FIX: Allow 4D images as inputs to buildtemplateparallel.sh and N4BiasFieldCorrection (https://github.com/nipy/nipype/pull/2151) * MAINT: Detect and warn unconnected duplicate nodes (https://github.com/nipy/nipype/pull/2163) * ENH: Calcmedian Interface (https://github.com/nipy/nipype/pull/2167) * FIX: probtrackx2 outputs (https://github.com/nipy/nipype/pull/2169) * ENH: Improve FreeSurfer registration (https://github.com/nipy/nipype/pull/2172) * ENH: BIDSDataGrabber interface (https://github.com/nipy/nipype/pull/2174) * MAINT: Set minimum numpy version to 1.9.0 (https://github.com/nipy/nipype/pull/2182) * ENH: Support for multiple intial-moving-transforms (https://github.com/nipy/nipype/pull/2187) * MAINT: Fixes for networkx and afni (https://github.com/nipy/nipype/pull/2196, https://github.com/nipy/nipype/pull/2171) * TST: Update C3D version in Docker build (https://github.com/nipy/nipype/pull/2199) * ENH: SimpleInterface interface (https://github.com/nipy/nipype/pull/2220) * ENH: Add LTA to Tkregister2 (https://github.com/nipy/nipype/pull/2217) 0.13.1 (May 20, 2017) ===================== * FIX: Make release compatible with conda-forge build process (https://github.com/nipy/nipype/pull/2017) * ENH: Update some minimum versions in compliance with Debian Jessie (https://github.com/nipy/nipype/pull/2017) * ENH: Circle builds use cached docker layers (https://github.com/nipy/nipype/pull/2017) * ENH: Base docker to use FS6 and ANTS 2.2.0 (https://github.com/nipy/nipype/pull/2024) * FIX: Mailmap and contributor acknowledgment (https://github.com/nipy/nipype/pull/2017) * FIX: Preserve node properties in sub nodes of MapNode (https://github.com/nipy/nipype/pull/2019) * FIX: Fix interfaces 3DUnifize, ICA_AROMA, BinaryMaths, RegAverage, BBRegister, AffineInitializer (https://github.com/nipy/nipype/pull/2025, https://github.com/nipy/nipype/pull/2027, https://github.com/nipy/nipype/pull/2036, 
https://github.com/nipy/nipype/pull/2037, https://github.com/nipy/nipype/pull/2031, https://github.com/nipy/nipype/pull/2010) * ENH: Add Anisotropic Power interface (https://github.com/nipy/nipype/pull/2039) * FIX: Bayesian estimation in SPM (https://github.com/nipy/nipype/pull/2030) 0.13.0 (May 11, 2017) ===================== * ENH: Multi-stage recon-all directives (https://github.com/nipy/nipype/pull/1991) * FIX: FEAT "folder does not exist" error (https://github.com/nipy/nipype/pull/2000) * ENH: Niftyfit interfaces (https://github.com/nipy/nipype/pull/1910) * FIX: Define ANTSPATH for BrainExtraction automatically (https://github.com/nipy/nipype/pull/1986) * ENH: New trait for imaging files (https://github.com/nipy/nipype/pull/1949) * ENH: Niftyseg interfaces (https://github.com/nipy/nipype/pull/1911) * ENH: Niftyreg interfaces (https://github.com/nipy/nipype/pull/1913) * MRG: Allow more support for CLI (https://github.com/nipy/nipype/pull/1908) * ENH: 3dQwarpPlusMinus interface (https://github.com/nipy/nipype/pull/1974) * FIX: PY3.6 support (https://github.com/nipy/nipype/pull/1977) * FIX: PY3 and stream fixes for MRTrix2TrackVis (https://github.com/nipy/nipype/pull/1804) * ENH: More mask options for CompCor interfaces (https://github.com/nipy/nipype/pull/1968 + https://github.com/nipy/nipype/pull/1992) * ENH: Additional TOPUP outputs (https://github.com/nipy/nipype/pull/1976) * ENH: Additional Eddy flags (https://github.com/nipy/nipype/pull/1967) * ENH: ReconAll handlers for less common cases (https://github.com/nipy/nipype/pull/1966) * ENH: FreeSurferSource now finds graymid/midthickness surfs (https://github.com/nipy/nipype/pull/1972) * ENH: Additional fslmaths dimensional reduction operations (https://github.com/nipy/nipype/pull/1956) * ENH: More options for RobustFOV interface (https://github.com/nipy/nipype/pull/1923) * ENH: Add MRIsCombine to FreeSurfer utils (https://github.com/nipy/nipype/pull/1948) * FIX: Level1Design EV parameter substitution 
(https://github.com/nipy/nipype/pull/1953) * FIX: Dcm2niix outputs can be uncompressed (https://github.com/nipy/nipype/pull/1951) * FIX: Ensure build fails in Circle when tests fail (https://github.com/nipy/nipype/pull/1981) * ENH: Add interface to antsAffineInitializer (https://github.com/nipy/nipype/pull/1980) * ENH: AFNI motion parameter support for FrameWiseDisplacement (https://github.com/nipy/nipype/pull/1840) * ENH: Add ANTs KellyKapowski interface (https://github.com/nipy/nipype/pull/1845) * FIX: AFNI interface bug setting OMP_NUM_THREADS to 1 (https://github.com/nipy/nipype/pull/1728) * FIX: Select Eddy run command at runtime (https://github.com/nipy/nipype/pull/1871) * FIX: Increase FLIRT's flexibility with apply_xfm (https://github.com/nipy/nipype/pull/1875) * DOC: Update FSL preprocess docstrings (https://github.com/nipy/nipype/pull/1881) * ENH: Support GIFTI outputs in SampleToSurface (https://github.com/nipy/nipype/pull/1886) * FIX: Configparser differences between PY2 and PY3 (https://github.com/nipy/nipype/pull/1890) * ENH: Add mris_expand interface (https://github.com/nipy/nipype/pull/1893) * FIX: Split over-eager globs in FreeSurferSource (https://github.com/nipy/nipype/pull/1894) * FIX: Store undefined by default so that xor checks don't trip (https://github.com/nipy/nipype/pull/1903) * FIX: Gantt chart generator PY3 compatibility (https://github.com/nipy/nipype/pull/1907) * FIX: Add DOF and --fsl-dof options to BBRegister (https://github.com/nipy/nipype/pull/1917) * ENH: Auto-derive input_names in Function (https://github.com/nipy/nipype/pull/1918) * FIX: Minor fixes for NonSteadyStateDetector (https://github.com/nipy/nipype/pull/1926) * DOC: Add duecredit references for AFNI and FSL (https://github.com/nipy/nipype/pull/1930) * ENH: Added zenodo (https://zenodo.org/) file (https://github.com/nipy/nipype/pull/1924) * ENH: Disable symlinks on CIFS filesystems (https://github.com/nipy/nipype/pull/1941) * ENH: Sphinx extension to plot workflows 
(https://github.com/nipy/nipype/pull/1896) * ENH: Added non-steady state detector for EPI data (https://github.com/nipy/nipype/pull/1839) * ENH: Enable new BBRegister init options for FSv6+ (https://github.com/nipy/nipype/pull/1811) * REF: Splits nipype.interfaces.utility into base, csv, and wrappers (https://github.com/nipy/nipype/pull/1828) * FIX: Makespec now runs with nipype in current directory (https://github.com/nipy/nipype/pull/1813) * FIX: Flexible nifti opening with mmap if Numpy < 1.12.0 (https://github.com/nipy/nipype/pull/1796 + https://github.com/nipy/nipype/pull/1831) * ENH: DVARS includes intensity normalization feature - turned on by default (https://github.com/nipy/nipype/pull/1827) * FIX: DVARS is correctly using sum of squares instead of standard deviation (https://github.com/nipy/nipype/pull/1827) * ENH: Refactoring of nipype.interfaces.utility (https://github.com/nipy/nipype/pull/1828) * FIX: CircleCI were failing silently. Some fixes to tests (https://github.com/nipy/nipype/pull/1833) * FIX: Issues in Docker image permissions, and docker documentation (https://github.com/nipy/nipype/pull/1825) * ENH: Revised all Dockerfiles and automated deployment to Docker Hub from CircleCI (https://github.com/nipy/nipype/pull/1815) * ENH: Update ReconAll interface for FreeSurfer v6.0.0 (https://github.com/nipy/nipype/pull/1790) * FIX: Cast DVARS float outputs to avoid memmap error (https://github.com/nipy/nipype/pull/1777) * FIX: FSL FNIRT intensity mapping files (https://github.com/nipy/nipype/pull/1799) * ENH: Additional outputs generated by FSL EDDY (https://github.com/nipy/nipype/pull/1793) * TST: Parallelize CircleCI build across 4 containers (https://github.com/nipy/nipype/pull/1769) 0.13.0-rc1 (January 4, 2017) ============================ * FIX: Compatibility with traits 4.6 (https://github.com/nipy/nipype/pull/1770) * FIX: Multiproc deadlock (https://github.com/nipy/nipype/pull/1756) * TST: Replace nose and unittest with pytest 
(https://github.com/nipy/nipype/pull/1722, https://github.com/nipy/nipype/pull/1751) * FIX: Semaphore capture using MultiProc plugin (https://github.com/nipy/nipype/pull/1689) * REF: Refactor AFNI interfaces (https://github.com/nipy/nipype/pull/1678, https://github.com/nipy/nipype/pull/1680) * ENH: Move nipype commands to group command using click (https://github.com/nipy/nipype/pull/1608) * FIX: AFNI Retroicor interface fixes (https://github.com/nipy/nipype/pull/1669) * FIX: Minor errors after migration to setuptools (https://github.com/nipy/nipype/pull/1671) * ENH: Add AFNI 3dNote interface (https://github.com/nipy/nipype/pull/1637) * ENH: Abandon distutils, only use setuptools (https://github.com/nipy/nipype/pull/1627) * FIX: Minor bugfixes related to unicode literals (https://github.com/nipy/nipype/pull/1656) * TST: Automatic retries in travis (https://github.com/nipy/nipype/pull/1659/files) * ENH: Add signal extraction interface (https://github.com/nipy/nipype/pull/1647) * ENH: Add a DVARS calculation interface (https://github.com/nipy/nipype/pull/1606) * ENH: New interface to b0calc of FSL-POSSUM (https://github.com/nipy/nipype/pull/1399) * ENH: Add CompCor (https://github.com/nipy/nipype/pull/1599) * ENH: Add duecredit entries (https://github.com/nipy/nipype/pull/1466) * FIX: Python 3 compatibility fixes (https://github.com/nipy/nipype/pull/1572) * REF: Improved PEP8 compliance for fsl interfaces (https://github.com/nipy/nipype/pull/1597) * REF: Improved PEP8 compliance for spm interfaces (https://github.com/nipy/nipype/pull/1593) * TST: Replaced coveralls with codecov (https://github.com/nipy/nipype/pull/1609) * ENH: More BrainSuite interfaces (https://github.com/nipy/nipype/pull/1554) * ENH: Convenient load/save of interface inputs (https://github.com/nipy/nipype/pull/1591) * ENH: Add a Framewise Displacement calculation interface (https://github.com/nipy/nipype/pull/1604) * FIX: Use builtins open and unicode literals for py3 compatibility 
(https://github.com/nipy/nipype/pull/1572) * TST: reduce the size of docker images & use tags for images (https://github.com/nipy/nipype/pull/1564) * ENH: Implement missing inputs/outputs in FSL AvScale (https://github.com/nipy/nipype/pull/1563) * FIX: Fix symlink test in copyfile (https://github.com/nipy/nipype/pull/1570, https://github.com/nipy/nipype/pull/1586) * ENH: Added support for custom job submission check in SLURM (https://github.com/nipy/nipype/pull/1582) * ENH: Added ANTs interface CreateJacobianDeterminantImage; replaces deprecated JacobianDeterminant (https://github.com/nipy/nipype/pull/1654) Release 0.12.1 (August 3, 2016) =============================== * FIX: runtime profiling is optional and off by default (https://github.com/nipy/nipype/pull/1561) * TST: circle CI tests run with docker (https://github.com/nipy/nipype/pull/1541) * FIX: workflow export functions without import error (https://github.com/nipy/nipype/pull/1552) Release 0.12.0 (July 12, 2016) ============================== * ENH: New interface for Bruker to Nifti converter (https://github.com/nipy/nipype/pull/1523) * FIX: output file naming for FIRST outputs (https://github.com/nipy/nipype/pull/1524) * ENH: Adds `fslmaths -Tstd` to maths interfaces (https://github.com/nipy/nipype/pull/1518) * FIX: Selecting "gamma" in FSL Level1Design now does what the name says (https://github.com/nipy/nipype/pull/1500) * ENH: Added grad_dev input to fsl.dti.bedpostx5 interface(https://github.com/nipy/nipype/pull/1493) * ENH: ResourceMultiProc plugin to support resource allocation (https://github.com/nipy/nipype/pull/1372) * ENH: Added dcm2niix interface (https://github.com/nipy/nipype/pull/1435) * ENH: Add nipype_crash_search command (https://github.com/nipy/nipype/pull/1422) * ENH: Created interface for BrainSuite Cortical Surface Extraction command line tools (https://github.com/nipy/nipype/pull/1305) * FIX: job execution on systems/approaches where locale is undefined 
(https://github.com/nipy/nipype/pull/1401) * FIX: Clean up byte/unicode issues using subprocess (https://github.com/nipy/nipype/pull/1394) * FIX: Prevent crash when tvtk is loaded - ETS_TOOLKIT=null (https://github.com/nipy/nipype/pull/973) * ENH: New interfaces in dipy: RESTORE, EstimateResponseSH, CSD and StreamlineTractography (https://github.com/nipy/nipype/pull/1090) * ENH: Added interfaces of AFNI (https://github.com/nipy/nipype/pull/1360, https://github.com/nipy/nipype/pull/1361, https://github.com/nipy/nipype/pull/1382) * ENH: Provides a Nipype wrapper for antsJointFusion (https://github.com/nipy/nipype/pull/1351) * ENH: Added support for PETPVC (https://github.com/nipy/nipype/pull/1335) * ENH: Merge S3DataSink into DataSink, added AWS documentation (https://github.com/nipy/nipype/pull/1316) * TST: Cache APT in CircleCI (https://github.com/nipy/nipype/pull/1333) * ENH: Add new flags to the BRAINSABC for new features (https://github.com/nipy/nipype/pull/1322) * ENH: Provides a Nipype wrapper for ANTs DenoiseImage (https://github.com/nipy/nipype/pull/1291) * FIX: Minor bugfix logging hash differences (https://github.com/nipy/nipype/pull/1298) * FIX: Use released Prov python library (https://github.com/nipy/nipype/pull/1279) * ENH: Support for Python 3 (https://github.com/nipy/nipype/pull/1221) * FIX: VTK version check missing when using tvtk (https://github.com/nipy/nipype/pull/1219) * ENH: Added an OAR scheduler plugin (https://github.com/nipy/nipype/pull/1259) * ENH: New ANTs interface: antsBrainExtraction (https://github.com/nipy/nipype/pull/1231) * API: Default model level for the bedpostx workflow has been set to "2" following FSL 5.0.9 lead * ENH: New interfaces for interacting with AWS S3: S3DataSink and S3DataGrabber (https://github.com/nipy/nipype/pull/1201) * ENH: Interfaces for MINC tools (https://github.com/nipy/nipype/pull/1304) * FIX: Use realpath to determine hard link source (https://github.com/nipy/nipype/pull/1388) * FIX: Correct 
linking/copying fallback behavior (https://github.com/nipy/nipype/pull/1391) * ENH: Nipype workflow and interfaces for FreeSurfer's recon-all (https://github.com/nipy/nipype/pull/1326) * FIX: Permit relative path for concatenated_file input to Concatenate() (https://github.com/nipy/nipype/pull/1411) * ENH: Makes ReconAll workflow backwards compatible with FreeSurfer 5.3.0 (https://github.com/nipy/nipype/pull/1434) * ENH: Added interfaces for AFNI 3dDegreeCentrality, 3dECM, 3dLFCD, 3dClipLevel, 3dmask_tool, and 3dSeg (https://github.com/nipy/nipype/pull/1460) Release 0.11.0 (September 15, 2015) =================================== * API: Change how hash values are computed (https://github.com/nipy/nipype/pull/1174) * ENH: New algorithm: mesh.WarpPoints applies displacements fields to point sets (https://github.com/nipy/nipype/pull/889). * ENH: New interfaces for MRTrix3 (https://github.com/nipy/nipype/pull/1126) * ENH: New option in afni.3dRefit - zdel, ydel, zdel etc. (https://github.com/nipy/nipype/pull/1079) * FIX: ants.Registration composite transform outputs are no longer returned as lists (https://github.com/nipy/nipype/pull/1183) * BUG: ANTs Registration interface failed with multi-modal inputs (https://github.com/nipy/nipype/pull/1176) (https://github.com/nipy/nipype/issues/1175) * ENH: dipy.TrackDensityMap interface now accepts a reference image (https://github.com/nipy/nipype/pull/1091) * FIX: Bug in XFibres5 (https://github.com/nipy/nipype/pull/1168) * ENH: Attempt to use hard links for data sink. 
(https://github.com/nipy/nipype/pull/1161) * FIX: Updates to SGE Plugins (https://github.com/nipy/nipype/pull/1129) * ENH: Add ants JointFusion() node with testing (https://github.com/nipy/nipype/pull/1160) * ENH: Add --float option for antsRegistration calls (https://github.com/nipy/nipype/pull/1159) * ENH: Added interface to simulate DWIs using the multi-tensor model (https://github.com/nipy/nipype/pull/1085) * ENH: New interface for FSL fslcpgeom utility (https://github.com/nipy/nipype/pull/1152) * ENH: Added SLURMGraph plugin for submitting jobs to SLURM with dependencies (https://github.com/nipy/nipype/pull/1136) * FIX: Enable absolute path definitions in DCMStack (https://github.com/nipy/nipype/pull/1089, replaced by https://github.com/nipy/nipype/pull/1093) * ENH: New mesh.MeshWarpMaths to operate on surface-defined warpings (https://github.com/nipy/nipype/pull/1016) * FIX: Refactor P2PDistance, change name to ComputeMeshWarp, add regression tests, fix bug in area weighted distance, and added optimizations (https://github.com/nipy/nipype/pull/1016) * ENH: Add an option not to resubmit Nodes that finished running when using SGEGraph (https://github.com/nipy/nipype/pull/1002) * FIX: FUGUE is now properly listing outputs. (https://github.com/nipy/nipype/pull/978) * ENH: Improved FieldMap-Based (FMB) workflow for correction of susceptibility distortions in EPI seqs. 
(https://github.com/nipy/nipype/pull/1019) * FIX: In the FSLXcommand _list_outputs function fixed for loop range (https://github.com/nipy/nipype/pull/1071) * ENH: Dropped support for now 7 years old Python 2.6 (https://github.com/nipy/nipype/pull/1069) * FIX: terminal_output is not mandatory anymore (https://github.com/nipy/nipype/pull/1070) * ENH: Added "nipype_cmd" tool for running interfaces from the command line (https://github.com/nipy/nipype/pull/795) * FIX: Fixed Camino output naming (https://github.com/nipy/nipype/pull/1061) * ENH: Add the average distance to ErrorMap (https://github.com/nipy/nipype/pull/1039) * ENH: Inputs with name_source can be now chained in cascade (https://github.com/nipy/nipype/pull/938) * ENH: Improve JSON interfaces: default settings when reading and consistent output creation when writing (https://github.com/nipy/nipype/pull/1047) * FIX: AddCSVRow problems when using infields (https://github.com/nipy/nipype/pull/1028) * FIX: Removed unused ANTS registration flag (https://github.com/nipy/nipype/pull/999) * FIX: Amend create_tbss_non_fa() workflow to match FSL's tbss_non_fa command. 
(https://github.com/nipy/nipype/pull/1033) * FIX: remove unused mandatory flag from spm normalize (https://github.com/nipy/nipype/pull/1048) * ENH: Update ANTSCorticalThickness interface (https://github.com/nipy/nipype/pull/1013) * FIX: Edge case with sparsemodels and PEP8 cleanup (https://github.com/nipy/nipype/pull/1046) * ENH: New io interfaces for JSON files reading/writing (https://github.com/nipy/nipype/pull/1020) * ENH: Enhanced openfmri script to support freesurfer linkage (https://github.com/nipy/nipype/pull/1037) * BUG: matplotlib is supposed to be optional (https://github.com/nipy/nipype/pull/1003) * FIX: Fix split_filename behaviour when path has no file component (https://github.com/nipy/nipype/pull/1035) * ENH: Updated FSL dtifit to include option for grad non-linearities (https://github.com/nipy/nipype/pull/1032) * ENH: Updated Camino tracking interfaces, which can now use FSL bedpostx output. New options also include choice of tracker, interpolator, stepsize and curveinterval for angle threshold (https://github.com/nipy/nipype/pull/1029) * FIX: Interfaces redirecting X crashed if $DISPLAY not defined (https://github.com/nipy/nipype/pull/1027) * FIX: Bug crashed 'make api' (https://github.com/nipy/nipype/pull/1026) * ENH: Updated antsIntroduction to handle RA and RI registrations (https://github.com/nipy/nipype/pull/1009) * ENH: Updated N4BiasCorrection input spec to include weight image and spline order. Made argument formatting consistent. Cleaned ants.segmentation according to PEP8. 
(https://github.com/nipy/nipype/pull/990/files) * ENH: SPM12 Normalize interface (https://github.com/nipy/nipype/pull/986) * FIX: Utility interface test dir (https://github.com/nipy/nipype/pull/986) * FIX: IPython engine directory reset after crash (https://github.com/nipy/nipype/pull/987) * ENH: Resting state fMRI example with NiPy realignment and no SPM (https://github.com/nipy/nipype/pull/992) * FIX: Corrected Freesurfer SegStats _list_outputs to avoid error if summary_file is undefined (issue #994)(https://https://github.com/nipy/nipype/pull/996) * FIX: OpenfMRI support and FSL 5.0.7 changes (https://github.com/nipy/nipype/pull/1006) * FIX: Output prefix in SPM Normalize with modulation (https://github.com/nipy/nipype/pull/1023) * ENH: Usability improvements in cluster environments (https://github.com/nipy/nipype/pull/1025) * ENH: ANTs JointFusion() (https://github.com/nipy/nipype/pull/1042) * ENH: Added csvReader() utility (https://github.com/nipy/nipype/pull/1044) * FIX: typo in nipype.interfaces.freesurfer.utils.py Tkregister2 (https://github.com/nipy/nipype/pull/1083) * FIX: SSHDataGrabber outputs now return full path to the grabbed/downloaded files. 
(https://github.com/nipy/nipype/pull/1086) * FIX: Add QA output for TSNR to resting workflow (https://github.com/nipy/nipype/pull/1088) * FIX: Change N4BiasFieldCorrection to use short tag for dimensionality (backward compatible) (https://github.com/nipy/nipype/pull/1096) * ENH: Added -newgrid input to Warp in AFNI (3dWarp wrapper) (https://github.com/nipy/nipype/pull/1128) * FIX: Fixed AFNI Copy interface to use positional inputs as required (https://github.com/nipy/nipype/pull/1131) * ENH: Added a check in Dcm2nii to check if nipype created the config.ini file and remove if true (https://github.com/nipy/nipype/pull/1132) * ENH: Use a while loop to wait for Xvfb (up to a max wait time "xvfb_max_wait" in config file, default 10) (https://github.com/nipy/nipype/pull/1142) Release 0.10.0 (October 10, 2014) ================================= * ENH: New miscelaneous interfaces: SplitROIs (mapper), MergeROIs (reducer) to enable parallel processing of very large images. * ENH: Updated FSL interfaces: BEDPOSTX and XFibres, former interfaces are still available with the version suffix: BEDPOSTX4 and XFibres4. Added gpu versions of BEDPOSTX: BEDPOSTXGPU, BEDPOSTX5GPU, and BEDPOSTX4GPU * ENH: Added experimental support for MIPAV algorithms thorugh JIST plugins * ENH: New dipy interfaces: Denoise, Resample * ENH: New Freesurfer interfaces: Tkregister2 (for conversion of fsl style matrices to freesurfer format), MRIPretess * ENH: New FSL interfaces: WarpPoints, WarpPointsToStd, EpiReg, ProbTrackX2, WarpUtils, ConvertWarp * ENH: New miscelaneous interfaces: AddCSVRow, NormalizeProbabilityMapSet, AddNoise * ENH: New AFNI interfaces: Eval, Means, SVMTest, SVMTrain * ENH: FUGUE interface has been refactored to use the name_template system, 3 examples added to doctests, some bugs solved. * API: Interfaces to external packages are no longer available in the top-level ``nipype`` namespace, and must be imported directly (e.g. ``from nipype.interfaces import fsl``). 
* ENH: Support for elastix via a set of new interfaces: Registration, ApplyWarp, AnalyzeWarp, PointsWarp, and EditTransform * ENH: New ANTs interface: ApplyTransformsToPoints, LaplacianThickness * ENH: New Diffusion Toolkit interface: TrackMerge * ENH: New MRtrix interface: FilterTracks * ENH: New metrics group in algorithms. Now Distance, Overlap, and FuzzyOverlap are found in nipype.algorithms.metrics instead of misc. Overlap interface extended to allow files containing multiple ROIs and volume physical units. * ENH: New interface in algorithms.metrics: ErrorMap (a voxel-wise diff map). * ENH: New FreeSurfer workflow: create_skullstripped_recon_flow() * ENH: Deep revision of workflows for correction of dMRI artifacts. New dmri_preprocessing example. * ENH: New data grabbing interface that works over SSH connections, SSHDataGrabber * ENH: New color mode for write_graph * ENH: You can now force MapNodes to be run serially * ENH: Added ANTS based openfmri workflow * ENH: MapNode now supports flattening of nested lists * ENH: Support for headless mode using Xvfb * ENH: nipype_display_crash has a debugging mode * FIX: MRTrix tracking algorithms were ignoring mask parameters. 
* FIX: FNIRT registration pathway and associated OpenFMRI example script * FIX: spm12b compatibility for Model estimate * FIX: Batch scheduler controls the number of maximum jobs properly * FIX: Update for FSL 5.0.7 which deprecated Contrast Manager Release 0.9.2 (January 31, 2014) ================================ * FIX: DataFinder was broken due to a typo * FIX: Order of DataFinder outputs was not guaranteed, it's human sorted now * ENH: New interfaces: Vnifti2Image, VtoMat Release 0.9.1 (December 25, 2013) ================================= * FIX: installation issues Release 0.9.0 (December 20, 2013) ================================= * ENH: SelectFiles: a streamlined version of DataGrabber * ENH: new tools for defining workflows: JoinNode, synchronize and itersource * ENH: W3C PROV support with optional RDF export built into Nipype * ENH: Added support for Simple Linux Utility Resource Management (SLURM) * ENH: AFNI interfaces refactor, prefix, suffix are replaced by "flexible_%s_templates" * ENH: New SPM interfaces: - spm.ResliceToReference, - spm.DicomImport * ENH: New AFNI interfaces: - afni.AFNItoNIFTI - afni.TCorr1D * ENH: Several new interfaces related to Camino were added: - camino.SFPICOCalibData - camino.Conmat - camino.QBallMX - camino.LinRecon - camino.SFPeaks One outdated interface no longer part of Camino was removed: - camino.Conmap * ENH: Three new mrtrix interfaces were added: - mrtrix.GenerateDirections - mrtrix.FindShPeaks - mrtrix.Directions2Amplitude * ENH: New FSL interfaces: - fsl.PrepareFieldmap - fsl.TOPUP - fsl.ApplyTOPUP - fsl.Eddy * ENH: New misc interfaces: - FuzzyOverlap, - P2PDistance * ENH: New workflows: nipype.workflows.dmri.fsl.epi.[fieldmap_correction&topup_correction] * ENH: Added simplified outputname generation for command line interfaces. 
* ENH: Allow ants use a single mask image * ENH: Create configuration option for parameterizing directories with hashes * ENH: arrange nodes by topological sort with disconnected subgraphs * ENH: uses the nidm iri namespace for uuids * ENH: remove old reporting webpage * ENH: Added support for Vagrant * API: 'name' is now a positional argument for Workflow, Node, and MapNode constructors * API: SPM now defaults to SPM8 or SPM12b job format * API: DataGrabber and SelectFiles use human (or natural) sort now * FIX: Several fixes related to Camino interfaces: - ProcStreamlines would ignore many arguments silently (target, waypoint, exclusion ROIS, etc.) - DTLUTGen would silently round the "step", "snr" and "trace" parameters to integers - PicoPDFs would not accept more than one lookup table - PicoPDFs default pdf did not correspond to Camino default - Track input model names were outdated (and would generate an error) - Track numpds parameter could not be set for deterministic tractography - FA created output files with erroneous extension * FIX: Deals properly with 3d files in SPM Realign * FIX: SPM with MCR fixed * FIX: Cleaned up input and output spec metadata * FIX: example openfmri script now makes the contrast spec a hashed input * FIX: FILMGLS compatibility with FSL 5.0.5 * FIX: Freesurfer recon-all resume now avoids setting inputs * FIX: File removal from node respects file associations img/hdr/mat, BRIK/HEAD Release 0.8.0 (May 8, 2013) =========================== * ENH: New interfaces: nipy.Trim, fsl.GLM, fsl.SigLoss, spm.VBMSegment, fsl.InvWarp, dipy.TensorMode * ENH: Allow control over terminal output for commandline interfaces * ENH: Added preliminary support for generating Python code from Workflows. * ENH: New workflows for dMRI and fMRI pre-processing: added motion artifact correction with rotation of the B-matrix, and susceptibility correction for EPI imaging using fieldmaps. 
Updated eddy_correct pipeline to support both dMRI and fMRI, and new parameters. * ENH: Minor improvements to FSL's FUGUE and FLIRT interfaces * ENH: Added optional dilation of parcels in cmtk.Parcellate * ENH: Interpolation mode added to afni.Resample * ENH: Function interface can accept a list of strings containing import statements that allow external functions to run without their imports defined in the function body * ENH: Allow node configurations to override master configuration * FIX: SpecifyModel works with 3D files correctly now. Release 0.7.0 (Dec 18, 2012) ============================ * ENH: Add basic support for LSF plugin. * ENH: New interfaces: ICC, Meshfix, ants.Register, C3dAffineTool, ants.JacobianDeterminant, afni.AutoTcorrelate, DcmStack * ENH: New workflows: ants template building (both using 'ANTS' and the new 'antsRegistration') * ENH: New examples: how to use ANTS template building workflows (smri_ants_build_template), how to set SGE specific options (smri_ants_build_template_new) * ENH: added no_flatten option to Merge * ENH: added versioning option and checking to traits * ENH: added deprecation metadata to traits * ENH: Slicer interfaces were updated to version 4.1 Release 0.6.0 (Jun 30, 2012) ============================ * API: display variable no longer encoded as inputs in commandline interfaces * ENH: input hash not modified when environment DISPLAY is changed * ENH: support for 3d files for TSNR calculation * ENH: Preliminary support for graph submission with SGE, PBS and Soma Workflow * ENH: New interfaces: MySQLSink, nipy.Similarity, WatershedBEM, MRIsSmooth, NetworkBasedStatistic, Atropos, N4BiasFieldCorrection, ApplyTransforms, fs.MakeAverageSubject, epidewarp.fsl, WarpTimeSeriesImageMultiTransform, AVScale, mri_ms_LDA * ENH: simple interfaces for spm * FIX: CompCor component calculation was erroneous * FIX: filename generation for AFNI and PRELUDE * FIX: improved slicer module autogeneration * FIX: added missing options for 
BBRegister * FIX: functionality of remove_unnecessary_outputs cleaned up * FIX: local hash check works with appropriate inputs * FIX: Captures all stdout from commandline programs * FIX: Afni outputs should inherit from TraitedSpec Release 0.5.3 (Mar 23, 2012) ============================ * FIX: SPM model generation when output units is in scans Release 0.5.2 (Mar 14, 2012) ============================ * API: Node now allows specifying node level configuration for SGE/PBS clusters * API: Logging to file is disabled by default * API: New location of log file -> .nipype/nipype.cfg * ENH: Changing logging options via config works for distributed processing * FIX: Unittests on debian (logging and ipython) Release 0.5 (Mar 10, 2012) ========================== * API: FSL defaults to Nifti when OUTPUTTYPE environment variable not found * API: By default inputs are removed from Node working directory * API: InterfaceResult class is now versioned and stores class type not instance * API: Added FIRST interface * API: Added max_jobs parameter to plugin_args. Limits the number of jobs executing at any given point in time * API: crashdump_dir is now a config execution option * API: new config execution options for controlling hash checking, execution and logging behavior when running in distributed mode. * API: Node/MapNode has new attribute that allows it to run on master thread. * API: IPython plugin now invokes IPython 0.11 or greater * API: Canned workflows are now all under a different package structure * API: SpecifyModel event_info renamed to event_files * API: DataGrabber is always being rerun (unless overwrite is set to False on Node level) * API: "stop_on_first_rerun" does not stop for DataGrabber (unless overwrite is set to True on Node level) * API: Output prefix can be set for spm nodes (SliceTiming, Realign, Coregister, Normalize, Smooth) * ENH: Added fsl resting state workflow based on behzadi 2007 CompCorr method. 
* ENH: TSNR node produces mean and std-dev maps; allows polynomial detrending * ENH: IdentityNodes are removed prior to execution * ENH: Added Michael Notter's beginner's guide * ENH: Added engine support for status callback functions * ENH: SPM create warped node * ENH: All underlying interfaces (including python ones) are now optional * ENH: Added imperative programming option with Nodes and caching * ENH: Added debug mode to configuration * ENH: Results can be stored and loaded without traits exceptions * ENH: Added concurrent log handler for distributed writing to log file * ENH: Reporting can be turned off using config * ENH: Added stats files to FreeSurferOutput * ENH: Support for Condor through qsub emulation * ENH: IdentityNode with iterable expansion takes place after remaining Identity Node removal * ENH: Crashfile display script added * ENH: Added FmriRealign4d node wrapped from nipy * ENH: Added TBSS workflows and examples * ENH: Support for openfmri data processing * ENH: Package version check * FIX: Fixed spm preproc workflow to cater to multiple functional runs * FIX: Workflow outputs displays nodes with empty outputs * FIX: SUSAN workflow works without usans * FIX: SGE fixed for reading custom templates * FIX: warping in SPM realign, Dartel and interpolation parameters * FIX: Fixed voxel size parameter in freesurfer mri_convert * FIX: 4D images in spm coregister * FIX: Works around matlab tty bug * FIX: Overwriting connection raises exception * FIX: Outputs are loaded from results and not stored in memory for during distributed operation * FIX: SPM threshold uses SPM.mat name and improved error detection * FIX: Removing directory contents works even when a node has no outputs * FIX: DARTEL workflows will run only when SPM 8 is available * FIX: SPM Normalize estimate field fixed * FIX: hashmethod argument now used for calculating hash of old file * FIX: Modelgen now allows FSL style event files Release 0.4.1 (Jun 16, 2011) 
============================ * Minor bugfixes Release 0.4 (Jun 11, 2011) ========================== * API: Timestamp hashing does not use ctime anymore. Please update your hashes by running workflows with updatehash=True option NOTE: THIS IS THE DEFAULT CONFIG NOW, so unless you updatehash, workflows will rerun * API: Workflow run function no longer supports (inseries, createdirsonly). Functions used in connect string must be pickleable * API: SPM EstimateContrast: ignore_derivs replaced by use_derivs * API: All interfaces: added new config option ignore_exception * API: SpecifyModel no longer supports (concatenate_runs, output_specs). high_pass_filter cutoff is mandatory (even if it's set to np.inf). Additional interfaces SpecifySPMModel and SpecifySparseModel support other types of data. * API: fsl.DTIFit input "save" is now called "save_tensor" * API: All inputs of IdentityInterfaces are mandatory by default. You can turn this off by specifying mandatory_inputs=False to the constructor. 
* API: fsl FILMGLS input "autocorr_estimate" is now called "autocorr_estimate_only" * API: fsl ContrastMgr now requires access to specific files (no longer accepts the result directory) * API: freesurfer.GLMFit input "surf" is now a boolean with three corresponding inputs -- subject_id, hemi, and surf_geo * ENH: All commandline interfaces display stdout and stderr * ENH: All interfaces raise exceptions on error with an option to suppress * ENH: Supports a plugin interface for execution (current support for multiprocessing, IPython, SGE, PBS) * ENH: MapNode runs in parallel under IPython, SGE, MultiProc, PBS * ENH: Optionally allows keeping only required outputs * ENH: New interface: utility.Rename to change the name of files, optionally using python string-formatting with inputs or regular expressions matching * ENH: New interface: freesurfer.ApplyMask (mri_mask) * ENH: New FSL interface -- SwapDimensions (fslswapdim) * ENH: Sparse models allow regressor scaling and temporal derivatives * ENH: Added support for the component parts of FSL's TBSS workflow (TBSSSkeleton and DistanceMap) * ENH: dcm2nii interface exposes bvals, bvecs, reoriented and cropped images * ENH: Added several higher-level interfaces to the fslmaths command: - ChangeDataType, Threshold, MeanImage, IsotropicSmooth, ApplyMask, TemporalFilter, DilateImage, ErodeImage, SpatialFilter, UnaryMaths, BinaryMaths, MultiImageMaths * ENH: added support for networkx 1.4 and improved iterable expansion * ENH: Replaced BEDPOSTX and EddyCurrent with nipype pipelines * ENH: Ability to create a hierarchical dot file * ENH: Improved debugging information for rerunning nodes * ENH: Added 'stop_on_first_rerun' option * ENH: Added support for Camino * ENH: Added support for Camino2Trackvis * ENH: Added support for Connectome Viewer * BF: dcm2nii interface handles gzipped files correctly * BF: FNIRT generates proper outputs * BF: fsl.DTIFit now properly collects tensor volume * BF: updatehash now removes old result hash 
file Release 0.3.4 (Jan 12, 2011) ============================ * API: hash values for float use a string conversion up to the 10th decimal place. * API: Iterables in output path will always be generated as _var1_val1_var2_val2 pairs * ENH: Added support to nipy: GLM fit, contrast estimation and calculating mask from EPI * ENH: Added support for manipulating surface files in Freesurfer: - projecting volume images onto the surface - smoothing along the surface - transforming a surface image from one subject to another - using tksurfer to save pictures of the surface * ENH: Added support for flash processing using FreeSurfer * ENH: Added support for flirt matrix in BBRegister * ENH: Added support for FSL convert_xfm * ENH: hashes can be updated again without rerunning all nodes. * ENH: Added multiple regression design for FSL * ENH: Added SPM based Analyze to Nifti converter * ENH: Added increased support for PyXNAT * ENH: Added support for MCR-based binary version of SPM * ENH: Added SPM node for calculating various threshold statistics * ENH: Added distance and dissimilarity measurements * BF: Diffusion toolkit gets installed * BF: Changed FNIRT interface to accept flexible lists (rather than 4-tuples) on all options specific to different subsampling levels Release 0.3.3 (Sep 16, 2010) ============================ * API: subject_id in ModelSpec is now deprecated * API: spm.Threshold - does not need mask, beta, RPV anymore - takes only one image (stat_image - mind the name change) - works with SPM2 SPM.mat - returns additional map - pre topological FDR * ENH: Added support for Diffusion toolkit * ENH: Added support for FSL slicer and overlay * ENH: Added support for dcm2nii * BF: DataSink properly handles lists of lists now * BF: DataGrabber has option for raising Exception on getting empty lists * BF: Traits logic for 'requires' metadata * BF: allows workflows to be relocatable * BF: nested workflows with connections don't raise connection not found error * BF: 
multiple workflows with identical nodenames and iterables do not create nested workflows Release 0.3.2 (Aug 03, 2010) ============================ Enhancements ------------ - all outputs from nodes are now pickled as part of workflow processing - added git developer docs Bugs fixed ---------- * FreeSurfer - Fixed bugs in SegStats doctest Release 0.3.1 (Jul 29, 2010) ============================ Bugs fixed ---------- * FreeSurfer - Fixed bugs in glmfit and concatenate - Added group t-test to freesurfer tutorial Release 0.3 (Jul 27, 2010) ========================== Incompatible changes -------------------- * Complete redesign of the Interface class - heavy use of Traits. * Changes in the engine API - added Workflow and MapNode. Compulsory name argument. Features added -------------- * General: - Type checking of inputs and outputs using Traits from ETS. - Support for nested workflows. - Preliminary Slicer and AFNI support. - New flexible DataGrabber node. - AtlasPick and Threshold nodes. - Preliminary support for XNAT. - Doubled number of the tutorials. * FSL: - Added DTI processing nodes (note that TBSS nodes are still experimental). - Recreated FEAT workflow. * SPM: - Added New Segment and many other nodes. - Redesigned second level analysis. 
nipype-1.7.0/doc/changelog/1.X.X-changelog.rst000066400000000000000000001255731413403311400207610ustar00rootroot000000000000001.7.0 (October 20, 2021) ======================== (`Full changelog `__) * FIX: Make ants.LaplacianThickness output_image a string, not file (https://github.com/nipy/nipype/pull/3393) * FIX: coord for mrconvert (https://github.com/nipy/nipype/pull/3369) * FIX: ``antsRegistration`` allows the ``restrict_deformation`` to be float (https://github.com/nipy/nipype/pull/3387) * FIX: Also allow `errno.EBUSY` during `emptydirs` on NFS (https://github.com/nipy/nipype/pull/3357) * FIX: Removed exists=True from MathsOutput (https://github.com/nipy/nipype/pull/3385) * FIX: Extension not extensions, after pybids v0.9 (https://github.com/nipy/nipype/pull/3380) * ENH: Add CAT12 SANLM denoising filter (https://github.com/nipy/nipype/pull/3374) * ENH: Add expected steps for FreeSurfer 7 recon-all (https://github.com/nipy/nipype/pull/3389) * ENH: Stop printing false positive differences when logging cached nodes (https://github.com/nipy/nipype/pull/3376) * ENH: Add new flags to MRtrix/preprocess.py (DWI2Tensor, MRtransform) (https://github.com/nipy/nipype/pull/3365) * ENH: ``verbose`` input should not be hashed in ``ants.Registration`` (https://github.com/nipy/nipype/pull/3377) * REF: Clean-up the BaseInterface ``run()`` function using context (https://github.com/nipy/nipype/pull/3347) * DOC: Fix typo in README (https://github.com/nipy/nipype/pull/3386) * STY: Make private member name consistent with the rest of them (https://github.com/nipy/nipype/pull/3346) * MNT: Simplify interface execution and better error handling of ``Node`` (https://github.com/nipy/nipype/pull/3349) * MNT: Add user name and email to Docker to appease git/annex/datalad (https://github.com/nipy/nipype/pull/3378) * CI: Update CircleCI machine image (https://github.com/nipy/nipype/pull/3391) 1.6.1 (June 16, 2021) ===================== Bug-fix release in the 1.6.x series. 
(`Full changelog `__) * FIX: Set DistributedPluginBase.refidx type correctly (https://github.com/nipy/nipype/pull/3340) * FIX: change fsl interface randomise --f_only to --fonly for #3322 (https://github.com/nipy/nipype/pull/3325) * FIX: BET raising "No image files match: ..." with very long file names (https://github.com/nipy/nipype/pull/3309) * FIX: Update SmoothEstimateOutputSpec resels description (https://github.com/nipy/nipype/pull/3316) * ENH: Adds interfaces for MRtrix utils shconv and sh2amp (https://github.com/nipy/nipype/pull/3280) * ENH: Interface for R (https://github.com/nipy/nipype/pull/3291) * ENH: Add CAT12 interfaces (https://github.com/nipy/nipype/pull/3310) * ENH: AFNI 3dNetCorr as afni.NetCorr (https://github.com/nipy/nipype/pull/3263) * ENH: Skip newline before Python call in batch submission to facilitate containerized runs (https://github.com/nipy/nipype/pull/3297) * ENH: Add new dwifslpreproc interface for MRtrix3 (https://github.com/nipy/nipype/pull/3278) * REF: Cache nodes in workflow to speed up construction, other optimizations (https://github.com/nipy/nipype/pull/3331) * DOC: Fixed Developer Setup Link in install.rst (https://github.com/nipy/nipype/pull/3330) * MNT: Blacklist Dipy 1.4.1 (https://github.com/nipy/nipype/pull/3335) * MNT: Drop support for numpy < 1.15.3 (https://github.com/nipy/nipype/pull/3284) * CI: Build docker images with Python 3.8 (https://github.com/nipy/nipype/pull/3287) * CI: Drop Circle doc builds (https://github.com/nipy/nipype/pull/3338) * CI: Drop Travis (https://github.com/nipy/nipype/pull/3332) * CI: Build docker images with Python 3.8 (https://github.com/nipy/nipype/pull/3287) * CI: Add specs and style checks (https://github.com/nipy/nipype/pull/3321) * CI: Move from Travis to GitHub actions (https://github.com/nipy/nipype/pull/3318) 1.6.0 (November 28, 2020) ========================= New feature release in the 1.6.x series. 
In addition to the usual bug fixes, significant reductions were made in workflow startup costs. (`Full changelog `__) * FIX: Canonicalize environment dicts to strings in Windows (https://github.com/nipy/nipype/pull/3267) * FIX: Purge deprecated exception content accesses (https://github.com/nipy/nipype/pull/3272) * FIX: Handle changes in CLI structure of mrtrix3.DWIBiasCorrect (https://github.com/nipy/nipype/pull/3248) * FIX: EpiReg changed to not list certain outputs when 'wmseg' input is specified (https://github.com/nipy/nipype/pull/3265) * FIX: CI issues (https://github.com/nipy/nipype/pull/3262) * FIX: SPM SliceTiming must accept either Int or float for ref_slice and sli… (https://github.com/nipy/nipype/pull/3255) * FIX: Raise version error when using ``-g`` with ``antsAI`` < 2.3.0 (https://github.com/nipy/nipype/pull/3256) * FIX: No longer depending on pydotplus (networkx >=2.0 update) (https://github.com/nipy/nipype/pull/3251) * FIX: ANTs' utilities revision - bug fixes and add more operations to ``ants.ImageMath`` (https://github.com/nipy/nipype/pull/3236) * ENH: Handle unavailable traits due to version differences (https://github.com/nipy/nipype/pull/3273) * ENH: Optimize workflow.run performance (https://github.com/nipy/nipype/pull/3260) * DOC: Remove myself (@mr-c) from the zenodo metadata (https://github.com/nipy/nipype/pull/3271) 1.5.1 (August 16, 2020) ======================= Bug-fix release in the 1.5.x series. This release includes small updates to ANTs utilities that lie somewhere between bug fixes and enhancements. 
(`Full changelog `__) * FIX: Warn for min/max_ver traits when tool version can't be parsed (https://github.com/nipy/nipype/pull/3241) * FIX: Serialize all interface arguments when exporting workflows (https://github.com/nipy/nipype/pull/3240) * FIX: Permit identity transforms in list of transforms given to ants.ApplyTransforms (https://github.com/nipy/nipype/pull/3237) * FIX: ANTs' utilities revision - bug fixes and add more operations to ``ants.ImageMath`` (https://github.com/nipy/nipype/pull/3236) * DOC: Skip BIDSDataGrabber doctest if pybids is missing (https://github.com/nipy/nipype/pull/3224) 1.5.0 (June 03, 2020) ===================== New feature release in the 1.5.x series. In this release, the example scripts have been split out into their own package: `niflow-nipype1-examples `__. (`Full changelog `__) * FIX: volterra_expansion_order documentation error (https://github.com/nipy/nipype/pull/3213) * FIX: BET incorrect output paths (https://github.com/nipy/nipype/pull/3214) * FIX: Terminal output in ``report.rst`` spreads one line per character (https://github.com/nipy/nipype/pull/3220) * FIX: Allow parsing freesurfer 7 version string (https://github.com/nipy/nipype/pull/3216) * FIX: Use PackageInfo to get NiftyReg version (https://github.com/nipy/nipype/pull/3194) * FIX: Partial rollback of N4BiasFieldCorrection (https://github.com/nipy/nipype/pull/3188) * FIX: ANTs' tools maintenance overhaul (https://github.com/nipy/nipype/pull/3180) * FIX: load_resultfile crashes if open resultsfile from crashed job (https://github.com/nipy/nipype/pull/3182) * FIX: FSL model.py make multiple F-tests (https://github.com/nipy/nipype/pull/3166) * ENH: Restore ants.legacy interfaces (https://github.com/nipy/nipype/pull/3222) * ENH: Add ``"TruncateImageIntensity"`` operation to ``ants.utils.Image.Math`` (https://github.com/nipy/nipype/pull/3210) * ENH: SPM NewSegment multi-channel segmentation (https://github.com/nipy/nipype/pull/3162) * ENH: Add reverse-ordered transform 
lists to ants.Registration outputs (https://github.com/nipy/nipype/pull/3192) * ENH: Improve workflow connect performance (https://github.com/nipy/nipype/pull/3184) * ENH: Add ``ConstrainedSphericalDeconvolution`` interface to replace ``EstimateFOD`` for MRtrix3's ``dwi2fod`` (https://github.com/nipy/nipype/pull/3176) * ENH: Detect values for EulerNumber interface (https://github.com/nipy/nipype/pull/3173) * ENH: Remove examples from repository (https://github.com/nipy/nipype/pull/3172) * TEST: Clean up tests (https://github.com/nipy/nipype/pull/3195) * TEST: Mock terminal output before testing changing default value (https://github.com/nipy/nipype/pull/3193) * REF: make invocations of python and pytest consistent with the one used/desired python (https://github.com/nipy/nipype/pull/3208) * REF: Prefer math.gcd to hand-rolled Euclid's algorithm (https://github.com/nipy/nipype/pull/3177) * REF: Removed all uses of numpy_mmap (https://github.com/nipy/nipype/pull/3121) * DOC: Sphinx 3 compatibility (https://github.com/nipy/nipype/pull/3206) * DOC: Update links, typos in contributing guide (https://github.com/nipy/nipype/pull/3160) * DOC: Update SelectFiles docstring to match actual behavior (https://github.com/nipy/nipype/pull/3041) * DOC: Updated .zenodo.json file (https://github.com/nipy/nipype/pull/3167) * DOC: Update .zenodo.json (https://github.com/nipy/nipype/pull/3165) * MNT: Permit recent nilearns (https://github.com/nipy/nipype/pull/2841) * MNT: Test Python 3.8 (https://github.com/nipy/nipype/pull/3154) * MNT: Restore ReadTheDocs (https://github.com/nipy/nipype/pull/3207) * MNT: Update Zenodo ordering based on commit count (https://github.com/nipy/nipype/pull/3169) 1.4.2 (February 14, 2020) ========================= (`Full changelog `__) * FIX: Allow ``fsl.MultipleRegressDesign`` to create multiple F-tests (https://github.com/nipy/nipype/pull/3166) * FIX: Reliably parse SGE job IDs in the presence of warnings (https://github.com/nipy/nipype/pull/3168) * FIX: 
Move TraitType import, handle API change for NoDefaultSpecified (https://github.com/nipy/nipype/pull/3159) 1.4.1 (January 27, 2020) ======================== (`Full changelog `__) * FIX: DataSink to S3 buckets (https://github.com/nipy/nipype/pull/3130) * FIX: improve version checking for nodes of workflows (https://github.com/nipy/nipype/pull/3152) * FIX: mapnode to generate result file when crashes in single node mode (https://github.com/nipy/nipype/pull/3143) * FIX: Can't seem to import workflows from niflows in CircleCI (https://github.com/nipy/nipype/pull/3134) * FIX: Repair aftermath of docs refactor (https://github.com/nipy/nipype/pull/3133) * FIX: change ANTS number_of_time_steps from Float to Int (https://github.com/nipy/nipype/pull/3118) * DOC: Revise generation of examples to work in RTD (https://github.com/nipy/nipype/pull/3132) * DOC: Bring examples generation back to ``doc/conf.py`` (https://github.com/nipy/nipype/pull/3131) * DOC: Documentation overhaul (https://github.com/nipy/nipype/pull/3124) * DOC: Deep revision of documentation building (https://github.com/nipy/nipype/pull/3120) * DOC: Deduplicate code for Sphinx's APIdoc generation (https://github.com/nipy/nipype/pull/3119) * MNT: Update requirements.txt post-1.4 (https://github.com/nipy/nipype/pull/3153) 1.4.0 (December 20, 2019) ========================= (`Full changelog `__) * FIX: Mark strings containing regex escapes as raw (https://github.com/nipy/nipype/pull/3106) * ENH: Pacify DeprecationWarnings caused by nibabel 3 pre-release (https://github.com/nipy/nipype/pull/3099) * ENH: Allow Nipype configuration directory to be specified with NIPYPE_CONFIG_DIR environment variable (https://github.com/nipy/nipype/pull/3073) * ENH: Add options and outputs to ``fsl.Eddy`` interface (https://github.com/nipy/nipype/pull/3034) * ENH: Add skull_file output to fsl.BET interface (https://github.com/nipy/nipype/pull/3095) * RF: Drop various remaining compatibilities for Python < 3.5 
(https://github.com/nipy/nipype/pull/2831) * DOC: Add Python 2 statement to README, reference maintenance branch in CONTRIBUTING (https://github.com/nipy/nipype/pull/3115) * DOC: Miss underline before cmd in example code (https://github.com/nipy/nipype/pull/3107) * STY: Black (https://github.com/nipy/nipype/pull/3096) * MNT: Set junit_family to suppress pytest warning (https://github.com/nipy/nipype/pull/3111) * MNT: Fix Dorota Jarecka ORCID (https://github.com/nipy/nipype/pull/3100) * MNT: Drop Python 2 support (https://github.com/nipy/nipype/pull/2654) 1.3.1 (November 12, 2019) ========================= * FIX: Restore checking traits or bunch (https://github.com/nipy/nipype/pull/3094) 1.3.0 (November 11, 2019) ========================= (`Full changelog `__) * FIX: Fixed typo in QwarpInputSpec Trait description (https://github.com/nipy/nipype/pull/3079) * FIX: Restore ``AFNICommand._get_fname``, required by some interfaces (https://github.com/nipy/nipype/pull/3071) * FIX: Remove asynchronous chdir callback (https://github.com/nipy/nipype/pull/3060) * FIX: Minimize scope for directory changes while loading results file (https://github.com/nipy/nipype/pull/3061) * ENH: Minimize the number of calls to ``_load_results`` when populating inputs (https://github.com/nipy/nipype/pull/3075) * ENH: Refactor savepkl/loadpkl - add a window for loadpkl to wait for the file (https://github.com/nipy/nipype/pull/3089) * ENH: Add "ExportFile" interface as simple alternative to "DataSink" (https://github.com/nipy/nipype/pull/3054) * ENH: Allow nipype.cfg in cwd to be read even if ~/.nipype does not exist (https://github.com/nipy/nipype/pull/3072) * ENH: Add precommit information for contributors and pre-commit style (https://github.com/nipy/nipype/pull/3063) * ENH: Delay etelemetry for non-interactive sessions, report bad versions (https://github.com/nipy/nipype/pull/3049) * ENH: Run memoized check_version at REPL import, Node/Workflow/Interface init 
(https://github.com/nipy/nipype/pull/30) * RF: Provide functions to augment old Path.mkdir, Path.resolve methods (https://github.com/nipy/nipype/pull/3050) * RF: Redirect nipype.workflows to niflow.nipype1.workflows (https://github.com/nipy/nipype/pull/3067) * TST: Skip dcm2niix test if data fails to download (https://github.com/nipy/nipype/pull/3059) * TST: dcm2niix test fix (https://github.com/nipy/nipype/pull/3058) * MAINT: Drop Python 3.4 support (https://github.com/nipy/nipype/pull/3062) * CI: ``make specs`` on Travis (https://github.com/nipy/nipype/pull/3066) 1.2.3 (September 23, 2019) ========================== Nipype 1.2.3 will be the last version to support Python 3.4. (`Full changelog `__) * FIX: Patch Path.mkdir for Python 2 (https://github.com/nipy/nipype/pull/3037) * FIX: Drop deprecated message argument to ``FileNotFoundError`` (https://github.com/nipy/nipype/pull/3035) * FIX: Handle NIFTI extensions in ``afni.Qwarp`` (https://github.com/nipy/nipype/pull/3028) * FIX: Disallow returning ``None`` in ``pipeline.utils.load_resultfile`` (https://github.com/nipy/nipype/pull/3023) * ENH: Allow afni.CatMatvec to accept empty string opkeys (https://github.com/nipy/nipype/pull/2943) * ENH: Add ``mrtrix3.MRResize`` interface (https://github.com/nipy/nipype/pull/3031) * ENH: Add version check / telemetry to Nipype (https://github.com/nipy/nipype/pull/3027) * ENH: Update MCFLIRT outputs for FSL 6+ (https://github.com/nipy/nipype/pull/3029) * ENH: Lightweight node cache checking (https://github.com/nipy/nipype/pull/3026) * ENH: Avoid loading result from file when writing reports (https://github.com/nipy/nipype/pull/3024) * ENH: replace portalocker with filelock (https://github.com/nipy/nipype/pull/3025) * MAINT: Set minimum yapf for `checkspecs` to 0.27 (https://github.com/nipy/nipype/pull/3033) 1.2.2 (September 07, 2019) ========================== (`Full changelog `__) * FIX: Ensure ``loadpkl`` returns a not None value (https://github.com/nipy/nipype/pull/3020) * 
FIX: ``loadpkl`` failed when pklz file contained versioning info (https://github.com/nipy/nipype/pull/3017) * FIX: Update mne.WatershedBEM command line (https://github.com/nipy/nipype/pull/3007) * FIX: Specify correct stop criterion flag in PETPVC (https://github.com/nipy/nipype/pull/3010) * ENH: Add interface for AFNI ``3dTsmooth`` (https://github.com/nipy/nipype/pull/2948) * ENH: Additional arguments to ANTs N4BiasFieldCorrection (https://github.com/nipy/nipype/pull/3012) * ENH: Add ``--rescale-intensities`` and name_source to N4BiasFieldCorrection (https://github.com/nipy/nipype/pull/3011) * ENH: Add index_mask_file input to ImageStats (https://github.com/nipy/nipype/pull/3005) * RF: Remove versioning from ``loadpkl`` (https://github.com/nipy/nipype/pull/3019) * MAINT: Add ``python_requires`` to package metadata (https://github.com/nipy/nipype/pull/3006) 1.2.1 (August 19, 2019) ======================= (`Full changelog `__) * FIX: Resolve/rebase paths from/to results files (https://github.com/nipy/nipype/pull/2971) * FIX: Use ``load_resultfile`` when loading a results pickle (https://github.com/nipy/nipype/pull/2985) * FIX: Incorrect extension identified when checking ``File`` traits (https://github.com/nipy/nipype/pull/2987) * FIX: Correctly pickle ``OuputMultiObject`` traits (https://github.com/nipy/nipype/pull/2983) * FIX: Improve output handling in DWIDenoise and DWIBiasCorrect (https://github.com/nipy/nipype/pull/2978) * FIX: Docker build (https://github.com/nipy/nipype/pull/2963) * FIX: Remove '=' signs from EddyQuad argument specifications (https://github.com/nipy/nipype/pull/2941) * FIX: Set input model to bedpostx for camino.TrackBedpostxProba (https://github.com/nipy/nipype/pull/2947) * FIX: Allow ``max_sh`` to not be set (auto mode) (https://github.com/nipy/nipype/pull/2940) * ENH: Update mrtrix reconst.py EstimateFOD max_sh to be able to accept list (https://github.com/nipy/nipype/pull/2990) * ENH: Let ``indirectory`` handle 
``nipype.utils.filemanip.Path`` (https://github.com/nipy/nipype/pull/2989) * ENH: Add resolve/rebase ``BasePath`` traits methods & tests (https://github.com/nipy/nipype/pull/2970) * ENH: Modify ``Directory`` and ``File`` traits to get along with pathlib (https://github.com/nipy/nipype/pull/2962) * REF: Update nipype2boutiques script (https://github.com/nipy/nipype/pull/2894) * TST: Parametrize JoinNode expansion tests over config ``needed_outputs`` (https://github.com/nipy/nipype/pull/2981) * MAINT: Pin lxml<4.4.0 for Python 3.4 (https://github.com/nipy/nipype/pull/2980) * MAINT: Refactor ``aggregate_outputs`` for readability (https://github.com/nipy/nipype/pull/2969) * MAINT: Bump neurodocker version (https://github.com/nipy/nipype/pull/2965) * MAINT: Various minor improvements to complement previous PR (https://github.com/nipy/nipype/pull/2964) * MAINT: Sort dependencies alphabetically (https://github.com/nipy/nipype/pull/2961) 1.2.0 (May 09, 2019) ==================== (`Full changelog `__) * FIX: Parsing of filename in AlignEpiAnatPy when filename does not have + (https://github.com/nipy/nipype/pull/2909) * FIX: Import nibabel reorientation bug fix (https://github.com/nipy/nipype/pull/2912) * FIX: Update FNIRT outputs for warped_file log_file to include cwd (https://github.com/nipy/nipype/pull/2900) * FIX: Sort conditions in bids_gen_info to ensure consistent order (https://github.com/nipy/nipype/pull/2867) * FIX: Some traits-5.0.0 don't work with Python 2.7 (https://github.com/nipy/nipype/pull/1) * ENH: CompCor enhancement (https://github.com/nipy/nipype/pull/2878) * ENH: Do not override caught exceptions with FileNotFoundError from unfinished hashfile (https://github.com/nipy/nipype/pull/2919) * ENH: More verbose description when a faulty results file is loaded (https://github.com/nipy/nipype/pull/2920) * ENH: Add all DIPY workflows dynamically (https://github.com/nipy/nipype/pull/2905) * ENH: Add mrdegibbs and dwibiascorrect from mrtrix3 
(https://github.com/nipy/nipype/pull/2904) * TEST: Fix CI builds (https://github.com/nipy/nipype/pull/2927) * MAINT: Reduce deprecation warnings (https://github.com/nipy/nipype/pull/2903) 1.1.9 (February 25, 2019) ========================= (`Full changelog `__) * FIX: Make positional arguments to LaplacianThickness require previous argument (https://github.com/nipy/nipype/pull/2848) * FIX: Import math and csv modules for bids_gen_info (https://github.com/nipy/nipype/pull/2881) * FIX: Ensure outputs can be listed in camino.ProcStreamlines by defining instance variable (https://github.com/nipy/nipype/pull/2739) * ENH: Allow afni.MaskTool to take multiple input files (https://github.com/nipy/nipype/pull/2892) * ENH: Add flags dictionary input to spm.Level1Design (https://github.com/nipy/nipype/pull/2861) * ENH: Threshold stddev once only in TSNR (https://github.com/nipy/nipype/pull/2883) * ENH: Add workbench.CiftiSmooth interface (https://github.com/nipy/nipype/pull/2871) * DOC: Replace initialism typo in comment with intended phrase (https://github.com/nipy/nipype/pull/2875) * DOC: Fix typos in ANTs Registration input documentation (https://github.com/nipy/nipype/pull/2869) 1.1.8 (January 28, 2019) ======================== (`Full changelog `__) * FIX: ANTS LaplacianThickness cmdline opts fixed up (https://github.com/nipy/nipype/pull/2846) * FIX: Resolve LinAlgError during SVD (https://github.com/nipy/nipype/pull/2838) * ENH: Add interfaces wrapping DIPY workflows (https://github.com/nipy/nipype/pull/2830) * ENH: Update BIDSDataGrabber for pybids 0.7 (https://github.com/nipy/nipype/pull/2737) * ENH: Add FSL `eddy_quad` interface (https://github.com/nipy/nipype/pull/2825) * ENH: Support tckgen -select in MRtrix3 v3+ (https://github.com/nipy/nipype/pull/2823) * ENH: Support for BIDS event files (https://github.com/nipy/nipype/pull/2845) * ENH: CompositeTransformUtil, new ANTs interface (https://github.com/nipy/nipype/pull/2785) * RF: Move pytest and pytest-xdist from 
general requirement into tests_required (https://github.com/nipy/nipype/pull/2850) * DOC: Add S3DataGrabber example (https://github.com/nipy/nipype/pull/2849) * DOC: Skip conftest module in API generation (https://github.com/nipy/nipype/pull/2852) * DOC: Hyperlink DOIs to preferred resolver (https://github.com/nipy/nipype/pull/2833) * MAINT: Install numpy!=1.16.0 from conda in Docker (https://github.com/nipy/nipype/pull/2862) * MAINT: Drop pytest-xdist requirement, minimum pytest version (https://github.com/nipy/nipype/pull/2856) * MAINT: Disable numpy 1.16.0 for Py2.7 (https://github.com/nipy/nipype/pull/2855) 1.1.7 (December 17, 2018) ========================= (`Full changelog `__) * FIX: Copy node list before generating a flat graph (https://github.com/nipy/nipype/pull/2828) * FIX: Update pytest req'd version to 3.6 (https://github.com/nipy/nipype/pull/2827) * FIX: Set ResourceMonitor.fname to an absolute path (https://github.com/nipy/nipype/pull/2824) * FIX: Order of SPM.NewSegment channel_info boolean tuple is (Field, Corrected) (https://github.com/nipy/nipype/pull/2817) * FIX: Indices were swapped for memory and cpu profile data (https://github.com/nipy/nipype/pull/2816) * FIX: ``status_callback`` not called with ``stop_on_first_crash`` (https://github.com/nipy/nipype/pull/2810) * FIX: Change undefined ScriptError on LFS plugin to IOError (https://github.com/nipy/nipype/pull/2803) * ENH: Add NaN failure mode to CompCor interfaces (https://github.com/nipy/nipype/pull/2819) * ENH: Enable cnr_maps and residuals outputs for FSL eddy (https://github.com/nipy/nipype/pull/2750) * ENH: Improve ``str2bool`` + doctests (https://github.com/nipy/nipype/pull/2807) * TST: Improve py.test configuration of doctests (https://github.com/nipy/nipype/pull/2802) * DOC: Update DOI badge to point to all versions (https://github.com/nipy/nipype/pull/2804) * MAINT: Offload interfaces with help formatting (https://github.com/nipy/nipype/pull/2797) * MAINT: Reduce minimal code 
redundancy in filemanip.get_dependencies (https://github.com/nipy/nipype/pull/2782) * MAINT: Delayed imports to reduce import time (https://github.com/nipy/nipype/pull/2809) 1.1.6 (November 26, 2018) ========================= (`Full changelog `__) * FIX: MapNodes fail when ``MultiProcPlugin`` passed by instance (https://github.com/nipy/nipype/pull/2786) * FIX: --fineTune arguments order for MeshFix command (https://github.com/nipy/nipype/pull/2780) * ENH: Add mp_context plugin arg for MultiProc (https://github.com/nipy/nipype/pull/2778) * ENH: Create a crashfile even if 'stop_on_first_crash' is set (https://github.com/nipy/nipype/pull/2774) * ENH: Add ExtractedBrainN4 output to ANTs CorticalThickness interface (https://github.com/nipy/nipype/pull/2784) * STY: Combine split import (https://github.com/nipy/nipype/pull/2801) * DOC: use https in css stylesheet url (https://github.com/nipy/nipype/pull/2779) * MAINT: Outsource ``get_filecopy_info()`` from interfaces (https://github.com/nipy/nipype/pull/2798) * MAINT: Import only Sequence to avoid DeprecationWarning (https://github.com/nipy/nipype/pull/2793) * MAINT: One less DeprecationWarning (configparser) (https://github.com/nipy/nipype/pull/2794) * MAINT: DeprecationWarning: use ``HasTraits.trait_set`` instead (https://github.com/nipy/nipype/pull/2792) * MAINT: Stop using deprecated ``logger.warn()`` (https://github.com/nipy/nipype/pull/2788) * MAINT: Move ``interfaces.base.run_command`` to ``nipype.utils.subprocess`` (https://github.com/nipy/nipype/pull/2777) * MAINT: Force numpy>=1.15.4 when Python>=3.7 (https://github.com/nipy/nipype/pull/2775) 1.1.5 (November 08, 2018) ========================= Hotfix release. 
(`Full changelog `__) * ENH: Allow timeouts during SLURM job status checks (https://github.com/nipy/nipype/pull/2767) * RF: Subclass non-daemon variants of all multiprocessing contexts (https://github.com/nipy/nipype/pull/2771) 1.1.4 (October 31, 2018) ======================== (`Full changelog `__) * FIX: Python 2.7-3.7.1 compatible NonDaemonPool (https://github.com/nipy/nipype/pull/2754) * FIX: VRML typo (VMRL) in MeshFix (https://github.com/nipy/nipype/pull/2757) * FIX: Refine FSL.split output identification (https://github.com/nipy/nipype/pull/2746) * FIX: Reuse _gen_filename logic in ants.LaplacianThickness (https://github.com/nipy/nipype/pull/2734) * FIX: Remove 'reg_term' default from dwi2tensor interface (https://github.com/nipy/nipype/pull/2731) * FIX: Keep profile files when ``remove_unnecessary_outputs = true`` (https://github.com/nipy/nipype/pull/2718) * ENH: Add afni.LocalStat and afni.ReHo, update afni.ROIStats inputs (https://github.com/nipy/nipype/pull/2740) * ENH: Add compression option for bru2nii (https://github.com/nipy/nipype/pull/2762) * ENH: Add slice_encoding_direction input to TShift (https://github.com/nipy/nipype/pull/2753) * ENH: Add 'sse' output to FSL DTIFit interface (https://github.com/nipy/nipype/pull/2749) * ENH: Update ``ants.LaplacianThickness`` to use ``name_source`` (https://github.com/nipy/nipype/pull/2747) * ENH: Add tab completion for node and interface inputs properties (https://github.com/nipy/nipype/pull/2735) * ENH: enable/disable resource monitor in the fixture per test (https://github.com/nipy/nipype/pull/2725) * TEST: Update expected dotfile text for networkx 1.x (https://github.com/nipy/nipype/pull/2730) * DOC: Move user docs from nipype to nipype_tutorial (https://github.com/nipy/nipype/pull/2726) * DOC: Use consistent name in reconall workflow docstring (https://github.com/nipy/nipype/pull/2758) * MAINT: Use neurodocker 0.4.1 + apt install afni (https://github.com/nipy/nipype/pull/2707) * MAINT: Fix prov and rdflib 
in nipype (https://github.com/nipy/nipype/pull/2701) * MAINT: Correct readthedocs build error (https://github.com/nipy/nipype/pull/2723) * MAINT: Pin codecov to <5.0 so Travis is fixed (https://github.com/nipy/nipype/pull/2728) * CI: Lock travis pybids 0.6.5 (https://github.com/nipy/nipype/pull/2720) 1.1.3 (September 24, 2018) ========================== (`Full changelog `__) * FIX: Return afni.Qwarp outputs as absolute paths (https://github.com/nipy/nipype/pull/2705) * FIX: Add informative error for interfaces that fail to return valid runtime object (https://github.com/nipy/nipype/pull/2692) * FIX: Construct MCFLIRT output paths relative to out_file (https://github.com/nipy/nipype/pull/2703) * FIX: SLURM plugin polling (https://github.com/nipy/nipype/pull/2693) * FIX: Handle missing substring in SPM docs (https://github.com/nipy/nipype/pull/2691) * ENH: Add colorFA output to DIPY DTI interface (https://github.com/nipy/nipype/pull/2695) * RF: Use runtime.cwd in Rename (https://github.com/nipy/nipype/pull/2688) * DOC: Fix naming of motion parameters (roll/yaw swapped) (https://github.com/nipy/nipype/pull/2696) * DOC: Update links to user and developer help forums (https://github.com/nipy/nipype/pull/2686) * CI: Test 3.7, resume testing nipy extras (https://github.com/nipy/nipype/pull/2682) 1.1.2 (August 11, 2018) ======================= Hot-fix release, resolving incorrect dependencies in 1.1.1 wheel. 
(`Full changelog `__) * FIX: Read BIDS config.json under grabbids or layout (https://github.com/nipy/nipype/pull/2679) * FIX: Node __repr__ and detailed graph expansion (https://github.com/nipy/nipype/pull/2669) * FIX: Prevent double-collapsing of nested lists by OutputMultiObject (https://github.com/nipy/nipype/pull/2673) * ENH: Add interface to SPM realign_unwarp (https://github.com/nipy/nipype/pull/2635) * MAINT: Fix wheel build to ensure futures is only required in Python 2 (https://github.com/nipy/nipype/pull/2678) * MAINT: ensure interface _cmd only includes executable (https://github.com/nipy/nipype/pull/2674) * MAINT: Issue template: Pretty print platform details (https://github.com/nipy/nipype/pull/2671) * CI: removing travis_retry for pip install pytest xdist 1.22.5 (https://github.com/nipy/nipype/pull/2664) 1.1.1 (July 30, 2018) ===================== (`Full changelog `__) * FIX: Un-set incorrect default options in TOPUP (https://github.com/nipy/nipype/pull/2637) * FIX: Copy FSCommand.version to ReconAll.version (https://github.com/nipy/nipype/pull/2656) * FIX: Various BIDSDataGrabber fixes (https://github.com/nipy/nipype/pull/2651) * FIX: changing Node._output_dir to realpath (https://github.com/nipy/nipype/pull/2639) * FIX: Typo in DWIExtract of Mrtrix3interface (https://github.com/nipy/nipype/pull/2634) * FIX: Typo in FSLXCommandInputSpec (https://github.com/nipy/nipype/pull/2628) * ENH: Allow transform to be saved from AFNI 3dWarp (https://github.com/nipy/nipype/pull/2642) * ENH: Allow BIDS-style slice timings to be passed directly to TShift (https://github.com/nipy/nipype/pull/2608) * ENH: S3 access using instance role (https://github.com/nipy/nipype/pull/2621) * ENH Minor improvements to PR template (https://github.com/nipy/nipype/pull/2636) * TEST: make specs (https://github.com/nipy/nipype/pull/2653) * DOC: update neurodocker tutorial for neurodocker version 0.4.0 (https://github.com/nipy/nipype/pull/2647) * MAINT: Remove vestiges of nose testing 
library (https://github.com/nipy/nipype/pull/2662) * MAINT: Make pytest-xdist a dependency (https://github.com/nipy/nipype/pull/2649) * CI: Install pytest>=3.4 in Travis (https://github.com/nipy/nipype/pull/2659) 1.1.0 (July 04, 2018) ===================== (`Full changelog `__) * RF: Futures-based MultiProc (https://github.com/nipy/nipype/pull/2598) * FIX: Avoid closing file descriptors on Windows (https://github.com/nipy/nipype/pull/2617) * MAINT: Play nice with external logging (https://github.com/nipy/nipype/pull/2611) * MAINT: Remove ignore_exception and terminal_output traits from input specs (https://github.com/nipy/nipype/pull/2618) * MAINT: Converge autotest names (https://github.com/nipy/nipype/pull/2610) * ENH: Add versioning metadata to crash files (https://github.com/nipy/nipype/pull/2626) * ENH add -dsort option to TProject (https://github.com/nipy/nipype/pull/2623) * ENH: Add Rescale interface (https://github.com/nipy/nipype/pull/2599) * DOC: Improve documentation for ANTs/FSL interfaces (https://github.com/nipy/nipype/pull/2593) * CI: Stop using Miniconda on Travis (https://github.com/nipy/nipype/pull/2600) * CI: Add PyPI validation on rel/* branches (https://github.com/nipy/nipype/pull/2603) 1.0.4 (May 29, 2018) ==================== (`Full changelog `__) * FIX: Update logging levels in enable_debug_mode (https://github.com/nipy/nipype/pull/2595) * FIX: Set default result in DistributedPluginBase._clean_queue (https://github.com/nipy/nipype/pull/2596) * FIX: Correctly connect JoinNodes in nested iterables (https://github.com/nipy/nipype/pull/2597) * FIX: DTITK nonlinear workflow origin reslicing (https://github.com/nipy/nipype/pull/2561) * FIX: ResponseSD support for multiple b-vals (https://github.com/nipy/nipype/pull/2582) * FIX: Workaround to ICA-AROMA change of directory (https://github.com/nipy/nipype/pull/2566) * FIX/TEST: Gunzip cleanup and test (https://github.com/nipy/nipype/pull/2564) * FIX: Print UID in crashfile if login name is 
unavailable (https://github.com/nipy/nipype/pull/2563) * ENH: initial connectome workbench support (https://github.com/nipy/nipype/pull/2594) * ENH: AFNI (3d)LocalBistat interface (https://github.com/nipy/nipype/pull/2590) * ENH: Reorient interface (https://github.com/nipy/nipype/pull/2572) * ENH: FSL slice interface (https://github.com/nipy/nipype/pull/2585) * ENH: LabelGeometryMeasures interface (https://github.com/nipy/nipype/pull/2586) * ENH: MRTrix3 dwidenoise interface (https://github.com/nipy/nipype/pull/2568) * ENH: ReportCapableInterface mix-in/base interface (https://github.com/nipy/nipype/pull/2560) * CI: Move PyPI deployment to Circle (https://github.com/nipy/nipype/pull/2587) * CI: Submit Travis results to codecov (https://github.com/nipy/nipype/pull/2574) 1.0.3 (April 30, 2018) ====================== (`Full changelog `__) * FIX: Propagate explicit Workflow config to Nodes (https://github.com/nipy/nipype/pull/2559) * FIX: Return non-enhanced volumes from dwi_flirt (https://github.com/nipy/nipype/pull/2547) * FIX: Skip filename generation when required fields are missing (https://github.com/nipy/nipype/pull/2549) * FIX: Fix Afni's Allineate hashing and out_file (https://github.com/nipy/nipype/pull/2502) * FIX: Replace deprecated ``HasTraits.get`` with ``trait_get`` (https://github.com/nipy/nipype/pull/2534) * FIX: Typo in "antsRegistrationSyNQuick.sh" (https://github.com/nipy/nipype/pull/2544) * FIX: DTITK Interface (https://github.com/nipy/nipype/pull/2514) * FIX: Add ``-mas`` argument to fsl.utils.ImageMaths (https://github.com/nipy/nipype/pull/2529) * FIX: Build cmdline from working directory (https://github.com/nipy/nipype/pull/2521) * FIX: FSL orthogonalization bug (https://github.com/nipy/nipype/pull/2523) * FIX: Re-enable dcm2niix source_names (https://github.com/nipy/nipype/pull/2550) * ENH: Add an activation count map interface (https://github.com/nipy/nipype/pull/2522) * ENH: Revise the implementation of FuzzyOverlap 
(https://github.com/nipy/nipype/pull/2530) * ENH: Add MultiObject, ensure/simplify_list; alias old names for 1.x compatibility (https://github.com/nipy/nipype/pull/2517) * ENH: Add LibraryBaseInterface (https://github.com/nipy/nipype/pull/2538) * ENH: Define default output file template for afni.CatMatvec (https://github.com/nipy/nipype/pull/2527) * MAINT: Deprecate terminal_output and ignore_exception from CommandLine (https://github.com/nipy/nipype/pull/2552) * MAINT: Set traits default values properly (https://github.com/nipy/nipype/pull/2533) * MAINT: use RawConfigParser (https://github.com/nipy/nipype/pull/2542) * MAINT: Minor autotest cleanups (https://github.com/nipy/nipype/pull/2519) * CI: Add retry script for Docker commands (https://github.com/nipy/nipype/pull/2516) 1.0.2 (March 27, 2018) ====================== (`Full changelog `__) * FIX: dcm2niix interface (https://github.com/nipy/nipype/pull/2498) * FIX: mark .niml.dset as special extension in utils.filemanip (https://github.com/nipy/nipype/pull/2495) * FIX: handle automatic module creation, name extraction, default value (https://github.com/nipy/nipype/pull/2490) * FIX: Check and report mount table parsing failures (https://github.com/nipy/nipype/pull/2476) * FIX: Check against full node name when reconnecting JoinNodes (https://github.com/nipy/nipype/pull/2479) * DOC: Add tutorials, porcupine to users TOC (https://github.com/nipy/nipype/pull/2503 * DOC: Contributing and testing (https://github.com/nipy/nipype/pull/2482) * DOC: Describe 'orphaned' tag in CONTRIBUTING (https://github.com/nipy/nipype/pull/2481) * DOC: Add details for dcm2niix output filename pattern (https://github.com/nipy/nipype/pull/2512) * ENH: Add interface for AFNI 3dNwarpAdjust (https://github.com/nipy/nipype/pull/2450) * ENH: Update SSHDataGrabber to fetch related files (https://github.com/nipy/nipype/pull/2104) * ENH: Add interpolation order parameter to NiftyReg's RegTools (https://github.com/nipy/nipype/pull/2471) * MAINT: 
Stray warnings and exceptions (https://github.com/nipy/nipype/pull/2478) * MAINT: Add dev install option, update CONTRIBUTING (https://github.com/nipy/nipype/pull/2477) * MAINT: Sync requirements with info.py (https://github.com/nipy/nipype/pull/2472) * CI: Update Travis builds, Docker to use latest miniconda (https://github.com/nipy/nipype/pull/2455) * TEST: Parallelize pytest (https://github.com/nipy/nipype/pull/2469) 1.0.1 (February 27, 2018) ========================= (`Full changelog `__) * FIX: Small bug in freesurfer label2annot fill_thresh specs [#2377](https://github.com/nipy/nipype/pull/2377) * FIX: Error creating gradients in DTIRecon [#2460](https://github.com/nipy/nipype/pull/2460) * FIX: improve matlab_cmd [#2452](https://github.com/nipy/nipype/pull/2452) * FIX: Extract unit information from image header in CompCor [#2458](https://github.com/nipy/nipype/pull/2458) * FIX: Update pybids data directory, unbundle out-of-date numpydoc [#2437](https://github.com/nipy/nipype/pull/2437) * FIX: Out_file bugs in Afni.Zcat and Afni.Merge interfaces [#2424](https://github.com/nipy/nipype/pull/2424) * FIX: Re-enable spm.Realign to take lists of lists of files [#2409](https://github.com/nipy/nipype/pull/2409) * FIX: Remove deprecated output from ICC interface [#2422](https://github.com/nipy/nipype/pull/2422) * FIX: Argstr for mask in Afni.BlurToFWHM [#2418](https://github.com/nipy/nipype/pull/2418) * FIX: Default value for sbatch_args (SLURMGraph) [#2417](https://github.com/nipy/nipype/pull/2417) * FIX: Ortvec argstr for Afni.Deconvolve [#2415](https://github.com/nipy/nipype/pull/2415) * FIX: Bug fixes for afni.model [#2398](https://github.com/nipy/nipype/pull/2398) * DOC: Add brief neurodocker tutorial [#2464](https://github.com/nipy/nipype/pull/2464) * DOC: Fix tutorials [#2459](https://github.com/nipy/nipype/pull/2459) * ENH: antsRegistrationSyNQuick interface [#2453](https://github.com/nipy/nipype/pull/2453) * ENH: Automate updates of CHANGES 
[#2440](https://github.com/nipy/nipype/pull/2440) * ENH: Add SPM Fieldmap Tool wrapper [#1905](https://github.com/nipy/nipype/pull/1905) * ENH: Additional option for DataGrabber [#1915](https://github.com/nipy/nipype/pull/1915) * ENH: Add 3dTproject AFNI interface, Fix OneDToolPy, Add -noFDR flag to 3dDeconvolve [#2426](https://github.com/nipy/nipype/pull/2426) * ENH: c3d/c4d interface [#2430](https://github.com/nipy/nipype/pull/2430) * ENH: Allow input weight for AFNI's volreg. [#2396](https://github.com/nipy/nipype/pull/2396) * ENH: Delay crashing if exception is raised in local hash check [#2410](https://github.com/nipy/nipype/pull/2410) * CI: Add doctests to pytest script [#2449](https://github.com/nipy/nipype/pull/2449) * CI: Ignore tests in calculating coverage [#2443](https://github.com/nipy/nipype/pull/2443) * CI: Saturate Circle workflow [#2386](https://github.com/nipy/nipype/pull/2386) * REF: Update and factor mount table parsing [#2444](https://github.com/nipy/nipype/pull/2444) * REF: Make ignore_exception a class attribute [#2414](https://github.com/nipy/nipype/pull/2414) 1.0.0 (January 24, 2018) ======================== (`Full changelog `__) * FIX: Change to interface workdir within ``Interface.run()`` instead Node (https://github.com/nipy/nipype/pull/2384) * FIX: PBS plugin submissions (https://github.com/nipy/nipype/pull/2344) * FIX: Graph plugins submissions (https://github.com/nipy/nipype/pull/2359) * FIX: Logging error if % in interface command (https://github.com/nipy/nipype/pull/2364) * FIX: Robustly handled outputs of 3dFWHMx across different versions of AFNI (https://github.com/nipy/nipype/pull/2373) * FIX: Cluster threshold in randomise + change default prefix (https://github.com/nipy/nipype/pull/2369) * FIX: Errors parsing ``$DISPLAY`` (https://github.com/nipy/nipype/pull/2363) * FIX: MultiProc starting workers at dubious wd (https://github.com/nipy/nipype/pull/2368) * FIX: Explicitly collect MultiProc job IDs 
(https://github.com/nipy/nipype/pull/2378) * FIX: Correct Windows environment canonicalization (https://github.com/nipy/nipype/pull/2328) * FIX: Linked libraries (https://github.com/nipy/nipype/pull/2322) * REF+FIX: Move BIDSDataGrabber to `interfaces.io` + fix correct default behavior (https://github.com/nipy/nipype/pull/2336) * REF+MAINT: Simplify hashing (https://github.com/nipy/nipype/pull/2383) * MAINT: Cleanup Interfaces base (https://github.com/nipy/nipype/pull/2387) * MAINT: Cleanup EngineBase (https://github.com/nipy/nipype/pull/2376) * MAINT: Cleaning / simplify ``Node`` (https://github.com/nipy/nipype/pull/2325) * MAINT+ENH: Update and extend MRtrix3 interfaces (https://github.com/nipy/nipype/pull/2338) * ENH: Add AFNI interface for 3dConvertDset (https://github.com/nipy/nipype/pull/2337) * ENH: Allow specific interface command prefixes (https://github.com/nipy/nipype/pull/2379) * STY: Cleanup of PEP8 violations (https://github.com/nipy/nipype/pull/2358) * STY: Cleanup of trailing spaces and adding of missing newlines at end of files (https://github.com/nipy/nipype/pull/2355) * STY: Apply yapf to codebase (https://github.com/nipy/nipype/pull/2371) * DOC: Updated guide for contributing (https://github.com/nipy/nipype/pull/2393) nipype-1.7.0/doc/changes.rst000066400000000000000000000003211413403311400157200ustar00rootroot00000000000000:orphan: :tocdepth: 2 .. _changes: ================= Changes in Nipype ================= .. include:: changelog/1.X.X-changelog.rst .. include:: changelog/0.X.X-changelog.rst .. include:: links_names.txt nipype-1.7.0/doc/conf.py000066400000000000000000000261071413403311400150670ustar00rootroot00000000000000# emacs: -*- coding: utf-8; mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set fileencoding=utf-8 ft=python sts=4 ts=4 sw=4 et: # # nipype documentation build configuration file, created by # sphinx-quickstart on Mon Jul 20 12:30:18 2009. 
# # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import os from pathlib import Path from tempfile import TemporaryDirectory import shutil import sys from packaging.version import Version import nipype import subprocess as sp # Disable etelemetry during doc builds os.environ["NIPYPE_NO_ET"] = "1" conf_py = Path(__file__) example_dir = conf_py.parent / "users" / "examples" shutil.rmtree(example_dir, ignore_errors=True) example_dir.mkdir(parents=True) python_dir = conf_py.parent / "_static" / "python" shutil.rmtree(python_dir, ignore_errors=True) ex2rst = str(conf_py.parent.parent / "tools" / "ex2rst") with TemporaryDirectory() as tmpdir: sp.run( [ "git", "clone", "--depth", "1", "https://github.com/niflows/nipype1-examples.git", tmpdir, ], check=True, ) source_dir = Path(tmpdir) / "package" / "niflow" / "nipype1" / "examples" shutil.copytree( source_dir, python_dir, ignore=lambda src, names: [n for n in names if n.endswith(".ipynb")], ) sp.run( [ sys.executable, ex2rst, "--outdir", str(example_dir), str(python_dir), "-x", str(python_dir / "test_spm.py"), "-x", str(python_dir / "__init__.py"), "-x", str(python_dir / "cli.py"), ], check=True, ) sp.run( [ sys.executable, ex2rst, "--outdir", str(example_dir), str(python_dir / "frontiers_paper"), ], check=True, ) # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.append(os.path.abspath('sphinxext')) # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. 
They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ "sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.graphviz", "sphinx.ext.mathjax", "sphinx.ext.inheritance_diagram", "sphinx.ext.todo", "sphinxcontrib.apidoc", "matplotlib.sphinxext.plot_directive", "nbsphinx", "nipype.sphinxext.plot_workflow", "nipype.sphinxext.apidoc", "nipype.sphinxext.documenter", ] autodoc_mock_imports = [ "matplotlib", "nilearn", "nipy", "nitime", "numpy", "pandas", "seaborn", "skimage", "svgutils", "transforms3d", "tvtk", "vtk", ] # Accept custom section names to be parsed for numpy-style docstrings # of parameters. # Requires pinning sphinxcontrib-napoleon to a specific commit while # https://github.com/sphinx-contrib/napoleon/pull/10 is merged. napoleon_use_param = False napoleon_custom_sections = [ ("Inputs", "Parameters"), ("Outputs", "Parameters"), ("Attributes", "Parameters"), ("Mandatory Inputs", "Parameters"), ("Optional Inputs", "Parameters"), ] on_rtd = os.environ.get("READTHEDOCS") == "True" if on_rtd: extensions.append("readthedocs_ext.readthedocs") # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix of source filenames. source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8' # The master toctree document. master_doc = "index" # General information about the project. project = u"nipype" copyright = u"2009-20, Neuroimaging in Python team" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = Version(nipype.__version__).public # The full version, including alpha/beta/rc tags. release = nipype.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. today_fmt = "%B %d, %Y, %H:%M PDT" # List of documents that shouldn't be included in the build. unused_docs = ["api/generated/gen"] # List of directories, relative to source directory, that shouldn't be searched # for source files. exclude_trees = ["_build"] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The reST default role (used for this markup: `text`) to use for all documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # -- Sphinxext configuration --------------------------------------------------- # Set attributes for layout of inheritance diagrams inheritance_graph_attrs = dict( rankdir="LR", size='"6.0, 8.0"', fontsize=14, ratio="compress" ) inheritance_node_attrs = dict( shape="ellipse", fontsize=14, height=0.75, color="dodgerblue1", style="filled" ) # Flag to show todo items in rendered output todo_include_todos = True # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. 
html_theme = "sphinxdoc" # The style sheet to use for HTML and HTML Help pages. A file of that name # must exist either in Sphinx' static/ path, or in one of the custom paths # given in html_static_path. html_style = "nipype.css" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". html_title = "nipy pipeline and interfaces package" # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # Content template for the index page. html_index = "index.html" # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. 
html_sidebars = { "**": ["gse.html", "localtoc.html", "sidebar_versions.html", "indexsidebar.html"], "searchresults": ["sidebar_versions.html", "indexsidebar.html"], "version": [], } # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {'index': 'index.html'} # If false, no module index is generated. # html_use_modindex = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. html_show_sourcelink = False # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = "nipypedoc" # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). # latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). # latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ( "interfaces", "interfaces.tex", "Nipype Interfaces Documentation", "Neuroimaging in Python team", "manual", ), # ('developers', 'developers.tex', 'Nipype API', # 'Neuroimaging in Python team', 'manual'), ( "examples", "examples.tex", "Nipype Examples", "Neuroimaging in Python team", "manual", ), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. 
# latex_use_parts = False # Additional stuff for the LaTeX preamble. # latex_preamble = '' # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_use_modindex = True # -- apidoc extension configuration ------------------------------------------ apidoc_module_dir = "../nipype" apidoc_output_dir = "api/generated" apidoc_excluded_paths = [ "*/tests/*", "tests/*", "external/*", "fixes/*", "scripts/*", "testing/*", "workflows/*", "conftest.py", "info.py", "pkg_info.py", "refs.py", ] apidoc_separate_modules = True apidoc_extra_args = ["--module-first", "-d 1", "-T"] # -- Options for intersphinx extension --------------------------------------- # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {"http://docs.python.org/": None} nipype-1.7.0/doc/devel/000077500000000000000000000000001413403311400146615ustar00rootroot00000000000000nipype-1.7.0/doc/devel/architecture.rst000066400000000000000000000073451413403311400201060ustar00rootroot00000000000000====================================== Architecture (discussions from 2009) ====================================== This section reflects notes and discussion between developers during the start of the nipype project in 2009. Design Guidelines ----------------- These are guidelines that the core nipype developers have agreed on: Interfaces should keep all parameters affecting construction of the appropriate command in the "input" bunch. The .run() method of an Interface should include all required inputs as explicitly named parameters, and they should take a default value of None. Any Interface should at a minimum support cwd as a command-line argument to .run(). This may be accomplished by allowing cwd as an element of the input Bunch, or handled as a separate case. Relatedly, any Interface should output all files to cwd if it is set, and otherwise to os.getcwd() (or equivalent). 
We need to decide on a consistent policy towards the maintinence of paths to files. It seems like the best strategy might be to do absolute (os.realpath?) filenames by default, allowing for relative paths by explicitly including something that doesn't start with a '/'. This could include '.' in some sort of path-spec. Class attributes should never be modified by an instance of that class. And probably not ever. Providing for Provenance ------------------------ The following is a specific discussion that should be thought out an more generally applied to the way we handle auto-generation / or "sourcing" of settings in an interface. There are two possible sources (at a minimum) from which the interface instance could obtain "outputtype" - itself, or FSLInfo. Currently, the outputtype gets read from FSLInfo if self.outputtype (er, _outputtype?) is None. In the case of other opt_map specifications, there are defaults that get specified if the value is None. For example output filenames are often auto-generated. If you look at the code for fsl.Bet for example, there is no way for the outfile to get picked up at the pipeline level, because it is a transient variable. This is OK, as the generation of the outfile name is contingent ONLY on inputs which ARE available to the pipeline machinery (i.e., via inspection of the Bet instance's attributes). However, with outputtype, we are in a situation in which "autogeneration" incorporates potentially transient information external to the instance itself. Thus, some care needs to be taken in always ensuring this information is hashable. Design Principles ----------------- These are (currently) Dav Clark's best guess at what the group might agree on: It should be very easy to figure out what was done by the pypeline. Code should support relocatability - this could be via URIs, relative paths or potentially other mechanisms. Unless otherwise called for, code should be thread safe, just in case. 
The pipeline should make it easy to change aspects of an analysis with minimal recomputation, downloading, etc. (This is not the case currently - any change will overwrite the old node). Also, the fact that multiple files get rolled into a single node is problematic for similar reasons. E.g. - node([file1 ... file100]) will get recomputed if we add only one file!. However, it should also be easy to identify and delete things you don't need anymore. Pipelines and bits of pipelines should be easy to share. Things that are the same should be called the same thing in most places. For interfaces that have an obvious meaning for the terms, "infiles" and "outfile(s)". If a file is in both the inputs and outputs, it should be called the same thing in both places. If it is produced by one interface and consumed by another, same thing should be used. Discussions ----------- .. toctree:: :maxdepth: 1 filename_generation nipype-1.7.0/doc/devel/cmd_interface_devel.rst000066400000000000000000000200721413403311400213560ustar00rootroot00000000000000.. _interface_devel: =============================== How to wrap a command line tool =============================== The aim of this section is to describe how external programs and scripts can be wrapped for use in Nipype either as interactive interfaces or within the workflow/pipeline environment. Currently, there is support for command line executables/scripts and matlab scripts. One can also create pure Python interfaces. The key to defining interfaces is to provide a formal specification of inputs and outputs and determining what outputs are generated given a set of inputs. Defining inputs and outputs =========================== In Nipype we use Enthought Traits to define inputs and outputs of the interfaces. This allows to introduce easy type checking. Inputs and outputs are grouped into separate classes (usually suffixed with InputSpec and OutputSpec). For example: .. 
testcode:: class ExampleInputSpec(TraitedSpec): input_volume = File(desc = "Input volume", exists = True, mandatory = True) parameter = traits.Int(desc = "some parameter") class ExampleOutputSpec(TraitedSpec): output_volume = File(desc = "Output volume", exists = True) For the Traits (and Nipype) to work correctly output and input spec has to be inherited from TraitedSpec (however, this does not have to be direct inheritance). Traits (File, Int etc.) have different parameters (called metadata). In the above example we have used the ``desc`` metadata which holds human readable description of the input. The ``mandatory`` flag forces Nipype to throw an exception if the input was not set. ``exists`` is a special flag that works only for ``File traits`` and checks if the provided file exists. More details can be found at :doc:`interface_specs`. The input and output specifications have to be connected to the our example interface class: .. testcode:: class Example(Interface): input_spec = ExampleInputSpec output_spec = ExampleOutputSpec Where the names of the classes grouping inputs and outputs were arbitrary the names of the fields within the interface they are assigned are not (it always has to be input_spec and output_spec). Of course this interface does not do much because we have not specified how to process the inputs and create the outputs. This can be done in many ways. Command line executable ======================= As with all interfaces command line wrappers need to have inputs defined. Command line input spec has to inherit from CommandLineInputSpec which adds two extra inputs: environ (a dictionary of environmental variables), and args (a string defining extra flags). In addition input spec can define the relation between the inputs and the generated command line. To achieve this we have added two metadata: ``argstr`` (string defining how the argument should be formated) and ``position`` (number defining the order of the arguments). For example .. 
testcode:: class ExampleInputSpec(CommandLineSpec): input_volume = File(desc = "Input volume", exists = True, mandatory = True, position = 0, argstr="%s") parameter = traits.Int(desc = "some parameter", argstr = "--param %d") As you probably noticed the ``argstr`` is a printf type string with formatting symbols. For an input defined in InputSpec to be included into the executed commandline ``argstr`` has to be included. Additionally inside the main interface class you need to specify the name of the executable by assigning it to the ``_cmd`` field. Also the main interface class needs to inherit from :class:`CommandLine `: .. testcode:: class Example(CommandLine): _cmd = 'my_command' input_spec = ExampleInputSpec output_spec = ExampleOutputSpec There is one more thing we need to take care of. When the executable finishes processing it will presumably create some output files. We need to know which files to look for, check if they exist and expose them to whatever node would like to use them. This is done by implementing ``_list_outputs`` method in the main interface class. Basically what it does is assigning the expected output files to the fields of our output spec: .. testcode:: def _list_outputs(self): outputs = self.output_spec().get() outputs['output_volume'] = os.path.abspath('name_of_the_file_this_cmd_made.nii') return outputs Sometimes the inputs need extra parsing before turning into command line parameters. For example imagine a parameter selecting between three methods: "old", "standard" and "new". Imagine also that the command line accept this as a parameter "--method=" accepting 0, 1 or 2. Since we are aiming to make nipype scripts as informative as possible it's better to define the inputs as following: .. testcode:: class ExampleInputSpec(CommandLineSpec): method = traits.Enum("old", "standard", "new", desc = "method", argstr="--method=%d") Here we've used the Enum trait which restricts input a few fixed options. 
If we would leave it as it is it would not work since the argstr is expecting numbers. We need to do additional parsing by overloading the following method in the main interface class: .. testcode:: def _format_arg(self, name, spec, value): if name == 'method': return spec.argstr%{"old":0, "standard":1, "new":2}[value] return super(Example, self)._format_arg(name, spec, value) Here is a minimalistic interface for the gzip command: .. testcode:: from nipype.interfaces.base import ( TraitedSpec, CommandLineInputSpec, CommandLine, File ) import os class GZipInputSpec(CommandLineInputSpec): input_file = File(desc="File", exists=True, mandatory=True, argstr="%s") class GZipOutputSpec(TraitedSpec): output_file = File(desc = "Zip file", exists = True) class GZipTask(CommandLine): input_spec = GZipInputSpec output_spec = GZipOutputSpec _cmd = 'gzip' def _list_outputs(self): outputs = self.output_spec().get() outputs['output_file'] = os.path.abspath(self.inputs.input_file + ".gz") return outputs if __name__ == '__main__': zipper = GZipTask(input_file='an_existing_file') print zipper.cmdline zipper.run() Creating outputs on the fly =========================== In many cases, command line executables will require specifying output file names as arguments on the command line. We have simplified this procedure with three additional metadata terms: ``name_source``, ``name_template``, ``keep_extension``. For example in the :ref:`InvWarp ` class, the ``inverse_warp`` parameter is the name of the output file that is created by the routine. .. testcode:: class InvWarpInputSpec(FSLCommandInputSpec): ... inverse_warp = File(argstr='--out=%s', name_source=['warp'], hash_files=False, name_template='%s_inverse', ... we add several metadata to inputspec. name_source indicates which field to draw from, this field must be the name of a File. hash_files indicates that the input for this field if provided should not be used in computing the input hash for this interface. 
name_template (optional) overrides the default ``_generated`` suffix output_name (optional) name of the output (if this is not set same name as the input will be assumed) keep_extension (optional) if you want the extension from the input or name_template to be kept. The name_template extension always overrides the input extension. In addition one can add functionality to your class or base class, to allow changing extensions specific to package or interface. This overload function is trigerred only if keep_extension is not defined. .. testcode:: def self._overload_extension(self, value): return value #do whatever you want here with the name Finally, in the outputspec make sure the name matches that of the inputspec. .. testcode:: class InvWarpOutputSpec(TraitedSpec): inverse_warp = File(exists=True, desc=('Name of output file, containing warps that ' 'are the "reverse" of those in --warp.')) nipype-1.7.0/doc/devel/filename_generation.rst000066400000000000000000000132041413403311400214060ustar00rootroot00000000000000========================== Auto-generated filenames ========================== In refactoring the inputs in the traitlets branch I'm working through the different ways that filenames are generated and want to make sure the interface is consistent. The notes below are all using fsl.Bet as that's the first class we're Traiting. Other interface classes may handle this differently, but should agree on a convention and apply it across all Interfaces (if possible). Current Rules ------------- These rules are for fsl.Bet, but it appears they are the same for all fsl and spm Interfaces. Bet has two mandatory parameters, ``infile`` and ``outfile``. These are the rules for how they are handled in different use cases. 1. If ``infile`` or ``outfile`` are absolute paths, they are used as-is and never changed. This allows users to override any filename/path generation. 2. If ``outfile`` is not specified, a filename is generated. 3. 
Generated filenames (at least for ``outfile``) are based on: * ``infile``, the filename minus the extensions. * A suffix specified by the Interface. For example Bet uses *_brain* suffix. * The current working directory, os.getcwd(). Example: If ``infile`` == 'foo.nii' and the cwd is ``/home/cburns`` then generated ``outfile`` for Bet will be ``/home/cburns/foo_brain.nii.gz`` 4. If ``outfile`` is not an absolute path, for instance just a filename, the absolute path is generated using ``os.path.realpath``. This absolute path is needed to make sure the packages (Bet in this case) write the output file to a location of our choosing. The generated absolute path is only used in the ``cmdline`` at runtime and does __not__ overwrite the class attr ``self.inputs.outfile``. It is generated only when the ``cmdline`` is invoked. Walking through some examples ----------------------------- In this example we assign ``infile`` directly but ``outfile`` is generated in ``Bet._parse_inputs`` based on ``infile``. The generated ``outfile`` is only used in the cmdline at runtime and not stored in ``self.inputs.outfile``. This seems correct. .. sourcecode:: ipython In [15]: from nipype.interfaces import fsl In [16]: mybet = fsl.Bet() In [17]: mybet.inputs.infile = 'foo.nii' In [18]: res = mybet.run() In [19]: res.runtime.cmdline Out[19]: 'bet foo.nii /Users/cburns/src/nipy-sf/nipype/trunk/nipype/interfaces/tests/foo_brain.nii.gz' In [21]: mybet.inputs Out[21]: Bunch(center=None, flags=None, frac=None, functional=None, infile='foo.nii', mask=None, mesh=None, nooutput=None, outfile=None, outline=None, radius=None, reduce_bias=None, skull=None, threshold=None, verbose=None, vertical_gradient=None) In [24]: mybet.cmdline Out[24]: 'bet foo.nii /Users/cburns/src/nipy-sf/nipype/trunk/nipype/interfaces/tests/foo_brain.nii.gz' In [25]: mybet.inputs.outfile In [26]: mybet.inputs.infile Out[26]: 'foo.nii' We get the same behavior here when we assign ``infile`` at initialization: .. 
sourcecode:: ipython In [28]: mybet = fsl.Bet(infile='foo.nii') In [29]: mybet.cmdline Out[29]: 'bet foo.nii /Users/cburns/src/nipy-sf/nipype/trunk/nipype/interfaces/tests/foo_brain.nii.gz' In [30]: mybet.inputs Out[30]: Bunch(center=None, flags=None, frac=None, functional=None, infile='foo.nii', mask=None, mesh=None, nooutput=None, outfile=None, outline=None, radius=None, reduce_bias=None, skull=None, threshold=None, verbose=None, vertical_gradient=None) In [31]: res = mybet.run() In [32]: res.runtime.cmdline Out[32]: 'bet foo.nii /Users/cburns/src/nipy-sf/nipype/trunk/nipype/interfaces/tests/foo_brain.nii.gz' Here we specify absolute paths for both ``infile`` and ``outfile``. The command line's look as expected: .. sourcecode:: ipython In [53]: import os In [54]: mybet = fsl.Bet() In [55]: mybet.inputs.infile = os.path.join('/Users/cburns/tmp/junk', 'foo.nii') In [56]: mybet.inputs.outfile = os.path.join('/Users/cburns/tmp/junk', 'bar.nii') In [57]: mybet.cmdline Out[57]: 'bet /Users/cburns/tmp/junk/foo.nii /Users/cburns/tmp/junk/bar.nii' In [58]: res = mybet.run() In [59]: res.runtime.cmdline Out[59]: 'bet /Users/cburns/tmp/junk/foo.nii /Users/cburns/tmp/junk/bar.nii' Here passing in a new ``outfile`` in the ``run`` method will update ``mybet.inputs.outfile`` to the passed in value. Should this be the case? .. 
sourcecode:: ipython In [110]: mybet = fsl.Bet(infile='foo.nii', outfile='bar.nii') In [111]: mybet.inputs.outfile Out[111]: 'bar.nii' In [112]: mybet.cmdline Out[112]: 'bet foo.nii /Users/cburns/src/nipy-sf/nipype/trunk/nipype/interfaces/tests/bar.nii' In [113]: res = mybet.run(outfile = os.path.join('/Users/cburns/tmp/junk', 'not_bar.nii')) In [114]: mybet.inputs.outfile Out[114]: '/Users/cburns/tmp/junk/not_bar.nii' In [115]: mybet.cmdline Out[115]: 'bet foo.nii /Users/cburns/tmp/junk/not_bar.nii' In this case we provide ``outfile`` but not as an absolue path, so the absolue path is generated and used for the ``cmdline`` when run, but ``mybet.inputs.outfile`` is not updated with the absolute path. .. sourcecode:: ipython In [74]: mybet = fsl.Bet(infile='foo.nii', outfile='bar.nii') In [75]: mybet.inputs.outfile Out[75]: 'bar.nii' In [76]: mybet.cmdline Out[76]: 'bet foo.nii /Users/cburns/src/nipy-sf/nipype/trunk/nipype/interfaces/tests/bar.nii' In [77]: res = mybet.run() In [78]: res.runtime.cmdline Out[78]: 'bet foo.nii /Users/cburns/src/nipy-sf/nipype/trunk/nipype/interfaces/tests/bar.nii' In [80]: res.interface.inputs.outfile Out[80]: 'bar.nii' nipype-1.7.0/doc/devel/gitwash/000077500000000000000000000000001413403311400163275ustar00rootroot00000000000000nipype-1.7.0/doc/devel/gitwash/branch_list.png000066400000000000000000000320611413403311400213270ustar00rootroot00000000000000PNG  IHDRyU pHYs   IDATx]\T/,lcL-F1bSS^QcFhF+RK~g‚ vzΜ9sf]xp8WSG# <G##S\5G#<G## oΆJ߳=#<^D֜ȀRD"xq85Xl3 )xGGGܭAp8ՈJhwT#xG ,xe>x8Gf!`g;p855Kָi4 233QPP`U]IJhooWWWږLwGJ[J#SD@@REkl>甔h)W5pS@(yf3_^:;BSeG!`]\4̂;N"Yճ3G#x{\4ea"uY8GZKZz5-'!3@X(GC-y-bOH.#vyn0^z 8#*,8<  QgtHFuǔ;5&Z6o*q H#dy~LYhb [W?.!|%tR'ؗHV"5+^A\FKn]–+/$$i&-dnj2fnfhh1vQ0KɎUkƇ|53w > ,dľp,] )5Rѿ-drg9~oG]YuⓇ3qX|,,o@d=#rfD/0gr[Magx8kaFPkM e|z+1Pb(;_M:;OB/@Išطl.Q2).X30 )c"p!cC^ƣWpr**8  [ Iv!*m&nPiteW"!WU 0ط N&ՙX=&MEW2zR#Rى[ۊ|0{}܏؈kA #Vf5 ^9PԣWiq`~}Fv»=}XTi ;HN {79lsHiU]{6.Fہq-ĠɊуt*jI5dE]wr:'KZw-Ь-u#)lJ>8c`HDЫq+6.FD:5EG 
Quא5+SG§lMah*ԋu푹%[m;1]B˜ UY5,q{U_1eI{`ؿr)fb١' *%_֩VcLC#I`okep7=v,œkg>~>z{P&v| ~oLگFU7K wo@+\Z4-ImulN'ad5?=Rw,XZ O7QyX=DOSPթRBCcvy WWdҁ0v2VgN^-0h4u.~ߐH?s3Dj;q#S[Ƽ6DM:r6ߢ!`&v; boGJqw`/X=g>6vعu$+24 a/U%lz6fnc0EilRч{Ʒر堀L^ sHVړ ϿE&y#ZS\! -Y5Ur Zvơ\@.)U5v@x8h1ZO@kGd Gu[h {tC_j1B(yds]0zL >[y"4c/vaִGS<#S]U͸!)'7IpR:F,u!Y{k8'/HƐ~*pl] ع' %K*7 [BO& ؁RK|4c$IcZ7ǦQ8r1 A]eX`tP9Y{ڴ=@įǕh֪5d)5Zm޴`DoDշQi ǔ~1LĠٳ>diYdĚY7~=rq'\C}"-3ǐGKM}D8ΘxB ez>E _̌'_Q%60jg">j'f=y8C~M7KG47۠|Q8ID8iwG8 674^r#; X6ک)X4@Rʺض68y.n艄8Lx:^ŕT-h >O//aq%}F@O<'fk&%r߅-| 2uI|WR:M!bRl]-U>]HsԸz23*bpOOypew-1g?k'y"`Ş!=ÉVA;ԫ]}|:,l \d vE~j 2ӄ,C {psײ`Wʗo^WU\A2Lal߱ȅ‚;X`-Α7W0{=9P?Hn#<?!1{V0)F"R3!!*q /k aԸ-Ҵp:t2[dIƝ<2l eI^ mt\p#\w:Syptv(9"xZyY򎭇ൠXMV )o GWI|,Z~qG`{ˠ^B^l%P{?{;{?*+"n'xE3~dZ i GcL_'!}GY6b8P]@gANFiP~ixw` i/܅R0PsCr>_1{k>@,w Ǡe;}꓏7->}C{W>Zwr0"Ѕ@?>ȗR0384Pa0[TۣRD*9 /W(+'03̃G#V"<2W#7Z|Pwn߾#/`G/`P5,= ~3p_ITڹ{9KpE$\W˛8z&\: (̘ V|5(E&%[iS dhhch2W +[ϖxi*r"Dn @tAm컣 s i E3C`r'e"٥Zk !$C?l)hΫh>\Ov7#Ƞ-bsnLę<;]1.^maM8Q⎺n"2(d;2F$`Xeۇ*5֗Af+b5>9 Z+a/k%\Dޘ \8m; ""q6Y2z9^pшhЍ}pƷW2Dۤ-~XI$8PnDC;a5!Mb4ۖ|ZJ]0lg,eNi+7'Y/$42 1OFJܴ/t*%s)V򽩡ͱA$Rڣge Rd8aR{"0E2Ȫ37M+#ȨqPʧO=# jK<>펦*d0^y)ŒMEZ 1nŽezdn "J0kVSGh0SIVDvydّRQADVZCA$$<б=Y¤2 ,;( G&qo MD(O=k"N)#⥬$')qfY lzcuZ̶,^yy |Tј> χ`pg23=* LJݍ4 Q_b?"tB50lt0Eseđew3t:loڱo z-р(gDB~NFwhꌅQsTeN\*ph=IBVpJcDD$W]`D?]+p矈Sۑ &< 0h?ipgG{!hݒ-5x=7B̕%Ֆ>HWfRS*C½+,Rmx#mtahGSjKGMl;ƯRIsJe?Ej.bN_I#ȻhSߌwz= ʯIiG4~G".hbQF26s^OF_6H"=%o qÿ}'a ,:;o6TÓR+w c)X V d-le'bMWx W|_ b # k="3ƈE,):w}Pԇ sG1m3 /&c@ND3, zLǒՂ` NM pp֬ZͿڹ }0=J.]D:t *˳`KUe'_wZfȧ'eRtM6郃<Ҥ"rfλ1|8#C;- oąLglH-'Kq*7ՎYfmxcL@,nױG6 h4bz忇 QGxU3U\VM:C2_#yKexAtŐHb8a^IiƮ21o >{'NGF\[ĉ:njҋʰ!+I%1jί'ƿGoCP`ӄX "PyQe ϮQ t6dl${eaŜ؃5˗㷋9<1%a,zJ Þt|uXgc- vX,Zr؏znCю{1JӸӑ͊Pp镢,Xӂk/=xKقLMFf3${z`(ؒS!_nvj𨿫k5%jy@[{zR ?]-ox:OTާTҹJbN9G;;ɏJ9g)dѦZ+Yhr49)BM9r S+0lnKH]f,CE2#mfoX]R:˜CL_)st'JI\!7fTL9*6prs{5)a }٠P6ZЦ>8І&QW@* О6db(3"mlѶ3#.m-ah,9/bZ' al]U!7n%W7h>bvwܹ/ΌWvO3rטڒО*_ ɓYpZ9%9\npc*ڼUT*野 ssH>Id2+)@Hk1/6x6Kug:JIToIiwv^"2բJe0dV99B:MgOa)˽JK~DµFɗYI dJxHdذ,YXZr<e]pjUYeo.#p oaެ9C4v(; ԱXHDң"bʩp^-§<*HL ? 
PfqO1_l^q|cԿ&mE @=#iIo粖&{]_xw~ ҇EbIxڻTSFrpѧ?F/Wzs~ '$WqBtu<2e1m܂IMJ9߅J[jɫrR!r4sutDZ\_1O$XWos5,,O?O`*1Ub8>f7C*E"=)?LMD@/C.  Z5ơhM㮢9Vo,>G+2ikq| \9Bl #bp:4]%f;U4"e#R҅[ż1Pygbv$˙'o<_-qaB@a(F lMԦH]އG7<_Afo#:.l>XD@yup;$]vCOB\= ~$?mO`U kìi=w67`ոگ!2=Nl_$?G-Mѷ-L8湫iW`#$ h\} ƍh蝏tyϽ1Qrx ѷۚƉ( LAO5G^0cԡgD+WaYKU>y%uۡ1r0G=|*,_?'q;W'C߿6C19)~Qk 3S1oX8}㣙{UFRR^+b@t:Q |x~)b޼x"6#m9QI=pjd"߀6/c1N 6ChɎw牘DԹ}ײW\Ƃ=M̝>'w #30yH>Gi0:7)&D⽻‡i^ 6G'YǙ8tp;wNV9~v58 '.gzǘԧ.f"H|zpܳWs%9JO[dų6ѱ.\+ʫ'iJOt= %dOT N!ڼys1m]w톎@k*Я?Yl麞*a[>/očy6Gw£ӫ3~ĥ5Í$IC' ehe-ד'C Y8v\.L UL; lY]bOR[fKҍ됵8`oT)SxYɩYٰA@ oJG2ߌA̍3Z9]1czg`'bR 9b"Y{ j[=.3.5ɅuKa1u6+>|6gt?9R;l6$U:~g6CY<޴A $H=*U^mxKY%Idd+ 'r)fYy< zBoz"݀n>3ՅA g:jgJ,+to@~1y>"!\qh/Y~/`v^SK"ƆgJLy[صXLw,A/6fgMڷAP6hݦOCΤD7Xt_K.ooęJ4iYf9xFl0i@ C'4FY̑TiJe%]G>Z->:wX8ohXo!^HQ^bk S14j"2Uޚ4Q6FǞ0 tIh'˽^y?DbkHiҲ R±1DS?N3ˢP xC&7Ȗؽb^z[5oų`ǮY('qR #@"3e39o:a04cR|h~/9fDOi14Tbh,-)N; SC>kkȐ5GW1P9L-4׋cȔ0Q2 (筌4ͪ ef&2pa^wZ:9xK@tnV  d-P5f̮,φbHǕ†/hy.IdFH)8]t<@Ehڥ&9 7=~٠!TWn",.4*o+#֚K}׫[̂wqq1LYo/wAzlV?q okP5B}FhOtg4!_:Io⡶ %S.- _~Ӎ`⽱d\P}4xԚiNJMxmH[;X!]H梑,]sZؽ--YJ{\ yBI ,0MMIKFJr X4B'@fY'`.~)aaĴA+(xwucWڌ#6o!MmHAџ 7 ~ڄY*l7r٢r'i#MYL0s߉ϻ]&Z4_ ř,C-GyP??ޠ`)^T/+mÂӦM }޾t0YR;#k|0w$)ܵÙa0^ !"̟yZ=x:Hq BhRkþlL NNlT!D Мo:g!^Q%bFbc4N_[5qmr/8fp#ѻ{@,KUƼu8i)Z<>kWkll,$]~. 7_CYVCL0TP( fc,ɃT*χ YC*/ZE. :rub,@dp(5~={B-'$ 4>)Ao# */g+ 6Ԁ0sh<*bp=z~uφ5;]K*.f-NcxIENDB`nipype-1.7.0/doc/devel/gitwash/branch_list_compare.png000066400000000000000000000246671413403311400230520ustar00rootroot00000000000000PNG  IHDRL4sRGBbKGD pHYs  tIME0 IDATxy|u4I4mz=-Zh r! JUP\WPpZQ ޠTVXE[RP,-6M#Ee|B!2JB!OB B!$B!VB ֒cX]*S&D!^k]3WBH^xe 5 s{ 2;SO︗%fC%gLP <9;[F\/=:Jˤdqɲ>#n{J Bi9qRx|3"L#RBڒ2ג?#!?:d0o(SGDV"=w t08ztfÎT8BĮmeT8lmձ7e&d+ q0өG i+Y0e_sU7̵s]K9NWMd: G #enz6{-J2v7!5yԅNrol>Gj6c/$iC-A)buZw Ŷ۷RL,]څ7(͈߮#..6m0d q:F-!{>2ӗ0{VT˿x۷`j;@B\snѵ: $3H\;ݓr">٫v/U" JRQ#ڸ2h*B\l]ٴIϠ?,0b)sA9SȬQl 'gp+!5UU; qۍ)f%h?~{[ӱ[ PG.r } !1\F]B!% B!$B!B]!B!$B!B]!mces_r݇~U%JB\!e7/I.;g-y|kGbːdKn чֲB]!oZMat/jIPZ)߿n¥فC:ŲEiҮJl۲Ehkܿ&O `;_D.*7Dl! 
HׅvSisbH,M(.Ub4)hlXqoSI~ʯBhiM Ӓ=`[:b}wvx#_XP;`D 2cb& W fBqstx>r0Ex~ n ؀&fKş¢ZRU& Դ~`; )R!PQ]v39QҚ| UžP C (GH`laJ69нfq&BˁS.Z[5I.-;Cb-1F6<Ѐz]Ť<Il|g7YU[5*Rm*.⭫zUl nl6 hGz-!$.*6_ԊOVQG_Azi`^F86*؋ H`+Jcf\ӡNSgڰ(؈Ѓ^zܞ/'lJOa{]K$%` ^X9oC'_E!,uwV[XPj4r[;nЗl*qCo?nTa()jqnVuѦ} VR 7et zon\ui}74äT}TCpx[)|h܊:qN`WIk6Ǚv^r"0~6 Ź6ErO"e穀hsFwg9R Ԟ͵޸fU }XSܜ]yok% 37'jX!tY95&MKHuCiwS>`Ex<F V>FB {o檞'ȴhx] /~d=јPw1(\{T~-;dkYWx<8qr  !,, ?soUTUU8k3뉫)XN WqĮO捾ju`&[O,L46ޙO5_Vg6 哩 ٷAyi걱|1k*x)aXo7C80Mmg?Cu|5lj¼SD8|.<oԌכylku賯|_ËRVb0d;߫,vt&: 8nt'JP5nc ԂӉON.&4 !įwq<&IqTU ???6mz˝}6)Rt&ޖq귟yjT[k靦RM'=niUY%dU_:⿿]ˇ)LpD[8≯6YvXuW|UpЦRWYp 8p6޸306GܷJ[QyT&=oh˒x7JuxDڲ{R/槊:  -D9& 搐3$$8c g0aL3jk -Kk2K+, FQqEh4bXkֿ[njM,~7rޖMm+QA]IOs MlY (A`7|`y[jN8M@ۺ7Є!x2VPɫp࿷vN-8UK#meaUaފqJ7{Y9xya 1G>-f(pnB\5k^bygd(Y15]8^w#_OB(G9 iF\:#wy$3\ըed}Ŵp }6r%uᄧnz-:_.Jmu]ѺWІ1|ҁBN)ѱT}v1}`8>k5ԻWO]]PΩ~?gQ(8uP}s+h+k xfh.^2.ךb}]_Q]5Z/wUT.ժZ"!MHXɱ&Bb<{UUU5k3+9ZNg|lʔDw N _!!(χwB\-~k̘1(J /ԩSQVK>}NQ5ϥZFu;Zy ,fV-SNhO9_BQ5~gm@P&'jB{kzuẚ.H `K iZM_C{3fk5?Db`432.&ՅSc[PXsx(wԌvEC\E~׆sB!~?SO??W_}bt:o4|V^ݸy#+0~$}>0%q5Naݻ7a^F.b;q+1woy+yav7VBUU̓&3ݛ[|-6TUxv:N 7@U+INySne\T>?vMuƸy}_^E͓߼VJݠnݛ\'y 4+;h0p2@dlf`qCdTi׿O?`dbʔ)`09r$ .l0 5g!CRY0J*T@`2-[͍'SN"яf<,cғ<'~GS"$3۹Խ(F p>]3i}'7Mw/N#4iszt{%O$5yͩvZNgaaAm۵;_Į n2omQE)?{hMZFsitD6OF!>`^W;Y`vUUt' BCuU{\MU?m 1s8;ᶩd(fl7o#ۄ^}L&O_Oe'׵"7?Cܴ3+wQS}/6<q"ϬB!9/vr{2eoѭ0gǼ;י:(#w_]?&w¤QyLo& (z t#'y lPb<)h_wX6!iZ}yޖh%@5qo=OߟeZ]wzhB#7 !75{n"##Oŋcj9r$/`ȑ 毙qvva jFHӆ}̗NqQU.f!cힲ<Չ% @⩢܃S7_ACW܎oՕ>K rVr*ǨBGBꡦVrOU.Z.:y~:km z7.g<tҠʈQ*\3U=칳\2BqI Qߤ4Z\<~L].fտ?`yV71czg}iЇsIW@ɜ{Qy_ݭjxsxj LJOȶoEF?ى6RKv٩dI -|eX9gP!)<@;kϾo4Gxor <7<8n,%$qe7#o fi:U2` Ӡg3ɺYG8[R}&3n]mc{1Ww?;pZn"3 }B|t$1j>/O샓cG>^Z@ؑ{o0o%A[mE!1>z7ulR-JeQ7c-%UyY|4c~*>>w>1l]Xz\KѱpQ+ea>Lۺa߄ZFņ14Cók^Q$FCAeέMb4GrΥ܊泏l-Q/!s.]V>\N_5XYUn(sIP`/bkn[HC  ^c c&MeRJm|<~bԩoƚo UV0}JÄ$sm&?o/~k3ld&ȮT4G~{VR?Uֲ߮~O FLOiEӺH^ mOY`EǤkIRb8)3eTw4LC9OG._) 1uX/|IҪqՏ9Xshңvj>v擣.*t?k[槂J m;F5pURxFh 땈Fc_*̲V@+ |j@Bh4x<$ќqW%Zll3 %¤S,H`l[$0tP\duk.7=eCΠt!TEV@㧥d<ݺBCѨ`wGQbڸs@NAui#C! 
+IDAT<(8 uB ӌL`jU!`=OqW ;cV}OG_);o,yo,WnEMmL̘ "#Nnl}NYDMFmejQ~FIvaA!R[ OLe)Ir >1+b]bLиP ԢH7Ѥd3KW"QT~4rwJ[.eox8.Ն`Fud׵5q򯢴Nʍ,_N Wb.]7c-#d2I0.@UU'5Wo/bop}=QfPJ69нfqɦBƝp ),n]"44e9]@]Beh}qDMlYWet<32zJ茮8jиb`Uix ˹[y[ &ob/ȺI(ɣiֲ"=G_ \NhJT0(wFP@URR&"-/&5B!c z=3xCJi9wS[3Y[E_W 4Sо8:ϡLe+!)=3x{g9w9&\I/>H@2OαT@lu7ثM%a]ߩ\|Y(D h5 ^So'ʑÒm2D] qFp5Lk1vh]J0wm?*c+.*hNNq:\6 h- !ğ͛7y9%fTV?ȽڳvlƭtKaӆTOnȢgoO& 3bIK[a Z20&d%zZ:QRHf⽽Иaf|2PУ!덄| sly{S2xi_ȂSj/yl z3HayL4×EBYMB>n0gl[[vBZ̨赚}[5113ãi@H`Gՠ3,{9Zz& d;s=RW"1a&w }#WGID~/aׂ,$#`oN̰'Ƒ3u}=6|-UCHՒڦyXhrt nFVE'z$X6ѥ}8JV M/A+md_.찔bui3j`2,%EX &s:`:z)ưuSQR7O=Φk&a9QInk 'p,!aZqL5q8pi:5;,Tu[)=aEu[+8aDx/a`EC(7騢܎`=a'ǵ^Y㸴!`Ȧa+ =PuSܑ[8M wn>t= p=#hYn%Q7Bdk ;Xl#6{eh:eђzwOLɜվ/7X'QZ O{FOݣ F#e !݋q#?l<9;¯YMI<9(Asѳ];evA!Os[ƈPcۉZdB+<\V 6M A%B!n#7[3)D8lŹYdnR' =naPg-$B!B]!B]!B!NEF/B! !B!B! !B:/Q֭[!I!2B!,I!B!B! !B!B! !B!B!DW!BItB! hE$IENDB`nipype-1.7.0/doc/devel/gitwash/configure_git.rst000066400000000000000000000057631413403311400217200ustar00rootroot00000000000000.. _configure-git: =============== Configure git =============== .. _git-config-basic: Overview ======== Your personal git_ configurations are saved in the ``.gitconfig`` file in your home directory. 
Here is an example ``.gitconfig`` file:: [user] name = Your Name email = you@yourdomain.example.com [alias] ci = commit -a co = checkout st = status -a stat = status -a br = branch wdiff = diff --color-words [core] editor = vim [merge] summary = true You can edit this file directly or you can use the ``git config --global`` command:: git config --global user.name "Your Name" git config --global user.email you@yourdomain.example.com git config --global alias.ci "commit -a" git config --global alias.co checkout git config --global alias.st "status -a" git config --global alias.stat "status -a" git config --global alias.br branch git config --global alias.wdiff "diff --color-words" git config --global core.editor vim git config --global merge.summary true To set up on another computer, you can copy your ``~/.gitconfig`` file, or run the commands above. In detail ========= user.name and user.email ------------------------ It is good practice to tell git_ who you are, for labeling any changes you make to the code. The simplest way to do this is from the command line:: git config --global user.name "Your Name" git config --global user.email you@yourdomain.example.com This will write the settings into your git configuration file, which should now contain a user section with your name and email:: [user] name = Your Name email = you@yourdomain.example.com Of course you'll need to replace ``Your Name`` and ``you@yourdomain.example.com`` with your actual name and email address. Aliases ------- You might well benefit from some aliases to common commands. For example, you might well want to be able to shorten ``git checkout`` to ``git co``. 
Or you may want to alias ``git diff --color-words`` (which gives a nicely formatted output of the diff) to ``git wdiff`` The following ``git config --global`` commands:: git config --global alias.ci "commit -a" git config --global alias.co checkout git config --global alias.st "status -a" git config --global alias.stat "status -a" git config --global alias.br branch git config --global alias.wdiff "diff --color-words" will create an ``alias`` section in your ``.gitconfig`` file with contents like this:: [alias] ci = commit -a co = checkout st = status -a stat = status -a br = branch wdiff = diff --color-words Editor ------ You may also want to make sure that your editor of choice is used :: git config --global core.editor vim Merging ------- To enforce summaries when doing merges (``~/.gitconfig`` file again):: [merge] log = true Or from the command line:: git config --global merge.log true .. include:: links.inc nipype-1.7.0/doc/devel/gitwash/development_workflow.rst000066400000000000000000000173751413403311400233520ustar00rootroot00000000000000.. _development-workflow: ==================== Development workflow ==================== You already have your own forked copy of the nipype_ repository, by following :ref:`forking`, :ref:`set-up-fork`, and you have configured git_ by following :ref:`configure-git`. Workflow summary ================ * Keep your ``master`` branch clean of edits that have not been merged to the main nipype_ development repo. Your ``master`` then will follow the main nipype_ repository. * Start a new *feature branch* for each set of edits that you do. * If you can avoid it, try not to merge other branches into your feature branch while you are working. * Ask for review! This way of working really helps to keep work well organized, and in keeping history as clear as possible. See |emdash| for example |emdash| `linux git workflow`_. 
Making a new feature branch =========================== :: git branch my-new-feature git checkout my-new-feature Generally, you will want to keep this also on your public github_ fork of nipype_. To do this, you `git push`_ this new branch up to your github_ repo. Generally (if you followed the instructions in these pages, and by default), git will have a link to your github_ repo, called ``origin``. You push up to your own repo on github_ with:: git push origin my-new-feature In git >1.7 you can ensure that the link is correctly set by using the ``--set-upstream`` option:: git push --set-upstream origin my-new-feature From now on git_ will know that ``my-new-feature`` is related to the ``my-new-feature`` branch in the github_ repo. The editing workflow ==================== Overview -------- :: # hack hack git add my_new_file git commit -am 'NF - some message' git push In more detail -------------- #. Make some changes #. See which files have changed with ``git status`` (see `git status`_). You'll see a listing like this one:: # On branch ny-new-feature # Changed but not updated: # (use "git add ..." to update what will be committed) # (use "git checkout -- ..." to discard changes in working directory) # # modified: README # # Untracked files: # (use "git add ..." to include in what will be committed) # # INSTALL no changes added to commit (use "git add" and/or "git commit -a") #. Check what the actual changes are with ``git diff`` (`git diff`_). #. Add any new files to version control ``git add new_file_name`` (see `git add`_). #. To commit all modified files into the local copy of your repo,, do ``git commit -am 'A commit message'``. Note the ``-am`` options to ``commit``. The ``m`` flag just signals that you're going to type a message on the command line. The ``a`` flag |emdash| you can just take on faith |emdash| or see `why the -a flag?`_ |emdash| and the helpful use-case description in the `tangled working copy problem`_. 
The `git commit`_ manual page might also be useful. #. To push the changes up to your forked repo on github_, do a ``git push`` (see `git push`). Asking for code review ====================== #. Go to your repo URL |emdash| e.g. ``http://github.com/your-user-name/nipype``. #. Click on the *Branch list* button: .. image:: branch_list.png #. Click on the *Compare* button for your feature branch |emdash| here ``my-new-feature``: .. image:: branch_list_compare.png #. If asked, select the *base* and *comparison* branch names you want to compare. Usually these will be ``master`` and ``my-new-feature`` (where that is your feature branch name). #. At this point you should get a nice summary of the changes. Copy the URL for this, and post it to the `nipype mailing list`_, asking for review. The URL will look something like: ``http://github.com/your-user-name/nipype/compare/master...my-new-feature``. There's an example at http://github.com/matthew-brett/nipy/compare/master...find-install-data See: http://github.com/blog/612-introducing-github-compare-view for more detail. The generated comparison, is between your feature branch ``my-new-feature``, and the place in ``master`` from which you branched ``my-new-feature``. In other words, you can keep updating ``master`` without interfering with the output from the comparison. More detail? Note the three dots in the URL above (``master...my-new-feature``). .. admonition:: Two vs three dots Imagine a series of commits A, B, C, D... Imagine that there are two branches, *topic* and *master*. You branched *topic* off *master* when *master* was at commit 'E'. The graph of the commits looks like this:: A---B---C topic / D---E---F---G master Then:: git diff master..topic will output the difference from G to C (i.e. with effects of F and G), while:: git diff master...topic would output just differences in the topic branch (i.e. only A, B, and C). 
[#thank_yarik]_ Asking for your changes to be merged with the main repo ======================================================= When you are ready to ask for the merge of your code: #. Go to the URL of your forked repo, say ``http://github.com/your-user-name/nipype.git``. #. Click on the 'Pull request' button: .. image:: pull_button.png Enter a message; we suggest you select only ``nipype`` as the recipient. The message will go to the `nipype mailing list`_. Please feel free to add others from the list as you like. Merging from trunk ================== This updates your code from the upstream `nipype github`_ repo. Overview -------- :: # go to your master branch git checkout master # pull changes from github git fetch upstream # merge from upstream git merge upstream/master In detail --------- We suggest that you do this only for your ``master`` branch, and leave your 'feature' branches unmerged, to keep their history as clean as possible. This makes code review easier:: git checkout master Make sure you have done :ref:`linking-to-upstream`. Merge the upstream code into your current development by first pulling the upstream repo to a copy on your local machine:: git fetch upstream then merging into your current branch:: git merge upstream/master Deleting a branch on github_ ============================ :: git checkout master # delete branch locally git branch -D my-unwanted-branch # delete branch on github git push origin :my-unwanted-branch (Note the colon ``:`` before ``test-branch``. See also: http://github.com/guides/remove-a-remote-branch Several people sharing a single repository ========================================== If you want to work on some stuff with other people, where you are all committing into the same repository, or even the same branch, then just share it via github_. First fork nipype into your account, as from :ref:`forking`. 
Then, go to your forked repository github page, say ``http://github.com/your-user-name/nipype`` Click on the 'Admin' button, and add anyone else to the repo as a collaborator: .. image:: pull_button.png Now all those people can do:: git clone git@githhub.com:your-user-name/nipype.git Remember that links starting with ``git@`` use the ssh protocol and are read-write; links starting with ``git://`` are read-only. Your collaborators can then commit directly into that repo with the usual:: git commit -am 'ENH - much better code' git push origin master # pushes directly into your repo Exploring your repository ========================= To see a graphical representation of the repository branches and commits:: gitk --all To see a linear list of commits for this branch:: git log You can also look at the `network graph visualizer`_ for your github_ repo. .. include:: links.inc .. rubric:: Footnotes .. [#thank_yarik] Thanks to Yarik Halchenko for this explanation. nipype-1.7.0/doc/devel/gitwash/following_latest.rst000066400000000000000000000014761413403311400224450ustar00rootroot00000000000000.. _following-latest: ============================= Following the latest source ============================= These are the instructions if you just want to follow the latest *nipype* source, but you don't need to do any development for now. The steps are: * :ref:`install-git` * get local copy of the git repository from github_ * update local copy from time to time Get the local copy of the code ============================== From the command line:: git clone git://github.com/nipy/nipype.git You now have a copy of the code tree in the new ``nipype`` directory. Updating the code ================= From time to time you may want to pull down the latest code. Do this with:: cd nipype git pull The tree in ``nipype`` will now have the latest changes from the initial repository. .. 
include:: links.inc nipype-1.7.0/doc/devel/gitwash/forking_button.png000066400000000000000000000314441413403311400220750ustar00rootroot00000000000000PNG  IHDR]Vl8E pHYs   IDATx]|Tv7ݴMN]&O  X(("}`@z!!@$ I6{wHŐ ww|s,JlG#i7p8t"p8Dn ͛p8tp8Dn ͛p8tp8D۪SMYVܺu jN:H;Ц22{wG(7o#77}Z+6/_18+{22+ߔ'Tnn.\\\z)H~cCEҿ++3^KX,`B`UC(s*Ҁ{/22'7#G>F},\>ë˗ sݿ/^v;s+p[cF5nGd2{VVՍÙ3gh"A0'OFvٱ߹nɊd-ڕzzz:mۆ9s=33ž鯬C~0~Ure$!nm,W#q(gǝ݌XlP844\K7n' USֽ__\] \d 222@h[/Aadr]3\ĻTnJo8aӻ%&52_VN߫nV䭄ha9r0`0vލM6 駟.ڙ٦z$])Fg,Zn.P:٠3Uӕ+17sBo aMM0V9EGի3f S`f Bq]_w34&k%dAG=pS*V999;w.Fc$o0}tNANj̈́B!@<E~=ֹG ~Jk6hubC}@#S1@H³^gȌn1j˕+W" AAA5kؽS6> |oZlY`ݖ>,uh ؽę}&|~4Ú`{a{hi_}UΘ(d]ٳÐD74'Xfm Z`zd ޚ0Ԥ/{q٫!Lq)5(J/1+pũK|K+bէƏ>ƺ9Bc>GSޝfDND?](f[<kcc\wviprr\*씝6vTJ%m㿳b%/Q8`[BLjo?@ ew26؆c޻E'6W  _B8'^m6@m_ϚA'|%=s}]j8HGǦ`wgiSbȜy8|b7r$*&t+|Nc;#p헏P:GJ+.2. Ô)S'|RHc_}=Z6P(QKؘUN|N6ؤlEqbs!ES0nl"\CoBvn>lB{o6IBѽs#-F!#ؾ= D*βă84I!V<.#\iZe?`(l 6 >َ; pOoʼnU %/8. D/8˨㻘vn'[UH+w-_|@l1b9&N(rY-47{=Q/W[n4b`vq=b,bpu&=\Ip`g8ً+V!JJ@Gc?ExF(>1JO 2/s@~λvH4rl.ӻ-,Ĩc#U"~T|bBǽzDؗLrK@硫]+d)J ('Y<3nʲJZLeE;)yc3QiXʊYc,^A>,#El %I_ɎƙHB# <$W|,S. FqF{-.-R=F?zGzys> 2ȭ0.峂dSr%b>\f1eiggg和ܸhME(p]_ ĶC`zP__cgA ~U%%Yt5q˞ՁÈ!kl;HdiLט\.8>n0y: aPK%:`#"DJ.])b#P|cl,:hՂNB"C^~.n,"NG.zc2jb7Xƶň % ^.nJ?DKVfʃ,/S)].`:2= ]EXE>H{ Xt`qܹϟ/={~xxx7Do*|]k=&teAN:mj>:EO![N>EqwpJx CqḬlRR4TƋ k ¯Ɍr тTRF0r,zp9pG%4oo~)dߞn ƅ*X攝D4Ta3'XJB%Qj`.KPˋ9i M&[Yp{yq,Y\0*9a=!aON:`AB,l"mӏWr~ţcah|4$ꔠ`Bo3 {e5;Q#m.擏810I"'S33~^fӘah#18mkSp`rGZ(D}woo5+p_ +9[Kc;91czꅤ$pq>|*OJJ\ uX*:e> Q+ƌ8c1>[f(h^ de 76ѯ=.]a6 y_Bzϡg#3.7 1P-FbM&'c3Иz >5<( /^ Kj(ǸO1wٟ|e=?6M~A q`lѩS'!XYJN&&<9soVC_=똧s-? !:bs". 
ۿϛ/R􅊔ʫQБ=ˋs ONS.VM>‘tװ}ؿe"_^5^оMNָ.W 6C':ep[ȶ)o۠m]h c'q&iI>7`=p!xhVEφ2HGa{T*{ӾߊVC&&iɲv.p)2br-ˈYk:NgϞ &y @L W?}@+8w4¤sщfF 'ڲ3S9HF@O4)PZMh1Q̞0 pQ!E/G ز`sa&^~fXh4|]D7pTZ͛z/^T 53ĉ,"{e[C}#+N!vr_b\ \*w2l%7=gadaR.^H7n\A ޗ$ - Lj,::Sycr67'nAp_A+Y'A>0|1ާWg`ΆH<'x*A_IS"PNkcoǓf!#&Ǿo,ċt(r3i"m7с{ 55 N\D"R"G"_gRdU2VޖSqI=?r2eȶD]Rhew pMyhZ=#hӓ&6-P*/w;R\\=i&UZg&&3|=)L5@mh`+  =R\V;ӍZˡP J|MKKCHHH_+믿^Ph•ijrJ@J>K gOBC.gGZ-LdyzcW q!pKa1h_vPtE^5km+d4FAwJd`iǝg]7,)H/n@1 7Ȝ!w;CFq{Eƫ-ɭpÇZt+IF zm vAFZXԆAThO";l \vHftoTJ6PSwRrƀMvSfΪ%ҒU# ŋ.,VJ~C4T*R\VKe0fRفٯ*,^l2FF?qd__fޖ"*'ov(JH*VZ`E½<^?olz[wG8 7Z18'{322];aVun 1ǡ" +#3~{.ɍxq9.5s_VMv6Gy~ǘPʹȌ);+G>E|S>4G#-8]GN"IN#-8]GN"IN#-8]GN"IN#-8]GN"IN#-8]GN"IN#-8]GN"IN#-8]GN"IN#-8]GN"IN#-8]GN"IN#-km.G#s8yyyչAs8Bj y.G#P'plur|G6nmp,t9@ju&G#Pgn=8G#Ppҭ y@E9@[:o#pKΊ# [69:lʔ)ӫ:fD}$U-\OhZ #r4՛t.Y?p"ͭHbѦ…xe!-/_]oxDE"QxR5|\ing8]ܢ>d:jpv]9kMDMTMtȼQ Rq!1psqxÔeɦK/m804sayF ,YWcu DMkYaIߢ#GwgSp&"V(OaRa_7`?ķ"lQk\Nȸr+~ك[LW^w CQ#OCj$6l(=㥁Q8}KV}6"öduSLꫛj2dFaǎs4*ծŞp݋xUkشgM3k[KӜ[@)xl&!͟#c ZʧN0k"lV CDҁHt`12^$XL6]1wV7 R~ҟtp vGxcʳh+8f6$w'Ӟ*Y̮Pa܊9<=4/}k |MP[D??|e_ϫO˘0qHsK<1TO=$_jF¾q,^ v2b(Zkǖ}r 4n=26`Q`mBG*s[Ȃ͕'-/@Ett /2Dz%D;yxa` ",DYKTU8k$褰!awU1Hv͌ m}1v0 oÑ/w||}%DϨ=4ƀ^a”W81ra4ޱ O›<`p4]һ>-%t'`!'W'|t59*֯ .!H?1~@S$Dg!'0ok8[2m h  \ Nmؓe1rHsIYvPTѾC!,z >b0Z+q`8-4&5IȒ*i5f^GVB>T0K6z%6.sQ/C1bpONᗅ{`0(wʕЭPk:R6FhBd":Ҥ%زy%y\]4eBDGiK$[HUu1#е-#|֟"ԄHw#Ȉˑκ˩(ɇwסA#?S{h'\}(YQY0F 6p}Q#,|4_`IšP7t8 F- םi._i&Mg@LAPiySpխwm$QMՔ|rU{ KHI%-C1\ˆt˰)W2P Md,a#T:Qx 9ߦZ\s'LfA>[5&Mo£W'Q B " q1't"񮐑QKY˷yyUDC;"ڤB O/;Çggd \'!2<Ύe q1pUzbCå'iV#KxM¬ 4)7֞ z[ѭ[s@v)]BKKS*읤VF6fIVbs&4莽!ȴ6sg sx[9l0p#3ѡl̀ JgAR3 F3dMKdYìuXlr_udMȻ!?f|`IF(ĥg"W$ꎩS?YJڶrlsçtܳk&R0,bE<=;$bTv_F2+ԭ(rxȿoo!+a[I[.`EC/3aĠIDATF6`!vdI#:T ZT=$&{+-O^#XA>c 01x(`61ʠ\g/IkM"݊J/.\EK[4i@_ĥ,훉hZ5T=:df?ʒ47n@{mFL-\-yM4X>'6icTr9lthJGN}gr>\u#O/.d':F6$vgp&70eڹ;j?B;W*s=lɇ#m5Х>d aF~0 ޭ >e"4XPG^Cf1f`D=Ef@@61X?{26CmukhDii/( ;H#$2h)aQz#1hfˉ YKO im,?-(aoak&*"HI+q0<ӔR 5$ ! 
/Hw"\4~1߯u@9LGAeñ+7KE{KȲ!:ՆaP،ɡе\:1oJݣ`ީoȥcu bBGzBWZ $^!g67: c tf q$&_p=IB-4"{8W趒#zlBZ%RhEk5!Gk4(F'4 ͪe}-.PuKV`s@|^+D=.xێ.#6)Ei* i,<ĢFڡ)Vjq%|;-Phk&v!_ޱ㈊?oSc0HT#[ Tcv$zھ-OFd3[ 7PѷڊS.#!Uqe8pƍ4諔F`#I žHػvñq6RMdÉq*җHXgk1d mmT@R)[X vI2!d0˒yÍpv;0 if" YidD!YӬ3▧PgJȓ{%!rP]PN!CV:X[iAxx2BAvͰoWf:4Ul$+Š"RgCoڦ-4p/HgQacfANDf5.],$ &&ӡ&] 9 B 5$ە_xaI6ƢBKm&=?߹OgQ!xE= :Ǹsss^6'_3Y0I!6€aKm|T[0hj:B4R⥧GgB/AT}۰!IiCۋ+\FlpJUw ]qOҤ" t:Y~, F"/ĨehE@ m:HMh{Q}lD17R4ep&mq+C[D/]V q[xN-lH7ɔ *49v0i{= hN=)B !~2rHXXB=4΂gyXFH8 E +EP _-("Cv|ċT͚ةKHS_Xf&Y8vEI 9h dP]z :.'M;D-ċccwM˓ 歡ƉYrm߁_* ++pCtJߍҰ ,Y{U!aeҲ^4{F&G4Y\XciG%hأTV:@kv i׈Z6e7wع2VڤP߉~©rkbhr\Tn]_fc-jxV4hۊ&sVj\?a$N'WRdt;$dLV;[GYJG~4d%ŚG&F֣t]k#X):j_`pgx!$7Uy`X{ARؙ@c=ca&ůN%ȅ:(h6Wь"H[Oؿ E,3N^'7O¼6'[KUDt ?gc JtA`V>rV:~^O_$ڙ9匉gѧoZNuIUti $3KB;fA4.ApT#z5Act9EY* <HSI_4!4Kװh vb-!3iGךp! hEYqڴ\#It[0vcX=p)>)/)m'dۆ?>b]yOM\1phmO}P/+^5h5p4W]paزO֠&6ȅY*\6dg#UZ,)#a=MpǘIHOeߙ²4b+CflZ [z?? = ^]111 V%߅ ɕTwWVWNt^MŐ@oZ+\_"o_=4w1#ڒ0PxEKwDǚѵ]؏Sm3hڈ'RT*ꀓ™ɂ#n䅕8tyS`QZіᓸ9d2`9+L>MΡ LeMKvGv]+hRQφƒUO6fdUC88~+e=9ܝc[ȍKA !6w沸tƂہ ^Bf;W5H^C?K4 (NidQ8)TӬFnYE,tH樗.Ɓ]tp"W]) rA5*`9ɹ^|PZf>NÛ"nNHf:tc DE)Y~f|JMr+sjQU/QxCub|M. [Г޴"2QJ((/DYYEI?vX0&)<|}lVAlK ZJד_ |-f'DbYt`&wV+|UtKĂ'YwZ:H4* J %ʂnTN@aW;%x1paLͿUSg My::WY:Ҕ*_wY(Ț%pYFrZ<b7ɇ~?MehdE{ix*# hSId3}n÷coK[95w?]sN W9ёӎOQ١܂2V2rWᏠk3t@ -݂ɢX;l :+Jݙ\ڛ\s"Ǵ&&}/o/[^@[f%~5[,A\GFm˲feK'\Vd"kTirMn .Mƿ;8su*$TVY䋑poo kJ U-ro~Z p/gRUkiKJf$h.kj)$\]'q0tJ 7NGA^Ąv} &O Lj_DUXʪӒ(c3|WںhkpYIݮ`e )V2d 3YB^5 )qXE%W[{ {ѿDWҲ4} zTYx|# UC5[:TI/_wrȭpM~XL[,E1w<$!!sJM p8@8G#P>U2V~}<#pA[Ó8@u#-FpA[Ó8@u#Iq8rrIGd-N$IENDB`nipype-1.7.0/doc/devel/gitwash/forking_hell.rst000066400000000000000000000021451413403311400215260ustar00rootroot00000000000000.. _forking: ========================================== Making your own copy (fork) of nipype ========================================== You need to do this only once. The instructions here are very similar to the instructions at http://help.github.com/forking/ |emdash| please see that page for more detail. 
We're repeating some of it here just to give the specifics for the nipype_ project, and to suggest some default names. Set up and configure a github_ account ====================================== If you don't have a github_ account, go to the github_ page, and make one. You then need to configure your account to allow write access |emdash| see the ``Generating SSH keys`` help on `github help`_. Create your own forked copy of nipype_ =========================================== #. Log into your github_ account. #. Go to the nipype_ github home at `nipype github`_. #. Click on the *fork* button: .. image:: forking_button.png Now, after a short pause and some 'Hardcore forking action', you should find yourself at the home page for your own forked copy of nipype_. .. include:: links.inc nipype-1.7.0/doc/devel/gitwash/git_development.rst000066400000000000000000000003121413403311400222420ustar00rootroot00000000000000.. _git-development: ===================== Git for development ===================== Contents: .. toctree:: :maxdepth: 2 forking_hell set_up_fork configure_git development_workflow nipype-1.7.0/doc/devel/gitwash/git_install.rst000066400000000000000000000011111413403311400213640ustar00rootroot00000000000000.. _install-git: ============= Install git ============= Overview ======== ================ ============= Debian / Ubuntu ``sudo apt-get install git-core`` Fedora ``sudo yum install git-core`` Windows Download and install msysGit_ OS X Use the git-osx-installer_ ================ ============= In detail ========= See the git_ page for the most recent information. Have a look at the github_ install help pages available from `github help`_ There are good instructions here: http://book.git-scm.com/2_installing_git.html .. 
include:: links.inc nipype-1.7.0/doc/devel/gitwash/git_intro.rst000066400000000000000000000010361413403311400210570ustar00rootroot00000000000000============== Introduction ============== These pages describe a git_ and github_ workflow for the nipype_ project. There are several different workflows here, for different ways of working with *nipype*. This is not a comprehensive git_ reference, it's just a workflow for our own project. It's tailored to the github_ hosting service. You may well find better or quicker ways of getting stuff done with git_, but these should get you started. For general resources for learning git_ see :ref:`git-resources`. .. include:: links.inc nipype-1.7.0/doc/devel/gitwash/git_links.inc000066400000000000000000000060551413403311400210130ustar00rootroot00000000000000.. This (-*- rst -*-) format file contains commonly used link targets and name substitutions. It may be included in many files, therefore it should only contain link targets and name substitutions. Try grepping for "^\.\. _" to find plausible candidates for this list. .. NOTE: reST targets are __not_case_sensitive__, so only one target definition is needed for nipy, NIPY, Nipy, etc... .. git stuff .. _git: http://git-scm.com/ .. _github: http://github.com .. _github help: http://help.github.com .. _msysgit: http://code.google.com/p/msysgit/downloads/list .. _git-osx-installer: http://code.google.com/p/git-osx-installer/downloads/list .. _subversion: http://subversion.tigris.org/ .. _git cheat sheet: http://github.com/guides/git-cheat-sheet .. _pro git book: http://progit.org/ .. _git svn crash course: http://git-scm.com/course/svn.html .. _learn.github: http://learn.github.com/ .. _network graph visualizer: http://github.com/blog/39-say-hello-to-the-network-graph-visualizer .. _git user manual: http://www.kernel.org/pub/software/scm/git/docs/user-manual.html .. _git tutorial: http://www.kernel.org/pub/software/scm/git/docs/gittutorial.html .. 
_git community book: http://book.git-scm.com/ .. _git ready: http://www.gitready.com/ .. _git casts: http://www.gitcasts.com/ .. _Fernando's git page: http://www.fperez.org/py4science/git.html .. _git magic: http://www-cs-students.stanford.edu/~blynn/gitmagic/index.html .. _git concepts: http://www.eecs.harvard.edu/~cduan/technical/git/ .. _git clone: http://www.kernel.org/pub/software/scm/git/docs/git-clone.html .. _git checkout: http://www.kernel.org/pub/software/scm/git/docs/git-checkout.html .. _git commit: http://www.kernel.org/pub/software/scm/git/docs/git-commit.html .. _git push: http://www.kernel.org/pub/software/scm/git/docs/git-push.html .. _git pull: http://www.kernel.org/pub/software/scm/git/docs/git-pull.html .. _git add: http://www.kernel.org/pub/software/scm/git/docs/git-add.html .. _git status: http://www.kernel.org/pub/software/scm/git/docs/git-status.html .. _git diff: http://www.kernel.org/pub/software/scm/git/docs/git-diff.html .. _git log: http://www.kernel.org/pub/software/scm/git/docs/git-log.html .. _git branch: http://www.kernel.org/pub/software/scm/git/docs/git-branch.html .. _git remote: http://www.kernel.org/pub/software/scm/git/docs/git-remote.html .. _git config: http://www.kernel.org/pub/software/scm/git/docs/git-config.html .. _why the -a flag?: http://www.gitready.com/beginner/2009/01/18/the-staging-area.html .. _git staging area: http://www.gitready.com/beginner/2009/01/18/the-staging-area.html .. _tangled working copy problem: http://tomayko.com/writings/the-thing-about-git .. _git management: http://kerneltrap.org/Linux/Git_Management .. _linux git workflow: http://www.mail-archive.com/dri-devel@lists.sourceforge.net/msg39091.html .. _git parable: http://tom.preston-werner.com/2009/05/19/the-git-parable.html .. _git foundation: http://matthew-brett.github.com/pydagogue/foundation.html .. other stuff .. _python: http://www.python.org .. 
|emdash| unicode:: U+02014 nipype-1.7.0/doc/devel/gitwash/git_resources.rst000066400000000000000000000034421413403311400217410ustar00rootroot00000000000000.. _git-resources: ================ git_ resources ================ Tutorials and summaries ======================= * `github help`_ has an excellent series of how-to guides. * `learn.github`_ has an excellent series of tutorials * The `pro git book`_ is a good in-depth book on git. * A `git cheat sheet`_ is a page giving summaries of common commands. * The `git user manual`_ * The `git tutorial`_ * The `git community book`_ * `git ready`_ |emdash| a nice series of tutorials * `git casts`_ |emdash| video snippets giving git how-tos. * `git magic`_ |emdash| extended introduction with intermediate detail * The `git parable`_ is an easy read explaining the concepts behind git. * Our own `git foundation`_ expands on the `git parable`_. * Fernando Perez' git page |emdash| `Fernando's git page`_ |emdash| many links and tips * A good but technical page on `git concepts`_ * `git svn crash course`_: git_ for those of us used to subversion_ Advanced git workflow ===================== There are many ways of working with git_; here are some posts on the rules of thumb that other projects have come up with: * Linus Torvalds on `git management`_ * Linus Torvalds on `linux git workflow`_ . Summary; use the git tools to make the history of your edits as clean as possible; merge from upstream edits as little as possible in branches where you are doing active development. Manual pages online =================== You can get these on your own machine with (e.g) ``git help push`` or (same thing) ``git push --help``, but, for convenience, here are the online manual pages for some common commands: * `git add`_ * `git branch`_ * `git checkout`_ * `git clone`_ * `git commit`_ * `git config`_ * `git diff`_ * `git log`_ * `git pull`_ * `git push`_ * `git remote`_ * `git status`_ .. 
include:: links.inc nipype-1.7.0/doc/devel/gitwash/index.rst000066400000000000000000000003451413403311400201720ustar00rootroot00000000000000.. _using-git: Working with *nipype* source code ====================================== Contents: .. toctree:: :maxdepth: 2 git_intro git_install following_latest patching git_development git_resources nipype-1.7.0/doc/devel/gitwash/known_projects.inc000066400000000000000000000027021413403311400220700ustar00rootroot00000000000000.. Known projects .. PROJECTNAME placeholders .. _PROJECTNAME: http://neuroimaging.scipy.org .. _`PROJECTNAME github`: http://github.com/nipy .. _`PROJECTNAME mailing list`: https://mail.python.org/mailman/listinfo/neuroimaging .. numpy .. _numpy: http://numpy.scipy.org .. _`numpy github`: http://github.com/numpy/numpy .. _`numpy mailing list`: http://mail.scipy.org/mailman/listinfo/numpy-discussion .. scipy .. _scipy: http://www.scipy.org .. _`scipy github`: http://github.com/scipy/scipy .. _`scipy mailing list`: http://mail.scipy.org/mailman/listinfo/scipy-dev .. nipy .. _nipy: http://nipy.org/nipy .. _`nipy github`: http://github.com/nipy/nipy .. _`nipy mailing list`: http://mail.scipy.org/mailman/listinfo/nipy-devel .. ipython .. _ipython: http://ipython.scipy.org .. _`ipython github`: http://github.com/ipython/ipython .. _`ipython mailing list`: http://mail.scipy.org/mailman/listinfo/IPython-dev .. dipy .. _dipy: http://nipy.org/dipy .. _`dipy github`: http://github.com/Garyfallidis/dipy .. _`dipy mailing list`: http://mail.scipy.org/mailman/listinfo/nipy-devel .. nibabel .. _nibabel: http://nipy.org/nibabel .. _`nibabel github`: http://github.com/nipy/nibabel .. _`nibabel mailing list`: http://mail.scipy.org/mailman/listinfo/nipy-devel .. marsbar .. _marsbar: http://marsbar.sourceforge.net .. _`marsbar github`: http://github.com/matthew-brett/marsbar .. 
_`MarsBaR mailing list`: https://lists.sourceforge.net/lists/listinfo/marsbar-users nipype-1.7.0/doc/devel/gitwash/links.inc000066400000000000000000000001611413403311400201400ustar00rootroot00000000000000.. compiling links file .. include:: known_projects.inc .. include:: this_project.inc .. include:: git_links.inc nipype-1.7.0/doc/devel/gitwash/patching.rst000066400000000000000000000076651413403311400206740ustar00rootroot00000000000000================ Making a patch ================ You've discovered a bug or something else you want to change in nipype_ .. |emdash| excellent! You've worked out a way to fix it |emdash| even better! You want to tell us about it |emdash| best of all! The easiest way is to make a *patch* or set of patches. Here we explain how. Making a patch is the simplest and quickest, but if you're going to be doing anything more than simple quick things, please consider following the :ref:`git-development` model instead. .. _making-patches: Making patches ============== Overview -------- :: # tell git who you are git config --global user.email you@yourdomain.example.com git config --global user.name "Your Name Comes Here" # get the repository if you don't have it git clone git://github.com/nipy/nipype.git # make a branch for your patching cd nipype git branch the-fix-im-thinking-of git checkout the-fix-im-thinking-of # hack, hack, hack # Tell git about any new files you've made git add somewhere/tests/test_my_bug.py # commit work in progress as you go git commit -am 'BF - added tests for Funny bug' # hack hack, hack git commit -am 'BF - added fix for Funny bug' # make the patch files git format-patch -M -C master Then, send the generated patch files to the `nipype mailing list`_ |emdash| where we will thank you warmly. In detail --------- #. Tell git_ who you are so it can label the commits you've made:: git config --global user.email you@yourdomain.example.com git config --global user.name "Your Name Comes Here" #. 
If you don't already have one, clone a copy of the nipype_ repository:: git clone git://github.com/nipy/nipype.git cd nipype #. Make a 'feature branch'. This will be where you work on your bug fix. It's nice and safe and leaves you with access to an unmodified copy of the code in the main branch:: git branch the-fix-im-thinking-of git checkout the-fix-im-thinking-of #. Do some edits, and commit them as you go:: # hack, hack, hack # Tell git about any new files you've made git add somewhere/tests/test_my_bug.py # commit work in progress as you go git commit -am 'BF - added tests for Funny bug' # hack hack, hack git commit -am 'BF - added fix for Funny bug' Note the ``-am`` options to ``commit``. The ``m`` flag just signals that you're going to type a message on the command line. The ``a`` flag |emdash| you can just take on faith |emdash| or see `why the -a flag?`_. #. When you have finished, check you have committed all your changes:: git status #. Finally, make your commits into patches. You want all the commits since you branched from the ``master`` branch:: git format-patch -M -C master You will now have several files named for the commits:: 0001-BF-added-tests-for-Funny-bug.patch 0002-BF-added-fix-for-Funny-bug.patch Send these files to the `nipype mailing list`_. When you are done, to switch back to the main copy of the code, just return to the ``master`` branch:: git checkout master Moving from patching to development =================================== If you find you have done some patches, and you have one or more feature branches, you will probably want to switch to development mode. You can do this with the repository you have. Fork the nipype_ repository on github_ |emdash| :ref:`forking`. 
Then:: # checkout and refresh master branch from main repo git checkout master git pull origin master # rename pointer to main repository to 'upstream' git remote rename origin upstream # point your repo to default read / write to your fork on github git remote add origin git@github.com:your-user-name/nipype.git # push up any branches you've made and want to keep git push origin the-fix-im-thinking-of Then you can, if you want, follow the :ref:`development-workflow`. .. include:: links.inc nipype-1.7.0/doc/devel/gitwash/pull_button.png000066400000000000000000000311351413403311400214070ustar00rootroot00000000000000PNG  IHDR~\iu pHYs   IDATx]|ToߔMHH PKT,"S, O}%"]zHR@dw޽fIBB$a&{3s̙3s72p8 G# ?G#p!}p]G#?G#p!}p]G#?G#p!HKKCVVVuNPF C./y>xbpˏUVe͛7׭=Obyyy()6c\,4lYѬ)4,= {$H& 0,n8^CtqѸ˱gEsZzl6~?vu;Oͽyv+gkx G#}كSԩSª8xy$;w}Z*.6.O[B$ɴ r)ɝĻB? Z}|?ìqu'.=IXHG.A0[rgcǎN,YDx_>kƮG όSN3^ǶQX+PmY+ˁ\K_,?~\#GBPN$,"E؟ A~ǧGLVs',o<5#3( fa0`9UI l>`ŒOлa彣p0e3ggx~Zb>1cƠiӦE*(O_˝)ޝ:8G| < 'g>!A>; oxMۍ9gH0taق.IFp̟3W6bά|`xhs &އĶ?EK+X4|ad#,0+VCSȦٳоq?$` 0ot$WRsp-.vx3k;~ŒO >`4|H>Og,O5E 4}LZ'Š&^xz` hXl,CbM8ִvXI:?? $̯?c{+]DJ___L4 ׿h5 &O  ^d g|tU"?5yɟ0pwX&<[?GWirToV|){<3mLAmxs 6'@9l>.#0}H4tŅx-w}__n Ơu~1b{֮҇#`4|(6y{fm^g][l?2wagsxqt'`oqѪ!!CMcZxL8ϦmE)*ue/fJ{ ž{LhϞ=WZˀ8y36 ‘3ݟL?w $ZMsѬ^ tɉ)"NKto+Щw+N7oG@2_ >tł@Y``0<\"aJ)Ϭ &57"&I! Rs=oֱ8) V8ޑxkme: #Hxӧp8ƿRh,@ #膣kw )@Ȥ r-e0gG|)o$b}"1bPddpyC#W.?E'\lǂ\Zز\m4rognYr2y^ /&욟*SY|!ڜi&rE|v9#' eH\|=0i;@F%!_Am62bK ? ( #`H.Ң #ӑݐ`6ֿfO< ͨOzde/mȄ<@soΛXyɒKcs30oOIZM}5 R-AnJiz$HlY3"1j^8A: GZE_ErrV $1W,PߐT?M!1v\w'_w :\db5nMQ^&SZ ,h׮Knq7mP?څ3PBK2}G6IjwMF:,`*Lъ<-r$PjZaY4l/,y&q-[$e]u߾}'ABWՂgyL&%1/H}0 3wSJ;Fc~1{NĿ?\ꂠ8\"`}H`5%zf40>-|ڊyJw-<˖-1}tt ؿ?q7ni#)}ӡd:җ؁hn)U8D_Nҗ"z@vM$Y/ kg߲CC0[mğev= Lg!Kĩ֮#_<*JzPp. 
ˌA"ө)(5K@0Mm?_@~}'v U8k ~Z(tn@Pu/71fM |_7P>1/fGn, K/)_QWKcAtهK嗰EVG,]0>\!dD6]4}.>_F MX5GfNY {;Ga1 Z*,Rf-Nslff*e0gJ1)#36fKGzܰ W&}9cpNA-[XP&ɹ[u.lNw}M=]gT̚ghSOgϞ`&eؤ) |4xnwH{ n>X'5!ȋQ(0Mз"d]H001\ka/<[`0->BkG8.ТVUo[1㥓l&IUF˾B["^.W".=REIiWiMb6WW/.MDvT~kg ;DY(4(sy#jᓟephn×/?Wx,j[ П|\Ld1q@)2j8ˊ}'`㝑|v ( j:/}Za;,Tw(pDQBU2Z}d.s^_|~j?^4IaA'¯vW>TaS I) ֖4$^+0ܹ^=NѺs ""]tg:ea3YLN˳g/tZ9llv4AAslnf6L4Ծbf"^}ϱS Y9P{“].ã䞠{'śrx)帬V _cO%U8c\LdҗVHe =S9SnLH'?lm|,:Z( cq8@,o]UW8UKi<+{? f˘0`X.pnPⱹWS8nL8ftv\+­=bcNIU:%qK^L<+B-p8!PiX\)p8U,8G|p_>x)G#PeʲpʇWÍp8U,8G|p_>x)G#PeʲpʇWÍp8U,8G|p_>x)G#PeʲpʇWÍp8U,8G|p_>x)G#PeʲpʇWÍp8U,8G|p_>x)G#PeʲpʇWÍp8U,8G|p_>x)G#Peʲpʇ+Kq8@D@YF*I8'#pʇw7^#TY p8#-cKp8*WU}xG#PvGJ#-*>NP{@)#mq57 B"hC<jE@O>ǧ"ӪC:9ш /1g.%O_V0$t3D& S݆WT`3kA-sprNdlg.Ν8x"g/!Eo.0nUՆ+ZjUW*@\=SXXe(^l0T:2hVquSM(lx:}0^su*i 7 ,I&g-51xk/cx2S&2[qyZHAͩ#ɑz[Pw ~!} _lH2pm0q;W۶q6_1\=D"|+`-'B-09Tyy0 "u Aj>{`ƄpmȭWw1XXZM90t$Q6h@<FsxE(٤mҮ/}|%fg7е7IAvAPz^TTKu8L: ځ,{+NZ*(Vw_Bn+oAxbPSĬɊ-ʋB_zeʠhƕ qB뗐Њ$㲞CzegڪZO$<g_b̛F>-rOG`V'%|<|࡮x y-U rYѦfdLYǾy8dAZ>\ IfEnA$_Nͳ/[liQyNxvyC;S[>SXm1dqd4v:mS`- ܈6tAx5nCfy9A7Nnʭ kja&5Rnhۼ6^6䦜ǁh3|zc՘W)0;BHwb9bj4 cuɰhjt/nF蓮!mHPHs`"nfVbl8l#ߠ@ {/|3cGw|g,h42G`Ӯ|l7aCwa\]T߉ 7"9=$FNm:6ݒ!c)v5.gBgNϧV㔃+Pw yF+D_ҝ EzqLhIf/@1[Ix}!⃺uB?m"A VD{lŔycۡtxb]?ca6JZIbvby-z==\dryW)ih__:(\JNESOgL"VtjڹkuImvHtUBe_h^&*oM6hGN>g WCaBU|A _ I+,N,ɉ~ʕF)2\ju~]8kGP^AwԈ9ʇum %O6B\ti-%IT2ꃊr*Cm& o^ f2f쒘pF6r%߾tzfdQ OQ2 hZKwV&v@taa iZV~ jOadtNC},ŏ4<RdL'Sԥ9s>ǡa,Jm{|˨CLN]3>^| vM.\5|xU wnsI,!Τ-gI<4 nۣ 1f4GOJ~F\VD؜A<)IaR$1tX6{y _?x$ ;6\KC=;!keؖpMhBT1X9]l A&-q4 ž|Yj:/; PÓ͂bdٱILMQjb =)7M7@ >G'5j4abwTm |9٠9!mʻ@ w+ۀVJh-7iĽ>;$ \V&·/>:=C+:joxFjobpH;;x'#%nȋJ$9nYioP.eӠ1Ypr>̦ oH ^Bs@jr)l+kl7QWSg]-u9[ӈՑIU.d+)/'Ъ jFw$@20HV~$˛')^ЫI-l}V z'&h`űbi.2b J+W"lR*d f$He bɔ)D{2E= u yS/,oe@t6Gځ+Fk 4UZ됰{-"ikKpq ώ u9yG; bYR~S&@tmļԀMBHygCju/.Ykس4c|:(>*6mBjڐ&lM4X.mv6\[lS)o"jegGʸ4@|d:tjH.*ъ x=ŷj 4$e%z FN8џEgD׎K( _kҥȩF~JeY.i7!%5O߫cm'lŶN6ܔl$ɊfD9hP3;okJ-RY~uʒ{?$Zܶ;۱yՏ$:ڣK;r;zCbwād >vvQ 6at܆BDȗ*XMTcKVGӪ. 
>,o;~g&M~)?lFRv_;r1gbK5$_5::EuZsZG)&rЊMn.@gT"T:5,JVC`R]Ehi~ 2߶hk'5!h߆k[&xB4v>EMWBb<iR+UŖ)$K'Ϯ3pSb7\8X0: <;+K#;Yv#^;7҆ZtBZш՘Xj,8(G0S[ gbI T&(dt Vn O t…NAF*Qϓtj#+)e& LdȾSx 2K.Ҳ ]?tj rrjCm#-ϡ{.M9d,de8_7,mf$i3O(lI IɆл 5$PER Cg1~wf )kVzփϥ7csb7?ƳSn>yH̫TShj$R:;o .<Ѫfڌ;^EapiWSGJ6#>|nV7ՄԠpǟxL{.鴡)kK~b$|& r's;% '* ;mAӬ?g~ݒ<|}=p)LW|>}LIRT/M}twI4mP{Ċuo" vZ ,7:t`J!6%) E ge4)ZF,{ MX%Ft8W23- ^ y/pQP2ZSyfRC1.6ꈼ>^l$ཹo9"l$Crr#6R $36iXiEG ╭_cYd;̚=3!k-Nr䲘dF #UəPFPBxKI+\;&{&{eTR%}@Y3rr)|wE1`h׬&RF\C1T~4j%3&lgz dQ4#-ߝYzr/! zjI^p8@+ hWp8:youSp8eA[eAp8nW&.p8 -r8jWՀ G,pWOYy9@5@[Հ G,?V#9}nIENDB`nipype-1.7.0/doc/devel/gitwash/set_up_fork.rst000066400000000000000000000036771413403311400214160ustar00rootroot00000000000000.. _set-up-fork: ================== Set up your fork ================== First you follow the instructions for :ref:`forking`. Overview ======== :: git clone git@github.com:your-user-name/nipype.git cd nipype git remote add upstream git://github.com/nipy/nipype.git In detail ========= Clone your fork --------------- #. Clone your fork to the local computer with ``git clone git@github.com:your-user-name/nipype.git`` #. Investigate. Change directory to your new repo: ``cd nipype``. Then ``git branch -a`` to show you all branches. You'll get something like:: * master remotes/origin/master This tells you that you are currently on the ``master`` branch, and that you also have a ``remote`` connection to ``origin/master``. What remote repository is ``remote/origin``? Try ``git remote -v`` to see the URLs for the remote. They will point to your github_ fork. Now you want to connect to the upstream `nipype github`_ repository, so you can merge in changes from trunk. .. _linking-to-upstream: Linking your repository to the upstream repo -------------------------------------------- :: cd nipype git remote add upstream git://github.com/nipy/nipype.git ``upstream`` here is just the arbitrary name we're using to refer to the main nipype_ repository at `nipype github`_. 
Note that we've used ``git://`` for the URL rather than ``git@``. The ``git://`` URL is read only. This means we that we can't accidentally (or deliberately) write to the upstream repo, and we are only going to use it to merge into our own code. Just for your own satisfaction, show yourself that you now have a new 'remote', with ``git remote -v show``, giving you something like:: upstream git://github.com/nipy/nipype.git (fetch) upstream git://github.com/nipy/nipype.git (push) origin git@github.com:your-user-name/nipype.git (fetch) origin git@github.com:your-user-name/nipype.git (push) .. include:: links.inc nipype-1.7.0/doc/devel/gitwash/this_project.inc000066400000000000000000000002561413403311400215220ustar00rootroot00000000000000.. nipype .. _nipype: http://nipy.org/nipype .. _`nipype github`: http://github.com/nipy/nipype .. _`nipype mailing list`: http://mail.scipy.org/mailman/listinfo/nipy-devel nipype-1.7.0/doc/devel/index.rst000066400000000000000000000010451413403311400165220ustar00rootroot00000000000000.. _developers-guide-index: ================= Developer Guide ================= :Release: |version| :Date: |today| Since nipype is part of the NIPY_ project, we follow the same conventions documented in the `NIPY Developers Guide `_. For bleeding-edge version help see `Nightly documentation `_ .. toctree:: :maxdepth: 2 writing_custom_interfaces gitwash/index architecture provenance software_using_nipype testing_nipype .. include:: ../links_names.txt nipype-1.7.0/doc/devel/interface_specs.rst000066400000000000000000000606321413403311400205570ustar00rootroot00000000000000.. _interface_specs: ======================== Interface Specifications ======================== Before you start ---------------- Nipype is maintained by an enthusiastic group of developers, and we're excited to have you join us! In case of trouble, we encourage you to post on `NeuroStars `_ with the `nipype` tag. 
NeuroStars.org is a platform similar to StackOverflow but dedicated to neuroinformatics. You can also post on the nipype developers mailing list: http://mail.python.org/mailman/listinfo/neuroimaging. As we are sharing a mailing list with the nipy community, please add ``[nipype]`` to the message title. Alternatively, you're welcome to chat with us in the Nipype `Gitter `_ channel or in the BrainHack `Slack `_ channel. (Click `here `_ to join the Slack workspace.) Overview -------- We're using the `Traits `_ (formerly known as Enthought Traits) package for all of our inputs and outputs. Traits allows us to validate user inputs and provides a mechanism to handle all the *special cases* in a simple and concise way though metadata. With the metadata, each input/output can have an optional set of metadata attributes (described in more detail below). The machinery for handling the metadata is located in the base classes, so all subclasses use the same code to handle these cases. This is in contrast to our previous code where every class defined it's own _parse_inputs, run and aggregate_outputs methods to handle these cases. Which of course leads to a dozen different ways to solve the same problem. Traits is a big package with a lot to learn in order to take full advantage of. But don't be intimidated! To write a Nipype Trait Specification, you only need to learn a few of the basics of Traits. Here are a few starting points in the documentation: * What are Traits? The `Introduction in the User Manual `_ gives a brief description of the functionality traits provides. * Traits and metadata. The `second section of the User Manual `_ gives more details on traits and how to use them. Plus there a section describing metadata, including the metadata all traits have. * If your interested in more of a *big picture* overview, `Gael wrote a good tutorial `_ that shows how to write a scientific application using traits for the benefit of the generated UI components. 
(For now, Nipype is not taking advantage of the generated UI feature of traits.) Traits version ^^^^^^^^^^^^^^ We're using Traits version 4.x which can be installed from `pypi `_ More documentation ^^^^^^^^^^^^^^^^^^ Not everything is documented in the User Manual, in those cases the the `API docs `_ is your next place to look. Nipype Interface Specifications ------------------------------- Each interface class defines two specifications: 1) an InputSpec and 2) an OutputSpec. Each of these are prefixed with the class name of the interfaces. For example, Bet has these specs: - BETInputSpec - BETOutputSpec Each of these Specs are classes, derived from a base TraitedSpec class (more on these below). The InputSpec consists of attributes which correspond to different parameters for the tool they wrap/interface. In the case of a command-line tool like Bet, the InputSpec attributes correspond to the different command-line parameters that can be passed to Bet. When an interfaces class is instantiated, the InputSpec is bound to the ``inputs`` attribute of that object. Below is an example of how the ``inputs`` appear to a user for Bet:: >>> from nipype.interfaces import fsl >>> bet = fsl.BET() >>> type(bet.inputs) >>> bet.inputs. 
bet.inputs.__class__ bet.inputs.center bet.inputs.__delattr__ bet.inputs.environ bet.inputs.__doc__ bet.inputs.frac bet.inputs.__getattribute__ bet.inputs.functional bet.inputs.__hash__ bet.inputs.hashval bet.inputs.__init__ bet.inputs.infile bet.inputs.__new__ bet.inputs.items bet.inputs.__reduce__ bet.inputs.mask bet.inputs.__reduce_ex__ bet.inputs.mesh bet.inputs.__repr__ bet.inputs.nooutput bet.inputs.__setattr__ bet.inputs.outfile bet.inputs.__str__ bet.inputs.outline bet.inputs._generate_handlers bet.inputs.outputtype bet.inputs._get_hashval bet.inputs.radius bet.inputs._hash_infile bet.inputs.reduce_bias bet.inputs._xor_inputs bet.inputs.skull bet.inputs._xor_warn bet.inputs.threshold bet.inputs.args bet.inputs.vertical_gradient Each Spec inherits from a parent Spec. The parent Specs provide attribute(s) that are common to all child classes. For example, FSL InputSpecs inherit from interfaces.fsl.base.FSLTraitedSpec. FSLTraitedSpec defines an ``outputtype`` attribute, which stores the file type (NIFTI, NIFTI_PAIR, etc...) for all generated output files. InputSpec class hierarchy ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Below is the current class hierarchy for InputSpec classes (from base class down to subclasses).: ``TraitedSpec``: Nipype's primary base class for all Specs. Provides initialization, some nipype-specific methods and any trait handlers we define. Inherits from traits.HasTraits. ``BaseInterfaceInputSpec``: Defines inputs common to all Interfaces (``ignore_exception``). If in doubt inherit from this. 
``CommandLineInputSpec``: Defines inputs common to all command-line classes (``args`` and ``environ``) ``FSLTraitedSpec``: Defines inputs common to all FSL classes (``outputtype``) ``SPMCommandInputSpec``: Defines inputs common to all SPM classes (``matlab_cmd``, ``path``, and ``mfile``) ``FSTraitedSpec``: Defines inputs common to all FreeSurfer classes (``sbjects_dir``) ``MatlabInputSpec``: Defines inputs common to all Matlab classes (``script``, ``nodesktop``, ``nosplash``, ``logfile``, ``single_comp_thread``, ``mfile``, ``script_file``, and ``paths``) ``SlicerCommandLineInputSpec``: Defines inputs common to all Slicer classes (``module``) Most developers will only need to code at the the interface-level (i.e. implementing custom class inheriting from one of the above classes). Output Specs ^^^^^^^^^^^^ The OutputSpec defines the outputs that are generated, or possibly generated depending on inputs, by the tool. OutputSpecs inherit from ``interfaces.base.TraitedSpec`` directly. Controlling outputs to terminal ------------------------------- It is very likely that the software wrapped within the interface writes to the standard output or the standard error of the terminal. Interfaces provide a means to access and retrieve these outputs, by using the ``terminal_output`` attribute: :: import nipype.interfaces.fsl as fsl mybet = fsl.BET(from_file='bet-settings.json') mybet.terminal_output = 'file_split' In the example, the ``terminal_output = 'file_split'`` will redirect the standard output and the standard error to split files (called ``stdout.nipype`` and ``stderr.nipype`` respectively). The possible values for ``terminal_output`` are: *file* Redirects both standard output and standard error to the same file called ``output.nipype``. Messages from both streams will be overlapped as they arrive to the file. *file_split* Redirects the output streams separately, to ``stdout.nipype`` and ``stderr.nipype`` respectively, as described in the example. 
*file_stdout* Only the standard output will be redirected to ``stdout.nipype`` and the standard error will be discarded. *file_stderr* Only the standard error will be redirected to ``stderr.nipype`` and the standard output will be discarded. *stream* Both output streams are redirected to the current logger printing their messages interleaved and immediately to the terminal. *allatonce* Both output streams will be forwarded to a buffer and stored separately in the `runtime` object that the `run()` method returns. No files are written nor streams printed out to terminal. *none* Both outputs are discarded In all cases, except for the ``'none'`` setting of ``terminal_output``, the ``run()`` method will return a "runtime" object that will contain the streams in the corresponding properties (``runtime.stdout`` for the standard output, ``runtime.stderr`` for the standard error, and ``runtime.merged`` for both when streams are mixed, eg. when using the *file* option). :: import nipype.interfaces.fsl as fsl mybet = fsl.BET(from_file='bet-settings.json') mybet.terminal_output = 'file_split' ... result = mybet.run() result.runtime.stdout ' ... captured standard output ...' Traited Attributes ------------------ Each specification attribute is an instance of a Trait class. These classes encapsulate many standard Python types like Float and Int, but with additional behavior like type checking. (*See the documentation on traits for more information on these trait types.*) To handle unique behaviors of our attributes we us traits metadata. These are keyword arguments supplied in the initialization of the attributes. The base classes ``BaseInterface`` and ``CommandLine`` (defined in ``nipype.interfaces.base``) check for the existence/or value of these metadata and handle the inputs/outputs accordingly. 
For example, all mandatory parameters will have the ``mandatory = True`` metadata:: class BetInputSpec(FSLTraitedSpec): infile = File(exists=True, desc = 'input file to skull strip', argstr='%s', position=0, mandatory=True) Common ^^^^^^ ``exists`` For files, use ``nipype.interfaces.base.File`` as the trait type. If the file must exist for the tool to execute, specify ``exists = True`` in the initialization of File (as shown in BetInputSpec above). This will trigger the underlying traits code to confirm the file assigned to that *input* actually exists. If it does not exist, the user will be presented with an error message:: >>> bet.inputs.infile = 'does_not_exist.nii' ------------------------------------------------------------ Traceback (most recent call last): File "", line 1, in File "/Users/cburns/local/lib/python2.5/site-packages/nipype/interfaces/base.py", line 76, in validate self.error( object, name, value ) File "/Users/cburns/local/lib/python2.5/site-packages/enthought/traits/trait_handlers.py", line 175, in error value ) TraitError: The 'infile' trait of a BetInputSpec instance must be a file name, but a value of 'does_not_exist.nii' was specified. ``hash_files`` To be used with inputs that are defining output filenames. When this flag is set to false any Nipype will not try to hash any files described by this input. This is useful to avoid rerunning when the specified output file already exists and has changed. ``desc`` All trait objects have a set of default metadata attributes. ``desc`` is one of those and is used as a simple, one-line docstring. The ``desc`` is printed when users use the ``help()`` methods. **Required:** This metadata is required by all nipype interface classes. ``usedefault`` Set this metadata to True when the *default value* for the trait type of this attribute is an acceptable value. All trait objects have a default value, ``traits.Int`` has a default of ``0``, ``traits.Float`` has a default of ``0.0``, etc... 
You can also define a default value when you define the class. For example, in the code below all objects of ``Foo`` will have a default value of 12 for ``x``:: >>> import enthought.traits.api as traits >>> class Foo(traits.HasTraits): ... x = traits.Int(12) ... y = traits.Int ... >>> foo = Foo() >>> foo.x 12 >>> foo.y 0 Nipype only passes ``inputs`` on to the underlying package if they have been defined (more on this later). So if you specify ``usedefault = True``, you are telling the parser to pass the default value on to the underlying package. Let's look at the InputSpec for SPM Realign:: class RealignInputSpec(BaseInterfaceInputSpec): jobtype = traits.Enum('estwrite', 'estimate', 'write', desc='one of: estimate, write, estwrite', usedefault=True) Here we've defined ``jobtype`` to be an enumerated trait type, ``Enum``, which can be set to one of the following: ``estwrite``, ``estimate``, or ``write``. In a container, the default is always the first element. So in this case, the default will be ``estwrite``:: >>> from nipype.interfaces import spm >>> rlgn = spm.Realign() >>> rlgn.inputs.infile >>> rlgn.inputs.jobtype 'estwrite' ``xor`` and ``requires`` Both of these accept a list of trait names. The ``xor`` metadata reflects mutually exclusive traits, while the requires metadata reflects traits that have to be set together. When a xor-ed trait is set, all other traits belonging to the list are set to Undefined. The function check_mandatory_inputs ensures that all requirements (both mandatory and via the requires metadata are satisfied). These are also reflected in the help function. ``copyfile`` This is metadata for a File or Directory trait that is relevant only in the context of wrapping an interface in a `Node` and `MapNode`. `copyfile` can be set to either `True` or `False`. `False` indicates that contents should be symlinked, while `True` indicates that the contents should be copied over. 
``min_ver`` and ``max_ver`` These metadata determine if a particular trait will be available when a given version of the underlying interface runs. Note that this check is performed at runtime.:: class RealignInputSpec(BaseInterfaceInputSpec): jobtype = traits.Enum('estwrite', 'estimate', 'write', min_ver='5', usedefault=True) ``deprecated`` and ``new_name`` This is metadata for removing or renaming an input field from a spec.:: class RealignInputSpec(BaseInterfaceInputSpec): jobtype = traits.Enum('estwrite', 'estimate', 'write', deprecated='0.8', desc='one of: estimate, write, estwrite', usedefault=True) In the above example this means that the `jobtype` input is deprecated and will be removed in version 0.8. Deprecation should be set to two versions from current release. Raises `TraitError` after package version crosses the deprecation version. For inputs that are being renamed, one can specify the new name of the field.:: class RealignInputSpec(BaseInterfaceInputSpec): jobtype = traits.Enum('estwrite', 'estimate', 'write', deprecated='0.8', new_name='job_type', desc='one of: estimate, write, estwrite', usedefault=True) job_type = traits.Enum('estwrite', 'estimate', 'write', desc='one of: estimate, write, estwrite', usedefault=True) In the above example, the `jobtype` field is being renamed to `job_type`. When `new_name` is provided it must exist as a trait, otherwise an exception will be raised. .. note:: The version information for `min_ver`, `max_ver` and `deprecated` has to be provided as a string. For example, `min_ver='0.1'`. CommandLine ^^^^^^^^^^^ ``argstr`` The metadata keyword for specifying the format strings for the parameters. This was the *value* string in the opt_map dictionaries of Nipype 0.2 code. 
If we look at the ``FlirtInputSpec``, the ``argstr`` for the reference file corresponds to the argument string I would need to provide with the command-line version of ``flirt``:: class FlirtInputSpec(FSLTraitedSpec): reference = File(exists = True, argstr = '-ref %s', mandatory = True, position = 1, desc = 'reference file') **Required:** This metadata is required by all command-line interface classes. ``position`` This metadata is used to specify the position of arguments. Both positive and negative values are accepted. ``position = 0`` will position this argument as the first parameter after the command name. ``position = -1`` will position this argument as the last parameter, after all other parameters. ``genfile`` If True, the ``genfile`` metadata specifies that a filename should be generated for this parameter *if-and-only-if* the user did not provide one. The nipype convention is to automatically generate output filenames when not specified by the user both as a convenience for the user and so the pipeline can easily gather the outputs. Requires ``_gen_filename()`` method to be implemented. This way should be used if the desired file name is dependent on some runtime variables (such as file name of one of the inputs, or current working directory). In case when it should be fixed it's recommended to just use ``usedefault``. ``sep`` For List traits the string with which elements of the list will be joined. ``name_source`` Indicates the list of input fields from which the value of the current File output variable will be drawn. This input field must be the name of a File. Chaining is allowed, meaning that an input field can point to another as ``name_source``, which also points as ``name_source`` to a third field. In this situation, the templates for substitutions are also accumulated. ``name_template`` By default a ``%s_generated`` template is used to create the output filename. This metadata keyword allows overriding the generated name. 
``keep_extension`` Use this and set it ``True`` if you want the extension from the input to be kept. SPM ^^^ ``field`` name of the structure refered by the SPM job manager **Required:** This metadata is required by all SPM-mediated interface classes. Defining an interface class --------------------------- Common ^^^^^^ When you define an interface class, you will define these attributes and methods: * ``input_spec``: the InputSpec * ``output_spec``: the OutputSpec * ``_list_outputs()``: Returns a dictionary containing names of generated files that are expected after package completes execution. This is used by ``BaseInterface.aggregate_outputs`` to gather all output files for the pipeline. CommandLine ^^^^^^^^^^^ For command-line interfaces: * ``_cmd``: the command-line command If you used genfile: * ``_gen_filename(name)``: Generate filename, used for filenames that nipype generates as a convenience for users. This is for parameters that are required by the wrapped package, but we're generating from some other parameter. For example, ``BET.inputs.outfile`` is required by BET but we can generate the name from ``BET.inputs.infile``. Override this method in subclass to handle. And optionally: * ``_redirect_x``: If set to True it will make Nipype start Xvfb before running the interface and redirect X output to it. This is useful for commandlines that spawn a graphical user interface. * ``_format_arg(name, spec, value)``: For extra formatting of the input values before passing them to generic ``_parse_inputs()`` method. 
For example this is the class definition for Flirt, minus the docstring:: class FLIRTInputSpec(FSLCommandInputSpec): in_file = File(exists=True, argstr='-in %s', mandatory=True, position=0, desc='input file') reference = File(exists=True, argstr='-ref %s', mandatory=True, position=1, desc='reference file') out_file = File(argstr='-out %s', desc='registered output file', name_source=['in_file'], name_template='%s_flirt', position=2, hash_files=False) out_matrix_file = File(argstr='-omat %s', name_source=['in_file'], keep_extension=True, name_template='%s_flirt.mat', desc='output affine matrix in 4x4 asciii format', position=3, hash_files=False) out_log = File(name_source=['in_file'], keep_extension=True, requires=['save_log'], name_template='%s_flirt.log', desc='output log') ... class FLIRTOutputSpec(TraitedSpec): out_file = File(exists=True, desc='path/name of registered file (if generated)') out_matrix_file = File(exists=True, desc='path/name of calculated affine transform ' '(if generated)') out_log = File(desc='path/name of output log (if generated)') class Flirt(FSLCommand): _cmd = 'flirt' input_spec = FlirtInputSpec output_spec = FlirtOutputSpec There are two possible output files ``outfile`` and ``outmatrix``, both of which can be generated if not specified by the user. Also notice the use of ``self._gen_fname()`` - a FSLCommand helper method for generating filenames (with extensions conforming with FSLOUTPUTTYPE). See also :doc:`cmd_interface_devel`. SPM ^^^ For SPM-mediated interfaces: * ``_jobtype`` and ``_jobname``: special names used used by the SPM job manager. You can find them by saving your batch job as an .m file and looking up the code. And optionally: * ``_format_arg(name, spec, value)``: For extra formatting of the input values before passing them to generic ``_parse_inputs()`` method. Matlab ^^^^^^ See :doc:`matlab_interface_devel`. Python ^^^^^^ See :doc:`python_interface_devel`. 
Undefined inputs ---------------- All the inputs and outputs that were not explicitly set (And do not have a usedefault flag - see above) will have Undefined value. To check if something is defined you have to explicitly call ``isdefiend`` function (comparing to None will not work). Example of inputs ----------------- Below we have an example of using Bet. We can see from the help which inputs are mandatory and which are optional, along with the one-line description provided by the ``desc`` metadata:: >>> from nipype.interfaces import fsl >>> fsl.BET.help() Inputs ------ Mandatory: infile: input file to skull strip Optional: args: Additional parameters to the command center: center of gravity in voxels environ: Environment variables (default={}) frac: fractional intensity threshold functional: apply to 4D fMRI data mask: create binary mask image mesh: generate a vtk mesh brain surface nooutput: Don't generate segmented output outfile: name of output skull stripped image outline: create surface outline image outputtype: None radius: head radius reduce_bias: bias field and neck cleanup skull: create skull image threshold: apply thresholding to segmented brain image and mask vertical_gradient: vertical gradient in fractional intensity threshold (-1, 1) Outputs ------- maskfile: path/name of binary brain mask (if generated) meshfile: path/name of vtk mesh file (if generated) outfile: path/name of skullstripped file outlinefile: path/name of outline file (if generated) Here we create a bet object and specify the required input. 
We then check our inputs to see which are defined and which are not:: >>> bet = fsl.BET(infile = 'f3.nii') >>> bet.inputs args = center = environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'} frac = functional = infile = f3.nii mask = mesh = nooutput = outfile = outline = outputtype = NIFTI_GZ radius = reduce_bias = skull = threshold = vertical_gradient = >>> bet.cmdline 'bet f3.nii /Users/cburns/data/nipype/s1/f3_brain.nii.gz' We also checked the command-line that will be generated when we run the command and can see the generated output filename ``f3_brain.nii.gz``. nipype-1.7.0/doc/devel/matlab_example1.py000066400000000000000000000032321413403311400202670ustar00rootroot00000000000000from nipype.interfaces.matlab import MatlabCommand from nipype.interfaces.base import TraitedSpec, \ BaseInterface, BaseInterfaceInputSpec, File import os from string import Template class ConmapTxt2MatInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True) out_file = File('cmatrix.mat', usedefault=True) class ConmapTxt2MatOutputSpec(TraitedSpec): out_file = File(exists=True) class ConmapTxt2Mat(BaseInterface): input_spec = ConmapTxt2MatInputSpec output_spec = ConmapTxt2MatOutputSpec def _run_interface(self, runtime): d = dict(in_file=self.inputs.in_file, out_file=self.inputs.out_file) # This is your MATLAB code template script = Template("""in_file = '$in_file'; out_file = '$out_file'; ConmapTxt2Mat(in_file, out_file); exit; """).substitute(d) # mfile = True will create an .m file with your script and executed. # Alternatively # mfile can be set to False which will cause the matlab code to be # passed # as a commandline argument to the matlab executable # (without creating any files). # This, however, is less reliable and harder to debug # (code will be reduced to # a single line and stripped of any comments). 
mlab = MatlabCommand(script=script, mfile=True) result = mlab.run() return result.runtime def _list_outputs(self): outputs = self._outputs().get() outputs['out_file'] = os.path.abspath(self.inputs.out_file) return outputs nipype-1.7.0/doc/devel/matlab_example2.py000066400000000000000000000027431413403311400202760ustar00rootroot00000000000000from nipype.interfaces.base import traits from nipype.interfaces.base import TraitedSpec from nipype.interfaces.matlab import MatlabCommand, MatlabInputSpec class HelloWorldInputSpec(MatlabInputSpec): name = traits.Str(mandatory=True, desc='Name of person to say hello to') class HelloWorldOutputSpec(TraitedSpec): matlab_output = traits.Str() class HelloWorld(MatlabCommand): """Basic Hello World that displays Hello in MATLAB Returns ------- matlab_output : capture of matlab output which may be parsed by user to get computation results Examples -------- >>> hello = HelloWorld() >>> hello.inputs.name = 'hello_world' >>> out = hello.run() >>> print out.outputs.matlab_output """ input_spec = HelloWorldInputSpec output_spec = HelloWorldOutputSpec def _my_script(self): """This is where you implement your script""" script = """ disp('Hello %s Python') two = 1 + 1 """ % (self.inputs.name) return script def run(self, **inputs): # Inject your script self.inputs.script = self._my_script() results = super(MatlabCommand, self).run(**inputs) stdout = results.runtime.stdout # Attach stdout to outputs to access matlab results results.outputs.matlab_output = stdout return results def _list_outputs(self): outputs = self._outputs().get() return outputs nipype-1.7.0/doc/devel/matlab_interface_devel.rst000066400000000000000000000015441413403311400220560ustar00rootroot00000000000000.. matlab_interface_devel: =========================== How to wrap a MATLAB script =========================== Example 1 +++++++++ This is a minimal script for wrapping MATLAB code. You should replace the MATLAB code template, and define approriate inputs and outputs. .. 
literalinclude:: matlab_example1.py .. admonition:: Example source code You can download :download:`the source code of this example `. Example 2 +++++++++ By subclassing :class:`nipype.interfaces.matlab.MatlabCommand` for your main class, and :class:`nipype.interfaces.matlab.MatlabInputSpec` for your input and output spec, you gain access to some useful MATLAB hooks .. literalinclude:: matlab_example2.py .. admonition:: Example source code You can download :download:`the source code of this example `. .. include:: ../links_names.txt nipype-1.7.0/doc/devel/provenance.rst000066400000000000000000000016211413403311400175530ustar00rootroot00000000000000================ W3C PROV support ================ Overview -------- We're using the the `W3C PROV data model `_ to capture and represent provenance in Nipype. For an overview see: `PROV-DM overview `_ Each interface writes out a provenance.json (currently prov-json) or provenance.rdf (if rdflib is available) file. The workflow engine can also write out a provenance of the workflow if instructed. This is very much an experimental feature as we continue to refine how exactly the provenance should be stored and how such information can be used for reporting or reconstituting workflows. By default provenance writing is disabled for the 0.9 release, to enable insert the following code at the top of your script:: >>> from nipype import config >>> config.enable_provenance() nipype-1.7.0/doc/devel/python_interface_devel.rst000066400000000000000000000035341413403311400221400ustar00rootroot00000000000000.. python_interface_devel: =========================== How to wrap a Python script =========================== This is a minimal pure python interface. As you can see all you need to do is to do is to define inputs, outputs, _run_interface() (not run()), and _list_outputs. .. 
testcode:: from nipype.interfaces.base import BaseInterface, \ BaseInterfaceInputSpec, traits, File, TraitedSpec from nipype.utils.filemanip import split_filename import nibabel as nb import numpy as np import os class SimpleThresholdInputSpec(BaseInterfaceInputSpec): volume = File(exists=True, desc='volume to be thresholded', mandatory=True) threshold = traits.Float(desc='everything below this value will be set to zero', mandatory=True) class SimpleThresholdOutputSpec(TraitedSpec): thresholded_volume = File(exists=True, desc="thresholded volume") class SimpleThreshold(BaseInterface): input_spec = SimpleThresholdInputSpec output_spec = SimpleThresholdOutputSpec def _run_interface(self, runtime): fname = self.inputs.volume img = nb.load(fname) data = np.array(img.get_data()) active_map = data > self.inputs.threshold thresholded_map = np.zeros(data.shape) thresholded_map[active_map] = data[active_map] new_img = nb.Nifti1Image(thresholded_map, img.affine, img.header) _, base, _ = split_filename(fname) nb.save(new_img, base + '_thresholded.nii') return runtime def _list_outputs(self): outputs = self._outputs().get() fname = self.inputs.volume _, base, _ = split_filename(fname) outputs["thresholded_volume"] = os.path.abspath(base + '_thresholded.nii') return outputs nipype-1.7.0/doc/devel/software_using_nipype.rst000066400000000000000000000101721413403311400220370ustar00rootroot00000000000000.. _software_using_nipype: ===================== Software using Nipype ===================== Configurable Pipeline for the Analysis of Connectomes (C-PAC) ------------------------------------------------------------- `C-PAC `_ is an open-source software pipeline for automated preprocessing and analysis of resting-state fMRI data. C-PAC builds upon a robust set of existing software packages including AFNI, FSL, and ANTS, and makes it easy for both novice users and experts to explore their data using a wide array of analytic tools. 
Users define analysis pipelines by specifying a combination of preprocessing options and analyses to be run on an arbitrary number of subjects. Results can then be compared across groups using the integrated group statistics feature. C-PAC makes extensive use of Nipype Workflows and Interfaces. BRAINSTools ----------- `BRAINSTools `_ is a suite of tools for medical image processing focused on brain analysis. Brain Imaging Pipelines (BIPs) ------------------------------ `BIPs `_ is a set of predefined Nipype workflows coupled with a graphical interface and ability to save and share workflow configurations. It provides both Nipype Workflows and Interfaces. BROCCOLI -------- `BROCCOLI `_ is a piece of software for fast fMRI analysis on many core CPUs and GPUs. It provides Nipype Interfaces. Forward ------- `Forward `_ is set of tools simplifying the preparation of accurate electromagnetic head models for EEG forward modeling. It uses Nipype Workflows and Interfaces. Limbo ----- `Limbo `_ is a toolbox for finding brain regions that are neither significantly active nor inactive, but rather “in limbo”. It was build using custom Nipype Interfaces and Workflows. Lyman ----- `Lyman `_ is a high-level ecosystem for analyzing task based fMRI neuroimaging data using open-source software. It aims to support an analysis workflow that is powerful, flexible, and reproducible, while automating as much of the processing as possible. It is build upon Nipype Workflows and Interfaces. Medimsight ---------- `Medimsight `_ is a commercial service medical imaging cloud platform. It uses Nipype to interface with various neuroimaging software. MIA --- `MIA `_ MIA is a a toolkit for gray scale medical image analysis. It provides Nipype interfaces for easy integration with other software. Mindboggle ---------- `Mindboggle `_ software package automates shape analysis of anatomical labels and features extracted from human brain MR image data. 
Mindboggle can be run as a single command, and can be easily installed as a cross-platform virtual machine for convenience and reproducibility of results. Behind the scenes, open source Python and C++ code run within a Nipype pipeline framework. OpenfMRI -------- `OpenfMRI `_ is a repository for task based fMRI datasets. It uses Nipype for automated analysis of the deposited data. serial functional Diffusion Mapping (sfDM) ------------------------------------------ 'sfDM '_ is a software package for looking at changes in diffusion profiles of different tissue types across time. It uses Nipype to process the data. The Stanford CNI MRS Library (SMAL) ----------------------------------- `SMAL `_ is a library providing algorithms and methods to read and analyze data from Magnetic Resonance Spectroscopy (MRS) experiments. It provides an API for fitting models of the spectral line-widths of several different molecular species, and quantify their relative abundance in human brain tissue. SMAL uses Nipype Workflows and Interfaces. tract_querier ------------- `tract_querier `_ is a White Matter Query Language tool. It provides Nipype interfaces. nipype-1.7.0/doc/devel/testing_nipype.rst000066400000000000000000000074221413403311400204610ustar00rootroot00000000000000.. _dev_testing_nipype: ============== Testing nipype ============== In order to ensure the stability of each release of Nipype, the project uses two continuous integration services: `CircleCI `_ and `Travis CI `_. If both batteries of tests are passing, the following badges should be shown in green color: .. image:: https://travis-ci.org/nipy/nipype.png?branch=master :target: https://travis-ci.org/nipy/nipype .. 
image:: https://circleci.com/gh/nipy/nipype/tree/master.svg?style=svg :target: https://circleci.com/gh/nipy/nipype/tree/master Installation for developers --------------------------- To check out the latest development version:: git clone https://github.com/nipy/nipype.git After cloning:: cd nipype pip install -r requirements.txt pip install -e .[dev] Test implementation ------------------- Nipype testing framework is built upon `pytest `_. After installation in developer mode, the tests can be run with the following command at the root folder of the project :: pytest -v --doctest-modules nipype A successful test run should complete in 10-30 minutes and end with something like:: ---------------------------------------------------------------------- 2445 passed, 41 skipped, 7 xfailed in 1277.66 seconds No test should fail (unless you're missing a dependency). If the ``SUBJECTS_DIR``` environment variable is not set, some FreeSurfer related tests will fail. If any of the tests failed, please report them on our `bug tracker `_. On Debian systems with a local copy of MATLAB installed, set the following environment variable before running tests:: export MATLABCMD=$pathtomatlabdir/bin/$platform/MATLAB where ``$pathtomatlabdir`` is the path to your matlab installation and ``$platform`` is the directory referring to x86 or x64 installations (typically ``glnxa64`` on 64-bit installations). Skipped tests ~~~~~~~~~~~~~ Nipype will skip some tests depending on the currently available software and data dependencies. Installing software dependencies and downloading the necessary data will reduce the number of skipped tests. A few tests in Nipype make use of some images distributed within the `FSL course data `_. This reduced version of the package can be downloaded `here `_. To enable the tests depending on these data, just unpack the targz file and set the :code:`FSL_COURSE_DATA` environment variable to point to that folder. 
Note, that the test execution time can increase significantly with these additional tests. Xfailed tests ~~~~~~~~~~~~~ Some tests are expect to fail until the code will be changed or for other reasons. Testing Nipype using Docker --------------------------- Nipype is tested inside Docker containers and users can use nipype images to test local versions. First, install the `Docker Engine `_. Nipype has one base docker image called nipype/nipype:base, that contains several useful tools (FreeSurfer, AFNI, FSL, ANTs, etc.), and an additional test image for Python 3.8: Users can pull the nipype image for Python 3.8 as follows:: docker pull nipype/nipype:py38 In order to test a local version of nipype you can run test within container as follows:: docker run -it -v $PWD:/src/nipype --rm nipype/nipype:py38 py.test -v --doctest-modules /src/nipype/nipype Additional comments ------------------- If the project is tested both on your local OS and within a Docker container, you might have to remove all ``__pycache__`` directories before switching between your OS and a container. nipype-1.7.0/doc/devel/writing_custom_interfaces.rst000066400000000000000000000002331413403311400226710ustar00rootroot00000000000000.. _writing_custom_interfaces: .. toctree:: :maxdepth: 2 interface_specs cmd_interface_devel matlab_interface_devel python_interface_devel nipype-1.7.0/doc/developers.rst000066400000000000000000000002751413403311400164700ustar00rootroot00000000000000:orphan: .. _developers: ================== Developer's Corner ================== .. toctree:: :maxdepth: 2 devel/index .. toctree:: :maxdepth: 3 api/generated/nipype nipype-1.7.0/doc/examples.rst000066400000000000000000000010621413403311400161310ustar00rootroot00000000000000:orphan: .. _examples: ======================= User Guide and Examples ======================= .. admonition:: Michael Notter's User Guide Be sure to read `Michael's excellent tutorials `__. 
Examples ~~~~~~~~ The following examples are `literate programming `__ documents which solve specific problems using Nipype. In some cases, the same problem is solved with multiple underlying tools. .. toctree:: :maxdepth: 1 :glob: users/examples/* nipype-1.7.0/doc/images/000077500000000000000000000000001413403311400150275ustar00rootroot00000000000000nipype-1.7.0/doc/images/nipype_architecture_overview2.png000066400000000000000000003112341413403311400236170ustar00rootroot00000000000000PNG  IHDRUTSbKGDQIDATxT,e齳&4)"E" v)*]DED,(V@EA@EED`TX^(a9ٽd{$7{AAAAAAAAAAAAAAAAAAAAAAAA^k)Zvc쌩d6$`0,!쐴l[ZVƍ"lA:8 `Ioi{2h{Hr=mJˁ`0 +hoP')v6ɘOuئhٙQf|mwfڶ~smFC ` `65:RlmO"T /:ޞ ;Htݠ& T4HL0uVSˮdNfgVAA~'bw,@)C`d^SO?$  [Z1 {UV-IJ-S<|<  GhI%*SB( Q 2kO)q;~oiIAAFXktaQcŸAEE<./s@ĘA}i'B,@} \΋V8q2E|6ՔmIYUA{eeܯt.I$Z\ @Uә ׄ*@A=)d%Ѕ y!sUI US_qdȪa rܘ&Te'9.޼T%5Tf֑x! _*&7r@Ձ܋])*jZɇ |̏Af"\Tj>R['PP&SAEM{UCU *Z=\~<AP*[UUeƐM#MvBv$j;.! 5o-V{5|lcͷ UѾ7k)CdLPA P+P?Lc@ZZ{GgvȩNU_Ͷ<˖k*C>v]%jm;nrT"r n~|Ll*O1F"b9β-Ci]@Sq!Tt,5 O1 "TDȸkɓa9pԩK"I@gx} f:@68c,{s#j\UֲσQZz9rs֖fW,_D_-L P!cP mD'Ri^ q6\`v&rUfgBhikWL*She-״, ZL/H~2vȞ%Sbo7!{@ pY$S6PUl|VI[sRcT!; N ;Gz? s Z^VO)oZY >9S@*?ϞTA*@PD8*T+-|_[YY]-0_n!ΟH 41W97Frbw?3(ɖ1.Ͷ7Rr~O?|2*Q*PօؿCʌd9r7{)66BlU9Lx}s~ ߷Ah\S4% T9*x8HmzU9w8ۙPCY᭭9 :Oŧv)=.d?? ~O?|2j+mmҽTA*@#Z L^cP4QA*ޠmC-3F>%+B~FМu۩9 PUPVBfjZE U#6,iG%To+榪TP6TmvU U P%Nj*uXgkG%TW~]Zm^MRM`?TA*@lxxov%c XOmoFP*ob{s]U~hBLٞGP:dprU P% gD&?eԀy*nk.ы5pNC@Us5SH ) hAU"2旐]APUPB-o` ܸ(F&B%X/ =w_m"_P xтt*sBXk T?Xk5xC/ǹ+9b49Oṷ{gbfi'LV$zb^sEhAn#qՌ5G @ r:UA Z^U*iTܭVA*@ }e J:A*@ diUA@ P TE1U*@AA*@AA*@AA*@AA*@  @  @*! f1~HK(VTZ`Ԣdup?`6V~(*FUrV)SfA,UR叒%KL&GqiȊEKK(q̊+Sj-l|4ZUl|jվsp/`갃TFMVgT5T/^ ZJꫯg'ׯSlYlYYҀP*T-i__^駟pO`lƍS39˗⣥XU\9n}]=~AC ^te?-)UE Uc(ꐳ|rj߾}#BG>YV븴py 0Pխ[7_۷oWP#G⣕إJ+V{__(QM4⣀8t CzP:r>`|NT"RER>Jٺu+ X@N~tX'Cp Vh"'++ EM6驩 A 2_Pnnٲe җ_~i $kORઆD<E>?~< rOH~rdmɚ \U/+,q9Qɟ:CքUEU(_dȐxewDSy@E@*pP-Ϛ5+$T{챎Ν'#e  '_FN|Nn1*#gV>]D\P/l}#%bM֭SRRkalb,+-^Z?r}//_4vi*. 
QFR"]Ta6c)P}[ovm:M*o&/^8> U(Pu$Y6i"`1!Kc'9T=@D3.\Ce&_GJw /4۷8ٳc,C@@*nfAn󶧞z-T5jnrgJ.ҘI U**g?s>Pr_ sxr:P*)T8 cۆ Bմiӌ:ꨐcv1q!A@_2x*svoܸQ?㌴k~a}ҥF -['N> =\~s?{ꩧ5ktÇL:qYv] p}GzNUMf7Pe>Py**T񢿕+W61dvviF`PA+zD8 *ʾʅK:TqZL0}A_GAsx?UnٳgO.y普r*XpQfwSLq}N? PU*Xmzl#UrEv-_P*T/Tu˔)?^뮻؆} dlǑ[o|;397O?u . cx衇hjaAUzP˜*sPőRJ۞r)Aj5`][=Tb7jnF^ذaC6>loPzSgj[6F24BE0r{N T%Ty}P?$k*~sgM a9DnŗEaglU"T^TrpBTPݚ5k] P}@UB ͛77~;a1lp>˦Ot| *l:;/(T*`PzP;a&T&L`|6y]+VTU U̟?ه{o_jUn]iFa@PzU*?hCcŸq 7nfmbvUEĄ*3 Gm,ef]afןU}@UbBsv؎Y`1Rm޽=:b^~edf̘?3g}fLK/|fr8OQ/jDnݺ厪yRNq]V-_U a. gvY#e*@UQ U F!므8*wfѢE1#k}Qt , ?Uh_~qgϞPT*@kqw6TGU*# PPQGe駟gu^fMtzViӦo>hj]tQ6֗.]jTbV˖-'?Cs4σ+ƍ;N/^nԩSm;`I&G}tq99N_z%#aN;w4EzZZq]v~4TGG 'cc'U7Xׯ7.W?p31c~e˖5ۍ"ѣSO=rx뭷m2Zpq+gffO2K*l3g,P>\!rl>Ɓrws}*h2h"CU,dc,$ P~s޽eȑ#={믿>w_pal~4h9ݽ[oFusL~0Mݺu2eǏ.֯_?"{Wcfee?St;;v1٬p*O&&*TŋO?ƪOUQm8d}|6lؐ{>lsٿ~ݻwyWmߟ\y,Y$K~W^?3AVT6m87N&o52*,&&2TŃO?z>@*j۶mDuN+֭[vmAgݹܴiSH`p'|/aӬ -ox7TGG'c'U6X\#3VիWh5OVfb ?֧OzF@YAl߾=dNnPMfMT,JUT`?ʡN:g͛77F3my 8aС!dN~sї_~YUV B)_TGG'c'UhGt^oTGm۬ ,"x~'r{}*h2(h}(1V}PLH m$HY&wtgܘtgJ3|M{~|}ȓ2 k6 LGz}*h2(h}21|P*(P%U*TA*@T @TA*@ TA TU P P @TA*@ TA*@T @ @ P% T=N@ddȪTA1U:1dTTl.^ yxLU}:tLP׶ٺɗVVίU M6PQd7 *T / V9 q8-?qp(hȵʥY'Q?!-siub.(aJ#jȕ֭[+3TH~u&PIB >7`4ZӦMk/ӓ TBv dTGg]"Qxm TKvh*'UEUH;Ǎ"֮]P2X#Pl&d琝@-TYr% X}w1 r3AdǑ|2Z_"… ŋ}dN&;.*oNwt1h$!ȉ>3y"zV֑Ugu!k,IiBᣤ)\̝;W?t r'|R89TL~4XˤkYu*;Y*E/jԨ~zLK3l*djT6T.d-81L2?o6E!p.>hTQ ۟o5T?%0F"+2Gv`(@EQ") RT+GULjo]'_IwOHB6TVdF젪tԑ 3gG~(QrR4/*Oe 5_)/ %?l%/`@UCUI {gH HY$rW=!:qέ ~yW?<.Ճ Izy+%WZ#ц9P.h2'+7 IAwIxw:|3r.bӮ|*DPR$]NB-$+qrcgKcD·I{R$^CM"ʈyhL GKtam Bi4&>'>6>$@sDQoAC%W2Mi]_^N #B9:Wꮛ.:@˛YG(p,B"4w#S^NI]-0% }yˠ>:Y`25W"MGb *b2Rrj X+yj#.(m83 ~Nͼ#ņ H-0G$oWeڽ4Xf4ZuIx|'L>zRgP=O*N(?U$ :ԌV/Z9/42|tm>:B3K`DПGU1UfUJ5Ql!9%K~IbC9Mkvj&՛wGN5ƪUI@emLⷷpE+|tG|T+yi-9Pe|~kԿ?ZUΥwB>Ke-%7]DU1U&X XB-i$R)$Wsa]Sk-o Q7!SF|4V1SrN>%>z=|26DP+HcPYt1֗sh>zJaRU:.MH|P#PY)opu!^ԁ'T /"J`+-O%I}(|GkV1?>Z\QFYYΡS2u៶>ZS|en+)u]> !6\fŐ&7X$✷&5;5ʈoY(>1)JUB2I]G'h)ђrd+GOOђ⣩|PPeTJ&k 9# ݩ*@*hAk'>z!|ё(>:I|+|1Ub]7(ձ#PPjhgEPTA TA*@ UT @ @ PTA*@U*@ @T PA*@UAoZ`FP 
PuB-@UP)4 T( @DOvD L* !{>U( Pn&<=Uj!@ @Ud܋W=9ٳZi @bG7%!Tu&FQdg΋Vkݵ@B TA*yA+rKW:_OYvZQ$(;½';@5X-0!̳/]( X)SPG@Yn& e|iGOKz5O3s_:Z0 >&=)P-qw{ͻ=I0o6V ,Fm,Fm( mD5 TKJ?71i1*(օ*(օ%{-=_@֠&]:n P(Q Cy9`ĘU P0GƤ|=dV,ѠjrZV*TA*WJ 7@HcUy@ @䫲+AgΘ\Ę2PA*PPQ/XvNfqKE~MS^ @1o_v?@Vvz|kfgU iOwލB"ѣGVGjU2pVL!:0Xm95U DKmQ7l\iҤI:AKG5|p V7z9;wDaCСCzut#wj)P^׍K޻woJN ~9VԒxIJM+Ed-0R)TRUPR_n4Z>N9,ɝu$k% WMFj(hȵ ~9nndZ j5I(8P*U~;ѐ'q AdRh&`UQ-7(dȵ8#K7JCF97RmuErCyѺut@t2™zB whȓ8 n&:Mj,V9 BD~C!C#ɗ"Ev\m#jcE(@T'mٲEi'd%bQ4B1 UY?8<s9+bz-0y2gΔ!i."_(dȓh Ȯ!'`CJhئM* 2͛7/.?'Lnݚ8b۪U+}СҥKm۶9 ?^XNݷFs=VTꫯL?K6b%_*@U<*> SD֭~w>pE0nݺ U[M'OvBRC_,瘡e FkX5Q]Dn|uX0z4Vtmٞc@Ճd X %Z*@U?N+>N/MSvYf'ΡEh]l}DʻTEד$[(Z`fCڵk9LkY~M7/ѕ˖-/"cx b_~%&Sq- rER) U`߫jLnɶyf}͚5zϞ=soܸ~Q^ yiPW:x1 ɖn޲"md$w$0W]'#V݂cJf8Qq]t?t[/㏹7ߌTWX"/WJ*WU`xn~cĈƿ^xa.ކ0#=U<x9}d]"oY{Q U4 U!G,JJE@U}駹uy+ Ks Br:OJĿ TbvR%ZB/rm磌29sB@d+-׳lɫ2Ci*wlazCrIrNFU?v<ՙgi ;#mUtM| 'IK5O?]7rv1ylڴ)X?Rl߾}kU*.";IT@*:vR`6ݻw 6OFhQ{.Ѡ~-= sdvM]SH4u$- Ppsdj֭:-. \rIL@UZbQ'.?3LZ P^ $YP*HI Dkt5,T]ƪ7xy@PIH>ϰ>DGVT'T-ROjKPŃbh:9Ǔ⩧2g~~0[/= ]VQҷzBQAU2şZ.n%[&tQsɯIŧ&8r(7T}iUͷ QǏwB'Yv@U*]NUazCc TMNWEޟcrQ>_]O bl]R6^{ M|ggg?+0M6mc?p(H)+UQfԨQu]aնm[x w;~8׊?wUI UN٢DB 4ȟ|o3" M18/ix@ڥf;~^PB_9*@U~_z饹ɮ]A՜9syJSW6>>{k*@?la:*^zA'0wypm9ٱdɒoSU]2+dj'6uTDwqQP*H P;{ͨΎ#"i,Uu3}YhεRTncq#G*LX. 
Ӿ}Pū TU.*@URTl?`Pŋ0QkTE j뀪*^Me#8[OO{ۨ€*PJJ5ԃMb w=/@*ZPm_T-TݼW_}ueuHeFxS̺Qcհa *@*)*Pv'{3ݻw}Uxu@UB ΜaÆEg&L嗐-2{l;nz+whCT5T`Gxt~hU뀪*vשS'w)S}1 /szF7uᇹ~s|Mc{;䓍 LHayƍO?<͚5+4Z.*@U\C_v*UPfv@UsuQasϹ~_d^_ zw}w}yl-,\U(zUXhKD*3 4Va1)ϛ7Ϙ//-<-E٩S'︫續Wd‰*^mXeᥘxšRrT⺲Tb뀪CUJ*P @ TA*@ T @UU*P @ @UU6Vj>|X͛֫WϘioIx1sOy^G}Ԩy!*PLT (!jѢE٣˘+W^yŨy,܌`'Ox @U!@39|P*~oР$o]Tm۶|޽[OOOnj 7x*g( PAqU֭[l\>}t XRعs>bc*U,1۷q۶m-[1MEw}gϙgn׮qMʖ-kwyFϓ1߿qLFZdgw{W^yްaCc6N8A߸q#>?> TAPCe]=:o~aя?x裏ׯ_;իW74jh@lժ19/ao^`EOǎ|^zI߰a~a6hܘptR4y@^z^3eAxvv7 1x9iӦq=|>n8c^rP]ƗUPեK}|m24p4{59s~7k,MߌY\F `9݇L_6&W\qE?7ܰ+W.bf;7bj# w'|4|P*X"ﯿڨ؂g /5k,M>} &ڵks?7x>f͚h"ه~mohdddhVZo>fn ;"g55͌ka.*eV5j*ɂƏUPc}#-;4*\~+`Ɯ,̕SO=np }ѣ޸qc}ʔ)F wX,aQA``q 0!Q` 1=\,on?B- ?CA|4|P*׸}vso>߿ j7:7wesdc<' iVkhRRR\u㦛n͍QːDe@Ut>_> TMeff^; 4(lx%6ol]M7B~3*,1$hQO? ` 5ϣ8G ͞=[+W]v50x˵sh$Kn9zͼy?P_l1?{'g3Xc]J܍b&5{w~5X|'tޤI{>_nQ`TE?壀*@!t&T]z&a S_{Q*f!hO^W(U7-Z-cW<駟oݺ5v=l0cW\AQh7X`6w~<ϏqjL~|ͮŋ?,sMQO*{-X b*@U@ub:#6&q!CwCPdb8; 2yQ+&;v8j0&TM6-|cpUu7TA8*PJNLB w@FM ,Rl?rY /`w]#PP.Vj^8Ds;v~ryѬPev/1KnĉU͘1 PX*T9H[n>0p<ZL .dm>&ιV1P#xr<} mk᠊ϗodzcŔz-o1x/1y_-b;G^zAՙgDu愲*(|PN* GQDpL <l?>'qj D<Cڥ XR2eN:S0*e#xG8B#ȕz-o/19R14֪UK )(TTaM( GU*LLb\+L šPP%U01]lLM_t?HڡCW_͍2$f7<؈Zrt͚5Fիm/fTy *4$w9ӟʄ*h2( P뼘.6bA7XŊ3*zȉ.X$s|/BMSxLq3j.vRn]?ws#7]L> TAJ,M7#Gh`n`rD<:-ƻロkE='F`qKL( &UG:ܸp '@kÍY^8`4&{6Km' t#ix -F&TGGU*TIoޜ w9 MnW̡ZaV\G7(Pu TUvUgS Py҆ xk'Pg*WSC GC "\lTP4ŋ!zwZh0EogC:T&@Ulk0+ aB't&_zlUWKMjU4X)l2G]"g.]zDL:5*PwcǎEACo>N:I*P^F k@UqK8Ut1gn("}vT-!Iv99d]ɚ*Vj/䒸\s*Z-YD/V`I7'DvY}JUD؟Z E$>O#dM'$cb_ӈov4o<^Ca]~X1PQj;SJe7$*@(Q /@a_SL16+>jFn xےn3Ȍcx1TqMd" dG@UCG)s222rMjT`jpUeN﨑[n3h (ڷoS߼(OlٵdDJ@<ݜRѥKv¬6p@`ԉߗGTR(x\$)u!*ULU$Qkj ɾ!l?’ھ%PBk"X%Q*j X" `Ito&y0DSI$u2jyhKD8yY|0%m֡eIYRC TPUR%&o]nR!=&$8 &\"TK$d=ClIRo KN& a."&II|| ˥bCvDRItcJufĿ4~]$~%[,R-:j,^_s7:*d(+5%E*u0Uc^, x\RT`sL*7ҍÑGȹT/t !%*J>4v$7_NH$R\!]0](uLɥ+9ݤ믶 T.GKuD.7%r5K*RQ?S{D& P"fҭRCev0Uh[JToJ@ B|tpyY#L"]3>ZV5<,>> U|eM$Tr̈UpI!Xf5%5XR ԒTy+;/+ W yȄ‚h#:T|y)r.Ml.9}Y +/+ J \\E"X䦛V4jbU(Hi"F:ERR>Z > h /iAVce yʱ0;hyiwUP%PԪT 
%"*mce`Icv?M*/TGan}V8)y0_RKGa!|e$bJ%Z*(̡GaqAAAAAAAAAAAAAAAAAAAAAAAAA%$chl{(T,H.e{ usa"LV!"oo ,!(N<#4g*DPU*@ Jt]-_yNG"k%mZwd%U*^7kEhed@A֐,Ǎx}X]ɞ*E}ߏHtRʲ/R͑)uJS\9,.ۼbh^;EGv$H#j A>4\XY!|&hyUT(8T^1o uOhDZeۋ;6Ǎd6onXaDzt{7cM'/0sCz>^GYz*7S Zc6F4tU*$[7/PV "K6Jނd -67pETH]kDKUsn|>mV aD*oWYrY ow ':q|@K'M F"Tpdr~%XZn|X|ьޯ?CYz=Sif8QZs*s~r[9U?]ϧg|U=> mVGTZl`y+pU{|mouY/;$T)Jg7XͷTW(e~8.yq\(޷TBт*_F) U.`]96uY^?dv]q^CBlzuq|eޑ6h_RV=Tk@H<'HXP5R7#1:*۝by#V"LmdwofC~]:+-iW]>Its]~SLŸ%rUg)A0}[EYq'P9 !ynC?,B/*KѸvzT V\neo3(1YE-/t\ʏHU8:Z9vm2T%Bʩ8N3W7s>tU]T Q>[ >z&%͌v_6˳2午1JPe7ւ؇hgzXz5|q^9u߹!&LPX9~_C rZYuNt$<}*AQo|Dg5f7#Iy6hyy7moy㊅AI>ͩP?)JZLBFqP5 Ls̮=@k52- UjGJ,Kq^9u¸PϡQD4hn7-o+TUDx2lr qu:A7KdΜ|DB~UTtyTVfނuFbV)y<<9-tRe8|(BU]ܲm)eˣj8sMYV͜8UG,gjR U2#ŊEp^͓ _"W.љZnce8N3:Q~r^ ' osUqC,ɱt9:TUJQw>k @c4kB*M O?k ܭH 1iyEVpPIMs_8WgkyAj,Z5 "*V-/?qWpY^}Pk МuYEz=^WY^b`ʏTO ;Ճ}jkB*Vgy[>yS"Vwjj-,-1|/9T uHޜ87j:VՓu4/r-Ѕݯ;ӴHMVl*M^y#\k}x#b4,=+R.?I93hP^y欑z[ٷZ(_ϡWY򤠯 }y*PlJ??R kFB* !@Jk{şkyiCq@%*hy]McP1~\K>FQ@;0)a&sN(ૢb5` 欘^s F;D%$L~U;5lƙy۝mwu5.dQ Q IN셢 du_SB',`dj(*)Au  ydJJB lb */x&/PX:UWlfSr啄Xd-QDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmvl6`0X^훯־xcJe{m: 8 ` ˟d;mkz@j]&y0٦c{0 Jnk,}TSV[O~sl: |ۋlub[EAAR[/Ёy~~ *76QͶ'[AEJҷvA>Ԋ>'UCC[ BVeJ@dal0A9j6ۑ(w5c{(,;0AAW*Pټ2Zs'8CA.`\juP,dcǹ? ͱSTUT:x7/EN["SH|ۜ8q〪:Zm ϶;mo@ PAC#"-~M~K !y]St }ܳUuM<9UyYZ2)3i;7lT# M6 4`OJٳgpۍs>#^^}H{s@UHcm@UIjh'J!|AnLFfR駽qtOxUkPDQC;}H)FƤMv5wIQ±1N!>;Fc+ sߎ *B&T7|pR Bc켐Is56IAJf.?q"nuQ} Wh"@UnXyFccrepB4K>K1em3v?H1W(dU{ߎݡ_ƱSe\\ }#x|^(:蟚-S2cmSGc1{_oQz_rcq=&ΆwUgSU ]x.b#11ct yˎi'ێT)__T*u"+*m-T8F^Y{O7Aaj@GeJ* ~qkN6q.W:ӫ\q̥>agy*Lr TOq1e.wwwTPuFI/+%ҏfs֡>5WOsܓ7EBf [Vma@ݙ8nG繌jOMF76 fc~Xt鯙3)4Tͨ9ۇࡋueuGgKg." q/R~FpF㟆-S(RjkJ`Ƒ96u>2m|?8k>a{gþ3)4T(2o.$~&#:.ն d*^D|JUӨSh*VYQe)s!sYQƀpeCFT71 @UE0w88gK+o(cs*ɞ]5,!T`._*VYS):);jHb^ށ(AmG:^FWvx.OT[WR] #=^c4א5Ӽ3f;9m\"J$FU(k[c%x))2A.c:x #USi}>vYA*@*Fc\:3R\nA8O6D!|qDZҐfڠXP%,%QֶnJQ'Mm"A ! 
у.I2>Gq^NɝJt%hJ5MTCF_\ΗktnU"q^J>,6Tm헢NdڏNsQ U[ Y9|j]٪k㳏CU( PFQ @*'h((Ie(7 ]ֶ*\_/.ZEɴ}Twi{{=\fʖlgN PsiXySt~_˞ |zFOCVmU`(S]b Y֦Nc=Ӓjk' "7TG.J`bQcvй侍PؠJ$y2-0rUHH:Ɋt Yۚt,CYC!Қ{}:Wb]?>V[T )gXQ$TAKEA*@A? UAV*/$?)FB PAd˶QgudkMCd|ꮄS P (P$fz(>T ,mLV10%AYek(:T jKJp;Țғ-Y$ ecl Vڛ T  UAA*@AA*@AA*@AA  PT;Ȋfʰ( P*g5aۊl&k돱l氭 A} ۫l dPlMQA v(6ΰ{ Ƹug6,  PFmz a-!A- YEo T"/%u%FB U`UQj%E}Ti @j6%74Bu͔\1wSj PNv)˴$r*[5ekǷ$ZAUIxVJo9WNP%j@67}FATDrloA&d;U PʏdC$+LqwBNTRr˛l1 A*@Uj?J<VJn|Zx% U*څ}_\NTYୟjYʶ^  @ʻQrϸWZaPOiyv  Pʬd[|JVPATK҇i9H zyzeVOdDžZ6!UZI-lF~=ζqP%#x2/Nc6% T֡LAmNbw(ӗ].|eA^x @'.{x?%l'wS;^b82%'ZUA2\*_Sf@e[gcO#{NBoA@ P{~m2TK&f,'*ߠ$//qJ)d8 ggR)o*P5Dq ŔHnCMвzQ*qד7[6m gJZVЍ]TEMdT~=,d -Q1o£Sh綌GqJ*5w ׻Zq;%$k%l;jP5Y[5 ( zh@Հ;׵C;P*Ppu6?e02T]ې5\:Q?k̪(*_:(FPo0`UDN\a TYz u5yME[kBZPqǵxű X]#4Ua{zOO^zR[ Sֲ>#+T.ħE(ސl˻P U dŦ Pu(kIhEqB~jtbM)NS?>H#st{)[;_NcxMGi zdE u1kKMUhݢ%!zĝ*'&3\4:'F:*(Q0^a5(;UcP,y>Ӳőաxj@'~8^“TUŖHpvG=Y8lIՁU!w?Ǡg[HPuOx*@i47E:!6Ut %W6Dq *9V-ɢGW Ks{:Ve])`ܪ>pW{8V⏽ƶm)[qhVUaF(d5fzGjuUV*f?kx9Mh~(-)o%MONq`*(ͫl PʿdlCAd|05fiSn$V)~s-,yci_]؜WT5 pޜi@mhv6jb|Yul&T UI`oӳK90ɓ)`~_̀BNs]j;PTQ/idi:uVR &2]hh7nxUV@os\!-}ؖͩxt /u4k) ;x*C\{TUyR7 Th;n k3oX]N^pѰ@Ul iY_|%۴,nrN! ~J%߇؛Rr}MekzehL#Y|SA}-1(k}&W> Sn,ivvN@* P%TԍŮ%4?MϺ,QĀiy$L@UP{XN="F},5-ee$q&OUO;R7G{=o^ooU~~X긯5UQ4olji{Eb_L(3=. +o Xvz};*);81AQ8]\SP|ew3N8vcv%n'P U.  
wo^Oy4Gw\UC-*0 $Ӟϐu މ@A\~7O A 1%cW!=*k-)]2-غqeH!۩8"z*i)%Dl*߿ʨמMaR`H8~կ PDݩy?nteއU?ڕ_@4`Yr+6j4mE|Z)=9eWq{  ^_+\@3TeW{p'au$|nO?N\[Ud-=M:?9ćZKϯJU|ns ۖ󔏙R.y|V Xor-Ѧr(8{A?8crO;MwK5duTqW*I ͫ#vs}<=}^nNUa5 j}4onqc\ϱ|Tb=BLqlxCr\L_|$T@U9lk>\g(oZ]W}ljc { *vs7 赇$ TI؆ĩ^3WWjPpj#堣Vcw5fڀ*@UJVkHyl-6JI/hpHb%@ZUJ'R47.4TU/*UZ?`Wz_Z U@UW鴘-"ⲪNH+;cJm^)5 Pg:TYh`#rɫP_1ҩ6 Fc|*@U`4Bxs2Ɲ>dHb{ t/;.Md{(PUChTWkA 2`9sC>^5^~^DXut:yHϾ.26q,TzkPh!a`WrB3(qP J-\U"^+MMvy<}ǬCL`g;OYU%ZHV잶,PVV9?KLB-uﻧP%1"=.PuKF;ݠ4Ѧ)lV]W<&8%OsDǨZ J@fls:Fԇ66p9 NNc>ҸR_Ol⚾JOי-?z5<Ua4ojB5i4tguZ҆:}OgM;k>_s*/vHsa-Z dT:DԶtk@h[оp P*dki"U?串"1%SlȞg;AMdui{&Tz=4ݠJ?{vvv=sK_U&\WBU@@z6Y{^B*%j*oEUX:Xa 0K^:RxRVj[wku&.T=iA1 +6ق,jAm 9=izT #UY]#!*=8ml ?WDYX9}“TUҋ"U jPuAbJZ-(ݠZ׳$\UU.iK_9DGۤ*"WkA]>94+-_ ms-^IiddE{BUj#ǧi[jٷMOa{GU|He9lA.,j$?'#To&B N, U#PU+襎4͑t\`7 g,oEQݺ^?1%=岡J㧄Sr2Ofm3KՐAL +hgKUZoL3bu+, Pʇ< zf U񛦃*Tq~mB,OQY~DzݭķjRJ I0#q`޵ AkDPgHtR84A?j>:1G2w1 0VHYΡĹXxg&ݨC_%~Vӫmˏ kцr{ OPTK4Oߦ#Kj#Ke*;:9T2Ü MJXQ5:zGFBf/P׿5tNGJ^+ǽv S߫?׸:?XUi$g+BP _t *TiXԑQy_un  PFi~BQ*c7U=Ҏ75P%@ߋNhkKUcKC"mصЇw'.Sߏax*PTe8˖*PUG2)SU PJa LU"Zx*PTҩ{qyOJ{nˊY}=m. TU𲂪 ԌAc4ǖ9klY>~Uh?}O@ U*@U*$kx󲁪*Zjb"S*6j@U֒0! 
TUu^y؞zP%WԆG#:U}Co#z_&\M?*[.hwU PrtC&v0h41sXZ7r ]@TyQ6PU 0jb+Ra@Wu/U*@C4ħxxE5ٰ}O,T?lTA*@ʔ Գ؎\)פ:6Ӹ]JUnPU*[{i/ T4 %a'@Uz@ߛ @ Pe|MPTyV/}oC U*@{(N*@U 7ϠTA*@ ^f*@U7 @ Pek>2*@ʗdU PZSӞ֧TP*pP%m&lOh簣6J@*tT|Q}TZv/Y[man&_hV?U*:ZӾPTV?UM؎c{Mh$/(MF=KqrN&++UY M2@ P[s;hP% ndcg;)n>UF Q45VuЧU*S> PTV'}Td1lKetJF 8"hKcU*@G=iT|ks}TvcFZn6u]' eSm*@ʋާǨTJ,{l P%#HdmHÒtW #TU4A@πC̓t<^5NytlZ@ PJhPPTeU􊶋~6X;\^l6Ua PJ!8;OP*&PwLe$Mבl9k 4_U*@S@ P>h]@UP%#LOhy ZmUXs7U*@ƚG*@*k΀fJ);Ӛ\oc̶rU=JaMMC=s!TU]r̕mRPUPG{*`.e ֛Ԇl?U+'6nNeŖ)j'xޞ @ #U頪:U5:bXT7ZĶ>(>c|E}vTeUônXo3\g&(t[r:_piD T7TGcsswTh]| Uk.Tke\_1XV::t3iɶѪ U UX7ڌ?Ϲf!N똆/g+N*U U5ݯU*ΠrK,(jG-.㩼| ɸN_j6quT߰+k)DתR9]+LU4iTCvU-2֙~6Q_wajk=.<˦3([~ @Unj~U_eT0ԜYj*NNL癒Q=3Ɇt,sXp&׬MOMnվμGi!{뒶*_Pu*ѧz?T%TɴTy-)@Uukuζ|WV]ǞsBUVWƐ;6"tl4U7aktUiݜ5TMձGx^3~3 73T]s#b@X$@|cO&S P=W>PT< UhT :1 PUP[ӾPT֩TF!F T'TٍP@ P[7s>TA>).T@MiTU?-Pod)_6)8[+Nr1 T7Tִ'U*.):&^].&\GXV9MOs15O5?rw)K#ZzmoN;}*{U UOEv_D!TP![ӈ }6%d{z✽i\|Gz>=+Tۼ#U#P*O}oT<7S[Wˮ*bP*@UB*@ʳEJF}E TNV @|*@ʳ.moV|!"PNg"j PUP?PTypJT[. jbݛuy*;LP@jmM2 P,{e YAUg-;O'W&̉=2^GzBFUTV1T2JV6Bj6P%fk9&4ޘ"Qle.H@դĆ*pUMQU*@գ*@*l'Wf U{c:Pm ˳yK?:Eq@ P%yT2ʎnNPe  2ԙfl˹Z:ڟIϡ$#>DMTU5cU*@UF^hws*;1 9ח lɣT?x8 j PUm*dk TR+] (Tl(mHݸάdfxjw@oWZTL=TRjO}OF TGc~q9mΪ~o6 R&j Pr*@*n y%_F+!םl3qL+]][zJ99hD/y~A}@ P½U"+iJt$fyqd9ċdO> ښkކTRkf*@3^ۼCuEDb9jG4iU*W ּ\T$}?F{wI_7'T`$0Nzi~a[OPT~ё  SfP5˸]Ua+BB=Y֥Qݠʖ22tk>aPTy4`2r",²*IEAB-UdEvoq\_ m5TD`BH6c&@Y"ձTM,3 #TChk U"3E"*њl&.?8TU~doYCUMZ6xhs¿+պߪy*_zMߋ.hkUr3ɿlG2j/>t4}Y4 P+iLf5ad.X>6l˵m@F}dBTU9Fӭ*Q6sG*6>SVg6B߇!hgUvd00rd'ix*@*W /J4 _ulYājY>'5b9@t;%ْA)@oF3[?lYV0)a.+PUuב*[TQIU}#c57)TM-UvOƖlӉ:ZhCIDOՓm۵ W=ƹ2'^6 PVjTAeUk rԖ,?U*@U6(Ëzk@TFP m?XOznE],Td {UFNCqҕ\*C5/؏WvNeúٻOU4M+š4ǯu+VU(B/kkOǂAH6~ߨ;2#Y+VT > =0dϼW9 m=PUA\A,Tda<ER-ؾײ *P :}Ŷ:>*'\#P"T=>!WlI̛zgn?QZF UHyP UhB|Ai)P%%EXK7i|M0*$Tϐ3h jP UhBH籭d;uP%ڕw=o1!T.lٖTr.@ӛZTEv֚ml@,8Tcs$.*r 'ZU**:GmF U"zըWé<7 ;5RC$^UM<[USO{U*Q_Ȋd D=̨[o5/|UmFձ//T`@ P%^48 l$+n\+ ˠdĻzϲ/*)TU>اlP ۂ( PTP27Txol;G^{P-PU% Irul%U5&sg&#ViFU*@UAH>{}EHJ$l3 K{ȶP55)q Q *TU*&Jr/aߪ"Ah]Jp*/ %}6@T @dbAiY?e9;TU Pw=vl_X 
D2Si;qnbzb[U PUnP% 'u(Pe 5)ldF7DT @\J$~eD+Ah/9\}CR>MTF>!@ TDk& $`ƨE*DvJFL7K[T@ PT2`PU4I`(2P K%}HPTU**C,*T*5H VV ~h1}H U*@ PFl+Pe MSR͑rTU*@GD4;T~ѱK Uٖim]|ȊЗ42J PTU %C.`PUTIG4S󱈭[ӗu|TU*@*PjDGdG*/=T$:y_!M$ aUa* RU.6e]U*@*UhUELtY~NO'EPCbCTwgj;mLU*@*H4_s*hN,id I!@U*? *N`8zTĞ'WU*@U.UIT [^Fװmk2v3PxB ULxqbqZ>C-TUJVNԏLue[U_lu:ƶFcNb!Dm Pvd{(l=)U 7]=PUZ:= TUE4oe=)uUyL4/Fȶ PUK2].zd9cM@ P J&dt3!DV&Z#V T5ѼKTqp}m#(uGlr^ *2TIOٔxDSkJP XU*ȇlM$9ՄۈgM#*W9֭TVi6r6+gb# Uk(bPN!SxCq|r@ PٕȚ%ٞ`M *f>*(AGss܏@djIU3TuֺXĠN ^*PCȚ^:7J"hU_Hrښ~bLҺuGĠ YP@ )i@9BFn>dbP唬 U:r*PPu֧"UW}]TU*qmPeJ3;%FMr[U Uk%bPu%lse@%i,9{ u$2%N籍7lۣ+@w;$Pu*@ PU_)uc@諐Lm6(K P*{pnTZ_T TUQ)q(_ol5cjIH v@z2:bPeo+ PTEQ t x&U_gzlIY[ĠO", P BV%|k؎ IVDDU* 2$ZJYOfCU2b+g @I;:t{3T:=xr_Xj8:"PEg$ PTEAnJUpD܇{1%~=Ķ.lYCΈ@U B U53f}ة|R@ PnI-(auW{ ;TɆu["g7f;%ŕr**FdHբ*zBUU E+#V#}]cT«]Zi&'W 3Tm%7^JP{JFGU.:UrOcl؞[-U7h%[ܷU~[UYd|"taNls52rΗM±ㅉ }hB2!Nrȡ ԟ+TdĪUW٪tjj!=@ P0mg@D󾊢7F:Ӏ\ aeDne4{jY @: R?G!4NQ9[ɖi`v0bMfPT^p1_Z^m{ TInWx۱BkCU(ӼGiܟ)Z?M{Gh+M+|D;0P-4TUzdMH9꒞ YQUXJ2l[5ڣVclTPB(ߋ8!3/#3YZZWZc^dm.[ yߙլUZ :?<=ҋ՗^zlؖT K{ikO+X|ʴyTuW P/}_)\l)$Qyi-7@R7=i̴4sGz.C/5P+c{ MMΉ,ݾ*@ds[2CՁdmD-yOQ!j:mJs>ށ|$پO:F)_)rw(4/ PU֍4Y+\[fd6;DK/lwc{m J/kQ?~:%+>@=ya 6-g݆0/ PUƵZ'MA* h:%y3"?,_# Z| U9H9?zɳ{;L+ P$%4Z#VRq! M[Wd3% VzT.A-Y.-| %wLz~TMZgO5(TTJ;D0g;Ɵ6FUVw3yߤkXe bE*[(XǿNȂ 1TV;ז=W>)YfTx@*jgvC}M,j 8/QivU(lPV=熎6Jtiʔ?ܞ%kn?Z,"8Y\΋X8R˩a9*@Utj[@515TFO)9tt?PhOJ&+Ne U> TRk}(zBV PI '֒D'Tp^)Γ*a^۱[0e2%ħ2c;$S_o/TiChwV 6 i%6O*@U$  lQW T|d*cQddl#\2 ‿}֭7z TdASC$b2vrnhUKb$}Kɨא:3\>בͪ*eTdTV-Ps_;0h_jUaF P?jG:15$ y#S=YAJ;"8eڑ*oĀ|v a%, PzSj_L4f=IP% e/)S<#쨟qyUyD!2pV@*:lzI3 `B3LaT+fvǞ7J>d5dƭ2*R%JP5BS TEl? 
f#oNN29Ҁ#Hɴlc#!A9BuE>#<>T%$QˈվENW.@ TEdjEVv.f\:?J(/d RrMqb5ʬw-Cg})t[ [?1ϖPUltrҒAU*)zg]\:R{p05Y; ˼{hydɴWdV6Kq̦dQ[TLXF @iQԀ*@U]y u0/JQo&y8el'?y^ >HUdg3b); &"iZ]~|=J7 TEdٶL%͢] Œ(Cgm)K Pdb;01(/рñ8ɴb?; @*BP%׼78%S=n1Q UI(9תy_ +S=tҸSgU9C%*d{mk 8UhBU?+Adq(Oi$xߵfհ(t#+(PґE!2]P5=WTe,67QTg#2ZE; PUP,|~~Hٿl#xPJd{܍ g&ۖyhfg VU8_O64hھ>vPTGk b渁/Ml>_y g[Cz͑+TNmUEiGQ2.Ǵ4x,aT@*P]6wй;ݓmہaȧӸwb6NK N}U7F;L`u/ю2RrwT" U0M=/͙||։X)SUR>w'*Ԛfwof\v*sdźm *k]zDbGGf0¡U^e;D6{Qz?Uk%x6+N̪ydM}̶s:E.`*w+>ɞ!\yN[v;Ϳ-kc>4oeס5-?r{yhE8l2=O @UT˯|hNIGw"P`&HV)e[@Vtdm,* ciy{Un|o{r@UOFt_}K]>On/TXu`6z7kCw UMCxdJRFǓTe Ti[8 V%5t C TdmFxgLQgϴ@3#qx8VlOm*[o؞6fQ7 vG#k/ڃ*KOg2,Nj3:ig T<>&(Pu|)h3yy=Uzղ7 Pžo݌gW0ô{]l-ө[_@U-ȹע7n&Up@lG#1SdePvDԟa[un3X=&vpI"A5; <0I#w$@(A3ek+}U*@ PMh hcU D\nw%T])_ 8bUH>8? 4 lޭUhYW-TU̒5;ydh?ĸr|260 FO/$Th.TD ׿ӿp9kϐDrr0śMylPԑ~ȿtqL?yQH?#t䭡h |h\"\BURrK& 3 6Ty*؟lgfu `*@*PADr+UD^@ذ"6콠ذc bbE"mΰssd&~M&3_Μ9JpnJf}u T+ú-@B: ~(Ϥvs yx,EEEEEU9hD6OUU*y_> _:}%I/C_!2hI 5by? s.1^$p:8KS* |$)sSQTQTQTQTQTh9PQ*Q%bc_m"pmC7dh #lFT=WY:*UgsqZ****L٠EqK&tA**** m_hkwY]wC\.MKRTQT0y>.RTQTQTQTQT)%s((ʍYeME****|`=T)GQEQU&|mmy')((((Ŕ$*#&>UDբZ #XΦYQ5q**t*߁iiUI2.BQvvUEUVռ&׼VGAmj9EEEUDt9U7W%3tCx esmDB<$%UEջtm:CmFyGQEQEQ8QNTչ!\%kͻ)ÛteKQ|DwVTRRTQT%GT,p EU,"~GgnP5Uy^ݥxVw_沬W3wnSe-J)pEUjrU{:6*5yzx(.EU[)~:,-Ǚmk!EEUrDD9U1I4C Uw=,MQXV\*3~fՖk%NQEQQDS5H]OҴUhjM5UI3,Ԓm/Esgj륽I#_b*A4ztӥOєYoEURGl{t{*:/&$(f?9(**.Ql{ᆸcu9K$>*\ڛgYUQs EEEU`vT V #YDC, 4t\Cm, 1(IQbLUEU'ŕsmWEX$a UXUQ5QT %(fM[kIhS[Cz!, U"U.NQ)*gSj"N)IV4=Ri5M]ܔrثPTΊڶ[,ҍ*qmĘIDLJ(R$&ɱL MwmRT% "%l EB:UaÅ{HgNQ fBQE(\T!i)U~aJۄm0"U+̱S=tQU( ra5cЙAPT*Uq?0aYAێUeeai(%{'9Zm0"U+N*) H/5mLQ+9Zm0"U+q_*Q,Ѐ*rsT9E!Uű%zW* )"T LZa~ycL(EUr( *G("Qy`L(EUrIU\eF32l{5ԇ3qY>+s9==XMLuO\VE+LT3WfL(EUr( Qjmm7k1BqNT-=NQU^ `BQE(WT-")se6N)6w(ndm.\ST՛9Zm0qU1P|mPT ԘɍԱ{U3mUEKosVeYZDջbL(bɎgP )\Q~6*hڨ5w)MױMRSjJQ%x8I~TgRTGrzcU֬&)5c(RT_BQ5" ۋ"*RTQTXTY(zr&1}.wH4Nя 6Y/]~;m`IזRUqJUUe`٭\]!^~Vf6e}vd[[STQTQTf]y~Ƴ%"~նg+~ <\)Uk[49&\w)b^9UE{ׄ= $UZKgj`YhKN RRUrJ *S (4$|*DT f^R>džKRw[Uq{$ŪFQUNǿt.c#d ^2BD(yn%Ek}ҎU\B$ hcbPTE$F1E 
++#h=fy.~i0N6\Ǎ16d,ZM՞z>EUrE; jQ5 5&5ݢ4ڞ*1`g:Тg3!Ul}ӞMQLQeHTjtt~)5*kWISCEI$;*' ڔy֣JZzBTMj]Mn|ԌփB5Uyc}gK,!k-ҭ IQUb&yY=WT.aE!J:BF**Ufِ3걏T. %J´obͩUUU(0iDc?[`/W|HQLQW _w*B?9zSRz6O*ē̗fU%USjƪG-;3񃵶>g1P# ~BHl3RT%STr~xr۪ƦNeNSySuzfZ\nk6'_ =E%ׅs<~BH`3KRT%WTTIse$~)-"egd+{sϴJpKRw"PTQTwJډ2y餵jċqUGh!RdF֕Fߧ\G"PTQTm/Q'Sj;h>[Eߟ, =Ti[T, FDenӢJ884=KU)U$/;jQkGSEG f޷.I Ļ%l)&U!X7+Ⱦ뫄aDRK恢*YR?K6NBj=T9™0,U)UtJ)V/ݫ2m&ݶS.NͿ( KEH}qfj:N<j^ C_|:lW̐rbUNL}d/Ɵ:_vsn+kڵ98ubyOPeE͓znl\仾B^gɀ_ߡO/|zD&$\?騇^i7kBժ']T͕ŪѶrf;(C;vfg;/ʯ1wmW2d17W$>K ;pEU*`|GotNѝnHmx3{uPeN/#i8o-dzt0b3Lp>'j M08U喳ZT$?&/xn9֝$xF͏1e}>-\g~).QY2R|~vK'D0AW)1HJN<qŠyYv"ȐЈ!Ms^xj"[kDOZ&@!uwy}ʳ:$–yʻ T.AT}W(STʆJ^ڠ-DݐRoV&PK f}͡=W<93\9 #Ux)C2[X@{g~fL*l' fM<~.QYZ *UjQՑ1Dzx7&~)RT=gխs\*N,-3,={nZhֈ-$ws|Uaut˳=U+N|3Hbٽ&Fq *_bUSl_U{8d|Mm- ƪ3lL ҮW6~lWl'Rh&ֵmg)[T-i\Ǟ#-5Y$L 1m!cV77¤Yy^*ݭ ɣS:FyelsF¼Y$h%o(8+ETyI?ݦn͵y:y7Qu:;ϖ2g;l6*[eGQTW~E!R9o9A1Ep\ ^nH ORޘ(p2$/K{N֘]I SOi8%CU{@o)g$U.W'D>exZ纍K߰_LDꤨ@Q7)r=4([uhGFI!/UsD?-1(Uʹ/2F=S*y:~¢$`d# g,pN螔bmUNxlҎ[iJ͐ΰ#tf_'r"Mc[. s/w}~E%@m(gq\År**tP߫OዕX$Y:KgZIqR!,s 3f8 UJ'fZG>G)?XU^KPe<*2'u<.S!KQU j|\Vgxp/m&J OW\f\3\p>UeNlIG{dNǬR#& (%JD@cXmNT:%.!ɇD,y.U68V^I*J_/AEQE*9={*X6&\%.m\0CF ;7*BEEU$RN>%tCJ.m\9wJQTB(("9auX6wYfr"˞ =G*BEEU`5YryJYL[k'E!*r6Sܪ$ҼY~U*XXSf>ʟH6#;+% E!*6zhJZ,m3E%idLBp/EUDF֫2]m+g gyU.EEUErDY|3ˣV}D6 ȹM)EeKQ0^K+MRJ<ׂ/ǾF"of(gn{rs< Lyυb–KhP͌y6fFmx*77^3)iDz*fFvyDՃ^wuFoWa'lt6~Ub&?E^y:*\˽yqXmS~! [s-'xtY#/9o/!ͽr&P`w(L7,T T 4ܬΑoZf˽2ϧKGg@9&} ϐ9}奦門h|v^MRr_SNlFVsN^.sŖw'-r_*zzW&,؄VlHdF0#I%:Nxݣ *ص۝rC}~FW{lٜ&O<mWM E_.خ0Dئ}ꖟ:vخUFW()mݙ%(bDURKQEQUoH'eބTNR.m̫Tefԏ[TMcOTyAg/ l< Д<؞!PbQ3]}!15<#ҫMOTˈ<۞"1D׸ZgWQRly#-r_*..mz_9C9JXQB://+N47W*x6 ~ cTncGg3=~zop5˳i. 
Saa1VG2KT-fo<ۮkctgL))ﰢ>RKQEQYU7D K`=֭J)NQU.;//Q6xsP՞F)c*%&s>[]c_bUٶK.XZ:X1"P<)BKtQRܺVYS:VaU,=,:+|Va U-b;] n%žUDT- sm`n2b;ORTQTroUZ7Kۼ)opתK Q(br#u/1yGgo7/U(0nsxʮ#9TīU)ﰢ>RKQEQ𖜨r "x:VQͬ2,顬QeӸT0 C|kd`'D.RTthؗW{GQez1)DZTէ )”K/EEU4:CػBͥ_ev#ߩ(V;q*rr{XD4K!)bwst>ר1 *ԏTn.֔4I14\2ׂ:A)!Ѝ*7=狫\* <]~r(pc,ɒۮ>>b˥\l4շVǪ$wS3+h4N˷ZdIGFHO^w-SSLRTQTz*?B DO)/S:^UuL%QTLE:@ )粸@l(o3N:C e9!zx0$زNXQoX鼐/ePr/#byޖ ǜ!"oQlС|!=YoA8EU>ZP9lRTQT%R)a^UzpHّwܴv[Havf{iUK}/pof&E!UU suPi|*%0qdzCD#%~P z)R`qYl"1; fLQEEEq(=r٪mU!"0U՗ZÀ )$ssT93⭭QE"iupmuA7B&|K3dCC^QF.$}Ko$nD׶{,>|"wslm0!8[q嬄|K ooM;=銈Wl)m}w\" =!}U=U24=}7r.Z!l:d>烕*PRcPTQTB#j)o#l>靖U&>1w<sDwG>@>Y=F~tS-T.)65U[~J1]>@y~[ k[\=[>_` Y]+[)Cs/f8;mφgGq|~I:4B>{V՚)G] z~JDUU􈪽}-Ńcuw*?Drw5"氟"'dkZ]+zo&ɵ؜}]/mm}fo%.^U~Lz.pc? Rdn F>-;{E]yOꮗ.?ɣ=?%UU􈪻\yU>ߛu">tJs] tYDKK(zC<8]=?UKuOI׶@<1^""~P<~\ď|wMo\ ?F傦'Dtρw^)!UOQ^f}߲/xnW^uUlkSUWœ_LQEQEIZTw\yy[c|'i/4)^}Ub=w}?;kDQ?KV_sw[b-*v?XOc(Cs/o7"^ p*Pa|@,LpuUUtW=DJvR;_򈪾]0U7^xFz{*PyNZ =~T⢕U/~e2.r7_( ͽ8ړ0[;UfU<}&۬T&#PTQTB*V? 3=Tݙn ̬=u@=w=AspnĠ -UnqTSNLZffܭEhI -|D<ꐉZc7GL"R̾[EY9ܣLy*[24+K.V>\w"C旰,k;?O |j2A]tS_Dvr  ՞MD3{#vjgE- ͽJ=7)w"`JyoP-rgՇGe~7ؿSTQTBQUBi}G+,⟼MV^nUr3U!%FIq~+{(va(R ~e9G1`2fQ(qEQEQEI23H9D=~iY-Rɓ*2U'EE!U%q:96}MFxxr̃rf\KN0鎝#z_ȉ**B(J ?Ѿ4zcRbmw+|:|Lø9EE!UBQU:Io:AE!$@Tmb/UýI:AE!hUZҶi+c "9jbqB"!icA g1ma:AHE2U8!0Mژ/J#eV&T$+X!,o1hfqi+sU݅ !#2,BH e=Kd0S&nRl5zX;R?*7״DHg]S?sY$臯ve@ni, tPa5Vwikb!~ړ[Kdߤpv+g2 !(g VG5[ۣNն.t9B!@`;9Ev*RH,Aaߐߴ]mm!D>z{m)'v,hir,""BRڦCJj-X4BHEr=m=X4F9cBHǿcD ԰hBZ^ghk̢!B*Fʙl6+g *3Nb!BNLD9S+MQG!T+a5('S,cCX$BP xLU$ۍEB!]Z.\?m"!B(g A +YtR( C۳H!m}IW`8蓥P,B!X!Ib\XdAA!b-ìMTeHxẍ́۬gj, .CQpcV>Gt״]/+EQźGH1`WBUU%k9΀*lGacرE}mS^3!@ujem ‡tUUa2{DVtl5\0BM. ,X`C'k߲MYvsMVX)as =B"ީ ~ R~\z8<ݝ{U$hkIUXI喻/uW3 U4 Uc>E!$"Fb*4U:D&|4*BHDTNڑ;+UN,.px:T[oxmiQc4VZHm;h@QUnvS W9+E庑TmCmSstkS*ZkU9lNԶ5rnQԉydmG('5B8fIg+Αm UhT^]m q*}#"I׵(Y*.x g?׻~/~s%!:1yDteksL*}IΧAžc]>Qe򢳶CMAmxnBDCfxd$dwA%]{,vY!jO2p36`yMh/TDL QB:Rb*U˹:Bzq\=D*ھx#"Tڡ ms*uX%1=I#UnE+LCNz U`km c|-oM]o|+=\z4f+dZ:5 +c>FzCT3m3\^>J$:K[){QK/BTa;od b=$9Lv/.QMq`yr©1(ăym7K;=$.sp`\ɞB! 
]ywxx8+]>Ty/ag\Zv_:m}~ wO܉mй(v/:BK;_/qqmP]>]۞Qe=-DUz%wi"& DJ%\R*oDU?8Nc+덮Z`k:w :vy~["Qu*t?܂s?x|gZ  R Uux֨DU9Bi nP[uz_|&^8ED U{VXjR=!I$B٣,Us5]}+qvsm{4qU9[fr'+,}^80&QU:aIaFm^LE4N< bq*pG:akB^{_^͙%,JUCU]?Dh{b}/>D]NQὑucUgqopiUn܋#':зt0ϕ,-ߔߴŝr" h=v[8Xk C/ ЇACN\羢-E9PM*:"`oNsFjۿTbQfULpicTc:^!*s$QߋT|K]'NP|\ViI>x)'Ď bQz4F؇a{}lhQ4LQEQ&Qe]ĵ|bQ*g7U0lpZsR oTBg;9OC4УXa1J?Q?+x54K孔UQDm!U_WIɛ %zIT̑80̲4\Y=su!vqmTT0^RST[roW:U!£o6d$1m'eNq*73gΫ2Qxv=͈g+ߺbޚN!m۸=pm}>ǚ[6ruA:?d'-|Q0d|B@_"+/L@{CLr8T%f󴝸s`B'JQP L RFCX@Ӝ"H< BPP?@t~JMcT|ub}STq+ /_GW3'S*}9H:0Oi;r{U:*̹ުv<DV ѥ>0aeoL8#9=9uCmj>lvrjǮZTd=Z՝|OCY\OQzCtT4UXǸ'%N TfRT5;G.ry#gȟtrbK۶G\fZa$Et8WqؔфtH9!/ ѓz%@HaY"҇)/ xq²5>JKQE!)j bBQEQE!y%VaBQEQE!TSY$PTQTB }`!!UU$5 _`r!X{%$}D՝!PH"wh{]O1=44PӴ=,,kajW "'j>T/ֶr̳~N6mo&NyꩧsљK.$3| 'o2[lEfUV4ovrs|VSk֬Y絝æVlm vxwK/tf6 80sf[o1cFfsɊ27Bx뮙=zd]t /m/)gHs+ uHO~{챙o1ofLڙ7o^fY1O֭[aÆbb&vt]) l٩g֭ޟ#<2S￧~cv{GVl5jԨK򂴼#;H^ f7h"g;찬_믩x1ˆÈ#2{wVl˖~#~~RT%X9~"oq#d x^}լ ^nſt`YųU?=s!x^{QG类~a/<ӿLVUYfi~mݲs_}+B Ø ߉ږfJ"k;T9u*pϞ=C 1IJ|IZ-ܒb! _yOKB9ӤY 8@:e+g 1Ru3#S.qeVHIQ3}a* 3.~sSN)~jN]yLّ2>sw< f#Á輚*mlLّ.@S.vd6WVUE7UK/93+bv #;.$1US=b'F:%X"kfDä}@`wߝ]cVb]?ė!_Y.c.A[W +% 'zp$IW*bFJ;xKQ.\m`Ű fg^+ľ^%qVK{xPTQTٴ;,(֎ "*~B ~{%7\ˑ ^^x/A-[veb1)ʀkty踚&gmM-oN.K,C.՗*/,O?T'ƫu+Xg!m1_|cun:Jx 'eM7.*(kѺ?YA^*a*"1f֊lX,*lat'e7pü+ ~ᇵ>GLy4pX }L߾}ke,BfNt\\pAfڴiٙeQ% 'nR?<V}uwvt\޽{ޥܢ v&Pg߮RӶ楗^j_ ]*XJDu%_~ȗ)VTB$8XV$Fu%mDp4{(JȾ¾_u$)L*N"˦~` f )?ꅿr%n>ӅKpEfs?{-f*)+ەva!j; n' v ]/o-LU.;lQ5~:\fǤ L4m)gN VX$; <1,jUx>h~~Y%b VU% K@PT o[,ڊߡ"{'d?Ck}/>իW6a!$|ݝ5⻌+?2 >Of5 Ѳ *I1, akuڢx$j!Gfػ [T!XKIE* 0$ asΉ(1 i[G"ڤJQ{JһwH+%97&z0|#Ovxhcǎ5&NX뷈k2/ٸv`Rnݲ۸py1K֋= Esq6fu۶mxam泷ǰ ݺuP8p`0oOI¹ѓm)gِn'=Δݵ^mQq g?[T 8;B5iuzaD:u3tn #XEE'zL@̓VӱD 6 ;&fڱmڎVΪG¿$x3'.U3ZX@YK&`L6u+٩gϞP"allw K0 1SFRmJ>s=.lvm>R,qu-<#LRcf Yyw_wm$md٠QmQAbD3ƼmٿQK!̰8,lq&Ţ[4^ qvlef;Է0cnնx,ῴU?iM7gZT]L1dM| Ͳ׀EYm]^~兓jn$7] F 2~#u[6S9fRTNʚ{g:p}z}'9rdGkP.<_| omJf K~^w b:}vNk=qc2ު%EIh77e sQBU|B=CPxBsNx6gΜp[ (;|J@la|v|VGuuley 0z$*Qg DS7Z ~ցE;ofryz?/rc箿J&k6ڋ/cj_bjGWEUUMjqÝwޙMjCR[ pFA+Л#:蠅48+32dȐ:۝vi 0iΘX0GqDE+_ OAV —\ˢEHaJ 
1߼QTa/;S6S1c}[ dCͳROu}j;F2LxS[yjHFQ`Bʤ0op*h+1~)=o*C˫LU 1T%tĬ|b3ĉR!StfKrIBI)9ra!@۵I@pvW֠ &u9@Ai#_dxr>l(٩#x[\Q/K W1!@e$f"5EU)*x\?\vMzQTE(ra.(ǫqoޘ⋐Jf΍qo,iJT/ ('”3&~V[ssau\J06 BYG5x/U (D@>Asqݘ&!NZkH3½C y>)2f;D@o&__J)(5$,Q}c :<]383 q :6;<$80$猩4D^9 Y[ەְ :ʙxL$L n cp+ћﰾ/Qe8F0:r./"H)ڦ,*!%"tq ^W2nӖpkSS$TRVl~{H4 5f0zؔ;eޮN**TX 2ְ_ȹo&gʕHax/L`?ᝲ!ְ UU^#B{B"$$=8H)^4kqkHCE)}gy{dwv׏"IU\Y L!1 Y]v%u!u+3q`Mea(3mY?4QUiXG0ViqIϷ1 >eʔIj!~FG;NȦJPbZU 1n`FO.?q* ׬$P8 B/%e}OguS.G7dFT:J_P[6',SeT^sQ1pݐ`P*nڐ 1TΈvOj>}=1m* itZ(:14 Q m03OHn9 >#S?^tFY>kU&U71Q4p3p0'j!B )o߾}Tc<#\2ffJTQJ2V,ØݘmÀL;1#F'1uA,؟A :b *U$a!r$Pfz܋sΤ/$6m?( 008w* `##$:@(A9U}~.*::_ I& ې0S #~agXePd7+R`S~epÀL^䀦sS % ^1u8{O2ss):F%j 2 Y&FRdjKJPWo("c9vmY:i(5"5'`hUwhSwPGQR% B8Yx`9M힚iӦP,[s8Hw\GMGςLR+aTV iD'ܕE^Q[.{rwDk X_}^/bj3 Gқ jf4QU"Ux-f5ZT0+0r5 V"8ܸ.e- |EIر#D8?4DQ)1XvLNk :̺)= BѪDfA YYfׯ/|?ݟ'|rX*⬯X>R.GG?&T~qgM:=YBMg&*:إ?{,wQ^qoQj 'x" B!" Ca@uLDVJ }-2 Hҏ ~VWtAe [&ZuaPfBD\-ZGIh<C @IM{2pʸCZlsn)c9Ge\C QՁGYMG5_LEWR/e)x⳶gfA&Q\-\K OG׮]{039KU2GuTXǵF ܆s'G`XB16c:3;Ո⸪{,lsSSfPӊyZVf Go߾:묳? V3UlZ$dnnYfw " O){"Q:+Y e FELA[\KTug *Ul:ZtܻͨR\ q2:48rE֞x,X 5gΜP\r%p8[ ј;QID}qVi fpQb & 2SM?^Xf;^s! ь^xaX~àƍ@ԣ駟,zy{h.b=|yOꯣx9]h3PiI_[e"i*MV(qՅ5AT2ZN#b X^7{l8- 'jC|!ZppidaG-~$ .F_`Ai\(%Po L68Ѿi|'a;#4uʶ*̪TdW%XL3SpglO+T\ƌr/ qFc9&,ch~Oyg)CyD… RȈ#}¡ʕ+-[ v4?ٌUW]3o?QB܁Q(?eK 71Sܬ*՟E06 F[2i.W\q@?K'ShS+^Gp ɿ7cP ^Ʒ؈Ž~0',6kpPbp)Um4uCe rcBsW lک@;ZIb2ߥ(Iw~2ktNWx^GC"Dpt5"%gՑ&JLI/hgӑ/(e? k?pf!aP ā;>2Uy*Kǽ?® n0*3i* U-z4S,H&Fb7i,ຜٞvӱ}LC 7t3|/M0^slc8JR94CЍQ8Ud + āI(#hl'S @*`u;q/:6!?_'?&?!PZKZo\DLJrbFľ8+LU*UL532@UNv~G1 NˋdL'|YG)'?!t}"RO"qwmͤ@`ILJcPDAbjr~F(Ddi'&BK;-u(a܀ؘ'3j;" Z@<ɆNI *A͡ (JH!hRKͿd 5cE2S@'&"k!+ȤdVh OLS4G5?#?O#q8=qPI-ޜϽ*٨q˃{rolb?Ue?og(cLȚh)O~^~t;U6~Us0j~]&b~JcPQey5P r`(HL-qd+iřmPc2xlw\]]V6}׎i@-V`&8QUN=+"QiSpa՚|}يϨw:\~)<ęuڦ/?ѭj-qP"ý:(qT]ib337Yc($(9 V*~jfa~Usc?g~NVA0Y]%dMKs0j*2 D#BgތĉКM3"D%)J u4QňlT$ڨlf_C#7|s+$j88QMj@Z+!01U ,wEV В" ѴL l%'sύ=ُ{{;%BB-9Y($;V:usc?+4{L܃C'8"ʵuY짉L*8Xu\ՖZ}i+5krhӔS(c! 
"n%\K,\: f4SmN>G?ieAfJZqRTF Aj8ZFqTvb,V~7G֓e GQ'(~cC~w8~.TbIst 9̽֗{mmn=[?B蛐J7F}Ϧn~J+)"pVsc?*)⾿"sojD~M&@H:01kFCц[*6X9܌c1qN595??Q9|]qF#h} pك⩳rNj8(Q2CPNL2"\G :(C. t9d9%G'54GG$#Z=JצQJ~J%.ƎlVgOOTOQ~U^gon ~*)򎑐HLX-e(;bK])`s3ܟ/7l? V~>||]qFh:)7~3F4x"TUI3fJ6Ӫh~ KV%9Ж)« Yο)=ӆ{HSc%(_vRDdͲOZ91yOTO^b?;Gϖ|M#O& '\G&B841Mhي W[s֖ __rH):J2#ב`@"$Z#fhmM7E4cS-$¯!e?u[F%d?%Q_/Y짉*UwbrfUCZMDQNN^Ն֫5P*%fCh%[nP t9Z;,Gk僣.?2f,_@Yg_~4Qe*5 GWEdV+GN)1(}g~V.?g25d*[JOU&ǡEw/zβMoȏ*j5 i?˚4QePb`5< `02 `0Qe`0 *Uql0wyb7_]?X`p}U?'Xooz(X`]إS1gg}hiԸ`.Xr`#X*aߩ$Xo`=ۃu=9z`B5U2`~b9gr`=/?g>#G֣ .Ru5YVU< *!.(h{n7CaT`=?x4\v%Mt8\:Wiґ2@pq4+7q5 .5>+XRש^e\8jDUrp8uh֋Ne˖kժhڵ?p@ܸq9srڵk#עEr~fʔ)޽{Z֭WT0`Ӽe< i=̋e0 ʔ)ry~.]cǎ݋W%Kr~fڴiȑ#CiƯ__JpC.af;q43Py3AȂ|YߴiSSN~~ѣGѕ+WFrtٲe9?3cƌv 6VZؠA/]Ůh5QUEUYf`f*W\(nƌ_߱cErOo7E~]w_ygo۶-4 ݺuˏc8tt={W,GbeJ'P\Gek{@ydFC EѱwygG_|Es_~?c_|Aꫯ>l,Xo(6e:AqDUV>#X=P߽{0"oy睡3L#<_r%͛aÆ_ٹGu*L\/֓Q^cǎSO=տ[ݻw2pjO>W۷o3 uɋ?1Ҷ]\( N(𑽜={ '7xc(ی?35\ LJ0``qDU6٨^LK< ;?&_av裏3ժUKd>+G,Cဋ]t0^tC̴T4'lpiz˗]wg Y*#KqshGhfT~5E/IaBɰbŊLV-_9j㭧c8)a xwO?=LiGq"Q>5jB cϞ=bi2_r'ʕ+k dZ^?Oߊ8Ac^lNR*A1;9N̙3vqH#abC&a*f0}9ͻN}oX8QW΢(ݗeO4xT_m* ӇCPQ[nJSW&JQ)ԮO:$KI#TKDfRAShb=#ž Co6ĉu8ʥ\18%Sels;w>D^lMiᨉb"I>z:7pJ0˜=+ aqqesVjb"~3y睰y|BYk)r\~h⋭Wi;,j3@Vr*zG=gpNJ/`$2WA@J/6SrhύȚi3y8GULx WzľUA!}o3  ".b͹'0!:gd:Pr8N0簾ϒ|q(bWD֪M{A 8mkHotk]bm*B(矷!@%" skTv՞u_30!u@"8Z>CV8.a!3Wx; qں R+f0GMT1\wjg:ܬ3 `b%Dp( sN* VR:]éO?Ծ :կ)`6dMT~#AS饗^2bNcp ާZp{`aG 2yj'\ӦM _34p)q"n"B SÅYw)?bfY6 3 VS$O%0}Ëq p{-.ӟKy |~yi~߄?۸qxnpԨQ `5c*&R7GS U% mٲ~xINk:묔<*weNaA8b{KգG<_Vw/vP}Z2i:?Wa/VtQ%%kGXb'[`ae*u'9M8El4wZj.+ٹPVsF&`6ZTk|K/GuTNoQDg dʋ\hŒu4V({8d$3޳>>oL!' 
&Ge  dG V7/vRQ +U)BE+I7nA)btDXUNSauw@\*&x\,%IdMT wTF*d,VQXuODU1 OdYTA ~w5A`>od D24jq Rr)͚5 73S-P[n)K+ )C\'R gbG1jMٿoEYɣtUzڵ+&GWYrFx`/rĻF~Æ s6v3ҥ/YBo Q=ˋ]?X؉+\F%^s5i'eW0m ضm(&SjsQUŀ:B + $+0@͂ B{.o}٫FD+q} ٺ;vY(DS'xb{T?8G!Gh Gyk p4xxzDY%*0WGS!֬Y#5ɈC)y禛ni=7o"I GS#;Xػ"et%Wt/:ddhO榌¼UAG[[o}"X3-YދbQ+8jQHQR=MɊ ?7p&e gWTy<N4)"SJQ%" 9AOú=XXtVU MQ Quߑ9Ip} ߧo(巧 +j27B 9bFL-T5ǽI,tUtY~%>]aE\ {5JgQ%{.\2eHDU SEUThcn~A2p$㈺ZFڡh-NQH3-/\ t3~-Ɋ*ߣL10JI#-VDqI?ET@ΤOPM-G; ] ܘbS =REUEFzSU-Q#Vl* L$3PQU}Sܢ Wq5XG`SLa \"MYr=ρkՒҥK<S|~lUM곾8)Ln*E`z,OHyIEm{NSp~V@tQw%#$Kvp F_Kz<5D ߋM&X+|>P 8EʎBoұzc?pH:@qWM9=(tJtP(m 1#jG*^#E]wݕSWvkP DU~Uh4&n\/tbt4xQPIJ2UtJ85F>{C !HaE`D,8rHOSJ|L>DX Tk Ճ`d\U#Tt嗉b2ؐDXFp^Ե*&EC+#sC=X/MJS=+eTdbw*lWw:f|ɜ~._UJ/w8f<.S%zB-q{*Y9D&/xIy?oUŜlU'yM(>:U/^5(]kN!hEjϋQwՕPfFsQ)ՒJ ΨJ@h"?TK;.lQ7ٓbz7Ke*"QU hFPطB.h>}z8GDUz'#'~*lWj! Io66S&Q @rW:3v\#G Ke, d1i\N6:3GEkg#YRiO_qTU93ώ, b{;:/B G__|MU&5T&(P ٳГ =7kA\M2%L㛨*`fOIB>֥*:=TۘA%Ϊc$]].8Vj2Sњٵ,Rgγ6lX* Fs\fMF@d qsjʧFqkhwG5Ϊ3RRf}*Qi ~ht^ڲ ؤIۼuSWPaDg麪'5{LTڇz?FmHb!^xBR=k`~F2`f3&AmKё?N)}K~ 8t7 XY6, ao57Q"5=0 C)yRt~7"j,e4.D"/ Q˂SG2_A) ɊnU"GP!+ROdJDUX$} 4ɇW$r̀E]khݺulSGpo~'/*ha%N GgRP>+zkY8(Ng+PʺJIԡ*3%bj2ci{St60C 4lMiS@$Gg3Xcy{D~̫)VIBرß0a{T/XbJtmRwfV!QcU c7fVqvT+r燧KkcBvi"z pT&L,A<.h L0S3 2puJ$Mm%soZʝCǵ76A{ 7B+ۜjpN9A:x4XO&R:ZjfQX!US~e2GĕY +q\؇ԫI *5fC"lsb(G r|;X2jzQGj駟3\{7{'p#pkd8JfU{Pr_,Ucf<:1j&9z XCU,׵kWaFE%܆CN]tQx%fNr1Ev7H)BjUNP:bj3(f)G7io|Geh1l8T3H%4kmG%as2[Q2C{"[n~a_ {c0@c(.dldld) UC4S:}6mv~0uު6 (&3c8nQ5T)*/e꾌"r\#i4S.ff@8z /6Y֬Y3l/X߲eKn$8ҍ˓B ydA݅X©1pwݫ>a(Bj5pt KC)ELP8Z>KDpn@>\ @r.ka@ǎC'b1}H;wwx~;bFj_gÑ0c l''gysG+.d{j*MrJrP/*|fhF@G[Z\tvs,L8aI^bC,LH[;8%UV~9?SreK*d?vq<A f0w EmP;w_J8M:X̀ppbXaa e6QQd=gMGD67{[v䓣GI͡ @;Y̞j(W(:!G%,Ga_.̶ 4G݌hƍs~&bTAqgu1;sQǷY"9ŊKPb*e#GO~"u|WgA+1 -IȮ45CE^h%CvFbHb:DߧhdDKwOܨ#M/IЮͿBt>}R#ivm2~LLEqTWX̀pYhOAWtSB}P%O8"I Q'P R]qUm8KqT lt8 gy }YoK?S#@y{+/~T'Q HEp'9K3Q-}ʴ7~2Y临QAЀƳ5~7%0L$g1C L҉Mn(lRx]x3΃=˅ r9G2BNq"$RQjFpN8)J5`JJG%3Xixd \9G3(kQ'zFqgVZy̆]spt )/N39Bq⨈rUuFUѡљK#8) +ČDأIr3ϐy~i/`:(Fpt)j!ՅG1%xL{㫼ٲqIC{5jHZ9^$`Ybh/~2":B 
Yux>a:TmCJ)qa1Ilnph0a,X EY%?'I6B ?U94tÕsKc؍vPi@PJFʜTrYh5G']^L8:#$G{3r9ڄk-6V؏m..GGUst0'G:(8]G3PpTDT ~iΜvWM&~̚UJàS:U[Z3%;ŇPøFѡƛM8rE :ܰz͎o3dOO4u8 %zue=IܒF#:毢&椊5#8ڜ@<#GV^Oj/mvPՌ%jyp-EFgrTt88:Xqt~nJ@MR~ܡnS)4 nD_8HVG,"Ht3FGЋ/ AܔCAq k V?^:4\]ՆoD"֤"JGQ椊Ӝ#Rю8Z Ѧ䨈*(Eсp8*Ur8Z8'&#S? zόCHL@tYn(1Z)NN\i@-bՙё=NMqH͔pj7DN'7NzG+%mnIrIrq4#EVf 8ڡ9 B]Jɹq4x˼{a[lfg\ >bc(jXVC[nFJ5qVSk%!_᫨M9^HGupqq۟UmņFJRȰ;M@3Phc1b@]ժL:]ovwۦ/5GU%EQN e$Ƣڀᮊy,w6z9؆7G fKxZ.ѢÃ"7gohk2 X2+ Qq8i@'jřa;wk0 ʆt ?s `(FȯC`߱`0 b5oy^`{`0 E0dMfV]lߵ`0 "^lO} `(tr(f?ㇼɾs`0 E5>be-ŋm0 *bǡ5;ʼnJ `HpUnj /YLrFe0 e\dp-?^,Ug0 PPU><֥ a `05Ş`.akXCx0X`0r--4 `0 +qfiv@4~ `H)pe-/w~:{$`0`BWu Zj`0 Cz>1=_yr3V=`0 ^n`G][# X `(+}{cmzp=`0J%/uo=QTY Pڰ8X_(=p'5!'XuYj٣1 !WhKإɆB`{2/64=`0 >٣ISuXqϚ`Uc0 CF^=Գ=Lo&vOj1 !,XǷٞV*r`z/v~b*nV+{L`0%Z`K߭}9&);kuwYXAY`0 F-^lq| Z}푕<`}k/6-[lٲeVGyc *XMƤ'AՋ1/Ֆ-[lٲUGfPlCzxa`)X~[lٲeVS@- VOfL `0 `0 `0 `0 `0 `0 `0 `0 `0 `0 `0 C1XIENDB`nipype-1.7.0/doc/images/nipype_architecture_overview2.svg000066400000000000000000003472341413403311400236430ustar00rootroot00000000000000 image/svg+xml (Command-line programs) (Matlab functions) (Command-line programs) SPM Interface Uniform Python API FSL Interface FreeSurfer Interface Interfaces Idiosyncratic, Heterogeneous APIs SPM FSL FreeSurfer Workflow Engine Workflow - inputs/outputs setting- graph transformations (e.g., iterable expansion) .run() Workflow (Map)Node Interface MultiProc Linear S/OGE IPython Torque SSH Execution Plugins nipype-1.7.0/doc/index.rst000066400000000000000000000041221413403311400154220ustar00rootroot00000000000000.. list-table:: * - .. image:: images/nipype_architecture_overview2.png :width: 100 % - .. container:: Current neuroimaging software offer users an incredible opportunity to analyze data using a variety of different algorithms. However, this has resulted in a heterogeneous collection of specialized applications without transparent interoperability or a uniform operating interface. 
*Nipype*, an open-source, community-developed initiative under the umbrella of NiPy_, is a Python project that provides a uniform interface to existing neuroimaging software and facilitates interaction between these packages within a single workflow. Nipype provides an environment that encourages interactive exploration of algorithms from different packages (e.g., ANTS_, SPM_, FSL_, FreeSurfer_, Camino_, MRtrix_, MNE_, AFNI_, Slicer_, DIPY_), eases the design of workflows within and between packages, and reduces the learning curve necessary to use different packages. Nipype is creating a collaborative platform for neuroimaging software development in a high-level language and addressing limitations of existing pipeline systems. *Nipype* allows you to: * easily interact with tools from different software packages * combine processing steps from different software packages * develop new workflows faster by reusing common steps from old ones * process data faster by running it in parallel on many cores/machines * make your research easily reproducible * share your processing workflows with the community .. admonition:: Reference Gorgolewski K, Burns CD, Madison C, Clark D, Halchenko YO, Waskom ML, Ghosh SS. (2011). Nipype: a flexible, lightweight and extensible neuroimaging data processing framework in Python. Front. Neuroinform. 5:13. `Download`__ __ paper_ .. tip:: To get started, click Quickstart above. The Links box on the right is available on any page of this website. .. include:: links_names.txt nipype-1.7.0/doc/interfaces.rst000066400000000000000000000171211413403311400164410ustar00rootroot00000000000000:orphan: .. _interfaces: ======================== Interfaces and Workflows ======================== :Release: |version| :Date: |today| Previous versions: `1.6.1 `_ `1.6.0 `_ Workflows --------- .. important:: The workflows that used to live as a module under ``nipype.workflows`` have been migrated to the new project `NiFlows `__. 
These may be installed with the `niflow-nipype1-examples `__ package, but their continued use is discouraged. Interfaces ---------- An index of all nipype interfaces is found below. Nipype provides some *in-house* interfaces to help with workflow management tasks, basic image manipulations, and filesystem/storage interfaces: * `Algorithms `__ * `Image manipulation `__ * `I/O Operations `__ * `Self-reporting interfaces `__ * `Utilities `__ Nipype provides interfaces for the following **third-party** tools: * `AFNI `__ (Analysis of Functional NeuroImages) is a leading software suite of C, Python, R programs and shell scripts primarily developed for the analysis and display of anatomical and functional MRI (fMRI) data. * `ANTs `__ (Advanced Normalization ToolS) computes high-dimensional mappings to capture the statistics of brain structure and function. * `BrainSuite `__ is a collection of open source software tools that enable largely automated processing of magnetic resonance images (MRI) of the human brain. * `BRU2NII `__ is a simple tool for converting Bruker ParaVision MRI data to NIfTI. * `Convert3D `__ is a command-line tool for converting 3D images between common file formats. * `Camino `__ is an open-source software toolkit for diffusion MRI processing. * `Camino-TrackVis `__ allows interoperability between Camino and TrackVis. * `CAT12 `__ (Computational Anatomy Toolbox) extends SPM12 to provide computational anatomy. * `Connectome Mapper (CMP) `__ implements a full processing pipeline for creating multi-variate and multi-resolution connectomes with dMRI data. * `dcm2nii `__ converts images from the proprietary scanner DICOM format to NIfTI * `DCMStack `__ allows series of DICOM images to be stacked into multi-dimensional arrays. * `Diffusion Toolkit `__ is a set of command-line tools with a GUI frontend that performs data reconstruction and fiber tracking on diffusion MR images. 
* `DIPY `__ is a free and open source software project for computational neuroanatomy, focusing mainly on diffusion magnetic resonance imaging (dMRI) analysis. * `DTITK `__ is a spatial normalization and atlas construction toolkit optimized for examining white matter morphometry using DTI data. * `Elastix `__ is a toolbox for rigid and nonrigid registration of images. * `FreeSurfer `__ is an open source software suite for processing and analyzing (human) brain MRI images. * `FSL `__ is a comprehensive library of analysis tools for fMRI, MRI and DTI brain imaging data. * Matlab `script wrapper `__ provides interfaces to integrate matlab scripts within workflows. * `MeshFix `__ converts a raw digitized polygon mesh to a clean mesh where all the occurrences of a specific set of "defects" are corrected. * `MINC Toolkit `__ contains the most commonly used tools developed at the McConnell Brain Imaging Centre, Montreal Neurological Institute. * `MIPAV (Medical Image Processing, Analysis, and Visualization) `__ enables quantitative analysis and visualization of medical images of numerous modalities such as PET, MRI, CT, or microscopy. * `MNE `__ is a software for exploring, visualizing, and analyzing human neurophysiological data: MEG, EEG, sEEG, ECoG, and more. * MRTrix is a set of tools to perform various types of diffusion MRI analyses, from various forms of tractography through to next-generation group-level analyses (`MRTrix3 `__, and the deprecated `MRTrix version 2 `__). * Nifty Tools: `NiftyFit `__ is a software package for multi-parametric model-fitting of 4D MRI; `NiftyReg `__ is an open-source software for efficient medical image registration; and `NiftySeg `__ contains programs to perform EM based segmentation of images in NIfTI or Analyze format. * `NiLearn `__ is a Python library for fast and easy statistical learning on NeuroImaging data. * `NiPy `__ is a Python project for analysis of structural and functional neuroimaging data. 
* `Nitime `__ is a library for time-series analysis of data from neuroscience experiments. * `PETPVC `__ is toolbox for :abbr:`PVC (partial volume correction)` of :abbr:`PET (positron emission tomography)` imaging. * `QuickShear `__ uses a skull-stripped version of an anatomical images as a reference to deface the unaltered anatomical image. * `SEM Tools `__ are useful tools for Structural Equation Modeling. * `SPM `__ (Statistical Parametric Mapping) is a software package for the analysis of brain imaging data sequences. * `VistaSoft `__ contains Matlab code to perform a variety of analysis on MRI data, including functional MRI and diffusion MRI. * `Connectome Workbench `__ is an open source, freely available visualization and discovery tool used to map neuroimaging data, especially data generated by the Human Connectome Project. * `3D Slicer `__ is an open source software platform for medical image informatics, image processing, and three-dimensional visualization. Index of Interfaces ~~~~~~~~~~~~~~~~~~~ .. toctree:: :maxdepth: 3 api/generated/nipype.algorithms api/generated/nipype.interfaces nipype-1.7.0/doc/links_names.txt000066400000000000000000000122141413403311400166260ustar00rootroot00000000000000.. This (-*- rst -*-) format file contains commonly used link targets and name substitutions. It may be included in many files, therefore it should only contain link targets and name substitutions. Try grepping for "^\.\. _" to find plausible candidates for this list. .. NOTE: reST targets are __not_case_sensitive__, so only one target definition is needed for nipy, NIPY, Nipy, etc... .. _nipy: http://nipy.org .. _`NIPY developer resources`: http://nipy.org/devel .. _`Brain Imaging Center`: http://bic.berkeley.edu/ .. _nitime: http://nipy.org/nitime/ .. _nibabel: http://nipy.org/nibabel/ .. _nipype: http://nipy.org/nipype/ .. _ConnectomeViewer: http://www.connectomeviewer.org/viewer/ .. _NeuroDebian: http://neuro.debian.net/ .. Documentation tools .. 
_graphviz: http://www.graphviz.org/ .. _Sphinx: http://sphinx.pocoo.org/ .. _`Sphinx reST`: http://sphinx.pocoo.org/rest.html .. _reST: http://docutils.sourceforge.net/rst.html .. _docutils: http://docutils.sourceforge.net .. Licenses .. _GPL: http://www.gnu.org/licenses/gpl.html .. _BSD: http://www.opensource.org/licenses/bsd-license.php .. _LGPL: http://www.gnu.org/copyleft/lesser.html .. Working process .. _pynifti: http://niftilib.sourceforge.net/pynifti/ .. _nifticlibs: http://nifti.nimh.nih.gov .. _nifti: http://nifti.nimh.nih.gov .. _`nipy sourceforge`: http://nipy.sourceforge.net/ .. _sourceforge: http://nipy.sourceforge.net/ .. _`nipy launchpad`: https://launchpad.net/nipy .. _launchpad: https://launchpad.net/ .. _`nipy trunk`: https://code.launchpad.net/~nipy-developers/nipy/trunk .. _`nipy mailing list`: https://mail.python.org/mailman/listinfo/neuroimaging .. _`nipy bugs`: https://bugs.launchpad.net/nipy .. Code support stuff .. _pychecker: http://pychecker.sourceforge.net/ .. _pylint: http://www.logilab.org/project/pylint .. _pyflakes: http://divmod.org/trac/wiki/DivmodPyflakes .. _virtualenv: http://pypi.python.org/pypi/virtualenv .. _git: https://git-scm.com/ .. _flymake: http://flymake.sourceforge.net/ .. _rope: http://rope.sourceforge.net/ .. _pymacs: http://pymacs.progiciels-bpi.ca/pymacs.html .. _ropemacs: http://rope.sourceforge.net/ropemacs.html .. _ECB: http://ecb.sourceforge.net/ .. _emacs_python_mode: http://www.emacswiki.org/cgi-bin/wiki/PythonMode .. _doctest-mode: http://www.cis.upenn.edu/~edloper/projects/doctestmode/ .. _bazaar: http://bazaar-vcs.org/ .. _subversion: http://subversion.tigris.org/ .. _`python coverage tester`: http://nedbatchelder.com/code/modules/coverage.html .. Other python projects .. _numpy: http://www.numpy.org/ .. _scipy: http://www.scipy.org .. _ipython: https://ipython.org .. _`ipython manual`: https://ipython.readthedocs.org/en/stable/ .. _matplotlib: http://matplotlib.org/ .. 
_ETS: http://code.enthought.com/projects/tool-suite.php .. _`Enthought Tool Suite`: http://code.enthought.com/projects/tool-suite.php .. _python: http://www.python.org .. _mayavi: http://mayavi.sourceforge.net/ .. _sympy: http://www.sympy.org/ .. _networkx: https://networkx.github.io/ .. _pythonxy: https://python-xy.github.io/ .. _EPD: http://www.enthought.com/products/epd.php .. _Traits: http://code.enthought.com/projects/traits/ .. _Miniconda: https://conda.io/miniconda.html .. _NeuroDocker: https://github.com/kaczmarj/neurodocker .. Python imaging projects .. _PyMVPA: http://www.pymvpa.org .. _BrainVISA: http://brainvisa.info .. _anatomist: http://brainvisa.info .. Not so python imaging projects .. _matlab: http://www.mathworks.com .. _spm: http://www.fil.ion.ucl.ac.uk/spm .. _eeglab: http://sccn.ucsd.edu/eeglab .. _ANTS: http://stnava.github.io/ANTs/ .. _AFNI: http://afni.nimh.nih.gov/afni .. _FSL: http://www.fmrib.ox.ac.uk/fsl .. _FreeSurfer: http://surfer.nmr.mgh.harvard.edu .. _voxbo: http://www.voxbo.org .. _Slicer: http://slicer.org .. _Camino: http://web4.cs.ucl.ac.uk/research/medic/camino/pmwiki/pmwiki.php .. _Camino2Trackvis: http://camino-trackvis.sourceforge.net/ .. _MRtrix: http://www.brain.org.au/software/mrtrix/index.html .. _MRtrix3: http://www.mrtrix.org/ .. _MNE: https://martinos.org/mne/index.html .. _ANTS: http://stnava.github.io/ANTs/ .. _DIPY: http://dipy.org .. _BrainSuite: http://brainsuite.org/ .. General software .. _gcc: http://gcc.gnu.org .. _xcode: http://developer.apple.com/TOOLS/xcode .. _mingw: http://www.mingw.org .. _macports: http://www.macports.org/ .. _Vagrant: http://www.vagrantup.com/ .. _Docker: http://www.docker.com/ .. _Singularity: https://www.sylabs.io/singularity/ .. _Virtualbox: https://www.virtualbox.org/ .. Functional imaging labs .. _`functional imaging laboratory`: http://www.fil.ion.ucl.ac.uk .. _FMRIB: http://www.fmrib.ox.ac.uk .. Other organizations .. _enthought: http://www.enthought.com .. 
_kitware: http://www.kitware.com .. General information links .. _`wikipedia FMRI`: http://en.wikipedia.org/wiki/Functional_magnetic_resonance_imaging .. _`wikipedia PET`: http://en.wikipedia.org/wiki/Positron_emission_tomography .. Mathematical methods .. _`wikipedia ICA`: http://en.wikipedia.org/wiki/Independent_component_analysis .. _`wikipedia PCA`: http://en.wikipedia.org/wiki/Principal_component_analysis .. Nipype Paper .. _paper: http://www.frontiersin.org/Neuroinformatics/10.3389/fninf.2011.00013/abstract nipype-1.7.0/doc/quickstart.rst000066400000000000000000000034761413403311400165200ustar00rootroot00000000000000:orphan: .. _quickstart: ========== Quickstart ========== Downloading and installing ========================== .. toctree:: :maxdepth: 1 users/install Beginner's guide ================ Michael Notter's Nipype tutorial. `Available here`__ __ https://miykael.github.io/nipype_tutorial/ How should I ask questions or report bugs? ========================================== * If you find a bug or a suggestion for improvement report it here: `GitHub Issues `_ * If you have a conceptual or clarification question ask it here: `Neurostars `_ * If you want to hangout with developers, you can use either `Gitter `_ or `Slack `_ Nipype workshop materials ========================= Self-assessment questionnaire with links to learning about each piece. `Available here`__ Lecture slides `Available here`__ __ http://nipy.org/workshops/2017-03-boston/review.html __ http://nipy.org/workshops/2017-03-boston/index.html `Docker containers `_ `Github project for lectures `_ `Github project for dockerfiles + notebooks `_ `All notebooks visualized `_ Learning Resources ================== `Porcupine `_ : create Nipype pipelines using a graphical interface View the `examples gallery here `_ Developer guides ================ .. toctree:: :maxdepth: 1 devel/writing_custom_interfaces .. 
include:: links_names.txt nipype-1.7.0/doc/requirements.txt000066400000000000000000000002041413403311400170420ustar00rootroot00000000000000dipy ipython matplotlib nbsphinx sphinx-argparse sphinx>=2.1.2 sphinxcontrib-apidoc sphinxcontrib-napoleon niflow-nipype1-workflows nipype-1.7.0/doc/searchresults.rst000066400000000000000000000011721413403311400172040ustar00rootroot00000000000000:orphan: .. This displays the search results from the Google Custom Search engine. Don't link to it directly. Search results ============== .. raw:: html
Loading
nipype-1.7.0/doc/users/000077500000000000000000000000001413403311400147235ustar00rootroot00000000000000nipype-1.7.0/doc/users/install.rst000066400000000000000000000065751413403311400171400ustar00rootroot00000000000000.. _install: ==================== Download and install ==================== This page covers the necessary steps to install Nipype. Using docker ~~~~~~~~~~~~ To get started using Docker, you can follow the `Nipype tutorial `_, or pull the `nipype/nipype` image from Docker hub:: docker pull nipype/nipype You may also build custom docker containers with specific versions of software using NeuroDocker_ (see the `Neurodocker tutorial `_). Using conda ~~~~~~~~~~~ Installing nipype from the conda-forge channel can be achieved by:: conda install --channel conda-forge nipype It is possible to list all of the versions of nipype available on your platform with:: conda search nipype --channel conda-forge For more information, please see https://github.com/conda-forge/nipype-feedstock Using Pypi ~~~~~~~~~~ The installation process is similar to other Python packages. If you already have a Python environment set up, you can do:: pip install nipype If you want to install all the optional features of ``nipype``, use the following command:: pip install nipype[all] While `all` installs everything, one can also install select components as listed below:: 'doc': ['Sphinx>=1.4', 'matplotlib', 'pydotplus', 'pydot>=1.2.3'], 'tests': ['pytest-cov', 'codecov'], 'nipy': ['nitime', 'nilearn', 'dipy', 'nipy', 'matplotlib'], 'profiler': ['psutil'], 'duecredit': ['duecredit'], 'xvfbwrapper': ['xvfbwrapper'], Debian and Ubuntu ~~~~~~~~~~~~~~~~~ Add the NeuroDebian_ repository and install the ``python-nipype`` package using ``apt-get`` or your favorite package manager. Mac OS X ~~~~~~~~ The easiest way to get nipype running on Mac OS X is to install Miniconda_ and follow the instructions above. 
If you have a non-conda environment you can install nipype by typing:: pip install nipype Note that the above procedure may require availability of gcc on your system path to compile the traits package. From source ~~~~~~~~~~~ The most recent release is found here: ``_. The development version: [`zip `__ `tar.gz `__] For previous versions: `prior downloads `_ If you downloaded the source distribution named something like ``nipype-x.y.tar.gz``, then unpack the tarball, change into the ``nipype-x.y`` directory and install nipype using:: pip install . **Note:** Depending on permissions you may need to use ``sudo``. Testing the install ------------------- The best way to test the install is checking nipype's version and then running the tests:: python -c "import nipype; print(nipype.__version__)" python -c "import nipype; nipype.test()" Interface Dependencies ~~~~~~~~~~~~~~~~~~~~~~ Nipype provides wrappers around many neuroimaging tools and contains some algorithms. These tools will need to be installed for Nipype to run. You can create containers with different versions of these tools installed using NeuroDocker_. Installation for developers --------------------------- Developers should start `here <../devel/testing_nipype.rst>`_. Developers can also use this docker container: `docker pull nipype/nipype:master` .. include:: ../links_names.txt nipype-1.7.0/doc/version.rst000066400000000000000000000000731413403311400160010ustar00rootroot00000000000000:orphan: .. 
_version: :Release: |version| :Date: |today| nipype-1.7.0/docker/000077500000000000000000000000001413403311400142645ustar00rootroot00000000000000nipype-1.7.0/docker/files/000077500000000000000000000000001413403311400153665ustar00rootroot00000000000000nipype-1.7.0/docker/files/neurodebian.gpg000066400000000000000000000104331413403311400203610ustar00rootroot00000000000000-----BEGIN PGP PUBLIC KEY BLOCK----- Version: GnuPG v1 mQGiBEQ7TOgRBADvaRsIZ3VZ6Qy7PlDpdMm97m0OfvouOj/HhjOM4M3ECbGn4cYh vN1gK586s3sUsUcNQ8LuWvNsYhxYsVTZymCReJMEDxod0U6/z/oIbpWv5svF3kpl ogA66Ju/6cZx62RiCSOkskI6A3Waj6xHyEo8AGOPfzbMoOOQ1TS1u9s2FwCgxziL wADvKYlDZnWM03QtqIJVD8UEAOks9Q2OqFoqKarj6xTRdOYIBVEp2jhozZUZmLmz pKL9E4NKGfixqxdVimFcRUGM5h7R2w7ORqXjCzpiPmgdv3jJLWDnmHLmMYRYQc8p 5nqo8mxuO3zJugxBemWoacBDd1MJaH7nK20Hsk9L/jvU/qLxPJotMStTnwO+EpsK HlihA/9ZpvzR1QWNUd9nSuNR3byJhaXvxqQltsM7tLqAT4qAOJIcMjxr+qESdEbx NHM5M1Y21ZynrsQw+Fb1WHXNbP79vzOxHoZR0+OXe8uUpkri2d9iOocre3NUdpOO JHtl6cGGTFILt8tSuOVxMT/+nlo038JQB2jARe4B85O0tkPIPbQybmV1cm8uZGVi aWFuLm5ldCBhcmNoaXZlIDxtaWNoYWVsLmhhbmtlQGdtYWlsLmNvbT6IRgQQEQgA BgUCTVHJKwAKCRCNEUVjdcAkyOvzAJ0abJz+f2a6VZG1c9T8NHMTYh1atwCgt0EE 3ZZd/2in64jSzu0miqhXbOKISgQQEQIACgUCSotRlwMFAXgACgkQ93+NsjFEvg8n JgCfWcdJbILBtpLZCocvOzlLPqJ0Fn0AoI4EpJRxoUnrtzBGUC1MqecU7WsDiGAE ExECACAFAkqLUWcCGwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAAKCRCl0y8BJkml qVklAJ4h2V6MdQkSAThF5c2Gkq6eSoIQYQCeM0DWyB9Bl+tTPSTYXwwZi2uoif20 QmFwc3kuZ3NlLnVuaS1tYWdkZWJ1cmcuZGUgRGViaWFuIEFyY2hpdmUgPG1pY2hh ZWwuaGFua2VAZ21haWwuY29tPohGBBARAgAGBQJEO03FAAoJEPd/jbIxRL4PU18A n3tn7i4qdlMi8kHbYWFoabsKc9beAJ9sl/leZNCYNMGhz+u6BQgyeLKw94heBBMR AgAeBQJEO0zoAhsDBgsJCAcDAgMVAgMDFgIBAh4BAheAAAoJEKXTLwEmSaWpVdoA n27DvtZizNEbhz3wRUPQMiQjtqdvAJ9rS9YdPe5h5o5gHx3mw3BSkOttdYheBBMR AgAeBQJEO0zoAhsDBgsJCAcDAgMVAgMDFgIBAh4BAheAAAoJEKXTLwEmSaWpVdoA oLhwWL+E+2I9lrUf4Lf26quOK9vLAKC9ZpIF2tUirFFkBWnQvu13/TA0SokCHAQQ AQIABgUCTSNBgQAKCRDAc9Iof/uem4NpEACQ8jxmaCaS/qk/Y4GiwLA5bvKosG3B iARZ2v5UWqCZQ1tS56yKse/lCIzXQqU9BnYW6wOI2rvFf9meLfd8h96peG6oKscs 
fbclLDIf68bBvGBQaD0VYFi/Fk/rxmTQBOCQ3AJZs8O5rIM4gPGE0QGvSZ1h7VRw 3Uyeg4jKXLIeJn2xEmOJgt3auAR2FyKbzHaX9JCoByJZ/eU23akNl9hgt7ePlpXo 74KNYC58auuMUhCq3BQDB+II4ERYMcmFp1N5ZG05Cl6jcaRRHDXz+Ax6DWprRI1+ RH/Yyae6LmKpeJNwd+vM14aawnNO9h8IAQ+aJ3oYZdRhGyybbin3giJ10hmWveg/ Pey91Nh9vBCHdDkdPU0s9zE7z/PHT0c5ccZRukxfZfkrlWQ5iqu3V064ku5f4PBy 8UPSkETcjYgDnrdnwqIAO+oVg/SFlfsOzftnwUrvwIcZlXAgtP6MEEAs/38e/JIN g4VrpdAy7HMGEUsh6Ah6lvGQr+zBnG44XwKfl7e0uCYkrAzUJRGM5vx9iXvFMcMu jv9EBNNBOU8/Y6MBDzGZhgaoeI27nrUvaveJXjAiDKAQWBLjtQjINZ8I9uaSGOul 8kpbFavE4eS3+KhISrSHe4DuAa3dk9zI+FiPvXY1ZyfQBtNpR+gYFY6VxMbHhY1U lSLHO2eUIQLdYbRITmV1cm9EZWJpYW4gQXJjaGl2ZSBLZXkgPHBrZy1leHBwc3kt bWFpbnRhaW5lcnNAbGlzdHMuYWxpb3RoLmRlYmlhbi5vcmc+iEYEEBEIAAYFAk1R yQYACgkQjRFFY3XAJMgEWwCggx4Gqlcrt76TSMlbU94cESo55AEAoJ3asQEMpe8t QUX+5aikw3z1AUoCiEoEEBECAAoFAkqf/3cDBQF4AAoJEPd/jbIxRL4PxyMAoKUI RPWlHCj/+HSFfwhos68wcSwmAKChuC00qutDro+AOo+uuq6YoHXj+ohgBBMRAgAg BQJKn/8bAhsDBgsJCAcDAgQVAggDBBYCAwECHgECF4AACgkQpdMvASZJpalDggCe KF9KOgOPdQbFnKXl8KtHory4EEwAnA7jxgorE6kk2QHEXFSF8LzOOH4GiGMEExEC ACMCGwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAUCSp//RgIZAQAKCRCl0y8BJkml qekFAKCRyt4+FoCzmBbRUUP3Cr8PzH++IgCgkno4vdjsWdyAey8e0KpITTXMFrmJ AhwEEAECAAYFAk0jQYEACgkQwHPSKH/7npsFfw/+P8B8hpM3+T1fgboBa4R32deu n8m6b8vZMXwuo/awQtMpzjem8JGXSUQm8iiX4hDtjq6ZoPrlN8T4jNmviBt/F5jI Jji/PYmhq+Zn9s++mfx+aF4IJrcHJWFkg/6kJzn4oSdl/YlvKf4VRCcQNtj4xV87 GsdamnzU17XapLVMbSaVKh+6Af7ZLDerEH+iAq733HsYaTK+1xKmN7EFVXgS7bZ1 9C4LTzc97bVHSywpT9yIrg9QQs/1kshfVIHDKyhjF6IwzSVbeGAIL3Oqo5zOMkWv 7JlEIkkhTyl+FETxNMTMYjAk+Uei3kRodneq3YBF2uFYSEzrXQgHAyn37geiaMYj h8wu6a85nG1NS0SdxiZDIePmbvD9vWxFZUWYJ/h9ifsLivWcVXlvHoQ0emd+n2ai FhAck2xsuyHgnGIZMHww5IkQdu/TMqvbcR6d8Xulh+C4Tq7ppy+oTLADSBKII++p JQioYydRD529EUJgVlhyH27X6YAk3FuRD3zYZRYS2QECiKXvS665o3JRJ0ZSqNgv YOom8M0zz6bI9grnUoivMI4o7ISpE4ZwffEd37HVzmraaUHDXRhkulFSf1ImtXoj V9nNSM5p/+9eP7OioTZhSote6Vj6Ja1SZeRkXZK7BwqPbdO0VsYOb7G//ZiOlqs+ paRr92G/pwBfj5Dq8EK5Ag0ERDtM9RAIAN0EJqBPvLN0tEin/y4Fe0R4n+E+zNXg 
bBsq4WidwyUFy3h/6u86FYvegXwUqVS2OsEs5MwPcCVJOfaEthF7I89QJnP9Nfx7 V5I9yFB53o9ii38BN7X+9gSjpfwXOvf/wIDfggxX8/wRFel37GRB7TiiABRArBez s5x+zTXvT++WPhElySj0uY8bjVR6tso+d65K0UesvAa7PPWeRS+3nhqABSFLuTTT MMbnVXCGesBrYHlFVXClAYrSIOX8Ub/UnuEYs9+hIV7U4jKzRF9WJhIC1cXHPmOh vleAf/I9h/0KahD7HLYud40pNBo5tW8jSfp2/Q8TIE0xxshd51/xy4MAAwUH+wWn zsYVk981OKUEXul8JPyPxbw05fOd6gF4MJ3YodO+6dfoyIl3bewk+11KXZQALKaO 1xmkAEO1RqizPeetoadBVkQBp5xPudsVElUTOX0pTYhkUd3iBilsCYKK1/KQ9KzD I+O/lRsm6L9lc6rV0IgPU00P4BAwR+x8Rw7TJFbuS0miR3lP1NSguz+/kpjxzmGP LyHJ+LVDYFkk6t0jPXhqFdUY6McUTBDEvavTGlVO062l9APTmmSMVFDsPN/rBes2 rYhuuT+lDp+gcaS1UoaYCIm9kKOteQBnowX9V74Z+HKEYLtwILaSnNe6/fNSTvyj g0z+R+sPCY4nHewbVC+ISQQYEQIACQUCRDtM9QIbDAAKCRCl0y8BJkmlqbecAJ9B UdSKVg9H+fQNyP5sbOjj4RDtdACfXHrRHa2+XjJP0dhpvJ8IfvYnQsU= =fAJZ -----END PGP PUBLIC KEY BLOCK----- nipype-1.7.0/docker/files/run_builddocs.sh000066400000000000000000000004351413403311400205600ustar00rootroot00000000000000#!/bin/bash set -e set -x set -u WORKDIR=${WORK:-/work} mkdir -p ${WORKDIR}/docs make html 2>&1 | tee ${WORKDIR}/builddocs.log cp -r /src/nipype/doc/_build/html/* ${WORKDIR}/docs/ cat ${WORKDIR}/builddocs.log && if grep -q "ERROR" ${WORKDIR}/builddocs.log; then false; else true; fi nipype-1.7.0/docker/files/run_examples.sh000066400000000000000000000030121413403311400204200ustar00rootroot00000000000000#!/bin/bash set -e set -x set -u WORKDIR=${WORKDIR:-/work} arr=$@ tmp_var=$( IFS=$' '; echo "${arr[*]}" ) example_id=${tmp_var//[^A-Za-z0-9_-]/_} mkdir -p ${HOME}/.nipype ${WORKDIR}/logs/example_${example_id} ${WORKDIR}/tests ${WORKDIR}/crashfiles echo "[logging]" > ${HOME}/.nipype/nipype.cfg echo "workflow_level = DEBUG" >> ${HOME}/.nipype/nipype.cfg echo "interface_level = DEBUG" >> ${HOME}/.nipype/nipype.cfg echo "utils_level = DEBUG" >> ${HOME}/.nipype/nipype.cfg echo "log_to_file = true" >> ${HOME}/.nipype/nipype.cfg echo "log_directory = ${WORKDIR}/logs/example_${example_id}" >> ${HOME}/.nipype/nipype.cfg echo '[execution]' >> 
${HOME}/.nipype/nipype.cfg echo 'crashfile_format = txt' >> ${HOME}/.nipype/nipype.cfg if [[ "${NIPYPE_RESOURCE_MONITOR:-0}" == "1" ]]; then echo '[monitoring]' >> ${HOME}/.nipype/nipype.cfg echo 'enabled = true' >> ${HOME}/.nipype/nipype.cfg echo 'sample_frequency = 3' >> ${HOME}/.nipype/nipype.cfg fi # Set up coverage export COVERAGE_FILE=${WORKDIR}/tests/.coverage.${example_id} if [ "$2" == "MultiProc" ]; then echo "concurrency = multiprocessing" >> /src/nipype/.coveragerc fi coverage run /src/nipype/tools/run_examples.py $@ exit_code=$? if [[ "${NIPYPE_RESOURCE_MONITOR:-0}" == "1" ]]; then cp resource_monitor.json 2>/dev/null ${WORKDIR}/logs/example_${example_id}/ || : fi # Collect crashfiles and generate xml report coverage xml -o ${WORKDIR}/tests/smoketest_${example_id}.xml find /work -maxdepth 1 -name "crash-*" -exec mv {} ${WORKDIR}/crashfiles/ \; exit $exit_code nipype-1.7.0/docker/files/run_pytests.sh000066400000000000000000000024721413403311400203260ustar00rootroot00000000000000#!/bin/bash set -e set -x set -u TESTPATH=${1:-/src/nipype/nipype} WORKDIR=${WORK:-/work} PYTHON_VERSION=$( python -c "import sys; print('{}{}'.format(sys.version_info[0], sys.version_info[1]))" ) # Create necessary directories mkdir -p ${WORKDIR}/tests ${WORKDIR}/crashfiles ${WORKDIR}/logs/py${PYTHON_VERSION} # Create a nipype config file mkdir -p ${HOME}/.nipype echo '[logging]' > ${HOME}/.nipype/nipype.cfg echo 'log_to_file = true' >> ${HOME}/.nipype/nipype.cfg echo "log_directory = ${WORKDIR}/logs/py${PYTHON_VERSION}" >> ${HOME}/.nipype/nipype.cfg echo '[execution]' >> ${HOME}/.nipype/nipype.cfg echo 'crashfile_format = txt' >> ${HOME}/.nipype/nipype.cfg if [[ "${NIPYPE_RESOURCE_MONITOR:-0}" == "1" ]]; then echo 'resource_monitor = true' >> ${HOME}/.nipype/nipype.cfg fi # Run tests using pytest export COVERAGE_FILE=${WORKDIR}/tests/.coverage.py${PYTHON_VERSION} py.test -v --junitxml=${WORKDIR}/tests/pytests_py${PYTHON_VERSION}.xml \ --cov nipype --cov-config 
/src/nipype/.coveragerc \ --cov-report xml:${WORKDIR}/tests/coverage_py${PYTHON_VERSION}.xml \ -n auto \ -c ${TESTPATH}/pytest.ini ${TESTPATH} exit_code=$? # Collect crashfiles find ${WORKDIR} -maxdepth 1 -name "crash-*" -exec mv {} ${WORKDIR}/crashfiles/ \; echo "Unit tests finished with exit code ${exit_code}" exit ${exit_code} nipype-1.7.0/docker/generate_dockerfiles.sh000077500000000000000000000107531413403311400207750ustar00rootroot00000000000000#!/usr/bin/env bash # # Generate base and main Dockerfiles for Nipype. set -e USAGE="usage: $(basename $0) [-h] [-b] [-m]" function Help { cat <&2 exit 1 ;; esac done # neurodocker version 0.5.0-3-g1788917 NEURODOCKER_IMAGE="kaczmarj/neurodocker:master@sha256:ac2085702daac716481daae5da055e2062be52075f8f3881672e958e0cd53e6b" # neurodebian:stretch-non-free pulled on September 19, 2018 BASE_IMAGE="neurodebian:stretch-non-free@sha256:7cd978427d7ad215834fee221d0536ed7825b3cddebc481eba2d792dfc2f7332" NIPYPE_BASE_IMAGE="nipype/nipype:base" PKG_MANAGER="apt" DIR="$(dirname "$0")" function generate_base_dockerfile() { docker run --rm "$NEURODOCKER_IMAGE" generate docker \ --base "$BASE_IMAGE" --pkg-manager "$PKG_MANAGER" \ --label maintainer="The nipype developers https://github.com/nipy/nipype" \ --spm12 version=r7219 \ --env 'LD_LIBRARY_PATH=/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH' \ --freesurfer version=6.0.0-min \ --dcm2niix version=v1.0.20190902 method=source \ --run 'echo "cHJpbnRmICJrcnp5c3p0b2YuZ29yZ29sZXdza2lAZ21haWwuY29tCjUxNzIKICpDdnVtdkVWM3pUZmcKRlM1Si8yYzFhZ2c0RQoiID4gL29wdC9mcmVlc3VyZmVyLTYuMC4wLW1pbi9saWNlbnNlLnR4dA==" | base64 -d | sh' \ --install afni ants apt-utils bzip2 convert3d file fsl-core \ fsl-mni152-templates fusefat g++ git graphviz make python ruby \ unzip xvfb git-annex-standalone liblzma-dev \ --add-to-entrypoint "source /etc/fsl/fsl.sh && source /etc/afni/afni.sh" \ --env ANTSPATH='/usr/lib/ants' \ PATH='/usr/lib/ants:$PATH' \ --run "gem install fakes3" \ > "$DIR/Dockerfile.base" } function 
generate_main_dockerfile() { docker run --rm "$NEURODOCKER_IMAGE" generate docker \ --base "$NIPYPE_BASE_IMAGE" --pkg-manager "$PKG_MANAGER" \ --label maintainer="The nipype developers https://github.com/nipy/nipype" \ --env MKL_NUM_THREADS=1 \ OMP_NUM_THREADS=1 \ --arg PYTHON_VERSION_MAJOR=3 PYTHON_VERSION_MINOR=8 BUILD_DATE VCS_REF VERSION \ --user neuro \ --run 'git config --global user.name nipybot && git config --global user.email "nipybot@gmail.com"' \ --workdir /home/neuro \ --miniconda create_env=neuro \ conda_install='python=${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR} libxml2 libxslt matplotlib mkl "numpy!=1.16.0" paramiko pandas psutil scikit-learn scipy traits rdflib' \ pip_install="pytest-xdist" \ activate=true \ --copy docker/files/run_builddocs.sh docker/files/run_examples.sh \ docker/files/run_pytests.sh nipype/external/fsl_imglob.py /usr/bin/ \ --copy . /src/nipype \ --user root \ --run 'chown -R neuro /src && chmod +x /usr/bin/fsl_imglob.py /usr/bin/run_*.sh && . /etc/fsl/fsl.sh && ln -sf /usr/bin/fsl_imglob.py ${FSLDIR}/bin/imglob && mkdir /work && chown neuro /work' \ --user neuro \ --miniconda use_env=neuro \ pip_opts="-e" \ pip_install="/src/nipype[all] https://github.com/bids-standard/pybids/tarball/0.7.0" \ --miniconda use_env=neuro \ pip_install='"niflow-nipype1-workflows>=0.4.0"' \ --workdir /work \ --label org.label-schema.build-date='$BUILD_DATE' \ org.label-schema.name="NIPYPE" \ org.label-schema.description="NIPYPE - Neuroimaging in Python: Pipelines and Interfaces" \ org.label-schema.url="http://nipype.readthedocs.io" \ org.label-schema.vcs-ref='$VCS_REF' \ org.label-schema.vcs-url="https://github.com/nipy/nipype" \ org.label-schema.version='$VERSION' \ org.label-schema.schema-version="1.0" } if [ "$GENERATE_BASE" == 1 ]; then generate_base_dockerfile > "$DIR/Dockerfile.base" fi if [ "$GENERATE_MAIN" == 1 ]; then generate_main_dockerfile > "$DIR/../Dockerfile" fi 
nipype-1.7.0/docker/prune_dockerfile.sh000066400000000000000000000003321413403311400201360ustar00rootroot00000000000000#!/usr/bin/env bash if [ -z "$1" ]; then echo "Usage: $(basename $0) " exit 1 fi # Remove empty lines, comments, and timestamp. sed -e '/\s*#.*$/d' -e '/^\s*$/d' -e '/generation_timestamp/d' "$1" nipype-1.7.0/examples/000077500000000000000000000000001413403311400146335ustar00rootroot00000000000000nipype-1.7.0/examples/README.md000066400000000000000000000006271413403311400161170ustar00rootroot00000000000000The examples directory previously held a set of [literate programming](https://en.wikipedia.org/wiki/Literate_programming) documents that demonstrated solutions to various problems using Nipype. These examples have been moved to the [Nipype1 Examples Niflow](https://github.com/niflows/nipype1-examples). Please refer to that repository for more information, and report any issues with the examples there. nipype-1.7.0/nipype/000077500000000000000000000000001413403311400143215ustar00rootroot00000000000000nipype-1.7.0/nipype/COMMIT_INFO.txt000066400000000000000000000004071413403311400166660ustar00rootroot00000000000000# This is an ini file that may contain information about the code state [commit hash] # The line below may contain a valid hash if it has been substituted during 'git archive' archive_subst_hash=%h # This line may be modified by the install process install_hash= nipype-1.7.0/nipype/__init__.py000066400000000000000000000047171413403311400164430ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Information on specific functions, classes, and methods. 
:Release: |version| :Date: |today| Top-level module API -------------------- """ import os from distutils.version import LooseVersion from .info import URL as __url__, STATUS as __status__, __version__ from .utils.config import NipypeConfig from .utils.logger import Logging from .refs import due from .pkg_info import get_pkg_info as _get_pkg_info try: import faulthandler faulthandler.enable() except (ImportError, IOError) as e: pass config = NipypeConfig() logging = Logging(config) class NipypeTester(object): def __call__(self, doctests=True, parallel=False): try: import pytest except ImportError: raise RuntimeError("py.test not installed, run: pip install pytest") args = [] if not doctests: args.extend(["-p", "no:doctest"]) if parallel: try: import xdist except ImportError: raise RuntimeError("pytest-xdist required for parallel run") args.append("-n auto") args.append(os.path.dirname(__file__)) pytest.main(args=args) test = NipypeTester() def get_info(): """Returns package information""" return _get_pkg_info(os.path.dirname(__file__)) from .pipeline import Node, MapNode, JoinNode, Workflow from .interfaces import ( DataGrabber, DataSink, SelectFiles, IdentityInterface, Rename, Function, Select, Merge, ) def check_latest_version(raise_exception=False): """ Check for the latest version of the library. 
Parameters ---------- raise_exception: bool Raise a RuntimeError if a bad version is being used """ import etelemetry logger = logging.getLogger("nipype.utils") return etelemetry.check_available_version( "nipy/nipype", __version__, logger, raise_exception ) # Run telemetry on import for interactive sessions, such as IPython, Jupyter notebooks, Python REPL if config.getboolean("execution", "check_version"): import __main__ if not hasattr(__main__, "__file__") and "NIPYPE_NO_ET" not in os.environ: from .interfaces.base import BaseInterface if BaseInterface._etelemetry_version_data is None: BaseInterface._etelemetry_version_data = check_latest_version() nipype-1.7.0/nipype/algorithms/000077500000000000000000000000001413403311400164725ustar00rootroot00000000000000nipype-1.7.0/nipype/algorithms/__init__.py000066400000000000000000000004041413403311400206010ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Package contains pure python neuroimaging algorithms Exaples: artifactdetect """ __docformat__ = "restructuredtext" nipype-1.7.0/nipype/algorithms/confounds.py000066400000000000000000001477161413403311400210620ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Algorithms to compute confounds in :abbr:`fMRI (functional MRI)` """ import os import os.path as op from collections import OrderedDict from itertools import chain import nibabel as nb import numpy as np from numpy.polynomial import Legendre from .. 
import config, logging from ..external.due import BibTeX from ..interfaces.base import ( traits, TraitedSpec, BaseInterface, BaseInterfaceInputSpec, File, isdefined, InputMultiPath, OutputMultiPath, SimpleInterface, ) from ..utils.misc import normalize_mc_params IFLOGGER = logging.getLogger("nipype.interface") def fallback_svd(a, full_matrices=True, compute_uv=True): try: return np.linalg.svd(a, full_matrices=full_matrices, compute_uv=compute_uv) except np.linalg.LinAlgError: pass from scipy.linalg import svd return svd( a, full_matrices=full_matrices, compute_uv=compute_uv, lapack_driver="gesvd" ) class ComputeDVARSInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc="functional data, after HMC") in_mask = File(exists=True, mandatory=True, desc="a brain mask") remove_zerovariance = traits.Bool( True, usedefault=True, desc="remove voxels with zero variance" ) save_std = traits.Bool(True, usedefault=True, desc="save standardized DVARS") save_nstd = traits.Bool(False, usedefault=True, desc="save non-standardized DVARS") save_vxstd = traits.Bool( False, usedefault=True, desc="save voxel-wise standardized DVARS" ) save_all = traits.Bool(False, usedefault=True, desc="output all DVARS") series_tr = traits.Float(desc="repetition time in sec.") save_plot = traits.Bool(False, usedefault=True, desc="write DVARS plot") figdpi = traits.Int(100, usedefault=True, desc="output dpi for the plot") figsize = traits.Tuple( traits.Float(11.7), traits.Float(2.3), usedefault=True, desc="output figure size", ) figformat = traits.Enum( "png", "pdf", "svg", usedefault=True, desc="output format for figures" ) intensity_normalization = traits.Float( 1000.0, usedefault=True, desc="Divide value in each voxel at each timepoint " "by the median calculated across all voxels" "and timepoints within the mask (if specified)" "and then multiply by the value specified by" "this parameter. 
By using the default (1000)" "output DVARS will be expressed in " "x10 % BOLD units compatible with Power et al." "2012. Set this to 0 to disable intensity" "normalization altogether.", ) class ComputeDVARSOutputSpec(TraitedSpec): out_std = File(exists=True, desc="output text file") out_nstd = File(exists=True, desc="output text file") out_vxstd = File(exists=True, desc="output text file") out_all = File(exists=True, desc="output text file") avg_std = traits.Float() avg_nstd = traits.Float() avg_vxstd = traits.Float() fig_std = File(exists=True, desc="output DVARS plot") fig_nstd = File(exists=True, desc="output DVARS plot") fig_vxstd = File(exists=True, desc="output DVARS plot") class ComputeDVARS(BaseInterface): """ Computes the DVARS. """ input_spec = ComputeDVARSInputSpec output_spec = ComputeDVARSOutputSpec _references = [ { "entry": BibTeX( """\ @techreport{nichols_notes_2013, address = {Coventry, UK}, title = {Notes on {Creating} a {Standardized} {Version} of {DVARS}}, url = {http://www2.warwick.ac.uk/fac/sci/statistics/staff/academic-\ research/nichols/scripts/fsl/standardizeddvars.pdf}, urldate = {2016-08-16}, institution = {University of Warwick}, author = {Nichols, Thomas}, year = {2013} }""" ), "tags": ["method"], }, { "entry": BibTeX( """\ @article{power_spurious_2012, title = {Spurious but systematic correlations in functional connectivity {MRI} networks \ arise from subject motion}, volume = {59}, doi = {10.1016/j.neuroimage.2011.10.018}, number = {3}, urldate = {2016-08-16}, journal = {NeuroImage}, author = {Power, Jonathan D. and Barnes, Kelly A. and Snyder, Abraham Z. and Schlaggar, \ Bradley L. 
and Petersen, Steven E.}, year = {2012}, pages = {2142--2154}, } """ ), "tags": ["method"], }, ] def __init__(self, **inputs): self._results = {} super(ComputeDVARS, self).__init__(**inputs) def _gen_fname(self, suffix, ext=None): fname, in_ext = op.splitext(op.basename(self.inputs.in_file)) if in_ext == ".gz": fname, in_ext2 = op.splitext(fname) in_ext = in_ext2 + in_ext if ext is None: ext = in_ext if ext.startswith("."): ext = ext[1:] return op.abspath("{}_{}.{}".format(fname, suffix, ext)) def _run_interface(self, runtime): dvars = compute_dvars( self.inputs.in_file, self.inputs.in_mask, remove_zerovariance=self.inputs.remove_zerovariance, intensity_normalization=self.inputs.intensity_normalization, ) ( self._results["avg_std"], self._results["avg_nstd"], self._results["avg_vxstd"], ) = np.mean(dvars, axis=1).astype(float) tr = None if isdefined(self.inputs.series_tr): tr = self.inputs.series_tr if self.inputs.save_std: out_file = self._gen_fname("dvars_std", ext="tsv") np.savetxt(out_file, dvars[0], fmt=b"%0.6f") self._results["out_std"] = out_file if self.inputs.save_plot: self._results["fig_std"] = self._gen_fname( "dvars_std", ext=self.inputs.figformat ) fig = plot_confound( dvars[0], self.inputs.figsize, "Standardized DVARS", series_tr=tr ) fig.savefig( self._results["fig_std"], dpi=float(self.inputs.figdpi), format=self.inputs.figformat, bbox_inches="tight", ) fig.clf() if self.inputs.save_nstd: out_file = self._gen_fname("dvars_nstd", ext="tsv") np.savetxt(out_file, dvars[1], fmt=b"%0.6f") self._results["out_nstd"] = out_file if self.inputs.save_plot: self._results["fig_nstd"] = self._gen_fname( "dvars_nstd", ext=self.inputs.figformat ) fig = plot_confound( dvars[1], self.inputs.figsize, "DVARS", series_tr=tr ) fig.savefig( self._results["fig_nstd"], dpi=float(self.inputs.figdpi), format=self.inputs.figformat, bbox_inches="tight", ) fig.clf() if self.inputs.save_vxstd: out_file = self._gen_fname("dvars_vxstd", ext="tsv") np.savetxt(out_file, dvars[2], 
fmt=b"%0.6f") self._results["out_vxstd"] = out_file if self.inputs.save_plot: self._results["fig_vxstd"] = self._gen_fname( "dvars_vxstd", ext=self.inputs.figformat ) fig = plot_confound( dvars[2], self.inputs.figsize, "Voxelwise std DVARS", series_tr=tr ) fig.savefig( self._results["fig_vxstd"], dpi=float(self.inputs.figdpi), format=self.inputs.figformat, bbox_inches="tight", ) fig.clf() if self.inputs.save_all: out_file = self._gen_fname("dvars", ext="tsv") np.savetxt( out_file, np.vstack(dvars).T, fmt=b"%0.8f", delimiter=b"\t", header="std DVARS\tnon-std DVARS\tvx-wise std DVARS", comments="", ) self._results["out_all"] = out_file return runtime def _list_outputs(self): return self._results class FramewiseDisplacementInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc="motion parameters") parameter_source = traits.Enum( "FSL", "AFNI", "SPM", "FSFAST", "NIPY", desc="Source of movement parameters", mandatory=True, ) radius = traits.Float( 50, usedefault=True, desc="radius in mm to calculate angular FDs, 50mm is the " "default since it is used in Power et al. 2012", ) out_file = File("fd_power_2012.txt", usedefault=True, desc="output file name") out_figure = File("fd_power_2012.pdf", usedefault=True, desc="output figure name") series_tr = traits.Float(desc="repetition time in sec.") save_plot = traits.Bool(False, usedefault=True, desc="write FD plot") normalize = traits.Bool(False, usedefault=True, desc="calculate FD in mm/s") figdpi = traits.Int(100, usedefault=True, desc="output dpi for the FD plot") figsize = traits.Tuple( traits.Float(11.7), traits.Float(2.3), usedefault=True, desc="output figure size", ) class FramewiseDisplacementOutputSpec(TraitedSpec): out_file = File(desc="calculated FD per timestep") out_figure = File(desc="output image file") fd_average = traits.Float(desc="average FD") class FramewiseDisplacement(BaseInterface): """ Calculate the :abbr:`FD (framewise displacement)` as in [Power2012]_. 
This implementation reproduces the calculation in fsl_motion_outliers .. [Power2012] Power et al., Spurious but systematic correlations in functional connectivity MRI networks arise from subject motion, NeuroImage 59(3), 2012. doi:`10.1016/j.neuroimage.2011.10.018 `_. """ input_spec = FramewiseDisplacementInputSpec output_spec = FramewiseDisplacementOutputSpec _references = [ { "entry": BibTeX( """\ @article{power_spurious_2012, title = {Spurious but systematic correlations in functional connectivity {MRI} networks \ arise from subject motion}, volume = {59}, doi = {10.1016/j.neuroimage.2011.10.018}, number = {3}, urldate = {2016-08-16}, journal = {NeuroImage}, author = {Power, Jonathan D. and Barnes, Kelly A. and Snyder, Abraham Z. and Schlaggar, \ Bradley L. and Petersen, Steven E.}, year = {2012}, pages = {2142--2154}, } """ ), "tags": ["method"], } ] def _run_interface(self, runtime): mpars = np.loadtxt(self.inputs.in_file) # mpars is N_t x 6 mpars = np.apply_along_axis( func1d=normalize_mc_params, axis=1, arr=mpars, source=self.inputs.parameter_source, ) diff = mpars[:-1, :6] - mpars[1:, :6] diff[:, 3:6] *= self.inputs.radius fd_res = np.abs(diff).sum(axis=1) self._results = { "out_file": op.abspath(self.inputs.out_file), "fd_average": float(fd_res.mean()), } np.savetxt( self.inputs.out_file, fd_res, header="FramewiseDisplacement", comments="" ) if self.inputs.save_plot: tr = None if isdefined(self.inputs.series_tr): tr = self.inputs.series_tr if self.inputs.normalize and tr is None: IFLOGGER.warning("FD plot cannot be normalized if TR is not set") self._results["out_figure"] = op.abspath(self.inputs.out_figure) fig = plot_confound( fd_res, self.inputs.figsize, "FD", units="mm", series_tr=tr, normalize=self.inputs.normalize, ) fig.savefig( self._results["out_figure"], dpi=float(self.inputs.figdpi), format=self.inputs.out_figure[-3:], bbox_inches="tight", ) fig.clf() return runtime def _list_outputs(self): return self._results class 
CompCorInputSpec(BaseInterfaceInputSpec): realigned_file = File( exists=True, mandatory=True, desc="already realigned brain image (4D)" ) mask_files = InputMultiPath( File(exists=True), desc=( "One or more mask files that determines " "ROI (3D). When more that one file is " "provided ``merge_method`` or " "``merge_index`` must be provided" ), ) merge_method = traits.Enum( "union", "intersect", "none", xor=["mask_index"], requires=["mask_files"], desc=( "Merge method if multiple masks are " "present - ``union`` uses voxels included in" " at least one input mask, ``intersect`` " "uses only voxels present in all input " "masks, ``none`` performs CompCor on " "each mask individually" ), ) mask_index = traits.Range( low=0, xor=["merge_method"], requires=["mask_files"], desc="Position of mask in ``mask_files`` to use - first is the default.", ) mask_names = traits.List( traits.Str, desc="Names for provided masks (for printing into metadata). " "If provided, it must be as long as the final mask list " "(after any merge and indexing operations).", ) components_file = traits.Str( "components_file.txt", usedefault=True, desc="Filename to store physiological components", ) num_components = traits.Either( "all", traits.Range(low=1), xor=["variance_threshold"], desc="Number of components to return from the decomposition. If " "``num_components`` is ``all``, then all components will be " "retained.", ) # 6 for BOLD, 4 for ASL # automatically instantiated to 6 in CompCor below if neither # ``num_components`` nor ``variance_threshold`` is defined (for # backward compatibility) variance_threshold = traits.Range( low=0.0, high=1.0, exclude_low=True, exclude_high=True, xor=["num_components"], desc="Select the number of components to be returned automatically " "based on their ability to explain variance in the dataset. 
" "``variance_threshold`` is a fractional value between 0 and 1; " "the number of components retained will be equal to the minimum " "number of components necessary to explain the provided " "fraction of variance in the masked time series.", ) pre_filter = traits.Enum( "polynomial", "cosine", False, usedefault=True, desc="Detrend time series prior to component " "extraction", ) use_regress_poly = traits.Bool( deprecated="0.15.0", new_name="pre_filter", desc=("use polynomial regression " "pre-component extraction"), ) regress_poly_degree = traits.Range( low=1, value=1, usedefault=True, desc="the degree polynomial to use" ) header_prefix = traits.Str( desc=( "the desired header for the output tsv " "file (one column). If undefined, will " 'default to "CompCor"' ) ) high_pass_cutoff = traits.Float( 128, usedefault=True, desc='Cutoff (in seconds) for "cosine" pre-filter' ) repetition_time = traits.Float( desc="Repetition time (TR) of series - derived from image header if " "unspecified" ) save_pre_filter = traits.Either( traits.Bool, File, default=False, usedefault=True, desc="Save pre-filter basis as text file", ) save_metadata = traits.Either( traits.Bool, File, default=False, usedefault=True, desc="Save component metadata as text file", ) ignore_initial_volumes = traits.Range( low=0, usedefault=True, desc="Number of volumes at start of series to ignore" ) failure_mode = traits.Enum( "error", "NaN", usedefault=True, desc="When no components are found or convergence fails, raise an error " "or silently return columns of NaNs.", ) class CompCorOutputSpec(TraitedSpec): components_file = File( exists=True, desc="text file containing the noise components" ) pre_filter_file = File(desc="text file containing high-pass filter basis") metadata_file = File(desc="text file containing component metadata") class CompCor(SimpleInterface): """ Interface with core CompCor computation, used in aCompCor and tCompCor. 
CompCor provides three pre-filter options, all of which include per-voxel mean removal: - ``'polynomial'``: Legendre polynomial basis - ``'cosine'``: Discrete cosine basis - ``False``: mean-removal only In the case of ``polynomial`` and ``cosine`` filters, a pre-filter file may be saved with a row for each volume/timepoint, and a column for each non-constant regressor. If no non-constant (mean-removal) columns are used, this file may be empty. If ``ignore_initial_volumes`` is set, then the specified number of initial volumes are excluded both from pre-filtering and CompCor component extraction. Each column in the components and pre-filter files are prefixe with zeros for each excluded volume so that the number of rows continues to match the number of volumes in the input file. In addition, for each excluded volume, a column is added to the pre-filter file with a 1 in the corresponding row. Example ------- >>> ccinterface = CompCor() >>> ccinterface.inputs.realigned_file = 'functional.nii' >>> ccinterface.inputs.mask_files = 'mask.nii' >>> ccinterface.inputs.num_components = 1 >>> ccinterface.inputs.pre_filter = 'polynomial' >>> ccinterface.inputs.regress_poly_degree = 2 """ input_spec = CompCorInputSpec output_spec = CompCorOutputSpec _references = [ { "tags": ["method", "implementation"], "entry": BibTeX( """\ @article{compcor_2007, title = {A component based noise correction method (CompCor) for BOLD and perfusion based}, volume = {37}, number = {1}, doi = {10.1016/j.neuroimage.2007.04.042}, urldate = {2016-08-13}, journal = {NeuroImage}, author = {Behzadi, Yashar and Restom, Khaled and Liau, Joy and Liu, Thomas T.}, year = {2007}, pages = {90-101} }""" ), } ] def __init__(self, *args, **kwargs): """exactly the same as compcor except the header""" super(CompCor, self).__init__(*args, **kwargs) self._header = "CompCor" def _run_interface(self, runtime): mask_images = [] if isdefined(self.inputs.mask_files): mask_images = combine_mask_files( self.inputs.mask_files, 
self.inputs.merge_method, self.inputs.mask_index ) if self.inputs.use_regress_poly: self.inputs.pre_filter = "polynomial" # Degree 0 == remove mean; see compute_noise_components degree = ( self.inputs.regress_poly_degree if self.inputs.pre_filter == "polynomial" else 0 ) imgseries = nb.load(self.inputs.realigned_file) if len(imgseries.shape) != 4: raise ValueError( "{} expected a 4-D nifti file. Input {} has " "{} dimensions (shape {})".format( self._header, self.inputs.realigned_file, len(imgseries.shape), imgseries.shape, ) ) if len(mask_images) == 0: img = nb.Nifti1Image( np.ones(imgseries.shape[:3], dtype=bool), affine=imgseries.affine, header=imgseries.header, ) mask_images = [img] skip_vols = self.inputs.ignore_initial_volumes if skip_vols: imgseries = imgseries.__class__( imgseries.dataobj[..., skip_vols:], imgseries.affine, imgseries.header ) mask_images = self._process_masks(mask_images, imgseries.dataobj) TR = 0 if self.inputs.pre_filter == "cosine": if isdefined(self.inputs.repetition_time): TR = self.inputs.repetition_time else: # Derive TR from NIfTI header, if possible try: TR = imgseries.header.get_zooms()[3] if imgseries.header.get_xyzt_units()[1] == "msec": TR /= 1000 except (AttributeError, IndexError): TR = 0 if TR == 0: raise ValueError( "{} cannot detect repetition time from image - " "Set the repetition_time input".format(self._header) ) if isdefined(self.inputs.variance_threshold): components_criterion = self.inputs.variance_threshold elif isdefined(self.inputs.num_components): components_criterion = self.inputs.num_components else: components_criterion = 6 IFLOGGER.warning( "`num_components` and `variance_threshold` are " "not defined. Setting number of components to 6 " "for backward compatibility. Please set either " "`num_components` or `variance_threshold`, as " "this feature may be deprecated in the future." 
) components, filter_basis, metadata = compute_noise_components( imgseries.get_fdata(dtype=np.float32), mask_images, components_criterion, self.inputs.pre_filter, degree, self.inputs.high_pass_cutoff, TR, self.inputs.failure_mode, self.inputs.mask_names, ) if skip_vols: old_comp = components nrows = skip_vols + components.shape[0] components = np.zeros((nrows, components.shape[1]), dtype=components.dtype) components[skip_vols:] = old_comp components_file = os.path.join(os.getcwd(), self.inputs.components_file) components_header = self._make_headers(components.shape[1]) np.savetxt( components_file, components, fmt=b"%.10f", delimiter="\t", header="\t".join(components_header), comments="", ) self._results["components_file"] = os.path.join( runtime.cwd, self.inputs.components_file ) save_pre_filter = False if self.inputs.pre_filter in ["polynomial", "cosine"]: save_pre_filter = self.inputs.save_pre_filter if save_pre_filter: self._results["pre_filter_file"] = save_pre_filter if save_pre_filter is True: self._results["pre_filter_file"] = os.path.join( runtime.cwd, "pre_filter.tsv" ) ftype = {"polynomial": "Legendre", "cosine": "Cosine"}[ self.inputs.pre_filter ] ncols = filter_basis.shape[1] if filter_basis.size > 0 else 0 header = ["{}{:02d}".format(ftype, i) for i in range(ncols)] if skip_vols: old_basis = filter_basis # nrows defined above filter_basis = np.zeros( (nrows, ncols + skip_vols), dtype=filter_basis.dtype ) if old_basis.size > 0: filter_basis[skip_vols:, :ncols] = old_basis filter_basis[:skip_vols, -skip_vols:] = np.eye(skip_vols) header.extend( ["NonSteadyStateOutlier{:02d}".format(i) for i in range(skip_vols)] ) np.savetxt( self._results["pre_filter_file"], filter_basis, fmt=b"%.10f", delimiter="\t", header="\t".join(header), comments="", ) metadata_file = self.inputs.save_metadata if metadata_file: self._results["metadata_file"] = metadata_file if metadata_file is True: self._results["metadata_file"] = os.path.join( runtime.cwd, 
"component_metadata.tsv" ) components_names = np.empty(len(metadata["mask"]), dtype="object_") retained = np.where(metadata["retained"]) not_retained = np.where(np.logical_not(metadata["retained"])) components_names[retained] = components_header components_names[not_retained] = [ "dropped{}".format(i) for i in range(len(not_retained[0])) ] with open(self._results["metadata_file"], "w") as f: f.write("\t".join(["component"] + list(metadata.keys())) + "\n") for i in zip(components_names, *metadata.values()): f.write( "{0[0]}\t{0[1]}\t{0[2]:.10f}\t" "{0[3]:.10f}\t{0[4]:.10f}\t{0[5]}\n".format(i) ) return runtime def _process_masks(self, mask_images, timeseries=None): return mask_images def _make_headers(self, num_col): header = ( self.inputs.header_prefix if isdefined(self.inputs.header_prefix) else self._header ) headers = ["{}{:02d}".format(header, i) for i in range(num_col)] return headers class ACompCor(CompCor): """ Anatomical compcor: for inputs and outputs, see CompCor. When the mask provided is an anatomical mask, then CompCor is equivalent to ACompCor. """ def __init__(self, *args, **kwargs): """exactly the same as compcor except the header""" super(ACompCor, self).__init__(*args, **kwargs) self._header = "aCompCor" class TCompCorInputSpec(CompCorInputSpec): # and all the fields in CompCorInputSpec percentile_threshold = traits.Range( low=0.0, high=1.0, value=0.02, exclude_low=True, exclude_high=True, usedefault=True, desc="the percentile " "used to select highest-variance " "voxels, represented by a number " "between 0 and 1, exclusive. By " "default, this value is set to .02. " "That is, the 2% of voxels " "with the highest variance are used.", ) class TCompCorOutputSpec(CompCorOutputSpec): # and all the fields in CompCorOutputSpec high_variance_masks = OutputMultiPath( File(exists=True), desc=(("voxels exceeding the variance" " threshold")) ) class TCompCor(CompCor): """ Interface for tCompCor. Computes a ROI mask based on variance of voxels. 
Example ------- >>> ccinterface = TCompCor() >>> ccinterface.inputs.realigned_file = 'functional.nii' >>> ccinterface.inputs.mask_files = 'mask.nii' >>> ccinterface.inputs.num_components = 1 >>> ccinterface.inputs.pre_filter = 'polynomial' >>> ccinterface.inputs.regress_poly_degree = 2 >>> ccinterface.inputs.percentile_threshold = .03 """ input_spec = TCompCorInputSpec output_spec = TCompCorOutputSpec def __init__(self, *args, **kwargs): """exactly the same as compcor except the header""" super(TCompCor, self).__init__(*args, **kwargs) self._header = "tCompCor" self._mask_files = [] def _process_masks(self, mask_images, timeseries=None): out_images = [] self._mask_files = [] timeseries = np.asanyarray(timeseries) for i, img in enumerate(mask_images): mask = np.asanyarray(img.dataobj).astype(bool) imgseries = timeseries[mask, :] imgseries = regress_poly(2, imgseries)[0] tSTD = _compute_tSTD(imgseries, 0, axis=-1) threshold_std = np.percentile( tSTD, np.round(100.0 * (1.0 - self.inputs.percentile_threshold)).astype(int), ) mask_data = np.zeros_like(mask) mask_data[mask != 0] = tSTD >= threshold_std out_image = nb.Nifti1Image(mask_data, affine=img.affine, header=img.header) # save mask mask_file = os.path.abspath("mask_{:03d}.nii.gz".format(i)) out_image.to_filename(mask_file) IFLOGGER.debug( "tCompcor computed and saved mask of shape %s to " "mask_file %s", str(mask.shape), mask_file, ) self._mask_files.append(mask_file) out_images.append(out_image) return out_images def _list_outputs(self): outputs = super(TCompCor, self)._list_outputs() outputs["high_variance_masks"] = self._mask_files return outputs class TSNRInputSpec(BaseInterfaceInputSpec): in_file = InputMultiPath( File(exists=True), mandatory=True, desc="realigned 4D file or a list of 3D files", ) regress_poly = traits.Range(low=1, desc="Remove polynomials") tsnr_file = File( "tsnr.nii.gz", usedefault=True, hash_files=False, desc="output tSNR file" ) mean_file = File( "mean.nii.gz", usedefault=True, 
hash_files=False, desc="output mean file" ) stddev_file = File( "stdev.nii.gz", usedefault=True, hash_files=False, desc="output tSNR file" ) detrended_file = File( "detrend.nii.gz", usedefault=True, hash_files=False, desc="input file after detrending", ) class TSNROutputSpec(TraitedSpec): tsnr_file = File(exists=True, desc="tsnr image file") mean_file = File(exists=True, desc="mean image file") stddev_file = File(exists=True, desc="std dev image file") detrended_file = File(desc="detrended input file") class TSNR(BaseInterface): """ Computes the time-course SNR for a time series Typically you want to run this on a realigned time-series. Example ------- >>> tsnr = TSNR() >>> tsnr.inputs.in_file = 'functional.nii' >>> res = tsnr.run() # doctest: +SKIP """ input_spec = TSNRInputSpec output_spec = TSNROutputSpec def _run_interface(self, runtime): img = nb.load(self.inputs.in_file[0]) header = img.header.copy() vollist = [nb.load(filename) for filename in self.inputs.in_file] data = np.concatenate( [ vol.get_fdata(dtype=np.float32).reshape(vol.shape[:3] + (-1,)) for vol in vollist ], axis=3, ) data = np.nan_to_num(data) if data.dtype.kind == "i": header.set_data_dtype(np.float32) data = data.astype(np.float32) if isdefined(self.inputs.regress_poly): data = regress_poly(self.inputs.regress_poly, data, remove_mean=False)[0] img = nb.Nifti1Image(data, img.affine, header) nb.save(img, op.abspath(self.inputs.detrended_file)) meanimg = np.mean(data, axis=3) stddevimg = np.std(data, axis=3) tsnr = np.zeros_like(meanimg) stddevimg_nonzero = stddevimg > 1.0e-3 tsnr[stddevimg_nonzero] = ( meanimg[stddevimg_nonzero] / stddevimg[stddevimg_nonzero] ) img = nb.Nifti1Image(tsnr, img.affine, header) nb.save(img, op.abspath(self.inputs.tsnr_file)) img = nb.Nifti1Image(meanimg, img.affine, header) nb.save(img, op.abspath(self.inputs.mean_file)) img = nb.Nifti1Image(stddevimg, img.affine, header) nb.save(img, op.abspath(self.inputs.stddev_file)) return runtime def _list_outputs(self): 
outputs = self._outputs().get() for k in ["tsnr_file", "mean_file", "stddev_file"]: outputs[k] = op.abspath(getattr(self.inputs, k)) if isdefined(self.inputs.regress_poly): outputs["detrended_file"] = op.abspath(self.inputs.detrended_file) return outputs class NonSteadyStateDetectorInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc="4D NIFTI EPI file") class NonSteadyStateDetectorOutputSpec(TraitedSpec): n_volumes_to_discard = traits.Int( desc="Number of non-steady state volumes" "detected in the beginning of the scan." ) class NonSteadyStateDetector(BaseInterface): """ Returns the number of non-steady state volumes detected at the beginning of the scan. """ input_spec = NonSteadyStateDetectorInputSpec output_spec = NonSteadyStateDetectorOutputSpec def _run_interface(self, runtime): in_nii = nb.load(self.inputs.in_file) global_signal = ( in_nii.dataobj[:, :, :, :50].mean(axis=0).mean(axis=0).mean(axis=0) ) self._results = {"n_volumes_to_discard": is_outlier(global_signal)} return runtime def _list_outputs(self): return self._results def compute_dvars( in_file, in_mask, remove_zerovariance=False, intensity_normalization=1000 ): """ Compute the :abbr:`DVARS (D referring to temporal derivative of timecourses, VARS referring to RMS variance over voxels)` [Power2012]_. Particularly, the *standardized* :abbr:`DVARS (D referring to temporal derivative of timecourses, VARS referring to RMS variance over voxels)` [Nichols2013]_ are computed. .. [Nichols2013] Nichols T, `Notes on creating a standardized version of DVARS `_, 2013. .. note:: Implementation details Uses the implementation of the `Yule-Walker equations from nitime `_ for the :abbr:`AR (auto-regressive)` filtering of the fMRI signal. :param numpy.ndarray func: functional data, after head-motion-correction. :param numpy.ndarray mask: a 3D mask of the brain :param bool output_all: write out all dvars :param str out_file: a path to which the standardized dvars should be saved. 
:return: the standardized DVARS """ import numpy as np import nibabel as nb from nitime.algorithms import AR_est_YW import warnings func = nb.load(in_file).get_fdata(dtype=np.float32) mask = np.asanyarray(nb.load(in_mask).dataobj).astype(np.uint8) if len(func.shape) != 4: raise RuntimeError("Input fMRI dataset should be 4-dimensional") idx = np.where(mask > 0) mfunc = func[idx[0], idx[1], idx[2], :] if intensity_normalization != 0: mfunc = (mfunc / np.median(mfunc)) * intensity_normalization # Robust standard deviation (we are using "lower" interpolation # because this is what FSL is doing func_sd = ( np.percentile(mfunc, 75, axis=1, interpolation="lower") - np.percentile(mfunc, 25, axis=1, interpolation="lower") ) / 1.349 if remove_zerovariance: mfunc = mfunc[func_sd != 0, :] func_sd = func_sd[func_sd != 0] # Compute (non-robust) estimate of lag-1 autocorrelation ar1 = np.apply_along_axis( AR_est_YW, 1, regress_poly(0, mfunc, remove_mean=True)[0].astype(np.float32), 1 )[:, 0] # Compute (predicted) standard deviation of temporal difference time series diff_sdhat = np.squeeze(np.sqrt(((1 - ar1) * 2).tolist())) * func_sd diff_sd_mean = diff_sdhat.mean() # Compute temporal difference time series func_diff = np.diff(mfunc, axis=1) # DVARS (no standardization) dvars_nstd = np.sqrt(np.square(func_diff).mean(axis=0)) # standardization dvars_stdz = dvars_nstd / diff_sd_mean with warnings.catch_warnings(): # catch, e.g., divide by zero errors warnings.filterwarnings("error") # voxelwise standardization diff_vx_stdz = np.square( func_diff / np.array([diff_sdhat] * func_diff.shape[-1]).T ) dvars_vx_stdz = np.sqrt(diff_vx_stdz.mean(axis=0)) return (dvars_stdz, dvars_nstd, dvars_vx_stdz) def plot_confound(tseries, figsize, name, units=None, series_tr=None, normalize=False): """ A helper function to plot :abbr:`fMRI (functional MRI)` confounds. 
""" import matplotlib matplotlib.use(config.get("execution", "matplotlib_backend")) import matplotlib.pyplot as plt from matplotlib.gridspec import GridSpec from matplotlib.backends.backend_pdf import FigureCanvasPdf as FigureCanvas import seaborn as sns fig = plt.Figure(figsize=figsize) FigureCanvas(fig) grid = GridSpec(1, 2, width_ratios=[3, 1], wspace=0.025) grid.update(hspace=1.0, right=0.95, left=0.1, bottom=0.2) ax = fig.add_subplot(grid[0, :-1]) if normalize and series_tr is not None: tseries /= series_tr ax.plot(tseries) ax.set_xlim((0, len(tseries))) ylabel = name if units is not None: ylabel += (" speed [{}/s]" if normalize else " [{}]").format(units) ax.set_ylabel(ylabel) xlabel = "Frame #" if series_tr is not None: xlabel = "Frame # ({} sec TR)".format(series_tr) ax.set_xlabel(xlabel) ylim = ax.get_ylim() ax = fig.add_subplot(grid[0, -1]) sns.distplot(tseries, vertical=True, ax=ax) ax.set_xlabel("Frames") ax.set_ylim(ylim) ax.set_yticklabels([]) return fig def is_outlier(points, thresh=3.5): """ Returns a boolean array with True if points are outliers and False otherwise. :param nparray points: an numobservations by numdimensions numpy array of observations :param float thresh: the modified z-score to use as a threshold. Observations with a modified z-score (based on the median absolute deviation) greater than this value will be classified as outliers. :return: A bolean mask, of size numobservations-length array. .. note:: References Boris Iglewicz and David Hoaglin (1993), "Volume 16: How to Detect and Handle Outliers", The ASQC Basic References in Quality Control: Statistical Techniques, Edward F. Mykytka, Ph.D., Editor. 
""" if len(points.shape) == 1: points = points[:, None] median = np.median(points, axis=0) diff = np.sum((points - median) ** 2, axis=-1) diff = np.sqrt(diff) med_abs_deviation = np.median(diff) modified_z_score = 0.6745 * diff / med_abs_deviation timepoints_to_discard = 0 for i in range(len(modified_z_score)): if modified_z_score[i] <= thresh: break else: timepoints_to_discard += 1 return timepoints_to_discard def cosine_filter( data, timestep, period_cut, remove_mean=True, axis=-1, failure_mode="error" ): datashape = data.shape timepoints = datashape[axis] if datashape[0] == 0 and failure_mode != "error": return data, np.array([]) data = data.reshape((-1, timepoints)) frametimes = timestep * np.arange(timepoints) X = _full_rank(_cosine_drift(period_cut, frametimes))[0] non_constant_regressors = X[:, :-1] if X.shape[1] > 1 else np.array([]) betas = np.linalg.lstsq(X, data.T)[0] if not remove_mean: X = X[:, :-1] betas = betas[:-1] residuals = data - X.dot(betas).T return residuals.reshape(datashape), non_constant_regressors def regress_poly(degree, data, remove_mean=True, axis=-1, failure_mode="error"): """ Returns data with degree polynomial regressed out. :param bool remove_mean: whether or not demean data (i.e. 
degree 0), :param int axis: numpy array axes along which regression is performed """ IFLOGGER.debug( "Performing polynomial regression on data of shape %s", str(data.shape) ) datashape = data.shape timepoints = datashape[axis] if datashape[0] == 0 and failure_mode != "error": return data, np.array([]) # Rearrange all voxel-wise time-series in rows data = data.reshape((-1, timepoints)) # Generate design matrix X = np.ones((timepoints, 1)) # quick way to calc degree 0 for i in range(degree): polynomial_func = Legendre.basis(i + 1) value_array = np.linspace(-1, 1, timepoints) X = np.hstack((X, polynomial_func(value_array)[:, np.newaxis])) non_constant_regressors = X[:, :-1] if X.shape[1] > 1 else np.array([]) # Calculate coefficients betas = np.linalg.pinv(X).dot(data.T) # Estimation if remove_mean: datahat = X.dot(betas).T else: # disregard the first layer of X, which is degree 0 datahat = X[:, 1:].dot(betas[1:, ...]).T regressed_data = data - datahat # Back to original shape return regressed_data.reshape(datashape), non_constant_regressors def combine_mask_files(mask_files, mask_method=None, mask_index=None): """Combines input mask files into a single nibabel image A helper function for CompCor Parameters ---------- mask_files: a list one or more binary mask files mask_method: enum ('union', 'intersect', 'none') determines how to combine masks mask_index: an integer determines which file to return (mutually exclusive with mask_method) Returns ------- masks: a list of nibabel images """ if isdefined(mask_index) or not isdefined(mask_method): if not isdefined(mask_index): if len(mask_files) == 1: mask_index = 0 else: raise ValueError( ( "When more than one mask file is provided, " "one of merge_method or mask_index must be " "set" ) ) if mask_index < len(mask_files): mask = nb.load(mask_files[mask_index]) return [mask] raise ValueError( ("mask_index {0} must be less than number of mask " "files {1}").format( mask_index, len(mask_files) ) ) masks = [] if mask_method == 
"none": for filename in mask_files: masks.append(nb.load(filename)) return masks if mask_method == "union": mask = None for filename in mask_files: img = nb.load(filename) img_as_mask = np.asanyarray(img.dataobj).astype("int32") > 0 if mask is None: mask = img_as_mask np.logical_or(mask, img_as_mask, mask) img = nb.Nifti1Image(mask, img.affine, header=img.header) return [img] if mask_method == "intersect": mask = None for filename in mask_files: img = nb.load(filename) img_as_mask = np.asanyarray(img.dataobj).astype("int32") > 0 if mask is None: mask = img_as_mask np.logical_and(mask, img_as_mask, mask) img = nb.Nifti1Image(mask, img.affine, header=img.header) return [img] def compute_noise_components( imgseries, mask_images, components_criterion=0.5, filter_type=False, degree=0, period_cut=128, repetition_time=None, failure_mode="error", mask_names=None, ): """ Compute the noise components from the image series for each mask. Parameters ---------- imgseries: nibabel image Time series data to be decomposed. mask_images: list List of nibabel images. Time series data from ``img_series`` is subset according to the spatial extent of each mask, and the subset data is then decomposed using principal component analysis. Masks should be coextensive with either anatomical or spatial noise ROIs. components_criterion: float Number of noise components to return. If this is a decimal value between 0 and 1, then ``create_noise_components`` will instead return the smallest number of components necessary to explain the indicated fraction of variance. If ``components_criterion`` is ``all``, then all components will be returned. filter_type: str Type of filter to apply to time series before computing noise components. - 'polynomial' - Legendre polynomial basis - 'cosine' - Discrete cosine (DCT) basis - False - None (mean-removal only) failure_mode: str Action to be taken in the event that any decomposition fails to identify any components. 
``error`` indicates that the routine should raise an exception and exit, while any other value indicates that the routine should return a matrix of NaN values equal in size to the requested decomposition matrix. mask_names: list or None List of names for each image in ``mask_images``. This should be equal in length to ``mask_images``, with the ith element of ``mask_names`` naming the ith element of ``mask_images``. degree: int Order of polynomial used to remove trends from the timeseries period_cut: float Minimum period (in sec) for DCT high-pass filter repetition_time: float Time (in sec) between volume acquisitions. This must be defined if the ``filter_type`` is ``cosine``. Returns ------- components: numpy array Numpy array containing the requested set of noise components basis: numpy array Numpy array containing the (non-constant) filter regressors metadata: OrderedDict{str: numpy array} Dictionary of eigenvalues, fractional explained variances, and cumulative explained variances. """ basis = np.array([]) if components_criterion == "all": components_criterion = -1 mask_names = mask_names or range(len(mask_images)) components = [] md_mask = [] md_sv = [] md_var = [] md_cumvar = [] md_retained = [] for name, img in zip(mask_names, mask_images): mask = np.asanyarray(nb.squeeze_image(img).dataobj).astype(bool) if imgseries.shape[:3] != mask.shape: raise ValueError( "Inputs for CompCor, timeseries and mask, do not have " "matching spatial dimensions ({} and {}, respectively)".format( imgseries.shape[:3], mask.shape ) ) voxel_timecourses = imgseries[mask, :] # Zero-out any bad values voxel_timecourses[np.isnan(np.sum(voxel_timecourses, axis=1)), :] = 0 # Currently support Legendre-polynomial or cosine or detrending # With no filter, the mean is nonetheless removed (poly w/ degree 0) if filter_type == "cosine": if repetition_time is None: raise ValueError("Repetition time must be provided for cosine filter") voxel_timecourses, basis = cosine_filter( voxel_timecourses, 
repetition_time, period_cut, failure_mode=failure_mode, ) elif filter_type in ("polynomial", False): # from paper: # "The constant and linear trends of the columns in the matrix M were # removed [prior to ...]" voxel_timecourses, basis = regress_poly( degree, voxel_timecourses, failure_mode=failure_mode ) # "Voxel time series from the noise ROI (either anatomical or tSTD) were # placed in a matrix M of size Nxm, with time along the row dimension # and voxels along the column dimension." M = voxel_timecourses.T # "[... were removed] prior to column-wise variance normalization." M = M / _compute_tSTD(M, 1.0) # "The covariance matrix C = MMT was constructed and decomposed into its # principal components using a singular value decomposition." try: u, s, _ = fallback_svd(M, full_matrices=False) except (np.linalg.LinAlgError, ValueError): if failure_mode == "error": raise s = np.full(M.shape[0], np.nan, dtype=np.float32) if components_criterion >= 1: u = np.full( (M.shape[0], components_criterion), np.nan, dtype=np.float32 ) else: u = np.full((M.shape[0], 1), np.nan, dtype=np.float32) variance_explained = (s ** 2) / np.sum(s ** 2) cumulative_variance_explained = np.cumsum(variance_explained) num_components = int(components_criterion) if 0 < components_criterion < 1: num_components = ( np.searchsorted(cumulative_variance_explained, components_criterion) + 1 ) elif components_criterion == -1: num_components = len(s) num_components = int(num_components) if num_components == 0: break components.append(u[:, :num_components]) md_mask.append([name] * len(s)) md_sv.append(s) md_var.append(variance_explained) md_cumvar.append(cumulative_variance_explained) md_retained.append([i < num_components for i in range(len(s))]) if len(components) > 0: components = np.hstack(components) else: if failure_mode == "error": raise ValueError("No components found") components = np.full((M.shape[0], num_components), np.nan, dtype=np.float32) metadata = OrderedDict( [ ("mask", 
list(chain(*md_mask))), ("singular_value", np.hstack(md_sv)), ("variance_explained", np.hstack(md_var)), ("cumulative_variance_explained", np.hstack(md_cumvar)), ("retained", list(chain(*md_retained))), ] ) return components, basis, metadata def _compute_tSTD(M, x, axis=0): stdM = np.std(M, axis=axis) # set bad values to x stdM[stdM == 0] = x stdM[np.isnan(stdM)] = x return stdM # _cosine_drift and _full_rank copied from nipy/modalities/fmri/design_matrix # # Nipy release: 0.4.1 # Modified for smooth integration in CompCor classes def _cosine_drift(period_cut, frametimes): """Create a cosine drift matrix with periods greater or equals to period_cut Parameters ---------- period_cut: float Cut period of the low-pass filter (in sec) frametimes: array of shape(nscans) The sampling times (in sec) Returns ------- cdrift: array of shape(n_scans, n_drifts) cosin drifts plus a constant regressor at cdrift[:,0] Ref: http://en.wikipedia.org/wiki/Discrete_cosine_transform DCT-II """ len_tim = len(frametimes) n_times = np.arange(len_tim) hfcut = 1.0 / period_cut # input parameter is the period # frametimes.max() should be (len_tim-1)*dt dt = frametimes[1] - frametimes[0] # hfcut = 1/(2*dt) yields len_time # If series is too short, return constant regressor order = max(int(np.floor(2 * len_tim * hfcut * dt)), 1) cdrift = np.zeros((len_tim, order)) nfct = np.sqrt(2.0 / len_tim) for k in range(1, order): cdrift[:, k - 1] = nfct * np.cos((np.pi / len_tim) * (n_times + 0.5) * k) cdrift[:, order - 1] = 1.0 # or 1./sqrt(len_tim) to normalize return cdrift def _full_rank(X, cmax=1e15): """ This function possibly adds a scalar matrix to X to guarantee that the condition number is smaller than a given threshold. 
Parameters ---------- X: array of shape(nrows, ncols) cmax=1.e-15, float tolerance for condition number Returns ------- X: array of shape(nrows, ncols) after regularization cmax=1.e-15, float tolerance for condition number """ U, s, V = fallback_svd(X, full_matrices=False) smax, smin = s.max(), s.min() c = smax / smin if c < cmax: return X, c IFLOGGER.warning("Matrix is singular at working precision, regularizing...") lda = (smax - cmax * smin) / (cmax - 1) s = s + lda X = np.dot(U, np.dot(np.diag(s), V)) return X, cmax nipype-1.7.0/nipype/algorithms/icc.py000066400000000000000000000110611413403311400176010ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os import numpy as np from numpy import ones, kron, mean, eye, hstack, dot, tile from numpy.linalg import pinv import nibabel as nb from ..interfaces.base import ( BaseInterfaceInputSpec, TraitedSpec, BaseInterface, traits, File, ) class ICCInputSpec(BaseInterfaceInputSpec): subjects_sessions = traits.List( traits.List(File(exists=True)), desc="n subjects m sessions 3D stat files", mandatory=True, ) mask = File(exists=True, mandatory=True) class ICCOutputSpec(TraitedSpec): icc_map = File(exists=True) session_var_map = File(exists=True, desc="variance between sessions") subject_var_map = File(exists=True, desc="variance between subjects") class ICC(BaseInterface): """ Calculates Interclass Correlation Coefficient (3,1) as defined in P. E. Shrout & Joseph L. Fleiss (1979). "Intraclass Correlations: Uses in Assessing Rater Reliability". Psychological Bulletin 86 (2): 420-428. This particular implementation is aimed at relaibility (test-retest) studies. 
""" input_spec = ICCInputSpec output_spec = ICCOutputSpec def _run_interface(self, runtime): maskdata = nb.load(self.inputs.mask).get_fdata() maskdata = np.logical_not(np.logical_or(maskdata == 0, np.isnan(maskdata))) session_datas = [ [nb.load(fname).get_fdata()[maskdata].reshape(-1, 1) for fname in sessions] for sessions in self.inputs.subjects_sessions ] list_of_sessions = [np.dstack(session_data) for session_data in session_datas] all_data = np.hstack(list_of_sessions) icc = np.zeros(session_datas[0][0].shape) session_F = np.zeros(session_datas[0][0].shape) session_var = np.zeros(session_datas[0][0].shape) subject_var = np.zeros(session_datas[0][0].shape) for x in range(icc.shape[0]): Y = all_data[x, :, :] icc[x], subject_var[x], session_var[x], session_F[x], _, _ = ICC_rep_anova( Y ) nim = nb.load(self.inputs.subjects_sessions[0][0]) new_data = np.zeros(nim.shape) new_data[maskdata] = icc.reshape(-1) new_img = nb.Nifti1Image(new_data, nim.affine, nim.header) nb.save(new_img, "icc_map.nii") new_data = np.zeros(nim.shape) new_data[maskdata] = session_var.reshape(-1) new_img = nb.Nifti1Image(new_data, nim.affine, nim.header) nb.save(new_img, "session_var_map.nii") new_data = np.zeros(nim.shape) new_data[maskdata] = subject_var.reshape(-1) new_img = nb.Nifti1Image(new_data, nim.affine, nim.header) nb.save(new_img, "subject_var_map.nii") return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["icc_map"] = os.path.abspath("icc_map.nii") outputs["session_var_map"] = os.path.abspath("session_var_map.nii") outputs["subject_var_map"] = os.path.abspath("subject_var_map.nii") return outputs def ICC_rep_anova(Y): """ the data Y are entered as a 'table' ie subjects are in rows and repeated measures in columns One Sample Repeated measure ANOVA Y = XB + E with X = [FaTor / Subjects] """ [nb_subjects, nb_conditions] = Y.shape dfc = nb_conditions - 1 dfe = (nb_subjects - 1) * dfc dfr = nb_subjects - 1 # Compute the repeated measure effect # 
------------------------------------ # Sum Square Total mean_Y = mean(Y) SST = ((Y - mean_Y) ** 2).sum() # create the design matrix for the different levels x = kron(eye(nb_conditions), ones((nb_subjects, 1))) # sessions x0 = tile(eye(nb_subjects), (nb_conditions, 1)) # subjects X = hstack([x, x0]) # Sum Square Error predicted_Y = dot(dot(dot(X, pinv(dot(X.T, X))), X.T), Y.flatten("F")) residuals = Y.flatten("F") - predicted_Y SSE = (residuals ** 2).sum() residuals.shape = Y.shape MSE = SSE / dfe # Sum square session effect - between colums/sessions SSC = ((mean(Y, 0) - mean_Y) ** 2).sum() * nb_subjects MSC = SSC / dfc / nb_subjects session_effect_F = MSC / MSE # Sum Square subject effect - between rows/subjects SSR = SST - SSC - SSE MSR = SSR / dfr # ICC(3,1) = (mean square subjeT - mean square error) / # (mean square subjeT + (k-1)*-mean square error) ICC = (MSR - MSE) / (MSR + dfc * MSE) e_var = MSE # variance of error r_var = (MSR - MSE) / nb_conditions # variance between subjects return ICC, r_var, e_var, session_effect_F, dfc, dfe nipype-1.7.0/nipype/algorithms/mesh.py000066400000000000000000000316541413403311400200110ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Miscellaneous algorithms for 2D contours and 3D triangularized meshes handling """ import os.path as op import numpy as np from numpy import linalg as nla from .. 
import logging from ..interfaces.base import ( BaseInterface, traits, TraitedSpec, File, BaseInterfaceInputSpec, ) from ..interfaces.vtkbase import tvtk from ..interfaces import vtkbase as VTKInfo IFLOGGER = logging.getLogger("nipype.interface") class TVTKBaseInterface(BaseInterface): """A base class for interfaces using VTK""" _redirect_x = True def __init__(self, **inputs): if VTKInfo.no_tvtk(): raise ImportError("This interface requires tvtk to run.") super(TVTKBaseInterface, self).__init__(**inputs) class WarpPointsInputSpec(BaseInterfaceInputSpec): points = File(exists=True, mandatory=True, desc="file containing the point set") warp = File( exists=True, mandatory=True, desc="dense deformation field to be applied" ) interp = traits.Enum( "cubic", "nearest", "linear", usedefault=True, mandatory=True, desc="interpolation", ) out_points = File( name_source="points", name_template="%s_warped", output_name="out_points", keep_extension=True, desc="the warped point set", ) class WarpPointsOutputSpec(TraitedSpec): out_points = File(desc="the warped point set") class WarpPoints(TVTKBaseInterface): """ Applies a displacement field to a point set given in vtk format. Any discrete deformation field, given in physical coordinates and which volume covers the extent of the vtk point set, is a valid ``warp`` file. FSL interfaces are compatible, for instance any field computed with :class:`nipype.interfaces.fsl.utils.ConvertWarp`. 
Example:: from nipype.algorithms.mesh import WarpPoints wp = WarpPoints() wp.inputs.points = 'surf1.vtk' wp.inputs.warp = 'warpfield.nii' res = wp.run() """ input_spec = WarpPointsInputSpec output_spec = WarpPointsOutputSpec def _gen_fname(self, in_file, suffix="generated", ext=None): fname, fext = op.splitext(op.basename(in_file)) if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext if ext is None: ext = fext if ext[0] == ".": ext = ext[1:] return op.abspath("%s_%s.%s" % (fname, suffix, ext)) def _run_interface(self, runtime): import nibabel as nb from scipy import ndimage r = tvtk.PolyDataReader(file_name=self.inputs.points) r.update() mesh = VTKInfo.vtk_output(r) points = np.array(mesh.points) warp_dims = nb.funcs.four_to_three(nb.load(self.inputs.warp)) affine = warp_dims[0].affine # voxsize = warp_dims[0].header.get_zooms() vox2ras = affine[0:3, 0:3] ras2vox = np.linalg.inv(vox2ras) origin = affine[0:3, 3] voxpoints = np.array([np.dot(ras2vox, (p - origin)) for p in points]) warps = [] for axis in warp_dims: wdata = axis.dataobj # four_to_three ensures this is an array if np.any(wdata != 0): warp = ndimage.map_coordinates(wdata, voxpoints.transpose()) else: warp = np.zeros((points.shape[0],)) warps.append(warp) disps = np.squeeze(np.dstack(warps)) newpoints = [p + d for p, d in zip(points, disps)] mesh.points = newpoints w = tvtk.PolyDataWriter() VTKInfo.configure_input_data(w, mesh) w.file_name = self._gen_fname(self.inputs.points, suffix="warped", ext=".vtk") w.write() return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["out_points"] = self._gen_fname( self.inputs.points, suffix="warped", ext=".vtk" ) return outputs class ComputeMeshWarpInputSpec(BaseInterfaceInputSpec): surface1 = File( exists=True, mandatory=True, desc=("Reference surface (vtk format) to which compute " "distance."), ) surface2 = File( exists=True, mandatory=True, desc=("Test surface (vtk format) from which compute " "distance."), ) metric = 
traits.Enum( "euclidean", "sqeuclidean", usedefault=True, desc="norm used to report distance" ) weighting = traits.Enum( "none", "area", usedefault=True, desc=( '"none": no weighting is performed, surface": edge distance is ' "weighted by the corresponding surface area" ), ) out_warp = File( "surfwarp.vtk", usedefault=True, desc="vtk file based on surface1 and warpings mapping it " "to surface2", ) out_file = File( "distance.npy", usedefault=True, desc="numpy file keeping computed distances and weights", ) class ComputeMeshWarpOutputSpec(TraitedSpec): distance = traits.Float(desc="computed distance") out_warp = File( exists=True, desc=("vtk file with the vertex-wise " "mapping of surface1 to surface2"), ) out_file = File( exists=True, desc="numpy file keeping computed distances and weights" ) class ComputeMeshWarp(TVTKBaseInterface): """ Calculates a the vertex-wise warping to get surface2 from surface1. It also reports the average distance of vertices, using the norm specified as input. .. 
warning: A point-to-point correspondence between surfaces is required Example:: import nipype.algorithms.mesh as m dist = m.ComputeMeshWarp() dist.inputs.surface1 = 'surf1.vtk' dist.inputs.surface2 = 'surf2.vtk' res = dist.run() """ input_spec = ComputeMeshWarpInputSpec output_spec = ComputeMeshWarpOutputSpec def _triangle_area(self, A, B, C): A = np.array(A) B = np.array(B) C = np.array(C) ABxAC = nla.norm(A - B) * nla.norm(A - C) prod = np.dot(B - A, C - A) angle = np.arccos(prod / ABxAC) area = 0.5 * ABxAC * np.sin(angle) return area def _run_interface(self, runtime): r1 = tvtk.PolyDataReader(file_name=self.inputs.surface1) r2 = tvtk.PolyDataReader(file_name=self.inputs.surface2) vtk1 = VTKInfo.vtk_output(r1) vtk2 = VTKInfo.vtk_output(r2) r1.update() r2.update() assert len(vtk1.points) == len(vtk2.points) points1 = np.array(vtk1.points) points2 = np.array(vtk2.points) diff = points2 - points1 weights = np.ones(len(diff)) try: errvector = nla.norm(diff, axis=1) except TypeError: # numpy < 1.9 errvector = np.apply_along_axis(nla.norm, 1, diff) if self.inputs.metric == "sqeuclidean": errvector **= 2 if self.inputs.weighting == "area": faces = vtk1.polys.to_array().reshape(-1, 4).astype(int)[:, 1:] for i, p1 in enumerate(points2): # compute surfaces, set in weight w = 0.0 point_faces = faces[(faces[:, :] == i).any(axis=1)] for idset in point_faces: fp1 = points1[int(idset[0])] fp2 = points1[int(idset[1])] fp3 = points1[int(idset[2])] w += self._triangle_area(fp1, fp2, fp3) weights[i] = w result = np.vstack([errvector, weights]) np.save(op.abspath(self.inputs.out_file), result.transpose()) out_mesh = tvtk.PolyData() out_mesh.points = vtk1.points out_mesh.polys = vtk1.polys out_mesh.point_data.vectors = diff out_mesh.point_data.vectors.name = "warpings" writer = tvtk.PolyDataWriter(file_name=op.abspath(self.inputs.out_warp)) VTKInfo.configure_input_data(writer, out_mesh) writer.write() self._distance = np.average(errvector, weights=weights) return runtime def 
_list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = op.abspath(self.inputs.out_file) outputs["out_warp"] = op.abspath(self.inputs.out_warp) outputs["distance"] = self._distance return outputs class MeshWarpMathsInputSpec(BaseInterfaceInputSpec): in_surf = File( exists=True, mandatory=True, desc=( "Input surface in vtk format, with associated warp " "field as point data (ie. from ComputeMeshWarp" ), ) float_trait = traits.Either( traits.Float(1.0), traits.Tuple(traits.Float(1.0), traits.Float(1.0), traits.Float(1.0)), ) operator = traits.Either( float_trait, File(exists=True), default=1.0, usedefault=True, mandatory=True, desc="image, float or tuple of floats to act as operator", ) operation = traits.Enum( "sum", "sub", "mul", "div", usedefault=True, desc="operation to be performed" ) out_warp = File( "warp_maths.vtk", usedefault=True, desc="vtk file based on in_surf and warpings mapping it " "to out_file", ) out_file = File("warped_surf.vtk", usedefault=True, desc="vtk with surface warped") class MeshWarpMathsOutputSpec(TraitedSpec): out_warp = File( exists=True, desc=("vtk file with the vertex-wise " "mapping of surface1 to surface2"), ) out_file = File(exists=True, desc="vtk with surface warped") class MeshWarpMaths(TVTKBaseInterface): """ Performs the most basic mathematical operations on the warping field defined at each vertex of the input surface. A surface with scalar or vector data can be used as operator for non-uniform operations. .. 
warning: A point-to-point correspondence between surfaces is required Example:: import nipype.algorithms.mesh as m mmath = m.MeshWarpMaths() mmath.inputs.in_surf = 'surf1.vtk' mmath.inputs.operator = 'surf2.vtk' mmath.inputs.operation = 'mul' res = mmath.run() """ input_spec = MeshWarpMathsInputSpec output_spec = MeshWarpMathsOutputSpec def _run_interface(self, runtime): r1 = tvtk.PolyDataReader(file_name=self.inputs.in_surf) vtk1 = VTKInfo.vtk_output(r1) r1.update() points1 = np.array(vtk1.points) if vtk1.point_data.vectors is None: raise RuntimeError("No warping field was found in in_surf") operator = self.inputs.operator opfield = np.ones_like(points1) if isinstance(operator, (str, bytes)): r2 = tvtk.PolyDataReader(file_name=self.inputs.surface2) vtk2 = VTKInfo.vtk_output(r2) r2.update() assert len(points1) == len(vtk2.points) opfield = vtk2.point_data.vectors if opfield is None: opfield = vtk2.point_data.scalars if opfield is None: raise RuntimeError("No operator values found in operator file") opfield = np.array(opfield) if opfield.shape[1] < points1.shape[1]: opfield = np.array([opfield.tolist()] * points1.shape[1]).T else: operator = np.atleast_1d(operator) opfield *= operator warping = np.array(vtk1.point_data.vectors) if self.inputs.operation == "sum": warping += opfield elif self.inputs.operation == "sub": warping -= opfield elif self.inputs.operation == "mul": warping *= opfield elif self.inputs.operation == "div": warping /= opfield vtk1.point_data.vectors = warping writer = tvtk.PolyDataWriter(file_name=op.abspath(self.inputs.out_warp)) VTKInfo.configure_input_data(writer, vtk1) writer.write() vtk1.point_data.vectors = None vtk1.points = points1 + warping writer = tvtk.PolyDataWriter(file_name=op.abspath(self.inputs.out_file)) VTKInfo.configure_input_data(writer, vtk1) writer.write() return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = op.abspath(self.inputs.out_file) outputs["out_warp"] = 
op.abspath(self.inputs.out_warp) return outputs class P2PDistance(ComputeMeshWarp): """ Calculates a point-to-point (p2p) distance between two corresponding VTK-readable meshes or contours. A point-to-point correspondence between nodes is required .. deprecated:: 1.0-dev Use :py:class:`ComputeMeshWarp` instead. """ def __init__(self, **inputs): super(P2PDistance, self).__init__(**inputs) IFLOGGER.warning( "This interface has been deprecated since 1.0, please " "use ComputeMeshWarp" ) nipype-1.7.0/nipype/algorithms/metrics.py000066400000000000000000000622351413403311400205220ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Image assessment algorithms. Typical overlap and error computation measures to evaluate results from other processing units. """ import os import os.path as op import nibabel as nb import numpy as np from .. import config, logging from ..interfaces.base import ( SimpleInterface, BaseInterface, traits, TraitedSpec, File, InputMultiPath, BaseInterfaceInputSpec, isdefined, ) from ..interfaces.nipy.base import NipyBaseInterface iflogger = logging.getLogger("nipype.interface") class DistanceInputSpec(BaseInterfaceInputSpec): volume1 = File( exists=True, mandatory=True, desc="Has to have the same dimensions as volume2." ) volume2 = File( exists=True, mandatory=True, desc="Has to have the same dimensions as volume1." 
) method = traits.Enum( "eucl_min", "eucl_cog", "eucl_mean", "eucl_wmean", "eucl_max", desc='""eucl_min": Euclidean distance between two closest points\ "eucl_cog": mean Euclidian distance between the Center of Gravity\ of volume1 and CoGs of volume2\ "eucl_mean": mean Euclidian minimum distance of all volume2 voxels\ to volume1\ "eucl_wmean": mean Euclidian minimum distance of all volume2 voxels\ to volume1 weighted by their values\ "eucl_max": maximum over minimum Euclidian distances of all volume2\ voxels to volume1 (also known as the Hausdorff distance)', usedefault=True, ) mask_volume = File(exists=True, desc="calculate overlap only within this mask.") class DistanceOutputSpec(TraitedSpec): distance = traits.Float() point1 = traits.Array(shape=(3,)) point2 = traits.Array(shape=(3,)) histogram = File() class Distance(BaseInterface): """Calculates distance between two volumes.""" input_spec = DistanceInputSpec output_spec = DistanceOutputSpec _hist_filename = "hist.pdf" def _find_border(self, data): from scipy.ndimage.morphology import binary_erosion eroded = binary_erosion(data) border = np.logical_and(data, np.logical_not(eroded)) return border def _get_coordinates(self, data, affine): if len(data.shape) == 4: data = data[:, :, :, 0] indices = np.vstack(np.nonzero(data)) indices = np.vstack((indices, np.ones(indices.shape[1]))) coordinates = np.dot(affine, indices) return coordinates[:3, :] def _eucl_min(self, nii1, nii2): from scipy.spatial.distance import cdist, euclidean origdata1 = np.asanyarray(nii1.dataobj).astype(bool) border1 = self._find_border(origdata1) origdata2 = np.asanyarray(nii2.dataobj).astype(bool) border2 = self._find_border(origdata2) set1_coordinates = self._get_coordinates(border1, nii1.affine) set2_coordinates = self._get_coordinates(border2, nii2.affine) dist_matrix = cdist(set1_coordinates.T, set2_coordinates.T) (point1, point2) = np.unravel_index(np.argmin(dist_matrix), dist_matrix.shape) return ( euclidean(set1_coordinates.T[point1, 
:], set2_coordinates.T[point2, :]), set1_coordinates.T[point1, :], set2_coordinates.T[point2, :], ) def _eucl_cog(self, nii1, nii2): from scipy.spatial.distance import cdist from scipy.ndimage.measurements import center_of_mass, label origdata1 = np.asanyarray(nii1.dataobj) origdata1 = (np.rint(origdata1) != 0) & ~np.isnan(origdata1) cog_t = np.array(center_of_mass(origdata1)).reshape(-1, 1) cog_t = np.vstack((cog_t, np.array([1]))) cog_t_coor = np.dot(nii1.affine, cog_t)[:3, :] origdata2 = np.asanyarray(nii2.dataobj) origdata2 = (np.rint(origdata2) != 0) & ~np.isnan(origdata2) (labeled_data, n_labels) = label(origdata2) cogs = np.ones((4, n_labels)) for i in range(n_labels): cogs[:3, i] = np.array(center_of_mass(origdata2, labeled_data, i + 1)) cogs_coor = np.dot(nii2.affine, cogs)[:3, :] dist_matrix = cdist(cog_t_coor.T, cogs_coor.T) return np.mean(dist_matrix) def _eucl_mean(self, nii1, nii2, weighted=False): from scipy.spatial.distance import cdist origdata1 = np.asanyarray(nii1.dataobj).astype(bool) border1 = self._find_border(origdata1) origdata2 = np.asanyarray(nii2.dataobj).astype(bool) set1_coordinates = self._get_coordinates(border1, nii1.affine) set2_coordinates = self._get_coordinates(origdata2, nii2.affine) dist_matrix = cdist(set1_coordinates.T, set2_coordinates.T) min_dist_matrix = np.amin(dist_matrix, axis=0) import matplotlib matplotlib.use(config.get("execution", "matplotlib_backend")) import matplotlib.pyplot as plt plt.figure() plt.hist(min_dist_matrix, 50, normed=1, facecolor="green") plt.savefig(self._hist_filename) plt.clf() plt.close() if weighted: return np.average(min_dist_matrix, weights=nii2.dataobj[origdata2].flat) else: return np.mean(min_dist_matrix) def _eucl_max(self, nii1, nii2): from scipy.spatial.distance import cdist origdata1 = np.asanyarray(nii1.dataobj) origdata1 = (np.rint(origdata1) != 0) & ~np.isnan(origdata1) origdata2 = np.asanyarray(nii2.dataobj) origdata2 = (np.rint(origdata2) != 0) & ~np.isnan(origdata2) if 
isdefined(self.inputs.mask_volume): maskdata = np.asanyarray(nb.load(self.inputs.mask_volume).dataobj) maskdata = (np.rint(maskdata) != 0) & ~np.isnan(maskdata) origdata1 = np.logical_and(maskdata, origdata1) origdata2 = np.logical_and(maskdata, origdata2) if origdata1.max() == 0 or origdata2.max() == 0: return np.nan border1 = self._find_border(origdata1) border2 = self._find_border(origdata2) set1_coordinates = self._get_coordinates(border1, nii1.affine) set2_coordinates = self._get_coordinates(border2, nii2.affine) distances = cdist(set1_coordinates.T, set2_coordinates.T) mins = np.concatenate((np.amin(distances, axis=0), np.amin(distances, axis=1))) return np.max(mins) def _run_interface(self, runtime): # there is a bug in some scipy ndimage methods that gets tripped by memory mapped objects nii1 = nb.load(self.inputs.volume1, mmap=False) nii2 = nb.load(self.inputs.volume2, mmap=False) if self.inputs.method == "eucl_min": self._distance, self._point1, self._point2 = self._eucl_min(nii1, nii2) elif self.inputs.method == "eucl_cog": self._distance = self._eucl_cog(nii1, nii2) elif self.inputs.method == "eucl_mean": self._distance = self._eucl_mean(nii1, nii2) elif self.inputs.method == "eucl_wmean": self._distance = self._eucl_mean(nii1, nii2, weighted=True) elif self.inputs.method == "eucl_max": self._distance = self._eucl_max(nii1, nii2) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["distance"] = self._distance if self.inputs.method == "eucl_min": outputs["point1"] = self._point1 outputs["point2"] = self._point2 elif self.inputs.method in ["eucl_mean", "eucl_wmean"]: outputs["histogram"] = os.path.abspath(self._hist_filename) return outputs class OverlapInputSpec(BaseInterfaceInputSpec): volume1 = File( exists=True, mandatory=True, desc="Has to have the same dimensions as volume2." ) volume2 = File( exists=True, mandatory=True, desc="Has to have the same dimensions as volume1." 
) mask_volume = File(exists=True, desc="calculate overlap only within this mask.") bg_overlap = traits.Bool( False, usedefault=True, mandatory=True, desc="consider zeros as a label" ) out_file = File("diff.nii", usedefault=True) weighting = traits.Enum( "none", "volume", "squared_vol", usedefault=True, desc=( "'none': no class-overlap weighting is " "performed. 'volume': computed class-" "overlaps are weighted by class volume " "'squared_vol': computed class-overlaps " "are weighted by the squared volume of " "the class" ), ) vol_units = traits.Enum( "voxel", "mm", mandatory=True, usedefault=True, desc="units for volumes" ) class OverlapOutputSpec(TraitedSpec): jaccard = traits.Float(desc="averaged jaccard index") dice = traits.Float(desc="averaged dice index") roi_ji = traits.List(traits.Float(), desc=("the Jaccard index (JI) per ROI")) roi_di = traits.List(traits.Float(), desc=("the Dice index (DI) per ROI")) volume_difference = traits.Float(desc=("averaged volume difference")) roi_voldiff = traits.List(traits.Float(), desc=("volume differences of ROIs")) labels = traits.List(traits.Int(), desc=("detected labels")) diff_file = File(exists=True, desc="error map of differences") class Overlap(BaseInterface): """ Calculates Dice and Jaccard's overlap measures between two ROI maps. The interface is backwards compatible with the former version in which only binary files were accepted. The averaged values of overlap indices can be weighted. Volumes now can be reported in :math:`mm^3`, although they are given in voxels to keep backwards compatibility. 
Example ------- >>> overlap = Overlap() >>> overlap.inputs.volume1 = 'cont1.nii' >>> overlap.inputs.volume2 = 'cont2.nii' >>> res = overlap.run() # doctest: +SKIP """ input_spec = OverlapInputSpec output_spec = OverlapOutputSpec def _bool_vec_dissimilarity(self, booldata1, booldata2, method): from scipy.spatial.distance import dice, jaccard methods = {"dice": dice, "jaccard": jaccard} if not (np.any(booldata1) or np.any(booldata2)): return 0 return 1 - methods[method](booldata1.flat, booldata2.flat) def _run_interface(self, runtime): nii1 = nb.load(self.inputs.volume1) nii2 = nb.load(self.inputs.volume2) scale = 1.0 if self.inputs.vol_units == "mm": scale = np.prod(nii1.header.get_zooms()[:3]) data1 = np.asanyarray(nii1.dataobj) data1[np.logical_or(data1 < 0, np.isnan(data1))] = 0 max1 = int(data1.max()) data1 = data1.astype(np.min_scalar_type(max1)) data2 = np.asanyarray(nii2.dataobj).astype(np.min_scalar_type(max1)) data2[np.logical_or(data1 < 0, np.isnan(data1))] = 0 if isdefined(self.inputs.mask_volume): maskdata = np.asanyarray(nb.load(self.inputs.mask_volume).dataobj) maskdata = ~np.logical_or(maskdata == 0, np.isnan(maskdata)) data1[~maskdata] = 0 data2[~maskdata] = 0 res = [] volumes1 = [] volumes2 = [] labels = np.unique(data1[data1 > 0].reshape(-1)).tolist() if self.inputs.bg_overlap: labels.insert(0, 0) for l in labels: res.append( self._bool_vec_dissimilarity(data1 == l, data2 == l, method="jaccard") ) volumes1.append(scale * len(data1[data1 == l])) volumes2.append(scale * len(data2[data2 == l])) results = dict(jaccard=[], dice=[]) results["jaccard"] = np.array(res) results["dice"] = 2.0 * results["jaccard"] / (results["jaccard"] + 1.0) weights = np.ones((len(volumes1),), dtype=np.float32) if self.inputs.weighting != "none": weights = weights / np.array(volumes1) if self.inputs.weighting == "squared_vol": weights = weights ** 2 weights = weights / np.sum(weights) both_data = np.zeros(data1.shape) both_data[(data1 - data2) != 0] = 1 nb.save( 
nb.Nifti1Image(both_data, nii1.affine, nii1.header), self.inputs.out_file ) self._labels = labels self._ove_rois = results self._vol_rois = (np.array(volumes1) - np.array(volumes2)) / np.array(volumes1) self._dice = round(np.sum(weights * results["dice"]), 5) self._jaccard = round(np.sum(weights * results["jaccard"]), 5) self._volume = np.sum(weights * self._vol_rois) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["labels"] = self._labels outputs["jaccard"] = self._jaccard outputs["dice"] = self._dice outputs["volume_difference"] = self._volume outputs["roi_ji"] = self._ove_rois["jaccard"].tolist() outputs["roi_di"] = self._ove_rois["dice"].tolist() outputs["roi_voldiff"] = self._vol_rois.tolist() outputs["diff_file"] = os.path.abspath(self.inputs.out_file) return outputs class FuzzyOverlapInputSpec(BaseInterfaceInputSpec): in_ref = InputMultiPath( File(exists=True), mandatory=True, desc="Reference image. Requires the same dimensions as in_tst.", ) in_tst = InputMultiPath( File(exists=True), mandatory=True, desc="Test image. Requires the same dimensions as in_ref.", ) in_mask = File(exists=True, desc="calculate overlap only within mask") weighting = traits.Enum( "none", "volume", "squared_vol", usedefault=True, desc=( "'none': no class-overlap weighting is " "performed. 
'volume': computed class-" "overlaps are weighted by class volume " "'squared_vol': computed class-overlaps " "are weighted by the squared volume of " "the class" ), ) out_file = File( "diff.nii", desc="alternative name for resulting difference-map", usedefault=True, ) class FuzzyOverlapOutputSpec(TraitedSpec): jaccard = traits.Float(desc="Fuzzy Jaccard Index (fJI), all the classes") dice = traits.Float(desc="Fuzzy Dice Index (fDI), all the classes") class_fji = traits.List( traits.Float(), desc="Array containing the fJIs of each computed class" ) class_fdi = traits.List( traits.Float(), desc="Array containing the fDIs of each computed class" ) class FuzzyOverlap(SimpleInterface): """Calculates various overlap measures between two maps, using the fuzzy definition proposed in: Crum et al., Generalized Overlap Measures for Evaluation and Validation in Medical Image Analysis, IEEE Trans. Med. Ima. 25(11),pp 1451-1461, Nov. 2006. in_ref and in_tst are lists of 2/3D images, each element on the list containing one volume fraction map of a class in a fuzzy partition of the domain. Example ------- >>> overlap = FuzzyOverlap() >>> overlap.inputs.in_ref = [ 'ref_class0.nii', 'ref_class1.nii' ] >>> overlap.inputs.in_tst = [ 'tst_class0.nii', 'tst_class1.nii' ] >>> overlap.inputs.weighting = 'volume' >>> res = overlap.run() # doctest: +SKIP """ input_spec = FuzzyOverlapInputSpec output_spec = FuzzyOverlapOutputSpec def _run_interface(self, runtime): # Load data refdata = nb.concat_images(self.inputs.in_ref).dataobj tstdata = nb.concat_images(self.inputs.in_tst).dataobj # Data must have same shape if not refdata.shape == tstdata.shape: raise RuntimeError( 'Size of "in_tst" %s must match that of "in_ref" %s.' 
% (tstdata.shape, refdata.shape) ) ncomp = refdata.shape[-1] # Load mask mask = np.ones_like(refdata, dtype=bool) if isdefined(self.inputs.in_mask): mask = np.asanyarray(nb.load(self.inputs.in_mask).dataobj) > 0 mask = np.repeat(mask[..., np.newaxis], ncomp, -1) assert mask.shape == refdata.shape # Drop data outside mask refdata = refdata[mask] tstdata = tstdata[mask] if np.any(refdata < 0.0): iflogger.warning( 'Negative values encountered in "in_ref" input, ' "taking absolute values." ) refdata = np.abs(refdata) if np.any(tstdata < 0.0): iflogger.warning( 'Negative values encountered in "in_tst" input, ' "taking absolute values." ) tstdata = np.abs(tstdata) if np.any(refdata > 1.0): iflogger.warning( 'Values greater than 1.0 found in "in_ref" input, ' "scaling values." ) refdata /= refdata.max() if np.any(tstdata > 1.0): iflogger.warning( 'Values greater than 1.0 found in "in_tst" input, ' "scaling values." ) tstdata /= tstdata.max() numerators = np.atleast_2d(np.minimum(refdata, tstdata).reshape((-1, ncomp))) denominators = np.atleast_2d(np.maximum(refdata, tstdata).reshape((-1, ncomp))) jaccards = numerators.sum(axis=0) / denominators.sum(axis=0) # Calculate weights weights = np.ones_like(jaccards, dtype=float) if self.inputs.weighting != "none": volumes = np.sum((refdata + tstdata) > 0, axis=1).reshape((-1, ncomp)) weights = 1.0 / volumes if self.inputs.weighting == "squared_vol": weights = weights ** 2 weights = weights / np.sum(weights) dices = 2.0 * jaccards / (jaccards + 1.0) # Fill-in the results object self._results["jaccard"] = float(weights.dot(jaccards)) self._results["dice"] = float(weights.dot(dices)) self._results["class_fji"] = [float(v) for v in jaccards] self._results["class_fdi"] = [float(v) for v in dices] return runtime class ErrorMapInputSpec(BaseInterfaceInputSpec): in_ref = File( exists=True, mandatory=True, desc="Reference image. Requires the same dimensions as in_tst.", ) in_tst = File( exists=True, mandatory=True, desc="Test image. 
Requires the same dimensions as in_ref.", ) mask = File(exists=True, desc="calculate overlap only within this mask.") metric = traits.Enum( "sqeuclidean", "euclidean", desc="error map metric (as implemented in scipy cdist)", usedefault=True, mandatory=True, ) out_map = File(desc="Name for the output file") class ErrorMapOutputSpec(TraitedSpec): out_map = File(exists=True, desc="resulting error map") distance = traits.Float(desc="Average distance between volume 1 and 2") class ErrorMap(BaseInterface): """Calculates the error (distance) map between two input volumes. Example ------- >>> errormap = ErrorMap() >>> errormap.inputs.in_ref = 'cont1.nii' >>> errormap.inputs.in_tst = 'cont2.nii' >>> res = errormap.run() # doctest: +SKIP """ input_spec = ErrorMapInputSpec output_spec = ErrorMapOutputSpec _out_file = "" def _run_interface(self, runtime): # Get two numpy data matrices nii_ref = nb.load(self.inputs.in_ref) ref_data = np.squeeze(nii_ref.dataobj) tst_data = np.squeeze(nb.load(self.inputs.in_tst).dataobj) assert ref_data.ndim == tst_data.ndim # Load mask comps = 1 mapshape = ref_data.shape if ref_data.ndim == 4: comps = ref_data.shape[-1] mapshape = ref_data.shape[:-1] if isdefined(self.inputs.mask): msk = np.asanyarray(nb.load(self.inputs.mask).dataobj) if mapshape != msk.shape: raise RuntimeError( "Mask should match volume shape, \ mask is %s and volumes are %s" % (list(msk.shape), list(mapshape)) ) else: msk = np.ones(shape=mapshape) # Flatten both volumes and make the pixel differennce mskvector = msk.reshape(-1) msk_idxs = np.where(mskvector == 1) refvector = ref_data.reshape(-1, comps)[msk_idxs].astype(np.float32) tstvector = tst_data.reshape(-1, comps)[msk_idxs].astype(np.float32) diffvector = refvector - tstvector # Scale the difference if self.inputs.metric == "sqeuclidean": errvector = diffvector ** 2 if comps > 1: errvector = np.sum(errvector, axis=1) else: errvector = np.squeeze(errvector) elif self.inputs.metric == "euclidean": errvector = 
np.linalg.norm(diffvector, axis=1) errvectorexp = np.zeros_like( mskvector, dtype=np.float32 ) # The default type is uint8 errvectorexp[msk_idxs] = errvector # Get averaged error self._distance = np.average(errvector) # Only average the masked voxels errmap = errvectorexp.reshape(mapshape) hdr = nii_ref.header.copy() hdr.set_data_dtype(np.float32) hdr["data_type"] = 16 hdr.set_data_shape(mapshape) if not isdefined(self.inputs.out_map): fname, ext = op.splitext(op.basename(self.inputs.in_tst)) if ext == ".gz": fname, ext2 = op.splitext(fname) ext = ext2 + ext self._out_file = op.abspath(fname + "_errmap" + ext) else: self._out_file = self.inputs.out_map nb.Nifti1Image(errmap.astype(np.float32), nii_ref.affine, hdr).to_filename( self._out_file ) return runtime def _list_outputs(self): outputs = self.output_spec().get() outputs["out_map"] = self._out_file outputs["distance"] = self._distance return outputs class SimilarityInputSpec(BaseInterfaceInputSpec): volume1 = File(exists=True, desc="3D/4D volume", mandatory=True) volume2 = File(exists=True, desc="3D/4D volume", mandatory=True) mask1 = File(exists=True, desc="3D volume") mask2 = File(exists=True, desc="3D volume") metric = traits.Either( traits.Enum("cc", "cr", "crl1", "mi", "nmi", "slr"), traits.Callable(), desc="""str or callable Cost-function for assessing image similarity. If a string, one of 'cc': correlation coefficient, 'cr': correlation ratio, 'crl1': L1-norm based correlation ratio, 'mi': mutual information, 'nmi': normalized mutual information, 'slr': supervised log-likelihood ratio. If a callable, it should take a two-dimensional array representing the image joint histogram as an input and return a float.""", usedefault=True, ) class SimilarityOutputSpec(TraitedSpec): similarity = traits.List( traits.Float(desc="Similarity between volume 1 and 2, frame by frame") ) class Similarity(NipyBaseInterface): """Calculates similarity between two 3D or 4D volumes. 
    Both volumes have to be in the same coordinate system, same space within
    that coordinate system and with the same voxel dimensions.

    .. note:: This interface is an extension of
       :py:class:`nipype.interfaces.nipy.utils.Similarity` to support 4D files.
       Requires :py:mod:`nipy`

    Example
    -------

    >>> from nipype.algorithms.metrics import Similarity
    >>> similarity = Similarity()
    >>> similarity.inputs.volume1 = 'rc1s1.nii'
    >>> similarity.inputs.volume2 = 'rc1s2.nii'
    >>> similarity.inputs.mask1 = 'mask.nii'
    >>> similarity.inputs.mask2 = 'mask.nii'
    >>> similarity.inputs.metric = 'cr'
    >>> res = similarity.run() # doctest: +SKIP

    """

    input_spec = SimilarityInputSpec
    output_spec = SimilarityOutputSpec

    def _run_interface(self, runtime):
        # nipy is imported lazily so the module can be imported without it.
        from nipy.algorithms.registration.histogram_registration import (
            HistogramRegistration,
        )
        from nipy.algorithms.registration.affine import Affine

        vol1_nii = nb.load(self.inputs.volume1)
        vol2_nii = nb.load(self.inputs.volume2)

        dims = len(vol1_nii.shape)

        # 2D/3D inputs are compared as a single pair; 4D inputs are split
        # into 3D frames and compared frame by frame.
        if dims == 3 or dims == 2:
            vols1 = [vol1_nii]
            vols2 = [vol2_nii]
        if dims == 4:
            vols1 = nb.four_to_three(vol1_nii)
            vols2 = nb.four_to_three(vol2_nii)

        if dims < 2 or dims > 4:
            raise RuntimeError(
                "Image dimensions not supported (detected %dD file)" % dims
            )

        # Masks are binarized by comparison with the label value 1.
        if isdefined(self.inputs.mask1):
            mask1 = np.asanyarray(nb.load(self.inputs.mask1).dataobj) == 1
        else:
            mask1 = None

        if isdefined(self.inputs.mask2):
            mask2 = np.asanyarray(nb.load(self.inputs.mask2).dataobj) == 1
        else:
            mask2 = None

        self._similarity = []

        for ts1, ts2 in zip(vols1, vols2):
            # Evaluate the chosen similarity metric at the identity
            # transform (Affine()) — no registration is performed.
            histreg = HistogramRegistration(
                from_img=ts1,
                to_img=ts2,
                similarity=self.inputs.metric,
                from_mask=mask1,
                to_mask=mask2,
            )
            self._similarity.append(histreg.eval(Affine()))

        return runtime

    def _list_outputs(self):
        outputs = self._outputs().get()
        outputs["similarity"] = self._similarity
        return outputs
nipype-1.7.0/nipype/algorithms/misc.py000066400000000000000000001447001413403311400200050ustar00rootroot00000000000000# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Miscellaneous algorithms."""
import os
import os.path as op

import nibabel as nb
import numpy as np
from math import floor, ceil
import itertools
import warnings

from .. import logging
from . import metrics as nam
from ..interfaces.base import (
    BaseInterface,
    traits,
    TraitedSpec,
    File,
    InputMultiPath,
    OutputMultiPath,
    BaseInterfaceInputSpec,
    isdefined,
    DynamicTraitedSpec,
    Undefined,
)
from ..utils.filemanip import fname_presuffix, split_filename, ensure_list
from . import confounds

iflogger = logging.getLogger("nipype.interface")


class PickAtlasInputSpec(BaseInterfaceInputSpec):
    atlas = File(
        exists=True, desc="Location of the atlas that will be used.", mandatory=True
    )
    labels = traits.Either(
        traits.Int,
        traits.List(traits.Int),
        desc=(
            "Labels of regions that will be included in the mask. Must be\
 compatible with the atlas used."
        ),
        mandatory=True,
    )
    hemi = traits.Enum(
        "both",
        "left",
        "right",
        desc="Restrict the mask to only one hemisphere: left or right",
        usedefault=True,
    )
    dilation_size = traits.Int(
        usedefault=True,
        desc="Defines how much the mask will be dilated (expanded in 3D).",
    )
    output_file = File(desc="Where to store the output mask.")


class PickAtlasOutputSpec(TraitedSpec):
    mask_file = File(exists=True, desc="output mask file")


class PickAtlas(BaseInterface):
    """Returns ROI masks given an atlas and a list of labels. Supports dilation
    and left right masking (assuming the atlas is properly aligned).
    """

    input_spec = PickAtlasInputSpec
    output_spec = PickAtlasOutputSpec

    def _run_interface(self, runtime):
        # Build the mask and write it to the generated output path.
        nim = self._get_brodmann_area()
        nb.save(nim, self._gen_output_filename())
        return runtime

    def _gen_output_filename(self):
        # Default output name: atlas filename with a "_mask" suffix in cwd.
        if not isdefined(self.inputs.output_file):
            output = fname_presuffix(
                fname=self.inputs.atlas,
                suffix="_mask",
                newpath=os.getcwd(),
                use_ext=True,
            )
        else:
            output = os.path.realpath(self.inputs.output_file)
        return output

    def _get_brodmann_area(self):
        """Return a binary Nifti1Image selecting the requested labels."""
        nii = nb.load(self.inputs.atlas)
        origdata = np.asanyarray(nii.dataobj)
        newdata = np.zeros(origdata.shape)

        # A single int label is accepted for convenience; normalize to list.
        if not isinstance(self.inputs.labels, list):
            labels = [self.inputs.labels]
        else:
            labels = self.inputs.labels
        for lab in labels:
            newdata[origdata == lab] = 1
        # Hemisphere restriction zeroes one half of the first (x) axis;
        # assumes the atlas is aligned so that x splits the hemispheres.
        if self.inputs.hemi == "right":
            newdata[int(floor(float(origdata.shape[0]) / 2)) :, :, :] = 0
        elif self.inputs.hemi == "left":
            newdata[: int(ceil(float(origdata.shape[0]) / 2)), :, :] = 0

        if self.inputs.dilation_size != 0:
            from scipy.ndimage.morphology import grey_dilation

            # Cubic structuring element of side (2 * dilation_size + 1).
            newdata = grey_dilation(
                newdata,
                (
                    2 * self.inputs.dilation_size + 1,
                    2 * self.inputs.dilation_size + 1,
                    2 * self.inputs.dilation_size + 1,
                ),
            )

        return nb.Nifti1Image(newdata, nii.affine, nii.header)

    def _list_outputs(self):
        outputs = self._outputs().get()
        outputs["mask_file"] = self._gen_output_filename()
        return outputs


class SimpleThresholdInputSpec(BaseInterfaceInputSpec):
    volumes = InputMultiPath(
        File(exists=True), desc="volumes to be thresholded", mandatory=True
    )
    threshold = traits.Float(
        desc="volumes to be thresholdedeverything below this value will be set\
 to zero",
        mandatory=True,
    )


class SimpleThresholdOutputSpec(TraitedSpec):
    thresholded_volumes = OutputMultiPath(File(exists=True), desc="thresholded volumes")


class SimpleThreshold(BaseInterface):
    """Applies a threshold to input volumes"""

    input_spec = SimpleThresholdInputSpec
    output_spec = SimpleThresholdOutputSpec

    def _run_interface(self, runtime):
        # Zero out every voxel at or below the threshold; write one output
        # per input volume into the current working directory.
        for fname in self.inputs.volumes:
            img = nb.load(fname)
            data = img.get_fdata()

            active_map = data > self.inputs.threshold

            thresholded_map = np.zeros(data.shape)
            thresholded_map[active_map] = data[active_map]

            new_img = nb.Nifti1Image(thresholded_map, img.affine, img.header)
            _, base, _ = split_filename(fname)
            nb.save(new_img, base + "_thresholded.nii")

        return runtime

    def _list_outputs(self):
        outputs = self._outputs().get()
        outputs["thresholded_volumes"] = []
        for fname in self.inputs.volumes:
            _, base, _ = split_filename(fname)
            outputs["thresholded_volumes"].append(
                os.path.abspath(base + "_thresholded.nii")
            )
        return outputs


class ModifyAffineInputSpec(BaseInterfaceInputSpec):
    volumes = InputMultiPath(
        File(exists=True),
        desc="volumes which affine matrices will be modified",
        mandatory=True,
    )
    transformation_matrix = traits.Array(
        value=np.eye(4),
        shape=(4, 4),
        desc="transformation matrix that will be left multiplied by the\
 affine matrix",
        usedefault=True,
    )


class ModifyAffineOutputSpec(TraitedSpec):
    # NOTE(review): "exist=True" looks like a typo for "exists=True" — the
    # keyword as written is silently accepted by the trait; confirm intent.
    transformed_volumes = OutputMultiPath(File(exist=True))


class ModifyAffine(BaseInterface):
    """Left multiplies the affine matrix with a specified values. Saves the volume as a nifti file.
    """

    input_spec = ModifyAffineInputSpec
    output_spec = ModifyAffineOutputSpec

    def _gen_output_filename(self, name):
        # Output path: input basename + "_transformed.nii" in cwd.
        _, base, _ = split_filename(name)
        return os.path.abspath(base + "_transformed.nii")

    def _run_interface(self, runtime):
        for fname in self.inputs.volumes:
            img = nb.load(fname)

            # New affine = transformation_matrix @ original affine.
            affine = img.affine
            affine = np.dot(self.inputs.transformation_matrix, affine)

            nb.save(
                nb.Nifti1Image(img.dataobj, affine, img.header),
                self._gen_output_filename(fname),
            )

        return runtime

    def _list_outputs(self):
        outputs = self._outputs().get()
        outputs["transformed_volumes"] = []
        for fname in self.inputs.volumes:
            outputs["transformed_volumes"].append(self._gen_output_filename(fname))
        return outputs


class CreateNiftiInputSpec(BaseInterfaceInputSpec):
    data_file = File(exists=True, mandatory=True, desc="ANALYZE img file")
    header_file = File(
        exists=True, mandatory=True, desc="corresponding ANALYZE hdr file"
    )
    affine = traits.Array(desc="affine transformation array")


class CreateNiftiOutputSpec(TraitedSpec):
    nifti_file = File(exists=True)


class CreateNifti(BaseInterface):
    """Creates a nifti volume"""

    input_spec = CreateNiftiInputSpec
    output_spec = CreateNiftiOutputSpec

    def _gen_output_file_name(self):
        # Output path: data-file basename with a ".nii" extension in cwd.
        _, base, _ = split_filename(self.inputs.data_file)
        return os.path.abspath(base + ".nii")

    def _run_interface(self, runtime):
        # Read the ANALYZE header, then use it to decode the raw data file.
        with open(self.inputs.header_file, "rb") as hdr_file:
            hdr = nb.AnalyzeHeader.from_fileobj(hdr_file)

        if isdefined(self.inputs.affine):
            affine = self.inputs.affine
        else:
            affine = None

        with open(self.inputs.data_file, "rb") as data_file:
            data = hdr.data_from_fileobj(data_file)

        img = nb.Nifti1Image(data, affine, hdr)
        nb.save(img, self._gen_output_file_name())

        return runtime

    def _list_outputs(self):
        outputs = self._outputs().get()
        outputs["nifti_file"] = self._gen_output_file_name()
        return outputs


class GunzipInputSpec(BaseInterfaceInputSpec):
    in_file = File(exists=True, mandatory=True)


class GunzipOutputSpec(TraitedSpec):
    out_file = File(exists=True)


class Gunzip(BaseInterface):
    """Gunzip wrapper

    >>> from nipype.algorithms.misc import Gunzip
    >>> gunzip = Gunzip(in_file='tpms_msk.nii.gz')
    >>> res = gunzip.run()
    >>> res.outputs.out_file  # doctest: +ELLIPSIS
    '.../tpms_msk.nii'

    .. testcleanup::

    >>> os.unlink('tpms_msk.nii')
    """

    input_spec = GunzipInputSpec
    output_spec = GunzipOutputSpec

    def _gen_output_file_name(self):
        # Strip a trailing ".gz" (case-insensitive) from the extension.
        _, base, ext = split_filename(self.inputs.in_file)
        if ext[-3:].lower() == ".gz":
            ext = ext[:-3]

        return os.path.abspath(base + ext)

    def _run_interface(self, runtime):
        import gzip
        import shutil

        with gzip.open(self.inputs.in_file, "rb") as in_file:
            with open(self._gen_output_file_name(), "wb") as out_file:
                shutil.copyfileobj(in_file, out_file)
        return runtime

    def _list_outputs(self):
        outputs = self._outputs().get()
        outputs["out_file"] = self._gen_output_file_name()
        return outputs


def replaceext(in_list, ext):
    # Replace the extension of every path in in_list with ``ext``.
    out_list = list()
    for filename in in_list:
        path, name, _ = split_filename(op.abspath(filename))
        out_name = op.join(path, name) + ext
        out_list.append(out_name)
    return out_list


def _matlab2csv(in_array, name, reshape):
    # Save one MATLAB variable as <name>.csv in cwd; optionally flatten
    # a 2D matrix to a single column (for R-friendly output).
    output_array = np.asarray(in_array)
    if reshape:
        if len(np.shape(output_array)) > 1:
            output_array = np.reshape(
                output_array, (np.shape(output_array)[0] * np.shape(output_array)[1], 1)
            )
            iflogger.info(np.shape(output_array))
    output_name = op.abspath(name + ".csv")
    np.savetxt(output_name, output_array, delimiter=",")
    return output_name


class Matlab2CSVInputSpec(TraitedSpec):
    in_file = File(exists=True, mandatory=True, desc="Input MATLAB .mat file")
    reshape_matrix = traits.Bool(
        True,
        usedefault=True,
        desc="The output of this interface is meant for R, so matrices will be\
 reshaped to vectors by default.",
    )


class Matlab2CSVOutputSpec(TraitedSpec):
    csv_files = OutputMultiPath(
        File(
            desc="Output CSV files for each variable saved in the input .mat\
 file"
        )
    )


class Matlab2CSV(BaseInterface):
    """
    Save the components of a MATLAB .mat file as a text file with
    comma-separated values (CSVs).
    CSV files are easily loaded in R, for use in statistical processing.
    For further information, see cran.r-project.org/doc/manuals/R-data.pdf

    Example
    -------

    >>> from nipype.algorithms import misc
    >>> mat2csv = misc.Matlab2CSV()
    >>> mat2csv.inputs.in_file = 'cmatrix.mat'
    >>> mat2csv.run() # doctest: +SKIP

    """

    input_spec = Matlab2CSVInputSpec
    output_spec = Matlab2CSVOutputSpec

    def _run_interface(self, runtime):
        import scipy.io as sio

        in_dict = sio.loadmat(op.abspath(self.inputs.in_file))

        # Check if the file has multiple variables in it. If it does, loop
        # through them and save them as individual CSV files.
        # If not, save the variable as a single CSV file using the input file
        # name and a .csv extension.

        saved_variables = list()
        for key in list(in_dict.keys()):
            # Keys starting with "__" are MATLAB metadata, not variables.
            if not key.startswith("__"):
                if isinstance(in_dict[key][0], np.ndarray):
                    saved_variables.append(key)
                else:
                    iflogger.info(
                        "One of the keys in the input file, %s, is "
                        "not a Numpy array",
                        key,
                    )

        if len(saved_variables) > 1:
            iflogger.info("%i variables found:", len(saved_variables))
            iflogger.info(saved_variables)
            for variable in saved_variables:
                iflogger.info(
                    "...Converting %s - type %s - to CSV",
                    variable,
                    type(in_dict[variable]),
                )
                _matlab2csv(in_dict[variable], variable, self.inputs.reshape_matrix)
        elif len(saved_variables) == 1:
            # A single variable is named after the input file, not the key.
            _, name, _ = split_filename(self.inputs.in_file)
            variable = saved_variables[0]
            iflogger.info(
                "Single variable found %s, type %s:", variable, type(in_dict[variable])
            )
            iflogger.info(
                "...Converting %s to CSV from %s", variable, self.inputs.in_file
            )
            _matlab2csv(in_dict[variable], name, self.inputs.reshape_matrix)
        else:
            iflogger.error("No values in the MATLAB file?!")
        return runtime

    def _list_outputs(self):
        import scipy.io as sio

        outputs = self.output_spec().get()
        # Re-derive the variable list to reconstruct the output filenames;
        # mirrors the scan performed in _run_interface.
        in_dict = sio.loadmat(op.abspath(self.inputs.in_file))
        saved_variables = list()
        for key in list(in_dict.keys()):
            if not key.startswith("__"):
                if isinstance(in_dict[key][0], np.ndarray):
                    saved_variables.append(key)
                else:
                    iflogger.error(
                        "One of the keys in the input file, %s, is "
                        "not a Numpy array",
                        key,
                    )

        if len(saved_variables) > 1:
            outputs["csv_files"] = replaceext(saved_variables, ".csv")
        elif len(saved_variables) == 1:
            _, name, ext = split_filename(self.inputs.in_file)
            outputs["csv_files"] = op.abspath(name + ".csv")
        else:
            iflogger.error("No values in the MATLAB file?!")
        return outputs


def merge_csvs(in_list):
    # Load each CSV, trying progressively more permissive strategies:
    # plain load, skip a header row, then skip the first column(s) when the
    # row label column is non-numeric. Stacks inputs along a third axis.
    for idx, in_file in enumerate(in_list):
        try:
            in_array = np.loadtxt(in_file, delimiter=",")
        except ValueError:
            try:
                in_array = np.loadtxt(in_file, delimiter=",", skiprows=1)
            except ValueError:
                with open(in_file, "r") as first:
                    header_line = first.readline()

                header_list = header_line.split(",")
                n_cols = len(header_list)
                try:
                    in_array = np.loadtxt(
                        in_file,
                        delimiter=",",
                        skiprows=1,
                        usecols=list(range(1, n_cols)),
                    )
                except ValueError:
                    in_array = np.loadtxt(
                        in_file,
                        delimiter=",",
                        skiprows=1,
                        usecols=list(range(1, n_cols - 1)),
                    )
        if idx == 0:
            out_array = in_array
        else:
            out_array = np.dstack((out_array, in_array))
    out_array = np.squeeze(out_array)
    iflogger.info("Final output array shape:")
    iflogger.info(np.shape(out_array))
    return out_array


def remove_identical_paths(in_files):
    # Derive short, distinguishing names by stripping the common path
    # prefix (and the "_subject_id_" marker) from each input file.
    import os.path as op

    from ..utils.filemanip import split_filename

    if len(in_files) > 1:
        out_names = list()
        commonprefix = op.commonprefix(in_files)
        lastslash = commonprefix.rfind("/")
        commonpath = commonprefix[0 : (lastslash + 1)]
        for fileidx, in_file in enumerate(in_files):
            path, name, ext = split_filename(in_file)
            in_file = op.join(path, name)
            name = in_file.replace(commonpath, "")
            name = name.replace("_subject_id_", "")
            out_names.append(name)
    else:
        path, name, ext = split_filename(in_files[0])
        out_names = [name]
    return out_names


def maketypelist(rowheadings, shape, extraheadingBool, extraheading):
    # Build the dtype spec for the structured output array: an optional
    # leading string column, one float column per data column, and an
    # optional trailing string column.
    typelist = []
    if rowheadings:
        typelist.append(("heading", "a40"))
    if len(shape) > 1:
        for idx in range(1, (min(shape) + 1)):
            typelist.append((str(idx), float))
    else:
        for idx in range(1, (shape[0] + 1)):
            typelist.append((str(idx), float))
    if extraheadingBool:
        typelist.append((extraheading, "a40"))
    iflogger.info(typelist)
    return typelist


def makefmtlist(output_array, typelist, rowheadingsBool, shape, extraheadingBool):
    # Populate the structured array and build the matching savetxt format
    # string ("%s" for string columns, "%f" for float columns).
    fmtlist = []
    if rowheadingsBool:
        fmtlist.append("%s")

    if len(shape) > 1:
        output = np.zeros(max(shape), typelist)
        for idx in range(1, min(shape) + 1):
            output[str(idx)] = output_array[:, idx - 1]
            fmtlist.append("%f")
    else:
        output = np.zeros(1, typelist)
        for idx in range(1, len(output_array) + 1):
            output[str(idx)] = output_array[idx - 1]
            fmtlist.append("%f")

    if extraheadingBool:
        fmtlist.append("%s")
    fmt = ",".join(fmtlist)
    return fmt, output


class MergeCSVFilesInputSpec(TraitedSpec):
    in_files = InputMultiPath(
        File(exists=True),
        mandatory=True,
        desc="Input comma-separated value (CSV) files",
    )
    out_file = File(
        "merged.csv", usedefault=True, desc="Output filename for merged CSV file"
    )
    column_headings = traits.List(
        traits.Str,
        desc="List of column headings to save in merged CSV file\
 (must be equal to number of input files). If left undefined, these\
 will be pulled from the input filenames.",
    )
    row_headings = traits.List(
        traits.Str,
        desc="List of row headings to save in merged CSV file\
 (must be equal to number of rows in the input files).",
    )
    row_heading_title = traits.Str(
        "label",
        usedefault=True,
        desc="Column heading for the row headings\
 added",
    )
    extra_column_heading = traits.Str(desc="New heading to add for the added field.")
    extra_field = traits.Str(
        desc="New field to add to each row. This is useful for saving the\
 group or subject ID in the file."
    )


class MergeCSVFilesOutputSpec(TraitedSpec):
    csv_file = File(desc="Output CSV file containing columns ")


class MergeCSVFiles(BaseInterface):
    """
    Merge several CSV files into a single CSV file.

    This interface is designed to facilitate data loading in the R environment.
    If provided, it will also incorporate column heading names into the
    resulting CSV file.
    CSV files are easily loaded in R, for use in statistical processing.
    For further information, see cran.r-project.org/doc/manuals/R-data.pdf

    Example
    -------

    >>> from nipype.algorithms import misc
    >>> mat2csv = misc.MergeCSVFiles()
    >>> mat2csv.inputs.in_files = ['degree.mat','clustering.mat']
    >>> mat2csv.inputs.column_headings = ['degree','clustering']
    >>> mat2csv.run() # doctest: +SKIP

    """

    input_spec = MergeCSVFilesInputSpec
    output_spec = MergeCSVFilesOutputSpec

    def _run_interface(self, runtime):
        extraheadingBool = False
        extraheading = ""
        rowheadingsBool = False
        """
        This block defines the column headings.
        """
        if isdefined(self.inputs.column_headings):
            iflogger.info("Column headings have been provided:")
            headings = self.inputs.column_headings
        else:
            iflogger.info("Column headings not provided! Pulled from input filenames:")
            headings = remove_identical_paths(self.inputs.in_files)

        if isdefined(self.inputs.extra_field):
            if isdefined(self.inputs.extra_column_heading):
                extraheading = self.inputs.extra_column_heading
                iflogger.info("Extra column heading provided: %s", extraheading)
            else:
                extraheading = "type"
                iflogger.info('Extra column heading was not defined. Using "type"')
            headings.append(extraheading)
            extraheadingBool = True

        if len(self.inputs.in_files) == 1:
            iflogger.warning("Only one file input!")

        # Assemble the quoted CSV header line, optionally prefixed with the
        # row-heading column title.
        if isdefined(self.inputs.row_headings):
            iflogger.info(
                'Row headings have been provided. Adding "labels"' "column header."
            )
            prefix = '"{p}","'.format(p=self.inputs.row_heading_title)
            csv_headings = prefix + '","'.join(itertools.chain(headings)) + '"\n'
            rowheadingsBool = True
        else:
            iflogger.info("Row headings have not been provided.")
            csv_headings = '"' + '","'.join(itertools.chain(headings)) + '"\n'

        iflogger.info("Final Headings:")
        iflogger.info(csv_headings)
        """
        Next we merge the arrays and define the output text file
        """
        output_array = merge_csvs(self.inputs.in_files)
        _, name, ext = split_filename(self.inputs.out_file)
        if not ext == ".csv":
            ext = ".csv"

        out_file = op.abspath(name + ext)
        with open(out_file, "w") as file_handle:
            file_handle.write(csv_headings)

        shape = np.shape(output_array)
        typelist = maketypelist(rowheadingsBool, shape, extraheadingBool, extraheading)
        fmt, output = makefmtlist(
            output_array, typelist, rowheadingsBool, shape, extraheadingBool
        )

        if rowheadingsBool:
            # Row headings are stored quoted, in a fixed-width byte field.
            row_heading_list = self.inputs.row_headings
            row_heading_list_with_quotes = []
            for row_heading in row_heading_list:
                row_heading_with_quotes = '"' + row_heading + '"'
                row_heading_list_with_quotes.append(row_heading_with_quotes)
            row_headings = np.array(row_heading_list_with_quotes, dtype="|S40")
            output["heading"] = row_headings

        if isdefined(self.inputs.extra_field):
            # Replicate the extra field once per output row.
            extrafieldlist = []
            if len(shape) > 1:
                mx = shape[0]
            else:
                mx = 1
            for idx in range(0, mx):
                extrafieldlist.append(self.inputs.extra_field)
            iflogger.info(len(extrafieldlist))
            output[extraheading] = extrafieldlist
        iflogger.info(output)
        iflogger.info(fmt)
        # Append the data rows below the already-written header line.
        with open(out_file, "a") as file_handle:
            np.savetxt(file_handle, output, fmt, delimiter=",")
        return runtime

    def _list_outputs(self):
        outputs = self.output_spec().get()
        _, name, ext = split_filename(self.inputs.out_file)
        if not ext == ".csv":
            ext = ".csv"
        out_file = op.abspath(name + ext)
        outputs["csv_file"] = out_file
        return outputs


class AddCSVColumnInputSpec(TraitedSpec):
    in_file = File(
        exists=True, mandatory=True, desc="Input comma-separated value (CSV) files"
    )
    out_file = File(
        "extra_heading.csv", usedefault=True, desc="Output filename for merged CSV file"
    )
    extra_column_heading = traits.Str(desc="New heading to add for the added field.")
    extra_field = traits.Str(
        desc="New field to add to each row. This is useful for saving the\
 group or subject ID in the file."
    )


class AddCSVColumnOutputSpec(TraitedSpec):
    csv_file = File(desc="Output CSV file containing columns ")


class AddCSVColumn(BaseInterface):
    """
    Short interface to add an extra column and field to a text file.

    Example
    -------

    >>> from nipype.algorithms import misc
    >>> addcol = misc.AddCSVColumn()
    >>> addcol.inputs.in_file = 'degree.csv'
    >>> addcol.inputs.extra_column_heading = 'group'
    >>> addcol.inputs.extra_field = 'male'
    >>> addcol.run() # doctest: +SKIP

    """

    input_spec = AddCSVColumnInputSpec
    output_spec = AddCSVColumnOutputSpec

    def _run_interface(self, runtime):
        # Copy the CSV line by line, appending the quoted heading to the
        # first (header) line and the extra field to every data line.
        in_file = open(self.inputs.in_file, "r")
        _, name, ext = split_filename(self.inputs.out_file)
        if not ext == ".csv":
            ext = ".csv"
        out_file = op.abspath(name + ext)

        out_file = open(out_file, "w")
        firstline = in_file.readline()
        firstline = firstline.replace("\n", "")
        new_firstline = firstline + ',"' + self.inputs.extra_column_heading + '"\n'
        out_file.write(new_firstline)
        for line in in_file:
            new_line = line.replace("\n", "")
            new_line = new_line + "," + self.inputs.extra_field + "\n"
            out_file.write(new_line)
        in_file.close()
        out_file.close()
        return runtime

    def _list_outputs(self):
        outputs = self.output_spec().get()
        _, name, ext = split_filename(self.inputs.out_file)
        if not ext == ".csv":
            ext = ".csv"
        out_file = op.abspath(name + ext)
        outputs["csv_file"] = out_file
        return outputs


class AddCSVRowInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
    in_file = File(mandatory=True, desc="Input comma-separated value (CSV) files")
    _outputs = traits.Dict(traits.Any, value={}, usedefault=True)

    def __setattr__(self, key, value):
        # Dynamically-added attributes (not declared traits) are mirrored
        # into the _outputs dict so they become CSV columns.
        if key not in self.copyable_trait_names():
            if not isdefined(value):
                super(AddCSVRowInputSpec, self).__setattr__(key, value)
            self._outputs[key] = value
        else:
            if key in self._outputs:
                self._outputs[key] = value
            super(AddCSVRowInputSpec, self).__setattr__(key, value)


class AddCSVRowOutputSpec(TraitedSpec):
    csv_file = File(desc="Output CSV file containing rows ")


class AddCSVRow(BaseInterface):
    """
    Simple interface to add an extra row to a CSV file.

    .. note:: Requires `pandas <http://pandas.pydata.org/>`_

    .. warning:: Multi-platform thread-safe execution is possible with the
        ``lockfile`` package. Please recall that (1) this module is alpha
        software; and (2) it should be installed for thread-safe writing.
        If lockfile is not installed, then the interface is not thread-safe.

    Example
    -------

    >>> from nipype.algorithms import misc
    >>> addrow = misc.AddCSVRow()
    >>> addrow.inputs.in_file = 'scores.csv'
    >>> addrow.inputs.si = 0.74
    >>> addrow.inputs.di = 0.93
    >>> addrow.inputs.subject_id = 'S400'
    >>> addrow.inputs.list_of_values = [ 0.4, 0.7, 0.3 ]
    >>> addrow.run() # doctest: +SKIP

    """

    input_spec = AddCSVRowInputSpec
    output_spec = AddCSVRowOutputSpec

    def __init__(self, infields=None, force_run=True, **kwargs):
        super(AddCSVRow, self).__init__(**kwargs)
        undefined_traits = {}
        self._infields = infields
        self._have_lock = False
        self._lock = None

        # Register any user-specified extra fields as dynamic input traits.
        if infields:
            for key in infields:
                self.inputs.add_trait(key, traits.Any)
                self.inputs._outputs[key] = Undefined
                undefined_traits[key] = Undefined
            self.inputs.trait_set(trait_change_notify=False, **undefined_traits)

        if force_run:
            self._always_run = True

    def _run_interface(self, runtime):
        try:
            import pandas as pd
        except ImportError as e:
            raise ImportError(
                "This interface requires pandas "
                "(http://pandas.pydata.org/) to run."
) from e try: from filelock import SoftFileLock self._have_lock = True except ImportError: from warnings import warn warn( ( "Python module filelock was not found: AddCSVRow will not be" " thread-safe in multi-processor execution" ) ) input_dict = {} for key, val in list(self.inputs._outputs.items()): # expand lists to several columns if key == "trait_added" and val in self.inputs.copyable_trait_names(): continue if isinstance(val, list): for i, v in enumerate(val): input_dict["%s_%d" % (key, i)] = v else: input_dict[key] = val df = pd.DataFrame([input_dict]) if self._have_lock: self._lock = SoftFileLock("%s.lock" % self.inputs.in_file) # Acquire lock self._lock.acquire() if op.exists(self.inputs.in_file): formerdf = pd.read_csv(self.inputs.in_file, index_col=0) df = pd.concat([formerdf, df], ignore_index=True) with open(self.inputs.in_file, "w") as f: df.to_csv(f) if self._have_lock: self._lock.release() return runtime def _list_outputs(self): outputs = self.output_spec().get() outputs["csv_file"] = self.inputs.in_file return outputs def _outputs(self): return self._add_output_traits(super(AddCSVRow, self)._outputs()) def _add_output_traits(self, base): return base class CalculateNormalizedMomentsInputSpec(TraitedSpec): timeseries_file = File( exists=True, mandatory=True, desc="Text file with timeseries in columns and timepoints in rows,\ whitespace separated", ) moment = traits.Int( mandatory=True, desc="Define which moment should be calculated, 3 for skewness, 4 for\ kurtosis.", ) class CalculateNormalizedMomentsOutputSpec(TraitedSpec): moments = traits.List(traits.Float(), desc="Moments") class CalculateNormalizedMoments(BaseInterface): """ Calculates moments of timeseries. 
Example ------- >>> from nipype.algorithms import misc >>> skew = misc.CalculateNormalizedMoments() >>> skew.inputs.moment = 3 >>> skew.inputs.timeseries_file = 'timeseries.txt' >>> skew.run() # doctest: +SKIP """ input_spec = CalculateNormalizedMomentsInputSpec output_spec = CalculateNormalizedMomentsOutputSpec def _run_interface(self, runtime): self._moments = calc_moments(self.inputs.timeseries_file, self.inputs.moment) return runtime def _list_outputs(self): outputs = self.output_spec().get() outputs["skewness"] = self._moments return outputs def calc_moments(timeseries_file, moment): """Returns nth moment (3 for skewness, 4 for kurtosis) of timeseries (list of values; one per timeseries). Keyword arguments: timeseries_file -- text file with white space separated timepoints in rows """ import scipy.stats as stats timeseries = np.genfromtxt(timeseries_file) m2 = stats.moment(timeseries, 2, axis=0) m3 = stats.moment(timeseries, moment, axis=0) zero = m2 == 0 return np.where(zero, 0, m3 / m2 ** (moment / 2.0)) class AddNoiseInputSpec(TraitedSpec): in_file = File( exists=True, mandatory=True, desc="input image that will be corrupted with noise", ) in_mask = File( exists=True, desc=("input mask, voxels outside this mask " "will be considered background"), ) snr = traits.Float(10.0, desc="desired output SNR in dB", usedefault=True) dist = traits.Enum( "normal", "rician", usedefault=True, mandatory=True, desc=("desired noise distribution"), ) bg_dist = traits.Enum( "normal", "rayleigh", usedefault=True, mandatory=True, desc=("desired noise distribution, currently " "only normal is implemented"), ) out_file = File(desc="desired output filename") class AddNoiseOutputSpec(TraitedSpec): out_file = File(exists=True, desc="corrupted image") class AddNoise(BaseInterface): """ Corrupts with noise the input image. 
Example ------- >>> from nipype.algorithms.misc import AddNoise >>> noise = AddNoise() >>> noise.inputs.in_file = 'T1.nii' >>> noise.inputs.in_mask = 'mask.nii' >>> noise.snr = 30.0 >>> noise.run() # doctest: +SKIP """ input_spec = AddNoiseInputSpec output_spec = AddNoiseOutputSpec def _run_interface(self, runtime): in_image = nb.load(self.inputs.in_file) in_data = in_image.get_fdata() snr = self.inputs.snr if isdefined(self.inputs.in_mask): in_mask = np.asanyarray(nb.load(self.inputs.in_mask).dataobj) else: in_mask = np.ones_like(in_data) result = self.gen_noise( in_data, mask=in_mask, snr_db=snr, dist=self.inputs.dist, bg_dist=self.inputs.bg_dist, ) res_im = nb.Nifti1Image(result, in_image.affine, in_image.header) res_im.to_filename(self._gen_output_filename()) return runtime def _gen_output_filename(self): if not isdefined(self.inputs.out_file): _, base, ext = split_filename(self.inputs.in_file) out_file = os.path.abspath("%s_SNR%03.2f%s" % (base, self.inputs.snr, ext)) else: out_file = self.inputs.out_file return out_file def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = self._gen_output_filename() return outputs def gen_noise(self, image, mask=None, snr_db=10.0, dist="normal", bg_dist="normal"): """ Generates a copy of an image with a certain amount of added gaussian noise (rayleigh for background in mask) """ from math import sqrt snr = sqrt(np.power(10.0, snr_db / 10.0)) if mask is None: mask = np.ones_like(image) else: mask[mask > 0] = 1 mask[mask < 1] = 0 if mask.ndim < image.ndim: mask = np.rollaxis(np.array([mask] * image.shape[3]), 0, 4) signal = image[mask > 0].reshape(-1) if dist == "normal": signal = signal - signal.mean() sigma_n = sqrt(signal.var() / snr) noise = np.random.normal(size=image.shape, scale=sigma_n) if (np.any(mask == 0)) and (bg_dist == "rayleigh"): bg_noise = np.random.rayleigh(size=image.shape, scale=sigma_n) noise[mask == 0] = bg_noise[mask == 0] im_noise = image + noise elif dist == "rician": 
sigma_n = signal.mean() / snr n_1 = np.random.normal(size=image.shape, scale=sigma_n) n_2 = np.random.normal(size=image.shape, scale=sigma_n) stde_1 = n_1 / sqrt(2.0) stde_2 = n_2 / sqrt(2.0) im_noise = np.sqrt((image + stde_1) ** 2 + (stde_2) ** 2) else: raise NotImplementedError( ("Only normal and rician distributions " "are supported") ) return im_noise class NormalizeProbabilityMapSetInputSpec(TraitedSpec): in_files = InputMultiPath( File(exists=True, mandatory=True, desc="The tpms to be normalized") ) in_mask = File(exists=True, desc="Masked voxels must sum up 1.0, 0.0 otherwise.") class NormalizeProbabilityMapSetOutputSpec(TraitedSpec): out_files = OutputMultiPath(File(exists=True), desc="normalized maps") class NormalizeProbabilityMapSet(BaseInterface): """ Returns the input tissue probability maps (tpms, aka volume fractions). The tissue probability maps are normalized to sum up 1.0 at each voxel within the mask. .. note:: Please recall this is not a spatial normalization algorithm Example ------- >>> from nipype.algorithms import misc >>> normalize = misc.NormalizeProbabilityMapSet() >>> normalize.inputs.in_files = [ 'tpm_00.nii.gz', 'tpm_01.nii.gz', \ 'tpm_02.nii.gz' ] >>> normalize.inputs.in_mask = 'tpms_msk.nii.gz' >>> normalize.run() # doctest: +SKIP """ input_spec = NormalizeProbabilityMapSetInputSpec output_spec = NormalizeProbabilityMapSetOutputSpec def _run_interface(self, runtime): mask = None if isdefined(self.inputs.in_mask): mask = self.inputs.in_mask self._out_filenames = normalize_tpms(self.inputs.in_files, mask) return runtime def _list_outputs(self): outputs = self.output_spec().get() outputs["out_files"] = self._out_filenames return outputs class SplitROIsInputSpec(TraitedSpec): in_file = File(exists=True, mandatory=True, desc="file to be splitted") in_mask = File(exists=True, desc="only process files inside mask") roi_size = traits.Tuple(traits.Int, traits.Int, traits.Int, desc="desired ROI size") class SplitROIsOutputSpec(TraitedSpec): 
out_files = OutputMultiPath(File(exists=True), desc="the resulting ROIs") out_masks = OutputMultiPath( File(exists=True), desc="a mask indicating valid values" ) out_index = OutputMultiPath( File(exists=True), desc="arrays keeping original locations" ) class SplitROIs(BaseInterface): """ Splits a 3D image in small chunks to enable parallel processing. ROIs keep time series structure in 4D images. Example ------- >>> from nipype.algorithms import misc >>> rois = misc.SplitROIs() >>> rois.inputs.in_file = 'diffusion.nii' >>> rois.inputs.in_mask = 'mask.nii' >>> rois.run() # doctest: +SKIP """ input_spec = SplitROIsInputSpec output_spec = SplitROIsOutputSpec def _run_interface(self, runtime): mask = None roisize = None self._outnames = {} if isdefined(self.inputs.in_mask): mask = self.inputs.in_mask if isdefined(self.inputs.roi_size): roisize = self.inputs.roi_size res = split_rois(self.inputs.in_file, mask, roisize) self._outnames["out_files"] = res[0] self._outnames["out_masks"] = res[1] self._outnames["out_index"] = res[2] return runtime def _list_outputs(self): outputs = self.output_spec().get() for k, v in list(self._outnames.items()): outputs[k] = v return outputs class MergeROIsInputSpec(TraitedSpec): in_files = InputMultiPath( File(exists=True, mandatory=True, desc="files to be re-merged") ) in_index = InputMultiPath( File(exists=True, mandatory=True), desc="array keeping original locations" ) in_reference = File(exists=True, desc="reference file") class MergeROIsOutputSpec(TraitedSpec): merged_file = File(exists=True, desc="the recomposed file") class MergeROIs(BaseInterface): """ Splits a 3D image in small chunks to enable parallel processing. ROIs keep time series structure in 4D images. 
Example ------- >>> from nipype.algorithms import misc >>> rois = misc.MergeROIs() >>> rois.inputs.in_files = ['roi%02d.nii' % i for i in range(1, 6)] >>> rois.inputs.in_reference = 'mask.nii' >>> rois.inputs.in_index = ['roi%02d_idx.npz' % i for i in range(1, 6)] >>> rois.run() # doctest: +SKIP """ input_spec = MergeROIsInputSpec output_spec = MergeROIsOutputSpec def _run_interface(self, runtime): res = merge_rois( self.inputs.in_files, self.inputs.in_index, self.inputs.in_reference ) self._merged = res return runtime def _list_outputs(self): outputs = self.output_spec().get() outputs["merged_file"] = self._merged return outputs def normalize_tpms(in_files, in_mask=None, out_files=None): """ Returns the input tissue probability maps (tpms, aka volume fractions) normalized to sum up 1.0 at each voxel within the mask. """ import nibabel as nb import numpy as np import os.path as op in_files = np.atleast_1d(in_files).tolist() if out_files is None: out_files = [] if len(out_files) != len(in_files): for i, finname in enumerate(in_files): fname, fext = op.splitext(op.basename(finname)) if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext out_file = op.abspath("%s_norm_%02d%s" % (fname, i, fext)) out_files += [out_file] imgs = [nb.load(fim) for fim in in_files] if len(in_files) == 1: img_data = imgs[0].get_fdata(dtype=np.float32) img_data[img_data > 0.0] = 1.0 hdr = imgs[0].header.copy() hdr.set_data_dtype(np.float32) nb.save(nb.Nifti1Image(img_data, imgs[0].affine, hdr), out_files[0]) return out_files[0] img_data = np.stack( [im.get_fdata(caching="unchanged", dtype=np.float32) for im in imgs] ) # img_data[img_data>1.0] = 1.0 img_data[img_data < 0.0] = 0.0 weights = np.sum(img_data, axis=0) msk = np.ones(imgs[0].shape) msk[weights <= 0] = 0 if in_mask is not None: msk = np.asanyarray(nb.load(in_mask).dataobj) msk[msk <= 0] = 0 msk[msk > 0] = 1 msk = np.ma.masked_equal(msk, 0) for i, out_file in enumerate(out_files): data = 
np.ma.masked_equal(img_data[i], 0) probmap = data / weights hdr = imgs[i].header.copy() hdr.set_data_dtype("float32") nb.save( nb.Nifti1Image(probmap.astype(np.float32), imgs[i].affine, hdr), out_file ) return out_files def split_rois(in_file, mask=None, roishape=None): """ Splits an image in ROIs for parallel processing """ import nibabel as nb import numpy as np from math import sqrt, ceil import os.path as op if roishape is None: roishape = (10, 10, 1) im = nb.load(in_file) imshape = im.shape dshape = imshape[:3] nvols = imshape[-1] roisize = roishape[0] * roishape[1] * roishape[2] droishape = (roishape[0], roishape[1], roishape[2], nvols) if mask is not None: mask = np.asanyarray(nb.load(mask).dataobj) mask[mask > 0] = 1 mask[mask < 1] = 0 else: mask = np.ones(dshape) mask = mask.reshape(-1).astype(np.uint8) nzels = np.nonzero(mask) els = np.sum(mask) nrois = int(ceil(els / float(roisize))) data = np.asanyarray(im.dataobj).reshape((mask.size, -1)) data = np.squeeze(data.take(nzels, axis=0)) nvols = data.shape[-1] roidefname = op.abspath("onesmask.nii.gz") nb.Nifti1Image(np.ones(roishape, dtype=np.uint8), None, None).to_filename( roidefname ) out_files = [] out_mask = [] out_idxs = [] for i in range(nrois): first = i * roisize last = (i + 1) * roisize fill = 0 if last > els: fill = last - els last = els droi = data[first:last, ...] 
iname = op.abspath("roi%010d_idx" % i) out_idxs.append(iname + ".npz") np.savez(iname, (nzels[0][first:last],)) if fill > 0: droi = np.vstack( (droi, np.zeros((int(fill), int(nvols)), dtype=np.float32)) ) partialmsk = np.ones((roisize,), dtype=np.uint8) partialmsk[-int(fill) :] = 0 partname = op.abspath("partialmask.nii.gz") nb.Nifti1Image(partialmsk.reshape(roishape), None, None).to_filename( partname ) out_mask.append(partname) else: out_mask.append(roidefname) fname = op.abspath("roi%010d.nii.gz" % i) nb.Nifti1Image(droi.reshape(droishape), None, None).to_filename(fname) out_files.append(fname) return out_files, out_mask, out_idxs def merge_rois(in_files, in_idxs, in_ref, dtype=None, out_file=None): """ Re-builds an image resulting from a parallelized processing """ import nibabel as nb import numpy as np import os.path as op import subprocess as sp if out_file is None: out_file = op.abspath("merged.nii.gz") if dtype is None: dtype = np.float32 # if file is compressed, uncompress using os # to avoid memory errors if op.splitext(in_ref)[1] == ".gz": try: iflogger.info("uncompress %s", in_ref) sp.check_call(["gunzip", in_ref], stdout=sp.PIPE, shell=True) in_ref = op.splitext(in_ref)[0] except: pass ref = nb.load(in_ref) aff = ref.affine hdr = ref.header.copy() rsh = ref.shape del ref npix = rsh[0] * rsh[1] * rsh[2] fcimg = nb.load(in_files[0]) if len(fcimg.shape) == 4: ndirs = fcimg.shape[-1] else: ndirs = 1 newshape = (rsh[0], rsh[1], rsh[2], ndirs) hdr.set_data_dtype(dtype) hdr.set_xyzt_units("mm", "sec") if ndirs < 300: data = np.zeros((npix, ndirs), dtype=dtype) for cname, iname in zip(in_files, in_idxs): f = np.load(iname) idxs = np.squeeze(f["arr_0"]) cdata = np.asanyarray(nb.load(cname).dataobj).reshape(-1, ndirs) nels = len(idxs) idata = (idxs,) try: data[idata, ...] = cdata[0:nels, ...] 
except: print( ( "Consistency between indexes and chunks was " "lost: data=%s, chunk=%s" ) % (str(data.shape), str(cdata.shape)) ) raise nb.Nifti1Image(data.reshape(newshape), aff, hdr).to_filename(out_file) else: hdr.set_data_shape(rsh[:3]) nii = [] for d in range(ndirs): fname = op.abspath("vol%06d.nii" % d) nb.Nifti1Image(np.zeros(rsh[:3]), aff, hdr).to_filename(fname) nii.append(fname) for cname, iname in zip(in_files, in_idxs): f = np.load(iname) idxs = np.squeeze(f["arr_0"]) for d, fname in enumerate(nii): data = np.asanyarray(nb.load(fname).dataobj).reshape(-1) cdata = nb.load(cname).dataobj[..., d].reshape(-1) nels = len(idxs) idata = (idxs,) data[idata] = cdata[0:nels] nb.Nifti1Image(data.reshape(rsh[:3]), aff, hdr).to_filename(fname) imgs = [nb.load(im) for im in nii] allim = nb.concat_images(imgs) allim.to_filename(out_file) return out_file class CalculateMedianInputSpec(BaseInterfaceInputSpec): in_files = InputMultiPath( File( exists=True, mandatory=True, desc="One or more realigned Nifti 4D timeseries", ) ) median_file = traits.Str(desc="Filename prefix to store median images") median_per_file = traits.Bool( False, usedefault=True, desc="Calculate a median file for each Nifti" ) class CalculateMedianOutputSpec(TraitedSpec): median_files = OutputMultiPath(File(exists=True), desc="One or more median images") class CalculateMedian(BaseInterface): """ Computes an average of the median across one or more 4D Nifti timeseries Example ------- >>> from nipype.algorithms.misc import CalculateMedian >>> mean = CalculateMedian() >>> mean.inputs.in_files = 'functional.nii' >>> mean.run() # doctest: +SKIP """ input_spec = CalculateMedianInputSpec output_spec = CalculateMedianOutputSpec def __init__(self, *args, **kwargs): super(CalculateMedian, self).__init__(*args, **kwargs) self._median_files = [] def _gen_fname(self, suffix, idx=None, ext=None): if idx: in_file = self.inputs.in_files[idx] else: if isinstance(self.inputs.in_files, list): in_file = 
self.inputs.in_files[0] else: in_file = self.inputs.in_files fname, in_ext = op.splitext(op.basename(in_file)) if in_ext == ".gz": fname, in_ext2 = op.splitext(fname) in_ext = in_ext2 + in_ext if ext is None: ext = in_ext if ext.startswith("."): ext = ext[1:] if self.inputs.median_file: outname = self.inputs.median_file else: outname = "{}_{}".format(fname, suffix) if idx: outname += str(idx) return op.abspath("{}.{}".format(outname, ext)) def _run_interface(self, runtime): total = None self._median_files = [] for idx, fname in enumerate(ensure_list(self.inputs.in_files)): img = nb.load(fname) data = np.median(img.get_fdata(), axis=3) if self.inputs.median_per_file: self._median_files.append(self._write_nifti(img, data, idx)) else: if total is None: total = data else: total += data if not self.inputs.median_per_file: self._median_files.append(self._write_nifti(img, total, idx)) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["median_files"] = self._median_files return outputs def _write_nifti(self, img, data, idx, suffix="median"): if self.inputs.median_per_file: median_img = nb.Nifti1Image(data, img.affine, img.header) filename = self._gen_fname(suffix, idx=idx) else: median_img = nb.Nifti1Image(data / (idx + 1), img.affine, img.header) filename = self._gen_fname(suffix) median_img.to_filename(filename) return filename # Deprecated interfaces ------------------------------------------------------ class Distance(nam.Distance): """Calculates distance between two volumes. .. deprecated:: 0.10.0 Use :py:class:`nipype.algorithms.metrics.Distance` instead. """ def __init__(self, **inputs): super(nam.Distance, self).__init__(**inputs) warnings.warn( ( "This interface has been deprecated since 0.10.0," " please use nipype.algorithms.metrics.Distance" ), DeprecationWarning, ) class Overlap(nam.Overlap): """Calculates various overlap measures between two maps. .. deprecated:: 0.10.0 Use :py:class:`nipype.algorithms.metrics.Overlap` instead. 
""" def __init__(self, **inputs): super(nam.Overlap, self).__init__(**inputs) warnings.warn( ( "This interface has been deprecated since 0.10.0," " please use nipype.algorithms.metrics.Overlap" ), DeprecationWarning, ) class FuzzyOverlap(nam.FuzzyOverlap): """Calculates various overlap measures between two maps, using a fuzzy definition. .. deprecated:: 0.10.0 Use :py:class:`nipype.algorithms.metrics.FuzzyOverlap` instead. """ def __init__(self, **inputs): super(nam.FuzzyOverlap, self).__init__(**inputs) warnings.warn( ( "This interface has been deprecated since 0.10.0," " please use nipype.algorithms.metrics.FuzzyOverlap" ), DeprecationWarning, ) class TSNR(confounds.TSNR): """ .. deprecated:: 0.12.1 Use :py:class:`nipype.algorithms.confounds.TSNR` instead """ def __init__(self, **inputs): super(confounds.TSNR, self).__init__(**inputs) warnings.warn( ( "This interface has been moved since 0.12.0," " please use nipype.algorithms.confounds.TSNR" ), UserWarning, ) nipype-1.7.0/nipype/algorithms/modelgen.py000066400000000000000000001144211413403311400206410ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ The modelgen module provides classes for specifying designs for individual subject analysis of task-based fMRI experiments. In particular it also includes algorithms for generating regressors for sparse and sparse-clustered acquisition experiments. """ from copy import deepcopy import csv, math, os from nibabel import load import numpy as np from ..interfaces.base import ( BaseInterface, TraitedSpec, InputMultiPath, traits, File, Bunch, BaseInterfaceInputSpec, isdefined, ) from ..utils.filemanip import ensure_list from ..utils.misc import normalize_mc_params from .. 
import config, logging iflogger = logging.getLogger("nipype.interface") def spm_hrf(RT, P=None, fMRI_T=16): """ python implementation of spm_hrf See ``spm_hrf`` for implementation details:: % RT - scan repeat time % p - parameters of the response function (two gamma % functions) % defaults (seconds) % p(0) - delay of response (relative to onset) 6 % p(1) - delay of undershoot (relative to onset) 16 % p(2) - dispersion of response 1 % p(3) - dispersion of undershoot 1 % p(4) - ratio of response to undershoot 6 % p(5) - onset (seconds) 0 % p(6) - length of kernel (seconds) 32 % % hrf - hemodynamic response function % p - parameters of the response function The following code using ``scipy.stats.distributions.gamma`` doesn't return the same result as the ``spm_Gpdf`` function:: hrf = gamma.pdf(u, p[0]/p[2], scale=dt/p[2]) - gamma.pdf(u, p[1]/p[3], scale=dt/p[3])/p[4] Example ------- >>> print(spm_hrf(2)) [ 0.00000000e+00 8.65660810e-02 3.74888236e-01 3.84923382e-01 2.16117316e-01 7.68695653e-02 1.62017720e-03 -3.06078117e-02 -3.73060781e-02 -3.08373716e-02 -2.05161334e-02 -1.16441637e-02 -5.82063147e-03 -2.61854250e-03 -1.07732374e-03 -4.10443522e-04 -1.46257507e-04] """ from scipy.special import gammaln p = np.array([6, 16, 1, 1, 6, 0, 32], dtype=float) if P is not None: p[0 : len(P)] = P _spm_Gpdf = lambda x, h, l: np.exp( h * np.log(l) + (h - 1) * np.log(x) - (l * x) - gammaln(h) ) # modelled hemodynamic response function - {mixture of Gammas} dt = RT / float(fMRI_T) u = np.arange(0, int(p[6] / dt + 1)) - p[5] / dt with np.errstate(divide="ignore"): # Known division-by-zero hrf = ( _spm_Gpdf(u, p[0] / p[2], dt / p[2]) - _spm_Gpdf(u, p[1] / p[3], dt / p[3]) / p[4] ) idx = np.arange(0, int((p[6] / RT) + 1)) * fMRI_T hrf = hrf[idx] hrf = hrf / np.sum(hrf) return hrf def orth(x_in, y_in): """Orthogonalize y_in with respect to x_in. 
>>> orth_expected = np.array([1.7142857142857144, 0.42857142857142883, \ -0.85714285714285676]) >>> err = np.abs(np.array(orth([1, 2, 3],[4, 5, 6]) - orth_expected)) >>> all(err < np.finfo(float).eps) True """ x = np.array(x_in)[:, None] y = np.array(y_in)[:, None] y = y - np.dot(x, np.dot(np.linalg.inv(np.dot(x.T, x)), np.dot(x.T, y))) if np.linalg.norm(y, 1) > np.exp(-32): y = y[:, 0].tolist() else: y = y_in return y def scale_timings(timelist, input_units, output_units, time_repetition): """ Scale timings given input and output units (scans/secs). Parameters ---------- timelist: list of times to scale input_units: 'secs' or 'scans' output_units: Ibid. time_repetition: float in seconds """ if input_units == output_units: _scalefactor = 1.0 if (input_units == "scans") and (output_units == "secs"): _scalefactor = time_repetition if (input_units == "secs") and (output_units == "scans"): _scalefactor = 1.0 / time_repetition timelist = [np.max([0.0, _scalefactor * t]) for t in timelist] return timelist def bids_gen_info( bids_event_files, condition_column="", amplitude_column=None, time_repetition=False ): """ Generate a subject_info structure from a list of BIDS .tsv event files. Parameters ---------- bids_event_files : list of str Filenames of BIDS .tsv event files containing columns including: 'onset', 'duration', and 'trial_type' or the `condition_column` value. condition_column : str Column of files in `bids_event_files` based on the values of which events will be sorted into different regressors amplitude_column : str Column of files in `bids_event_files` based on the values of which to apply amplitudes to events. If unspecified, all events will be represented with an amplitude of 1. 
Returns ------- subject_info: list of Bunch """ info = [] for bids_event_file in bids_event_files: with open(bids_event_file) as f: f_events = csv.DictReader(f, skipinitialspace=True, delimiter="\t") events = [{k: v for k, v in row.items()} for row in f_events] if not condition_column: condition_column = "_trial_type" for i in events: i.update({condition_column: "ev0"}) conditions = sorted(set([i[condition_column] for i in events])) runinfo = Bunch(conditions=[], onsets=[], durations=[], amplitudes=[]) for condition in conditions: selected_events = [i for i in events if i[condition_column] == condition] onsets = [float(i["onset"]) for i in selected_events] durations = [float(i["duration"]) for i in selected_events] if time_repetition: decimals = math.ceil(-math.log10(time_repetition)) onsets = [np.round(i, decimals) for i in onsets] durations = [np.round(i, decimals) for i in durations] runinfo.conditions.append(condition) runinfo.onsets.append(onsets) runinfo.durations.append(durations) try: amplitudes = [float(i[amplitude_column]) for i in selected_events] runinfo.amplitudes.append(amplitudes) except KeyError: runinfo.amplitudes.append([1] * len(onsets)) info.append(runinfo) return info def gen_info(run_event_files): """Generate subject_info structure from a list of event files.""" info = [] for i, event_files in enumerate(run_event_files): runinfo = Bunch(conditions=[], onsets=[], durations=[], amplitudes=[]) for event_file in event_files: _, name = os.path.split(event_file) if ".run" in name: name, _ = name.split(".run%03d" % (i + 1)) elif ".txt" in name: name, _ = name.split(".txt") runinfo.conditions.append(name) event_info = np.atleast_2d(np.loadtxt(event_file)) runinfo.onsets.append(event_info[:, 0].tolist()) if event_info.shape[1] > 1: runinfo.durations.append(event_info[:, 1].tolist()) else: runinfo.durations.append([0]) if event_info.shape[1] > 2: runinfo.amplitudes.append(event_info[:, 2].tolist()) else: delattr(runinfo, "amplitudes") 
info.append(runinfo) return info class SpecifyModelInputSpec(BaseInterfaceInputSpec): subject_info = InputMultiPath( Bunch, mandatory=True, xor=["subject_info", "event_files", "bids_event_file"], desc="Bunch or List(Bunch) subject-specific " "condition information. see " ":ref:`nipype.algorithms.modelgen.SpecifyModel` or for details", ) event_files = InputMultiPath( traits.List(File(exists=True)), mandatory=True, xor=["subject_info", "event_files", "bids_event_file"], desc="List of event description files 1, 2 or 3 " "column format corresponding to onsets, " "durations and amplitudes", ) bids_event_file = InputMultiPath( File(exists=True), mandatory=True, xor=["subject_info", "event_files", "bids_event_file"], desc="TSV event file containing common BIDS fields: `onset`," "`duration`, and categorization and amplitude columns", ) bids_condition_column = traits.Str( default_value="trial_type", usedefault=True, desc="Column of the file passed to ``bids_event_file`` to the " "unique values of which events will be assigned" "to regressors", ) bids_amplitude_column = traits.Str( desc="Column of the file passed to ``bids_event_file`` " "according to which to assign amplitudes to events" ) realignment_parameters = InputMultiPath( File(exists=True), desc="Realignment parameters returned by motion correction algorithm", copyfile=False, ) parameter_source = traits.Enum( "SPM", "FSL", "AFNI", "FSFAST", "NIPY", usedefault=True, desc="Source of motion parameters", ) outlier_files = InputMultiPath( File(exists=True), desc="Files containing scan outlier indices that should be tossed", copyfile=False, ) functional_runs = InputMultiPath( traits.Either(traits.List(File(exists=True)), File(exists=True)), mandatory=True, desc="Data files for model. List of 4D " "files or list of list of 3D " "files per session", copyfile=False, ) input_units = traits.Enum( "secs", "scans", mandatory=True, desc="Units of event onsets and durations (secs " "or scans). 
Output units are always in secs", ) high_pass_filter_cutoff = traits.Float( mandatory=True, desc="High-pass filter cutoff in secs" ) time_repetition = traits.Float( mandatory=True, desc="Time between the start of one volume " "to the start of the next image volume.", ) # Not implemented yet # polynomial_order = traits.Range(0, low=0, # desc ='Number of polynomial functions to model high pass filter.') class SpecifyModelOutputSpec(TraitedSpec): session_info = traits.Any(desc="Session info for level1designs") class SpecifyModel(BaseInterface): """ Makes a model specification compatible with spm/fsl designers. The subject_info field should contain paradigm information in the form of a Bunch or a list of Bunch. The Bunch should contain the following information:: [Mandatory] conditions : list of names onsets : lists of onsets corresponding to each condition durations : lists of durations corresponding to each condition. Should be left to a single 0 if all events are being modelled as impulses. [Optional] regressor_names : list of str list of names corresponding to each column. Should be None if automatically assigned. regressors : list of lists values for each regressor - must correspond to the number of volumes in the functional run amplitudes : lists of amplitudes for each event. This will be ignored by SPM's Level1Design. The following two (tmod, pmod) will be ignored by any Level1Design class other than SPM: tmod : lists of conditions that should be temporally modulated. Should default to None if not being used. pmod : list of Bunch corresponding to conditions - name : name of parametric modulator - param : values of the modulator - poly : degree of modulation Alternatively, you can provide information through event files. The event files have to be in 1, 2 or 3 column format with the columns corresponding to Onsets, Durations and Amplitudes and they have to have the name event_name.runXXX... e.g.: Words.run001.txt. 
The event_name part will be used to create the condition names. Examples -------- >>> from nipype.algorithms import modelgen >>> from nipype.interfaces.base import Bunch >>> s = modelgen.SpecifyModel() >>> s.inputs.input_units = 'secs' >>> s.inputs.functional_runs = ['functional2.nii', 'functional3.nii'] >>> s.inputs.time_repetition = 6 >>> s.inputs.high_pass_filter_cutoff = 128. >>> evs_run2 = Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]], durations=[[1]]) >>> evs_run3 = Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]]) >>> s.inputs.subject_info = [evs_run2, evs_run3] >>> # Using pmod >>> evs_run2 = Bunch(conditions=['cond1', 'cond2'], onsets=[[2, 50], [100, 180]], \ durations=[[0], [0]], pmod=[Bunch(name=['amp'], poly=[2], param=[[1, 2]]), \ None]) >>> evs_run3 = Bunch(conditions=['cond1', 'cond2'], onsets=[[20, 120], [80, 160]], \ durations=[[0], [0]], pmod=[Bunch(name=['amp'], poly=[2], param=[[1, 2]]), \ None]) >>> s.inputs.subject_info = [evs_run2, evs_run3] """ input_spec = SpecifyModelInputSpec output_spec = SpecifyModelOutputSpec def _generate_standard_design( self, infolist, functional_runs=None, realignment_parameters=None, outliers=None ): """Generate a standard design matrix paradigm given information about each run.""" sessinfo = [] output_units = "secs" if "output_units" in self.inputs.traits(): output_units = self.inputs.output_units for i, info in enumerate(infolist): sessinfo.insert(i, dict(cond=[])) if isdefined(self.inputs.high_pass_filter_cutoff): sessinfo[i]["hpf"] = float(self.inputs.high_pass_filter_cutoff) if hasattr(info, "conditions") and info.conditions is not None: for cid, cond in enumerate(info.conditions): sessinfo[i]["cond"].insert(cid, dict()) sessinfo[i]["cond"][cid]["name"] = info.conditions[cid] scaled_onset = scale_timings( info.onsets[cid], self.inputs.input_units, output_units, self.inputs.time_repetition, ) sessinfo[i]["cond"][cid]["onset"] = scaled_onset scaled_duration = scale_timings( 
info.durations[cid], self.inputs.input_units, output_units, self.inputs.time_repetition, ) sessinfo[i]["cond"][cid]["duration"] = scaled_duration if hasattr(info, "amplitudes") and info.amplitudes: sessinfo[i]["cond"][cid]["amplitudes"] = info.amplitudes[cid] if hasattr(info, "tmod") and info.tmod and len(info.tmod) > cid: sessinfo[i]["cond"][cid]["tmod"] = info.tmod[cid] if hasattr(info, "pmod") and info.pmod and len(info.pmod) > cid: if info.pmod[cid]: sessinfo[i]["cond"][cid]["pmod"] = [] for j, name in enumerate(info.pmod[cid].name): sessinfo[i]["cond"][cid]["pmod"].insert(j, {}) sessinfo[i]["cond"][cid]["pmod"][j]["name"] = name sessinfo[i]["cond"][cid]["pmod"][j]["poly"] = info.pmod[ cid ].poly[j] sessinfo[i]["cond"][cid]["pmod"][j][ "param" ] = info.pmod[cid].param[j] sessinfo[i]["regress"] = [] if hasattr(info, "regressors") and info.regressors is not None: for j, r in enumerate(info.regressors): sessinfo[i]["regress"].insert(j, dict(name="", val=[])) if ( hasattr(info, "regressor_names") and info.regressor_names is not None ): sessinfo[i]["regress"][j]["name"] = info.regressor_names[j] else: sessinfo[i]["regress"][j]["name"] = "UR%d" % (j + 1) sessinfo[i]["regress"][j]["val"] = info.regressors[j] sessinfo[i]["scans"] = functional_runs[i] if realignment_parameters is not None: for i, rp in enumerate(realignment_parameters): mc = realignment_parameters[i] for col in range(mc.shape[1]): colidx = len(sessinfo[i]["regress"]) sessinfo[i]["regress"].insert(colidx, dict(name="", val=[])) sessinfo[i]["regress"][colidx]["name"] = "Realign%d" % (col + 1) sessinfo[i]["regress"][colidx]["val"] = mc[:, col].tolist() if outliers is not None: for i, out in enumerate(outliers): numscans = 0 for f in ensure_list(sessinfo[i]["scans"]): shape = load(f).shape if len(shape) == 3 or shape[3] == 1: iflogger.warning( "You are using 3D instead of 4D " "files. Are you sure this was " "intended?" 
) numscans += 1 else: numscans += shape[3] for j, scanno in enumerate(out): colidx = len(sessinfo[i]["regress"]) sessinfo[i]["regress"].insert(colidx, dict(name="", val=[])) sessinfo[i]["regress"][colidx]["name"] = "Outlier%d" % (j + 1) sessinfo[i]["regress"][colidx]["val"] = np.zeros((1, numscans))[ 0 ].tolist() sessinfo[i]["regress"][colidx]["val"][int(scanno)] = 1 return sessinfo def _generate_design(self, infolist=None): """Generate design specification for a typical fmri paradigm""" realignment_parameters = [] if isdefined(self.inputs.realignment_parameters): for parfile in self.inputs.realignment_parameters: realignment_parameters.append( np.apply_along_axis( func1d=normalize_mc_params, axis=1, arr=np.loadtxt(parfile), source=self.inputs.parameter_source, ) ) outliers = [] if isdefined(self.inputs.outlier_files): for filename in self.inputs.outlier_files: try: outindices = np.loadtxt(filename, dtype=int) except IOError: outliers.append([]) else: if outindices.size == 1: outliers.append([outindices.tolist()]) else: outliers.append(outindices.tolist()) if infolist is None: if isdefined(self.inputs.subject_info): infolist = self.inputs.subject_info elif isdefined(self.inputs.event_files): infolist = gen_info(self.inputs.event_files) elif isdefined(self.inputs.bids_event_file): infolist = bids_gen_info( self.inputs.bids_event_file, self.inputs.bids_condition_column, self.inputs.bids_amplitude_column, self.inputs.time_repetition, ) self._sessinfo = self._generate_standard_design( infolist, functional_runs=self.inputs.functional_runs, realignment_parameters=realignment_parameters, outliers=outliers, ) def _run_interface(self, runtime): """ """ self._sessioninfo = None self._generate_design() return runtime def _list_outputs(self): outputs = self._outputs().get() if not hasattr(self, "_sessinfo"): self._generate_design() outputs["session_info"] = self._sessinfo return outputs class SpecifySPMModelInputSpec(SpecifyModelInputSpec): concatenate_runs = traits.Bool( 
False, usedefault=True, desc="Concatenate all runs to look like a single session.", ) output_units = traits.Enum( "secs", "scans", usedefault=True, desc="Units of design event onsets and durations (secs or scans)", ) class SpecifySPMModel(SpecifyModel): """Add SPM specific options to SpecifyModel Adds: - concatenate_runs - output_units Examples -------- >>> from nipype.algorithms import modelgen >>> from nipype.interfaces.base import Bunch >>> s = modelgen.SpecifySPMModel() >>> s.inputs.input_units = 'secs' >>> s.inputs.output_units = 'scans' >>> s.inputs.high_pass_filter_cutoff = 128. >>> s.inputs.functional_runs = ['functional2.nii', 'functional3.nii'] >>> s.inputs.time_repetition = 6 >>> s.inputs.concatenate_runs = True >>> evs_run2 = Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]], durations=[[1]]) >>> evs_run3 = Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]]) >>> s.inputs.subject_info = [evs_run2, evs_run3] """ input_spec = SpecifySPMModelInputSpec def _concatenate_info(self, infolist): nscans = [] for i, f in enumerate(self.inputs.functional_runs): if isinstance(f, list): numscans = len(f) elif isinstance(f, (str, bytes)): img = load(f) numscans = img.shape[3] else: raise Exception("Functional input not specified correctly") nscans.insert(i, numscans) # now combine all fields into 1 # names, onsets, durations, amplitudes, pmod, tmod, regressor_names, # regressors infoout = infolist[0] for j, val in enumerate(infolist[0].durations): if len(infolist[0].onsets[j]) > 1 and len(val) == 1: infoout.durations[j] = infolist[0].durations[j] * len( infolist[0].onsets[j] ) for i, info in enumerate(infolist[1:]): # info.[conditions, tmod] remain the same if info.onsets: for j, val in enumerate(info.onsets): if self.inputs.input_units == "secs": onsets = np.array( info.onsets[j] ) + self.inputs.time_repetition * sum(nscans[0 : (i + 1)]) infoout.onsets[j].extend(onsets.tolist()) else: onsets = np.array(info.onsets[j]) + sum(nscans[0 : (i + 
1)]) infoout.onsets[j].extend(onsets.tolist()) for j, val in enumerate(info.durations): if len(info.onsets[j]) > 1 and len(val) == 1: infoout.durations[j].extend( info.durations[j] * len(info.onsets[j]) ) elif len(info.onsets[j]) == len(val): infoout.durations[j].extend(info.durations[j]) else: raise ValueError( "Mismatch in number of onsets and \ durations for run {0}, condition \ {1}".format( i + 2, j + 1 ) ) if hasattr(info, "amplitudes") and info.amplitudes: for j, val in enumerate(info.amplitudes): infoout.amplitudes[j].extend(info.amplitudes[j]) if hasattr(info, "pmod") and info.pmod: for j, val in enumerate(info.pmod): if val: for key, data in enumerate(val.param): infoout.pmod[j].param[key].extend(data) if hasattr(info, "regressors") and info.regressors: # assumes same ordering of regressors across different # runs and the same names for the regressors for j, v in enumerate(info.regressors): infoout.regressors[j].extend(info.regressors[j]) # insert session regressors if not hasattr(infoout, "regressors") or not infoout.regressors: infoout.regressors = [] onelist = np.zeros((1, sum(nscans))) onelist[0, sum(nscans[0:i]) : sum(nscans[0 : (i + 1)])] = 1 infoout.regressors.insert(len(infoout.regressors), onelist.tolist()[0]) return [infoout], nscans def _generate_design(self, infolist=None): if ( not isdefined(self.inputs.concatenate_runs) or not self.inputs.concatenate_runs ): super(SpecifySPMModel, self)._generate_design(infolist=infolist) return if isdefined(self.inputs.subject_info): infolist = self.inputs.subject_info else: infolist = gen_info(self.inputs.event_files) concatlist, nscans = self._concatenate_info(infolist) functional_runs = [ensure_list(self.inputs.functional_runs)] realignment_parameters = [] if isdefined(self.inputs.realignment_parameters): realignment_parameters = [] for parfile in self.inputs.realignment_parameters: mc = np.apply_along_axis( func1d=normalize_mc_params, axis=1, arr=np.loadtxt(parfile), source=self.inputs.parameter_source, 
) if not realignment_parameters: realignment_parameters.insert(0, mc) else: realignment_parameters[0] = np.concatenate( (realignment_parameters[0], mc) ) outliers = [] if isdefined(self.inputs.outlier_files): outliers = [[]] for i, filename in enumerate(self.inputs.outlier_files): try: out = np.loadtxt(filename) except IOError: iflogger.warning("Error reading outliers file %s", filename) out = np.array([]) if out.size > 0: iflogger.debug( "fname=%s, out=%s, nscans=%d", filename, out, sum(nscans[0:i]) ) sumscans = out.astype(int) + sum(nscans[0:i]) if out.size == 1: outliers[0] += [np.array(sumscans, dtype=int).tolist()] else: outliers[0] += np.array(sumscans, dtype=int).tolist() self._sessinfo = self._generate_standard_design( concatlist, functional_runs=functional_runs, realignment_parameters=realignment_parameters, outliers=outliers, ) class SpecifySparseModelInputSpec(SpecifyModelInputSpec): time_acquisition = traits.Float( 0, mandatory=True, desc="Time in seconds to acquire a single image volume" ) volumes_in_cluster = traits.Range( 1, usedefault=True, desc="Number of scan volumes in a cluster" ) model_hrf = traits.Bool(desc="Model sparse events with hrf") stimuli_as_impulses = traits.Bool( True, desc="Treat each stimulus to be impulse-like", usedefault=True ) use_temporal_deriv = traits.Bool( requires=["model_hrf"], desc="Create a temporal derivative in addition to regular regressor", ) scale_regressors = traits.Bool( True, desc="Scale regressors by the peak", usedefault=True ) scan_onset = traits.Float( 0.0, desc="Start of scanning relative to onset of run in secs", usedefault=True ) save_plot = traits.Bool( desc=("Save plot of sparse design calculation (requires matplotlib)") ) class SpecifySparseModelOutputSpec(SpecifyModelOutputSpec): sparse_png_file = File(desc="PNG file showing sparse design") sparse_svg_file = File(desc="SVG file showing sparse design") class SpecifySparseModel(SpecifyModel): """Specify a sparse model that is compatible with SPM/FSL 
designers [1]_. Examples -------- >>> from nipype.algorithms import modelgen >>> from nipype.interfaces.base import Bunch >>> s = modelgen.SpecifySparseModel() >>> s.inputs.input_units = 'secs' >>> s.inputs.functional_runs = ['functional2.nii', 'functional3.nii'] >>> s.inputs.time_repetition = 6 >>> s.inputs.time_acquisition = 2 >>> s.inputs.high_pass_filter_cutoff = 128. >>> s.inputs.model_hrf = True >>> evs_run2 = Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]], ... durations=[[1]]) >>> evs_run3 = Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], ... durations=[[1]]) >>> s.inputs.subject_info = [evs_run2, evs_run3] # doctest: +SKIP References ---------- .. [1] Perrachione TK and Ghosh SS (2013) Optimized design and analysis of sparse-sampling fMRI experiments. Front. Neurosci. 7:55 http://journal.frontiersin.org/Journal/10.3389/fnins.2013.00055/abstract """ input_spec = SpecifySparseModelInputSpec output_spec = SpecifySparseModelOutputSpec def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): """Generates a regressor for a sparse/clustered-sparse acquisition""" bplot = False if isdefined(self.inputs.save_plot) and self.inputs.save_plot: bplot = True import matplotlib matplotlib.use(config.get("execution", "matplotlib_backend")) import matplotlib.pyplot as plt TR = int(np.round(self.inputs.time_repetition * 1000)) # in ms if self.inputs.time_acquisition: TA = int(np.round(self.inputs.time_acquisition * 1000)) # in ms else: TA = TR # in ms nvol = self.inputs.volumes_in_cluster SCANONSET = np.round(self.inputs.scan_onset * 1000) total_time = TR * (nscans - nvol) / nvol + TA * nvol + SCANONSET SILENCE = TR - TA * nvol dt = TA / 10.0 durations = np.round(np.array(i_durations) * 1000) if len(durations) == 1: durations = durations * np.ones((len(i_onsets))) onsets = np.round(np.array(i_onsets) * 1000) dttemp = math.gcd(TA, math.gcd(SILENCE, TR)) if dt < dttemp: if dttemp % dt != 0: dt = float(math.gcd(dttemp, int(dt))) if dt < 1: raise 
Exception("Time multiple less than 1 ms") iflogger.info("Setting dt = %d ms\n", dt) npts = int(np.ceil(total_time / dt)) times = np.arange(0, total_time, dt) * 1e-3 timeline = np.zeros((npts)) timeline2 = np.zeros((npts)) if isdefined(self.inputs.model_hrf) and self.inputs.model_hrf: hrf = spm_hrf(dt * 1e-3) reg_scale = 1.0 if self.inputs.scale_regressors: boxcar = np.zeros(int(50.0 * 1e3 / dt)) if self.inputs.stimuli_as_impulses: boxcar[int(1.0 * 1e3 / dt)] = 1.0 reg_scale = float(TA / dt) else: boxcar[int(1.0 * 1e3 / dt) : int(2.0 * 1e3 / dt)] = 1.0 if isdefined(self.inputs.model_hrf) and self.inputs.model_hrf: response = np.convolve(boxcar, hrf) reg_scale = 1.0 / response.max() iflogger.info( "response sum: %.4f max: %.4f", response.sum(), response.max() ) iflogger.info("reg_scale: %.4f", reg_scale) for i, t in enumerate(onsets): idx = int(np.round(t / dt)) if i_amplitudes: if len(i_amplitudes) > 1: timeline2[idx] = i_amplitudes[i] else: timeline2[idx] = i_amplitudes[0] else: timeline2[idx] = 1 if bplot: plt.subplot(4, 1, 1) plt.plot(times, timeline2) if not self.inputs.stimuli_as_impulses: if durations[i] == 0: durations[i] = TA * nvol stimdur = np.ones((int(durations[i] / dt))) timeline2 = np.convolve(timeline2, stimdur)[0 : len(timeline2)] timeline += timeline2 timeline2[:] = 0 if bplot: plt.subplot(4, 1, 2) plt.plot(times, timeline) if isdefined(self.inputs.model_hrf) and self.inputs.model_hrf: timeline = np.convolve(timeline, hrf)[0 : len(timeline)] if ( isdefined(self.inputs.use_temporal_deriv) and self.inputs.use_temporal_deriv ): # create temporal deriv timederiv = np.concatenate(([0], np.diff(timeline))) if bplot: plt.subplot(4, 1, 3) plt.plot(times, timeline) if ( isdefined(self.inputs.use_temporal_deriv) and self.inputs.use_temporal_deriv ): plt.plot(times, timederiv) # sample timeline timeline2 = np.zeros((npts)) reg = [] regderiv = [] for i, trial in enumerate(np.arange(nscans) / nvol): scanstart = int((SCANONSET + trial * TR + (i % nvol) * TA) / 
dt) scanidx = scanstart + np.arange(int(TA / dt)) timeline2[scanidx] = np.max(timeline) reg.insert(i, np.mean(timeline[scanidx]) * reg_scale) if ( isdefined(self.inputs.use_temporal_deriv) and self.inputs.use_temporal_deriv ): regderiv.insert(i, np.mean(timederiv[scanidx]) * reg_scale) if isdefined(self.inputs.use_temporal_deriv) and self.inputs.use_temporal_deriv: iflogger.info("orthoganlizing derivative w.r.t. main regressor") regderiv = orth(reg, regderiv) if bplot: plt.subplot(4, 1, 3) plt.plot(times, timeline2) plt.subplot(4, 1, 4) plt.bar(np.arange(len(reg)), reg, width=0.5) plt.savefig("sparse.png") plt.savefig("sparse.svg") if regderiv: return [reg, regderiv] else: return reg def _cond_to_regress(self, info, nscans): """Converts condition information to full regressors""" reg = [] regnames = [] for i, cond in enumerate(info.conditions): if hasattr(info, "amplitudes") and info.amplitudes: amplitudes = info.amplitudes[i] else: amplitudes = None regnames.insert(len(regnames), cond) scaled_onsets = scale_timings( info.onsets[i], self.inputs.input_units, "secs", self.inputs.time_repetition, ) scaled_durations = scale_timings( info.durations[i], self.inputs.input_units, "secs", self.inputs.time_repetition, ) regressor = self._gen_regress( scaled_onsets, scaled_durations, amplitudes, nscans ) if ( isdefined(self.inputs.use_temporal_deriv) and self.inputs.use_temporal_deriv ): reg.insert(len(reg), regressor[0]) regnames.insert(len(regnames), cond + "_D") reg.insert(len(reg), regressor[1]) else: reg.insert(len(reg), regressor) # need to deal with temporal and parametric modulators # for sparse-clustered acquisitions enter T1-effect regressors nvol = self.inputs.volumes_in_cluster if nvol > 1: for i in range(nvol - 1): treg = np.zeros((nscans / nvol, nvol)) treg[:, i] = 1 reg.insert(len(reg), treg.ravel().tolist()) regnames.insert(len(regnames), "T1effect_%d" % i) return reg, regnames def _generate_clustered_design(self, infolist): """Generates condition information 
for sparse-clustered designs. """ infoout = deepcopy(infolist) for i, info in enumerate(infolist): infoout[i].conditions = None infoout[i].onsets = None infoout[i].durations = None if info.conditions: img = load(self.inputs.functional_runs[i]) nscans = img.shape[3] reg, regnames = self._cond_to_regress(info, nscans) if hasattr(infoout[i], "regressors") and infoout[i].regressors: if not infoout[i].regressor_names: infoout[i].regressor_names = [ "R%d" % j for j in range(len(infoout[i].regressors)) ] else: infoout[i].regressors = [] infoout[i].regressor_names = [] for j, r in enumerate(reg): regidx = len(infoout[i].regressors) infoout[i].regressor_names.insert(regidx, regnames[j]) infoout[i].regressors.insert(regidx, r) return infoout def _generate_design(self, infolist=None): if isdefined(self.inputs.subject_info): infolist = self.inputs.subject_info else: infolist = gen_info(self.inputs.event_files) sparselist = self._generate_clustered_design(infolist) super(SpecifySparseModel, self)._generate_design(infolist=sparselist) def _list_outputs(self): outputs = self._outputs().get() if not hasattr(self, "_sessinfo"): self._generate_design() outputs["session_info"] = self._sessinfo if isdefined(self.inputs.save_plot) and self.inputs.save_plot: outputs["sparse_png_file"] = os.path.join(os.getcwd(), "sparse.png") outputs["sparse_svg_file"] = os.path.join(os.getcwd(), "sparse.svg") return outputs nipype-1.7.0/nipype/algorithms/rapidart.py000066400000000000000000000723311413403311400206600ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ The rapidart module provides routines for artifact detection and region of interest analysis. 
These functions include: * ArtifactDetect: performs artifact detection on functional images * StimulusCorrelation: determines correlation between stimuli schedule and movement/intensity parameters """ import os from copy import deepcopy from nibabel import load, funcs, Nifti1Image import numpy as np from ..interfaces.base import ( BaseInterface, traits, InputMultiPath, OutputMultiPath, TraitedSpec, File, BaseInterfaceInputSpec, isdefined, ) from ..utils.filemanip import ensure_list, save_json, split_filename from ..utils.misc import find_indices, normalize_mc_params from .. import logging, config iflogger = logging.getLogger("nipype.interface") def _get_affine_matrix(params, source): """Return affine matrix given a set of translation and rotation parameters params : np.array (upto 12 long) in native package format source : the package that generated the parameters supports SPM, AFNI, FSFAST, FSL, NIPY """ if source == "NIPY": # nipy does not store typical euler angles, use nipy to convert from nipy.algorithms.registration import to_matrix44 return to_matrix44(params) params = normalize_mc_params(params, source) # process for FSL, SPM, AFNI and FSFAST rotfunc = lambda x: np.array([[np.cos(x), np.sin(x)], [-np.sin(x), np.cos(x)]]) q = np.array([0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0]) if len(params) < 12: params = np.hstack((params, q[len(params) :])) params.shape = (len(params),) # Translation T = np.eye(4) T[0:3, -1] = params[0:3] # Rotation Rx = np.eye(4) Rx[1:3, 1:3] = rotfunc(params[3]) Ry = np.eye(4) Ry[(0, 0, 2, 2), (0, 2, 0, 2)] = rotfunc(params[4]).ravel() Rz = np.eye(4) Rz[0:2, 0:2] = rotfunc(params[5]) # Scaling S = np.eye(4) S[0:3, 0:3] = np.diag(params[6:9]) # Shear Sh = np.eye(4) Sh[(0, 0, 1), (1, 2, 2)] = params[9:12] if source in ("AFNI", "FSFAST"): return np.dot(T, np.dot(Ry, np.dot(Rx, np.dot(Rz, np.dot(S, Sh))))) return np.dot(T, np.dot(Rx, np.dot(Ry, np.dot(Rz, np.dot(S, Sh))))) def _calc_norm(mc, use_differences, source, brain_pts=None): 
"""Calculates the maximum overall displacement of the midpoints of the faces of a cube due to translation and rotation. Parameters ---------- mc : motion parameter estimates [3 translation, 3 rotation (radians)] use_differences : boolean brain_pts : [4 x n_points] of coordinates Returns ------- norm : at each time point displacement : euclidean distance (mm) of displacement at each coordinate """ affines = [_get_affine_matrix(mc[i, :], source) for i in range(mc.shape[0])] return _calc_norm_affine(affines, use_differences, brain_pts) def _calc_norm_affine(affines, use_differences, brain_pts=None): """Calculates the maximum overall displacement of the midpoints of the faces of a cube due to translation and rotation. Parameters ---------- affines : list of [4 x 4] affine matrices use_differences : boolean brain_pts : [4 x n_points] of coordinates Returns ------- norm : at each time point displacement : euclidean distance (mm) of displacement at each coordinate """ if brain_pts is None: respos = np.diag([70, 70, 75]) resneg = np.diag([-70, -110, -45]) all_pts = np.vstack((np.hstack((respos, resneg)), np.ones((1, 6)))) displacement = None else: all_pts = brain_pts n_pts = all_pts.size - all_pts.shape[1] newpos = np.zeros((len(affines), n_pts)) if brain_pts is not None: displacement = np.zeros((len(affines), int(n_pts / 3))) for i, affine in enumerate(affines): newpos[i, :] = np.dot(affine, all_pts)[0:3, :].ravel() if brain_pts is not None: displacement[i, :] = np.sqrt( np.sum( np.power( np.reshape(newpos[i, :], (3, all_pts.shape[1])) - all_pts[0:3, :], 2, ), axis=0, ) ) # np.savez('displacement.npz', newpos=newpos, pts=all_pts) normdata = np.zeros(len(affines)) if use_differences: newpos = np.concatenate( (np.zeros((1, n_pts)), np.diff(newpos, n=1, axis=0)), axis=0 ) for i in range(newpos.shape[0]): normdata[i] = np.max( np.sqrt( np.sum( np.reshape( np.power(np.abs(newpos[i, :]), 2), (3, all_pts.shape[1]) ), axis=0, ) ) ) else: from scipy.signal import detrend newpos = 
np.abs(detrend(newpos, axis=0, type="constant")) normdata = np.sqrt(np.mean(np.power(newpos, 2), axis=1)) return normdata, displacement class ArtifactDetectInputSpec(BaseInterfaceInputSpec): realigned_files = InputMultiPath( File(exists=True), desc=("Names of realigned functional data " "files"), mandatory=True, ) realignment_parameters = InputMultiPath( File(exists=True), mandatory=True, desc=( "Names of realignment " "parameters corresponding to " "the functional data files" ), ) parameter_source = traits.Enum( "SPM", "FSL", "AFNI", "NiPy", "FSFAST", desc="Source of movement parameters", mandatory=True, ) use_differences = traits.ListBool( [True, False], minlen=2, maxlen=2, usedefault=True, desc=( "Use differences between successive" " motion (first element) and " "intensity parameter (second " "element) estimates in order to " "determine outliers. " "(default is [True, False])" ), ) use_norm = traits.Bool( True, usedefault=True, requires=["norm_threshold"], desc=( "Uses a composite of the motion parameters in " "order to determine outliers." ), ) norm_threshold = traits.Float( xor=["rotation_threshold", "translation_threshold"], mandatory=True, desc=( "Threshold to use to detect motion-rela" "ted outliers when composite motion is " "being used" ), ) rotation_threshold = traits.Float( mandatory=True, xor=["norm_threshold"], desc=("Threshold (in radians) to use to " "detect rotation-related outliers"), ) translation_threshold = traits.Float( mandatory=True, xor=["norm_threshold"], desc=("Threshold (in mm) to use to " "detect translation-related " "outliers"), ) zintensity_threshold = traits.Float( mandatory=True, desc=( "Intensity Z-threshold use to " "detection images that deviate " "from the mean" ), ) mask_type = traits.Enum( "spm_global", "file", "thresh", mandatory=True, desc=( "Type of mask that should be used to mask the" " functional data. *spm_global* uses an " "spm_global like calculation to determine the" " brain mask. 
*file* specifies a brain mask " "file (should be an image file consisting of " "0s and 1s). *thresh* specifies a threshold " "to use. By default all voxels are used," "unless one of these mask types are defined" ), ) mask_file = File(exists=True, desc="Mask file to be used if mask_type is 'file'.") mask_threshold = traits.Float( desc=("Mask threshold to be used if mask_type" " is 'thresh'.") ) intersect_mask = traits.Bool( True, usedefault=True, desc=("Intersect the masks when computed from " "spm_global."), ) save_plot = traits.Bool( True, desc="save plots containing outliers", usedefault=True ) plot_type = traits.Enum( "png", "svg", "eps", "pdf", desc="file type of the outlier plot", usedefault=True, ) bound_by_brainmask = traits.Bool( False, desc=( "use the brain mask to " "determine bounding box" "for composite norm (works" "for SPM and Nipy - currently" "inaccurate for FSL, AFNI" ), usedefault=True, ) global_threshold = traits.Float( 8.0, desc=("use this threshold when mask " "type equal's spm_global"), usedefault=True, ) class ArtifactDetectOutputSpec(TraitedSpec): outlier_files = OutputMultiPath( File(exists=True), desc=( "One file for each functional run " "containing a list of 0-based indices" " corresponding to outlier volumes" ), ) intensity_files = OutputMultiPath( File(exists=True), desc=( "One file for each functional run " "containing the global intensity " "values determined from the " "brainmask" ), ) norm_files = OutputMultiPath( File, desc=("One file for each functional run " "containing the composite norm") ) statistic_files = OutputMultiPath( File(exists=True), desc=( "One file for each functional run " "containing information about the " "different types of artifacts and " "if design info is provided then " "details of stimulus correlated " "motion and a listing or artifacts " "by event type." 
), ) plot_files = OutputMultiPath( File, desc=( "One image file for each functional run " "containing the detected outliers" ), ) mask_files = OutputMultiPath( File, desc=( "One image file for each functional run " "containing the mask used for global " "signal calculation" ), ) displacement_files = OutputMultiPath( File, desc=( "One image file for each " "functional run containing the " "voxel displacement timeseries" ), ) class ArtifactDetect(BaseInterface): """Detects outliers in a functional imaging series Uses intensity and motion parameters to infer outliers. If `use_norm` is True, it computes the movement of the center of each face a cuboid centered around the head and returns the maximal movement across the centers. If you wish to use individual thresholds instead, import `Undefined` from `nipype.interfaces.base` and set `....inputs.use_norm = Undefined` Examples -------- >>> ad = ArtifactDetect() >>> ad.inputs.realigned_files = 'functional.nii' >>> ad.inputs.realignment_parameters = 'functional.par' >>> ad.inputs.parameter_source = 'FSL' >>> ad.inputs.norm_threshold = 1 >>> ad.inputs.use_differences = [True, False] >>> ad.inputs.zintensity_threshold = 3 >>> ad.run() # doctest: +SKIP """ input_spec = ArtifactDetectInputSpec output_spec = ArtifactDetectOutputSpec def __init__(self, **inputs): super(ArtifactDetect, self).__init__(**inputs) def _get_output_filenames(self, motionfile, output_dir): """Generate output files based on motion filenames Parameters ---------- motionfile: file/string Filename for motion parameter file output_dir: string output directory in which the files will be generated """ if isinstance(motionfile, (str, bytes)): infile = motionfile elif isinstance(motionfile, list): infile = motionfile[0] else: raise Exception("Unknown type of file") _, filename, ext = split_filename(infile) artifactfile = os.path.join( output_dir, "".join(("art.", filename, "_outliers.txt")) ) intensityfile = os.path.join( output_dir, 
"".join(("global_intensity.", filename, ".txt")) ) statsfile = os.path.join(output_dir, "".join(("stats.", filename, ".txt"))) normfile = os.path.join(output_dir, "".join(("norm.", filename, ".txt"))) plotfile = os.path.join( output_dir, "".join(("plot.", filename, ".", self.inputs.plot_type)) ) displacementfile = os.path.join(output_dir, "".join(("disp.", filename, ext))) maskfile = os.path.join(output_dir, "".join(("mask.", filename, ext))) return ( artifactfile, intensityfile, statsfile, normfile, plotfile, displacementfile, maskfile, ) def _list_outputs(self): outputs = self._outputs().get() outputs["outlier_files"] = [] outputs["intensity_files"] = [] outputs["statistic_files"] = [] outputs["mask_files"] = [] if isdefined(self.inputs.use_norm) and self.inputs.use_norm: outputs["norm_files"] = [] if self.inputs.bound_by_brainmask: outputs["displacement_files"] = [] if isdefined(self.inputs.save_plot) and self.inputs.save_plot: outputs["plot_files"] = [] for i, f in enumerate(ensure_list(self.inputs.realigned_files)): ( outlierfile, intensityfile, statsfile, normfile, plotfile, displacementfile, maskfile, ) = self._get_output_filenames(f, os.getcwd()) outputs["outlier_files"].insert(i, outlierfile) outputs["intensity_files"].insert(i, intensityfile) outputs["statistic_files"].insert(i, statsfile) outputs["mask_files"].insert(i, maskfile) if isdefined(self.inputs.use_norm) and self.inputs.use_norm: outputs["norm_files"].insert(i, normfile) if self.inputs.bound_by_brainmask: outputs["displacement_files"].insert(i, displacementfile) if isdefined(self.inputs.save_plot) and self.inputs.save_plot: outputs["plot_files"].insert(i, plotfile) return outputs def _plot_outliers_with_wave(self, wave, outliers, name): import matplotlib matplotlib.use(config.get("execution", "matplotlib_backend")) import matplotlib.pyplot as plt plt.plot(wave) plt.ylim([wave.min(), wave.max()]) plt.xlim([0, len(wave) - 1]) if len(outliers): plt.plot( np.tile(outliers[:, None], (1, 2)).T, 
np.tile([wave.min(), wave.max()], (len(outliers), 1)).T, "r", ) plt.xlabel("Scans - 0-based") plt.ylabel(name) def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): """ Core routine for detecting outliers """ from scipy import signal if not cwd: cwd = os.getcwd() # read in functional image if isinstance(imgfile, (str, bytes)): nim = load(imgfile) elif isinstance(imgfile, list): if len(imgfile) == 1: nim = load(imgfile[0]) else: images = [load(f) for f in imgfile] nim = funcs.concat_images(images) # compute global intensity signal (x, y, z, timepoints) = nim.shape data = nim.get_fdata(dtype=np.float32) affine = nim.affine g = np.zeros((timepoints, 1)) masktype = self.inputs.mask_type if masktype == "spm_global": # spm_global like calculation iflogger.debug("art: using spm global") intersect_mask = self.inputs.intersect_mask if intersect_mask: mask = np.ones((x, y, z), dtype=bool) for t0 in range(timepoints): vol = data[:, :, :, t0] # Use an SPM like approach mask_tmp = vol > (np.nanmean(vol) / self.inputs.global_threshold) mask = mask * mask_tmp for t0 in range(timepoints): vol = data[:, :, :, t0] g[t0] = np.nanmean(vol[mask]) if len(find_indices(mask)) < (np.prod((x, y, z)) / 10): intersect_mask = False g = np.zeros((timepoints, 1)) if not intersect_mask: iflogger.info("not intersect_mask is True") mask = np.zeros((x, y, z, timepoints)) for t0 in range(timepoints): vol = data[:, :, :, t0] mask_tmp = vol > (np.nanmean(vol) / self.inputs.global_threshold) mask[:, :, :, t0] = mask_tmp g[t0] = np.nansum(vol * mask_tmp) / np.nansum(mask_tmp) elif masktype == "file": # uses a mask image to determine intensity maskimg = load(self.inputs.mask_file) mask = maskimg.get_fdata(dtype=np.float32) affine = maskimg.affine mask = mask > 0.5 for t0 in range(timepoints): vol = data[:, :, :, t0] g[t0] = np.nanmean(vol[mask]) elif masktype == "thresh": # uses a fixed signal threshold for t0 in range(timepoints): vol = data[:, :, :, t0] mask = vol > 
self.inputs.mask_threshold g[t0] = np.nanmean(vol[mask]) else: mask = np.ones((x, y, z)) g = np.nanmean(data[mask > 0, :], 1) # compute normalized intensity values gz = signal.detrend(g, axis=0) # detrend the signal if self.inputs.use_differences[1]: gz = np.concatenate((np.zeros((1, 1)), np.diff(gz, n=1, axis=0)), axis=0) gz = (gz - np.mean(gz)) / np.std(gz) # normalize the detrended signal iidx = find_indices(abs(gz) > self.inputs.zintensity_threshold) # read in motion parameters mc_in = np.loadtxt(motionfile) mc = deepcopy(mc_in) ( artifactfile, intensityfile, statsfile, normfile, plotfile, displacementfile, maskfile, ) = self._get_output_filenames(imgfile, cwd) mask_img = Nifti1Image(mask.astype(np.uint8), affine) mask_img.to_filename(maskfile) if self.inputs.use_norm: brain_pts = None if self.inputs.bound_by_brainmask: voxel_coords = np.nonzero(mask) coords = np.vstack( (voxel_coords[0], np.vstack((voxel_coords[1], voxel_coords[2]))) ).T brain_pts = np.dot( affine, np.hstack((coords, np.ones((coords.shape[0], 1)))).T ) # calculate the norm of the motion parameters normval, displacement = _calc_norm( mc, self.inputs.use_differences[0], self.inputs.parameter_source, brain_pts=brain_pts, ) tidx = find_indices(normval > self.inputs.norm_threshold) ridx = find_indices(normval < 0) if displacement is not None: dmap = np.zeros((x, y, z, timepoints), dtype=np.float64) for i in range(timepoints): dmap[ voxel_coords[0], voxel_coords[1], voxel_coords[2], i ] = displacement[i, :] dimg = Nifti1Image(dmap, affine) dimg.to_filename(displacementfile) else: if self.inputs.use_differences[0]: mc = np.concatenate( (np.zeros((1, 6)), np.diff(mc_in, n=1, axis=0)), axis=0 ) traval = mc[:, 0:3] # translation parameters (mm) rotval = mc[:, 3:6] # rotation parameters (rad) tidx = find_indices( np.sum(abs(traval) > self.inputs.translation_threshold, 1) > 0 ) ridx = find_indices( np.sum(abs(rotval) > self.inputs.rotation_threshold, 1) > 0 ) outliers = np.unique(np.union1d(iidx, 
np.union1d(tidx, ridx))) # write output to outputfile np.savetxt(artifactfile, outliers, fmt=b"%d", delimiter=" ") np.savetxt(intensityfile, g, fmt=b"%.2f", delimiter=" ") if self.inputs.use_norm: np.savetxt(normfile, normval, fmt=b"%.4f", delimiter=" ") if isdefined(self.inputs.save_plot) and self.inputs.save_plot: import matplotlib matplotlib.use(config.get("execution", "matplotlib_backend")) import matplotlib.pyplot as plt fig = plt.figure() if isdefined(self.inputs.use_norm) and self.inputs.use_norm: plt.subplot(211) else: plt.subplot(311) self._plot_outliers_with_wave(gz, iidx, "Intensity") if isdefined(self.inputs.use_norm) and self.inputs.use_norm: plt.subplot(212) self._plot_outliers_with_wave( normval, np.union1d(tidx, ridx), "Norm (mm)" ) else: diff = "" if self.inputs.use_differences[0]: diff = "diff" plt.subplot(312) self._plot_outliers_with_wave(traval, tidx, "Translation (mm)" + diff) plt.subplot(313) self._plot_outliers_with_wave(rotval, ridx, "Rotation (rad)" + diff) plt.savefig(plotfile) plt.close(fig) motion_outliers = np.union1d(tidx, ridx) stats = [ {"motion_file": motionfile, "functional_file": imgfile}, { "common_outliers": len(np.intersect1d(iidx, motion_outliers)), "intensity_outliers": len(np.setdiff1d(iidx, motion_outliers)), "motion_outliers": len(np.setdiff1d(motion_outliers, iidx)), }, { "motion": [ {"using differences": self.inputs.use_differences[0]}, { "mean": np.mean(mc_in, axis=0).tolist(), "min": np.min(mc_in, axis=0).tolist(), "max": np.max(mc_in, axis=0).tolist(), "std": np.std(mc_in, axis=0).tolist(), }, ] }, { "intensity": [ {"using differences": self.inputs.use_differences[1]}, { "mean": np.mean(gz, axis=0).tolist(), "min": np.min(gz, axis=0).tolist(), "max": np.max(gz, axis=0).tolist(), "std": np.std(gz, axis=0).tolist(), }, ] }, ] if self.inputs.use_norm: stats.insert( 3, { "motion_norm": { "mean": np.mean(normval, axis=0).tolist(), "min": np.min(normval, axis=0).tolist(), "max": np.max(normval, axis=0).tolist(), "std": 
np.std(normval, axis=0).tolist(), } }, ) save_json(statsfile, stats) def _run_interface(self, runtime): """Execute this module.""" funcfilelist = ensure_list(self.inputs.realigned_files) motparamlist = ensure_list(self.inputs.realignment_parameters) for i, imgf in enumerate(funcfilelist): self._detect_outliers_core(imgf, motparamlist[i], i, cwd=os.getcwd()) return runtime class StimCorrInputSpec(BaseInterfaceInputSpec): realignment_parameters = InputMultiPath( File(exists=True), mandatory=True, desc=( "Names of realignment " "parameters corresponding to " "the functional data files" ), ) intensity_values = InputMultiPath( File(exists=True), mandatory=True, desc=("Name of file containing intensity " "values"), ) spm_mat_file = File( exists=True, mandatory=True, desc="SPM mat file (use pre-estimate SPM.mat file)" ) concatenated_design = traits.Bool( mandatory=True, desc=("state if the design matrix " "contains concatenated sessions"), ) class StimCorrOutputSpec(TraitedSpec): stimcorr_files = OutputMultiPath( File(exists=True), desc=("List of files containing " "correlation values") ) class StimulusCorrelation(BaseInterface): """Determines if stimuli are correlated with motion or intensity parameters. Currently this class supports an SPM generated design matrix and requires intensity parameters. This implies that one must run :ref:`ArtifactDetect ` and :ref:`Level1Design ` prior to running this or provide an SPM.mat file and intensity parameters through some other means. 
Examples -------- >>> sc = StimulusCorrelation() >>> sc.inputs.realignment_parameters = 'functional.par' >>> sc.inputs.intensity_values = 'functional.rms' >>> sc.inputs.spm_mat_file = 'SPM.mat' >>> sc.inputs.concatenated_design = False >>> sc.run() # doctest: +SKIP """ input_spec = StimCorrInputSpec output_spec = StimCorrOutputSpec def _get_output_filenames(self, motionfile, output_dir): """Generate output files based on motion filenames Parameters ---------- motionfile: file/string Filename for motion parameter file output_dir: string output directory in which the files will be generated """ (_, filename) = os.path.split(motionfile) (filename, _) = os.path.splitext(filename) corrfile = os.path.join(output_dir, "".join(("qa.", filename, "_stimcorr.txt"))) return corrfile def _stimcorr_core(self, motionfile, intensityfile, designmatrix, cwd=None): """ Core routine for determining stimulus correlation """ if not cwd: cwd = os.getcwd() # read in motion parameters mc_in = np.loadtxt(motionfile) g_in = np.loadtxt(intensityfile) g_in.shape = g_in.shape[0], 1 dcol = designmatrix.shape[1] mccol = mc_in.shape[1] concat_matrix = np.hstack((np.hstack((designmatrix, mc_in)), g_in)) cm = np.corrcoef(concat_matrix, rowvar=0) corrfile = self._get_output_filenames(motionfile, cwd) # write output to outputfile file = open(corrfile, "w") file.write("Stats for:\n") file.write("Stimulus correlated motion:\n%s\n" % motionfile) for i in range(dcol): file.write("SCM.%d:" % i) for v in cm[i, dcol + np.arange(mccol)]: file.write(" %.2f" % v) file.write("\n") file.write("Stimulus correlated intensity:\n%s\n" % intensityfile) for i in range(dcol): file.write("SCI.%d: %.2f\n" % (i, cm[i, -1])) file.close() def _get_spm_submatrix(self, spmmat, sessidx, rows=None): """ Parameters ---------- spmmat: scipy matlab object full SPM.mat file loaded into a scipy object sessidx: int index to session that needs to be extracted. 
""" designmatrix = spmmat["SPM"][0][0].xX[0][0].X U = spmmat["SPM"][0][0].Sess[0][sessidx].U[0] if rows is None: rows = spmmat["SPM"][0][0].Sess[0][sessidx].row[0] - 1 cols = spmmat["SPM"][0][0].Sess[0][sessidx].col[0][list(range(len(U)))] - 1 outmatrix = designmatrix.take(rows.tolist(), axis=0).take(cols.tolist(), axis=1) return outmatrix def _run_interface(self, runtime): """Execute this module.""" import scipy.io as sio motparamlist = self.inputs.realignment_parameters intensityfiles = self.inputs.intensity_values spmmat = sio.loadmat(self.inputs.spm_mat_file, struct_as_record=False) nrows = [] for i in range(len(motparamlist)): sessidx = i rows = None if self.inputs.concatenated_design: sessidx = 0 mc_in = np.loadtxt(motparamlist[i]) rows = np.sum(nrows) + np.arange(mc_in.shape[0]) nrows.append(mc_in.shape[0]) matrix = self._get_spm_submatrix(spmmat, sessidx, rows) self._stimcorr_core(motparamlist[i], intensityfiles[i], matrix, os.getcwd()) return runtime def _list_outputs(self): outputs = self._outputs().get() files = [] for i, f in enumerate(self.inputs.realignment_parameters): files.insert(i, self._get_output_filenames(f, os.getcwd())) if files: outputs["stimcorr_files"] = files return outputs nipype-1.7.0/nipype/algorithms/stats.py000066400000000000000000000046341413403311400202110ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Managing statistical maps """ import os import nibabel as nb import numpy as np from ..interfaces.base import ( BaseInterfaceInputSpec, TraitedSpec, SimpleInterface, traits, InputMultiPath, File, ) from ..utils.filemanip import split_filename class ActivationCountInputSpec(BaseInterfaceInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, desc="input file, generally a list of z-stat maps", ) threshold = traits.Float( mandatory=True, desc="binarization threshold. E.g. 
a threshold of 1.65 " "corresponds to a two-sided Z-test of p<.10", ) class ActivationCountOutputSpec(TraitedSpec): out_file = File(exists=True, desc="output activation count map") acm_pos = File(exists=True, desc="positive activation count map") acm_neg = File(exists=True, desc="negative activation count map") class ActivationCount(SimpleInterface): """ Calculate a simple Activation Count Maps Adapted from: https://github.com/poldracklab/CNP_task_analysis/\ blob/61c27f5992db9d8800884f8ffceb73e6957db8af/CNP_2nd_level_ACM.py """ input_spec = ActivationCountInputSpec output_spec = ActivationCountOutputSpec def _run_interface(self, runtime): allmaps = nb.concat_images(self.inputs.in_files).dataobj acm_pos = np.mean(allmaps > self.inputs.threshold, axis=3, dtype=np.float32) acm_neg = np.mean( allmaps < -1.0 * self.inputs.threshold, axis=3, dtype=np.float32 ) acm_diff = acm_pos - acm_neg template_fname = self.inputs.in_files[0] ext = split_filename(template_fname)[2] fname_fmt = os.path.join(runtime.cwd, "acm_{}" + ext).format self._results["out_file"] = fname_fmt("diff") self._results["acm_pos"] = fname_fmt("pos") self._results["acm_neg"] = fname_fmt("neg") img = nb.load(template_fname) img.__class__(acm_diff, img.affine, img.header).to_filename( self._results["out_file"] ) img.__class__(acm_pos, img.affine, img.header).to_filename( self._results["acm_pos"] ) img.__class__(acm_neg, img.affine, img.header).to_filename( self._results["acm_neg"] ) return runtime nipype-1.7.0/nipype/algorithms/tests/000077500000000000000000000000001413403311400176345ustar00rootroot00000000000000nipype-1.7.0/nipype/algorithms/tests/__init__.py000066400000000000000000000002121413403311400217400ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: nipype-1.7.0/nipype/algorithms/tests/test_CompCor.py000066400000000000000000000227471413403311400226230ustar00rootroot00000000000000# 
emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import nibabel as nb import numpy as np import pytest from ...testing import utils from ..confounds import CompCor, TCompCor, ACompCor class TestCompCor: """Note: Tests currently do a poor job of testing functionality""" filenames = { "functionalnii": "compcorfunc.nii", "masknii": "compcormask.nii", "masknii2": "compcormask2.nii", "components_file": None, } @pytest.fixture(autouse=True) def setup_class(self, tmpdir): # setup tmpdir.chdir() noise = np.fromfunction(self.fake_noise_fun, self.fake_data.shape) self.realigned_file = utils.save_toy_nii( self.fake_data + noise, self.filenames["functionalnii"] ) mask = np.ones(self.fake_data.shape[:3]) mask[0, 0, 0] = 0 mask[0, 0, 1] = 0 mask1 = utils.save_toy_nii(mask, self.filenames["masknii"]) other_mask = np.ones(self.fake_data.shape[:3]) other_mask[0, 1, 0] = 0 other_mask[1, 1, 0] = 0 mask2 = utils.save_toy_nii(other_mask, self.filenames["masknii2"]) self.mask_files = [mask1, mask2] def test_compcor(self): expected_components = [ ["-0.1989607212", "-0.5753813646"], ["0.5692369697", "0.5674945949"], ["-0.6662573243", "0.4675843432"], ["0.4206466244", "-0.3361270124"], ["-0.1246655485", "-0.1235705610"], ] self.run_cc( CompCor( num_components=6, realigned_file=self.realigned_file, mask_files=self.mask_files, mask_index=0, ), expected_components, ) self.run_cc( ACompCor( num_components=6, realigned_file=self.realigned_file, mask_files=self.mask_files, mask_index=0, components_file="acc_components_file", ), expected_components, "aCompCor", ) def test_compcor_variance_threshold_and_metadata(self): expected_components = [ ["-0.2027150345", "-0.4954813834"], ["0.2565929051", "0.7866217875"], ["-0.3550986008", "-0.0089784905"], ["0.7512786244", "-0.3599828482"], ["-0.4500578942", "0.0778209345"], ] expected_metadata = { "component": "CompCor00", "mask": "mask", "singular_value": "4.0720553036", 
"variance_explained": "0.5527211465", "cumulative_variance_explained": "0.5527211465", "retained": "True", } ccinterface = CompCor( variance_threshold=0.7, realigned_file=self.realigned_file, mask_files=self.mask_files, mask_names=["mask"], mask_index=1, save_metadata=True, ) self.run_cc( ccinterface=ccinterface, expected_components=expected_components, expected_n_components=2, expected_metadata=expected_metadata, ) def test_tcompcor(self): ccinterface = TCompCor( num_components=6, realigned_file=self.realigned_file, percentile_threshold=0.75, ) self.run_cc( ccinterface, [ ["-0.1114536190", "-0.4632908609"], ["0.4566907310", "0.6983205193"], ["-0.7132557407", "0.1340170559"], ["0.5022537643", "-0.5098322262"], ["-0.1342351356", "0.1407855119"], ], "tCompCor", ) def test_tcompcor_no_percentile(self): ccinterface = TCompCor(num_components=6, realigned_file=self.realigned_file) ccinterface.run() mask = nb.load("mask_000.nii.gz").dataobj num_nonmasked_voxels = np.count_nonzero(mask) assert num_nonmasked_voxels == 1 def test_compcor_no_regress_poly(self): self.run_cc( CompCor( num_components=6, realigned_file=self.realigned_file, mask_files=self.mask_files, mask_index=0, pre_filter=False, ), [ ["0.4451946442", "-0.7683311482"], ["-0.4285129505", "-0.0926034137"], ["0.5721540256", "0.5608764842"], ["-0.5367548139", "0.0059943226"], ["-0.0520809054", "0.2940637551"], ], ) def test_tcompcor_asymmetric_dim(self): asymmetric_shape = (2, 3, 4, 5) asymmetric_data = utils.save_toy_nii( np.zeros(asymmetric_shape), "asymmetric.nii" ) TCompCor(realigned_file=asymmetric_data).run() assert nb.load("mask_000.nii.gz").shape == asymmetric_shape[:3] def test_compcor_bad_input_shapes(self): # dim 0 is < dim 0 of self.mask_files (2) shape_less_than = (1, 2, 2, 5) # dim 0 is > dim 0 of self.mask_files (2) shape_more_than = (3, 3, 3, 5) for data_shape in (shape_less_than, shape_more_than): data_file = utils.save_toy_nii(np.zeros(data_shape), "temp.nii") interface = 
CompCor(realigned_file=data_file, mask_files=self.mask_files[0]) with pytest.raises(ValueError): interface.run() # Dimension mismatch def test_tcompcor_bad_input_dim(self): bad_dims = (2, 2, 2) data_file = utils.save_toy_nii(np.zeros(bad_dims), "temp.nii") interface = TCompCor(realigned_file=data_file) with pytest.raises(ValueError): interface.run() # Not a 4D file def test_tcompcor_merge_intersect_masks(self): for method in ["union", "intersect"]: TCompCor( realigned_file=self.realigned_file, mask_files=self.mask_files, merge_method=method, ).run() if method == "union": assert np.array_equal( nb.load("mask_000.nii.gz").dataobj, ([[[0, 0], [0, 0]], [[0, 0], [1, 0]]]), ) if method == "intersect": assert np.array_equal( nb.load("mask_000.nii.gz").dataobj, ([[[0, 0], [0, 0]], [[0, 1], [0, 0]]]), ) def test_tcompcor_index_mask(self): TCompCor( realigned_file=self.realigned_file, mask_files=self.mask_files, mask_index=1 ).run() assert np.array_equal( nb.load("mask_000.nii.gz").dataobj, ([[[0, 0], [0, 0]], [[0, 1], [0, 0]]]) ) def test_tcompcor_multi_mask_no_index(self): interface = TCompCor( realigned_file=self.realigned_file, mask_files=self.mask_files ) with pytest.raises(ValueError): interface.run() # more than one mask file def run_cc( self, ccinterface, expected_components, expected_header="CompCor", expected_n_components=None, expected_metadata=None, ): # run ccresult = ccinterface.run() # assert expected_file = ccinterface._list_outputs()["components_file"] assert ccresult.outputs.components_file == expected_file assert os.path.exists(expected_file) assert os.path.getsize(expected_file) > 0 with open(ccresult.outputs.components_file, "r") as components_file: if expected_n_components is None: expected_n_components = min( ccinterface.inputs.num_components, self.fake_data.shape[3] ) components_data = [line.rstrip().split("\t") for line in components_file] # the first item will be '#', we can throw it out header = components_data.pop(0) expected_header = [ 
expected_header + "{:02d}".format(i) for i in range(expected_n_components) ] for i, heading in enumerate(header): assert expected_header[i] in heading num_got_timepoints = len(components_data) assert num_got_timepoints == self.fake_data.shape[3] for index, timepoint in enumerate(components_data): assert len(timepoint) == expected_n_components assert timepoint[:2] == expected_components[index] if ccinterface.inputs.save_metadata: expected_metadata_file = ccinterface._list_outputs()["metadata_file"] assert ccresult.outputs.metadata_file == expected_metadata_file assert os.path.exists(expected_metadata_file) assert os.path.getsize(expected_metadata_file) > 0 with open(ccresult.outputs.metadata_file, "r") as metadata_file: components_metadata = [ line.rstrip().split("\t") for line in metadata_file ] components_metadata = { i: j for i, j in zip(components_metadata[0], components_metadata[1]) } assert components_metadata == expected_metadata return ccresult @staticmethod def fake_noise_fun(i, j, l, m): return m * i + l - j fake_data = np.array( [ [[[8, 5, 3, 8, 0], [6, 7, 4, 7, 1]], [[7, 9, 1, 6, 5], [0, 7, 4, 7, 7]]], [[[2, 4, 5, 7, 0], [1, 7, 0, 5, 4]], [[7, 3, 9, 0, 4], [9, 4, 1, 5, 0]]], ] ) nipype-1.7.0/nipype/algorithms/tests/test_ErrorMap.py000066400000000000000000000047551413403311400230070ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- import pytest from nipype.testing import example_data from nipype.algorithms.metrics import ErrorMap import nibabel as nb import numpy as np import os def test_errormap(tmpdir): # Single-Spectual # Make two fake 2*2*2 voxel volumes # John von Neumann's birthday volume1 = np.array([[[2.0, 8.0], [1.0, 2.0]], [[1.0, 9.0], [0.0, 3.0]]]) # Alan Turing's birthday volume2 = np.array([[[0.0, 7.0], [2.0, 3.0]], [[1.0, 9.0], [1.0, 2.0]]]) mask = np.array([[[1, 0], [0, 1]], [[1, 0], [0, 1]]]) img1 = nb.Nifti1Image(volume1, np.eye(4)) img2 = nb.Nifti1Image(volume2, np.eye(4)) maskimg = nb.Nifti1Image(mask, 
np.eye(4)) nb.save(img1, tmpdir.join("von.nii.gz").strpath) nb.save(img2, tmpdir.join("alan.nii.gz").strpath) nb.save(maskimg, tmpdir.join("mask.nii.gz").strpath) # Default metric errmap = ErrorMap() errmap.inputs.in_tst = tmpdir.join("von.nii.gz").strpath errmap.inputs.in_ref = tmpdir.join("alan.nii.gz").strpath errmap.out_map = tmpdir.join("out_map.nii.gz").strpath result = errmap.run() assert result.outputs.distance == 1.125 # Square metric errmap.inputs.metric = "sqeuclidean" result = errmap.run() assert result.outputs.distance == 1.125 # Linear metric errmap.inputs.metric = "euclidean" result = errmap.run() assert result.outputs.distance == 0.875 # Masked errmap.inputs.mask = tmpdir.join("mask.nii.gz").strpath result = errmap.run() assert result.outputs.distance == 1.0 # Multi-Spectual # Raymond Vahan Damadian's birthday volume3 = np.array([[[1.0, 6.0], [0.0, 3.0]], [[1.0, 9.0], [3.0, 6.0]]]) msvolume1 = np.zeros(shape=(2, 2, 2, 2)) msvolume1[:, :, :, 0] = volume1 msvolume1[:, :, :, 1] = volume3 msimg1 = nb.Nifti1Image(msvolume1, np.eye(4)) msvolume2 = np.zeros(shape=(2, 2, 2, 2)) msvolume2[:, :, :, 0] = volume3 msvolume2[:, :, :, 1] = volume1 msimg2 = nb.Nifti1Image(msvolume2, np.eye(4)) nb.save(msimg1, tmpdir.join("von-ray.nii.gz").strpath) nb.save(msimg2, tmpdir.join("alan-ray.nii.gz").strpath) errmap.inputs.in_tst = tmpdir.join("von-ray.nii.gz").strpath errmap.inputs.in_ref = tmpdir.join("alan-ray.nii.gz").strpath errmap.inputs.metric = "sqeuclidean" result = errmap.run() assert result.outputs.distance == 5.5 errmap.inputs.metric = "euclidean" result = errmap.run() assert result.outputs.distance == np.float32(1.25 * (2 ** 0.5)) nipype-1.7.0/nipype/algorithms/tests/test_Overlap.py000066400000000000000000000022011413403311400226500ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os from nipype.testing import 
example_data import numpy as np def test_overlap(tmpdir): from nipype.algorithms.metrics import Overlap def check_close(val1, val2): import numpy.testing as npt return npt.assert_almost_equal(val1, val2, decimal=3) in1 = example_data("segmentation0.nii.gz") in2 = example_data("segmentation1.nii.gz") tmpdir.chdir() overlap = Overlap() overlap.inputs.volume1 = in1 overlap.inputs.volume2 = in1 res = overlap.run() check_close(res.outputs.jaccard, 1.0) overlap = Overlap() overlap.inputs.volume1 = in1 overlap.inputs.volume2 = in2 res = overlap.run() check_close(res.outputs.jaccard, 0.99705) overlap = Overlap() overlap.inputs.volume1 = in1 overlap.inputs.volume2 = in2 overlap.inputs.vol_units = "mm" res = overlap.run() check_close(res.outputs.jaccard, 0.99705) check_close(res.outputs.roi_voldiff, np.array([0.0063086, -0.0025506, 0.0])) nipype-1.7.0/nipype/algorithms/tests/test_TSNR.py000066400000000000000000000101761413403311400220400ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from ...testing import utils from ..confounds import TSNR from .. 
import misc import pytest import numpy.testing as npt from unittest import mock import nibabel as nb import numpy as np import os class TestTSNR: """Note: Tests currently do a poor job of testing functionality""" in_filenames = {"in_file": "tsnrinfile.nii"} out_filenames = { # default output file names "detrended_file": "detrend.nii.gz", "mean_file": "mean.nii.gz", "stddev_file": "stdev.nii.gz", "tsnr_file": "tsnr.nii.gz", } @pytest.fixture(autouse=True) def setup_class(self, tmpdir): # setup temp folder tmpdir.chdir() utils.save_toy_nii(self.fake_data, self.in_filenames["in_file"]) def test_tsnr(self): # run tsnrresult = TSNR(in_file=self.in_filenames["in_file"]).run() # assert self.assert_expected_outputs( tsnrresult, { "mean_file": (2.8, 7.4), "stddev_file": (0.8, 2.9), "tsnr_file": (1.3, 9.25), }, ) def test_tsnr_withpoly1(self): # run tsnrresult = TSNR(in_file=self.in_filenames["in_file"], regress_poly=1).run() # assert self.assert_expected_outputs_poly( tsnrresult, { "detrended_file": (-0.1, 8.7), "mean_file": (2.8, 7.4), "stddev_file": (0.75, 2.75), "tsnr_file": (1.4, 9.9), }, ) def test_tsnr_withpoly2(self): # run tsnrresult = TSNR(in_file=self.in_filenames["in_file"], regress_poly=2).run() # assert self.assert_expected_outputs_poly( tsnrresult, { "detrended_file": (-0.22, 8.55), "mean_file": (2.8, 7.7), "stddev_file": (0.21, 2.4), "tsnr_file": (1.7, 35.9), }, ) def test_tsnr_withpoly3(self): # run tsnrresult = TSNR(in_file=self.in_filenames["in_file"], regress_poly=3).run() # assert self.assert_expected_outputs_poly( tsnrresult, { "detrended_file": (1.8, 7.95), "mean_file": (2.8, 7.7), "stddev_file": (0.1, 1.7), "tsnr_file": (2.6, 57.3), }, ) @mock.patch("warnings.warn") def test_warning(self, mock_warn): """test that usage of misc.TSNR trips a warning to use confounds.TSNR instead""" # run misc.TSNR(in_file=self.in_filenames["in_file"]) # assert assert True in [ args[0].count("confounds") > 0 for _, args, _ in mock_warn.mock_calls ] def 
assert_expected_outputs_poly(self, tsnrresult, expected_ranges): assert ( os.path.basename(tsnrresult.outputs.detrended_file) == self.out_filenames["detrended_file"] ) self.assert_expected_outputs(tsnrresult, expected_ranges) def assert_expected_outputs(self, tsnrresult, expected_ranges): self.assert_default_outputs(tsnrresult.outputs) self.assert_unchanged(expected_ranges) def assert_default_outputs(self, outputs): assert os.path.basename(outputs.mean_file) == self.out_filenames["mean_file"] assert ( os.path.basename(outputs.stddev_file) == self.out_filenames["stddev_file"] ) assert os.path.basename(outputs.tsnr_file) == self.out_filenames["tsnr_file"] def assert_unchanged(self, expected_ranges): for key, (min_, max_) in expected_ranges.items(): data = np.asarray(nb.load(self.out_filenames[key]).dataobj) npt.assert_almost_equal(np.amin(data), min_, decimal=1) npt.assert_almost_equal(np.amax(data), max_, decimal=1) fake_data = np.array( [ [[[2, 4, 3, 9, 1], [3, 6, 4, 7, 4]], [[8, 3, 4, 6, 2], [4, 0, 4, 4, 2]]], [[[9, 7, 5, 5, 7], [7, 8, 4, 8, 4]], [[0, 4, 7, 1, 7], [6, 8, 8, 8, 7]]], ] ) nipype-1.7.0/nipype/algorithms/tests/test_auto_ACompCor.py000066400000000000000000000040521413403311400237410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..confounds import ACompCor def test_ACompCor_inputs(): input_map = dict( components_file=dict( usedefault=True, ), failure_mode=dict( usedefault=True, ), header_prefix=dict(), high_pass_cutoff=dict( usedefault=True, ), ignore_initial_volumes=dict( usedefault=True, ), mask_files=dict(), mask_index=dict( requires=["mask_files"], xor=["merge_method"], ), mask_names=dict(), merge_method=dict( requires=["mask_files"], xor=["mask_index"], ), num_components=dict( xor=["variance_threshold"], ), pre_filter=dict( usedefault=True, ), realigned_file=dict( extensions=None, mandatory=True, ), regress_poly_degree=dict( usedefault=True, ), repetition_time=dict(), save_metadata=dict( usedefault=True, ), 
save_pre_filter=dict( usedefault=True, ), use_regress_poly=dict( deprecated="0.15.0", new_name="pre_filter", ), variance_threshold=dict( xor=["num_components"], ), ) inputs = ACompCor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ACompCor_outputs(): output_map = dict( components_file=dict( extensions=None, ), metadata_file=dict( extensions=None, ), pre_filter_file=dict( extensions=None, ), ) outputs = ACompCor.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_ActivationCount.py000066400000000000000000000017061413403311400254130ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..stats import ActivationCount def test_ActivationCount_inputs(): input_map = dict( in_files=dict( mandatory=True, ), threshold=dict( mandatory=True, ), ) inputs = ActivationCount.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ActivationCount_outputs(): output_map = dict( acm_neg=dict( extensions=None, ), acm_pos=dict( extensions=None, ), out_file=dict( extensions=None, ), ) outputs = ActivationCount.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_AddCSVColumn.py000066400000000000000000000016641413403311400245260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..misc import AddCSVColumn def test_AddCSVColumn_inputs(): input_map = dict( extra_column_heading=dict(), extra_field=dict(), in_file=dict( extensions=None, mandatory=True, ), out_file=dict( 
extensions=None, usedefault=True, ), ) inputs = AddCSVColumn.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AddCSVColumn_outputs(): output_map = dict( csv_file=dict( extensions=None, ), ) outputs = AddCSVColumn.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_AddCSVRow.py000066400000000000000000000015071413403311400240340ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..misc import AddCSVRow def test_AddCSVRow_inputs(): input_map = dict( _outputs=dict( usedefault=True, ), in_file=dict( extensions=None, mandatory=True, ), ) inputs = AddCSVRow.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AddCSVRow_outputs(): output_map = dict( csv_file=dict( extensions=None, ), ) outputs = AddCSVRow.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_AddNoise.py000066400000000000000000000021531413403311400237640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..misc import AddNoise def test_AddNoise_inputs(): input_map = dict( bg_dist=dict( mandatory=True, usedefault=True, ), dist=dict( mandatory=True, usedefault=True, ), in_file=dict( extensions=None, mandatory=True, ), in_mask=dict( extensions=None, ), out_file=dict( extensions=None, ), snr=dict( usedefault=True, ), ) inputs = AddNoise.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def 
test_AddNoise_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = AddNoise.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_ArtifactDetect.py000066400000000000000000000042541413403311400251700ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..rapidart import ArtifactDetect def test_ArtifactDetect_inputs(): input_map = dict( bound_by_brainmask=dict( usedefault=True, ), global_threshold=dict( usedefault=True, ), intersect_mask=dict( usedefault=True, ), mask_file=dict( extensions=None, ), mask_threshold=dict(), mask_type=dict( mandatory=True, ), norm_threshold=dict( mandatory=True, xor=["rotation_threshold", "translation_threshold"], ), parameter_source=dict( mandatory=True, ), plot_type=dict( usedefault=True, ), realigned_files=dict( mandatory=True, ), realignment_parameters=dict( mandatory=True, ), rotation_threshold=dict( mandatory=True, xor=["norm_threshold"], ), save_plot=dict( usedefault=True, ), translation_threshold=dict( mandatory=True, xor=["norm_threshold"], ), use_differences=dict( maxlen=2, minlen=2, usedefault=True, ), use_norm=dict( requires=["norm_threshold"], usedefault=True, ), zintensity_threshold=dict( mandatory=True, ), ) inputs = ArtifactDetect.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ArtifactDetect_outputs(): output_map = dict( displacement_files=dict(), intensity_files=dict(), mask_files=dict(), norm_files=dict(), outlier_files=dict(), plot_files=dict(), statistic_files=dict(), ) outputs = ArtifactDetect.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/algorithms/tests/test_auto_CalculateMedian.py000066400000000000000000000014451413403311400253140ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..misc import CalculateMedian def test_CalculateMedian_inputs(): input_map = dict( in_files=dict(), median_file=dict(), median_per_file=dict( usedefault=True, ), ) inputs = CalculateMedian.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CalculateMedian_outputs(): output_map = dict( median_files=dict(), ) outputs = CalculateMedian.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_CalculateNormalizedMoments.py000066400000000000000000000015721413403311400275670ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..misc import CalculateNormalizedMoments def test_CalculateNormalizedMoments_inputs(): input_map = dict( moment=dict( mandatory=True, ), timeseries_file=dict( extensions=None, mandatory=True, ), ) inputs = CalculateNormalizedMoments.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CalculateNormalizedMoments_outputs(): output_map = dict( moments=dict(), ) outputs = CalculateNormalizedMoments.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_ComputeDVARS.py000066400000000000000000000037461413403311400245230ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..confounds import ComputeDVARS def test_ComputeDVARS_inputs(): input_map = dict( 
figdpi=dict( usedefault=True, ), figformat=dict( usedefault=True, ), figsize=dict( usedefault=True, ), in_file=dict( extensions=None, mandatory=True, ), in_mask=dict( extensions=None, mandatory=True, ), intensity_normalization=dict( usedefault=True, ), remove_zerovariance=dict( usedefault=True, ), save_all=dict( usedefault=True, ), save_nstd=dict( usedefault=True, ), save_plot=dict( usedefault=True, ), save_std=dict( usedefault=True, ), save_vxstd=dict( usedefault=True, ), series_tr=dict(), ) inputs = ComputeDVARS.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ComputeDVARS_outputs(): output_map = dict( avg_nstd=dict(), avg_std=dict(), avg_vxstd=dict(), fig_nstd=dict( extensions=None, ), fig_std=dict( extensions=None, ), fig_vxstd=dict( extensions=None, ), out_all=dict( extensions=None, ), out_nstd=dict( extensions=None, ), out_std=dict( extensions=None, ), out_vxstd=dict( extensions=None, ), ) outputs = ComputeDVARS.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_ComputeMeshWarp.py000066400000000000000000000024171413403311400253640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..mesh import ComputeMeshWarp def test_ComputeMeshWarp_inputs(): input_map = dict( metric=dict( usedefault=True, ), out_file=dict( extensions=None, usedefault=True, ), out_warp=dict( extensions=None, usedefault=True, ), surface1=dict( extensions=None, mandatory=True, ), surface2=dict( extensions=None, mandatory=True, ), weighting=dict( usedefault=True, ), ) inputs = ComputeMeshWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ComputeMeshWarp_outputs(): 
output_map = dict( distance=dict(), out_file=dict( extensions=None, ), out_warp=dict( extensions=None, ), ) outputs = ComputeMeshWarp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_CreateNifti.py000066400000000000000000000016131413403311400244730ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..misc import CreateNifti def test_CreateNifti_inputs(): input_map = dict( affine=dict(), data_file=dict( extensions=None, mandatory=True, ), header_file=dict( extensions=None, mandatory=True, ), ) inputs = CreateNifti.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CreateNifti_outputs(): output_map = dict( nifti_file=dict( extensions=None, ), ) outputs = CreateNifti.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_Distance.py000066400000000000000000000020441413403311400240270ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..misc import Distance def test_Distance_inputs(): input_map = dict( mask_volume=dict( extensions=None, ), method=dict( usedefault=True, ), volume1=dict( extensions=None, mandatory=True, ), volume2=dict( extensions=None, mandatory=True, ), ) inputs = Distance.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Distance_outputs(): output_map = dict( distance=dict(), histogram=dict( extensions=None, ), point1=dict(), point2=dict(), ) outputs = Distance.output_spec() for key, metadata in list(output_map.items()): for metakey, value 
in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_FramewiseDisplacement.py000066400000000000000000000027671413403311400265640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..confounds import FramewiseDisplacement def test_FramewiseDisplacement_inputs(): input_map = dict( figdpi=dict( usedefault=True, ), figsize=dict( usedefault=True, ), in_file=dict( extensions=None, mandatory=True, ), normalize=dict( usedefault=True, ), out_figure=dict( extensions=None, usedefault=True, ), out_file=dict( extensions=None, usedefault=True, ), parameter_source=dict( mandatory=True, ), radius=dict( usedefault=True, ), save_plot=dict( usedefault=True, ), series_tr=dict(), ) inputs = FramewiseDisplacement.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FramewiseDisplacement_outputs(): output_map = dict( fd_average=dict(), out_figure=dict( extensions=None, ), out_file=dict( extensions=None, ), ) outputs = FramewiseDisplacement.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_FuzzyOverlap.py000066400000000000000000000020611413403311400247540ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..misc import FuzzyOverlap def test_FuzzyOverlap_inputs(): input_map = dict( in_mask=dict( extensions=None, ), in_ref=dict( mandatory=True, ), in_tst=dict( mandatory=True, ), out_file=dict( extensions=None, usedefault=True, ), weighting=dict( usedefault=True, ), ) inputs = FuzzyOverlap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def 
test_FuzzyOverlap_outputs(): output_map = dict( class_fdi=dict(), class_fji=dict(), dice=dict(), jaccard=dict(), ) outputs = FuzzyOverlap.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_Gunzip.py000066400000000000000000000013711413403311400235530ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..misc import Gunzip def test_Gunzip_inputs(): input_map = dict( in_file=dict( extensions=None, mandatory=True, ), ) inputs = Gunzip.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Gunzip_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Gunzip.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_ICC.py000066400000000000000000000016701413403311400226770ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..icc import ICC def test_ICC_inputs(): input_map = dict( mask=dict( extensions=None, mandatory=True, ), subjects_sessions=dict( mandatory=True, ), ) inputs = ICC.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ICC_outputs(): output_map = dict( icc_map=dict( extensions=None, ), session_var_map=dict( extensions=None, ), subject_var_map=dict( extensions=None, ), ) outputs = ICC.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/algorithms/tests/test_auto_Matlab2CSV.py000066400000000000000000000014551413403311400241400ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..misc import Matlab2CSV def test_Matlab2CSV_inputs(): input_map = dict( in_file=dict( extensions=None, mandatory=True, ), reshape_matrix=dict( usedefault=True, ), ) inputs = Matlab2CSV.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Matlab2CSV_outputs(): output_map = dict( csv_files=dict(), ) outputs = Matlab2CSV.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_MergeCSVFiles.py000066400000000000000000000020421413403311400246710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..misc import MergeCSVFiles def test_MergeCSVFiles_inputs(): input_map = dict( column_headings=dict(), extra_column_heading=dict(), extra_field=dict(), in_files=dict( mandatory=True, ), out_file=dict( extensions=None, usedefault=True, ), row_heading_title=dict( usedefault=True, ), row_headings=dict(), ) inputs = MergeCSVFiles.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MergeCSVFiles_outputs(): output_map = dict( csv_file=dict( extensions=None, ), ) outputs = MergeCSVFiles.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_MergeROIs.py000066400000000000000000000014461413403311400240760ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..misc import MergeROIs def 
test_MergeROIs_inputs(): input_map = dict( in_files=dict(), in_index=dict(), in_reference=dict( extensions=None, ), ) inputs = MergeROIs.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MergeROIs_outputs(): output_map = dict( merged_file=dict( extensions=None, ), ) outputs = MergeROIs.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_MeshWarpMaths.py000066400000000000000000000023121413403311400250160ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..mesh import MeshWarpMaths def test_MeshWarpMaths_inputs(): input_map = dict( float_trait=dict(), in_surf=dict( extensions=None, mandatory=True, ), operation=dict( usedefault=True, ), operator=dict( mandatory=True, usedefault=True, ), out_file=dict( extensions=None, usedefault=True, ), out_warp=dict( extensions=None, usedefault=True, ), ) inputs = MeshWarpMaths.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MeshWarpMaths_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_warp=dict( extensions=None, ), ) outputs = MeshWarpMaths.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_ModifyAffine.py000066400000000000000000000014531413403311400246400ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..misc import ModifyAffine def test_ModifyAffine_inputs(): input_map = dict( transformation_matrix=dict( usedefault=True, ), volumes=dict( mandatory=True, ), ) inputs = 
ModifyAffine.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ModifyAffine_outputs(): output_map = dict( transformed_volumes=dict(), ) outputs = ModifyAffine.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_NonSteadyStateDetector.py000066400000000000000000000014641413403311400267010ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..confounds import NonSteadyStateDetector def test_NonSteadyStateDetector_inputs(): input_map = dict( in_file=dict( extensions=None, mandatory=True, ), ) inputs = NonSteadyStateDetector.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_NonSteadyStateDetector_outputs(): output_map = dict( n_volumes_to_discard=dict(), ) outputs = NonSteadyStateDetector.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_NormalizeProbabilityMapSet.py000066400000000000000000000014651413403311400275560ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..misc import NormalizeProbabilityMapSet def test_NormalizeProbabilityMapSet_inputs(): input_map = dict( in_files=dict(), in_mask=dict( extensions=None, ), ) inputs = NormalizeProbabilityMapSet.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_NormalizeProbabilityMapSet_outputs(): output_map = dict( out_files=dict(), ) outputs = NormalizeProbabilityMapSet.output_spec() for 
key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_P2PDistance.py000066400000000000000000000023731413403311400243560ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..mesh import P2PDistance def test_P2PDistance_inputs(): input_map = dict( metric=dict( usedefault=True, ), out_file=dict( extensions=None, usedefault=True, ), out_warp=dict( extensions=None, usedefault=True, ), surface1=dict( extensions=None, mandatory=True, ), surface2=dict( extensions=None, mandatory=True, ), weighting=dict( usedefault=True, ), ) inputs = P2PDistance.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_P2PDistance_outputs(): output_map = dict( distance=dict(), out_file=dict( extensions=None, ), out_warp=dict( extensions=None, ), ) outputs = P2PDistance.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_PickAtlas.py000066400000000000000000000020041413403311400241440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..misc import PickAtlas def test_PickAtlas_inputs(): input_map = dict( atlas=dict( extensions=None, mandatory=True, ), dilation_size=dict( usedefault=True, ), hemi=dict( usedefault=True, ), labels=dict( mandatory=True, ), output_file=dict( extensions=None, ), ) inputs = PickAtlas.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_PickAtlas_outputs(): output_map = dict( mask_file=dict( extensions=None, ), ) outputs = PickAtlas.output_spec() for key, metadata in 
list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_Similarity.py000066400000000000000000000017731413403311400244330ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..metrics import Similarity def test_Similarity_inputs(): input_map = dict( mask1=dict( extensions=None, ), mask2=dict( extensions=None, ), metric=dict( usedefault=True, ), volume1=dict( extensions=None, mandatory=True, ), volume2=dict( extensions=None, mandatory=True, ), ) inputs = Similarity.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Similarity_outputs(): output_map = dict( similarity=dict(), ) outputs = Similarity.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_SimpleThreshold.py000066400000000000000000000014551413403311400254100ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..misc import SimpleThreshold def test_SimpleThreshold_inputs(): input_map = dict( threshold=dict( mandatory=True, ), volumes=dict( mandatory=True, ), ) inputs = SimpleThreshold.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SimpleThreshold_outputs(): output_map = dict( thresholded_volumes=dict(), ) outputs = SimpleThreshold.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_SpecifyModel.py000066400000000000000000000032571413403311400246670ustar00rootroot00000000000000# 
AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..modelgen import SpecifyModel def test_SpecifyModel_inputs(): input_map = dict( bids_amplitude_column=dict(), bids_condition_column=dict( usedefault=True, ), bids_event_file=dict( mandatory=True, xor=["subject_info", "event_files", "bids_event_file"], ), event_files=dict( mandatory=True, xor=["subject_info", "event_files", "bids_event_file"], ), functional_runs=dict( copyfile=False, mandatory=True, ), high_pass_filter_cutoff=dict( mandatory=True, ), input_units=dict( mandatory=True, ), outlier_files=dict( copyfile=False, ), parameter_source=dict( usedefault=True, ), realignment_parameters=dict( copyfile=False, ), subject_info=dict( mandatory=True, xor=["subject_info", "event_files", "bids_event_file"], ), time_repetition=dict( mandatory=True, ), ) inputs = SpecifyModel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SpecifyModel_outputs(): output_map = dict( session_info=dict(), ) outputs = SpecifyModel.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_SpecifySPMModel.py000066400000000000000000000035101413403311400252370ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..modelgen import SpecifySPMModel def test_SpecifySPMModel_inputs(): input_map = dict( bids_amplitude_column=dict(), bids_condition_column=dict( usedefault=True, ), bids_event_file=dict( mandatory=True, xor=["subject_info", "event_files", "bids_event_file"], ), concatenate_runs=dict( usedefault=True, ), event_files=dict( mandatory=True, xor=["subject_info", "event_files", "bids_event_file"], ), functional_runs=dict( copyfile=False, mandatory=True, ), high_pass_filter_cutoff=dict( mandatory=True, ), input_units=dict( 
mandatory=True, ), outlier_files=dict( copyfile=False, ), output_units=dict( usedefault=True, ), parameter_source=dict( usedefault=True, ), realignment_parameters=dict( copyfile=False, ), subject_info=dict( mandatory=True, xor=["subject_info", "event_files", "bids_event_file"], ), time_repetition=dict( mandatory=True, ), ) inputs = SpecifySPMModel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SpecifySPMModel_outputs(): output_map = dict( session_info=dict(), ) outputs = SpecifySPMModel.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_SpecifySparseModel.py000066400000000000000000000044761413403311400260510ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..modelgen import SpecifySparseModel def test_SpecifySparseModel_inputs(): input_map = dict( bids_amplitude_column=dict(), bids_condition_column=dict( usedefault=True, ), bids_event_file=dict( mandatory=True, xor=["subject_info", "event_files", "bids_event_file"], ), event_files=dict( mandatory=True, xor=["subject_info", "event_files", "bids_event_file"], ), functional_runs=dict( copyfile=False, mandatory=True, ), high_pass_filter_cutoff=dict( mandatory=True, ), input_units=dict( mandatory=True, ), model_hrf=dict(), outlier_files=dict( copyfile=False, ), parameter_source=dict( usedefault=True, ), realignment_parameters=dict( copyfile=False, ), save_plot=dict(), scale_regressors=dict( usedefault=True, ), scan_onset=dict( usedefault=True, ), stimuli_as_impulses=dict( usedefault=True, ), subject_info=dict( mandatory=True, xor=["subject_info", "event_files", "bids_event_file"], ), time_acquisition=dict( mandatory=True, ), time_repetition=dict( mandatory=True, ), use_temporal_deriv=dict( 
requires=["model_hrf"], ), volumes_in_cluster=dict( usedefault=True, ), ) inputs = SpecifySparseModel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SpecifySparseModel_outputs(): output_map = dict( session_info=dict(), sparse_png_file=dict( extensions=None, ), sparse_svg_file=dict( extensions=None, ), ) outputs = SpecifySparseModel.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_SplitROIs.py000066400000000000000000000015561413403311400241340ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..misc import SplitROIs def test_SplitROIs_inputs(): input_map = dict( in_file=dict( extensions=None, mandatory=True, ), in_mask=dict( extensions=None, ), roi_size=dict(), ) inputs = SplitROIs.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SplitROIs_outputs(): output_map = dict( out_files=dict(), out_index=dict(), out_masks=dict(), ) outputs = SplitROIs.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_StimulusCorrelation.py000066400000000000000000000017761413403311400263370ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..rapidart import StimulusCorrelation def test_StimulusCorrelation_inputs(): input_map = dict( concatenated_design=dict( mandatory=True, ), intensity_values=dict( mandatory=True, ), realignment_parameters=dict( mandatory=True, ), spm_mat_file=dict( extensions=None, mandatory=True, ), ) inputs = StimulusCorrelation.input_spec() for 
key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_StimulusCorrelation_outputs(): output_map = dict( stimcorr_files=dict(), ) outputs = StimulusCorrelation.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_TCompCor.py000066400000000000000000000042311413403311400237630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..confounds import TCompCor def test_TCompCor_inputs(): input_map = dict( components_file=dict( usedefault=True, ), failure_mode=dict( usedefault=True, ), header_prefix=dict(), high_pass_cutoff=dict( usedefault=True, ), ignore_initial_volumes=dict( usedefault=True, ), mask_files=dict(), mask_index=dict( requires=["mask_files"], xor=["merge_method"], ), mask_names=dict(), merge_method=dict( requires=["mask_files"], xor=["mask_index"], ), num_components=dict( xor=["variance_threshold"], ), percentile_threshold=dict( usedefault=True, ), pre_filter=dict( usedefault=True, ), realigned_file=dict( extensions=None, mandatory=True, ), regress_poly_degree=dict( usedefault=True, ), repetition_time=dict(), save_metadata=dict( usedefault=True, ), save_pre_filter=dict( usedefault=True, ), use_regress_poly=dict( deprecated="0.15.0", new_name="pre_filter", ), variance_threshold=dict( xor=["num_components"], ), ) inputs = TCompCor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TCompCor_outputs(): output_map = dict( components_file=dict( extensions=None, ), high_variance_masks=dict(), metadata_file=dict( extensions=None, ), pre_filter_file=dict( extensions=None, ), ) outputs = TCompCor.output_spec() for key, metadata in list(output_map.items()): for metakey, 
value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_TVTKBaseInterface.py000066400000000000000000000005611413403311400255030ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..mesh import TVTKBaseInterface def test_TVTKBaseInterface_inputs(): input_map = dict() inputs = TVTKBaseInterface.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_auto_WarpPoints.py000066400000000000000000000022171413403311400244050ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..mesh import WarpPoints def test_WarpPoints_inputs(): input_map = dict( interp=dict( mandatory=True, usedefault=True, ), out_points=dict( extensions=None, keep_extension=True, name_source="points", name_template="%s_warped", output_name="out_points", ), points=dict( extensions=None, mandatory=True, ), warp=dict( extensions=None, mandatory=True, ), ) inputs = WarpPoints.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_WarpPoints_outputs(): output_map = dict( out_points=dict( extensions=None, ), ) outputs = WarpPoints.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/algorithms/tests/test_confounds.py000066400000000000000000000043631413403311400232510ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- import os import pytest from nipype.testing import example_data from nipype.algorithms.confounds import FramewiseDisplacement, ComputeDVARS, is_outlier import numpy as np nonitime = True try: import nitime nonitime = False except ImportError: 
pass def test_fd(tmpdir): tempdir = tmpdir.strpath ground_truth = np.loadtxt(example_data("fsl_motion_outliers_fd.txt")) fdisplacement = FramewiseDisplacement( in_file=example_data("fsl_mcflirt_movpar.txt"), out_file=tempdir + "/fd.txt", parameter_source="FSL", ) res = fdisplacement.run() with open(res.outputs.out_file) as all_lines: for line in all_lines: assert "FramewiseDisplacement" in line break assert np.allclose( ground_truth, np.loadtxt(res.outputs.out_file, skiprows=1), atol=0.16 ) assert np.abs(ground_truth.mean() - res.outputs.fd_average) < 1e-2 @pytest.mark.skipif(nonitime, reason="nitime is not installed") def test_dvars(tmpdir): ground_truth = np.loadtxt(example_data("ds003_sub-01_mc.DVARS")) dvars = ComputeDVARS( in_file=example_data("ds003_sub-01_mc.nii.gz"), in_mask=example_data("ds003_sub-01_mc_brainmask.nii.gz"), save_all=True, intensity_normalization=0, ) tmpdir.chdir() res = dvars.run() dv1 = np.loadtxt(res.outputs.out_all, skiprows=1) assert (np.abs(dv1[:, 0] - ground_truth[:, 0]).sum() / len(dv1)) < 0.05 assert (np.abs(dv1[:, 1] - ground_truth[:, 1]).sum() / len(dv1)) < 0.05 assert (np.abs(dv1[:, 2] - ground_truth[:, 2]).sum() / len(dv1)) < 0.05 dvars = ComputeDVARS( in_file=example_data("ds003_sub-01_mc.nii.gz"), in_mask=example_data("ds003_sub-01_mc_brainmask.nii.gz"), save_all=True, ) res = dvars.run() dv1 = np.loadtxt(res.outputs.out_all, skiprows=1) assert (np.abs(dv1[:, 0] - ground_truth[:, 0]).sum() / len(dv1)) < 0.05 assert (np.abs(dv1[:, 1] - ground_truth[:, 1]).sum() / len(dv1)) > 0.05 assert (np.abs(dv1[:, 2] - ground_truth[:, 2]).sum() / len(dv1)) < 0.05 def test_outliers(): np.random.seed(0) in_data = np.random.randn(100) in_data[0] += 10 assert is_outlier(in_data) == 1 nipype-1.7.0/nipype/algorithms/tests/test_icc_anova.py000066400000000000000000000012671413403311400231750ustar00rootroot00000000000000# -*- coding: utf-8 -*- import numpy as np from nipype.algorithms.icc import ICC_rep_anova def test_ICC_rep_anova(): # see table 2 
in P. E. Shrout & Joseph L. Fleiss (1979). "Intraclass # Correlations: Uses in Assessing Rater Reliability". Psychological # Bulletin 86 (2): 420-428 Y = np.array( [ [9, 2, 5, 8], [6, 1, 3, 2], [8, 4, 6, 8], [7, 1, 2, 6], [10, 5, 6, 9], [6, 2, 4, 7], ] ) icc, r_var, e_var, _, dfc, dfe = ICC_rep_anova(Y) # see table 4 assert round(icc, 2) == 0.71 assert dfc == 3 assert dfe == 15 assert np.isclose(r_var / (r_var + e_var), icc) nipype-1.7.0/nipype/algorithms/tests/test_mesh_ops.py000066400000000000000000000046571413403311400230760ustar00rootroot00000000000000# -*- coding: utf-8 -*- # coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import pytest import nipype.testing as npt from nipype.testing import example_data import numpy as np from nipype.algorithms import mesh as m from ...interfaces import vtkbase as VTKInfo @pytest.mark.skipif(VTKInfo.no_tvtk(), reason="tvtk is not installed") def test_ident_distances(tmpdir): tmpdir.chdir() in_surf = example_data("surf01.vtk") dist_ident = m.ComputeMeshWarp() dist_ident.inputs.surface1 = in_surf dist_ident.inputs.surface2 = in_surf dist_ident.inputs.out_file = tmpdir.join("distance.npy").strpath res = dist_ident.run() assert res.outputs.distance == 0.0 dist_ident.inputs.weighting = "area" res = dist_ident.run() assert res.outputs.distance == 0.0 @pytest.mark.skipif(VTKInfo.no_tvtk(), reason="tvtk is not installed") def test_trans_distances(tmpdir): from ...interfaces.vtkbase import tvtk in_surf = example_data("surf01.vtk") warped_surf = tmpdir.join("warped.vtk").strpath inc = np.array([0.7, 0.3, -0.2]) r1 = tvtk.PolyDataReader(file_name=in_surf) vtk1 = VTKInfo.vtk_output(r1) r1.update() vtk1.points = np.array(vtk1.points) + inc writer = tvtk.PolyDataWriter(file_name=warped_surf) VTKInfo.configure_input_data(writer, vtk1) writer.write() dist = m.ComputeMeshWarp() dist.inputs.surface1 = in_surf dist.inputs.surface2 = warped_surf 
dist.inputs.out_file = tmpdir.join("distance.npy").strpath res = dist.run() assert np.allclose(res.outputs.distance, np.linalg.norm(inc), 4) dist.inputs.weighting = "area" res = dist.run() assert np.allclose(res.outputs.distance, np.linalg.norm(inc), 4) @pytest.mark.skipif(VTKInfo.no_tvtk(), reason="tvtk is not installed") def test_warppoints(tmpdir): tmpdir.chdir() # TODO: include regression tests for when tvtk is installed @pytest.mark.skipif(VTKInfo.no_tvtk(), reason="tvtk is not installed") def test_meshwarpmaths(tmpdir): tmpdir.chdir() # TODO: include regression tests for when tvtk is installed @pytest.mark.skipif(not VTKInfo.no_tvtk(), reason="tvtk is installed") def test_importerror(): with pytest.raises(ImportError): m.ComputeMeshWarp() with pytest.raises(ImportError): m.WarpPoints() with pytest.raises(ImportError): m.MeshWarpMaths() nipype-1.7.0/nipype/algorithms/tests/test_metrics.py000066400000000000000000000035741413403311400227240ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import numpy as np import nibabel as nb from nipype.testing import example_data from ..metrics import FuzzyOverlap def test_fuzzy_overlap(tmpdir): tmpdir.chdir() # Tests with tissue probability maps in_mask = example_data("tpms_msk.nii.gz") tpms = [example_data("tpm_%02d.nii.gz" % i) for i in range(3)] out = FuzzyOverlap(in_ref=tpms[0], in_tst=tpms[0]).run().outputs assert out.dice == 1 out = FuzzyOverlap(in_mask=in_mask, in_ref=tpms[0], in_tst=tpms[0]).run().outputs assert out.dice == 1 out = FuzzyOverlap(in_mask=in_mask, in_ref=tpms[0], in_tst=tpms[1]).run().outputs assert 0 < out.dice < 1 out = FuzzyOverlap(in_ref=tpms, in_tst=tpms).run().outputs assert out.dice == 1.0 out = FuzzyOverlap(in_mask=in_mask, in_ref=tpms, in_tst=tpms).run().outputs assert out.dice == 1.0 # Tests with synthetic 3x3x3 images data = np.zeros((3, 3, 3), dtype=float) data[0, 0, 0] = 0.5 data[2, 2, 2] = 0.25 
data[1, 1, 1] = 0.3 nb.Nifti1Image(data, np.eye(4)).to_filename("test1.nii.gz") data = np.zeros((3, 3, 3), dtype=float) data[0, 0, 0] = 0.6 data[1, 1, 1] = 0.3 nb.Nifti1Image(data, np.eye(4)).to_filename("test2.nii.gz") out = FuzzyOverlap(in_ref="test1.nii.gz", in_tst="test2.nii.gz").run().outputs assert np.allclose(out.dice, 0.82051) # Just considering the mask, the central pixel # that raised the index now is left aside. data = np.zeros((3, 3, 3), dtype=int) data[0, 0, 0] = 1 data[2, 2, 2] = 1 nb.Nifti1Image(data, np.eye(4)).to_filename("mask.nii.gz") out = ( FuzzyOverlap( in_ref="test1.nii.gz", in_tst="test2.nii.gz", in_mask="mask.nii.gz" ) .run() .outputs ) assert np.allclose(out.dice, 0.74074) nipype-1.7.0/nipype/algorithms/tests/test_misc.py000066400000000000000000000024371413403311400222060ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import pytest import os import nibabel as nb from nipype.algorithms import misc from nipype.utils.filemanip import fname_presuffix from nipype.testing.fixtures import create_analyze_pair_file_in_directory from nipype.testing import example_data def test_CreateNifti(create_analyze_pair_file_in_directory): filelist, outdir = create_analyze_pair_file_in_directory create_nifti = misc.CreateNifti() # test raising error with mandatory args absent with pytest.raises(ValueError): create_nifti.run() # .inputs based parameters setting create_nifti.inputs.header_file = filelist[0] create_nifti.inputs.data_file = fname_presuffix( filelist[0], "", ".img", use_ext=False ) result = create_nifti.run() assert os.path.exists(result.outputs.nifti_file) assert nb.load(result.outputs.nifti_file) def test_CalculateMedian(create_analyze_pair_file_in_directory): mean = misc.CalculateMedian() with pytest.raises(TypeError): mean.run() mean.inputs.in_files = example_data("ds003_sub-01_mc.nii.gz") eg = mean.run() assert os.path.exists(eg.outputs.median_files) 
assert nb.load(eg.outputs.median_files) nipype-1.7.0/nipype/algorithms/tests/test_modelgen.py000066400000000000000000000227441413403311400230500ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from copy import deepcopy import os from nibabel import Nifti1Image import numpy as np import pytest import numpy.testing as npt from nipype.testing import example_data from nipype.interfaces.base import Bunch, TraitError from nipype.algorithms.modelgen import ( bids_gen_info, SpecifyModel, SpecifySparseModel, SpecifySPMModel, ) def test_bids_gen_info(): fname = example_data("events.tsv") res = bids_gen_info([fname]) assert res[0].onsets == [ [183.75, 313.75, 483.75, 633.75, 783.75, 933.75, 1083.75, 1233.75] ] assert res[0].durations == [[20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0]] assert res[0].amplitudes == [[1, 1, 1, 1, 1, 1, 1, 1]] assert res[0].conditions == ["ev0"] def test_modelgen1(tmpdir): filename1 = tmpdir.join("test1.nii").strpath filename2 = tmpdir.join("test2.nii").strpath Nifti1Image(np.random.rand(10, 10, 10, 200), np.eye(4)).to_filename(filename1) Nifti1Image(np.random.rand(10, 10, 10, 200), np.eye(4)).to_filename(filename2) s = SpecifyModel() s.inputs.input_units = "scans" set_output_units = lambda: setattr(s.inputs, "output_units", "scans") with pytest.raises(TraitError): set_output_units() s.inputs.functional_runs = [filename1, filename2] s.inputs.time_repetition = 6 s.inputs.high_pass_filter_cutoff = 128.0 info = [ Bunch( conditions=["cond1"], onsets=[[2, 50, 100, 180]], durations=[[1]], amplitudes=None, pmod=None, regressors=None, regressor_names=None, tmod=None, ), Bunch( conditions=["cond1"], onsets=[[30, 40, 100, 150]], durations=[[1]], amplitudes=None, pmod=None, regressors=None, regressor_names=None, tmod=None, ), ] s.inputs.subject_info = info res = s.run() assert len(res.outputs.session_info) == 2 assert 
len(res.outputs.session_info[0]["regress"]) == 0 assert len(res.outputs.session_info[0]["cond"]) == 1 npt.assert_almost_equal( np.array(res.outputs.session_info[0]["cond"][0]["onset"]), np.array([12, 300, 600, 1080]), ) info = [ Bunch(conditions=["cond1"], onsets=[[2]], durations=[[1]]), Bunch(conditions=["cond1"], onsets=[[3]], durations=[[1]]), ] s.inputs.subject_info = deepcopy(info) res = s.run() npt.assert_almost_equal( np.array(res.outputs.session_info[0]["cond"][0]["duration"]), np.array([6.0]) ) npt.assert_almost_equal( np.array(res.outputs.session_info[1]["cond"][0]["duration"]), np.array([6.0]) ) info = [ Bunch( conditions=["cond1", "cond2"], onsets=[[2, 3], [2]], durations=[[1, 1], [1]] ), Bunch( conditions=["cond1", "cond2"], onsets=[[2, 3], [2, 4]], durations=[[1, 1], [1, 1]], ), ] s.inputs.subject_info = deepcopy(info) s.inputs.input_units = "scans" res = s.run() npt.assert_almost_equal( np.array(res.outputs.session_info[0]["cond"][0]["duration"]), np.array([6.0, 6.0]), ) npt.assert_almost_equal( np.array(res.outputs.session_info[0]["cond"][1]["duration"]), np.array([6.0]) ) npt.assert_almost_equal( np.array(res.outputs.session_info[1]["cond"][1]["duration"]), np.array([6.0, 6.0]), ) def test_modelgen_spm_concat(tmpdir): filename1 = tmpdir.join("test1.nii").strpath filename2 = tmpdir.join("test2.nii").strpath Nifti1Image(np.random.rand(10, 10, 10, 30), np.eye(4)).to_filename(filename1) Nifti1Image(np.random.rand(10, 10, 10, 30), np.eye(4)).to_filename(filename2) # Test case when only one duration is passed, as being the same for all onsets. 
s = SpecifySPMModel() s.inputs.input_units = "secs" s.inputs.concatenate_runs = True setattr(s.inputs, "output_units", "secs") assert s.inputs.output_units == "secs" s.inputs.functional_runs = [filename1, filename2] s.inputs.time_repetition = 6 s.inputs.high_pass_filter_cutoff = 128.0 info = [ Bunch(conditions=["cond1"], onsets=[[2, 50, 100, 170]], durations=[[1]]), Bunch(conditions=["cond1"], onsets=[[30, 40, 100, 150]], durations=[[1]]), ] s.inputs.subject_info = deepcopy(info) res = s.run() assert len(res.outputs.session_info) == 1 assert len(res.outputs.session_info[0]["regress"]) == 1 assert np.sum(res.outputs.session_info[0]["regress"][0]["val"]) == 30 assert len(res.outputs.session_info[0]["cond"]) == 1 npt.assert_almost_equal( np.array(res.outputs.session_info[0]["cond"][0]["onset"]), np.array([2.0, 50.0, 100.0, 170.0, 210.0, 220.0, 280.0, 330.0]), ) npt.assert_almost_equal( np.array(res.outputs.session_info[0]["cond"][0]["duration"]), np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]), ) # Test case of scans as output units instead of seconds setattr(s.inputs, "output_units", "scans") assert s.inputs.output_units == "scans" s.inputs.subject_info = deepcopy(info) res = s.run() npt.assert_almost_equal( np.array(res.outputs.session_info[0]["cond"][0]["onset"]), np.array([2.0, 50.0, 100.0, 170.0, 210.0, 220.0, 280.0, 330.0]) / 6, ) # Test case for no concatenation with seconds as output units s.inputs.concatenate_runs = False s.inputs.subject_info = deepcopy(info) s.inputs.output_units = "secs" res = s.run() npt.assert_almost_equal( np.array(res.outputs.session_info[0]["cond"][0]["onset"]), np.array([2.0, 50.0, 100.0, 170.0]), ) # Test case for variable number of events in separate runs, sometimes unique. 
filename3 = tmpdir.join("test3.nii").strpath Nifti1Image(np.random.rand(10, 10, 10, 30), np.eye(4)).to_filename(filename3) s.inputs.functional_runs = [filename1, filename2, filename3] info = [ Bunch( conditions=["cond1", "cond2"], onsets=[[2, 3], [2]], durations=[[1, 1], [1]] ), Bunch( conditions=["cond1", "cond2"], onsets=[[2, 3], [2, 4]], durations=[[1, 1], [1, 1]], ), Bunch( conditions=["cond1", "cond2"], onsets=[[2, 3], [2]], durations=[[1, 1], [1]] ), ] s.inputs.subject_info = deepcopy(info) res = s.run() npt.assert_almost_equal( np.array(res.outputs.session_info[0]["cond"][0]["duration"]), np.array([1.0, 1.0]), ) npt.assert_almost_equal( np.array(res.outputs.session_info[0]["cond"][1]["duration"]), np.array([1.0]) ) npt.assert_almost_equal( np.array(res.outputs.session_info[1]["cond"][1]["duration"]), np.array([1.0, 1.0]), ) npt.assert_almost_equal( np.array(res.outputs.session_info[2]["cond"][1]["duration"]), np.array([1.0]) ) # Test case for variable number of events in concatenated runs, sometimes unique. 
s.inputs.concatenate_runs = True info = [ Bunch( conditions=["cond1", "cond2"], onsets=[[2, 3], [2]], durations=[[1, 1], [1]] ), Bunch( conditions=["cond1", "cond2"], onsets=[[2, 3], [2, 4]], durations=[[1, 1], [1, 1]], ), Bunch( conditions=["cond1", "cond2"], onsets=[[2, 3], [2]], durations=[[1, 1], [1]] ), ] s.inputs.subject_info = deepcopy(info) res = s.run() npt.assert_almost_equal( np.array(res.outputs.session_info[0]["cond"][0]["duration"]), np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0]), ) npt.assert_almost_equal( np.array(res.outputs.session_info[0]["cond"][1]["duration"]), np.array([1.0, 1.0, 1.0, 1.0]), ) def test_modelgen_sparse(tmpdir): filename1 = tmpdir.join("test1.nii").strpath filename2 = tmpdir.join("test2.nii").strpath Nifti1Image(np.random.rand(10, 10, 10, 50), np.eye(4)).to_filename(filename1) Nifti1Image(np.random.rand(10, 10, 10, 50), np.eye(4)).to_filename(filename2) s = SpecifySparseModel() s.inputs.input_units = "secs" s.inputs.functional_runs = [filename1, filename2] s.inputs.time_repetition = 6 info = [ Bunch(conditions=["cond1"], onsets=[[0, 50, 100, 180]], durations=[[2]]), Bunch(conditions=["cond1"], onsets=[[30, 40, 100, 150]], durations=[[1]]), ] s.inputs.subject_info = info s.inputs.volumes_in_cluster = 1 s.inputs.time_acquisition = 2 s.inputs.high_pass_filter_cutoff = np.inf res = s.run() assert len(res.outputs.session_info) == 2 assert len(res.outputs.session_info[0]["regress"]) == 1 assert len(res.outputs.session_info[0]["cond"]) == 0 s.inputs.stimuli_as_impulses = False res = s.run() assert res.outputs.session_info[0]["regress"][0]["val"][0] == 1.0 s.inputs.model_hrf = True res = s.run() npt.assert_almost_equal( res.outputs.session_info[0]["regress"][0]["val"][0], 0.016675298129743384 ) assert len(res.outputs.session_info[0]["regress"]) == 1 s.inputs.use_temporal_deriv = True res = s.run() assert len(res.outputs.session_info[0]["regress"]) == 2 npt.assert_almost_equal( res.outputs.session_info[0]["regress"][0]["val"][0], 
0.016675298129743384 ) npt.assert_almost_equal( res.outputs.session_info[1]["regress"][1]["val"][5], 0.007671459162258378 ) nipype-1.7.0/nipype/algorithms/tests/test_moments.py000066400000000000000000000323051413403311400227320ustar00rootroot00000000000000# -*- coding: utf-8 -*- import numpy as np from nipype.algorithms.misc import calc_moments def test_skew(tmpdir): data = """14.62418305 5.916396751 -1.658088086 4.71113546 1.598428608 5.612553811 -5.004056368 -4.057513911 11.16365251 17.32688599 -3.099920667 2.630189741 2.389709914 0.379332731 -0.2899694205 -4.363591482 2.059205599 23.90705054 0.7180462297 -1.976963652 7.487682025 -5.583986129 1.094800525 -2.319858134 -1.907579712 22.08277347 4.595575886 -3.869054671 8.214834769 -3.442156385 2.428766374 0.7736184662 0.6535290043 14.1320384 0.9458768261 -2.577892846 -0.8925440241 3.177128674 6.048546332 1.736059675 3.149271524 8.106285467 -6.173280371 -0.5146958863 -11.83574747 4.066575201 9.160589786 0.1680632718 3.089673173 8.736851925 -5.624227736 1.386441126 -12.58621755 -0.726443824 8.036414499 -0.3318169666 2.685349599 9.968755255 2.965603277 2.634928414 -3.783441929 -1.858587372 3.238274675 2.594880211 0.870577208 2.323455904 7.840351954 1.635436162 2.451630603 2.834494164 -1.384081764 5.840475644 -4.421008251 -12.78755879 2.985581265 -1.609381512 -0.1816579797 5.448215202 -2.855889998 5.041186537 -8.502455278 -22.66799593 -3.964218147 -4.180363107 -5.061764789 2.439737668 -0.9988071581 1.437142327 -5.355058719 -19.00567875 -4.803737548 -3.884369973 -4.977945181 -0.4758749938 1.894453988 0.003263759218 1.29682909 -8.295173365 -1.51226274 -1.611159469 -2.5403281 -0.2155584519 2.597114132 1.16528519 3.162947556 -3.093405654 0.4782790153 1.015061011 -2.755821487 -1.015685899 0.1402527399 0.05435017236 0.9158883917 -6.679241736 0.9376568982 3.175011335 -2.712383777 -3.836563374 -2.270503748 -4.593165145 0.5468675209 -11.14130502 1.420140475 3.506045445 2.777240829 -3.14187819 -0.7823285883 -6.84663074 
-0.5754863055 -9.638785593 0.2926825231 1.039079149 9.613209645 1.300380032 3.755092776 -2.30881605 -9.12095608 -5.422145216 -3.089096046 -1.913969236 8.36828235 1.622740946 6.756285589 4.803793558 -18.6459149 -5.677906762 -4.447399529 -1.826561667 -1.179681537 -3.51737806 6.062770694 7.743917051 -14.12032005 -9.346953111 -0.3927872312 0.5116398162 -8.814603334 -4.191932775 3.735940996 5.926107194 3.984986352 -7.490234063 5.101302343 0.6359344324 -8.098435707 3.372259941 1.603560776 2.787631701 16.74369044 2.523688856 4.825375014 -2.888386026 -2.929939078 7.41176576 -0.9444665519 -0.5476924783 13.0864062 10.44887074 -2.409155335 -6.466987193 2.038766622 -0.9844478726 -3.872608358 -3.903240663 3.888161509 7.356308659 -9.783752602 -6.593576679 7.785360016 -11.59798121 -5.359996968 -4.646576281 2.919034842 0.4926039084 -9.765686304 -3.169484175 13.3885185 -10.00053277 -5.284251069 -1.953467094 7.762685816 3.138596183 -2.417670781 2.087535944 12.09072814 0.3201456619 -5.986630196 -0.393473785 8.598656701 12.64638617 4.32929224 6.665685612 2.52013659 4.924021467 -7.729146671 -2.531538284 4.286211902 12.70121508 4.197284784 7.586579174 -4.511459665 1.039992021 -7.200406996 -2.678018972 -0.206805413 -1.118395095 1.251956053 4.927663964 -0.3269306726 -1.614001868 -2.858296125 3.708027659 -3.615745533 -13.26040515 4.163662563 3.376525012 6.876574727 1.021356663 1.813515644 9.401028448 -6.392625018 -11.19412506 11.70010341 5.557449086 3.188483207 3.033109557 3.108015432 5.00732323 -5.697688304 -1.564055358 12.53451981 6.641295722 -9.330508253 1.60952695 1.985401431 -4.635334005 -0.4739120366 5.308731294 3.209488234 1.907340382 -15.26443399 1.262158357 1.288838724 -6.54661201 3.733208755 11.99608217 -4.121352088 -3.787629919 -8.977806581 3.760241115 1.048439633 -0.2497259139 1.633682769 21.98252106 0.008457593931 -2.863301753 -1.475378656 4.854200462 -0.156717616 2.028483989 -4.262983941 24.73198623 6.529712692 1.286325062 -1.857794734 2.962236297 -1.586154566 -3.6478191 
-7.502330557 10.60931417 2.397686502 -1.56459968 -4.721655517 2.006857078 -1.490344215 -7.044842318 -5.198726771 -8.273929595 -7.6127574 -11.03862432 -1.592101433 3.747829535 -0.06779667515 -2.412618507 0.7334095101 -11.76661769 -9.165804187 -14.81298889 5.36362746 4.955331255 1.673488979 2.0899358 5.517823916 -1.529874203 -2.421802273 -6.947139589 8.366593034 3.55375893 4.03335273 -0.05524186477 1.474077483 2.649817521 7.255506458 6.068405441 -2.220943179 -0.6343270953 1.382522916 -2.748044018 -6.776840898 2.855345278 -3.570626044 1.654853143 -2.838161622 0.755210647 7.252510904 1.235575241 -14.86022341 -0.8943548346 -10.36165869 -1.966680076 -3.641426564 -3.670112785 8.644955043 6.859610046 -7.145239483 -0.1458937017 -3.867994525 -0.9484554762 -2.48227248 -8.36071796 2.539637492 5.399990929 8.804929045 1.925551314 3.240568033 1.273961559 2.104351411 -6.141864838 -5.255423549 -0.7896387751 9.735755254 -1.862844212 -2.552156104 -0.3902178948 5.745817797 -1.515932976 -8.546922674 -3.440929455 -5.837957148 -8.226266393 -13.20837554 -4.385043051 2.553090991 -4.209818986 -8.331176217 -1.707250641 -12.64051676 -8.2399894 -12.76990779 -5.960467624 -4.294427772 -10.92374675 -8.6902905 0.3421994093 1.17028221 -1.953361346 -2.607159313 -4.896369845 -4.519583123 -8.055510792 -9.019182555 3.36412153 14.48433641 2.152584104 3.178007658 -3.9792054 3.873546228 5.321306118 -5.445499499 8.684509027 8.116988393 0.4683619278 1.046001596 -3.128586059 10.0250152 12.58326776 1.447856102 10.18164703 -4.706381289 -1.788728553 0.6563335204 -0.5831451131 5.744824049 3.988876139 5.65836796 2.189197844 -2.76704126 -0.495980308 6.533235978 2.372759856 -2.792331174 -7.896310272 3.502571539 -8.556072249 8.315654337 0.7043190657 11.38508989 2.565286445 -5.081739754 -6.900720718 -1.667312154 -10.59024727 9.909297104 -2.934946689 8.968652164 -0.5610029798 -0.6957945725 3.815352939 -4.277309457 -4.346939024 3.809478921 -8.178727502 2.78966603 -4.568498377 3.295953611 9.457549108 -2.931773943 
-0.04922082646 4.940986376 -6.906199411 -0.6726182267 -6.550149966 3.251783239 6.324220141 0.1496185048 -1.7701633 10.55846735 1.720423345 -0.02248084003 -4.475053837 0.3943175795 3.615274407 3.17786214 -4.661015894 5.164991215 7.975239079 2.030845129 1.259865261 -3.543706118 6.424886561 5.257164014 -5.686755714 -7.85360929 4.585684687 2.641661508 6.399259194 -5.791994946 9.620021677 5.059618162 -5.841773643 -7.887333445 -1.663863126 0.531225876 6.442066641 -2.580841985 8.356612294 2.609875283 -3.391732494 7.467417207 0.7346301535 -2.719728468 2.822035284 4.54698989 4.221046784 0.791568596 3.728706407 14.76100347 9.382305581 -3.17219641 1.381585183 7.754995237 -0.3908054543 1.355349478 9.807914939 0.1267792801 9.818588278 0.5608772817 3.633460684 3.711951896 -5.421004876 1.162611597 7.001274262 -19.35283277 -2.103358718 4.16130701 4.67192889 -0.8231375514 -8.81474386 -2.846417531 -1.268859264 -20.80038431 -11.76135621 2.944594891 1.64388247 -0.1668629943 -6.707442921 -6.544901517 -3.830974298 -5.592905106 -6.057725588 -1.233208621 -1.339964983 0.7299911265 -0.7530015377 -3.117175727 1.142381884 7.890421323 8.119524766 -2.606602104 0.007101965698 -4.473969864 1.35750371 5.357618774 4.161238035 9.600782899 14.52365435 0.1990637024 3.403466406 -11.59507852 -3.675154543 8.718678202 0.7825822225 3.703846665 8.748127367 3.135332804 4.127582534 -12.38852274 -9.447080613 3.417599727 -1.915488323 -3.011725724 -0.5381126202 3.567929983 2.184591464 -7.411651508 -9.252946446 -1.827784625 1.560496584 -7.142629796 -5.355184696 3.289780212 1.113331632 -3.105505654 -5.606446238 0.1961208934 6.334603712 -6.659543803 -4.245675975 3.726757782 1.953178495 -0.7484610023 -4.426403774 3.716311729 6.200735049 -1.643440395 0.7536090906 2.509268017 2.15471156 2.374200456 -3.774138064 -0.1428981969 2.646676328 3.686406766 4.827058909 -2.458101484 -0.39559615 5.082577298 3.167157352 -8.147321924 -0.03506891856 4.407495284 2.5606793 -8.149493446 -4.632729429 4.938050013 14.56344531 
-9.374945991 -1.3893417 -0.1687177084 -4.106757231 -9.343602374 -7.415904922 4.749022091 18.81314153 -1.749200795 -2.02566815 -6.507688641 -6.001538055 -6.108916568 -6.784929595 7.21051134 10.59847744 5.776257506 -0.4990570991 -9.820082348 -0.5741078285 -4.687969138 -4.377866052 7.40862329 -0.06470407472 6.857336593 2.745243336 -7.04365894 2.689020958 -8.804350547 -3.506610093 0.5732906688 -1.771827007 4.332768659 3.537426733 -0.4346222942 -2.295147419 -12.91289393 -3.95705062 -7.130741497 1.478867856 2.340197798 -0.2224791818 2.355519667 -7.446912611 -8.580935982 -1.515500603 -6.545362285 -2.460234117 0.4822626914 -5.261252431 -3.230857748 -4.456435972 3.105258325 4.868182005 -0.3155725672 -12.9461276 -1.81314629 -7.915543953 -10.61694158 1.023409988 11.23695246 9.13393953 2.080132446 -15.68433051 -2.452603277 -8.067702457 -8.952785439 0.3914623321 9.072213866 5.788054925 0.5661677477 -4.862572943 -1.253393229 -6.497656047 1.825216246 -2.868761361 2.684946057 -1.702605515 2.524615008 6.658427102 -1.464383881 -3.333412097 10.52499456 -1.807928838 1.602770946 -5.693835167 7.025193015 6.172728664 -3.989160551 -0.7754719889 10.83430082 0.3010957187 5.703164372 -4.7215044 5.747620411 -0.6137370397 -5.393253651 -1.967790019 9.084992271 -1.297359974 7.313272774 -2.919262371 -0.341939585 -0.488964096 -3.962652217 -5.129527247 11.86896398 -0.4901633845 3.193953846 -1.811431925 -0.3604987261 6.192234507 -2.348495577 -4.159036411 14.81736012 7.870835671 -2.04922723 0.122245812 7.807201578 8.435263453 -1.994392703 2.494961459 10.99679669 13.62141018 -3.175917696 1.68947873 12.43613872 4.131979444 -0.8035598171 8.583091116 3.538171963 6.008200439 0.5876902994 0.4403643142 6.183013749 2.012581919 1.090536757 8.392496526 0.5460594103 -6.259032909 6.647104433 -1.43557129 -3.452884137 4.366160275 -0.2274303705 3.900139848 1.772017802 -8.109091085 10.50095909 -0.1621391129 -7.608906136 2.481208401 -4.509906047 0.7763248812 0.606115406 -2.603199426 7.692974034 2.104967053 
-8.226098406 -6.837921596 -4.561655055 1.015397953 -2.978847372 -2.385761908 -0.8339871055 0.6707971346 -9.874595181 -13.39338209 3.157380259 2.413897035 -2.985013991 -5.160444086 -7.29279473 -2.371762765 -10.03622895 -9.34912711 10.97609581 2.654665151 -1.068091568 -0.2479914452 -6.107351633 -0.9239821871 -5.835733231 -2.189236707 9.811317248 1.508754364 -6.520427038 7.430392097 -1.95095948 4.15525371 -2.032963385 -2.693509918 2.091753969 0.4782648423 -18.09299987 4.740223292 -2.838854108 6.118069011 -3.664423954 -7.91518773 -2.533067915 1.120941519 -19.32711056 -3.231687054 -8.04776777 3.689162869 -6.952885159 -6.854774161 -1.172264884 2.581894309 -2.203996345 -0.5339747203 -10.27858531 1.833505163 -5.406679162 1.678416611 0.871971912 1.837113402 15.60657966 8.749980935 -7.560269196 1.70515063 0.1003191195 8.04135078 1.044572756 -1.582641946 12.19564564 5.273436246 -4.367517279 -0.0400759142 4.431313549 7.067826794 2.741622337 -3.458851463 -6.44120462 -9.849684434 -1.946651925 -2.183436603 6.686267514 4.016449169 6.302612811 -0.9698019507 -13.80878408 -13.92578887 3.071419193 -0.156449455 8.551444945 4.051266929 5.541317929 1.901010931 -1.084801367 -1.267516734 9.774222689 3.461150291 8.195043157 4.77412064 -2.223359889 0.07143463336 11.95939854 7.195316999 11.93418631 1.472618288 3.247038347 2.656123844 -9.091445458 -4.097157466 -2.752420619 -1.103781682 -3.382675846 -3.9326499 0.3168555978 -2.600573426 -9.409987851 -1.564842317 -11.68718367 -12.62978052 -7.436711849 -11.05071165 -4.535693861 -4.973062537 -9.154275121 -0.8478464554 -11.1129098 -8.014294516 -5.818564146 -6.557508409 -4.920322355 -2.444494132 -0.762850219 -1.035995467 -0.1942650118 5.507757423 -0.6713848498 2.045539379 0.2907563314 2.654730384 5.268838031 -2.711154892 6.638825325 9.118078409 2.220738816 5.875202986 0.6059672284 -5.305207318 -0.08004872831 -2.950039659 12.18704972 0.6256114468 2.352153233 8.701077613 4.804756766 -6.163162012 -1.779998967 -6.493561445 4.442326811 -15.10908307 
4.919949591 3.969210961 7.004029439 0.1398435001 -4.659976897 -3.899267451 -7.594265524 -20.77328745 5.94521557 -2.385814065 3.224509406 8.943882025 -3.270587613 3.470325906 -8.696673766 -12.29052026 -0.3763403003 -5.55470641 -3.51572569 12.51259902 3.753517263 8.67338497 -0.5057854071 -2.415896554 -9.663571931 -5.714041661 -6.037933426 8.673756933 10.03557773 8.629816199 3.622185659 0.4716627142 -10.92515308 -3.705286841 -2.776089545 2.271920902 9.251504922 5.744980887 """ f = tmpdir.join("filetest") f.write(data) skewness = calc_moments(f.strpath, 3) assert np.allclose( skewness, np.array( [ -0.23418937314622, 0.2946365564954823, -0.05781002053540932, -0.3512508282578762, -0.07035664150233077, -0.01935867699166935, 0.00483863369427428, 0.21879460029850167, ] ), ) nipype-1.7.0/nipype/algorithms/tests/test_normalize_tpms.py000066400000000000000000000025611413403311400243140ustar00rootroot00000000000000# -*- coding: utf-8 -*- # coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import pytest from nipype.testing import example_data import numpy as np import nibabel as nb import nipype.testing as nit from nipype.algorithms.misc import normalize_tpms def test_normalize_tpms(tmpdir): in_mask = example_data("tpms_msk.nii.gz") mskdata = np.asanyarray(nb.load(in_mask).dataobj) mskdata[mskdata > 0.0] = 1.0 mapdata = [] in_files = [] out_files = [] for i in range(3): mapname = example_data("tpm_%02d.nii.gz" % i) filename = tmpdir.join("modtpm_%02d.nii.gz" % i).strpath out_files.append(tmpdir.join("normtpm_%02d.nii.gz" % i).strpath) im = nb.load(mapname) data = im.get_fdata() mapdata.append(data) nb.Nifti1Image(2.0 * (data * mskdata), im.affine, im.header).to_filename( filename ) in_files.append(filename) normalize_tpms(in_files, in_mask, out_files=out_files) sumdata = np.zeros_like(mskdata) for i, tstfname in enumerate(out_files): normdata = nb.load(tstfname).get_fdata() sumdata += 
normdata assert np.all(normdata[mskdata == 0] == 0) assert np.allclose(normdata, mapdata[i]) assert np.allclose(sumdata[sumdata > 0.0], 1.0) nipype-1.7.0/nipype/algorithms/tests/test_rapidart.py000066400000000000000000000065651413403311400230670ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import numpy as np import numpy.testing as npt from .. import rapidart as ra from ...interfaces.base import Bunch def test_ad_init(): ad = ra.ArtifactDetect(use_differences=[True, False]) assert ad.inputs.use_differences[0] assert not ad.inputs.use_differences[1] def test_ad_output_filenames(): ad = ra.ArtifactDetect() outputdir = "/tmp" f = "motion.nii" ( outlierfile, intensityfile, statsfile, normfile, plotfile, displacementfile, maskfile, ) = ad._get_output_filenames(f, outputdir) assert outlierfile == "/tmp/art.motion_outliers.txt" assert intensityfile == "/tmp/global_intensity.motion.txt" assert statsfile == "/tmp/stats.motion.txt" assert normfile == "/tmp/norm.motion.txt" assert plotfile == "/tmp/plot.motion.png" assert displacementfile == "/tmp/disp.motion.nii" assert maskfile == "/tmp/mask.motion.nii" def test_ad_get_affine_matrix(): matrix = ra._get_affine_matrix(np.array([0]), "SPM") npt.assert_equal(matrix, np.eye(4)) # test translation params = [1, 2, 3] matrix = ra._get_affine_matrix(params, "SPM") out = np.eye(4) out[0:3, 3] = params npt.assert_equal(matrix, out) # test rotation params = np.array([0, 0, 0, np.pi / 2, np.pi / 2, np.pi / 2]) matrix = ra._get_affine_matrix(params, "SPM") out = np.array([0, 0, 1, 0, 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1]).reshape((4, 4)) npt.assert_almost_equal(matrix, out) # test scaling params = np.array([0, 0, 0, 0, 0, 0, 1, 2, 3]) matrix = ra._get_affine_matrix(params, "SPM") out = np.array([1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 3, 0, 0, 0, 0, 1]).reshape((4, 4)) npt.assert_equal(matrix, out) # test shear params = 
np.array([0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 3]) matrix = ra._get_affine_matrix(params, "SPM") out = np.array([1, 1, 2, 0, 0, 1, 3, 0, 0, 0, 1, 0, 0, 0, 0, 1]).reshape((4, 4)) npt.assert_equal(matrix, out) def test_ad_get_norm(): params = np.array( [ 0, 0, 0, 0, 0, 0, 0, 0, 0, np.pi / 4, np.pi / 4, np.pi / 4, 0, 0, 0, -np.pi / 4, -np.pi / 4, -np.pi / 4, ] ).reshape((3, 6)) norm, _ = ra._calc_norm(params, False, "SPM") npt.assert_almost_equal(norm, np.array([18.86436316, 37.74610158, 31.29780829])) norm, _ = ra._calc_norm(params, True, "SPM") npt.assert_almost_equal(norm, np.array([0.0, 143.72192614, 173.92527131])) def test_sc_init(): sc = ra.StimulusCorrelation(concatenated_design=True) assert sc.inputs.concatenated_design def test_sc_populate_inputs(): sc = ra.StimulusCorrelation() inputs = Bunch( realignment_parameters=None, intensity_values=None, spm_mat_file=None, concatenated_design=None, ) assert set(sc.inputs.__dict__.keys()) == set(inputs.__dict__.keys()) def test_sc_output_filenames(): sc = ra.StimulusCorrelation() outputdir = "/tmp" f = "motion.nii" corrfile = sc._get_output_filenames(f, outputdir) assert corrfile == "/tmp/qa.motion_stimcorr.txt" nipype-1.7.0/nipype/algorithms/tests/test_splitmerge.py000066400000000000000000000017061413403311400234240ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- from nipype.testing import example_data def test_split_and_merge(tmpdir): import numpy as np import nibabel as nb import os.path as op import os from nipype.algorithms.misc import split_rois, merge_rois in_mask = example_data("tpms_msk.nii.gz") dwfile = tmpdir.join("dwi.nii.gz").strpath mask_img = nb.load(in_mask) mskdata = np.asanyarray(mask_img.dataobj) aff = mask_img.affine dwshape = (mskdata.shape[0], mskdata.shape[1], mskdata.shape[2], 6) dwdata = np.random.normal(size=dwshape) tmpdir.chdir() nb.Nifti1Image(dwdata.astype(np.float32), aff, None).to_filename(dwfile) resdw, resmsk, resid = split_rois(dwfile, in_mask, roishape=(20, 20, 
2)) merged = merge_rois(resdw, resid, in_mask) dwmerged = nb.load(merged).get_fdata(dtype=np.float32) dwmasked = dwdata * mskdata[:, :, :, np.newaxis] assert np.allclose(dwmasked, dwmerged) nipype-1.7.0/nipype/algorithms/tests/test_stats.py000066400000000000000000000032201413403311400224000ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import numpy as np import nibabel as nb from nipype.algorithms.stats import ActivationCount import pytest def test_ActivationCount(tmpdir): tmpdir.chdir() in_files = ["{:d}.nii".format(i) for i in range(3)] for fname in in_files: nb.Nifti1Image(np.random.normal(size=(5, 5, 5)), np.eye(4)).to_filename(fname) acm = ActivationCount(in_files=in_files, threshold=1.65) res = acm.run() diff = nb.load(res.outputs.out_file) pos = nb.load(res.outputs.acm_pos) neg = nb.load(res.outputs.acm_neg) assert np.allclose(diff.get_fdata(), pos.get_fdata() - neg.get_fdata()) @pytest.mark.parametrize( "threshold, above_thresh", [ (1, 15.865), # above one standard deviation (one side) (2, 2.275), # above two standard deviations (one side) (3, 0.135), # above three standard deviations (one side) ], ) def test_ActivationCount_normaldistr(tmpdir, threshold, above_thresh): tmpdir.chdir() in_files = ["{:d}.nii".format(i) for i in range(3)] for fname in in_files: nb.Nifti1Image(np.random.normal(size=(100, 100, 100)), np.eye(4)).to_filename( fname ) acm = ActivationCount(in_files=in_files, threshold=threshold) res = acm.run() pos = nb.load(res.outputs.acm_pos) neg = nb.load(res.outputs.acm_neg) assert np.isclose( pos.get_fdata().mean(), above_thresh * 1.0e-2, rtol=0.1, atol=1.0e-4 ) assert np.isclose( neg.get_fdata().mean(), above_thresh * 1.0e-2, rtol=0.1, atol=1.0e-4 ) 
nipype-1.7.0/nipype/caching/000077500000000000000000000000001413403311400157155ustar00rootroot00000000000000nipype-1.7.0/nipype/caching/__init__.py000066400000000000000000000000631413403311400200250ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .memory import Memory nipype-1.7.0/nipype/caching/memory.py000066400000000000000000000241651413403311400176070ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ Using nipype with persistence and lazy recomputation but without explicit name-steps pipeline: getting back scope in command-line based programming. """ import os import hashlib import pickle import time import shutil import glob from ..interfaces.base import BaseInterface from ..pipeline.engine import Node from ..pipeline.engine.utils import modify_paths ############################################################################### # PipeFunc object: callable interface to nipype.interface objects class PipeFunc(object): """Callable interface to nipype.interface objects Use this to wrap nipype.interface object and call them specifying their input with keyword arguments:: fsl_merge = PipeFunc(fsl.Merge, base_dir='.') out = fsl_merge(in_files=files, dimension='t') """ def __init__(self, interface, base_dir, callback=None): """ Parameters =========== interface: a nipype interface class The interface class to wrap base_dir: a string The directory in which the computation will be stored callback: a callable An optional callable called each time after the function is called. """ if not (isinstance(interface, type) and issubclass(interface, BaseInterface)): raise ValueError( "the interface argument should be a nipype " "interface class, but %s (type %s) was passed." 
% (interface, type(interface)) ) self.interface = interface base_dir = os.path.abspath(base_dir) if not os.path.exists(base_dir) and os.path.isdir(base_dir): raise ValueError("base_dir should be an existing directory") self.base_dir = base_dir doc = "%s\n%s" % (self.interface.__doc__, self.interface.help(returnhelp=True)) self.__doc__ = doc self.callback = callback def __call__(self, **kwargs): kwargs = modify_paths(kwargs, relative=False) interface = self.interface() # Set the inputs early to get some argument checking interface.inputs.trait_set(**kwargs) # Make a name for our node inputs = interface.inputs.get_hashval() hasher = hashlib.new("md5") hasher.update(pickle.dumps(inputs)) dir_name = "%s-%s" % ( interface.__class__.__module__.replace(".", "-"), interface.__class__.__name__, ) job_name = hasher.hexdigest() node = Node(interface, name=job_name) node.base_dir = os.path.join(self.base_dir, dir_name) cwd = os.getcwd() try: out = node.run() finally: # node.run() changes to the node directory - if something goes # wrong before it cds back you would end up in strange places os.chdir(cwd) if self.callback is not None: self.callback(dir_name, job_name) return out def __repr__(self): return "{}({}.{}), base_dir={})".format( self.__class__.__name__, self.interface.__module__, self.interface.__name__, self.base_dir, ) ############################################################################### # Memory manager: provide some tracking about what is computed when, to # be able to flush the disk def read_log(filename, run_dict=None): if run_dict is None: run_dict = dict() with open(filename, "r") as logfile: for line in logfile: dir_name, job_name = line[:-1].split("/") jobs = run_dict.get(dir_name, set()) jobs.add(job_name) run_dict[dir_name] = jobs return run_dict def rm_all_but(base_dir, dirs_to_keep, warn=False): """Remove all the sub-directories of base_dir, but those listed Parameters ============ base_dir: string The base directory dirs_to_keep: set The names 
of the directories to keep """ try: all_dirs = os.listdir(base_dir) except OSError: "Dir has been deleted" return all_dirs = [d for d in all_dirs if not d.startswith("log.")] dirs_to_rm = list(dirs_to_keep.symmetric_difference(all_dirs)) for dir_name in dirs_to_rm: dir_name = os.path.join(base_dir, dir_name) if os.path.exists(dir_name): if warn: print("removing directory: %s" % dir_name) shutil.rmtree(dir_name) class _MemoryCallback(object): "An object to avoid closures and have everything pickle" def __init__(self, memory): self.memory = memory def __call__(self, dir_name, job_name): self.memory._log_name(dir_name, job_name) class Memory(object): """Memory context to provide caching for interfaces Parameters ========== base_dir: string The directory name of the location for the caching Methods ======= cache Creates a cacheable function from an nipype Interface class clear_previous_runs Removes from the disk all the runs that where not used after the creation time of the specific Memory instance clear_previous_runs Removes from the disk all the runs that where not used after the given time """ def __init__(self, base_dir): base_dir = os.path.join(os.path.abspath(base_dir), "nipype_mem") if not os.path.exists(base_dir): os.mkdir(base_dir) elif not os.path.isdir(base_dir): raise ValueError("base_dir should be a directory") self.base_dir = base_dir open(os.path.join(base_dir, "log.current"), "a").close() def cache(self, interface): """Returns a callable that caches the output of an interface Parameters ========== interface: nipype interface The nipype interface class to be wrapped and cached Returns ======= pipe_func: a PipeFunc callable object An object that can be used as a function to apply the interface to arguments. Inputs of the interface are given as keyword arguments, bearing the same name as the name in the inputs specs of the interface. 
Examples ======== >>> from tempfile import mkdtemp >>> mem = Memory(mkdtemp()) >>> from nipype.interfaces import fsl Here we create a callable that can be used to apply an fsl.Merge interface to files >>> fsl_merge = mem.cache(fsl.Merge) Now we apply it to a list of files. We need to specify the list of input files and the dimension along which the files should be merged. >>> results = fsl_merge(in_files=['a.nii', 'b.nii'], ... dimension='t') # doctest: +SKIP We can retrieve the resulting file from the outputs: >>> results.outputs.merged_file # doctest: +SKIP '...' """ return PipeFunc(interface, self.base_dir, _MemoryCallback(self)) def _log_name(self, dir_name, job_name): """Increment counters tracking which cached function get executed.""" base_dir = self.base_dir # Every counter is a file opened in append mode and closed # immediately to avoid race conditions in parallel computing: # file appends are atomic with open(os.path.join(base_dir, "log.current"), "a") as currentlog: currentlog.write("%s/%s\n" % (dir_name, job_name)) t = time.localtime() year_dir = os.path.join(base_dir, "log.%i" % t.tm_year) try: os.mkdir(year_dir) except OSError: "Dir exists" month_dir = os.path.join(year_dir, "%02i" % t.tm_mon) try: os.mkdir(month_dir) except OSError: "Dir exists" with open(os.path.join(month_dir, "%02i.log" % t.tm_mday), "a") as rotatefile: rotatefile.write("%s/%s\n" % (dir_name, job_name)) def clear_previous_runs(self, warn=True): """Remove all the cache that where not used in the latest run of the memory object: i.e. since the corresponding Python object was created. 
Parameters ========== warn: boolean, optional If true, echoes warning messages for all directory removed """ base_dir = self.base_dir latest_runs = read_log(os.path.join(base_dir, "log.current")) self._clear_all_but(latest_runs, warn=warn) def clear_runs_since(self, day=None, month=None, year=None, warn=True): """Remove all the cache that where not used since the given date Parameters ========== day, month, year: integers, optional The integers specifying the latest day (in localtime) that a node should have been accessed to be kept. If not given, the current date is used. warn: boolean, optional If true, echoes warning messages for all directory removed """ t = time.localtime() day = day if day is not None else t.tm_mday month = month if month is not None else t.tm_mon year = year if year is not None else t.tm_year base_dir = self.base_dir cut_off_file = "%s/log.%i/%02i/%02i.log" % (base_dir, year, month, day) logs_to_flush = list() recent_runs = dict() for log_name in glob.glob("%s/log.*/*/*.log" % base_dir): if log_name < cut_off_file: logs_to_flush.append(log_name) else: recent_runs = read_log(log_name, recent_runs) self._clear_all_but(recent_runs, warn=warn) for log_name in logs_to_flush: os.remove(log_name) def _clear_all_but(self, runs, warn=True): """Remove all the runs appart from those given to the function input. 
""" rm_all_but(self.base_dir, set(runs.keys()), warn=warn) for dir_name, job_names in list(runs.items()): rm_all_but(os.path.join(self.base_dir, dir_name), job_names, warn=warn) def __repr__(self): return "{}(base_dir={})".format(self.__class__.__name__, self.base_dir) nipype-1.7.0/nipype/caching/tests/000077500000000000000000000000001413403311400170575ustar00rootroot00000000000000nipype-1.7.0/nipype/caching/tests/__init__.py000066400000000000000000000000301413403311400211610ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/caching/tests/test_memory.py000066400000000000000000000025651413403311400220100ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ Test the nipype interface caching mechanism """ from .. import Memory from ...pipeline.engine.tests.test_engine import EngineTestInterface from ... import config config.set_default_config() nb_runs = 0 class SideEffectInterface(EngineTestInterface): def _run_interface(self, runtime): global nb_runs nb_runs += 1 return super(SideEffectInterface, self)._run_interface(runtime) def test_caching(tmpdir): old_rerun = config.get("execution", "stop_on_first_rerun") try: # Prevent rerun to check that evaluation is computed only once config.set("execution", "stop_on_first_rerun", "true") mem = Memory(tmpdir.strpath) first_nb_run = nb_runs results = mem.cache(SideEffectInterface)(input1=2, input2=1) assert nb_runs == first_nb_run + 1 assert results.outputs.output1 == [1, 2] results = mem.cache(SideEffectInterface)(input1=2, input2=1) # Check that the node hasn't been rerun assert nb_runs == first_nb_run + 1 assert results.outputs.output1 == [1, 2] results = mem.cache(SideEffectInterface)(input1=1, input2=1) # Check that the node hasn been rerun assert nb_runs == first_nb_run + 2 assert results.outputs.output1 == [1, 1] finally: config.set("execution", "stop_on_first_rerun", old_rerun) nipype-1.7.0/nipype/conftest.py000066400000000000000000000023001413403311400165130ustar00rootroot00000000000000import 
os import shutil from tempfile import mkdtemp import pytest import numpy import py.path as pp NIPYPE_DATADIR = os.path.realpath( os.path.join(os.path.dirname(__file__), "testing/data") ) temp_folder = mkdtemp() data_dir = os.path.join(temp_folder, "data") shutil.copytree(NIPYPE_DATADIR, data_dir) @pytest.fixture(autouse=True) def add_np(doctest_namespace): doctest_namespace["np"] = numpy doctest_namespace["os"] = os doctest_namespace["pytest"] = pytest doctest_namespace["datadir"] = data_dir @pytest.fixture(autouse=True) def _docdir(request): """Grabbed from https://stackoverflow.com/a/46991331""" # Trigger ONLY for the doctests. doctest_plugin = request.config.pluginmanager.getplugin("doctest") if isinstance(request.node, doctest_plugin.DoctestItem): # Get the fixture dynamically by its name. tmpdir = pp.local(data_dir) # Chdir only for the duration of the test. with tmpdir.as_cwd(): yield else: # For normal tests, we have to yield, since this is a yield-fixture. yield def pytest_unconfigure(config): # Delete temp folder after session is finished shutil.rmtree(temp_folder) nipype-1.7.0/nipype/external/000077500000000000000000000000001413403311400161435ustar00rootroot00000000000000nipype-1.7.0/nipype/external/__init__.py000066400000000000000000000000301413403311400202450ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/external/cloghandler.py000066400000000000000000000343501413403311400210040ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright 2008 Lowell Alleman # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy # of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. """ cloghandler.py: A smart replacement for the standard RotatingFileHandler ConcurrentRotatingFileHandler: This class is a log handler which is a drop-in replacement for the python standard log handler 'RotateFileHandler', the primary difference being that this handler will continue to write to the same file if the file cannot be rotated for some reason, whereas the RotatingFileHandler will strictly adhere to the maximum file size. Unfortunately, if you are using the RotatingFileHandler on Windows, you will find that once an attempted rotation fails, all subsequent log messages are dropped. The other major advantage of this module is that multiple processes can safely write to a single log file. To put it another way: This module's top priority is preserving your log records, whereas the standard library attempts to limit disk usage, which can potentially drop log messages. If you are trying to determine which module to use, there are number of considerations: What is most important: strict disk space usage or preservation of log messages? What OSes are you supporting? Can you afford to have processes blocked by file locks? Concurrent access is handled by using file locks, which should ensure that log messages are not dropped or clobbered. This means that a file lock is acquired and released for every log message that is written to disk. (On Windows, you may also run into a temporary situation where the log file must be opened and closed for each log message.) This can have potentially performance implications. In my testing, performance was more than adequate, but if you need a high-volume or low-latency solution, I suggest you look elsewhere. See the README file for an example usage of this module. 
""" from builtins import range __version__ = "$Id: cloghandler.py 6175 2009-11-02 18:40:35Z lowell $" __author__ = "Lowell Alleman" __all__ = ["ConcurrentRotatingFileHandler"] import os import sys from random import randint from logging import Handler from logging.handlers import BaseRotatingHandler try: import codecs except ImportError: codecs = None from filelock import SoftFileLock # A client can set this to true to automatically convert relative paths to # absolute paths (which will also hide the absolute path warnings) FORCE_ABSOLUTE_PATH = False class ConcurrentRotatingFileHandler(BaseRotatingHandler): """ Handler for logging to a set of files, which switches from one file to the next when the current file reaches a certain size. Multiple processes can write to the log file concurrently, but this may mean that the file will exceed the given size. """ def __init__( self, filename, mode="a", maxBytes=0, backupCount=0, encoding=None, debug=True, supress_abs_warn=False, ): """ Open the specified file and use it as the stream for logging. By default, the file grows indefinitely. You can specify particular values of maxBytes and backupCount to allow the file to rollover at a predetermined size. Rollover occurs whenever the current log file is nearly maxBytes in length. If backupCount is >= 1, the system will successively create new files with the same pathname as the base file, but with extensions ".1", ".2" etc. appended to it. For example, with a backupCount of 5 and a base file name of "app.log", you would get "app.log", "app.log.1", "app.log.2", ... through to "app.log.5". The file being written to is always "app.log" - when it gets filled up, it is closed and renamed to "app.log.1", and if files "app.log.1", "app.log.2" etc. exist, then they are renamed to "app.log.2", "app.log.3" etc. respectively. If maxBytes is zero, rollover never occurs. On Windows, it is not possible to rename a file that is currently opened by another process. 
This means that it is not possible to rotate the log files if multiple processes is using the same log file. In this case, the current log file will continue to grow until the rotation can be completed successfully. In order for rotation to be possible, all of the other processes need to close the file first. A mechanism, called "degraded" mode, has been created for this scenario. In degraded mode, the log file is closed after each log message is written. So once all processes have entered degraded mode, the next rotate log attempt should be successful and then normal logging can be resumed. This log handler assumes that all concurrent processes logging to a single file will are using only this class, and that the exact same parameters are provided to each instance of this class. If, for example, two different processes are using this class, but with different values for 'maxBytes' or 'backupCount', then odd behavior is expected. The same is true if this class is used by one application, but the RotatingFileHandler is used by another. NOTE: You should always provide 'filename' as an absolute path, since this class will need to re-open the file during rotation. If your application call os.chdir() then subsequent log files could be created in the wrong directory. """ # The question of absolute paths: I'm not sure what the 'right thing' is # to do here. RotatingFileHander simply ignores this possibility. I was # going call os.path.abspath(), but that potentially limits uses. For # example, on Linux (any posix system?) you can rename a directory of a # running app, and the app wouldn't notice as long as it only opens new # files using relative paths. But since that's not a "normal" thing to # do, and having an app call os.chdir() is a much more likely scenario # that should be supported. For the moment, we are just going to warn # the user if they provide a relative path and do some other voodoo # logic that you'll just have to review for yourself. 
# if the given filename contains no path, we make an absolute path if not os.path.isabs(filename): if FORCE_ABSOLUTE_PATH or not os.path.split(filename)[0]: filename = os.path.abspath(filename) elif not supress_abs_warn: from warnings import warn warn( "The given 'filename' should be an absolute path. If your " "application calls os.chdir(), your logs may get messed up. " "Use 'supress_abs_warn=True' to hide this message." ) try: BaseRotatingHandler.__init__(self, filename, mode, encoding) except TypeError: # Due to a different logging release without encoding support (Python 2.4.1 and earlier?) BaseRotatingHandler.__init__(self, filename, mode) self.encoding = encoding self._rotateFailed = False self.maxBytes = maxBytes self.backupCount = backupCount # Prevent multiple extensions on the lock file (Only handles the normal "*.log" case.) self.lock_file = "%s.lock" % filename self.stream_lock = SoftFileLock(self.lock_file) # For debug mode, swap out the "_degrade()" method with a more a verbose one. if debug: self._degrade = self._degrade_debug def _openFile(self, mode): if self.encoding: self.stream = codecs.open(self.baseFilename, mode, self.encoding) else: self.stream = open(self.baseFilename, mode) def acquire(self): """Acquire thread and file locks. Also re-opening log file when running in 'degraded' mode.""" # handle thread lock Handler.acquire(self) self.stream_lock.acquire() if self.stream.closed: self._openFile(self.mode) def release(self): """Release file and thread locks. Flush stream and take care of closing stream in 'degraded' mode.""" try: if not self.stream.closed: self.stream.flush() if self._rotateFailed: self.stream.close() except IOError: if self._rotateFailed: self.stream.close() finally: try: self.stream_lock.release() finally: # release thread lock Handler.release(self) def close(self): """ Closes the stream. """ if not self.stream.closed: self.stream.flush() self.stream.close() Handler.close(self) def flush(self): """flush(): Do nothing. 
Since a flush is issued in release(), we don't do it here. To do a flush here, it would be necessary to re-lock everything, and it is just easier and cleaner to do it all in release(), rather than requiring two lock ops per handle() call. Doing a flush() here would also introduces a window of opportunity for another process to write to the log file in between calling stream.write() and stream.flush(), which seems like a bad thing.""" pass def _degrade(self, degrade, msg, *args): """Set degrade mode or not. Ignore msg.""" self._rotateFailed = degrade del msg, args # avoid pychecker warnings def _degrade_debug(self, degrade, msg, *args): """A more colorful version of _degade(). (This is enabled by passing "debug=True" at initialization). """ if degrade: if not self._rotateFailed: sys.stderr.write( "Degrade mode - ENTERING - (pid=%d) %s\n" % (os.getpid(), msg % args) ) self._rotateFailed = True else: if self._rotateFailed: sys.stderr.write( "Degrade mode - EXITING - (pid=%d) %s\n" % (os.getpid(), msg % args) ) self._rotateFailed = False def doRollover(self): """ Do a rollover, as described in __init__(). """ if self.backupCount <= 0: # Don't keep any backups, just overwrite the existing backup file # Locking doesn't much matter here; since we are overwriting it anyway self.stream.close() self._openFile("w") return self.stream.close() try: # Attempt to rename logfile to tempname: There is a slight race-condition here, but it seems unavoidable tmpname = None while not tmpname or os.path.exists(tmpname): tmpname = "%s.rotate.%08d" % (self.baseFilename, randint(0, 99999999)) try: # Do a rename test to determine if we can successfully rename the log file os.rename(self.baseFilename, tmpname) except (IOError, OSError): exc_value = sys.exc_info()[1] self._degrade( True, "rename failed. File in use? " "exception=%s", exc_value ) return # Q: Is there some way to protect this code from a KeboardInterupt? 
# This isn't necessarily a data loss issue, but it certainly would # break the rotation process during my stress testing. # There is currently no mechanism in place to handle the situation # where one of these log files cannot be renamed. (Example, user # opens "logfile.3" in notepad) for i in range(self.backupCount - 1, 0, -1): sfn = "%s.%d" % (self.baseFilename, i) dfn = "%s.%d" % (self.baseFilename, i + 1) if os.path.exists(sfn): # print "%s -> %s" % (sfn, dfn) if os.path.exists(dfn): os.remove(dfn) os.rename(sfn, dfn) dfn = self.baseFilename + ".1" if os.path.exists(dfn): os.remove(dfn) os.rename(tmpname, dfn) # print "%s -> %s" % (self.baseFilename, dfn) self._degrade(False, "Rotation completed") finally: self._openFile(self.mode) def shouldRollover(self, record): """ Determine if rollover should occur. For those that are keeping track. This differs from the standard library's RotatingLogHandler class. Because there is no promise to keep the file size under maxBytes we ignore the length of the current record. """ del record # avoid pychecker warnings if self._shouldRollover(): # if some other process already did the rollover we might # checked log.1, so we reopen the stream and check again on # the right log file self.stream.close() self._openFile(self.mode) return self._shouldRollover() return False def _shouldRollover(self): if self.maxBytes > 0: # are we rolling over? try: self.stream.seek(0, 2) # due to non-posix-compliant Windows feature except IOError: return True if self.stream.tell() >= self.maxBytes: return True else: self._degrade(False, "Rotation done or not needed at this time") return False # Publish this class to the "logging.handlers" module so that it can be use # from a logging config file via logging.config.fileConfig(). 
import logging.handlers logging.handlers.ConcurrentRotatingFileHandler = ConcurrentRotatingFileHandler nipype-1.7.0/nipype/external/d3.js000066400000000000000000011751661413403311400170300ustar00rootroot00000000000000!function() { var d3 = { version: "3.4.8" }; if (!Date.now) Date.now = function() { return +new Date(); }; var d3_arraySlice = [].slice, d3_array = function(list) { return d3_arraySlice.call(list); }; var d3_document = document, d3_documentElement = d3_document.documentElement, d3_window = window; try { d3_array(d3_documentElement.childNodes)[0].nodeType; } catch (e) { d3_array = function(list) { var i = list.length, array = new Array(i); while (i--) array[i] = list[i]; return array; }; } try { d3_document.createElement("div").style.setProperty("opacity", 0, ""); } catch (error) { var d3_element_prototype = d3_window.Element.prototype, d3_element_setAttribute = d3_element_prototype.setAttribute, d3_element_setAttributeNS = d3_element_prototype.setAttributeNS, d3_style_prototype = d3_window.CSSStyleDeclaration.prototype, d3_style_setProperty = d3_style_prototype.setProperty; d3_element_prototype.setAttribute = function(name, value) { d3_element_setAttribute.call(this, name, value + ""); }; d3_element_prototype.setAttributeNS = function(space, local, value) { d3_element_setAttributeNS.call(this, space, local, value + ""); }; d3_style_prototype.setProperty = function(name, value, priority) { d3_style_setProperty.call(this, name, value + "", priority); }; } d3.ascending = d3_ascending; function d3_ascending(a, b) { return a < b ? -1 : a > b ? 1 : a >= b ? 0 : NaN; } d3.descending = function(a, b) { return b < a ? -1 : b > a ? 1 : b >= a ? 
0 : NaN; }; d3.min = function(array, f) { var i = -1, n = array.length, a, b; if (arguments.length === 1) { while (++i < n && !((a = array[i]) != null && a <= a)) a = undefined; while (++i < n) if ((b = array[i]) != null && a > b) a = b; } else { while (++i < n && !((a = f.call(array, array[i], i)) != null && a <= a)) a = undefined; while (++i < n) if ((b = f.call(array, array[i], i)) != null && a > b) a = b; } return a; }; d3.max = function(array, f) { var i = -1, n = array.length, a, b; if (arguments.length === 1) { while (++i < n && !((a = array[i]) != null && a <= a)) a = undefined; while (++i < n) if ((b = array[i]) != null && b > a) a = b; } else { while (++i < n && !((a = f.call(array, array[i], i)) != null && a <= a)) a = undefined; while (++i < n) if ((b = f.call(array, array[i], i)) != null && b > a) a = b; } return a; }; d3.extent = function(array, f) { var i = -1, n = array.length, a, b, c; if (arguments.length === 1) { while (++i < n && !((a = c = array[i]) != null && a <= a)) a = c = undefined; while (++i < n) if ((b = array[i]) != null) { if (a > b) a = b; if (c < b) c = b; } } else { while (++i < n && !((a = c = f.call(array, array[i], i)) != null && a <= a)) a = undefined; while (++i < n) if ((b = f.call(array, array[i], i)) != null) { if (a > b) a = b; if (c < b) c = b; } } return [ a, c ]; }; d3.sum = function(array, f) { var s = 0, n = array.length, a, i = -1; if (arguments.length === 1) { while (++i < n) if (!isNaN(a = +array[i])) s += a; } else { while (++i < n) if (!isNaN(a = +f.call(array, array[i], i))) s += a; } return s; }; function d3_number(x) { return x != null && !isNaN(x); } d3.mean = function(array, f) { var s = 0, n = array.length, a, i = -1, j = n; if (arguments.length === 1) { while (++i < n) if (d3_number(a = array[i])) s += a; else --j; } else { while (++i < n) if (d3_number(a = f.call(array, array[i], i))) s += a; else --j; } return j ? 
s / j : undefined; }; d3.quantile = function(values, p) { var H = (values.length - 1) * p + 1, h = Math.floor(H), v = +values[h - 1], e = H - h; return e ? v + e * (values[h] - v) : v; }; d3.median = function(array, f) { if (arguments.length > 1) array = array.map(f); array = array.filter(d3_number); return array.length ? d3.quantile(array.sort(d3_ascending), .5) : undefined; }; function d3_bisector(compare) { return { left: function(a, x, lo, hi) { if (arguments.length < 3) lo = 0; if (arguments.length < 4) hi = a.length; while (lo < hi) { var mid = lo + hi >>> 1; if (compare(a[mid], x) < 0) lo = mid + 1; else hi = mid; } return lo; }, right: function(a, x, lo, hi) { if (arguments.length < 3) lo = 0; if (arguments.length < 4) hi = a.length; while (lo < hi) { var mid = lo + hi >>> 1; if (compare(a[mid], x) > 0) hi = mid; else lo = mid + 1; } return lo; } }; } var d3_bisect = d3_bisector(d3_ascending); d3.bisectLeft = d3_bisect.left; d3.bisect = d3.bisectRight = d3_bisect.right; d3.bisector = function(f) { return d3_bisector(f.length === 1 ? function(d, x) { return d3_ascending(f(d), x); } : f); }; d3.shuffle = function(array) { var m = array.length, t, i; while (m) { i = Math.random() * m-- | 0; t = array[m], array[m] = array[i], array[i] = t; } return array; }; d3.permute = function(array, indexes) { var i = indexes.length, permutes = new Array(i); while (i--) permutes[i] = array[indexes[i]]; return permutes; }; d3.pairs = function(array) { var i = 0, n = array.length - 1, p0, p1 = array[0], pairs = new Array(n < 0 ? 
0 : n); while (i < n) pairs[i] = [ p0 = p1, p1 = array[++i] ]; return pairs; }; d3.zip = function() { if (!(n = arguments.length)) return []; for (var i = -1, m = d3.min(arguments, d3_zipLength), zips = new Array(m); ++i < m; ) { for (var j = -1, n, zip = zips[i] = new Array(n); ++j < n; ) { zip[j] = arguments[j][i]; } } return zips; }; function d3_zipLength(d) { return d.length; } d3.transpose = function(matrix) { return d3.zip.apply(d3, matrix); }; d3.keys = function(map) { var keys = []; for (var key in map) keys.push(key); return keys; }; d3.values = function(map) { var values = []; for (var key in map) values.push(map[key]); return values; }; d3.entries = function(map) { var entries = []; for (var key in map) entries.push({ key: key, value: map[key] }); return entries; }; d3.merge = function(arrays) { var n = arrays.length, m, i = -1, j = 0, merged, array; while (++i < n) j += arrays[i].length; merged = new Array(j); while (--n >= 0) { array = arrays[n]; m = array.length; while (--m >= 0) { merged[--j] = array[m]; } } return merged; }; var abs = Math.abs; d3.range = function(start, stop, step) { if (arguments.length < 3) { step = 1; if (arguments.length < 2) { stop = start; start = 0; } } if ((stop - start) / step === Infinity) throw new Error("infinite range"); var range = [], k = d3_range_integerScale(abs(step)), i = -1, j; start *= k, stop *= k, step *= k; if (step < 0) while ((j = start + step * ++i) > stop) range.push(j / k); else while ((j = start + step * ++i) < stop) range.push(j / k); return range; }; function d3_range_integerScale(x) { var k = 1; while (x * k % 1) k *= 10; return k; } function d3_class(ctor, properties) { try { for (var key in properties) { Object.defineProperty(ctor.prototype, key, { value: properties[key], enumerable: false }); } } catch (e) { ctor.prototype = properties; } } d3.map = function(object) { var map = new d3_Map(); if (object instanceof d3_Map) object.forEach(function(key, value) { map.set(key, value); }); else for (var 
key in object) map.set(key, object[key]); return map; }; function d3_Map() {} d3_class(d3_Map, { has: d3_map_has, get: function(key) { return this[d3_map_prefix + key]; }, set: function(key, value) { return this[d3_map_prefix + key] = value; }, remove: d3_map_remove, keys: d3_map_keys, values: function() { var values = []; this.forEach(function(key, value) { values.push(value); }); return values; }, entries: function() { var entries = []; this.forEach(function(key, value) { entries.push({ key: key, value: value }); }); return entries; }, size: d3_map_size, empty: d3_map_empty, forEach: function(f) { for (var key in this) if (key.charCodeAt(0) === d3_map_prefixCode) f.call(this, key.substring(1), this[key]); } }); var d3_map_prefix = "\x00", d3_map_prefixCode = d3_map_prefix.charCodeAt(0); function d3_map_has(key) { return d3_map_prefix + key in this; } function d3_map_remove(key) { key = d3_map_prefix + key; return key in this && delete this[key]; } function d3_map_keys() { var keys = []; this.forEach(function(key) { keys.push(key); }); return keys; } function d3_map_size() { var size = 0; for (var key in this) if (key.charCodeAt(0) === d3_map_prefixCode) ++size; return size; } function d3_map_empty() { for (var key in this) if (key.charCodeAt(0) === d3_map_prefixCode) return false; return true; } d3.nest = function() { var nest = {}, keys = [], sortKeys = [], sortValues, rollup; function map(mapType, array, depth) { if (depth >= keys.length) return rollup ? rollup.call(nest, array) : sortValues ? 
array.sort(sortValues) : array; var i = -1, n = array.length, key = keys[depth++], keyValue, object, setter, valuesByKey = new d3_Map(), values; while (++i < n) { if (values = valuesByKey.get(keyValue = key(object = array[i]))) { values.push(object); } else { valuesByKey.set(keyValue, [ object ]); } } if (mapType) { object = mapType(); setter = function(keyValue, values) { object.set(keyValue, map(mapType, values, depth)); }; } else { object = {}; setter = function(keyValue, values) { object[keyValue] = map(mapType, values, depth); }; } valuesByKey.forEach(setter); return object; } function entries(map, depth) { if (depth >= keys.length) return map; var array = [], sortKey = sortKeys[depth++]; map.forEach(function(key, keyMap) { array.push({ key: key, values: entries(keyMap, depth) }); }); return sortKey ? array.sort(function(a, b) { return sortKey(a.key, b.key); }) : array; } nest.map = function(array, mapType) { return map(mapType, array, 0); }; nest.entries = function(array) { return entries(map(d3.map, array, 0), 0); }; nest.key = function(d) { keys.push(d); return nest; }; nest.sortKeys = function(order) { sortKeys[keys.length - 1] = order; return nest; }; nest.sortValues = function(order) { sortValues = order; return nest; }; nest.rollup = function(f) { rollup = f; return nest; }; return nest; }; d3.set = function(array) { var set = new d3_Set(); if (array) for (var i = 0, n = array.length; i < n; ++i) set.add(array[i]); return set; }; function d3_Set() {} d3_class(d3_Set, { has: d3_map_has, add: function(value) { this[d3_map_prefix + value] = true; return value; }, remove: function(value) { value = d3_map_prefix + value; return value in this && delete this[value]; }, values: d3_map_keys, size: d3_map_size, empty: d3_map_empty, forEach: function(f) { for (var value in this) if (value.charCodeAt(0) === d3_map_prefixCode) f.call(this, value.substring(1)); } }); d3.behavior = {}; d3.rebind = function(target, source) { var i = 1, n = arguments.length, method; 
while (++i < n) target[method = arguments[i]] = d3_rebind(target, source, source[method]); return target; }; function d3_rebind(target, source, method) { return function() { var value = method.apply(source, arguments); return value === source ? target : value; }; } function d3_vendorSymbol(object, name) { if (name in object) return name; name = name.charAt(0).toUpperCase() + name.substring(1); for (var i = 0, n = d3_vendorPrefixes.length; i < n; ++i) { var prefixName = d3_vendorPrefixes[i] + name; if (prefixName in object) return prefixName; } } var d3_vendorPrefixes = [ "webkit", "ms", "moz", "Moz", "o", "O" ]; function d3_noop() {} d3.dispatch = function() { var dispatch = new d3_dispatch(), i = -1, n = arguments.length; while (++i < n) dispatch[arguments[i]] = d3_dispatch_event(dispatch); return dispatch; }; function d3_dispatch() {} d3_dispatch.prototype.on = function(type, listener) { var i = type.indexOf("."), name = ""; if (i >= 0) { name = type.substring(i + 1); type = type.substring(0, i); } if (type) return arguments.length < 2 ? 
this[type].on(name) : this[type].on(name, listener); if (arguments.length === 2) { if (listener == null) for (type in this) { if (this.hasOwnProperty(type)) this[type].on(name, null); } return this; } }; function d3_dispatch_event(dispatch) { var listeners = [], listenerByName = new d3_Map(); function event() { var z = listeners, i = -1, n = z.length, l; while (++i < n) if (l = z[i].on) l.apply(this, arguments); return dispatch; } event.on = function(name, listener) { var l = listenerByName.get(name), i; if (arguments.length < 2) return l && l.on; if (l) { l.on = null; listeners = listeners.slice(0, i = listeners.indexOf(l)).concat(listeners.slice(i + 1)); listenerByName.remove(name); } if (listener) listeners.push(listenerByName.set(name, { on: listener })); return dispatch; }; return event; } d3.event = null; function d3_eventPreventDefault() { d3.event.preventDefault(); } function d3_eventSource() { var e = d3.event, s; while (s = e.sourceEvent) e = s; return e; } function d3_eventDispatch(target) { var dispatch = new d3_dispatch(), i = 0, n = arguments.length; while (++i < n) dispatch[arguments[i]] = d3_dispatch_event(dispatch); dispatch.of = function(thiz, argumentz) { return function(e1) { try { var e0 = e1.sourceEvent = d3.event; e1.target = target; d3.event = e1; dispatch[e1.type].apply(thiz, argumentz); } finally { d3.event = e0; } }; }; return dispatch; } d3.requote = function(s) { return s.replace(d3_requote_re, "\\$&"); }; var d3_requote_re = /[\\\^\$\*\+\?\|\[\]\(\)\.\{\}]/g; var d3_subclass = {}.__proto__ ? 
function(object, prototype) { object.__proto__ = prototype; } : function(object, prototype) { for (var property in prototype) object[property] = prototype[property]; }; function d3_selection(groups) { d3_subclass(groups, d3_selectionPrototype); return groups; } var d3_select = function(s, n) { return n.querySelector(s); }, d3_selectAll = function(s, n) { return n.querySelectorAll(s); }, d3_selectMatcher = d3_documentElement[d3_vendorSymbol(d3_documentElement, "matchesSelector")], d3_selectMatches = function(n, s) { return d3_selectMatcher.call(n, s); }; if (typeof Sizzle === "function") { d3_select = function(s, n) { return Sizzle(s, n)[0] || null; }; d3_selectAll = Sizzle; d3_selectMatches = Sizzle.matchesSelector; } d3.selection = function() { return d3_selectionRoot; }; var d3_selectionPrototype = d3.selection.prototype = []; d3_selectionPrototype.select = function(selector) { var subgroups = [], subgroup, subnode, group, node; selector = d3_selection_selector(selector); for (var j = -1, m = this.length; ++j < m; ) { subgroups.push(subgroup = []); subgroup.parentNode = (group = this[j]).parentNode; for (var i = -1, n = group.length; ++i < n; ) { if (node = group[i]) { subgroup.push(subnode = selector.call(node, node.__data__, i, j)); if (subnode && "__data__" in node) subnode.__data__ = node.__data__; } else { subgroup.push(null); } } } return d3_selection(subgroups); }; function d3_selection_selector(selector) { return typeof selector === "function" ? 
selector : function() { return d3_select(selector, this); }; } d3_selectionPrototype.selectAll = function(selector) { var subgroups = [], subgroup, node; selector = d3_selection_selectorAll(selector); for (var j = -1, m = this.length; ++j < m; ) { for (var group = this[j], i = -1, n = group.length; ++i < n; ) { if (node = group[i]) { subgroups.push(subgroup = d3_array(selector.call(node, node.__data__, i, j))); subgroup.parentNode = node; } } } return d3_selection(subgroups); }; function d3_selection_selectorAll(selector) { return typeof selector === "function" ? selector : function() { return d3_selectAll(selector, this); }; } var d3_nsPrefix = { svg: "http://www.w3.org/2000/svg", xhtml: "http://www.w3.org/1999/xhtml", xlink: "http://www.w3.org/1999/xlink", xml: "http://www.w3.org/XML/1998/namespace", xmlns: "http://www.w3.org/2000/xmlns/" }; d3.ns = { prefix: d3_nsPrefix, qualify: function(name) { var i = name.indexOf(":"), prefix = name; if (i >= 0) { prefix = name.substring(0, i); name = name.substring(i + 1); } return d3_nsPrefix.hasOwnProperty(prefix) ? { space: d3_nsPrefix[prefix], local: name } : name; } }; d3_selectionPrototype.attr = function(name, value) { if (arguments.length < 2) { if (typeof name === "string") { var node = this.node(); name = d3.ns.qualify(name); return name.local ? 
node.getAttributeNS(name.space, name.local) : node.getAttribute(name); } for (value in name) this.each(d3_selection_attr(value, name[value])); return this; } return this.each(d3_selection_attr(name, value)); }; function d3_selection_attr(name, value) { name = d3.ns.qualify(name); function attrNull() { this.removeAttribute(name); } function attrNullNS() { this.removeAttributeNS(name.space, name.local); } function attrConstant() { this.setAttribute(name, value); } function attrConstantNS() { this.setAttributeNS(name.space, name.local, value); } function attrFunction() { var x = value.apply(this, arguments); if (x == null) this.removeAttribute(name); else this.setAttribute(name, x); } function attrFunctionNS() { var x = value.apply(this, arguments); if (x == null) this.removeAttributeNS(name.space, name.local); else this.setAttributeNS(name.space, name.local, x); } return value == null ? name.local ? attrNullNS : attrNull : typeof value === "function" ? name.local ? attrFunctionNS : attrFunction : name.local ? 
attrConstantNS : attrConstant; } function d3_collapse(s) { return s.trim().replace(/\s+/g, " "); } d3_selectionPrototype.classed = function(name, value) { if (arguments.length < 2) { if (typeof name === "string") { var node = this.node(), n = (name = d3_selection_classes(name)).length, i = -1; if (value = node.classList) { while (++i < n) if (!value.contains(name[i])) return false; } else { value = node.getAttribute("class"); while (++i < n) if (!d3_selection_classedRe(name[i]).test(value)) return false; } return true; } for (value in name) this.each(d3_selection_classed(value, name[value])); return this; } return this.each(d3_selection_classed(name, value)); }; function d3_selection_classedRe(name) { return new RegExp("(?:^|\\s+)" + d3.requote(name) + "(?:\\s+|$)", "g"); } function d3_selection_classes(name) { return name.trim().split(/^|\s+/); } function d3_selection_classed(name, value) { name = d3_selection_classes(name).map(d3_selection_classedName); var n = name.length; function classedConstant() { var i = -1; while (++i < n) name[i](this, value); } function classedFunction() { var i = -1, x = value.apply(this, arguments); while (++i < n) name[i](this, x); } return typeof value === "function" ? classedFunction : classedConstant; } function d3_selection_classedName(name) { var re = d3_selection_classedRe(name); return function(node, value) { if (c = node.classList) return value ? 
c.add(name) : c.remove(name); var c = node.getAttribute("class") || ""; if (value) { re.lastIndex = 0; if (!re.test(c)) node.setAttribute("class", d3_collapse(c + " " + name)); } else { node.setAttribute("class", d3_collapse(c.replace(re, " "))); } }; } d3_selectionPrototype.style = function(name, value, priority) { var n = arguments.length; if (n < 3) { if (typeof name !== "string") { if (n < 2) value = ""; for (priority in name) this.each(d3_selection_style(priority, name[priority], value)); return this; } if (n < 2) return d3_window.getComputedStyle(this.node(), null).getPropertyValue(name); priority = ""; } return this.each(d3_selection_style(name, value, priority)); }; function d3_selection_style(name, value, priority) { function styleNull() { this.style.removeProperty(name); } function styleConstant() { this.style.setProperty(name, value, priority); } function styleFunction() { var x = value.apply(this, arguments); if (x == null) this.style.removeProperty(name); else this.style.setProperty(name, x, priority); } return value == null ? styleNull : typeof value === "function" ? styleFunction : styleConstant; } d3_selectionPrototype.property = function(name, value) { if (arguments.length < 2) { if (typeof name === "string") return this.node()[name]; for (value in name) this.each(d3_selection_property(value, name[value])); return this; } return this.each(d3_selection_property(name, value)); }; function d3_selection_property(name, value) { function propertyNull() { delete this[name]; } function propertyConstant() { this[name] = value; } function propertyFunction() { var x = value.apply(this, arguments); if (x == null) delete this[name]; else this[name] = x; } return value == null ? propertyNull : typeof value === "function" ? propertyFunction : propertyConstant; } d3_selectionPrototype.text = function(value) { return arguments.length ? this.each(typeof value === "function" ? function() { var v = value.apply(this, arguments); this.textContent = v == null ? 
"" : v; } : value == null ? function() { this.textContent = ""; } : function() { this.textContent = value; }) : this.node().textContent; }; d3_selectionPrototype.html = function(value) { return arguments.length ? this.each(typeof value === "function" ? function() { var v = value.apply(this, arguments); this.innerHTML = v == null ? "" : v; } : value == null ? function() { this.innerHTML = ""; } : function() { this.innerHTML = value; }) : this.node().innerHTML; }; d3_selectionPrototype.append = function(name) { name = d3_selection_creator(name); return this.select(function() { return this.appendChild(name.apply(this, arguments)); }); }; function d3_selection_creator(name) { return typeof name === "function" ? name : (name = d3.ns.qualify(name)).local ? function() { return this.ownerDocument.createElementNS(name.space, name.local); } : function() { return this.ownerDocument.createElementNS(this.namespaceURI, name); }; } d3_selectionPrototype.insert = function(name, before) { name = d3_selection_creator(name); before = d3_selection_selector(before); return this.select(function() { return this.insertBefore(name.apply(this, arguments), before.apply(this, arguments) || null); }); }; d3_selectionPrototype.remove = function() { return this.each(function() { var parent = this.parentNode; if (parent) parent.removeChild(this); }); }; d3_selectionPrototype.data = function(value, key) { var i = -1, n = this.length, group, node; if (!arguments.length) { value = new Array(n = (group = this[0]).length); while (++i < n) { if (node = group[i]) { value[i] = node.__data__; } } return value; } function bind(group, groupData) { var i, n = group.length, m = groupData.length, n0 = Math.min(n, m), updateNodes = new Array(m), enterNodes = new Array(m), exitNodes = new Array(n), node, nodeData; if (key) { var nodeByKeyValue = new d3_Map(), dataByKeyValue = new d3_Map(), keyValues = [], keyValue; for (i = -1; ++i < n; ) { keyValue = key.call(node = group[i], node.__data__, i); if 
(nodeByKeyValue.has(keyValue)) { exitNodes[i] = node; } else { nodeByKeyValue.set(keyValue, node); } keyValues.push(keyValue); } for (i = -1; ++i < m; ) { keyValue = key.call(groupData, nodeData = groupData[i], i); if (node = nodeByKeyValue.get(keyValue)) { updateNodes[i] = node; node.__data__ = nodeData; } else if (!dataByKeyValue.has(keyValue)) { enterNodes[i] = d3_selection_dataNode(nodeData); } dataByKeyValue.set(keyValue, nodeData); nodeByKeyValue.remove(keyValue); } for (i = -1; ++i < n; ) { if (nodeByKeyValue.has(keyValues[i])) { exitNodes[i] = group[i]; } } } else { for (i = -1; ++i < n0; ) { node = group[i]; nodeData = groupData[i]; if (node) { node.__data__ = nodeData; updateNodes[i] = node; } else { enterNodes[i] = d3_selection_dataNode(nodeData); } } for (;i < m; ++i) { enterNodes[i] = d3_selection_dataNode(groupData[i]); } for (;i < n; ++i) { exitNodes[i] = group[i]; } } enterNodes.update = updateNodes; enterNodes.parentNode = updateNodes.parentNode = exitNodes.parentNode = group.parentNode; enter.push(enterNodes); update.push(updateNodes); exit.push(exitNodes); } var enter = d3_selection_enter([]), update = d3_selection([]), exit = d3_selection([]); if (typeof value === "function") { while (++i < n) { bind(group = this[i], value.call(group, group.parentNode.__data__, i)); } } else { while (++i < n) { bind(group = this[i], value); } } update.enter = function() { return enter; }; update.exit = function() { return exit; }; return update; }; function d3_selection_dataNode(data) { return { __data__: data }; } d3_selectionPrototype.datum = function(value) { return arguments.length ? 
this.property("__data__", value) : this.property("__data__"); }; d3_selectionPrototype.filter = function(filter) { var subgroups = [], subgroup, group, node; if (typeof filter !== "function") filter = d3_selection_filter(filter); for (var j = 0, m = this.length; j < m; j++) { subgroups.push(subgroup = []); subgroup.parentNode = (group = this[j]).parentNode; for (var i = 0, n = group.length; i < n; i++) { if ((node = group[i]) && filter.call(node, node.__data__, i, j)) { subgroup.push(node); } } } return d3_selection(subgroups); }; function d3_selection_filter(selector) { return function() { return d3_selectMatches(this, selector); }; } d3_selectionPrototype.order = function() { for (var j = -1, m = this.length; ++j < m; ) { for (var group = this[j], i = group.length - 1, next = group[i], node; --i >= 0; ) { if (node = group[i]) { if (next && next !== node.nextSibling) next.parentNode.insertBefore(node, next); next = node; } } } return this; }; d3_selectionPrototype.sort = function(comparator) { comparator = d3_selection_sortComparator.apply(this, arguments); for (var j = -1, m = this.length; ++j < m; ) this[j].sort(comparator); return this.order(); }; function d3_selection_sortComparator(comparator) { if (!arguments.length) comparator = d3_ascending; return function(a, b) { return a && b ? 
comparator(a.__data__, b.__data__) : !a - !b; }; } d3_selectionPrototype.each = function(callback) { return d3_selection_each(this, function(node, i, j) { callback.call(node, node.__data__, i, j); }); }; function d3_selection_each(groups, callback) { for (var j = 0, m = groups.length; j < m; j++) { for (var group = groups[j], i = 0, n = group.length, node; i < n; i++) { if (node = group[i]) callback(node, i, j); } } return groups; } d3_selectionPrototype.call = function(callback) { var args = d3_array(arguments); callback.apply(args[0] = this, args); return this; }; d3_selectionPrototype.empty = function() { return !this.node(); }; d3_selectionPrototype.node = function() { for (var j = 0, m = this.length; j < m; j++) { for (var group = this[j], i = 0, n = group.length; i < n; i++) { var node = group[i]; if (node) return node; } } return null; }; d3_selectionPrototype.size = function() { var n = 0; this.each(function() { ++n; }); return n; }; function d3_selection_enter(selection) { d3_subclass(selection, d3_selection_enterPrototype); return selection; } var d3_selection_enterPrototype = []; d3.selection.enter = d3_selection_enter; d3.selection.enter.prototype = d3_selection_enterPrototype; d3_selection_enterPrototype.append = d3_selectionPrototype.append; d3_selection_enterPrototype.empty = d3_selectionPrototype.empty; d3_selection_enterPrototype.node = d3_selectionPrototype.node; d3_selection_enterPrototype.call = d3_selectionPrototype.call; d3_selection_enterPrototype.size = d3_selectionPrototype.size; d3_selection_enterPrototype.select = function(selector) { var subgroups = [], subgroup, subnode, upgroup, group, node; for (var j = -1, m = this.length; ++j < m; ) { upgroup = (group = this[j]).update; subgroups.push(subgroup = []); subgroup.parentNode = group.parentNode; for (var i = -1, n = group.length; ++i < n; ) { if (node = group[i]) { subgroup.push(upgroup[i] = subnode = selector.call(group.parentNode, node.__data__, i, j)); subnode.__data__ = node.__data__; 
} else { subgroup.push(null); } } } return d3_selection(subgroups); }; d3_selection_enterPrototype.insert = function(name, before) { if (arguments.length < 2) before = d3_selection_enterInsertBefore(this); return d3_selectionPrototype.insert.call(this, name, before); }; function d3_selection_enterInsertBefore(enter) { var i0, j0; return function(d, i, j) { var group = enter[j].update, n = group.length, node; if (j != j0) j0 = j, i0 = 0; if (i >= i0) i0 = i + 1; while (!(node = group[i0]) && ++i0 < n) ; return node; }; } d3_selectionPrototype.transition = function() { var id = d3_transitionInheritId || ++d3_transitionId, subgroups = [], subgroup, node, transition = d3_transitionInherit || { time: Date.now(), ease: d3_ease_cubicInOut, delay: 0, duration: 250 }; for (var j = -1, m = this.length; ++j < m; ) { subgroups.push(subgroup = []); for (var group = this[j], i = -1, n = group.length; ++i < n; ) { if (node = group[i]) d3_transitionNode(node, i, id, transition); subgroup.push(node); } } return d3_transition(subgroups, id); }; d3_selectionPrototype.interrupt = function() { return this.each(d3_selection_interrupt); }; function d3_selection_interrupt() { var lock = this.__transition__; if (lock) ++lock.active; } d3.select = function(node) { var group = [ typeof node === "string" ? d3_select(node, d3_document) : node ]; group.parentNode = d3_documentElement; return d3_selection([ group ]); }; d3.selectAll = function(nodes) { var group = d3_array(typeof nodes === "string" ? 
d3_selectAll(nodes, d3_document) : nodes); group.parentNode = d3_documentElement; return d3_selection([ group ]); }; var d3_selectionRoot = d3.select(d3_documentElement); d3_selectionPrototype.on = function(type, listener, capture) { var n = arguments.length; if (n < 3) { if (typeof type !== "string") { if (n < 2) listener = false; for (capture in type) this.each(d3_selection_on(capture, type[capture], listener)); return this; } if (n < 2) return (n = this.node()["__on" + type]) && n._; capture = false; } return this.each(d3_selection_on(type, listener, capture)); }; function d3_selection_on(type, listener, capture) { var name = "__on" + type, i = type.indexOf("."), wrap = d3_selection_onListener; if (i > 0) type = type.substring(0, i); var filter = d3_selection_onFilters.get(type); if (filter) type = filter, wrap = d3_selection_onFilter; function onRemove() { var l = this[name]; if (l) { this.removeEventListener(type, l, l.$); delete this[name]; } } function onAdd() { var l = wrap(listener, d3_array(arguments)); onRemove.call(this); this.addEventListener(type, this[name] = l, l.$ = capture); l._ = listener; } function removeAll() { var re = new RegExp("^__on([^.]+)" + d3.requote(type) + "$"), match; for (var name in this) { if (match = name.match(re)) { var l = this[name]; this.removeEventListener(match[1], l, l.$); delete this[name]; } } } return i ? listener ? onAdd : onRemove : listener ? 
d3_noop : removeAll; } var d3_selection_onFilters = d3.map({ mouseenter: "mouseover", mouseleave: "mouseout" }); d3_selection_onFilters.forEach(function(k) { if ("on" + k in d3_document) d3_selection_onFilters.remove(k); }); function d3_selection_onListener(listener, argumentz) { return function(e) { var o = d3.event; d3.event = e; argumentz[0] = this.__data__; try { listener.apply(this, argumentz); } finally { d3.event = o; } }; } function d3_selection_onFilter(listener, argumentz) { var l = d3_selection_onListener(listener, argumentz); return function(e) { var target = this, related = e.relatedTarget; if (!related || related !== target && !(related.compareDocumentPosition(target) & 8)) { l.call(target, e); } }; } var d3_event_dragSelect = "onselectstart" in d3_document ? null : d3_vendorSymbol(d3_documentElement.style, "userSelect"), d3_event_dragId = 0; function d3_event_dragSuppress() { var name = ".dragsuppress-" + ++d3_event_dragId, click = "click" + name, w = d3.select(d3_window).on("touchmove" + name, d3_eventPreventDefault).on("dragstart" + name, d3_eventPreventDefault).on("selectstart" + name, d3_eventPreventDefault); if (d3_event_dragSelect) { var style = d3_documentElement.style, select = style[d3_event_dragSelect]; style[d3_event_dragSelect] = "none"; } return function(suppressClick) { w.on(name, null); if (d3_event_dragSelect) style[d3_event_dragSelect] = select; if (suppressClick) { function off() { w.on(click, null); } w.on(click, function() { d3_eventPreventDefault(); off(); }, true); setTimeout(off, 0); } }; } d3.mouse = function(container) { return d3_mousePoint(container, d3_eventSource()); }; function d3_mousePoint(container, e) { if (e.changedTouches) e = e.changedTouches[0]; var svg = container.ownerSVGElement || container; if (svg.createSVGPoint) { var point = svg.createSVGPoint(); point.x = e.clientX, point.y = e.clientY; point = point.matrixTransform(container.getScreenCTM().inverse()); return [ point.x, point.y ]; } var rect = 
container.getBoundingClientRect(); return [ e.clientX - rect.left - container.clientLeft, e.clientY - rect.top - container.clientTop ]; } d3.touches = function(container, touches) { if (arguments.length < 2) touches = d3_eventSource().touches; return touches ? d3_array(touches).map(function(touch) { var point = d3_mousePoint(container, touch); point.identifier = touch.identifier; return point; }) : []; }; d3.behavior.drag = function() { var event = d3_eventDispatch(drag, "drag", "dragstart", "dragend"), origin = null, mousedown = dragstart(d3_noop, d3.mouse, d3_behavior_dragMouseSubject, "mousemove", "mouseup"), touchstart = dragstart(d3_behavior_dragTouchId, d3.touch, d3_behavior_dragTouchSubject, "touchmove", "touchend"); function drag() { this.on("mousedown.drag", mousedown).on("touchstart.drag", touchstart); } function dragstart(id, position, subject, move, end) { return function() { var that = this, target = d3.event.target, parent = that.parentNode, dispatch = event.of(that, arguments), dragged = 0, dragId = id(), dragName = ".drag" + (dragId == null ? 
"" : "-" + dragId), dragOffset, dragSubject = d3.select(subject()).on(move + dragName, moved).on(end + dragName, ended), dragRestore = d3_event_dragSuppress(), position0 = position(parent, dragId); if (origin) { dragOffset = origin.apply(that, arguments); dragOffset = [ dragOffset.x - position0[0], dragOffset.y - position0[1] ]; } else { dragOffset = [ 0, 0 ]; } dispatch({ type: "dragstart" }); function moved() { var position1 = position(parent, dragId), dx, dy; if (!position1) return; dx = position1[0] - position0[0]; dy = position1[1] - position0[1]; dragged |= dx | dy; position0 = position1; dispatch({ type: "drag", x: position1[0] + dragOffset[0], y: position1[1] + dragOffset[1], dx: dx, dy: dy }); } function ended() { if (!position(parent, dragId)) return; dragSubject.on(move + dragName, null).on(end + dragName, null); dragRestore(dragged && d3.event.target === target); dispatch({ type: "dragend" }); } }; } drag.origin = function(x) { if (!arguments.length) return origin; origin = x; return drag; }; return d3.rebind(drag, event, "on"); }; function d3_behavior_dragTouchId() { return d3.event.changedTouches[0].identifier; } function d3_behavior_dragTouchSubject() { return d3.event.target; } function d3_behavior_dragMouseSubject() { return d3_window; } var π = Math.PI, τ = 2 * π, halfπ = π / 2, ε = 1e-6, ε2 = ε * ε, d3_radians = π / 180, d3_degrees = 180 / π; function d3_sgn(x) { return x > 0 ? 1 : x < 0 ? -1 : 0; } function d3_cross2d(a, b, c) { return (b[0] - a[0]) * (c[1] - a[1]) - (b[1] - a[1]) * (c[0] - a[0]); } function d3_acos(x) { return x > 1 ? 0 : x < -1 ? π : Math.acos(x); } function d3_asin(x) { return x > 1 ? halfπ : x < -1 ? 
-halfπ : Math.asin(x); } function d3_sinh(x) { return ((x = Math.exp(x)) - 1 / x) / 2; } function d3_cosh(x) { return ((x = Math.exp(x)) + 1 / x) / 2; } function d3_tanh(x) { return ((x = Math.exp(2 * x)) - 1) / (x + 1); } function d3_haversin(x) { return (x = Math.sin(x / 2)) * x; } var ρ = Math.SQRT2, ρ2 = 2, ρ4 = 4; d3.interpolateZoom = function(p0, p1) { var ux0 = p0[0], uy0 = p0[1], w0 = p0[2], ux1 = p1[0], uy1 = p1[1], w1 = p1[2]; var dx = ux1 - ux0, dy = uy1 - uy0, d2 = dx * dx + dy * dy, d1 = Math.sqrt(d2), b0 = (w1 * w1 - w0 * w0 + ρ4 * d2) / (2 * w0 * ρ2 * d1), b1 = (w1 * w1 - w0 * w0 - ρ4 * d2) / (2 * w1 * ρ2 * d1), r0 = Math.log(Math.sqrt(b0 * b0 + 1) - b0), r1 = Math.log(Math.sqrt(b1 * b1 + 1) - b1), dr = r1 - r0, S = (dr || Math.log(w1 / w0)) / ρ; function interpolate(t) { var s = t * S; if (dr) { var coshr0 = d3_cosh(r0), u = w0 / (ρ2 * d1) * (coshr0 * d3_tanh(ρ * s + r0) - d3_sinh(r0)); return [ ux0 + u * dx, uy0 + u * dy, w0 * coshr0 / d3_cosh(ρ * s + r0) ]; } return [ ux0 + t * dx, uy0 + t * dy, w0 * Math.exp(ρ * s) ]; } interpolate.duration = S * 1e3; return interpolate; }; d3.behavior.zoom = function() { var view = { x: 0, y: 0, k: 1 }, translate0, center, size = [ 960, 500 ], scaleExtent = d3_behavior_zoomInfinity, mousedown = "mousedown.zoom", mousemove = "mousemove.zoom", mouseup = "mouseup.zoom", mousewheelTimer, touchstart = "touchstart.zoom", touchtime, event = d3_eventDispatch(zoom, "zoomstart", "zoom", "zoomend"), x0, x1, y0, y1; function zoom(g) { g.on(mousedown, mousedowned).on(d3_behavior_zoomWheel + ".zoom", mousewheeled).on(mousemove, mousewheelreset).on("dblclick.zoom", dblclicked).on(touchstart, touchstarted); } zoom.event = function(g) { g.each(function() { var dispatch = event.of(this, arguments), view1 = view; if (d3_transitionInheritId) { d3.select(this).transition().each("start.zoom", function() { view = this.__chart__ || { x: 0, y: 0, k: 1 }; zoomstarted(dispatch); }).tween("zoom:zoom", function() { var dx = size[0], dy = 
size[1], cx = dx / 2, cy = dy / 2, i = d3.interpolateZoom([ (cx - view.x) / view.k, (cy - view.y) / view.k, dx / view.k ], [ (cx - view1.x) / view1.k, (cy - view1.y) / view1.k, dx / view1.k ]); return function(t) { var l = i(t), k = dx / l[2]; this.__chart__ = view = { x: cx - l[0] * k, y: cy - l[1] * k, k: k }; zoomed(dispatch); }; }).each("end.zoom", function() { zoomended(dispatch); }); } else { this.__chart__ = view; zoomstarted(dispatch); zoomed(dispatch); zoomended(dispatch); } }); }; zoom.translate = function(_) { if (!arguments.length) return [ view.x, view.y ]; view = { x: +_[0], y: +_[1], k: view.k }; rescale(); return zoom; }; zoom.scale = function(_) { if (!arguments.length) return view.k; view = { x: view.x, y: view.y, k: +_ }; rescale(); return zoom; }; zoom.scaleExtent = function(_) { if (!arguments.length) return scaleExtent; scaleExtent = _ == null ? d3_behavior_zoomInfinity : [ +_[0], +_[1] ]; return zoom; }; zoom.center = function(_) { if (!arguments.length) return center; center = _ && [ +_[0], +_[1] ]; return zoom; }; zoom.size = function(_) { if (!arguments.length) return size; size = _ && [ +_[0], +_[1] ]; return zoom; }; zoom.x = function(z) { if (!arguments.length) return x1; x1 = z; x0 = z.copy(); view = { x: 0, y: 0, k: 1 }; return zoom; }; zoom.y = function(z) { if (!arguments.length) return y1; y1 = z; y0 = z.copy(); view = { x: 0, y: 0, k: 1 }; return zoom; }; function location(p) { return [ (p[0] - view.x) / view.k, (p[1] - view.y) / view.k ]; } function point(l) { return [ l[0] * view.k + view.x, l[1] * view.k + view.y ]; } function scaleTo(s) { view.k = Math.max(scaleExtent[0], Math.min(scaleExtent[1], s)); } function translateTo(p, l) { l = point(l); view.x += p[0] - l[0]; view.y += p[1] - l[1]; } function rescale() { if (x1) x1.domain(x0.range().map(function(x) { return (x - view.x) / view.k; }).map(x0.invert)); if (y1) y1.domain(y0.range().map(function(y) { return (y - view.y) / view.k; }).map(y0.invert)); } function 
zoomstarted(dispatch) { dispatch({ type: "zoomstart" }); } function zoomed(dispatch) { rescale(); dispatch({ type: "zoom", scale: view.k, translate: [ view.x, view.y ] }); } function zoomended(dispatch) { dispatch({ type: "zoomend" }); } function mousedowned() { var that = this, target = d3.event.target, dispatch = event.of(that, arguments), dragged = 0, subject = d3.select(d3_window).on(mousemove, moved).on(mouseup, ended), location0 = location(d3.mouse(that)), dragRestore = d3_event_dragSuppress(); d3_selection_interrupt.call(that); zoomstarted(dispatch); function moved() { dragged = 1; translateTo(d3.mouse(that), location0); zoomed(dispatch); } function ended() { subject.on(mousemove, d3_window === that ? mousewheelreset : null).on(mouseup, null); dragRestore(dragged && d3.event.target === target); zoomended(dispatch); } } function touchstarted() { var that = this, dispatch = event.of(that, arguments), locations0 = {}, distance0 = 0, scale0, zoomName = ".zoom-" + d3.event.changedTouches[0].identifier, touchmove = "touchmove" + zoomName, touchend = "touchend" + zoomName, targets = [], subject = d3.select(that).on(mousedown, null).on(touchstart, started), dragRestore = d3_event_dragSuppress(); d3_selection_interrupt.call(that); started(); zoomstarted(dispatch); function relocate() { var touches = d3.touches(that); scale0 = view.k; touches.forEach(function(t) { if (t.identifier in locations0) locations0[t.identifier] = location(t); }); return touches; } function started() { var target = d3.event.target; d3.select(target).on(touchmove, moved).on(touchend, ended); targets.push(target); var changed = d3.event.changedTouches; for (var i = 0, n = changed.length; i < n; ++i) { locations0[changed[i].identifier] = null; } var touches = relocate(), now = Date.now(); if (touches.length === 1) { if (now - touchtime < 500) { var p = touches[0], l = locations0[p.identifier]; scaleTo(view.k * 2); translateTo(p, l); d3_eventPreventDefault(); zoomed(dispatch); } touchtime = now; } 
else if (touches.length > 1) { var p = touches[0], q = touches[1], dx = p[0] - q[0], dy = p[1] - q[1]; distance0 = dx * dx + dy * dy; } } function moved() { var touches = d3.touches(that), p0, l0, p1, l1; for (var i = 0, n = touches.length; i < n; ++i, l1 = null) { p1 = touches[i]; if (l1 = locations0[p1.identifier]) { if (l0) break; p0 = p1, l0 = l1; } } if (l1) { var distance1 = (distance1 = p1[0] - p0[0]) * distance1 + (distance1 = p1[1] - p0[1]) * distance1, scale1 = distance0 && Math.sqrt(distance1 / distance0); p0 = [ (p0[0] + p1[0]) / 2, (p0[1] + p1[1]) / 2 ]; l0 = [ (l0[0] + l1[0]) / 2, (l0[1] + l1[1]) / 2 ]; scaleTo(scale1 * scale0); } touchtime = null; translateTo(p0, l0); zoomed(dispatch); } function ended() { if (d3.event.touches.length) { var changed = d3.event.changedTouches; for (var i = 0, n = changed.length; i < n; ++i) { delete locations0[changed[i].identifier]; } for (var identifier in locations0) { return void relocate(); } } d3.selectAll(targets).on(zoomName, null); subject.on(mousedown, mousedowned).on(touchstart, touchstarted); dragRestore(); zoomended(dispatch); } } function mousewheeled() { var dispatch = event.of(this, arguments); if (mousewheelTimer) clearTimeout(mousewheelTimer); else d3_selection_interrupt.call(this), zoomstarted(dispatch); mousewheelTimer = setTimeout(function() { mousewheelTimer = null; zoomended(dispatch); }, 50); d3_eventPreventDefault(); var point = center || d3.mouse(this); if (!translate0) translate0 = location(point); scaleTo(Math.pow(2, d3_behavior_zoomDelta() * .002) * view.k); translateTo(point, translate0); zoomed(dispatch); } function mousewheelreset() { translate0 = null; } function dblclicked() { var dispatch = event.of(this, arguments), p = d3.mouse(this), l = location(p), k = Math.log(view.k) / Math.LN2; zoomstarted(dispatch); scaleTo(Math.pow(2, d3.event.shiftKey ? 
Math.ceil(k) - 1 : Math.floor(k) + 1)); translateTo(p, l); zoomed(dispatch); zoomended(dispatch); } return d3.rebind(zoom, event, "on"); }; var d3_behavior_zoomInfinity = [ 0, Infinity ]; var d3_behavior_zoomDelta, d3_behavior_zoomWheel = "onwheel" in d3_document ? (d3_behavior_zoomDelta = function() { return -d3.event.deltaY * (d3.event.deltaMode ? 120 : 1); }, "wheel") : "onmousewheel" in d3_document ? (d3_behavior_zoomDelta = function() { return d3.event.wheelDelta; }, "mousewheel") : (d3_behavior_zoomDelta = function() { return -d3.event.detail; }, "MozMousePixelScroll"); function d3_Color() {} d3_Color.prototype.toString = function() { return this.rgb() + ""; }; d3.hsl = function(h, s, l) { return arguments.length === 1 ? h instanceof d3_Hsl ? d3_hsl(h.h, h.s, h.l) : d3_rgb_parse("" + h, d3_rgb_hsl, d3_hsl) : d3_hsl(+h, +s, +l); }; function d3_hsl(h, s, l) { return new d3_Hsl(h, s, l); } function d3_Hsl(h, s, l) { this.h = h; this.s = s; this.l = l; } var d3_hslPrototype = d3_Hsl.prototype = new d3_Color(); d3_hslPrototype.brighter = function(k) { k = Math.pow(.7, arguments.length ? k : 1); return d3_hsl(this.h, this.s, this.l / k); }; d3_hslPrototype.darker = function(k) { k = Math.pow(.7, arguments.length ? k : 1); return d3_hsl(this.h, this.s, k * this.l); }; d3_hslPrototype.rgb = function() { return d3_hsl_rgb(this.h, this.s, this.l); }; function d3_hsl_rgb(h, s, l) { var m1, m2; h = isNaN(h) ? 0 : (h %= 360) < 0 ? h + 360 : h; s = isNaN(s) ? 0 : s < 0 ? 0 : s > 1 ? 1 : s; l = l < 0 ? 0 : l > 1 ? 1 : l; m2 = l <= .5 ? l * (1 + s) : l + s - l * s; m1 = 2 * l - m2; function v(h) { if (h > 360) h -= 360; else if (h < 0) h += 360; if (h < 60) return m1 + (m2 - m1) * h / 60; if (h < 180) return m2; if (h < 240) return m1 + (m2 - m1) * (240 - h) / 60; return m1; } function vv(h) { return Math.round(v(h) * 255); } return d3_rgb(vv(h + 120), vv(h), vv(h - 120)); } d3.hcl = function(h, c, l) { return arguments.length === 1 ? h instanceof d3_Hcl ? 
d3_hcl(h.h, h.c, h.l) : h instanceof d3_Lab ? d3_lab_hcl(h.l, h.a, h.b) : d3_lab_hcl((h = d3_rgb_lab((h = d3.rgb(h)).r, h.g, h.b)).l, h.a, h.b) : d3_hcl(+h, +c, +l); }; function d3_hcl(h, c, l) { return new d3_Hcl(h, c, l); } function d3_Hcl(h, c, l) { this.h = h; this.c = c; this.l = l; } var d3_hclPrototype = d3_Hcl.prototype = new d3_Color(); d3_hclPrototype.brighter = function(k) { return d3_hcl(this.h, this.c, Math.min(100, this.l + d3_lab_K * (arguments.length ? k : 1))); }; d3_hclPrototype.darker = function(k) { return d3_hcl(this.h, this.c, Math.max(0, this.l - d3_lab_K * (arguments.length ? k : 1))); }; d3_hclPrototype.rgb = function() { return d3_hcl_lab(this.h, this.c, this.l).rgb(); }; function d3_hcl_lab(h, c, l) { if (isNaN(h)) h = 0; if (isNaN(c)) c = 0; return d3_lab(l, Math.cos(h *= d3_radians) * c, Math.sin(h) * c); } d3.lab = function(l, a, b) { return arguments.length === 1 ? l instanceof d3_Lab ? d3_lab(l.l, l.a, l.b) : l instanceof d3_Hcl ? d3_hcl_lab(l.l, l.c, l.h) : d3_rgb_lab((l = d3.rgb(l)).r, l.g, l.b) : d3_lab(+l, +a, +b); }; function d3_lab(l, a, b) { return new d3_Lab(l, a, b); } function d3_Lab(l, a, b) { this.l = l; this.a = a; this.b = b; } var d3_lab_K = 18; var d3_lab_X = .95047, d3_lab_Y = 1, d3_lab_Z = 1.08883; var d3_labPrototype = d3_Lab.prototype = new d3_Color(); d3_labPrototype.brighter = function(k) { return d3_lab(Math.min(100, this.l + d3_lab_K * (arguments.length ? k : 1)), this.a, this.b); }; d3_labPrototype.darker = function(k) { return d3_lab(Math.max(0, this.l - d3_lab_K * (arguments.length ? 
k : 1)), this.a, this.b); }; d3_labPrototype.rgb = function() { return d3_lab_rgb(this.l, this.a, this.b); }; function d3_lab_rgb(l, a, b) { var y = (l + 16) / 116, x = y + a / 500, z = y - b / 200; x = d3_lab_xyz(x) * d3_lab_X; y = d3_lab_xyz(y) * d3_lab_Y; z = d3_lab_xyz(z) * d3_lab_Z; return d3_rgb(d3_xyz_rgb(3.2404542 * x - 1.5371385 * y - .4985314 * z), d3_xyz_rgb(-.969266 * x + 1.8760108 * y + .041556 * z), d3_xyz_rgb(.0556434 * x - .2040259 * y + 1.0572252 * z)); } function d3_lab_hcl(l, a, b) { return l > 0 ? d3_hcl(Math.atan2(b, a) * d3_degrees, Math.sqrt(a * a + b * b), l) : d3_hcl(NaN, NaN, l); } function d3_lab_xyz(x) { return x > .206893034 ? x * x * x : (x - 4 / 29) / 7.787037; } function d3_xyz_lab(x) { return x > .008856 ? Math.pow(x, 1 / 3) : 7.787037 * x + 4 / 29; } function d3_xyz_rgb(r) { return Math.round(255 * (r <= .00304 ? 12.92 * r : 1.055 * Math.pow(r, 1 / 2.4) - .055)); } d3.rgb = function(r, g, b) { return arguments.length === 1 ? r instanceof d3_Rgb ? d3_rgb(r.r, r.g, r.b) : d3_rgb_parse("" + r, d3_rgb, d3_hsl_rgb) : d3_rgb(~~r, ~~g, ~~b); }; function d3_rgbNumber(value) { return d3_rgb(value >> 16, value >> 8 & 255, value & 255); } function d3_rgbString(value) { return d3_rgbNumber(value) + ""; } function d3_rgb(r, g, b) { return new d3_Rgb(r, g, b); } function d3_Rgb(r, g, b) { this.r = r; this.g = g; this.b = b; } var d3_rgbPrototype = d3_Rgb.prototype = new d3_Color(); d3_rgbPrototype.brighter = function(k) { k = Math.pow(.7, arguments.length ? k : 1); var r = this.r, g = this.g, b = this.b, i = 30; if (!r && !g && !b) return d3_rgb(i, i, i); if (r && r < i) r = i; if (g && g < i) g = i; if (b && b < i) b = i; return d3_rgb(Math.min(255, ~~(r / k)), Math.min(255, ~~(g / k)), Math.min(255, ~~(b / k))); }; d3_rgbPrototype.darker = function(k) { k = Math.pow(.7, arguments.length ? 
k : 1); return d3_rgb(~~(k * this.r), ~~(k * this.g), ~~(k * this.b)); }; d3_rgbPrototype.hsl = function() { return d3_rgb_hsl(this.r, this.g, this.b); }; d3_rgbPrototype.toString = function() { return "#" + d3_rgb_hex(this.r) + d3_rgb_hex(this.g) + d3_rgb_hex(this.b); }; function d3_rgb_hex(v) { return v < 16 ? "0" + Math.max(0, v).toString(16) : Math.min(255, v).toString(16); } function d3_rgb_parse(format, rgb, hsl) { var r = 0, g = 0, b = 0, m1, m2, color; m1 = /([a-z]+)\((.*)\)/i.exec(format); if (m1) { m2 = m1[2].split(","); switch (m1[1]) { case "hsl": { return hsl(parseFloat(m2[0]), parseFloat(m2[1]) / 100, parseFloat(m2[2]) / 100); } case "rgb": { return rgb(d3_rgb_parseNumber(m2[0]), d3_rgb_parseNumber(m2[1]), d3_rgb_parseNumber(m2[2])); } } } if (color = d3_rgb_names.get(format)) return rgb(color.r, color.g, color.b); if (format != null && format.charAt(0) === "#" && !isNaN(color = parseInt(format.substring(1), 16))) { if (format.length === 4) { r = (color & 3840) >> 4; r = r >> 4 | r; g = color & 240; g = g >> 4 | g; b = color & 15; b = b << 4 | b; } else if (format.length === 7) { r = (color & 16711680) >> 16; g = (color & 65280) >> 8; b = color & 255; } } return rgb(r, g, b); } function d3_rgb_hsl(r, g, b) { var min = Math.min(r /= 255, g /= 255, b /= 255), max = Math.max(r, g, b), d = max - min, h, s, l = (max + min) / 2; if (d) { s = l < .5 ? d / (max + min) : d / (2 - max - min); if (r == max) h = (g - b) / d + (g < b ? 6 : 0); else if (g == max) h = (b - r) / d + 2; else h = (r - g) / d + 4; h *= 60; } else { h = NaN; s = l > 0 && l < 1 ? 
0 : h; } return d3_hsl(h, s, l); } function d3_rgb_lab(r, g, b) { r = d3_rgb_xyz(r); g = d3_rgb_xyz(g); b = d3_rgb_xyz(b); var x = d3_xyz_lab((.4124564 * r + .3575761 * g + .1804375 * b) / d3_lab_X), y = d3_xyz_lab((.2126729 * r + .7151522 * g + .072175 * b) / d3_lab_Y), z = d3_xyz_lab((.0193339 * r + .119192 * g + .9503041 * b) / d3_lab_Z); return d3_lab(116 * y - 16, 500 * (x - y), 200 * (y - z)); } function d3_rgb_xyz(r) { return (r /= 255) <= .04045 ? r / 12.92 : Math.pow((r + .055) / 1.055, 2.4); } function d3_rgb_parseNumber(c) { var f = parseFloat(c); return c.charAt(c.length - 1) === "%" ? Math.round(f * 2.55) : f; } var d3_rgb_names = d3.map({ aliceblue: 15792383, antiquewhite: 16444375, aqua: 65535, aquamarine: 8388564, azure: 15794175, beige: 16119260, bisque: 16770244, black: 0, blanchedalmond: 16772045, blue: 255, blueviolet: 9055202, brown: 10824234, burlywood: 14596231, cadetblue: 6266528, chartreuse: 8388352, chocolate: 13789470, coral: 16744272, cornflowerblue: 6591981, cornsilk: 16775388, crimson: 14423100, cyan: 65535, darkblue: 139, darkcyan: 35723, darkgoldenrod: 12092939, darkgray: 11119017, darkgreen: 25600, darkgrey: 11119017, darkkhaki: 12433259, darkmagenta: 9109643, darkolivegreen: 5597999, darkorange: 16747520, darkorchid: 10040012, darkred: 9109504, darksalmon: 15308410, darkseagreen: 9419919, darkslateblue: 4734347, darkslategray: 3100495, darkslategrey: 3100495, darkturquoise: 52945, darkviolet: 9699539, deeppink: 16716947, deepskyblue: 49151, dimgray: 6908265, dimgrey: 6908265, dodgerblue: 2003199, firebrick: 11674146, floralwhite: 16775920, forestgreen: 2263842, fuchsia: 16711935, gainsboro: 14474460, ghostwhite: 16316671, gold: 16766720, goldenrod: 14329120, gray: 8421504, green: 32768, greenyellow: 11403055, grey: 8421504, honeydew: 15794160, hotpink: 16738740, indianred: 13458524, indigo: 4915330, ivory: 16777200, khaki: 15787660, lavender: 15132410, lavenderblush: 16773365, lawngreen: 8190976, lemonchiffon: 16775885, lightblue: 
11393254, lightcoral: 15761536, lightcyan: 14745599, lightgoldenrodyellow: 16448210, lightgray: 13882323, lightgreen: 9498256, lightgrey: 13882323, lightpink: 16758465, lightsalmon: 16752762, lightseagreen: 2142890, lightskyblue: 8900346, lightslategray: 7833753, lightslategrey: 7833753, lightsteelblue: 11584734, lightyellow: 16777184, lime: 65280, limegreen: 3329330, linen: 16445670, magenta: 16711935, maroon: 8388608, mediumaquamarine: 6737322, mediumblue: 205, mediumorchid: 12211667, mediumpurple: 9662683, mediumseagreen: 3978097, mediumslateblue: 8087790, mediumspringgreen: 64154, mediumturquoise: 4772300, mediumvioletred: 13047173, midnightblue: 1644912, mintcream: 16121850, mistyrose: 16770273, moccasin: 16770229, navajowhite: 16768685, navy: 128, oldlace: 16643558, olive: 8421376, olivedrab: 7048739, orange: 16753920, orangered: 16729344, orchid: 14315734, palegoldenrod: 15657130, palegreen: 10025880, paleturquoise: 11529966, palevioletred: 14381203, papayawhip: 16773077, peachpuff: 16767673, peru: 13468991, pink: 16761035, plum: 14524637, powderblue: 11591910, purple: 8388736, red: 16711680, rosybrown: 12357519, royalblue: 4286945, saddlebrown: 9127187, salmon: 16416882, sandybrown: 16032864, seagreen: 3050327, seashell: 16774638, sienna: 10506797, silver: 12632256, skyblue: 8900331, slateblue: 6970061, slategray: 7372944, slategrey: 7372944, snow: 16775930, springgreen: 65407, steelblue: 4620980, tan: 13808780, teal: 32896, thistle: 14204888, tomato: 16737095, turquoise: 4251856, violet: 15631086, wheat: 16113331, white: 16777215, whitesmoke: 16119285, yellow: 16776960, yellowgreen: 10145074 }); d3_rgb_names.forEach(function(key, value) { d3_rgb_names.set(key, d3_rgbNumber(value)); }); function d3_functor(v) { return typeof v === "function" ? 
v : function() { return v; }; } d3.functor = d3_functor; function d3_identity(d) { return d; } d3.xhr = d3_xhrType(d3_identity); function d3_xhrType(response) { return function(url, mimeType, callback) { if (arguments.length === 2 && typeof mimeType === "function") callback = mimeType, mimeType = null; return d3_xhr(url, mimeType, response, callback); }; } function d3_xhr(url, mimeType, response, callback) { var xhr = {}, dispatch = d3.dispatch("beforesend", "progress", "load", "error"), headers = {}, request = new XMLHttpRequest(), responseType = null; if (d3_window.XDomainRequest && !("withCredentials" in request) && /^(http(s)?:)?\/\//.test(url)) request = new XDomainRequest(); "onload" in request ? request.onload = request.onerror = respond : request.onreadystatechange = function() { request.readyState > 3 && respond(); }; function respond() { var status = request.status, result; if (!status && request.responseText || status >= 200 && status < 300 || status === 304) { try { result = response.call(xhr, request); } catch (e) { dispatch.error.call(xhr, e); return; } dispatch.load.call(xhr, result); } else { dispatch.error.call(xhr, request); } } request.onprogress = function(event) { var o = d3.event; d3.event = event; try { dispatch.progress.call(xhr, request); } finally { d3.event = o; } }; xhr.header = function(name, value) { name = (name + "").toLowerCase(); if (arguments.length < 2) return headers[name]; if (value == null) delete headers[name]; else headers[name] = value + ""; return xhr; }; xhr.mimeType = function(value) { if (!arguments.length) return mimeType; mimeType = value == null ? 
null : value + ""; return xhr; }; xhr.responseType = function(value) { if (!arguments.length) return responseType; responseType = value; return xhr; }; xhr.response = function(value) { response = value; return xhr; }; [ "get", "post" ].forEach(function(method) { xhr[method] = function() { return xhr.send.apply(xhr, [ method ].concat(d3_array(arguments))); }; }); xhr.send = function(method, data, callback) { if (arguments.length === 2 && typeof data === "function") callback = data, data = null; request.open(method, url, true); if (mimeType != null && !("accept" in headers)) headers["accept"] = mimeType + ",*/*"; if (request.setRequestHeader) for (var name in headers) request.setRequestHeader(name, headers[name]); if (mimeType != null && request.overrideMimeType) request.overrideMimeType(mimeType); if (responseType != null) request.responseType = responseType; if (callback != null) xhr.on("error", callback).on("load", function(request) { callback(null, request); }); dispatch.beforesend.call(xhr, request); request.send(data == null ? null : data); return xhr; }; xhr.abort = function() { request.abort(); return xhr; }; d3.rebind(xhr, dispatch, "on"); return callback == null ? xhr : xhr.get(d3_xhr_fixCallback(callback)); } function d3_xhr_fixCallback(callback) { return callback.length === 1 ? function(error, request) { callback(error == null ? request : null); } : callback; } d3.dsv = function(delimiter, mimeType) { var reFormat = new RegExp('["' + delimiter + "\n]"), delimiterCode = delimiter.charCodeAt(0); function dsv(url, row, callback) { if (arguments.length < 3) callback = row, row = null; var xhr = d3_xhr(url, mimeType, row == null ? response : typedResponse(row), callback); xhr.row = function(_) { return arguments.length ? xhr.response((row = _) == null ? 
response : typedResponse(_)) : row; }; return xhr; } function response(request) { return dsv.parse(request.responseText); } function typedResponse(f) { return function(request) { return dsv.parse(request.responseText, f); }; } dsv.parse = function(text, f) { var o; return dsv.parseRows(text, function(row, i) { if (o) return o(row, i - 1); var a = new Function("d", "return {" + row.map(function(name, i) { return JSON.stringify(name) + ": d[" + i + "]"; }).join(",") + "}"); o = f ? function(row, i) { return f(a(row), i); } : a; }); }; dsv.parseRows = function(text, f) { var EOL = {}, EOF = {}, rows = [], N = text.length, I = 0, n = 0, t, eol; function token() { if (I >= N) return EOF; if (eol) return eol = false, EOL; var j = I; if (text.charCodeAt(j) === 34) { var i = j; while (i++ < N) { if (text.charCodeAt(i) === 34) { if (text.charCodeAt(i + 1) !== 34) break; ++i; } } I = i + 2; var c = text.charCodeAt(i + 1); if (c === 13) { eol = true; if (text.charCodeAt(i + 2) === 10) ++I; } else if (c === 10) { eol = true; } return text.substring(j + 1, i).replace(/""/g, '"'); } while (I < N) { var c = text.charCodeAt(I++), k = 1; if (c === 10) eol = true; else if (c === 13) { eol = true; if (text.charCodeAt(I) === 10) ++I, ++k; } else if (c !== delimiterCode) continue; return text.substring(j, I - k); } return text.substring(j); } while ((t = token()) !== EOF) { var a = []; while (t !== EOL && t !== EOF) { a.push(t); t = token(); } if (f && !(a = f(a, n++))) continue; rows.push(a); } return rows; }; dsv.format = function(rows) { if (Array.isArray(rows[0])) return dsv.formatRows(rows); var fieldSet = new d3_Set(), fields = []; rows.forEach(function(row) { for (var field in row) { if (!fieldSet.has(field)) { fields.push(fieldSet.add(field)); } } }); return [ fields.map(formatValue).join(delimiter) ].concat(rows.map(function(row) { return fields.map(function(field) { return formatValue(row[field]); }).join(delimiter); })).join("\n"); }; dsv.formatRows = function(rows) { return 
rows.map(formatRow).join("\n"); }; function formatRow(row) { return row.map(formatValue).join(delimiter); } function formatValue(text) { return reFormat.test(text) ? '"' + text.replace(/\"/g, '""') + '"' : text; } return dsv; }; d3.csv = d3.dsv(",", "text/csv"); d3.tsv = d3.dsv(" ", "text/tab-separated-values"); d3.touch = function(container, touches, identifier) { if (arguments.length < 3) identifier = touches, touches = d3_eventSource().changedTouches; if (touches) for (var i = 0, n = touches.length, touch; i < n; ++i) { if ((touch = touches[i]).identifier === identifier) { return d3_mousePoint(container, touch); } } }; var d3_timer_queueHead, d3_timer_queueTail, d3_timer_interval, d3_timer_timeout, d3_timer_active, d3_timer_frame = d3_window[d3_vendorSymbol(d3_window, "requestAnimationFrame")] || function(callback) { setTimeout(callback, 17); }; d3.timer = function(callback, delay, then) { var n = arguments.length; if (n < 2) delay = 0; if (n < 3) then = Date.now(); var time = then + delay, timer = { c: callback, t: time, f: false, n: null }; if (d3_timer_queueTail) d3_timer_queueTail.n = timer; else d3_timer_queueHead = timer; d3_timer_queueTail = timer; if (!d3_timer_interval) { d3_timer_timeout = clearTimeout(d3_timer_timeout); d3_timer_interval = 1; d3_timer_frame(d3_timer_step); } }; function d3_timer_step() { var now = d3_timer_mark(), delay = d3_timer_sweep() - now; if (delay > 24) { if (isFinite(delay)) { clearTimeout(d3_timer_timeout); d3_timer_timeout = setTimeout(d3_timer_step, delay); } d3_timer_interval = 0; } else { d3_timer_interval = 1; d3_timer_frame(d3_timer_step); } } d3.timer.flush = function() { d3_timer_mark(); d3_timer_sweep(); }; function d3_timer_mark() { var now = Date.now(); d3_timer_active = d3_timer_queueHead; while (d3_timer_active) { if (now >= d3_timer_active.t) d3_timer_active.f = d3_timer_active.c(now - d3_timer_active.t); d3_timer_active = d3_timer_active.n; } return now; } function d3_timer_sweep() { var t0, t1 = 
d3_timer_queueHead, time = Infinity; while (t1) { if (t1.f) { t1 = t0 ? t0.n = t1.n : d3_timer_queueHead = t1.n; } else { if (t1.t < time) time = t1.t; t1 = (t0 = t1).n; } } d3_timer_queueTail = t0; return time; } function d3_format_precision(x, p) { return p - (x ? Math.ceil(Math.log(x) / Math.LN10) : 1); } d3.round = function(x, n) { return n ? Math.round(x * (n = Math.pow(10, n))) / n : Math.round(x); }; var d3_formatPrefixes = [ "y", "z", "a", "f", "p", "n", "µ", "m", "", "k", "M", "G", "T", "P", "E", "Z", "Y" ].map(d3_formatPrefix); d3.formatPrefix = function(value, precision) { var i = 0; if (value) { if (value < 0) value *= -1; if (precision) value = d3.round(value, d3_format_precision(value, precision)); i = 1 + Math.floor(1e-12 + Math.log(value) / Math.LN10); i = Math.max(-24, Math.min(24, Math.floor((i - 1) / 3) * 3)); } return d3_formatPrefixes[8 + i / 3]; }; function d3_formatPrefix(d, i) { var k = Math.pow(10, abs(8 - i) * 3); return { scale: i > 8 ? function(d) { return d / k; } : function(d) { return d * k; }, symbol: d }; } function d3_locale_numberFormat(locale) { var locale_decimal = locale.decimal, locale_thousands = locale.thousands, locale_grouping = locale.grouping, locale_currency = locale.currency, formatGroup = locale_grouping ? 
function(value) { var i = value.length, t = [], j = 0, g = locale_grouping[0]; while (i > 0 && g > 0) { t.push(value.substring(i -= g, i + g)); g = locale_grouping[j = (j + 1) % locale_grouping.length]; } return t.reverse().join(locale_thousands); } : d3_identity; return function(specifier) { var match = d3_format_re.exec(specifier), fill = match[1] || " ", align = match[2] || ">", sign = match[3] || "", symbol = match[4] || "", zfill = match[5], width = +match[6], comma = match[7], precision = match[8], type = match[9], scale = 1, prefix = "", suffix = "", integer = false; if (precision) precision = +precision.substring(1); if (zfill || fill === "0" && align === "=") { zfill = fill = "0"; align = "="; if (comma) width -= Math.floor((width - 1) / 4); } switch (type) { case "n": comma = true; type = "g"; break; case "%": scale = 100; suffix = "%"; type = "f"; break; case "p": scale = 100; suffix = "%"; type = "r"; break; case "b": case "o": case "x": case "X": if (symbol === "#") prefix = "0" + type.toLowerCase(); case "c": case "d": integer = true; precision = 0; break; case "s": scale = -1; type = "r"; break; } if (symbol === "$") prefix = locale_currency[0], suffix = locale_currency[1]; if (type == "r" && !precision) type = "g"; if (precision != null) { if (type == "g") precision = Math.max(1, Math.min(21, precision)); else if (type == "e" || type == "f") precision = Math.max(0, Math.min(20, precision)); } type = d3_format_types.get(type) || d3_format_typeDefault; var zcomma = zfill && comma; return function(value) { var fullSuffix = suffix; if (integer && value % 1) return ""; var negative = value < 0 || value === 0 && 1 / value < 0 ? (value = -value, "-") : sign; if (scale < 0) { var unit = d3.formatPrefix(value, precision); value = unit.scale(value); fullSuffix = unit.symbol + suffix; } else { value *= scale; } value = type(value, precision); var i = value.lastIndexOf("."), before = i < 0 ? value : value.substring(0, i), after = i < 0 ? 
"" : locale_decimal + value.substring(i + 1); if (!zfill && comma) before = formatGroup(before); var length = prefix.length + before.length + after.length + (zcomma ? 0 : negative.length), padding = length < width ? new Array(length = width - length + 1).join(fill) : ""; if (zcomma) before = formatGroup(padding + before); negative += prefix; value = before + after; return (align === "<" ? negative + value + padding : align === ">" ? padding + negative + value : align === "^" ? padding.substring(0, length >>= 1) + negative + value + padding.substring(length) : negative + (zcomma ? value : padding + value)) + fullSuffix; }; }; } var d3_format_re = /(?:([^{])?([<>=^]))?([+\- ])?([$#])?(0)?(\d+)?(,)?(\.-?\d+)?([a-z%])?/i; var d3_format_types = d3.map({ b: function(x) { return x.toString(2); }, c: function(x) { return String.fromCharCode(x); }, o: function(x) { return x.toString(8); }, x: function(x) { return x.toString(16); }, X: function(x) { return x.toString(16).toUpperCase(); }, g: function(x, p) { return x.toPrecision(p); }, e: function(x, p) { return x.toExponential(p); }, f: function(x, p) { return x.toFixed(p); }, r: function(x, p) { return (x = d3.round(x, d3_format_precision(x, p))).toFixed(Math.max(0, Math.min(20, d3_format_precision(x * (1 + 1e-15), p)))); } }); function d3_format_typeDefault(x) { return x + ""; } var d3_time = d3.time = {}, d3_date = Date; function d3_date_utc() { this._ = new Date(arguments.length > 1 ? 
Date.UTC.apply(this, arguments) : arguments[0]); } d3_date_utc.prototype = { getDate: function() { return this._.getUTCDate(); }, getDay: function() { return this._.getUTCDay(); }, getFullYear: function() { return this._.getUTCFullYear(); }, getHours: function() { return this._.getUTCHours(); }, getMilliseconds: function() { return this._.getUTCMilliseconds(); }, getMinutes: function() { return this._.getUTCMinutes(); }, getMonth: function() { return this._.getUTCMonth(); }, getSeconds: function() { return this._.getUTCSeconds(); }, getTime: function() { return this._.getTime(); }, getTimezoneOffset: function() { return 0; }, valueOf: function() { return this._.valueOf(); }, setDate: function() { d3_time_prototype.setUTCDate.apply(this._, arguments); }, setDay: function() { d3_time_prototype.setUTCDay.apply(this._, arguments); }, setFullYear: function() { d3_time_prototype.setUTCFullYear.apply(this._, arguments); }, setHours: function() { d3_time_prototype.setUTCHours.apply(this._, arguments); }, setMilliseconds: function() { d3_time_prototype.setUTCMilliseconds.apply(this._, arguments); }, setMinutes: function() { d3_time_prototype.setUTCMinutes.apply(this._, arguments); }, setMonth: function() { d3_time_prototype.setUTCMonth.apply(this._, arguments); }, setSeconds: function() { d3_time_prototype.setUTCSeconds.apply(this._, arguments); }, setTime: function() { d3_time_prototype.setTime.apply(this._, arguments); } }; var d3_time_prototype = Date.prototype; function d3_time_interval(local, step, number) { function round(date) { var d0 = local(date), d1 = offset(d0, 1); return date - d0 < d1 - date ? 
d0 : d1; } function ceil(date) { step(date = local(new d3_date(date - 1)), 1); return date; } function offset(date, k) { step(date = new d3_date(+date), k); return date; } function range(t0, t1, dt) { var time = ceil(t0), times = []; if (dt > 1) { while (time < t1) { if (!(number(time) % dt)) times.push(new Date(+time)); step(time, 1); } } else { while (time < t1) times.push(new Date(+time)), step(time, 1); } return times; } function range_utc(t0, t1, dt) { try { d3_date = d3_date_utc; var utc = new d3_date_utc(); utc._ = t0; return range(utc, t1, dt); } finally { d3_date = Date; } } local.floor = local; local.round = round; local.ceil = ceil; local.offset = offset; local.range = range; var utc = local.utc = d3_time_interval_utc(local); utc.floor = utc; utc.round = d3_time_interval_utc(round); utc.ceil = d3_time_interval_utc(ceil); utc.offset = d3_time_interval_utc(offset); utc.range = range_utc; return local; } function d3_time_interval_utc(method) { return function(date, k) { try { d3_date = d3_date_utc; var utc = new d3_date_utc(); utc._ = date; return method(utc, k)._; } finally { d3_date = Date; } }; } d3_time.year = d3_time_interval(function(date) { date = d3_time.day(date); date.setMonth(0, 1); return date; }, function(date, offset) { date.setFullYear(date.getFullYear() + offset); }, function(date) { return date.getFullYear(); }); d3_time.years = d3_time.year.range; d3_time.years.utc = d3_time.year.utc.range; d3_time.day = d3_time_interval(function(date) { var day = new d3_date(2e3, 0); day.setFullYear(date.getFullYear(), date.getMonth(), date.getDate()); return day; }, function(date, offset) { date.setDate(date.getDate() + offset); }, function(date) { return date.getDate() - 1; }); d3_time.days = d3_time.day.range; d3_time.days.utc = d3_time.day.utc.range; d3_time.dayOfYear = function(date) { var year = d3_time.year(date); return Math.floor((date - year - (date.getTimezoneOffset() - year.getTimezoneOffset()) * 6e4) / 864e5); }; [ "sunday", "monday", 
"tuesday", "wednesday", "thursday", "friday", "saturday" ].forEach(function(day, i) { i = 7 - i; var interval = d3_time[day] = d3_time_interval(function(date) { (date = d3_time.day(date)).setDate(date.getDate() - (date.getDay() + i) % 7); return date; }, function(date, offset) { date.setDate(date.getDate() + Math.floor(offset) * 7); }, function(date) { var day = d3_time.year(date).getDay(); return Math.floor((d3_time.dayOfYear(date) + (day + i) % 7) / 7) - (day !== i); }); d3_time[day + "s"] = interval.range; d3_time[day + "s"].utc = interval.utc.range; d3_time[day + "OfYear"] = function(date) { var day = d3_time.year(date).getDay(); return Math.floor((d3_time.dayOfYear(date) + (day + i) % 7) / 7); }; }); d3_time.week = d3_time.sunday; d3_time.weeks = d3_time.sunday.range; d3_time.weeks.utc = d3_time.sunday.utc.range; d3_time.weekOfYear = d3_time.sundayOfYear; function d3_locale_timeFormat(locale) { var locale_dateTime = locale.dateTime, locale_date = locale.date, locale_time = locale.time, locale_periods = locale.periods, locale_days = locale.days, locale_shortDays = locale.shortDays, locale_months = locale.months, locale_shortMonths = locale.shortMonths; function d3_time_format(template) { var n = template.length; function format(date) { var string = [], i = -1, j = 0, c, p, f; while (++i < n) { if (template.charCodeAt(i) === 37) { string.push(template.substring(j, i)); if ((p = d3_time_formatPads[c = template.charAt(++i)]) != null) c = template.charAt(++i); if (f = d3_time_formats[c]) c = f(date, p == null ? c === "e" ? " " : "0" : p); string.push(c); j = i + 1; } } string.push(template.substring(j, i)); return string.join(""); } format.parse = function(string) { var d = { y: 1900, m: 0, d: 1, H: 0, M: 0, S: 0, L: 0, Z: null }, i = d3_time_parse(d, template, string, 0); if (i != string.length) return null; if ("p" in d) d.H = d.H % 12 + d.p * 12; var localZ = d.Z != null && d3_date !== d3_date_utc, date = new (localZ ? 
d3_date_utc : d3_date)(); if ("j" in d) date.setFullYear(d.y, 0, d.j); else if ("w" in d && ("W" in d || "U" in d)) { date.setFullYear(d.y, 0, 1); date.setFullYear(d.y, 0, "W" in d ? (d.w + 6) % 7 + d.W * 7 - (date.getDay() + 5) % 7 : d.w + d.U * 7 - (date.getDay() + 6) % 7); } else date.setFullYear(d.y, d.m, d.d); date.setHours(d.H + Math.floor(d.Z / 100), d.M + d.Z % 100, d.S, d.L); return localZ ? date._ : date; }; format.toString = function() { return template; }; return format; } function d3_time_parse(date, template, string, j) { var c, p, t, i = 0, n = template.length, m = string.length; while (i < n) { if (j >= m) return -1; c = template.charCodeAt(i++); if (c === 37) { t = template.charAt(i++); p = d3_time_parsers[t in d3_time_formatPads ? template.charAt(i++) : t]; if (!p || (j = p(date, string, j)) < 0) return -1; } else if (c != string.charCodeAt(j++)) { return -1; } } return j; } d3_time_format.utc = function(template) { var local = d3_time_format(template); function format(date) { try { d3_date = d3_date_utc; var utc = new d3_date(); utc._ = date; return local(utc); } finally { d3_date = Date; } } format.parse = function(string) { try { d3_date = d3_date_utc; var date = local.parse(string); return date && date._; } finally { d3_date = Date; } }; format.toString = local.toString; return format; }; d3_time_format.multi = d3_time_format.utc.multi = d3_time_formatMulti; var d3_time_periodLookup = d3.map(), d3_time_dayRe = d3_time_formatRe(locale_days), d3_time_dayLookup = d3_time_formatLookup(locale_days), d3_time_dayAbbrevRe = d3_time_formatRe(locale_shortDays), d3_time_dayAbbrevLookup = d3_time_formatLookup(locale_shortDays), d3_time_monthRe = d3_time_formatRe(locale_months), d3_time_monthLookup = d3_time_formatLookup(locale_months), d3_time_monthAbbrevRe = d3_time_formatRe(locale_shortMonths), d3_time_monthAbbrevLookup = d3_time_formatLookup(locale_shortMonths); locale_periods.forEach(function(p, i) { d3_time_periodLookup.set(p.toLowerCase(), i); }); 
var d3_time_formats = { a: function(d) { return locale_shortDays[d.getDay()]; }, A: function(d) { return locale_days[d.getDay()]; }, b: function(d) { return locale_shortMonths[d.getMonth()]; }, B: function(d) { return locale_months[d.getMonth()]; }, c: d3_time_format(locale_dateTime), d: function(d, p) { return d3_time_formatPad(d.getDate(), p, 2); }, e: function(d, p) { return d3_time_formatPad(d.getDate(), p, 2); }, H: function(d, p) { return d3_time_formatPad(d.getHours(), p, 2); }, I: function(d, p) { return d3_time_formatPad(d.getHours() % 12 || 12, p, 2); }, j: function(d, p) { return d3_time_formatPad(1 + d3_time.dayOfYear(d), p, 3); }, L: function(d, p) { return d3_time_formatPad(d.getMilliseconds(), p, 3); }, m: function(d, p) { return d3_time_formatPad(d.getMonth() + 1, p, 2); }, M: function(d, p) { return d3_time_formatPad(d.getMinutes(), p, 2); }, p: function(d) { return locale_periods[+(d.getHours() >= 12)]; }, S: function(d, p) { return d3_time_formatPad(d.getSeconds(), p, 2); }, U: function(d, p) { return d3_time_formatPad(d3_time.sundayOfYear(d), p, 2); }, w: function(d) { return d.getDay(); }, W: function(d, p) { return d3_time_formatPad(d3_time.mondayOfYear(d), p, 2); }, x: d3_time_format(locale_date), X: d3_time_format(locale_time), y: function(d, p) { return d3_time_formatPad(d.getFullYear() % 100, p, 2); }, Y: function(d, p) { return d3_time_formatPad(d.getFullYear() % 1e4, p, 4); }, Z: d3_time_zone, "%": function() { return "%"; } }; var d3_time_parsers = { a: d3_time_parseWeekdayAbbrev, A: d3_time_parseWeekday, b: d3_time_parseMonthAbbrev, B: d3_time_parseMonth, c: d3_time_parseLocaleFull, d: d3_time_parseDay, e: d3_time_parseDay, H: d3_time_parseHour24, I: d3_time_parseHour24, j: d3_time_parseDayOfYear, L: d3_time_parseMilliseconds, m: d3_time_parseMonthNumber, M: d3_time_parseMinutes, p: d3_time_parseAmPm, S: d3_time_parseSeconds, U: d3_time_parseWeekNumberSunday, w: d3_time_parseWeekdayNumber, W: d3_time_parseWeekNumberMonday, x: 
d3_time_parseLocaleDate, X: d3_time_parseLocaleTime, y: d3_time_parseYear, Y: d3_time_parseFullYear, Z: d3_time_parseZone, "%": d3_time_parseLiteralPercent }; function d3_time_parseWeekdayAbbrev(date, string, i) { d3_time_dayAbbrevRe.lastIndex = 0; var n = d3_time_dayAbbrevRe.exec(string.substring(i)); return n ? (date.w = d3_time_dayAbbrevLookup.get(n[0].toLowerCase()), i + n[0].length) : -1; } function d3_time_parseWeekday(date, string, i) { d3_time_dayRe.lastIndex = 0; var n = d3_time_dayRe.exec(string.substring(i)); return n ? (date.w = d3_time_dayLookup.get(n[0].toLowerCase()), i + n[0].length) : -1; } function d3_time_parseMonthAbbrev(date, string, i) { d3_time_monthAbbrevRe.lastIndex = 0; var n = d3_time_monthAbbrevRe.exec(string.substring(i)); return n ? (date.m = d3_time_monthAbbrevLookup.get(n[0].toLowerCase()), i + n[0].length) : -1; } function d3_time_parseMonth(date, string, i) { d3_time_monthRe.lastIndex = 0; var n = d3_time_monthRe.exec(string.substring(i)); return n ? (date.m = d3_time_monthLookup.get(n[0].toLowerCase()), i + n[0].length) : -1; } function d3_time_parseLocaleFull(date, string, i) { return d3_time_parse(date, d3_time_formats.c.toString(), string, i); } function d3_time_parseLocaleDate(date, string, i) { return d3_time_parse(date, d3_time_formats.x.toString(), string, i); } function d3_time_parseLocaleTime(date, string, i) { return d3_time_parse(date, d3_time_formats.X.toString(), string, i); } function d3_time_parseAmPm(date, string, i) { var n = d3_time_periodLookup.get(string.substring(i, i += 2).toLowerCase()); return n == null ? -1 : (date.p = n, i); } return d3_time_format; } var d3_time_formatPads = { "-": "", _: " ", "0": "0" }, d3_time_numberRe = /^\s*\d+/, d3_time_percentRe = /^%/; function d3_time_formatPad(value, fill, width) { var sign = value < 0 ? "-" : "", string = (sign ? -value : value) + "", length = string.length; return sign + (length < width ? 
new Array(width - length + 1).join(fill) + string : string); } function d3_time_formatRe(names) { return new RegExp("^(?:" + names.map(d3.requote).join("|") + ")", "i"); } function d3_time_formatLookup(names) { var map = new d3_Map(), i = -1, n = names.length; while (++i < n) map.set(names[i].toLowerCase(), i); return map; } function d3_time_parseWeekdayNumber(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 1)); return n ? (date.w = +n[0], i + n[0].length) : -1; } function d3_time_parseWeekNumberSunday(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i)); return n ? (date.U = +n[0], i + n[0].length) : -1; } function d3_time_parseWeekNumberMonday(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i)); return n ? (date.W = +n[0], i + n[0].length) : -1; } function d3_time_parseFullYear(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 4)); return n ? (date.y = +n[0], i + n[0].length) : -1; } function d3_time_parseYear(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 2)); return n ? (date.y = d3_time_expandYear(+n[0]), i + n[0].length) : -1; } function d3_time_parseZone(date, string, i) { return /^[+-]\d{4}$/.test(string = string.substring(i, i + 5)) ? (date.Z = -string, i + 5) : -1; } function d3_time_expandYear(d) { return d + (d > 68 ? 1900 : 2e3); } function d3_time_parseMonthNumber(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 2)); return n ? (date.m = n[0] - 1, i + n[0].length) : -1; } function d3_time_parseDay(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 2)); return n ? 
(date.d = +n[0], i + n[0].length) : -1; } function d3_time_parseDayOfYear(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 3)); return n ? (date.j = +n[0], i + n[0].length) : -1; } function d3_time_parseHour24(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 2)); return n ? (date.H = +n[0], i + n[0].length) : -1; } function d3_time_parseMinutes(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 2)); return n ? (date.M = +n[0], i + n[0].length) : -1; } function d3_time_parseSeconds(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 2)); return n ? (date.S = +n[0], i + n[0].length) : -1; } function d3_time_parseMilliseconds(date, string, i) { d3_time_numberRe.lastIndex = 0; var n = d3_time_numberRe.exec(string.substring(i, i + 3)); return n ? (date.L = +n[0], i + n[0].length) : -1; } function d3_time_zone(d) { var z = d.getTimezoneOffset(), zs = z > 0 ? "-" : "+", zh = ~~(abs(z) / 60), zm = abs(z) % 60; return zs + d3_time_formatPad(zh, "0", 2) + d3_time_formatPad(zm, "0", 2); } function d3_time_parseLiteralPercent(date, string, i) { d3_time_percentRe.lastIndex = 0; var n = d3_time_percentRe.exec(string.substring(i, i + 1)); return n ? 
i + n[0].length : -1; } function d3_time_formatMulti(formats) { var n = formats.length, i = -1; while (++i < n) formats[i][0] = this(formats[i][0]); return function(date) { var i = 0, f = formats[i]; while (!f[1](date)) f = formats[++i]; return f[0](date); }; } d3.locale = function(locale) { return { numberFormat: d3_locale_numberFormat(locale), timeFormat: d3_locale_timeFormat(locale) }; }; var d3_locale_enUS = d3.locale({ decimal: ".", thousands: ",", grouping: [ 3 ], currency: [ "$", "" ], dateTime: "%a %b %e %X %Y", date: "%m/%d/%Y", time: "%H:%M:%S", periods: [ "AM", "PM" ], days: [ "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday" ], shortDays: [ "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat" ], months: [ "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December" ], shortMonths: [ "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" ] }); d3.format = d3_locale_enUS.numberFormat; d3.geo = {}; function d3_adder() {} d3_adder.prototype = { s: 0, t: 0, add: function(y) { d3_adderSum(y, this.t, d3_adderTemp); d3_adderSum(d3_adderTemp.s, this.s, this); if (this.s) this.t += d3_adderTemp.t; else this.s = d3_adderTemp.t; }, reset: function() { this.s = this.t = 0; }, valueOf: function() { return this.s; } }; var d3_adderTemp = new d3_adder(); function d3_adderSum(a, b, o) { var x = o.s = a + b, bv = x - a, av = x - bv; o.t = a - av + (b - bv); } d3.geo.stream = function(object, listener) { if (object && d3_geo_streamObjectType.hasOwnProperty(object.type)) { d3_geo_streamObjectType[object.type](object, listener); } else { d3_geo_streamGeometry(object, listener); } }; function d3_geo_streamGeometry(geometry, listener) { if (geometry && d3_geo_streamGeometryType.hasOwnProperty(geometry.type)) { d3_geo_streamGeometryType[geometry.type](geometry, listener); } } var d3_geo_streamObjectType = { Feature: function(feature, listener) { 
d3_geo_streamGeometry(feature.geometry, listener); }, FeatureCollection: function(object, listener) { var features = object.features, i = -1, n = features.length; while (++i < n) d3_geo_streamGeometry(features[i].geometry, listener); } }; var d3_geo_streamGeometryType = { Sphere: function(object, listener) { listener.sphere(); }, Point: function(object, listener) { object = object.coordinates; listener.point(object[0], object[1], object[2]); }, MultiPoint: function(object, listener) { var coordinates = object.coordinates, i = -1, n = coordinates.length; while (++i < n) object = coordinates[i], listener.point(object[0], object[1], object[2]); }, LineString: function(object, listener) { d3_geo_streamLine(object.coordinates, listener, 0); }, MultiLineString: function(object, listener) { var coordinates = object.coordinates, i = -1, n = coordinates.length; while (++i < n) d3_geo_streamLine(coordinates[i], listener, 0); }, Polygon: function(object, listener) { d3_geo_streamPolygon(object.coordinates, listener); }, MultiPolygon: function(object, listener) { var coordinates = object.coordinates, i = -1, n = coordinates.length; while (++i < n) d3_geo_streamPolygon(coordinates[i], listener); }, GeometryCollection: function(object, listener) { var geometries = object.geometries, i = -1, n = geometries.length; while (++i < n) d3_geo_streamGeometry(geometries[i], listener); } }; function d3_geo_streamLine(coordinates, listener, closed) { var i = -1, n = coordinates.length - closed, coordinate; listener.lineStart(); while (++i < n) coordinate = coordinates[i], listener.point(coordinate[0], coordinate[1], coordinate[2]); listener.lineEnd(); } function d3_geo_streamPolygon(coordinates, listener) { var i = -1, n = coordinates.length; listener.polygonStart(); while (++i < n) d3_geo_streamLine(coordinates[i], listener, 1); listener.polygonEnd(); } d3.geo.area = function(object) { d3_geo_areaSum = 0; d3.geo.stream(object, d3_geo_area); return d3_geo_areaSum; }; var d3_geo_areaSum, 
d3_geo_areaRingSum = new d3_adder(); var d3_geo_area = { sphere: function() { d3_geo_areaSum += 4 * π; }, point: d3_noop, lineStart: d3_noop, lineEnd: d3_noop, polygonStart: function() { d3_geo_areaRingSum.reset(); d3_geo_area.lineStart = d3_geo_areaRingStart; }, polygonEnd: function() { var area = 2 * d3_geo_areaRingSum; d3_geo_areaSum += area < 0 ? 4 * π + area : area; d3_geo_area.lineStart = d3_geo_area.lineEnd = d3_geo_area.point = d3_noop; } }; function d3_geo_areaRingStart() { var λ00, φ00, λ0, cosφ0, sinφ0; d3_geo_area.point = function(λ, φ) { d3_geo_area.point = nextPoint; λ0 = (λ00 = λ) * d3_radians, cosφ0 = Math.cos(φ = (φ00 = φ) * d3_radians / 2 + π / 4), sinφ0 = Math.sin(φ); }; function nextPoint(λ, φ) { λ *= d3_radians; φ = φ * d3_radians / 2 + π / 4; var dλ = λ - λ0, sdλ = dλ >= 0 ? 1 : -1, adλ = sdλ * dλ, cosφ = Math.cos(φ), sinφ = Math.sin(φ), k = sinφ0 * sinφ, u = cosφ0 * cosφ + k * Math.cos(adλ), v = k * sdλ * Math.sin(adλ); d3_geo_areaRingSum.add(Math.atan2(v, u)); λ0 = λ, cosφ0 = cosφ, sinφ0 = sinφ; } d3_geo_area.lineEnd = function() { nextPoint(λ00, φ00); }; } function d3_geo_cartesian(spherical) { var λ = spherical[0], φ = spherical[1], cosφ = Math.cos(φ); return [ cosφ * Math.cos(λ), cosφ * Math.sin(λ), Math.sin(φ) ]; } function d3_geo_cartesianDot(a, b) { return a[0] * b[0] + a[1] * b[1] + a[2] * b[2]; } function d3_geo_cartesianCross(a, b) { return [ a[1] * b[2] - a[2] * b[1], a[2] * b[0] - a[0] * b[2], a[0] * b[1] - a[1] * b[0] ]; } function d3_geo_cartesianAdd(a, b) { a[0] += b[0]; a[1] += b[1]; a[2] += b[2]; } function d3_geo_cartesianScale(vector, k) { return [ vector[0] * k, vector[1] * k, vector[2] * k ]; } function d3_geo_cartesianNormalize(d) { var l = Math.sqrt(d[0] * d[0] + d[1] * d[1] + d[2] * d[2]); d[0] /= l; d[1] /= l; d[2] /= l; } function d3_geo_spherical(cartesian) { return [ Math.atan2(cartesian[1], cartesian[0]), d3_asin(cartesian[2]) ]; } function d3_geo_sphericalEqual(a, b) { return abs(a[0] - b[0]) < ε && abs(a[1] - 
b[1]) < ε; } d3.geo.bounds = function() { var λ0, φ0, λ1, φ1, λ_, λ__, φ__, p0, dλSum, ranges, range; var bound = { point: point, lineStart: lineStart, lineEnd: lineEnd, polygonStart: function() { bound.point = ringPoint; bound.lineStart = ringStart; bound.lineEnd = ringEnd; dλSum = 0; d3_geo_area.polygonStart(); }, polygonEnd: function() { d3_geo_area.polygonEnd(); bound.point = point; bound.lineStart = lineStart; bound.lineEnd = lineEnd; if (d3_geo_areaRingSum < 0) λ0 = -(λ1 = 180), φ0 = -(φ1 = 90); else if (dλSum > ε) φ1 = 90; else if (dλSum < -ε) φ0 = -90; range[0] = λ0, range[1] = λ1; } }; function point(λ, φ) { ranges.push(range = [ λ0 = λ, λ1 = λ ]); if (φ < φ0) φ0 = φ; if (φ > φ1) φ1 = φ; } function linePoint(λ, φ) { var p = d3_geo_cartesian([ λ * d3_radians, φ * d3_radians ]); if (p0) { var normal = d3_geo_cartesianCross(p0, p), equatorial = [ normal[1], -normal[0], 0 ], inflection = d3_geo_cartesianCross(equatorial, normal); d3_geo_cartesianNormalize(inflection); inflection = d3_geo_spherical(inflection); var dλ = λ - λ_, s = dλ > 0 ? 
1 : -1, λi = inflection[0] * d3_degrees * s, antimeridian = abs(dλ) > 180; if (antimeridian ^ (s * λ_ < λi && λi < s * λ)) { var φi = inflection[1] * d3_degrees; if (φi > φ1) φ1 = φi; } else if (λi = (λi + 360) % 360 - 180, antimeridian ^ (s * λ_ < λi && λi < s * λ)) { var φi = -inflection[1] * d3_degrees; if (φi < φ0) φ0 = φi; } else { if (φ < φ0) φ0 = φ; if (φ > φ1) φ1 = φ; } if (antimeridian) { if (λ < λ_) { if (angle(λ0, λ) > angle(λ0, λ1)) λ1 = λ; } else { if (angle(λ, λ1) > angle(λ0, λ1)) λ0 = λ; } } else { if (λ1 >= λ0) { if (λ < λ0) λ0 = λ; if (λ > λ1) λ1 = λ; } else { if (λ > λ_) { if (angle(λ0, λ) > angle(λ0, λ1)) λ1 = λ; } else { if (angle(λ, λ1) > angle(λ0, λ1)) λ0 = λ; } } } } else { point(λ, φ); } p0 = p, λ_ = λ; } function lineStart() { bound.point = linePoint; } function lineEnd() { range[0] = λ0, range[1] = λ1; bound.point = point; p0 = null; } function ringPoint(λ, φ) { if (p0) { var dλ = λ - λ_; dλSum += abs(dλ) > 180 ? dλ + (dλ > 0 ? 360 : -360) : dλ; } else λ__ = λ, φ__ = φ; d3_geo_area.point(λ, φ); linePoint(λ, φ); } function ringStart() { d3_geo_area.lineStart(); } function ringEnd() { ringPoint(λ__, φ__); d3_geo_area.lineEnd(); if (abs(dλSum) > ε) λ0 = -(λ1 = 180); range[0] = λ0, range[1] = λ1; p0 = null; } function angle(λ0, λ1) { return (λ1 -= λ0) < 0 ? λ1 + 360 : λ1; } function compareRanges(a, b) { return a[0] - b[0]; } function withinRange(x, range) { return range[0] <= range[1] ? 
range[0] <= x && x <= range[1] : x < range[0] || range[1] < x; } return function(feature) { φ1 = λ1 = -(λ0 = φ0 = Infinity); ranges = []; d3.geo.stream(feature, bound); var n = ranges.length; if (n) { ranges.sort(compareRanges); for (var i = 1, a = ranges[0], b, merged = [ a ]; i < n; ++i) { b = ranges[i]; if (withinRange(b[0], a) || withinRange(b[1], a)) { if (angle(a[0], b[1]) > angle(a[0], a[1])) a[1] = b[1]; if (angle(b[0], a[1]) > angle(a[0], a[1])) a[0] = b[0]; } else { merged.push(a = b); } } var best = -Infinity, dλ; for (var n = merged.length - 1, i = 0, a = merged[n], b; i <= n; a = b, ++i) { b = merged[i]; if ((dλ = angle(a[1], b[0])) > best) best = dλ, λ0 = b[0], λ1 = a[1]; } } ranges = range = null; return λ0 === Infinity || φ0 === Infinity ? [ [ NaN, NaN ], [ NaN, NaN ] ] : [ [ λ0, φ0 ], [ λ1, φ1 ] ]; }; }(); d3.geo.centroid = function(object) { d3_geo_centroidW0 = d3_geo_centroidW1 = d3_geo_centroidX0 = d3_geo_centroidY0 = d3_geo_centroidZ0 = d3_geo_centroidX1 = d3_geo_centroidY1 = d3_geo_centroidZ1 = d3_geo_centroidX2 = d3_geo_centroidY2 = d3_geo_centroidZ2 = 0; d3.geo.stream(object, d3_geo_centroid); var x = d3_geo_centroidX2, y = d3_geo_centroidY2, z = d3_geo_centroidZ2, m = x * x + y * y + z * z; if (m < ε2) { x = d3_geo_centroidX1, y = d3_geo_centroidY1, z = d3_geo_centroidZ1; if (d3_geo_centroidW1 < ε) x = d3_geo_centroidX0, y = d3_geo_centroidY0, z = d3_geo_centroidZ0; m = x * x + y * y + z * z; if (m < ε2) return [ NaN, NaN ]; } return [ Math.atan2(y, x) * d3_degrees, d3_asin(z / Math.sqrt(m)) * d3_degrees ]; }; var d3_geo_centroidW0, d3_geo_centroidW1, d3_geo_centroidX0, d3_geo_centroidY0, d3_geo_centroidZ0, d3_geo_centroidX1, d3_geo_centroidY1, d3_geo_centroidZ1, d3_geo_centroidX2, d3_geo_centroidY2, d3_geo_centroidZ2; var d3_geo_centroid = { sphere: d3_noop, point: d3_geo_centroidPoint, lineStart: d3_geo_centroidLineStart, lineEnd: d3_geo_centroidLineEnd, polygonStart: function() { d3_geo_centroid.lineStart = d3_geo_centroidRingStart; }, 
polygonEnd: function() { d3_geo_centroid.lineStart = d3_geo_centroidLineStart; } }; function d3_geo_centroidPoint(λ, φ) { λ *= d3_radians; var cosφ = Math.cos(φ *= d3_radians); d3_geo_centroidPointXYZ(cosφ * Math.cos(λ), cosφ * Math.sin(λ), Math.sin(φ)); } function d3_geo_centroidPointXYZ(x, y, z) { ++d3_geo_centroidW0; d3_geo_centroidX0 += (x - d3_geo_centroidX0) / d3_geo_centroidW0; d3_geo_centroidY0 += (y - d3_geo_centroidY0) / d3_geo_centroidW0; d3_geo_centroidZ0 += (z - d3_geo_centroidZ0) / d3_geo_centroidW0; } function d3_geo_centroidLineStart() { var x0, y0, z0; d3_geo_centroid.point = function(λ, φ) { λ *= d3_radians; var cosφ = Math.cos(φ *= d3_radians); x0 = cosφ * Math.cos(λ); y0 = cosφ * Math.sin(λ); z0 = Math.sin(φ); d3_geo_centroid.point = nextPoint; d3_geo_centroidPointXYZ(x0, y0, z0); }; function nextPoint(λ, φ) { λ *= d3_radians; var cosφ = Math.cos(φ *= d3_radians), x = cosφ * Math.cos(λ), y = cosφ * Math.sin(λ), z = Math.sin(φ), w = Math.atan2(Math.sqrt((w = y0 * z - z0 * y) * w + (w = z0 * x - x0 * z) * w + (w = x0 * y - y0 * x) * w), x0 * x + y0 * y + z0 * z); d3_geo_centroidW1 += w; d3_geo_centroidX1 += w * (x0 + (x0 = x)); d3_geo_centroidY1 += w * (y0 + (y0 = y)); d3_geo_centroidZ1 += w * (z0 + (z0 = z)); d3_geo_centroidPointXYZ(x0, y0, z0); } } function d3_geo_centroidLineEnd() { d3_geo_centroid.point = d3_geo_centroidPoint; } function d3_geo_centroidRingStart() { var λ00, φ00, x0, y0, z0; d3_geo_centroid.point = function(λ, φ) { λ00 = λ, φ00 = φ; d3_geo_centroid.point = nextPoint; λ *= d3_radians; var cosφ = Math.cos(φ *= d3_radians); x0 = cosφ * Math.cos(λ); y0 = cosφ * Math.sin(λ); z0 = Math.sin(φ); d3_geo_centroidPointXYZ(x0, y0, z0); }; d3_geo_centroid.lineEnd = function() { nextPoint(λ00, φ00); d3_geo_centroid.lineEnd = d3_geo_centroidLineEnd; d3_geo_centroid.point = d3_geo_centroidPoint; }; function nextPoint(λ, φ) { λ *= d3_radians; var cosφ = Math.cos(φ *= d3_radians), x = cosφ * Math.cos(λ), y = cosφ * Math.sin(λ), z = Math.sin(φ), 
cx = y0 * z - z0 * y, cy = z0 * x - x0 * z, cz = x0 * y - y0 * x, m = Math.sqrt(cx * cx + cy * cy + cz * cz), u = x0 * x + y0 * y + z0 * z, v = m && -d3_acos(u) / m, w = Math.atan2(m, u); d3_geo_centroidX2 += v * cx; d3_geo_centroidY2 += v * cy; d3_geo_centroidZ2 += v * cz; d3_geo_centroidW1 += w; d3_geo_centroidX1 += w * (x0 + (x0 = x)); d3_geo_centroidY1 += w * (y0 + (y0 = y)); d3_geo_centroidZ1 += w * (z0 + (z0 = z)); d3_geo_centroidPointXYZ(x0, y0, z0); } } function d3_true() { return true; } function d3_geo_clipPolygon(segments, compare, clipStartInside, interpolate, listener) { var subject = [], clip = []; segments.forEach(function(segment) { if ((n = segment.length - 1) <= 0) return; var n, p0 = segment[0], p1 = segment[n]; if (d3_geo_sphericalEqual(p0, p1)) { listener.lineStart(); for (var i = 0; i < n; ++i) listener.point((p0 = segment[i])[0], p0[1]); listener.lineEnd(); return; } var a = new d3_geo_clipPolygonIntersection(p0, segment, null, true), b = new d3_geo_clipPolygonIntersection(p0, null, a, false); a.o = b; subject.push(a); clip.push(b); a = new d3_geo_clipPolygonIntersection(p1, segment, null, false); b = new d3_geo_clipPolygonIntersection(p1, null, a, true); a.o = b; subject.push(a); clip.push(b); }); clip.sort(compare); d3_geo_clipPolygonLinkCircular(subject); d3_geo_clipPolygonLinkCircular(clip); if (!subject.length) return; for (var i = 0, entry = clipStartInside, n = clip.length; i < n; ++i) { clip[i].e = entry = !entry; } var start = subject[0], points, point; while (1) { var current = start, isSubject = true; while (current.v) if ((current = current.n) === start) return; points = current.z; listener.lineStart(); do { current.v = current.o.v = true; if (current.e) { if (isSubject) { for (var i = 0, n = points.length; i < n; ++i) listener.point((point = points[i])[0], point[1]); } else { interpolate(current.x, current.n.x, 1, listener); } current = current.n; } else { if (isSubject) { points = current.p.z; for (var i = points.length - 1; i 
>= 0; --i) listener.point((point = points[i])[0], point[1]); } else { interpolate(current.x, current.p.x, -1, listener); } current = current.p; } current = current.o; points = current.z; isSubject = !isSubject; } while (!current.v); listener.lineEnd(); } } function d3_geo_clipPolygonLinkCircular(array) { if (!(n = array.length)) return; var n, i = 0, a = array[0], b; while (++i < n) { a.n = b = array[i]; b.p = a; a = b; } a.n = b = array[0]; b.p = a; } function d3_geo_clipPolygonIntersection(point, points, other, entry) { this.x = point; this.z = points; this.o = other; this.e = entry; this.v = false; this.n = this.p = null; } function d3_geo_clip(pointVisible, clipLine, interpolate, clipStart) { return function(rotate, listener) { var line = clipLine(listener), rotatedClipStart = rotate.invert(clipStart[0], clipStart[1]); var clip = { point: point, lineStart: lineStart, lineEnd: lineEnd, polygonStart: function() { clip.point = pointRing; clip.lineStart = ringStart; clip.lineEnd = ringEnd; segments = []; polygon = []; }, polygonEnd: function() { clip.point = point; clip.lineStart = lineStart; clip.lineEnd = lineEnd; segments = d3.merge(segments); var clipStartInside = d3_geo_pointInPolygon(rotatedClipStart, polygon); if (segments.length) { if (!polygonStarted) listener.polygonStart(), polygonStarted = true; d3_geo_clipPolygon(segments, d3_geo_clipSort, clipStartInside, interpolate, listener); } else if (clipStartInside) { if (!polygonStarted) listener.polygonStart(), polygonStarted = true; listener.lineStart(); interpolate(null, null, 1, listener); listener.lineEnd(); } if (polygonStarted) listener.polygonEnd(), polygonStarted = false; segments = polygon = null; }, sphere: function() { listener.polygonStart(); listener.lineStart(); interpolate(null, null, 1, listener); listener.lineEnd(); listener.polygonEnd(); } }; function point(λ, φ) { var point = rotate(λ, φ); if (pointVisible(λ = point[0], φ = point[1])) listener.point(λ, φ); } function pointLine(λ, φ) { var 
point = rotate(λ, φ); line.point(point[0], point[1]); } function lineStart() { clip.point = pointLine; line.lineStart(); } function lineEnd() { clip.point = point; line.lineEnd(); } var segments; var buffer = d3_geo_clipBufferListener(), ringListener = clipLine(buffer), polygonStarted = false, polygon, ring; function pointRing(λ, φ) { ring.push([ λ, φ ]); var point = rotate(λ, φ); ringListener.point(point[0], point[1]); } function ringStart() { ringListener.lineStart(); ring = []; } function ringEnd() { pointRing(ring[0][0], ring[0][1]); ringListener.lineEnd(); var clean = ringListener.clean(), ringSegments = buffer.buffer(), segment, n = ringSegments.length; ring.pop(); polygon.push(ring); ring = null; if (!n) return; if (clean & 1) { segment = ringSegments[0]; var n = segment.length - 1, i = -1, point; if (n > 0) { if (!polygonStarted) listener.polygonStart(), polygonStarted = true; listener.lineStart(); while (++i < n) listener.point((point = segment[i])[0], point[1]); listener.lineEnd(); } return; } if (n > 1 && clean & 2) ringSegments.push(ringSegments.pop().concat(ringSegments.shift())); segments.push(ringSegments.filter(d3_geo_clipSegmentLength1)); } return clip; }; } function d3_geo_clipSegmentLength1(segment) { return segment.length > 1; } function d3_geo_clipBufferListener() { var lines = [], line; return { lineStart: function() { lines.push(line = []); }, point: function(λ, φ) { line.push([ λ, φ ]); }, lineEnd: d3_noop, buffer: function() { var buffer = lines; lines = []; line = null; return buffer; }, rejoin: function() { if (lines.length > 1) lines.push(lines.pop().concat(lines.shift())); } }; } function d3_geo_clipSort(a, b) { return ((a = a.x)[0] < 0 ? a[1] - halfπ - ε : halfπ - a[1]) - ((b = b.x)[0] < 0 ? 
b[1] - halfπ - ε : halfπ - b[1]); } function d3_geo_pointInPolygon(point, polygon) { var meridian = point[0], parallel = point[1], meridianNormal = [ Math.sin(meridian), -Math.cos(meridian), 0 ], polarAngle = 0, winding = 0; d3_geo_areaRingSum.reset(); for (var i = 0, n = polygon.length; i < n; ++i) { var ring = polygon[i], m = ring.length; if (!m) continue; var point0 = ring[0], λ0 = point0[0], φ0 = point0[1] / 2 + π / 4, sinφ0 = Math.sin(φ0), cosφ0 = Math.cos(φ0), j = 1; while (true) { if (j === m) j = 0; point = ring[j]; var λ = point[0], φ = point[1] / 2 + π / 4, sinφ = Math.sin(φ), cosφ = Math.cos(φ), dλ = λ - λ0, sdλ = dλ >= 0 ? 1 : -1, adλ = sdλ * dλ, antimeridian = adλ > π, k = sinφ0 * sinφ; d3_geo_areaRingSum.add(Math.atan2(k * sdλ * Math.sin(adλ), cosφ0 * cosφ + k * Math.cos(adλ))); polarAngle += antimeridian ? dλ + sdλ * τ : dλ; if (antimeridian ^ λ0 >= meridian ^ λ >= meridian) { var arc = d3_geo_cartesianCross(d3_geo_cartesian(point0), d3_geo_cartesian(point)); d3_geo_cartesianNormalize(arc); var intersection = d3_geo_cartesianCross(meridianNormal, arc); d3_geo_cartesianNormalize(intersection); var φarc = (antimeridian ^ dλ >= 0 ? -1 : 1) * d3_asin(intersection[2]); if (parallel > φarc || parallel === φarc && (arc[0] || arc[1])) { winding += antimeridian ^ dλ >= 0 ? 1 : -1; } } if (!j++) break; λ0 = λ, sinφ0 = sinφ, cosφ0 = cosφ, point0 = point; } } return (polarAngle < -ε || polarAngle < ε && d3_geo_areaRingSum < 0) ^ winding & 1; } var d3_geo_clipAntimeridian = d3_geo_clip(d3_true, d3_geo_clipAntimeridianLine, d3_geo_clipAntimeridianInterpolate, [ -π, -π / 2 ]); function d3_geo_clipAntimeridianLine(listener) { var λ0 = NaN, φ0 = NaN, sλ0 = NaN, clean; return { lineStart: function() { listener.lineStart(); clean = 1; }, point: function(λ1, φ1) { var sλ1 = λ1 > 0 ? π : -π, dλ = abs(λ1 - λ0); if (abs(dλ - π) < ε) { listener.point(λ0, φ0 = (φ0 + φ1) / 2 > 0 ? 
halfπ : -halfπ); listener.point(sλ0, φ0); listener.lineEnd(); listener.lineStart(); listener.point(sλ1, φ0); listener.point(λ1, φ0); clean = 0; } else if (sλ0 !== sλ1 && dλ >= π) { if (abs(λ0 - sλ0) < ε) λ0 -= sλ0 * ε; if (abs(λ1 - sλ1) < ε) λ1 -= sλ1 * ε; φ0 = d3_geo_clipAntimeridianIntersect(λ0, φ0, λ1, φ1); listener.point(sλ0, φ0); listener.lineEnd(); listener.lineStart(); listener.point(sλ1, φ0); clean = 0; } listener.point(λ0 = λ1, φ0 = φ1); sλ0 = sλ1; }, lineEnd: function() { listener.lineEnd(); λ0 = φ0 = NaN; }, clean: function() { return 2 - clean; } }; } function d3_geo_clipAntimeridianIntersect(λ0, φ0, λ1, φ1) { var cosφ0, cosφ1, sinλ0_λ1 = Math.sin(λ0 - λ1); return abs(sinλ0_λ1) > ε ? Math.atan((Math.sin(φ0) * (cosφ1 = Math.cos(φ1)) * Math.sin(λ1) - Math.sin(φ1) * (cosφ0 = Math.cos(φ0)) * Math.sin(λ0)) / (cosφ0 * cosφ1 * sinλ0_λ1)) : (φ0 + φ1) / 2; } function d3_geo_clipAntimeridianInterpolate(from, to, direction, listener) { var φ; if (from == null) { φ = direction * halfπ; listener.point(-π, φ); listener.point(0, φ); listener.point(π, φ); listener.point(π, 0); listener.point(π, -φ); listener.point(0, -φ); listener.point(-π, -φ); listener.point(-π, 0); listener.point(-π, φ); } else if (abs(from[0] - to[0]) > ε) { var s = from[0] < to[0] ? π : -π; φ = direction * s / 2; listener.point(-s, φ); listener.point(0, φ); listener.point(s, φ); } else { listener.point(to[0], to[1]); } } function d3_geo_clipCircle(radius) { var cr = Math.cos(radius), smallRadius = cr > 0, notHemisphere = abs(cr) > ε, interpolate = d3_geo_circleInterpolate(radius, 6 * d3_radians); return d3_geo_clip(visible, clipLine, interpolate, smallRadius ? [ 0, -radius ] : [ -π, radius - π ]); function visible(λ, φ) { return Math.cos(λ) * Math.cos(φ) > cr; } function clipLine(listener) { var point0, c0, v0, v00, clean; return { lineStart: function() { v00 = v0 = false; clean = 1; }, point: function(λ, φ) { var point1 = [ λ, φ ], point2, v = visible(λ, φ), c = smallRadius ? v ? 
0 : code(λ, φ) : v ? code(λ + (λ < 0 ? π : -π), φ) : 0; if (!point0 && (v00 = v0 = v)) listener.lineStart(); if (v !== v0) { point2 = intersect(point0, point1); if (d3_geo_sphericalEqual(point0, point2) || d3_geo_sphericalEqual(point1, point2)) { point1[0] += ε; point1[1] += ε; v = visible(point1[0], point1[1]); } } if (v !== v0) { clean = 0; if (v) { listener.lineStart(); point2 = intersect(point1, point0); listener.point(point2[0], point2[1]); } else { point2 = intersect(point0, point1); listener.point(point2[0], point2[1]); listener.lineEnd(); } point0 = point2; } else if (notHemisphere && point0 && smallRadius ^ v) { var t; if (!(c & c0) && (t = intersect(point1, point0, true))) { clean = 0; if (smallRadius) { listener.lineStart(); listener.point(t[0][0], t[0][1]); listener.point(t[1][0], t[1][1]); listener.lineEnd(); } else { listener.point(t[1][0], t[1][1]); listener.lineEnd(); listener.lineStart(); listener.point(t[0][0], t[0][1]); } } } if (v && (!point0 || !d3_geo_sphericalEqual(point0, point1))) { listener.point(point1[0], point1[1]); } point0 = point1, v0 = v, c0 = c; }, lineEnd: function() { if (v0) listener.lineEnd(); point0 = null; }, clean: function() { return clean | (v00 && v0) << 1; } }; } function intersect(a, b, two) { var pa = d3_geo_cartesian(a), pb = d3_geo_cartesian(b); var n1 = [ 1, 0, 0 ], n2 = d3_geo_cartesianCross(pa, pb), n2n2 = d3_geo_cartesianDot(n2, n2), n1n2 = n2[0], determinant = n2n2 - n1n2 * n1n2; if (!determinant) return !two && a; var c1 = cr * n2n2 / determinant, c2 = -cr * n1n2 / determinant, n1xn2 = d3_geo_cartesianCross(n1, n2), A = d3_geo_cartesianScale(n1, c1), B = d3_geo_cartesianScale(n2, c2); d3_geo_cartesianAdd(A, B); var u = n1xn2, w = d3_geo_cartesianDot(A, u), uu = d3_geo_cartesianDot(u, u), t2 = w * w - uu * (d3_geo_cartesianDot(A, A) - 1); if (t2 < 0) return; var t = Math.sqrt(t2), q = d3_geo_cartesianScale(u, (-w - t) / uu); d3_geo_cartesianAdd(q, A); q = d3_geo_spherical(q); if (!two) return q; var λ0 = a[0], 
λ1 = b[0], φ0 = a[1], φ1 = b[1], z; if (λ1 < λ0) z = λ0, λ0 = λ1, λ1 = z; var δλ = λ1 - λ0, polar = abs(δλ - π) < ε, meridian = polar || δλ < ε; if (!polar && φ1 < φ0) z = φ0, φ0 = φ1, φ1 = z; if (meridian ? polar ? φ0 + φ1 > 0 ^ q[1] < (abs(q[0] - λ0) < ε ? φ0 : φ1) : φ0 <= q[1] && q[1] <= φ1 : δλ > π ^ (λ0 <= q[0] && q[0] <= λ1)) { var q1 = d3_geo_cartesianScale(u, (-w + t) / uu); d3_geo_cartesianAdd(q1, A); return [ q, d3_geo_spherical(q1) ]; } } function code(λ, φ) { var r = smallRadius ? radius : π - radius, code = 0; if (λ < -r) code |= 1; else if (λ > r) code |= 2; if (φ < -r) code |= 4; else if (φ > r) code |= 8; return code; } } function d3_geom_clipLine(x0, y0, x1, y1) { return function(line) { var a = line.a, b = line.b, ax = a.x, ay = a.y, bx = b.x, by = b.y, t0 = 0, t1 = 1, dx = bx - ax, dy = by - ay, r; r = x0 - ax; if (!dx && r > 0) return; r /= dx; if (dx < 0) { if (r < t0) return; if (r < t1) t1 = r; } else if (dx > 0) { if (r > t1) return; if (r > t0) t0 = r; } r = x1 - ax; if (!dx && r < 0) return; r /= dx; if (dx < 0) { if (r > t1) return; if (r > t0) t0 = r; } else if (dx > 0) { if (r < t0) return; if (r < t1) t1 = r; } r = y0 - ay; if (!dy && r > 0) return; r /= dy; if (dy < 0) { if (r < t0) return; if (r < t1) t1 = r; } else if (dy > 0) { if (r > t1) return; if (r > t0) t0 = r; } r = y1 - ay; if (!dy && r < 0) return; r /= dy; if (dy < 0) { if (r > t1) return; if (r > t0) t0 = r; } else if (dy > 0) { if (r < t0) return; if (r < t1) t1 = r; } if (t0 > 0) line.a = { x: ax + t0 * dx, y: ay + t0 * dy }; if (t1 < 1) line.b = { x: ax + t1 * dx, y: ay + t1 * dy }; return line; }; } var d3_geo_clipExtentMAX = 1e9; d3.geo.clipExtent = function() { var x0, y0, x1, y1, stream, clip, clipExtent = { stream: function(output) { if (stream) stream.valid = false; stream = clip(output); stream.valid = true; return stream; }, extent: function(_) { if (!arguments.length) return [ [ x0, y0 ], [ x1, y1 ] ]; clip = d3_geo_clipExtent(x0 = +_[0][0], y0 = +_[0][1], x1 
= +_[1][0], y1 = +_[1][1]); if (stream) stream.valid = false, stream = null; return clipExtent; } }; return clipExtent.extent([ [ 0, 0 ], [ 960, 500 ] ]); }; function d3_geo_clipExtent(x0, y0, x1, y1) { return function(listener) { var listener_ = listener, bufferListener = d3_geo_clipBufferListener(), clipLine = d3_geom_clipLine(x0, y0, x1, y1), segments, polygon, ring; var clip = { point: point, lineStart: lineStart, lineEnd: lineEnd, polygonStart: function() { listener = bufferListener; segments = []; polygon = []; clean = true; }, polygonEnd: function() { listener = listener_; segments = d3.merge(segments); var clipStartInside = insidePolygon([ x0, y1 ]), inside = clean && clipStartInside, visible = segments.length; if (inside || visible) { listener.polygonStart(); if (inside) { listener.lineStart(); interpolate(null, null, 1, listener); listener.lineEnd(); } if (visible) { d3_geo_clipPolygon(segments, compare, clipStartInside, interpolate, listener); } listener.polygonEnd(); } segments = polygon = ring = null; } }; function insidePolygon(p) { var wn = 0, n = polygon.length, y = p[1]; for (var i = 0; i < n; ++i) { for (var j = 1, v = polygon[i], m = v.length, a = v[0], b; j < m; ++j) { b = v[j]; if (a[1] <= y) { if (b[1] > y && d3_cross2d(a, b, p) > 0) ++wn; } else { if (b[1] <= y && d3_cross2d(a, b, p) < 0) --wn; } a = b; } } return wn !== 0; } function interpolate(from, to, direction, listener) { var a = 0, a1 = 0; if (from == null || (a = corner(from, direction)) !== (a1 = corner(to, direction)) || comparePoints(from, to) < 0 ^ direction > 0) { do { listener.point(a === 0 || a === 3 ? x0 : x1, a > 1 ? 
y1 : y0); } while ((a = (a + direction + 4) % 4) !== a1); } else { listener.point(to[0], to[1]); } } function pointVisible(x, y) { return x0 <= x && x <= x1 && y0 <= y && y <= y1; } function point(x, y) { if (pointVisible(x, y)) listener.point(x, y); } var x__, y__, v__, x_, y_, v_, first, clean; function lineStart() { clip.point = linePoint; if (polygon) polygon.push(ring = []); first = true; v_ = false; x_ = y_ = NaN; } function lineEnd() { if (segments) { linePoint(x__, y__); if (v__ && v_) bufferListener.rejoin(); segments.push(bufferListener.buffer()); } clip.point = point; if (v_) listener.lineEnd(); } function linePoint(x, y) { x = Math.max(-d3_geo_clipExtentMAX, Math.min(d3_geo_clipExtentMAX, x)); y = Math.max(-d3_geo_clipExtentMAX, Math.min(d3_geo_clipExtentMAX, y)); var v = pointVisible(x, y); if (polygon) ring.push([ x, y ]); if (first) { x__ = x, y__ = y, v__ = v; first = false; if (v) { listener.lineStart(); listener.point(x, y); } } else { if (v && v_) listener.point(x, y); else { var l = { a: { x: x_, y: y_ }, b: { x: x, y: y } }; if (clipLine(l)) { if (!v_) { listener.lineStart(); listener.point(l.a.x, l.a.y); } listener.point(l.b.x, l.b.y); if (!v) listener.lineEnd(); clean = false; } else if (v) { listener.lineStart(); listener.point(x, y); clean = false; } } } x_ = x, y_ = y, v_ = v; } return clip; }; function corner(p, direction) { return abs(p[0] - x0) < ε ? direction > 0 ? 0 : 3 : abs(p[0] - x1) < ε ? direction > 0 ? 2 : 1 : abs(p[1] - y0) < ε ? direction > 0 ? 1 : 0 : direction > 0 ? 3 : 2; } function compare(a, b) { return comparePoints(a.x, b.x); } function comparePoints(a, b) { var ca = corner(a, 1), cb = corner(b, 1); return ca !== cb ? ca - cb : ca === 0 ? b[1] - a[1] : ca === 1 ? a[0] - b[0] : ca === 2 ? 
a[1] - b[1] : b[0] - a[0]; } } function d3_geo_compose(a, b) { function compose(x, y) { return x = a(x, y), b(x[0], x[1]); } if (a.invert && b.invert) compose.invert = function(x, y) { return x = b.invert(x, y), x && a.invert(x[0], x[1]); }; return compose; } function d3_geo_conic(projectAt) { var φ0 = 0, φ1 = π / 3, m = d3_geo_projectionMutator(projectAt), p = m(φ0, φ1); p.parallels = function(_) { if (!arguments.length) return [ φ0 / π * 180, φ1 / π * 180 ]; return m(φ0 = _[0] * π / 180, φ1 = _[1] * π / 180); }; return p; } function d3_geo_conicEqualArea(φ0, φ1) { var sinφ0 = Math.sin(φ0), n = (sinφ0 + Math.sin(φ1)) / 2, C = 1 + sinφ0 * (2 * n - sinφ0), ρ0 = Math.sqrt(C) / n; function forward(λ, φ) { var ρ = Math.sqrt(C - 2 * n * Math.sin(φ)) / n; return [ ρ * Math.sin(λ *= n), ρ0 - ρ * Math.cos(λ) ]; } forward.invert = function(x, y) { var ρ0_y = ρ0 - y; return [ Math.atan2(x, ρ0_y) / n, d3_asin((C - (x * x + ρ0_y * ρ0_y) * n * n) / (2 * n)) ]; }; return forward; } (d3.geo.conicEqualArea = function() { return d3_geo_conic(d3_geo_conicEqualArea); }).raw = d3_geo_conicEqualArea; d3.geo.albers = function() { return d3.geo.conicEqualArea().rotate([ 96, 0 ]).center([ -.6, 38.7 ]).parallels([ 29.5, 45.5 ]).scale(1070); }; d3.geo.albersUsa = function() { var lower48 = d3.geo.albers(); var alaska = d3.geo.conicEqualArea().rotate([ 154, 0 ]).center([ -2, 58.5 ]).parallels([ 55, 65 ]); var hawaii = d3.geo.conicEqualArea().rotate([ 157, 0 ]).center([ -3, 19.9 ]).parallels([ 8, 18 ]); var point, pointStream = { point: function(x, y) { point = [ x, y ]; } }, lower48Point, alaskaPoint, hawaiiPoint; function albersUsa(coordinates) { var x = coordinates[0], y = coordinates[1]; point = null; (lower48Point(x, y), point) || (alaskaPoint(x, y), point) || hawaiiPoint(x, y); return point; } albersUsa.invert = function(coordinates) { var k = lower48.scale(), t = lower48.translate(), x = (coordinates[0] - t[0]) / k, y = (coordinates[1] - t[1]) / k; return (y >= .12 && y < .234 && x >= 
-.425 && x < -.214 ? alaska : y >= .166 && y < .234 && x >= -.214 && x < -.115 ? hawaii : lower48).invert(coordinates); }; albersUsa.stream = function(stream) { var lower48Stream = lower48.stream(stream), alaskaStream = alaska.stream(stream), hawaiiStream = hawaii.stream(stream); return { point: function(x, y) { lower48Stream.point(x, y); alaskaStream.point(x, y); hawaiiStream.point(x, y); }, sphere: function() { lower48Stream.sphere(); alaskaStream.sphere(); hawaiiStream.sphere(); }, lineStart: function() { lower48Stream.lineStart(); alaskaStream.lineStart(); hawaiiStream.lineStart(); }, lineEnd: function() { lower48Stream.lineEnd(); alaskaStream.lineEnd(); hawaiiStream.lineEnd(); }, polygonStart: function() { lower48Stream.polygonStart(); alaskaStream.polygonStart(); hawaiiStream.polygonStart(); }, polygonEnd: function() { lower48Stream.polygonEnd(); alaskaStream.polygonEnd(); hawaiiStream.polygonEnd(); } }; }; albersUsa.precision = function(_) { if (!arguments.length) return lower48.precision(); lower48.precision(_); alaska.precision(_); hawaii.precision(_); return albersUsa; }; albersUsa.scale = function(_) { if (!arguments.length) return lower48.scale(); lower48.scale(_); alaska.scale(_ * .35); hawaii.scale(_); return albersUsa.translate(lower48.translate()); }; albersUsa.translate = function(_) { if (!arguments.length) return lower48.translate(); var k = lower48.scale(), x = +_[0], y = +_[1]; lower48Point = lower48.translate(_).clipExtent([ [ x - .455 * k, y - .238 * k ], [ x + .455 * k, y + .238 * k ] ]).stream(pointStream).point; alaskaPoint = alaska.translate([ x - .307 * k, y + .201 * k ]).clipExtent([ [ x - .425 * k + ε, y + .12 * k + ε ], [ x - .214 * k - ε, y + .234 * k - ε ] ]).stream(pointStream).point; hawaiiPoint = hawaii.translate([ x - .205 * k, y + .212 * k ]).clipExtent([ [ x - .214 * k + ε, y + .166 * k + ε ], [ x - .115 * k - ε, y + .234 * k - ε ] ]).stream(pointStream).point; return albersUsa; }; return albersUsa.scale(1070); }; var 
d3_geo_pathAreaSum, d3_geo_pathAreaPolygon, d3_geo_pathArea = { point: d3_noop, lineStart: d3_noop, lineEnd: d3_noop, polygonStart: function() { d3_geo_pathAreaPolygon = 0; d3_geo_pathArea.lineStart = d3_geo_pathAreaRingStart; }, polygonEnd: function() { d3_geo_pathArea.lineStart = d3_geo_pathArea.lineEnd = d3_geo_pathArea.point = d3_noop; d3_geo_pathAreaSum += abs(d3_geo_pathAreaPolygon / 2); } }; function d3_geo_pathAreaRingStart() { var x00, y00, x0, y0; d3_geo_pathArea.point = function(x, y) { d3_geo_pathArea.point = nextPoint; x00 = x0 = x, y00 = y0 = y; }; function nextPoint(x, y) { d3_geo_pathAreaPolygon += y0 * x - x0 * y; x0 = x, y0 = y; } d3_geo_pathArea.lineEnd = function() { nextPoint(x00, y00); }; } var d3_geo_pathBoundsX0, d3_geo_pathBoundsY0, d3_geo_pathBoundsX1, d3_geo_pathBoundsY1; var d3_geo_pathBounds = { point: d3_geo_pathBoundsPoint, lineStart: d3_noop, lineEnd: d3_noop, polygonStart: d3_noop, polygonEnd: d3_noop }; function d3_geo_pathBoundsPoint(x, y) { if (x < d3_geo_pathBoundsX0) d3_geo_pathBoundsX0 = x; if (x > d3_geo_pathBoundsX1) d3_geo_pathBoundsX1 = x; if (y < d3_geo_pathBoundsY0) d3_geo_pathBoundsY0 = y; if (y > d3_geo_pathBoundsY1) d3_geo_pathBoundsY1 = y; } function d3_geo_pathBuffer() { var pointCircle = d3_geo_pathBufferCircle(4.5), buffer = []; var stream = { point: point, lineStart: function() { stream.point = pointLineStart; }, lineEnd: lineEnd, polygonStart: function() { stream.lineEnd = lineEndPolygon; }, polygonEnd: function() { stream.lineEnd = lineEnd; stream.point = point; }, pointRadius: function(_) { pointCircle = d3_geo_pathBufferCircle(_); return stream; }, result: function() { if (buffer.length) { var result = buffer.join(""); buffer = []; return result; } } }; function point(x, y) { buffer.push("M", x, ",", y, pointCircle); } function pointLineStart(x, y) { buffer.push("M", x, ",", y); stream.point = pointLine; } function pointLine(x, y) { buffer.push("L", x, ",", y); } function lineEnd() { stream.point = point; } 
function lineEndPolygon() { buffer.push("Z"); } return stream; } function d3_geo_pathBufferCircle(radius) { return "m0," + radius + "a" + radius + "," + radius + " 0 1,1 0," + -2 * radius + "a" + radius + "," + radius + " 0 1,1 0," + 2 * radius + "z"; } var d3_geo_pathCentroid = { point: d3_geo_pathCentroidPoint, lineStart: d3_geo_pathCentroidLineStart, lineEnd: d3_geo_pathCentroidLineEnd, polygonStart: function() { d3_geo_pathCentroid.lineStart = d3_geo_pathCentroidRingStart; }, polygonEnd: function() { d3_geo_pathCentroid.point = d3_geo_pathCentroidPoint; d3_geo_pathCentroid.lineStart = d3_geo_pathCentroidLineStart; d3_geo_pathCentroid.lineEnd = d3_geo_pathCentroidLineEnd; } }; function d3_geo_pathCentroidPoint(x, y) { d3_geo_centroidX0 += x; d3_geo_centroidY0 += y; ++d3_geo_centroidZ0; } function d3_geo_pathCentroidLineStart() { var x0, y0; d3_geo_pathCentroid.point = function(x, y) { d3_geo_pathCentroid.point = nextPoint; d3_geo_pathCentroidPoint(x0 = x, y0 = y); }; function nextPoint(x, y) { var dx = x - x0, dy = y - y0, z = Math.sqrt(dx * dx + dy * dy); d3_geo_centroidX1 += z * (x0 + x) / 2; d3_geo_centroidY1 += z * (y0 + y) / 2; d3_geo_centroidZ1 += z; d3_geo_pathCentroidPoint(x0 = x, y0 = y); } } function d3_geo_pathCentroidLineEnd() { d3_geo_pathCentroid.point = d3_geo_pathCentroidPoint; } function d3_geo_pathCentroidRingStart() { var x00, y00, x0, y0; d3_geo_pathCentroid.point = function(x, y) { d3_geo_pathCentroid.point = nextPoint; d3_geo_pathCentroidPoint(x00 = x0 = x, y00 = y0 = y); }; function nextPoint(x, y) { var dx = x - x0, dy = y - y0, z = Math.sqrt(dx * dx + dy * dy); d3_geo_centroidX1 += z * (x0 + x) / 2; d3_geo_centroidY1 += z * (y0 + y) / 2; d3_geo_centroidZ1 += z; z = y0 * x - x0 * y; d3_geo_centroidX2 += z * (x0 + x); d3_geo_centroidY2 += z * (y0 + y); d3_geo_centroidZ2 += z * 3; d3_geo_pathCentroidPoint(x0 = x, y0 = y); } d3_geo_pathCentroid.lineEnd = function() { nextPoint(x00, y00); }; } function d3_geo_pathContext(context) { var 
pointRadius = 4.5; var stream = { point: point, lineStart: function() { stream.point = pointLineStart; }, lineEnd: lineEnd, polygonStart: function() { stream.lineEnd = lineEndPolygon; }, polygonEnd: function() { stream.lineEnd = lineEnd; stream.point = point; }, pointRadius: function(_) { pointRadius = _; return stream; }, result: d3_noop }; function point(x, y) { context.moveTo(x, y); context.arc(x, y, pointRadius, 0, τ); } function pointLineStart(x, y) { context.moveTo(x, y); stream.point = pointLine; } function pointLine(x, y) { context.lineTo(x, y); } function lineEnd() { stream.point = point; } function lineEndPolygon() { context.closePath(); } return stream; } function d3_geo_resample(project) { var δ2 = .5, cosMinDistance = Math.cos(30 * d3_radians), maxDepth = 16; function resample(stream) { return (maxDepth ? resampleRecursive : resampleNone)(stream); } function resampleNone(stream) { return d3_geo_transformPoint(stream, function(x, y) { x = project(x, y); stream.point(x[0], x[1]); }); } function resampleRecursive(stream) { var λ00, φ00, x00, y00, a00, b00, c00, λ0, x0, y0, a0, b0, c0; var resample = { point: point, lineStart: lineStart, lineEnd: lineEnd, polygonStart: function() { stream.polygonStart(); resample.lineStart = ringStart; }, polygonEnd: function() { stream.polygonEnd(); resample.lineStart = lineStart; } }; function point(x, y) { x = project(x, y); stream.point(x[0], x[1]); } function lineStart() { x0 = NaN; resample.point = linePoint; stream.lineStart(); } function linePoint(λ, φ) { var c = d3_geo_cartesian([ λ, φ ]), p = project(λ, φ); resampleLineTo(x0, y0, λ0, a0, b0, c0, x0 = p[0], y0 = p[1], λ0 = λ, a0 = c[0], b0 = c[1], c0 = c[2], maxDepth, stream); stream.point(x0, y0); } function lineEnd() { resample.point = point; stream.lineEnd(); } function ringStart() { lineStart(); resample.point = ringPoint; resample.lineEnd = ringEnd; } function ringPoint(λ, φ) { linePoint(λ00 = λ, φ00 = φ), x00 = x0, y00 = y0, a00 = a0, b00 = b0, c00 = c0; 
resample.point = linePoint; } function ringEnd() { resampleLineTo(x0, y0, λ0, a0, b0, c0, x00, y00, λ00, a00, b00, c00, maxDepth, stream); resample.lineEnd = lineEnd; lineEnd(); } return resample; } function resampleLineTo(x0, y0, λ0, a0, b0, c0, x1, y1, λ1, a1, b1, c1, depth, stream) { var dx = x1 - x0, dy = y1 - y0, d2 = dx * dx + dy * dy; if (d2 > 4 * δ2 && depth--) { var a = a0 + a1, b = b0 + b1, c = c0 + c1, m = Math.sqrt(a * a + b * b + c * c), φ2 = Math.asin(c /= m), λ2 = abs(abs(c) - 1) < ε || abs(λ0 - λ1) < ε ? (λ0 + λ1) / 2 : Math.atan2(b, a), p = project(λ2, φ2), x2 = p[0], y2 = p[1], dx2 = x2 - x0, dy2 = y2 - y0, dz = dy * dx2 - dx * dy2; if (dz * dz / d2 > δ2 || abs((dx * dx2 + dy * dy2) / d2 - .5) > .3 || a0 * a1 + b0 * b1 + c0 * c1 < cosMinDistance) { resampleLineTo(x0, y0, λ0, a0, b0, c0, x2, y2, λ2, a /= m, b /= m, c, depth, stream); stream.point(x2, y2); resampleLineTo(x2, y2, λ2, a, b, c, x1, y1, λ1, a1, b1, c1, depth, stream); } } } resample.precision = function(_) { if (!arguments.length) return Math.sqrt(δ2); maxDepth = (δ2 = _ * _) > 0 && 16; return resample; }; return resample; } d3.geo.path = function() { var pointRadius = 4.5, projection, context, projectStream, contextStream, cacheStream; function path(object) { if (object) { if (typeof pointRadius === "function") contextStream.pointRadius(+pointRadius.apply(this, arguments)); if (!cacheStream || !cacheStream.valid) cacheStream = projectStream(contextStream); d3.geo.stream(object, cacheStream); } return contextStream.result(); } path.area = function(object) { d3_geo_pathAreaSum = 0; d3.geo.stream(object, projectStream(d3_geo_pathArea)); return d3_geo_pathAreaSum; }; path.centroid = function(object) { d3_geo_centroidX0 = d3_geo_centroidY0 = d3_geo_centroidZ0 = d3_geo_centroidX1 = d3_geo_centroidY1 = d3_geo_centroidZ1 = d3_geo_centroidX2 = d3_geo_centroidY2 = d3_geo_centroidZ2 = 0; d3.geo.stream(object, projectStream(d3_geo_pathCentroid)); return d3_geo_centroidZ2 ? 
[ d3_geo_centroidX2 / d3_geo_centroidZ2, d3_geo_centroidY2 / d3_geo_centroidZ2 ] : d3_geo_centroidZ1 ? [ d3_geo_centroidX1 / d3_geo_centroidZ1, d3_geo_centroidY1 / d3_geo_centroidZ1 ] : d3_geo_centroidZ0 ? [ d3_geo_centroidX0 / d3_geo_centroidZ0, d3_geo_centroidY0 / d3_geo_centroidZ0 ] : [ NaN, NaN ]; }; path.bounds = function(object) { d3_geo_pathBoundsX1 = d3_geo_pathBoundsY1 = -(d3_geo_pathBoundsX0 = d3_geo_pathBoundsY0 = Infinity); d3.geo.stream(object, projectStream(d3_geo_pathBounds)); return [ [ d3_geo_pathBoundsX0, d3_geo_pathBoundsY0 ], [ d3_geo_pathBoundsX1, d3_geo_pathBoundsY1 ] ]; }; path.projection = function(_) { if (!arguments.length) return projection; projectStream = (projection = _) ? _.stream || d3_geo_pathProjectStream(_) : d3_identity; return reset(); }; path.context = function(_) { if (!arguments.length) return context; contextStream = (context = _) == null ? new d3_geo_pathBuffer() : new d3_geo_pathContext(_); if (typeof pointRadius !== "function") contextStream.pointRadius(pointRadius); return reset(); }; path.pointRadius = function(_) { if (!arguments.length) return pointRadius; pointRadius = typeof _ === "function" ? 
_ : (contextStream.pointRadius(+_), +_); return path; }; function reset() { cacheStream = null; return path; } return path.projection(d3.geo.albersUsa()).context(null); }; function d3_geo_pathProjectStream(project) { var resample = d3_geo_resample(function(x, y) { return project([ x * d3_degrees, y * d3_degrees ]); }); return function(stream) { return d3_geo_projectionRadians(resample(stream)); }; } d3.geo.transform = function(methods) { return { stream: function(stream) { var transform = new d3_geo_transform(stream); for (var k in methods) transform[k] = methods[k]; return transform; } }; }; function d3_geo_transform(stream) { this.stream = stream; } d3_geo_transform.prototype = { point: function(x, y) { this.stream.point(x, y); }, sphere: function() { this.stream.sphere(); }, lineStart: function() { this.stream.lineStart(); }, lineEnd: function() { this.stream.lineEnd(); }, polygonStart: function() { this.stream.polygonStart(); }, polygonEnd: function() { this.stream.polygonEnd(); } }; function d3_geo_transformPoint(stream, point) { return { point: point, sphere: function() { stream.sphere(); }, lineStart: function() { stream.lineStart(); }, lineEnd: function() { stream.lineEnd(); }, polygonStart: function() { stream.polygonStart(); }, polygonEnd: function() { stream.polygonEnd(); } }; } d3.geo.projection = d3_geo_projection; d3.geo.projectionMutator = d3_geo_projectionMutator; function d3_geo_projection(project) { return d3_geo_projectionMutator(function() { return project; })(); } function d3_geo_projectionMutator(projectAt) { var project, rotate, projectRotate, projectResample = d3_geo_resample(function(x, y) { x = project(x, y); return [ x[0] * k + δx, δy - x[1] * k ]; }), k = 150, x = 480, y = 250, λ = 0, φ = 0, δλ = 0, δφ = 0, δγ = 0, δx, δy, preclip = d3_geo_clipAntimeridian, postclip = d3_identity, clipAngle = null, clipExtent = null, stream; function projection(point) { point = projectRotate(point[0] * d3_radians, point[1] * d3_radians); return [ 
point[0] * k + δx, δy - point[1] * k ]; } function invert(point) { point = projectRotate.invert((point[0] - δx) / k, (δy - point[1]) / k); return point && [ point[0] * d3_degrees, point[1] * d3_degrees ]; } projection.stream = function(output) { if (stream) stream.valid = false; stream = d3_geo_projectionRadians(preclip(rotate, projectResample(postclip(output)))); stream.valid = true; return stream; }; projection.clipAngle = function(_) { if (!arguments.length) return clipAngle; preclip = _ == null ? (clipAngle = _, d3_geo_clipAntimeridian) : d3_geo_clipCircle((clipAngle = +_) * d3_radians); return invalidate(); }; projection.clipExtent = function(_) { if (!arguments.length) return clipExtent; clipExtent = _; postclip = _ ? d3_geo_clipExtent(_[0][0], _[0][1], _[1][0], _[1][1]) : d3_identity; return invalidate(); }; projection.scale = function(_) { if (!arguments.length) return k; k = +_; return reset(); }; projection.translate = function(_) { if (!arguments.length) return [ x, y ]; x = +_[0]; y = +_[1]; return reset(); }; projection.center = function(_) { if (!arguments.length) return [ λ * d3_degrees, φ * d3_degrees ]; λ = _[0] % 360 * d3_radians; φ = _[1] % 360 * d3_radians; return reset(); }; projection.rotate = function(_) { if (!arguments.length) return [ δλ * d3_degrees, δφ * d3_degrees, δγ * d3_degrees ]; δλ = _[0] % 360 * d3_radians; δφ = _[1] % 360 * d3_radians; δγ = _.length > 2 ? 
_[2] % 360 * d3_radians : 0; return reset(); }; d3.rebind(projection, projectResample, "precision"); function reset() { projectRotate = d3_geo_compose(rotate = d3_geo_rotation(δλ, δφ, δγ), project); var center = project(λ, φ); δx = x - center[0] * k; δy = y + center[1] * k; return invalidate(); } function invalidate() { if (stream) stream.valid = false, stream = null; return projection; } return function() { project = projectAt.apply(this, arguments); projection.invert = project.invert && invert; return reset(); }; } function d3_geo_projectionRadians(stream) { return d3_geo_transformPoint(stream, function(x, y) { stream.point(x * d3_radians, y * d3_radians); }); } function d3_geo_equirectangular(λ, φ) { return [ λ, φ ]; } (d3.geo.equirectangular = function() { return d3_geo_projection(d3_geo_equirectangular); }).raw = d3_geo_equirectangular.invert = d3_geo_equirectangular; d3.geo.rotation = function(rotate) { rotate = d3_geo_rotation(rotate[0] % 360 * d3_radians, rotate[1] * d3_radians, rotate.length > 2 ? rotate[2] * d3_radians : 0); function forward(coordinates) { coordinates = rotate(coordinates[0] * d3_radians, coordinates[1] * d3_radians); return coordinates[0] *= d3_degrees, coordinates[1] *= d3_degrees, coordinates; } forward.invert = function(coordinates) { coordinates = rotate.invert(coordinates[0] * d3_radians, coordinates[1] * d3_radians); return coordinates[0] *= d3_degrees, coordinates[1] *= d3_degrees, coordinates; }; return forward; }; function d3_geo_identityRotation(λ, φ) { return [ λ > π ? λ - τ : λ < -π ? λ + τ : λ, φ ]; } d3_geo_identityRotation.invert = d3_geo_equirectangular; function d3_geo_rotation(δλ, δφ, δγ) { return δλ ? δφ || δγ ? d3_geo_compose(d3_geo_rotationλ(δλ), d3_geo_rotationφγ(δφ, δγ)) : d3_geo_rotationλ(δλ) : δφ || δγ ? d3_geo_rotationφγ(δφ, δγ) : d3_geo_identityRotation; } function d3_geo_forwardRotationλ(δλ) { return function(λ, φ) { return λ += δλ, [ λ > π ? λ - τ : λ < -π ? 
λ + τ : λ, φ ]; }; } function d3_geo_rotationλ(δλ) { var rotation = d3_geo_forwardRotationλ(δλ); rotation.invert = d3_geo_forwardRotationλ(-δλ); return rotation; } function d3_geo_rotationφγ(δφ, δγ) { var cosδφ = Math.cos(δφ), sinδφ = Math.sin(δφ), cosδγ = Math.cos(δγ), sinδγ = Math.sin(δγ); function rotation(λ, φ) { var cosφ = Math.cos(φ), x = Math.cos(λ) * cosφ, y = Math.sin(λ) * cosφ, z = Math.sin(φ), k = z * cosδφ + x * sinδφ; return [ Math.atan2(y * cosδγ - k * sinδγ, x * cosδφ - z * sinδφ), d3_asin(k * cosδγ + y * sinδγ) ]; } rotation.invert = function(λ, φ) { var cosφ = Math.cos(φ), x = Math.cos(λ) * cosφ, y = Math.sin(λ) * cosφ, z = Math.sin(φ), k = z * cosδγ - y * sinδγ; return [ Math.atan2(y * cosδγ + z * sinδγ, x * cosδφ + k * sinδφ), d3_asin(k * cosδφ - x * sinδφ) ]; }; return rotation; } d3.geo.circle = function() { var origin = [ 0, 0 ], angle, precision = 6, interpolate; function circle() { var center = typeof origin === "function" ? origin.apply(this, arguments) : origin, rotate = d3_geo_rotation(-center[0] * d3_radians, -center[1] * d3_radians, 0).invert, ring = []; interpolate(null, null, 1, { point: function(x, y) { ring.push(x = rotate(x, y)); x[0] *= d3_degrees, x[1] *= d3_degrees; } }); return { type: "Polygon", coordinates: [ ring ] }; } circle.origin = function(x) { if (!arguments.length) return origin; origin = x; return circle; }; circle.angle = function(x) { if (!arguments.length) return angle; interpolate = d3_geo_circleInterpolate((angle = +x) * d3_radians, precision * d3_radians); return circle; }; circle.precision = function(_) { if (!arguments.length) return precision; interpolate = d3_geo_circleInterpolate(angle * d3_radians, (precision = +_) * d3_radians); return circle; }; return circle.angle(90); }; function d3_geo_circleInterpolate(radius, precision) { var cr = Math.cos(radius), sr = Math.sin(radius); return function(from, to, direction, listener) { var step = direction * precision; if (from != null) { from = 
d3_geo_circleAngle(cr, from); to = d3_geo_circleAngle(cr, to); if (direction > 0 ? from < to : from > to) from += direction * τ; } else { from = radius + direction * τ; to = radius - .5 * step; } for (var point, t = from; direction > 0 ? t > to : t < to; t -= step) { listener.point((point = d3_geo_spherical([ cr, -sr * Math.cos(t), -sr * Math.sin(t) ]))[0], point[1]); } }; } function d3_geo_circleAngle(cr, point) { var a = d3_geo_cartesian(point); a[0] -= cr; d3_geo_cartesianNormalize(a); var angle = d3_acos(-a[1]); return ((-a[2] < 0 ? -angle : angle) + 2 * Math.PI - ε) % (2 * Math.PI); } d3.geo.distance = function(a, b) { var Δλ = (b[0] - a[0]) * d3_radians, φ0 = a[1] * d3_radians, φ1 = b[1] * d3_radians, sinΔλ = Math.sin(Δλ), cosΔλ = Math.cos(Δλ), sinφ0 = Math.sin(φ0), cosφ0 = Math.cos(φ0), sinφ1 = Math.sin(φ1), cosφ1 = Math.cos(φ1), t; return Math.atan2(Math.sqrt((t = cosφ1 * sinΔλ) * t + (t = cosφ0 * sinφ1 - sinφ0 * cosφ1 * cosΔλ) * t), sinφ0 * sinφ1 + cosφ0 * cosφ1 * cosΔλ); }; d3.geo.graticule = function() { var x1, x0, X1, X0, y1, y0, Y1, Y0, dx = 10, dy = dx, DX = 90, DY = 360, x, y, X, Y, precision = 2.5; function graticule() { return { type: "MultiLineString", coordinates: lines() }; } function lines() { return d3.range(Math.ceil(X0 / DX) * DX, X1, DX).map(X).concat(d3.range(Math.ceil(Y0 / DY) * DY, Y1, DY).map(Y)).concat(d3.range(Math.ceil(x0 / dx) * dx, x1, dx).filter(function(x) { return abs(x % DX) > ε; }).map(x)).concat(d3.range(Math.ceil(y0 / dy) * dy, y1, dy).filter(function(y) { return abs(y % DY) > ε; }).map(y)); } graticule.lines = function() { return lines().map(function(coordinates) { return { type: "LineString", coordinates: coordinates }; }); }; graticule.outline = function() { return { type: "Polygon", coordinates: [ X(X0).concat(Y(Y1).slice(1), X(X1).reverse().slice(1), Y(Y0).reverse().slice(1)) ] }; }; graticule.extent = function(_) { if (!arguments.length) return graticule.minorExtent(); return graticule.majorExtent(_).minorExtent(_); 
}; graticule.majorExtent = function(_) { if (!arguments.length) return [ [ X0, Y0 ], [ X1, Y1 ] ]; X0 = +_[0][0], X1 = +_[1][0]; Y0 = +_[0][1], Y1 = +_[1][1]; if (X0 > X1) _ = X0, X0 = X1, X1 = _; if (Y0 > Y1) _ = Y0, Y0 = Y1, Y1 = _; return graticule.precision(precision); }; graticule.minorExtent = function(_) { if (!arguments.length) return [ [ x0, y0 ], [ x1, y1 ] ]; x0 = +_[0][0], x1 = +_[1][0]; y0 = +_[0][1], y1 = +_[1][1]; if (x0 > x1) _ = x0, x0 = x1, x1 = _; if (y0 > y1) _ = y0, y0 = y1, y1 = _; return graticule.precision(precision); }; graticule.step = function(_) { if (!arguments.length) return graticule.minorStep(); return graticule.majorStep(_).minorStep(_); }; graticule.majorStep = function(_) { if (!arguments.length) return [ DX, DY ]; DX = +_[0], DY = +_[1]; return graticule; }; graticule.minorStep = function(_) { if (!arguments.length) return [ dx, dy ]; dx = +_[0], dy = +_[1]; return graticule; }; graticule.precision = function(_) { if (!arguments.length) return precision; precision = +_; x = d3_geo_graticuleX(y0, y1, 90); y = d3_geo_graticuleY(x0, x1, precision); X = d3_geo_graticuleX(Y0, Y1, 90); Y = d3_geo_graticuleY(X0, X1, precision); return graticule; }; return graticule.majorExtent([ [ -180, -90 + ε ], [ 180, 90 - ε ] ]).minorExtent([ [ -180, -80 - ε ], [ 180, 80 + ε ] ]); }; function d3_geo_graticuleX(y0, y1, dy) { var y = d3.range(y0, y1 - ε, dy).concat(y1); return function(x) { return y.map(function(y) { return [ x, y ]; }); }; } function d3_geo_graticuleY(x0, x1, dx) { var x = d3.range(x0, x1 - ε, dx).concat(x1); return function(y) { return x.map(function(x) { return [ x, y ]; }); }; } function d3_source(d) { return d.source; } function d3_target(d) { return d.target; } d3.geo.greatArc = function() { var source = d3_source, source_, target = d3_target, target_; function greatArc() { return { type: "LineString", coordinates: [ source_ || source.apply(this, arguments), target_ || target.apply(this, arguments) ] }; } greatArc.distance = 
function() { return d3.geo.distance(source_ || source.apply(this, arguments), target_ || target.apply(this, arguments)); }; greatArc.source = function(_) { if (!arguments.length) return source; source = _, source_ = typeof _ === "function" ? null : _; return greatArc; }; greatArc.target = function(_) { if (!arguments.length) return target; target = _, target_ = typeof _ === "function" ? null : _; return greatArc; }; greatArc.precision = function() { return arguments.length ? greatArc : 0; }; return greatArc; }; d3.geo.interpolate = function(source, target) { return d3_geo_interpolate(source[0] * d3_radians, source[1] * d3_radians, target[0] * d3_radians, target[1] * d3_radians); }; function d3_geo_interpolate(x0, y0, x1, y1) { var cy0 = Math.cos(y0), sy0 = Math.sin(y0), cy1 = Math.cos(y1), sy1 = Math.sin(y1), kx0 = cy0 * Math.cos(x0), ky0 = cy0 * Math.sin(x0), kx1 = cy1 * Math.cos(x1), ky1 = cy1 * Math.sin(x1), d = 2 * Math.asin(Math.sqrt(d3_haversin(y1 - y0) + cy0 * cy1 * d3_haversin(x1 - x0))), k = 1 / Math.sin(d); var interpolate = d ? 
function(t) { var B = Math.sin(t *= d) * k, A = Math.sin(d - t) * k, x = A * kx0 + B * kx1, y = A * ky0 + B * ky1, z = A * sy0 + B * sy1; return [ Math.atan2(y, x) * d3_degrees, Math.atan2(z, Math.sqrt(x * x + y * y)) * d3_degrees ]; } : function() { return [ x0 * d3_degrees, y0 * d3_degrees ]; }; interpolate.distance = d; return interpolate; } d3.geo.length = function(object) { d3_geo_lengthSum = 0; d3.geo.stream(object, d3_geo_length); return d3_geo_lengthSum; }; var d3_geo_lengthSum; var d3_geo_length = { sphere: d3_noop, point: d3_noop, lineStart: d3_geo_lengthLineStart, lineEnd: d3_noop, polygonStart: d3_noop, polygonEnd: d3_noop }; function d3_geo_lengthLineStart() { var λ0, sinφ0, cosφ0; d3_geo_length.point = function(λ, φ) { λ0 = λ * d3_radians, sinφ0 = Math.sin(φ *= d3_radians), cosφ0 = Math.cos(φ); d3_geo_length.point = nextPoint; }; d3_geo_length.lineEnd = function() { d3_geo_length.point = d3_geo_length.lineEnd = d3_noop; }; function nextPoint(λ, φ) { var sinφ = Math.sin(φ *= d3_radians), cosφ = Math.cos(φ), t = abs((λ *= d3_radians) - λ0), cosΔλ = Math.cos(t); d3_geo_lengthSum += Math.atan2(Math.sqrt((t = cosφ * Math.sin(t)) * t + (t = cosφ0 * sinφ - sinφ0 * cosφ * cosΔλ) * t), sinφ0 * sinφ + cosφ0 * cosφ * cosΔλ); λ0 = λ, sinφ0 = sinφ, cosφ0 = cosφ; } } function d3_geo_azimuthal(scale, angle) { function azimuthal(λ, φ) { var cosλ = Math.cos(λ), cosφ = Math.cos(φ), k = scale(cosλ * cosφ); return [ k * cosφ * Math.sin(λ), k * Math.sin(φ) ]; } azimuthal.invert = function(x, y) { var ρ = Math.sqrt(x * x + y * y), c = angle(ρ), sinc = Math.sin(c), cosc = Math.cos(c); return [ Math.atan2(x * sinc, ρ * cosc), Math.asin(ρ && y * sinc / ρ) ]; }; return azimuthal; } var d3_geo_azimuthalEqualArea = d3_geo_azimuthal(function(cosλcosφ) { return Math.sqrt(2 / (1 + cosλcosφ)); }, function(ρ) { return 2 * Math.asin(ρ / 2); }); (d3.geo.azimuthalEqualArea = function() { return d3_geo_projection(d3_geo_azimuthalEqualArea); }).raw = d3_geo_azimuthalEqualArea; var 
d3_geo_azimuthalEquidistant = d3_geo_azimuthal(function(cosλcosφ) { var c = Math.acos(cosλcosφ); return c && c / Math.sin(c); }, d3_identity); (d3.geo.azimuthalEquidistant = function() { return d3_geo_projection(d3_geo_azimuthalEquidistant); }).raw = d3_geo_azimuthalEquidistant; function d3_geo_conicConformal(φ0, φ1) { var cosφ0 = Math.cos(φ0), t = function(φ) { return Math.tan(π / 4 + φ / 2); }, n = φ0 === φ1 ? Math.sin(φ0) : Math.log(cosφ0 / Math.cos(φ1)) / Math.log(t(φ1) / t(φ0)), F = cosφ0 * Math.pow(t(φ0), n) / n; if (!n) return d3_geo_mercator; function forward(λ, φ) { if (F > 0) { if (φ < -halfπ + ε) φ = -halfπ + ε; } else { if (φ > halfπ - ε) φ = halfπ - ε; } var ρ = F / Math.pow(t(φ), n); return [ ρ * Math.sin(n * λ), F - ρ * Math.cos(n * λ) ]; } forward.invert = function(x, y) { var ρ0_y = F - y, ρ = d3_sgn(n) * Math.sqrt(x * x + ρ0_y * ρ0_y); return [ Math.atan2(x, ρ0_y) / n, 2 * Math.atan(Math.pow(F / ρ, 1 / n)) - halfπ ]; }; return forward; } (d3.geo.conicConformal = function() { return d3_geo_conic(d3_geo_conicConformal); }).raw = d3_geo_conicConformal; function d3_geo_conicEquidistant(φ0, φ1) { var cosφ0 = Math.cos(φ0), n = φ0 === φ1 ? 
Math.sin(φ0) : (cosφ0 - Math.cos(φ1)) / (φ1 - φ0), G = cosφ0 / n + φ0; if (abs(n) < ε) return d3_geo_equirectangular; function forward(λ, φ) { var ρ = G - φ; return [ ρ * Math.sin(n * λ), G - ρ * Math.cos(n * λ) ]; } forward.invert = function(x, y) { var ρ0_y = G - y; return [ Math.atan2(x, ρ0_y) / n, G - d3_sgn(n) * Math.sqrt(x * x + ρ0_y * ρ0_y) ]; }; return forward; } (d3.geo.conicEquidistant = function() { return d3_geo_conic(d3_geo_conicEquidistant); }).raw = d3_geo_conicEquidistant; var d3_geo_gnomonic = d3_geo_azimuthal(function(cosλcosφ) { return 1 / cosλcosφ; }, Math.atan); (d3.geo.gnomonic = function() { return d3_geo_projection(d3_geo_gnomonic); }).raw = d3_geo_gnomonic; function d3_geo_mercator(λ, φ) { return [ λ, Math.log(Math.tan(π / 4 + φ / 2)) ]; } d3_geo_mercator.invert = function(x, y) { return [ x, 2 * Math.atan(Math.exp(y)) - halfπ ]; }; function d3_geo_mercatorProjection(project) { var m = d3_geo_projection(project), scale = m.scale, translate = m.translate, clipExtent = m.clipExtent, clipAuto; m.scale = function() { var v = scale.apply(m, arguments); return v === m ? clipAuto ? m.clipExtent(null) : m : v; }; m.translate = function() { var v = translate.apply(m, arguments); return v === m ? clipAuto ? 
m.clipExtent(null) : m : v; }; m.clipExtent = function(_) { var v = clipExtent.apply(m, arguments); if (v === m) { if (clipAuto = _ == null) { var k = π * scale(), t = translate(); clipExtent([ [ t[0] - k, t[1] - k ], [ t[0] + k, t[1] + k ] ]); } } else if (clipAuto) { v = null; } return v; }; return m.clipExtent(null); } (d3.geo.mercator = function() { return d3_geo_mercatorProjection(d3_geo_mercator); }).raw = d3_geo_mercator; var d3_geo_orthographic = d3_geo_azimuthal(function() { return 1; }, Math.asin); (d3.geo.orthographic = function() { return d3_geo_projection(d3_geo_orthographic); }).raw = d3_geo_orthographic; var d3_geo_stereographic = d3_geo_azimuthal(function(cosλcosφ) { return 1 / (1 + cosλcosφ); }, function(ρ) { return 2 * Math.atan(ρ); }); (d3.geo.stereographic = function() { return d3_geo_projection(d3_geo_stereographic); }).raw = d3_geo_stereographic; function d3_geo_transverseMercator(λ, φ) { return [ Math.log(Math.tan(π / 4 + φ / 2)), -λ ]; } d3_geo_transverseMercator.invert = function(x, y) { return [ -y, 2 * Math.atan(Math.exp(x)) - halfπ ]; }; (d3.geo.transverseMercator = function() { var projection = d3_geo_mercatorProjection(d3_geo_transverseMercator), center = projection.center, rotate = projection.rotate; projection.center = function(_) { return _ ? center([ -_[1], _[0] ]) : (_ = center(), [ -_[1], _[0] ]); }; projection.rotate = function(_) { return _ ? rotate([ _[0], _[1], _.length > 2 ? 
_[2] + 90 : 90 ]) : (_ = rotate(), [ _[0], _[1], _[2] - 90 ]); }; return projection.rotate([ 0, 0 ]); }).raw = d3_geo_transverseMercator; d3.geom = {}; function d3_geom_pointX(d) { return d[0]; } function d3_geom_pointY(d) { return d[1]; } d3.geom.hull = function(vertices) { var x = d3_geom_pointX, y = d3_geom_pointY; if (arguments.length) return hull(vertices); function hull(data) { if (data.length < 3) return []; var fx = d3_functor(x), fy = d3_functor(y), i, n = data.length, points = [], flippedPoints = []; for (i = 0; i < n; i++) { points.push([ +fx.call(this, data[i], i), +fy.call(this, data[i], i), i ]); } points.sort(d3_geom_hullOrder); for (i = 0; i < n; i++) flippedPoints.push([ points[i][0], -points[i][1] ]); var upper = d3_geom_hullUpper(points), lower = d3_geom_hullUpper(flippedPoints); var skipLeft = lower[0] === upper[0], skipRight = lower[lower.length - 1] === upper[upper.length - 1], polygon = []; for (i = upper.length - 1; i >= 0; --i) polygon.push(data[points[upper[i]][2]]); for (i = +skipLeft; i < lower.length - skipRight; ++i) polygon.push(data[points[lower[i]][2]]); return polygon; } hull.x = function(_) { return arguments.length ? (x = _, hull) : x; }; hull.y = function(_) { return arguments.length ? 
(y = _, hull) : y; }; return hull; }; function d3_geom_hullUpper(points) { var n = points.length, hull = [ 0, 1 ], hs = 2; for (var i = 2; i < n; i++) { while (hs > 1 && d3_cross2d(points[hull[hs - 2]], points[hull[hs - 1]], points[i]) <= 0) --hs; hull[hs++] = i; } return hull.slice(0, hs); } function d3_geom_hullOrder(a, b) { return a[0] - b[0] || a[1] - b[1]; } d3.geom.polygon = function(coordinates) { d3_subclass(coordinates, d3_geom_polygonPrototype); return coordinates; }; var d3_geom_polygonPrototype = d3.geom.polygon.prototype = []; d3_geom_polygonPrototype.area = function() { var i = -1, n = this.length, a, b = this[n - 1], area = 0; while (++i < n) { a = b; b = this[i]; area += a[1] * b[0] - a[0] * b[1]; } return area * .5; }; d3_geom_polygonPrototype.centroid = function(k) { var i = -1, n = this.length, x = 0, y = 0, a, b = this[n - 1], c; if (!arguments.length) k = -1 / (6 * this.area()); while (++i < n) { a = b; b = this[i]; c = a[0] * b[1] - b[0] * a[1]; x += (a[0] + b[0]) * c; y += (a[1] + b[1]) * c; } return [ x * k, y * k ]; }; d3_geom_polygonPrototype.clip = function(subject) { var input, closed = d3_geom_polygonClosed(subject), i = -1, n = this.length - d3_geom_polygonClosed(this), j, m, a = this[n - 1], b, c, d; while (++i < n) { input = subject.slice(); subject.length = 0; b = this[i]; c = input[(m = input.length - closed) - 1]; j = -1; while (++j < m) { d = input[j]; if (d3_geom_polygonInside(d, a, b)) { if (!d3_geom_polygonInside(c, a, b)) { subject.push(d3_geom_polygonIntersect(c, d, a, b)); } subject.push(d); } else if (d3_geom_polygonInside(c, a, b)) { subject.push(d3_geom_polygonIntersect(c, d, a, b)); } c = d; } if (closed) subject.push(subject[0]); a = b; } return subject; }; function d3_geom_polygonInside(p, a, b) { return (b[0] - a[0]) * (p[1] - a[1]) < (b[1] - a[1]) * (p[0] - a[0]); } function d3_geom_polygonIntersect(c, d, a, b) { var x1 = c[0], x3 = a[0], x21 = d[0] - x1, x43 = b[0] - x3, y1 = c[1], y3 = a[1], y21 = d[1] - y1, y43 = 
b[1] - y3, ua = (x43 * (y1 - y3) - y43 * (x1 - x3)) / (y43 * x21 - x43 * y21); return [ x1 + ua * x21, y1 + ua * y21 ]; } function d3_geom_polygonClosed(coordinates) { var a = coordinates[0], b = coordinates[coordinates.length - 1]; return !(a[0] - b[0] || a[1] - b[1]); } var d3_geom_voronoiEdges, d3_geom_voronoiCells, d3_geom_voronoiBeaches, d3_geom_voronoiBeachPool = [], d3_geom_voronoiFirstCircle, d3_geom_voronoiCircles, d3_geom_voronoiCirclePool = []; function d3_geom_voronoiBeach() { d3_geom_voronoiRedBlackNode(this); this.edge = this.site = this.circle = null; } function d3_geom_voronoiCreateBeach(site) { var beach = d3_geom_voronoiBeachPool.pop() || new d3_geom_voronoiBeach(); beach.site = site; return beach; } function d3_geom_voronoiDetachBeach(beach) { d3_geom_voronoiDetachCircle(beach); d3_geom_voronoiBeaches.remove(beach); d3_geom_voronoiBeachPool.push(beach); d3_geom_voronoiRedBlackNode(beach); } function d3_geom_voronoiRemoveBeach(beach) { var circle = beach.circle, x = circle.x, y = circle.cy, vertex = { x: x, y: y }, previous = beach.P, next = beach.N, disappearing = [ beach ]; d3_geom_voronoiDetachBeach(beach); var lArc = previous; while (lArc.circle && abs(x - lArc.circle.x) < ε && abs(y - lArc.circle.cy) < ε) { previous = lArc.P; disappearing.unshift(lArc); d3_geom_voronoiDetachBeach(lArc); lArc = previous; } disappearing.unshift(lArc); d3_geom_voronoiDetachCircle(lArc); var rArc = next; while (rArc.circle && abs(x - rArc.circle.x) < ε && abs(y - rArc.circle.cy) < ε) { next = rArc.N; disappearing.push(rArc); d3_geom_voronoiDetachBeach(rArc); rArc = next; } disappearing.push(rArc); d3_geom_voronoiDetachCircle(rArc); var nArcs = disappearing.length, iArc; for (iArc = 1; iArc < nArcs; ++iArc) { rArc = disappearing[iArc]; lArc = disappearing[iArc - 1]; d3_geom_voronoiSetEdgeEnd(rArc.edge, lArc.site, rArc.site, vertex); } lArc = disappearing[0]; rArc = disappearing[nArcs - 1]; rArc.edge = d3_geom_voronoiCreateEdge(lArc.site, rArc.site, null, vertex); 
d3_geom_voronoiAttachCircle(lArc); d3_geom_voronoiAttachCircle(rArc); } function d3_geom_voronoiAddBeach(site) { var x = site.x, directrix = site.y, lArc, rArc, dxl, dxr, node = d3_geom_voronoiBeaches._; while (node) { dxl = d3_geom_voronoiLeftBreakPoint(node, directrix) - x; if (dxl > ε) node = node.L; else { dxr = x - d3_geom_voronoiRightBreakPoint(node, directrix); if (dxr > ε) { if (!node.R) { lArc = node; break; } node = node.R; } else { if (dxl > -ε) { lArc = node.P; rArc = node; } else if (dxr > -ε) { lArc = node; rArc = node.N; } else { lArc = rArc = node; } break; } } } var newArc = d3_geom_voronoiCreateBeach(site); d3_geom_voronoiBeaches.insert(lArc, newArc); if (!lArc && !rArc) return; if (lArc === rArc) { d3_geom_voronoiDetachCircle(lArc); rArc = d3_geom_voronoiCreateBeach(lArc.site); d3_geom_voronoiBeaches.insert(newArc, rArc); newArc.edge = rArc.edge = d3_geom_voronoiCreateEdge(lArc.site, newArc.site); d3_geom_voronoiAttachCircle(lArc); d3_geom_voronoiAttachCircle(rArc); return; } if (!rArc) { newArc.edge = d3_geom_voronoiCreateEdge(lArc.site, newArc.site); return; } d3_geom_voronoiDetachCircle(lArc); d3_geom_voronoiDetachCircle(rArc); var lSite = lArc.site, ax = lSite.x, ay = lSite.y, bx = site.x - ax, by = site.y - ay, rSite = rArc.site, cx = rSite.x - ax, cy = rSite.y - ay, d = 2 * (bx * cy - by * cx), hb = bx * bx + by * by, hc = cx * cx + cy * cy, vertex = { x: (cy * hb - by * hc) / d + ax, y: (bx * hc - cx * hb) / d + ay }; d3_geom_voronoiSetEdgeEnd(rArc.edge, lSite, rSite, vertex); newArc.edge = d3_geom_voronoiCreateEdge(lSite, site, null, vertex); rArc.edge = d3_geom_voronoiCreateEdge(site, rSite, null, vertex); d3_geom_voronoiAttachCircle(lArc); d3_geom_voronoiAttachCircle(rArc); } function d3_geom_voronoiLeftBreakPoint(arc, directrix) { var site = arc.site, rfocx = site.x, rfocy = site.y, pby2 = rfocy - directrix; if (!pby2) return rfocx; var lArc = arc.P; if (!lArc) return -Infinity; site = lArc.site; var lfocx = site.x, lfocy = site.y, 
plby2 = lfocy - directrix; if (!plby2) return lfocx; var hl = lfocx - rfocx, aby2 = 1 / pby2 - 1 / plby2, b = hl / plby2; if (aby2) return (-b + Math.sqrt(b * b - 2 * aby2 * (hl * hl / (-2 * plby2) - lfocy + plby2 / 2 + rfocy - pby2 / 2))) / aby2 + rfocx; return (rfocx + lfocx) / 2; } function d3_geom_voronoiRightBreakPoint(arc, directrix) { var rArc = arc.N; if (rArc) return d3_geom_voronoiLeftBreakPoint(rArc, directrix); var site = arc.site; return site.y === directrix ? site.x : Infinity; } function d3_geom_voronoiCell(site) { this.site = site; this.edges = []; } d3_geom_voronoiCell.prototype.prepare = function() { var halfEdges = this.edges, iHalfEdge = halfEdges.length, edge; while (iHalfEdge--) { edge = halfEdges[iHalfEdge].edge; if (!edge.b || !edge.a) halfEdges.splice(iHalfEdge, 1); } halfEdges.sort(d3_geom_voronoiHalfEdgeOrder); return halfEdges.length; }; function d3_geom_voronoiCloseCells(extent) { var x0 = extent[0][0], x1 = extent[1][0], y0 = extent[0][1], y1 = extent[1][1], x2, y2, x3, y3, cells = d3_geom_voronoiCells, iCell = cells.length, cell, iHalfEdge, halfEdges, nHalfEdges, start, end; while (iCell--) { cell = cells[iCell]; if (!cell || !cell.prepare()) continue; halfEdges = cell.edges; nHalfEdges = halfEdges.length; iHalfEdge = 0; while (iHalfEdge < nHalfEdges) { end = halfEdges[iHalfEdge].end(), x3 = end.x, y3 = end.y; start = halfEdges[++iHalfEdge % nHalfEdges].start(), x2 = start.x, y2 = start.y; if (abs(x3 - x2) > ε || abs(y3 - y2) > ε) { halfEdges.splice(iHalfEdge, 0, new d3_geom_voronoiHalfEdge(d3_geom_voronoiCreateBorderEdge(cell.site, end, abs(x3 - x0) < ε && y1 - y3 > ε ? { x: x0, y: abs(x2 - x0) < ε ? y2 : y1 } : abs(y3 - y1) < ε && x1 - x3 > ε ? { x: abs(y2 - y1) < ε ? x2 : x1, y: y1 } : abs(x3 - x1) < ε && y3 - y0 > ε ? { x: x1, y: abs(x2 - x1) < ε ? y2 : y0 } : abs(y3 - y0) < ε && x3 - x0 > ε ? { x: abs(y2 - y0) < ε ? 
x2 : x0, y: y0 } : null), cell.site, null)); ++nHalfEdges; } } } } function d3_geom_voronoiHalfEdgeOrder(a, b) { return b.angle - a.angle; } function d3_geom_voronoiCircle() { d3_geom_voronoiRedBlackNode(this); this.x = this.y = this.arc = this.site = this.cy = null; } function d3_geom_voronoiAttachCircle(arc) { var lArc = arc.P, rArc = arc.N; if (!lArc || !rArc) return; var lSite = lArc.site, cSite = arc.site, rSite = rArc.site; if (lSite === rSite) return; var bx = cSite.x, by = cSite.y, ax = lSite.x - bx, ay = lSite.y - by, cx = rSite.x - bx, cy = rSite.y - by; var d = 2 * (ax * cy - ay * cx); if (d >= -ε2) return; var ha = ax * ax + ay * ay, hc = cx * cx + cy * cy, x = (cy * ha - ay * hc) / d, y = (ax * hc - cx * ha) / d, cy = y + by; var circle = d3_geom_voronoiCirclePool.pop() || new d3_geom_voronoiCircle(); circle.arc = arc; circle.site = cSite; circle.x = x + bx; circle.y = cy + Math.sqrt(x * x + y * y); circle.cy = cy; arc.circle = circle; var before = null, node = d3_geom_voronoiCircles._; while (node) { if (circle.y < node.y || circle.y === node.y && circle.x <= node.x) { if (node.L) node = node.L; else { before = node.P; break; } } else { if (node.R) node = node.R; else { before = node; break; } } } d3_geom_voronoiCircles.insert(before, circle); if (!before) d3_geom_voronoiFirstCircle = circle; } function d3_geom_voronoiDetachCircle(arc) { var circle = arc.circle; if (circle) { if (!circle.P) d3_geom_voronoiFirstCircle = circle.N; d3_geom_voronoiCircles.remove(circle); d3_geom_voronoiCirclePool.push(circle); d3_geom_voronoiRedBlackNode(circle); arc.circle = null; } } function d3_geom_voronoiClipEdges(extent) { var edges = d3_geom_voronoiEdges, clip = d3_geom_clipLine(extent[0][0], extent[0][1], extent[1][0], extent[1][1]), i = edges.length, e; while (i--) { e = edges[i]; if (!d3_geom_voronoiConnectEdge(e, extent) || !clip(e) || abs(e.a.x - e.b.x) < ε && abs(e.a.y - e.b.y) < ε) { e.a = e.b = null; edges.splice(i, 1); } } } function 
d3_geom_voronoiConnectEdge(edge, extent) { var vb = edge.b; if (vb) return true; var va = edge.a, x0 = extent[0][0], x1 = extent[1][0], y0 = extent[0][1], y1 = extent[1][1], lSite = edge.l, rSite = edge.r, lx = lSite.x, ly = lSite.y, rx = rSite.x, ry = rSite.y, fx = (lx + rx) / 2, fy = (ly + ry) / 2, fm, fb; if (ry === ly) { if (fx < x0 || fx >= x1) return; if (lx > rx) { if (!va) va = { x: fx, y: y0 }; else if (va.y >= y1) return; vb = { x: fx, y: y1 }; } else { if (!va) va = { x: fx, y: y1 }; else if (va.y < y0) return; vb = { x: fx, y: y0 }; } } else { fm = (lx - rx) / (ry - ly); fb = fy - fm * fx; if (fm < -1 || fm > 1) { if (lx > rx) { if (!va) va = { x: (y0 - fb) / fm, y: y0 }; else if (va.y >= y1) return; vb = { x: (y1 - fb) / fm, y: y1 }; } else { if (!va) va = { x: (y1 - fb) / fm, y: y1 }; else if (va.y < y0) return; vb = { x: (y0 - fb) / fm, y: y0 }; } } else { if (ly < ry) { if (!va) va = { x: x0, y: fm * x0 + fb }; else if (va.x >= x1) return; vb = { x: x1, y: fm * x1 + fb }; } else { if (!va) va = { x: x1, y: fm * x1 + fb }; else if (va.x < x0) return; vb = { x: x0, y: fm * x0 + fb }; } } } edge.a = va; edge.b = vb; return true; } function d3_geom_voronoiEdge(lSite, rSite) { this.l = lSite; this.r = rSite; this.a = this.b = null; } function d3_geom_voronoiCreateEdge(lSite, rSite, va, vb) { var edge = new d3_geom_voronoiEdge(lSite, rSite); d3_geom_voronoiEdges.push(edge); if (va) d3_geom_voronoiSetEdgeEnd(edge, lSite, rSite, va); if (vb) d3_geom_voronoiSetEdgeEnd(edge, rSite, lSite, vb); d3_geom_voronoiCells[lSite.i].edges.push(new d3_geom_voronoiHalfEdge(edge, lSite, rSite)); d3_geom_voronoiCells[rSite.i].edges.push(new d3_geom_voronoiHalfEdge(edge, rSite, lSite)); return edge; } function d3_geom_voronoiCreateBorderEdge(lSite, va, vb) { var edge = new d3_geom_voronoiEdge(lSite, null); edge.a = va; edge.b = vb; d3_geom_voronoiEdges.push(edge); return edge; } function d3_geom_voronoiSetEdgeEnd(edge, lSite, rSite, vertex) { if (!edge.a && !edge.b) { 
edge.a = vertex; edge.l = lSite; edge.r = rSite; } else if (edge.l === rSite) { edge.b = vertex; } else { edge.a = vertex; } } function d3_geom_voronoiHalfEdge(edge, lSite, rSite) { var va = edge.a, vb = edge.b; this.edge = edge; this.site = lSite; this.angle = rSite ? Math.atan2(rSite.y - lSite.y, rSite.x - lSite.x) : edge.l === lSite ? Math.atan2(vb.x - va.x, va.y - vb.y) : Math.atan2(va.x - vb.x, vb.y - va.y); } d3_geom_voronoiHalfEdge.prototype = { start: function() { return this.edge.l === this.site ? this.edge.a : this.edge.b; }, end: function() { return this.edge.l === this.site ? this.edge.b : this.edge.a; } }; function d3_geom_voronoiRedBlackTree() { this._ = null; } function d3_geom_voronoiRedBlackNode(node) { node.U = node.C = node.L = node.R = node.P = node.N = null; } d3_geom_voronoiRedBlackTree.prototype = { insert: function(after, node) { var parent, grandpa, uncle; if (after) { node.P = after; node.N = after.N; if (after.N) after.N.P = node; after.N = node; if (after.R) { after = after.R; while (after.L) after = after.L; after.L = node; } else { after.R = node; } parent = after; } else if (this._) { after = d3_geom_voronoiRedBlackFirst(this._); node.P = null; node.N = after; after.P = after.L = node; parent = after; } else { node.P = node.N = null; this._ = node; parent = null; } node.L = node.R = null; node.U = parent; node.C = true; after = node; while (parent && parent.C) { grandpa = parent.U; if (parent === grandpa.L) { uncle = grandpa.R; if (uncle && uncle.C) { parent.C = uncle.C = false; grandpa.C = true; after = grandpa; } else { if (after === parent.R) { d3_geom_voronoiRedBlackRotateLeft(this, parent); after = parent; parent = after.U; } parent.C = false; grandpa.C = true; d3_geom_voronoiRedBlackRotateRight(this, grandpa); } } else { uncle = grandpa.L; if (uncle && uncle.C) { parent.C = uncle.C = false; grandpa.C = true; after = grandpa; } else { if (after === parent.L) { d3_geom_voronoiRedBlackRotateRight(this, parent); after = parent; 
parent = after.U; } parent.C = false; grandpa.C = true; d3_geom_voronoiRedBlackRotateLeft(this, grandpa); } } parent = after.U; } this._.C = false; }, remove: function(node) { if (node.N) node.N.P = node.P; if (node.P) node.P.N = node.N; node.N = node.P = null; var parent = node.U, sibling, left = node.L, right = node.R, next, red; if (!left) next = right; else if (!right) next = left; else next = d3_geom_voronoiRedBlackFirst(right); if (parent) { if (parent.L === node) parent.L = next; else parent.R = next; } else { this._ = next; } if (left && right) { red = next.C; next.C = node.C; next.L = left; left.U = next; if (next !== right) { parent = next.U; next.U = node.U; node = next.R; parent.L = node; next.R = right; right.U = next; } else { next.U = parent; parent = next; node = next.R; } } else { red = node.C; node = next; } if (node) node.U = parent; if (red) return; if (node && node.C) { node.C = false; return; } do { if (node === this._) break; if (node === parent.L) { sibling = parent.R; if (sibling.C) { sibling.C = false; parent.C = true; d3_geom_voronoiRedBlackRotateLeft(this, parent); sibling = parent.R; } if (sibling.L && sibling.L.C || sibling.R && sibling.R.C) { if (!sibling.R || !sibling.R.C) { sibling.L.C = false; sibling.C = true; d3_geom_voronoiRedBlackRotateRight(this, sibling); sibling = parent.R; } sibling.C = parent.C; parent.C = sibling.R.C = false; d3_geom_voronoiRedBlackRotateLeft(this, parent); node = this._; break; } } else { sibling = parent.L; if (sibling.C) { sibling.C = false; parent.C = true; d3_geom_voronoiRedBlackRotateRight(this, parent); sibling = parent.L; } if (sibling.L && sibling.L.C || sibling.R && sibling.R.C) { if (!sibling.L || !sibling.L.C) { sibling.R.C = false; sibling.C = true; d3_geom_voronoiRedBlackRotateLeft(this, sibling); sibling = parent.L; } sibling.C = parent.C; parent.C = sibling.L.C = false; d3_geom_voronoiRedBlackRotateRight(this, parent); node = this._; break; } } sibling.C = true; node = parent; parent = 
parent.U; } while (!node.C); if (node) node.C = false; } }; function d3_geom_voronoiRedBlackRotateLeft(tree, node) { var p = node, q = node.R, parent = p.U; if (parent) { if (parent.L === p) parent.L = q; else parent.R = q; } else { tree._ = q; } q.U = parent; p.U = q; p.R = q.L; if (p.R) p.R.U = p; q.L = p; } function d3_geom_voronoiRedBlackRotateRight(tree, node) { var p = node, q = node.L, parent = p.U; if (parent) { if (parent.L === p) parent.L = q; else parent.R = q; } else { tree._ = q; } q.U = parent; p.U = q; p.L = q.R; if (p.L) p.L.U = p; q.R = p; } function d3_geom_voronoiRedBlackFirst(node) { while (node.L) node = node.L; return node; } function d3_geom_voronoi(sites, bbox) { var site = sites.sort(d3_geom_voronoiVertexOrder).pop(), x0, y0, circle; d3_geom_voronoiEdges = []; d3_geom_voronoiCells = new Array(sites.length); d3_geom_voronoiBeaches = new d3_geom_voronoiRedBlackTree(); d3_geom_voronoiCircles = new d3_geom_voronoiRedBlackTree(); while (true) { circle = d3_geom_voronoiFirstCircle; if (site && (!circle || site.y < circle.y || site.y === circle.y && site.x < circle.x)) { if (site.x !== x0 || site.y !== y0) { d3_geom_voronoiCells[site.i] = new d3_geom_voronoiCell(site); d3_geom_voronoiAddBeach(site); x0 = site.x, y0 = site.y; } site = sites.pop(); } else if (circle) { d3_geom_voronoiRemoveBeach(circle.arc); } else { break; } } if (bbox) d3_geom_voronoiClipEdges(bbox), d3_geom_voronoiCloseCells(bbox); var diagram = { cells: d3_geom_voronoiCells, edges: d3_geom_voronoiEdges }; d3_geom_voronoiBeaches = d3_geom_voronoiCircles = d3_geom_voronoiEdges = d3_geom_voronoiCells = null; return diagram; } function d3_geom_voronoiVertexOrder(a, b) { return b.y - a.y || b.x - a.x; } d3.geom.voronoi = function(points) { var x = d3_geom_pointX, y = d3_geom_pointY, fx = x, fy = y, clipExtent = d3_geom_voronoiClipExtent; if (points) return voronoi(points); function voronoi(data) { var polygons = new Array(data.length), x0 = clipExtent[0][0], y0 = clipExtent[0][1], x1 
= clipExtent[1][0], y1 = clipExtent[1][1]; d3_geom_voronoi(sites(data), clipExtent).cells.forEach(function(cell, i) { var edges = cell.edges, site = cell.site, polygon = polygons[i] = edges.length ? edges.map(function(e) { var s = e.start(); return [ s.x, s.y ]; }) : site.x >= x0 && site.x <= x1 && site.y >= y0 && site.y <= y1 ? [ [ x0, y1 ], [ x1, y1 ], [ x1, y0 ], [ x0, y0 ] ] : []; polygon.point = data[i]; }); return polygons; } function sites(data) { return data.map(function(d, i) { return { x: Math.round(fx(d, i) / ε) * ε, y: Math.round(fy(d, i) / ε) * ε, i: i }; }); } voronoi.links = function(data) { return d3_geom_voronoi(sites(data)).edges.filter(function(edge) { return edge.l && edge.r; }).map(function(edge) { return { source: data[edge.l.i], target: data[edge.r.i] }; }); }; voronoi.triangles = function(data) { var triangles = []; d3_geom_voronoi(sites(data)).cells.forEach(function(cell, i) { var site = cell.site, edges = cell.edges.sort(d3_geom_voronoiHalfEdgeOrder), j = -1, m = edges.length, e0, s0, e1 = edges[m - 1].edge, s1 = e1.l === site ? e1.r : e1.l; while (++j < m) { e0 = e1; s0 = s1; e1 = edges[j].edge; s1 = e1.l === site ? e1.r : e1.l; if (i < s0.i && i < s1.i && d3_geom_voronoiTriangleArea(site, s0, s1) < 0) { triangles.push([ data[i], data[s0.i], data[s1.i] ]); } } }); return triangles; }; voronoi.x = function(_) { return arguments.length ? (fx = d3_functor(x = _), voronoi) : x; }; voronoi.y = function(_) { return arguments.length ? (fy = d3_functor(y = _), voronoi) : y; }; voronoi.clipExtent = function(_) { if (!arguments.length) return clipExtent === d3_geom_voronoiClipExtent ? null : clipExtent; clipExtent = _ == null ? d3_geom_voronoiClipExtent : _; return voronoi; }; voronoi.size = function(_) { if (!arguments.length) return clipExtent === d3_geom_voronoiClipExtent ? 
null : clipExtent && clipExtent[1]; return voronoi.clipExtent(_ && [ [ 0, 0 ], _ ]); }; return voronoi; }; var d3_geom_voronoiClipExtent = [ [ -1e6, -1e6 ], [ 1e6, 1e6 ] ]; function d3_geom_voronoiTriangleArea(a, b, c) { return (a.x - c.x) * (b.y - a.y) - (a.x - b.x) * (c.y - a.y); } d3.geom.delaunay = function(vertices) { return d3.geom.voronoi().triangles(vertices); }; d3.geom.quadtree = function(points, x1, y1, x2, y2) { var x = d3_geom_pointX, y = d3_geom_pointY, compat; if (compat = arguments.length) { x = d3_geom_quadtreeCompatX; y = d3_geom_quadtreeCompatY; if (compat === 3) { y2 = y1; x2 = x1; y1 = x1 = 0; } return quadtree(points); } function quadtree(data) { var d, fx = d3_functor(x), fy = d3_functor(y), xs, ys, i, n, x1_, y1_, x2_, y2_; if (x1 != null) { x1_ = x1, y1_ = y1, x2_ = x2, y2_ = y2; } else { x2_ = y2_ = -(x1_ = y1_ = Infinity); xs = [], ys = []; n = data.length; if (compat) for (i = 0; i < n; ++i) { d = data[i]; if (d.x < x1_) x1_ = d.x; if (d.y < y1_) y1_ = d.y; if (d.x > x2_) x2_ = d.x; if (d.y > y2_) y2_ = d.y; xs.push(d.x); ys.push(d.y); } else for (i = 0; i < n; ++i) { var x_ = +fx(d = data[i], i), y_ = +fy(d, i); if (x_ < x1_) x1_ = x_; if (y_ < y1_) y1_ = y_; if (x_ > x2_) x2_ = x_; if (y_ > y2_) y2_ = y_; xs.push(x_); ys.push(y_); } } var dx = x2_ - x1_, dy = y2_ - y1_; if (dx > dy) y2_ = y1_ + dx; else x2_ = x1_ + dy; function insert(n, d, x, y, x1, y1, x2, y2) { if (isNaN(x) || isNaN(y)) return; if (n.leaf) { var nx = n.x, ny = n.y; if (nx != null) { if (abs(nx - x) + abs(ny - y) < .01) { insertChild(n, d, x, y, x1, y1, x2, y2); } else { var nPoint = n.point; n.x = n.y = n.point = null; insertChild(n, nPoint, nx, ny, x1, y1, x2, y2); insertChild(n, d, x, y, x1, y1, x2, y2); } } else { n.x = x, n.y = y, n.point = d; } } else { insertChild(n, d, x, y, x1, y1, x2, y2); } } function insertChild(n, d, x, y, x1, y1, x2, y2) { var sx = (x1 + x2) * .5, sy = (y1 + y2) * .5, right = x >= sx, bottom = y >= sy, i = (bottom << 1) + right; n.leaf 
= false; n = n.nodes[i] || (n.nodes[i] = d3_geom_quadtreeNode()); if (right) x1 = sx; else x2 = sx; if (bottom) y1 = sy; else y2 = sy; insert(n, d, x, y, x1, y1, x2, y2); } var root = d3_geom_quadtreeNode(); root.add = function(d) { insert(root, d, +fx(d, ++i), +fy(d, i), x1_, y1_, x2_, y2_); }; root.visit = function(f) { d3_geom_quadtreeVisit(f, root, x1_, y1_, x2_, y2_); }; i = -1; if (x1 == null) { while (++i < n) { insert(root, data[i], xs[i], ys[i], x1_, y1_, x2_, y2_); } --i; } else data.forEach(root.add); xs = ys = data = d = null; return root; } quadtree.x = function(_) { return arguments.length ? (x = _, quadtree) : x; }; quadtree.y = function(_) { return arguments.length ? (y = _, quadtree) : y; }; quadtree.extent = function(_) { if (!arguments.length) return x1 == null ? null : [ [ x1, y1 ], [ x2, y2 ] ]; if (_ == null) x1 = y1 = x2 = y2 = null; else x1 = +_[0][0], y1 = +_[0][1], x2 = +_[1][0], y2 = +_[1][1]; return quadtree; }; quadtree.size = function(_) { if (!arguments.length) return x1 == null ? 
null : [ x2 - x1, y2 - y1 ]; if (_ == null) x1 = y1 = x2 = y2 = null; else x1 = y1 = 0, x2 = +_[0], y2 = +_[1]; return quadtree; }; return quadtree; }; function d3_geom_quadtreeCompatX(d) { return d.x; } function d3_geom_quadtreeCompatY(d) { return d.y; } function d3_geom_quadtreeNode() { return { leaf: true, nodes: [], point: null, x: null, y: null }; } function d3_geom_quadtreeVisit(f, node, x1, y1, x2, y2) { if (!f(node, x1, y1, x2, y2)) { var sx = (x1 + x2) * .5, sy = (y1 + y2) * .5, children = node.nodes; if (children[0]) d3_geom_quadtreeVisit(f, children[0], x1, y1, sx, sy); if (children[1]) d3_geom_quadtreeVisit(f, children[1], sx, y1, x2, sy); if (children[2]) d3_geom_quadtreeVisit(f, children[2], x1, sy, sx, y2); if (children[3]) d3_geom_quadtreeVisit(f, children[3], sx, sy, x2, y2); } } d3.interpolateRgb = d3_interpolateRgb; function d3_interpolateRgb(a, b) { a = d3.rgb(a); b = d3.rgb(b); var ar = a.r, ag = a.g, ab = a.b, br = b.r - ar, bg = b.g - ag, bb = b.b - ab; return function(t) { return "#" + d3_rgb_hex(Math.round(ar + br * t)) + d3_rgb_hex(Math.round(ag + bg * t)) + d3_rgb_hex(Math.round(ab + bb * t)); }; } d3.interpolateObject = d3_interpolateObject; function d3_interpolateObject(a, b) { var i = {}, c = {}, k; for (k in a) { if (k in b) { i[k] = d3_interpolate(a[k], b[k]); } else { c[k] = a[k]; } } for (k in b) { if (!(k in a)) { c[k] = b[k]; } } return function(t) { for (k in i) c[k] = i[k](t); return c; }; } d3.interpolateNumber = d3_interpolateNumber; function d3_interpolateNumber(a, b) { b -= a = +a; return function(t) { return a + b * t; }; } d3.interpolateString = d3_interpolateString; function d3_interpolateString(a, b) { var bi = d3_interpolate_numberA.lastIndex = d3_interpolate_numberB.lastIndex = 0, am, bm, bs, i = -1, s = [], q = []; a = a + "", b = b + ""; while ((am = d3_interpolate_numberA.exec(a)) && (bm = d3_interpolate_numberB.exec(b))) { if ((bs = bm.index) > bi) { bs = b.substring(bi, bs); if (s[i]) s[i] += bs; else s[++i] = 
bs; } if ((am = am[0]) === (bm = bm[0])) { if (s[i]) s[i] += bm; else s[++i] = bm; } else { s[++i] = null; q.push({ i: i, x: d3_interpolateNumber(am, bm) }); } bi = d3_interpolate_numberB.lastIndex; } if (bi < b.length) { bs = b.substring(bi); if (s[i]) s[i] += bs; else s[++i] = bs; } return s.length < 2 ? q[0] ? (b = q[0].x, function(t) { return b(t) + ""; }) : function() { return b; } : (b = q.length, function(t) { for (var i = 0, o; i < b; ++i) s[(o = q[i]).i] = o.x(t); return s.join(""); }); } var d3_interpolate_numberA = /[-+]?(?:\d+\.?\d*|\.?\d+)(?:[eE][-+]?\d+)?/g, d3_interpolate_numberB = new RegExp(d3_interpolate_numberA.source, "g"); d3.interpolate = d3_interpolate; function d3_interpolate(a, b) { var i = d3.interpolators.length, f; while (--i >= 0 && !(f = d3.interpolators[i](a, b))) ; return f; } d3.interpolators = [ function(a, b) { var t = typeof b; return (t === "string" ? d3_rgb_names.has(b) || /^(#|rgb\(|hsl\()/.test(b) ? d3_interpolateRgb : d3_interpolateString : b instanceof d3_Color ? d3_interpolateRgb : Array.isArray(b) ? d3_interpolateArray : t === "object" && isNaN(b) ? 
d3_interpolateObject : d3_interpolateNumber)(a, b); } ]; d3.interpolateArray = d3_interpolateArray; function d3_interpolateArray(a, b) { var x = [], c = [], na = a.length, nb = b.length, n0 = Math.min(a.length, b.length), i; for (i = 0; i < n0; ++i) x.push(d3_interpolate(a[i], b[i])); for (;i < na; ++i) c[i] = a[i]; for (;i < nb; ++i) c[i] = b[i]; return function(t) { for (i = 0; i < n0; ++i) c[i] = x[i](t); return c; }; } var d3_ease_default = function() { return d3_identity; }; var d3_ease = d3.map({ linear: d3_ease_default, poly: d3_ease_poly, quad: function() { return d3_ease_quad; }, cubic: function() { return d3_ease_cubic; }, sin: function() { return d3_ease_sin; }, exp: function() { return d3_ease_exp; }, circle: function() { return d3_ease_circle; }, elastic: d3_ease_elastic, back: d3_ease_back, bounce: function() { return d3_ease_bounce; } }); var d3_ease_mode = d3.map({ "in": d3_identity, out: d3_ease_reverse, "in-out": d3_ease_reflect, "out-in": function(f) { return d3_ease_reflect(d3_ease_reverse(f)); } }); d3.ease = function(name) { var i = name.indexOf("-"), t = i >= 0 ? name.substring(0, i) : name, m = i >= 0 ? name.substring(i + 1) : "in"; t = d3_ease.get(t) || d3_ease_default; m = d3_ease_mode.get(m) || d3_identity; return d3_ease_clamp(m(t.apply(null, d3_arraySlice.call(arguments, 1)))); }; function d3_ease_clamp(f) { return function(t) { return t <= 0 ? 0 : t >= 1 ? 1 : f(t); }; } function d3_ease_reverse(f) { return function(t) { return 1 - f(1 - t); }; } function d3_ease_reflect(f) { return function(t) { return .5 * (t < .5 ? f(2 * t) : 2 - f(2 - 2 * t)); }; } function d3_ease_quad(t) { return t * t; } function d3_ease_cubic(t) { return t * t * t; } function d3_ease_cubicInOut(t) { if (t <= 0) return 0; if (t >= 1) return 1; var t2 = t * t, t3 = t2 * t; return 4 * (t < .5 ? 
t3 : 3 * (t - t2) + t3 - .75); } function d3_ease_poly(e) { return function(t) { return Math.pow(t, e); }; } function d3_ease_sin(t) { return 1 - Math.cos(t * halfπ); } function d3_ease_exp(t) { return Math.pow(2, 10 * (t - 1)); } function d3_ease_circle(t) { return 1 - Math.sqrt(1 - t * t); } function d3_ease_elastic(a, p) { var s; if (arguments.length < 2) p = .45; if (arguments.length) s = p / τ * Math.asin(1 / a); else a = 1, s = p / 4; return function(t) { return 1 + a * Math.pow(2, -10 * t) * Math.sin((t - s) * τ / p); }; } function d3_ease_back(s) { if (!s) s = 1.70158; return function(t) { return t * t * ((s + 1) * t - s); }; } function d3_ease_bounce(t) { return t < 1 / 2.75 ? 7.5625 * t * t : t < 2 / 2.75 ? 7.5625 * (t -= 1.5 / 2.75) * t + .75 : t < 2.5 / 2.75 ? 7.5625 * (t -= 2.25 / 2.75) * t + .9375 : 7.5625 * (t -= 2.625 / 2.75) * t + .984375; } d3.interpolateHcl = d3_interpolateHcl; function d3_interpolateHcl(a, b) { a = d3.hcl(a); b = d3.hcl(b); var ah = a.h, ac = a.c, al = a.l, bh = b.h - ah, bc = b.c - ac, bl = b.l - al; if (isNaN(bc)) bc = 0, ac = isNaN(ac) ? b.c : ac; if (isNaN(bh)) bh = 0, ah = isNaN(ah) ? b.h : ah; else if (bh > 180) bh -= 360; else if (bh < -180) bh += 360; return function(t) { return d3_hcl_lab(ah + bh * t, ac + bc * t, al + bl * t) + ""; }; } d3.interpolateHsl = d3_interpolateHsl; function d3_interpolateHsl(a, b) { a = d3.hsl(a); b = d3.hsl(b); var ah = a.h, as = a.s, al = a.l, bh = b.h - ah, bs = b.s - as, bl = b.l - al; if (isNaN(bs)) bs = 0, as = isNaN(as) ? b.s : as; if (isNaN(bh)) bh = 0, ah = isNaN(ah) ? 
b.h : ah; else if (bh > 180) bh -= 360; else if (bh < -180) bh += 360; return function(t) { return d3_hsl_rgb(ah + bh * t, as + bs * t, al + bl * t) + ""; }; } d3.interpolateLab = d3_interpolateLab; function d3_interpolateLab(a, b) { a = d3.lab(a); b = d3.lab(b); var al = a.l, aa = a.a, ab = a.b, bl = b.l - al, ba = b.a - aa, bb = b.b - ab; return function(t) { return d3_lab_rgb(al + bl * t, aa + ba * t, ab + bb * t) + ""; }; } d3.interpolateRound = d3_interpolateRound; function d3_interpolateRound(a, b) { b -= a; return function(t) { return Math.round(a + b * t); }; } d3.transform = function(string) { var g = d3_document.createElementNS(d3.ns.prefix.svg, "g"); return (d3.transform = function(string) { if (string != null) { g.setAttribute("transform", string); var t = g.transform.baseVal.consolidate(); } return new d3_transform(t ? t.matrix : d3_transformIdentity); })(string); }; function d3_transform(m) { var r0 = [ m.a, m.b ], r1 = [ m.c, m.d ], kx = d3_transformNormalize(r0), kz = d3_transformDot(r0, r1), ky = d3_transformNormalize(d3_transformCombine(r1, r0, -kz)) || 0; if (r0[0] * r1[1] < r1[0] * r0[1]) { r0[0] *= -1; r0[1] *= -1; kx *= -1; kz *= -1; } this.rotate = (kx ? Math.atan2(r0[1], r0[0]) : Math.atan2(-r1[0], r1[1])) * d3_degrees; this.translate = [ m.e, m.f ]; this.scale = [ kx, ky ]; this.skew = ky ? 
Math.atan2(kz, ky) * d3_degrees : 0; } d3_transform.prototype.toString = function() { return "translate(" + this.translate + ")rotate(" + this.rotate + ")skewX(" + this.skew + ")scale(" + this.scale + ")"; }; function d3_transformDot(a, b) { return a[0] * b[0] + a[1] * b[1]; } function d3_transformNormalize(a) { var k = Math.sqrt(d3_transformDot(a, a)); if (k) { a[0] /= k; a[1] /= k; } return k; } function d3_transformCombine(a, b, k) { a[0] += k * b[0]; a[1] += k * b[1]; return a; } var d3_transformIdentity = { a: 1, b: 0, c: 0, d: 1, e: 0, f: 0 }; d3.interpolateTransform = d3_interpolateTransform; function d3_interpolateTransform(a, b) { var s = [], q = [], n, A = d3.transform(a), B = d3.transform(b), ta = A.translate, tb = B.translate, ra = A.rotate, rb = B.rotate, wa = A.skew, wb = B.skew, ka = A.scale, kb = B.scale; if (ta[0] != tb[0] || ta[1] != tb[1]) { s.push("translate(", null, ",", null, ")"); q.push({ i: 1, x: d3_interpolateNumber(ta[0], tb[0]) }, { i: 3, x: d3_interpolateNumber(ta[1], tb[1]) }); } else if (tb[0] || tb[1]) { s.push("translate(" + tb + ")"); } else { s.push(""); } if (ra != rb) { if (ra - rb > 180) rb += 360; else if (rb - ra > 180) ra += 360; q.push({ i: s.push(s.pop() + "rotate(", null, ")") - 2, x: d3_interpolateNumber(ra, rb) }); } else if (rb) { s.push(s.pop() + "rotate(" + rb + ")"); } if (wa != wb) { q.push({ i: s.push(s.pop() + "skewX(", null, ")") - 2, x: d3_interpolateNumber(wa, wb) }); } else if (wb) { s.push(s.pop() + "skewX(" + wb + ")"); } if (ka[0] != kb[0] || ka[1] != kb[1]) { n = s.push(s.pop() + "scale(", null, ",", null, ")"); q.push({ i: n - 4, x: d3_interpolateNumber(ka[0], kb[0]) }, { i: n - 2, x: d3_interpolateNumber(ka[1], kb[1]) }); } else if (kb[0] != 1 || kb[1] != 1) { s.push(s.pop() + "scale(" + kb + ")"); } n = q.length; return function(t) { var i = -1, o; while (++i < n) s[(o = q[i]).i] = o.x(t); return s.join(""); }; } function d3_uninterpolateNumber(a, b) { b = b - (a = +a) ? 
1 / (b - a) : 0; return function(x) { return (x - a) * b; }; } function d3_uninterpolateClamp(a, b) { b = b - (a = +a) ? 1 / (b - a) : 0; return function(x) { return Math.max(0, Math.min(1, (x - a) * b)); }; } d3.layout = {}; d3.layout.bundle = function() { return function(links) { var paths = [], i = -1, n = links.length; while (++i < n) paths.push(d3_layout_bundlePath(links[i])); return paths; }; }; function d3_layout_bundlePath(link) { var start = link.source, end = link.target, lca = d3_layout_bundleLeastCommonAncestor(start, end), points = [ start ]; while (start !== lca) { start = start.parent; points.push(start); } var k = points.length; while (end !== lca) { points.splice(k, 0, end); end = end.parent; } return points; } function d3_layout_bundleAncestors(node) { var ancestors = [], parent = node.parent; while (parent != null) { ancestors.push(node); node = parent; parent = parent.parent; } ancestors.push(node); return ancestors; } function d3_layout_bundleLeastCommonAncestor(a, b) { if (a === b) return a; var aNodes = d3_layout_bundleAncestors(a), bNodes = d3_layout_bundleAncestors(b), aNode = aNodes.pop(), bNode = bNodes.pop(), sharedNode = null; while (aNode === bNode) { sharedNode = aNode; aNode = aNodes.pop(); bNode = bNodes.pop(); } return sharedNode; } d3.layout.chord = function() { var chord = {}, chords, groups, matrix, n, padding = 0, sortGroups, sortSubgroups, sortChords; function relayout() { var subgroups = {}, groupSums = [], groupIndex = d3.range(n), subgroupIndex = [], k, x, x0, i, j; chords = []; groups = []; k = 0, i = -1; while (++i < n) { x = 0, j = -1; while (++j < n) { x += matrix[i][j]; } groupSums.push(x); subgroupIndex.push(d3.range(n)); k += x; } if (sortGroups) { groupIndex.sort(function(a, b) { return sortGroups(groupSums[a], groupSums[b]); }); } if (sortSubgroups) { subgroupIndex.forEach(function(d, i) { d.sort(function(a, b) { return sortSubgroups(matrix[i][a], matrix[i][b]); }); }); } k = (τ - padding * n) / k; x = 0, i = -1; 
while (++i < n) { x0 = x, j = -1; while (++j < n) { var di = groupIndex[i], dj = subgroupIndex[di][j], v = matrix[di][dj], a0 = x, a1 = x += v * k; subgroups[di + "-" + dj] = { index: di, subindex: dj, startAngle: a0, endAngle: a1, value: v }; } groups[di] = { index: di, startAngle: x0, endAngle: x, value: (x - x0) / k }; x += padding; } i = -1; while (++i < n) { j = i - 1; while (++j < n) { var source = subgroups[i + "-" + j], target = subgroups[j + "-" + i]; if (source.value || target.value) { chords.push(source.value < target.value ? { source: target, target: source } : { source: source, target: target }); } } } if (sortChords) resort(); } function resort() { chords.sort(function(a, b) { return sortChords((a.source.value + a.target.value) / 2, (b.source.value + b.target.value) / 2); }); } chord.matrix = function(x) { if (!arguments.length) return matrix; n = (matrix = x) && matrix.length; chords = groups = null; return chord; }; chord.padding = function(x) { if (!arguments.length) return padding; padding = x; chords = groups = null; return chord; }; chord.sortGroups = function(x) { if (!arguments.length) return sortGroups; sortGroups = x; chords = groups = null; return chord; }; chord.sortSubgroups = function(x) { if (!arguments.length) return sortSubgroups; sortSubgroups = x; chords = null; return chord; }; chord.sortChords = function(x) { if (!arguments.length) return sortChords; sortChords = x; if (chords) resort(); return chord; }; chord.chords = function() { if (!chords) relayout(); return chords; }; chord.groups = function() { if (!groups) relayout(); return groups; }; return chord; }; d3.layout.force = function() { var force = {}, event = d3.dispatch("start", "tick", "end"), size = [ 1, 1 ], drag, alpha, friction = .9, linkDistance = d3_layout_forceLinkDistance, linkStrength = d3_layout_forceLinkStrength, charge = -30, chargeDistance2 = d3_layout_forceChargeDistance2, gravity = .1, theta2 = .64, nodes = [], links = [], distances, strengths, charges; 
function repulse(node) { return function(quad, x1, _, x2) { if (quad.point !== node) { var dx = quad.cx - node.x, dy = quad.cy - node.y, dw = x2 - x1, dn = dx * dx + dy * dy; if (dw * dw / theta2 < dn) { if (dn < chargeDistance2) { var k = quad.charge / dn; node.px -= dx * k; node.py -= dy * k; } return true; } if (quad.point && dn && dn < chargeDistance2) { var k = quad.pointCharge / dn; node.px -= dx * k; node.py -= dy * k; } } return !quad.charge; }; } force.tick = function() { if ((alpha *= .99) < .005) { event.end({ type: "end", alpha: alpha = 0 }); return true; } var n = nodes.length, m = links.length, q, i, o, s, t, l, k, x, y; for (i = 0; i < m; ++i) { o = links[i]; s = o.source; t = o.target; x = t.x - s.x; y = t.y - s.y; if (l = x * x + y * y) { l = alpha * strengths[i] * ((l = Math.sqrt(l)) - distances[i]) / l; x *= l; y *= l; t.x -= x * (k = s.weight / (t.weight + s.weight)); t.y -= y * k; s.x += x * (k = 1 - k); s.y += y * k; } } if (k = alpha * gravity) { x = size[0] / 2; y = size[1] / 2; i = -1; if (k) while (++i < n) { o = nodes[i]; o.x += (x - o.x) * k; o.y += (y - o.y) * k; } } if (charge) { d3_layout_forceAccumulate(q = d3.geom.quadtree(nodes), alpha, charges); i = -1; while (++i < n) { if (!(o = nodes[i]).fixed) { q.visit(repulse(o)); } } } i = -1; while (++i < n) { o = nodes[i]; if (o.fixed) { o.x = o.px; o.y = o.py; } else { o.x -= (o.px - (o.px = o.x)) * friction; o.y -= (o.py - (o.py = o.y)) * friction; } } event.tick({ type: "tick", alpha: alpha }); }; force.nodes = function(x) { if (!arguments.length) return nodes; nodes = x; return force; }; force.links = function(x) { if (!arguments.length) return links; links = x; return force; }; force.size = function(x) { if (!arguments.length) return size; size = x; return force; }; force.linkDistance = function(x) { if (!arguments.length) return linkDistance; linkDistance = typeof x === "function" ? 
x : +x; return force; }; force.distance = force.linkDistance; force.linkStrength = function(x) { if (!arguments.length) return linkStrength; linkStrength = typeof x === "function" ? x : +x; return force; }; force.friction = function(x) { if (!arguments.length) return friction; friction = +x; return force; }; force.charge = function(x) { if (!arguments.length) return charge; charge = typeof x === "function" ? x : +x; return force; }; force.chargeDistance = function(x) { if (!arguments.length) return Math.sqrt(chargeDistance2); chargeDistance2 = x * x; return force; }; force.gravity = function(x) { if (!arguments.length) return gravity; gravity = +x; return force; }; force.theta = function(x) { if (!arguments.length) return Math.sqrt(theta2); theta2 = x * x; return force; }; force.alpha = function(x) { if (!arguments.length) return alpha; x = +x; if (alpha) { if (x > 0) alpha = x; else alpha = 0; } else if (x > 0) { event.start({ type: "start", alpha: alpha = x }); d3.timer(force.tick); } return force; }; force.start = function() { var i, n = nodes.length, m = links.length, w = size[0], h = size[1], neighbors, o; for (i = 0; i < n; ++i) { (o = nodes[i]).index = i; o.weight = 0; } for (i = 0; i < m; ++i) { o = links[i]; if (typeof o.source == "number") o.source = nodes[o.source]; if (typeof o.target == "number") o.target = nodes[o.target]; ++o.source.weight; ++o.target.weight; } for (i = 0; i < n; ++i) { o = nodes[i]; if (isNaN(o.x)) o.x = position("x", w); if (isNaN(o.y)) o.y = position("y", h); if (isNaN(o.px)) o.px = o.x; if (isNaN(o.py)) o.py = o.y; } distances = []; if (typeof linkDistance === "function") for (i = 0; i < m; ++i) distances[i] = +linkDistance.call(this, links[i], i); else for (i = 0; i < m; ++i) distances[i] = linkDistance; strengths = []; if (typeof linkStrength === "function") for (i = 0; i < m; ++i) strengths[i] = +linkStrength.call(this, links[i], i); else for (i = 0; i < m; ++i) strengths[i] = linkStrength; charges = []; if (typeof charge === 
"function") for (i = 0; i < n; ++i) charges[i] = +charge.call(this, nodes[i], i); else for (i = 0; i < n; ++i) charges[i] = charge; function position(dimension, size) { if (!neighbors) { neighbors = new Array(n); for (j = 0; j < n; ++j) { neighbors[j] = []; } for (j = 0; j < m; ++j) { var o = links[j]; neighbors[o.source.index].push(o.target); neighbors[o.target.index].push(o.source); } } var candidates = neighbors[i], j = -1, m = candidates.length, x; while (++j < m) if (!isNaN(x = candidates[j][dimension])) return x; return Math.random() * size; } return force.resume(); }; force.resume = function() { return force.alpha(.1); }; force.stop = function() { return force.alpha(0); }; force.drag = function() { if (!drag) drag = d3.behavior.drag().origin(d3_identity).on("dragstart.force", d3_layout_forceDragstart).on("drag.force", dragmove).on("dragend.force", d3_layout_forceDragend); if (!arguments.length) return drag; this.on("mouseover.force", d3_layout_forceMouseover).on("mouseout.force", d3_layout_forceMouseout).call(drag); }; function dragmove(d) { d.px = d3.event.x, d.py = d3.event.y; force.resume(); } return d3.rebind(force, event, "on"); }; function d3_layout_forceDragstart(d) { d.fixed |= 2; } function d3_layout_forceDragend(d) { d.fixed &= ~6; } function d3_layout_forceMouseover(d) { d.fixed |= 4; d.px = d.x, d.py = d.y; } function d3_layout_forceMouseout(d) { d.fixed &= ~4; } function d3_layout_forceAccumulate(quad, alpha, charges) { var cx = 0, cy = 0; quad.charge = 0; if (!quad.leaf) { var nodes = quad.nodes, n = nodes.length, i = -1, c; while (++i < n) { c = nodes[i]; if (c == null) continue; d3_layout_forceAccumulate(c, alpha, charges); quad.charge += c.charge; cx += c.charge * c.cx; cy += c.charge * c.cy; } } if (quad.point) { if (!quad.leaf) { quad.point.x += Math.random() - .5; quad.point.y += Math.random() - .5; } var k = alpha * charges[quad.point.index]; quad.charge += quad.pointCharge = k; cx += k * quad.point.x; cy += k * quad.point.y; } quad.cx = 
cx / quad.charge; quad.cy = cy / quad.charge; } var d3_layout_forceLinkDistance = 20, d3_layout_forceLinkStrength = 1, d3_layout_forceChargeDistance2 = Infinity; d3.layout.hierarchy = function() { var sort = d3_layout_hierarchySort, children = d3_layout_hierarchyChildren, value = d3_layout_hierarchyValue; function hierarchy(root) { var stack = [ root ], nodes = [], node; root.depth = 0; while ((node = stack.pop()) != null) { nodes.push(node); if ((childs = children.call(hierarchy, node, node.depth)) && (n = childs.length)) { var n, childs, child; while (--n >= 0) { stack.push(child = childs[n]); child.parent = node; child.depth = node.depth + 1; } if (value) node.value = 0; node.children = childs; } else { if (value) node.value = +value.call(hierarchy, node, node.depth) || 0; delete node.children; } } d3_layout_hierarchyVisitAfter(root, function(node) { var childs, parent; if (sort && (childs = node.children)) childs.sort(sort); if (value && (parent = node.parent)) parent.value += node.value; }); return nodes; } hierarchy.sort = function(x) { if (!arguments.length) return sort; sort = x; return hierarchy; }; hierarchy.children = function(x) { if (!arguments.length) return children; children = x; return hierarchy; }; hierarchy.value = function(x) { if (!arguments.length) return value; value = x; return hierarchy; }; hierarchy.revalue = function(root) { if (value) { d3_layout_hierarchyVisitBefore(root, function(node) { if (node.children) node.value = 0; }); d3_layout_hierarchyVisitAfter(root, function(node) { var parent; if (!node.children) node.value = +value.call(hierarchy, node, node.depth) || 0; if (parent = node.parent) parent.value += node.value; }); } return root; }; return hierarchy; }; function d3_layout_hierarchyRebind(object, hierarchy) { d3.rebind(object, hierarchy, "sort", "children", "value"); object.nodes = object; object.links = d3_layout_hierarchyLinks; return object; } function d3_layout_hierarchyVisitBefore(node, callback) { var nodes = [ node ]; 
while ((node = nodes.pop()) != null) { callback(node); if ((children = node.children) && (n = children.length)) { var n, children; while (--n >= 0) nodes.push(children[n]); } } } function d3_layout_hierarchyVisitAfter(node, callback) { var nodes = [ node ], nodes2 = []; while ((node = nodes.pop()) != null) { nodes2.push(node); if ((children = node.children) && (n = children.length)) { var i = -1, n, children; while (++i < n) nodes.push(children[i]); } } while ((node = nodes2.pop()) != null) { callback(node); } } function d3_layout_hierarchyChildren(d) { return d.children; } function d3_layout_hierarchyValue(d) { return d.value; } function d3_layout_hierarchySort(a, b) { return b.value - a.value; } function d3_layout_hierarchyLinks(nodes) { return d3.merge(nodes.map(function(parent) { return (parent.children || []).map(function(child) { return { source: parent, target: child }; }); })); } d3.layout.partition = function() { var hierarchy = d3.layout.hierarchy(), size = [ 1, 1 ]; function position(node, x, dx, dy) { var children = node.children; node.x = x; node.y = node.depth * dy; node.dx = dx; node.dy = dy; if (children && (n = children.length)) { var i = -1, n, c, d; dx = node.value ? 
dx / node.value : 0; while (++i < n) { position(c = children[i], x, d = c.value * dx, dy); x += d; } } } function depth(node) { var children = node.children, d = 0; if (children && (n = children.length)) { var i = -1, n; while (++i < n) d = Math.max(d, depth(children[i])); } return 1 + d; } function partition(d, i) { var nodes = hierarchy.call(this, d, i); position(nodes[0], 0, size[0], size[1] / depth(nodes[0])); return nodes; } partition.size = function(x) { if (!arguments.length) return size; size = x; return partition; }; return d3_layout_hierarchyRebind(partition, hierarchy); }; d3.layout.pie = function() { var value = Number, sort = d3_layout_pieSortByValue, startAngle = 0, endAngle = τ; function pie(data) { var values = data.map(function(d, i) { return +value.call(pie, d, i); }); var a = +(typeof startAngle === "function" ? startAngle.apply(this, arguments) : startAngle); var k = ((typeof endAngle === "function" ? endAngle.apply(this, arguments) : endAngle) - a) / d3.sum(values); var index = d3.range(data.length); if (sort != null) index.sort(sort === d3_layout_pieSortByValue ? 
function(i, j) { return values[j] - values[i]; } : function(i, j) { return sort(data[i], data[j]); }); var arcs = []; index.forEach(function(i) { var d; arcs[i] = { data: data[i], value: d = values[i], startAngle: a, endAngle: a += d * k }; }); return arcs; } pie.value = function(x) { if (!arguments.length) return value; value = x; return pie; }; pie.sort = function(x) { if (!arguments.length) return sort; sort = x; return pie; }; pie.startAngle = function(x) { if (!arguments.length) return startAngle; startAngle = x; return pie; }; pie.endAngle = function(x) { if (!arguments.length) return endAngle; endAngle = x; return pie; }; return pie; }; var d3_layout_pieSortByValue = {}; d3.layout.stack = function() { var values = d3_identity, order = d3_layout_stackOrderDefault, offset = d3_layout_stackOffsetZero, out = d3_layout_stackOut, x = d3_layout_stackX, y = d3_layout_stackY; function stack(data, index) { var series = data.map(function(d, i) { return values.call(stack, d, i); }); var points = series.map(function(d) { return d.map(function(v, i) { return [ x.call(stack, v, i), y.call(stack, v, i) ]; }); }); var orders = order.call(stack, points, index); series = d3.permute(series, orders); points = d3.permute(points, orders); var offsets = offset.call(stack, points, index); var n = series.length, m = series[0].length, i, j, o; for (j = 0; j < m; ++j) { out.call(stack, series[0][j], o = offsets[j], points[0][j][1]); for (i = 1; i < n; ++i) { out.call(stack, series[i][j], o += points[i - 1][j][1], points[i][j][1]); } } return data; } stack.values = function(x) { if (!arguments.length) return values; values = x; return stack; }; stack.order = function(x) { if (!arguments.length) return order; order = typeof x === "function" ? x : d3_layout_stackOrders.get(x) || d3_layout_stackOrderDefault; return stack; }; stack.offset = function(x) { if (!arguments.length) return offset; offset = typeof x === "function" ? 
x : d3_layout_stackOffsets.get(x) || d3_layout_stackOffsetZero; return stack; }; stack.x = function(z) { if (!arguments.length) return x; x = z; return stack; }; stack.y = function(z) { if (!arguments.length) return y; y = z; return stack; }; stack.out = function(z) { if (!arguments.length) return out; out = z; return stack; }; return stack; }; function d3_layout_stackX(d) { return d.x; } function d3_layout_stackY(d) { return d.y; } function d3_layout_stackOut(d, y0, y) { d.y0 = y0; d.y = y; } var d3_layout_stackOrders = d3.map({ "inside-out": function(data) { var n = data.length, i, j, max = data.map(d3_layout_stackMaxIndex), sums = data.map(d3_layout_stackReduceSum), index = d3.range(n).sort(function(a, b) { return max[a] - max[b]; }), top = 0, bottom = 0, tops = [], bottoms = []; for (i = 0; i < n; ++i) { j = index[i]; if (top < bottom) { top += sums[j]; tops.push(j); } else { bottom += sums[j]; bottoms.push(j); } } return bottoms.reverse().concat(tops); }, reverse: function(data) { return d3.range(data.length).reverse(); }, "default": d3_layout_stackOrderDefault }); var d3_layout_stackOffsets = d3.map({ silhouette: function(data) { var n = data.length, m = data[0].length, sums = [], max = 0, i, j, o, y0 = []; for (j = 0; j < m; ++j) { for (i = 0, o = 0; i < n; i++) o += data[i][j][1]; if (o > max) max = o; sums.push(o); } for (j = 0; j < m; ++j) { y0[j] = (max - sums[j]) / 2; } return y0; }, wiggle: function(data) { var n = data.length, x = data[0], m = x.length, i, j, k, s1, s2, s3, dx, o, o0, y0 = []; y0[0] = o = o0 = 0; for (j = 1; j < m; ++j) { for (i = 0, s1 = 0; i < n; ++i) s1 += data[i][j][1]; for (i = 0, s2 = 0, dx = x[j][0] - x[j - 1][0]; i < n; ++i) { for (k = 0, s3 = (data[i][j][1] - data[i][j - 1][1]) / (2 * dx); k < i; ++k) { s3 += (data[k][j][1] - data[k][j - 1][1]) / dx; } s2 += s3 * data[i][j][1]; } y0[j] = o -= s1 ? 
s2 / s1 * dx : 0; if (o < o0) o0 = o; } for (j = 0; j < m; ++j) y0[j] -= o0; return y0; }, expand: function(data) { var n = data.length, m = data[0].length, k = 1 / n, i, j, o, y0 = []; for (j = 0; j < m; ++j) { for (i = 0, o = 0; i < n; i++) o += data[i][j][1]; if (o) for (i = 0; i < n; i++) data[i][j][1] /= o; else for (i = 0; i < n; i++) data[i][j][1] = k; } for (j = 0; j < m; ++j) y0[j] = 0; return y0; }, zero: d3_layout_stackOffsetZero }); function d3_layout_stackOrderDefault(data) { return d3.range(data.length); } function d3_layout_stackOffsetZero(data) { var j = -1, m = data[0].length, y0 = []; while (++j < m) y0[j] = 0; return y0; } function d3_layout_stackMaxIndex(array) { var i = 1, j = 0, v = array[0][1], k, n = array.length; for (;i < n; ++i) { if ((k = array[i][1]) > v) { j = i; v = k; } } return j; } function d3_layout_stackReduceSum(d) { return d.reduce(d3_layout_stackSum, 0); } function d3_layout_stackSum(p, d) { return p + d[1]; } d3.layout.histogram = function() { var frequency = true, valuer = Number, ranger = d3_layout_histogramRange, binner = d3_layout_histogramBinSturges; function histogram(data, i) { var bins = [], values = data.map(valuer, this), range = ranger.call(this, values, i), thresholds = binner.call(this, range, values, i), bin, i = -1, n = values.length, m = thresholds.length - 1, k = frequency ? 1 : 1 / n, x; while (++i < m) { bin = bins[i] = []; bin.dx = thresholds[i + 1] - (bin.x = thresholds[i]); bin.y = 0; } if (m > 0) { i = -1; while (++i < n) { x = values[i]; if (x >= range[0] && x <= range[1]) { bin = bins[d3.bisect(thresholds, x, 1, m) - 1]; bin.y += k; bin.push(data[i]); } } } return bins; } histogram.value = function(x) { if (!arguments.length) return valuer; valuer = x; return histogram; }; histogram.range = function(x) { if (!arguments.length) return ranger; ranger = d3_functor(x); return histogram; }; histogram.bins = function(x) { if (!arguments.length) return binner; binner = typeof x === "number" ? 
function(range) { return d3_layout_histogramBinFixed(range, x); } : d3_functor(x); return histogram; }; histogram.frequency = function(x) { if (!arguments.length) return frequency; frequency = !!x; return histogram; }; return histogram; }; function d3_layout_histogramBinSturges(range, values) { return d3_layout_histogramBinFixed(range, Math.ceil(Math.log(values.length) / Math.LN2 + 1)); } function d3_layout_histogramBinFixed(range, n) { var x = -1, b = +range[0], m = (range[1] - b) / n, f = []; while (++x <= n) f[x] = m * x + b; return f; } function d3_layout_histogramRange(values) { return [ d3.min(values), d3.max(values) ]; } d3.layout.pack = function() { var hierarchy = d3.layout.hierarchy().sort(d3_layout_packSort), padding = 0, size = [ 1, 1 ], radius; function pack(d, i) { var nodes = hierarchy.call(this, d, i), root = nodes[0], w = size[0], h = size[1], r = radius == null ? Math.sqrt : typeof radius === "function" ? radius : function() { return radius; }; root.x = root.y = 0; d3_layout_hierarchyVisitAfter(root, function(d) { d.r = +r(d.value); }); d3_layout_hierarchyVisitAfter(root, d3_layout_packSiblings); if (padding) { var dr = padding * (radius ? 1 : Math.max(2 * root.r / w, 2 * root.r / h)) / 2; d3_layout_hierarchyVisitAfter(root, function(d) { d.r += dr; }); d3_layout_hierarchyVisitAfter(root, d3_layout_packSiblings); d3_layout_hierarchyVisitAfter(root, function(d) { d.r -= dr; }); } d3_layout_packTransform(root, w / 2, h / 2, radius ? 1 : 1 / Math.max(2 * root.r / w, 2 * root.r / h)); return nodes; } pack.size = function(_) { if (!arguments.length) return size; size = _; return pack; }; pack.radius = function(_) { if (!arguments.length) return radius; radius = _ == null || typeof _ === "function" ? 
_ : +_; return pack; }; pack.padding = function(_) { if (!arguments.length) return padding; padding = +_; return pack; }; return d3_layout_hierarchyRebind(pack, hierarchy); }; function d3_layout_packSort(a, b) { return a.value - b.value; } function d3_layout_packInsert(a, b) { var c = a._pack_next; a._pack_next = b; b._pack_prev = a; b._pack_next = c; c._pack_prev = b; } function d3_layout_packSplice(a, b) { a._pack_next = b; b._pack_prev = a; } function d3_layout_packIntersects(a, b) { var dx = b.x - a.x, dy = b.y - a.y, dr = a.r + b.r; return .999 * dr * dr > dx * dx + dy * dy; } function d3_layout_packSiblings(node) { if (!(nodes = node.children) || !(n = nodes.length)) return; var nodes, xMin = Infinity, xMax = -Infinity, yMin = Infinity, yMax = -Infinity, a, b, c, i, j, k, n; function bound(node) { xMin = Math.min(node.x - node.r, xMin); xMax = Math.max(node.x + node.r, xMax); yMin = Math.min(node.y - node.r, yMin); yMax = Math.max(node.y + node.r, yMax); } nodes.forEach(d3_layout_packLink); a = nodes[0]; a.x = -a.r; a.y = 0; bound(a); if (n > 1) { b = nodes[1]; b.x = b.r; b.y = 0; bound(b); if (n > 2) { c = nodes[2]; d3_layout_packPlace(a, b, c); bound(c); d3_layout_packInsert(a, c); a._pack_prev = c; d3_layout_packInsert(c, b); b = a._pack_next; for (i = 3; i < n; i++) { d3_layout_packPlace(a, b, c = nodes[i]); var isect = 0, s1 = 1, s2 = 1; for (j = b._pack_next; j !== b; j = j._pack_next, s1++) { if (d3_layout_packIntersects(j, c)) { isect = 1; break; } } if (isect == 1) { for (k = a._pack_prev; k !== j._pack_prev; k = k._pack_prev, s2++) { if (d3_layout_packIntersects(k, c)) { break; } } } if (isect) { if (s1 < s2 || s1 == s2 && b.r < a.r) d3_layout_packSplice(a, b = j); else d3_layout_packSplice(a = k, b); i--; } else { d3_layout_packInsert(a, c); b = c; bound(c); } } } } var cx = (xMin + xMax) / 2, cy = (yMin + yMax) / 2, cr = 0; for (i = 0; i < n; i++) { c = nodes[i]; c.x -= cx; c.y -= cy; cr = Math.max(cr, c.r + Math.sqrt(c.x * c.x + c.y * c.y)); } 
node.r = cr; nodes.forEach(d3_layout_packUnlink); } function d3_layout_packLink(node) { node._pack_next = node._pack_prev = node; } function d3_layout_packUnlink(node) { delete node._pack_next; delete node._pack_prev; } function d3_layout_packTransform(node, x, y, k) { var children = node.children; node.x = x += k * node.x; node.y = y += k * node.y; node.r *= k; if (children) { var i = -1, n = children.length; while (++i < n) d3_layout_packTransform(children[i], x, y, k); } } function d3_layout_packPlace(a, b, c) { var db = a.r + c.r, dx = b.x - a.x, dy = b.y - a.y; if (db && (dx || dy)) { var da = b.r + c.r, dc = dx * dx + dy * dy; da *= da; db *= db; var x = .5 + (db - da) / (2 * dc), y = Math.sqrt(Math.max(0, 2 * da * (db + dc) - (db -= dc) * db - da * da)) / (2 * dc); c.x = a.x + x * dx + y * dy; c.y = a.y + x * dy - y * dx; } else { c.x = a.x + db; c.y = a.y; } } d3.layout.tree = function() { var hierarchy = d3.layout.hierarchy().sort(null).value(null), separation = d3_layout_treeSeparation, size = [ 1, 1 ], nodeSize = null; function tree(d, i) { var nodes = hierarchy.call(this, d, i), root0 = nodes[0], root1 = wrapTree(root0); d3_layout_hierarchyVisitAfter(root1, firstWalk), root1.parent.m = -root1.z; d3_layout_hierarchyVisitBefore(root1, secondWalk); if (nodeSize) d3_layout_hierarchyVisitBefore(root0, sizeNode); else { var left = root0, right = root0, bottom = root0; d3_layout_hierarchyVisitBefore(root0, function(node) { if (node.x < left.x) left = node; if (node.x > right.x) right = node; if (node.depth > bottom.depth) bottom = node; }); var tx = separation(left, right) / 2 - left.x, kx = size[0] / (right.x + separation(right, left) / 2 + tx), ky = size[1] / (bottom.depth || 1); d3_layout_hierarchyVisitBefore(root0, function(node) { node.x = (node.x + tx) * kx; node.y = node.depth * ky; }); } return nodes; } function wrapTree(root0) { var root1 = { A: null, children: [ root0 ] }, queue = [ root1 ], node1; while ((node1 = queue.pop()) != null) { for (var 
children = node1.children, child, i = 0, n = children.length; i < n; ++i) { queue.push((children[i] = child = { _: children[i], parent: node1, children: (child = children[i].children) && child.slice() || [], A: null, a: null, z: 0, m: 0, c: 0, s: 0, t: null, i: i }).a = child); } } return root1.children[0]; } function firstWalk(v) { var children = v.children, siblings = v.parent.children, w = v.i ? siblings[v.i - 1] : null; if (children.length) { d3_layout_treeShift(v); var midpoint = (children[0].z + children[children.length - 1].z) / 2; if (w) { v.z = w.z + separation(v._, w._); v.m = v.z - midpoint; } else { v.z = midpoint; } } else if (w) { v.z = w.z + separation(v._, w._); } v.parent.A = apportion(v, w, v.parent.A || siblings[0]); } function secondWalk(v) { v._.x = v.z + v.parent.m; v.m += v.parent.m; } function apportion(v, w, ancestor) { if (w) { var vip = v, vop = v, vim = w, vom = vip.parent.children[0], sip = vip.m, sop = vop.m, sim = vim.m, som = vom.m, shift; while (vim = d3_layout_treeRight(vim), vip = d3_layout_treeLeft(vip), vim && vip) { vom = d3_layout_treeLeft(vom); vop = d3_layout_treeRight(vop); vop.a = v; shift = vim.z + sim - vip.z - sip + separation(vim._, vip._); if (shift > 0) { d3_layout_treeMove(d3_layout_treeAncestor(vim, v, ancestor), v, shift); sip += shift; sop += shift; } sim += vim.m; sip += vip.m; som += vom.m; sop += vop.m; } if (vim && !d3_layout_treeRight(vop)) { vop.t = vim; vop.m += sim - sop; } if (vip && !d3_layout_treeLeft(vom)) { vom.t = vip; vom.m += sip - som; ancestor = v; } } return ancestor; } function sizeNode(node) { node.x *= size[0]; node.y = node.depth * size[1]; } tree.separation = function(x) { if (!arguments.length) return separation; separation = x; return tree; }; tree.size = function(x) { if (!arguments.length) return nodeSize ? null : size; nodeSize = (size = x) == null ? sizeNode : null; return tree; }; tree.nodeSize = function(x) { if (!arguments.length) return nodeSize ? 
size : null; nodeSize = (size = x) == null ? null : sizeNode; return tree; }; return d3_layout_hierarchyRebind(tree, hierarchy); }; function d3_layout_treeSeparation(a, b) { return a.parent == b.parent ? 1 : 2; } function d3_layout_treeLeft(v) { var children = v.children; return children.length ? children[0] : v.t; } function d3_layout_treeRight(v) { var children = v.children, n; return (n = children.length) ? children[n - 1] : v.t; } function d3_layout_treeMove(wm, wp, shift) { var change = shift / (wp.i - wm.i); wp.c -= change; wp.s += shift; wm.c += change; wp.z += shift; wp.m += shift; } function d3_layout_treeShift(v) { var shift = 0, change = 0, children = v.children, i = children.length, w; while (--i >= 0) { w = children[i]; w.z += shift; w.m += shift; shift += w.s + (change += w.c); } } function d3_layout_treeAncestor(vim, v, ancestor) { return vim.a.parent === v.parent ? vim.a : ancestor; } d3.layout.cluster = function() { var hierarchy = d3.layout.hierarchy().sort(null).value(null), separation = d3_layout_treeSeparation, size = [ 1, 1 ], nodeSize = false; function cluster(d, i) { var nodes = hierarchy.call(this, d, i), root = nodes[0], previousNode, x = 0; d3_layout_hierarchyVisitAfter(root, function(node) { var children = node.children; if (children && children.length) { node.x = d3_layout_clusterX(children); node.y = d3_layout_clusterY(children); } else { node.x = previousNode ? x += separation(node, previousNode) : 0; node.y = 0; previousNode = node; } }); var left = d3_layout_clusterLeft(root), right = d3_layout_clusterRight(root), x0 = left.x - separation(left, right) / 2, x1 = right.x + separation(right, left) / 2; d3_layout_hierarchyVisitAfter(root, nodeSize ? function(node) { node.x = (node.x - root.x) * size[0]; node.y = (root.y - node.y) * size[1]; } : function(node) { node.x = (node.x - x0) / (x1 - x0) * size[0]; node.y = (1 - (root.y ? 
node.y / root.y : 1)) * size[1]; }); return nodes; } cluster.separation = function(x) { if (!arguments.length) return separation; separation = x; return cluster; }; cluster.size = function(x) { if (!arguments.length) return nodeSize ? null : size; nodeSize = (size = x) == null; return cluster; }; cluster.nodeSize = function(x) { if (!arguments.length) return nodeSize ? size : null; nodeSize = (size = x) != null; return cluster; }; return d3_layout_hierarchyRebind(cluster, hierarchy); }; function d3_layout_clusterY(children) { return 1 + d3.max(children, function(child) { return child.y; }); } function d3_layout_clusterX(children) { return children.reduce(function(x, child) { return x + child.x; }, 0) / children.length; } function d3_layout_clusterLeft(node) { var children = node.children; return children && children.length ? d3_layout_clusterLeft(children[0]) : node; } function d3_layout_clusterRight(node) { var children = node.children, n; return children && (n = children.length) ? d3_layout_clusterRight(children[n - 1]) : node; } d3.layout.treemap = function() { var hierarchy = d3.layout.hierarchy(), round = Math.round, size = [ 1, 1 ], padding = null, pad = d3_layout_treemapPadNull, sticky = false, stickies, mode = "squarify", ratio = .5 * (1 + Math.sqrt(5)); function scale(children, k) { var i = -1, n = children.length, child, area; while (++i < n) { area = (child = children[i]).value * (k < 0 ? 0 : k); child.area = isNaN(area) || area <= 0 ? 0 : area; } } function squarify(node) { var children = node.children; if (children && children.length) { var rect = pad(node), row = [], remaining = children.slice(), child, best = Infinity, score, u = mode === "slice" ? rect.dx : mode === "dice" ? rect.dy : mode === "slice-dice" ? node.depth & 1 ? 
rect.dy : rect.dx : Math.min(rect.dx, rect.dy), n; scale(remaining, rect.dx * rect.dy / node.value); row.area = 0; while ((n = remaining.length) > 0) { row.push(child = remaining[n - 1]); row.area += child.area; if (mode !== "squarify" || (score = worst(row, u)) <= best) { remaining.pop(); best = score; } else { row.area -= row.pop().area; position(row, u, rect, false); u = Math.min(rect.dx, rect.dy); row.length = row.area = 0; best = Infinity; } } if (row.length) { position(row, u, rect, true); row.length = row.area = 0; } children.forEach(squarify); } } function stickify(node) { var children = node.children; if (children && children.length) { var rect = pad(node), remaining = children.slice(), child, row = []; scale(remaining, rect.dx * rect.dy / node.value); row.area = 0; while (child = remaining.pop()) { row.push(child); row.area += child.area; if (child.z != null) { position(row, child.z ? rect.dx : rect.dy, rect, !remaining.length); row.length = row.area = 0; } } children.forEach(stickify); } } function worst(row, u) { var s = row.area, r, rmax = 0, rmin = Infinity, i = -1, n = row.length; while (++i < n) { if (!(r = row[i].area)) continue; if (r < rmin) rmin = r; if (r > rmax) rmax = r; } s *= s; u *= u; return s ? Math.max(u * rmax * ratio / s, s / (u * rmin * ratio)) : Infinity; } function position(row, u, rect, flush) { var i = -1, n = row.length, x = rect.x, y = rect.y, v = u ? round(row.area / u) : 0, o; if (u == rect.dx) { if (flush || v > rect.dy) v = rect.dy; while (++i < n) { o = row[i]; o.x = x; o.y = y; o.dy = v; x += o.dx = Math.min(rect.x + rect.dx - x, v ? round(o.area / v) : 0); } o.z = true; o.dx += rect.x + rect.dx - x; rect.y += v; rect.dy -= v; } else { if (flush || v > rect.dx) v = rect.dx; while (++i < n) { o = row[i]; o.x = x; o.y = y; o.dx = v; y += o.dy = Math.min(rect.y + rect.dy - y, v ? 
round(o.area / v) : 0); } o.z = false; o.dy += rect.y + rect.dy - y; rect.x += v; rect.dx -= v; } } function treemap(d) { var nodes = stickies || hierarchy(d), root = nodes[0]; root.x = 0; root.y = 0; root.dx = size[0]; root.dy = size[1]; if (stickies) hierarchy.revalue(root); scale([ root ], root.dx * root.dy / root.value); (stickies ? stickify : squarify)(root); if (sticky) stickies = nodes; return nodes; } treemap.size = function(x) { if (!arguments.length) return size; size = x; return treemap; }; treemap.padding = function(x) { if (!arguments.length) return padding; function padFunction(node) { var p = x.call(treemap, node, node.depth); return p == null ? d3_layout_treemapPadNull(node) : d3_layout_treemapPad(node, typeof p === "number" ? [ p, p, p, p ] : p); } function padConstant(node) { return d3_layout_treemapPad(node, x); } var type; pad = (padding = x) == null ? d3_layout_treemapPadNull : (type = typeof x) === "function" ? padFunction : type === "number" ? (x = [ x, x, x, x ], padConstant) : padConstant; return treemap; }; treemap.round = function(x) { if (!arguments.length) return round != Number; round = x ? 
Math.round : Number; return treemap; }; treemap.sticky = function(x) { if (!arguments.length) return sticky; sticky = x; stickies = null; return treemap; }; treemap.ratio = function(x) { if (!arguments.length) return ratio; ratio = x; return treemap; }; treemap.mode = function(x) { if (!arguments.length) return mode; mode = x + ""; return treemap; }; return d3_layout_hierarchyRebind(treemap, hierarchy); }; function d3_layout_treemapPadNull(node) { return { x: node.x, y: node.y, dx: node.dx, dy: node.dy }; } function d3_layout_treemapPad(node, padding) { var x = node.x + padding[3], y = node.y + padding[0], dx = node.dx - padding[1] - padding[3], dy = node.dy - padding[0] - padding[2]; if (dx < 0) { x += dx / 2; dx = 0; } if (dy < 0) { y += dy / 2; dy = 0; } return { x: x, y: y, dx: dx, dy: dy }; } d3.random = { normal: function(µ, σ) { var n = arguments.length; if (n < 2) σ = 1; if (n < 1) µ = 0; return function() { var x, y, r; do { x = Math.random() * 2 - 1; y = Math.random() * 2 - 1; r = x * x + y * y; } while (!r || r > 1); return µ + σ * x * Math.sqrt(-2 * Math.log(r) / r); }; }, logNormal: function() { var random = d3.random.normal.apply(d3, arguments); return function() { return Math.exp(random()); }; }, bates: function(m) { var random = d3.random.irwinHall(m); return function() { return random() / m; }; }, irwinHall: function(m) { return function() { for (var s = 0, j = 0; j < m; j++) s += Math.random(); return s; }; } }; d3.scale = {}; function d3_scaleExtent(domain) { var start = domain[0], stop = domain[domain.length - 1]; return start < stop ? [ start, stop ] : [ stop, start ]; } function d3_scaleRange(scale) { return scale.rangeExtent ? 
scale.rangeExtent() : d3_scaleExtent(scale.range()); } function d3_scale_bilinear(domain, range, uninterpolate, interpolate) { var u = uninterpolate(domain[0], domain[1]), i = interpolate(range[0], range[1]); return function(x) { return i(u(x)); }; } function d3_scale_nice(domain, nice) { var i0 = 0, i1 = domain.length - 1, x0 = domain[i0], x1 = domain[i1], dx; if (x1 < x0) { dx = i0, i0 = i1, i1 = dx; dx = x0, x0 = x1, x1 = dx; } domain[i0] = nice.floor(x0); domain[i1] = nice.ceil(x1); return domain; } function d3_scale_niceStep(step) { return step ? { floor: function(x) { return Math.floor(x / step) * step; }, ceil: function(x) { return Math.ceil(x / step) * step; } } : d3_scale_niceIdentity; } var d3_scale_niceIdentity = { floor: d3_identity, ceil: d3_identity }; function d3_scale_polylinear(domain, range, uninterpolate, interpolate) { var u = [], i = [], j = 0, k = Math.min(domain.length, range.length) - 1; if (domain[k] < domain[0]) { domain = domain.slice().reverse(); range = range.slice().reverse(); } while (++j <= k) { u.push(uninterpolate(domain[j - 1], domain[j])); i.push(interpolate(range[j - 1], range[j])); } return function(x) { var j = d3.bisect(domain, x, 1, k) - 1; return i[j](u[j](x)); }; } d3.scale.linear = function() { return d3_scale_linear([ 0, 1 ], [ 0, 1 ], d3_interpolate, false); }; function d3_scale_linear(domain, range, interpolate, clamp) { var output, input; function rescale() { var linear = Math.min(domain.length, range.length) > 2 ? d3_scale_polylinear : d3_scale_bilinear, uninterpolate = clamp ? 
d3_uninterpolateClamp : d3_uninterpolateNumber; output = linear(domain, range, uninterpolate, interpolate); input = linear(range, domain, uninterpolate, d3_interpolate); return scale; } function scale(x) { return output(x); } scale.invert = function(y) { return input(y); }; scale.domain = function(x) { if (!arguments.length) return domain; domain = x.map(Number); return rescale(); }; scale.range = function(x) { if (!arguments.length) return range; range = x; return rescale(); }; scale.rangeRound = function(x) { return scale.range(x).interpolate(d3_interpolateRound); }; scale.clamp = function(x) { if (!arguments.length) return clamp; clamp = x; return rescale(); }; scale.interpolate = function(x) { if (!arguments.length) return interpolate; interpolate = x; return rescale(); }; scale.ticks = function(m) { return d3_scale_linearTicks(domain, m); }; scale.tickFormat = function(m, format) { return d3_scale_linearTickFormat(domain, m, format); }; scale.nice = function(m) { d3_scale_linearNice(domain, m); return rescale(); }; scale.copy = function() { return d3_scale_linear(domain, range, interpolate, clamp); }; return rescale(); } function d3_scale_linearRebind(scale, linear) { return d3.rebind(scale, linear, "range", "rangeRound", "interpolate", "clamp"); } function d3_scale_linearNice(domain, m) { return d3_scale_nice(domain, d3_scale_niceStep(d3_scale_linearTickRange(domain, m)[2])); } function d3_scale_linearTickRange(domain, m) { if (m == null) m = 10; var extent = d3_scaleExtent(domain), span = extent[1] - extent[0], step = Math.pow(10, Math.floor(Math.log(span / m) / Math.LN10)), err = m / span * step; if (err <= .15) step *= 10; else if (err <= .35) step *= 5; else if (err <= .75) step *= 2; extent[0] = Math.ceil(extent[0] / step) * step; extent[1] = Math.floor(extent[1] / step) * step + step * .5; extent[2] = step; return extent; } function d3_scale_linearTicks(domain, m) { return d3.range.apply(d3, d3_scale_linearTickRange(domain, m)); } function 
d3_scale_linearTickFormat(domain, m, format) { var range = d3_scale_linearTickRange(domain, m); if (format) { var match = d3_format_re.exec(format); match.shift(); if (match[8] === "s") { var prefix = d3.formatPrefix(Math.max(abs(range[0]), abs(range[1]))); if (!match[7]) match[7] = "." + d3_scale_linearPrecision(prefix.scale(range[2])); match[8] = "f"; format = d3.format(match.join("")); return function(d) { return format(prefix.scale(d)) + prefix.symbol; }; } if (!match[7]) match[7] = "." + d3_scale_linearFormatPrecision(match[8], range); format = match.join(""); } else { format = ",." + d3_scale_linearPrecision(range[2]) + "f"; } return d3.format(format); } var d3_scale_linearFormatSignificant = { s: 1, g: 1, p: 1, r: 1, e: 1 }; function d3_scale_linearPrecision(value) { return -Math.floor(Math.log(value) / Math.LN10 + .01); } function d3_scale_linearFormatPrecision(type, range) { var p = d3_scale_linearPrecision(range[2]); return type in d3_scale_linearFormatSignificant ? Math.abs(p - d3_scale_linearPrecision(Math.max(abs(range[0]), abs(range[1])))) + +(type !== "e") : p - (type === "%") * 2; } d3.scale.log = function() { return d3_scale_log(d3.scale.linear().domain([ 0, 1 ]), 10, true, [ 1, 10 ]); }; function d3_scale_log(linear, base, positive, domain) { function log(x) { return (positive ? Math.log(x < 0 ? 0 : x) : -Math.log(x > 0 ? 0 : -x)) / Math.log(base); } function pow(x) { return positive ? Math.pow(base, x) : -Math.pow(base, -x); } function scale(x) { return linear(log(x)); } scale.invert = function(x) { return pow(linear.invert(x)); }; scale.domain = function(x) { if (!arguments.length) return domain; positive = x[0] >= 0; linear.domain((domain = x.map(Number)).map(log)); return scale; }; scale.base = function(_) { if (!arguments.length) return base; base = +_; linear.domain(domain.map(log)); return scale; }; scale.nice = function() { var niced = d3_scale_nice(domain.map(log), positive ? 
Math : d3_scale_logNiceNegative); linear.domain(niced); domain = niced.map(pow); return scale; }; scale.ticks = function() { var extent = d3_scaleExtent(domain), ticks = [], u = extent[0], v = extent[1], i = Math.floor(log(u)), j = Math.ceil(log(v)), n = base % 1 ? 2 : base; if (isFinite(j - i)) { if (positive) { for (;i < j; i++) for (var k = 1; k < n; k++) ticks.push(pow(i) * k); ticks.push(pow(i)); } else { ticks.push(pow(i)); for (;i++ < j; ) for (var k = n - 1; k > 0; k--) ticks.push(pow(i) * k); } for (i = 0; ticks[i] < u; i++) {} for (j = ticks.length; ticks[j - 1] > v; j--) {} ticks = ticks.slice(i, j); } return ticks; }; scale.tickFormat = function(n, format) { if (!arguments.length) return d3_scale_logFormat; if (arguments.length < 2) format = d3_scale_logFormat; else if (typeof format !== "function") format = d3.format(format); var k = Math.max(.1, n / scale.ticks().length), f = positive ? (e = 1e-12, Math.ceil) : (e = -1e-12, Math.floor), e; return function(d) { return d / pow(f(log(d) + e)) <= k ? 
format(d) : ""; }; }; scale.copy = function() { return d3_scale_log(linear.copy(), base, positive, domain); }; return d3_scale_linearRebind(scale, linear); } var d3_scale_logFormat = d3.format(".0e"), d3_scale_logNiceNegative = { floor: function(x) { return -Math.ceil(-x); }, ceil: function(x) { return -Math.floor(-x); } }; d3.scale.pow = function() { return d3_scale_pow(d3.scale.linear(), 1, [ 0, 1 ]); }; function d3_scale_pow(linear, exponent, domain) { var powp = d3_scale_powPow(exponent), powb = d3_scale_powPow(1 / exponent); function scale(x) { return linear(powp(x)); } scale.invert = function(x) { return powb(linear.invert(x)); }; scale.domain = function(x) { if (!arguments.length) return domain; linear.domain((domain = x.map(Number)).map(powp)); return scale; }; scale.ticks = function(m) { return d3_scale_linearTicks(domain, m); }; scale.tickFormat = function(m, format) { return d3_scale_linearTickFormat(domain, m, format); }; scale.nice = function(m) { return scale.domain(d3_scale_linearNice(domain, m)); }; scale.exponent = function(x) { if (!arguments.length) return exponent; powp = d3_scale_powPow(exponent = x); powb = d3_scale_powPow(1 / exponent); linear.domain(domain.map(powp)); return scale; }; scale.copy = function() { return d3_scale_pow(linear.copy(), exponent, domain); }; return d3_scale_linearRebind(scale, linear); } function d3_scale_powPow(e) { return function(x) { return x < 0 ? -Math.pow(-x, e) : Math.pow(x, e); }; } d3.scale.sqrt = function() { return d3.scale.pow().exponent(.5); }; d3.scale.ordinal = function() { return d3_scale_ordinal([], { t: "range", a: [ [] ] }); }; function d3_scale_ordinal(domain, ranger) { var index, range, rangeBand; function scale(x) { return range[((index.get(x) || (ranger.t === "range" ? 
index.set(x, domain.push(x)) : NaN)) - 1) % range.length]; } function steps(start, step) { return d3.range(domain.length).map(function(i) { return start + step * i; }); } scale.domain = function(x) { if (!arguments.length) return domain; domain = []; index = new d3_Map(); var i = -1, n = x.length, xi; while (++i < n) if (!index.has(xi = x[i])) index.set(xi, domain.push(xi)); return scale[ranger.t].apply(scale, ranger.a); }; scale.range = function(x) { if (!arguments.length) return range; range = x; rangeBand = 0; ranger = { t: "range", a: arguments }; return scale; }; scale.rangePoints = function(x, padding) { if (arguments.length < 2) padding = 0; var start = x[0], stop = x[1], step = (stop - start) / (Math.max(1, domain.length - 1) + padding); range = steps(domain.length < 2 ? (start + stop) / 2 : start + step * padding / 2, step); rangeBand = 0; ranger = { t: "rangePoints", a: arguments }; return scale; }; scale.rangeBands = function(x, padding, outerPadding) { if (arguments.length < 2) padding = 0; if (arguments.length < 3) outerPadding = padding; var reverse = x[1] < x[0], start = x[reverse - 0], stop = x[1 - reverse], step = (stop - start) / (domain.length - padding + 2 * outerPadding); range = steps(start + step * outerPadding, step); if (reverse) range.reverse(); rangeBand = step * (1 - padding); ranger = { t: "rangeBands", a: arguments }; return scale; }; scale.rangeRoundBands = function(x, padding, outerPadding) { if (arguments.length < 2) padding = 0; if (arguments.length < 3) outerPadding = padding; var reverse = x[1] < x[0], start = x[reverse - 0], stop = x[1 - reverse], step = Math.floor((stop - start) / (domain.length - padding + 2 * outerPadding)), error = stop - start - (domain.length - padding) * step; range = steps(start + Math.round(error / 2), step); if (reverse) range.reverse(); rangeBand = Math.round(step * (1 - padding)); ranger = { t: "rangeRoundBands", a: arguments }; return scale; }; scale.rangeBand = function() { return rangeBand; }; 
scale.rangeExtent = function() { return d3_scaleExtent(ranger.a[0]); }; scale.copy = function() { return d3_scale_ordinal(domain, ranger); }; return scale.domain(domain); } d3.scale.category10 = function() { return d3.scale.ordinal().range(d3_category10); }; d3.scale.category20 = function() { return d3.scale.ordinal().range(d3_category20); }; d3.scale.category20b = function() { return d3.scale.ordinal().range(d3_category20b); }; d3.scale.category20c = function() { return d3.scale.ordinal().range(d3_category20c); }; var d3_category10 = [ 2062260, 16744206, 2924588, 14034728, 9725885, 9197131, 14907330, 8355711, 12369186, 1556175 ].map(d3_rgbString); var d3_category20 = [ 2062260, 11454440, 16744206, 16759672, 2924588, 10018698, 14034728, 16750742, 9725885, 12955861, 9197131, 12885140, 14907330, 16234194, 8355711, 13092807, 12369186, 14408589, 1556175, 10410725 ].map(d3_rgbString); var d3_category20b = [ 3750777, 5395619, 7040719, 10264286, 6519097, 9216594, 11915115, 13556636, 9202993, 12426809, 15186514, 15190932, 8666169, 11356490, 14049643, 15177372, 8077683, 10834324, 13528509, 14589654 ].map(d3_rgbString); var d3_category20c = [ 3244733, 7057110, 10406625, 13032431, 15095053, 16616764, 16625259, 16634018, 3253076, 7652470, 10607003, 13101504, 7695281, 10394312, 12369372, 14342891, 6513507, 9868950, 12434877, 14277081 ].map(d3_rgbString); d3.scale.quantile = function() { return d3_scale_quantile([], []); }; function d3_scale_quantile(domain, range) { var thresholds; function rescale() { var k = 0, q = range.length; thresholds = []; while (++k < q) thresholds[k - 1] = d3.quantile(domain, k / q); return scale; } function scale(x) { if (!isNaN(x = +x)) return range[d3.bisect(thresholds, x)]; } scale.domain = function(x) { if (!arguments.length) return domain; domain = x.filter(d3_number).sort(d3_ascending); return rescale(); }; scale.range = function(x) { if (!arguments.length) return range; range = x; return rescale(); }; scale.quantiles = function() { return 
thresholds; }; scale.invertExtent = function(y) { y = range.indexOf(y); return y < 0 ? [ NaN, NaN ] : [ y > 0 ? thresholds[y - 1] : domain[0], y < thresholds.length ? thresholds[y] : domain[domain.length - 1] ]; }; scale.copy = function() { return d3_scale_quantile(domain, range); }; return rescale(); } d3.scale.quantize = function() { return d3_scale_quantize(0, 1, [ 0, 1 ]); }; function d3_scale_quantize(x0, x1, range) { var kx, i; function scale(x) { return range[Math.max(0, Math.min(i, Math.floor(kx * (x - x0))))]; } function rescale() { kx = range.length / (x1 - x0); i = range.length - 1; return scale; } scale.domain = function(x) { if (!arguments.length) return [ x0, x1 ]; x0 = +x[0]; x1 = +x[x.length - 1]; return rescale(); }; scale.range = function(x) { if (!arguments.length) return range; range = x; return rescale(); }; scale.invertExtent = function(y) { y = range.indexOf(y); y = y < 0 ? NaN : y / kx + x0; return [ y, y + 1 / kx ]; }; scale.copy = function() { return d3_scale_quantize(x0, x1, range); }; return rescale(); } d3.scale.threshold = function() { return d3_scale_threshold([ .5 ], [ 0, 1 ]); }; function d3_scale_threshold(domain, range) { function scale(x) { if (x <= x) return range[d3.bisect(domain, x)]; } scale.domain = function(_) { if (!arguments.length) return domain; domain = _; return scale; }; scale.range = function(_) { if (!arguments.length) return range; range = _; return scale; }; scale.invertExtent = function(y) { y = range.indexOf(y); return [ domain[y - 1], domain[y] ]; }; scale.copy = function() { return d3_scale_threshold(domain, range); }; return scale; } d3.scale.identity = function() { return d3_scale_identity([ 0, 1 ]); }; function d3_scale_identity(domain) { function identity(x) { return +x; } identity.invert = identity; identity.domain = identity.range = function(x) { if (!arguments.length) return domain; domain = x.map(identity); return identity; }; identity.ticks = function(m) { return d3_scale_linearTicks(domain, m); }; 
identity.tickFormat = function(m, format) { return d3_scale_linearTickFormat(domain, m, format); }; identity.copy = function() { return d3_scale_identity(domain); }; return identity; } d3.svg = {}; d3.svg.arc = function() { var innerRadius = d3_svg_arcInnerRadius, outerRadius = d3_svg_arcOuterRadius, startAngle = d3_svg_arcStartAngle, endAngle = d3_svg_arcEndAngle; function arc() { var r0 = innerRadius.apply(this, arguments), r1 = outerRadius.apply(this, arguments), a0 = startAngle.apply(this, arguments) + d3_svg_arcOffset, a1 = endAngle.apply(this, arguments) + d3_svg_arcOffset, da = (a1 < a0 && (da = a0, a0 = a1, a1 = da), a1 - a0), df = da < π ? "0" : "1", c0 = Math.cos(a0), s0 = Math.sin(a0), c1 = Math.cos(a1), s1 = Math.sin(a1); return da >= d3_svg_arcMax ? r0 ? "M0," + r1 + "A" + r1 + "," + r1 + " 0 1,1 0," + -r1 + "A" + r1 + "," + r1 + " 0 1,1 0," + r1 + "M0," + r0 + "A" + r0 + "," + r0 + " 0 1,0 0," + -r0 + "A" + r0 + "," + r0 + " 0 1,0 0," + r0 + "Z" : "M0," + r1 + "A" + r1 + "," + r1 + " 0 1,1 0," + -r1 + "A" + r1 + "," + r1 + " 0 1,1 0," + r1 + "Z" : r0 ? 
"M" + r1 * c0 + "," + r1 * s0 + "A" + r1 + "," + r1 + " 0 " + df + ",1 " + r1 * c1 + "," + r1 * s1 + "L" + r0 * c1 + "," + r0 * s1 + "A" + r0 + "," + r0 + " 0 " + df + ",0 " + r0 * c0 + "," + r0 * s0 + "Z" : "M" + r1 * c0 + "," + r1 * s0 + "A" + r1 + "," + r1 + " 0 " + df + ",1 " + r1 * c1 + "," + r1 * s1 + "L0,0" + "Z"; } arc.innerRadius = function(v) { if (!arguments.length) return innerRadius; innerRadius = d3_functor(v); return arc; }; arc.outerRadius = function(v) { if (!arguments.length) return outerRadius; outerRadius = d3_functor(v); return arc; }; arc.startAngle = function(v) { if (!arguments.length) return startAngle; startAngle = d3_functor(v); return arc; }; arc.endAngle = function(v) { if (!arguments.length) return endAngle; endAngle = d3_functor(v); return arc; }; arc.centroid = function() { var r = (innerRadius.apply(this, arguments) + outerRadius.apply(this, arguments)) / 2, a = (startAngle.apply(this, arguments) + endAngle.apply(this, arguments)) / 2 + d3_svg_arcOffset; return [ Math.cos(a) * r, Math.sin(a) * r ]; }; return arc; }; var d3_svg_arcOffset = -halfπ, d3_svg_arcMax = τ - ε; function d3_svg_arcInnerRadius(d) { return d.innerRadius; } function d3_svg_arcOuterRadius(d) { return d.outerRadius; } function d3_svg_arcStartAngle(d) { return d.startAngle; } function d3_svg_arcEndAngle(d) { return d.endAngle; } function d3_svg_line(projection) { var x = d3_geom_pointX, y = d3_geom_pointY, defined = d3_true, interpolate = d3_svg_lineLinear, interpolateKey = interpolate.key, tension = .7; function line(data) { var segments = [], points = [], i = -1, n = data.length, d, fx = d3_functor(x), fy = d3_functor(y); function segment() { segments.push("M", interpolate(projection(points), tension)); } while (++i < n) { if (defined.call(this, d = data[i], i)) { points.push([ +fx.call(this, d, i), +fy.call(this, d, i) ]); } else if (points.length) { segment(); points = []; } } if (points.length) segment(); return segments.length ? 
segments.join("") : null; } line.x = function(_) { if (!arguments.length) return x; x = _; return line; }; line.y = function(_) { if (!arguments.length) return y; y = _; return line; }; line.defined = function(_) { if (!arguments.length) return defined; defined = _; return line; }; line.interpolate = function(_) { if (!arguments.length) return interpolateKey; if (typeof _ === "function") interpolateKey = interpolate = _; else interpolateKey = (interpolate = d3_svg_lineInterpolators.get(_) || d3_svg_lineLinear).key; return line; }; line.tension = function(_) { if (!arguments.length) return tension; tension = _; return line; }; return line; } d3.svg.line = function() { return d3_svg_line(d3_identity); }; var d3_svg_lineInterpolators = d3.map({ linear: d3_svg_lineLinear, "linear-closed": d3_svg_lineLinearClosed, step: d3_svg_lineStep, "step-before": d3_svg_lineStepBefore, "step-after": d3_svg_lineStepAfter, basis: d3_svg_lineBasis, "basis-open": d3_svg_lineBasisOpen, "basis-closed": d3_svg_lineBasisClosed, bundle: d3_svg_lineBundle, cardinal: d3_svg_lineCardinal, "cardinal-open": d3_svg_lineCardinalOpen, "cardinal-closed": d3_svg_lineCardinalClosed, monotone: d3_svg_lineMonotone }); d3_svg_lineInterpolators.forEach(function(key, value) { value.key = key; value.closed = /-closed$/.test(key); }); function d3_svg_lineLinear(points) { return points.join("L"); } function d3_svg_lineLinearClosed(points) { return d3_svg_lineLinear(points) + "Z"; } function d3_svg_lineStep(points) { var i = 0, n = points.length, p = points[0], path = [ p[0], ",", p[1] ]; while (++i < n) path.push("H", (p[0] + (p = points[i])[0]) / 2, "V", p[1]); if (n > 1) path.push("H", p[0]); return path.join(""); } function d3_svg_lineStepBefore(points) { var i = 0, n = points.length, p = points[0], path = [ p[0], ",", p[1] ]; while (++i < n) path.push("V", (p = points[i])[1], "H", p[0]); return path.join(""); } function d3_svg_lineStepAfter(points) { var i = 0, n = points.length, p = points[0], path = [ 
p[0], ",", p[1] ]; while (++i < n) path.push("H", (p = points[i])[0], "V", p[1]); return path.join(""); } function d3_svg_lineCardinalOpen(points, tension) { return points.length < 4 ? d3_svg_lineLinear(points) : points[1] + d3_svg_lineHermite(points.slice(1, points.length - 1), d3_svg_lineCardinalTangents(points, tension)); } function d3_svg_lineCardinalClosed(points, tension) { return points.length < 3 ? d3_svg_lineLinear(points) : points[0] + d3_svg_lineHermite((points.push(points[0]), points), d3_svg_lineCardinalTangents([ points[points.length - 2] ].concat(points, [ points[1] ]), tension)); } function d3_svg_lineCardinal(points, tension) { return points.length < 3 ? d3_svg_lineLinear(points) : points[0] + d3_svg_lineHermite(points, d3_svg_lineCardinalTangents(points, tension)); } function d3_svg_lineHermite(points, tangents) { if (tangents.length < 1 || points.length != tangents.length && points.length != tangents.length + 2) { return d3_svg_lineLinear(points); } var quad = points.length != tangents.length, path = "", p0 = points[0], p = points[1], t0 = tangents[0], t = t0, pi = 1; if (quad) { path += "Q" + (p[0] - t0[0] * 2 / 3) + "," + (p[1] - t0[1] * 2 / 3) + "," + p[0] + "," + p[1]; p0 = points[1]; pi = 2; } if (tangents.length > 1) { t = tangents[1]; p = points[pi]; pi++; path += "C" + (p0[0] + t0[0]) + "," + (p0[1] + t0[1]) + "," + (p[0] - t[0]) + "," + (p[1] - t[1]) + "," + p[0] + "," + p[1]; for (var i = 2; i < tangents.length; i++, pi++) { p = points[pi]; t = tangents[i]; path += "S" + (p[0] - t[0]) + "," + (p[1] - t[1]) + "," + p[0] + "," + p[1]; } } if (quad) { var lp = points[pi]; path += "Q" + (p[0] + t[0] * 2 / 3) + "," + (p[1] + t[1] * 2 / 3) + "," + lp[0] + "," + lp[1]; } return path; } function d3_svg_lineCardinalTangents(points, tension) { var tangents = [], a = (1 - tension) / 2, p0, p1 = points[0], p2 = points[1], i = 1, n = points.length; while (++i < n) { p0 = p1; p1 = p2; p2 = points[i]; tangents.push([ a * (p2[0] - p0[0]), a * (p2[1] - 
p0[1]) ]); } return tangents; } function d3_svg_lineBasis(points) { if (points.length < 3) return d3_svg_lineLinear(points); var i = 1, n = points.length, pi = points[0], x0 = pi[0], y0 = pi[1], px = [ x0, x0, x0, (pi = points[1])[0] ], py = [ y0, y0, y0, pi[1] ], path = [ x0, ",", y0, "L", d3_svg_lineDot4(d3_svg_lineBasisBezier3, px), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, py) ]; points.push(points[n - 1]); while (++i <= n) { pi = points[i]; px.shift(); px.push(pi[0]); py.shift(); py.push(pi[1]); d3_svg_lineBasisBezier(path, px, py); } points.pop(); path.push("L", pi); return path.join(""); } function d3_svg_lineBasisOpen(points) { if (points.length < 4) return d3_svg_lineLinear(points); var path = [], i = -1, n = points.length, pi, px = [ 0 ], py = [ 0 ]; while (++i < 3) { pi = points[i]; px.push(pi[0]); py.push(pi[1]); } path.push(d3_svg_lineDot4(d3_svg_lineBasisBezier3, px) + "," + d3_svg_lineDot4(d3_svg_lineBasisBezier3, py)); --i; while (++i < n) { pi = points[i]; px.shift(); px.push(pi[0]); py.shift(); py.push(pi[1]); d3_svg_lineBasisBezier(path, px, py); } return path.join(""); } function d3_svg_lineBasisClosed(points) { var path, i = -1, n = points.length, m = n + 4, pi, px = [], py = []; while (++i < 4) { pi = points[i % n]; px.push(pi[0]); py.push(pi[1]); } path = [ d3_svg_lineDot4(d3_svg_lineBasisBezier3, px), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, py) ]; --i; while (++i < m) { pi = points[i % n]; px.shift(); px.push(pi[0]); py.shift(); py.push(pi[1]); d3_svg_lineBasisBezier(path, px, py); } return path.join(""); } function d3_svg_lineBundle(points, tension) { var n = points.length - 1; if (n) { var x0 = points[0][0], y0 = points[0][1], dx = points[n][0] - x0, dy = points[n][1] - y0, i = -1, p, t; while (++i <= n) { p = points[i]; t = i / n; p[0] = tension * p[0] + (1 - tension) * (x0 + t * dx); p[1] = tension * p[1] + (1 - tension) * (y0 + t * dy); } } return d3_svg_lineBasis(points); } function d3_svg_lineDot4(a, b) { return a[0] * b[0] 
+ a[1] * b[1] + a[2] * b[2] + a[3] * b[3]; } var d3_svg_lineBasisBezier1 = [ 0, 2 / 3, 1 / 3, 0 ], d3_svg_lineBasisBezier2 = [ 0, 1 / 3, 2 / 3, 0 ], d3_svg_lineBasisBezier3 = [ 0, 1 / 6, 2 / 3, 1 / 6 ]; function d3_svg_lineBasisBezier(path, x, y) { path.push("C", d3_svg_lineDot4(d3_svg_lineBasisBezier1, x), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier1, y), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier2, x), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier2, y), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, x), ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, y)); } function d3_svg_lineSlope(p0, p1) { return (p1[1] - p0[1]) / (p1[0] - p0[0]); } function d3_svg_lineFiniteDifferences(points) { var i = 0, j = points.length - 1, m = [], p0 = points[0], p1 = points[1], d = m[0] = d3_svg_lineSlope(p0, p1); while (++i < j) { m[i] = (d + (d = d3_svg_lineSlope(p0 = p1, p1 = points[i + 1]))) / 2; } m[i] = d; return m; } function d3_svg_lineMonotoneTangents(points) { var tangents = [], d, a, b, s, m = d3_svg_lineFiniteDifferences(points), i = -1, j = points.length - 1; while (++i < j) { d = d3_svg_lineSlope(points[i], points[i + 1]); if (abs(d) < ε) { m[i] = m[i + 1] = 0; } else { a = m[i] / d; b = m[i + 1] / d; s = a * a + b * b; if (s > 9) { s = d * 3 / Math.sqrt(s); m[i] = s * a; m[i + 1] = s * b; } } } i = -1; while (++i <= j) { s = (points[Math.min(j, i + 1)][0] - points[Math.max(0, i - 1)][0]) / (6 * (1 + m[i] * m[i])); tangents.push([ s || 0, m[i] * s || 0 ]); } return tangents; } function d3_svg_lineMonotone(points) { return points.length < 3 ? 
d3_svg_lineLinear(points) : points[0] + d3_svg_lineHermite(points, d3_svg_lineMonotoneTangents(points)); } d3.svg.line.radial = function() { var line = d3_svg_line(d3_svg_lineRadial); line.radius = line.x, delete line.x; line.angle = line.y, delete line.y; return line; }; function d3_svg_lineRadial(points) { var point, i = -1, n = points.length, r, a; while (++i < n) { point = points[i]; r = point[0]; a = point[1] + d3_svg_arcOffset; point[0] = r * Math.cos(a); point[1] = r * Math.sin(a); } return points; } function d3_svg_area(projection) { var x0 = d3_geom_pointX, x1 = d3_geom_pointX, y0 = 0, y1 = d3_geom_pointY, defined = d3_true, interpolate = d3_svg_lineLinear, interpolateKey = interpolate.key, interpolateReverse = interpolate, L = "L", tension = .7; function area(data) { var segments = [], points0 = [], points1 = [], i = -1, n = data.length, d, fx0 = d3_functor(x0), fy0 = d3_functor(y0), fx1 = x0 === x1 ? function() { return x; } : d3_functor(x1), fy1 = y0 === y1 ? function() { return y; } : d3_functor(y1), x, y; function segment() { segments.push("M", interpolate(projection(points1), tension), L, interpolateReverse(projection(points0.reverse()), tension), "Z"); } while (++i < n) { if (defined.call(this, d = data[i], i)) { points0.push([ x = +fx0.call(this, d, i), y = +fy0.call(this, d, i) ]); points1.push([ +fx1.call(this, d, i), +fy1.call(this, d, i) ]); } else if (points0.length) { segment(); points0 = []; points1 = []; } } if (points0.length) segment(); return segments.length ? 
segments.join("") : null; } area.x = function(_) { if (!arguments.length) return x1; x0 = x1 = _; return area; }; area.x0 = function(_) { if (!arguments.length) return x0; x0 = _; return area; }; area.x1 = function(_) { if (!arguments.length) return x1; x1 = _; return area; }; area.y = function(_) { if (!arguments.length) return y1; y0 = y1 = _; return area; }; area.y0 = function(_) { if (!arguments.length) return y0; y0 = _; return area; }; area.y1 = function(_) { if (!arguments.length) return y1; y1 = _; return area; }; area.defined = function(_) { if (!arguments.length) return defined; defined = _; return area; }; area.interpolate = function(_) { if (!arguments.length) return interpolateKey; if (typeof _ === "function") interpolateKey = interpolate = _; else interpolateKey = (interpolate = d3_svg_lineInterpolators.get(_) || d3_svg_lineLinear).key; interpolateReverse = interpolate.reverse || interpolate; L = interpolate.closed ? "M" : "L"; return area; }; area.tension = function(_) { if (!arguments.length) return tension; tension = _; return area; }; return area; } d3_svg_lineStepBefore.reverse = d3_svg_lineStepAfter; d3_svg_lineStepAfter.reverse = d3_svg_lineStepBefore; d3.svg.area = function() { return d3_svg_area(d3_identity); }; d3.svg.area.radial = function() { var area = d3_svg_area(d3_svg_lineRadial); area.radius = area.x, delete area.x; area.innerRadius = area.x0, delete area.x0; area.outerRadius = area.x1, delete area.x1; area.angle = area.y, delete area.y; area.startAngle = area.y0, delete area.y0; area.endAngle = area.y1, delete area.y1; return area; }; d3.svg.chord = function() { var source = d3_source, target = d3_target, radius = d3_svg_chordRadius, startAngle = d3_svg_arcStartAngle, endAngle = d3_svg_arcEndAngle; function chord(d, i) { var s = subgroup(this, source, d, i), t = subgroup(this, target, d, i); return "M" + s.p0 + arc(s.r, s.p1, s.a1 - s.a0) + (equals(s, t) ? 
curve(s.r, s.p1, s.r, s.p0) : curve(s.r, s.p1, t.r, t.p0) + arc(t.r, t.p1, t.a1 - t.a0) + curve(t.r, t.p1, s.r, s.p0)) + "Z"; } function subgroup(self, f, d, i) { var subgroup = f.call(self, d, i), r = radius.call(self, subgroup, i), a0 = startAngle.call(self, subgroup, i) + d3_svg_arcOffset, a1 = endAngle.call(self, subgroup, i) + d3_svg_arcOffset; return { r: r, a0: a0, a1: a1, p0: [ r * Math.cos(a0), r * Math.sin(a0) ], p1: [ r * Math.cos(a1), r * Math.sin(a1) ] }; } function equals(a, b) { return a.a0 == b.a0 && a.a1 == b.a1; } function arc(r, p, a) { return "A" + r + "," + r + " 0 " + +(a > π) + ",1 " + p; } function curve(r0, p0, r1, p1) { return "Q 0,0 " + p1; } chord.radius = function(v) { if (!arguments.length) return radius; radius = d3_functor(v); return chord; }; chord.source = function(v) { if (!arguments.length) return source; source = d3_functor(v); return chord; }; chord.target = function(v) { if (!arguments.length) return target; target = d3_functor(v); return chord; }; chord.startAngle = function(v) { if (!arguments.length) return startAngle; startAngle = d3_functor(v); return chord; }; chord.endAngle = function(v) { if (!arguments.length) return endAngle; endAngle = d3_functor(v); return chord; }; return chord; }; function d3_svg_chordRadius(d) { return d.radius; } d3.svg.diagonal = function() { var source = d3_source, target = d3_target, projection = d3_svg_diagonalProjection; function diagonal(d, i) { var p0 = source.call(this, d, i), p3 = target.call(this, d, i), m = (p0.y + p3.y) / 2, p = [ p0, { x: p0.x, y: m }, { x: p3.x, y: m }, p3 ]; p = p.map(projection); return "M" + p[0] + "C" + p[1] + " " + p[2] + " " + p[3]; } diagonal.source = function(x) { if (!arguments.length) return source; source = d3_functor(x); return diagonal; }; diagonal.target = function(x) { if (!arguments.length) return target; target = d3_functor(x); return diagonal; }; diagonal.projection = function(x) { if (!arguments.length) return projection; projection = x; return 
diagonal; }; return diagonal; }; function d3_svg_diagonalProjection(d) { return [ d.x, d.y ]; } d3.svg.diagonal.radial = function() { var diagonal = d3.svg.diagonal(), projection = d3_svg_diagonalProjection, projection_ = diagonal.projection; diagonal.projection = function(x) { return arguments.length ? projection_(d3_svg_diagonalRadialProjection(projection = x)) : projection; }; return diagonal; }; function d3_svg_diagonalRadialProjection(projection) { return function() { var d = projection.apply(this, arguments), r = d[0], a = d[1] + d3_svg_arcOffset; return [ r * Math.cos(a), r * Math.sin(a) ]; }; } d3.svg.symbol = function() { var type = d3_svg_symbolType, size = d3_svg_symbolSize; function symbol(d, i) { return (d3_svg_symbols.get(type.call(this, d, i)) || d3_svg_symbolCircle)(size.call(this, d, i)); } symbol.type = function(x) { if (!arguments.length) return type; type = d3_functor(x); return symbol; }; symbol.size = function(x) { if (!arguments.length) return size; size = d3_functor(x); return symbol; }; return symbol; }; function d3_svg_symbolSize() { return 64; } function d3_svg_symbolType() { return "circle"; } function d3_svg_symbolCircle(size) { var r = Math.sqrt(size / π); return "M0," + r + "A" + r + "," + r + " 0 1,1 0," + -r + "A" + r + "," + r + " 0 1,1 0," + r + "Z"; } var d3_svg_symbols = d3.map({ circle: d3_svg_symbolCircle, cross: function(size) { var r = Math.sqrt(size / 5) / 2; return "M" + -3 * r + "," + -r + "H" + -r + "V" + -3 * r + "H" + r + "V" + -r + "H" + 3 * r + "V" + r + "H" + r + "V" + 3 * r + "H" + -r + "V" + r + "H" + -3 * r + "Z"; }, diamond: function(size) { var ry = Math.sqrt(size / (2 * d3_svg_symbolTan30)), rx = ry * d3_svg_symbolTan30; return "M0," + -ry + "L" + rx + ",0" + " 0," + ry + " " + -rx + ",0" + "Z"; }, square: function(size) { var r = Math.sqrt(size) / 2; return "M" + -r + "," + -r + "L" + r + "," + -r + " " + r + "," + r + " " + -r + "," + r + "Z"; }, "triangle-down": function(size) { var rx = Math.sqrt(size / 
d3_svg_symbolSqrt3), ry = rx * d3_svg_symbolSqrt3 / 2; return "M0," + ry + "L" + rx + "," + -ry + " " + -rx + "," + -ry + "Z"; }, "triangle-up": function(size) { var rx = Math.sqrt(size / d3_svg_symbolSqrt3), ry = rx * d3_svg_symbolSqrt3 / 2; return "M0," + -ry + "L" + rx + "," + ry + " " + -rx + "," + ry + "Z"; } }); d3.svg.symbolTypes = d3_svg_symbols.keys(); var d3_svg_symbolSqrt3 = Math.sqrt(3), d3_svg_symbolTan30 = Math.tan(30 * d3_radians); function d3_transition(groups, id) { d3_subclass(groups, d3_transitionPrototype); groups.id = id; return groups; } var d3_transitionPrototype = [], d3_transitionId = 0, d3_transitionInheritId, d3_transitionInherit; d3_transitionPrototype.call = d3_selectionPrototype.call; d3_transitionPrototype.empty = d3_selectionPrototype.empty; d3_transitionPrototype.node = d3_selectionPrototype.node; d3_transitionPrototype.size = d3_selectionPrototype.size; d3.transition = function(selection) { return arguments.length ? d3_transitionInheritId ? selection.transition() : selection : d3_selectionRoot.transition(); }; d3.transition.prototype = d3_transitionPrototype; d3_transitionPrototype.select = function(selector) { var id = this.id, subgroups = [], subgroup, subnode, node; selector = d3_selection_selector(selector); for (var j = -1, m = this.length; ++j < m; ) { subgroups.push(subgroup = []); for (var group = this[j], i = -1, n = group.length; ++i < n; ) { if ((node = group[i]) && (subnode = selector.call(node, node.__data__, i, j))) { if ("__data__" in node) subnode.__data__ = node.__data__; d3_transitionNode(subnode, i, id, node.__transition__[id]); subgroup.push(subnode); } else { subgroup.push(null); } } } return d3_transition(subgroups, id); }; d3_transitionPrototype.selectAll = function(selector) { var id = this.id, subgroups = [], subgroup, subnodes, node, subnode, transition; selector = d3_selection_selectorAll(selector); for (var j = -1, m = this.length; ++j < m; ) { for (var group = this[j], i = -1, n = group.length; ++i < n; 
) { if (node = group[i]) { transition = node.__transition__[id]; subnodes = selector.call(node, node.__data__, i, j); subgroups.push(subgroup = []); for (var k = -1, o = subnodes.length; ++k < o; ) { if (subnode = subnodes[k]) d3_transitionNode(subnode, k, id, transition); subgroup.push(subnode); } } } } return d3_transition(subgroups, id); }; d3_transitionPrototype.filter = function(filter) { var subgroups = [], subgroup, group, node; if (typeof filter !== "function") filter = d3_selection_filter(filter); for (var j = 0, m = this.length; j < m; j++) { subgroups.push(subgroup = []); for (var group = this[j], i = 0, n = group.length; i < n; i++) { if ((node = group[i]) && filter.call(node, node.__data__, i, j)) { subgroup.push(node); } } } return d3_transition(subgroups, this.id); }; d3_transitionPrototype.tween = function(name, tween) { var id = this.id; if (arguments.length < 2) return this.node().__transition__[id].tween.get(name); return d3_selection_each(this, tween == null ? function(node) { node.__transition__[id].tween.remove(name); } : function(node) { node.__transition__[id].tween.set(name, tween); }); }; function d3_transition_tween(groups, name, value, tween) { var id = groups.id; return d3_selection_each(groups, typeof value === "function" ? function(node, i, j) { node.__transition__[id].tween.set(name, tween(value.call(node, node.__data__, i, j))); } : (value = tween(value), function(node) { node.__transition__[id].tween.set(name, value); })); } d3_transitionPrototype.attr = function(nameNS, value) { if (arguments.length < 2) { for (value in nameNS) this.attr(value, nameNS[value]); return this; } var interpolate = nameNS == "transform" ? d3_interpolateTransform : d3_interpolate, name = d3.ns.qualify(nameNS); function attrNull() { this.removeAttribute(name); } function attrNullNS() { this.removeAttributeNS(name.space, name.local); } function attrTween(b) { return b == null ? 
attrNull : (b += "", function() { var a = this.getAttribute(name), i; return a !== b && (i = interpolate(a, b), function(t) { this.setAttribute(name, i(t)); }); }); } function attrTweenNS(b) { return b == null ? attrNullNS : (b += "", function() { var a = this.getAttributeNS(name.space, name.local), i; return a !== b && (i = interpolate(a, b), function(t) { this.setAttributeNS(name.space, name.local, i(t)); }); }); } return d3_transition_tween(this, "attr." + nameNS, value, name.local ? attrTweenNS : attrTween); }; d3_transitionPrototype.attrTween = function(nameNS, tween) { var name = d3.ns.qualify(nameNS); function attrTween(d, i) { var f = tween.call(this, d, i, this.getAttribute(name)); return f && function(t) { this.setAttribute(name, f(t)); }; } function attrTweenNS(d, i) { var f = tween.call(this, d, i, this.getAttributeNS(name.space, name.local)); return f && function(t) { this.setAttributeNS(name.space, name.local, f(t)); }; } return this.tween("attr." + nameNS, name.local ? attrTweenNS : attrTween); }; d3_transitionPrototype.style = function(name, value, priority) { var n = arguments.length; if (n < 3) { if (typeof name !== "string") { if (n < 2) value = ""; for (priority in name) this.style(priority, name[priority], value); return this; } priority = ""; } function styleNull() { this.style.removeProperty(name); } function styleString(b) { return b == null ? styleNull : (b += "", function() { var a = d3_window.getComputedStyle(this, null).getPropertyValue(name), i; return a !== b && (i = d3_interpolate(a, b), function(t) { this.style.setProperty(name, i(t), priority); }); }); } return d3_transition_tween(this, "style." 
+ name, value, styleString); }; d3_transitionPrototype.styleTween = function(name, tween, priority) { if (arguments.length < 3) priority = ""; function styleTween(d, i) { var f = tween.call(this, d, i, d3_window.getComputedStyle(this, null).getPropertyValue(name)); return f && function(t) { this.style.setProperty(name, f(t), priority); }; } return this.tween("style." + name, styleTween); }; d3_transitionPrototype.text = function(value) { return d3_transition_tween(this, "text", value, d3_transition_text); }; function d3_transition_text(b) { if (b == null) b = ""; return function() { this.textContent = b; }; } d3_transitionPrototype.remove = function() { return this.each("end.transition", function() { var p; if (this.__transition__.count < 2 && (p = this.parentNode)) p.removeChild(this); }); }; d3_transitionPrototype.ease = function(value) { var id = this.id; if (arguments.length < 1) return this.node().__transition__[id].ease; if (typeof value !== "function") value = d3.ease.apply(d3, arguments); return d3_selection_each(this, function(node) { node.__transition__[id].ease = value; }); }; d3_transitionPrototype.delay = function(value) { var id = this.id; if (arguments.length < 1) return this.node().__transition__[id].delay; return d3_selection_each(this, typeof value === "function" ? function(node, i, j) { node.__transition__[id].delay = +value.call(node, node.__data__, i, j); } : (value = +value, function(node) { node.__transition__[id].delay = value; })); }; d3_transitionPrototype.duration = function(value) { var id = this.id; if (arguments.length < 1) return this.node().__transition__[id].duration; return d3_selection_each(this, typeof value === "function" ? 
function(node, i, j) { node.__transition__[id].duration = Math.max(1, value.call(node, node.__data__, i, j)); } : (value = Math.max(1, value), function(node) { node.__transition__[id].duration = value; })); }; d3_transitionPrototype.each = function(type, listener) { var id = this.id; if (arguments.length < 2) { var inherit = d3_transitionInherit, inheritId = d3_transitionInheritId; d3_transitionInheritId = id; d3_selection_each(this, function(node, i, j) { d3_transitionInherit = node.__transition__[id]; type.call(node, node.__data__, i, j); }); d3_transitionInherit = inherit; d3_transitionInheritId = inheritId; } else { d3_selection_each(this, function(node) { var transition = node.__transition__[id]; (transition.event || (transition.event = d3.dispatch("start", "end"))).on(type, listener); }); } return this; }; d3_transitionPrototype.transition = function() { var id0 = this.id, id1 = ++d3_transitionId, subgroups = [], subgroup, group, node, transition; for (var j = 0, m = this.length; j < m; j++) { subgroups.push(subgroup = []); for (var group = this[j], i = 0, n = group.length; i < n; i++) { if (node = group[i]) { transition = Object.create(node.__transition__[id0]); transition.delay += transition.duration; d3_transitionNode(node, i, id1, transition); } subgroup.push(node); } } return d3_transition(subgroups, id1); }; function d3_transitionNode(node, i, id, inherit) { var lock = node.__transition__ || (node.__transition__ = { active: 0, count: 0 }), transition = lock[id]; if (!transition) { var time = inherit.time; transition = lock[id] = { tween: new d3_Map(), time: time, ease: inherit.ease, delay: inherit.delay, duration: inherit.duration }; ++lock.count; d3.timer(function(elapsed) { var d = node.__data__, ease = transition.ease, delay = transition.delay, duration = transition.duration, timer = d3_timer_active, tweened = []; timer.t = delay + time; if (delay <= elapsed) return start(elapsed - delay); timer.c = start; function start(elapsed) { if (lock.active > 
id) return stop(); lock.active = id; transition.event && transition.event.start.call(node, d, i); transition.tween.forEach(function(key, value) { if (value = value.call(node, d, i)) { tweened.push(value); } }); d3.timer(function() { timer.c = tick(elapsed || 1) ? d3_true : tick; return 1; }, 0, time); } function tick(elapsed) { if (lock.active !== id) return stop(); var t = elapsed / duration, e = ease(t), n = tweened.length; while (n > 0) { tweened[--n].call(node, e); } if (t >= 1) { transition.event && transition.event.end.call(node, d, i); return stop(); } } function stop() { if (--lock.count) delete lock[id]; else delete node.__transition__; return 1; } }, 0, time); } } d3.svg.axis = function() { var scale = d3.scale.linear(), orient = d3_svg_axisDefaultOrient, innerTickSize = 6, outerTickSize = 6, tickPadding = 3, tickArguments_ = [ 10 ], tickValues = null, tickFormat_; function axis(g) { g.each(function() { var g = d3.select(this); var scale0 = this.__chart__ || scale, scale1 = this.__chart__ = scale.copy(); var ticks = tickValues == null ? scale1.ticks ? scale1.ticks.apply(scale1, tickArguments_) : scale1.domain() : tickValues, tickFormat = tickFormat_ == null ? scale1.tickFormat ? 
scale1.tickFormat.apply(scale1, tickArguments_) : d3_identity : tickFormat_, tick = g.selectAll(".tick").data(ticks, scale1), tickEnter = tick.enter().insert("g", ".domain").attr("class", "tick").style("opacity", ε), tickExit = d3.transition(tick.exit()).style("opacity", ε).remove(), tickUpdate = d3.transition(tick.order()).style("opacity", 1), tickTransform; var range = d3_scaleRange(scale1), path = g.selectAll(".domain").data([ 0 ]), pathUpdate = (path.enter().append("path").attr("class", "domain"), d3.transition(path)); tickEnter.append("line"); tickEnter.append("text"); var lineEnter = tickEnter.select("line"), lineUpdate = tickUpdate.select("line"), text = tick.select("text").text(tickFormat), textEnter = tickEnter.select("text"), textUpdate = tickUpdate.select("text"); switch (orient) { case "bottom": { tickTransform = d3_svg_axisX; lineEnter.attr("y2", innerTickSize); textEnter.attr("y", Math.max(innerTickSize, 0) + tickPadding); lineUpdate.attr("x2", 0).attr("y2", innerTickSize); textUpdate.attr("x", 0).attr("y", Math.max(innerTickSize, 0) + tickPadding); text.attr("dy", ".71em").style("text-anchor", "middle"); pathUpdate.attr("d", "M" + range[0] + "," + outerTickSize + "V0H" + range[1] + "V" + outerTickSize); break; } case "top": { tickTransform = d3_svg_axisX; lineEnter.attr("y2", -innerTickSize); textEnter.attr("y", -(Math.max(innerTickSize, 0) + tickPadding)); lineUpdate.attr("x2", 0).attr("y2", -innerTickSize); textUpdate.attr("x", 0).attr("y", -(Math.max(innerTickSize, 0) + tickPadding)); text.attr("dy", "0em").style("text-anchor", "middle"); pathUpdate.attr("d", "M" + range[0] + "," + -outerTickSize + "V0H" + range[1] + "V" + -outerTickSize); break; } case "left": { tickTransform = d3_svg_axisY; lineEnter.attr("x2", -innerTickSize); textEnter.attr("x", -(Math.max(innerTickSize, 0) + tickPadding)); lineUpdate.attr("x2", -innerTickSize).attr("y2", 0); textUpdate.attr("x", -(Math.max(innerTickSize, 0) + tickPadding)).attr("y", 0); text.attr("dy", 
".32em").style("text-anchor", "end"); pathUpdate.attr("d", "M" + -outerTickSize + "," + range[0] + "H0V" + range[1] + "H" + -outerTickSize); break; } case "right": { tickTransform = d3_svg_axisY; lineEnter.attr("x2", innerTickSize); textEnter.attr("x", Math.max(innerTickSize, 0) + tickPadding); lineUpdate.attr("x2", innerTickSize).attr("y2", 0); textUpdate.attr("x", Math.max(innerTickSize, 0) + tickPadding).attr("y", 0); text.attr("dy", ".32em").style("text-anchor", "start"); pathUpdate.attr("d", "M" + outerTickSize + "," + range[0] + "H0V" + range[1] + "H" + outerTickSize); break; } } if (scale1.rangeBand) { var x = scale1, dx = x.rangeBand() / 2; scale0 = scale1 = function(d) { return x(d) + dx; }; } else if (scale0.rangeBand) { scale0 = scale1; } else { tickExit.call(tickTransform, scale1); } tickEnter.call(tickTransform, scale0); tickUpdate.call(tickTransform, scale1); }); } axis.scale = function(x) { if (!arguments.length) return scale; scale = x; return axis; }; axis.orient = function(x) { if (!arguments.length) return orient; orient = x in d3_svg_axisOrients ? 
x + "" : d3_svg_axisDefaultOrient; return axis; }; axis.ticks = function() { if (!arguments.length) return tickArguments_; tickArguments_ = arguments; return axis; }; axis.tickValues = function(x) { if (!arguments.length) return tickValues; tickValues = x; return axis; }; axis.tickFormat = function(x) { if (!arguments.length) return tickFormat_; tickFormat_ = x; return axis; }; axis.tickSize = function(x) { var n = arguments.length; if (!n) return innerTickSize; innerTickSize = +x; outerTickSize = +arguments[n - 1]; return axis; }; axis.innerTickSize = function(x) { if (!arguments.length) return innerTickSize; innerTickSize = +x; return axis; }; axis.outerTickSize = function(x) { if (!arguments.length) return outerTickSize; outerTickSize = +x; return axis; }; axis.tickPadding = function(x) { if (!arguments.length) return tickPadding; tickPadding = +x; return axis; }; axis.tickSubdivide = function() { return arguments.length && axis; }; return axis; }; var d3_svg_axisDefaultOrient = "bottom", d3_svg_axisOrients = { top: 1, right: 1, bottom: 1, left: 1 }; function d3_svg_axisX(selection, x) { selection.attr("transform", function(d) { return "translate(" + x(d) + ",0)"; }); } function d3_svg_axisY(selection, y) { selection.attr("transform", function(d) { return "translate(0," + y(d) + ")"; }); } d3.svg.brush = function() { var event = d3_eventDispatch(brush, "brushstart", "brush", "brushend"), x = null, y = null, xExtent = [ 0, 0 ], yExtent = [ 0, 0 ], xExtentDomain, yExtentDomain, xClamp = true, yClamp = true, resizes = d3_svg_brushResizes[0]; function brush(g) { g.each(function() { var g = d3.select(this).style("pointer-events", "all").style("-webkit-tap-highlight-color", "rgba(0,0,0,0)").on("mousedown.brush", brushstart).on("touchstart.brush", brushstart); var background = g.selectAll(".background").data([ 0 ]); background.enter().append("rect").attr("class", "background").style("visibility", "hidden").style("cursor", "crosshair"); g.selectAll(".extent").data([ 0 
]).enter().append("rect").attr("class", "extent").style("cursor", "move"); var resize = g.selectAll(".resize").data(resizes, d3_identity); resize.exit().remove(); resize.enter().append("g").attr("class", function(d) { return "resize " + d; }).style("cursor", function(d) { return d3_svg_brushCursor[d]; }).append("rect").attr("x", function(d) { return /[ew]$/.test(d) ? -3 : null; }).attr("y", function(d) { return /^[ns]/.test(d) ? -3 : null; }).attr("width", 6).attr("height", 6).style("visibility", "hidden"); resize.style("display", brush.empty() ? "none" : null); var gUpdate = d3.transition(g), backgroundUpdate = d3.transition(background), range; if (x) { range = d3_scaleRange(x); backgroundUpdate.attr("x", range[0]).attr("width", range[1] - range[0]); redrawX(gUpdate); } if (y) { range = d3_scaleRange(y); backgroundUpdate.attr("y", range[0]).attr("height", range[1] - range[0]); redrawY(gUpdate); } redraw(gUpdate); }); } brush.event = function(g) { g.each(function() { var event_ = event.of(this, arguments), extent1 = { x: xExtent, y: yExtent, i: xExtentDomain, j: yExtentDomain }, extent0 = this.__chart__ || extent1; this.__chart__ = extent1; if (d3_transitionInheritId) { d3.select(this).transition().each("start.brush", function() { xExtentDomain = extent0.i; yExtentDomain = extent0.j; xExtent = extent0.x; yExtent = extent0.y; event_({ type: "brushstart" }); }).tween("brush:brush", function() { var xi = d3_interpolateArray(xExtent, extent1.x), yi = d3_interpolateArray(yExtent, extent1.y); xExtentDomain = yExtentDomain = null; return function(t) { xExtent = extent1.x = xi(t); yExtent = extent1.y = yi(t); event_({ type: "brush", mode: "resize" }); }; }).each("end.brush", function() { xExtentDomain = extent1.i; yExtentDomain = extent1.j; event_({ type: "brush", mode: "resize" }); event_({ type: "brushend" }); }); } else { event_({ type: "brushstart" }); event_({ type: "brush", mode: "resize" }); event_({ type: "brushend" }); } }); }; function redraw(g) { 
g.selectAll(".resize").attr("transform", function(d) { return "translate(" + xExtent[+/e$/.test(d)] + "," + yExtent[+/^s/.test(d)] + ")"; }); } function redrawX(g) { g.select(".extent").attr("x", xExtent[0]); g.selectAll(".extent,.n>rect,.s>rect").attr("width", xExtent[1] - xExtent[0]); } function redrawY(g) { g.select(".extent").attr("y", yExtent[0]); g.selectAll(".extent,.e>rect,.w>rect").attr("height", yExtent[1] - yExtent[0]); } function brushstart() { var target = this, eventTarget = d3.select(d3.event.target), event_ = event.of(target, arguments), g = d3.select(target), resizing = eventTarget.datum(), resizingX = !/^(n|s)$/.test(resizing) && x, resizingY = !/^(e|w)$/.test(resizing) && y, dragging = eventTarget.classed("extent"), dragRestore = d3_event_dragSuppress(), center, origin = d3.mouse(target), offset; var w = d3.select(d3_window).on("keydown.brush", keydown).on("keyup.brush", keyup); if (d3.event.changedTouches) { w.on("touchmove.brush", brushmove).on("touchend.brush", brushend); } else { w.on("mousemove.brush", brushmove).on("mouseup.brush", brushend); } g.interrupt().selectAll("*").interrupt(); if (dragging) { origin[0] = xExtent[0] - origin[0]; origin[1] = yExtent[0] - origin[1]; } else if (resizing) { var ex = +/w$/.test(resizing), ey = +/^n/.test(resizing); offset = [ xExtent[1 - ex] - origin[0], yExtent[1 - ey] - origin[1] ]; origin[0] = xExtent[ex]; origin[1] = yExtent[ey]; } else if (d3.event.altKey) center = origin.slice(); g.style("pointer-events", "none").selectAll(".resize").style("display", null); d3.select("body").style("cursor", eventTarget.style("cursor")); event_({ type: "brushstart" }); brushmove(); function keydown() { if (d3.event.keyCode == 32) { if (!dragging) { center = null; origin[0] -= xExtent[1]; origin[1] -= yExtent[1]; dragging = 2; } d3_eventPreventDefault(); } } function keyup() { if (d3.event.keyCode == 32 && dragging == 2) { origin[0] += xExtent[1]; origin[1] += yExtent[1]; dragging = 0; d3_eventPreventDefault(); } } 
function brushmove() { var point = d3.mouse(target), moved = false; if (offset) { point[0] += offset[0]; point[1] += offset[1]; } if (!dragging) { if (d3.event.altKey) { if (!center) center = [ (xExtent[0] + xExtent[1]) / 2, (yExtent[0] + yExtent[1]) / 2 ]; origin[0] = xExtent[+(point[0] < center[0])]; origin[1] = yExtent[+(point[1] < center[1])]; } else center = null; } if (resizingX && move1(point, x, 0)) { redrawX(g); moved = true; } if (resizingY && move1(point, y, 1)) { redrawY(g); moved = true; } if (moved) { redraw(g); event_({ type: "brush", mode: dragging ? "move" : "resize" }); } } function move1(point, scale, i) { var range = d3_scaleRange(scale), r0 = range[0], r1 = range[1], position = origin[i], extent = i ? yExtent : xExtent, size = extent[1] - extent[0], min, max; if (dragging) { r0 -= position; r1 -= size + position; } min = (i ? yClamp : xClamp) ? Math.max(r0, Math.min(r1, point[i])) : point[i]; if (dragging) { max = (min += position) + size; } else { if (center) position = Math.max(r0, Math.min(r1, 2 * center[i] - min)); if (position < min) { max = min; min = position; } else { max = position; } } if (extent[0] != min || extent[1] != max) { if (i) yExtentDomain = null; else xExtentDomain = null; extent[0] = min; extent[1] = max; return true; } } function brushend() { brushmove(); g.style("pointer-events", "all").selectAll(".resize").style("display", brush.empty() ? "none" : null); d3.select("body").style("cursor", null); w.on("mousemove.brush", null).on("mouseup.brush", null).on("touchmove.brush", null).on("touchend.brush", null).on("keydown.brush", null).on("keyup.brush", null); dragRestore(); event_({ type: "brushend" }); } } brush.x = function(z) { if (!arguments.length) return x; x = z; resizes = d3_svg_brushResizes[!x << 1 | !y]; return brush; }; brush.y = function(z) { if (!arguments.length) return y; y = z; resizes = d3_svg_brushResizes[!x << 1 | !y]; return brush; }; brush.clamp = function(z) { if (!arguments.length) return x && y ? 
[ xClamp, yClamp ] : x ? xClamp : y ? yClamp : null; if (x && y) xClamp = !!z[0], yClamp = !!z[1]; else if (x) xClamp = !!z; else if (y) yClamp = !!z; return brush; }; brush.extent = function(z) { var x0, x1, y0, y1, t; if (!arguments.length) { if (x) { if (xExtentDomain) { x0 = xExtentDomain[0], x1 = xExtentDomain[1]; } else { x0 = xExtent[0], x1 = xExtent[1]; if (x.invert) x0 = x.invert(x0), x1 = x.invert(x1); if (x1 < x0) t = x0, x0 = x1, x1 = t; } } if (y) { if (yExtentDomain) { y0 = yExtentDomain[0], y1 = yExtentDomain[1]; } else { y0 = yExtent[0], y1 = yExtent[1]; if (y.invert) y0 = y.invert(y0), y1 = y.invert(y1); if (y1 < y0) t = y0, y0 = y1, y1 = t; } } return x && y ? [ [ x0, y0 ], [ x1, y1 ] ] : x ? [ x0, x1 ] : y && [ y0, y1 ]; } if (x) { x0 = z[0], x1 = z[1]; if (y) x0 = x0[0], x1 = x1[0]; xExtentDomain = [ x0, x1 ]; if (x.invert) x0 = x(x0), x1 = x(x1); if (x1 < x0) t = x0, x0 = x1, x1 = t; if (x0 != xExtent[0] || x1 != xExtent[1]) xExtent = [ x0, x1 ]; } if (y) { y0 = z[0], y1 = z[1]; if (x) y0 = y0[1], y1 = y1[1]; yExtentDomain = [ y0, y1 ]; if (y.invert) y0 = y(y0), y1 = y(y1); if (y1 < y0) t = y0, y0 = y1, y1 = t; if (y0 != yExtent[0] || y1 != yExtent[1]) yExtent = [ y0, y1 ]; } return brush; }; brush.clear = function() { if (!brush.empty()) { xExtent = [ 0, 0 ], yExtent = [ 0, 0 ]; xExtentDomain = yExtentDomain = null; } return brush; }; brush.empty = function() { return !!x && xExtent[0] == xExtent[1] || !!y && yExtent[0] == yExtent[1]; }; return d3.rebind(brush, event, "on"); }; var d3_svg_brushCursor = { n: "ns-resize", e: "ew-resize", s: "ns-resize", w: "ew-resize", nw: "nwse-resize", ne: "nesw-resize", se: "nwse-resize", sw: "nesw-resize" }; var d3_svg_brushResizes = [ [ "n", "e", "s", "w", "nw", "ne", "se", "sw" ], [ "e", "w" ], [ "n", "s" ], [] ]; var d3_time_format = d3_time.format = d3_locale_enUS.timeFormat; var d3_time_formatUtc = d3_time_format.utc; var d3_time_formatIso = d3_time_formatUtc("%Y-%m-%dT%H:%M:%S.%LZ"); d3_time_format.iso 
= Date.prototype.toISOString && +new Date("2000-01-01T00:00:00.000Z") ? d3_time_formatIsoNative : d3_time_formatIso; function d3_time_formatIsoNative(date) { return date.toISOString(); } d3_time_formatIsoNative.parse = function(string) { var date = new Date(string); return isNaN(date) ? null : date; }; d3_time_formatIsoNative.toString = d3_time_formatIso.toString; d3_time.second = d3_time_interval(function(date) { return new d3_date(Math.floor(date / 1e3) * 1e3); }, function(date, offset) { date.setTime(date.getTime() + Math.floor(offset) * 1e3); }, function(date) { return date.getSeconds(); }); d3_time.seconds = d3_time.second.range; d3_time.seconds.utc = d3_time.second.utc.range; d3_time.minute = d3_time_interval(function(date) { return new d3_date(Math.floor(date / 6e4) * 6e4); }, function(date, offset) { date.setTime(date.getTime() + Math.floor(offset) * 6e4); }, function(date) { return date.getMinutes(); }); d3_time.minutes = d3_time.minute.range; d3_time.minutes.utc = d3_time.minute.utc.range; d3_time.hour = d3_time_interval(function(date) { var timezone = date.getTimezoneOffset() / 60; return new d3_date((Math.floor(date / 36e5 - timezone) + timezone) * 36e5); }, function(date, offset) { date.setTime(date.getTime() + Math.floor(offset) * 36e5); }, function(date) { return date.getHours(); }); d3_time.hours = d3_time.hour.range; d3_time.hours.utc = d3_time.hour.utc.range; d3_time.month = d3_time_interval(function(date) { date = d3_time.day(date); date.setDate(1); return date; }, function(date, offset) { date.setMonth(date.getMonth() + offset); }, function(date) { return date.getMonth(); }); d3_time.months = d3_time.month.range; d3_time.months.utc = d3_time.month.utc.range; function d3_time_scale(linear, methods, format) { function scale(x) { return linear(x); } scale.invert = function(x) { return d3_time_scaleDate(linear.invert(x)); }; scale.domain = function(x) { if (!arguments.length) return linear.domain().map(d3_time_scaleDate); linear.domain(x); return 
scale; }; function tickMethod(extent, count) { var span = extent[1] - extent[0], target = span / count, i = d3.bisect(d3_time_scaleSteps, target); return i == d3_time_scaleSteps.length ? [ methods.year, d3_scale_linearTickRange(extent.map(function(d) { return d / 31536e6; }), count)[2] ] : !i ? [ d3_time_scaleMilliseconds, d3_scale_linearTickRange(extent, count)[2] ] : methods[target / d3_time_scaleSteps[i - 1] < d3_time_scaleSteps[i] / target ? i - 1 : i]; } scale.nice = function(interval, skip) { var domain = scale.domain(), extent = d3_scaleExtent(domain), method = interval == null ? tickMethod(extent, 10) : typeof interval === "number" && tickMethod(extent, interval); if (method) interval = method[0], skip = method[1]; function skipped(date) { return !isNaN(date) && !interval.range(date, d3_time_scaleDate(+date + 1), skip).length; } return scale.domain(d3_scale_nice(domain, skip > 1 ? { floor: function(date) { while (skipped(date = interval.floor(date))) date = d3_time_scaleDate(date - 1); return date; }, ceil: function(date) { while (skipped(date = interval.ceil(date))) date = d3_time_scaleDate(+date + 1); return date; } } : interval)); }; scale.ticks = function(interval, skip) { var extent = d3_scaleExtent(scale.domain()), method = interval == null ? tickMethod(extent, 10) : typeof interval === "number" ? tickMethod(extent, interval) : !interval.range && [ { range: interval }, skip ]; if (method) interval = method[0], skip = method[1]; return interval.range(extent[0], d3_time_scaleDate(+extent[1] + 1), skip < 1 ? 
1 : skip); }; scale.tickFormat = function() { return format; }; scale.copy = function() { return d3_time_scale(linear.copy(), methods, format); }; return d3_scale_linearRebind(scale, linear); } function d3_time_scaleDate(t) { return new Date(t); } var d3_time_scaleSteps = [ 1e3, 5e3, 15e3, 3e4, 6e4, 3e5, 9e5, 18e5, 36e5, 108e5, 216e5, 432e5, 864e5, 1728e5, 6048e5, 2592e6, 7776e6, 31536e6 ]; var d3_time_scaleLocalMethods = [ [ d3_time.second, 1 ], [ d3_time.second, 5 ], [ d3_time.second, 15 ], [ d3_time.second, 30 ], [ d3_time.minute, 1 ], [ d3_time.minute, 5 ], [ d3_time.minute, 15 ], [ d3_time.minute, 30 ], [ d3_time.hour, 1 ], [ d3_time.hour, 3 ], [ d3_time.hour, 6 ], [ d3_time.hour, 12 ], [ d3_time.day, 1 ], [ d3_time.day, 2 ], [ d3_time.week, 1 ], [ d3_time.month, 1 ], [ d3_time.month, 3 ], [ d3_time.year, 1 ] ]; var d3_time_scaleLocalFormat = d3_time_format.multi([ [ ".%L", function(d) { return d.getMilliseconds(); } ], [ ":%S", function(d) { return d.getSeconds(); } ], [ "%I:%M", function(d) { return d.getMinutes(); } ], [ "%I %p", function(d) { return d.getHours(); } ], [ "%a %d", function(d) { return d.getDay() && d.getDate() != 1; } ], [ "%b %d", function(d) { return d.getDate() != 1; } ], [ "%B", function(d) { return d.getMonth(); } ], [ "%Y", d3_true ] ]); var d3_time_scaleMilliseconds = { range: function(start, stop, step) { return d3.range(Math.ceil(start / step) * step, +stop, step).map(d3_time_scaleDate); }, floor: d3_identity, ceil: d3_identity }; d3_time_scaleLocalMethods.year = d3_time.year; d3_time.scale = function() { return d3_time_scale(d3.scale.linear(), d3_time_scaleLocalMethods, d3_time_scaleLocalFormat); }; var d3_time_scaleUtcMethods = d3_time_scaleLocalMethods.map(function(m) { return [ m[0].utc, m[1] ]; }); var d3_time_scaleUtcFormat = d3_time_formatUtc.multi([ [ ".%L", function(d) { return d.getUTCMilliseconds(); } ], [ ":%S", function(d) { return d.getUTCSeconds(); } ], [ "%I:%M", function(d) { return d.getUTCMinutes(); } ], [ "%I 
%p", function(d) { return d.getUTCHours(); } ], [ "%a %d", function(d) { return d.getUTCDay() && d.getUTCDate() != 1; } ], [ "%b %d", function(d) { return d.getUTCDate() != 1; } ], [ "%B", function(d) { return d.getUTCMonth(); } ], [ "%Y", d3_true ] ]); d3_time_scaleUtcMethods.year = d3_time.year.utc; d3_time.scale.utc = function() { return d3_time_scale(d3.scale.linear(), d3_time_scaleUtcMethods, d3_time_scaleUtcFormat); }; d3.text = d3_xhrType(function(request) { return request.responseText; }); d3.json = function(url, callback) { return d3_xhr(url, "application/json", d3_json, callback); }; function d3_json(request) { return JSON.parse(request.responseText); } d3.html = function(url, callback) { return d3_xhr(url, "text/html", d3_html, callback); }; function d3_html(request) { var range = d3_document.createRange(); range.selectNode(d3_document.body); return range.createContextualFragment(request.responseText); } d3.xml = d3_xhrType(function(request) { return request.responseXML; }); if (typeof define === "function" && define.amd) { define(d3); } else if (typeof module === "object" && module.exports) { module.exports = d3; } else { this.d3 = d3; } }(); nipype-1.7.0/nipype/external/due.py000066400000000000000000000033751413403311400173020ustar00rootroot00000000000000# emacs: at the end of the file # ex: set sts=4 ts=4 sw=4 et: # ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### # """ Stub file for a guaranteed safe import of duecredit constructs: if duecredit is not available. To use it, place it into your project codebase to be imported, e.g. copy as cp stub.py /path/tomodule/module/due.py Note that it might be better to avoid naming it duecredit.py to avoid shadowing installed duecredit. Then use in your code as from .due import due, Doi, BibTeX See https://github.com/duecredit/duecredit/blob/master/README.md for examples. 
Origin: Originally a part of the duecredit Copyright: 2015-2016 DueCredit developers License: BSD-2 """ __version__ = "0.0.5" class InactiveDueCreditCollector(object): """Just a stub at the Collector which would not do anything""" def _donothing(self, *args, **kwargs): """Perform no good and no bad""" pass def dcite(self, *args, **kwargs): """If I could cite I would""" def nondecorating_decorator(func): return func return nondecorating_decorator cite = load = add = _donothing def __repr__(self): return "{}()".format(self.__class__.__name__) def _donothing_func(*args, **kwargs): """Perform no good and no bad""" pass try: from duecredit import due, BibTeX, Doi, Url if "due" in locals() and not hasattr(due, "cite"): raise RuntimeError("Imported due lacks .cite. DueCredit is now disabled") except ImportError: # Initiate due stub due = InactiveDueCreditCollector() BibTeX = Doi = Url = _donothing_func # Emacs mode definitions # Local Variables: # mode: python # py-indent-offset: 4 # tab-width: 4 # indent-tabs-mode: nil # End: nipype-1.7.0/nipype/external/fsl_imglob.py000077500000000000000000000135641413403311400206460ustar00rootroot00000000000000#!/usr/bin/env python # imglob - expand list of image filenames # Stephen Smith, Mark Jenkinson & Matthew Webster FMRIB Image Analysis Group # Copyright (C) 2009 University of Oxford # Part of FSL - FMRIB's Software Library # http://www.fmrib.ox.ac.uk/fsl # fsl@fmrib.ox.ac.uk # # Developed at FMRIB (Oxford Centre for Functional Magnetic Resonance # Imaging of the Brain), Department of Clinical Neurology, Oxford # University, Oxford, UK # # # LICENCE # # FMRIB Software Library, Release 5.0 (c) 2012, The University of # Oxford (the "Software") # # The Software remains the property of the University of Oxford ("the # University"). 
# # The Software is distributed "AS IS" under this Licence solely for # non-commercial use in the hope that it will be useful, but in order # that the University as a charitable foundation protects its assets for # the benefit of its educational and research purposes, the University # makes clear that no condition is made or to be implied, nor is any # warranty given or to be implied, as to the accuracy of the Software, # or that it will be suitable for any particular purpose or for use # under any specific conditions. Furthermore, the University disclaims # all responsibility for the use which is made of the Software. It # further disclaims any liability for the outcomes arising from using # the Software. # # The Licensee agrees to indemnify the University and hold the # University harmless from and against any and all claims, damages and # liabilities asserted by third parties (including claims for # negligence) which arise directly or indirectly from the use of the # Software or the sale of any products based on the Software. # # No part of the Software may be reproduced, modified, transmitted or # transferred in any form or by any means, electronic or mechanical, # without the express permission of the University. The permission of # the University is not required if the said reproduction, modification, # transmission or transference is done without financial return, the # conditions of this Licence are imposed upon the receiver of the # product, and all original and amended source code is included in any # transmitted product. You may be held legally responsible for any # copyright infringement that is caused or encouraged by your failure to # abide by these terms and conditions. # # You are not permitted under this Licence to use this Software # commercially. 
Use for which any financial return is received shall be # defined as commercial use, and includes (1) integration of all or part # of the source code or the Software into a product for sale or license # by or on behalf of Licensee to third parties or (2) use of the # Software or any derivative of it for research with the final aim of # developing software products for sale or license to a third party or # (3) use of the Software or any derivative of it for research with the # final aim of developing non-software products for sale or license to a # third party, or (4) use of the Software to provide any service to an # external organisation for which payment is received. If you are # interested in using the Software commercially, please contact Isis # Innovation Limited ("Isis"), the technology transfer company of the # University, to negotiate a licence. Contact details are: # innovation@isis.ox.ac.uk quoting reference DE/9564. import sys import glob from builtins import range def usage(): print("Usage: $0 [-extension/extensions] ") print(" -extension for one image with full extension") print(" -extensions for image list with full extensions") sys.exit(1) # Returns whether an input filename has an image extension ( and the # basename and extension pair ) def isImage(input, allExtensions): for extension in allExtensions: if input[-len(extension) :] == extension: return True, input[: -len(extension)], extension return False, input, "" def removeImageExtension(input, allExtensions): return isImage(input, allExtensions)[1] def main(): if len(sys.argv) <= 1: usage() if sys.version_info < (2, 4): import sets from sets import Set setAvailable = False else: setAvailable = True deleteExtensions = True primaryExtensions = [".nii.gz", ".nii", ".hdr.gz", ".hdr"] secondaryExtensions = [".img.gz", ".img"] allExtensions = primaryExtensions + secondaryExtensions validExtensions = primaryExtensions startingArg = 1 if sys.argv[1] == "-extensions": validExtensions = allExtensions 
deleteExtensions = False startingArg = 2 if sys.argv[1] == "-extension": deleteExtensions = False startingArg = 2 filelist = [] for arg in range(startingArg, len(sys.argv)): # #These if enables a "pedantic" style mode currently not used # if isImage(sys.argv[arg],allExtensions)[0]: # filelist.extend(glob.glob(sys.argv[arg])) # else: # for currentExtension in validExtensions: # filelist.extend(glob.glob(sys.argv[arg]+currentExtension)) for currentExtension in validExtensions: filelist.extend( glob.glob( removeImageExtension(sys.argv[arg], allExtensions) + currentExtension ) ) if deleteExtensions: for file in range(0, len(filelist)): filelist[file] = removeImageExtension(filelist[file], allExtensions) if setAvailable: filelist = list(set(filelist)) else: filelist = list(Set(filelist)) filelist.sort() for file in range(0, len(filelist)): print(filelist[file], end=" ") if file < len(filelist) - 1: print(" ", end=" ") if __name__ == "__main__": main() nipype-1.7.0/nipype/info.py000066400000000000000000000144201413403311400156270ustar00rootroot00000000000000""" This file contains defines parameters for nipy that we use to fill settings in setup.py, the nipy top-level docstring, and for building the docs. In setup.py in particular, we exec this file, so it cannot import nipy """ # nipype version information # Remove -dev for release __version__ = "1.7.0" def get_nipype_gitversion(): """Nipype version as reported by the last commit in git Returns ------- None or str Version of Nipype according to git. 
""" import os import subprocess try: import nipype gitpath = os.path.realpath( os.path.join(os.path.dirname(nipype.__file__), os.path.pardir) ) except: gitpath = os.getcwd() gitpathgit = os.path.join(gitpath, ".git") if not os.path.exists(gitpathgit): return None ver = None try: o, _ = subprocess.Popen( "git describe", shell=True, cwd=gitpath, stdout=subprocess.PIPE ).communicate() except Exception: pass else: ver = o.decode().strip().split("-")[-1] return ver if __version__.endswith("-dev"): gitversion = get_nipype_gitversion() if gitversion: __version__ = "{}+{}".format(__version__, gitversion) CLASSIFIERS = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Science/Research", "License :: OSI Approved :: Apache Software License", "Operating System :: MacOS :: MacOS X", "Operating System :: POSIX :: Linux", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Topic :: Scientific/Engineering", ] PYTHON_REQUIRES = ">= 3.6" description = "Neuroimaging in Python: Pipelines and Interfaces" # Note: this long_description is actually a copy/paste from the top-level # README.txt, so that it shows up nicely on PyPI. So please remember to edit # it only in one place and sync it correctly. long_description = """======================================================== NIPYPE: Neuroimaging in Python: Pipelines and Interfaces ======================================================== Current neuroimaging software offer users an incredible opportunity to analyze data using a variety of different algorithms. However, this has resulted in a heterogeneous collection of specialized applications without transparent interoperability or a uniform operating interface. 
*Nipype*, an open-source, community-developed initiative under the umbrella of `NiPy `_, is a Python project that provides a uniform interface to existing neuroimaging software and facilitates interaction between these packages within a single workflow. Nipype provides an environment that encourages interactive exploration of algorithms from different packages (e.g., AFNI, ANTS, BRAINS, BrainSuite, Camino, FreeSurfer, FSL, MNE, MRtrix, MNE, Nipy, Slicer, SPM), eases the design of workflows within and between packages, and reduces the learning curve necessary to use different \ packages. Nipype is creating a collaborative platform for neuroimaging \ software development in a high-level language and addressing limitations of \ existing pipeline systems. *Nipype* allows you to: * easily interact with tools from different software packages * combine processing steps from different software packages * develop new workflows faster by reusing common steps from old ones * process data faster by running it in parallel on many cores/machines * make your research easily reproducible * share your processing workflows with the community """ # versions NIBABEL_MIN_VERSION = "2.1.0" NETWORKX_MIN_VERSION = "2.0" # Numpy bug in python 3.7: # https://www.opensourceanswers.com/blog/you-shouldnt-use-python-37-for-data-science-right-now.html NUMPY_MIN_VERSION = "1.15.3" SCIPY_MIN_VERSION = "0.14" TRAITS_MIN_VERSION = "4.6" DATEUTIL_MIN_VERSION = "2.2" FUTURE_MIN_VERSION = "0.16.0" SIMPLEJSON_MIN_VERSION = "3.8.0" PROV_VERSION = "1.5.2" RDFLIB_MIN_VERSION = "5.0.0" CLICK_MIN_VERSION = "6.6.0" PYDOT_MIN_VERSION = "1.2.3" NAME = "nipype" MAINTAINER = "nipype developers" MAINTAINER_EMAIL = "neuroimaging@python.org" DESCRIPTION = description LONG_DESCRIPTION = long_description URL = "http://nipy.org/nipype" DOWNLOAD_URL = "http://github.com/nipy/nipype/archives/master" LICENSE = "Apache License, 2.0" AUTHOR = "nipype developers" AUTHOR_EMAIL = "neuroimaging@python.org" PLATFORMS = "OS 
Independent" MAJOR = __version__.split(".")[0] MINOR = __version__.split(".")[1] MICRO = __version__.replace("-", ".").split(".")[2] ISRELEASE = ( len(__version__.replace("-", ".").split(".")) == 3 or "post" in __version__.replace("-", ".").split(".")[-1] ) VERSION = __version__ PROVIDES = ["nipype"] REQUIRES = [ "click>=%s" % CLICK_MIN_VERSION, "networkx>=%s" % NETWORKX_MIN_VERSION, "nibabel>=%s" % NIBABEL_MIN_VERSION, "numpy>=%s" % NUMPY_MIN_VERSION, "packaging", "prov>=%s" % PROV_VERSION, "pydot>=%s" % PYDOT_MIN_VERSION, "python-dateutil>=%s" % DATEUTIL_MIN_VERSION, "rdflib>=%s" % RDFLIB_MIN_VERSION, "scipy>=%s" % SCIPY_MIN_VERSION, "simplejson>=%s" % SIMPLEJSON_MIN_VERSION, "traits>=%s,!=5.0" % TRAITS_MIN_VERSION, "filelock>=3.0.0", "etelemetry>=0.2.0", ] TESTS_REQUIRES = [ "codecov", "coverage<5", "pytest", "pytest-cov", "pytest-env", "pytest-timeout", ] EXTRA_REQUIRES = { "data": ["datalad"], "doc": [ "dipy!=1.4.1", "ipython", "matplotlib", "nbsphinx", "sphinx-argparse", "sphinx>=2.1.2", "sphinxcontrib-apidoc", "sphinxcontrib-napoleon", ], "duecredit": ["duecredit"], "nipy": ["nitime", "nilearn", "dipy!=1.4.1", "nipy", "matplotlib"], "profiler": ["psutil>=5.0"], "pybids": ["pybids>=0.7.0"], "specs": ["black"], "ssh": ["paramiko"], "tests": TESTS_REQUIRES, "xvfbwrapper": ["xvfbwrapper"], # 'mesh': ['mayavi'] # Enable when it works } def _list_union(iterable): return list(set(sum(iterable, []))) # Enable a handle to install all extra dependencies at once EXTRA_REQUIRES["all"] = _list_union(EXTRA_REQUIRES.values()) # dev = doc + tests + specs EXTRA_REQUIRES["dev"] = _list_union( val for key, val in EXTRA_REQUIRES.items() if key in ("doc", "tests", "specs") ) STATUS = "stable" nipype-1.7.0/nipype/interfaces/000077500000000000000000000000001413403311400164445ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/__init__.py000066400000000000000000000006641413403311400205630ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; 
py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Package contains interfaces for using existing functionality in other packages Requires Packages to be installed """ __docformat__ = "restructuredtext" from .io import DataGrabber, DataSink, SelectFiles, BIDSDataGrabber from .utility import IdentityInterface, Rename, Function, Select, Merge nipype-1.7.0/nipype/interfaces/afni/000077500000000000000000000000001413403311400173615ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/afni/__init__.py000066400000000000000000000026521413403311400214770ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ AFNI_ is a software suite for the analysis and display of anatomical and functional MRI data. .. include:: ../../../doc/links_names.txt """ from .base import Info from .preprocess import ( AlignEpiAnatPy, Allineate, Automask, AutoTcorrelate, AutoTLRC, Bandpass, BlurInMask, BlurToFWHM, ClipLevel, DegreeCentrality, Despike, Detrend, ECM, Fim, Fourier, Hist, LFCD, Maskave, Means, NetCorr, OutlierCount, QualityIndex, ROIStats, Retroicor, Seg, SkullStrip, TCorr1D, TCorrMap, TCorrelate, TNorm, TProject, TShift, TSmooth, Volreg, Warp, QwarpPlusMinus, Qwarp, ) from .svm import SVMTest, SVMTrain from .utils import ( ABoverlap, AFNItoNIFTI, Autobox, Axialize, BrickStat, Bucket, Calc, Cat, CatMatvec, CenterMass, ConvertDset, Copy, Dot, Edge3, Eval, FWHMx, LocalBistat, Localstat, MaskTool, Merge, Notes, NwarpApply, NwarpAdjust, NwarpCat, OneDToolPy, Refit, ReHo, Resample, TCat, TCatSubBrick, TStat, To3D, Unifize, Undump, ZCutUp, GCOR, Zcat, Zeropad, ) from .model import Deconvolve, Remlfit, Synthesize nipype-1.7.0/nipype/interfaces/afni/base.py000066400000000000000000000235471413403311400206600ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: 
set ft=python sts=4 ts=4 sw=4 et: """Provide a base interface to AFNI commands.""" import os from sys import platform from distutils import spawn from ... import logging, LooseVersion from ...utils.filemanip import split_filename, fname_presuffix from ..base import ( CommandLine, traits, CommandLineInputSpec, isdefined, File, TraitedSpec, PackageInfo, ) from ...external.due import BibTeX # Use nipype's logging system IFLOGGER = logging.getLogger("nipype.interface") class Info(PackageInfo): """Handle afni output type and version information.""" __outputtype = "AFNI" ftypes = {"NIFTI": ".nii", "AFNI": "", "NIFTI_GZ": ".nii.gz"} version_cmd = "afni --version" @staticmethod def parse_version(raw_info): """Check and parse AFNI's version.""" version_stamp = raw_info.split("\n")[0].split("Version ")[1] if version_stamp.startswith("AFNI"): version_stamp = version_stamp.split("AFNI_")[1] elif version_stamp.startswith("Debian"): version_stamp = version_stamp.split("Debian-")[1].split("~")[0] else: return None version = LooseVersion(version_stamp.replace("_", ".")).version[:3] if version[0] < 1000: version[0] = version[0] + 2000 return tuple(version) @classmethod def output_type_to_ext(cls, outputtype): """ Get the file extension for the given output type. Parameters ---------- outputtype : {'NIFTI', 'NIFTI_GZ', 'AFNI'} String specifying the output type. Returns ------- extension : str The file extension for the output type. """ try: return cls.ftypes[outputtype] except KeyError as e: msg = "Invalid AFNIOUTPUTTYPE: ", outputtype raise KeyError(msg) from e @classmethod def outputtype(cls): """ Set default output filetype. AFNI has no environment variables, Output filetypes get set in command line calls Nipype uses ``AFNI`` as default Returns ------- None """ return "AFNI" @staticmethod def standard_image(img_name): """ Grab an image from the standard location. 
Could be made more fancy to allow for more relocatability """ clout = CommandLine( "which afni", ignore_exception=True, resource_monitor=False, terminal_output="allatonce", ).run() if clout.runtime.returncode != 0: return None out = clout.runtime.stdout basedir = os.path.split(out)[0] return os.path.join(basedir, img_name) class AFNICommandBase(CommandLine): """ A base class to fix a linking problem in OSX and AFNI. See Also -------- `This thread `__ about the particular environment variable that fixes this problem. """ def _run_interface(self, runtime, correct_return_codes=(0,)): if platform == "darwin": runtime.environ["DYLD_FALLBACK_LIBRARY_PATH"] = "/usr/local/afni/" return super(AFNICommandBase, self)._run_interface( runtime, correct_return_codes ) class AFNICommandInputSpec(CommandLineInputSpec): num_threads = traits.Int( 1, usedefault=True, nohash=True, desc="set number of threads" ) outputtype = traits.Enum( "AFNI", list(Info.ftypes.keys()), desc="AFNI output filetype" ) out_file = File( name_template="%s_afni", desc="output image file name", argstr="-prefix %s", name_source=["in_file"], ) class AFNICommandOutputSpec(TraitedSpec): out_file = File(desc="output file", exists=True) class AFNICommand(AFNICommandBase): """Shared options for several AFNI commands.""" input_spec = AFNICommandInputSpec _outputtype = None _references = [ { "entry": BibTeX( "@article{Cox1996," "author={R.W. Cox}," "title={AFNI: software for analysis and " "visualization of functional magnetic " "resonance neuroimages}," "journal={Computers and Biomedical research}," "volume={29}," "number={3}," "pages={162-173}," "year={1996}," "}" ), "tags": ["implementation"], }, { "entry": BibTeX( "@article{CoxHyde1997," "author={R.W. Cox and J.S. 
Hyde}," "title={Software tools for analysis and " "visualization of fMRI data}," "journal={NMR in Biomedicine}," "volume={10}," "number={45}," "pages={171-178}," "year={1997}," "}" ), "tags": ["implementation"], }, ] @property def num_threads(self): """Get number of threads.""" return self.inputs.num_threads @num_threads.setter def num_threads(self, value): self.inputs.num_threads = value @classmethod def set_default_output_type(cls, outputtype): """ Set the default output type for AFNI classes. This method is used to set the default output type for all afni subclasses. However, setting this will not update the output type for any existing instances. For these, assign the .inputs.outputtype. """ if outputtype in Info.ftypes: cls._outputtype = outputtype else: raise AttributeError("Invalid AFNI outputtype: %s" % outputtype) def __init__(self, **inputs): """Instantiate an AFNI command tool wrapper.""" super(AFNICommand, self).__init__(**inputs) self.inputs.on_trait_change(self._output_update, "outputtype") if hasattr(self.inputs, "num_threads"): self.inputs.on_trait_change(self._nthreads_update, "num_threads") if self._outputtype is None: self._outputtype = Info.outputtype() if not isdefined(self.inputs.outputtype): self.inputs.outputtype = self._outputtype else: self._output_update() def _nthreads_update(self): """Update environment with new number of threads.""" self.inputs.environ["OMP_NUM_THREADS"] = "%d" % self.inputs.num_threads def _output_update(self): """ Update the internal property with the provided input. i think? 
updates class private attribute based on instance input in fsl also updates ENVIRON variable....not valid in afni as it uses no environment variables """ self._outputtype = self.inputs.outputtype def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) return os.path.join( path, base + Info.output_type_to_ext(self.inputs.outputtype) ) def _list_outputs(self): outputs = super(AFNICommand, self)._list_outputs() metadata = dict(name_source=lambda t: t is not None) out_names = list(self.inputs.traits(**metadata).keys()) if out_names: for name in out_names: if outputs[name]: _, _, ext = split_filename(outputs[name]) if ext == "": outputs[name] = outputs[name] + "+orig.BRIK" return outputs def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, ext=None): """ Generate a filename based on the given parameters. The filename will take the form: cwd/basename. If change_ext is True, it will use the extentions specified in intputs.output_type. Parameters ---------- basename : str Filename to base the new filename on. cwd : str Path to prefix to the new filename. (default is os.getcwd()) suffix : str Suffix to add to the `basename`. (defaults is '' ) change_ext : bool Flag to change the filename extension to the FSL output type. (default True) Returns ------- fname : str New filename based on given parameters. """ if not basename: msg = "Unable to generate filename for command %s. " % self.cmd msg += "basename is not set!" 
raise ValueError(msg) if cwd is None: cwd = os.getcwd() if ext is None: ext = Info.output_type_to_ext(self.inputs.outputtype) if change_ext: suffix = "".join((suffix, ext)) if suffix else ext if suffix is None: suffix = "" fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) return fname class AFNIPythonCommandInputSpec(CommandLineInputSpec): outputtype = traits.Enum( "AFNI", list(Info.ftypes.keys()), desc="AFNI output filetype" ) py27_path = traits.Either( "python2", File(exists=True), usedefault=True, default="python2" ) class AFNIPythonCommand(AFNICommand): """A subtype of AFNI command line for Python scripts.""" @property def cmd(self): """Revise the command path.""" orig_cmd = super(AFNIPythonCommand, self).cmd found = spawn.find_executable(orig_cmd) return found if found is not None else orig_cmd @property def _cmd_prefix(self): return "{} ".format(self.inputs.py27_path) def no_afni(): """Check whether AFNI is not available.""" if Info.version() is None: return True return False nipype-1.7.0/nipype/interfaces/afni/model.py000066400000000000000000000665611413403311400210510ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft = python sts = 4 ts = 4 sw = 4 et: """ AFNI modeling interfaces. Examples -------- See the docstrings of the individual classes for examples. """ import os from ..base import ( CommandLineInputSpec, CommandLine, Directory, TraitedSpec, traits, isdefined, File, InputMultiPath, Undefined, Str, ) from ...external.due import BibTeX from .base import ( AFNICommandBase, AFNICommand, AFNICommandInputSpec, AFNICommandOutputSpec, Info, ) class DeconvolveInputSpec(AFNICommandInputSpec): in_files = InputMultiPath( File(exists=True), desc="filenames of 3D+time input datasets. More than one filename can " "be given and the datasets will be auto-catenated in time. 
" "You can input a 1D time series file here, but the time axis " "should run along the ROW direction, not the COLUMN direction as " "in the 'input1D' option.", argstr="-input %s", copyfile=False, sep=" ", position=1, ) sat = traits.Bool( desc="check the dataset time series for initial saturation transients," " which should normally have been excised before data analysis.", argstr="-sat", xor=["trans"], ) trans = traits.Bool( desc="check the dataset time series for initial saturation transients," " which should normally have been excised before data analysis.", argstr="-trans", xor=["sat"], ) noblock = traits.Bool( desc="normally, if you input multiple datasets with 'input', then " "the separate datasets are taken to be separate image runs that " "get separate baseline models. Use this options if you want to " "have the program consider these to be all one big run." "* If any of the input dataset has only 1 sub-brick, then this " "option is automatically invoked!" "* If the auto-catenation feature isn't used, then this option " "has no effect, no how, no way.", argstr="-noblock", ) force_TR = traits.Float( desc="use this value instead of the TR in the 'input' " "dataset. (It's better to fix the input using Refit.)", argstr="-force_TR %f", position=0, ) input1D = File( desc="filename of single (fMRI) .1D time series where time runs down " "the column.", argstr="-input1D %s", exists=True, ) TR_1D = traits.Float( desc="TR to use with 'input1D'. This option has no effect if you do " "not also use 'input1D'.", argstr="-TR_1D %f", ) legendre = traits.Bool( desc="use Legendre polynomials for null hypothesis (baseline model)", argstr="-legendre", ) nolegendre = traits.Bool( desc="use power polynomials for null hypotheses. 
Don't do this " "unless you are crazy!", argstr="-nolegendre", ) nodmbase = traits.Bool( desc="don't de-mean baseline time series", argstr="-nodmbase" ) dmbase = traits.Bool( desc="de-mean baseline time series (default if 'polort' >= 0)", argstr="-dmbase" ) svd = traits.Bool( desc="use SVD instead of Gaussian elimination (default)", argstr="-svd" ) nosvd = traits.Bool(desc="use Gaussian elimination instead of SVD", argstr="-nosvd") rmsmin = traits.Float( desc="minimum rms error to reject reduced model (default = 0; don't " "use this option normally!)", argstr="-rmsmin %f", ) nocond = traits.Bool( desc="DON'T calculate matrix condition number", argstr="-nocond" ) singvals = traits.Bool( desc="print out the matrix singular values", argstr="-singvals" ) goforit = traits.Int( desc="use this to proceed even if the matrix has bad problems (e.g., " "duplicate columns, large condition number, etc.).", argstr="-GOFORIT %i", ) allzero_OK = traits.Bool( desc="don't consider all zero matrix columns to be the type of error " "that 'gotforit' is needed to ignore.", argstr="-allzero_OK", ) dname = traits.Tuple( Str, Str, desc="set environmental variable to provided value", argstr="-D%s=%s" ) mask = File( desc="filename of 3D mask dataset; only data time series from within " "the mask will be analyzed; results for voxels outside the mask " "will be set to zero.", argstr="-mask %s", exists=True, ) automask = traits.Bool( desc="build a mask automatically from input data (will be slow for " "long time series datasets)", argstr="-automask", ) STATmask = File( desc="build a mask from provided file, and use this mask for the " "purpose of reporting truncation-to float issues AND for " "computing the FDR curves. The actual results ARE not masked " "with this option (only with 'mask' or 'automask' options).", argstr="-STATmask %s", exists=True, ) censor = File( desc="filename of censor .1D time series. 
This is a file of 1s and " "0s, indicating which time points are to be included (1) and " "which are to be excluded (0).", argstr="-censor %s", exists=True, ) polort = traits.Int( desc="degree of polynomial corresponding to the null hypothesis " "[default: 1]", argstr="-polort %d", ) ortvec = traits.Tuple( File(desc="filename", exists=True), Str(desc="label"), desc="this option lets you input a rectangular array of 1 or more " "baseline vectors from a file. This method is a fast way to " "include a lot of baseline regressors in one step. ", argstr="-ortvec %s %s", ) x1D = File(desc="specify name for saved X matrix", argstr="-x1D %s") x1D_stop = traits.Bool( desc="stop running after writing .xmat.1D file", argstr="-x1D_stop" ) cbucket = traits.Str( desc="Name for dataset in which to save the regression " "coefficients (no statistics). This dataset " "will be used in a -xrestore run [not yet implemented] " "instead of the bucket dataset, if possible.", argstr="-cbucket %s", ) out_file = File(desc="output statistics file", argstr="-bucket %s") num_threads = traits.Int( desc="run the program with provided number of sub-processes", argstr="-jobs %d", nohash=True, ) fout = traits.Bool(desc="output F-statistic for each stimulus", argstr="-fout") rout = traits.Bool( desc="output the R^2 statistic for each stimulus", argstr="-rout" ) tout = traits.Bool(desc="output the T-statistic for each stimulus", argstr="-tout") vout = traits.Bool( desc="output the sample variance (MSE) for each stimulus", argstr="-vout" ) nofdr = traits.Bool( desc="Don't compute the statistic-vs-FDR curves for the bucket " "dataset.", argstr="-noFDR", ) global_times = traits.Bool( desc="use global timing for stimulus timing files", argstr="-global_times", xor=["local_times"], ) local_times = traits.Bool( desc="use local timing for stimulus timing files", argstr="-local_times", xor=["global_times"], ) num_stimts = traits.Int( desc="number of stimulus timing files", argstr="-num_stimts %d", position=-6 ) 
stim_times = traits.List( traits.Tuple( traits.Int(desc="k-th response model"), File(desc="stimulus timing file", exists=True), Str(desc="model"), ), desc="generate a response model from a set of stimulus times" " given in file.", argstr="-stim_times %d %s '%s'...", position=-5, ) stim_label = traits.List( traits.Tuple( traits.Int(desc="k-th input stimulus"), Str(desc="stimulus label") ), desc="label for kth input stimulus (e.g., Label1)", argstr="-stim_label %d %s...", requires=["stim_times"], position=-4, ) stim_times_subtract = traits.Float( desc="this option means to subtract specified seconds from each time " "encountered in any 'stim_times' option. The purpose of this " "option is to make it simple to adjust timing files for the " "removal of images from the start of each imaging run.", argstr="-stim_times_subtract %f", ) num_glt = traits.Int( desc="number of general linear tests (i.e., contrasts)", argstr="-num_glt %d", position=-3, ) gltsym = traits.List( Str(desc="symbolic general linear test"), desc="general linear tests (i.e., contrasts) using symbolic " "conventions (e.g., '+Label1 -Label2')", argstr="-gltsym 'SYM: %s'...", position=-2, ) glt_label = traits.List( traits.Tuple( traits.Int(desc="k-th general linear test"), Str(desc="GLT label") ), desc="general linear test (i.e., contrast) labels", argstr="-glt_label %d %s...", requires=["gltsym"], position=-1, ) class DeconvolveOutputSpec(TraitedSpec): out_file = File(desc="output statistics file", exists=True) reml_script = File( desc="automatical generated script to run 3dREMLfit", exists=True ) x1D = File(desc="save out X matrix", exists=True) cbucket = File(desc="output regression coefficients file (if generated)") class Deconvolve(AFNICommand): """Performs OLS regression given a 4D neuroimage file and stimulus timings For complete details, see the `3dDeconvolve Documentation. 
`_ Examples ======== >>> from nipype.interfaces import afni >>> deconvolve = afni.Deconvolve() >>> deconvolve.inputs.in_files = ['functional.nii', 'functional2.nii'] >>> deconvolve.inputs.out_file = 'output.nii' >>> deconvolve.inputs.x1D = 'output.1D' >>> stim_times = [(1, 'timeseries.txt', 'SPMG1(4)')] >>> deconvolve.inputs.stim_times = stim_times >>> deconvolve.inputs.stim_label = [(1, 'Houses')] >>> deconvolve.inputs.gltsym = ['SYM: +Houses'] >>> deconvolve.inputs.glt_label = [(1, 'Houses')] >>> deconvolve.cmdline "3dDeconvolve -input functional.nii functional2.nii -bucket output.nii -x1D output.1D -num_stimts 1 -stim_times 1 timeseries.txt 'SPMG1(4)' -stim_label 1 Houses -num_glt 1 -gltsym 'SYM: +Houses' -glt_label 1 Houses" >>> res = deconvolve.run() # doctest: +SKIP """ _cmd = "3dDeconvolve" input_spec = DeconvolveInputSpec output_spec = DeconvolveOutputSpec def _format_arg(self, name, trait_spec, value): if name == "gltsym": for n, val in enumerate(value): if val.startswith("SYM: "): value[n] = val.lstrip("SYM: ") return super(Deconvolve, self)._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): if skip is None: skip = [] if len(self.inputs.stim_times) and not isdefined(self.inputs.num_stimts): self.inputs.num_stimts = len(self.inputs.stim_times) if len(self.inputs.gltsym) and not isdefined(self.inputs.num_glt): self.inputs.num_glt = len(self.inputs.gltsym) if not isdefined(self.inputs.out_file): self.inputs.out_file = "Decon.nii" return super(Deconvolve, self)._parse_inputs(skip) def _list_outputs(self): outputs = self.output_spec().get() _gen_fname_opts = {} _gen_fname_opts["basename"] = self.inputs.out_file _gen_fname_opts["cwd"] = os.getcwd() if isdefined(self.inputs.x1D): if not self.inputs.x1D.endswith(".xmat.1D"): outputs["x1D"] = os.path.abspath(self.inputs.x1D + ".xmat.1D") else: outputs["x1D"] = os.path.abspath(self.inputs.x1D) else: outputs["x1D"] = self._gen_fname(suffix=".xmat.1D", **_gen_fname_opts) if 
isdefined(self.inputs.cbucket): outputs["cbucket"] = os.path.abspath(self.inputs.cbucket) outputs["reml_script"] = self._gen_fname(suffix=".REML_cmd", **_gen_fname_opts) # remove out_file from outputs if x1d_stop set to True if self.inputs.x1D_stop: del outputs["out_file"], outputs["cbucket"] else: outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class RemlfitInputSpec(AFNICommandInputSpec): # mandatory files in_files = InputMultiPath( File(exists=True), desc="Read time series dataset", argstr='-input "%s"', mandatory=True, copyfile=False, sep=" ", ) matrix = File( desc="the design matrix file, which should have been output from " "Deconvolve via the 'x1D' option", argstr="-matrix %s", mandatory=True, ) # "Semi-Hidden Alternative Ways to Define the Matrix" polort = traits.Int( desc="if no 'matrix' option is given, AND no 'matim' option, " "create a matrix with Legendre polynomial regressors" "up to the specified order. The default value is 0, which" "produces a matrix with a single column of all ones", argstr="-polort %d", xor=["matrix"], ) matim = File( desc="read a standard file as the matrix. You can use only Col as " "a name in GLTs with these nonstandard matrix input methods, " "since the other names come from the 'matrix' file. " "These mutually exclusive options are ignored if 'matrix' " "is used.", argstr="-matim %s", xor=["matrix"], ) # Other arguments mask = File( desc="filename of 3D mask dataset; only data time series from within " "the mask will be analyzed; results for voxels outside the mask " "will be set to zero.", argstr="-mask %s", exists=True, ) automask = traits.Bool( usedefault=True, argstr="-automask", desc="build a mask automatically from input data (will be slow for " "long time series datasets)", ) STATmask = File( desc="filename of 3D mask dataset to be used for the purpose " "of reporting truncation-to float issues AND for computing the " "FDR curves. 
The actual results ARE not masked with this option " "(only with 'mask' or 'automask' options).", argstr="-STATmask %s", exists=True, ) addbase = InputMultiPath( File(exists=True, desc="file containing columns to add to regression matrix"), desc="file(s) to add baseline model columns to the matrix with this " "option. Each column in the specified file(s) will be appended " "to the matrix. File(s) must have at least as many rows as the " "matrix does.", copyfile=False, sep=" ", argstr="-addbase %s", ) slibase = InputMultiPath( File(exists=True, desc="file containing columns to add to regression matrix"), desc="similar to 'addbase' in concept, BUT each specified file " "must have an integer multiple of the number of slices " "in the input dataset(s); then, separate regression " "matrices are generated for each slice, with the " "first column of the file appended to the matrix for " "the first slice of the dataset, the second column of the file " "appended to the matrix for the first slice of the dataset, " "and so on. Intended to help model physiological noise in FMRI, " "or other effects you want to regress out that might " "change significantly in the inter-slice time intervals. This " "will slow the program down, and make it use a lot more memory " "(to hold all the matrix stuff).", argstr="-slibase %s", ) slibase_sm = InputMultiPath( File(exists=True, desc="file containing columns to add to regression matrix"), desc="similar to 'slibase', BUT each file much be in slice major " "order (i.e. all slice0 columns come first, then all slice1 " "columns, etc).", argstr="-slibase_sm %s", ) usetemp = traits.Bool( desc="write intermediate stuff to disk, to economize on RAM. " "Using this option might be necessary to run with " "'slibase' and with 'Grid' values above the default, " "since the program has to store a large number of " "matrices for such a problem: two for every slice and " "for every (a,b) pair in the ARMA parameter grid. 
Temporary " "files are written to the directory given in environment " "variable TMPDIR, or in /tmp, or in ./ (preference is in that " "order)", argstr="-usetemp", ) nodmbase = traits.Bool( desc="by default, baseline columns added to the matrix via " "'addbase' or 'slibase' or 'dsort' will each have their " "mean removed (as is done in Deconvolve); this option turns this " "centering off", argstr="-nodmbase", requires=["addbase", "dsort"], ) dsort = File( desc="4D dataset to be used as voxelwise baseline regressor", exists=True, copyfile=False, argstr="-dsort %s", ) dsort_nods = traits.Bool( desc="if 'dsort' option is used, this command will output " "additional results files excluding the 'dsort' file", argstr="-dsort_nods", requires=["dsort"], ) fout = traits.Bool(desc="output F-statistic for each stimulus", argstr="-fout") rout = traits.Bool( desc="output the R^2 statistic for each stimulus", argstr="-rout" ) tout = traits.Bool( desc="output the T-statistic for each stimulus; if you use " "'out_file' and do not give any of 'fout', 'tout'," "or 'rout', then the program assumes 'fout' is activated.", argstr="-tout", ) nofdr = traits.Bool( desc="do NOT add FDR curve data to bucket datasets; FDR curves can " "take a long time if 'tout' is used", argstr="-noFDR", ) nobout = traits.Bool( desc="do NOT add baseline (null hypothesis) regressor betas " "to the 'rbeta_file' and/or 'obeta_file' output datasets.", argstr="-nobout", ) gltsym = traits.List( traits.Either( traits.Tuple(File(exists=True), Str()), traits.Tuple(Str(), Str()) ), desc="read a symbolic GLT from input file and associate it with a " "label. As in Deconvolve, you can also use the 'SYM:' method " "to provide the definition of the GLT directly as a string " "(e.g., with 'SYM: +Label1 -Label2'). 
Unlike Deconvolve, you " "MUST specify 'SYM: ' if providing the GLT directly as a " "string instead of from a file", argstr='-gltsym "%s" %s...', ) out_file = File( desc="output dataset for beta + statistics from the REML estimation; " "also contains the results of any GLT analysis requested " "in the Deconvolve setup, similar to the 'bucket' output " "from Deconvolve. This dataset does NOT get the betas " "(or statistics) of those regressors marked as 'baseline' " "in the matrix file.", argstr="-Rbuck %s", ) var_file = File( desc="output dataset for REML variance parameters", argstr="-Rvar %s" ) rbeta_file = File( desc="output dataset for beta weights from the REML estimation, " "similar to the 'cbucket' output from Deconvolve. This dataset " "will contain all the beta weights, for baseline and stimulus " "regressors alike, unless the '-nobout' option is given -- " "in that case, this dataset will only get the betas for the " "stimulus regressors.", argstr="-Rbeta %s", ) glt_file = File( desc="output dataset for beta + statistics from the REML estimation, " "but ONLY for the GLTs added on the REMLfit command line itself " "via 'gltsym'; GLTs from Deconvolve's command line will NOT " "be included.", argstr="-Rglt %s", ) fitts_file = File(desc="ouput dataset for REML fitted model", argstr="-Rfitts %s") errts_file = File( desc="output dataset for REML residuals = data - fitted model", argstr="-Rerrts %s", ) wherr_file = File( desc="dataset for REML residual, whitened using the estimated " "ARMA(1,1) correlation matrix of the noise", argstr="-Rwherr %s", ) quiet = traits.Bool(desc="turn off most progress messages", argstr="-quiet") verb = traits.Bool( desc="turns on more progress messages, including memory usage " "progress reports at various stages", argstr="-verb", ) goforit = traits.Bool( desc="With potential issues flagged in the design matrix, an attempt " "will nevertheless be made to fit the model", argstr="-GOFORIT", ) ovar = File( desc="dataset for OLSQ 
st.dev. parameter (kind of boring)", argstr="-Ovar %s" ) obeta = File( desc="dataset for beta weights from the OLSQ estimation", argstr="-Obeta %s" ) obuck = File( desc="dataset for beta + statistics from the OLSQ estimation", argstr="-Obuck %s", ) oglt = File( desc="dataset for beta + statistics from 'gltsym' options", argstr="-Oglt %s" ) ofitts = File(desc="dataset for OLSQ fitted model", argstr="-Ofitts %s") oerrts = File( desc="dataset for OLSQ residuals (data - fitted model)", argstr="-Oerrts %s" ) class RemlfitOutputSpec(AFNICommandOutputSpec): out_file = File( desc="dataset for beta + statistics from the REML estimation (if " "generated" ) var_file = File(desc="dataset for REML variance parameters (if generated)") rbeta_file = File( desc="dataset for beta weights from the REML estimation (if " "generated)" ) rbeta_file = File( desc="output dataset for beta weights from the REML estimation (if " "generated" ) glt_file = File( desc="output dataset for beta + statistics from the REML estimation, " "but ONLY for the GLTs added on the REMLfit command " "line itself via 'gltsym' (if generated)" ) fitts_file = File(desc="ouput dataset for REML fitted model (if generated)") errts_file = File( desc="output dataset for REML residuals = data - fitted model (if " "generated" ) wherr_file = File( desc="dataset for REML residual, whitened using the estimated " "ARMA(1,1) correlation matrix of the noise (if generated)" ) ovar = File(desc="dataset for OLSQ st.dev. 
parameter (if generated)") obeta = File( desc="dataset for beta weights from the OLSQ estimation (if " "generated)" ) obuck = File( desc="dataset for beta + statistics from the OLSQ estimation (if " "generated)" ) oglt = File( desc="dataset for beta + statistics from 'gltsym' options (if " "generated" ) ofitts = File(desc="dataset for OLSQ fitted model (if generated)") oerrts = File( desc="dataset for OLSQ residuals = data - fitted model (if " "generated" ) class Remlfit(AFNICommand): """Performs Generalized least squares time series fit with Restricted Maximum Likelihood (REML) estimation of the temporal auto-correlation structure. For complete details, see the `3dREMLfit Documentation. `_ Examples ======== >>> from nipype.interfaces import afni >>> remlfit = afni.Remlfit() >>> remlfit.inputs.in_files = ['functional.nii', 'functional2.nii'] >>> remlfit.inputs.out_file = 'output.nii' >>> remlfit.inputs.matrix = 'output.1D' >>> remlfit.inputs.gltsym = [('SYM: +Lab1 -Lab2', 'TestSYM'), ('timeseries.txt', 'TestFile')] >>> remlfit.cmdline '3dREMLfit -gltsym "SYM: +Lab1 -Lab2" TestSYM -gltsym "timeseries.txt" TestFile -input "functional.nii functional2.nii" -matrix output.1D -Rbuck output.nii' >>> res = remlfit.run() # doctest: +SKIP """ _cmd = "3dREMLfit" input_spec = RemlfitInputSpec output_spec = RemlfitOutputSpec def _parse_inputs(self, skip=None): if skip is None: skip = [] return super(Remlfit, self)._parse_inputs(skip) def _list_outputs(self): outputs = self.output_spec().get() for key in outputs.keys(): if isdefined(self.inputs.get()[key]): outputs[key] = os.path.abspath(self.inputs.get()[key]) return outputs class SynthesizeInputSpec(AFNICommandInputSpec): cbucket = File( desc="Read the dataset output from " "3dDeconvolve via the '-cbucket' option.", argstr="-cbucket %s", copyfile=False, mandatory=True, ) matrix = File( desc="Read the matrix output from " "3dDeconvolve via the '-x1D' option.", argstr="-matrix %s", copyfile=False, mandatory=True, ) select = 
traits.List( Str(desc="selected columns to synthesize"), argstr="-select %s", desc="A list of selected columns from the matrix (and the " "corresponding coefficient sub-bricks from the " "cbucket). Valid types include 'baseline', " " 'polort', 'allfunc', 'allstim', 'all', " "Can also provide 'something' where something matches " "a stim_label from 3dDeconvolve, and 'digits' where digits " "are the numbers of the select matrix columns by " "numbers (starting at 0), or number ranges of the form " "'3..7' and '3-7'.", mandatory=True, ) out_file = File( name_template="syn", desc="output dataset prefix name (default 'syn')", argstr="-prefix %s", ) dry_run = traits.Bool( desc="Don't compute the output, just " "check the inputs.", argstr="-dry" ) TR = traits.Float( desc="TR to set in the output. The default value of " "TR is read from the header of the matrix file.", argstr="-TR %f", ) cenfill = traits.Enum( "zero", "nbhr", "none", argstr="-cenfill %s", desc="Determines how censored time points from the " "3dDeconvolve run will be filled. Valid types " "are 'zero', 'nbhr' and 'none'.", ) class Synthesize(AFNICommand): """Reads a '-cbucket' dataset and a '.xmat.1D' matrix from 3dDeconvolve, and synthesizes a fit dataset using user-selected sub-bricks and matrix columns. For complete details, see the `3dSynthesize Documentation. 
`_ Examples ======== >>> from nipype.interfaces import afni >>> synthesize = afni.Synthesize() >>> synthesize.inputs.cbucket = 'functional.nii' >>> synthesize.inputs.matrix = 'output.1D' >>> synthesize.inputs.select = ['baseline'] >>> synthesize.cmdline '3dSynthesize -cbucket functional.nii -matrix output.1D -select baseline' >>> syn = synthesize.run() # doctest: +SKIP """ _cmd = "3dSynthesize" input_spec = SynthesizeInputSpec output_spec = AFNICommandOutputSpec def _list_outputs(self): outputs = self.output_spec().get() for key in outputs.keys(): if isdefined(self.inputs.get()[key]): outputs[key] = os.path.abspath(self.inputs.get()[key]) return outputs nipype-1.7.0/nipype/interfaces/afni/preprocess.py000066400000000000000000004576161413403311400221430ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """AFNI preprocessing interfaces.""" import os import os.path as op from ...utils.filemanip import load_json, save_json, split_filename, fname_presuffix from ..base import ( CommandLineInputSpec, CommandLine, TraitedSpec, traits, isdefined, File, InputMultiPath, Undefined, Str, InputMultiObject, ) from .base import ( AFNICommandBase, AFNICommand, AFNICommandInputSpec, AFNICommandOutputSpec, AFNIPythonCommandInputSpec, AFNIPythonCommand, Info, no_afni, ) from ... 
import logging iflogger = logging.getLogger("nipype.interface") class CentralityInputSpec(AFNICommandInputSpec): """Common input spec class for all centrality-related commands""" mask = File(desc="mask file to mask input data", argstr="-mask %s", exists=True) thresh = traits.Float( desc="threshold to exclude connections where corr <= thresh", argstr="-thresh %f", ) polort = traits.Int(desc="", argstr="-polort %d") autoclip = traits.Bool( desc="Clip off low-intensity regions in the dataset", argstr="-autoclip" ) automask = traits.Bool( desc="Mask the dataset to target brain-only voxels", argstr="-automask" ) class AlignEpiAnatPyInputSpec(AFNIPythonCommandInputSpec): in_file = File( desc="EPI dataset to align", argstr="-epi %s", mandatory=True, exists=True, copyfile=False, ) anat = File( desc="name of structural dataset", argstr="-anat %s", mandatory=True, exists=True, copyfile=False, ) epi_base = traits.Either( traits.Range(low=0), traits.Enum("mean", "median", "max"), desc="the epi base used in alignment" "should be one of (0/mean/median/max/subbrick#)", mandatory=True, argstr="-epi_base %s", ) anat2epi = traits.Bool( desc="align anatomical to EPI dataset (default)", argstr="-anat2epi" ) epi2anat = traits.Bool(desc="align EPI to anatomical dataset", argstr="-epi2anat") save_skullstrip = traits.Bool( desc="save skull-stripped (not aligned)", argstr="-save_skullstrip" ) suffix = traits.Str( "_al", desc="append suffix to the original anat/epi dataset to use" 'in the resulting dataset names (default is "_al")', usedefault=True, argstr="-suffix %s", ) epi_strip = traits.Enum( ("3dSkullStrip", "3dAutomask", "None"), desc="method to mask brain in EPI data" "should be one of[3dSkullStrip]/3dAutomask/None)", argstr="-epi_strip %s", ) volreg = traits.Enum( "on", "off", usedefault=True, desc="do volume registration on EPI dataset before alignment" "should be 'on' or 'off', defaults to 'on'", argstr="-volreg %s", ) tshift = traits.Enum( "on", "off", usedefault=True, desc="do 
time shifting of EPI dataset before alignment" "should be 'on' or 'off', defaults to 'on'", argstr="-tshift %s", ) class AlignEpiAnatPyOutputSpec(TraitedSpec): anat_al_orig = File(desc="A version of the anatomy that is aligned to the EPI") epi_al_orig = File(desc="A version of the EPI dataset aligned to the anatomy") epi_tlrc_al = File( desc="A version of the EPI dataset aligned to a standard template" ) anat_al_mat = File(desc="matrix to align anatomy to the EPI") epi_al_mat = File(desc="matrix to align EPI to anatomy") epi_vr_al_mat = File(desc="matrix to volume register EPI") epi_reg_al_mat = File(desc="matrix to volume register and align epi to anatomy") epi_al_tlrc_mat = File( desc="matrix to volume register and align epi" "to anatomy and put into standard space" ) epi_vr_motion = File( desc="motion parameters from EPI time-series" "registration (tsh included in name if slice" "timing correction is also included)." ) skullstrip = File(desc="skull-stripped (not aligned) volume") class AlignEpiAnatPy(AFNIPythonCommand): """Align EPI to anatomical datasets or vice versa. This Python script computes the alignment between two datasets, typically an EPI and an anatomical structural dataset, and applies the resulting transformation to one or the other to bring them into alignment. This script computes the transforms needed to align EPI and anatomical datasets using a cost function designed for this purpose. The script combines multiple transformations, thereby minimizing the amount of interpolation applied to the data. Basic Usage:: align_epi_anat.py -anat anat+orig -epi epi+orig -epi_base 5 The user must provide :abbr:`EPI (echo-planar imaging)` and anatomical datasets and specify the EPI sub-brick to use as a base in the alignment. Internally, the script always aligns the anatomical to the EPI dataset, and the resulting transformation is saved to a 1D file. 
As a user option, the inverse of this transformation may be applied to the EPI dataset in order to align it to the anatomical data instead. This program generates several kinds of output in the form of datasets and transformation matrices which can be applied to other datasets if needed. Time-series volume registration, oblique data transformations and Talairach (standard template) transformations will be combined as needed and requested (with options to turn on and off each of the steps) in order to create the aligned datasets. Examples -------- >>> from nipype.interfaces import afni >>> al_ea = afni.AlignEpiAnatPy() >>> al_ea.inputs.anat = "structural.nii" >>> al_ea.inputs.in_file = "functional.nii" >>> al_ea.inputs.epi_base = 0 >>> al_ea.inputs.epi_strip = '3dAutomask' >>> al_ea.inputs.volreg = 'off' >>> al_ea.inputs.tshift = 'off' >>> al_ea.inputs.save_skullstrip = True >>> al_ea.cmdline # doctest: +ELLIPSIS 'python2 ...align_epi_anat.py -anat structural.nii -epi_base 0 -epi_strip 3dAutomask -epi \ functional.nii -save_skullstrip -suffix _al -tshift off -volreg off' >>> res = allineate.run() # doctest: +SKIP See Also -------- For complete details, see the `align_epi_anat.py documentation. `__. 
""" _cmd = "align_epi_anat.py" input_spec = AlignEpiAnatPyInputSpec output_spec = AlignEpiAnatPyOutputSpec def _list_outputs(self): outputs = self.output_spec().get() anat_prefix = self._gen_fname(self.inputs.anat) epi_prefix = self._gen_fname(self.inputs.in_file) if "+" in anat_prefix: anat_prefix = "".join(anat_prefix.split("+")[:-1]) if "+" in epi_prefix: epi_prefix = "".join(epi_prefix.split("+")[:-1]) outputtype = self.inputs.outputtype if outputtype == "AFNI": ext = ".HEAD" else: ext = Info.output_type_to_ext(outputtype) matext = ".1D" suffix = self.inputs.suffix if self.inputs.anat2epi: outputs["anat_al_orig"] = self._gen_fname( anat_prefix, suffix=suffix + "+orig", ext=ext ) outputs["anat_al_mat"] = self._gen_fname( anat_prefix, suffix=suffix + "_mat.aff12", ext=matext ) if self.inputs.epi2anat: outputs["epi_al_orig"] = self._gen_fname( epi_prefix, suffix=suffix + "+orig", ext=ext ) outputs["epi_al_mat"] = self._gen_fname( epi_prefix, suffix=suffix + "_mat.aff12", ext=matext ) if self.inputs.volreg == "on": outputs["epi_vr_al_mat"] = self._gen_fname( epi_prefix, suffix="_vr" + suffix + "_mat.aff12", ext=matext ) if self.inputs.tshift == "on": outputs["epi_vr_motion"] = self._gen_fname( epi_prefix, suffix="tsh_vr_motion", ext=matext ) elif self.inputs.tshift == "off": outputs["epi_vr_motion"] = self._gen_fname( epi_prefix, suffix="vr_motion", ext=matext ) if self.inputs.volreg == "on" and self.inputs.epi2anat: outputs["epi_reg_al_mat"] = self._gen_fname( epi_prefix, suffix="_reg" + suffix + "_mat.aff12", ext=matext ) if self.inputs.save_skullstrip: outputs.skullstrip = self._gen_fname( anat_prefix, suffix="_ns" + "+orig", ext=ext ) return outputs class AllineateInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dAllineate", argstr="-source %s", mandatory=True, exists=True, copyfile=False, ) reference = File( exists=True, argstr="-base %s", desc="file to be used as reference, the first volume will be used if " "not given the reference will 
be the first volume of in_file.", ) out_file = File( desc="output file from 3dAllineate", argstr="-prefix %s", name_template="%s_allineate", name_source="in_file", hash_files=False, xor=["allcostx"], ) out_param_file = File( argstr="-1Dparam_save %s", desc="Save the warp parameters in ASCII (.1D) format.", xor=["in_param_file", "allcostx"], ) in_param_file = File( exists=True, argstr="-1Dparam_apply %s", desc="Read warp parameters from file and apply them to " "the source dataset, and produce a new dataset", xor=["out_param_file"], ) out_matrix = File( argstr="-1Dmatrix_save %s", desc="Save the transformation matrix for each volume.", xor=["in_matrix", "allcostx"], ) in_matrix = File( desc="matrix to align input file", argstr="-1Dmatrix_apply %s", position=-3, xor=["out_matrix"], ) overwrite = traits.Bool( desc="overwrite output file if it already exists", argstr="-overwrite" ) allcostx = File( desc="Compute and print ALL available cost functionals for the un-warped inputs" "AND THEN QUIT. 
If you use this option none of the other expected outputs will be produced", argstr="-allcostx |& tee %s", position=-1, xor=["out_file", "out_matrix", "out_param_file", "out_weight_file"], ) _cost_funcs = [ "leastsq", "ls", "mutualinfo", "mi", "corratio_mul", "crM", "norm_mutualinfo", "nmi", "hellinger", "hel", "corratio_add", "crA", "corratio_uns", "crU", ] cost = traits.Enum( *_cost_funcs, argstr="-cost %s", desc="Defines the 'cost' function that defines the matching between " "the source and the base" ) _interp_funcs = ["nearestneighbour", "linear", "cubic", "quintic", "wsinc5"] interpolation = traits.Enum( *_interp_funcs[:-1], argstr="-interp %s", desc="Defines interpolation method to use during matching" ) final_interpolation = traits.Enum( *_interp_funcs, argstr="-final %s", desc="Defines interpolation method used to create the output dataset" ) # TECHNICAL OPTIONS (used for fine control of the program): nmatch = traits.Int( argstr="-nmatch %d", desc="Use at most n scattered points to match the datasets.", ) no_pad = traits.Bool( argstr="-nopad", desc="Do not use zero-padding on the base image." ) zclip = traits.Bool( argstr="-zclip", desc="Replace negative values in the input datasets (source & base) " "with zero.", ) convergence = traits.Float( argstr="-conv %f", desc="Convergence test in millimeters (default 0.05mm)." ) usetemp = traits.Bool(argstr="-usetemp", desc="temporary file use") check = traits.List( traits.Enum(*_cost_funcs), argstr="-check %s", desc="After cost functional optimization is done, start at the final " "parameters and RE-optimize using this new cost functions. If " "the results are too different, a warning message will be " "printed. However, the final parameters from the original " "optimization will be used to create the output dataset.", ) # ** PARAMETERS THAT AFFECT THE COST OPTIMIZATION STRATEGY ** one_pass = traits.Bool( argstr="-onepass", desc="Use only the refining pass -- do not try a coarse resolution " "pass first. 
Useful if you know that only small amounts of " "image alignment are needed.", ) two_pass = traits.Bool( argstr="-twopass", desc="Use a two pass alignment strategy for all volumes, searching " "for a large rotation+shift and then refining the alignment.", ) two_blur = traits.Float( argstr="-twoblur %f", desc="Set the blurring radius for the first pass in mm." ) two_first = traits.Bool( argstr="-twofirst", desc="Use -twopass on the first image to be registered, and " "then on all subsequent images from the source dataset, " "use results from the first image's coarse pass to start " "the fine pass.", ) two_best = traits.Int( argstr="-twobest %d", desc="In the coarse pass, use the best 'bb' set of initial" "points to search for the starting point for the fine" "pass. If bb==0, then no search is made for the best" "starting point, and the identity transformation is" "used as the starting point. [Default=5; min=0 max=11]", ) fine_blur = traits.Float( argstr="-fineblur %f", desc="Set the blurring radius to use in the fine resolution " "pass to 'x' mm. A small amount (1-2 mm?) of blurring at " "the fine step may help with convergence, if there is " "some problem, especially if the base volume is very noisy. 
" "[Default == 0 mm = no blurring at the final alignment pass]", ) center_of_mass = Str( argstr="-cmass%s", desc="Use the center-of-mass calculation to bracket the shifts.", ) autoweight = Str( argstr="-autoweight%s", desc="Compute a weight function using the 3dAutomask " "algorithm plus some blurring of the base image.", ) automask = traits.Int( argstr="-automask+%d", desc="Compute a mask function, set a value for dilation or 0.", ) autobox = traits.Bool( argstr="-autobox", desc="Expand the -automask function to enclose a rectangular " "box that holds the irregular mask.", ) nomask = traits.Bool( argstr="-nomask", desc="Don't compute the autoweight/mask; if -weight is not " "also used, then every voxel will be counted equally.", ) weight_file = File( argstr="-weight %s", exists=True, deprecated="1.0.0", new_name="weight", desc="Set the weighting for each voxel in the base dataset; " "larger weights mean that voxel count more in the cost function. " "Must be defined on the same grid as the base dataset", ) weight = traits.Either( File(exists=True), traits.Float(), argstr="-weight %s", desc="Set the weighting for each voxel in the base dataset; " "larger weights mean that voxel count more in the cost function. 
" "If an image file is given, the volume must be defined on the " "same grid as the base dataset", ) out_weight_file = File( argstr="-wtprefix %s", desc="Write the weight volume to disk as a dataset", xor=["allcostx"], ) source_mask = File( exists=True, argstr="-source_mask %s", desc="mask the input dataset" ) source_automask = traits.Int( argstr="-source_automask+%d", desc="Automatically mask the source dataset with dilation or 0.", ) warp_type = traits.Enum( "shift_only", "shift_rotate", "shift_rotate_scale", "affine_general", argstr="-warp %s", desc="Set the warp type.", ) warpfreeze = traits.Bool( argstr="-warpfreeze", desc="Freeze the non-rigid body parameters after first volume.", ) replacebase = traits.Bool( argstr="-replacebase", desc="If the source has more than one volume, then after the first " "volume is aligned to the base.", ) replacemeth = traits.Enum( *_cost_funcs, argstr="-replacemeth %s", desc="After first volume is aligned, switch method for later volumes. " "For use with '-replacebase'." ) epi = traits.Bool( argstr="-EPI", desc="Treat the source dataset as being composed of warped " "EPI slices, and the base as comprising anatomically " "'true' images. Only phase-encoding direction image " "shearing and scaling will be allowed with this option.", ) maxrot = traits.Float( argstr="-maxrot %f", desc="Maximum allowed rotation in degrees." 
) maxshf = traits.Float(argstr="-maxshf %f", desc="Maximum allowed shift in mm.") maxscl = traits.Float(argstr="-maxscl %f", desc="Maximum allowed scaling factor.") maxshr = traits.Float(argstr="-maxshr %f", desc="Maximum allowed shearing factor.") master = File( exists=True, argstr="-master %s", desc="Write the output dataset on the same grid as this file.", ) newgrid = traits.Float( argstr="-newgrid %f", desc="Write the output dataset using isotropic grid spacing in mm.", ) # Non-linear experimental _nwarp_types = [ "bilinear", "cubic", "quintic", "heptic", "nonic", "poly3", "poly5", "poly7", "poly9", ] # same non-hellenistic nwarp = traits.Enum( *_nwarp_types, argstr="-nwarp %s", desc="Experimental nonlinear warping: bilinear or legendre poly." ) _dirs = ["X", "Y", "Z", "I", "J", "K"] nwarp_fixmot = traits.List( traits.Enum(*_dirs), argstr="-nwarp_fixmot%s...", desc="To fix motion along directions.", ) nwarp_fixdep = traits.List( traits.Enum(*_dirs), argstr="-nwarp_fixdep%s...", desc="To fix non-linear warp dependency along directions.", ) verbose = traits.Bool(argstr="-verb", desc="Print out verbose progress reports.") quiet = traits.Bool( argstr="-quiet", desc="Don't print out verbose progress reports." ) class AllineateOutputSpec(TraitedSpec): out_file = File(exists=True, desc="output image file name") out_matrix = File(exists=True, desc="matrix to align input file") out_param_file = File(exists=True, desc="warp parameters") out_weight_file = File(exists=True, desc="weight volume") allcostx = File( desc="Compute and print ALL available cost functionals for the un-warped inputs" ) class Allineate(AFNICommand): """Program to align one dataset (the 'source') to a base dataset For complete details, see the `3dAllineate Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> allineate = afni.Allineate() >>> allineate.inputs.in_file = 'functional.nii' >>> allineate.inputs.out_file = 'functional_allineate.nii' >>> allineate.inputs.in_matrix = 'cmatrix.mat' >>> allineate.cmdline '3dAllineate -source functional.nii -prefix functional_allineate.nii -1Dmatrix_apply cmatrix.mat' >>> res = allineate.run() # doctest: +SKIP >>> allineate = afni.Allineate() >>> allineate.inputs.in_file = 'functional.nii' >>> allineate.inputs.reference = 'structural.nii' >>> allineate.inputs.allcostx = 'out.allcostX.txt' >>> allineate.cmdline '3dAllineate -source functional.nii -base structural.nii -allcostx |& tee out.allcostX.txt' >>> res = allineate.run() # doctest: +SKIP >>> allineate = afni.Allineate() >>> allineate.inputs.in_file = 'functional.nii' >>> allineate.inputs.reference = 'structural.nii' >>> allineate.inputs.nwarp_fixmot = ['X', 'Y'] >>> allineate.cmdline '3dAllineate -source functional.nii -nwarp_fixmotX -nwarp_fixmotY -prefix functional_allineate -base structural.nii' >>> res = allineate.run() # doctest: +SKIP """ _cmd = "3dAllineate" input_spec = AllineateInputSpec output_spec = AllineateOutputSpec def _list_outputs(self): outputs = super(Allineate, self)._list_outputs() if self.inputs.out_weight_file: outputs["out_weight_file"] = op.abspath(self.inputs.out_weight_file) if self.inputs.out_matrix: ext = split_filename(self.inputs.out_matrix)[-1] if ext.lower() not in [".1d", ".1D"]: outputs["out_matrix"] = self._gen_fname( self.inputs.out_matrix, suffix=".aff12.1D" ) else: outputs["out_matrix"] = op.abspath(self.inputs.out_matrix) if self.inputs.out_param_file: ext = split_filename(self.inputs.out_param_file)[-1] if ext.lower() not in [".1d", ".1D"]: outputs["out_param_file"] = self._gen_fname( self.inputs.out_param_file, suffix=".param.1D" ) else: outputs["out_param_file"] = op.abspath(self.inputs.out_param_file) if self.inputs.allcostx: outputs["allcostX"] = 
os.path.abspath(self.inputs.allcostx) return outputs class AutoTcorrelateInputSpec(AFNICommandInputSpec): in_file = File( desc="timeseries x space (volume or surface) file", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) polort = traits.Int( desc="Remove polynomical trend of order m or -1 for no detrending", argstr="-polort %d", ) eta2 = traits.Bool(desc="eta^2 similarity", argstr="-eta2") mask = File(exists=True, desc="mask of voxels", argstr="-mask %s") mask_only_targets = traits.Bool( desc="use mask only on targets voxels", argstr="-mask_only_targets", xor=["mask_source"], ) mask_source = File( exists=True, desc="mask for source voxels", argstr="-mask_source %s", xor=["mask_only_targets"], ) out_file = File( name_template="%s_similarity_matrix.1D", desc="output image file name", argstr="-prefix %s", name_source="in_file", ) class AutoTcorrelate(AFNICommand): """Computes the correlation coefficient between the time series of each pair of voxels in the input dataset, and stores the output into a new anatomical bucket dataset [scaled to shorts to save memory space]. For complete details, see the `3dAutoTcorrelate Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> corr = afni.AutoTcorrelate() >>> corr.inputs.in_file = 'functional.nii' >>> corr.inputs.polort = -1 >>> corr.inputs.eta2 = True >>> corr.inputs.mask = 'mask.nii' >>> corr.inputs.mask_only_targets = True >>> corr.cmdline # doctest: +ELLIPSIS '3dAutoTcorrelate -eta2 -mask mask.nii -mask_only_targets -prefix functional_similarity_matrix.1D -polort -1 functional.nii' >>> res = corr.run() # doctest: +SKIP """ input_spec = AutoTcorrelateInputSpec output_spec = AFNICommandOutputSpec _cmd = "3dAutoTcorrelate" def _overload_extension(self, value, name=None): path, base, ext = split_filename(value) if ext.lower() not in [".1d", ".1D", ".nii.gz", ".nii"]: ext = ext + ".1D" return os.path.join(path, base + ext) class AutomaskInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dAutomask", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_mask", desc="output image file name", argstr="-prefix %s", name_source="in_file", ) brain_file = File( name_template="%s_masked", desc="output file from 3dAutomask", argstr="-apply_prefix %s", name_source="in_file", ) clfrac = traits.Float( desc="sets the clip level fraction (must be 0.1-0.9). A small value " "will tend to make the mask larger [default = 0.5].", argstr="-clfrac %s", ) dilate = traits.Int(desc="dilate the mask outwards", argstr="-dilate %s") erode = traits.Int(desc="erode the mask inwards", argstr="-erode %s") class AutomaskOutputSpec(TraitedSpec): out_file = File(desc="mask file", exists=True) brain_file = File(desc="brain file (skull stripped)", exists=True) class Automask(AFNICommand): """Create a brain-only mask of the image using AFNI 3dAutomask command For complete details, see the `3dAutomask Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> automask = afni.Automask() >>> automask.inputs.in_file = 'functional.nii' >>> automask.inputs.dilate = 1 >>> automask.inputs.outputtype = 'NIFTI' >>> automask.cmdline # doctest: +ELLIPSIS '3dAutomask -apply_prefix functional_masked.nii -dilate 1 -prefix functional_mask.nii functional.nii' >>> res = automask.run() # doctest: +SKIP """ _cmd = "3dAutomask" input_spec = AutomaskInputSpec output_spec = AutomaskOutputSpec class AutoTLRCInputSpec(CommandLineInputSpec): outputtype = traits.Enum( "AFNI", list(Info.ftypes.keys()), desc="AFNI output filetype" ) in_file = File( desc="Original anatomical volume (+orig)." "The skull is removed by this script" "unless instructed otherwise (-no_ss).", argstr="-input %s", mandatory=True, exists=True, copyfile=False, ) base = traits.Str( desc="""\ Reference anatomical volume. Usually this volume is in some standard space like TLRC or MNI space and with afni dataset view of (+tlrc). Preferably, this reference volume should have had the skull removed but that is not mandatory. AFNI's distribution contains several templates. For a longer list, use "whereami -show_templates" TT_N27+tlrc --> Single subject, skull stripped volume. This volume is also known as N27_SurfVol_NoSkull+tlrc elsewhere in AFNI and SUMA land. (www.loni.ucla.edu, www.bic.mni.mcgill.ca) This template has a full set of FreeSurfer (surfer.nmr.mgh.harvard.edu) surface models that can be used in SUMA. For details, see Talairach-related link: https://afni.nimh.nih.gov/afni/suma TT_icbm452+tlrc --> Average volume of 452 normal brains. Skull Stripped. (www.loni.ucla.edu) TT_avg152T1+tlrc --> Average volume of 152 normal brains. Skull Stripped.(www.bic.mni.mcgill.ca) TT_EPI+tlrc --> EPI template from spm2, masked as TT_avg152T1 TT_avg152 and TT_EPI volume sources are from SPM's distribution. 
(www.fil.ion.ucl.ac.uk/spm/) If you do not specify a path for the template, the script will attempt to locate the template AFNI's binaries directory. NOTE: These datasets have been slightly modified from their original size to match the standard TLRC dimensions (Jean Talairach and Pierre Tournoux Co-Planar Stereotaxic Atlas of the Human Brain Thieme Medical Publishers, New York, 1988). That was done for internal consistency in AFNI. You may use the original form of these volumes if you choose but your TLRC coordinates will not be consistent with AFNI's TLRC database (San Antonio Talairach Daemon database), for example.""", mandatory=True, argstr="-base %s", ) no_ss = traits.Bool( desc="""\ Do not strip skull of input data set (because skull has already been removed or because template still has the skull) NOTE: The ``-no_ss`` option is not all that optional. Here is a table of when you should and should not use ``-no_ss`` +------------------+------------+---------------+ | Dataset | Template | +==================+============+===============+ | | w/ skull | wo/ skull | +------------------+------------+---------------+ | WITH skull | ``-no_ss`` | xxx | +------------------+------------+---------------+ | WITHOUT skull | No Cigar | ``-no_ss`` | +------------------+------------+---------------+ Template means: Your template of choice Dset. means: Your anatomical dataset ``-no_ss`` means: Skull stripping should not be attempted on Dset xxx means: Don't put anything, the script will strip Dset No Cigar means: Don't try that combination, it makes no sense.""", argstr="-no_ss", ) class AutoTLRC(AFNICommand): """A minmal wrapper for the AutoTLRC script The only option currently supported is no_ss. For complete details, see the `3dQwarp Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> autoTLRC = afni.AutoTLRC() >>> autoTLRC.inputs.in_file = 'structural.nii' >>> autoTLRC.inputs.no_ss = True >>> autoTLRC.inputs.base = "TT_N27+tlrc" >>> autoTLRC.cmdline '@auto_tlrc -base TT_N27+tlrc -input structural.nii -no_ss' >>> res = autoTLRC.run() # doctest: +SKIP """ _cmd = "@auto_tlrc" input_spec = AutoTLRCInputSpec output_spec = AFNICommandOutputSpec def _list_outputs(self): outputs = self.output_spec().get() ext = ".HEAD" outputs["out_file"] = os.path.abspath( self._gen_fname(self.inputs.in_file, suffix="+tlrc") + ext ) return outputs class BandpassInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dBandpass", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_bp", desc="output file from 3dBandpass", argstr="-prefix %s", position=1, name_source="in_file", ) lowpass = traits.Float(desc="lowpass", argstr="%f", position=-2, mandatory=True) highpass = traits.Float(desc="highpass", argstr="%f", position=-3, mandatory=True) mask = File(desc="mask file", position=2, argstr="-mask %s", exists=True) despike = traits.Bool( argstr="-despike", desc="Despike each time series before other processing. Hopefully, " "you don't actually need to do this, which is why it is " "optional.", ) orthogonalize_file = InputMultiPath( File(exists=True), argstr="-ort %s", desc="Also orthogonalize input to columns in f.1D. Multiple '-ort' " "options are allowed.", ) orthogonalize_dset = File( exists=True, argstr="-dsort %s", desc="Orthogonalize each voxel to the corresponding voxel time series " "in dataset 'fset', which must have the same spatial and " "temporal grid structure as the main input dataset. At present, " "only one '-dsort' option is allowed.", ) no_detrend = traits.Bool( argstr="-nodetrend", desc="Skip the quadratic detrending of the input that occurs before " "the FFT-based bandpassing. 
You would only want to do this if " "the dataset had been detrended already in some other program.", ) tr = traits.Float( argstr="-dt %f", desc="Set time step (TR) in sec [default=from dataset header]." ) nfft = traits.Int( argstr="-nfft %d", desc="Set the FFT length [must be a legal value]." ) normalize = traits.Bool( argstr="-norm", desc="Make all output time series have L2 norm = 1 (i.e., sum of " "squares = 1).", ) automask = traits.Bool( argstr="-automask", desc="Create a mask from the input dataset." ) blur = traits.Float( argstr="-blur %f", desc="Blur (inside the mask only) with a filter width (FWHM) of " "'fff' millimeters.", ) localPV = traits.Float( argstr="-localPV %f", desc="Replace each vector by the local Principal Vector (AKA first " "singular vector) from a neighborhood of radius 'rrr' " "millimeters. Note that the PV time series is L2 normalized. " "This option is mostly for Bob Cox to have fun with.", ) notrans = traits.Bool( argstr="-notrans", desc="Don't check for initial positive transients in the data. " "The test is a little slow, so skipping it is OK, if you KNOW " "the data time series are transient-free.", ) class Bandpass(AFNICommand): """Program to lowpass and/or highpass each voxel time series in a dataset, offering more/different options than Fourier For complete details, see the `3dBandpass Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> from nipype.testing import example_data >>> bandpass = afni.Bandpass() >>> bandpass.inputs.in_file = 'functional.nii' >>> bandpass.inputs.highpass = 0.005 >>> bandpass.inputs.lowpass = 0.1 >>> bandpass.cmdline '3dBandpass -prefix functional_bp 0.005000 0.100000 functional.nii' >>> res = bandpass.run() # doctest: +SKIP """ _cmd = "3dBandpass" input_spec = BandpassInputSpec output_spec = AFNICommandOutputSpec class BlurInMaskInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dSkullStrip", argstr="-input %s", position=1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_blur", desc="output to the file", argstr="-prefix %s", name_source="in_file", position=-1, ) mask = File( desc="Mask dataset, if desired. Blurring will occur only within the " "mask. Voxels NOT in the mask will be set to zero in the output.", argstr="-mask %s", ) multimask = File( desc="Multi-mask dataset -- each distinct nonzero value in dataset " "will be treated as a separate mask for blurring purposes.", argstr="-Mmask %s", ) automask = traits.Bool( desc="Create an automask from the input dataset.", argstr="-automask" ) fwhm = traits.Float(desc="fwhm kernel size", argstr="-FWHM %f", mandatory=True) preserve = traits.Bool( desc="Normally, voxels not in the mask will be set to zero in the " "output. If you want the original values in the dataset to be " "preserved in the output, use this option.", argstr="-preserve", ) float_out = traits.Bool( desc="Save dataset as floats, no matter what the input data type is.", argstr="-float", ) options = Str(desc="options", argstr="%s", position=2) class BlurInMask(AFNICommand): """Blurs a dataset spatially inside a mask. That's all. Experimental. For complete details, see the `3dBlurInMask Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> bim = afni.BlurInMask() >>> bim.inputs.in_file = 'functional.nii' >>> bim.inputs.mask = 'mask.nii' >>> bim.inputs.fwhm = 5.0 >>> bim.cmdline # doctest: +ELLIPSIS '3dBlurInMask -input functional.nii -FWHM 5.000000 -mask mask.nii -prefix functional_blur' >>> res = bim.run() # doctest: +SKIP """ _cmd = "3dBlurInMask" input_spec = BlurInMaskInputSpec output_spec = AFNICommandOutputSpec class BlurToFWHMInputSpec(AFNICommandInputSpec): in_file = File( desc="The dataset that will be smoothed", argstr="-input %s", mandatory=True, exists=True, ) automask = traits.Bool( desc="Create an automask from the input dataset.", argstr="-automask" ) fwhm = traits.Float( desc="Blur until the 3D FWHM reaches this value (in mm)", argstr="-FWHM %f" ) fwhmxy = traits.Float( desc="Blur until the 2D (x,y)-plane FWHM reaches this value (in mm)", argstr="-FWHMxy %f", ) blurmaster = File( desc="The dataset whose smoothness controls the process.", argstr="-blurmaster %s", exists=True, ) mask = File( desc="Mask dataset, if desired. Voxels NOT in mask will be set to zero " "in output.", argstr="-mask %s", exists=True, ) class BlurToFWHM(AFNICommand): """Blurs a 'master' dataset until it reaches a specified FWHM smoothness (approximately). 
For complete details, see the `3dBlurToFWHM Documentation `_ Examples -------- >>> from nipype.interfaces import afni >>> blur = afni.preprocess.BlurToFWHM() >>> blur.inputs.in_file = 'epi.nii' >>> blur.inputs.fwhm = 2.5 >>> blur.cmdline # doctest: +ELLIPSIS '3dBlurToFWHM -FWHM 2.500000 -input epi.nii -prefix epi_afni' >>> res = blur.run() # doctest: +SKIP """ _cmd = "3dBlurToFWHM" input_spec = BlurToFWHMInputSpec output_spec = AFNICommandOutputSpec class ClipLevelInputSpec(CommandLineInputSpec): in_file = File( desc="input file to 3dClipLevel", argstr="%s", position=-1, mandatory=True, exists=True, ) mfrac = traits.Float( desc="Use the number ff instead of 0.50 in the algorithm", argstr="-mfrac %s", position=2, ) doall = traits.Bool( desc="Apply the algorithm to each sub-brick separately.", argstr="-doall", position=3, xor=("grad"), ) grad = File( desc="Also compute a 'gradual' clip level as a function of voxel " "position, and output that to a dataset.", argstr="-grad %s", position=3, xor=("doall"), ) class ClipLevelOutputSpec(TraitedSpec): clip_val = traits.Float(desc="output") class ClipLevel(AFNICommandBase): """Estimates the value at which to clip the anatomical dataset so that background regions are set to zero. For complete details, see the `3dClipLevel Documentation. 
`_ Examples -------- >>> from nipype.interfaces.afni import preprocess >>> cliplevel = preprocess.ClipLevel() >>> cliplevel.inputs.in_file = 'anatomical.nii' >>> cliplevel.cmdline '3dClipLevel anatomical.nii' >>> res = cliplevel.run() # doctest: +SKIP """ _cmd = "3dClipLevel" input_spec = ClipLevelInputSpec output_spec = ClipLevelOutputSpec def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() outfile = os.path.join(os.getcwd(), "stat_result.json") if runtime is None: try: clip_val = load_json(outfile)["stat"] except IOError: return self.run().outputs else: clip_val = [] for line in runtime.stdout.split("\n"): if line: values = line.split() if len(values) > 1: clip_val.append([float(val) for val in values]) else: clip_val.extend([float(val) for val in values]) if len(clip_val) == 1: clip_val = clip_val[0] save_json(outfile, dict(stat=clip_val)) outputs.clip_val = clip_val return outputs class DegreeCentralityInputSpec(CentralityInputSpec): """DegreeCentrality inputspec""" in_file = File( desc="input file to 3dDegreeCentrality", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) sparsity = traits.Float( desc="only take the top percent of connections", argstr="-sparsity %f" ) oned_file = Str( desc="output filepath to text dump of correlation matrix", argstr="-out1D %s" ) class DegreeCentralityOutputSpec(AFNICommandOutputSpec): """DegreeCentrality outputspec""" oned_file = File( desc="The text output of the similarity matrix computed after " "thresholding with one-dimensional and ijk voxel indices, " "correlations, image extents, and affine matrix." ) class DegreeCentrality(AFNICommand): """Performs degree centrality on a dataset using a given maskfile via 3dDegreeCentrality For complete details, see the `3dDegreeCentrality Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> degree = afni.DegreeCentrality() >>> degree.inputs.in_file = 'functional.nii' >>> degree.inputs.mask = 'mask.nii' >>> degree.inputs.sparsity = 1 # keep the top one percent of connections >>> degree.inputs.out_file = 'out.nii' >>> degree.cmdline '3dDegreeCentrality -mask mask.nii -prefix out.nii -sparsity 1.000000 functional.nii' >>> res = degree.run() # doctest: +SKIP """ _cmd = "3dDegreeCentrality" input_spec = DegreeCentralityInputSpec output_spec = DegreeCentralityOutputSpec # Re-define generated inputs def _list_outputs(self): # Update outputs dictionary if oned file is defined outputs = super(DegreeCentrality, self)._list_outputs() if self.inputs.oned_file: outputs["oned_file"] = os.path.abspath(self.inputs.oned_file) return outputs class DespikeInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dDespike", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_despike", desc="output image file name", argstr="-prefix %s", name_source="in_file", ) class Despike(AFNICommand): """Removes 'spikes' from the 3D+time input dataset For complete details, see the `3dDespike Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> despike = afni.Despike() >>> despike.inputs.in_file = 'functional.nii' >>> despike.cmdline '3dDespike -prefix functional_despike functional.nii' >>> res = despike.run() # doctest: +SKIP """ _cmd = "3dDespike" input_spec = DespikeInputSpec output_spec = AFNICommandOutputSpec class DetrendInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dDetrend", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_detrend", desc="output image file name", argstr="-prefix %s", name_source="in_file", ) class Detrend(AFNICommand): """This program removes components from voxel time series using linear least squares For complete details, see the `3dDetrend Documentation. `_ Examples -------- >>> from nipype.interfaces import afni >>> detrend = afni.Detrend() >>> detrend.inputs.in_file = 'functional.nii' >>> detrend.inputs.args = '-polort 2' >>> detrend.inputs.outputtype = 'AFNI' >>> detrend.cmdline '3dDetrend -polort 2 -prefix functional_detrend functional.nii' >>> res = detrend.run() # doctest: +SKIP """ _cmd = "3dDetrend" input_spec = DetrendInputSpec output_spec = AFNICommandOutputSpec class ECMInputSpec(CentralityInputSpec): """ECM inputspec""" in_file = File( desc="input file to 3dECM", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) sparsity = traits.Float( desc="only take the top percent of connections", argstr="-sparsity %f" ) full = traits.Bool( desc="Full power method; enables thresholding; automatically selected " "if -thresh or -sparsity are set", argstr="-full", ) fecm = traits.Bool( desc="Fast centrality method; substantial speed increase but cannot " "accomodate thresholding; automatically selected if -thresh or " "-sparsity are not set", argstr="-fecm", ) shift = traits.Float( desc="shift correlation coefficients in similarity matrix to enforce " "non-negativity, s >= 0.0; default = 0.0 for -full, 1.0 for 
-fecm", argstr="-shift %f", ) scale = traits.Float( desc="scale correlation coefficients in similarity matrix to after " "shifting, x >= 0.0; default = 1.0 for -full, 0.5 for -fecm", argstr="-scale %f", ) eps = traits.Float( desc="sets the stopping criterion for the power iteration; " ":math:`l2\\|v_\\text{old} - v_\\text{new}\\| < eps\\|v_\\text{old}\\|`; " "default = 0.001", argstr="-eps %f", ) max_iter = traits.Int( desc="sets the maximum number of iterations to use in the power " "iteration; default = 1000", argstr="-max_iter %d", ) memory = traits.Float( desc="Limit memory consumption on system by setting the amount of GB " "to limit the algorithm to; default = 2GB", argstr="-memory %f", ) class ECM(AFNICommand): """Performs degree centrality on a dataset using a given maskfile via the 3dECM command For complete details, see the `3dECM Documentation. `_ Examples -------- >>> from nipype.interfaces import afni >>> ecm = afni.ECM() >>> ecm.inputs.in_file = 'functional.nii' >>> ecm.inputs.mask = 'mask.nii' >>> ecm.inputs.sparsity = 0.1 # keep top 0.1% of connections >>> ecm.inputs.out_file = 'out.nii' >>> ecm.cmdline '3dECM -mask mask.nii -prefix out.nii -sparsity 0.100000 functional.nii' >>> res = ecm.run() # doctest: +SKIP """ _cmd = "3dECM" input_spec = ECMInputSpec output_spec = AFNICommandOutputSpec class FimInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dfim+", argstr="-input %s", position=1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_fim", desc="output image file name", argstr="-bucket %s", name_source="in_file", ) ideal_file = File( desc="ideal time series file name", argstr="-ideal_file %s", position=2, mandatory=True, exists=True, ) fim_thr = traits.Float( desc="fim internal mask threshold value", argstr="-fim_thr %f", position=3 ) out = Str( desc="Flag to output the specified parameter", argstr="-out %s", position=4 ) class Fim(AFNICommand): """Program to calculate the cross-correlation of 
an ideal reference waveform with the measured FMRI time series for each voxel. For complete details, see the `3dfim+ Documentation. `_ Examples -------- >>> from nipype.interfaces import afni >>> fim = afni.Fim() >>> fim.inputs.in_file = 'functional.nii' >>> fim.inputs.ideal_file= 'seed.1D' >>> fim.inputs.out_file = 'functional_corr.nii' >>> fim.inputs.out = 'Correlation' >>> fim.inputs.fim_thr = 0.0009 >>> fim.cmdline '3dfim+ -input functional.nii -ideal_file seed.1D -fim_thr 0.000900 -out Correlation -bucket functional_corr.nii' >>> res = fim.run() # doctest: +SKIP """ _cmd = "3dfim+" input_spec = FimInputSpec output_spec = AFNICommandOutputSpec class FourierInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dFourier", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_fourier", desc="output image file name", argstr="-prefix %s", name_source="in_file", ) lowpass = traits.Float(desc="lowpass", argstr="-lowpass %f", mandatory=True) highpass = traits.Float(desc="highpass", argstr="-highpass %f", mandatory=True) retrend = traits.Bool( desc="Any mean and linear trend are removed before filtering. This " "will restore the trend after filtering.", argstr="-retrend", ) class Fourier(AFNICommand): """Program to lowpass and/or highpass each voxel time series in a dataset, via the FFT For complete details, see the `3dFourier Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> fourier = afni.Fourier() >>> fourier.inputs.in_file = 'functional.nii' >>> fourier.inputs.retrend = True >>> fourier.inputs.highpass = 0.005 >>> fourier.inputs.lowpass = 0.1 >>> fourier.cmdline '3dFourier -highpass 0.005000 -lowpass 0.100000 -prefix functional_fourier -retrend functional.nii' >>> res = fourier.run() # doctest: +SKIP """ _cmd = "3dFourier" input_spec = FourierInputSpec output_spec = AFNICommandOutputSpec class HistInputSpec(CommandLineInputSpec): in_file = File( desc="input file to 3dHist", argstr="-input %s", position=1, mandatory=True, exists=True, copyfile=False, ) out_file = File( desc="Write histogram to niml file with this prefix", name_template="%s_hist", keep_extension=False, argstr="-prefix %s", name_source=["in_file"], ) showhist = traits.Bool( False, usedefault=True, desc="write a text visual histogram", argstr="-showhist" ) out_show = File( name_template="%s_hist.out", desc="output image file name", keep_extension=False, argstr="> %s", name_source="in_file", position=-1, ) mask = File(desc="matrix to align input file", argstr="-mask %s", exists=True) nbin = traits.Int(desc="number of bins", argstr="-nbin %d") max_value = traits.Float(argstr="-max %f", desc="maximum intensity value") min_value = traits.Float(argstr="-min %f", desc="minimum intensity value") bin_width = traits.Float(argstr="-binwidth %f", desc="bin width") class HistOutputSpec(TraitedSpec): out_file = File(desc="output file", exists=True) out_show = File(desc="output visual histogram") class Hist(AFNICommandBase): """Computes average of all voxels in the input dataset which satisfy the criterion in the options list For complete details, see the `3dHist Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> hist = afni.Hist() >>> hist.inputs.in_file = 'functional.nii' >>> hist.cmdline '3dHist -input functional.nii -prefix functional_hist' >>> res = hist.run() # doctest: +SKIP """ _cmd = "3dHist" input_spec = HistInputSpec output_spec = HistOutputSpec _redirect_x = True def __init__(self, **inputs): super(Hist, self).__init__(**inputs) if not no_afni(): version = Info.version() # As of AFNI 16.0.00, redirect_x is not needed if version[0] > 2015: self._redirect_x = False def _parse_inputs(self, skip=None): if not self.inputs.showhist: if skip is None: skip = [] skip += ["out_show"] return super(Hist, self)._parse_inputs(skip=skip) def _list_outputs(self): outputs = super(Hist, self)._list_outputs() outputs["out_file"] += ".niml.hist" if not self.inputs.showhist: outputs["out_show"] = Undefined return outputs class LFCDInputSpec(CentralityInputSpec): """LFCD inputspec""" in_file = File( desc="input file to 3dLFCD", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) class LFCD(AFNICommand): """Performs degree centrality on a dataset using a given maskfile via the 3dLFCD command For complete details, see the `3dLFCD Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> lfcd = afni.LFCD() >>> lfcd.inputs.in_file = 'functional.nii' >>> lfcd.inputs.mask = 'mask.nii' >>> lfcd.inputs.thresh = 0.8 # keep all connections with corr >= 0.8 >>> lfcd.inputs.out_file = 'out.nii' >>> lfcd.cmdline '3dLFCD -mask mask.nii -prefix out.nii -thresh 0.800000 functional.nii' >>> res = lfcd.run() # doctest: +SKIP """ _cmd = "3dLFCD" input_spec = LFCDInputSpec output_spec = AFNICommandOutputSpec class MaskaveInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dmaskave", argstr="%s", position=-2, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_maskave.1D", desc="output image file name", keep_extension=True, argstr="> %s", name_source="in_file", position=-1, ) mask = File( desc="matrix to align input file", argstr="-mask %s", position=1, exists=True ) quiet = traits.Bool(desc="matrix to align input file", argstr="-quiet", position=2) class Maskave(AFNICommand): """Computes average of all voxels in the input dataset which satisfy the criterion in the options list For complete details, see the `3dmaskave Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> maskave = afni.Maskave() >>> maskave.inputs.in_file = 'functional.nii' >>> maskave.inputs.mask= 'seed_mask.nii' >>> maskave.inputs.quiet= True >>> maskave.cmdline # doctest: +ELLIPSIS '3dmaskave -mask seed_mask.nii -quiet functional.nii > functional_maskave.1D' >>> res = maskave.run() # doctest: +SKIP """ _cmd = "3dmaskave" input_spec = MaskaveInputSpec output_spec = AFNICommandOutputSpec class MeansInputSpec(AFNICommandInputSpec): in_file_a = File( desc="input file to 3dMean", argstr="%s", position=-2, mandatory=True, exists=True, ) in_file_b = File( desc="another input file to 3dMean", argstr="%s", position=-1, exists=True ) datum = traits.Str( desc="Sets the data type of the output dataset", argstr="-datum %s" ) out_file = File( name_template="%s_mean", desc="output image file name", argstr="-prefix %s", name_source="in_file_a", ) scale = Str(desc="scaling of output", argstr="-%sscale") non_zero = traits.Bool(desc="use only non-zero values", argstr="-non_zero") std_dev = traits.Bool(desc="calculate std dev", argstr="-stdev") sqr = traits.Bool(desc="mean square instead of value", argstr="-sqr") summ = traits.Bool(desc="take sum, (not average)", argstr="-sum") count = traits.Bool(desc="compute count of non-zero voxels", argstr="-count") mask_inter = traits.Bool(desc="create intersection mask", argstr="-mask_inter") mask_union = traits.Bool(desc="create union mask", argstr="-mask_union") class Means(AFNICommand): """Takes the voxel-by-voxel mean of all input datasets using 3dMean For complete details, see the `3dMean Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> means = afni.Means() >>> means.inputs.in_file_a = 'im1.nii' >>> means.inputs.in_file_b = 'im2.nii' >>> means.inputs.out_file = 'output.nii' >>> means.cmdline '3dMean -prefix output.nii im1.nii im2.nii' >>> res = means.run() # doctest: +SKIP >>> from nipype.interfaces import afni >>> means = afni.Means() >>> means.inputs.in_file_a = 'im1.nii' >>> means.inputs.out_file = 'output.nii' >>> means.inputs.datum = 'short' >>> means.cmdline '3dMean -datum short -prefix output.nii im1.nii' >>> res = means.run() # doctest: +SKIP """ _cmd = "3dMean" input_spec = MeansInputSpec output_spec = AFNICommandOutputSpec class OutlierCountInputSpec(CommandLineInputSpec): in_file = File( argstr="%s", mandatory=True, exists=True, position=-2, desc="input dataset" ) mask = File( exists=True, argstr="-mask %s", xor=["autoclip", "automask"], desc="only count voxels within the given mask", ) qthr = traits.Range( value=1e-3, low=0.0, high=1.0, usedefault=True, argstr="-qthr %.5f", desc="indicate a value for q to compute alpha", ) autoclip = traits.Bool( False, usedefault=True, argstr="-autoclip", xor=["mask"], desc="clip off small voxels", ) automask = traits.Bool( False, usedefault=True, argstr="-automask", xor=["mask"], desc="clip off small voxels", ) fraction = traits.Bool( False, usedefault=True, argstr="-fraction", desc="write out the fraction of masked voxels which are outliers at " "each timepoint", ) interval = traits.Bool( False, usedefault=True, argstr="-range", desc="write out the median + 3.5 MAD of outlier count with each timepoint", ) save_outliers = traits.Bool(False, usedefault=True, desc="enables out_file option") outliers_file = File( name_template="%s_outliers", argstr="-save %s", name_source=["in_file"], output_name="out_outliers", keep_extension=True, desc="output image file name", ) polort = traits.Int( argstr="-polort %d", desc="detrend each voxel timeseries with polynomials" ) legendre = traits.Bool( 
False, usedefault=True, argstr="-legendre", desc="use Legendre polynomials" ) out_file = File( name_template="%s_outliers", name_source=["in_file"], keep_extension=False, desc="capture standard output", ) class OutlierCountOutputSpec(TraitedSpec): out_outliers = File(exists=True, desc="output image file name") out_file = File(desc="capture standard output") class OutlierCount(CommandLine): """Calculates number of 'outliers' at each time point of a a 3D+time dataset. For complete details, see the `3dToutcount Documentation `_ Examples -------- >>> from nipype.interfaces import afni >>> toutcount = afni.OutlierCount() >>> toutcount.inputs.in_file = 'functional.nii' >>> toutcount.cmdline # doctest: +ELLIPSIS '3dToutcount -qthr 0.00100 functional.nii' >>> res = toutcount.run() # doctest: +SKIP """ _cmd = "3dToutcount" input_spec = OutlierCountInputSpec output_spec = OutlierCountOutputSpec _terminal_output = "file_split" def _parse_inputs(self, skip=None): if skip is None: skip = [] # This is not strictly an input, but needs be # set before run() is called. 
if self.terminal_output == "none": self.terminal_output = "file_split" if not self.inputs.save_outliers: skip += ["outliers_file"] return super(OutlierCount, self)._parse_inputs(skip) def _run_interface(self, runtime, correct_return_codes=(0,)): runtime = super(OutlierCount, self)._run_interface( runtime, correct_return_codes ) # Read from runtime.stdout or runtime.merged with open(op.abspath(self.inputs.out_file), "w") as outfh: outfh.write(runtime.stdout or runtime.merged) return runtime def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) if self.inputs.save_outliers: outputs["out_outliers"] = op.abspath(self.inputs.outliers_file) return outputs class QualityIndexInputSpec(CommandLineInputSpec): in_file = File( argstr="%s", mandatory=True, exists=True, position=-2, desc="input dataset" ) mask = File( exists=True, argstr="-mask %s", xor=["autoclip", "automask"], desc="compute correlation only across masked voxels", ) spearman = traits.Bool( False, usedefault=True, argstr="-spearman", desc="Quality index is 1 minus the Spearman (rank) correlation " "coefficient of each sub-brick with the median sub-brick. 
" "(default).", ) quadrant = traits.Bool( False, usedefault=True, argstr="-quadrant", desc="Similar to -spearman, but using 1 minus the quadrant correlation " "coefficient as the quality index.", ) autoclip = traits.Bool( False, usedefault=True, argstr="-autoclip", xor=["mask"], desc="clip off small voxels", ) automask = traits.Bool( False, usedefault=True, argstr="-automask", xor=["mask"], desc="clip off small voxels", ) clip = traits.Float(argstr="-clip %f", desc="clip off values below") interval = traits.Bool( False, usedefault=True, argstr="-range", desc="write out the median + 3.5 MAD of outlier count with each timepoint", ) out_file = File( name_template="%s_tqual", name_source=["in_file"], argstr="> %s", keep_extension=False, position=-1, desc="capture standard output", ) class QualityIndexOutputSpec(TraitedSpec): out_file = File(desc="file containing the captured standard output") class QualityIndex(CommandLine): """Computes a quality index for each sub-brick in a 3D+time dataset. The output is a 1D time series with the index for each sub-brick. The results are written to stdout. 
Examples -------- >>> from nipype.interfaces import afni >>> tqual = afni.QualityIndex() >>> tqual.inputs.in_file = 'functional.nii' >>> tqual.cmdline # doctest: +ELLIPSIS '3dTqual functional.nii > functional_tqual' >>> res = tqual.run() # doctest: +SKIP See Also -------- For complete details, see the `3dTqual Documentation `_ """ _cmd = "3dTqual" input_spec = QualityIndexInputSpec output_spec = QualityIndexOutputSpec class ROIStatsInputSpec(CommandLineInputSpec): in_file = File( desc="input dataset", argstr="%s", position=-2, mandatory=True, exists=True ) mask = File( desc="input mask", argstr="-mask %s", position=3, exists=True, deprecated="1.1.4", new_name="mask_file", ) mask_file = File(desc="input mask", argstr="-mask %s", exists=True) mask_f2short = traits.Bool( desc="Tells the program to convert a float mask to short integers, " "by simple rounding.", argstr="-mask_f2short", ) num_roi = traits.Int( desc="Forces the assumption that the mask dataset's ROIs are " "denoted by 1 to n inclusive. Normally, the program " "figures out the ROIs on its own. This option is " "useful if a) you are certain that the mask dataset " "has no values outside the range [0 n], b) there may " "be some ROIs missing between [1 n] in the mask data-" "set and c) you want those columns in the output any-" "way so the output lines up with the output from other " "invocations of 3dROIstats.", argstr="-numroi %s", ) zerofill = traits.Str( requires=["num_roi"], desc="For ROI labels not found, use the provided string instead of " "a '0' in the output file. Only active if `num_roi` is " "enabled.", argstr="-zerofill %s", ) roisel = File( exists=True, desc="Only considers ROIs denoted by values found in the specified " "file. Note that the order of the ROIs as specified in the file " "is not preserved. 
So an SEL.1D of '2 8 20' produces the same " "output as '8 20 2'", argstr="-roisel %s", ) debug = traits.Bool(desc="print debug information", argstr="-debug") quiet = traits.Bool(desc="execute quietly", argstr="-quiet") nomeanout = traits.Bool( desc="Do not include the (zero-inclusive) mean among computed stats", argstr="-nomeanout", ) nobriklab = traits.Bool( desc="Do not print the sub-brick label next to its index", argstr="-nobriklab" ) format1D = traits.Bool( xor=["format1DR"], desc="Output results in a 1D format that includes commented labels", argstr="-1Dformat", ) format1DR = traits.Bool( xor=["format1D"], desc="Output results in a 1D format that includes uncommented " "labels. May not work optimally with typical 1D functions, " "but is useful for R functions.", argstr="-1DRformat", ) _stat_names = [ "mean", "sum", "voxels", "minmax", "sigma", "median", "mode", "summary", "zerominmax", "zerosigma", "zeromedian", "zeromode", ] stat = InputMultiObject( traits.Enum(_stat_names), desc="""\ Statistics to compute. Options include: * mean = Compute the mean using only non_zero voxels. Implies the opposite for the mean computed by default. * median = Compute the median of nonzero voxels * mode = Compute the mode of nonzero voxels. (integral valued sets only) * minmax = Compute the min/max of nonzero voxels * sum = Compute the sum using only nonzero voxels. * voxels = Compute the number of nonzero voxels * sigma = Compute the standard deviation of nonzero voxels Statistics that include zero-valued voxels: * zerominmax = Compute the min/max of all voxels. * zerosigma = Compute the standard deviation of all voxels. * zeromedian = Compute the median of all voxels. * zeromode = Compute the mode of all voxels. * summary = Only output a summary line with the grand mean across all briks in the input dataset. This option cannot be used with nomeanout. 
More that one option can be specified.""", argstr="%s...", ) out_file = File( name_template="%s_roistat.1D", desc="output file", keep_extension=False, argstr="> %s", name_source="in_file", position=-1, ) class ROIStatsOutputSpec(TraitedSpec): out_file = File(desc="output tab-separated values file", exists=True) class ROIStats(AFNICommandBase): """Display statistics over masked regions For complete details, see the `3dROIstats Documentation `_ Examples -------- >>> from nipype.interfaces import afni >>> roistats = afni.ROIStats() >>> roistats.inputs.in_file = 'functional.nii' >>> roistats.inputs.mask_file = 'skeleton_mask.nii.gz' >>> roistats.inputs.stat = ['mean', 'median', 'voxels'] >>> roistats.inputs.nomeanout = True >>> roistats.cmdline '3dROIstats -mask skeleton_mask.nii.gz -nomeanout -nzmean -nzmedian -nzvoxels functional.nii > functional_roistat.1D' >>> res = roistats.run() # doctest: +SKIP """ _cmd = "3dROIstats" _terminal_output = "allatonce" input_spec = ROIStatsInputSpec output_spec = ROIStatsOutputSpec def _format_arg(self, name, trait_spec, value): _stat_dict = { "mean": "-nzmean", "median": "-nzmedian", "mode": "-nzmode", "minmax": "-nzminmax", "sigma": "-nzsigma", "voxels": "-nzvoxels", "sum": "-nzsum", "summary": "-summary", "zerominmax": "-minmax", "zeromedian": "-median", "zerosigma": "-sigma", "zeromode": "-mode", } if name == "stat": value = [_stat_dict[v] for v in value] return super(ROIStats, self)._format_arg(name, trait_spec, value) class RetroicorInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dretroicor", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_retroicor", name_source=["in_file"], desc="output image file name", argstr="-prefix %s", position=1, ) card = File( desc="1D cardiac data file for cardiac correction", argstr="-card %s", position=-2, exists=True, ) resp = File( desc="1D respiratory waveform data for correction", argstr="-resp %s", position=-3, 
exists=True, ) threshold = traits.Int( desc="Threshold for detection of R-wave peaks in input (Make sure it " "is above the background noise level, Try 3/4 or 4/5 times range " "plus minimum)", argstr="-threshold %d", position=-4, ) order = traits.Int( desc="The order of the correction (2 is typical)", argstr="-order %s", position=-5, ) cardphase = File( desc="Filename for 1D cardiac phase output", argstr="-cardphase %s", position=-6, hash_files=False, ) respphase = File( desc="Filename for 1D resp phase output", argstr="-respphase %s", position=-7, hash_files=False, ) class Retroicor(AFNICommand): """Performs Retrospective Image Correction for physiological motion effects, using a slightly modified version of the RETROICOR algorithm The durations of the physiological inputs are assumed to equal the duration of the dataset. Any constant sampling rate may be used, but 40 Hz seems to be acceptable. This program's cardiac peak detection algorithm is rather simplistic, so you might try using the scanner's cardiac gating output (transform it to a spike wave if necessary). This program uses slice timing information embedded in the dataset to estimate the proper cardiac/respiratory phase for each slice. It makes sense to run this program before any program that may destroy the slice timings (e.g. 3dvolreg for motion correction). For complete details, see the `3dretroicor Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> ret = afni.Retroicor() >>> ret.inputs.in_file = 'functional.nii' >>> ret.inputs.card = 'mask.1D' >>> ret.inputs.resp = 'resp.1D' >>> ret.inputs.outputtype = 'NIFTI' >>> ret.cmdline '3dretroicor -prefix functional_retroicor.nii -resp resp.1D -card mask.1D functional.nii' >>> res = ret.run() # doctest: +SKIP """ _cmd = "3dretroicor" input_spec = RetroicorInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, trait_spec, value): if name == "in_file": if not isdefined(self.inputs.card) and not isdefined(self.inputs.resp): return None return super(Retroicor, self)._format_arg(name, trait_spec, value) class SegInputSpec(CommandLineInputSpec): in_file = File( desc="ANAT is the volume to segment", argstr="-anat %s", position=-1, mandatory=True, exists=True, copyfile=True, ) mask = traits.Either( traits.Enum("AUTO"), File(exists=True), desc="only non-zero voxels in mask are analyzed. mask can either be a " 'dataset or the string "AUTO" which would use AFNI\'s automask ' "function to create the mask.", argstr="-mask %s", position=-2, mandatory=True, ) blur_meth = traits.Enum( "BFT", "BIM", argstr="-blur_meth %s", desc="set the blurring method for bias field estimation", ) bias_fwhm = traits.Float( desc="The amount of blurring used when estimating the field bias with " "the Wells method", argstr="-bias_fwhm %f", ) classes = Str( desc="CLASS_STRING is a semicolon delimited string of class labels", argstr="-classes %s", ) bmrf = traits.Float( desc="Weighting factor controlling spatial homogeneity of the " "classifications", argstr="-bmrf %f", ) bias_classes = Str( desc="A semicolon delimited string of classes that contribute to the " "estimation of the bias field", argstr="-bias_classes %s", ) prefix = Str( desc="the prefix for the output folder containing all output volumes", argstr="-prefix %s", ) mixfrac = Str( desc="MIXFRAC sets up the volume-wide (within mask) tissue fractions " "while 
initializing the segmentation (see IGNORE for exception)", argstr="-mixfrac %s", ) mixfloor = traits.Float( desc="Set the minimum value for any class's mixing fraction", argstr="-mixfloor %f", ) main_N = traits.Int(desc="Number of iterations to perform.", argstr="-main_N %d") class Seg(AFNICommandBase): """3dSeg segments brain volumes into tissue classes. The program allows for adding a variety of global and voxelwise priors. However for the moment, only mixing fractions and MRF are documented. For complete details, see the `3dSeg Documentation. `_ Examples -------- >>> from nipype.interfaces.afni import preprocess >>> seg = preprocess.Seg() >>> seg.inputs.in_file = 'structural.nii' >>> seg.inputs.mask = 'AUTO' >>> seg.cmdline '3dSeg -mask AUTO -anat structural.nii' >>> res = seg.run() # doctest: +SKIP """ _cmd = "3dSeg" input_spec = SegInputSpec output_spec = AFNICommandOutputSpec def aggregate_outputs(self, runtime=None, needed_outputs=None): import glob outputs = self._outputs() if isdefined(self.inputs.prefix): outfile = os.path.join(os.getcwd(), self.inputs.prefix, "Classes+*.BRIK") else: outfile = os.path.join(os.getcwd(), "Segsy", "Classes+*.BRIK") outputs.out_file = glob.glob(outfile)[0] return outputs class SkullStripInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dSkullStrip", argstr="-input %s", position=1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_skullstrip", desc="output image file name", argstr="-prefix %s", name_source="in_file", ) class SkullStrip(AFNICommand): """A program to extract the brain from surrounding tissue from MRI T1-weighted images. TODO Add optional arguments. For complete details, see the `3dSkullStrip Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> skullstrip = afni.SkullStrip() >>> skullstrip.inputs.in_file = 'functional.nii' >>> skullstrip.inputs.args = '-o_ply' >>> skullstrip.cmdline '3dSkullStrip -input functional.nii -o_ply -prefix functional_skullstrip' >>> res = skullstrip.run() # doctest: +SKIP """ _cmd = "3dSkullStrip" _redirect_x = True input_spec = SkullStripInputSpec output_spec = AFNICommandOutputSpec def __init__(self, **inputs): super(SkullStrip, self).__init__(**inputs) if not no_afni(): v = Info.version() # Between AFNI 16.0.00 and 16.2.07, redirect_x is not needed if v >= (2016, 0, 0) and v < (2016, 2, 7): self._redirect_x = False class TCorr1DInputSpec(AFNICommandInputSpec): xset = File( desc="3d+time dataset input", argstr=" %s", position=-2, mandatory=True, exists=True, copyfile=False, ) y_1d = File( desc="1D time series file input", argstr=" %s", position=-1, mandatory=True, exists=True, ) out_file = File( desc="output filename prefix", name_template="%s_correlation.nii.gz", argstr="-prefix %s", name_source="xset", keep_extension=True, ) pearson = traits.Bool( desc="Correlation is the normal Pearson correlation coefficient", argstr=" -pearson", xor=["spearman", "quadrant", "ktaub"], position=1, ) spearman = traits.Bool( desc="Correlation is the Spearman (rank) correlation coefficient", argstr=" -spearman", xor=["pearson", "quadrant", "ktaub"], position=1, ) quadrant = traits.Bool( desc="Correlation is the quadrant correlation coefficient", argstr=" -quadrant", xor=["pearson", "spearman", "ktaub"], position=1, ) ktaub = traits.Bool( desc="Correlation is the Kendall's tau_b correlation coefficient", argstr=" -ktaub", xor=["pearson", "spearman", "quadrant"], position=1, ) class TCorr1DOutputSpec(TraitedSpec): out_file = File(desc="output file containing correlations", exists=True) class TCorr1D(AFNICommand): """Computes the correlation coefficient between each voxel time series in the input 3D+time dataset. 
For complete details, see the `3dTcorr1D Documentation. `_ >>> from nipype.interfaces import afni >>> tcorr1D = afni.TCorr1D() >>> tcorr1D.inputs.xset= 'u_rc1s1_Template.nii' >>> tcorr1D.inputs.y_1d = 'seed.1D' >>> tcorr1D.cmdline '3dTcorr1D -prefix u_rc1s1_Template_correlation.nii.gz u_rc1s1_Template.nii seed.1D' >>> res = tcorr1D.run() # doctest: +SKIP """ _cmd = "3dTcorr1D" input_spec = TCorr1DInputSpec output_spec = TCorr1DOutputSpec class TCorrMapInputSpec(AFNICommandInputSpec): in_file = File(exists=True, argstr="-input %s", mandatory=True, copyfile=False) seeds = File(exists=True, argstr="-seed %s", xor=("seeds_width")) mask = File(exists=True, argstr="-mask %s") automask = traits.Bool(argstr="-automask") polort = traits.Int(argstr="-polort %d") bandpass = traits.Tuple((traits.Float(), traits.Float()), argstr="-bpass %f %f") regress_out_timeseries = File(exists=True, argstr="-ort %s") blur_fwhm = traits.Float(argstr="-Gblur %f") seeds_width = traits.Float(argstr="-Mseed %f", xor=("seeds")) # outputs mean_file = File(argstr="-Mean %s", suffix="_mean", name_source="in_file") zmean = File(argstr="-Zmean %s", suffix="_zmean", name_source="in_file") qmean = File(argstr="-Qmean %s", suffix="_qmean", name_source="in_file") pmean = File(argstr="-Pmean %s", suffix="_pmean", name_source="in_file") _thresh_opts = ( "absolute_threshold", "var_absolute_threshold", "var_absolute_threshold_normalize", ) thresholds = traits.List(traits.Int()) absolute_threshold = File( argstr="-Thresh %f %s", suffix="_thresh", name_source="in_file", xor=_thresh_opts, ) var_absolute_threshold = File( argstr="-VarThresh %f %f %f %s", suffix="_varthresh", name_source="in_file", xor=_thresh_opts, ) var_absolute_threshold_normalize = File( argstr="-VarThreshN %f %f %f %s", suffix="_varthreshn", name_source="in_file", xor=_thresh_opts, ) correlation_maps = File(argstr="-CorrMap %s", name_source="in_file") correlation_maps_masked = File(argstr="-CorrMask %s", name_source="in_file") _expr_opts = 
("average_expr", "average_expr_nonzero", "sum_expr") expr = Str() average_expr = File( argstr="-Aexpr %s %s", suffix="_aexpr", name_source="in_file", xor=_expr_opts ) average_expr_nonzero = File( argstr="-Cexpr %s %s", suffix="_cexpr", name_source="in_file", xor=_expr_opts ) sum_expr = File( argstr="-Sexpr %s %s", suffix="_sexpr", name_source="in_file", xor=_expr_opts ) histogram_bin_numbers = traits.Int() histogram = File(name_source="in_file", argstr="-Hist %d %s", suffix="_hist") class TCorrMapOutputSpec(TraitedSpec): mean_file = File() zmean = File() qmean = File() pmean = File() absolute_threshold = File() var_absolute_threshold = File() var_absolute_threshold_normalize = File() correlation_maps = File() correlation_maps_masked = File() average_expr = File() average_expr_nonzero = File() sum_expr = File() histogram = File() class TCorrMap(AFNICommand): """For each voxel time series, computes the correlation between it and all other voxels, and combines this set of values into the output dataset(s) in some way. For complete details, see the `3dTcorrMap Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> tcm = afni.TCorrMap() >>> tcm.inputs.in_file = 'functional.nii' >>> tcm.inputs.mask = 'mask.nii' >>> tcm.mean_file = 'functional_meancorr.nii' >>> tcm.cmdline # doctest: +SKIP '3dTcorrMap -input functional.nii -mask mask.nii -Mean functional_meancorr.nii' >>> res = tcm.run() # doctest: +SKIP """ _cmd = "3dTcorrMap" input_spec = TCorrMapInputSpec output_spec = TCorrMapOutputSpec _additional_metadata = ["suffix"] def _format_arg(self, name, trait_spec, value): if name in self.inputs._thresh_opts: return trait_spec.argstr % self.inputs.thresholds + [value] elif name in self.inputs._expr_opts: return trait_spec.argstr % (self.inputs.expr, value) elif name == "histogram": return trait_spec.argstr % (self.inputs.histogram_bin_numbers, value) else: return super(TCorrMap, self)._format_arg(name, trait_spec, value) class NetCorrInputSpec(AFNICommandInputSpec): in_file = File( desc="input time series file (4D data set)", exists=True, argstr="-inset %s", mandatory=True, ) in_rois = File( desc="input set of ROIs, each labelled with distinct integers", exists=True, argstr="-in_rois %s", mandatory=True, ) mask = File( desc="can include a whole brain mask within which to " "calculate correlation. Otherwise, data should be " "masked already", exists=True, argstr="-mask %s", ) weight_ts = File( desc="input a 1D file WTS of weights that will be applied " "multiplicatively to each ROI's average time series. " "WTS can be a column- or row-file of values, but it " "must have the same length as the input time series " "volume. 
" "If the initial average time series was A[n] for " "n=0,..,(N-1) time points, then applying a set of " "weights W[n] of the same length from WTS would " "produce a new time series: B[n] = A[n] * W[n]", exists=True, argstr="-weight_ts %s", ) fish_z = traits.Bool( desc="switch to also output a matrix of Fisher Z-transform " "values for the corr coefs (r): " "Z = atanh(r) , " "(with Z=4 being output along matrix diagonals where " "r=1, as the r-to-Z conversion is ceilinged at " "Z = atanh(r=0.999329) = 4, which is still *quite* a " "high Pearson-r value", argstr="-fish_z", ) part_corr = traits.Bool( desc="output the partial correlation matrix", argstr="-part_corr" ) ts_out = traits.Bool( desc="switch to output the mean time series of the ROIs that " "have been used to generate the correlation matrices. " "Output filenames mirror those of the correlation " "matrix files, with a '.netts' postfix", argstr="-ts_out", ) ts_label = traits.Bool( desc="additional switch when using '-ts_out'. Using this " "option will insert the integer ROI label at the start " "of each line of the *.netts file created. Thus, for " "a time series of length N, each line will have N+1 " "numbers, where the first is the integer ROI label " "and the subsequent N are scientific notation values", argstr="-ts_label", ) ts_indiv = traits.Bool( desc="switch to create a directory for each network that " "contains the average time series for each ROI in " "individual files (each file has one line). " "The directories are labelled PREFIX_000_INDIV/, " "PREFIX_001_INDIV/, etc. (one per network). Within each " "directory, the files are labelled ROI_001.netts, " "ROI_002.netts, etc., with the numbers given by the " "actual ROI integer labels", argstr="-ts_indiv", ) ts_wb_corr = traits.Bool( desc="switch to create a set of whole brain correlation maps. 
" "Performs whole brain correlation for each " "ROI's average time series; this will automatically " "create a directory for each network that contains the " "set of whole brain correlation maps (Pearson 'r's). " "The directories are labelled as above for '-ts_indiv' " "Within each directory, the files are labelled " "WB_CORR_ROI_001+orig, WB_CORR_ROI_002+orig, etc., with " "the numbers given by the actual ROI integer labels", argstr="-ts_wb_corr", ) ts_wb_Z = traits.Bool( desc="same as above in '-ts_wb_corr', except that the maps " "have been Fisher transformed to Z-scores the relation: " "Z=atanh(r). " "To avoid infinities in the transform, Pearson values " "are effectively capped at |r| = 0.999329 (where |Z| = 4.0). " "Files are labelled WB_Z_ROI_001+orig, etc", argstr="-ts_wb_Z", ) ts_wb_strlabel = traits.Bool( desc="by default, '-ts_wb_{corr,Z}' output files are named " "using the int number of a given ROI, such as: " "WB_Z_ROI_001+orig. " "With this option, one can replace the int (such as '001') " "with the string label (such as 'L-thalamus') " "*if* one has a labeltable attached to the file", argstr="-ts_wb_strlabel", ) nifti = traits.Bool( desc="output any correlation map files as NIFTI files " "(default is BRIK/HEAD). Only useful if using " "'-ts_wb_corr' and/or '-ts_wb_Z'", argstr="-nifti", ) output_mask_nonnull = traits.Bool( desc="internally, this program checks for where there are " "nonnull time series, because we don't like those, in " "general. With this flag, the user can output the " "determined mask of non-null time series.", argstr="-output_mask_nonnull", ) push_thru_many_zeros = traits.Bool( desc="by default, this program will grind to a halt and " "refuse to calculate if any ROI contains >10 percent " "of voxels with null times series (i.e., each point is " "0), as of April, 2017. This is because it seems most " "likely that hidden badness is responsible. 
However, " "if the user still wants to carry on the calculation " "anyways, then this option will allow one to push on " "through. However, if any ROI *only* has null time " "series, then the program will not calculate and the " "user will really, really, really need to address their masking", argstr="-push_thru_many_zeros", ) ignore_LT = traits.Bool( desc="switch to ignore any label table labels in the " "'-in_rois' file, if there are any labels attached", argstr="-ignore_LT", ) out_file = File( desc="output file name part", name_template="%s_netcorr", argstr="-prefix %s", position=1, name_source="in_file", ) class NetCorrOutputSpec(TraitedSpec): out_corr_matrix = File( desc="output correlation matrix between ROIs written to a text file with .netcc suffix" ) out_corr_maps = traits.List( File(), desc="output correlation maps in Pearson and/or Z-scores" ) class NetCorr(AFNICommand): """Calculate correlation matrix of a set of ROIs (using mean time series of each). Several networks may be analyzed simultaneously, one per brick. For complete details, see the `3dNetCorr Documentation `_. 
Examples -------- >>> from nipype.interfaces import afni >>> ncorr = afni.NetCorr() >>> ncorr.inputs.in_file = 'functional.nii' >>> ncorr.inputs.mask = 'mask.nii' >>> ncorr.inputs.in_rois = 'maps.nii' >>> ncorr.inputs.ts_wb_corr = True >>> ncorr.inputs.ts_wb_Z = True >>> ncorr.inputs.fish_z = True >>> ncorr.inputs.out_file = 'sub0.tp1.ncorr' >>> ncorr.cmdline '3dNetCorr -prefix sub0.tp1.ncorr -fish_z -inset functional.nii -in_rois maps.nii -mask mask.nii -ts_wb_Z -ts_wb_corr' >>> res = ncorr.run() # doctest: +SKIP """ _cmd = "3dNetCorr" input_spec = NetCorrInputSpec output_spec = NetCorrOutputSpec def _list_outputs(self): import glob outputs = self.output_spec().get() if not isdefined(self.inputs.out_file): prefix = self._gen_fname(self.inputs.in_file, suffix="_netcorr") else: prefix = self.inputs.out_file # All outputs should be in the same directory as the prefix odir = os.path.dirname(os.path.abspath(prefix)) outputs["out_corr_matrix"] = glob.glob(os.path.join(odir, "*.netcc"))[0] if isdefined(self.inputs.ts_wb_corr) or isdefined(self.inputs.ts_Z_corr): corrdir = os.path.join(odir, prefix + "_000_INDIV") outputs["out_corr_maps"] = glob.glob(os.path.join(corrdir, "*.nii.gz")) return outputs class TCorrelateInputSpec(AFNICommandInputSpec): xset = File( desc="input xset", argstr="%s", position=-2, mandatory=True, exists=True, copyfile=False, ) yset = File( desc="input yset", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_tcorr", desc="output image file name", argstr="-prefix %s", name_source="xset", ) pearson = traits.Bool( desc="Correlation is the normal Pearson correlation coefficient", argstr="-pearson", ) polort = traits.Int(desc="Remove polynomical trend of order m", argstr="-polort %d") class TCorrelate(AFNICommand): """Computes the correlation coefficient between corresponding voxel time series in two input 3D+time datasets 'xset' and 'yset' For complete details, see the `3dTcorrelate 
Documentation. `_ Examples -------- >>> from nipype.interfaces import afni >>> tcorrelate = afni.TCorrelate() >>> tcorrelate.inputs.xset= 'u_rc1s1_Template.nii' >>> tcorrelate.inputs.yset = 'u_rc1s2_Template.nii' >>> tcorrelate.inputs.out_file = 'functional_tcorrelate.nii.gz' >>> tcorrelate.inputs.polort = -1 >>> tcorrelate.inputs.pearson = True >>> tcorrelate.cmdline '3dTcorrelate -prefix functional_tcorrelate.nii.gz -pearson -polort -1 u_rc1s1_Template.nii u_rc1s2_Template.nii' >>> res = tcarrelate.run() # doctest: +SKIP """ _cmd = "3dTcorrelate" input_spec = TCorrelateInputSpec output_spec = AFNICommandOutputSpec class TNormInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dTNorm", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_tnorm", desc="output image file name", argstr="-prefix %s", name_source="in_file", ) norm2 = traits.Bool( desc="L2 normalize (sum of squares = 1) [DEFAULT]", argstr="-norm2" ) normR = traits.Bool( desc="normalize so sum of squares = number of time points \\* e.g., so RMS = 1.", argstr="-normR", ) norm1 = traits.Bool( desc="L1 normalize (sum of absolute values = 1)", argstr="-norm1" ) normx = traits.Bool( desc="Scale so max absolute value = 1 (L_infinity norm)", argstr="-normx" ) polort = traits.Int( desc="""\ Detrend with polynomials of order p before normalizing [DEFAULT = don't do this]. Use '-polort 0' to remove the mean, for example""", argstr="-polort %s", ) L1fit = traits.Bool( desc="""\ Detrend with L1 regression (L2 is the default) This option is here just for the hell of it""", argstr="-L1fit", ) class TNorm(AFNICommand): """Shifts voxel time series from input so that separate slices are aligned to the same temporal origin. For complete details, see the `3dTnorm Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> tnorm = afni.TNorm() >>> tnorm.inputs.in_file = 'functional.nii' >>> tnorm.inputs.norm2 = True >>> tnorm.inputs.out_file = 'rm.errts.unit errts+tlrc' >>> tnorm.cmdline '3dTnorm -norm2 -prefix rm.errts.unit errts+tlrc functional.nii' >>> res = tshift.run() # doctest: +SKIP """ _cmd = "3dTnorm" input_spec = TNormInputSpec output_spec = AFNICommandOutputSpec class TProjectInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dTproject", argstr="-input %s", position=1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_tproject", desc="output image file name", position=-1, argstr="-prefix %s", name_source="in_file", ) censor = File( desc="""\ Filename of censor .1D time series. This is a file of 1s and 0s, indicating which time points are to be included (1) and which are to be excluded (0).""", argstr="-censor %s", exists=True, ) censortr = traits.List( traits.Str(), desc="""\ List of strings that specify time indexes to be removed from the analysis. Each string is of one of the following forms: * ``37`` => remove global time index #37 * ``2:37`` => remove time index #37 in run #2 * ``37..47`` => remove global time indexes #37-47 * ``37-47`` => same as above * ``2:37..47`` => remove time indexes #37-47 in run #2 * ``*:0-2`` => remove time indexes #0-2 in all runs * Time indexes within each run start at 0. * Run indexes start at 1 (just be to confusing). * N.B.: 2:37,47 means index #37 in run #2 and global time index 47; it does NOT mean index #37 in run #2 AND index #47 in run #2. 
""", argstr="-CENSORTR %s", ) cenmode = traits.Enum( "KILL", "ZERO", "NTRP", desc="""\ Specifies how censored time points are treated in the output dataset: * mode = ZERO -- put zero values in their place; output datset is same length as input * mode = KILL -- remove those time points; output dataset is shorter than input * mode = NTRP -- censored values are replaced by interpolated neighboring (in time) non-censored values, BEFORE any projections, and then the analysis proceeds without actual removal of any time points -- this feature is to keep the Spanish Inquisition happy. * The default mode is KILL !!! """, argstr="-cenmode %s", ) concat = File( desc="""\ The catenation file, as in 3dDeconvolve, containing the TR indexes of the start points for each contiguous run within the input dataset (the first entry should be 0). * Also as in 3dDeconvolve, if the input dataset is automatically catenated from a collection of datasets, then the run start indexes are determined directly, and '-concat' is not needed (and will be ignored). * Each run must have at least 9 time points AFTER censoring, or the program will not work! * The only use made of this input is in setting up the bandpass/stopband regressors. * '-ort' and '-dsort' regressors run through all time points, as read in. If you want separate projections in each run, then you must either break these ort files into appropriate components, OR you must run 3dTproject for each run separately, using the appropriate pieces from the ort files via the ``{...}`` selector for the 1D files and the ``[...]`` selector for the datasets. """, exists=True, argstr="-concat %s", ) noblock = traits.Bool( desc="""\ Also as in 3dDeconvolve, if you want the program to treat an auto-catenated dataset as one long run, use this option. However, '-noblock' will not affect catenation if you use the '-concat' option.""", argstr="-noblock", ) ort = File( desc="""\ Remove each column in file. 
Each column will have its mean removed.""", exists=True, argstr="-ort %s", ) polort = traits.Int( desc="""\ Remove polynomials up to and including degree pp. * Default value is 2. * It makes no sense to use a value of pp greater than 2, if you are bandpassing out the lower frequencies! * For catenated datasets, each run gets a separate set set of pp+1 Legendre polynomial regressors. * Use of -polort -1 is not advised (if data mean != 0), even if -ort contains constant terms, as all means are removed. """, argstr="-polort %d", ) dsort = InputMultiObject( File(exists=True, copyfile=False), argstr="-dsort %s...", desc="""\ Remove the 3D+time time series in dataset fset. * That is, 'fset' contains a different nuisance time series for each voxel (e.g., from AnatICOR). * Multiple -dsort options are allowed. """, ) bandpass = traits.Tuple( traits.Float, traits.Float, desc="""Remove all frequencies EXCEPT those in the range""", argstr="-bandpass %g %g", ) stopband = traits.Tuple( traits.Float, traits.Float, desc="""Remove all frequencies in the range""", argstr="-stopband %g %g", ) TR = traits.Float( desc="""\ Use time step dd for the frequency calculations, rather than the value stored in the dataset header.""", argstr="-TR %g", ) mask = File( exists=True, desc="""\ Only operate on voxels nonzero in the mset dataset. * Voxels outside the mask will be filled with zeros. * If no masking option is given, then all voxels will be processed. """, argstr="-mask %s", ) automask = traits.Bool( desc="""Generate a mask automatically""", xor=["mask"], argstr="-automask" ) blur = traits.Float( desc="""\ Blur (inside the mask only) with a filter that has width (FWHM) of fff millimeters. Spatial blurring (if done) is after the time series filtering.""", argstr="-blur %g", ) norm = traits.Bool( desc=""" Normalize each output time series to have sum of squares = 1. 
This is the LAST operation.""", argstr="-norm", ) class TProject(AFNICommand): """ This program projects (detrends) out various 'nuisance' time series from each voxel in the input dataset. Note that all the projections are done via linear regression, including the frequency-based options such as ``-passband``. In this way, you can bandpass time-censored data, and at the same time, remove other time series of no interest (e.g., physiological estimates, motion parameters). Shifts voxel time series from input so that seperate slices are aligned to the same temporal origin. Examples -------- >>> from nipype.interfaces import afni >>> tproject = afni.TProject() >>> tproject.inputs.in_file = 'functional.nii' >>> tproject.inputs.bandpass = (0.00667, 99999) >>> tproject.inputs.polort = 3 >>> tproject.inputs.automask = True >>> tproject.inputs.out_file = 'projected.nii.gz' >>> tproject.cmdline '3dTproject -input functional.nii -automask -bandpass 0.00667 99999 -polort 3 -prefix projected.nii.gz' >>> res = tproject.run() # doctest: +SKIP See Also -------- For complete details, see the `3dTproject Documentation. `__ """ _cmd = "3dTproject" input_spec = TProjectInputSpec output_spec = AFNICommandOutputSpec class TShiftInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dTshift", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_tshift", desc="output image file name", argstr="-prefix %s", name_source="in_file", ) tr = Str( desc='manually set the TR. 
You can attach suffix "s" for seconds ' 'or "ms" for milliseconds.', argstr="-TR %s", ) tzero = traits.Float( desc="align each slice to given time offset", argstr="-tzero %s", xor=["tslice"] ) tslice = traits.Int( desc="align each slice to time offset of given slice", argstr="-slice %s", xor=["tzero"], ) ignore = traits.Int( desc="ignore the first set of points specified", argstr="-ignore %s" ) interp = traits.Enum( ("Fourier", "linear", "cubic", "quintic", "heptic"), desc="different interpolation methods (see 3dTshift for details) " "default = Fourier", argstr="-%s", ) tpattern = traits.Either( traits.Enum( "alt+z", "altplus", # Synonyms "alt+z2", "alt-z", "altminus", # Synonyms "alt-z2", "seq+z", "seqplus", # Synonyms "seq-z", "seqminus", ), # Synonyms Str, # For backwards compatibility desc="use specified slice time pattern rather than one in header", argstr="-tpattern %s", xor=["slice_timing"], ) slice_timing = traits.Either( File(exists=True), traits.List(traits.Float), desc="time offsets from the volume acquisition onset for each slice", argstr="-tpattern @%s", xor=["tpattern"], ) slice_encoding_direction = traits.Enum( "k", "k-", usedefault=True, desc="Direction in which slice_timing is specified (default: k). If negative," "slice_timing is defined in reverse order, that is, the first entry " "corresponds to the slice with the largest index, and the final entry " "corresponds to slice index zero. 
Only in effect when slice_timing is " "passed as list, not when it is passed as file.", ) rlt = traits.Bool( desc="Before shifting, remove the mean and linear trend", argstr="-rlt" ) rltplus = traits.Bool( desc="Before shifting, remove the mean and linear trend and later put " "back the mean", argstr="-rlt+", ) class TShiftOutputSpec(AFNICommandOutputSpec): timing_file = File(desc="AFNI formatted timing file, if ``slice_timing`` is a list") class TShift(AFNICommand): """Shifts voxel time series from input so that seperate slices are aligned to the same temporal origin. For complete details, see the `3dTshift Documentation. `_ Examples -------- Slice timing details may be specified explicitly via the ``slice_timing`` input: >>> from nipype.interfaces import afni >>> TR = 2.5 >>> tshift = afni.TShift() >>> tshift.inputs.in_file = 'functional.nii' >>> tshift.inputs.tzero = 0.0 >>> tshift.inputs.tr = '%.1fs' % TR >>> tshift.inputs.slice_timing = list(np.arange(40) / TR) >>> tshift.cmdline '3dTshift -prefix functional_tshift -tpattern @slice_timing.1D -TR 2.5s -tzero 0.0 functional.nii' When the ``slice_timing`` input is used, the ``timing_file`` output is populated, in this case with the generated file. >>> tshift._list_outputs()['timing_file'] # doctest: +ELLIPSIS '.../slice_timing.1D' >>> np.loadtxt(tshift._list_outputs()['timing_file']).tolist()[:5] [0.0, 0.4, 0.8, 1.2, 1.6] If ``slice_encoding_direction`` is set to ``'k-'``, the slice timing is reversed: >>> tshift.inputs.slice_encoding_direction = 'k-' >>> tshift.cmdline '3dTshift -prefix functional_tshift -tpattern @slice_timing.1D -TR 2.5s -tzero 0.0 functional.nii' >>> np.loadtxt(tshift._list_outputs()['timing_file']).tolist()[:5] [15.6, 15.2, 14.8, 14.4, 14.0] This method creates a ``slice_timing.1D`` file to be passed to ``3dTshift``. 
A pre-existing slice-timing file may be used in the same way: >>> tshift = afni.TShift() >>> tshift.inputs.in_file = 'functional.nii' >>> tshift.inputs.tzero = 0.0 >>> tshift.inputs.tr = '%.1fs' % TR >>> tshift.inputs.slice_timing = 'slice_timing.1D' >>> tshift.cmdline '3dTshift -prefix functional_tshift -tpattern @slice_timing.1D -TR 2.5s -tzero 0.0 functional.nii' When a pre-existing file is provided, ``timing_file`` is simply passed through. >>> tshift._list_outputs()['timing_file'] # doctest: +ELLIPSIS '.../slice_timing.1D' Alternatively, pre-specified slice timing patterns may be specified with the ``tpattern`` input. For example, to specify an alternating, ascending slice timing pattern: >>> tshift = afni.TShift() >>> tshift.inputs.in_file = 'functional.nii' >>> tshift.inputs.tzero = 0.0 >>> tshift.inputs.tr = '%.1fs' % TR >>> tshift.inputs.tpattern = 'alt+z' >>> tshift.cmdline '3dTshift -prefix functional_tshift -tpattern alt+z -TR 2.5s -tzero 0.0 functional.nii' For backwards compatibility, ``tpattern`` may also take filenames prefixed with ``@``. However, in this case, filenames are not validated, so this usage will be deprecated in future versions of Nipype. >>> tshift = afni.TShift() >>> tshift.inputs.in_file = 'functional.nii' >>> tshift.inputs.tzero = 0.0 >>> tshift.inputs.tr = '%.1fs' % TR >>> tshift.inputs.tpattern = '@slice_timing.1D' >>> tshift.cmdline '3dTshift -prefix functional_tshift -tpattern @slice_timing.1D -TR 2.5s -tzero 0.0 functional.nii' In these cases, ``timing_file`` is undefined. 
>>> tshift._list_outputs()['timing_file'] # doctest: +ELLIPSIS In any configuration, the interface may be run as usual: >>> res = tshift.run() # doctest: +SKIP """ _cmd = "3dTshift" input_spec = TShiftInputSpec output_spec = TShiftOutputSpec def _format_arg(self, name, trait_spec, value): if name == "tpattern" and value.startswith("@"): iflogger.warning( 'Passing a file prefixed by "@" will be deprecated' "; please use the `slice_timing` input" ) elif name == "slice_timing" and isinstance(value, list): value = self._write_slice_timing() return super(TShift, self)._format_arg(name, trait_spec, value) def _write_slice_timing(self): slice_timing = list(self.inputs.slice_timing) if self.inputs.slice_encoding_direction.endswith("-"): slice_timing.reverse() fname = "slice_timing.1D" with open(fname, "w") as fobj: fobj.write("\t".join(map(str, slice_timing))) return fname def _list_outputs(self): outputs = super(TShift, self)._list_outputs() if isdefined(self.inputs.slice_timing): if isinstance(self.inputs.slice_timing, list): outputs["timing_file"] = os.path.abspath("slice_timing.1D") else: outputs["timing_file"] = os.path.abspath(self.inputs.slice_timing) return outputs class TSmoothInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dTSmooth", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_smooth", desc="output file from 3dTSmooth", argstr="-prefix %s", name_source="in_file", ) datum = traits.Str( desc="Sets the data type of the output dataset", argstr="-datum %s" ) lin = traits.Bool( desc=r"3 point linear filter: :math:`0.15\,a + 0.70\,b + 0.15\,c`" " [This is the default smoother]", argstr="-lin", ) med = traits.Bool(desc="3 point median filter: median(a,b,c)", argstr="-med") osf = traits.Bool( desc="3 point order statistics filter:" r":math:`0.15\,min(a,b,c) + 0.70\,median(a,b,c) + 0.15\,max(a,b,c)`", argstr="-osf", ) lin3 = traits.Int( desc=r"3 point linear filter: :math:`0.5\,(1-m)\,a + 
m\,b + 0.5\,(1-m)\,c`. " "Here, 'm' is a number strictly between 0 and 1.", argstr="-3lin %d", ) hamming = traits.Int( argstr="-hamming %d", desc="Use N point Hamming windows. (N must be odd and bigger than 1.)", ) blackman = traits.Int( argstr="-blackman %d", desc="Use N point Blackman windows. (N must be odd and bigger than 1.)", ) custom = File( argstr="-custom %s", desc="odd # of coefficients must be in a single column in ASCII file", ) adaptive = traits.Int( argstr="-adaptive %d", desc="use adaptive mean filtering of width N " "(where N must be odd and bigger than 3).", ) class TSmooth(AFNICommand): """Smooths each voxel time series in a 3D+time dataset and produces as output a new 3D+time dataset (e.g., lowpass filter in time). For complete details, see the `3dTsmooth Documentation. `_ Examples -------- >>> from nipype.interfaces import afni >>> from nipype.testing import example_data >>> smooth = afni.TSmooth() >>> smooth.inputs.in_file = 'functional.nii' >>> smooth.inputs.adaptive = 5 >>> smooth.cmdline '3dTsmooth -adaptive 5 -prefix functional_smooth functional.nii' >>> res = smooth.run() # doctest: +SKIP """ _cmd = "3dTsmooth" input_spec = TSmoothInputSpec output_spec = AFNICommandOutputSpec class VolregInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dvolreg", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) in_weight_volume = traits.Either( traits.Tuple(File(exists=True), traits.Int), File(exists=True), desc="weights for each voxel specified by a file with an " "optional volume number (defaults to 0)", argstr="-weight '%s[%d]'", ) out_file = File( name_template="%s_volreg", desc="output image file name", argstr="-prefix %s", name_source="in_file", ) basefile = File( desc="base file for registration", argstr="-base %s", position=-6, exists=True ) zpad = traits.Int( desc="Zeropad around the edges by 'n' voxels during rotations", argstr="-zpad %d", position=-5, ) md1d_file = File( name_template="%s_md.1D", 
desc="max displacement output file", argstr="-maxdisp1D %s", name_source="in_file", keep_extension=True, position=-4, ) oned_file = File( name_template="%s.1D", desc="1D movement parameters output file", argstr="-1Dfile %s", name_source="in_file", keep_extension=True, ) verbose = traits.Bool( desc="more detailed description of the process", argstr="-verbose" ) timeshift = traits.Bool( desc="time shift to mean slice time offset", argstr="-tshift 0" ) copyorigin = traits.Bool( desc="copy base file origin coords to output", argstr="-twodup" ) oned_matrix_save = File( name_template="%s.aff12.1D", desc="Save the matrix transformation", argstr="-1Dmatrix_save %s", keep_extension=True, name_source="in_file", ) interp = traits.Enum( ("Fourier", "cubic", "heptic", "quintic", "linear"), desc="spatial interpolation methods [default = heptic]", argstr="-%s", ) class VolregOutputSpec(TraitedSpec): out_file = File(desc="registered file", exists=True) md1d_file = File(desc="max displacement info file", exists=True) oned_file = File(desc="movement parameters info file", exists=True) oned_matrix_save = File( desc="matrix transformation from base to input", exists=True ) class Volreg(AFNICommand): """Register input volumes to a base volume using AFNI 3dvolreg command For complete details, see the `3dvolreg Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> volreg = afni.Volreg() >>> volreg.inputs.in_file = 'functional.nii' >>> volreg.inputs.args = '-Fourier -twopass' >>> volreg.inputs.zpad = 4 >>> volreg.inputs.outputtype = 'NIFTI' >>> volreg.cmdline # doctest: +ELLIPSIS '3dvolreg -Fourier -twopass -1Dfile functional.1D -1Dmatrix_save functional.aff12.1D -prefix \ functional_volreg.nii -zpad 4 -maxdisp1D functional_md.1D functional.nii' >>> res = volreg.run() # doctest: +SKIP >>> from nipype.interfaces import afni >>> volreg = afni.Volreg() >>> volreg.inputs.in_file = 'functional.nii' >>> volreg.inputs.interp = 'cubic' >>> volreg.inputs.verbose = True >>> volreg.inputs.zpad = 1 >>> volreg.inputs.basefile = 'functional.nii' >>> volreg.inputs.out_file = 'rm.epi.volreg.r1' >>> volreg.inputs.oned_file = 'dfile.r1.1D' >>> volreg.inputs.oned_matrix_save = 'mat.r1.tshift+orig.1D' >>> volreg.cmdline '3dvolreg -cubic -1Dfile dfile.r1.1D -1Dmatrix_save mat.r1.tshift+orig.1D -prefix \ rm.epi.volreg.r1 -verbose -base functional.nii -zpad 1 -maxdisp1D functional_md.1D functional.nii' >>> res = volreg.run() # doctest: +SKIP """ _cmd = "3dvolreg" input_spec = VolregInputSpec output_spec = VolregOutputSpec def _format_arg(self, name, trait_spec, value): if name == "in_weight_volume" and not isinstance(value, tuple): value = (value, 0) return super(Volreg, self)._format_arg(name, trait_spec, value) class WarpInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dWarp", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_warp", desc="output image file name", argstr="-prefix %s", name_source="in_file", keep_extension=True, ) tta2mni = traits.Bool( desc="transform dataset from Talairach to MNI152", argstr="-tta2mni" ) mni2tta = traits.Bool( desc="transform dataset from MNI152 to Talaraich", argstr="-mni2tta" ) matparent = File( desc="apply transformation from 3dWarpDrive", argstr="-matparent %s", 
exists=True, ) oblique_parent = File( desc="Read in the oblique transformation matrix from an oblique " "dataset and make cardinal dataset oblique to match", argstr="-oblique_parent %s", exists=True, ) deoblique = traits.Bool( desc="transform dataset from oblique to cardinal", argstr="-deoblique" ) interp = traits.Enum( ("linear", "cubic", "NN", "quintic"), desc="spatial interpolation methods [default = linear]", argstr="-%s", ) gridset = File( desc="copy grid of specified dataset", argstr="-gridset %s", exists=True ) newgrid = traits.Float(desc="specify grid of this size (mm)", argstr="-newgrid %f") zpad = traits.Int( desc="pad input dataset with N planes of zero on all sides.", argstr="-zpad %d" ) verbose = traits.Bool( desc="Print out some information along the way.", argstr="-verb" ) save_warp = traits.Bool(desc="save warp as .mat file", requires=["verbose"]) class WarpOutputSpec(TraitedSpec): out_file = File(desc="Warped file.", exists=True) warp_file = File(desc="warp transform .mat file") class Warp(AFNICommand): """Use 3dWarp for spatially transforming a dataset. Examples -------- >>> from nipype.interfaces import afni >>> warp = afni.Warp() >>> warp.inputs.in_file = 'structural.nii' >>> warp.inputs.deoblique = True >>> warp.inputs.out_file = 'trans.nii.gz' >>> warp.cmdline '3dWarp -deoblique -prefix trans.nii.gz structural.nii' >>> res = warp.run() # doctest: +SKIP >>> warp_2 = afni.Warp() >>> warp_2.inputs.in_file = 'structural.nii' >>> warp_2.inputs.newgrid = 1.0 >>> warp_2.inputs.out_file = 'trans.nii.gz' >>> warp_2.cmdline '3dWarp -newgrid 1.000000 -prefix trans.nii.gz structural.nii' >>> res = warp_2.run() # doctest: +SKIP See Also -------- For complete details, see the `3dWarp Documentation. `__. 
""" _cmd = "3dWarp" input_spec = WarpInputSpec output_spec = WarpOutputSpec def _run_interface(self, runtime, correct_return_codes=(0,)): runtime = super(Warp, self)._run_interface(runtime, correct_return_codes) if self.inputs.save_warp: import numpy as np warp_file = self._list_outputs()["warp_file"] np.savetxt(warp_file, [runtime.stdout], fmt=str("%s")) return runtime def _list_outputs(self): outputs = super(Warp, self)._list_outputs() if self.inputs.save_warp: outputs["warp_file"] = fname_presuffix( outputs["out_file"], suffix="_transform.mat", use_ext=False ) return outputs class QwarpInputSpec(AFNICommandInputSpec): in_file = File( desc="Source image (opposite phase encoding direction than base image).", argstr="-source %s", mandatory=True, exists=True, copyfile=False, ) base_file = File( desc="Base image (opposite phase encoding direction than source image).", argstr="-base %s", mandatory=True, exists=True, copyfile=False, ) out_file = File( argstr="-prefix %s", name_template="ppp_%s", name_source=["in_file"], desc="""\ Sets the prefix/suffix for the output datasets. * The source dataset is warped to match the base and gets prefix 'ppp'. (Except if '-plusminus' is used * The final interpolation to this output dataset is done using the 'wsinc5' method. See the output of 3dAllineate -HELP (in the "Modifying '-final wsinc5'" section) for the lengthy technical details. * The 3D warp used is saved in a dataset with prefix 'ppp_WARP' -- this dataset can be used with 3dNwarpApply and 3dNwarpCat, for example. * To be clear, this is the warp from source dataset coordinates to base dataset coordinates, where the values at each base grid point are the xyz displacments needed to move that grid point's xyz values to the corresponding xyz values in the source dataset: base( (x,y,z) + WARP(x,y,z) ) matches source(x,y,z) Another way to think of this warp is that it 'pulls' values back from source space to base space. 
* 3dNwarpApply would use 'ppp_WARP' to transform datasets aligned with the source dataset to be aligned with the base dataset. **If you do NOT want this warp saved, use the option '-nowarp'**. (However, this warp is usually the most valuable possible output!) * If you want to calculate and save the inverse 3D warp, use the option '-iwarp'. This inverse warp will then be saved in a dataset with prefix 'ppp_WARPINV'. * This inverse warp could be used to transform data from base space to source space, if you need to do such an operation. * You can easily compute the inverse later, say by a command like 3dNwarpCat -prefix Z_WARPINV 'INV(Z_WARP+tlrc)' or the inverse can be computed as needed in 3dNwarpApply, like 3dNwarpApply -nwarp 'INV(Z_WARP+tlrc)' -source Dataset.nii ... """, ) resample = traits.Bool( desc="""\ This option simply resamples the source dataset to match the base dataset grid. You can use this if the two datasets overlap well (as seen in the AFNI GUI), but are not on the same 3D grid. * If they don't overlap well, allineate them first * The reampling here is done with the 'wsinc5' method, which has very little blurring artifact. * If the base and source datasets ARE on the same 3D grid, then the -resample option will be ignored. * You CAN use -resample with these 3dQwarp options: -plusminus -inilev -iniwarp -duplo """, argstr="-resample", ) allineate = traits.Bool( desc="This option will make 3dQwarp run 3dAllineate first, to align " "the source dataset to the base with an affine transformation. 
" "It will then use that alignment as a starting point for the " "nonlinear warping.", argstr="-allineate", ) allineate_opts = traits.Str( desc="add extra options to the 3dAllineate command to be run by 3dQwarp.", argstr="-allineate_opts %s", requires=["allineate"], ) nowarp = traits.Bool(desc="Do not save the _WARP file.", argstr="-nowarp") iwarp = traits.Bool( desc="Do compute and save the _WARPINV file.", argstr="-iwarp", xor=["plusminus"], ) pear = traits.Bool( desc="Use strict Pearson correlation for matching." "Not usually recommended, since the 'clipped Pearson' method" "used by default will reduce the impact of outlier values.", argstr="-pear", ) noneg = traits.Bool( desc="""\ Replace negative values in either input volume with 0. * If there ARE negative input values, and you do NOT use -noneg, then strict Pearson correlation will be used, since the 'clipped' method only is implemented for non-negative volumes. * '-noneg' is not the default, since there might be situations where you want to align datasets with positive and negative values mixed. * But, in many cases, the negative values in a dataset are just the result of interpolation artifacts (or other peculiarities), and so they should be ignored. That is what '-noneg' is for. """, argstr="-noneg", ) nopenalty = traits.Bool( desc="""\ Replace negative values in either input volume with 0. * If there ARE negative input values, and you do NOT use -noneg, then strict Pearson correlation will be used, since the 'clipped' method only is implemented for non-negative volumes. * '-noneg' is not the default, since there might be situations where you want to align datasets with positive and negative values mixed. * But, in many cases, the negative values in a dataset are just the result of interpolation artifacts (or other peculiarities), and so they should be ignored. That is what '-noneg' is for. 
""", argstr="-nopenalty", ) penfac = traits.Float( argstr="-penfac %f", desc="""\ Use this value to weight the penalty. The default value is 1. Larger values mean the penalty counts more, reducing grid distortions, insha'Allah; '-nopenalty' is the same as '-penfac 0'. In 23 Sep 2013 Zhark increased the default value of the penalty by a factor of 5, and also made it get progressively larger with each level of refinement. Thus, warping results will vary from earlier instances of 3dQwarp. * The progressive increase in the penalty at higher levels means that the 'cost function' can actually look like the alignment is getting worse when the levels change. * IF you wish to turn off this progression, for whatever reason (e.g., to keep compatibility with older results), use the option '-penold'.To be completely compatible with the older 3dQwarp, you'll also have to use '-penfac 0.2'. """, ) noweight = traits.Bool( desc="If you want a binary weight (the old default), use this option." "That is, each voxel in the base volume automask will be" "weighted the same in the computation of the cost functional.", argstr="-noweight", ) weight = File( desc="Instead of computing the weight from the base dataset," "directly input the weight volume from dataset 'www'." "Useful if you know what over parts of the base image you" "want to emphasize or de-emphasize the matching functional.", argstr="-weight %s", exists=True, ) wball = traits.List( traits.Int(), desc=""""\ ``-wball x y z r f`` Enhance automatic weight from '-useweight' by a factor of 1+f\\*Gaussian(FWHM=r) centered in the base image at DICOM coordinates (x,y,z) and with radius 'r'. The goal of this option is to try and make the alignment better in a specific part of the brain. Example: -wball 0 14 6 30 40 to emphasize the thalamic area (in MNI/Talairach space). * The 'r' parameter must be positive! * The 'f' parameter must be between 1 and 100 (inclusive). 
* '-wball' does nothing if you input your own weight with the '-weight' option. * '-wball' does change the binary weight created by the '-noweight' option. * You can only use '-wball' once in a run of 3dQwarp. **The effect of '-wball' is not dramatic.** The example above makes the average brain image across a collection of subjects a little sharper in the thalamic area, which might have some small value. If you care enough about alignment to use '-wball', then you should examine the results from 3dQwarp for each subject, to see if the alignments are good enough for your purposes.""", argstr="-wball %s", minlen=5, maxlen=5, xor=["wmask"], ) traits.Tuple((traits.Float(), traits.Float()), argstr="-bpass %f %f") wmask = traits.Tuple( (File(exists=True), traits.Float()), desc="""\ Similar to '-wball', but here, you provide a dataset 'ws' that indicates where to increase the weight. * The 'ws' dataset must be on the same 3D grid as the base dataset. * 'ws' is treated as a mask -- it only matters where it is nonzero -- otherwise, the values inside are not used. * After 'ws' comes the factor 'f' by which to increase the automatically computed weight. Where 'ws' is nonzero, the weighting will be multiplied by (1+f). * As with '-wball', the factor 'f' should be between 1 and 100. """, argstr="-wpass %s %f", xor=["wball"], ) out_weight_file = File( argstr="-wtprefix %s", desc="Write the weight volume to disk as a dataset" ) blur = traits.List( traits.Float(), desc="""\ Gaussian blur the input images by 'bb' (FWHM) voxels before doing the alignment (the output dataset will not be blurred). The default is 2.345 (for no good reason). * Optionally, you can provide 2 values for 'bb', and then the first one is applied to the base volume, the second to the source volume. e.g., '-blur 0 3' to skip blurring the base image (if the base is a blurry template, for example). * A negative blur radius means to use 3D median filtering, rather than Gaussian blurring. 
This type of filtering will better preserve edges, which can be important in alignment. * If the base is a template volume that is already blurry, you probably don't want to blur it again, but blurring the source volume a little is probably a good idea, to help the program avoid trying to match tiny features. * Note that -duplo will blur the volumes some extra amount for the initial small-scale warping, to make that phase of the program converge more rapidly. """, argstr="-blur %s", minlen=1, maxlen=2, ) pblur = traits.List( traits.Float(), desc="""\ Use progressive blurring; that is, for larger patch sizes, the amount of blurring is larger. The general idea is to avoid trying to match finer details when the patch size and incremental warps are coarse. When '-blur' is used as well, it sets a minimum amount of blurring that will be used. [06 Aug 2014 -- '-pblur' may become the default someday]. * You can optionally give the fraction of the patch size that is used for the progressive blur by providing a value between 0 and 0.25 after '-pblur'. If you provide TWO values, the the first fraction is used for progressively blurring the base image and the second for the source image. The default parameters when just '-pblur' is given is the same as giving the options as '-pblur 0.09 0.09'. * '-pblur' is useful when trying to match 2 volumes with high amounts of detail; e.g, warping one subject's brain image to match another's, or trying to warp to match a detailed template. * Note that using negative values with '-blur' means that the progressive blurring will be done with median filters, rather than Gaussian linear blurring. 
Note: The combination of the -allineate and -pblur options will make the results of using 3dQwarp to align to a template somewhat less sensitive to initial head position and scaling.""", argstr="-pblur %s", minlen=1, maxlen=2, ) emask = File( desc="Here, 'ee' is a dataset to specify a mask of voxels" "to EXCLUDE from the analysis -- all voxels in 'ee'" "that are NONZERO will not be used in the alignment." "The base image always automasked -- the emask is" "extra, to indicate voxels you definitely DON'T want" "included in the matching process, even if they are" "inside the brain.", argstr="-emask %s", exists=True, copyfile=False, ) noXdis = traits.Bool(desc="Warp will not displace in x direction", argstr="-noXdis") noYdis = traits.Bool(desc="Warp will not displace in y direction", argstr="-noYdis") noZdis = traits.Bool(desc="Warp will not displace in z direction", argstr="-noZdis") iniwarp = traits.List( File(exists=True, copyfile=False), desc="""\ A dataset with an initial nonlinear warp to use. * If this option is not used, the initial warp is the identity. * You can specify a catenation of warps (in quotes) here, as in program 3dNwarpApply. * As a special case, if you just input an affine matrix in a .1D file, that will work also -- it is treated as giving the initial warp via the string "IDENT(base_dataset) matrix_file.aff12.1D". * You CANNOT use this option with -duplo !! * -iniwarp is usually used with -inilev to re-start 3dQwarp from a previous stopping point. """, argstr="-iniwarp %s", xor=["duplo"], ) inilev = traits.Int( desc="""\ The initial refinement 'level' at which to start. * Usually used with -iniwarp; CANNOT be used with -duplo. * The combination of -inilev and -iniwarp lets you take the results of a previous 3dQwarp run and refine them further: Note that the source dataset in the second run is the SAME as in the first run. If you don't see why this is necessary, then you probably need to seek help from an AFNI guru. 
""", argstr="-inilev %d", xor=["duplo"], ) minpatch = traits.Int( desc="""\ The value of mm should be an odd integer. * The default value of mm is 25. * For more accurate results than mm=25, try 19 or 13. * The smallest allowed patch size is 5. * You may want stop at a larger patch size (say 7 or 9) and use the -Qfinal option to run that final level with quintic warps, which might run faster and provide the same degree of warp detail. * Trying to make two different brain volumes match in fine detail is usually a waste of time, especially in humans. There is too much variability in anatomy to match gyrus to gyrus accurately. For this reason, the default minimum patch size is 25 voxels. Using a smaller '-minpatch' might try to force the warp to match features that do not match, and the result can be useless image distortions -- another reason to LOOK AT THE RESULTS. """, argstr="-minpatch %d", ) maxlev = traits.Int( desc="""\ The initial refinement 'level' at which to start. * Usually used with -iniwarp; CANNOT be used with -duplo. * The combination of -inilev and -iniwarp lets you take the results of a previous 3dQwarp run and refine them further: Note that the source dataset in the second run is the SAME as in the first run. If you don't see why this is necessary, then you probably need to seek help from an AFNI guru. """, argstr="-maxlev %d", xor=["duplo"], position=-1, ) gridlist = File( desc="""\ This option provides an alternate way to specify the patch grid sizes used in the warp optimization process. 'gl' is a 1D file with a list of patches to use -- in most cases, you will want to use it in the following form: ``-gridlist '1D: 0 151 101 75 51'`` * Here, a 0 patch size means the global domain. Patch sizes otherwise should be odd integers >= 5. * If you use the '0' patch size again after the first position, you will actually get an iteration at the size of the default patch level 1, where the patch sizes are 75% of the volume dimension. 
There is no way to force the program to literally repeat the sui generis step of lev=0. """, argstr="-gridlist %s", exists=True, copyfile=False, xor=["duplo", "plusminus"], ) allsave = traits.Bool( desc=""" This option lets you save the output warps from each level" of the refinement process. Mostly used for experimenting." Will only save all the outputs if the program terminates" normally -- if it crashes, or freezes, then all these" warps are lost.""", argstr="-allsave", xor=["nopadWARP", "duplo", "plusminus"], ) duplo = traits.Bool( desc="""\ Start off with 1/2 scale versions of the volumes," for getting a speedy coarse first alignment." * Then scales back up to register the full volumes." The goal is greater speed, and it seems to help this" positively piggish program to be more expeditious." * However, accuracy is somewhat lower with '-duplo'," for reasons that currenly elude Zhark; for this reason," the Emperor does not usually use '-duplo'. """, argstr="-duplo", xor=["gridlist", "maxlev", "inilev", "iniwarp", "plusminus", "allsave"], ) workhard = traits.Bool( desc="""\ Iterate more times, which can help when the volumes are hard to align at all, or when you hope to get a more precise alignment. * Slows the program down (possibly a lot), of course. * When you combine '-workhard' with '-duplo', only the full size volumes get the extra iterations. * For finer control over which refinement levels work hard, you can use this option in the form (for example) ``-workhard:4:7`` which implies the extra iterations will be done at levels 4, 5, 6, and 7, but not otherwise. * You can also use '-superhard' to iterate even more, but this extra option will REALLY slow things down. * Under most circumstances, you should not need to use either ``-workhard`` or ``-superhard``. * The fastest way to register to a template image is via the ``-duplo`` option, and without the ``-workhard`` or ``-superhard`` options. 
* If you use this option in the form '-Workhard' (first letter in upper case), then the second iteration at each level is done with quintic polynomial warps. """, argstr="-workhard", xor=["boxopt", "ballopt"], ) Qfinal = traits.Bool( desc="""\ At the finest patch size (the final level), use Hermite quintic polynomials for the warp instead of cubic polynomials. * In a 3D 'patch', there are 2x2x2x3=24 cubic polynomial basis function parameters over which to optimize (2 polynomials dependent on each of the x,y,z directions, and 3 different directions of displacement). * There are 3x3x3x3=81 quintic polynomial parameters per patch. * With -Qfinal, the final level will have more detail in the allowed warps, at the cost of yet more CPU time. * However, no patch below 7x7x7 in size will be done with quintic polynomials. * This option is also not usually needed, and is experimental. """, argstr="-Qfinal", ) Qonly = traits.Bool( desc="""\ Use Hermite quintic polynomials at all levels. * Very slow (about 4 times longer). Also experimental. * Will produce a (discrete representation of a) C2 warp. """, argstr="-Qonly", ) plusminus = traits.Bool( desc="""\ Normally, the warp displacements dis(x) are defined to match base(x) to source(x+dis(x)). With this option, the match is between base(x-dis(x)) and source(x+dis(x)) -- the two images 'meet in the middle'. * One goal is to mimic the warping done to MRI EPI data by field inhomogeneities, when registering between a 'blip up' and a 'blip down' down volume, which will have opposite distortions. * Define Wp(x) = x+dis(x) and Wm(x) = x-dis(x). Then since base(Wm(x)) matches source(Wp(x)), by substituting INV(Wm(x)) wherever we see x, we have base(x) matches source(Wp(INV(Wm(x)))); that is, the warp V(x) that one would get from the 'usual' way of running 3dQwarp is V(x) = Wp(INV(Wm(x))). 
* Conversely, we can calculate Wp(x) in terms of V(x) as follows: If V(x) = x + dv(x), define Vh(x) = x + dv(x)/2; then Wp(x) = V(INV(Vh(x))) * With the above formulas, it is possible to compute Wp(x) from V(x) and vice-versa, using program 3dNwarpCalc. The requisite commands are left as an exercise for the aspiring AFNI Jedi Master. * You can use the semi-secret '-pmBASE' option to get the V(x) warp and the source dataset warped to base space, in addition to the Wp(x) '_PLUS' and Wm(x) '_MINUS' warps. * Alas: -plusminus does not work with -duplo or -allineate :-( * However, you can use -iniwarp with -plusminus :-) * The outputs have _PLUS (from the source dataset) and _MINUS (from the base dataset) in their filenames, in addition to the prefix. The -iwarp option, if present, will be ignored. """, argstr="-plusminus", xor=["duplo", "allsave", "iwarp"], ) nopad = traits.Bool( desc="""\ Do NOT use zero-padding on the 3D base and source images. [Default == zero-pad, if needed] * The underlying model for deformations goes to zero at the edge of the volume being warped. However, if there is significant data near an edge of the volume, then it won't get displaced much, and so the results might not be good. * Zero padding is designed as a way to work around this potential problem. You should NOT need the '-nopad' option for any reason that Zhark can think of, but it is here to be symmetrical with 3dAllineate. * Note that the output (warped from source) dataset will be on the base dataset grid whether or not zero-padding is allowed. However, unless you use the following option, allowing zero-padding (i.e., the default operation) will make the output WARP dataset(s) be on a larger grid (also see '-expad' below). 
""", argstr="-nopad", ) nopadWARP = traits.Bool( desc="If for some reason you require the warp volume to" "match the base volume, then use this option to have the output" "WARP dataset(s) truncated.", argstr="-nopadWARP", xor=["allsave", "expad"], ) expad = traits.Int( desc="This option instructs the program to pad the warp by an extra" "'EE' voxels (and then 3dQwarp starts optimizing it)." "This option is seldom needed, but can be useful if you" "might later catenate the nonlinear warp -- via 3dNwarpCat --" "with an affine transformation that contains a large shift." "Under that circumstance, the nonlinear warp might be shifted" "partially outside its original grid, so expanding that grid" "can avoid this problem." "Note that this option perforce turns off '-nopadWARP'.", argstr="-expad %d", xor=["nopadWARP"], ) ballopt = traits.Bool( desc="Normally, the incremental warp parameters are optimized inside" "a rectangular 'box' (24 dimensional for cubic patches, 81 for" "quintic patches), whose limits define the amount of distortion" "allowed at each step. Using '-ballopt' switches these limits" "to be applied to a 'ball' (interior of a hypersphere), which" "can allow for larger incremental displacements. Use this" "option if you think things need to be able to move farther.", argstr="-ballopt", xor=["workhard", "boxopt"], ) baxopt = traits.Bool( desc="Use the 'box' optimization limits instead of the 'ball'" "[this is the default at present]." 
"Note that if '-workhard' is used, then ball and box optimization" "are alternated in the different iterations at each level, so" "these two options have no effect in that case.", argstr="-boxopt", xor=["workhard", "ballopt"], ) verb = traits.Bool( desc="more detailed description of the process", argstr="-verb", xor=["quiet"] ) quiet = traits.Bool( desc="Cut out most of the fun fun fun progress messages :-(", argstr="-quiet", xor=["verb"], ) # Hidden and semi-hidden options overwrite = traits.Bool(desc="Overwrite outputs", argstr="-overwrite") lpc = traits.Bool( desc="Local Pearson minimization (i.e., EPI-T1 registration)" "This option has not be extensively tested" "If you use '-lpc', then '-maxlev 0' is automatically set." "If you want to go to more refined levels, you can set '-maxlev'" "This should be set up to have lpc as the second to last argument" "and maxlev as the second to last argument, as needed by AFNI" "Using maxlev > 1 is not recommended for EPI-T1 alignment.", argstr="-lpc", xor=["nmi", "mi", "hel", "lpa", "pear"], position=-2, ) lpa = traits.Bool( desc="Local Pearson maximization. 
This option has not be extensively tested", argstr="-lpa", xor=["nmi", "mi", "lpc", "hel", "pear"], ) hel = traits.Bool( desc="Hellinger distance: a matching function for the adventurous" "This option has NOT be extensively tested for usefullness" "and should be considered experimental at this infundibulum.", argstr="-hel", xor=["nmi", "mi", "lpc", "lpa", "pear"], ) mi = traits.Bool( desc="Mutual Information: a matching function for the adventurous" "This option has NOT be extensively tested for usefullness" "and should be considered experimental at this infundibulum.", argstr="-mi", xor=["mi", "hel", "lpc", "lpa", "pear"], ) nmi = traits.Bool( desc="Normalized Mutual Information: a matching function for the adventurous" "This option has NOT been extensively tested for usefullness" "and should be considered experimental at this infundibulum.", argstr="-nmi", xor=["nmi", "hel", "lpc", "lpa", "pear"], ) class QwarpOutputSpec(TraitedSpec): warped_source = File( desc="Warped source file. If plusminus is used, this is the undistorted" "source file." ) warped_base = File(desc="Undistorted base file.") source_warp = File( desc="Displacement in mm for the source image." "If plusminus is used this is the field suceptibility correction" "warp (in 'mm') for source image." ) base_warp = File( desc="Displacement in mm for the base image." "If plus minus is used, this is the field suceptibility correction" "warp (in 'mm') for base image. This is only output if plusminus" "or iwarp options are passed" ) weights = File(desc="Auto-computed weight volume.") class Qwarp(AFNICommand): """ Allineate your images prior to passing them to this workflow. 
Examples -------- >>> from nipype.interfaces import afni >>> qwarp = afni.Qwarp() >>> qwarp.inputs.in_file = 'sub-01_dir-LR_epi.nii.gz' >>> qwarp.inputs.nopadWARP = True >>> qwarp.inputs.base_file = 'sub-01_dir-RL_epi.nii.gz' >>> qwarp.inputs.plusminus = True >>> qwarp.cmdline '3dQwarp -base sub-01_dir-RL_epi.nii.gz -source sub-01_dir-LR_epi.nii.gz -nopadWARP \ -prefix ppp_sub-01_dir-LR_epi -plusminus' >>> res = qwarp.run() # doctest: +SKIP >>> from nipype.interfaces import afni >>> qwarp = afni.Qwarp() >>> qwarp.inputs.in_file = 'structural.nii' >>> qwarp.inputs.base_file = 'mni.nii' >>> qwarp.inputs.resample = True >>> qwarp.cmdline '3dQwarp -base mni.nii -source structural.nii -prefix ppp_structural -resample' >>> res = qwarp.run() # doctest: +SKIP >>> from nipype.interfaces import afni >>> qwarp = afni.Qwarp() >>> qwarp.inputs.in_file = 'structural.nii' >>> qwarp.inputs.base_file = 'epi.nii' >>> qwarp.inputs.out_file = 'anatSSQ.nii.gz' >>> qwarp.inputs.resample = True >>> qwarp.inputs.lpc = True >>> qwarp.inputs.verb = True >>> qwarp.inputs.iwarp = True >>> qwarp.inputs.blur = [0,3] >>> qwarp.cmdline '3dQwarp -base epi.nii -blur 0.0 3.0 -source structural.nii -iwarp -prefix anatSSQ.nii.gz \ -resample -verb -lpc' >>> res = qwarp.run() # doctest: +SKIP >>> from nipype.interfaces import afni >>> qwarp = afni.Qwarp() >>> qwarp.inputs.in_file = 'structural.nii' >>> qwarp.inputs.base_file = 'mni.nii' >>> qwarp.inputs.duplo = True >>> qwarp.inputs.blur = [0,3] >>> qwarp.cmdline '3dQwarp -base mni.nii -blur 0.0 3.0 -duplo -source structural.nii -prefix ppp_structural' >>> res = qwarp.run() # doctest: +SKIP >>> from nipype.interfaces import afni >>> qwarp = afni.Qwarp() >>> qwarp.inputs.in_file = 'structural.nii' >>> qwarp.inputs.base_file = 'mni.nii' >>> qwarp.inputs.duplo = True >>> qwarp.inputs.minpatch = 25 >>> qwarp.inputs.blur = [0,3] >>> qwarp.inputs.out_file = 'Q25' >>> qwarp.cmdline '3dQwarp -base mni.nii -blur 0.0 3.0 -duplo -source structural.nii -minpatch 25 
-prefix Q25' >>> res = qwarp.run() # doctest: +SKIP >>> qwarp2 = afni.Qwarp() >>> qwarp2.inputs.in_file = 'structural.nii' >>> qwarp2.inputs.base_file = 'mni.nii' >>> qwarp2.inputs.blur = [0,2] >>> qwarp2.inputs.out_file = 'Q11' >>> qwarp2.inputs.inilev = 7 >>> qwarp2.inputs.iniwarp = ['Q25_warp+tlrc.HEAD'] >>> qwarp2.cmdline '3dQwarp -base mni.nii -blur 0.0 2.0 -source structural.nii -inilev 7 -iniwarp Q25_\ warp+tlrc.HEAD -prefix Q11' >>> res2 = qwarp2.run() # doctest: +SKIP >>> res2 = qwarp2.run() # doctest: +SKIP >>> qwarp3 = afni.Qwarp() >>> qwarp3.inputs.in_file = 'structural.nii' >>> qwarp3.inputs.base_file = 'mni.nii' >>> qwarp3.inputs.allineate = True >>> qwarp3.inputs.allineate_opts = '-cose lpa -verb' >>> qwarp3.cmdline "3dQwarp -allineate -allineate_opts '-cose lpa -verb' -base mni.nii -source structural.nii \ -prefix ppp_structural" >>> res3 = qwarp3.run() # doctest: +SKIP See Also -------- For complete details, see the `3dQwarp Documentation. `__ """ _cmd = "3dQwarp" input_spec = QwarpInputSpec output_spec = QwarpOutputSpec def _format_arg(self, name, trait_spec, value): if name == "allineate_opts": return trait_spec.argstr % ("'" + value + "'") return super(Qwarp, self)._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = self.output_spec().get() if not isdefined(self.inputs.out_file): prefix = self._gen_fname(self.inputs.in_file, suffix="_QW") outputtype = self.inputs.outputtype if outputtype == "AFNI": ext = ".HEAD" suffix = "+tlrc" else: ext = Info.output_type_to_ext(outputtype) suffix = "" else: prefix = self.inputs.out_file ext_ind = max( [prefix.lower().rfind(".nii.gz"), prefix.lower().rfind(".nii")] ) if ext_ind == -1: ext = ".HEAD" suffix = "+tlrc" else: ext = prefix[ext_ind:] suffix = "" # All outputs should be in the same directory as the prefix out_dir = os.path.dirname(os.path.abspath(prefix)) outputs["warped_source"] = ( fname_presuffix(prefix, suffix=suffix, use_ext=False, newpath=out_dir) + ext ) if not 
self.inputs.nowarp: outputs["source_warp"] = ( fname_presuffix( prefix, suffix="_WARP" + suffix, use_ext=False, newpath=out_dir ) + ext ) if self.inputs.iwarp: outputs["base_warp"] = ( fname_presuffix( prefix, suffix="_WARPINV" + suffix, use_ext=False, newpath=out_dir ) + ext ) if isdefined(self.inputs.out_weight_file): outputs["weights"] = os.path.abspath(self.inputs.out_weight_file) if self.inputs.plusminus: outputs["warped_source"] = ( fname_presuffix( prefix, suffix="_PLUS" + suffix, use_ext=False, newpath=out_dir ) + ext ) outputs["warped_base"] = ( fname_presuffix( prefix, suffix="_MINUS" + suffix, use_ext=False, newpath=out_dir ) + ext ) outputs["source_warp"] = ( fname_presuffix( prefix, suffix="_PLUS_WARP" + suffix, use_ext=False, newpath=out_dir ) + ext ) outputs["base_warp"] = ( fname_presuffix( prefix, suffix="_MINUS_WARP" + suffix, use_ext=False, newpath=out_dir, ) + ext ) return outputs def _gen_filename(self, name): if name == "out_file": return self._gen_fname(self.inputs.in_file, suffix="_QW") class QwarpPlusMinusInputSpec(QwarpInputSpec): source_file = File( desc="Source image (opposite phase encoding direction than base image)", argstr="-source %s", exists=True, deprecated="1.1.2", new_name="in_file", copyfile=False, ) out_file = File( "Qwarp.nii.gz", argstr="-prefix %s", position=0, usedefault=True, desc="Output file", ) plusminus = traits.Bool( True, usedefault=True, position=1, desc="Normally, the warp displacements dis(x) are defined to match" "base(x) to source(x+dis(x)). With this option, the match" "is between base(x-dis(x)) and source(x+dis(x)) -- the two" "images 'meet in the middle'. For more info, view Qwarp` interface", argstr="-plusminus", xor=["duplo", "allsave", "iwarp"], ) class QwarpPlusMinus(Qwarp): """A version of 3dQwarp for performing field susceptibility correction using two images with opposing phase encoding directions. 
Examples -------- >>> from nipype.interfaces import afni >>> qwarp = afni.QwarpPlusMinus() >>> qwarp.inputs.in_file = 'sub-01_dir-LR_epi.nii.gz' >>> qwarp.inputs.nopadWARP = True >>> qwarp.inputs.base_file = 'sub-01_dir-RL_epi.nii.gz' >>> qwarp.cmdline '3dQwarp -prefix Qwarp.nii.gz -plusminus -base sub-01_dir-RL_epi.nii.gz \ -source sub-01_dir-LR_epi.nii.gz -nopadWARP' >>> res = warp.run() # doctest: +SKIP See Also -------- For complete details, see the `3dQwarp Documentation. `__ """ input_spec = QwarpPlusMinusInputSpec nipype-1.7.0/nipype/interfaces/afni/svm.py000066400000000000000000000132311413403311400205400ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """AFNI's svm interfaces.""" from ..base import TraitedSpec, traits, File from .base import AFNICommand, AFNICommandInputSpec, AFNICommandOutputSpec class SVMTrainInputSpec(AFNICommandInputSpec): # training options ttype = traits.Str( desc="tname: classification or regression", argstr="-type %s", mandatory=True ) in_file = File( desc="A 3D+t AFNI brik dataset to be used for training.", argstr="-trainvol %s", mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_vectors", desc="output sum of weighted linear support vectors file name", argstr="-bucket %s", suffix="_bucket", name_source="in_file", ) model = File( name_template="%s_model", desc="basename for the brik containing the SVM model", argstr="-model %s", suffix="_model", name_source="in_file", ) alphas = File( name_template="%s_alphas", desc="output alphas file name", argstr="-alpha %s", suffix="_alphas", name_source="in_file", ) mask = File( desc="byte-format brik file used to mask voxels in the analysis", argstr="-mask %s", position=-1, exists=True, copyfile=False, ) nomodelmask = traits.Bool( desc="Flag to enable the omission of a mask file", argstr="-nomodelmask" ) trainlabels = File( desc=".1D labels 
corresponding to the stimulus paradigm for the training data.", argstr="-trainlabels %s", exists=True, ) censor = File( desc=".1D censor file that allows the user to ignore certain samples in the training data.", argstr="-censor %s", exists=True, ) kernel = traits.Str( desc="string specifying type of kernel function:linear, polynomial, rbf, sigmoid", argstr="-kernel %s", ) max_iterations = traits.Int( desc="Specify the maximum number of iterations for the optimization.", argstr="-max_iterations %d", ) w_out = traits.Bool( desc="output sum of weighted linear support vectors", argstr="-wout" ) options = traits.Str(desc="additional options for SVM-light", argstr="%s") class SVMTrainOutputSpec(TraitedSpec): out_file = File(desc="sum of weighted linear support vectors file name") model = File(desc="brik containing the SVM model file name") alphas = File(desc="output alphas file name") class SVMTrain(AFNICommand): """Temporally predictive modeling with the support vector machine SVM Train Only For complete details, see the `3dsvm Documentation. 
`_ Examples ======== >>> from nipype.interfaces import afni as afni >>> svmTrain = afni.SVMTrain() >>> svmTrain.inputs.in_file = 'run1+orig' >>> svmTrain.inputs.trainlabels = 'run1_categories.1D' >>> svmTrain.inputs.ttype = 'regression' >>> svmTrain.inputs.mask = 'mask.nii' >>> svmTrain.inputs.model = 'model_run1' >>> svmTrain.inputs.alphas = 'alphas_run1' >>> res = svmTrain.run() # doctest: +SKIP """ _cmd = "3dsvm" input_spec = SVMTrainInputSpec output_spec = SVMTrainOutputSpec _additional_metadata = ["suffix"] def _format_arg(self, name, trait_spec, value): return super(SVMTrain, self)._format_arg(name, trait_spec, value) class SVMTestInputSpec(AFNICommandInputSpec): # testing options model = traits.Str( desc="modname is the basename for the brik containing the SVM model", argstr="-model %s", mandatory=True, ) in_file = File( desc="A 3D or 3D+t AFNI brik dataset to be used for testing.", argstr="-testvol %s", exists=True, mandatory=True, ) out_file = File( name_template="%s_predictions", desc="filename for .1D prediction file(s).", argstr="-predictions %s", ) testlabels = File( desc="*true* class category .1D labels for the test dataset. 
It is used to calculate the prediction accuracy performance", exists=True, argstr="-testlabels %s", ) classout = traits.Bool( desc="Flag to specify that pname files should be integer-valued, corresponding to class category decisions.", argstr="-classout", ) nopredcensord = traits.Bool( desc="Flag to prevent writing predicted values for censored time-points", argstr="-nopredcensord", ) nodetrend = traits.Bool( desc="Flag to specify that pname files should not be linearly detrended", argstr="-nodetrend", ) multiclass = traits.Bool( desc="Specifies multiclass algorithm for classification", argstr="-multiclass %s", ) options = traits.Str(desc="additional options for SVM-light", argstr="%s") class SVMTest(AFNICommand): """Temporally predictive modeling with the support vector machine SVM Test Only For complete details, see the `3dsvm Documentation. `_ Examples ======== >>> from nipype.interfaces import afni as afni >>> svmTest = afni.SVMTest() >>> svmTest.inputs.in_file= 'run2+orig' >>> svmTest.inputs.model= 'run1+orig_model' >>> svmTest.inputs.testlabels= 'run2_categories.1D' >>> svmTest.inputs.out_file= 'pred2_model1' >>> res = svmTest.run() # doctest: +SKIP """ _cmd = "3dsvm" input_spec = SVMTestInputSpec output_spec = AFNICommandOutputSpec nipype-1.7.0/nipype/interfaces/afni/tests/000077500000000000000000000000001413403311400205235ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/afni/tests/__init__.py000066400000000000000000000002121413403311400226270ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_ABoverlap.py000066400000000000000000000030761413403311400250450ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ABoverlap def test_ABoverlap_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, 
), in_file_a=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-3, ), in_file_b=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-2, ), no_automask=dict( argstr="-no_automask", ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr=" |& tee %s", extensions=None, position=-1, ), outputtype=dict(), quiet=dict( argstr="-quiet", ), verb=dict( argstr="-verb", ), ) inputs = ABoverlap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ABoverlap_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ABoverlap.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_AFNICommand.py000066400000000000000000000014221413403311400251770ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import AFNICommand def test_AFNICommand_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source=["in_file"], name_template="%s_afni", ), outputtype=dict(), ) inputs = AFNICommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_AFNICommandBase.py000066400000000000000000000007761413403311400260050ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import AFNICommandBase def test_AFNICommandBase_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), ) inputs = 
AFNICommandBase.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_AFNIPythonCommand.py000066400000000000000000000014441413403311400264050ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import AFNIPythonCommand def test_AFNIPythonCommand_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source=["in_file"], name_template="%s_afni", ), outputtype=dict(), ) inputs = AFNIPythonCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_AFNItoNIFTI.py000066400000000000000000000031201413403311400250320ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import AFNItoNIFTI def test_AFNItoNIFTI_inputs(): input_map = dict( args=dict( argstr="%s", ), denote=dict( argstr="-denote", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), newid=dict( argstr="-newid", xor=["oldid"], ), num_threads=dict( nohash=True, usedefault=True, ), oldid=dict( argstr="-oldid", xor=["newid"], ), out_file=dict( argstr="-prefix %s", extensions=None, hash_files=False, name_source="in_file", name_template="%s.nii", ), outputtype=dict(), pure=dict( argstr="-pure", ), ) inputs = AFNItoNIFTI.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AFNItoNIFTI_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = 
AFNItoNIFTI.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_AlignEpiAnatPy.py000066400000000000000000000047211413403311400257750ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import AlignEpiAnatPy def test_AlignEpiAnatPy_inputs(): input_map = dict( anat=dict( argstr="-anat %s", copyfile=False, extensions=None, mandatory=True, ), anat2epi=dict( argstr="-anat2epi", ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), epi2anat=dict( argstr="-epi2anat", ), epi_base=dict( argstr="-epi_base %s", mandatory=True, ), epi_strip=dict( argstr="-epi_strip %s", ), in_file=dict( argstr="-epi %s", copyfile=False, extensions=None, mandatory=True, ), outputtype=dict(), py27_path=dict( usedefault=True, ), save_skullstrip=dict( argstr="-save_skullstrip", ), suffix=dict( argstr="-suffix %s", usedefault=True, ), tshift=dict( argstr="-tshift %s", usedefault=True, ), volreg=dict( argstr="-volreg %s", usedefault=True, ), ) inputs = AlignEpiAnatPy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AlignEpiAnatPy_outputs(): output_map = dict( anat_al_mat=dict( extensions=None, ), anat_al_orig=dict( extensions=None, ), epi_al_mat=dict( extensions=None, ), epi_al_orig=dict( extensions=None, ), epi_al_tlrc_mat=dict( extensions=None, ), epi_reg_al_mat=dict( extensions=None, ), epi_tlrc_al=dict( extensions=None, ), epi_vr_al_mat=dict( extensions=None, ), epi_vr_motion=dict( extensions=None, ), skullstrip=dict( extensions=None, ), ) outputs = AlignEpiAnatPy.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Allineate.py000066400000000000000000000127011413403311400250630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Allineate def test_Allineate_inputs(): input_map = dict( allcostx=dict( argstr="-allcostx |& tee %s", extensions=None, position=-1, xor=["out_file", "out_matrix", "out_param_file", "out_weight_file"], ), args=dict( argstr="%s", ), autobox=dict( argstr="-autobox", ), automask=dict( argstr="-automask+%d", ), autoweight=dict( argstr="-autoweight%s", ), center_of_mass=dict( argstr="-cmass%s", ), check=dict( argstr="-check %s", ), convergence=dict( argstr="-conv %f", ), cost=dict( argstr="-cost %s", ), environ=dict( nohash=True, usedefault=True, ), epi=dict( argstr="-EPI", ), final_interpolation=dict( argstr="-final %s", ), fine_blur=dict( argstr="-fineblur %f", ), in_file=dict( argstr="-source %s", copyfile=False, extensions=None, mandatory=True, ), in_matrix=dict( argstr="-1Dmatrix_apply %s", extensions=None, position=-3, xor=["out_matrix"], ), in_param_file=dict( argstr="-1Dparam_apply %s", extensions=None, xor=["out_param_file"], ), interpolation=dict( argstr="-interp %s", ), master=dict( argstr="-master %s", extensions=None, ), maxrot=dict( argstr="-maxrot %f", ), maxscl=dict( argstr="-maxscl %f", ), maxshf=dict( argstr="-maxshf %f", ), maxshr=dict( argstr="-maxshr %f", ), newgrid=dict( argstr="-newgrid %f", ), nmatch=dict( argstr="-nmatch %d", ), no_pad=dict( argstr="-nopad", ), nomask=dict( argstr="-nomask", ), num_threads=dict( nohash=True, usedefault=True, ), nwarp=dict( argstr="-nwarp %s", ), nwarp_fixdep=dict( argstr="-nwarp_fixdep%s...", ), nwarp_fixmot=dict( argstr="-nwarp_fixmot%s...", ), one_pass=dict( argstr="-onepass", ), out_file=dict( argstr="-prefix %s", extensions=None, hash_files=False, name_source="in_file", name_template="%s_allineate", xor=["allcostx"], ), out_matrix=dict( argstr="-1Dmatrix_save %s", extensions=None, 
xor=["in_matrix", "allcostx"], ), out_param_file=dict( argstr="-1Dparam_save %s", extensions=None, xor=["in_param_file", "allcostx"], ), out_weight_file=dict( argstr="-wtprefix %s", extensions=None, xor=["allcostx"], ), outputtype=dict(), overwrite=dict( argstr="-overwrite", ), quiet=dict( argstr="-quiet", ), reference=dict( argstr="-base %s", extensions=None, ), replacebase=dict( argstr="-replacebase", ), replacemeth=dict( argstr="-replacemeth %s", ), source_automask=dict( argstr="-source_automask+%d", ), source_mask=dict( argstr="-source_mask %s", extensions=None, ), two_best=dict( argstr="-twobest %d", ), two_blur=dict( argstr="-twoblur %f", ), two_first=dict( argstr="-twofirst", ), two_pass=dict( argstr="-twopass", ), usetemp=dict( argstr="-usetemp", ), verbose=dict( argstr="-verb", ), warp_type=dict( argstr="-warp %s", ), warpfreeze=dict( argstr="-warpfreeze", ), weight=dict( argstr="-weight %s", ), weight_file=dict( argstr="-weight %s", deprecated="1.0.0", extensions=None, new_name="weight", ), zclip=dict( argstr="-zclip", ), ) inputs = Allineate.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Allineate_outputs(): output_map = dict( allcostx=dict( extensions=None, ), out_file=dict( extensions=None, ), out_matrix=dict( extensions=None, ), out_param_file=dict( extensions=None, ), out_weight_file=dict( extensions=None, ), ) outputs = Allineate.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_AutoTLRC.py000066400000000000000000000022031413403311400245560ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import AutoTLRC def test_AutoTLRC_inputs(): input_map = dict( args=dict( argstr="%s", ), base=dict( argstr="-base %s", 
mandatory=True, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-input %s", copyfile=False, extensions=None, mandatory=True, ), no_ss=dict( argstr="-no_ss", ), outputtype=dict(), ) inputs = AutoTLRC.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AutoTLRC_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = AutoTLRC.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_AutoTcorrelate.py000066400000000000000000000034121413403311400261210ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import AutoTcorrelate def test_AutoTcorrelate_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), eta2=dict( argstr="-eta2", ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), mask=dict( argstr="-mask %s", extensions=None, ), mask_only_targets=dict( argstr="-mask_only_targets", xor=["mask_source"], ), mask_source=dict( argstr="-mask_source %s", extensions=None, xor=["mask_only_targets"], ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_similarity_matrix.1D", ), outputtype=dict(), polort=dict( argstr="-polort %d", ), ) inputs = AutoTcorrelate.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AutoTcorrelate_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = AutoTcorrelate.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): 
assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Autobox.py000066400000000000000000000027641413403311400246160ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Autobox def test_Autobox_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-input %s", copyfile=False, extensions=None, mandatory=True, ), no_clustering=dict( argstr="-noclust", ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_autobox", ), outputtype=dict(), padding=dict( argstr="-npad %d", ), ) inputs = Autobox.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Autobox_outputs(): output_map = dict( out_file=dict( extensions=None, ), x_max=dict(), x_min=dict(), y_max=dict(), y_min=dict(), z_max=dict(), z_min=dict(), ) outputs = Autobox.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Automask.py000066400000000000000000000032671413403311400247600ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Automask def test_Automask_inputs(): input_map = dict( args=dict( argstr="%s", ), brain_file=dict( argstr="-apply_prefix %s", extensions=None, name_source="in_file", name_template="%s_masked", ), clfrac=dict( argstr="-clfrac %s", ), dilate=dict( argstr="-dilate %s", ), environ=dict( nohash=True, usedefault=True, ), erode=dict( argstr="-erode %s", ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( 
argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_mask", ), outputtype=dict(), ) inputs = Automask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Automask_outputs(): output_map = dict( brain_file=dict( extensions=None, ), out_file=dict( extensions=None, ), ) outputs = Automask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Axialize.py000066400000000000000000000032661413403311400247410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Axialize def test_Axialize_inputs(): input_map = dict( args=dict( argstr="%s", ), axial=dict( argstr="-axial", xor=["coronal", "sagittal"], ), coronal=dict( argstr="-coronal", xor=["sagittal", "axial"], ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-2, ), num_threads=dict( nohash=True, usedefault=True, ), orientation=dict( argstr="-orient %s", ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_axialize", ), outputtype=dict(), sagittal=dict( argstr="-sagittal", xor=["coronal", "axial"], ), verb=dict( argstr="-verb", ), ) inputs = Axialize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Axialize_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Axialize.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Bandpass.py000066400000000000000000000045311413403311400247220ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Bandpass def test_Bandpass_inputs(): input_map = dict( args=dict( argstr="%s", ), automask=dict( argstr="-automask", ), blur=dict( argstr="-blur %f", ), despike=dict( argstr="-despike", ), environ=dict( nohash=True, usedefault=True, ), highpass=dict( argstr="%f", mandatory=True, position=-3, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), localPV=dict( argstr="-localPV %f", ), lowpass=dict( argstr="%f", mandatory=True, position=-2, ), mask=dict( argstr="-mask %s", extensions=None, position=2, ), nfft=dict( argstr="-nfft %d", ), no_detrend=dict( argstr="-nodetrend", ), normalize=dict( argstr="-norm", ), notrans=dict( argstr="-notrans", ), num_threads=dict( nohash=True, usedefault=True, ), orthogonalize_dset=dict( argstr="-dsort %s", extensions=None, ), orthogonalize_file=dict( argstr="-ort %s", ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_bp", position=1, ), outputtype=dict(), tr=dict( argstr="-dt %f", ), ) inputs = Bandpass.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Bandpass_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Bandpass.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_BlurInMask.py000066400000000000000000000035251413403311400252000ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import BlurInMask def test_BlurInMask_inputs(): input_map = dict( args=dict( argstr="%s", ), 
automask=dict( argstr="-automask", ), environ=dict( nohash=True, usedefault=True, ), float_out=dict( argstr="-float", ), fwhm=dict( argstr="-FWHM %f", mandatory=True, ), in_file=dict( argstr="-input %s", copyfile=False, extensions=None, mandatory=True, position=1, ), mask=dict( argstr="-mask %s", extensions=None, ), multimask=dict( argstr="-Mmask %s", extensions=None, ), num_threads=dict( nohash=True, usedefault=True, ), options=dict( argstr="%s", position=2, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_blur", position=-1, ), outputtype=dict(), preserve=dict( argstr="-preserve", ), ) inputs = BlurInMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BlurInMask_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = BlurInMask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_BlurToFWHM.py000066400000000000000000000031411413403311400250540ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import BlurToFWHM def test_BlurToFWHM_inputs(): input_map = dict( args=dict( argstr="%s", ), automask=dict( argstr="-automask", ), blurmaster=dict( argstr="-blurmaster %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), fwhm=dict( argstr="-FWHM %f", ), fwhmxy=dict( argstr="-FWHMxy %f", ), in_file=dict( argstr="-input %s", extensions=None, mandatory=True, ), mask=dict( argstr="-mask %s", extensions=None, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source=["in_file"], name_template="%s_afni", ), outputtype=dict(), ) inputs = BlurToFWHM.input_spec() for key, metadata in list(input_map.items()): 
for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BlurToFWHM_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = BlurToFWHM.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_BrickStat.py000066400000000000000000000027251413403311400250600ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import BrickStat def test_BrickStat_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, ), mask=dict( argstr="-mask %s", extensions=None, position=2, ), max=dict( argstr="-max", ), mean=dict( argstr="-mean", ), min=dict( argstr="-min", position=1, ), percentile=dict( argstr="-percentile %.3f %.3f %.3f", ), slow=dict( argstr="-slow", ), sum=dict( argstr="-sum", ), var=dict( argstr="-var", ), ) inputs = BrickStat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BrickStat_outputs(): output_map = dict( min_val=dict(), ) outputs = BrickStat.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Bucket.py000066400000000000000000000022251413403311400244020ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Bucket def test_Bucket_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", mandatory=True, position=-1, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( 
argstr="-prefix %s", extensions=None, name_template="buck", ), outputtype=dict(), ) inputs = Bucket.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Bucket_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Bucket.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Calc.py000066400000000000000000000035551413403311400240360ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Calc def test_Calc_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), expr=dict( argstr='-expr "%s"', mandatory=True, position=3, ), in_file_a=dict( argstr="-a %s", extensions=None, mandatory=True, position=0, ), in_file_b=dict( argstr="-b %s", extensions=None, position=1, ), in_file_c=dict( argstr="-c %s", extensions=None, position=2, ), num_threads=dict( nohash=True, usedefault=True, ), other=dict( argstr="", extensions=None, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file_a", name_template="%s_calc", ), outputtype=dict(), overwrite=dict( argstr="-overwrite", ), single_idx=dict(), start_idx=dict( requires=["stop_idx"], ), stop_idx=dict( requires=["start_idx"], ), ) inputs = Calc.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Calc_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Calc.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Cat.py000066400000000000000000000043421413403311400236760ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Cat def test_Cat_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( argstr="%s", mandatory=True, position=-2, ), keepfree=dict( argstr="-nonfixed", ), num_threads=dict( nohash=True, usedefault=True, ), omitconst=dict( argstr="-nonconst", ), out_cint=dict( xor=["out_format", "out_nice", "out_double", "out_fint", "out_int"], ), out_double=dict( argstr="-d", xor=["out_format", "out_nice", "out_int", "out_fint", "out_cint"], ), out_file=dict( argstr="> %s", extensions=None, mandatory=True, position=-1, usedefault=True, ), out_fint=dict( argstr="-f", xor=["out_format", "out_nice", "out_double", "out_int", "out_cint"], ), out_format=dict( argstr="-form %s", xor=["out_int", "out_nice", "out_double", "out_fint", "out_cint"], ), out_int=dict( argstr="-i", xor=["out_format", "out_nice", "out_double", "out_fint", "out_cint"], ), out_nice=dict( argstr="-n", xor=["out_format", "out_int", "out_double", "out_fint", "out_cint"], ), outputtype=dict(), sel=dict( argstr="-sel %s", ), stack=dict( argstr="-stack", ), ) inputs = Cat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Cat_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Cat.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_CatMatvec.py000066400000000000000000000031321413403311400250320ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import CatMatvec def test_CatMatvec_inputs(): input_map = dict( 
args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fourxfour=dict( argstr="-4x4", xor=["matrix", "oneline"], ), in_file=dict( argstr="%s", mandatory=True, position=-2, ), matrix=dict( argstr="-MATRIX", xor=["oneline", "fourxfour"], ), num_threads=dict( nohash=True, usedefault=True, ), oneline=dict( argstr="-ONELINE", xor=["matrix", "fourxfour"], ), out_file=dict( argstr=" > %s", extensions=None, keep_extension=False, mandatory=True, name_source="in_file", name_template="%s_cat.aff12.1D", position=-1, ), outputtype=dict(), ) inputs = CatMatvec.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CatMatvec_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = CatMatvec.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_CenterMass.py000066400000000000000000000033461413403311400252360ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import CenterMass def test_CenterMass_inputs(): input_map = dict( all_rois=dict( argstr="-all_rois", ), args=dict( argstr="%s", ), automask=dict( argstr="-automask", ), cm_file=dict( argstr="> %s", extensions=None, hash_files=False, keep_extension=False, name_source="in_file", name_template="%s_cm.out", position=-1, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-2, ), local_ijk=dict( argstr="-local_ijk", ), mask_file=dict( argstr="-mask %s", extensions=None, ), roi_vals=dict( argstr="-roi_vals %s", ), set_cm=dict( argstr="-set %f %f %f", ), ) inputs = CenterMass.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value def test_CenterMass_outputs(): output_map = dict( cm=dict(), cm_file=dict( extensions=None, ), out_file=dict( extensions=None, ), ) outputs = CenterMass.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_ClipLevel.py000066400000000000000000000023661413403311400250520ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import ClipLevel def test_ClipLevel_inputs(): input_map = dict( args=dict( argstr="%s", ), doall=dict( argstr="-doall", position=3, xor="grad", ), environ=dict( nohash=True, usedefault=True, ), grad=dict( argstr="-grad %s", extensions=None, position=3, xor="doall", ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, ), mfrac=dict( argstr="-mfrac %s", position=2, ), ) inputs = ClipLevel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ClipLevel_outputs(): output_map = dict( clip_val=dict(), ) outputs = ClipLevel.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_ConvertDset.py000066400000000000000000000025271413403311400254320ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ConvertDset def test_ConvertDset_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-input %s", extensions=None, mandatory=True, position=-2, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, mandatory=True, position=-1, ), out_type=dict( 
argstr="-o_%s", mandatory=True, position=0, ), outputtype=dict(), ) inputs = ConvertDset.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ConvertDset_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ConvertDset.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Copy.py000066400000000000000000000024701413403311400241010ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Copy def test_Copy_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-2, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="%s", extensions=None, name_source="in_file", name_template="%s_copy", position=-1, ), outputtype=dict(), verbose=dict( argstr="-verb", ), ) inputs = Copy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Copy_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Copy.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Deconvolve.py000066400000000000000000000107731413403311400253000ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import Deconvolve def test_Deconvolve_inputs(): input_map = dict( STATmask=dict( argstr="-STATmask %s", extensions=None, ), TR_1D=dict( argstr="-TR_1D %f", ), allzero_OK=dict( 
argstr="-allzero_OK", ), args=dict( argstr="%s", ), automask=dict( argstr="-automask", ), cbucket=dict( argstr="-cbucket %s", ), censor=dict( argstr="-censor %s", extensions=None, ), dmbase=dict( argstr="-dmbase", ), dname=dict( argstr="-D%s=%s", ), environ=dict( nohash=True, usedefault=True, ), force_TR=dict( argstr="-force_TR %f", position=0, ), fout=dict( argstr="-fout", ), global_times=dict( argstr="-global_times", xor=["local_times"], ), glt_label=dict( argstr="-glt_label %d %s...", position=-1, requires=["gltsym"], ), gltsym=dict( argstr="-gltsym 'SYM: %s'...", position=-2, ), goforit=dict( argstr="-GOFORIT %i", ), in_files=dict( argstr="-input %s", copyfile=False, position=1, sep=" ", ), input1D=dict( argstr="-input1D %s", extensions=None, ), legendre=dict( argstr="-legendre", ), local_times=dict( argstr="-local_times", xor=["global_times"], ), mask=dict( argstr="-mask %s", extensions=None, ), noblock=dict( argstr="-noblock", ), nocond=dict( argstr="-nocond", ), nodmbase=dict( argstr="-nodmbase", ), nofdr=dict( argstr="-noFDR", ), nolegendre=dict( argstr="-nolegendre", ), nosvd=dict( argstr="-nosvd", ), num_glt=dict( argstr="-num_glt %d", position=-3, ), num_stimts=dict( argstr="-num_stimts %d", position=-6, ), num_threads=dict( argstr="-jobs %d", nohash=True, ), ortvec=dict( argstr="-ortvec %s %s", ), out_file=dict( argstr="-bucket %s", extensions=None, ), outputtype=dict(), polort=dict( argstr="-polort %d", ), rmsmin=dict( argstr="-rmsmin %f", ), rout=dict( argstr="-rout", ), sat=dict( argstr="-sat", xor=["trans"], ), singvals=dict( argstr="-singvals", ), stim_label=dict( argstr="-stim_label %d %s...", position=-4, requires=["stim_times"], ), stim_times=dict( argstr="-stim_times %d %s '%s'...", position=-5, ), stim_times_subtract=dict( argstr="-stim_times_subtract %f", ), svd=dict( argstr="-svd", ), tout=dict( argstr="-tout", ), trans=dict( argstr="-trans", xor=["sat"], ), vout=dict( argstr="-vout", ), x1D=dict( argstr="-x1D %s", extensions=None, ), 
x1D_stop=dict( argstr="-x1D_stop", ), ) inputs = Deconvolve.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Deconvolve_outputs(): output_map = dict( cbucket=dict( extensions=None, ), out_file=dict( extensions=None, ), reml_script=dict( extensions=None, ), x1D=dict( extensions=None, ), ) outputs = Deconvolve.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_DegreeCentrality.py000066400000000000000000000035251413403311400264230ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import DegreeCentrality def test_DegreeCentrality_inputs(): input_map = dict( args=dict( argstr="%s", ), autoclip=dict( argstr="-autoclip", ), automask=dict( argstr="-automask", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), mask=dict( argstr="-mask %s", extensions=None, ), num_threads=dict( nohash=True, usedefault=True, ), oned_file=dict( argstr="-out1D %s", ), out_file=dict( argstr="-prefix %s", extensions=None, name_source=["in_file"], name_template="%s_afni", ), outputtype=dict(), polort=dict( argstr="-polort %d", ), sparsity=dict( argstr="-sparsity %f", ), thresh=dict( argstr="-thresh %f", ), ) inputs = DegreeCentrality.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DegreeCentrality_outputs(): output_map = dict( oned_file=dict( extensions=None, ), out_file=dict( extensions=None, ), ) outputs = DegreeCentrality.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Despike.py000066400000000000000000000024011413403311400245450ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Despike def test_Despike_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_despike", ), outputtype=dict(), ) inputs = Despike.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Despike_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Despike.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Detrend.py000066400000000000000000000024011413403311400245460ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Detrend def test_Detrend_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_detrend", ), outputtype=dict(), ) inputs = Detrend.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Detrend_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = 
Detrend.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Dot.py000066400000000000000000000035601413403311400237160ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Dot def test_Dot_inputs(): input_map = dict( args=dict( argstr="%s", ), demean=dict( argstr="-demean", ), docoef=dict( argstr="-docoef", ), docor=dict( argstr="-docor", ), dodice=dict( argstr="-dodice", ), dodot=dict( argstr="-dodot", ), doeta2=dict( argstr="-doeta2", ), dosums=dict( argstr="-dosums", ), environ=dict( nohash=True, usedefault=True, ), full=dict( argstr="-full", ), in_files=dict( argstr="%s ...", position=-2, ), mask=dict( argstr="-mask %s", extensions=None, ), mrange=dict( argstr="-mrange %s %s", ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr=" |& tee %s", extensions=None, position=-1, ), outputtype=dict(), show_labels=dict( argstr="-show_labels", ), upper=dict( argstr="-upper", ), ) inputs = Dot.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Dot_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Dot.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_ECM.py000066400000000000000000000041041413403311400235670ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import ECM def test_ECM_inputs(): input_map = dict( args=dict( argstr="%s", ), autoclip=dict( argstr="-autoclip", ), automask=dict( argstr="-automask", ), environ=dict( nohash=True, usedefault=True, ), eps=dict( argstr="-eps %f", ), 
fecm=dict( argstr="-fecm", ), full=dict( argstr="-full", ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), mask=dict( argstr="-mask %s", extensions=None, ), max_iter=dict( argstr="-max_iter %d", ), memory=dict( argstr="-memory %f", ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source=["in_file"], name_template="%s_afni", ), outputtype=dict(), polort=dict( argstr="-polort %d", ), scale=dict( argstr="-scale %f", ), shift=dict( argstr="-shift %f", ), sparsity=dict( argstr="-sparsity %f", ), thresh=dict( argstr="-thresh %f", ), ) inputs = ECM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ECM_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ECM.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Edge3.py000066400000000000000000000034361413403311400241210ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Edge3 def test_Edge3_inputs(): input_map = dict( args=dict( argstr="%s", ), datum=dict( argstr="-datum %s", ), environ=dict( nohash=True, usedefault=True, ), fscale=dict( argstr="-fscale", xor=["gscale", "nscale", "scale_floats"], ), gscale=dict( argstr="-gscale", xor=["fscale", "nscale", "scale_floats"], ), in_file=dict( argstr="-input %s", copyfile=False, extensions=None, mandatory=True, position=0, ), nscale=dict( argstr="-nscale", xor=["fscale", "gscale", "scale_floats"], ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, position=-1, ), outputtype=dict(), scale_floats=dict( argstr="-scale_floats %f", xor=["fscale", "gscale", "nscale"], ), 
verbose=dict( argstr="-verbose", ), ) inputs = Edge3.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Edge3_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Edge3.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Eval.py000066400000000000000000000035421413403311400240570ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Eval def test_Eval_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), expr=dict( argstr='-expr "%s"', mandatory=True, position=3, ), in_file_a=dict( argstr="-a %s", extensions=None, mandatory=True, position=0, ), in_file_b=dict( argstr="-b %s", extensions=None, position=1, ), in_file_c=dict( argstr="-c %s", extensions=None, position=2, ), num_threads=dict( nohash=True, usedefault=True, ), other=dict( argstr="", extensions=None, ), out1D=dict( argstr="-1D", ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file_a", name_template="%s_calc", ), outputtype=dict(), single_idx=dict(), start_idx=dict( requires=["stop_idx"], ), stop_idx=dict( requires=["start_idx"], ), ) inputs = Eval.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Eval_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Eval.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_FWHMx.py000066400000000000000000000051421413403311400241170ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import FWHMx def test_FWHMx_inputs(): input_map = dict( acf=dict( argstr="-acf", usedefault=True, ), args=dict( argstr="%s", ), arith=dict( argstr="-arith", xor=["geom"], ), automask=dict( argstr="-automask", usedefault=True, ), combine=dict( argstr="-combine", ), compat=dict( argstr="-compat", ), demed=dict( argstr="-demed", xor=["detrend"], ), detrend=dict( argstr="-detrend", usedefault=True, xor=["demed"], ), environ=dict( nohash=True, usedefault=True, ), geom=dict( argstr="-geom", xor=["arith"], ), in_file=dict( argstr="-input %s", extensions=None, mandatory=True, ), mask=dict( argstr="-mask %s", extensions=None, ), out_detrend=dict( argstr="-detprefix %s", extensions=None, keep_extension=False, name_source="in_file", name_template="%s_detrend", ), out_file=dict( argstr="> %s", extensions=None, keep_extension=False, name_source="in_file", name_template="%s_fwhmx.out", position=-1, ), out_subbricks=dict( argstr="-out %s", extensions=None, keep_extension=False, name_source="in_file", name_template="%s_subbricks.out", ), unif=dict( argstr="-unif", ), ) inputs = FWHMx.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FWHMx_outputs(): output_map = dict( acf_param=dict(), fwhm=dict(), out_acf=dict( extensions=None, ), out_detrend=dict( extensions=None, ), out_file=dict( extensions=None, ), out_subbricks=dict( extensions=None, ), ) outputs = FWHMx.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Fim.py000066400000000000000000000030671413403311400237050ustar00rootroot00000000000000# 
AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Fim def test_Fim_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fim_thr=dict( argstr="-fim_thr %f", position=3, ), ideal_file=dict( argstr="-ideal_file %s", extensions=None, mandatory=True, position=2, ), in_file=dict( argstr="-input %s", copyfile=False, extensions=None, mandatory=True, position=1, ), num_threads=dict( nohash=True, usedefault=True, ), out=dict( argstr="-out %s", position=4, ), out_file=dict( argstr="-bucket %s", extensions=None, name_source="in_file", name_template="%s_fim", ), outputtype=dict(), ) inputs = Fim.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Fim_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Fim.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Fourier.py000066400000000000000000000030011413403311400245710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Fourier def test_Fourier_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), highpass=dict( argstr="-highpass %f", mandatory=True, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), lowpass=dict( argstr="-lowpass %f", mandatory=True, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_fourier", ), outputtype=dict(), retrend=dict( argstr="-retrend", ), ) inputs = Fourier.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value def test_Fourier_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Fourier.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_GCOR.py000066400000000000000000000022431413403311400237170ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import GCOR def test_GCOR_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-input %s", copyfile=False, extensions=None, mandatory=True, position=-1, ), mask=dict( argstr="-mask %s", copyfile=False, extensions=None, ), nfirst=dict( argstr="-nfirst %d", ), no_demean=dict( argstr="-no_demean", ), ) inputs = GCOR.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GCOR_outputs(): output_map = dict( out=dict(), ) outputs = GCOR.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Hist.py000066400000000000000000000036011413403311400240730ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Hist def test_Hist_inputs(): input_map = dict( args=dict( argstr="%s", ), bin_width=dict( argstr="-binwidth %f", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-input %s", copyfile=False, extensions=None, mandatory=True, position=1, ), mask=dict( argstr="-mask %s", extensions=None, ), max_value=dict( argstr="-max %f", ), min_value=dict( argstr="-min %f", ), nbin=dict( argstr="-nbin %d", ), out_file=dict( argstr="-prefix %s", extensions=None, 
keep_extension=False, name_source=["in_file"], name_template="%s_hist", ), out_show=dict( argstr="> %s", extensions=None, keep_extension=False, name_source="in_file", name_template="%s_hist.out", position=-1, ), showhist=dict( argstr="-showhist", usedefault=True, ), ) inputs = Hist.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Hist_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_show=dict( extensions=None, ), ) outputs = Hist.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_LFCD.py000066400000000000000000000031211413403311400236710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import LFCD def test_LFCD_inputs(): input_map = dict( args=dict( argstr="%s", ), autoclip=dict( argstr="-autoclip", ), automask=dict( argstr="-automask", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), mask=dict( argstr="-mask %s", extensions=None, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source=["in_file"], name_template="%s_afni", ), outputtype=dict(), polort=dict( argstr="-polort %d", ), thresh=dict( argstr="-thresh %f", ), ) inputs = LFCD.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_LFCD_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = LFCD.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_LocalBistat.py000066400000000000000000000037011413403311400253660ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import LocalBistat def test_LocalBistat_inputs(): input_map = dict( args=dict( argstr="%s", ), automask=dict( argstr="-automask", xor=["weight_file"], ), environ=dict( nohash=True, usedefault=True, ), in_file1=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), in_file2=dict( argstr="%s", extensions=None, mandatory=True, position=-1, ), mask_file=dict( argstr="-mask %s", extensions=None, ), neighborhood=dict( argstr="-nbhd '%s(%s)'", mandatory=True, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, keep_extension=True, name_source="in_file1", name_template="%s_bistat", position=0, ), outputtype=dict(), stat=dict( argstr="-stat %s...", mandatory=True, ), weight_file=dict( argstr="-weight %s", extensions=None, xor=["automask"], ), ) inputs = LocalBistat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_LocalBistat_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = LocalBistat.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Localstat.py000066400000000000000000000045411413403311400251160ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Localstat def test_Localstat_inputs(): input_map = dict( args=dict( argstr="%s", ), automask=dict( argstr="-automask", ), environ=dict( nohash=True, usedefault=True, ), grid_rmode=dict( argstr="-grid_rmode %s", requires=["reduce_restore_grid"], ), in_file=dict( argstr="%s", extensions=None, 
mandatory=True, position=-1, ), mask_file=dict( argstr="-mask %s", extensions=None, ), neighborhood=dict( argstr="-nbhd '%s(%s)'", mandatory=True, ), nonmask=dict( argstr="-use_nonmask", ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_localstat", position=0, ), outputtype=dict(), overwrite=dict( argstr="-overwrite", ), quiet=dict( argstr="-quiet", ), reduce_grid=dict( argstr="-reduce_grid %s", xor=["reduce_restore_grid", "reduce_max_vox"], ), reduce_max_vox=dict( argstr="-reduce_max_vox %s", xor=["reduce_restore_grid", "reduce_grid"], ), reduce_restore_grid=dict( argstr="-reduce_restore_grid %s", xor=["reduce_max_vox", "reduce_grid"], ), stat=dict( argstr="-stat %s...", mandatory=True, ), ) inputs = Localstat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Localstat_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Localstat.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_MaskTool.py000066400000000000000000000037021413403311400247170ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import MaskTool def test_MaskTool_inputs(): input_map = dict( args=dict( argstr="%s", ), count=dict( argstr="-count", position=2, ), datum=dict( argstr="-datum %s", ), dilate_inputs=dict( argstr="-dilate_inputs %s", ), dilate_results=dict( argstr="-dilate_results %s", ), environ=dict( nohash=True, usedefault=True, ), fill_dirs=dict( argstr="-fill_dirs %s", requires=["fill_holes"], ), fill_holes=dict( argstr="-fill_holes", ), frac=dict( argstr="-frac %s", ), in_file=dict( argstr="-input %s", copyfile=False, 
mandatory=True, position=-1, ), inter=dict( argstr="-inter", ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_mask", ), outputtype=dict(), union=dict( argstr="-union", ), verbose=dict( argstr="-verb %s", ), ) inputs = MaskTool.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MaskTool_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MaskTool.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Maskave.py000066400000000000000000000027761413403311400245670ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Maskave def test_Maskave_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-2, ), mask=dict( argstr="-mask %s", extensions=None, position=1, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="> %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_maskave.1D", position=-1, ), outputtype=dict(), quiet=dict( argstr="-quiet", position=2, ), ) inputs = Maskave.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Maskave_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Maskave.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Means.py000066400000000000000000000036041413403311400242320ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Means def test_Means_inputs(): input_map = dict( args=dict( argstr="%s", ), count=dict( argstr="-count", ), datum=dict( argstr="-datum %s", ), environ=dict( nohash=True, usedefault=True, ), in_file_a=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), in_file_b=dict( argstr="%s", extensions=None, position=-1, ), mask_inter=dict( argstr="-mask_inter", ), mask_union=dict( argstr="-mask_union", ), non_zero=dict( argstr="-non_zero", ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file_a", name_template="%s_mean", ), outputtype=dict(), scale=dict( argstr="-%sscale", ), sqr=dict( argstr="-sqr", ), std_dev=dict( argstr="-stdev", ), summ=dict( argstr="-sum", ), ) inputs = Means.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Means_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Means.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Merge.py000066400000000000000000000025601413403311400242260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Merge def test_Merge_inputs(): input_map = dict( args=dict( argstr="%s", ), blurfwhm=dict( argstr="-1blur_fwhm %d", units="mm", ), doall=dict( argstr="-doall", ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( argstr="%s", copyfile=False, mandatory=True, position=-1, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", 
extensions=None, name_source="in_files", name_template="%s_merge", ), outputtype=dict(), ) inputs = Merge.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Merge_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Merge.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_NetCorr.py000066400000000000000000000046241413403311400245460ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import NetCorr def test_NetCorr_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fish_z=dict( argstr="-fish_z", ), ignore_LT=dict( argstr="-ignore_LT", ), in_file=dict( argstr="-inset %s", extensions=None, mandatory=True, ), in_rois=dict( argstr="-in_rois %s", extensions=None, mandatory=True, ), mask=dict( argstr="-mask %s", extensions=None, ), nifti=dict( argstr="-nifti", ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_netcorr", position=1, ), output_mask_nonnull=dict( argstr="-output_mask_nonnull", ), outputtype=dict(), part_corr=dict( argstr="-part_corr", ), push_thru_many_zeros=dict( argstr="-push_thru_many_zeros", ), ts_indiv=dict( argstr="-ts_indiv", ), ts_label=dict( argstr="-ts_label", ), ts_out=dict( argstr="-ts_out", ), ts_wb_Z=dict( argstr="-ts_wb_Z", ), ts_wb_corr=dict( argstr="-ts_wb_corr", ), ts_wb_strlabel=dict( argstr="-ts_wb_strlabel", ), weight_ts=dict( argstr="-weight_ts %s", extensions=None, ), ) inputs = NetCorr.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == 
value def test_NetCorr_outputs(): output_map = dict( out_corr_maps=dict(), out_corr_matrix=dict( extensions=None, ), ) outputs = NetCorr.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Notes.py000066400000000000000000000030271413403311400242560ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Notes def test_Notes_inputs(): input_map = dict( add=dict( argstr='-a "%s"', ), add_history=dict( argstr='-h "%s"', xor=["rep_history"], ), args=dict( argstr="%s", ), delete=dict( argstr="-d %d", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="%s", extensions=None, ), outputtype=dict(), rep_history=dict( argstr='-HH "%s"', xor=["add_history"], ), ses=dict( argstr="-ses", ), ) inputs = Notes.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Notes_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Notes.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_NwarpAdjust.py000066400000000000000000000025171413403311400254330ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import NwarpAdjust def test_NwarpAdjust_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( argstr="-source %s", ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, 
keep_extension=True, name_source="in_files", name_template="%s_NwarpAdjust", requires=["in_files"], ), outputtype=dict(), warps=dict( argstr="-nwarp %s", mandatory=True, ), ) inputs = NwarpAdjust.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_NwarpAdjust_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = NwarpAdjust.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_NwarpApply.py000066400000000000000000000033101413403311400252560ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import NwarpApply def test_NwarpApply_inputs(): input_map = dict( ainterp=dict( argstr="-ainterp %s", ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-source %s", mandatory=True, ), interp=dict( argstr="-interp %s", usedefault=True, ), inv_warp=dict( argstr="-iwarp", ), master=dict( argstr="-master %s", extensions=None, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_Nwarp", ), quiet=dict( argstr="-quiet", xor=["verb"], ), short=dict( argstr="-short", ), verb=dict( argstr="-verb", xor=["quiet"], ), warp=dict( argstr="-nwarp %s", mandatory=True, ), ) inputs = NwarpApply.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_NwarpApply_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = NwarpApply.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_NwarpCat.py000066400000000000000000000030401413403311400247000ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import NwarpCat def test_NwarpCat_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), expad=dict( argstr="-expad %d", ), in_files=dict( argstr="%s", mandatory=True, position=-1, ), interp=dict( argstr="-interp %s", usedefault=True, ), inv_warp=dict( argstr="-iwarp", ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_files", name_template="%s_NwarpCat", ), outputtype=dict(), space=dict( argstr="-space %s", ), verb=dict( argstr="-verb", ), ) inputs = NwarpCat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_NwarpCat_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = NwarpCat.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_OneDToolPy.py000066400000000000000000000040341413403311400251610ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import OneDToolPy def test_OneDToolPy_inputs(): input_map = dict( args=dict( argstr="%s", ), censor_motion=dict( argstr="-censor_motion %f %s", ), censor_prev_TR=dict( argstr="-censor_prev_TR", ), demean=dict( argstr="-demean", ), derivative=dict( argstr="-derivative", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-infile %s", extensions=None, mandatory=True, ), out_file=dict( argstr="-write %s", extensions=None, xor=["show_cormat_warnings"], ), outputtype=dict(), py27_path=dict( usedefault=True, ), set_nruns=dict( argstr="-set_nruns 
%d", ), show_censor_count=dict( argstr="-show_censor_count", ), show_cormat_warnings=dict( argstr="-show_cormat_warnings |& tee %s", extensions=None, position=-1, xor=["out_file"], ), show_indices_interest=dict( argstr="-show_indices_interest", ), show_trs_run=dict( argstr="-show_trs_run %d", ), show_trs_uncensored=dict( argstr="-show_trs_uncensored %s", ), ) inputs = OneDToolPy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_OneDToolPy_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = OneDToolPy.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_OutlierCount.py000066400000000000000000000044721413403311400256270ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import OutlierCount def test_OutlierCount_inputs(): input_map = dict( args=dict( argstr="%s", ), autoclip=dict( argstr="-autoclip", usedefault=True, xor=["mask"], ), automask=dict( argstr="-automask", usedefault=True, xor=["mask"], ), environ=dict( nohash=True, usedefault=True, ), fraction=dict( argstr="-fraction", usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), interval=dict( argstr="-range", usedefault=True, ), legendre=dict( argstr="-legendre", usedefault=True, ), mask=dict( argstr="-mask %s", extensions=None, xor=["autoclip", "automask"], ), out_file=dict( extensions=None, keep_extension=False, name_source=["in_file"], name_template="%s_outliers", ), outliers_file=dict( argstr="-save %s", extensions=None, keep_extension=True, name_source=["in_file"], name_template="%s_outliers", output_name="out_outliers", ), polort=dict( argstr="-polort %d", ), qthr=dict( argstr="-qthr %.5f", usedefault=True, ), 
save_outliers=dict( usedefault=True, ), ) inputs = OutlierCount.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_OutlierCount_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_outliers=dict( extensions=None, ), ) outputs = OutlierCount.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_QualityIndex.py000066400000000000000000000036121413403311400256060ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import QualityIndex def test_QualityIndex_inputs(): input_map = dict( args=dict( argstr="%s", ), autoclip=dict( argstr="-autoclip", usedefault=True, xor=["mask"], ), automask=dict( argstr="-automask", usedefault=True, xor=["mask"], ), clip=dict( argstr="-clip %f", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), interval=dict( argstr="-range", usedefault=True, ), mask=dict( argstr="-mask %s", extensions=None, xor=["autoclip", "automask"], ), out_file=dict( argstr="> %s", extensions=None, keep_extension=False, name_source=["in_file"], name_template="%s_tqual", position=-1, ), quadrant=dict( argstr="-quadrant", usedefault=True, ), spearman=dict( argstr="-spearman", usedefault=True, ), ) inputs = QualityIndex.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_QualityIndex_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = QualityIndex.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Qwarp.py000066400000000000000000000127601413403311400242640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Qwarp def test_Qwarp_inputs(): input_map = dict( Qfinal=dict( argstr="-Qfinal", ), Qonly=dict( argstr="-Qonly", ), allineate=dict( argstr="-allineate", ), allineate_opts=dict( argstr="-allineate_opts %s", requires=["allineate"], ), allsave=dict( argstr="-allsave", xor=["nopadWARP", "duplo", "plusminus"], ), args=dict( argstr="%s", ), ballopt=dict( argstr="-ballopt", xor=["workhard", "boxopt"], ), base_file=dict( argstr="-base %s", copyfile=False, extensions=None, mandatory=True, ), baxopt=dict( argstr="-boxopt", xor=["workhard", "ballopt"], ), blur=dict( argstr="-blur %s", ), duplo=dict( argstr="-duplo", xor=["gridlist", "maxlev", "inilev", "iniwarp", "plusminus", "allsave"], ), emask=dict( argstr="-emask %s", copyfile=False, extensions=None, ), environ=dict( nohash=True, usedefault=True, ), expad=dict( argstr="-expad %d", xor=["nopadWARP"], ), gridlist=dict( argstr="-gridlist %s", copyfile=False, extensions=None, xor=["duplo", "plusminus"], ), hel=dict( argstr="-hel", xor=["nmi", "mi", "lpc", "lpa", "pear"], ), in_file=dict( argstr="-source %s", copyfile=False, extensions=None, mandatory=True, ), inilev=dict( argstr="-inilev %d", xor=["duplo"], ), iniwarp=dict( argstr="-iniwarp %s", xor=["duplo"], ), iwarp=dict( argstr="-iwarp", xor=["plusminus"], ), lpa=dict( argstr="-lpa", xor=["nmi", "mi", "lpc", "hel", "pear"], ), lpc=dict( argstr="-lpc", position=-2, xor=["nmi", "mi", "hel", "lpa", "pear"], ), maxlev=dict( argstr="-maxlev %d", position=-1, xor=["duplo"], ), mi=dict( argstr="-mi", xor=["mi", "hel", "lpc", "lpa", "pear"], ), minpatch=dict( argstr="-minpatch %d", ), nmi=dict( argstr="-nmi", xor=["nmi", "hel", "lpc", "lpa", "pear"], ), noXdis=dict( argstr="-noXdis", ), noYdis=dict( argstr="-noYdis", ), noZdis=dict( argstr="-noZdis", ), noneg=dict( 
argstr="-noneg", ), nopad=dict( argstr="-nopad", ), nopadWARP=dict( argstr="-nopadWARP", xor=["allsave", "expad"], ), nopenalty=dict( argstr="-nopenalty", ), nowarp=dict( argstr="-nowarp", ), noweight=dict( argstr="-noweight", ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source=["in_file"], name_template="ppp_%s", ), out_weight_file=dict( argstr="-wtprefix %s", extensions=None, ), outputtype=dict(), overwrite=dict( argstr="-overwrite", ), pblur=dict( argstr="-pblur %s", ), pear=dict( argstr="-pear", ), penfac=dict( argstr="-penfac %f", ), plusminus=dict( argstr="-plusminus", xor=["duplo", "allsave", "iwarp"], ), quiet=dict( argstr="-quiet", xor=["verb"], ), resample=dict( argstr="-resample", ), verb=dict( argstr="-verb", xor=["quiet"], ), wball=dict( argstr="-wball %s", xor=["wmask"], ), weight=dict( argstr="-weight %s", extensions=None, ), wmask=dict( argstr="-wpass %s %f", xor=["wball"], ), workhard=dict( argstr="-workhard", xor=["boxopt", "ballopt"], ), ) inputs = Qwarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Qwarp_outputs(): output_map = dict( base_warp=dict( extensions=None, ), source_warp=dict( extensions=None, ), warped_base=dict( extensions=None, ), warped_source=dict( extensions=None, ), weights=dict( extensions=None, ), ) outputs = Qwarp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_QwarpPlusMinus.py000066400000000000000000000133751413403311400261470ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import QwarpPlusMinus def test_QwarpPlusMinus_inputs(): input_map = dict( Qfinal=dict( argstr="-Qfinal", ), Qonly=dict( argstr="-Qonly", ), 
allineate=dict( argstr="-allineate", ), allineate_opts=dict( argstr="-allineate_opts %s", requires=["allineate"], ), allsave=dict( argstr="-allsave", xor=["nopadWARP", "duplo", "plusminus"], ), args=dict( argstr="%s", ), ballopt=dict( argstr="-ballopt", xor=["workhard", "boxopt"], ), base_file=dict( argstr="-base %s", copyfile=False, extensions=None, mandatory=True, ), baxopt=dict( argstr="-boxopt", xor=["workhard", "ballopt"], ), blur=dict( argstr="-blur %s", ), duplo=dict( argstr="-duplo", xor=["gridlist", "maxlev", "inilev", "iniwarp", "plusminus", "allsave"], ), emask=dict( argstr="-emask %s", copyfile=False, extensions=None, ), environ=dict( nohash=True, usedefault=True, ), expad=dict( argstr="-expad %d", xor=["nopadWARP"], ), gridlist=dict( argstr="-gridlist %s", copyfile=False, extensions=None, xor=["duplo", "plusminus"], ), hel=dict( argstr="-hel", xor=["nmi", "mi", "lpc", "lpa", "pear"], ), in_file=dict( argstr="-source %s", copyfile=False, extensions=None, mandatory=True, ), inilev=dict( argstr="-inilev %d", xor=["duplo"], ), iniwarp=dict( argstr="-iniwarp %s", xor=["duplo"], ), iwarp=dict( argstr="-iwarp", xor=["plusminus"], ), lpa=dict( argstr="-lpa", xor=["nmi", "mi", "lpc", "hel", "pear"], ), lpc=dict( argstr="-lpc", position=-2, xor=["nmi", "mi", "hel", "lpa", "pear"], ), maxlev=dict( argstr="-maxlev %d", position=-1, xor=["duplo"], ), mi=dict( argstr="-mi", xor=["mi", "hel", "lpc", "lpa", "pear"], ), minpatch=dict( argstr="-minpatch %d", ), nmi=dict( argstr="-nmi", xor=["nmi", "hel", "lpc", "lpa", "pear"], ), noXdis=dict( argstr="-noXdis", ), noYdis=dict( argstr="-noYdis", ), noZdis=dict( argstr="-noZdis", ), noneg=dict( argstr="-noneg", ), nopad=dict( argstr="-nopad", ), nopadWARP=dict( argstr="-nopadWARP", xor=["allsave", "expad"], ), nopenalty=dict( argstr="-nopenalty", ), nowarp=dict( argstr="-nowarp", ), noweight=dict( argstr="-noweight", ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, 
position=0, usedefault=True, ), out_weight_file=dict( argstr="-wtprefix %s", extensions=None, ), outputtype=dict(), overwrite=dict( argstr="-overwrite", ), pblur=dict( argstr="-pblur %s", ), pear=dict( argstr="-pear", ), penfac=dict( argstr="-penfac %f", ), plusminus=dict( argstr="-plusminus", position=1, usedefault=True, xor=["duplo", "allsave", "iwarp"], ), quiet=dict( argstr="-quiet", xor=["verb"], ), resample=dict( argstr="-resample", ), source_file=dict( argstr="-source %s", copyfile=False, deprecated="1.1.2", extensions=None, new_name="in_file", ), verb=dict( argstr="-verb", xor=["quiet"], ), wball=dict( argstr="-wball %s", xor=["wmask"], ), weight=dict( argstr="-weight %s", extensions=None, ), wmask=dict( argstr="-wpass %s %f", xor=["wball"], ), workhard=dict( argstr="-workhard", xor=["boxopt", "ballopt"], ), ) inputs = QwarpPlusMinus.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_QwarpPlusMinus_outputs(): output_map = dict( base_warp=dict( extensions=None, ), source_warp=dict( extensions=None, ), warped_base=dict( extensions=None, ), warped_source=dict( extensions=None, ), weights=dict( extensions=None, ), ) outputs = QwarpPlusMinus.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_ROIStats.py000066400000000000000000000044151413403311400246400ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import ROIStats def test_ROIStats_inputs(): input_map = dict( args=dict( argstr="%s", ), debug=dict( argstr="-debug", ), environ=dict( nohash=True, usedefault=True, ), format1D=dict( argstr="-1Dformat", xor=["format1DR"], ), format1DR=dict( argstr="-1DRformat", xor=["format1D"], ), in_file=dict( argstr="%s", extensions=None, 
mandatory=True, position=-2, ), mask=dict( argstr="-mask %s", deprecated="1.1.4", extensions=None, new_name="mask_file", position=3, ), mask_f2short=dict( argstr="-mask_f2short", ), mask_file=dict( argstr="-mask %s", extensions=None, ), nobriklab=dict( argstr="-nobriklab", ), nomeanout=dict( argstr="-nomeanout", ), num_roi=dict( argstr="-numroi %s", ), out_file=dict( argstr="> %s", extensions=None, keep_extension=False, name_source="in_file", name_template="%s_roistat.1D", position=-1, ), quiet=dict( argstr="-quiet", ), roisel=dict( argstr="-roisel %s", extensions=None, ), stat=dict( argstr="%s...", ), zerofill=dict( argstr="-zerofill %s", requires=["num_roi"], ), ) inputs = ROIStats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ROIStats_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ROIStats.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_ReHo.py000066400000000000000000000036051413403311400240250ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ReHo def test_ReHo_inputs(): input_map = dict( args=dict( argstr="%s", ), chi_sq=dict( argstr="-chi_sq", ), ellipsoid=dict( argstr="-neigh_X %s -neigh_Y %s -neigh_Z %s", xor=["sphere", "neighborhood"], ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-inset %s", extensions=None, mandatory=True, position=1, ), label_set=dict( argstr="-in_rois %s", extensions=None, ), mask_file=dict( argstr="-mask %s", extensions=None, ), neighborhood=dict( argstr="-nneigh %s", xor=["sphere", "ellipsoid"], ), out_file=dict( argstr="-prefix %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_reho", position=0, ), 
overwrite=dict( argstr="-overwrite", ), sphere=dict( argstr="-neigh_RAD %s", xor=["neighborhood", "ellipsoid"], ), ) inputs = ReHo.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ReHo_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_vals=dict( extensions=None, ), ) outputs = ReHo.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Refit.py000066400000000000000000000040411413403311400242340ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Refit def test_Refit_inputs(): input_map = dict( args=dict( argstr="%s", ), atrcopy=dict( argstr="-atrcopy %s %s", ), atrfloat=dict( argstr="-atrfloat %s %s", ), atrint=dict( argstr="-atrint %s %s", ), atrstring=dict( argstr="-atrstring %s %s", ), deoblique=dict( argstr="-deoblique", ), duporigin_file=dict( argstr="-duporigin %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-1, ), nosaveatr=dict( argstr="-nosaveatr", ), saveatr=dict( argstr="-saveatr", ), space=dict( argstr="-space %s", ), xdel=dict( argstr="-xdel %f", ), xorigin=dict( argstr="-xorigin %s", ), xyzscale=dict( argstr="-xyzscale %f", ), ydel=dict( argstr="-ydel %f", ), yorigin=dict( argstr="-yorigin %s", ), zdel=dict( argstr="-zdel %f", ), zorigin=dict( argstr="-zorigin %s", ), ) inputs = Refit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Refit_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Refit.output_spec() for key, metadata in list(output_map.items()): for 
metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Remlfit.py000066400000000000000000000114121413403311400245650ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import Remlfit def test_Remlfit_inputs(): input_map = dict( STATmask=dict( argstr="-STATmask %s", extensions=None, ), addbase=dict( argstr="-addbase %s", copyfile=False, sep=" ", ), args=dict( argstr="%s", ), automask=dict( argstr="-automask", usedefault=True, ), dsort=dict( argstr="-dsort %s", copyfile=False, extensions=None, ), dsort_nods=dict( argstr="-dsort_nods", requires=["dsort"], ), environ=dict( nohash=True, usedefault=True, ), errts_file=dict( argstr="-Rerrts %s", extensions=None, ), fitts_file=dict( argstr="-Rfitts %s", extensions=None, ), fout=dict( argstr="-fout", ), glt_file=dict( argstr="-Rglt %s", extensions=None, ), gltsym=dict( argstr='-gltsym "%s" %s...', ), goforit=dict( argstr="-GOFORIT", ), in_files=dict( argstr='-input "%s"', copyfile=False, mandatory=True, sep=" ", ), mask=dict( argstr="-mask %s", extensions=None, ), matim=dict( argstr="-matim %s", extensions=None, xor=["matrix"], ), matrix=dict( argstr="-matrix %s", extensions=None, mandatory=True, ), nobout=dict( argstr="-nobout", ), nodmbase=dict( argstr="-nodmbase", requires=["addbase", "dsort"], ), nofdr=dict( argstr="-noFDR", ), num_threads=dict( nohash=True, usedefault=True, ), obeta=dict( argstr="-Obeta %s", extensions=None, ), obuck=dict( argstr="-Obuck %s", extensions=None, ), oerrts=dict( argstr="-Oerrts %s", extensions=None, ), ofitts=dict( argstr="-Ofitts %s", extensions=None, ), oglt=dict( argstr="-Oglt %s", extensions=None, ), out_file=dict( argstr="-Rbuck %s", extensions=None, ), outputtype=dict(), ovar=dict( argstr="-Ovar %s", extensions=None, ), polort=dict( argstr="-polort %d", xor=["matrix"], ), quiet=dict( argstr="-quiet", ), rbeta_file=dict( argstr="-Rbeta %s", 
extensions=None, ), rout=dict( argstr="-rout", ), slibase=dict( argstr="-slibase %s", ), slibase_sm=dict( argstr="-slibase_sm %s", ), tout=dict( argstr="-tout", ), usetemp=dict( argstr="-usetemp", ), var_file=dict( argstr="-Rvar %s", extensions=None, ), verb=dict( argstr="-verb", ), wherr_file=dict( argstr="-Rwherr %s", extensions=None, ), ) inputs = Remlfit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Remlfit_outputs(): output_map = dict( errts_file=dict( extensions=None, ), fitts_file=dict( extensions=None, ), glt_file=dict( extensions=None, ), obeta=dict( extensions=None, ), obuck=dict( extensions=None, ), oerrts=dict( extensions=None, ), ofitts=dict( extensions=None, ), oglt=dict( extensions=None, ), out_file=dict( extensions=None, ), ovar=dict( extensions=None, ), rbeta_file=dict( extensions=None, ), var_file=dict( extensions=None, ), wherr_file=dict( extensions=None, ), ) outputs = Remlfit.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Resample.py000066400000000000000000000030751413403311400247410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Resample def test_Resample_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-inset %s", copyfile=False, extensions=None, mandatory=True, position=-1, ), master=dict( argstr="-master %s", extensions=None, ), num_threads=dict( nohash=True, usedefault=True, ), orientation=dict( argstr="-orient %s", ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_resample", ), outputtype=dict(), resample_mode=dict( argstr="-rmode %s", ), voxel_size=dict( argstr="-dxyz %f %f %f", 
), ) inputs = Resample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Resample_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Resample.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Retroicor.py000066400000000000000000000037731413403311400251460ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Retroicor def test_Retroicor_inputs(): input_map = dict( args=dict( argstr="%s", ), card=dict( argstr="-card %s", extensions=None, position=-2, ), cardphase=dict( argstr="-cardphase %s", extensions=None, hash_files=False, position=-6, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), num_threads=dict( nohash=True, usedefault=True, ), order=dict( argstr="-order %s", position=-5, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source=["in_file"], name_template="%s_retroicor", position=1, ), outputtype=dict(), resp=dict( argstr="-resp %s", extensions=None, position=-3, ), respphase=dict( argstr="-respphase %s", extensions=None, hash_files=False, position=-7, ), threshold=dict( argstr="-threshold %d", position=-4, ), ) inputs = Retroicor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Retroicor_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Retroicor.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_SVMTest.py000066400000000000000000000033121413403311400244700ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..svm import SVMTest def test_SVMTest_inputs(): input_map = dict( args=dict( argstr="%s", ), classout=dict( argstr="-classout", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-testvol %s", extensions=None, mandatory=True, ), model=dict( argstr="-model %s", mandatory=True, ), multiclass=dict( argstr="-multiclass %s", ), nodetrend=dict( argstr="-nodetrend", ), nopredcensord=dict( argstr="-nopredcensord", ), num_threads=dict( nohash=True, usedefault=True, ), options=dict( argstr="%s", ), out_file=dict( argstr="-predictions %s", extensions=None, name_template="%s_predictions", ), outputtype=dict(), testlabels=dict( argstr="-testlabels %s", extensions=None, ), ) inputs = SVMTest.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SVMTest_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = SVMTest.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_SVMTrain.py000066400000000000000000000050141413403311400246270ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..svm import SVMTrain def test_SVMTrain_inputs(): input_map = dict( alphas=dict( argstr="-alpha %s", extensions=None, name_source="in_file", name_template="%s_alphas", suffix="_alphas", ), args=dict( argstr="%s", ), censor=dict( argstr="-censor %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-trainvol %s", copyfile=False, extensions=None, mandatory=True, ), kernel=dict( argstr="-kernel %s", ), mask=dict( 
argstr="-mask %s", copyfile=False, extensions=None, position=-1, ), max_iterations=dict( argstr="-max_iterations %d", ), model=dict( argstr="-model %s", extensions=None, name_source="in_file", name_template="%s_model", suffix="_model", ), nomodelmask=dict( argstr="-nomodelmask", ), num_threads=dict( nohash=True, usedefault=True, ), options=dict( argstr="%s", ), out_file=dict( argstr="-bucket %s", extensions=None, name_source="in_file", name_template="%s_vectors", suffix="_bucket", ), outputtype=dict(), trainlabels=dict( argstr="-trainlabels %s", extensions=None, ), ttype=dict( argstr="-type %s", mandatory=True, ), w_out=dict( argstr="-wout", ), ) inputs = SVMTrain.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SVMTrain_outputs(): output_map = dict( alphas=dict( extensions=None, ), model=dict( extensions=None, ), out_file=dict( extensions=None, ), ) outputs = SVMTrain.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Seg.py000066400000000000000000000032501413403311400237020ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Seg def test_Seg_inputs(): input_map = dict( args=dict( argstr="%s", ), bias_classes=dict( argstr="-bias_classes %s", ), bias_fwhm=dict( argstr="-bias_fwhm %f", ), blur_meth=dict( argstr="-blur_meth %s", ), bmrf=dict( argstr="-bmrf %f", ), classes=dict( argstr="-classes %s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-anat %s", copyfile=True, extensions=None, mandatory=True, position=-1, ), main_N=dict( argstr="-main_N %d", ), mask=dict( argstr="-mask %s", mandatory=True, position=-2, ), mixfloor=dict( argstr="-mixfloor %f", ), mixfrac=dict( argstr="-mixfrac %s", ), prefix=dict( 
argstr="-prefix %s", ), ) inputs = Seg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Seg_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Seg.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_SkullStrip.py000066400000000000000000000024311413403311400253000ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import SkullStrip def test_SkullStrip_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-input %s", copyfile=False, extensions=None, mandatory=True, position=1, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_skullstrip", ), outputtype=dict(), ) inputs = SkullStrip.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SkullStrip_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = SkullStrip.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Synthesize.py000066400000000000000000000031741413403311400253360ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import Synthesize def test_Synthesize_inputs(): input_map = dict( TR=dict( argstr="-TR %f", ), args=dict( argstr="%s", ), cbucket=dict( argstr="-cbucket %s", copyfile=False, extensions=None, mandatory=True, ), cenfill=dict( argstr="-cenfill 
%s", ), dry_run=dict( argstr="-dry", ), environ=dict( nohash=True, usedefault=True, ), matrix=dict( argstr="-matrix %s", copyfile=False, extensions=None, mandatory=True, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_template="syn", ), outputtype=dict(), select=dict( argstr="-select %s", mandatory=True, ), ) inputs = Synthesize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Synthesize_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Synthesize.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_TCat.py000066400000000000000000000025371413403311400240260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import TCat def test_TCat_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( argstr=" %s", copyfile=False, mandatory=True, position=-1, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_files", name_template="%s_tcat", ), outputtype=dict(), rlt=dict( argstr="-rlt%s", position=1, ), verbose=dict( argstr="-verb", ), ) inputs = TCat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TCat_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TCat.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_TCatSubBrick.py000066400000000000000000000024401413403311400254440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import TCatSubBrick def test_TCatSubBrick_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( argstr="%s%s ...", copyfile=False, mandatory=True, position=-1, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, genfile=True, ), outputtype=dict(), rlt=dict( argstr="-rlt%s", position=1, ), ) inputs = TCatSubBrick.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TCatSubBrick_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TCatSubBrick.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_TCorr1D.py000066400000000000000000000037421413403311400244100ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import TCorr1D def test_TCorr1D_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), ktaub=dict( argstr=" -ktaub", position=1, xor=["pearson", "spearman", "quadrant"], ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, keep_extension=True, name_source="xset", name_template="%s_correlation.nii.gz", ), outputtype=dict(), pearson=dict( argstr=" -pearson", position=1, xor=["spearman", "quadrant", "ktaub"], ), quadrant=dict( argstr=" -quadrant", position=1, xor=["pearson", "spearman", "ktaub"], ), spearman=dict( argstr=" -spearman", position=1, xor=["pearson", "quadrant", "ktaub"], ), xset=dict( 
argstr=" %s", copyfile=False, extensions=None, mandatory=True, position=-2, ), y_1d=dict( argstr=" %s", extensions=None, mandatory=True, position=-1, ), ) inputs = TCorr1D.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TCorr1D_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TCorr1D.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_TCorrMap.py000066400000000000000000000131251413403311400246550ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import TCorrMap def test_TCorrMap_inputs(): input_map = dict( absolute_threshold=dict( argstr="-Thresh %f %s", extensions=None, name_source="in_file", suffix="_thresh", xor=( "absolute_threshold", "var_absolute_threshold", "var_absolute_threshold_normalize", ), ), args=dict( argstr="%s", ), automask=dict( argstr="-automask", ), average_expr=dict( argstr="-Aexpr %s %s", extensions=None, name_source="in_file", suffix="_aexpr", xor=("average_expr", "average_expr_nonzero", "sum_expr"), ), average_expr_nonzero=dict( argstr="-Cexpr %s %s", extensions=None, name_source="in_file", suffix="_cexpr", xor=("average_expr", "average_expr_nonzero", "sum_expr"), ), bandpass=dict( argstr="-bpass %f %f", ), blur_fwhm=dict( argstr="-Gblur %f", ), correlation_maps=dict( argstr="-CorrMap %s", extensions=None, name_source="in_file", ), correlation_maps_masked=dict( argstr="-CorrMask %s", extensions=None, name_source="in_file", ), environ=dict( nohash=True, usedefault=True, ), expr=dict(), histogram=dict( argstr="-Hist %d %s", extensions=None, name_source="in_file", suffix="_hist", ), histogram_bin_numbers=dict(), in_file=dict( argstr="-input %s", copyfile=False, extensions=None, 
mandatory=True, ), mask=dict( argstr="-mask %s", extensions=None, ), mean_file=dict( argstr="-Mean %s", extensions=None, name_source="in_file", suffix="_mean", ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source=["in_file"], name_template="%s_afni", ), outputtype=dict(), pmean=dict( argstr="-Pmean %s", extensions=None, name_source="in_file", suffix="_pmean", ), polort=dict( argstr="-polort %d", ), qmean=dict( argstr="-Qmean %s", extensions=None, name_source="in_file", suffix="_qmean", ), regress_out_timeseries=dict( argstr="-ort %s", extensions=None, ), seeds=dict( argstr="-seed %s", extensions=None, xor="seeds_width", ), seeds_width=dict( argstr="-Mseed %f", xor="seeds", ), sum_expr=dict( argstr="-Sexpr %s %s", extensions=None, name_source="in_file", suffix="_sexpr", xor=("average_expr", "average_expr_nonzero", "sum_expr"), ), thresholds=dict(), var_absolute_threshold=dict( argstr="-VarThresh %f %f %f %s", extensions=None, name_source="in_file", suffix="_varthresh", xor=( "absolute_threshold", "var_absolute_threshold", "var_absolute_threshold_normalize", ), ), var_absolute_threshold_normalize=dict( argstr="-VarThreshN %f %f %f %s", extensions=None, name_source="in_file", suffix="_varthreshn", xor=( "absolute_threshold", "var_absolute_threshold", "var_absolute_threshold_normalize", ), ), zmean=dict( argstr="-Zmean %s", extensions=None, name_source="in_file", suffix="_zmean", ), ) inputs = TCorrMap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TCorrMap_outputs(): output_map = dict( absolute_threshold=dict( extensions=None, ), average_expr=dict( extensions=None, ), average_expr_nonzero=dict( extensions=None, ), correlation_maps=dict( extensions=None, ), correlation_maps_masked=dict( extensions=None, ), histogram=dict( extensions=None, ), mean_file=dict( extensions=None, ), 
pmean=dict( extensions=None, ), qmean=dict( extensions=None, ), sum_expr=dict( extensions=None, ), var_absolute_threshold=dict( extensions=None, ), var_absolute_threshold_normalize=dict( extensions=None, ), zmean=dict( extensions=None, ), ) outputs = TCorrMap.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_TCorrelate.py000066400000000000000000000030561413403311400252340ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import TCorrelate def test_TCorrelate_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="xset", name_template="%s_tcorr", ), outputtype=dict(), pearson=dict( argstr="-pearson", ), polort=dict( argstr="-polort %d", ), xset=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-2, ), yset=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), ) inputs = TCorrelate.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TCorrelate_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TCorrelate.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_TNorm.py000066400000000000000000000031421413403311400242230ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import TNorm def test_TNorm_inputs(): input_map = dict( L1fit=dict( argstr="-L1fit", ), args=dict( argstr="%s", ), environ=dict( 
nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), norm1=dict( argstr="-norm1", ), norm2=dict( argstr="-norm2", ), normR=dict( argstr="-normR", ), normx=dict( argstr="-normx", ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_tnorm", ), outputtype=dict(), polort=dict( argstr="-polort %s", ), ) inputs = TNorm.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TNorm_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TNorm.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_TProject.py000066400000000000000000000045731413403311400247270ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import TProject def test_TProject_inputs(): input_map = dict( TR=dict( argstr="-TR %g", ), args=dict( argstr="%s", ), automask=dict( argstr="-automask", xor=["mask"], ), bandpass=dict( argstr="-bandpass %g %g", ), blur=dict( argstr="-blur %g", ), cenmode=dict( argstr="-cenmode %s", ), censor=dict( argstr="-censor %s", extensions=None, ), censortr=dict( argstr="-CENSORTR %s", ), concat=dict( argstr="-concat %s", extensions=None, ), dsort=dict( argstr="-dsort %s...", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-input %s", copyfile=False, extensions=None, mandatory=True, position=1, ), mask=dict( argstr="-mask %s", extensions=None, ), noblock=dict( argstr="-noblock", ), norm=dict( argstr="-norm", ), num_threads=dict( nohash=True, usedefault=True, ), ort=dict( argstr="-ort %s", extensions=None, ), out_file=dict( argstr="-prefix %s", 
extensions=None, name_source="in_file", name_template="%s_tproject", position=-1, ), outputtype=dict(), polort=dict( argstr="-polort %d", ), stopband=dict( argstr="-stopband %g %g", ), ) inputs = TProject.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TProject_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TProject.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_TShift.py000066400000000000000000000040721413403311400243700ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import TShift def test_TShift_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), ignore=dict( argstr="-ignore %s", ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), interp=dict( argstr="-%s", ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_tshift", ), outputtype=dict(), rlt=dict( argstr="-rlt", ), rltplus=dict( argstr="-rlt+", ), slice_encoding_direction=dict( usedefault=True, ), slice_timing=dict( argstr="-tpattern @%s", xor=["tpattern"], ), tpattern=dict( argstr="-tpattern %s", xor=["slice_timing"], ), tr=dict( argstr="-TR %s", ), tslice=dict( argstr="-slice %s", xor=["tzero"], ), tzero=dict( argstr="-tzero %s", xor=["tslice"], ), ) inputs = TShift.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TShift_outputs(): output_map = dict( out_file=dict( extensions=None, ), timing_file=dict( extensions=None, ), ) outputs = 
TShift.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_TSmooth.py000066400000000000000000000035171413403311400245670ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import TSmooth def test_TSmooth_inputs(): input_map = dict( adaptive=dict( argstr="-adaptive %d", ), args=dict( argstr="%s", ), blackman=dict( argstr="-blackman %d", ), custom=dict( argstr="-custom %s", extensions=None, ), datum=dict( argstr="-datum %s", ), environ=dict( nohash=True, usedefault=True, ), hamming=dict( argstr="-hamming %d", ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), lin=dict( argstr="-lin", ), lin3=dict( argstr="-3lin %d", ), med=dict( argstr="-med", ), num_threads=dict( nohash=True, usedefault=True, ), osf=dict( argstr="-osf", ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_smooth", ), outputtype=dict(), ) inputs = TSmooth.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TSmooth_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TSmooth.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_TStat.py000066400000000000000000000026041413403311400242250ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import TStat def test_TStat_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), mask=dict( 
argstr="-mask %s", extensions=None, ), num_threads=dict( nohash=True, usedefault=True, ), options=dict( argstr="%s", ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_tstat", ), outputtype=dict(), ) inputs = TStat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TStat_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TStat.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_To3D.py000066400000000000000000000030371413403311400237400ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import To3D def test_To3D_inputs(): input_map = dict( args=dict( argstr="%s", ), assumemosaic=dict( argstr="-assume_dicom_mosaic", ), datatype=dict( argstr="-datum %s", ), environ=dict( nohash=True, usedefault=True, ), filetype=dict( argstr="-%s", ), funcparams=dict( argstr="-time:zt %s alt+z2", ), in_folder=dict( argstr="%s/*.dcm", mandatory=True, position=-1, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source=["in_folder"], name_template="%s", ), outputtype=dict(), skipoutliers=dict( argstr="-skip_outliers", ), ) inputs = To3D.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_To3D_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = To3D.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Undump.py000066400000000000000000000034201413403311400244330ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Undump def test_Undump_inputs(): input_map = dict( args=dict( argstr="%s", ), coordinates_specification=dict( argstr="-%s", ), datatype=dict( argstr="-datum %s", ), default_value=dict( argstr="-dval %f", ), environ=dict( nohash=True, usedefault=True, ), fill_value=dict( argstr="-fval %f", ), head_only=dict( argstr="-head_only", ), in_file=dict( argstr="-master %s", copyfile=False, extensions=None, mandatory=True, position=-1, ), mask_file=dict( argstr="-mask %s", extensions=None, ), num_threads=dict( nohash=True, usedefault=True, ), orient=dict( argstr="-orient %s", ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", ), outputtype=dict(), srad=dict( argstr="-srad %f", ), ) inputs = Undump.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Undump_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Undump.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Unifize.py000066400000000000000000000040061413403311400245750ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Unifize def test_Unifize_inputs(): input_map = dict( args=dict( argstr="%s", ), cl_frac=dict( argstr="-clfrac %f", ), environ=dict( nohash=True, usedefault=True, ), epi=dict( argstr="-EPI", requires=["no_duplo", "t2"], xor=["gm"], ), gm=dict( argstr="-GM", ), in_file=dict( argstr="-input %s", copyfile=False, extensions=None, mandatory=True, position=-1, ), no_duplo=dict( argstr="-noduplo", ), num_threads=dict( 
nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_unifized", ), outputtype=dict(), quiet=dict( argstr="-quiet", ), rbt=dict( argstr="-rbt %f %f %f", ), scale_file=dict( argstr="-ssave %s", extensions=None, ), t2=dict( argstr="-T2", ), t2_up=dict( argstr="-T2up %f", ), urad=dict( argstr="-Urad %s", ), ) inputs = Unifize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Unifize_outputs(): output_map = dict( out_file=dict( extensions=None, ), scale_file=dict( extensions=None, ), ) outputs = Unifize.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Volreg.py000066400000000000000000000051521413403311400244250ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Volreg def test_Volreg_inputs(): input_map = dict( args=dict( argstr="%s", ), basefile=dict( argstr="-base %s", extensions=None, position=-6, ), copyorigin=dict( argstr="-twodup", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), in_weight_volume=dict( argstr="-weight '%s[%d]'", ), interp=dict( argstr="-%s", ), md1d_file=dict( argstr="-maxdisp1D %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_md.1D", position=-4, ), num_threads=dict( nohash=True, usedefault=True, ), oned_file=dict( argstr="-1Dfile %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s.1D", ), oned_matrix_save=dict( argstr="-1Dmatrix_save %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s.aff12.1D", ), out_file=dict( argstr="-prefix %s", extensions=None, 
name_source="in_file", name_template="%s_volreg", ), outputtype=dict(), timeshift=dict( argstr="-tshift 0", ), verbose=dict( argstr="-verbose", ), zpad=dict( argstr="-zpad %d", position=-5, ), ) inputs = Volreg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Volreg_outputs(): output_map = dict( md1d_file=dict( extensions=None, ), oned_file=dict( extensions=None, ), oned_matrix_save=dict( extensions=None, ), out_file=dict( extensions=None, ), ) outputs = Volreg.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Warp.py000066400000000000000000000042021413403311400240730ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Warp def test_Warp_inputs(): input_map = dict( args=dict( argstr="%s", ), deoblique=dict( argstr="-deoblique", ), environ=dict( nohash=True, usedefault=True, ), gridset=dict( argstr="-gridset %s", extensions=None, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), interp=dict( argstr="-%s", ), matparent=dict( argstr="-matparent %s", extensions=None, ), mni2tta=dict( argstr="-mni2tta", ), newgrid=dict( argstr="-newgrid %f", ), num_threads=dict( nohash=True, usedefault=True, ), oblique_parent=dict( argstr="-oblique_parent %s", extensions=None, ), out_file=dict( argstr="-prefix %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_warp", ), outputtype=dict(), save_warp=dict( requires=["verbose"], ), tta2mni=dict( argstr="-tta2mni", ), verbose=dict( argstr="-verb", ), zpad=dict( argstr="-zpad %d", ), ) inputs = Warp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value def test_Warp_outputs(): output_map = dict( out_file=dict( extensions=None, ), warp_file=dict( extensions=None, ), ) outputs = Warp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_ZCutUp.py000066400000000000000000000024631413403311400243630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ZCutUp def test_ZCutUp_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), keep=dict( argstr="-keep %s", ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_file", name_template="%s_zcutup", ), outputtype=dict(), ) inputs = ZCutUp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ZCutUp_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ZCutUp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Zcat.py000066400000000000000000000027741413403311400240770ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Zcat def test_Zcat_inputs(): input_map = dict( args=dict( argstr="%s", ), datum=dict( argstr="-datum %s", ), environ=dict( nohash=True, usedefault=True, ), fscale=dict( argstr="-fscale", xor=["nscale"], ), in_files=dict( argstr="%s", copyfile=False, mandatory=True, position=-1, ), nscale=dict( argstr="-nscale", xor=["fscale"], ), num_threads=dict( 
nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_source="in_files", name_template="%s_zcat", ), outputtype=dict(), verb=dict( argstr="-verb", ), ) inputs = Zcat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Zcat_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Zcat.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_auto_Zeropad.py000066400000000000000000000044301413403311400245710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Zeropad def test_Zeropad_inputs(): input_map = dict( A=dict( argstr="-A %i", xor=["master"], ), AP=dict( argstr="-AP %i", xor=["master"], ), I=dict( argstr="-I %i", xor=["master"], ), IS=dict( argstr="-IS %i", xor=["master"], ), L=dict( argstr="-L %i", xor=["master"], ), P=dict( argstr="-P %i", xor=["master"], ), R=dict( argstr="-R %i", xor=["master"], ), RL=dict( argstr="-RL %i", xor=["master"], ), S=dict( argstr="-S %i", xor=["master"], ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-1, ), master=dict( argstr="-master %s", extensions=None, xor=["I", "S", "A", "P", "L", "R", "z", "RL", "AP", "IS", "mm"], ), mm=dict( argstr="-mm", xor=["master"], ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="-prefix %s", extensions=None, name_template="zeropad", ), outputtype=dict(), z=dict( argstr="-z %i", xor=["master"], ), ) inputs = Zeropad.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def 
test_Zeropad_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Zeropad.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/afni/tests/test_extra_Deconvolve.py000066400000000000000000000005141413403311400254430ustar00rootroot00000000000000"""Test afni deconvolve""" from ..model import Deconvolve def test_x1dstop(): deconv = Deconvolve() deconv.inputs.out_file = "file.nii" assert "out_file" in deconv._list_outputs() deconv.inputs.x1D_stop = True assert "out_file" not in deconv._list_outputs() assert "cbucket" not in deconv._list_outputs() nipype-1.7.0/nipype/interfaces/afni/utils.py000066400000000000000000003456321413403311400211100ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """AFNI utility interfaces.""" import os import os.path as op import re import numpy as np from ...utils.filemanip import load_json, save_json, split_filename from ..base import ( CommandLineInputSpec, CommandLine, Directory, TraitedSpec, traits, isdefined, File, InputMultiObject, InputMultiPath, Undefined, Str, ) from ...external.due import BibTeX from .base import ( AFNICommandBase, AFNICommand, AFNICommandInputSpec, AFNICommandOutputSpec, AFNIPythonCommandInputSpec, AFNIPythonCommand, ) class ABoverlapInputSpec(AFNICommandInputSpec): in_file_a = File( desc="input file A", argstr="%s", position=-3, mandatory=True, exists=True, copyfile=False, ) in_file_b = File( desc="input file B", argstr="%s", position=-2, mandatory=True, exists=True, copyfile=False, ) out_file = File(desc="collect output to a file", argstr=" |& tee %s", position=-1) no_automask = traits.Bool( desc="consider input datasets as masks", argstr="-no_automask" ) quiet = traits.Bool( desc="be as quiet as possible (without being entirely mute)", 
argstr="-quiet" ) verb = traits.Bool( desc="print out some progress reports (to stderr)", argstr="-verb" ) class ABoverlap(AFNICommand): """Output (to screen) is a count of various things about how the automasks of datasets A and B overlap or don't overlap. For complete details, see the `3dABoverlap Documentation. `_ Examples -------- >>> from nipype.interfaces import afni >>> aboverlap = afni.ABoverlap() >>> aboverlap.inputs.in_file_a = 'functional.nii' >>> aboverlap.inputs.in_file_b = 'structural.nii' >>> aboverlap.inputs.out_file = 'out.mask_ae_overlap.txt' >>> aboverlap.cmdline '3dABoverlap functional.nii structural.nii |& tee out.mask_ae_overlap.txt' >>> res = aboverlap.run() # doctest: +SKIP """ _cmd = "3dABoverlap" input_spec = ABoverlapInputSpec output_spec = AFNICommandOutputSpec class AFNItoNIFTIInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dAFNItoNIFTI", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s.nii", desc="output image file name", argstr="-prefix %s", name_source="in_file", hash_files=False, ) float_ = traits.Bool( desc="Force the output dataset to be 32-bit floats. This option " "should be used when the input AFNI dataset has different float " "scale factors for different sub-bricks, an option that " "NIfTI-1.1 does not support.", argstr="-float", ) pure = traits.Bool( desc="Do NOT write an AFNI extension field into the output file. Only " "use this option if needed. 
You can also use the 'nifti_tool' " "program to strip extensions from a file.", argstr="-pure", ) denote = traits.Bool( desc="When writing the AFNI extension field, remove text notes that " "might contain subject identifying information.", argstr="-denote", ) oldid = traits.Bool( desc="Give the new dataset the input dataset" "s AFNI ID code.", argstr="-oldid", xor=["newid"], ) newid = traits.Bool( desc="Give the new dataset a new AFNI ID code, to distinguish it from " "the input dataset.", argstr="-newid", xor=["oldid"], ) class AFNItoNIFTI(AFNICommand): """Converts AFNI format files to NIFTI format. This can also convert 2D or 1D data, which you can numpy.squeeze() to remove extra dimensions. For complete details, see the `3dAFNItoNIFTI Documentation. `_ Examples -------- >>> from nipype.interfaces import afni >>> a2n = afni.AFNItoNIFTI() >>> a2n.inputs.in_file = 'afni_output.3D' >>> a2n.inputs.out_file = 'afni_output.nii' >>> a2n.cmdline '3dAFNItoNIFTI -prefix afni_output.nii afni_output.3D' >>> res = a2n.run() # doctest: +SKIP """ _cmd = "3dAFNItoNIFTI" input_spec = AFNItoNIFTIInputSpec output_spec = AFNICommandOutputSpec def _overload_extension(self, value, name=None): path, base, ext = split_filename(value) if ext.lower() not in [".nii", ".nii.gz", ".1d", ".1D"]: ext += ".nii" return os.path.join(path, base + ext) def _gen_filename(self, name): return os.path.abspath(super(AFNItoNIFTI, self)._gen_filename(name)) class AutoboxInputSpec(AFNICommandInputSpec): in_file = File( exists=True, mandatory=True, argstr="-input %s", desc="input file", copyfile=False, ) padding = traits.Int( argstr="-npad %d", desc="Number of extra voxels to pad on each side of box" ) out_file = File( argstr="-prefix %s", name_source="in_file", name_template="%s_autobox" ) no_clustering = traits.Bool( argstr="-noclust", desc="Don't do any clustering to find box. Any non-zero voxel will " "be preserved in the cropped volume. 
The default method uses " "some clustering to find the cropping box, and will clip off " "small isolated blobs.", ) class AutoboxOutputSpec(TraitedSpec): # out_file not mandatory x_min = traits.Int() x_max = traits.Int() y_min = traits.Int() y_max = traits.Int() z_min = traits.Int() z_max = traits.Int() out_file = File(desc="output file") class Autobox(AFNICommand): """Computes size of a box that fits around the volume. Also can be used to crop the volume to that box. For complete details, see the `3dAutobox Documentation. `_ Examples -------- >>> from nipype.interfaces import afni >>> abox = afni.Autobox() >>> abox.inputs.in_file = 'structural.nii' >>> abox.inputs.padding = 5 >>> abox.cmdline '3dAutobox -input structural.nii -prefix structural_autobox -npad 5' >>> res = abox.run() # doctest: +SKIP """ _cmd = "3dAutobox" input_spec = AutoboxInputSpec output_spec = AutoboxOutputSpec def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = super(Autobox, self).aggregate_outputs(runtime, needed_outputs) pattern = ( r"x=(?P-?\d+)\.\.(?P-?\d+) " r"y=(?P-?\d+)\.\.(?P-?\d+) " r"z=(?P-?\d+)\.\.(?P-?\d+)" ) for line in runtime.stderr.split("\n"): m = re.search(pattern, line) if m: d = m.groupdict() outputs.trait_set(**{k: int(d[k]) for k in d.keys()}) return outputs class BrickStatInputSpec(CommandLineInputSpec): in_file = File( desc="input file to 3dmaskave", argstr="%s", position=-1, mandatory=True, exists=True, ) mask = File( desc="-mask dset = use dset as mask to include/exclude voxels", argstr="-mask %s", position=2, exists=True, ) min = traits.Bool( desc="print the minimum value in dataset", argstr="-min", position=1 ) slow = traits.Bool( desc="read the whole dataset to find the min and max values", argstr="-slow" ) max = traits.Bool(desc="print the maximum value in the dataset", argstr="-max") mean = traits.Bool(desc="print the mean value in the dataset", argstr="-mean") sum = traits.Bool(desc="print the sum of values in the dataset", argstr="-sum") 
var = traits.Bool(desc="print the variance in the dataset", argstr="-var") percentile = traits.Tuple( traits.Float, traits.Float, traits.Float, desc="p0 ps p1 write the percentile values starting " "at p0% and ending at p1% at a step of ps%. " "only one sub-brick is accepted.", argstr="-percentile %.3f %.3f %.3f", ) class BrickStatOutputSpec(TraitedSpec): min_val = traits.Float(desc="output") class BrickStat(AFNICommandBase): """Computes maximum and/or minimum voxel values of an input dataset. TODO Add optional arguments. For complete details, see the `3dBrickStat Documentation. `_ Examples -------- >>> from nipype.interfaces import afni >>> brickstat = afni.BrickStat() >>> brickstat.inputs.in_file = 'functional.nii' >>> brickstat.inputs.mask = 'skeleton_mask.nii.gz' >>> brickstat.inputs.min = True >>> brickstat.cmdline '3dBrickStat -min -mask skeleton_mask.nii.gz functional.nii' >>> res = brickstat.run() # doctest: +SKIP """ _cmd = "3dBrickStat" input_spec = BrickStatInputSpec output_spec = BrickStatOutputSpec def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() outfile = os.path.join(os.getcwd(), "stat_result.json") if runtime is None: try: min_val = load_json(outfile)["stat"] except IOError: return self.run().outputs else: min_val = [] for line in runtime.stdout.split("\n"): if line: values = line.split() if len(values) > 1: min_val.append([float(val) for val in values]) else: min_val.extend([float(val) for val in values]) if len(min_val) == 1: min_val = min_val[0] save_json(outfile, dict(stat=min_val)) outputs.min_val = min_val return outputs class BucketInputSpec(AFNICommandInputSpec): in_file = traits.List( traits.Tuple( (File(exists=True, copyfile=False), traits.Str(argstr="'%s'")), artstr="%s%s", ), position=-1, mandatory=True, argstr="%s", desc="""\ List of tuples of input datasets and subbrick selection strings as described in more detail in the following afni help string Input dataset specified using one of these 
forms: ``prefix+view``, ``prefix+view.HEAD``, or ``prefix+view.BRIK``. You can also add a sub-brick selection list after the end of the dataset name. This allows only a subset of the sub-bricks to be included into the output (by default, all of the input dataset is copied into the output). A sub-brick selection list looks like one of the following forms:: fred+orig[5] ==> use only sub-brick #5 fred+orig[5,9,17] ==> use #5, #9, and #17 fred+orig[5..8] or [5-8] ==> use #5, #6, #7, and #8 fred+orig[5..13(2)] or [5-13(2)] ==> use #5, #7, #9, #11, and #13 Sub-brick indexes start at 0. You can use the character '$' to indicate the last sub-brick in a dataset; for example, you can select every third sub-brick by using the selection list ``fred+orig[0..$(3)]`` N.B.: The sub-bricks are output in the order specified, which may not be the order in the original datasets. For example, using ``fred+orig[0..$(2),1..$(2)]`` will cause the sub-bricks in fred+orig to be output into the new dataset in an interleaved fashion. Using ``fred+orig[$..0]`` will reverse the order of the sub-bricks in the output. N.B.: Bucket datasets have multiple sub-bricks, but do NOT have a time dimension. You can input sub-bricks from a 3D+time dataset into a bucket dataset. You can use the '3dinfo' program to see how many sub-bricks a 3D+time or a bucket dataset contains. N.B.: In non-bucket functional datasets (like the 'fico' datasets output by FIM, or the 'fitt' datasets output by 3dttest), sub-brick ``[0]`` is the 'intensity' and sub-brick [1] is the statistical parameter used as a threshold. Thus, to create a bucket dataset using the intensity from dataset A and the threshold from dataset B, and calling the output dataset C, you would type:: 3dbucket -prefix C -fbuc 'A+orig[0]' -fbuc 'B+orig[1] """, ) out_file = File(argstr="-prefix %s", name_template="buck") class Bucket(AFNICommand): """Concatenate sub-bricks from input datasets into one big 'bucket' dataset. .. 
danger:: Using this program, it is possible to create a dataset that has different basic datum types for different sub-bricks (e.g., shorts for brick 0, floats for brick 1). Do NOT do this! Very few AFNI programs will work correctly with such datasets! Examples -------- >>> from nipype.interfaces import afni >>> bucket = afni.Bucket() >>> bucket.inputs.in_file = [('functional.nii',"{2..$}"), ('functional.nii',"{1}")] >>> bucket.inputs.out_file = 'vr_base' >>> bucket.cmdline "3dbucket -prefix vr_base functional.nii'{2..$}' functional.nii'{1}'" >>> res = bucket.run() # doctest: +SKIP See Also -------- For complete details, see the `3dbucket Documentation. `__. """ _cmd = "3dbucket" input_spec = BucketInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, spec, value): if name == "in_file": return spec.argstr % (" ".join([i[0] + "'" + i[1] + "'" for i in value])) return super(Bucket, self)._format_arg(name, spec, value) class CalcInputSpec(AFNICommandInputSpec): in_file_a = File( desc="input file to 3dcalc", argstr="-a %s", position=0, mandatory=True, exists=True, ) in_file_b = File( desc="operand file to 3dcalc", argstr="-b %s", position=1, exists=True ) in_file_c = File( desc="operand file to 3dcalc", argstr="-c %s", position=2, exists=True ) out_file = File( name_template="%s_calc", desc="output image file name", argstr="-prefix %s", name_source="in_file_a", ) expr = Str(desc="expr", argstr='-expr "%s"', position=3, mandatory=True) start_idx = traits.Int(desc="start index for in_file_a", requires=["stop_idx"]) stop_idx = traits.Int(desc="stop index for in_file_a", requires=["start_idx"]) single_idx = traits.Int(desc="volume index for in_file_a") overwrite = traits.Bool(desc="overwrite output", argstr="-overwrite") other = File(desc="other options", argstr="") class Calc(AFNICommand): """This program does voxel-by-voxel arithmetic on 3D datasets. For complete details, see the `3dcalc Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> calc = afni.Calc() >>> calc.inputs.in_file_a = 'functional.nii' >>> calc.inputs.in_file_b = 'functional2.nii' >>> calc.inputs.expr='a*b' >>> calc.inputs.out_file = 'functional_calc.nii.gz' >>> calc.inputs.outputtype = 'NIFTI' >>> calc.cmdline # doctest: +ELLIPSIS '3dcalc -a functional.nii -b functional2.nii -expr "a*b" -prefix functional_calc.nii.gz' >>> res = calc.run() # doctest: +SKIP >>> from nipype.interfaces import afni >>> calc = afni.Calc() >>> calc.inputs.in_file_a = 'functional.nii' >>> calc.inputs.expr = '1' >>> calc.inputs.out_file = 'rm.epi.all1' >>> calc.inputs.overwrite = True >>> calc.cmdline '3dcalc -a functional.nii -expr "1" -prefix rm.epi.all1 -overwrite' >>> res = calc.run() # doctest: +SKIP """ _cmd = "3dcalc" input_spec = CalcInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, trait_spec, value): if name == "in_file_a": arg = trait_spec.argstr % value if isdefined(self.inputs.start_idx): arg += "[%d..%d]" % (self.inputs.start_idx, self.inputs.stop_idx) if isdefined(self.inputs.single_idx): arg += "[%d]" % (self.inputs.single_idx) return arg return super(Calc, self)._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): """Skip the arguments without argstr metadata""" return super(Calc, self)._parse_inputs(skip=("start_idx", "stop_idx", "other")) class CatInputSpec(AFNICommandInputSpec): in_files = traits.List(File(exists=True), argstr="%s", mandatory=True, position=-2) out_file = File( argstr="> %s", value="catout.1d", usedefault=True, desc="output (concatenated) file name", position=-1, mandatory=True, ) omitconst = traits.Bool( desc="Omit columns that are identically constant from output.", argstr="-nonconst", ) keepfree = traits.Bool( desc="Keep only columns that are marked as 'free' in the " "3dAllineate header from '-1Dparam_save'. 
" "If there is no such header, all columns are kept.", argstr="-nonfixed", ) out_format = traits.Enum( "int", "nice", "double", "fint", "cint", argstr="-form %s", desc="specify data type for output.", xor=["out_int", "out_nice", "out_double", "out_fint", "out_cint"], ) stack = traits.Bool( desc="Stack the columns of the resultant matrix in the output.", argstr="-stack" ) sel = traits.Str( desc="Apply the same column/row selection string to all filenames " "on the command line.", argstr="-sel %s", ) out_int = traits.Bool( desc="specifiy int data type for output", argstr="-i", xor=["out_format", "out_nice", "out_double", "out_fint", "out_cint"], ) out_nice = traits.Bool( desc="specifiy nice data type for output", argstr="-n", xor=["out_format", "out_int", "out_double", "out_fint", "out_cint"], ) out_double = traits.Bool( desc="specifiy double data type for output", argstr="-d", xor=["out_format", "out_nice", "out_int", "out_fint", "out_cint"], ) out_fint = traits.Bool( desc="specifiy int, rounded down, data type for output", argstr="-f", xor=["out_format", "out_nice", "out_double", "out_int", "out_cint"], ) out_cint = traits.Bool( desc="specifiy int, rounded up, data type for output", xor=["out_format", "out_nice", "out_double", "out_fint", "out_int"], ) class Cat(AFNICommand): """1dcat takes as input one or more 1D files, and writes out a 1D file containing the side-by-side concatenation of all or a subset of the columns from the input files. For complete details, see the `1dcat Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> cat1d = afni.Cat() >>> cat1d.inputs.sel = "'[0,2]'" >>> cat1d.inputs.in_files = ['f1.1D', 'f2.1D'] >>> cat1d.inputs.out_file = 'catout.1d' >>> cat1d.cmdline "1dcat -sel '[0,2]' f1.1D f2.1D > catout.1d" >>> res = cat1d.run() # doctest: +SKIP """ _cmd = "1dcat" input_spec = CatInputSpec output_spec = AFNICommandOutputSpec class CatMatvecInputSpec(AFNICommandInputSpec): in_file = traits.List( traits.Tuple(traits.Str(), traits.Str()), desc="list of tuples of mfiles and associated opkeys", mandatory=True, argstr="%s", position=-2, ) out_file = File( argstr=" > %s", name_template="%s_cat.aff12.1D", name_source="in_file", keep_extension=False, desc="File to write concattenated matvecs to", position=-1, mandatory=True, ) matrix = traits.Bool( desc="indicates that the resulting matrix will" "be written to outfile in the 'MATRIX(...)' format (FORM 3)." "This feature could be used, with clever scripting, to input" "a matrix directly on the command line to program 3dWarp.", argstr="-MATRIX", xor=["oneline", "fourxfour"], ) oneline = traits.Bool( desc="indicates that the resulting matrix" "will simply be written as 12 numbers on one line.", argstr="-ONELINE", xor=["matrix", "fourxfour"], ) fourxfour = traits.Bool( desc="Output matrix in augmented form (last row is 0 0 0 1)" "This option does not work with -MATRIX or -ONELINE", argstr="-4x4", xor=["matrix", "oneline"], ) class CatMatvec(AFNICommand): """Catenates 3D rotation+shift matrix+vector transformations. For complete details, see the `cat_matvec Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> cmv = afni.CatMatvec() >>> cmv.inputs.in_file = [('structural.BRIK::WARP_DATA','I')] >>> cmv.inputs.out_file = 'warp.anat.Xat.1D' >>> cmv.cmdline 'cat_matvec structural.BRIK::WARP_DATA -I > warp.anat.Xat.1D' >>> res = cmv.run() # doctest: +SKIP """ _cmd = "cat_matvec" input_spec = CatMatvecInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, spec, value): if name == "in_file": # Concatenate a series of filenames, with optional opkeys return " ".join( "%s -%s" % (mfile, opkey) if opkey else mfile for mfile, opkey in value ) return super(CatMatvec, self)._format_arg(name, spec, value) class CenterMassInputSpec(CommandLineInputSpec): in_file = File( desc="input file to 3dCM", argstr="%s", position=-2, mandatory=True, exists=True, copyfile=True, ) cm_file = File( name_source="in_file", name_template="%s_cm.out", hash_files=False, keep_extension=False, desc="File to write center of mass to", argstr="> %s", position=-1, ) mask_file = File( desc="Only voxels with nonzero values in the provided mask will be " "averaged.", argstr="-mask %s", exists=True, ) automask = traits.Bool(desc="Generate the mask automatically", argstr="-automask") set_cm = traits.Tuple( (traits.Float(), traits.Float(), traits.Float()), desc="After computing the center of mass, set the origin fields in " "the header so that the center of mass will be at (x,y,z) in " "DICOM coords.", argstr="-set %f %f %f", ) local_ijk = traits.Bool( desc="Output values as (i,j,k) in local orienation", argstr="-local_ijk" ) roi_vals = traits.List( traits.Int, desc="Compute center of mass for each blob with voxel value of v0, " "v1, v2, etc. 
This option is handy for getting ROI centers of " "mass.", argstr="-roi_vals %s", ) all_rois = traits.Bool( desc="Don't bother listing the values of ROIs you want: The program " "will find all of them and produce a full list", argstr="-all_rois", ) class CenterMassOutputSpec(TraitedSpec): out_file = File(exists=True, desc="output file") cm_file = File(desc="file with the center of mass coordinates") cm = traits.List( traits.Tuple(traits.Float(), traits.Float(), traits.Float()), desc="center of mass", ) class CenterMass(AFNICommandBase): """Computes center of mass using 3dCM command .. note:: By default, the output is (x,y,z) values in DICOM coordinates. But as of Dec, 2016, there are now command line switches for other options. For complete details, see the `3dCM Documentation. `_ Examples -------- >>> from nipype.interfaces import afni >>> cm = afni.CenterMass() >>> cm.inputs.in_file = 'structural.nii' >>> cm.inputs.cm_file = 'cm.txt' >>> cm.inputs.roi_vals = [2, 10] >>> cm.cmdline '3dCM -roi_vals 2 10 structural.nii > cm.txt' >>> res = 3dcm.run() # doctest: +SKIP """ _cmd = "3dCM" input_spec = CenterMassInputSpec output_spec = CenterMassOutputSpec def _list_outputs(self): outputs = super(CenterMass, self)._list_outputs() outputs["out_file"] = os.path.abspath(self.inputs.in_file) outputs["cm_file"] = os.path.abspath(self.inputs.cm_file) sout = np.loadtxt(outputs["cm_file"], ndmin=2) outputs["cm"] = [tuple(s) for s in sout] return outputs class ConvertDsetInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to ConvertDset", argstr="-input %s", position=-2, mandatory=True, exists=True, ) out_file = File( desc="output file for ConvertDset", argstr="-prefix %s", position=-1, mandatory=True, ) out_type = traits.Enum( ( "niml", "niml_asc", "niml_bi", "1D", "1Dp", "1Dpt", "gii", "gii_asc", "gii_b64", "gii_b64gz", ), desc="output type", argstr="-o_%s", mandatory=True, position=0, ) class ConvertDset(AFNICommandBase): """Converts a surface dataset from one 
format to another. For complete details, see the `ConvertDset Documentation. `_ Examples -------- >>> from nipype.interfaces import afni >>> convertdset = afni.ConvertDset() >>> convertdset.inputs.in_file = 'lh.pial_converted.gii' >>> convertdset.inputs.out_type = 'niml_asc' >>> convertdset.inputs.out_file = 'lh.pial_converted.niml.dset' >>> convertdset.cmdline 'ConvertDset -o_niml_asc -input lh.pial_converted.gii -prefix lh.pial_converted.niml.dset' >>> res = convertdset.run() # doctest: +SKIP """ _cmd = "ConvertDset" input_spec = ConvertDsetInputSpec output_spec = AFNICommandOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class CopyInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dcopy", argstr="%s", position=-2, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_copy", desc="output image file name", argstr="%s", position=-1, name_source="in_file", ) verbose = traits.Bool(desc="print progress reports", argstr="-verb") class Copy(AFNICommand): """Copies an image of one type to an image of the same or different type using 3dcopy command For complete details, see the `3dcopy Documentation. 
`__ Examples -------- >>> from nipype.interfaces import afni >>> copy3d = afni.Copy() >>> copy3d.inputs.in_file = 'functional.nii' >>> copy3d.cmdline '3dcopy functional.nii functional_copy' >>> res = copy3d.run() # doctest: +SKIP >>> from copy import deepcopy >>> copy3d_2 = deepcopy(copy3d) >>> copy3d_2.inputs.outputtype = 'NIFTI' >>> copy3d_2.cmdline '3dcopy functional.nii functional_copy.nii' >>> res = copy3d_2.run() # doctest: +SKIP >>> copy3d_3 = deepcopy(copy3d) >>> copy3d_3.inputs.outputtype = 'NIFTI_GZ' >>> copy3d_3.cmdline '3dcopy functional.nii functional_copy.nii.gz' >>> res = copy3d_3.run() # doctest: +SKIP >>> copy3d_4 = deepcopy(copy3d) >>> copy3d_4.inputs.out_file = 'new_func.nii' >>> copy3d_4.cmdline '3dcopy functional.nii new_func.nii' >>> res = copy3d_4.run() # doctest: +SKIP """ _cmd = "3dcopy" input_spec = CopyInputSpec output_spec = AFNICommandOutputSpec class DotInputSpec(AFNICommandInputSpec): in_files = traits.List( (File()), desc="list of input files, possibly with subbrick selectors", argstr="%s ...", position=-2, ) out_file = File(desc="collect output to a file", argstr=" |& tee %s", position=-1) mask = File(desc="Use this dataset as a mask", argstr="-mask %s") mrange = traits.Tuple( (traits.Float(), traits.Float()), desc="Means to further restrict the voxels from 'mset' so that" "only those mask values within this range (inclusive) willbe used.", argstr="-mrange %s %s", ) demean = traits.Bool( desc="Remove the mean from each volume prior to computing the correlation", argstr="-demean", ) docor = traits.Bool( desc="Return the correlation coefficient (default).", argstr="-docor" ) dodot = traits.Bool(desc="Return the dot product (unscaled).", argstr="-dodot") docoef = traits.Bool( desc="Return the least square fit coefficients {{a,b}} so that dset2 is approximately a + b\\*dset1", argstr="-docoef", ) dosums = traits.Bool( desc="Return the 6 numbers xbar= ybar= <(x-xbar)^2> <(y-ybar)^2> <(x-xbar)(y-ybar)> and the correlation coefficient.", 
argstr="-dosums", ) dodice = traits.Bool( desc="Return the Dice coefficient (the Sorensen-Dice index).", argstr="-dodice" ) doeta2 = traits.Bool( desc="Return eta-squared (Cohen, NeuroImage 2008).", argstr="-doeta2" ) full = traits.Bool( desc="Compute the whole matrix. A waste of time, but handy for parsing.", argstr="-full", ) show_labels = traits.Bool( desc="Print sub-brick labels to help identify what is being correlated. This option is useful when" "you have more than 2 sub-bricks at input.", argstr="-show_labels", ) upper = traits.Bool(desc="Compute upper triangular matrix", argstr="-upper") class Dot(AFNICommand): """Correlation coefficient between sub-brick pairs. All datasets in in_files list will be concatenated. You can use sub-brick selectors in the file specification. .. warning:: This program is not efficient when more than two subbricks are input. For complete details, see the `3ddot Documentation. `_ >>> from nipype.interfaces import afni >>> dot = afni.Dot() >>> dot.inputs.in_files = ['functional.nii[0]', 'structural.nii'] >>> dot.inputs.dodice = True >>> dot.inputs.out_file = 'out.mask_ae_dice.txt' >>> dot.cmdline '3dDot -dodice functional.nii[0] structural.nii |& tee out.mask_ae_dice.txt' >>> res = copy3d.run() # doctest: +SKIP """ _cmd = "3dDot" input_spec = DotInputSpec output_spec = AFNICommandOutputSpec class Edge3InputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dedge3", argstr="-input %s", position=0, mandatory=True, exists=True, copyfile=False, ) out_file = File(desc="output image file name", position=-1, argstr="-prefix %s") datum = traits.Enum( "byte", "short", "float", argstr="-datum %s", desc="specify data type for output. 
Valid types are 'byte', " "'short' and 'float'.", ) fscale = traits.Bool( desc="Force scaling of the output to the maximum integer range.", argstr="-fscale", xor=["gscale", "nscale", "scale_floats"], ) gscale = traits.Bool( desc="Same as '-fscale', but also forces each output sub-brick to " "to get the same scaling factor.", argstr="-gscale", xor=["fscale", "nscale", "scale_floats"], ) nscale = traits.Bool( desc="Don't do any scaling on output to byte or short datasets.", argstr="-nscale", xor=["fscale", "gscale", "scale_floats"], ) scale_floats = traits.Float( desc="Multiply input by VAL, but only if the input datum is " "float. This is needed when the input dataset " "has a small range, like 0 to 2.0 for instance. " "With such a range, very few edges are detected due to " "what I suspect to be truncation problems. " "Multiplying such a dataset by 10000 fixes the problem " "and the scaling is undone at the output.", argstr="-scale_floats %f", xor=["fscale", "gscale", "nscale"], ) verbose = traits.Bool( desc="Print out some information along the way.", argstr="-verbose" ) class Edge3(AFNICommand): """Does 3D Edge detection using the library 3DEdge by Gregoire Malandain. For complete details, see the `3dedge3 Documentation. `_ Examples -------- >>> from nipype.interfaces import afni >>> edge3 = afni.Edge3() >>> edge3.inputs.in_file = 'functional.nii' >>> edge3.inputs.out_file = 'edges.nii' >>> edge3.inputs.datum = 'byte' >>> edge3.cmdline '3dedge3 -input functional.nii -datum byte -prefix edges.nii' >>> res = edge3.run() # doctest: +SKIP """ _cmd = "3dedge3" input_spec = Edge3InputSpec output_spec = AFNICommandOutputSpec _references = [ { "entry": BibTeX( """\ @article{Deriche1987, author={R. Deriche}, title={Optimal edge detection using recursive filtering}, journal={International Journal of Computer Vision}, volume={2},' pages={167-187}, year={1987}, }""" ), "tags": ["method"], }, { "entry": BibTeX( """\ @article{MongaDericheMalandainCocquerez1991, author={O. 
Monga, R. Deriche, G. Malandain, J.P. Cocquerez}, title={Recursive filtering and edge tracking: two primary tools for 3D edge detection}, journal={Image and vision computing}, volume={9},' pages={203-214}, year={1991}, }""" ), "tags": ["method"], }, ] class EvalInputSpec(AFNICommandInputSpec): in_file_a = File( desc="input file to 1deval", argstr="-a %s", position=0, mandatory=True, exists=True, ) in_file_b = File( desc="operand file to 1deval", argstr="-b %s", position=1, exists=True ) in_file_c = File( desc="operand file to 1deval", argstr="-c %s", position=2, exists=True ) out_file = File( name_template="%s_calc", desc="output image file name", argstr="-prefix %s", name_source="in_file_a", ) out1D = traits.Bool(desc="output in 1D", argstr="-1D") expr = Str(desc="expr", argstr='-expr "%s"', position=3, mandatory=True) start_idx = traits.Int(desc="start index for in_file_a", requires=["stop_idx"]) stop_idx = traits.Int(desc="stop index for in_file_a", requires=["start_idx"]) single_idx = traits.Int(desc="volume index for in_file_a") other = File(desc="other options", argstr="") class Eval(AFNICommand): """Evaluates an expression that may include columns of data from one or more text files. For complete details, see the `1deval Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> eval = afni.Eval() >>> eval.inputs.in_file_a = 'seed.1D' >>> eval.inputs.in_file_b = 'resp.1D' >>> eval.inputs.expr = 'a*b' >>> eval.inputs.out1D = True >>> eval.inputs.out_file = 'data_calc.1D' >>> eval.cmdline '1deval -a seed.1D -b resp.1D -expr "a*b" -1D -prefix data_calc.1D' >>> res = eval.run() # doctest: +SKIP """ _cmd = "1deval" input_spec = EvalInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, trait_spec, value): if name == "in_file_a": arg = trait_spec.argstr % value if isdefined(self.inputs.start_idx): arg += "[%d..%d]" % (self.inputs.start_idx, self.inputs.stop_idx) if isdefined(self.inputs.single_idx): arg += "[%d]" % (self.inputs.single_idx) return arg return super(Eval, self)._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): """Skip the arguments without argstr metadata""" return super(Eval, self)._parse_inputs(skip=("start_idx", "stop_idx", "other")) class FWHMxInputSpec(CommandLineInputSpec): in_file = File( desc="input dataset", argstr="-input %s", mandatory=True, exists=True ) out_file = File( argstr="> %s", name_source="in_file", name_template="%s_fwhmx.out", position=-1, keep_extension=False, desc="output file", ) out_subbricks = File( argstr="-out %s", name_source="in_file", name_template="%s_subbricks.out", keep_extension=False, desc="output file listing the subbricks FWHM", ) mask = File( desc="use only voxels that are nonzero in mask", argstr="-mask %s", exists=True ) automask = traits.Bool( False, usedefault=True, argstr="-automask", desc="compute a mask from THIS dataset, a la 3dAutomask", ) detrend = traits.Either( traits.Bool(), traits.Int(), default=False, argstr="-detrend", xor=["demed"], usedefault=True, desc="instead of demed (0th order detrending), detrend to the " "specified order. If order is not given, the program picks " "q=NT/30. 
-detrend disables -demed, and includes -unif.", ) demed = traits.Bool( False, argstr="-demed", xor=["detrend"], desc="If the input dataset has more than one sub-brick (e.g., has a " "time axis), then subtract the median of each voxel's time " "series before processing FWHM. This will tend to remove " "intrinsic spatial structure and leave behind the noise.", ) unif = traits.Bool( False, argstr="-unif", desc="If the input dataset has more than one sub-brick, then " "normalize each voxel's time series to have the same MAD before " "processing FWHM.", ) out_detrend = File( argstr="-detprefix %s", name_source="in_file", name_template="%s_detrend", keep_extension=False, desc="Save the detrended file into a dataset", ) geom = traits.Bool( argstr="-geom", xor=["arith"], desc="if in_file has more than one sub-brick, compute the final " "estimate as the geometric mean of the individual sub-brick FWHM " "estimates", ) arith = traits.Bool( argstr="-arith", xor=["geom"], desc="if in_file has more than one sub-brick, compute the final " "estimate as the arithmetic mean of the individual sub-brick " "FWHM estimates", ) combine = traits.Bool( argstr="-combine", desc="combine the final measurements along each axis" ) compat = traits.Bool(argstr="-compat", desc="be compatible with the older 3dFWHM") acf = traits.Either( traits.Bool(), File(), traits.Tuple(File(exists=True), traits.Float()), default=False, usedefault=True, argstr="-acf", desc="computes the spatial autocorrelation", ) class FWHMxOutputSpec(TraitedSpec): out_file = File(exists=True, desc="output file") out_subbricks = File(exists=True, desc="output file (subbricks)") out_detrend = File(desc="output file, detrended") fwhm = traits.Either( traits.Tuple(traits.Float(), traits.Float(), traits.Float()), traits.Tuple(traits.Float(), traits.Float(), traits.Float(), traits.Float()), desc="FWHM along each axis", ) acf_param = traits.Either( traits.Tuple(traits.Float(), traits.Float(), traits.Float()), 
traits.Tuple(traits.Float(), traits.Float(), traits.Float(), traits.Float()), desc="fitted ACF model parameters", ) out_acf = File(exists=True, desc="output acf file") class FWHMx(AFNICommandBase): """ Unlike the older 3dFWHM, this program computes FWHMs for all sub-bricks in the input dataset, each one separately. The output for each one is written to the file specified by '-out'. The mean (arithmetic or geometric) of all the FWHMs along each axis is written to stdout. (A non-positive output value indicates something bad happened; e.g., FWHM in z is meaningless for a 2D dataset; the estimation method computed incoherent intermediate results.) For complete details, see the `3dFWHMx Documentation. `_ (Classic) METHOD: * Calculate ratio of variance of first differences to data variance. * Should be the same as 3dFWHM for a 1-brick dataset. (But the output format is simpler to use in a script.) .. note:: IMPORTANT NOTE [AFNI > 16] A completely new method for estimating and using noise smoothness values is now available in 3dFWHMx and 3dClustSim. This method is implemented in the '-acf' options to both programs. 'ACF' stands for (spatial) AutoCorrelation Function, and it is estimated by calculating moments of differences out to a larger radius than before. Notably, real FMRI data does not actually have a Gaussian-shaped ACF, so the estimated ACF is then fit (in 3dFWHMx) to a mixed model (Gaussian plus mono-exponential) of the form .. math:: ACF(r) = a * exp(-r*r/(2*b*b)) + (1-a)*exp(-r/c) where :math:`r` is the radius, and :math:`a, b, c` are the fitted parameters. The apparent FWHM from this model is usually somewhat larger in real data than the FWHM estimated from just the nearest-neighbor differences used in the 'classic' analysis. The longer tails provided by the mono-exponential are also significant. 3dClustSim has also been modified to use the ACF model given above to generate noise random fields. .. 
note:: TL;DR or summary The take-awaymessage is that the 'classic' 3dFWHMx and 3dClustSim analysis, using a pure Gaussian ACF, is not very correct for FMRI data -- I cannot speak for PET or MEG data. .. warning:: Do NOT use 3dFWHMx on the statistical results (e.g., '-bucket') from 3dDeconvolve or 3dREMLfit!!! The function of 3dFWHMx is to estimate the smoothness of the time series NOISE, not of the statistics. This proscription is especially true if you plan to use 3dClustSim next!! .. note:: Recommendations * For FMRI statistical purposes, you DO NOT want the FWHM to reflect the spatial structure of the underlying anatomy. Rather, you want the FWHM to reflect the spatial structure of the noise. This means that the input dataset should not have anatomical (spatial) structure. * One good form of input is the output of '3dDeconvolve -errts', which is the dataset of residuals left over after the GLM fitted signal model is subtracted out from each voxel's time series. * If you don't want to go to that much trouble, use '-detrend' to approximately subtract out the anatomical spatial structure, OR use the output of 3dDetrend for the same purpose. * If you do not use '-detrend', the program attempts to find non-zero spatial structure in the input, and will print a warning message if it is detected. .. note:: Notes on -demend * I recommend this option, and it is not the default only for historical compatibility reasons. It may become the default someday. * It is already the default in program 3dBlurToFWHM. This is the same detrending as done in 3dDespike; using 2*q+3 basis functions for q > 0. * If you don't use '-detrend', the program now [Aug 2010] checks if a large number of voxels are have significant nonzero means. If so, the program will print a warning message suggesting the use of '-detrend', since inherent spatial structure in the image will bias the estimation of the FWHM of the image time series NOISE (which is usually the point of using 3dFWHMx). 
Examples -------- >>> from nipype.interfaces import afni >>> fwhm = afni.FWHMx() >>> fwhm.inputs.in_file = 'functional.nii' >>> fwhm.cmdline '3dFWHMx -input functional.nii -out functional_subbricks.out > functional_fwhmx.out' >>> res = fwhm.run() # doctest: +SKIP """ _cmd = "3dFWHMx" input_spec = FWHMxInputSpec output_spec = FWHMxOutputSpec _references = [ { "entry": BibTeX( "@article{CoxReynoldsTaylor2016," "author={R.W. Cox, R.C. Reynolds, and P.A. Taylor}," "title={AFNI and clustering: false positive rates redux}," "journal={bioRxiv}," "year={2016}," "}" ), "tags": ["method"], } ] _acf = True def _parse_inputs(self, skip=None): if not self.inputs.detrend: if skip is None: skip = [] skip += ["out_detrend"] return super(FWHMx, self)._parse_inputs(skip=skip) def _format_arg(self, name, trait_spec, value): if name == "detrend": if value is True: return trait_spec.argstr elif value is False: return None elif isinstance(value, int): return trait_spec.argstr + " %d" % value if name == "acf": if value is True: return trait_spec.argstr elif value is False: self._acf = False return None elif isinstance(value, tuple): return trait_spec.argstr + " %s %f" % value elif isinstance(value, (str, bytes)): return trait_spec.argstr + " " + value return super(FWHMx, self)._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = super(FWHMx, self)._list_outputs() if self.inputs.detrend: fname, ext = op.splitext(self.inputs.in_file) if ".gz" in ext: _, ext2 = op.splitext(fname) ext = ext2 + ext outputs["out_detrend"] += ext else: outputs["out_detrend"] = Undefined sout = np.loadtxt(outputs["out_file"]) # handle newer versions of AFNI if sout.size == 8: outputs["fwhm"] = tuple(sout[0, :]) else: outputs["fwhm"] = tuple(sout) if self._acf: assert sout.size == 8, "Wrong number of elements in %s" % str(sout) outputs["acf_param"] = tuple(sout[1]) outputs["out_acf"] = op.abspath("3dFWHMx.1D") if isinstance(self.inputs.acf, (str, bytes)): outputs["out_acf"] = 
op.abspath(self.inputs.acf) return outputs class LocalBistatInputSpec(AFNICommandInputSpec): in_file1 = File( exists=True, mandatory=True, argstr="%s", position=-2, desc="Filename of the first image", ) in_file2 = File( exists=True, mandatory=True, argstr="%s", position=-1, desc="Filename of the second image", ) neighborhood = traits.Either( traits.Tuple(traits.Enum("SPHERE", "RHDD", "TOHD"), traits.Float()), traits.Tuple( traits.Enum("RECT"), traits.Tuple(traits.Float(), traits.Float(), traits.Float()), ), mandatory=True, desc="The region around each voxel that will be extracted for " "the statistics calculation. Possible regions are: " "'SPHERE', 'RHDD' (rhombic dodecahedron), 'TOHD' " "(truncated octahedron) with a given radius in mm or " "'RECT' (rectangular block) with dimensions to specify in mm.", argstr="-nbhd '%s(%s)'", ) _stat_names = [ "pearson", "spearman", "quadrant", "mutinfo", "normuti", "jointent", "hellinger", "crU", "crM", "crA", "L2slope", "L1slope", "num", "ALL", ] stat = InputMultiPath( traits.Enum(_stat_names), mandatory=True, desc="""\ Statistics to compute. Possible names are: * pearson = Pearson correlation coefficient * spearman = Spearman correlation coefficient * quadrant = Quadrant correlation coefficient * mutinfo = Mutual Information * normuti = Normalized Mutual Information * jointent = Joint entropy * hellinger= Hellinger metric * crU = Correlation ratio (Unsymmetric) * crM = Correlation ratio (symmetrized by Multiplication) * crA = Correlation ratio (symmetrized by Addition) * L2slope = slope of least-squares (L2) linear regression of the data from dataset1 vs. the dataset2 (i.e., d2 = a + b*d1 ==> this is 'b') * L1slope = slope of least-absolute-sum (L1) linear regression of the data from dataset1 vs. the dataset2 * num = number of the values in the region: with the use of -mask or -automask, the size of the region around any given voxel will vary; this option lets you map that size. 
* ALL = all of the above, in that order More than one option can be used.""", argstr="-stat %s...", ) mask_file = File( exists=True, desc="mask image file name. Voxels NOT in the mask will not be used " "in the neighborhood of any voxel. Also, a voxel NOT in the mask " "will have its statistic(s) computed as zero (0).", argstr="-mask %s", ) automask = traits.Bool( desc="Compute the mask as in program 3dAutomask.", argstr="-automask", xor=["weight_file"], ) weight_file = File( exists=True, desc="File name of an image to use as a weight. Only applies to " "'pearson' statistics.", argstr="-weight %s", xor=["automask"], ) out_file = File( desc="Output dataset.", argstr="-prefix %s", name_source="in_file1", name_template="%s_bistat", keep_extension=True, position=0, ) class LocalBistat(AFNICommand): """3dLocalBistat - computes statistics between 2 datasets, at each voxel, based on a local neighborhood of that voxel. For complete details, see the `3dLocalBistat Documentation. `_ Examples -------- >>> from nipype.interfaces import afni >>> bistat = afni.LocalBistat() >>> bistat.inputs.in_file1 = 'functional.nii' >>> bistat.inputs.in_file2 = 'structural.nii' >>> bistat.inputs.neighborhood = ('SPHERE', 1.2) >>> bistat.inputs.stat = 'pearson' >>> bistat.inputs.outputtype = 'NIFTI' >>> bistat.cmdline "3dLocalBistat -prefix functional_bistat.nii -nbhd 'SPHERE(1.2)' -stat pearson functional.nii structural.nii" >>> res = automask.run() # doctest: +SKIP """ _cmd = "3dLocalBistat" input_spec = LocalBistatInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, spec, value): if name == "neighborhood" and value[0] == "RECT": value = ("RECT", "%s,%s,%s" % value[1]) return super(LocalBistat, self)._format_arg(name, spec, value) class LocalstatInputSpec(AFNICommandInputSpec): in_file = File( exists=True, mandatory=True, argstr="%s", position=-1, desc="input dataset" ) neighborhood = traits.Either( traits.Tuple(traits.Enum("SPHERE", "RHDD", "TOHD"), traits.Float()), 
traits.Tuple( traits.Enum("RECT"), traits.Tuple(traits.Float(), traits.Float(), traits.Float()), ), mandatory=True, desc="The region around each voxel that will be extracted for " "the statistics calculation. Possible regions are: " "'SPHERE', 'RHDD' (rhombic dodecahedron), 'TOHD' " "(truncated octahedron) with a given radius in mm or " "'RECT' (rectangular block) with dimensions to specify in mm.", argstr="-nbhd '%s(%s)'", ) _stat_names = [ "mean", "stdev", "var", "cvar", "median", "MAD", "min", "max", "absmax", "num", "sum", "FWHM", "FWHMbar", "rank", "frank", "P2skew", "ALL", "mMP2s", "mmMP2s", ] stat = InputMultiObject( traits.Either( traits.Enum(_stat_names), traits.Tuple( traits.Enum("perc"), traits.Tuple(traits.Float, traits.Float, traits.Float), ), ), mandatory=True, desc="""\ statistics to compute. Possible names are: * mean = average of the values * stdev = standard deviation * var = variance (stdev\\*stdev) * cvar = coefficient of variation = stdev/fabs(mean) * median = median of the values * MAD = median absolute deviation * min = minimum * max = maximum * absmax = maximum of the absolute values * num = number of the values in the region: with the use of -mask or -automask, the size of the region around any given voxel will vary; this option lets you map that size. It may be useful if you plan to compute a t-statistic (say) from the mean and stdev outputs. * sum = sum of the values in the region * FWHM = compute (like 3dFWHM) image smoothness inside each voxel's neighborhood. Results are in 3 sub-bricks: FWHMx, FHWMy, and FWHMz. Places where an output is -1 are locations where the FWHM value could not be computed (e.g., outside the mask). * FWHMbar= Compute just the average of the 3 FWHM values (normally would NOT do this with FWHM also). * perc:P0:P1:Pstep = Compute percentiles between P0 and P1 with a step of Pstep. 
Default P1 is equal to P0 and default P2 = 1 * rank = rank of the voxel's intensity * frank = rank / number of voxels in neighborhood * P2skew = Pearson's second skewness coefficient 3 \\* (mean - median) / stdev * ALL = all of the above, in that order (except for FWHMbar and perc). * mMP2s = Exactly the same output as: median, MAD, P2skew, but a little faster * mmMP2s = Exactly the same output as: mean, median, MAD, P2skew More than one option can be used.""", argstr="-stat %s...", ) mask_file = File( exists=True, desc="Mask image file name. Voxels NOT in the mask will not be used " "in the neighborhood of any voxel. Also, a voxel NOT in the " "mask will have its statistic(s) computed as zero (0) unless " "the parameter 'nonmask' is set to true.", argstr="-mask %s", ) automask = traits.Bool( desc="Compute the mask as in program 3dAutomask.", argstr="-automask" ) nonmask = traits.Bool( desc="""\ Voxels not in the mask WILL have their local statistics computed from all voxels in their neighborhood that ARE in the mask. For instance, this option can be used to compute the average local white matter time series, even at non-WM voxels.""", argstr="-use_nonmask", ) reduce_grid = traits.Either( traits.Float, traits.Tuple(traits.Float, traits.Float, traits.Float), argstr="-reduce_grid %s", xor=["reduce_restore_grid", "reduce_max_vox"], desc="Compute output on a grid that is reduced by the specified " "factors. If a single value is passed, output is resampled " "to the specified isotropic grid. Otherwise, the 3 inputs " "describe the reduction in the X, Y, and Z directions. This " "option speeds up computations at the expense of resolution. 
" "It should only be used when the nbhd is quite large with " "respect to the input's resolution, and the resultant stats " "are expected to be smooth.", ) reduce_restore_grid = traits.Either( traits.Float, traits.Tuple(traits.Float, traits.Float, traits.Float), argstr="-reduce_restore_grid %s", xor=["reduce_max_vox", "reduce_grid"], desc="Like reduce_grid, but also resample output back to input" "grid.", ) reduce_max_vox = traits.Float( argstr="-reduce_max_vox %s", xor=["reduce_restore_grid", "reduce_grid"], desc="Like reduce_restore_grid, but automatically set Rx Ry Rz so" "that the computation grid is at a resolution of nbhd/MAX_VOX" "voxels.", ) grid_rmode = traits.Enum( "NN", "Li", "Cu", "Bk", argstr="-grid_rmode %s", requires=["reduce_restore_grid"], desc="Interpolant to use when resampling the output with the" "reduce_restore_grid option. The resampling method string " "RESAM should come from the set {'NN', 'Li', 'Cu', " "'Bk'}. These stand for 'Nearest Neighbor', 'Linear', " "'Cubic', and 'Blocky' interpolation, respectively.", ) quiet = traits.Bool( argstr="-quiet", desc="Stop the highly informative progress reports." ) overwrite = traits.Bool( desc="overwrite output file if it already exists", argstr="-overwrite" ) out_file = File( desc="Output dataset.", argstr="-prefix %s", name_source="in_file", name_template="%s_localstat", keep_extension=True, position=0, ) class Localstat(AFNICommand): """3dLocalstat - computes statistics at each voxel, based on a local neighborhood of that voxel. For complete details, see the `3dLocalstat Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> localstat = afni.Localstat() >>> localstat.inputs.in_file = 'functional.nii' >>> localstat.inputs.mask_file = 'skeleton_mask.nii.gz' >>> localstat.inputs.neighborhood = ('SPHERE', 45) >>> localstat.inputs.stat = 'mean' >>> localstat.inputs.nonmask = True >>> localstat.inputs.outputtype = 'NIFTI_GZ' >>> localstat.cmdline "3dLocalstat -prefix functional_localstat.nii -mask skeleton_mask.nii.gz -nbhd 'SPHERE(45.0)' -use_nonmask -stat mean functional.nii" >>> res = localstat.run() # doctest: +SKIP """ _cmd = "3dLocalstat" input_spec = LocalstatInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, spec, value): if name == "neighborhood" and value[0] == "RECT": value = ("RECT", "%s,%s,%s" % value[1]) if name == "stat": value = ["perc:%s:%s:%s" % v[1] if len(v) == 2 else v for v in value] if name == "reduce_grid" or name == "reduce_restore_grid": if len(value) == 3: value = "%s %s %s" % value return super(Localstat, self)._format_arg(name, spec, value) class MaskToolInputSpec(AFNICommandInputSpec): in_file = InputMultiPath( File(exists=True), desc="input file or files to 3dmask_tool", argstr="-input %s", position=-1, mandatory=True, copyfile=False, ) out_file = File( name_template="%s_mask", desc="output image file name", argstr="-prefix %s", name_source="in_file", ) count = traits.Bool( desc="Instead of created a binary 0/1 mask dataset, create one with " "counts of voxel overlap, i.e., each voxel will contain the " "number of masks that it is set in.", argstr="-count", position=2, ) datum = traits.Enum( "byte", "short", "float", argstr="-datum %s", desc="specify data type for output.", ) dilate_inputs = Str( desc="Use this option to dilate and/or erode datasets as they are " "read. ex. 
'5 -5' to dilate and erode 5 times", argstr="-dilate_inputs %s", ) dilate_results = Str( desc="dilate and/or erode combined mask at the given levels.", argstr="-dilate_results %s", ) frac = traits.Float( desc="When combining masks (across datasets and sub-bricks), use " "this option to restrict the result to a certain fraction of the " "set of volumes", argstr="-frac %s", ) inter = traits.Bool(desc="intersection, this means -frac 1.0", argstr="-inter") union = traits.Bool(desc="union, this means -frac 0", argstr="-union") fill_holes = traits.Bool( desc="This option can be used to fill holes in the resulting mask, " "i.e. after all other processing has been done.", argstr="-fill_holes", ) fill_dirs = Str( desc="fill holes only in the given directions. This option is for use " "with -fill holes. should be a single string that specifies " "1-3 of the axes using {x,y,z} labels (i.e. dataset axis order), " "or using the labels in {R,L,A,P,I,S}.", argstr="-fill_dirs %s", requires=["fill_holes"], ) verbose = traits.Int(desc="specify verbosity level, for 0 to 3", argstr="-verb %s") class MaskToolOutputSpec(TraitedSpec): out_file = File(desc="mask file", exists=True) class MaskTool(AFNICommand): """3dmask_tool - for combining/dilating/eroding/filling masks For complete details, see the `3dmask_tool Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> masktool = afni.MaskTool() >>> masktool.inputs.in_file = 'functional.nii' >>> masktool.inputs.outputtype = 'NIFTI' >>> masktool.cmdline '3dmask_tool -prefix functional_mask.nii -input functional.nii' >>> res = automask.run() # doctest: +SKIP """ _cmd = "3dmask_tool" input_spec = MaskToolInputSpec output_spec = MaskToolOutputSpec class MergeInputSpec(AFNICommandInputSpec): in_files = InputMultiPath( File(desc="input file to 3dmerge", exists=True), argstr="%s", position=-1, mandatory=True, copyfile=False, ) out_file = File( name_template="%s_merge", desc="output image file name", argstr="-prefix %s", name_source="in_files", ) doall = traits.Bool( desc="apply options to all sub-bricks in dataset", argstr="-doall" ) blurfwhm = traits.Int( desc="FWHM blur value (mm)", argstr="-1blur_fwhm %d", units="mm" ) class Merge(AFNICommand): """Merge or edit volumes using AFNI 3dmerge command For complete details, see the `3dmerge Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> merge = afni.Merge() >>> merge.inputs.in_files = ['functional.nii', 'functional2.nii'] >>> merge.inputs.blurfwhm = 4 >>> merge.inputs.doall = True >>> merge.inputs.out_file = 'e7.nii' >>> merge.cmdline '3dmerge -1blur_fwhm 4 -doall -prefix e7.nii functional.nii functional2.nii' >>> res = merge.run() # doctest: +SKIP """ _cmd = "3dmerge" input_spec = MergeInputSpec output_spec = AFNICommandOutputSpec class NotesInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dNotes", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) add = Str(desc="note to add", argstr='-a "%s"') add_history = Str( desc="note to add to history", argstr='-h "%s"', xor=["rep_history"] ) rep_history = Str( desc="note with which to replace history", argstr='-HH "%s"', xor=["add_history"], ) delete = traits.Int(desc="delete note number num", argstr="-d %d") ses = traits.Bool(desc="print to stdout the expanded notes", argstr="-ses") out_file = File(desc="output image file name", argstr="%s") class Notes(CommandLine): """A program to add, delete, and show notes for AFNI datasets. For complete details, see the `3dNotes Documentation. `_ Examples -------- >>> from nipype.interfaces import afni >>> notes = afni.Notes() >>> notes.inputs.in_file = 'functional.HEAD' >>> notes.inputs.add = 'This note is added.' >>> notes.inputs.add_history = 'This note is added to history.' >>> notes.cmdline '3dNotes -a "This note is added." -h "This note is added to history." 
functional.HEAD' >>> res = notes.run() # doctest: +SKIP """ _cmd = "3dNotes" input_spec = NotesInputSpec output_spec = AFNICommandOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self.inputs.in_file) return outputs class NwarpAdjustInputSpec(AFNICommandInputSpec): warps = InputMultiPath( File(exists=True), minlen=5, mandatory=True, argstr="-nwarp %s", desc="List of input 3D warp datasets", ) in_files = InputMultiPath( File(exists=True), minlen=5, argstr="-source %s", desc="List of input 3D datasets to be warped by the adjusted warp " "datasets. There must be exactly as many of these datasets as " "there are input warps.", ) out_file = File( desc="Output mean dataset, only needed if in_files are also given. " "The output dataset will be on the common grid shared by the " "source datasets.", argstr="-prefix %s", name_source="in_files", name_template="%s_NwarpAdjust", keep_extension=True, requires=["in_files"], ) class NwarpAdjust(AFNICommandBase): """This program takes as input a bunch of 3D warps, averages them, and computes the inverse of this average warp. It then composes each input warp with this inverse average to 'adjust' the set of warps. Optionally, it can also read in a set of 1-brick datasets corresponding to the input warps, and warp each of them, and average those. For complete details, see the `3dNwarpAdjust Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> adjust = afni.NwarpAdjust() >>> adjust.inputs.warps = ['func2anat_InverseWarp.nii.gz', 'func2anat_InverseWarp.nii.gz', 'func2anat_InverseWarp.nii.gz', 'func2anat_InverseWarp.nii.gz', 'func2anat_InverseWarp.nii.gz'] >>> adjust.cmdline '3dNwarpAdjust -nwarp func2anat_InverseWarp.nii.gz func2anat_InverseWarp.nii.gz func2anat_InverseWarp.nii.gz func2anat_InverseWarp.nii.gz func2anat_InverseWarp.nii.gz' >>> res = adjust.run() # doctest: +SKIP """ _cmd = "3dNwarpAdjust" input_spec = NwarpAdjustInputSpec output_spec = AFNICommandOutputSpec def _parse_inputs(self, skip=None): if not self.inputs.in_files: if skip is None: skip = [] skip += ["out_file"] return super(NwarpAdjust, self)._parse_inputs(skip=skip) def _list_outputs(self): outputs = self.output_spec().get() if self.inputs.in_files: if self.inputs.out_file: outputs["out_file"] = os.path.abspath(self.inputs.out_file) else: basename = os.path.basename(self.inputs.in_files[0]) basename_noext, ext = op.splitext(basename) if ".gz" in ext: basename_noext, ext2 = op.splitext(basename_noext) ext = ext2 + ext outputs["out_file"] = os.path.abspath( basename_noext + "_NwarpAdjust" + ext ) return outputs class NwarpApplyInputSpec(CommandLineInputSpec): in_file = traits.Either( File(exists=True), traits.List(File(exists=True)), mandatory=True, argstr="-source %s", desc="the name of the dataset to be warped " "can be multiple datasets", ) warp = traits.String( desc="the name of the warp dataset. 
" "multiple warps can be concatenated (make sure they exist)", argstr="-nwarp %s", mandatory=True, ) inv_warp = traits.Bool( desc="After the warp specified in '-nwarp' is computed, invert it", argstr="-iwarp", ) master = File( exists=True, desc="the name of the master dataset, which defines the output grid", argstr="-master %s", ) interp = traits.Enum( "wsinc5", "NN", "nearestneighbour", "nearestneighbor", "linear", "trilinear", "cubic", "tricubic", "quintic", "triquintic", desc="defines interpolation method to use during warp", argstr="-interp %s", usedefault=True, ) ainterp = traits.Enum( "NN", "nearestneighbour", "nearestneighbor", "linear", "trilinear", "cubic", "tricubic", "quintic", "triquintic", "wsinc5", desc="specify a different interpolation method than might " "be used for the warp", argstr="-ainterp %s", ) out_file = File( name_template="%s_Nwarp", desc="output image file name", argstr="-prefix %s", name_source="in_file", ) short = traits.Bool( desc="Write output dataset using 16-bit short integers, rather than " "the usual 32-bit floats.", argstr="-short", ) quiet = traits.Bool(desc="don't be verbose :(", argstr="-quiet", xor=["verb"]) verb = traits.Bool(desc="be extra verbose :)", argstr="-verb", xor=["quiet"]) class NwarpApply(AFNICommandBase): """Program to apply a nonlinear 3D warp saved from 3dQwarp (or 3dNwarpCat, etc.) to a 3D dataset, to produce a warped version of the source dataset. For complete details, see the `3dNwarpApply Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> nwarp = afni.NwarpApply() >>> nwarp.inputs.in_file = 'Fred+orig' >>> nwarp.inputs.master = 'NWARP' >>> nwarp.inputs.warp = "'Fred_WARP+tlrc Fred.Xaff12.1D'" >>> nwarp.cmdline "3dNwarpApply -source Fred+orig -interp wsinc5 -master NWARP -prefix Fred+orig_Nwarp -nwarp \'Fred_WARP+tlrc Fred.Xaff12.1D\'" >>> res = nwarp.run() # doctest: +SKIP """ _cmd = "3dNwarpApply" input_spec = NwarpApplyInputSpec output_spec = AFNICommandOutputSpec class NwarpCatInputSpec(AFNICommandInputSpec): in_files = traits.List( traits.Either( File(), traits.Tuple(traits.Enum("IDENT", "INV", "SQRT", "SQRTINV"), File()) ), desc="list of tuples of 3D warps and associated functions", mandatory=True, argstr="%s", position=-1, ) space = traits.String( desc="string to attach to the output dataset as its atlas space " "marker.", argstr="-space %s", ) inv_warp = traits.Bool(desc="invert the final warp before output", argstr="-iwarp") interp = traits.Enum( "wsinc5", "linear", "quintic", desc="specify a different interpolation method than might " "be used for the warp", argstr="-interp %s", usedefault=True, ) expad = traits.Int( desc="Pad the nonlinear warps by the given number of voxels voxels in " "all directions. The warp displacements are extended by linear " "extrapolation from the faces of the input grid..", argstr="-expad %d", ) out_file = File( name_template="%s_NwarpCat", desc="output image file name", argstr="-prefix %s", name_source="in_files", ) verb = traits.Bool(desc="be verbose", argstr="-verb") class NwarpCat(AFNICommand): """Catenates (composes) 3D warps defined on a grid, OR via a matrix. .. note:: * All transformations are from DICOM xyz (in mm) to DICOM xyz. * Matrix warps are in files that end in '.1D' or in '.txt'. A matrix warp file should have 12 numbers in it, as output (for example), by '3dAllineate -1Dmatrix_save'. 
* Nonlinear warps are in dataset files (AFNI .HEAD/.BRIK or NIfTI .nii) with 3 sub-bricks giving the DICOM order xyz grid displacements in mm. * If all the input warps are matrices, then the output is a matrix and will be written to the file 'prefix.aff12.1D'. Unless the prefix already contains the string '.1D', in which case the filename is just the prefix. * If 'prefix' is just 'stdout', then the output matrix is written to standard output. In any of these cases, the output format is 12 numbers in one row. * If any of the input warps are datasets, they must all be defined on the same 3D grid! And of course, then the output will be a dataset on the same grid. However, you can expand the grid using the '-expad' option. * The order of operations in the final (output) warp is, for the case of 3 input warps: OUTPUT(x) = warp3( warp2( warp1(x) ) ) That is, warp1 is applied first, then warp2, et cetera. The 3D x coordinates are taken from each grid location in the first dataset defined on a grid. For complete details, see the `3dNwarpCat Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> nwarpcat = afni.NwarpCat() >>> nwarpcat.inputs.in_files = ['Q25_warp+tlrc.HEAD', ('IDENT', 'structural.nii')] >>> nwarpcat.inputs.out_file = 'Fred_total_WARP' >>> nwarpcat.cmdline "3dNwarpCat -interp wsinc5 -prefix Fred_total_WARP Q25_warp+tlrc.HEAD 'IDENT(structural.nii)'" >>> res = nwarpcat.run() # doctest: +SKIP """ _cmd = "3dNwarpCat" input_spec = NwarpCatInputSpec output_spec = AFNICommandOutputSpec def _format_arg(self, name, spec, value): if name == "in_files": return spec.argstr % ( " ".join( [ "'" + v[0] + "(" + v[1] + ")'" if isinstance(v, tuple) else v for v in value ] ) ) return super(NwarpCat, self)._format_arg(name, spec, value) def _gen_filename(self, name): if name == "out_file": return self._gen_fname(self.inputs.in_files[0][0], suffix="_NwarpCat") def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_file): outputs["out_file"] = os.path.abspath(self.inputs.out_file) else: outputs["out_file"] = os.path.abspath( self._gen_fname( self.inputs.in_files[0], suffix="_NwarpCat+tlrc", ext=".HEAD" ) ) return outputs class OneDToolPyInputSpec(AFNIPythonCommandInputSpec): in_file = File( desc="input file to OneDTool", argstr="-infile %s", mandatory=True, exists=True ) set_nruns = traits.Int( desc="treat the input data as if it has nruns", argstr="-set_nruns %d" ) derivative = traits.Bool( desc="take the temporal derivative of each vector (done as first backward difference)", argstr="-derivative", ) demean = traits.Bool( desc="demean each run (new mean of each run = 0.0)", argstr="-demean" ) out_file = File( desc="write the current 1D data to FILE", argstr="-write %s", xor=["show_cormat_warnings"], ) show_censor_count = traits.Bool( desc="display the total number of censored TRs Note : if input is a valid xmat.1D dataset, " "then the count will come from the header. 
Otherwise the input is assumed to be a binary censor" "file, and zeros are simply counted.", argstr="-show_censor_count", ) censor_motion = traits.Tuple( (traits.Float(), File()), desc="Tuple of motion limit and outfile prefix. need to also set set_nruns -r set_run_lengths", argstr="-censor_motion %f %s", ) censor_prev_TR = traits.Bool( desc="for each censored TR, also censor previous", argstr="-censor_prev_TR" ) show_trs_uncensored = traits.Enum( "comma", "space", "encoded", "verbose", desc="display a list of TRs which were not censored in the specified style", argstr="-show_trs_uncensored %s", ) show_cormat_warnings = File( desc="Write cormat warnings to a file", argstr="-show_cormat_warnings |& tee %s", position=-1, xor=["out_file"], ) show_indices_interest = traits.Bool( desc="display column indices for regs of interest", argstr="-show_indices_interest", ) show_trs_run = traits.Int( desc="restrict -show_trs_[un]censored to the given 1-based run", argstr="-show_trs_run %d", ) class OneDToolPyOutputSpec(AFNICommandOutputSpec): out_file = File(desc="output of 1D_tool.py") class OneDToolPy(AFNIPythonCommand): """This program is meant to read/manipulate/write/diagnose 1D datasets. Input can be specified using AFNI sub-brick[]/time{} selectors. 
>>> from nipype.interfaces import afni >>> odt = afni.OneDToolPy() >>> odt.inputs.in_file = 'f1.1D' >>> odt.inputs.set_nruns = 3 >>> odt.inputs.demean = True >>> odt.inputs.out_file = 'motion_dmean.1D' >>> odt.cmdline # doctest: +ELLIPSIS 'python2 ...1d_tool.py -demean -infile f1.1D -write motion_dmean.1D -set_nruns 3' >>> res = odt.run() # doctest: +SKIP""" _cmd = "1d_tool.py" input_spec = OneDToolPyInputSpec output_spec = OneDToolPyOutputSpec def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_file): outputs["out_file"] = os.path.join(os.getcwd(), self.inputs.out_file) if isdefined(self.inputs.show_cormat_warnings): outputs["out_file"] = os.path.join( os.getcwd(), self.inputs.show_cormat_warnings ) if isdefined(self.inputs.censor_motion): outputs["out_file"] = os.path.join( os.getcwd(), self.inputs.censor_motion[1] + "_censor.1D" ) return outputs class RefitInputSpec(CommandLineInputSpec): in_file = File( desc="input file to 3drefit", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=True, ) deoblique = traits.Bool( desc="replace current transformation matrix with cardinal matrix", argstr="-deoblique", ) xorigin = Str(desc="x distance for edge voxel offset", argstr="-xorigin %s") yorigin = Str(desc="y distance for edge voxel offset", argstr="-yorigin %s") zorigin = Str(desc="z distance for edge voxel offset", argstr="-zorigin %s") duporigin_file = File( argstr="-duporigin %s", exists=True, desc="Copies the xorigin, yorigin, and zorigin values from the header " "of the given dataset", ) xdel = traits.Float(desc="new x voxel dimension in mm", argstr="-xdel %f") ydel = traits.Float(desc="new y voxel dimension in mm", argstr="-ydel %f") zdel = traits.Float(desc="new z voxel dimension in mm", argstr="-zdel %f") xyzscale = traits.Float( desc="Scale the size of the dataset voxels by the given factor", argstr="-xyzscale %f", ) space = traits.Enum( "TLRC", "MNI", "ORIG", argstr="-space %s", desc="Associates the dataset 
with a specific template type, e.g. " "TLRC, MNI, ORIG", ) atrcopy = traits.Tuple( File(exists=True), traits.Str(), argstr="-atrcopy %s %s", desc="Copy AFNI header attribute from the given file into the header " "of the dataset(s) being modified. For more information on AFNI " "header attributes, see documentation file README.attributes. " "More than one '-atrcopy' option can be used. For AFNI " "advanced users only. Do NOT use -atrcopy or -atrstring with " "other modification options. See also -copyaux.", ) atrstring = traits.Tuple( traits.Str(), traits.Str(), argstr="-atrstring %s %s", desc="Copy the last given string into the dataset(s) being modified, " "giving it the attribute name given by the last string." "To be safe, the last string should be in quotes.", ) atrfloat = traits.Tuple( traits.Str(), traits.Str(), argstr="-atrfloat %s %s", desc="Create or modify floating point attributes. " "The input values may be specified as a single string in quotes " "or as a 1D filename or string, example " "'1 0.2 0 0 -0.2 1 0 0 0 0 1 0' or " "flipZ.1D or '1D:1,0.2,2@0,-0.2,1,2@0,2@0,1,0'", ) atrint = traits.Tuple( traits.Str(), traits.Str(), argstr="-atrint %s %s", desc="Create or modify integer attributes. " "The input values may be specified as a single string in quotes " "or as a 1D filename or string, example " "'1 0 0 0 0 1 0 0 0 0 1 0' or " "flipZ.1D or '1D:1,0,2@0,-0,1,2@0,2@0,1,0'", ) saveatr = traits.Bool( argstr="-saveatr", desc="(default) Copy the attributes that are known to AFNI into " "the dset->dblk structure thereby forcing changes to known " "attributes to be present in the output. This option only makes " "sense with -atrcopy.", ) nosaveatr = traits.Bool(argstr="-nosaveatr", desc="Opposite of -saveatr") class Refit(AFNICommandBase): """Changes some of the information inside a 3D dataset's header For complete details, see the `3drefit Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> refit = afni.Refit() >>> refit.inputs.in_file = 'structural.nii' >>> refit.inputs.deoblique = True >>> refit.cmdline '3drefit -deoblique structural.nii' >>> res = refit.run() # doctest: +SKIP >>> refit_2 = afni.Refit() >>> refit_2.inputs.in_file = 'structural.nii' >>> refit_2.inputs.atrfloat = ("IJK_TO_DICOM_REAL", "'1 0.2 0 0 -0.2 1 0 0 0 0 1 0'") >>> refit_2.cmdline "3drefit -atrfloat IJK_TO_DICOM_REAL '1 0.2 0 0 -0.2 1 0 0 0 0 1 0' structural.nii" >>> res = refit_2.run() # doctest: +SKIP """ _cmd = "3drefit" input_spec = RefitInputSpec output_spec = AFNICommandOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self.inputs.in_file) return outputs class ReHoInputSpec(CommandLineInputSpec): in_file = File( desc="input dataset", argstr="-inset %s", position=1, mandatory=True, exists=True, ) out_file = File( desc="Output dataset.", argstr="-prefix %s", name_source="in_file", name_template="%s_reho", keep_extension=True, position=0, ) chi_sq = traits.Bool( argstr="-chi_sq", desc="Output the Friedman chi-squared value in addition to the " "Kendall's W. This option is currently compatible only with " "the AFNI (BRIK/HEAD) output type; the chi-squared value will " "be the second sub-brick of the output dataset.", ) mask_file = File( desc="Mask within which ReHo should be calculated voxelwise", argstr="-mask %s" ) neighborhood = traits.Enum( "faces", "edges", "vertices", xor=["sphere", "ellipsoid"], argstr="-nneigh %s", desc=""" voxels in neighborhood. 
can be: ``faces`` (for voxel and 6 facewise neighbors, only), ``edges`` (for voxel and 18 face- and edge-wise neighbors), ``vertices`` (for voxel and 26 face-, edge-, and node-wise neighbors).""", ) sphere = traits.Float( argstr="-neigh_RAD %s", xor=["neighborhood", "ellipsoid"], desc=r"""\ For additional voxelwise neighborhood control, the radius R of a desired neighborhood can be put in; R is a floating point number, and must be >1. Examples of the numbers of voxels in a given radius are as follows (you can roughly approximate with the ol' :math:`4\pi\,R^3/3` thing): * R=2.0 -> V=33 * R=2.3 -> V=57, * R=2.9 -> V=93, * R=3.1 -> V=123, * R=3.9 -> V=251, * R=4.5 -> V=389, * R=6.1 -> V=949, but you can choose most any value.""", ) ellipsoid = traits.Tuple( traits.Float, traits.Float, traits.Float, xor=["sphere", "neighborhood"], argstr="-neigh_X %s -neigh_Y %s -neigh_Z %s", desc=r"""\ Tuple indicating the x, y, and z radius of an ellipsoid defining the neighbourhood of each voxel. The 'hood is then made according to the following relation: :math:`(i/A)^2 + (j/B)^2 + (k/C)^2 \le 1.` which will have approx. :math:`V=4 \pi \, A B C/3`. The impetus for this freedom was for use with data having anisotropic voxel edge lengths.""", ) label_set = File( exists=True, argstr="-in_rois %s", desc="a set of ROIs, each labelled with distinct " "integers. ReHo will then be calculated per ROI.", ) overwrite = traits.Bool( desc="overwrite output file if it already exists", argstr="-overwrite" ) class ReHoOutputSpec(TraitedSpec): out_file = File(exists=True, desc="Voxelwise regional homogeneity map") out_vals = File(desc="Table of labelwise regional homogenity values") class ReHo(AFNICommandBase): """Compute regional homogenity for a given neighbourhood.l, based on a local neighborhood of that voxel. For complete details, see the `3dReHo Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> reho = afni.ReHo() >>> reho.inputs.in_file = 'functional.nii' >>> reho.inputs.out_file = 'reho.nii.gz' >>> reho.inputs.neighborhood = 'vertices' >>> reho.cmdline '3dReHo -prefix reho.nii.gz -inset functional.nii -nneigh 27' >>> res = reho.run() # doctest: +SKIP """ _cmd = "3dReHo" input_spec = ReHoInputSpec output_spec = ReHoOutputSpec def _list_outputs(self): outputs = super(ReHo, self)._list_outputs() if self.inputs.label_set: outputs["out_vals"] = outputs["out_file"] + "_ROI_reho.vals" return outputs def _format_arg(self, name, spec, value): _neigh_dict = {"faces": 7, "edges": 19, "vertices": 27} if name == "neighborhood": value = _neigh_dict[value] return super(ReHo, self)._format_arg(name, spec, value) class ResampleInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dresample", argstr="-inset %s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_resample", desc="output image file name", argstr="-prefix %s", name_source="in_file", ) orientation = Str(desc="new orientation code", argstr="-orient %s") resample_mode = traits.Enum( "NN", "Li", "Cu", "Bk", argstr="-rmode %s", desc='resampling method from set {"NN", "Li", "Cu", "Bk"}. These are ' 'for "Nearest Neighbor", "Linear", "Cubic" and "Blocky"' "interpolation, respectively. Default is NN.", ) voxel_size = traits.Tuple( *[traits.Float()] * 3, argstr="-dxyz %f %f %f", desc="resample to new dx, dy and dz" ) master = File(argstr="-master %s", desc="align dataset grid to a reference file") class Resample(AFNICommand): """Resample or reorient an image using AFNI 3dresample command For complete details, see the `3dresample Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> resample = afni.Resample() >>> resample.inputs.in_file = 'functional.nii' >>> resample.inputs.orientation= 'RPI' >>> resample.inputs.outputtype = 'NIFTI' >>> resample.cmdline '3dresample -orient RPI -prefix functional_resample.nii -inset functional.nii' >>> res = resample.run() # doctest: +SKIP """ _cmd = "3dresample" input_spec = ResampleInputSpec output_spec = AFNICommandOutputSpec class TCatInputSpec(AFNICommandInputSpec): in_files = InputMultiPath( File(exists=True), desc="input file to 3dTcat", argstr=" %s", position=-1, mandatory=True, copyfile=False, ) out_file = File( name_template="%s_tcat", desc="output image file name", argstr="-prefix %s", name_source="in_files", ) rlt = traits.Enum( "", "+", "++", argstr="-rlt%s", desc="Remove linear trends in each voxel time series loaded from each " "input dataset, SEPARATELY. Option -rlt removes the least squares " "fit of 'a+b*t' to each voxel time series. Option -rlt+ adds " "dataset mean back in. Option -rlt++ adds overall mean of all " "dataset timeseries back in.", position=1, ) verbose = traits.Bool( desc="Print out some verbose output as the program", argstr="-verb" ) class TCat(AFNICommand): """Concatenate sub-bricks from input datasets into one big 3D+time dataset. TODO Replace InputMultiPath in_files with Traits.List, if possible. Current version adds extra whitespace. For complete details, see the `3dTcat Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> tcat = afni.TCat() >>> tcat.inputs.in_files = ['functional.nii', 'functional2.nii'] >>> tcat.inputs.out_file= 'functional_tcat.nii' >>> tcat.inputs.rlt = '+' >>> tcat.cmdline '3dTcat -rlt+ -prefix functional_tcat.nii functional.nii functional2.nii' >>> res = tcat.run() # doctest: +SKIP """ _cmd = "3dTcat" input_spec = TCatInputSpec output_spec = AFNICommandOutputSpec class TCatSBInputSpec(AFNICommandInputSpec): in_files = traits.List( traits.Tuple(File(exists=True), Str()), desc="List of tuples of file names and subbrick selectors as strings." "Don't forget to protect the single quotes in the subbrick selector" "so the contents are protected from the command line interpreter.", argstr="%s%s ...", position=-1, mandatory=True, copyfile=False, ) out_file = File(desc="output image file name", argstr="-prefix %s", genfile=True) rlt = traits.Enum( "", "+", "++", argstr="-rlt%s", desc="Remove linear trends in each voxel time series loaded from each " "input dataset, SEPARATELY. Option -rlt removes the least squares " "fit of 'a+b*t' to each voxel time series. Option -rlt+ adds " "dataset mean back in. Option -rlt++ adds overall mean of all " "dataset timeseries back in.", position=1, ) class TCatSubBrick(AFNICommand): """Hopefully a temporary function to allow sub-brick selection until afni file managment is improved. For complete details, see the `3dTcat Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> tcsb = afni.TCatSubBrick() >>> tcsb.inputs.in_files = [('functional.nii', "'{2..$}'"), ('functional2.nii', "'{2..$}'")] >>> tcsb.inputs.out_file= 'functional_tcat.nii' >>> tcsb.inputs.rlt = '+' >>> tcsb.cmdline "3dTcat -rlt+ -prefix functional_tcat.nii functional.nii'{2..$}' functional2.nii'{2..$}' " >>> res = tcsb.run() # doctest: +SKIP """ _cmd = "3dTcat" input_spec = TCatSBInputSpec output_spec = AFNICommandOutputSpec def _gen_filename(self, name): if name == "out_file": return self._gen_fname(self.inputs.in_files[0][0], suffix="_tcat") class TStatInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dTstat", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_tstat", desc="output image file name", argstr="-prefix %s", name_source="in_file", ) mask = File(desc="mask file", argstr="-mask %s", exists=True) options = Str(desc="selected statistical output", argstr="%s") class TStat(AFNICommand): """Compute voxel-wise statistics using AFNI 3dTstat command For complete details, see the `3dTstat Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> tstat = afni.TStat() >>> tstat.inputs.in_file = 'functional.nii' >>> tstat.inputs.args = '-mean' >>> tstat.inputs.out_file = 'stats' >>> tstat.cmdline '3dTstat -mean -prefix stats functional.nii' >>> res = tstat.run() # doctest: +SKIP """ _cmd = "3dTstat" input_spec = TStatInputSpec output_spec = AFNICommandOutputSpec class To3DInputSpec(AFNICommandInputSpec): out_file = File( name_template="%s", desc="output image file name", argstr="-prefix %s", name_source=["in_folder"], ) in_folder = Directory( desc="folder with DICOM images to convert", argstr="%s/*.dcm", position=-1, mandatory=True, exists=True, ) filetype = traits.Enum( "spgr", "fse", "epan", "anat", "ct", "spct", "pet", "mra", "bmap", "diff", "omri", "abuc", "fim", "fith", "fico", "fitt", "fift", "fizt", "fict", "fibt", "fibn", "figt", "fipt", "fbuc", argstr="-%s", desc="type of datafile being converted", ) skipoutliers = traits.Bool(desc="skip the outliers check", argstr="-skip_outliers") assumemosaic = traits.Bool( desc="assume that Siemens image is mosaic", argstr="-assume_dicom_mosaic" ) datatype = traits.Enum( "short", "float", "byte", "complex", desc="set output file datatype", argstr="-datum %s", ) funcparams = Str(desc="parameters for functional data", argstr="-time:zt %s alt+z2") class To3D(AFNICommand): """Create a 3D dataset from 2D image files using AFNI to3d command For complete details, see the `to3d Documentation `_ Examples -------- >>> from nipype.interfaces import afni >>> to3d = afni.To3D() >>> to3d.inputs.datatype = 'float' >>> to3d.inputs.in_folder = '.' 
>>> to3d.inputs.out_file = 'dicomdir.nii' >>> to3d.inputs.filetype = 'anat' >>> to3d.cmdline # doctest: +ELLIPSIS 'to3d -datum float -anat -prefix dicomdir.nii ./*.dcm' >>> res = to3d.run() # doctest: +SKIP """ _cmd = "to3d" input_spec = To3DInputSpec output_spec = AFNICommandOutputSpec class UndumpInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dUndump, whose geometry will determine" "the geometry of the output", argstr="-master %s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( desc="output image file name", argstr="-prefix %s", name_source="in_file" ) mask_file = File( desc="mask image file name. Only voxels that are nonzero in the mask " "can be set.", argstr="-mask %s", ) datatype = traits.Enum( "short", "float", "byte", desc="set output file datatype", argstr="-datum %s" ) default_value = traits.Float( desc="default value stored in each input voxel that does not have " "a value supplied in the input file", argstr="-dval %f", ) fill_value = traits.Float( desc="value, used for each voxel in the output dataset that is NOT " "listed in the input file", argstr="-fval %f", ) coordinates_specification = traits.Enum( "ijk", "xyz", desc="Coordinates in the input file as index triples (i, j, k) " "or spatial coordinates (x, y, z) in mm", argstr="-%s", ) srad = traits.Float( desc="radius in mm of the sphere that will be filled about each input " "(x,y,z) or (i,j,k) voxel. If the radius is not given, or is 0, " "then each input data line sets the value in only one voxel.", argstr="-srad %f", ) orient = traits.Tuple( traits.Enum("R", "L"), traits.Enum("A", "P"), traits.Enum("I", "S"), desc="Specifies the coordinate order used by -xyz. " "The code must be 3 letters, one each from the pairs " "{R,L} {A,P} {I,S}. 
The first letter gives the " "orientation of the x-axis, the second the orientation " "of the y-axis, the third the z-axis: " "R = right-to-left L = left-to-right " "A = anterior-to-posterior P = posterior-to-anterior " "I = inferior-to-superior S = superior-to-inferior " "If -orient isn't used, then the coordinate order of the " "-master (in_file) dataset is used to interpret (x,y,z) inputs.", argstr="-orient %s", ) head_only = traits.Bool( desc="create only the .HEAD file which gets exploited by " "the AFNI matlab library function New_HEAD.m", argstr="-head_only", ) class UndumpOutputSpec(TraitedSpec): out_file = File(desc="assembled file", exists=True) class Undump(AFNICommand): """3dUndump - Assembles a 3D dataset from an ASCII list of coordinates and (optionally) values. The input file(s) are ASCII files, with one voxel specification per line. A voxel specification is 3 numbers (-ijk or -xyz coordinates), with an optional 4th number giving the voxel value. For example: 1 2 3 3 2 1 5 5.3 6.2 3.7 // this line illustrates a comment The first line puts a voxel (with value given by '-dval') at point (1,2,3). The second line puts a voxel (with value 5) at point (3,2,1). The third line puts a voxel (with value given by '-dval') at point (5.3,6.2,3.7). If -ijk is in effect, and fractional coordinates are given, they will be rounded to the nearest integers; for example, the third line would be equivalent to (i,j,k) = (5,6,4). For complete details, see the `3dUndump Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> unndump = afni.Undump() >>> unndump.inputs.in_file = 'structural.nii' >>> unndump.inputs.out_file = 'structural_undumped.nii' >>> unndump.cmdline '3dUndump -prefix structural_undumped.nii -master structural.nii' >>> res = unndump.run() # doctest: +SKIP """ _cmd = "3dUndump" input_spec = UndumpInputSpec output_spec = UndumpOutputSpec class UnifizeInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dUnifize", argstr="-input %s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_unifized", desc="output image file name", argstr="-prefix %s", name_source="in_file", ) t2 = traits.Bool( desc="Treat the input as if it were T2-weighted, rather than " "T1-weighted. This processing is done simply by inverting " "the image contrast, processing it as if that result were " "T1-weighted, and then re-inverting the results " "counts of voxel overlap, i.e., each voxel will contain the " "number of masks that it is set in.", argstr="-T2", ) gm = traits.Bool( desc="Also scale to unifize 'gray matter' = lower intensity voxels " "(to aid in registering images from different scanners).", argstr="-GM", ) urad = traits.Float( desc="Sets the radius (in voxels) of the ball used for the sneaky " "trick. Default value is 18.3, and should be changed " "proportionally if the dataset voxel size differs significantly " "from 1 mm.", argstr="-Urad %s", ) scale_file = File( desc="output file name to save the scale factor used at each voxel ", argstr="-ssave %s", ) no_duplo = traits.Bool( desc="Do NOT use the 'duplo down' step; this can be useful for " "lower resolution datasets.", argstr="-noduplo", ) epi = traits.Bool( desc="Assume the input dataset is a T2 (or T2\\*) weighted EPI time " "series. After computing the scaling, apply it to ALL volumes " "(TRs) in the input dataset. That is, a given voxel will be " "scaled by the same factor at each TR. 
" "This option also implies '-noduplo' and '-T2'." "This option turns off '-GM' if you turned it on.", argstr="-EPI", requires=["no_duplo", "t2"], xor=["gm"], ) rbt = traits.Tuple( traits.Float(), traits.Float(), traits.Float(), desc="Option for AFNI experts only." "Specify the 3 parameters for the algorithm:\n" "R = radius; same as given by option '-Urad', [default=18.3]\n" "b = bottom percentile of normalizing data range, [default=70.0]\n" "r = top percentile of normalizing data range, [default=80.0]\n", argstr="-rbt %f %f %f", ) t2_up = traits.Float( desc="Option for AFNI experts only." "Set the upper percentile point used for T2-T1 inversion. " "Allowed to be anything between 90 and 100 (inclusive), with " "default to 98.5 (for no good reason).", argstr="-T2up %f", ) cl_frac = traits.Float( desc="Option for AFNI experts only." "Set the automask 'clip level fraction'. Must be between " "0.1 and 0.9. A small fraction means to make the initial " "threshold for clipping (a la 3dClipLevel) smaller, which " "will tend to make the mask larger. [default=0.1]", argstr="-clfrac %f", ) quiet = traits.Bool(desc="Don't print the progress messages.", argstr="-quiet") class UnifizeOutputSpec(TraitedSpec): scale_file = File(desc="scale factor file") out_file = File(desc="unifized file", exists=True) class Unifize(AFNICommand): """3dUnifize - for uniformizing image intensity * The input dataset is supposed to be a T1-weighted volume, possibly already skull-stripped (e.g., via 3dSkullStrip). However, this program can be a useful step to take BEFORE 3dSkullStrip, since the latter program can fail if the input volume is strongly shaded -- 3dUnifize will (mostly) remove such shading artifacts. * The output dataset has the white matter (WM) intensity approximately uniformized across space, and scaled to peak at about 1000. * The output dataset is always stored in float format! * If the input dataset has more than 1 sub-brick, only sub-brick #0 will be processed! 
* Want to correct EPI datasets for nonuniformity? You can try the new and experimental [Mar 2017] '-EPI' option. * The principal motive for this program is for use in an image registration script, and it may or may not be useful otherwise. * This program replaces the older (and very different) 3dUniformize, which is no longer maintained and may sublimate at any moment. (In other words, we do not recommend the use of 3dUniformize.) For complete details, see the `3dUnifize Documentation. `_ Examples -------- >>> from nipype.interfaces import afni >>> unifize = afni.Unifize() >>> unifize.inputs.in_file = 'structural.nii' >>> unifize.inputs.out_file = 'structural_unifized.nii' >>> unifize.cmdline '3dUnifize -prefix structural_unifized.nii -input structural.nii' >>> res = unifize.run() # doctest: +SKIP """ _cmd = "3dUnifize" input_spec = UnifizeInputSpec output_spec = UnifizeOutputSpec class ZCutUpInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3dZcutup", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_zcutup", desc="output image file name", argstr="-prefix %s", name_source="in_file", ) keep = Str(desc="slice range to keep in output", argstr="-keep %s") class ZCutUp(AFNICommand): """Cut z-slices from a volume using AFNI 3dZcutup command For complete details, see the `3dZcutup Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> zcutup = afni.ZCutUp() >>> zcutup.inputs.in_file = 'functional.nii' >>> zcutup.inputs.out_file = 'functional_zcutup.nii' >>> zcutup.inputs.keep= '0 10' >>> zcutup.cmdline '3dZcutup -keep 0 10 -prefix functional_zcutup.nii functional.nii' >>> res = zcutup.run() # doctest: +SKIP """ _cmd = "3dZcutup" input_spec = ZCutUpInputSpec output_spec = AFNICommandOutputSpec class GCORInputSpec(CommandLineInputSpec): in_file = File( desc="input dataset to compute the GCOR over", argstr="-input %s", position=-1, mandatory=True, exists=True, copyfile=False, ) mask = File( desc="mask dataset, for restricting the computation", argstr="-mask %s", exists=True, copyfile=False, ) nfirst = traits.Int( 0, argstr="-nfirst %d", desc="specify number of initial TRs to ignore" ) no_demean = traits.Bool( False, argstr="-no_demean", desc="do not (need to) demean as first step" ) class GCOROutputSpec(TraitedSpec): out = traits.Float(desc="global correlation value") class GCOR(CommandLine): """ Computes the average correlation between every voxel and ever other voxel, over any give mask. For complete details, see the `@compute_gcor Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> gcor = afni.GCOR() >>> gcor.inputs.in_file = 'structural.nii' >>> gcor.inputs.nfirst = 4 >>> gcor.cmdline '@compute_gcor -nfirst 4 -input structural.nii' >>> res = gcor.run() # doctest: +SKIP """ _cmd = "@compute_gcor" input_spec = GCORInputSpec output_spec = GCOROutputSpec def _run_interface(self, runtime): runtime = super(GCOR, self)._run_interface(runtime) gcor_line = [ line.strip() for line in runtime.stdout.split("\n") if line.strip().startswith("GCOR = ") ][-1] setattr(self, "_gcor", float(gcor_line[len("GCOR = ") :])) return runtime def _list_outputs(self): return {"out": getattr(self, "_gcor")} class AxializeInputSpec(AFNICommandInputSpec): in_file = File( desc="input file to 3daxialize", argstr="%s", position=-2, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="%s_axialize", desc="output image file name", argstr="-prefix %s", name_source="in_file", ) verb = traits.Bool(desc="Print out a progerss report", argstr="-verb") sagittal = traits.Bool( desc="Do sagittal slice order [-orient ASL]", argstr="-sagittal", xor=["coronal", "axial"], ) coronal = traits.Bool( desc="Do coronal slice order [-orient RSA]", argstr="-coronal", xor=["sagittal", "axial"], ) axial = traits.Bool( desc="Do axial slice order [-orient RAI]" "This is the default AFNI axial order, and" "is the one currently required by the" "volume rendering plugin; this is also" "the default orientation output by this" "program (hence the program's name).", argstr="-axial", xor=["coronal", "sagittal"], ) orientation = Str(desc="new orientation code", argstr="-orient %s") class Axialize(AFNICommand): """Read in a dataset and write it out as a new dataset with the data brick oriented as axial slices. For complete details, see the `3dcopy Documentation. 
`__ Examples -------- >>> from nipype.interfaces import afni >>> axial3d = afni.Axialize() >>> axial3d.inputs.in_file = 'functional.nii' >>> axial3d.inputs.out_file = 'axialized.nii' >>> axial3d.cmdline '3daxialize -prefix axialized.nii functional.nii' >>> res = axial3d.run() # doctest: +SKIP """ _cmd = "3daxialize" input_spec = AxializeInputSpec output_spec = AFNICommandOutputSpec class ZcatInputSpec(AFNICommandInputSpec): in_files = InputMultiPath( File(desc="input files to 3dZcat", exists=True), argstr="%s", position=-1, mandatory=True, copyfile=False, ) out_file = File( name_template="%s_zcat", desc="output dataset prefix name (default 'zcat')", argstr="-prefix %s", name_source="in_files", ) datum = traits.Enum( "byte", "short", "float", argstr="-datum %s", desc="specify data type for output. Valid types are 'byte', " "'short' and 'float'.", ) verb = traits.Bool( desc="print out some verbositiness as the program proceeds.", argstr="-verb" ) fscale = traits.Bool( desc="Force scaling of the output to the maximum integer " "range. This only has effect if the output datum is " "byte or short (either forced or defaulted). This " "option is sometimes necessary to eliminate " "unpleasant truncation artifacts.", argstr="-fscale", xor=["nscale"], ) nscale = traits.Bool( desc="Don't do any scaling on output to byte or short " "datasets. This may be especially useful when " "operating on mask datasets whose output values " "are only 0's and 1's.", argstr="-nscale", xor=["fscale"], ) class Zcat(AFNICommand): """Copies an image of one type to an image of the same or different type using 3dZcat command For complete details, see the `3dZcat Documentation. 
`_ Examples -------- >>> from nipype.interfaces import afni >>> zcat = afni.Zcat() >>> zcat.inputs.in_files = ['functional2.nii', 'functional3.nii'] >>> zcat.inputs.out_file = 'cat_functional.nii' >>> zcat.cmdline '3dZcat -prefix cat_functional.nii functional2.nii functional3.nii' >>> res = zcat.run() # doctest: +SKIP """ _cmd = "3dZcat" input_spec = ZcatInputSpec output_spec = AFNICommandOutputSpec class ZeropadInputSpec(AFNICommandInputSpec): in_files = File( desc="input dataset", argstr="%s", position=-1, mandatory=True, exists=True, copyfile=False, ) out_file = File( name_template="zeropad", desc="output dataset prefix name (default 'zeropad')", argstr="-prefix %s", ) I = traits.Int( desc="adds 'n' planes of zero at the Inferior edge", argstr="-I %i", xor=["master"], ) S = traits.Int( desc="adds 'n' planes of zero at the Superior edge", argstr="-S %i", xor=["master"], ) A = traits.Int( desc="adds 'n' planes of zero at the Anterior edge", argstr="-A %i", xor=["master"], ) P = traits.Int( desc="adds 'n' planes of zero at the Posterior edge", argstr="-P %i", xor=["master"], ) L = traits.Int( desc="adds 'n' planes of zero at the Left edge", argstr="-L %i", xor=["master"] ) R = traits.Int( desc="adds 'n' planes of zero at the Right edge", argstr="-R %i", xor=["master"] ) z = traits.Int( desc="adds 'n' planes of zero on EACH of the " "dataset z-axis (slice-direction) faces", argstr="-z %i", xor=["master"], ) RL = traits.Int( desc="specify that planes should be added or cut " "symmetrically to make the resulting volume have" "N slices in the right-left direction", argstr="-RL %i", xor=["master"], ) AP = traits.Int( desc="specify that planes should be added or cut " "symmetrically to make the resulting volume have" "N slices in the anterior-posterior direction", argstr="-AP %i", xor=["master"], ) IS = traits.Int( desc="specify that planes should be added or cut " "symmetrically to make the resulting volume have" "N slices in the inferior-superior direction", 
argstr="-IS %i", xor=["master"], ) mm = traits.Bool( desc="pad counts 'n' are in mm instead of slices, " "where each 'n' is an integer and at least 'n' " "mm of slices will be added/removed; e.g., n = 3 " "and slice thickness = 2.5 mm ==> 2 slices added", argstr="-mm", xor=["master"], ) master = File( desc="match the volume described in dataset " "'mset', where mset must have the same " "orientation and grid spacing as dataset to be " "padded. the goal of -master is to make the " "output dataset from 3dZeropad match the " "spatial 'extents' of mset by adding or " "subtracting slices as needed. You can't use " "-I,-S,..., or -mm with -master", argstr="-master %s", xor=["I", "S", "A", "P", "L", "R", "z", "RL", "AP", "IS", "mm"], ) class Zeropad(AFNICommand): """Adds planes of zeros to a dataset (i.e., pads it out). For complete details, see the `3dZeropad Documentation. `__ Examples -------- >>> from nipype.interfaces import afni >>> zeropad = afni.Zeropad() >>> zeropad.inputs.in_files = 'functional.nii' >>> zeropad.inputs.out_file = 'pad_functional.nii' >>> zeropad.inputs.I = 10 >>> zeropad.inputs.S = 10 >>> zeropad.inputs.A = 10 >>> zeropad.inputs.P = 10 >>> zeropad.inputs.R = 10 >>> zeropad.inputs.L = 10 >>> zeropad.cmdline '3dZeropad -A 10 -I 10 -L 10 -P 10 -R 10 -S 10 -prefix pad_functional.nii functional.nii' >>> res = zeropad.run() # doctest: +SKIP """ _cmd = "3dZeropad" input_spec = ZeropadInputSpec output_spec = AFNICommandOutputSpec nipype-1.7.0/nipype/interfaces/ants/000077500000000000000000000000001413403311400174115ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/ants/__init__.py000066400000000000000000000035171413403311400215300ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Top-level namespace for ants.""" # Registration programs from .registration import ( ANTS, CompositeTransformUtil, MeasureImageSimilarity, 
Registration, RegistrationSynQuick, ) # Resampling Programs from .resampling import ( ApplyTransforms, ApplyTransformsToPoints, WarpImageMultiTransform, WarpTimeSeriesImageMultiTransform, ) # Segmentation Programs from .segmentation import ( AntsJointFusion, Atropos, BrainExtraction, CorticalThickness, DenoiseImage, JointFusion, LaplacianThickness, N4BiasFieldCorrection, ) # Visualization Programs from .visualization import ConvertScalarImageToRGB, CreateTiledMosaic # Utility Programs from .utils import ( AffineInitializer, AI, AverageAffineTransform, AverageImages, ComposeMultiTransform, CreateJacobianDeterminantImage, ImageMath, LabelGeometry, MultiplyImages, ResampleImageBySpacing, ThresholdImage, ) __all__ = [ "AffineInitializer", "AI", "ANTS", "AntsJointFusion", "ApplyTransforms", "ApplyTransformsToPoints", "Atropos", "AverageAffineTransform", "AverageImages", "BrainExtraction", "ComposeMultiTransform", "CompositeTransformUtil", "ConvertScalarImageToRGB", "CorticalThickness", "CreateJacobianDeterminantImage", "CreateTiledMosaic", "DenoiseImage", "ImageMath", "JointFusion", "LabelGeometry", "LaplacianThickness", "MeasureImageSimilarity", "MultiplyImages", "N4BiasFieldCorrection", "Registration", "RegistrationSynQuick", "ResampleImageBySpacing", "ThresholdImage", "WarpImageMultiTransform", "WarpTimeSeriesImageMultiTransform", ] nipype-1.7.0/nipype/interfaces/ants/base.py000066400000000000000000000107361413403311400207040ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The ants module provides basic functions for interfacing with ANTS tools.""" import os # Local imports from ... import logging, LooseVersion from ..base import CommandLine, CommandLineInputSpec, traits, isdefined, PackageInfo iflogger = logging.getLogger("nipype.interface") # -Using -1 gives primary responsibilty to ITKv4 to do the correct # thread limitings. 
# -Using 1 takes a very conservative approach to avoid overloading # the computer (when running MultiProc) by forcing everything to # single threaded. This can be a severe penalty for registration # performance. LOCAL_DEFAULT_NUMBER_OF_THREADS = 1 # -Using NSLOTS has the same behavior as ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS # as long as ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS is not set. Otherwise # ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS takes precidence. # This behavior states that you the user explicitly specifies # num_threads, then respect that no matter what SGE tries to limit. PREFERED_ITKv4_THREAD_LIMIT_VARIABLE = "NSLOTS" ALT_ITKv4_THREAD_LIMIT_VARIABLE = "ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS" class Info(PackageInfo): version_cmd = ( os.path.join(os.getenv("ANTSPATH", ""), "antsRegistration") + " --version" ) @staticmethod def parse_version(raw_info): for line in raw_info.splitlines(): if line.startswith("ANTs Version: "): v_string = line.split()[2] break else: return None # -githash may or may not be appended v_string = v_string.split("-")[0] # 2.2.0-equivalent version string if "post" in v_string and LooseVersion(v_string) >= LooseVersion( "2.1.0.post789" ): return "2.2.0" else: return ".".join(v_string.split(".")[:3]) class ANTSCommandInputSpec(CommandLineInputSpec): """Base Input Specification for all ANTS Commands""" num_threads = traits.Int( LOCAL_DEFAULT_NUMBER_OF_THREADS, usedefault=True, nohash=True, desc="Number of ITK threads to use", ) class ANTSCommand(CommandLine): """Base class for ANTS interfaces""" input_spec = ANTSCommandInputSpec _num_threads = LOCAL_DEFAULT_NUMBER_OF_THREADS def __init__(self, **inputs): super(ANTSCommand, self).__init__(**inputs) self.inputs.on_trait_change(self._num_threads_update, "num_threads") if not isdefined(self.inputs.num_threads): self.inputs.num_threads = self._num_threads else: self._num_threads_update() def _num_threads_update(self): self._num_threads = self.inputs.num_threads # ONLY SET THE 
ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS if requested # by the end user. The default setting did not allow for # overwriting the default values. # In ITKv4 (the version used for all ANTS programs), ITK respects # the SGE controlled $NSLOTS environmental variable. # If user specifies -1, then that indicates that the system # default behavior should be the one specified by ITKv4 rules # (i.e. respect SGE $NSLOTS or environmental variables of threads, or # user environmental settings) if self.inputs.num_threads == -1: if ALT_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ: del self.inputs.environ[ALT_ITKv4_THREAD_LIMIT_VARIABLE] if PREFERED_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ: del self.inputs.environ[PREFERED_ITKv4_THREAD_LIMIT_VARIABLE] else: self.inputs.environ.update( {PREFERED_ITKv4_THREAD_LIMIT_VARIABLE: "%s" % self.inputs.num_threads} ) @staticmethod def _format_xarray(val): """Convenience method for converting input arrays [1,2,3] to commandline format '1x2x3'""" return "x".join([str(x) for x in val]) @classmethod def set_default_num_threads(cls, num_threads): """Set the default number of threads for ITK calls This method is used to set the default number of ITK threads for all the ANTS interfaces. However, setting this will not update the output type for any existing instances. For these, assign the .inputs.num_threads """ cls._num_threads = num_threads @property def version(self): return Info.version() nipype-1.7.0/nipype/interfaces/ants/legacy.py000066400000000000000000000270721413403311400212370ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ANTS Legacy Interfaces These interfaces are for programs that have been deprecated by ANTs, but are preserved for backwards compatibility. 
""" from builtins import range import os from glob import glob from .base import ANTSCommand, ANTSCommandInputSpec from ..base import TraitedSpec, File, traits, isdefined, OutputMultiPath from ...utils.filemanip import split_filename class antsIntroductionInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, argstr="-d %d", usedefault=True, desc="image dimension (2 or 3)", position=1, ) reference_image = File( exists=True, argstr="-r %s", desc="template file to warp to", mandatory=True, copyfile=True, ) input_image = File( exists=True, argstr="-i %s", desc="input image to warp to template", mandatory=True, copyfile=False, ) force_proceed = traits.Bool( argstr="-f 1", desc=("force script to proceed even if headers " "may be incompatible"), ) inverse_warp_template_labels = traits.Bool( argstr="-l", desc=( "Applies inverse warp to the template labels " "to estimate label positions in target space (use " "for template-based segmentation)" ), ) max_iterations = traits.List( traits.Int, argstr="-m %s", sep="x", desc=( "maximum number of iterations (must be " "list of integers in the form [J,K,L...]: " "J = coarsest resolution iterations, K = " "middle resolution interations, L = fine " "resolution iterations" ), ) bias_field_correction = traits.Bool( argstr="-n 1", desc=("Applies bias field correction to moving " "image") ) similarity_metric = traits.Enum( "PR", "CC", "MI", "MSQ", argstr="-s %s", desc=( "Type of similartiy metric used for registration " "(CC = cross correlation, MI = mutual information, " "PR = probability mapping, MSQ = mean square difference)" ), ) transformation_model = traits.Enum( "GR", "EL", "SY", "S2", "EX", "DD", "RI", "RA", argstr="-t %s", usedefault=True, desc=( "Type of transofmration model used for registration " "(EL = elastic transformation model, SY = SyN with time, " "arbitrary number of time points, S2 = SyN with time " "optimized for 2 time points, GR = greedy SyN, EX = " "exponential, DD = diffeomorphic demons style 
exponential " "mapping, RI = purely rigid, RA = affine rigid" ), ) out_prefix = traits.Str( "ants_", argstr="-o %s", usedefault=True, desc=("Prefix that is prepended to all output " "files (default = ants_)"), ) quality_check = traits.Bool( argstr="-q 1", desc="Perform a quality check of the result" ) class antsIntroductionOutputSpec(TraitedSpec): affine_transformation = File(exists=True, desc="affine (prefix_Affine.txt)") warp_field = File(exists=True, desc="warp field (prefix_Warp.nii)") inverse_warp_field = File( exists=True, desc="inverse warp field (prefix_InverseWarp.nii)" ) input_file = File(exists=True, desc="input image (prefix_repaired.nii)") output_file = File(exists=True, desc="output image (prefix_deformed.nii)") class antsIntroduction(ANTSCommand): """Uses ANTS to generate matrices to warp data from one space to another. Examples -------- >>> from nipype.interfaces.ants.legacy import antsIntroduction >>> warp = antsIntroduction() >>> warp.inputs.reference_image = 'Template_6.nii' >>> warp.inputs.input_image = 'structural.nii' >>> warp.inputs.max_iterations = [30,90,20] >>> warp.cmdline 'antsIntroduction.sh -d 3 -i structural.nii -m 30x90x20 -o ants_ -r Template_6.nii -t GR' """ _cmd = "antsIntroduction.sh" input_spec = antsIntroductionInputSpec output_spec = antsIntroductionOutputSpec def _list_outputs(self): outputs = self._outputs().get() transmodel = self.inputs.transformation_model # When transform is set as 'RI'/'RA', wrap fields should not be expected # The default transformation is GR, which outputs the wrap fields if not isdefined(transmodel) or ( isdefined(transmodel) and transmodel not in ["RI", "RA"] ): outputs["warp_field"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "Warp.nii.gz" ) outputs["inverse_warp_field"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "InverseWarp.nii.gz" ) outputs["affine_transformation"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "Affine.txt" ) outputs["input_file"] = os.path.join( 
os.getcwd(), self.inputs.out_prefix + "repaired.nii.gz" ) outputs["output_file"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "deformed.nii.gz" ) return outputs # How do we make a pass through so that GenWarpFields is just an alias for antsIntroduction ? class GenWarpFields(antsIntroduction): pass class buildtemplateparallelInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, 4, argstr="-d %d", usedefault=True, desc="image dimension (2, 3 or 4)", position=1, ) out_prefix = traits.Str( "antsTMPL_", argstr="-o %s", usedefault=True, desc=("Prefix that is prepended to all output " "files (default = antsTMPL_)"), ) in_files = traits.List( File(exists=True), mandatory=True, desc="list of images to generate template from", argstr="%s", position=-1, ) parallelization = traits.Enum( 0, 1, 2, argstr="-c %d", usedefault=True, desc=( "control for parallel processing (0 = " "serial, 1 = use PBS, 2 = use PEXEC, 3 = " "use Apple XGrid" ), ) gradient_step_size = traits.Float( argstr="-g %f", desc=("smaller magnitude results in " "more cautious steps (default = " ".25)"), ) iteration_limit = traits.Int( 4, argstr="-i %d", usedefault=True, desc="iterations of template construction" ) num_cores = traits.Int( argstr="-j %d", requires=["parallelization"], desc=( "Requires parallelization = 2 (PEXEC). 
" "Sets number of cpu cores to use" ), ) max_iterations = traits.List( traits.Int, argstr="-m %s", sep="x", desc=( "maximum number of iterations (must be " "list of integers in the form [J,K,L...]: " "J = coarsest resolution iterations, K = " "middle resolution interations, L = fine " "resolution iterations" ), ) bias_field_correction = traits.Bool( argstr="-n 1", desc=("Applies bias field correction to moving " "image") ) rigid_body_registration = traits.Bool( argstr="-r 1", desc=( "registers inputs before creating template " "(useful if no initial template available)" ), ) similarity_metric = traits.Enum( "PR", "CC", "MI", "MSQ", argstr="-s %s", desc=( "Type of similartiy metric used for registration " "(CC = cross correlation, MI = mutual information, " "PR = probability mapping, MSQ = mean square difference)" ), ) transformation_model = traits.Enum( "GR", "EL", "SY", "S2", "EX", "DD", argstr="-t %s", usedefault=True, desc=( "Type of transofmration model used for registration " "(EL = elastic transformation model, SY = SyN with time, " "arbitrary number of time points, S2 = SyN with time " "optimized for 2 time points, GR = greedy SyN, EX = " "exponential, DD = diffeomorphic demons style exponential " "mapping" ), ) use_first_as_target = traits.Bool( desc=( "uses first volume as target of " "all inputs. When not used, an " "unbiased average image is used " "to start." ) ) class buildtemplateparallelOutputSpec(TraitedSpec): final_template_file = File(exists=True, desc="final ANTS template") template_files = OutputMultiPath( File(exists=True), desc="Templates from different stages of iteration" ) subject_outfiles = OutputMultiPath( File(exists=True), desc=( "Outputs for each input image. Includes warp " "field, inverse warp, Affine, original image " "(repaired) and warped image (deformed)" ), ) class buildtemplateparallel(ANTSCommand): """Generate a optimal average template .. 
warning:: This can take a VERY long time to complete Examples -------- >>> from nipype.interfaces.ants.legacy import buildtemplateparallel >>> tmpl = buildtemplateparallel() >>> tmpl.inputs.in_files = ['T1.nii', 'structural.nii'] >>> tmpl.inputs.max_iterations = [30, 90, 20] >>> tmpl.cmdline 'buildtemplateparallel.sh -d 3 -i 4 -m 30x90x20 -o antsTMPL_ -c 0 -t GR T1.nii structural.nii' """ _cmd = "buildtemplateparallel.sh" input_spec = buildtemplateparallelInputSpec output_spec = buildtemplateparallelOutputSpec def _format_arg(self, opt, spec, val): if opt == "num_cores": if self.inputs.parallelization == 2: return "-j " + str(val) else: return "" if opt == "in_files": if self.inputs.use_first_as_target: start = "-z " else: start = "" return start + " ".join(name for name in val) return super(buildtemplateparallel, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() outputs["template_files"] = [] for i in range(len(glob(os.path.realpath("*iteration*")))): temp = os.path.realpath( "%s_iteration_%d/%stemplate.nii.gz" % (self.inputs.transformation_model, i, self.inputs.out_prefix) ) os.rename( temp, os.path.realpath( "%s_iteration_%d/%stemplate_i%d.nii.gz" % (self.inputs.transformation_model, i, self.inputs.out_prefix, i) ), ) file_ = "%s_iteration_%d/%stemplate_i%d.nii.gz" % ( self.inputs.transformation_model, i, self.inputs.out_prefix, i, ) outputs["template_files"].append(os.path.realpath(file_)) outputs["final_template_file"] = os.path.realpath( "%stemplate.nii.gz" % self.inputs.out_prefix ) outputs["subject_outfiles"] = [] for filename in self.inputs.in_files: _, base, _ = split_filename(filename) temp = glob(os.path.realpath("%s%s*" % (self.inputs.out_prefix, base))) for file_ in temp: outputs["subject_outfiles"].append(file_) return outputs nipype-1.7.0/nipype/interfaces/ants/registration.py000066400000000000000000002410301413403311400224750ustar00rootroot00000000000000# -*- coding: utf-8 -*- """The ants module provides 
basic functions for interfacing with ants functions. """ import os from ...utils.filemanip import ensure_list from ..base import TraitedSpec, File, Str, traits, InputMultiPath, isdefined from .base import ANTSCommand, ANTSCommandInputSpec, LOCAL_DEFAULT_NUMBER_OF_THREADS class ANTSInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, argstr="%d", position=1, desc="image dimension (2 or 3)" ) fixed_image = InputMultiPath( File(exists=True), mandatory=True, desc=("image to which the moving image is " "warped"), ) moving_image = InputMultiPath( File(exists=True), argstr="%s", mandatory=True, desc=( "image to apply transformation to " "(generally a coregistered" "functional)" ), ) # Not all metrics are appropriate for all modalities. Also, not all metrics # are efficeint or appropriate at all resolution levels, Some metrics # perform well for gross global registraiton, but do poorly for small # changes (i.e. Mattes), and some metrics do well for small changes but # don't work well for gross level changes (i.e. 'CC'). # # This is a two stage registration. in the first stage # [ 'Mattes', .................] # ^^^^^^ <- First stage # Do a unimodal registration of the first elements of the fixed/moving input # list use the"CC" as the metric. # # In the second stage # [ ....., ['Mattes','CC'] ] # ^^^^^^^^^^^^^^^ <- Second stage # Do a multi-modal registration where the first elements of fixed/moving # input list use 'CC' metric and that is added to 'Mattes' metric result of # the second elements of the fixed/moving input. # # Cost = Sum_i ( metricweight[i] Metric_i ( fixedimage[i], movingimage[i]) ) metric = traits.List( traits.Enum("CC", "MI", "SMI", "PR", "SSD", "MSQ", "PSE"), mandatory=True, desc="", ) metric_weight = traits.List( traits.Float(), value=[1.0], usedefault=True, requires=["metric"], mandatory=True, desc="the metric weight(s) for each stage. 
" "The weights must sum to 1 per stage.", ) radius = traits.List( traits.Int(), requires=["metric"], mandatory=True, desc="radius of the region (i.e. number of layers around a voxel/pixel)" " that is used for computing cross correlation", ) output_transform_prefix = Str( "out", usedefault=True, argstr="--output-naming %s", mandatory=True, desc="" ) transformation_model = traits.Enum( "Diff", "Elast", "Exp", "Greedy Exp", "SyN", argstr="%s", mandatory=True, desc="", ) gradient_step_length = traits.Float(requires=["transformation_model"], desc="") number_of_time_steps = traits.Int(requires=["gradient_step_length"], desc="") delta_time = traits.Float(requires=["number_of_time_steps"], desc="") symmetry_type = traits.Float(requires=["delta_time"], desc="") use_histogram_matching = traits.Bool( argstr="%s", default_value=True, usedefault=True ) number_of_iterations = traits.List( traits.Int(), argstr="--number-of-iterations %s", sep="x" ) smoothing_sigmas = traits.List( traits.Int(), argstr="--gaussian-smoothing-sigmas %s", sep="x" ) subsampling_factors = traits.List( traits.Int(), argstr="--subsampling-factors %s", sep="x" ) affine_gradient_descent_option = traits.List(traits.Float(), argstr="%s") mi_option = traits.List(traits.Int(), argstr="--MI-option %s", sep="x") regularization = traits.Enum("Gauss", "DMFFD", argstr="%s", desc="") regularization_gradient_field_sigma = traits.Float( requires=["regularization"], desc="" ) regularization_deformation_field_sigma = traits.Float( requires=["regularization"], desc="" ) number_of_affine_iterations = traits.List( traits.Int(), argstr="--number-of-affine-iterations %s", sep="x" ) class ANTSOutputSpec(TraitedSpec): affine_transform = File(exists=True, desc="Affine transform file") warp_transform = File(exists=True, desc="Warping deformation field") inverse_warp_transform = File(exists=True, desc="Inverse warping deformation field") metaheader = File(exists=True, desc="VTK metaheader .mhd file") metaheader_raw = 
File(exists=True, desc="VTK metaheader .raw file") class ANTS(ANTSCommand): """ANTS wrapper for registration of images (old, use Registration instead) Examples -------- >>> from nipype.interfaces.ants import ANTS >>> ants = ANTS() >>> ants.inputs.dimension = 3 >>> ants.inputs.output_transform_prefix = 'MY' >>> ants.inputs.metric = ['CC'] >>> ants.inputs.fixed_image = ['T1.nii'] >>> ants.inputs.moving_image = ['resting.nii'] >>> ants.inputs.metric_weight = [1.0] >>> ants.inputs.radius = [5] >>> ants.inputs.transformation_model = 'SyN' >>> ants.inputs.gradient_step_length = 0.25 >>> ants.inputs.number_of_iterations = [50, 35, 15] >>> ants.inputs.use_histogram_matching = True >>> ants.inputs.mi_option = [32, 16000] >>> ants.inputs.regularization = 'Gauss' >>> ants.inputs.regularization_gradient_field_sigma = 3 >>> ants.inputs.regularization_deformation_field_sigma = 0 >>> ants.inputs.number_of_affine_iterations = [10000,10000,10000,10000,10000] >>> ants.cmdline 'ANTS 3 --MI-option 32x16000 --image-metric CC[ T1.nii, resting.nii, 1, 5 ] --number-of-affine-iterations \ 10000x10000x10000x10000x10000 --number-of-iterations 50x35x15 --output-naming MY --regularization Gauss[3.0,0.0] \ --transformation-model SyN[0.25] --use-Histogram-Matching 1' """ _cmd = "ANTS" input_spec = ANTSInputSpec output_spec = ANTSOutputSpec def _image_metric_constructor(self): retval = [] intensity_based = ["CC", "MI", "SMI", "PR", "SSD", "MSQ"] point_set_based = ["PSE", "JTB"] for ii in range(len(self.inputs.moving_image)): if self.inputs.metric[ii] in intensity_based: retval.append( "--image-metric %s[ %s, %s, %g, %d ]" % ( self.inputs.metric[ii], self.inputs.fixed_image[ii], self.inputs.moving_image[ii], self.inputs.metric_weight[ii], self.inputs.radius[ii], ) ) elif self.inputs.metric[ii] == point_set_based: pass # retval.append('--image-metric %s[%s, %s, ...'.format(self.inputs.metric[ii], # self.inputs.fixed_image[ii], self.inputs.moving_image[ii], ...)) return " ".join(retval) def 
_transformation_constructor(self): model = self.inputs.transformation_model step_length = self.inputs.gradient_step_length time_step = self.inputs.number_of_time_steps delta_time = self.inputs.delta_time symmetry_type = self.inputs.symmetry_type retval = ["--transformation-model %s" % model] parameters = [] for elem in (step_length, time_step, delta_time, symmetry_type): if elem is not traits.Undefined: parameters.append("%#.2g" % elem) if len(parameters) > 0: if len(parameters) > 1: parameters = ",".join(parameters) else: parameters = "".join(parameters) retval.append("[%s]" % parameters) return "".join(retval) def _regularization_constructor(self): return "--regularization {0}[{1},{2}]".format( self.inputs.regularization, self.inputs.regularization_gradient_field_sigma, self.inputs.regularization_deformation_field_sigma, ) def _affine_gradient_descent_option_constructor(self): values = self.inputs.affine_gradient_descent_option defaults = [0.1, 0.5, 1.0e-4, 1.0e-4] for ii in range(len(defaults)): try: defaults[ii] = values[ii] except IndexError: break parameters = self._format_xarray( [("%g" % defaults[index]) for index in range(4)] ) retval = ["--affine-gradient-descent-option", parameters] return " ".join(retval) def _format_arg(self, opt, spec, val): if opt == "moving_image": return self._image_metric_constructor() elif opt == "transformation_model": return self._transformation_constructor() elif opt == "regularization": return self._regularization_constructor() elif opt == "affine_gradient_descent_option": return self._affine_gradient_descent_option_constructor() elif opt == "use_histogram_matching": if self.inputs.use_histogram_matching: return "--use-Histogram-Matching 1" else: return "--use-Histogram-Matching 0" return super(ANTS, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() outputs["affine_transform"] = os.path.abspath( self.inputs.output_transform_prefix + "Affine.txt" ) outputs["warp_transform"] = 
os.path.abspath( self.inputs.output_transform_prefix + "Warp.nii.gz" ) outputs["inverse_warp_transform"] = os.path.abspath( self.inputs.output_transform_prefix + "InverseWarp.nii.gz" ) # outputs['metaheader'] = os.path.abspath(self.inputs.output_transform_prefix + 'velocity.mhd') # outputs['metaheader_raw'] = os.path.abspath(self.inputs.output_transform_prefix + 'velocity.raw') return outputs class RegistrationInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, argstr="--dimensionality %d", usedefault=True, desc="image dimension (2 or 3)", ) fixed_image = InputMultiPath( File(exists=True), mandatory=True, desc="Image to which the moving_image should be transformed" "(usually a structural image)", ) fixed_image_mask = File( exists=True, argstr="%s", max_ver="2.1.0", xor=["fixed_image_masks"], desc="Mask used to limit metric sampling region of the fixed image" "in all stages", ) fixed_image_masks = InputMultiPath( traits.Either("NULL", File(exists=True)), min_ver="2.2.0", xor=["fixed_image_mask"], desc="Masks used to limit metric sampling region of the fixed image, defined per registration stage" '(Use "NULL" to omit a mask at a given stage)', ) moving_image = InputMultiPath( File(exists=True), mandatory=True, desc="Image that will be registered to the space of fixed_image. 
This is the" "image on which the transformations will be applied to", ) moving_image_mask = File( exists=True, requires=["fixed_image_mask"], max_ver="2.1.0", xor=["moving_image_masks"], desc="mask used to limit metric sampling region of the moving image" "in all stages", ) moving_image_masks = InputMultiPath( traits.Either("NULL", File(exists=True)), min_ver="2.2.0", xor=["moving_image_mask"], desc="Masks used to limit metric sampling region of the moving image, defined per registration stage" '(Use "NULL" to omit a mask at a given stage)', ) save_state = File( argstr="--save-state %s", exists=False, desc="Filename for saving the internal restorable state of the registration", ) restore_state = File( argstr="--restore-state %s", exists=True, desc="Filename for restoring the internal restorable state of the registration", ) initial_moving_transform = InputMultiPath( File(exists=True), argstr="%s", desc="A transform or a list of transforms that should be applied " "before the registration begins. Note that, when a list is given, " "the transformations are applied in reverse order.", xor=["initial_moving_transform_com"], ) invert_initial_moving_transform = InputMultiPath( traits.Bool(), requires=["initial_moving_transform"], desc="One boolean or a list of booleans that indicate" "whether the inverse(s) of the transform(s) defined" "in initial_moving_transform should be used.", xor=["initial_moving_transform_com"], ) initial_moving_transform_com = traits.Enum( 0, 1, 2, argstr="%s", xor=["initial_moving_transform"], desc="Align the moving_image and fixed_image before registration using " "the geometric center of the images (=0), the image intensities (=1), " "or the origin of the images (=2).", ) metric_item_trait = traits.Enum("CC", "MeanSquares", "Demons", "GC", "MI", "Mattes") metric_stage_trait = traits.Either( metric_item_trait, traits.List(metric_item_trait) ) metric = traits.List( metric_stage_trait, mandatory=True, desc="the metric(s) to use for each stage. 
" "Note that multiple metrics per stage are not supported " "in ANTS 1.9.1 and earlier.", ) metric_weight_item_trait = traits.Float(1.0, usedefault=True) metric_weight_stage_trait = traits.Either( metric_weight_item_trait, traits.List(metric_weight_item_trait) ) metric_weight = traits.List( metric_weight_stage_trait, value=[1.0], usedefault=True, requires=["metric"], mandatory=True, desc="the metric weight(s) for each stage. " "The weights must sum to 1 per stage.", ) radius_bins_item_trait = traits.Int(5, usedefault=True) radius_bins_stage_trait = traits.Either( radius_bins_item_trait, traits.List(radius_bins_item_trait) ) radius_or_number_of_bins = traits.List( radius_bins_stage_trait, value=[5], usedefault=True, requires=["metric_weight"], desc="the number of bins in each stage for the MI and Mattes metric, " "the radius for other metrics", ) sampling_strategy_item_trait = traits.Enum("None", "Regular", "Random", None) sampling_strategy_stage_trait = traits.Either( sampling_strategy_item_trait, traits.List(sampling_strategy_item_trait) ) sampling_strategy = traits.List( trait=sampling_strategy_stage_trait, requires=["metric_weight"], desc="the metric sampling strategy (strategies) for each stage", ) sampling_percentage_item_trait = traits.Either( traits.Range(low=0.0, high=1.0), None ) sampling_percentage_stage_trait = traits.Either( sampling_percentage_item_trait, traits.List(sampling_percentage_item_trait) ) sampling_percentage = traits.List( trait=sampling_percentage_stage_trait, requires=["sampling_strategy"], desc="the metric sampling percentage(s) to use for each stage", ) use_estimate_learning_rate_once = traits.List(traits.Bool(), desc="") use_histogram_matching = traits.Either( traits.Bool, traits.List(traits.Bool(argstr="%s")), default=True, usedefault=True, desc="Histogram match the images before registration.", ) interpolation = traits.Enum( "Linear", "NearestNeighbor", "CosineWindowedSinc", "WelchWindowedSinc", "HammingWindowedSinc", 
"LanczosWindowedSinc", "BSpline", "MultiLabel", "Gaussian", argstr="%s", usedefault=True, ) interpolation_parameters = traits.Either( traits.Tuple(traits.Int()), # BSpline (order) traits.Tuple( traits.Float(), traits.Float() # Gaussian/MultiLabel (sigma, alpha) ), ) write_composite_transform = traits.Bool( argstr="--write-composite-transform %d", default_value=False, usedefault=True, desc="", ) collapse_output_transforms = traits.Bool( argstr="--collapse-output-transforms %d", default_value=True, usedefault=True, # This should be true for explicit completeness desc=( "Collapse output transforms. Specifically, enabling this option " "combines all adjacent linear transforms and composes all " "adjacent displacement field transforms before writing the " "results to disk." ), ) initialize_transforms_per_stage = traits.Bool( argstr="--initialize-transforms-per-stage %d", default_value=False, usedefault=True, # This should be true for explicit completeness desc=( "Initialize linear transforms from the previous stage. By enabling this option, " "the current linear stage transform is directly intialized from the previous " "stages linear transform; this allows multiple linear stages to be run where " "each stage directly updates the estimated linear transform from the previous " "stage. (e.g. Translation -> Rigid -> Affine). 
" ), ) # NOTE: Even though only 0=False and 1=True are allowed, ants uses integer # values instead of booleans float = traits.Bool( argstr="--float %d", default_value=False, desc="Use float instead of double for computations.", ) transforms = traits.List( traits.Enum( "Rigid", "Affine", "CompositeAffine", "Similarity", "Translation", "BSpline", "GaussianDisplacementField", "TimeVaryingVelocityField", "TimeVaryingBSplineVelocityField", "SyN", "BSplineSyN", "Exponential", "BSplineExponential", ), argstr="%s", mandatory=True, ) # TODO: input checking and allow defaults # All parameters must be specified for BSplineDisplacementField, TimeVaryingBSplineVelocityField, BSplineSyN, # Exponential, and BSplineExponential. EVEN DEFAULTS! transform_parameters = traits.List( traits.Either( traits.Tuple(traits.Float()), # Translation, Rigid, Affine, # CompositeAffine, Similarity traits.Tuple( traits.Float(), # GaussianDisplacementField, SyN traits.Float(), traits.Float(), ), traits.Tuple( traits.Float(), # BSplineSyn, traits.Int(), # BSplineDisplacementField, traits.Int(), # TimeVaryingBSplineVelocityField traits.Int(), ), traits.Tuple( traits.Float(), # TimeVaryingVelocityField traits.Int(), traits.Float(), traits.Float(), traits.Float(), traits.Float(), ), traits.Tuple( traits.Float(), # Exponential traits.Float(), traits.Float(), traits.Int(), ), traits.Tuple( traits.Float(), # BSplineExponential traits.Int(), traits.Int(), traits.Int(), traits.Int(), ), ) ) restrict_deformation = traits.List( traits.List(traits.Range(low=0.0, high=1.0)), desc=( "This option allows the user to restrict the optimization of " "the displacement field, translation, rigid or affine transform " "on a per-component basis. For example, if one wants to limit " "the deformation or rotation of 3-D volume to the first two " "dimensions, this is possible by specifying a weight vector of " "'1x1x0' for a deformation field or '1x1x0x1x1x0' for a rigid " "transformation. 
Low-dimensional restriction only works if " "there are no preceding transformations." ), ) # Convergence flags number_of_iterations = traits.List(traits.List(traits.Int())) smoothing_sigmas = traits.List(traits.List(traits.Float()), mandatory=True) sigma_units = traits.List( traits.Enum("mm", "vox"), requires=["smoothing_sigmas"], desc="units for smoothing sigmas", ) shrink_factors = traits.List(traits.List(traits.Int()), mandatory=True) convergence_threshold = traits.List( trait=traits.Float(), value=[1e-6], minlen=1, requires=["number_of_iterations"], usedefault=True, ) convergence_window_size = traits.List( trait=traits.Int(), value=[10], minlen=1, requires=["convergence_threshold"], usedefault=True, ) # Output flags output_transform_prefix = Str("transform", usedefault=True, argstr="%s", desc="") output_warped_image = traits.Either(traits.Bool, File(), hash_files=False, desc="") output_inverse_warped_image = traits.Either( traits.Bool, File(), hash_files=False, requires=["output_warped_image"], desc="" ) winsorize_upper_quantile = traits.Range( low=0.0, high=1.0, value=1.0, argstr="%s", usedefault=True, desc="The Upper quantile to clip image ranges", ) winsorize_lower_quantile = traits.Range( low=0.0, high=1.0, value=0.0, argstr="%s", usedefault=True, desc="The Lower quantile to clip image ranges", ) verbose = traits.Bool( argstr="-v", default_value=False, usedefault=True, nohash=True ) class RegistrationOutputSpec(TraitedSpec): forward_transforms = traits.List( File(exists=True), desc="List of output transforms for forward registration" ) reverse_forward_transforms = traits.List( File(exists=True), desc="List of output transforms for forward registration reversed for antsApplyTransform", ) reverse_transforms = traits.List( File(exists=True), desc="List of output transforms for reverse registration" ) forward_invert_flags = traits.List( traits.Bool(), desc="List of flags corresponding to the forward transforms" ) reverse_forward_invert_flags = traits.List( 
traits.Bool(), desc="List of flags corresponding to the forward transforms reversed for antsApplyTransform", ) reverse_invert_flags = traits.List( traits.Bool(), desc="List of flags corresponding to the reverse transforms" ) composite_transform = File(exists=True, desc="Composite transform file") inverse_composite_transform = File(desc="Inverse composite transform file") warped_image = File(desc="Outputs warped image") inverse_warped_image = File(desc="Outputs the inverse of the warped image") save_state = File(desc="The saved registration state to be restored") metric_value = traits.Float(desc="the final value of metric") elapsed_time = traits.Float(desc="the total elapsed time as reported by ANTs") class Registration(ANTSCommand): """ANTs Registration command for registration of images `antsRegistration `_ registers a ``moving_image`` to a ``fixed_image``, using a predefined (sequence of) cost function(s) and transformation operations. The cost function is defined using one or more 'metrics', specifically local cross-correlation (``CC``), Mean Squares (``MeanSquares``), Demons (``Demons``), global correlation (``GC``), or Mutual Information (``Mattes`` or ``MI``). ANTS can use both linear (``Translation``, ``Rigid``, ``Affine``, ``CompositeAffine``, or ``Translation``) and non-linear transformations (``BSpline``, ``GaussianDisplacementField``, ``TimeVaryingVelocityField``, ``TimeVaryingBSplineVelocityField``, ``SyN``, ``BSplineSyN``, ``Exponential``, or ``BSplineExponential``). Usually, registration is done in multiple *stages*. For example first an Affine, then a Rigid, and ultimately a non-linear (Syn)-transformation. antsRegistration can be initialized using one ore more transforms from moving_image to fixed_image with the ``initial_moving_transform``-input. For example, when you already have a warpfield that corrects for geometrical distortions in an EPI (functional) image, that you want to apply before an Affine registration to a structural image. 
You could put this transform into 'intial_moving_transform'. The Registration-interface can output the resulting transform(s) that map moving_image to fixed_image in a single file as a ``composite_transform`` (if ``write_composite_transform`` is set to ``True``), or a list of transforms as ``forwards_transforms``. It can also output inverse transforms (from ``fixed_image`` to ``moving_image``) in a similar fashion using ``inverse_composite_transform``. Note that the order of ``forward_transforms`` is in 'natural' order: the first element should be applied first, the last element should be applied last. Note, however, that ANTS tools always apply lists of transformations in reverse order (the last transformation in the list is applied first). Therefore, if the output forward_transforms is a list, one can not directly feed it into, for example, ``ants.ApplyTransforms``. To make ``ants.ApplyTransforms`` apply the transformations in the same order as ``ants.Registration``, you have to provide the list of transformations in reverse order from ``forward_transforms``. ``reverse_forward_transforms`` outputs ``forward_transforms`` in reverse order and can be used for this purpose. Note also that, because ``composite_transform`` is always a single file, this output is preferred for most use-cases. More information can be found in the `ANTS manual `_. See below for some useful examples. Examples -------- Set up a Registration node with some default settings. This Node registers 'fixed1.nii' to 'moving1.nii' by first fitting a linear 'Affine' transformation, and then a non-linear 'SyN' transformation, both using the Mutual Information-cost metric. The registration is initialized by first applying the (linear) transform trans.mat. 
>>> import copy, pprint >>> from nipype.interfaces.ants import Registration >>> reg = Registration() >>> reg.inputs.fixed_image = 'fixed1.nii' >>> reg.inputs.moving_image = 'moving1.nii' >>> reg.inputs.output_transform_prefix = "output_" >>> reg.inputs.initial_moving_transform = 'trans.mat' >>> reg.inputs.transforms = ['Affine', 'SyN'] >>> reg.inputs.transform_parameters = [(2.0,), (0.25, 3.0, 0.0)] >>> reg.inputs.number_of_iterations = [[1500, 200], [100, 50, 30]] >>> reg.inputs.dimension = 3 >>> reg.inputs.write_composite_transform = True >>> reg.inputs.collapse_output_transforms = False >>> reg.inputs.initialize_transforms_per_stage = False >>> reg.inputs.metric = ['Mattes']*2 >>> reg.inputs.metric_weight = [1]*2 # Default (value ignored currently by ANTs) >>> reg.inputs.radius_or_number_of_bins = [32]*2 >>> reg.inputs.sampling_strategy = ['Random', None] >>> reg.inputs.sampling_percentage = [0.05, None] >>> reg.inputs.convergence_threshold = [1.e-8, 1.e-9] >>> reg.inputs.convergence_window_size = [20]*2 >>> reg.inputs.smoothing_sigmas = [[1,0], [2,1,0]] >>> reg.inputs.sigma_units = ['vox'] * 2 >>> reg.inputs.shrink_factors = [[2,1], [3,2,1]] >>> reg.inputs.use_estimate_learning_rate_once = [True, True] >>> reg.inputs.use_histogram_matching = [True, True] # This is the default >>> reg.inputs.output_warped_image = 'output_warped_image.nii.gz' >>> reg.cmdline 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 0 ] \ --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ --smoothing-sigmas 
2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' >>> reg.run() # doctest: +SKIP Same as reg1, but first invert the initial transform ('trans.mat') before applying it. >>> reg.inputs.invert_initial_moving_transform = True >>> reg1 = copy.deepcopy(reg) >>> reg1.inputs.winsorize_lower_quantile = 0.025 >>> reg1.cmdline 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.025, 1.0 ] --write-composite-transform 1' >>> reg1.run() # doctest: +SKIP Clip extremely high intensity data points using winsorize_upper_quantile. All data points higher than the 0.975 quantile are set to the value of the 0.975 quantile. 
>>> reg2 = copy.deepcopy(reg) >>> reg2.inputs.winsorize_upper_quantile = 0.975 >>> reg2.cmdline 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 0.975 ] --write-composite-transform 1' Clip extremely low intensity data points using winsorize_lower_quantile. All data points lower than the 0.025 quantile are set to the original value at the 0.025 quantile. 
>>> reg3 = copy.deepcopy(reg) >>> reg3.inputs.winsorize_lower_quantile = 0.025 >>> reg3.inputs.winsorize_upper_quantile = 0.975 >>> reg3.cmdline 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.025, 0.975 ] --write-composite-transform 1' Use float instead of double for computations (saves memory usage) >>> reg3a = copy.deepcopy(reg) >>> reg3a.inputs.float = True >>> reg3a.cmdline 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --float 1 \ --initial-moving-transform [ trans.mat, 1 ] --initialize-transforms-per-stage 0 --interpolation Linear \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' Force to use double instead of float for computations (more precision and memory usage). 
>>> reg3b = copy.deepcopy(reg) >>> reg3b.inputs.float = False >>> reg3b.cmdline 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --float 0 \ --initial-moving-transform [ trans.mat, 1 ] --initialize-transforms-per-stage 0 --interpolation Linear \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' 'collapse_output_transforms' can be used to put all transformation in a single 'composite_transform'- file. Note that forward_transforms will now be an empty list. 
>>> # Test collapse transforms flag >>> reg4 = copy.deepcopy(reg) >>> reg4.inputs.save_state = 'trans.mat' >>> reg4.inputs.restore_state = 'trans.mat' >>> reg4.inputs.initialize_transforms_per_stage = True >>> reg4.inputs.collapse_output_transforms = True >>> outputs = reg4._list_outputs() >>> pprint.pprint(outputs) # doctest: +ELLIPSIS, {'composite_transform': '...data/output_Composite.h5', 'elapsed_time': , 'forward_invert_flags': [], 'forward_transforms': [], 'inverse_composite_transform': '...data/output_InverseComposite.h5', 'inverse_warped_image': , 'metric_value': , 'reverse_forward_invert_flags': [], 'reverse_forward_transforms': [], 'reverse_invert_flags': [], 'reverse_transforms': [], 'save_state': '...data/trans.mat', 'warped_image': '...data/output_warped_image.nii.gz'} >>> reg4.cmdline 'antsRegistration --collapse-output-transforms 1 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 1 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --restore-state trans.mat --save-state trans.mat --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' >>> # Test collapse transforms flag >>> reg4b = copy.deepcopy(reg4) >>> reg4b.inputs.write_composite_transform = False >>> outputs = reg4b._list_outputs() >>> pprint.pprint(outputs) # doctest: +ELLIPSIS, {'composite_transform': , 'elapsed_time': , 'forward_invert_flags': [False, False], 'forward_transforms': 
['...data/output_0GenericAffine.mat', '...data/output_1Warp.nii.gz'], 'inverse_composite_transform': , 'inverse_warped_image': , 'metric_value': , 'reverse_forward_invert_flags': [False, False], 'reverse_forward_transforms': ['...data/output_1Warp.nii.gz', '...data/output_0GenericAffine.mat'], 'reverse_invert_flags': [True, False], 'reverse_transforms': ['...data/output_0GenericAffine.mat', \ '...data/output_1InverseWarp.nii.gz'], 'save_state': '...data/trans.mat', 'warped_image': '...data/output_warped_image.nii.gz'} >>> reg4b.aggregate_outputs() # doctest: +SKIP >>> reg4b.cmdline 'antsRegistration --collapse-output-transforms 1 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 1 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --restore-state trans.mat --save-state trans.mat --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 0' One can use multiple similarity metrics in a single registration stage.The Node below first performs a linear registation using only the Mutual Information ('Mattes')-metric. In a second stage, it performs a non-linear registration ('Syn') using both a Mutual Information and a local cross-correlation ('CC')-metric. Both metrics are weighted equally ('metric_weight' is .5 for both). The Mutual Information- metric uses 32 bins. The local cross-correlations (correlations between every voxel's neighborhoods) is computed with a radius of 4. 
>>> # Test multiple metrics per stage >>> reg5 = copy.deepcopy(reg) >>> reg5.inputs.fixed_image = 'fixed1.nii' >>> reg5.inputs.moving_image = 'moving1.nii' >>> reg5.inputs.metric = ['Mattes', ['Mattes', 'CC']] >>> reg5.inputs.metric_weight = [1, [.5,.5]] >>> reg5.inputs.radius_or_number_of_bins = [32, [32, 4] ] >>> reg5.inputs.sampling_strategy = ['Random', None] # use default strategy in second stage >>> reg5.inputs.sampling_percentage = [0.05, [0.05, 0.10]] >>> reg5.cmdline 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 0.5, 32, None, 0.05 ] \ --metric CC[ fixed1.nii, moving1.nii, 0.5, 4, None, 0.1 ] --convergence [ 100x50x30, 1e-09, 20 ] \ --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' ANTS Registration can also use multiple modalities to perform the registration. Here it is assumed that fixed1.nii and fixed2.nii are in the same space, and so are moving1.nii and moving2.nii. First, a linear registration is performed matching fixed1.nii to moving1.nii, then a non-linear registration is performed to match fixed2.nii to moving2.nii, starting from the transformation of the first step. 
>>> # Test multiple inputS >>> reg6 = copy.deepcopy(reg5) >>> reg6.inputs.fixed_image = ['fixed1.nii', 'fixed2.nii'] >>> reg6.inputs.moving_image = ['moving1.nii', 'moving2.nii'] >>> reg6.cmdline 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 0.5, 32, None, 0.05 ] \ --metric CC[ fixed2.nii, moving2.nii, 0.5, 4, None, 0.1 ] --convergence [ 100x50x30, 1e-09, 20 ] \ --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' Different methods can be used for the interpolation when applying transformations. 
>>> # Test Interpolation Parameters (BSpline) >>> reg7a = copy.deepcopy(reg) >>> reg7a.inputs.interpolation = 'BSpline' >>> reg7a.inputs.interpolation_parameters = (3,) >>> reg7a.cmdline 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 0 --interpolation BSpline[ 3 ] --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform SyN[ 0.25, 3.0, 0.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' >>> # Test Interpolation Parameters (MultiLabel/Gaussian) >>> reg7b = copy.deepcopy(reg) >>> reg7b.inputs.interpolation = 'Gaussian' >>> reg7b.inputs.interpolation_parameters = (1.0, 1.0) >>> reg7b.cmdline 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 0 --interpolation Gaussian[ 1.0, 1.0 ] \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 
1' BSplineSyN non-linear registration with custom parameters. >>> # Test Extended Transform Parameters >>> reg8 = copy.deepcopy(reg) >>> reg8.inputs.transforms = ['Affine', 'BSplineSyN'] >>> reg8.inputs.transform_parameters = [(2.0,), (0.25, 26, 0, 3)] >>> reg8.cmdline 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --transform BSplineSyN[ 0.25, 26, 0, 3 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] --convergence [ 100x50x30, 1e-09, 20 ] \ --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 --use-estimate-learning-rate-once 1 \ --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' Mask the fixed image in the second stage of the registration (but not the first). 
>>> # Test masking >>> reg9 = copy.deepcopy(reg) >>> reg9.inputs.fixed_image_masks = ['NULL', 'fixed1.nii'] >>> reg9.cmdline 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform [ trans.mat, 1 ] \ --initialize-transforms-per-stage 0 --interpolation Linear --output [ output_, output_warped_image.nii.gz ] \ --transform Affine[ 2.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] \ --convergence [ 1500x200, 1e-08, 20 ] --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 \ --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --masks [ NULL, NULL ] \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --masks [ fixed1.nii, NULL ] \ --winsorize-image-intensities [ 0.0, 1.0 ] --write-composite-transform 1' Here we use both a warpfield and a linear transformation, before registration commences. Note that the first transformation that needs to be applied ('ants_Warp.nii.gz') is last in the list of 'initial_moving_transform'. 
>>> # Test initialization with multiple transforms matrices (e.g., unwarp and affine transform) >>> reg10 = copy.deepcopy(reg) >>> reg10.inputs.initial_moving_transform = ['func_to_struct.mat', 'ants_Warp.nii.gz'] >>> reg10.inputs.invert_initial_moving_transform = [False, False] >>> reg10.cmdline 'antsRegistration --collapse-output-transforms 0 --dimensionality 3 --initial-moving-transform \ [ func_to_struct.mat, 0 ] [ ants_Warp.nii.gz, 0 ] --initialize-transforms-per-stage 0 --interpolation Linear \ --output [ output_, output_warped_image.nii.gz ] --transform Affine[ 2.0 ] \ --metric Mattes[ fixed1.nii, moving1.nii, 1, 32, Random, 0.05 ] --convergence [ 1500x200, 1e-08, 20 ] \ --smoothing-sigmas 1.0x0.0vox --shrink-factors 2x1 --use-estimate-learning-rate-once 1 --use-histogram-matching 1 \ --transform SyN[ 0.25, 3.0, 0.0 ] --metric Mattes[ fixed1.nii, moving1.nii, 1, 32 ] \ --convergence [ 100x50x30, 1e-09, 20 ] --smoothing-sigmas 2.0x1.0x0.0vox --shrink-factors 3x2x1 \ --use-estimate-learning-rate-once 1 --use-histogram-matching 1 --winsorize-image-intensities [ 0.0, 1.0 ] \ --write-composite-transform 1' """ DEF_SAMPLING_STRATEGY = "None" """The default sampling strategy argument.""" _cmd = "antsRegistration" input_spec = RegistrationInputSpec output_spec = RegistrationOutputSpec _quantilesDone = False _linear_transform_names = [ "Rigid", "Affine", "Translation", "CompositeAffine", "Similarity", ] def __init__(self, **inputs): super(Registration, self).__init__(**inputs) self._elapsed_time = None self._metric_value = None def _run_interface(self, runtime, correct_return_codes=(0,)): runtime = super(Registration, self)._run_interface(runtime) # Parse some profiling info output = runtime.stdout or runtime.merged if output: lines = output.split("\n") for l in lines[::-1]: # This should be the last line if l.strip().startswith("Total elapsed time:"): self._elapsed_time = float( l.strip().replace("Total elapsed time: ", "") ) elif "DIAGNOSTIC" in l: 
self._metric_value = float(l.split(",")[2]) break return runtime def _format_metric(self, index): """ Format the antsRegistration -m metric argument(s). Parameters ---------- index: the stage index """ # The metric name input for the current stage. name_input = self.inputs.metric[index] # The stage-specific input dictionary. stage_inputs = dict( fixed_image=self.inputs.fixed_image[0], moving_image=self.inputs.moving_image[0], metric=name_input, weight=self.inputs.metric_weight[index], radius_or_bins=self.inputs.radius_or_number_of_bins[index], optional=self.inputs.radius_or_number_of_bins[index], ) # The optional sampling strategy and percentage. if isdefined(self.inputs.sampling_strategy) and self.inputs.sampling_strategy: sampling_strategy = self.inputs.sampling_strategy[index] if sampling_strategy: stage_inputs["sampling_strategy"] = sampling_strategy if ( isdefined(self.inputs.sampling_percentage) and self.inputs.sampling_percentage ): sampling_percentage = self.inputs.sampling_percentage[index] if sampling_percentage: stage_inputs["sampling_percentage"] = sampling_percentage # Make a list of metric specifications, one per -m command line # argument for the current stage. # If there are multiple inputs for this stage, then convert the # dictionary of list inputs into a list of metric specifications. # Otherwise, make a singleton list of the metric specification # from the non-list inputs. 
if isinstance(name_input, list): items = list(stage_inputs.items()) indexes = list(range(0, len(name_input))) specs = list() for i in indexes: temp = dict([(k, v[i]) for k, v in items]) if len(self.inputs.fixed_image) == 1: temp["fixed_image"] = self.inputs.fixed_image[0] else: temp["fixed_image"] = self.inputs.fixed_image[i] if len(self.inputs.moving_image) == 1: temp["moving_image"] = self.inputs.moving_image[0] else: temp["moving_image"] = self.inputs.moving_image[i] specs.append(temp) else: specs = [stage_inputs] # Format the --metric command line metric arguments, one per # specification. return [self._format_metric_argument(**spec) for spec in specs] @staticmethod def _format_metric_argument(**kwargs): retval = "%s[ %s, %s, %g, %d" % ( kwargs["metric"], kwargs["fixed_image"], kwargs["moving_image"], kwargs["weight"], kwargs["radius_or_bins"], ) # The optional sampling strategy. if "sampling_strategy" in kwargs: sampling_strategy = kwargs["sampling_strategy"] elif "sampling_percentage" in kwargs: # The sampling percentage is specified but not the # sampling strategy. Use the default strategy. sampling_strategy = Registration.DEF_SAMPLING_STRATEGY else: sampling_strategy = None # Format the optional sampling arguments. 
if sampling_strategy: retval += ", %s" % sampling_strategy if "sampling_percentage" in kwargs: retval += ", %g" % kwargs["sampling_percentage"] retval += " ]" return retval def _format_transform(self, index): retval = [] retval.append("%s[ " % self.inputs.transforms[index]) parameters = ", ".join( [str(element) for element in self.inputs.transform_parameters[index]] ) retval.append("%s" % parameters) retval.append(" ]") return "".join(retval) def _format_registration(self): retval = [] for ii in range(len(self.inputs.transforms)): retval.append("--transform %s" % (self._format_transform(ii))) for metric in self._format_metric(ii): retval.append("--metric %s" % metric) retval.append("--convergence %s" % self._format_convergence(ii)) if isdefined(self.inputs.sigma_units): retval.append( "--smoothing-sigmas %s%s" % ( self._format_xarray(self.inputs.smoothing_sigmas[ii]), self.inputs.sigma_units[ii], ) ) else: retval.append( "--smoothing-sigmas %s" % self._format_xarray(self.inputs.smoothing_sigmas[ii]) ) retval.append( "--shrink-factors %s" % self._format_xarray(self.inputs.shrink_factors[ii]) ) if isdefined(self.inputs.use_estimate_learning_rate_once): retval.append( "--use-estimate-learning-rate-once %d" % self.inputs.use_estimate_learning_rate_once[ii] ) if isdefined(self.inputs.use_histogram_matching): # use_histogram_matching is either a common flag for all transforms # or a list of transform-specific flags if isinstance(self.inputs.use_histogram_matching, bool): histval = self.inputs.use_histogram_matching else: histval = self.inputs.use_histogram_matching[ii] retval.append("--use-histogram-matching %d" % histval) if isdefined(self.inputs.restrict_deformation): retval.append( "--restrict-deformation %s" % self._format_xarray(self.inputs.restrict_deformation[ii]) ) if any( ( isdefined(self.inputs.fixed_image_masks), isdefined(self.inputs.moving_image_masks), ) ): if isdefined(self.inputs.fixed_image_masks): fixed_masks = ensure_list(self.inputs.fixed_image_masks) 
fixed_mask = fixed_masks[ii if len(fixed_masks) > 1 else 0] else: fixed_mask = "NULL" if isdefined(self.inputs.moving_image_masks): moving_masks = ensure_list(self.inputs.moving_image_masks) moving_mask = moving_masks[ii if len(moving_masks) > 1 else 0] else: moving_mask = "NULL" retval.append("--masks [ %s, %s ]" % (fixed_mask, moving_mask)) return " ".join(retval) def _get_outputfilenames(self, inverse=False): output_filename = None if not inverse: if ( isdefined(self.inputs.output_warped_image) and self.inputs.output_warped_image ): output_filename = self.inputs.output_warped_image if isinstance(output_filename, bool): output_filename = ( "%s_Warped.nii.gz" % self.inputs.output_transform_prefix ) return output_filename inv_output_filename = None if ( isdefined(self.inputs.output_inverse_warped_image) and self.inputs.output_inverse_warped_image ): inv_output_filename = self.inputs.output_inverse_warped_image if isinstance(inv_output_filename, bool): inv_output_filename = ( "%s_InverseWarped.nii.gz" % self.inputs.output_transform_prefix ) return inv_output_filename def _format_convergence(self, ii): convergence_iter = self._format_xarray(self.inputs.number_of_iterations[ii]) if len(self.inputs.convergence_threshold) > ii: convergence_value = self.inputs.convergence_threshold[ii] else: convergence_value = self.inputs.convergence_threshold[0] if len(self.inputs.convergence_window_size) > ii: convergence_ws = self.inputs.convergence_window_size[ii] else: convergence_ws = self.inputs.convergence_window_size[0] return "[ %s, %g, %d ]" % (convergence_iter, convergence_value, convergence_ws) def _format_winsorize_image_intensities(self): if ( not self.inputs.winsorize_upper_quantile > self.inputs.winsorize_lower_quantile ): raise RuntimeError( "Upper bound MUST be more than lower bound: %g > %g" % ( self.inputs.winsorize_upper_quantile, self.inputs.winsorize_lower_quantile, ) ) self._quantilesDone = True return "--winsorize-image-intensities [ %s, %s ]" % ( 
self.inputs.winsorize_lower_quantile, self.inputs.winsorize_upper_quantile, ) def _get_initial_transform_filenames(self): n_transforms = len(self.inputs.initial_moving_transform) # Assume transforms should not be inverted by default invert_flags = [0] * n_transforms if isdefined(self.inputs.invert_initial_moving_transform): if len(self.inputs.invert_initial_moving_transform) != n_transforms: raise Exception( 'Inputs "initial_moving_transform" and "invert_initial_moving_transform"' "should have the same length." ) invert_flags = self.inputs.invert_initial_moving_transform retval = [ "[ %s, %d ]" % (xfm, int(flag)) for xfm, flag in zip(self.inputs.initial_moving_transform, invert_flags) ] return " ".join(["--initial-moving-transform"] + retval) def _format_arg(self, opt, spec, val): if opt == "fixed_image_mask": if isdefined(self.inputs.moving_image_mask): return "--masks [ %s, %s ]" % ( self.inputs.fixed_image_mask, self.inputs.moving_image_mask, ) else: return "--masks %s" % self.inputs.fixed_image_mask elif opt == "transforms": return self._format_registration() elif opt == "initial_moving_transform": return self._get_initial_transform_filenames() elif opt == "initial_moving_transform_com": do_center_of_mass_init = ( self.inputs.initial_moving_transform_com if isdefined(self.inputs.initial_moving_transform_com) else 0 ) # Just do the default behavior return "--initial-moving-transform [ %s, %s, %d ]" % ( self.inputs.fixed_image[0], self.inputs.moving_image[0], do_center_of_mass_init, ) elif opt == "interpolation": if ( self.inputs.interpolation in [ "BSpline", "MultiLabel", "Gaussian", ] and isdefined(self.inputs.interpolation_parameters) ): return "--interpolation %s[ %s ]" % ( self.inputs.interpolation, ", ".join( [str(param) for param in self.inputs.interpolation_parameters] ), ) else: return "--interpolation %s" % self.inputs.interpolation elif opt == "output_transform_prefix": out_filename = self._get_outputfilenames(inverse=False) inv_out_filename = 
self._get_outputfilenames(inverse=True) if out_filename and inv_out_filename: return "--output [ %s, %s, %s ]" % ( self.inputs.output_transform_prefix, out_filename, inv_out_filename, ) elif out_filename: return "--output [ %s, %s ]" % ( self.inputs.output_transform_prefix, out_filename, ) else: return "--output %s" % self.inputs.output_transform_prefix elif opt == "winsorize_upper_quantile" or opt == "winsorize_lower_quantile": if not self._quantilesDone: return self._format_winsorize_image_intensities() else: self._quantilesDone = False return "" # Must return something for argstr! # This feature was removed from recent versions of antsRegistration due to corrupt outputs. # elif opt == 'collapse_linear_transforms_to_fixed_image_header': # return self._formatCollapseLinearTransformsToFixedImageHeader() return super(Registration, self)._format_arg(opt, spec, val) def _output_filenames(self, prefix, count, transform, inverse=False): self.low_dimensional_transform_map = { "Rigid": "Rigid.mat", "Affine": "Affine.mat", "GenericAffine": "GenericAffine.mat", "CompositeAffine": "Affine.mat", "Similarity": "Similarity.mat", "Translation": "Translation.mat", "BSpline": "BSpline.txt", "Initial": "DerivedInitialMovingTranslation.mat", } if transform in list(self.low_dimensional_transform_map.keys()): suffix = self.low_dimensional_transform_map[transform] inverse_mode = inverse else: inverse_mode = False # These are not analytically invertable if inverse: suffix = "InverseWarp.nii.gz" else: suffix = "Warp.nii.gz" return "%s%d%s" % (prefix, count, suffix), inverse_mode def _list_outputs(self): outputs = self._outputs().get() outputs["forward_transforms"] = [] outputs["forward_invert_flags"] = [] outputs["reverse_transforms"] = [] outputs["reverse_invert_flags"] = [] # invert_initial_moving_transform should be always defined, even if # there's no initial transform invert_initial_moving_transform = [False] * len( self.inputs.initial_moving_transform ) if 
isdefined(self.inputs.invert_initial_moving_transform): invert_initial_moving_transform = ( self.inputs.invert_initial_moving_transform ) if self.inputs.write_composite_transform: filename = self.inputs.output_transform_prefix + "Composite.h5" outputs["composite_transform"] = os.path.abspath(filename) filename = self.inputs.output_transform_prefix + "InverseComposite.h5" outputs["inverse_composite_transform"] = os.path.abspath(filename) # If composite transforms are written, then individuals are not written (as of 2014-10-26 else: if not self.inputs.collapse_output_transforms: transform_count = 0 if isdefined(self.inputs.initial_moving_transform): outputs[ "forward_transforms" ] += self.inputs.initial_moving_transform outputs["forward_invert_flags"] += invert_initial_moving_transform outputs["reverse_transforms"] = ( self.inputs.initial_moving_transform + outputs["reverse_transforms"] ) outputs["reverse_invert_flags"] = [ not e for e in invert_initial_moving_transform ] + outputs[ "reverse_invert_flags" ] # Prepend transform_count += len(self.inputs.initial_moving_transform) elif isdefined(self.inputs.initial_moving_transform_com): forward_filename, forward_inversemode = self._output_filenames( self.inputs.output_transform_prefix, transform_count, "Initial" ) reverse_filename, reverse_inversemode = self._output_filenames( self.inputs.output_transform_prefix, transform_count, "Initial", True, ) outputs["forward_transforms"].append( os.path.abspath(forward_filename) ) outputs["forward_invert_flags"].append(False) outputs["reverse_transforms"].insert( 0, os.path.abspath(reverse_filename) ) outputs["reverse_invert_flags"].insert(0, True) transform_count += 1 for count in range(len(self.inputs.transforms)): forward_filename, forward_inversemode = self._output_filenames( self.inputs.output_transform_prefix, transform_count, self.inputs.transforms[count], ) reverse_filename, reverse_inversemode = self._output_filenames( self.inputs.output_transform_prefix, 
transform_count, self.inputs.transforms[count], True, ) outputs["forward_transforms"].append( os.path.abspath(forward_filename) ) outputs["forward_invert_flags"].append(forward_inversemode) outputs["reverse_transforms"].insert( 0, os.path.abspath(reverse_filename) ) outputs["reverse_invert_flags"].insert(0, reverse_inversemode) transform_count += 1 else: transform_count = 0 is_linear = [ t in self._linear_transform_names for t in self.inputs.transforms ] collapse_list = [] if isdefined(self.inputs.initial_moving_transform) or isdefined( self.inputs.initial_moving_transform_com ): is_linear.insert(0, True) # Only files returned by collapse_output_transforms if any(is_linear): collapse_list.append("GenericAffine") if not all(is_linear): collapse_list.append("SyN") for transform in collapse_list: forward_filename, forward_inversemode = self._output_filenames( self.inputs.output_transform_prefix, transform_count, transform, inverse=False, ) reverse_filename, reverse_inversemode = self._output_filenames( self.inputs.output_transform_prefix, transform_count, transform, inverse=True, ) outputs["forward_transforms"].append( os.path.abspath(forward_filename) ) outputs["forward_invert_flags"].append(forward_inversemode) outputs["reverse_transforms"].append( os.path.abspath(reverse_filename) ) outputs["reverse_invert_flags"].append(reverse_inversemode) transform_count += 1 out_filename = self._get_outputfilenames(inverse=False) inv_out_filename = self._get_outputfilenames(inverse=True) if out_filename: outputs["warped_image"] = os.path.abspath(out_filename) if inv_out_filename: outputs["inverse_warped_image"] = os.path.abspath(inv_out_filename) if len(self.inputs.save_state): outputs["save_state"] = os.path.abspath(self.inputs.save_state) if self._metric_value: outputs["metric_value"] = self._metric_value if self._elapsed_time: outputs["elapsed_time"] = self._elapsed_time outputs["reverse_forward_transforms"] = outputs["forward_transforms"][::-1] 
outputs["reverse_forward_invert_flags"] = outputs["forward_invert_flags"][::-1] return outputs class MeasureImageSimilarityInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 2, 3, 4, argstr="--dimensionality %d", position=1, desc="Dimensionality of the fixed/moving image pair", ) fixed_image = File( exists=True, mandatory=True, desc="Image to which the moving image is warped" ) moving_image = File( exists=True, mandatory=True, desc="Image to apply transformation to (generally a coregistered functional)", ) metric = traits.Enum( "CC", "MI", "Mattes", "MeanSquares", "Demons", "GC", argstr="%s", mandatory=True ) metric_weight = traits.Float( requires=["metric"], default_value=1.0, usedefault=True, desc='The "metricWeight" variable is not used.', ) radius_or_number_of_bins = traits.Int( requires=["metric"], mandatory=True, desc="The number of bins in each stage for the MI and Mattes metric, " "or the radius for other metrics", ) sampling_strategy = traits.Enum( "None", "Regular", "Random", requires=["metric"], usedefault=True, desc="Manner of choosing point set over which to optimize the metric. " 'Defaults to "None" (i.e. 
a dense sampling of one sample per voxel).', ) sampling_percentage = traits.Either( traits.Range(low=0.0, high=1.0), requires=["metric"], mandatory=True, desc="Percentage of points accessible to the sampling strategy over which " "to optimize the metric.", ) fixed_image_mask = File( exists=True, argstr="%s", desc="mask used to limit metric sampling region of the fixed image", ) moving_image_mask = File( exists=True, requires=["fixed_image_mask"], desc="mask used to limit metric sampling region of the moving image", ) class MeasureImageSimilarityOutputSpec(TraitedSpec): similarity = traits.Float() class MeasureImageSimilarity(ANTSCommand): """ Examples -------- >>> from nipype.interfaces.ants import MeasureImageSimilarity >>> sim = MeasureImageSimilarity() >>> sim.inputs.dimension = 3 >>> sim.inputs.metric = 'MI' >>> sim.inputs.fixed_image = 'T1.nii' >>> sim.inputs.moving_image = 'resting.nii' >>> sim.inputs.metric_weight = 1.0 >>> sim.inputs.radius_or_number_of_bins = 5 >>> sim.inputs.sampling_strategy = 'Regular' >>> sim.inputs.sampling_percentage = 1.0 >>> sim.inputs.fixed_image_mask = 'mask.nii' >>> sim.inputs.moving_image_mask = 'mask.nii.gz' >>> sim.cmdline 'MeasureImageSimilarity --dimensionality 3 --masks ["mask.nii","mask.nii.gz"] \ --metric MI["T1.nii","resting.nii",1.0,5,Regular,1.0]' """ _cmd = "MeasureImageSimilarity" input_spec = MeasureImageSimilarityInputSpec output_spec = MeasureImageSimilarityOutputSpec def _metric_constructor(self): retval = '--metric {metric}["{fixed_image}","{moving_image}",{metric_weight},' "{radius_or_number_of_bins},{sampling_strategy},{sampling_percentage}]".format( metric=self.inputs.metric, fixed_image=self.inputs.fixed_image, moving_image=self.inputs.moving_image, metric_weight=self.inputs.metric_weight, radius_or_number_of_bins=self.inputs.radius_or_number_of_bins, sampling_strategy=self.inputs.sampling_strategy, sampling_percentage=self.inputs.sampling_percentage, ) return retval def _mask_constructor(self): if 
self.inputs.moving_image_mask: retval = '--masks ["{fixed_image_mask}","{moving_image_mask}"]'.format( fixed_image_mask=self.inputs.fixed_image_mask, moving_image_mask=self.inputs.moving_image_mask, ) else: retval = '--masks "{fixed_image_mask}"'.format( fixed_image_mask=self.inputs.fixed_image_mask ) return retval def _format_arg(self, opt, spec, val): if opt == "metric": return self._metric_constructor() elif opt == "fixed_image_mask": return self._mask_constructor() return super(MeasureImageSimilarity, self)._format_arg(opt, spec, val) def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() stdout = runtime.stdout.split("\n") outputs.similarity = float(stdout[0]) return outputs class RegistrationSynQuickInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, argstr="-d %d", usedefault=True, desc="image dimension (2 or 3)" ) fixed_image = InputMultiPath( File(exists=True), mandatory=True, argstr="-f %s...", desc="Fixed image or source image or reference image", ) moving_image = InputMultiPath( File(exists=True), mandatory=True, argstr="-m %s...", desc="Moving image or target image", ) output_prefix = Str( "transform", usedefault=True, argstr="-o %s", desc="A prefix that is prepended to all output files", ) num_threads = traits.Int( default_value=LOCAL_DEFAULT_NUMBER_OF_THREADS, usedefault=True, desc="Number of threads (default = 1)", argstr="-n %d", ) transform_type = traits.Enum( "s", "t", "r", "a", "sr", "b", "br", argstr="-t %s", desc="""\ Transform type * t: translation * r: rigid * a: rigid + affine * s: rigid + affine + deformable syn (default) * sr: rigid + deformable syn * b: rigid + affine + deformable b-spline syn * br: rigid + deformable b-spline syn """, usedefault=True, ) use_histogram_matching = traits.Bool( False, argstr="-j %d", desc="use histogram matching" ) histogram_bins = traits.Int( default_value=32, usedefault=True, argstr="-r %d", desc="histogram bins for mutual information in SyN stage \ (default 
= 32)", ) spline_distance = traits.Int( default_value=26, usedefault=True, argstr="-s %d", desc="spline distance for deformable B-spline SyN transform \ (default = 26)", ) precision_type = traits.Enum( "double", "float", argstr="-p %s", desc="precision type (default = double)", usedefault=True, ) class RegistrationSynQuickOutputSpec(TraitedSpec): warped_image = File(exists=True, desc="Warped image") inverse_warped_image = File(exists=True, desc="Inverse warped image") out_matrix = File(exists=True, desc="Affine matrix") forward_warp_field = File(exists=True, desc="Forward warp field") inverse_warp_field = File(exists=True, desc="Inverse warp field") class RegistrationSynQuick(ANTSCommand): """ Registration using a symmetric image normalization method (SyN). You can read more in Avants et al.; Med Image Anal., 2008 (https://www.ncbi.nlm.nih.gov/pubmed/17659998). Examples -------- >>> from nipype.interfaces.ants import RegistrationSynQuick >>> reg = RegistrationSynQuick() >>> reg.inputs.fixed_image = 'fixed1.nii' >>> reg.inputs.moving_image = 'moving1.nii' >>> reg.inputs.num_threads = 2 >>> reg.cmdline 'antsRegistrationSyNQuick.sh -d 3 -f fixed1.nii -r 32 -m moving1.nii -n 2 -o transform -p d -s 26 -t s' >>> reg.run() # doctest: +SKIP example for multiple images >>> from nipype.interfaces.ants import RegistrationSynQuick >>> reg = RegistrationSynQuick() >>> reg.inputs.fixed_image = ['fixed1.nii', 'fixed2.nii'] >>> reg.inputs.moving_image = ['moving1.nii', 'moving2.nii'] >>> reg.inputs.num_threads = 2 >>> reg.cmdline 'antsRegistrationSyNQuick.sh -d 3 -f fixed1.nii -f fixed2.nii -r 32 -m moving1.nii -m moving2.nii \ -n 2 -o transform -p d -s 26 -t s' >>> reg.run() # doctest: +SKIP """ _cmd = "antsRegistrationSyNQuick.sh" input_spec = RegistrationSynQuickInputSpec output_spec = RegistrationSynQuickOutputSpec def _num_threads_update(self): """ antsRegistrationSyNQuick.sh ignores environment variables, so override environment update from ANTSCommand class """ pass def 
_format_arg(self, name, spec, value): if name == "precision_type": return spec.argstr % value[0] return super(RegistrationSynQuick, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() out_base = os.path.abspath(self.inputs.output_prefix) outputs["warped_image"] = out_base + "Warped.nii.gz" outputs["inverse_warped_image"] = out_base + "InverseWarped.nii.gz" outputs["out_matrix"] = out_base + "0GenericAffine.mat" if self.inputs.transform_type not in ("t", "r", "a"): outputs["forward_warp_field"] = out_base + "1Warp.nii.gz" outputs["inverse_warp_field"] = out_base + "1InverseWarp.nii.gz" return outputs class CompositeTransformUtilInputSpec(ANTSCommandInputSpec): process = traits.Enum( "assemble", "disassemble", argstr="--%s", position=1, usedefault=True, desc="What to do with the transform inputs (assemble or disassemble)", ) out_file = File( exists=False, argstr="%s", position=2, desc="Output file path (only used for disassembly).", ) in_file = InputMultiPath( File(exists=True), mandatory=True, argstr="%s...", position=3, desc="Input transform file(s)", ) output_prefix = Str( "transform", usedefault=True, argstr="%s", position=4, desc="A prefix that is prepended to all output files (only used for assembly).", ) class CompositeTransformUtilOutputSpec(TraitedSpec): affine_transform = File(desc="Affine transform component") displacement_field = File(desc="Displacement field component") out_file = File(desc="Compound transformation file") class CompositeTransformUtil(ANTSCommand): """ ANTs utility which can combine or break apart transform files into their individual constituent components. 
Examples -------- >>> from nipype.interfaces.ants import CompositeTransformUtil >>> tran = CompositeTransformUtil() >>> tran.inputs.process = 'disassemble' >>> tran.inputs.in_file = 'output_Composite.h5' >>> tran.cmdline 'CompositeTransformUtil --disassemble output_Composite.h5 transform' >>> tran.run() # doctest: +SKIP example for assembling transformation files >>> from nipype.interfaces.ants import CompositeTransformUtil >>> tran = CompositeTransformUtil() >>> tran.inputs.process = 'assemble' >>> tran.inputs.out_file = 'my.h5' >>> tran.inputs.in_file = ['AffineTransform.mat', 'DisplacementFieldTransform.nii.gz'] >>> tran.cmdline 'CompositeTransformUtil --assemble my.h5 AffineTransform.mat DisplacementFieldTransform.nii.gz ' >>> tran.run() # doctest: +SKIP """ _cmd = "CompositeTransformUtil" input_spec = CompositeTransformUtilInputSpec output_spec = CompositeTransformUtilOutputSpec def _num_threads_update(self): """ CompositeTransformUtil ignores environment variables, so override environment update from ANTSCommand class """ pass def _format_arg(self, name, spec, value): if name == "output_prefix" and self.inputs.process == "assemble": return "" if name == "out_file" and self.inputs.process == "disassemble": return "" return super(CompositeTransformUtil, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() if self.inputs.process == "disassemble": outputs["affine_transform"] = os.path.abspath( "00_{}_AffineTransform.mat".format(self.inputs.output_prefix) ) outputs["displacement_field"] = os.path.abspath( "01_{}_DisplacementFieldTransform.nii.gz".format( self.inputs.output_prefix ) ) if self.inputs.process == "assemble": outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs nipype-1.7.0/nipype/interfaces/ants/resampling.py000066400000000000000000000556361413403311400221430ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ANTS Apply Transforms interface """ import os from .base import 
ANTSCommand, ANTSCommandInputSpec from ..base import TraitedSpec, File, traits, isdefined, InputMultiObject from ...utils.filemanip import split_filename class WarpTimeSeriesImageMultiTransformInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 4, 3, argstr="%d", usedefault=True, desc="image dimension (3 or 4)", position=1 ) input_image = File( argstr="%s", mandatory=True, copyfile=True, desc=( "image to apply transformation to (generally a " "coregistered functional)" ), ) out_postfix = traits.Str( "_wtsimt", argstr="%s", usedefault=True, desc=("Postfix that is prepended to all output " "files (default = _wtsimt)"), ) reference_image = File( argstr="-R %s", xor=["tightest_box"], desc="reference image space that you wish to warp INTO", ) tightest_box = traits.Bool( argstr="--tightest-bounding-box", desc=( "computes tightest bounding box (overrided by " "reference_image if given)" ), xor=["reference_image"], ) reslice_by_header = traits.Bool( argstr="--reslice-by-header", desc=( "Uses orientation matrix and origin encoded in " "reference image file header. Not typically used " "with additional transforms" ), ) use_nearest = traits.Bool( argstr="--use-NN", desc="Use nearest neighbor interpolation" ) use_bspline = traits.Bool( argstr="--use-Bspline", desc="Use 3rd order B-Spline interpolation" ) transformation_series = InputMultiObject( File(exists=True), argstr="%s", desc="transformation file(s) to be applied", mandatory=True, copyfile=False, ) invert_affine = traits.List( traits.Int, desc=( "List of Affine transformations to invert." "E.g.: [1,4,5] inverts the 1st, 4th, and 5th Affines " "found in transformation_series. Note that indexing " "starts with 1 and does not include warp fields. Affine " "transformations are distinguished " 'from warp fields by the word "affine" included in their filenames.' 
), ) class WarpTimeSeriesImageMultiTransformOutputSpec(TraitedSpec): output_image = File(exists=True, desc="Warped image") class WarpTimeSeriesImageMultiTransform(ANTSCommand): """Warps a time-series from one space to another Examples -------- >>> from nipype.interfaces.ants import WarpTimeSeriesImageMultiTransform >>> wtsimt = WarpTimeSeriesImageMultiTransform() >>> wtsimt.inputs.input_image = 'resting.nii' >>> wtsimt.inputs.reference_image = 'ants_deformed.nii.gz' >>> wtsimt.inputs.transformation_series = ['ants_Warp.nii.gz','ants_Affine.txt'] >>> wtsimt.cmdline 'WarpTimeSeriesImageMultiTransform 4 resting.nii resting_wtsimt.nii -R ants_deformed.nii.gz ants_Warp.nii.gz \ ants_Affine.txt' >>> wtsimt = WarpTimeSeriesImageMultiTransform() >>> wtsimt.inputs.input_image = 'resting.nii' >>> wtsimt.inputs.reference_image = 'ants_deformed.nii.gz' >>> wtsimt.inputs.transformation_series = ['ants_Warp.nii.gz','ants_Affine.txt'] >>> wtsimt.inputs.invert_affine = [1] # # this will invert the 1st Affine file: ants_Affine.txt >>> wtsimt.cmdline 'WarpTimeSeriesImageMultiTransform 4 resting.nii resting_wtsimt.nii -R ants_deformed.nii.gz ants_Warp.nii.gz \ -i ants_Affine.txt' """ _cmd = "WarpTimeSeriesImageMultiTransform" input_spec = WarpTimeSeriesImageMultiTransformInputSpec output_spec = WarpTimeSeriesImageMultiTransformOutputSpec def _format_arg(self, opt, spec, val): if opt == "out_postfix": _, name, ext = split_filename(os.path.abspath(self.inputs.input_image)) return name + val + ext if opt == "transformation_series": series = [] affine_counter = 0 affine_invert = [] for transformation in val: if "Affine" in transformation and isdefined(self.inputs.invert_affine): affine_counter += 1 if affine_counter in self.inputs.invert_affine: series += ["-i"] affine_invert.append(affine_counter) series += [transformation] if isdefined(self.inputs.invert_affine): diff_inv = set(self.inputs.invert_affine) - set(affine_invert) if diff_inv: raise Exceptions( "Review invert_affine, not all 
indexes from invert_affine were used, " "check the description for the full definition" ) return " ".join(series) return super(WarpTimeSeriesImageMultiTransform, self)._format_arg( opt, spec, val ) def _list_outputs(self): outputs = self._outputs().get() _, name, ext = split_filename(os.path.abspath(self.inputs.input_image)) outputs["output_image"] = os.path.join( os.getcwd(), "".join((name, self.inputs.out_postfix, ext)) ) return outputs def _run_interface(self, runtime, correct_return_codes=[0]): runtime = super(WarpTimeSeriesImageMultiTransform, self)._run_interface( runtime, correct_return_codes=[0, 1] ) if "100 % complete" not in runtime.stdout: self.raise_exception(runtime) return runtime class WarpImageMultiTransformInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, argstr="%d", usedefault=True, desc="image dimension (2 or 3)", position=1 ) input_image = File( argstr="%s", mandatory=True, desc=( "image to apply transformation to (generally a " "coregistered functional)" ), position=2, ) output_image = File( genfile=True, hash_files=False, argstr="%s", desc="name of the output warped image", position=3, xor=["out_postfix"], ) out_postfix = File( "_wimt", usedefault=True, hash_files=False, desc=("Postfix that is prepended to all output " "files (default = _wimt)"), xor=["output_image"], ) reference_image = File( argstr="-R %s", xor=["tightest_box"], desc="reference image space that you wish to warp INTO", ) tightest_box = traits.Bool( argstr="--tightest-bounding-box", desc=( "computes tightest bounding box (overrided by " "reference_image if given)" ), xor=["reference_image"], ) reslice_by_header = traits.Bool( argstr="--reslice-by-header", desc=( "Uses orientation matrix and origin encoded in " "reference image file header. 
Not typically used " "with additional transforms" ), ) use_nearest = traits.Bool( argstr="--use-NN", desc="Use nearest neighbor interpolation" ) use_bspline = traits.Bool( argstr="--use-BSpline", desc="Use 3rd order B-Spline interpolation" ) transformation_series = InputMultiObject( File(exists=True), argstr="%s", desc="transformation file(s) to be applied", mandatory=True, position=-1, ) invert_affine = traits.List( traits.Int, desc=( "List of Affine transformations to invert." "E.g.: [1,4,5] inverts the 1st, 4th, and 5th Affines " "found in transformation_series. Note that indexing " "starts with 1 and does not include warp fields. Affine " "transformations are distinguished " 'from warp fields by the word "affine" included in their filenames.' ), ) class WarpImageMultiTransformOutputSpec(TraitedSpec): output_image = File(exists=True, desc="Warped image") class WarpImageMultiTransform(ANTSCommand): """Warps an image from one space to another Examples -------- >>> from nipype.interfaces.ants import WarpImageMultiTransform >>> wimt = WarpImageMultiTransform() >>> wimt.inputs.input_image = 'structural.nii' >>> wimt.inputs.reference_image = 'ants_deformed.nii.gz' >>> wimt.inputs.transformation_series = ['ants_Warp.nii.gz','ants_Affine.txt'] >>> wimt.cmdline 'WarpImageMultiTransform 3 structural.nii structural_wimt.nii -R ants_deformed.nii.gz ants_Warp.nii.gz \ ants_Affine.txt' >>> wimt = WarpImageMultiTransform() >>> wimt.inputs.input_image = 'diffusion_weighted.nii' >>> wimt.inputs.reference_image = 'functional.nii' >>> wimt.inputs.transformation_series = ['func2anat_coreg_Affine.txt','func2anat_InverseWarp.nii.gz', \ 'dwi2anat_Warp.nii.gz','dwi2anat_coreg_Affine.txt'] >>> wimt.inputs.invert_affine = [1] # this will invert the 1st Affine file: 'func2anat_coreg_Affine.txt' >>> wimt.cmdline 'WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii \ -i func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz 
dwi2anat_coreg_Affine.txt' """ _cmd = "WarpImageMultiTransform" input_spec = WarpImageMultiTransformInputSpec output_spec = WarpImageMultiTransformOutputSpec def _gen_filename(self, name): if name == "output_image": _, name, ext = split_filename(os.path.abspath(self.inputs.input_image)) return "".join((name, self.inputs.out_postfix, ext)) return None def _format_arg(self, opt, spec, val): if opt == "transformation_series": series = [] affine_counter = 0 affine_invert = [] for transformation in val: if "affine" in transformation.lower() and isdefined( self.inputs.invert_affine ): affine_counter += 1 if affine_counter in self.inputs.invert_affine: series += ["-i"] affine_invert.append(affine_counter) series += [transformation] if isdefined(self.inputs.invert_affine): diff_inv = set(self.inputs.invert_affine) - set(affine_invert) if diff_inv: raise Exceptions( "Review invert_affine, not all indexes from invert_affine were used, " "check the description for the full definition" ) return " ".join(series) return super(WarpImageMultiTransform, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.output_image): outputs["output_image"] = os.path.abspath(self.inputs.output_image) else: outputs["output_image"] = os.path.abspath( self._gen_filename("output_image") ) return outputs class ApplyTransformsInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 2, 3, 4, argstr="--dimensionality %d", desc=( "This option forces the image to be treated " "as a specified-dimensional image. If not " "specified, antsWarp tries to infer the " "dimensionality from the input image." ), ) input_image_type = traits.Enum( 0, 1, 2, 3, argstr="--input-image-type %d", desc=( "Option specifying the input image " "type of scalar (default), vector, " "tensor, or time series." 
), ) input_image = File( argstr="--input %s", mandatory=True, desc=( "image to apply transformation to (generally a " "coregistered functional)" ), exists=True, ) output_image = traits.Str( argstr="--output %s", desc="output file name", genfile=True, hash_files=False ) out_postfix = traits.Str( "_trans", usedefault=True, desc=("Postfix that is appended to all output " "files (default = _trans)"), ) reference_image = File( argstr="--reference-image %s", mandatory=True, desc="reference image space that you wish to warp INTO", exists=True, ) interpolation = traits.Enum( "Linear", "NearestNeighbor", "CosineWindowedSinc", "WelchWindowedSinc", "HammingWindowedSinc", "LanczosWindowedSinc", "MultiLabel", "Gaussian", "BSpline", argstr="%s", usedefault=True, ) interpolation_parameters = traits.Either( traits.Tuple(traits.Int()), # BSpline (order) traits.Tuple( traits.Float(), traits.Float() # Gaussian/MultiLabel (sigma, alpha) ), ) transforms = InputMultiObject( traits.Either(File(exists=True), "identity"), argstr="%s", mandatory=True, desc="transform files: will be applied in reverse order. For " "example, the last specified transform will be applied first.", ) invert_transform_flags = InputMultiObject(traits.Bool()) default_value = traits.Float(0.0, argstr="--default-value %g", usedefault=True) print_out_composite_warp_file = traits.Bool( False, requires=["output_image"], desc="output a composite warp file instead of a transformed image", ) float = traits.Bool( argstr="--float %d", default_value=False, usedefault=True, desc="Use float instead of double for computations.", ) class ApplyTransformsOutputSpec(TraitedSpec): output_image = File(exists=True, desc="Warped image") class ApplyTransforms(ANTSCommand): """ApplyTransforms, applied to an input image, transforms it according to a reference image and a transform (or a set of transforms). 
Examples -------- >>> from nipype.interfaces.ants import ApplyTransforms >>> at = ApplyTransforms() >>> at.inputs.input_image = 'moving1.nii' >>> at.inputs.reference_image = 'fixed1.nii' >>> at.inputs.transforms = 'identity' >>> at.cmdline 'antsApplyTransforms --default-value 0 --float 0 --input moving1.nii \ --interpolation Linear --output moving1_trans.nii \ --reference-image fixed1.nii --transform identity' >>> at = ApplyTransforms() >>> at.inputs.dimension = 3 >>> at.inputs.input_image = 'moving1.nii' >>> at.inputs.reference_image = 'fixed1.nii' >>> at.inputs.output_image = 'deformed_moving1.nii' >>> at.inputs.interpolation = 'Linear' >>> at.inputs.default_value = 0 >>> at.inputs.transforms = ['ants_Warp.nii.gz', 'trans.mat'] >>> at.inputs.invert_transform_flags = [False, True] >>> at.cmdline 'antsApplyTransforms --default-value 0 --dimensionality 3 --float 0 --input moving1.nii \ --interpolation Linear --output deformed_moving1.nii --reference-image fixed1.nii \ --transform ants_Warp.nii.gz --transform [ trans.mat, 1 ]' >>> at1 = ApplyTransforms() >>> at1.inputs.dimension = 3 >>> at1.inputs.input_image = 'moving1.nii' >>> at1.inputs.reference_image = 'fixed1.nii' >>> at1.inputs.output_image = 'deformed_moving1.nii' >>> at1.inputs.interpolation = 'BSpline' >>> at1.inputs.interpolation_parameters = (5,) >>> at1.inputs.default_value = 0 >>> at1.inputs.transforms = ['ants_Warp.nii.gz', 'trans.mat'] >>> at1.inputs.invert_transform_flags = [False, False] >>> at1.cmdline 'antsApplyTransforms --default-value 0 --dimensionality 3 --float 0 --input moving1.nii \ --interpolation BSpline[ 5 ] --output deformed_moving1.nii --reference-image fixed1.nii \ --transform ants_Warp.nii.gz --transform trans.mat' Identity transforms may be used as part of a chain: >>> at2 = ApplyTransforms() >>> at2.inputs.dimension = 3 >>> at2.inputs.input_image = 'moving1.nii' >>> at2.inputs.reference_image = 'fixed1.nii' >>> at2.inputs.output_image = 'deformed_moving1.nii' >>> 
at2.inputs.interpolation = 'BSpline' >>> at2.inputs.interpolation_parameters = (5,) >>> at2.inputs.default_value = 0 >>> at2.inputs.transforms = ['identity', 'ants_Warp.nii.gz', 'trans.mat'] >>> at2.cmdline 'antsApplyTransforms --default-value 0 --dimensionality 3 --float 0 --input moving1.nii \ --interpolation BSpline[ 5 ] --output deformed_moving1.nii --reference-image fixed1.nii \ --transform identity --transform ants_Warp.nii.gz --transform trans.mat' """ _cmd = "antsApplyTransforms" input_spec = ApplyTransformsInputSpec output_spec = ApplyTransformsOutputSpec def _gen_filename(self, name): if name == "output_image": output = self.inputs.output_image if not isdefined(output): _, name, ext = split_filename(self.inputs.input_image) output = name + self.inputs.out_postfix + ext return output return None def _get_transform_filenames(self): retval = [] invert_flags = self.inputs.invert_transform_flags if not isdefined(invert_flags): invert_flags = [False] * len(self.inputs.transforms) elif len(self.inputs.transforms) != len(invert_flags): raise ValueError( "ERROR: The invert_transform_flags list must have the same number " "of entries as the transforms list." 
) for transform, invert in zip(self.inputs.transforms, invert_flags): if invert: retval.append(f"--transform [ {transform}, 1 ]") else: retval.append(f"--transform {transform}") return " ".join(retval) def _get_output_warped_filename(self): if isdefined(self.inputs.print_out_composite_warp_file): return "--output [ %s, %d ]" % ( self._gen_filename("output_image"), int(self.inputs.print_out_composite_warp_file), ) else: return "--output %s" % (self._gen_filename("output_image")) def _format_arg(self, opt, spec, val): if opt == "output_image": return self._get_output_warped_filename() elif opt == "transforms": return self._get_transform_filenames() elif opt == "interpolation": if ( self.inputs.interpolation in [ "BSpline", "MultiLabel", "Gaussian", ] and isdefined(self.inputs.interpolation_parameters) ): return "--interpolation %s[ %s ]" % ( self.inputs.interpolation, ", ".join( [str(param) for param in self.inputs.interpolation_parameters] ), ) else: return "--interpolation %s" % self.inputs.interpolation return super(ApplyTransforms, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() outputs["output_image"] = os.path.abspath(self._gen_filename("output_image")) return outputs class ApplyTransformsToPointsInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 2, 3, 4, argstr="--dimensionality %d", desc=( "This option forces the image to be treated " "as a specified-dimensional image. If not " "specified, antsWarp tries to infer the " "dimensionality from the input image." ), ) input_file = File( argstr="--input %s", mandatory=True, desc=( "Currently, the only input supported is a csv file with" " columns including x,y (2D), x,y,z (3D) or x,y,z,t,label (4D) column headers." " The points should be defined in physical space." 
" If in doubt how to convert coordinates from your files to the space" " required by antsApplyTransformsToPoints try creating/drawing a simple" " label volume with only one voxel set to 1 and all others set to 0." " Write down the voxel coordinates. Then use ImageMaths LabelStats to find" " out what coordinates for this voxel antsApplyTransformsToPoints is" " expecting." ), exists=True, ) output_file = traits.Str( argstr="--output %s", desc="Name of the output CSV file", name_source=["input_file"], hash_files=False, name_template="%s_transformed.csv", ) transforms = traits.List( File(exists=True), argstr="%s", mandatory=True, desc="transforms that will be applied to the points", ) invert_transform_flags = traits.List( traits.Bool(), desc="list indicating if a transform should be reversed" ) class ApplyTransformsToPointsOutputSpec(TraitedSpec): output_file = File(exists=True, desc="csv file with transformed coordinates") class ApplyTransformsToPoints(ANTSCommand): """ApplyTransformsToPoints, applied to an CSV file, transforms coordinates using provided transform (or a set of transforms). 
Examples -------- >>> from nipype.interfaces.ants import ApplyTransforms >>> at = ApplyTransformsToPoints() >>> at.inputs.dimension = 3 >>> at.inputs.input_file = 'moving.csv' >>> at.inputs.transforms = ['trans.mat', 'ants_Warp.nii.gz'] >>> at.inputs.invert_transform_flags = [False, False] >>> at.cmdline 'antsApplyTransformsToPoints --dimensionality 3 --input moving.csv --output moving_transformed.csv \ --transform [ trans.mat, 0 ] --transform [ ants_Warp.nii.gz, 0 ]' """ _cmd = "antsApplyTransformsToPoints" input_spec = ApplyTransformsToPointsInputSpec output_spec = ApplyTransformsToPointsOutputSpec def _get_transform_filenames(self): retval = [] for ii in range(len(self.inputs.transforms)): if isdefined(self.inputs.invert_transform_flags): if len(self.inputs.transforms) == len( self.inputs.invert_transform_flags ): invert_code = 1 if self.inputs.invert_transform_flags[ii] else 0 retval.append( "--transform [ %s, %d ]" % (self.inputs.transforms[ii], invert_code) ) else: raise Exception( ( "ERROR: The useInverse list must have the same number " "of entries as the transformsFileName list." 
) ) else: retval.append("--transform %s" % self.inputs.transforms[ii]) return " ".join(retval) def _format_arg(self, opt, spec, val): if opt == "transforms": return self._get_transform_filenames() return super(ApplyTransformsToPoints, self)._format_arg(opt, spec, val) nipype-1.7.0/nipype/interfaces/ants/segmentation.py000066400000000000000000002075111413403311400224660ustar00rootroot00000000000000"""Wrappers for segmentation utilities within ANTs.""" import os from glob import glob from ...external.due import BibTeX from ...utils.filemanip import split_filename, copyfile, which, fname_presuffix from ..base import TraitedSpec, File, traits, InputMultiPath, OutputMultiPath, isdefined from ..mixins import CopyHeaderInterface from .base import ANTSCommand, ANTSCommandInputSpec class AtroposInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, 4, argstr="--image-dimensionality %d", usedefault=True, desc="image dimension (2, 3, or 4)", ) intensity_images = InputMultiPath( File(exists=True), argstr="--intensity-image %s...", mandatory=True ) mask_image = File(exists=True, argstr="--mask-image %s", mandatory=True) initialization = traits.Enum( "Random", "Otsu", "KMeans", "PriorProbabilityImages", "PriorLabelImage", argstr="%s", requires=["number_of_tissue_classes"], mandatory=True, ) kmeans_init_centers = traits.List(traits.Either(traits.Int, traits.Float), minlen=1) prior_image = traits.Either( File(exists=True), traits.Str, desc="either a string pattern (e.g., 'prior%02d.nii') or an existing vector-image file.", ) number_of_tissue_classes = traits.Int(mandatory=True) prior_weighting = traits.Float() prior_probability_threshold = traits.Float(requires=["prior_weighting"]) likelihood_model = traits.Str(argstr="--likelihood-model %s") mrf_smoothing_factor = traits.Float(argstr="%s") mrf_radius = traits.List(traits.Int(), requires=["mrf_smoothing_factor"]) icm_use_synchronous_update = traits.Bool(argstr="%s") maximum_number_of_icm_terations = traits.Int( 
requires=["icm_use_synchronous_update"] ) n_iterations = traits.Int(argstr="%s") convergence_threshold = traits.Float(requires=["n_iterations"]) posterior_formulation = traits.Str(argstr="%s") use_random_seed = traits.Bool( True, argstr="--use-random-seed %d", desc="use random seed value over constant", usedefault=True, ) use_mixture_model_proportions = traits.Bool(requires=["posterior_formulation"]) out_classified_image_name = File(argstr="%s", genfile=True, hash_files=False) save_posteriors = traits.Bool() output_posteriors_name_template = traits.Str( "POSTERIOR_%02d.nii.gz", usedefault=True ) class AtroposOutputSpec(TraitedSpec): classified_image = File(exists=True) posteriors = OutputMultiPath(File(exist=True)) class Atropos(ANTSCommand): """ A multivariate n-class segmentation algorithm. A finite mixture modeling (FMM) segmentation approach with possibilities for specifying prior constraints. These prior constraints include the specification of a prior label image, prior probability images (one for each class), and/or an MRF prior to enforce spatial smoothing of the labels. Similar algorithms include FAST and SPM. Examples -------- >>> from nipype.interfaces.ants import Atropos >>> at = Atropos( ... dimension=3, intensity_images='structural.nii', mask_image='mask.nii', ... number_of_tissue_classes=2, likelihood_model='Gaussian', save_posteriors=True, ... mrf_smoothing_factor=0.2, mrf_radius=[1, 1, 1], icm_use_synchronous_update=True, ... maximum_number_of_icm_terations=1, n_iterations=5, convergence_threshold=0.000001, ... 
posterior_formulation='Socrates', use_mixture_model_proportions=True) >>> at.inputs.initialization = 'Random' >>> at.cmdline 'Atropos --image-dimensionality 3 --icm [1,1] --initialization Random[2] --intensity-image structural.nii --likelihood-model Gaussian --mask-image mask.nii --mrf [0.2,1x1x1] --convergence [5,1e-06] --output [structural_labeled.nii,POSTERIOR_%02d.nii.gz] --posterior-formulation Socrates[1] --use-random-seed 1' >>> at = Atropos( ... dimension=3, intensity_images='structural.nii', mask_image='mask.nii', ... number_of_tissue_classes=2, likelihood_model='Gaussian', save_posteriors=True, ... mrf_smoothing_factor=0.2, mrf_radius=[1, 1, 1], icm_use_synchronous_update=True, ... maximum_number_of_icm_terations=1, n_iterations=5, convergence_threshold=0.000001, ... posterior_formulation='Socrates', use_mixture_model_proportions=True) >>> at.inputs.initialization = 'KMeans' >>> at.inputs.kmeans_init_centers = [100, 200] >>> at.cmdline 'Atropos --image-dimensionality 3 --icm [1,1] --initialization KMeans[2,100,200] --intensity-image structural.nii --likelihood-model Gaussian --mask-image mask.nii --mrf [0.2,1x1x1] --convergence [5,1e-06] --output [structural_labeled.nii,POSTERIOR_%02d.nii.gz] --posterior-formulation Socrates[1] --use-random-seed 1' >>> at = Atropos( ... dimension=3, intensity_images='structural.nii', mask_image='mask.nii', ... number_of_tissue_classes=2, likelihood_model='Gaussian', save_posteriors=True, ... mrf_smoothing_factor=0.2, mrf_radius=[1, 1, 1], icm_use_synchronous_update=True, ... maximum_number_of_icm_terations=1, n_iterations=5, convergence_threshold=0.000001, ... 
posterior_formulation='Socrates', use_mixture_model_proportions=True) >>> at.inputs.initialization = 'PriorProbabilityImages' >>> at.inputs.prior_image = 'BrainSegmentationPrior%02d.nii.gz' >>> at.inputs.prior_weighting = 0.8 >>> at.inputs.prior_probability_threshold = 0.0000001 >>> at.cmdline 'Atropos --image-dimensionality 3 --icm [1,1] --initialization PriorProbabilityImages[2,BrainSegmentationPrior%02d.nii.gz,0.8,1e-07] --intensity-image structural.nii --likelihood-model Gaussian --mask-image mask.nii --mrf [0.2,1x1x1] --convergence [5,1e-06] --output [structural_labeled.nii,POSTERIOR_%02d.nii.gz] --posterior-formulation Socrates[1] --use-random-seed 1' >>> at = Atropos( ... dimension=3, intensity_images='structural.nii', mask_image='mask.nii', ... number_of_tissue_classes=2, likelihood_model='Gaussian', save_posteriors=True, ... mrf_smoothing_factor=0.2, mrf_radius=[1, 1, 1], icm_use_synchronous_update=True, ... maximum_number_of_icm_terations=1, n_iterations=5, convergence_threshold=0.000001, ... 
posterior_formulation='Socrates', use_mixture_model_proportions=True) >>> at.inputs.initialization = 'PriorLabelImage' >>> at.inputs.prior_image = 'segmentation0.nii.gz' >>> at.inputs.number_of_tissue_classes = 2 >>> at.inputs.prior_weighting = 0.8 >>> at.cmdline 'Atropos --image-dimensionality 3 --icm [1,1] --initialization PriorLabelImage[2,segmentation0.nii.gz,0.8] --intensity-image structural.nii --likelihood-model Gaussian --mask-image mask.nii --mrf [0.2,1x1x1] --convergence [5,1e-06] --output [structural_labeled.nii,POSTERIOR_%02d.nii.gz] --posterior-formulation Socrates[1] --use-random-seed 1' """ input_spec = AtroposInputSpec output_spec = AtroposOutputSpec _cmd = "Atropos" def _format_arg(self, opt, spec, val): if opt == "initialization": n_classes = self.inputs.number_of_tissue_classes brackets = ["%d" % n_classes] if val == "KMeans" and isdefined(self.inputs.kmeans_init_centers): centers = sorted(set(self.inputs.kmeans_init_centers)) if len(centers) != n_classes: raise ValueError( "KMeans initialization with initial cluster centers requires " "the number of centers to match number_of_tissue_classes" ) brackets += ["%g" % c for c in centers] if val in ("PriorProbabilityImages", "PriorLabelImage"): if not isdefined(self.inputs.prior_image) or not isdefined( self.inputs.prior_weighting ): raise ValueError( "'%s' initialization requires setting " "prior_image and prior_weighting" % val ) priors_paths = [self.inputs.prior_image] if "%02d" in priors_paths[0]: if val == "PriorLabelImage": raise ValueError( "'PriorLabelImage' initialization does not " "accept patterns for prior_image." ) priors_paths = [ priors_paths[0] % i for i in range(1, n_classes + 1) ] if not all([os.path.exists(p) for p in priors_paths]): raise FileNotFoundError( "One or more prior images do not exist: " "%s." 
% ", ".join(priors_paths) ) brackets += [ self.inputs.prior_image, "%g" % self.inputs.prior_weighting, ] if val == "PriorProbabilityImages" and isdefined( self.inputs.prior_probability_threshold ): brackets.append("%g" % self.inputs.prior_probability_threshold) return "--initialization %s[%s]" % (val, ",".join(brackets)) if opt == "mrf_smoothing_factor": retval = "--mrf [%g" % val if isdefined(self.inputs.mrf_radius): retval += ",%s" % self._format_xarray( [str(s) for s in self.inputs.mrf_radius] ) return retval + "]" if opt == "icm_use_synchronous_update": retval = "--icm [%d" % val if isdefined(self.inputs.maximum_number_of_icm_terations): retval += ",%g" % self.inputs.maximum_number_of_icm_terations return retval + "]" if opt == "n_iterations": retval = "--convergence [%d" % val if isdefined(self.inputs.convergence_threshold): retval += ",%g" % self.inputs.convergence_threshold return retval + "]" if opt == "posterior_formulation": retval = "--posterior-formulation %s" % val if isdefined(self.inputs.use_mixture_model_proportions): retval += "[%d]" % self.inputs.use_mixture_model_proportions return retval if opt == "out_classified_image_name": retval = "--output [%s" % val if isdefined(self.inputs.save_posteriors): retval += ",%s" % self.inputs.output_posteriors_name_template return retval + "]" return super(Atropos, self)._format_arg(opt, spec, val) def _gen_filename(self, name): if name == "out_classified_image_name": output = self.inputs.out_classified_image_name if not isdefined(output): _, name, ext = split_filename(self.inputs.intensity_images[0]) output = name + "_labeled" + ext return output def _list_outputs(self): outputs = self._outputs().get() outputs["classified_image"] = os.path.abspath( self._gen_filename("out_classified_image_name") ) if isdefined(self.inputs.save_posteriors) and self.inputs.save_posteriors: outputs["posteriors"] = [] for i in range(self.inputs.number_of_tissue_classes): outputs["posteriors"].append( os.path.abspath( 
self.inputs.output_posteriors_name_template % (i + 1) ) ) return outputs class LaplacianThicknessInputSpec(ANTSCommandInputSpec): input_wm = File( argstr="%s", mandatory=True, copyfile=True, desc="white matter segmentation image", position=1, ) input_gm = File( argstr="%s", mandatory=True, copyfile=True, desc="gray matter segmentation image", position=2, ) output_image = traits.Str( desc="name of output file", argstr="%s", position=3, name_source=["input_wm"], name_template="%s_thickness", keep_extension=True, hash_files=False, ) smooth_param = traits.Float( argstr="%s", desc="Sigma of the Laplacian Recursive Image Filter (defaults to 1)", position=4, ) prior_thickness = traits.Float( argstr="%s", desc="Prior thickness (defaults to 500)", requires=["smooth_param"], position=5, ) dT = traits.Float( argstr="%s", desc="Time delta used during integration (defaults to 0.01)", requires=["prior_thickness"], position=6, ) sulcus_prior = traits.Float( argstr="%s", desc="Positive floating point number for sulcus prior. 
" "Authors said that 0.15 might be a reasonable value", requires=["dT"], position=7, ) tolerance = traits.Float( argstr="%s", desc="Tolerance to reach during optimization (defaults to 0.001)", requires=["sulcus_prior"], position=8, ) class LaplacianThicknessOutputSpec(TraitedSpec): output_image = File(exists=True, desc="Cortical thickness") class LaplacianThickness(ANTSCommand): """Calculates the cortical thickness from an anatomical image Examples -------- >>> from nipype.interfaces.ants import LaplacianThickness >>> cort_thick = LaplacianThickness() >>> cort_thick.inputs.input_wm = 'white_matter.nii.gz' >>> cort_thick.inputs.input_gm = 'gray_matter.nii.gz' >>> cort_thick.cmdline 'LaplacianThickness white_matter.nii.gz gray_matter.nii.gz white_matter_thickness.nii.gz' >>> cort_thick.inputs.output_image = 'output_thickness.nii.gz' >>> cort_thick.cmdline 'LaplacianThickness white_matter.nii.gz gray_matter.nii.gz output_thickness.nii.gz' """ _cmd = "LaplacianThickness" input_spec = LaplacianThicknessInputSpec output_spec = LaplacianThicknessOutputSpec class N4BiasFieldCorrectionInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, 4, argstr="-d %d", usedefault=True, desc="image dimension (2, 3 or 4)" ) input_image = File( argstr="--input-image %s", mandatory=True, desc=( "input for bias correction. Negative values or values close to " "zero should be processed prior to correction" ), ) mask_image = File( argstr="--mask-image %s", desc=("image to specify region to perform final bias correction in"), ) weight_image = File( argstr="--weight-image %s", desc=( "image for relative weighting (e.g. probability map of the white " "matter) of voxels during the B-spline fitting. 
" ), ) output_image = traits.Str( argstr="--output %s", desc="output file name", name_source=["input_image"], name_template="%s_corrected", keep_extension=True, hash_files=False, ) bspline_fitting_distance = traits.Float(argstr="--bspline-fitting %s") bspline_order = traits.Int(requires=["bspline_fitting_distance"]) shrink_factor = traits.Int(argstr="--shrink-factor %d") n_iterations = traits.List(traits.Int(), argstr="--convergence %s") convergence_threshold = traits.Float(requires=["n_iterations"]) save_bias = traits.Bool( False, mandatory=True, usedefault=True, desc=("True if the estimated bias should be saved to file."), xor=["bias_image"], ) bias_image = File(desc="Filename for the estimated bias.", hash_files=False) copy_header = traits.Bool( False, mandatory=True, usedefault=True, desc="copy headers of the original image into the output (corrected) file", ) rescale_intensities = traits.Bool( False, usedefault=True, argstr="-r", min_ver="2.1.0", desc="""\ [NOTE: Only ANTs>=2.1.0] At each iteration, a new intensity mapping is calculated and applied but there is nothing which constrains the new intensity range to be within certain values. The result is that the range can "drift" from the original at each iteration. This option rescales to the [min,max] range of the original image intensities within the user-specified mask.""", ) histogram_sharpening = traits.Tuple( (0.15, 0.01, 200), traits.Float, traits.Float, traits.Int, argstr="--histogram-sharpening [%g,%g,%d]", desc="""\ Three-values tuple of histogram sharpening parameters \ (FWHM, wienerNose, numberOfHistogramBins). These options describe the histogram sharpening parameters, i.e. the \ deconvolution step parameters described in the original N3 algorithm. 
The default values have been shown to work fairly well.""", ) class N4BiasFieldCorrectionOutputSpec(TraitedSpec): output_image = File(exists=True, desc="Warped image") bias_image = File(exists=True, desc="Estimated bias") class N4BiasFieldCorrection(ANTSCommand, CopyHeaderInterface): """ Bias field correction. N4 is a variant of the popular N3 (nonparameteric nonuniform normalization) retrospective bias correction algorithm. Based on the assumption that the corruption of the low frequency bias field can be modeled as a convolution of the intensity histogram by a Gaussian, the basic algorithmic protocol is to iterate between deconvolving the intensity histogram by a Gaussian, remapping the intensities, and then spatially smoothing this result by a B-spline modeling of the bias field itself. The modifications from and improvements obtained over the original N3 algorithm are described in [Tustison2010]_. .. [Tustison2010] N. Tustison et al., N4ITK: Improved N3 Bias Correction, IEEE Transactions on Medical Imaging, 29(6):1310-1320, June 2010. 
Examples -------- >>> import copy >>> from nipype.interfaces.ants import N4BiasFieldCorrection >>> n4 = N4BiasFieldCorrection() >>> n4.inputs.dimension = 3 >>> n4.inputs.input_image = 'structural.nii' >>> n4.inputs.bspline_fitting_distance = 300 >>> n4.inputs.shrink_factor = 3 >>> n4.inputs.n_iterations = [50,50,30,20] >>> n4.cmdline 'N4BiasFieldCorrection --bspline-fitting [ 300 ] -d 3 --input-image structural.nii --convergence [ 50x50x30x20 ] --output structural_corrected.nii --shrink-factor 3' >>> n4_2 = copy.deepcopy(n4) >>> n4_2.inputs.convergence_threshold = 1e-6 >>> n4_2.cmdline 'N4BiasFieldCorrection --bspline-fitting [ 300 ] -d 3 --input-image structural.nii --convergence [ 50x50x30x20, 1e-06 ] --output structural_corrected.nii --shrink-factor 3' >>> n4_3 = copy.deepcopy(n4_2) >>> n4_3.inputs.bspline_order = 5 >>> n4_3.cmdline 'N4BiasFieldCorrection --bspline-fitting [ 300, 5 ] -d 3 --input-image structural.nii --convergence [ 50x50x30x20, 1e-06 ] --output structural_corrected.nii --shrink-factor 3' >>> n4_4 = N4BiasFieldCorrection() >>> n4_4.inputs.input_image = 'structural.nii' >>> n4_4.inputs.save_bias = True >>> n4_4.inputs.dimension = 3 >>> n4_4.cmdline 'N4BiasFieldCorrection -d 3 --input-image structural.nii --output [ structural_corrected.nii, structural_bias.nii ]' >>> n4_5 = N4BiasFieldCorrection() >>> n4_5.inputs.input_image = 'structural.nii' >>> n4_5.inputs.dimension = 3 >>> n4_5.inputs.histogram_sharpening = (0.12, 0.02, 200) >>> n4_5.cmdline 'N4BiasFieldCorrection -d 3 --histogram-sharpening [0.12,0.02,200] --input-image structural.nii --output structural_corrected.nii' """ _cmd = "N4BiasFieldCorrection" input_spec = N4BiasFieldCorrectionInputSpec output_spec = N4BiasFieldCorrectionOutputSpec _copy_header_map = { "output_image": ("input_image", False), "bias_image": ("input_image", True), } def __init__(self, *args, **kwargs): """Instantiate the N4BiasFieldCorrection interface.""" self._out_bias_file = None super(N4BiasFieldCorrection, 
self).__init__(*args, **kwargs) def _format_arg(self, name, trait_spec, value): if name == "output_image" and self._out_bias_file: newval = "[ %s, %s ]" % (value, self._out_bias_file) return trait_spec.argstr % newval if name == "bspline_fitting_distance": if isdefined(self.inputs.bspline_order): newval = "[ %g, %d ]" % (value, self.inputs.bspline_order) else: newval = "[ %g ]" % value return trait_spec.argstr % newval if name == "n_iterations": if isdefined(self.inputs.convergence_threshold): newval = "[ %s, %g ]" % ( self._format_xarray([str(elt) for elt in value]), self.inputs.convergence_threshold, ) else: newval = "[ %s ]" % self._format_xarray([str(elt) for elt in value]) return trait_spec.argstr % newval return super(N4BiasFieldCorrection, self)._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): skip = (skip or []) + ["save_bias", "bias_image"] self._out_bias_file = None if self.inputs.save_bias or isdefined(self.inputs.bias_image): bias_image = self.inputs.bias_image if not isdefined(bias_image): bias_image = fname_presuffix( os.path.basename(self.inputs.input_image), suffix="_bias" ) self._out_bias_file = bias_image return super(N4BiasFieldCorrection, self)._parse_inputs(skip=skip) def _list_outputs(self): outputs = super(N4BiasFieldCorrection, self)._list_outputs() if self._out_bias_file: outputs["bias_image"] = os.path.abspath(self._out_bias_file) return outputs class CorticalThicknessInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, argstr="-d %d", usedefault=True, desc="image dimension (2 or 3)" ) anatomical_image = File( exists=True, argstr="-a %s", desc=( "Structural *intensity* image, typically T1." " If more than one anatomical image is specified," " subsequently specified images are used during the" " segmentation process. However, only the first" " image is used in the registration of priors." " Our suggestion would be to specify the T1" " as the first image." 
), mandatory=True, ) brain_template = File( exists=True, argstr="-e %s", desc=( "Anatomical *intensity* template (possibly created using a" " population data set with buildtemplateparallel.sh in ANTs)." " This template is *not* skull-stripped." ), mandatory=True, ) brain_probability_mask = File( exists=True, argstr="-m %s", desc="brain probability mask in template space", copyfile=False, mandatory=True, ) segmentation_priors = InputMultiPath( File(exists=True), argstr="-p %s", mandatory=True ) out_prefix = traits.Str( "antsCT_", argstr="-o %s", usedefault=True, desc=("Prefix that is prepended to all output files"), ) image_suffix = traits.Str( "nii.gz", desc=("any of standard ITK formats, nii.gz is default"), argstr="-s %s", usedefault=True, ) t1_registration_template = File( exists=True, desc=( "Anatomical *intensity* template" " (assumed to be skull-stripped). A common" " case would be where this would be the same" " template as specified in the -e option which" " is not skull stripped." ), argstr="-t %s", mandatory=True, ) extraction_registration_mask = File( exists=True, argstr="-f %s", desc=( "Mask (defined in the template space) used during" " registration for brain extraction." ), ) keep_temporary_files = traits.Int( argstr="-k %d", desc="Keep brain extraction/segmentation warps, etc (default = 0).", ) max_iterations = traits.Int( argstr="-i %d", desc=("ANTS registration max iterations (default = 100x100x70x20)"), ) prior_segmentation_weight = traits.Float( argstr="-w %f", desc=("Atropos spatial prior *probability* weight for the segmentation"), ) segmentation_iterations = traits.Int( argstr="-n %d", desc=("N4 -> Atropos -> N4 iterations during segmentation (default = 3)"), ) posterior_formulation = traits.Str( argstr="-b %s", desc=( "Atropos posterior formulation and whether or not" " to use mixture model proportions." 
""" e.g 'Socrates[1]' (default) or 'Aristotle[1]'.""" " Choose the latter if you" " want use the distance priors (see also the -l option" " for label propagation control)." ), ) use_floatingpoint_precision = traits.Enum( 0, 1, argstr="-j %d", desc=("Use floating point precision in registrations (default = 0)"), ) use_random_seeding = traits.Enum( 0, 1, argstr="-u %d", desc=("Use random number generated from system clock in Atropos (default = 1)"), ) b_spline_smoothing = traits.Bool( argstr="-v", desc=( "Use B-spline SyN for registrations and B-spline" " exponential mapping in DiReCT." ), ) cortical_label_image = File( exists=True, desc="Cortical ROI labels to use as a prior for ATITH." ) label_propagation = traits.Str( argstr="-l %s", desc=( "Incorporate a distance prior one the posterior formulation. Should be" """ of the form 'label[lambda,boundaryProbability]' where label""" " is a value of 1,2,3,... denoting label ID. The label" " probability for anything outside the current label" " = boundaryProbability * exp( -lambda * distanceFromBoundary )" " Intuitively, smaller lambda values will increase the spatial capture" " range of the distance prior. To apply to all label values, simply omit" " specifying the label, i.e. -l [lambda,boundaryProbability]." ), ) quick_registration = traits.Bool( argstr="-q 1", desc=( "If = 1, use antsRegistrationSyNQuick.sh as the basis for registration" " during brain extraction, brain segmentation, and" " (optional) normalization to a template." " Otherwise use antsRegistrationSyN.sh (default = 0)." ), ) debug = traits.Bool( argstr="-z 1", desc=( "If > 0, runs a faster version of the script." " Only for testing. Implies -u 0." " Requires single thread computation for complete reproducibility." 
), ) class CorticalThicknessOutputSpec(TraitedSpec): BrainExtractionMask = File(exists=True, desc="brain extraction mask") ExtractedBrainN4 = File(exists=True, desc="extracted brain from N4 image") BrainSegmentation = File(exists=True, desc="brain segmentaion image") BrainSegmentationN4 = File(exists=True, desc="N4 corrected image") BrainSegmentationPosteriors = OutputMultiPath( File(exists=True), desc="Posterior probability images" ) CorticalThickness = File(exists=True, desc="cortical thickness file") TemplateToSubject1GenericAffine = File( exists=True, desc="Template to subject affine" ) TemplateToSubject0Warp = File(exists=True, desc="Template to subject warp") SubjectToTemplate1Warp = File(exists=True, desc="Template to subject inverse warp") SubjectToTemplate0GenericAffine = File( exists=True, desc="Template to subject inverse affine" ) SubjectToTemplateLogJacobian = File( exists=True, desc="Template to subject log jacobian" ) CorticalThicknessNormedToTemplate = File( exists=True, desc="Normalized cortical thickness" ) BrainVolumes = File(exists=True, desc="Brain volumes as text") class CorticalThickness(ANTSCommand): """ Examples -------- >>> from nipype.interfaces.ants.segmentation import CorticalThickness >>> corticalthickness = CorticalThickness() >>> corticalthickness.inputs.dimension = 3 >>> corticalthickness.inputs.anatomical_image ='T1.nii.gz' >>> corticalthickness.inputs.brain_template = 'study_template.nii.gz' >>> corticalthickness.inputs.brain_probability_mask ='ProbabilityMaskOfStudyTemplate.nii.gz' >>> corticalthickness.inputs.segmentation_priors = ['BrainSegmentationPrior01.nii.gz', ... 'BrainSegmentationPrior02.nii.gz', ... 'BrainSegmentationPrior03.nii.gz', ... 
'BrainSegmentationPrior04.nii.gz'] >>> corticalthickness.inputs.t1_registration_template = 'brain_study_template.nii.gz' >>> corticalthickness.cmdline 'antsCorticalThickness.sh -a T1.nii.gz -m ProbabilityMaskOfStudyTemplate.nii.gz -e study_template.nii.gz -d 3 -s nii.gz -o antsCT_ -p nipype_priors/BrainSegmentationPrior%02d.nii.gz -t brain_study_template.nii.gz' """ input_spec = CorticalThicknessInputSpec output_spec = CorticalThicknessOutputSpec _cmd = "antsCorticalThickness.sh" def _format_arg(self, opt, spec, val): if opt == "anatomical_image": retval = "-a %s" % val return retval if opt == "brain_template": retval = "-e %s" % val return retval if opt == "brain_probability_mask": retval = "-m %s" % val return retval if opt == "out_prefix": retval = "-o %s" % val return retval if opt == "t1_registration_template": retval = "-t %s" % val return retval if opt == "segmentation_priors": _, _, ext = split_filename(self.inputs.segmentation_priors[0]) retval = "-p nipype_priors/BrainSegmentationPrior%02d" + ext return retval return super(CorticalThickness, self)._format_arg(opt, spec, val) def _run_interface(self, runtime, correct_return_codes=[0]): priors_directory = os.path.join(os.getcwd(), "nipype_priors") if not os.path.exists(priors_directory): os.makedirs(priors_directory) _, _, ext = split_filename(self.inputs.segmentation_priors[0]) for i, f in enumerate(self.inputs.segmentation_priors): target = os.path.join( priors_directory, "BrainSegmentationPrior%02d" % (i + 1) + ext ) if not ( os.path.exists(target) and os.path.realpath(target) == os.path.abspath(f) ): copyfile(os.path.abspath(f), target) runtime = super(CorticalThickness, self)._run_interface(runtime) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["BrainExtractionMask"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainExtractionMask." 
+ self.inputs.image_suffix, ) outputs["ExtractedBrainN4"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "ExtractedBrain0N4." + self.inputs.image_suffix, ) outputs["BrainSegmentation"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainSegmentation." + self.inputs.image_suffix, ) outputs["BrainSegmentationN4"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainSegmentation0N4." + self.inputs.image_suffix, ) posteriors = [] for i in range(len(self.inputs.segmentation_priors)): posteriors.append( os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainSegmentationPosteriors%02d." % (i + 1) + self.inputs.image_suffix, ) ) outputs["BrainSegmentationPosteriors"] = posteriors outputs["CorticalThickness"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "CorticalThickness." + self.inputs.image_suffix, ) outputs["TemplateToSubject1GenericAffine"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "TemplateToSubject1GenericAffine.mat" ) outputs["TemplateToSubject0Warp"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "TemplateToSubject0Warp." + self.inputs.image_suffix, ) outputs["SubjectToTemplate1Warp"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "SubjectToTemplate1Warp." + self.inputs.image_suffix, ) outputs["SubjectToTemplate0GenericAffine"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "SubjectToTemplate0GenericAffine.mat" ) outputs["SubjectToTemplateLogJacobian"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "SubjectToTemplateLogJacobian." + self.inputs.image_suffix, ) outputs["CorticalThicknessNormedToTemplate"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "CorticalThickness." 
+ self.inputs.image_suffix, ) outputs["BrainVolumes"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "brainvols.csv" ) return outputs class BrainExtractionInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, argstr="-d %d", usedefault=True, desc="image dimension (2 or 3)" ) anatomical_image = File( exists=True, argstr="-a %s", desc=( "Structural image, typically T1. If more than one" " anatomical image is specified, subsequently specified" " images are used during the segmentation process. However," " only the first image is used in the registration of priors." " Our suggestion would be to specify the T1 as the first image." " Anatomical template created using e.g. LPBA40 data set with" " buildtemplateparallel.sh in ANTs." ), mandatory=True, ) brain_template = File( exists=True, argstr="-e %s", desc=( "Anatomical template created using e.g. LPBA40 data set with" " buildtemplateparallel.sh in ANTs." ), mandatory=True, ) brain_probability_mask = File( exists=True, argstr="-m %s", desc=( "Brain probability mask created using e.g. LPBA40 data set which" " have brain masks defined, and warped to anatomical template and" " averaged resulting in a probability image." ), copyfile=False, mandatory=True, ) out_prefix = traits.Str( "highres001_", argstr="-o %s", usedefault=True, desc=("Prefix that is prepended to all output files"), ) extraction_registration_mask = File( exists=True, argstr="-f %s", desc=( "Mask (defined in the template space) used during" " registration for brain extraction." " To limit the metric computation to a specific region." 
), ) image_suffix = traits.Str( "nii.gz", desc=("any of standard ITK formats, nii.gz is default"), argstr="-s %s", usedefault=True, ) use_random_seeding = traits.Enum( 0, 1, argstr="-u %d", desc=("Use random number generated from system clock in Atropos (default = 1)"), ) keep_temporary_files = traits.Int( argstr="-k %d", desc="Keep brain extraction/segmentation warps, etc (default = 0).", ) use_floatingpoint_precision = traits.Enum( 0, 1, argstr="-q %d", desc=("Use floating point precision in registrations (default = 0)"), ) debug = traits.Bool( argstr="-z 1", desc=( "If > 0, runs a faster version of the script." " Only for testing. Implies -u 0." " Requires single thread computation for complete reproducibility." ), ) class BrainExtractionOutputSpec(TraitedSpec): BrainExtractionMask = File(exists=True, desc="brain extraction mask") BrainExtractionBrain = File(exists=True, desc="brain extraction image") BrainExtractionCSF = File(exists=True, desc="segmentation mask with only CSF") BrainExtractionGM = File( exists=True, desc="segmentation mask with only grey matter" ) BrainExtractionInitialAffine = File(exists=True, desc="") BrainExtractionInitialAffineFixed = File(exists=True, desc="") BrainExtractionInitialAffineMoving = File(exists=True, desc="") BrainExtractionLaplacian = File(exists=True, desc="") BrainExtractionPrior0GenericAffine = File(exists=True, desc="") BrainExtractionPrior1InverseWarp = File(exists=True, desc="") BrainExtractionPrior1Warp = File(exists=True, desc="") BrainExtractionPriorWarped = File(exists=True, desc="") BrainExtractionSegmentation = File( exists=True, desc="segmentation mask with CSF, GM, and WM" ) BrainExtractionTemplateLaplacian = File(exists=True, desc="") BrainExtractionTmp = File(exists=True, desc="") BrainExtractionWM = File( exists=True, desc="segmenration mask with only white matter" ) N4Corrected0 = File(exists=True, desc="N4 bias field corrected image") N4Truncated0 = File(exists=True, desc="") class 
BrainExtraction(ANTSCommand): """ Atlas-based brain extraction. Examples -------- >>> from nipype.interfaces.ants.segmentation import BrainExtraction >>> brainextraction = BrainExtraction() >>> brainextraction.inputs.dimension = 3 >>> brainextraction.inputs.anatomical_image ='T1.nii.gz' >>> brainextraction.inputs.brain_template = 'study_template.nii.gz' >>> brainextraction.inputs.brain_probability_mask ='ProbabilityMaskOfStudyTemplate.nii.gz' >>> brainextraction.cmdline 'antsBrainExtraction.sh -a T1.nii.gz -m ProbabilityMaskOfStudyTemplate.nii.gz -e study_template.nii.gz -d 3 -s nii.gz -o highres001_' """ input_spec = BrainExtractionInputSpec output_spec = BrainExtractionOutputSpec _cmd = "antsBrainExtraction.sh" def _run_interface(self, runtime, correct_return_codes=(0,)): # antsBrainExtraction.sh requires ANTSPATH to be defined out_environ = self._get_environ() ants_path = out_environ.get("ANTSPATH", None) or os.getenv("ANTSPATH", None) if ants_path is None: # Check for antsRegistration, which is under bin/ (the $ANTSPATH) instead of # checking for antsBrainExtraction.sh which is under script/ cmd_path = which("antsRegistration", env=runtime.environ) if not cmd_path: raise RuntimeError( 'The environment variable $ANTSPATH is not defined in host "%s", ' "and Nipype could not determine it automatically." % runtime.hostname ) ants_path = os.path.dirname(cmd_path) self.inputs.environ.update({"ANTSPATH": ants_path}) runtime.environ.update({"ANTSPATH": ants_path}) runtime = super(BrainExtraction, self)._run_interface(runtime) # Still, double-check if it didn't found N4 if "we cant find" in runtime.stdout: for line in runtime.stdout.split("\n"): if line.strip().startswith("we cant find"): tool = line.strip().replace("we cant find the", "").split(" ")[0] break errmsg = ( 'antsBrainExtraction.sh requires "%s" to be found in $ANTSPATH ' '($ANTSPATH="%s").' 
) % (tool, ants_path) if runtime.stderr is None: runtime.stderr = errmsg else: runtime.stderr += "\n" + errmsg runtime.returncode = 1 self.raise_exception(runtime) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["BrainExtractionMask"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainExtractionMask." + self.inputs.image_suffix, ) outputs["BrainExtractionBrain"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainExtractionBrain." + self.inputs.image_suffix, ) if ( isdefined(self.inputs.keep_temporary_files) and self.inputs.keep_temporary_files != 0 ): outputs["BrainExtractionCSF"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainExtractionCSF." + self.inputs.image_suffix, ) outputs["BrainExtractionGM"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainExtractionGM." + self.inputs.image_suffix, ) outputs["BrainExtractionInitialAffine"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainExtractionInitialAffine.mat" ) outputs["BrainExtractionInitialAffineFixed"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainExtractionInitialAffineFixed." + self.inputs.image_suffix, ) outputs["BrainExtractionInitialAffineMoving"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainExtractionInitialAffineMoving." + self.inputs.image_suffix, ) outputs["BrainExtractionLaplacian"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainExtractionLaplacian." + self.inputs.image_suffix, ) outputs["BrainExtractionPrior0GenericAffine"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainExtractionPrior0GenericAffine.mat", ) outputs["BrainExtractionPrior1InverseWarp"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainExtractionPrior1InverseWarp." + self.inputs.image_suffix, ) outputs["BrainExtractionPrior1Warp"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainExtractionPrior1Warp." 
+ self.inputs.image_suffix, ) outputs["BrainExtractionPriorWarped"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainExtractionPriorWarped." + self.inputs.image_suffix, ) outputs["BrainExtractionSegmentation"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainExtractionSegmentation." + self.inputs.image_suffix, ) outputs["BrainExtractionTemplateLaplacian"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainExtractionTemplateLaplacian." + self.inputs.image_suffix, ) outputs["BrainExtractionTmp"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainExtractionTmp." + self.inputs.image_suffix, ) outputs["BrainExtractionWM"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "BrainExtractionWM." + self.inputs.image_suffix, ) outputs["N4Corrected0"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "N4Corrected0." + self.inputs.image_suffix, ) outputs["N4Truncated0"] = os.path.join( os.getcwd(), self.inputs.out_prefix + "N4Truncated0." + self.inputs.image_suffix, ) return outputs class DenoiseImageInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 2, 3, 4, argstr="-d %d", desc="This option forces the image to be treated " "as a specified-dimensional image. If not " "specified, the program tries to infer the " "dimensionality from the input image.", ) input_image = File( exists=True, argstr="-i %s", mandatory=True, desc="A scalar image is expected as input for noise correction.", ) noise_model = traits.Enum( "Gaussian", "Rician", argstr="-n %s", usedefault=True, desc=("Employ a Rician or Gaussian noise model."), ) shrink_factor = traits.Int( default_value=1, usedefault=True, argstr="-s %s", desc=( "Running noise correction on large images can" " be time consuming. To lessen computation time," " the input image can be resampled. The shrink" " factor, specified as a single integer, describes" " this resampling. Shrink factor = 1 is the default." 
), ) output_image = File( argstr="-o %s", name_source=["input_image"], hash_files=False, keep_extension=True, name_template="%s_noise_corrected", desc="The output consists of the noise corrected" " version of the input image.", ) save_noise = traits.Bool( False, mandatory=True, usedefault=True, desc=("True if the estimated noise should be saved to file."), xor=["noise_image"], ) noise_image = File( name_source=["input_image"], hash_files=False, keep_extension=True, name_template="%s_noise", desc="Filename for the estimated noise.", ) verbose = traits.Bool(False, argstr="-v", desc=("Verbose output.")) class DenoiseImageOutputSpec(TraitedSpec): output_image = File(exists=True) noise_image = File() class DenoiseImage(ANTSCommand): """ Examples -------- >>> import copy >>> from nipype.interfaces.ants import DenoiseImage >>> denoise = DenoiseImage() >>> denoise.inputs.dimension = 3 >>> denoise.inputs.input_image = 'im1.nii' >>> denoise.cmdline 'DenoiseImage -d 3 -i im1.nii -n Gaussian -o im1_noise_corrected.nii -s 1' >>> denoise_2 = copy.deepcopy(denoise) >>> denoise_2.inputs.output_image = 'output_corrected_image.nii.gz' >>> denoise_2.inputs.noise_model = 'Rician' >>> denoise_2.inputs.shrink_factor = 2 >>> denoise_2.cmdline 'DenoiseImage -d 3 -i im1.nii -n Rician -o output_corrected_image.nii.gz -s 2' >>> denoise_3 = DenoiseImage() >>> denoise_3.inputs.input_image = 'im1.nii' >>> denoise_3.inputs.save_noise = True >>> denoise_3.cmdline 'DenoiseImage -i im1.nii -n Gaussian -o [ im1_noise_corrected.nii, im1_noise.nii ] -s 1' """ input_spec = DenoiseImageInputSpec output_spec = DenoiseImageOutputSpec _cmd = "DenoiseImage" def _format_arg(self, name, trait_spec, value): if (name == "output_image") and ( self.inputs.save_noise or isdefined(self.inputs.noise_image) ): newval = "[ %s, %s ]" % ( self._filename_from_source("output_image"), self._filename_from_source("noise_image"), ) return trait_spec.argstr % newval return super(DenoiseImage, self)._format_arg(name, 
trait_spec, value) class JointFusionInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, 4, argstr="-d %d", desc="This option forces the image to be treated " "as a specified-dimensional image. If not " "specified, the program tries to infer the " "dimensionality from the input image.", ) target_image = traits.List( InputMultiPath(File(exists=True)), argstr="-t %s", mandatory=True, desc="The target image (or " "multimodal target images) assumed to be " "aligned to a common image domain.", ) atlas_image = traits.List( InputMultiPath(File(exists=True)), argstr="-g %s...", mandatory=True, desc="The atlas image (or " "multimodal atlas images) assumed to be " "aligned to a common image domain.", ) atlas_segmentation_image = InputMultiPath( File(exists=True), argstr="-l %s...", mandatory=True, desc="The atlas segmentation " "images. For performing label fusion the number " "of specified segmentations should be identical " "to the number of atlas image sets.", ) alpha = traits.Float( default_value=0.1, usedefault=True, argstr="-a %s", desc=( "Regularization " "term added to matrix Mx for calculating the inverse. Default = 0.1" ), ) beta = traits.Float( default_value=2.0, usedefault=True, argstr="-b %s", desc=( "Exponent for mapping " "intensity difference to the joint error. Default = 2.0" ), ) retain_label_posterior_images = traits.Bool( False, argstr="-r", usedefault=True, requires=["atlas_segmentation_image"], desc=( "Retain label posterior probability images. Requires " "atlas segmentations to be specified. Default = false" ), ) retain_atlas_voting_images = traits.Bool( False, argstr="-f", usedefault=True, desc=("Retain atlas voting images. Default = false"), ) constrain_nonnegative = traits.Bool( False, argstr="-c", usedefault=True, desc=("Constrain solution to non-negative weights."), ) patch_radius = traits.ListInt( minlen=3, maxlen=3, argstr="-p %s", desc=("Patch radius for similarity measures. 
Default: 2x2x2"), ) patch_metric = traits.Enum( "PC", "MSQ", argstr="-m %s", desc=( "Metric to be used in determining the most similar " "neighborhood patch. Options include Pearson's " "correlation (PC) and mean squares (MSQ). Default = " "PC (Pearson correlation)." ), ) search_radius = traits.List( [3, 3, 3], minlen=1, maxlen=3, argstr="-s %s", usedefault=True, desc=( "Search radius for similarity measures. Default = 3x3x3. " "One can also specify an image where the value at the " "voxel specifies the isotropic search radius at that voxel." ), ) exclusion_image_label = traits.List( traits.Str(), argstr="-e %s", requires=["exclusion_image"], desc=("Specify a label for the exclusion region."), ) exclusion_image = traits.List( File(exists=True), desc=("Specify an exclusion region for the given label.") ) mask_image = File( argstr="-x %s", exists=True, desc="If a mask image " "is specified, fusion is only performed in the mask region.", ) out_label_fusion = File( argstr="%s", hash_files=False, desc="The output label fusion image." ) out_intensity_fusion_name_format = traits.Str( argstr="", desc="Optional intensity fusion " "image file name format. " '(e.g. 
"antsJointFusionIntensity_%d.nii.gz")', ) out_label_post_prob_name_format = traits.Str( "antsJointFusionPosterior_%d.nii.gz", requires=["out_label_fusion", "out_intensity_fusion_name_format"], desc="Optional label posterior probability image file name format.", ) out_atlas_voting_weight_name_format = traits.Str( "antsJointFusionVotingWeight_%d.nii.gz", requires=[ "out_label_fusion", "out_intensity_fusion_name_format", "out_label_post_prob_name_format", ], desc="Optional atlas voting weight image file name format.", ) verbose = traits.Bool(False, argstr="-v", desc=("Verbose output.")) class JointFusionOutputSpec(TraitedSpec): out_label_fusion = File(exists=True) out_intensity_fusion = OutputMultiPath(File(exists=True)) out_label_post_prob = OutputMultiPath(File(exists=True)) out_atlas_voting_weight = OutputMultiPath(File(exists=True)) class JointFusion(ANTSCommand): """ An image fusion algorithm. Developed by Hongzhi Wang and Paul Yushkevich, and it won segmentation challenges at MICCAI 2012 and MICCAI 2013. The original label fusion framework was extended to accommodate intensities by Brian Avants. This implementation is based on Paul's original ITK-style implementation and Brian's ANTsR implementation. References include 1) H. Wang, J. W. Suh, S. Das, J. Pluta, C. Craige, P. Yushkevich, Multi-atlas segmentation with joint label fusion IEEE Trans. on Pattern Analysis and Machine Intelligence, 35(3), 611-623, 2013. and 2) H. Wang and P. A. Yushkevich, Multi-atlas segmentation with joint label fusion and corrective learning--an open source implementation, Front. Neuroinform., 2013. 
Examples -------- >>> from nipype.interfaces.ants import JointFusion >>> jf = JointFusion() >>> jf.inputs.out_label_fusion = 'ants_fusion_label_output.nii' >>> jf.inputs.atlas_image = [ ['rc1s1.nii','rc1s2.nii'] ] >>> jf.inputs.atlas_segmentation_image = ['segmentation0.nii.gz'] >>> jf.inputs.target_image = ['im1.nii'] >>> jf.cmdline "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz -b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii']" >>> jf.inputs.target_image = [ ['im1.nii', 'im2.nii'] ] >>> jf.cmdline "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz -b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii', 'im2.nii']" >>> jf.inputs.atlas_image = [ ['rc1s1.nii','rc1s2.nii'], ... ['rc2s1.nii','rc2s2.nii'] ] >>> jf.inputs.atlas_segmentation_image = ['segmentation0.nii.gz', ... 'segmentation1.nii.gz'] >>> jf.cmdline "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] -l segmentation0.nii.gz -l segmentation1.nii.gz -b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii', 'im2.nii']" >>> jf.inputs.dimension = 3 >>> jf.inputs.alpha = 0.5 >>> jf.inputs.beta = 1.0 >>> jf.inputs.patch_radius = [3,2,1] >>> jf.inputs.search_radius = [3] >>> jf.cmdline "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -o ants_fusion_label_output.nii -p 3x2x1 -s 3 -t ['im1.nii', 'im2.nii']" >>> jf.inputs.search_radius = ['mask.nii'] >>> jf.inputs.verbose = True >>> jf.inputs.exclusion_image = ['roi01.nii', 'roi02.nii'] >>> jf.inputs.exclusion_image_label = ['1','2'] >>> jf.cmdline "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] -o ants_fusion_label_output.nii -p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" >>> jf.inputs.out_label_fusion = 
'ants_fusion_label_output.nii' >>> jf.inputs.out_intensity_fusion_name_format = 'ants_joint_fusion_intensity_%d.nii.gz' >>> jf.inputs.out_label_post_prob_name_format = 'ants_joint_fusion_posterior_%d.nii.gz' >>> jf.inputs.out_atlas_voting_weight_name_format = 'ants_joint_fusion_voting_weight_%d.nii.gz' >>> jf.cmdline "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] -o [ants_fusion_label_output.nii, ants_joint_fusion_intensity_%d.nii.gz, ants_joint_fusion_posterior_%d.nii.gz, ants_joint_fusion_voting_weight_%d.nii.gz] -p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" """ input_spec = JointFusionInputSpec output_spec = JointFusionOutputSpec _cmd = "antsJointFusion" def _format_arg(self, opt, spec, val): if opt == "exclusion_image_label": retval = [] for ii in range(len(self.inputs.exclusion_image_label)): retval.append( "-e {0}[{1}]".format( self.inputs.exclusion_image_label[ii], self.inputs.exclusion_image[ii], ) ) return " ".join(retval) if opt == "patch_radius": return "-p {0}".format(self._format_xarray(val)) if opt == "search_radius": return "-s {0}".format(self._format_xarray(val)) if opt == "out_label_fusion": args = [self.inputs.out_label_fusion] for option in ( self.inputs.out_intensity_fusion_name_format, self.inputs.out_label_post_prob_name_format, self.inputs.out_atlas_voting_weight_name_format, ): if isdefined(option): args.append(option) else: break if len(args) == 1: return " ".join(("-o", args[0])) return "-o [{}]".format(", ".join(args)) if opt == "out_intensity_fusion_name_format": if not isdefined(self.inputs.out_label_fusion): return "-o {0}".format(self.inputs.out_intensity_fusion_name_format) return "" if opt == "atlas_image": return " ".join( [ "-g [{0}]".format(", ".join("'%s'" % fn for fn in ai)) for ai in self.inputs.atlas_image ] ) if opt == "target_image": return " ".join( [ "-t [{0}]".format(", ".join("'%s'" % fn 
for fn in ai)) for ai in self.inputs.target_image ] ) if opt == "atlas_segmentation_image": if len(val) != len(self.inputs.atlas_image): raise ValueError( "Number of specified segmentations should be identical to the number " "of atlas image sets {0}!={1}".format( len(val), len(self.inputs.atlas_image) ) ) return " ".join( ["-l {0}".format(fn) for fn in self.inputs.atlas_segmentation_image] ) return super(AntsJointFusion, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.out_label_fusion): outputs["out_label_fusion"] = os.path.abspath(self.inputs.out_label_fusion) if isdefined(self.inputs.out_intensity_fusion_name_format): outputs["out_intensity_fusion"] = glob( os.path.abspath( self.inputs.out_intensity_fusion_name_format.replace("%d", "*") ) ) if isdefined(self.inputs.out_label_post_prob_name_format): outputs["out_label_post_prob"] = glob( os.path.abspath( self.inputs.out_label_post_prob_name_format.replace("%d", "*") ) ) if isdefined(self.inputs.out_atlas_voting_weight_name_format): outputs["out_atlas_voting_weight"] = glob( os.path.abspath( self.inputs.out_atlas_voting_weight_name_format.replace("%d", "*") ) ) return outputs # For backwards compatibility AntsJointFusion = JointFusion AntsJointFusionInputSpec = JointFusionInputSpec AntsJointFusionOutputSpec = JointFusionOutputSpec class KellyKapowskiInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, argstr="--image-dimensionality %d", usedefault=True, desc="image dimension (2 or 3)", ) segmentation_image = File( exists=True, argstr='--segmentation-image "%s"', mandatory=True, desc="A segmentation image must be supplied labeling the gray and white matters. 
" "Default values = 2 and 3, respectively.", ) gray_matter_label = traits.Int( 2, usedefault=True, desc="The label value for the gray matter label in the segmentation_image.", ) white_matter_label = traits.Int( 3, usedefault=True, desc="The label value for the white matter label in the segmentation_image.", ) gray_matter_prob_image = File( exists=True, argstr='--gray-matter-probability-image "%s"', desc="In addition to the segmentation image, a gray matter probability image can be" " used. If no such image is supplied, one is created using the segmentation image" " and a variance of 1.0 mm.", ) white_matter_prob_image = File( exists=True, argstr='--white-matter-probability-image "%s"', desc="In addition to the segmentation image, a white matter probability image can be" " used. If no such image is supplied, one is created using the segmentation image" " and a variance of 1.0 mm.", ) convergence = traits.Str( "[50,0.001,10]", argstr='--convergence "%s"', usedefault=True, desc="Convergence is determined by fitting a line to the normalized energy profile of" " the last N iterations (where N is specified by the window size) and determining" " the slope which is then compared with the convergence threshold.", ) thickness_prior_estimate = traits.Float( 10, usedefault=True, argstr="--thickness-prior-estimate %f", desc="Provides a prior constraint on the final thickness measurement in mm.", ) thickness_prior_image = File( exists=True, argstr='--thickness-prior-image "%s"', desc="An image containing spatially varying prior thickness values.", ) gradient_step = traits.Float( 0.025, usedefault=True, argstr="--gradient-step %f", desc="Gradient step size for the optimization.", ) smoothing_variance = traits.Float( 1.0, usedefault=True, argstr="--smoothing-variance %f", desc="Defines the Gaussian smoothing of the hit and total images.", ) smoothing_velocity_field = traits.Float( 1.5, usedefault=True, argstr="--smoothing-velocity-field-parameter %f", desc="Defines the Gaussian 
smoothing of the velocity field (default = 1.5)." " If the b-spline smoothing option is chosen, then this defines the" " isotropic mesh spacing for the smoothing spline (default = 15).", ) use_bspline_smoothing = traits.Bool( argstr="--use-bspline-smoothing 1", desc="Sets the option for B-spline smoothing of the velocity field.", ) number_integration_points = traits.Int( 10, usedefault=True, argstr="--number-of-integration-points %d", desc="Number of compositions of the diffeomorphism per iteration.", ) max_invert_displacement_field_iters = traits.Int( 20, usedefault=True, argstr="--maximum-number-of-invert-displacement-field-iterations %d", desc="Maximum number of iterations for estimating the invert" "displacement field.", ) cortical_thickness = File( argstr='--output "%s"', keep_extension=True, name_source=["segmentation_image"], name_template="%s_cortical_thickness", desc="Filename for the cortical thickness.", hash_files=False, ) warped_white_matter = File( name_source=["segmentation_image"], keep_extension=True, name_template="%s_warped_white_matter", desc="Filename for the warped white matter file.", hash_files=False, ) class KellyKapowskiOutputSpec(TraitedSpec): cortical_thickness = File( desc="A thickness map defined in the segmented gray matter." ) warped_white_matter = File(desc="A warped white matter image.") class KellyKapowski(ANTSCommand): """ Nipype Interface to ANTs' KellyKapowski, also known as DiReCT. DiReCT is a registration based estimate of cortical thickness. It was published in S. R. Das, B. B. Avants, M. Grossman, and J. C. Gee, Registration based cortical thickness measurement, Neuroimage 2009, 45:867--879. 
Examples -------- >>> from nipype.interfaces.ants.segmentation import KellyKapowski >>> kk = KellyKapowski() >>> kk.inputs.dimension = 3 >>> kk.inputs.segmentation_image = "segmentation0.nii.gz" >>> kk.inputs.convergence = "[45,0.0,10]" >>> kk.inputs.thickness_prior_estimate = 10 >>> kk.cmdline 'KellyKapowski --convergence "[45,0.0,10]" --output "[segmentation0_cortical_thickness.nii.gz,segmentation0_warped_white_matter.nii.gz]" --image-dimensionality 3 --gradient-step 0.025000 --maximum-number-of-invert-displacement-field-iterations 20 --number-of-integration-points 10 --segmentation-image "[segmentation0.nii.gz,2,3]" --smoothing-variance 1.000000 --smoothing-velocity-field-parameter 1.500000 --thickness-prior-estimate 10.000000' """ _cmd = "KellyKapowski" input_spec = KellyKapowskiInputSpec output_spec = KellyKapowskiOutputSpec _references = [ { "entry": BibTeX( """\ @book{Das2009867, author={Sandhitsu R. Das and Brian B. Avants and Murray Grossman and James C. Gee}, title={Registration based cortical thickness measurement.}, journal={NeuroImage}, volume={45}, number={37}, pages={867--879}, year={2009}, issn={1053-8119}, url={http://www.sciencedirect.com/science/article/pii/S1053811908012780}, doi={https://doi.org/10.1016/j.neuroimage.2008.12.016} }""" ), "description": "The details on the implementation of DiReCT.", "tags": ["implementation"], } ] def _parse_inputs(self, skip=None): if skip is None: skip = [] skip += ["warped_white_matter", "gray_matter_label", "white_matter_label"] return super(KellyKapowski, self)._parse_inputs(skip=skip) def _gen_filename(self, name): if name == "cortical_thickness": output = self.inputs.cortical_thickness if not isdefined(output): _, name, ext = split_filename(self.inputs.segmentation_image) output = name + "_cortical_thickness" + ext return output if name == "warped_white_matter": output = self.inputs.warped_white_matter if not isdefined(output): _, name, ext = split_filename(self.inputs.segmentation_image) output = name + 
"_warped_white_matter" + ext return output def _format_arg(self, opt, spec, val): if opt == "segmentation_image": newval = "[{0},{1},{2}]".format( self.inputs.segmentation_image, self.inputs.gray_matter_label, self.inputs.white_matter_label, ) return spec.argstr % newval if opt == "cortical_thickness": ct = self._gen_filename("cortical_thickness") wm = self._gen_filename("warped_white_matter") newval = "[{},{}]".format(ct, wm) return spec.argstr % newval return super(KellyKapowski, self)._format_arg(opt, spec, val) nipype-1.7.0/nipype/interfaces/ants/tests/000077500000000000000000000000001413403311400205535ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/ants/tests/__init__.py000066400000000000000000000002121413403311400226570ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_AI.py000066400000000000000000000042431413403311400235100ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import AI def test_AI_inputs(): input_map = dict( args=dict( argstr="%s", ), convergence=dict( argstr="-c [%d,%g,%d]", usedefault=True, ), dimension=dict( argstr="-d %d", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), fixed_image=dict( extensions=None, mandatory=True, ), fixed_image_mask=dict( argstr="-x %s", extensions=None, ), metric=dict( argstr="-m %s", mandatory=True, ), moving_image=dict( extensions=None, mandatory=True, ), moving_image_mask=dict( extensions=None, requires=["fixed_image_mask"], ), num_threads=dict( nohash=True, usedefault=True, ), output_transform=dict( argstr="-o %s", extensions=None, usedefault=True, ), principal_axes=dict( argstr="-p %d", usedefault=True, xor=["blobs"], ), search_factor=dict( argstr="-s [%g,%g]", usedefault=True, ), search_grid=dict( argstr="-g %s", min_ver="2.3.0", ), transform=dict( argstr="-t %s[%g]", 
usedefault=True, ), verbose=dict( argstr="-v %d", usedefault=True, ), ) inputs = AI.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AI_outputs(): output_map = dict( output_transform=dict( extensions=None, ), ) outputs = AI.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_ANTS.py000066400000000000000000000063741413403311400237730ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import ANTS def test_ANTS_inputs(): input_map = dict( affine_gradient_descent_option=dict( argstr="%s", ), args=dict( argstr="%s", ), delta_time=dict( requires=["number_of_time_steps"], ), dimension=dict( argstr="%d", position=1, ), environ=dict( nohash=True, usedefault=True, ), fixed_image=dict( mandatory=True, ), gradient_step_length=dict( requires=["transformation_model"], ), metric=dict( mandatory=True, ), metric_weight=dict( mandatory=True, requires=["metric"], usedefault=True, ), mi_option=dict( argstr="--MI-option %s", sep="x", ), moving_image=dict( argstr="%s", mandatory=True, ), num_threads=dict( nohash=True, usedefault=True, ), number_of_affine_iterations=dict( argstr="--number-of-affine-iterations %s", sep="x", ), number_of_iterations=dict( argstr="--number-of-iterations %s", sep="x", ), number_of_time_steps=dict( requires=["gradient_step_length"], ), output_transform_prefix=dict( argstr="--output-naming %s", mandatory=True, usedefault=True, ), radius=dict( mandatory=True, requires=["metric"], ), regularization=dict( argstr="%s", ), regularization_deformation_field_sigma=dict( requires=["regularization"], ), regularization_gradient_field_sigma=dict( requires=["regularization"], ), smoothing_sigmas=dict( argstr="--gaussian-smoothing-sigmas %s", 
sep="x", ), subsampling_factors=dict( argstr="--subsampling-factors %s", sep="x", ), symmetry_type=dict( requires=["delta_time"], ), transformation_model=dict( argstr="%s", mandatory=True, ), use_histogram_matching=dict( argstr="%s", usedefault=True, ), ) inputs = ANTS.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ANTS_outputs(): output_map = dict( affine_transform=dict( extensions=None, ), inverse_warp_transform=dict( extensions=None, ), metaheader=dict( extensions=None, ), metaheader_raw=dict( extensions=None, ), warp_transform=dict( extensions=None, ), ) outputs = ANTS.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_ANTSCommand.py000066400000000000000000000011151413403311400252560ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import ANTSCommand def test_ANTSCommand_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), num_threads=dict( nohash=True, usedefault=True, ), ) inputs = ANTSCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_AffineInitializer.py000066400000000000000000000036631413403311400266200ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import AffineInitializer def test_AffineInitializer_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="%s", position=0, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), fixed_image=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), local_search=dict( argstr="%d", 
position=7, usedefault=True, ), moving_image=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="%s", extensions=None, position=3, usedefault=True, ), principal_axes=dict( argstr="%d", position=6, usedefault=True, ), radian_fraction=dict( argstr="%f", position=5, usedefault=True, ), search_factor=dict( argstr="%f", position=4, usedefault=True, ), ) inputs = AffineInitializer.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AffineInitializer_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = AffineInitializer.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_ApplyTransforms.py000066400000000000000000000041361413403311400263640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..resampling import ApplyTransforms def test_ApplyTransforms_inputs(): input_map = dict( args=dict( argstr="%s", ), default_value=dict( argstr="--default-value %g", usedefault=True, ), dimension=dict( argstr="--dimensionality %d", ), environ=dict( nohash=True, usedefault=True, ), float=dict( argstr="--float %d", usedefault=True, ), input_image=dict( argstr="--input %s", extensions=None, mandatory=True, ), input_image_type=dict( argstr="--input-image-type %d", ), interpolation=dict( argstr="%s", usedefault=True, ), interpolation_parameters=dict(), invert_transform_flags=dict(), num_threads=dict( nohash=True, usedefault=True, ), out_postfix=dict( usedefault=True, ), output_image=dict( argstr="--output %s", genfile=True, hash_files=False, ), print_out_composite_warp_file=dict( requires=["output_image"], ), reference_image=dict( argstr="--reference-image %s", 
extensions=None, mandatory=True, ), transforms=dict( argstr="%s", mandatory=True, ), ) inputs = ApplyTransforms.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ApplyTransforms_outputs(): output_map = dict( output_image=dict( extensions=None, ), ) outputs = ApplyTransforms.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_ApplyTransformsToPoints.py000066400000000000000000000027561413403311400300720ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..resampling import ApplyTransformsToPoints def test_ApplyTransformsToPoints_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="--dimensionality %d", ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( argstr="--input %s", extensions=None, mandatory=True, ), invert_transform_flags=dict(), num_threads=dict( nohash=True, usedefault=True, ), output_file=dict( argstr="--output %s", hash_files=False, name_source=["input_file"], name_template="%s_transformed.csv", ), transforms=dict( argstr="%s", mandatory=True, ), ) inputs = ApplyTransformsToPoints.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ApplyTransformsToPoints_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = ApplyTransformsToPoints.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_Atropos.py000066400000000000000000000054631413403311400246530ustar00rootroot00000000000000# AUTO-GENERATED by 
tools/checkspecs.py - DO NOT EDIT from ..segmentation import Atropos def test_Atropos_inputs(): input_map = dict( args=dict( argstr="%s", ), convergence_threshold=dict( requires=["n_iterations"], ), dimension=dict( argstr="--image-dimensionality %d", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), icm_use_synchronous_update=dict( argstr="%s", ), initialization=dict( argstr="%s", mandatory=True, requires=["number_of_tissue_classes"], ), intensity_images=dict( argstr="--intensity-image %s...", mandatory=True, ), kmeans_init_centers=dict(), likelihood_model=dict( argstr="--likelihood-model %s", ), mask_image=dict( argstr="--mask-image %s", extensions=None, mandatory=True, ), maximum_number_of_icm_terations=dict( requires=["icm_use_synchronous_update"], ), mrf_radius=dict( requires=["mrf_smoothing_factor"], ), mrf_smoothing_factor=dict( argstr="%s", ), n_iterations=dict( argstr="%s", ), num_threads=dict( nohash=True, usedefault=True, ), number_of_tissue_classes=dict( mandatory=True, ), out_classified_image_name=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, ), output_posteriors_name_template=dict( usedefault=True, ), posterior_formulation=dict( argstr="%s", ), prior_image=dict(), prior_probability_threshold=dict( requires=["prior_weighting"], ), prior_weighting=dict(), save_posteriors=dict(), use_mixture_model_proportions=dict( requires=["posterior_formulation"], ), use_random_seed=dict( argstr="--use-random-seed %d", usedefault=True, ), ) inputs = Atropos.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Atropos_outputs(): output_map = dict( classified_image=dict( extensions=None, ), posteriors=dict(), ) outputs = Atropos.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_AverageAffineTransform.py000066400000000000000000000025351413403311400276000ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import AverageAffineTransform def test_AverageAffineTransform_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="%d", mandatory=True, position=0, ), environ=dict( nohash=True, usedefault=True, ), num_threads=dict( nohash=True, usedefault=True, ), output_affine_transform=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), transforms=dict( argstr="%s", mandatory=True, position=3, ), ) inputs = AverageAffineTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AverageAffineTransform_outputs(): output_map = dict( affine_transform=dict( extensions=None, ), ) outputs = AverageAffineTransform.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_AverageImages.py000066400000000000000000000026741413403311400257250ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import AverageImages def test_AverageImages_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="%d", mandatory=True, position=0, ), environ=dict( nohash=True, usedefault=True, ), images=dict( argstr="%s", mandatory=True, position=3, ), normalize=dict( argstr="%d", mandatory=True, position=2, ), num_threads=dict( nohash=True, usedefault=True, ), output_average_image=dict( argstr="%s", extensions=None, hash_files=False, position=1, usedefault=True, ), ) inputs = AverageImages.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value def test_AverageImages_outputs(): output_map = dict( output_average_image=dict( extensions=None, ), ) outputs = AverageImages.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_BrainExtraction.py000066400000000000000000000065111413403311400263130ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..segmentation import BrainExtraction def test_BrainExtraction_inputs(): input_map = dict( anatomical_image=dict( argstr="-a %s", extensions=None, mandatory=True, ), args=dict( argstr="%s", ), brain_probability_mask=dict( argstr="-m %s", copyfile=False, extensions=None, mandatory=True, ), brain_template=dict( argstr="-e %s", extensions=None, mandatory=True, ), debug=dict( argstr="-z 1", ), dimension=dict( argstr="-d %d", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), extraction_registration_mask=dict( argstr="-f %s", extensions=None, ), image_suffix=dict( argstr="-s %s", usedefault=True, ), keep_temporary_files=dict( argstr="-k %d", ), num_threads=dict( nohash=True, usedefault=True, ), out_prefix=dict( argstr="-o %s", usedefault=True, ), use_floatingpoint_precision=dict( argstr="-q %d", ), use_random_seeding=dict( argstr="-u %d", ), ) inputs = BrainExtraction.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BrainExtraction_outputs(): output_map = dict( BrainExtractionBrain=dict( extensions=None, ), BrainExtractionCSF=dict( extensions=None, ), BrainExtractionGM=dict( extensions=None, ), BrainExtractionInitialAffine=dict( extensions=None, ), BrainExtractionInitialAffineFixed=dict( extensions=None, ), BrainExtractionInitialAffineMoving=dict( extensions=None, ), BrainExtractionLaplacian=dict( 
extensions=None, ), BrainExtractionMask=dict( extensions=None, ), BrainExtractionPrior0GenericAffine=dict( extensions=None, ), BrainExtractionPrior1InverseWarp=dict( extensions=None, ), BrainExtractionPrior1Warp=dict( extensions=None, ), BrainExtractionPriorWarped=dict( extensions=None, ), BrainExtractionSegmentation=dict( extensions=None, ), BrainExtractionTemplateLaplacian=dict( extensions=None, ), BrainExtractionTmp=dict( extensions=None, ), BrainExtractionWM=dict( extensions=None, ), N4Corrected0=dict( extensions=None, ), N4Truncated0=dict( extensions=None, ), ) outputs = BrainExtraction.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_ComposeMultiTransform.py000066400000000000000000000030371413403311400275330ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ComposeMultiTransform def test_ComposeMultiTransform_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="%d", position=0, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), num_threads=dict( nohash=True, usedefault=True, ), output_transform=dict( argstr="%s", extensions=None, keep_extension=True, name_source=["transforms"], name_template="%s_composed", position=1, ), reference_image=dict( argstr="%s", extensions=None, position=2, ), transforms=dict( argstr="%s", mandatory=True, position=3, ), ) inputs = ComposeMultiTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ComposeMultiTransform_outputs(): output_map = dict( output_transform=dict( extensions=None, ), ) outputs = ComposeMultiTransform.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_CompositeTransformUtil.py000066400000000000000000000030671413403311400277160ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import CompositeTransformUtil def test_CompositeTransformUtil_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s...", mandatory=True, position=3, ), num_threads=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="%s", extensions=None, position=2, ), output_prefix=dict( argstr="%s", position=4, usedefault=True, ), process=dict( argstr="--%s", position=1, usedefault=True, ), ) inputs = CompositeTransformUtil.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CompositeTransformUtil_outputs(): output_map = dict( affine_transform=dict( extensions=None, ), displacement_field=dict( extensions=None, ), out_file=dict( extensions=None, ), ) outputs = CompositeTransformUtil.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_ConvertScalarImageToRGB.py000066400000000000000000000043211413403311400275630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..visualization import ConvertScalarImageToRGB def test_ConvertScalarImageToRGB_inputs(): input_map = dict( args=dict( argstr="%s", ), colormap=dict( argstr="%s", mandatory=True, position=4, ), custom_color_map_file=dict( argstr="%s", position=5, usedefault=True, ), dimension=dict( argstr="%d", mandatory=True, position=0, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), input_image=dict( argstr="%s", extensions=None, mandatory=True, 
position=1, ), mask_image=dict( argstr="%s", extensions=None, position=3, usedefault=True, ), maximum_RGB_output=dict( argstr="%d", position=9, usedefault=True, ), maximum_input=dict( argstr="%d", mandatory=True, position=7, ), minimum_RGB_output=dict( argstr="%d", position=8, usedefault=True, ), minimum_input=dict( argstr="%d", mandatory=True, position=6, ), num_threads=dict( nohash=True, usedefault=True, ), output_image=dict( argstr="%s", position=2, usedefault=True, ), ) inputs = ConvertScalarImageToRGB.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ConvertScalarImageToRGB_outputs(): output_map = dict( output_image=dict( extensions=None, ), ) outputs = ConvertScalarImageToRGB.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_CorticalThickness.py000066400000000000000000000073111413403311400266320ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..segmentation import CorticalThickness def test_CorticalThickness_inputs(): input_map = dict( anatomical_image=dict( argstr="-a %s", extensions=None, mandatory=True, ), args=dict( argstr="%s", ), b_spline_smoothing=dict( argstr="-v", ), brain_probability_mask=dict( argstr="-m %s", copyfile=False, extensions=None, mandatory=True, ), brain_template=dict( argstr="-e %s", extensions=None, mandatory=True, ), cortical_label_image=dict( extensions=None, ), debug=dict( argstr="-z 1", ), dimension=dict( argstr="-d %d", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), extraction_registration_mask=dict( argstr="-f %s", extensions=None, ), image_suffix=dict( argstr="-s %s", usedefault=True, ), keep_temporary_files=dict( argstr="-k %d", ), label_propagation=dict( argstr="-l %s", ), 
max_iterations=dict( argstr="-i %d", ), num_threads=dict( nohash=True, usedefault=True, ), out_prefix=dict( argstr="-o %s", usedefault=True, ), posterior_formulation=dict( argstr="-b %s", ), prior_segmentation_weight=dict( argstr="-w %f", ), quick_registration=dict( argstr="-q 1", ), segmentation_iterations=dict( argstr="-n %d", ), segmentation_priors=dict( argstr="-p %s", mandatory=True, ), t1_registration_template=dict( argstr="-t %s", extensions=None, mandatory=True, ), use_floatingpoint_precision=dict( argstr="-j %d", ), use_random_seeding=dict( argstr="-u %d", ), ) inputs = CorticalThickness.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CorticalThickness_outputs(): output_map = dict( BrainExtractionMask=dict( extensions=None, ), BrainSegmentation=dict( extensions=None, ), BrainSegmentationN4=dict( extensions=None, ), BrainSegmentationPosteriors=dict(), BrainVolumes=dict( extensions=None, ), CorticalThickness=dict( extensions=None, ), CorticalThicknessNormedToTemplate=dict( extensions=None, ), ExtractedBrainN4=dict( extensions=None, ), SubjectToTemplate0GenericAffine=dict( extensions=None, ), SubjectToTemplate1Warp=dict( extensions=None, ), SubjectToTemplateLogJacobian=dict( extensions=None, ), TemplateToSubject0Warp=dict( extensions=None, ), TemplateToSubject1GenericAffine=dict( extensions=None, ), ) outputs = CorticalThickness.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_CreateJacobianDeterminantImage.py000066400000000000000000000031161413403311400312050ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import CreateJacobianDeterminantImage def test_CreateJacobianDeterminantImage_inputs(): input_map = dict( args=dict( 
argstr="%s", ), deformationField=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), doLogJacobian=dict( argstr="%d", position=3, ), environ=dict( nohash=True, usedefault=True, ), imageDimension=dict( argstr="%d", mandatory=True, position=0, ), num_threads=dict( nohash=True, usedefault=True, ), outputImage=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), useGeometric=dict( argstr="%d", position=4, ), ) inputs = CreateJacobianDeterminantImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CreateJacobianDeterminantImage_outputs(): output_map = dict( jacobian_image=dict( extensions=None, ), ) outputs = CreateJacobianDeterminantImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_CreateTiledMosaic.py000066400000000000000000000034731413403311400265440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..visualization import CreateTiledMosaic def test_CreateTiledMosaic_inputs(): input_map = dict( alpha_value=dict( argstr="-a %.2f", ), args=dict( argstr="%s", ), direction=dict( argstr="-d %d", ), environ=dict( nohash=True, usedefault=True, ), flip_slice=dict( argstr="-f %s", ), input_image=dict( argstr="-i %s", extensions=None, mandatory=True, ), mask_image=dict( argstr="-x %s", extensions=None, ), num_threads=dict( nohash=True, usedefault=True, ), output_image=dict( argstr="-o %s", usedefault=True, ), pad_or_crop=dict( argstr="-p %s", ), permute_axes=dict( argstr="-g", ), rgb_image=dict( argstr="-r %s", extensions=None, mandatory=True, ), slices=dict( argstr="-s %s", ), tile_geometry=dict( argstr="-t %s", ), ) inputs = CreateTiledMosaic.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CreateTiledMosaic_outputs(): output_map = dict( output_image=dict( extensions=None, ), ) outputs = CreateTiledMosaic.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_DenoiseImage.py000066400000000000000000000037521413403311400255540ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..segmentation import DenoiseImage def test_DenoiseImage_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="-d %d", ), environ=dict( nohash=True, usedefault=True, ), input_image=dict( argstr="-i %s", extensions=None, mandatory=True, ), noise_image=dict( extensions=None, hash_files=False, keep_extension=True, name_source=["input_image"], name_template="%s_noise", ), noise_model=dict( argstr="-n %s", usedefault=True, ), num_threads=dict( nohash=True, usedefault=True, ), output_image=dict( argstr="-o %s", extensions=None, hash_files=False, keep_extension=True, name_source=["input_image"], name_template="%s_noise_corrected", ), save_noise=dict( mandatory=True, usedefault=True, xor=["noise_image"], ), shrink_factor=dict( argstr="-s %s", usedefault=True, ), verbose=dict( argstr="-v", ), ) inputs = DenoiseImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DenoiseImage_outputs(): output_map = dict( noise_image=dict( extensions=None, ), output_image=dict( extensions=None, ), ) outputs = DenoiseImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_GenWarpFields.py000066400000000000000000000043531413403311400257130ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..legacy import GenWarpFields def test_GenWarpFields_inputs(): input_map = dict( args=dict( argstr="%s", ), bias_field_correction=dict( argstr="-n 1", ), dimension=dict( argstr="-d %d", position=1, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), force_proceed=dict( argstr="-f 1", ), input_image=dict( argstr="-i %s", copyfile=False, extensions=None, mandatory=True, ), inverse_warp_template_labels=dict( argstr="-l", ), max_iterations=dict( argstr="-m %s", sep="x", ), num_threads=dict( nohash=True, usedefault=True, ), out_prefix=dict( argstr="-o %s", usedefault=True, ), quality_check=dict( argstr="-q 1", ), reference_image=dict( argstr="-r %s", copyfile=True, extensions=None, mandatory=True, ), similarity_metric=dict( argstr="-s %s", ), transformation_model=dict( argstr="-t %s", usedefault=True, ), ) inputs = GenWarpFields.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GenWarpFields_outputs(): output_map = dict( affine_transformation=dict( extensions=None, ), input_file=dict( extensions=None, ), inverse_warp_field=dict( extensions=None, ), output_file=dict( extensions=None, ), warp_field=dict( extensions=None, ), ) outputs = GenWarpFields.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_ImageMath.py000066400000000000000000000031621413403311400250520ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ImageMath def test_ImageMath_inputs(): input_map = dict( args=dict( argstr="%s", ), copy_header=dict( 
usedefault=True, ), dimension=dict( argstr="%d", position=1, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), num_threads=dict( nohash=True, usedefault=True, ), op1=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), op2=dict( argstr="%s", position=-1, ), operation=dict( argstr="%s", mandatory=True, position=3, ), output_image=dict( argstr="%s", extensions=None, keep_extension=True, name_source=["op1"], name_template="%s_maths", position=2, ), ) inputs = ImageMath.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ImageMath_outputs(): output_map = dict( output_image=dict( extensions=None, ), ) outputs = ImageMath.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_JointFusion.py000066400000000000000000000060531413403311400254670ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..segmentation import JointFusion def test_JointFusion_inputs(): input_map = dict( alpha=dict( argstr="-a %s", usedefault=True, ), args=dict( argstr="%s", ), atlas_image=dict( argstr="-g %s...", mandatory=True, ), atlas_segmentation_image=dict( argstr="-l %s...", mandatory=True, ), beta=dict( argstr="-b %s", usedefault=True, ), constrain_nonnegative=dict( argstr="-c", usedefault=True, ), dimension=dict( argstr="-d %d", ), environ=dict( nohash=True, usedefault=True, ), exclusion_image=dict(), exclusion_image_label=dict( argstr="-e %s", requires=["exclusion_image"], ), mask_image=dict( argstr="-x %s", extensions=None, ), num_threads=dict( nohash=True, usedefault=True, ), out_atlas_voting_weight_name_format=dict( requires=[ "out_label_fusion", "out_intensity_fusion_name_format", "out_label_post_prob_name_format", ], ), 
out_intensity_fusion_name_format=dict( argstr="", ), out_label_fusion=dict( argstr="%s", extensions=None, hash_files=False, ), out_label_post_prob_name_format=dict( requires=["out_label_fusion", "out_intensity_fusion_name_format"], ), patch_metric=dict( argstr="-m %s", ), patch_radius=dict( argstr="-p %s", maxlen=3, minlen=3, ), retain_atlas_voting_images=dict( argstr="-f", usedefault=True, ), retain_label_posterior_images=dict( argstr="-r", requires=["atlas_segmentation_image"], usedefault=True, ), search_radius=dict( argstr="-s %s", usedefault=True, ), target_image=dict( argstr="-t %s", mandatory=True, ), verbose=dict( argstr="-v", ), ) inputs = JointFusion.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_JointFusion_outputs(): output_map = dict( out_atlas_voting_weight=dict(), out_intensity_fusion=dict(), out_label_fusion=dict( extensions=None, ), out_label_post_prob=dict(), ) outputs = JointFusion.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_KellyKapowski.py000066400000000000000000000064341413403311400260140ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..segmentation import KellyKapowski def test_KellyKapowski_inputs(): input_map = dict( args=dict( argstr="%s", ), convergence=dict( argstr='--convergence "%s"', usedefault=True, ), cortical_thickness=dict( argstr='--output "%s"', extensions=None, hash_files=False, keep_extension=True, name_source=["segmentation_image"], name_template="%s_cortical_thickness", ), dimension=dict( argstr="--image-dimensionality %d", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), gradient_step=dict( argstr="--gradient-step %f", usedefault=True, ), gray_matter_label=dict( usedefault=True, 
), gray_matter_prob_image=dict( argstr='--gray-matter-probability-image "%s"', extensions=None, ), max_invert_displacement_field_iters=dict( argstr="--maximum-number-of-invert-displacement-field-iterations %d", usedefault=True, ), num_threads=dict( nohash=True, usedefault=True, ), number_integration_points=dict( argstr="--number-of-integration-points %d", usedefault=True, ), segmentation_image=dict( argstr='--segmentation-image "%s"', extensions=None, mandatory=True, ), smoothing_variance=dict( argstr="--smoothing-variance %f", usedefault=True, ), smoothing_velocity_field=dict( argstr="--smoothing-velocity-field-parameter %f", usedefault=True, ), thickness_prior_estimate=dict( argstr="--thickness-prior-estimate %f", usedefault=True, ), thickness_prior_image=dict( argstr='--thickness-prior-image "%s"', extensions=None, ), use_bspline_smoothing=dict( argstr="--use-bspline-smoothing 1", ), warped_white_matter=dict( extensions=None, hash_files=False, keep_extension=True, name_source=["segmentation_image"], name_template="%s_warped_white_matter", ), white_matter_label=dict( usedefault=True, ), white_matter_prob_image=dict( argstr='--white-matter-probability-image "%s"', extensions=None, ), ) inputs = KellyKapowski.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_KellyKapowski_outputs(): output_map = dict( cortical_thickness=dict( extensions=None, ), warped_white_matter=dict( extensions=None, ), ) outputs = KellyKapowski.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_LabelGeometry.py000066400000000000000000000030021413403311400257420ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import LabelGeometry def test_LabelGeometry_inputs(): 
input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="%d", position=0, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), intensity_image=dict( argstr="%s", extensions=None, mandatory=True, position=2, usedefault=True, ), label_image=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), num_threads=dict( nohash=True, usedefault=True, ), output_file=dict( argstr="%s", name_source=["label_image"], name_template="%s.csv", position=3, ), ) inputs = LabelGeometry.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_LabelGeometry_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = LabelGeometry.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_LaplacianThickness.py000066400000000000000000000041021413403311400267510ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..segmentation import LaplacianThickness def test_LaplacianThickness_inputs(): input_map = dict( args=dict( argstr="%s", ), dT=dict( argstr="%s", position=6, requires=["prior_thickness"], ), environ=dict( nohash=True, usedefault=True, ), input_gm=dict( argstr="%s", copyfile=True, extensions=None, mandatory=True, position=2, ), input_wm=dict( argstr="%s", copyfile=True, extensions=None, mandatory=True, position=1, ), num_threads=dict( nohash=True, usedefault=True, ), output_image=dict( argstr="%s", hash_files=False, keep_extension=True, name_source=["input_wm"], name_template="%s_thickness", position=3, ), prior_thickness=dict( argstr="%s", position=5, requires=["smooth_param"], ), smooth_param=dict( argstr="%s", position=4, ), sulcus_prior=dict( argstr="%s", position=7, requires=["dT"], ), tolerance=dict( argstr="%s", 
position=8, requires=["sulcus_prior"], ), ) inputs = LaplacianThickness.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_LaplacianThickness_outputs(): output_map = dict( output_image=dict( extensions=None, ), ) outputs = LaplacianThickness.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_MeasureImageSimilarity.py000066400000000000000000000036551413403311400276400ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import MeasureImageSimilarity def test_MeasureImageSimilarity_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="--dimensionality %d", position=1, ), environ=dict( nohash=True, usedefault=True, ), fixed_image=dict( extensions=None, mandatory=True, ), fixed_image_mask=dict( argstr="%s", extensions=None, ), metric=dict( argstr="%s", mandatory=True, ), metric_weight=dict( requires=["metric"], usedefault=True, ), moving_image=dict( extensions=None, mandatory=True, ), moving_image_mask=dict( extensions=None, requires=["fixed_image_mask"], ), num_threads=dict( nohash=True, usedefault=True, ), radius_or_number_of_bins=dict( mandatory=True, requires=["metric"], ), sampling_percentage=dict( mandatory=True, requires=["metric"], ), sampling_strategy=dict( requires=["metric"], usedefault=True, ), ) inputs = MeasureImageSimilarity.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MeasureImageSimilarity_outputs(): output_map = dict( similarity=dict(), ) outputs = MeasureImageSimilarity.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): 
assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_MultiplyImages.py000066400000000000000000000027071413403311400261670ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import MultiplyImages def test_MultiplyImages_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="%d", mandatory=True, position=0, ), environ=dict( nohash=True, usedefault=True, ), first_input=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), num_threads=dict( nohash=True, usedefault=True, ), output_product_image=dict( argstr="%s", extensions=None, mandatory=True, position=3, ), second_input=dict( argstr="%s", mandatory=True, position=2, ), ) inputs = MultiplyImages.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MultiplyImages_outputs(): output_map = dict( output_product_image=dict( extensions=None, ), ) outputs = MultiplyImages.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_N4BiasFieldCorrection.py000066400000000000000000000051661413403311400273000ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..segmentation import N4BiasFieldCorrection def test_N4BiasFieldCorrection_inputs(): input_map = dict( args=dict( argstr="%s", ), bias_image=dict( extensions=None, hash_files=False, ), bspline_fitting_distance=dict( argstr="--bspline-fitting %s", ), bspline_order=dict( requires=["bspline_fitting_distance"], ), convergence_threshold=dict( requires=["n_iterations"], ), copy_header=dict( mandatory=True, usedefault=True, ), dimension=dict( argstr="-d %d", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), 
histogram_sharpening=dict( argstr="--histogram-sharpening [%g,%g,%d]", ), input_image=dict( argstr="--input-image %s", extensions=None, mandatory=True, ), mask_image=dict( argstr="--mask-image %s", extensions=None, ), n_iterations=dict( argstr="--convergence %s", ), num_threads=dict( nohash=True, usedefault=True, ), output_image=dict( argstr="--output %s", hash_files=False, keep_extension=True, name_source=["input_image"], name_template="%s_corrected", ), rescale_intensities=dict( argstr="-r", min_ver="2.1.0", usedefault=True, ), save_bias=dict( mandatory=True, usedefault=True, xor=["bias_image"], ), shrink_factor=dict( argstr="--shrink-factor %d", ), weight_image=dict( argstr="--weight-image %s", extensions=None, ), ) inputs = N4BiasFieldCorrection.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_N4BiasFieldCorrection_outputs(): output_map = dict( bias_image=dict( extensions=None, ), output_image=dict( extensions=None, ), ) outputs = N4BiasFieldCorrection.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_Registration.py000066400000000000000000000135121413403311400256700ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import Registration def test_Registration_inputs(): input_map = dict( args=dict( argstr="%s", ), collapse_output_transforms=dict( argstr="--collapse-output-transforms %d", usedefault=True, ), convergence_threshold=dict( requires=["number_of_iterations"], usedefault=True, ), convergence_window_size=dict( requires=["convergence_threshold"], usedefault=True, ), dimension=dict( argstr="--dimensionality %d", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), fixed_image=dict( mandatory=True, ), 
fixed_image_mask=dict( argstr="%s", extensions=None, max_ver="2.1.0", xor=["fixed_image_masks"], ), fixed_image_masks=dict( min_ver="2.2.0", xor=["fixed_image_mask"], ), float=dict( argstr="--float %d", ), initial_moving_transform=dict( argstr="%s", xor=["initial_moving_transform_com"], ), initial_moving_transform_com=dict( argstr="%s", xor=["initial_moving_transform"], ), initialize_transforms_per_stage=dict( argstr="--initialize-transforms-per-stage %d", usedefault=True, ), interpolation=dict( argstr="%s", usedefault=True, ), interpolation_parameters=dict(), invert_initial_moving_transform=dict( requires=["initial_moving_transform"], xor=["initial_moving_transform_com"], ), metric=dict( mandatory=True, ), metric_item_trait=dict(), metric_stage_trait=dict(), metric_weight=dict( mandatory=True, requires=["metric"], usedefault=True, ), metric_weight_item_trait=dict( usedefault=True, ), metric_weight_stage_trait=dict(), moving_image=dict( mandatory=True, ), moving_image_mask=dict( extensions=None, max_ver="2.1.0", requires=["fixed_image_mask"], xor=["moving_image_masks"], ), moving_image_masks=dict( min_ver="2.2.0", xor=["moving_image_mask"], ), num_threads=dict( nohash=True, usedefault=True, ), number_of_iterations=dict(), output_inverse_warped_image=dict( hash_files=False, requires=["output_warped_image"], ), output_transform_prefix=dict( argstr="%s", usedefault=True, ), output_warped_image=dict( hash_files=False, ), radius_bins_item_trait=dict( usedefault=True, ), radius_bins_stage_trait=dict(), radius_or_number_of_bins=dict( requires=["metric_weight"], usedefault=True, ), restore_state=dict( argstr="--restore-state %s", extensions=None, ), restrict_deformation=dict(), sampling_percentage=dict( requires=["sampling_strategy"], ), sampling_percentage_item_trait=dict(), sampling_percentage_stage_trait=dict(), sampling_strategy=dict( requires=["metric_weight"], ), sampling_strategy_item_trait=dict(), sampling_strategy_stage_trait=dict(), save_state=dict( 
argstr="--save-state %s", extensions=None, ), shrink_factors=dict( mandatory=True, ), sigma_units=dict( requires=["smoothing_sigmas"], ), smoothing_sigmas=dict( mandatory=True, ), transform_parameters=dict(), transforms=dict( argstr="%s", mandatory=True, ), use_estimate_learning_rate_once=dict(), use_histogram_matching=dict( usedefault=True, ), verbose=dict( argstr="-v", nohash=True, usedefault=True, ), winsorize_lower_quantile=dict( argstr="%s", usedefault=True, ), winsorize_upper_quantile=dict( argstr="%s", usedefault=True, ), write_composite_transform=dict( argstr="--write-composite-transform %d", usedefault=True, ), ) inputs = Registration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Registration_outputs(): output_map = dict( composite_transform=dict( extensions=None, ), elapsed_time=dict(), forward_invert_flags=dict(), forward_transforms=dict(), inverse_composite_transform=dict( extensions=None, ), inverse_warped_image=dict( extensions=None, ), metric_value=dict(), reverse_forward_invert_flags=dict(), reverse_forward_transforms=dict(), reverse_invert_flags=dict(), reverse_transforms=dict(), save_state=dict( extensions=None, ), warped_image=dict( extensions=None, ), ) outputs = Registration.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_RegistrationSynQuick.py000066400000000000000000000041111413403311400273520ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import RegistrationSynQuick def test_RegistrationSynQuick_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="-d %d", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), fixed_image=dict( argstr="-f %s...", mandatory=True, ), 
histogram_bins=dict( argstr="-r %d", usedefault=True, ), moving_image=dict( argstr="-m %s...", mandatory=True, ), num_threads=dict( argstr="-n %d", usedefault=True, ), output_prefix=dict( argstr="-o %s", usedefault=True, ), precision_type=dict( argstr="-p %s", usedefault=True, ), spline_distance=dict( argstr="-s %d", usedefault=True, ), transform_type=dict( argstr="-t %s", usedefault=True, ), use_histogram_matching=dict( argstr="-j %d", ), ) inputs = RegistrationSynQuick.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RegistrationSynQuick_outputs(): output_map = dict( forward_warp_field=dict( extensions=None, ), inverse_warp_field=dict( extensions=None, ), inverse_warped_image=dict( extensions=None, ), out_matrix=dict( extensions=None, ), warped_image=dict( extensions=None, ), ) outputs = RegistrationSynQuick.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_ResampleImageBySpacing.py000066400000000000000000000036021413403311400275300ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ResampleImageBySpacing def test_ResampleImageBySpacing_inputs(): input_map = dict( addvox=dict( argstr="%d", position=6, requires=["apply_smoothing"], ), apply_smoothing=dict( argstr="%d", position=5, ), args=dict( argstr="%s", ), dimension=dict( argstr="%d", position=1, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), input_image=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), nn_interp=dict( argstr="%d", position=-1, requires=["addvox"], ), num_threads=dict( nohash=True, usedefault=True, ), out_spacing=dict( argstr="%s", mandatory=True, position=4, ), output_image=dict( argstr="%s", extensions=None, 
keep_extension=True, name_source=["input_image"], name_template="%s_resampled", position=3, ), ) inputs = ResampleImageBySpacing.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ResampleImageBySpacing_outputs(): output_map = dict( output_image=dict( extensions=None, ), ) outputs = ResampleImageBySpacing.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_ThresholdImage.py000066400000000000000000000044761413403311400261260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ThresholdImage def test_ThresholdImage_inputs(): input_map = dict( args=dict( argstr="%s", ), copy_header=dict( mandatory=True, usedefault=True, ), dimension=dict( argstr="%d", position=1, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), input_image=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), input_mask=dict( argstr="%s", extensions=None, requires=["num_thresholds"], ), inside_value=dict( argstr="%f", position=6, requires=["th_low"], ), mode=dict( argstr="%s", position=4, requires=["num_thresholds"], xor=["th_low", "th_high"], ), num_threads=dict( nohash=True, usedefault=True, ), num_thresholds=dict( argstr="%d", position=5, ), output_image=dict( argstr="%s", extensions=None, keep_extension=True, name_source=["input_image"], name_template="%s_resampled", position=3, ), outside_value=dict( argstr="%f", position=7, requires=["th_low"], ), th_high=dict( argstr="%f", position=5, xor=["mode"], ), th_low=dict( argstr="%f", position=4, xor=["mode"], ), ) inputs = ThresholdImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value 
def test_ThresholdImage_outputs(): output_map = dict( output_image=dict( extensions=None, ), ) outputs = ThresholdImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_WarpImageMultiTransform.py000066400000000000000000000043151413403311400300020ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..resampling import WarpImageMultiTransform def test_WarpImageMultiTransform_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="%d", position=1, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), input_image=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), invert_affine=dict(), num_threads=dict( nohash=True, usedefault=True, ), out_postfix=dict( extensions=None, hash_files=False, usedefault=True, xor=["output_image"], ), output_image=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=3, xor=["out_postfix"], ), reference_image=dict( argstr="-R %s", extensions=None, xor=["tightest_box"], ), reslice_by_header=dict( argstr="--reslice-by-header", ), tightest_box=dict( argstr="--tightest-bounding-box", xor=["reference_image"], ), transformation_series=dict( argstr="%s", mandatory=True, position=-1, ), use_bspline=dict( argstr="--use-BSpline", ), use_nearest=dict( argstr="--use-NN", ), ) inputs = WarpImageMultiTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_WarpImageMultiTransform_outputs(): output_map = dict( output_image=dict( extensions=None, ), ) outputs = WarpImageMultiTransform.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_WarpTimeSeriesImageMultiTransform.py000066400000000000000000000037641413403311400320030ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..resampling import WarpTimeSeriesImageMultiTransform def test_WarpTimeSeriesImageMultiTransform_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="%d", position=1, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), input_image=dict( argstr="%s", copyfile=True, extensions=None, mandatory=True, ), invert_affine=dict(), num_threads=dict( nohash=True, usedefault=True, ), out_postfix=dict( argstr="%s", usedefault=True, ), reference_image=dict( argstr="-R %s", extensions=None, xor=["tightest_box"], ), reslice_by_header=dict( argstr="--reslice-by-header", ), tightest_box=dict( argstr="--tightest-bounding-box", xor=["reference_image"], ), transformation_series=dict( argstr="%s", copyfile=False, mandatory=True, ), use_bspline=dict( argstr="--use-Bspline", ), use_nearest=dict( argstr="--use-NN", ), ) inputs = WarpTimeSeriesImageMultiTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_WarpTimeSeriesImageMultiTransform_outputs(): output_map = dict( output_image=dict( extensions=None, ), ) outputs = WarpTimeSeriesImageMultiTransform.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_antsIntroduction.py000066400000000000000000000043721413403311400265710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..legacy import antsIntroduction def test_antsIntroduction_inputs(): input_map = dict( args=dict( argstr="%s", ), bias_field_correction=dict( argstr="-n 1", ), dimension=dict( argstr="-d %d", 
position=1, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), force_proceed=dict( argstr="-f 1", ), input_image=dict( argstr="-i %s", copyfile=False, extensions=None, mandatory=True, ), inverse_warp_template_labels=dict( argstr="-l", ), max_iterations=dict( argstr="-m %s", sep="x", ), num_threads=dict( nohash=True, usedefault=True, ), out_prefix=dict( argstr="-o %s", usedefault=True, ), quality_check=dict( argstr="-q 1", ), reference_image=dict( argstr="-r %s", copyfile=True, extensions=None, mandatory=True, ), similarity_metric=dict( argstr="-s %s", ), transformation_model=dict( argstr="-t %s", usedefault=True, ), ) inputs = antsIntroduction.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_antsIntroduction_outputs(): output_map = dict( affine_transformation=dict( extensions=None, ), input_file=dict( extensions=None, ), inverse_warp_field=dict( extensions=None, ), output_file=dict( extensions=None, ), warp_field=dict( extensions=None, ), ) outputs = antsIntroduction.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_auto_buildtemplateparallel.py000066400000000000000000000042271413403311400275710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..legacy import buildtemplateparallel def test_buildtemplateparallel_inputs(): input_map = dict( args=dict( argstr="%s", ), bias_field_correction=dict( argstr="-n 1", ), dimension=dict( argstr="-d %d", position=1, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), gradient_step_size=dict( argstr="-g %f", ), in_files=dict( argstr="%s", mandatory=True, position=-1, ), iteration_limit=dict( argstr="-i %d", usedefault=True, ), max_iterations=dict( argstr="-m %s", sep="x", ), 
num_cores=dict( argstr="-j %d", requires=["parallelization"], ), num_threads=dict( nohash=True, usedefault=True, ), out_prefix=dict( argstr="-o %s", usedefault=True, ), parallelization=dict( argstr="-c %d", usedefault=True, ), rigid_body_registration=dict( argstr="-r 1", ), similarity_metric=dict( argstr="-s %s", ), transformation_model=dict( argstr="-t %s", usedefault=True, ), use_first_as_target=dict(), ) inputs = buildtemplateparallel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_buildtemplateparallel_outputs(): output_map = dict( final_template_file=dict( extensions=None, ), subject_outfiles=dict(), template_files=dict(), ) outputs = buildtemplateparallel.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/ants/tests/test_extra_Registration.py000066400000000000000000000013561413403311400260460ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from nipype.interfaces.ants import registration import os import pytest def test_ants_mand(tmpdir): tmpdir.chdir() filepath = os.path.dirname(os.path.realpath(__file__)) datadir = os.path.realpath(os.path.join(filepath, "../../../testing/data")) ants = registration.ANTS() ants.inputs.transformation_model = "SyN" ants.inputs.moving_image = [os.path.join(datadir, "resting.nii")] ants.inputs.fixed_image = [os.path.join(datadir, "T1.nii")] ants.inputs.metric = ["MI"] with pytest.raises(ValueError) as er: ants.run() assert "ANTS requires a value for input 'radius'" in str(er.value) nipype-1.7.0/nipype/interfaces/ants/tests/test_resampling.py000066400000000000000000000064741413403311400243400ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; 
indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from nipype.interfaces.ants import ( WarpImageMultiTransform, WarpTimeSeriesImageMultiTransform, ) import os import pytest @pytest.fixture() def change_dir(request): orig_dir = os.getcwd() filepath = os.path.dirname(os.path.realpath(__file__)) datadir = os.path.realpath(os.path.join(filepath, "../../../testing/data")) os.chdir(datadir) def move2orig(): os.chdir(orig_dir) request.addfinalizer(move2orig) @pytest.fixture() def create_wimt(): wimt = WarpImageMultiTransform() wimt.inputs.input_image = "diffusion_weighted.nii" wimt.inputs.reference_image = "functional.nii" wimt.inputs.transformation_series = [ "func2anat_coreg_Affine.txt", "func2anat_InverseWarp.nii.gz", "dwi2anat_Warp.nii.gz", "dwi2anat_coreg_Affine.txt", ] return wimt def test_WarpImageMultiTransform(change_dir, create_wimt): wimt = create_wimt assert ( wimt.cmdline == "WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii \ func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz dwi2anat_coreg_Affine.txt" ) def test_WarpImageMultiTransform_invaffine_1(change_dir, create_wimt): wimt = create_wimt wimt.inputs.invert_affine = [1] assert ( wimt.cmdline == "WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii \ -i func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz dwi2anat_coreg_Affine.txt" ) def test_WarpImageMultiTransform_invaffine_2(change_dir, create_wimt): wimt = create_wimt wimt.inputs.invert_affine = [2] assert ( wimt.cmdline == "WarpImageMultiTransform 3 diffusion_weighted.nii diffusion_weighted_wimt.nii -R functional.nii func2anat_coreg_Affine.txt func2anat_InverseWarp.nii.gz dwi2anat_Warp.nii.gz -i dwi2anat_coreg_Affine.txt" ) def test_WarpImageMultiTransform_invaffine_wrong(change_dir, create_wimt): wimt = create_wimt wimt.inputs.invert_affine = [3] with pytest.raises(Exception): assert wimt.cmdline 
@pytest.fixture() def create_wtsimt(): wtsimt = WarpTimeSeriesImageMultiTransform() wtsimt.inputs.input_image = "resting.nii" wtsimt.inputs.reference_image = "ants_deformed.nii.gz" wtsimt.inputs.transformation_series = ["ants_Warp.nii.gz", "ants_Affine.txt"] return wtsimt def test_WarpTimeSeriesImageMultiTransform(change_dir, create_wtsimt): wtsimt = create_wtsimt assert ( wtsimt.cmdline == "WarpTimeSeriesImageMultiTransform 4 resting.nii resting_wtsimt.nii \ -R ants_deformed.nii.gz ants_Warp.nii.gz ants_Affine.txt" ) def test_WarpTimeSeriesImageMultiTransform_invaffine(change_dir, create_wtsimt): wtsimt = create_wtsimt wtsimt.inputs.invert_affine = [1] assert ( wtsimt.cmdline == "WarpTimeSeriesImageMultiTransform 4 resting.nii resting_wtsimt.nii \ -R ants_deformed.nii.gz ants_Warp.nii.gz -i ants_Affine.txt" ) def test_WarpTimeSeriesImageMultiTransform_invaffine_wrong(change_dir, create_wtsimt): wtsimt = create_wtsimt wtsimt.inputs.invert_affine = [0] with pytest.raises(Exception): wtsimt.cmdline nipype-1.7.0/nipype/interfaces/ants/tests/test_segmentation.py000066400000000000000000000040031413403311400246560ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from ..segmentation import LaplacianThickness from .test_resampling import change_dir import os import pytest @pytest.fixture() def change_dir(request): orig_dir = os.getcwd() filepath = os.path.dirname(os.path.realpath(__file__)) datadir = os.path.realpath(os.path.join(filepath, "../../../testing/data")) os.chdir(datadir) def move2orig(): os.chdir(orig_dir) request.addfinalizer(move2orig) @pytest.fixture() def create_lt(): lt = LaplacianThickness() # we do not run, so I stick some not really proper files as input lt.inputs.input_gm = "diffusion_weighted.nii" lt.inputs.input_wm = "functional.nii" return lt def test_LaplacianThickness_defaults(change_dir, create_lt): lt = create_lt base_cmd = "LaplacianThickness 
functional.nii diffusion_weighted.nii functional_thickness.nii" assert lt.cmdline == base_cmd lt.inputs.smooth_param = 4.5 assert lt.cmdline == base_cmd + " 4.5" lt.inputs.prior_thickness = 5.9 assert lt.cmdline == base_cmd + " 4.5 5.9" def test_LaplacianThickness_wrongargs(change_dir, create_lt): lt = create_lt lt.inputs.tolerance = 0.001 with pytest.raises( ValueError, match=r".* requires a value for input 'sulcus_prior' .*" ): lt.cmdline lt.inputs.sulcus_prior = 0.15 with pytest.raises(ValueError, match=r".* requires a value for input 'dT' .*"): lt.cmdline lt.inputs.dT = 0.01 with pytest.raises( ValueError, match=r".* requires a value for input 'prior_thickness' .*" ): lt.cmdline lt.inputs.prior_thickness = 5.9 with pytest.raises( ValueError, match=r".* requires a value for input 'smooth_param' .*" ): lt.cmdline lt.inputs.smooth_param = 4.5 assert ( lt.cmdline == "LaplacianThickness functional.nii diffusion_weighted.nii " "functional_thickness.nii 4.5 5.9 0.01 0.15 0.001" ) nipype-1.7.0/nipype/interfaces/ants/utils.py000066400000000000000000000641321413403311400211310ustar00rootroot00000000000000"""ANTs' utilities.""" import os from warnings import warn from ..base import traits, isdefined, TraitedSpec, File, Str, InputMultiObject from ..mixins import CopyHeaderInterface from .base import ANTSCommandInputSpec, ANTSCommand class ImageMathInputSpec(ANTSCommandInputSpec): dimension = traits.Int( 3, usedefault=True, position=1, argstr="%d", desc="dimension of output image" ) output_image = File( position=2, argstr="%s", name_source=["op1"], name_template="%s_maths", desc="output image file", keep_extension=True, ) operation = traits.Enum( "m", "vm", "+", "v+", "-", "v-", "/", "^", "max", "exp", "addtozero", "overadd", "abs", "total", "mean", "vtotal", "Decision", "Neg", "Project", "G", "MD", "ME", "MO", "MC", "GD", "GE", "GO", "GC", "TruncateImageIntensity", "Laplacian", "GetLargestComponent", "FillHoles", "PadImage", mandatory=True, position=3, argstr="%s", 
desc="mathematical operations", ) op1 = File( exists=True, mandatory=True, position=-2, argstr="%s", desc="first operator" ) op2 = traits.Either( File(exists=True), Str, position=-1, argstr="%s", desc="second operator" ) copy_header = traits.Bool( True, usedefault=True, desc="copy headers of the original image into the output (corrected) file", ) class ImageMathOuputSpec(TraitedSpec): output_image = File(exists=True, desc="output image file") class ImageMath(ANTSCommand, CopyHeaderInterface): """ Operations over images. Examples -------- >>> ImageMath( ... op1='structural.nii', ... operation='+', ... op2='2').cmdline 'ImageMath 3 structural_maths.nii + structural.nii 2' >>> ImageMath( ... op1='structural.nii', ... operation='Project', ... op2='1 2').cmdline 'ImageMath 3 structural_maths.nii Project structural.nii 1 2' >>> ImageMath( ... op1='structural.nii', ... operation='G', ... op2='4').cmdline 'ImageMath 3 structural_maths.nii G structural.nii 4' >>> ImageMath( ... op1='structural.nii', ... operation='TruncateImageIntensity', ... op2='0.005 0.999 256').cmdline 'ImageMath 3 structural_maths.nii TruncateImageIntensity structural.nii 0.005 0.999 256' By default, Nipype copies headers from the first input image (``op1``) to the output image. For the ``PadImage`` operation, the header cannot be copied from inputs to outputs, and so ``copy_header`` option is automatically set to ``False``. >>> pad = ImageMath( ... op1='structural.nii', ... operation='PadImage') >>> pad.inputs.copy_header False While the operation is set to ``PadImage``, setting ``copy_header = True`` will have no effect. 
>>> pad.inputs.copy_header = True >>> pad.inputs.copy_header False For any other operation, ``copy_header`` can be enabled/disabled normally: >>> pad.inputs.operation = "ME" >>> pad.inputs.copy_header = True >>> pad.inputs.copy_header True """ _cmd = "ImageMath" input_spec = ImageMathInputSpec output_spec = ImageMathOuputSpec _copy_header_map = {"output_image": "op1"} def __init__(self, **inputs): super(ImageMath, self).__init__(**inputs) if self.inputs.operation in ("PadImage",): self.inputs.copy_header = False self.inputs.on_trait_change(self._operation_update, "operation") self.inputs.on_trait_change(self._copyheader_update, "copy_header") def _operation_update(self): if self.inputs.operation in ("PadImage",): self.inputs.copy_header = False def _copyheader_update(self): if self.inputs.copy_header and self.inputs.operation in ("PadImage",): warn("copy_header cannot be updated to True with PadImage as operation.") self.inputs.copy_header = False class ResampleImageBySpacingInputSpec(ANTSCommandInputSpec): dimension = traits.Int( 3, usedefault=True, position=1, argstr="%d", desc="dimension of output image" ) input_image = File( exists=True, mandatory=True, position=2, argstr="%s", desc="input image file" ) output_image = File( position=3, argstr="%s", name_source=["input_image"], name_template="%s_resampled", desc="output image file", keep_extension=True, ) out_spacing = traits.Either( traits.List(traits.Float, minlen=2, maxlen=3), traits.Tuple(traits.Float, traits.Float, traits.Float), traits.Tuple(traits.Float, traits.Float), position=4, argstr="%s", mandatory=True, desc="output spacing", ) apply_smoothing = traits.Bool( False, argstr="%d", position=5, desc="smooth before resampling" ) addvox = traits.Int( argstr="%d", position=6, requires=["apply_smoothing"], desc="addvox pads each dimension by addvox", ) nn_interp = traits.Bool( argstr="%d", desc="nn interpolation", position=-1, requires=["addvox"] ) class ResampleImageBySpacingOutputSpec(TraitedSpec): 
output_image = File(exists=True, desc="resampled file") class ResampleImageBySpacing(ANTSCommand): """ Resample an image with a given spacing. Examples -------- >>> res = ResampleImageBySpacing(dimension=3) >>> res.inputs.input_image = 'structural.nii' >>> res.inputs.output_image = 'output.nii.gz' >>> res.inputs.out_spacing = (4, 4, 4) >>> res.cmdline #doctest: +ELLIPSIS 'ResampleImageBySpacing 3 structural.nii output.nii.gz 4 4 4' >>> res = ResampleImageBySpacing(dimension=3) >>> res.inputs.input_image = 'structural.nii' >>> res.inputs.output_image = 'output.nii.gz' >>> res.inputs.out_spacing = (4, 4, 4) >>> res.inputs.apply_smoothing = True >>> res.cmdline #doctest: +ELLIPSIS 'ResampleImageBySpacing 3 structural.nii output.nii.gz 4 4 4 1' >>> res = ResampleImageBySpacing(dimension=3) >>> res.inputs.input_image = 'structural.nii' >>> res.inputs.output_image = 'output.nii.gz' >>> res.inputs.out_spacing = (0.4, 0.4, 0.4) >>> res.inputs.apply_smoothing = True >>> res.inputs.addvox = 2 >>> res.inputs.nn_interp = False >>> res.cmdline #doctest: +ELLIPSIS 'ResampleImageBySpacing 3 structural.nii output.nii.gz 0.4 0.4 0.4 1 2 0' """ _cmd = "ResampleImageBySpacing" input_spec = ResampleImageBySpacingInputSpec output_spec = ResampleImageBySpacingOutputSpec def _format_arg(self, name, trait_spec, value): if name == "out_spacing": if len(value) != self.inputs.dimension: raise ValueError("out_spacing dimensions should match dimension") value = " ".join(["%g" % d for d in value]) return super(ResampleImageBySpacing, self)._format_arg(name, trait_spec, value) class ThresholdImageInputSpec(ANTSCommandInputSpec): dimension = traits.Int( 3, usedefault=True, position=1, argstr="%d", desc="dimension of output image" ) input_image = File( exists=True, mandatory=True, position=2, argstr="%s", desc="input image file" ) output_image = File( position=3, argstr="%s", name_source=["input_image"], name_template="%s_resampled", desc="output image file", keep_extension=True, ) mode = 
traits.Enum( "Otsu", "Kmeans", argstr="%s", position=4, requires=["num_thresholds"], xor=["th_low", "th_high"], desc="whether to run Otsu / Kmeans thresholding", ) num_thresholds = traits.Int(position=5, argstr="%d", desc="number of thresholds") input_mask = File( exists=True, requires=["num_thresholds"], argstr="%s", desc="input mask for Otsu, Kmeans", ) th_low = traits.Float(position=4, argstr="%f", xor=["mode"], desc="lower threshold") th_high = traits.Float( position=5, argstr="%f", xor=["mode"], desc="upper threshold" ) inside_value = traits.Float( 1, position=6, argstr="%f", requires=["th_low"], desc="inside value" ) outside_value = traits.Float( 0, position=7, argstr="%f", requires=["th_low"], desc="outside value" ) copy_header = traits.Bool( True, mandatory=True, usedefault=True, desc="copy headers of the original image into the output (corrected) file", ) class ThresholdImageOutputSpec(TraitedSpec): output_image = File(exists=True, desc="resampled file") class ThresholdImage(ANTSCommand, CopyHeaderInterface): """ Apply thresholds on images. 
Examples -------- >>> thres = ThresholdImage(dimension=3) >>> thres.inputs.input_image = 'structural.nii' >>> thres.inputs.output_image = 'output.nii.gz' >>> thres.inputs.th_low = 0.5 >>> thres.inputs.th_high = 1.0 >>> thres.inputs.inside_value = 1.0 >>> thres.inputs.outside_value = 0.0 >>> thres.cmdline #doctest: +ELLIPSIS 'ThresholdImage 3 structural.nii output.nii.gz 0.500000 1.000000 1.000000 0.000000' >>> thres = ThresholdImage(dimension=3) >>> thres.inputs.input_image = 'structural.nii' >>> thres.inputs.output_image = 'output.nii.gz' >>> thres.inputs.mode = 'Kmeans' >>> thres.inputs.num_thresholds = 4 >>> thres.cmdline #doctest: +ELLIPSIS 'ThresholdImage 3 structural.nii output.nii.gz Kmeans 4' """ _cmd = "ThresholdImage" input_spec = ThresholdImageInputSpec output_spec = ThresholdImageOutputSpec _copy_header_map = {"output_image": "input_image"} class AIInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, usedefault=True, argstr="-d %d", desc="dimension of output image" ) verbose = traits.Bool( False, usedefault=True, argstr="-v %d", desc="enable verbosity" ) fixed_image = File( exists=True, mandatory=True, desc="Image to which the moving_image should be transformed", ) moving_image = File( exists=True, mandatory=True, desc="Image that will be transformed to fixed_image", ) fixed_image_mask = File(exists=True, argstr="-x %s", desc="fixed mage mask") moving_image_mask = File( exists=True, requires=["fixed_image_mask"], desc="moving mage mask" ) metric_trait = ( traits.Enum("Mattes", "GC", "MI"), traits.Int(32), traits.Enum("Regular", "Random", "None"), traits.Range(value=0.2, low=0.0, high=1.0), ) metric = traits.Tuple( *metric_trait, argstr="-m %s", mandatory=True, desc="the metric(s) to use." 
) transform = traits.Tuple( traits.Enum("Affine", "Rigid", "Similarity"), traits.Range(value=0.1, low=0.0, exclude_low=True), argstr="-t %s[%g]", usedefault=True, desc="Several transform options are available", ) principal_axes = traits.Bool( False, usedefault=True, argstr="-p %d", xor=["blobs"], desc="align using principal axes", ) search_factor = traits.Tuple( traits.Float(20), traits.Range(value=0.12, low=0.0, high=1.0), usedefault=True, argstr="-s [%g,%g]", desc="search factor", ) search_grid = traits.Either( traits.Tuple( traits.Float, traits.Tuple(traits.Float, traits.Float, traits.Float) ), traits.Tuple(traits.Float, traits.Tuple(traits.Float, traits.Float)), argstr="-g %s", desc="Translation search grid in mm", min_ver="2.3.0", ) convergence = traits.Tuple( traits.Range(low=1, high=10000, value=10), traits.Float(1e-6), traits.Range(low=1, high=100, value=10), usedefault=True, argstr="-c [%d,%g,%d]", desc="convergence", ) output_transform = File( "initialization.mat", usedefault=True, argstr="-o %s", desc="output file name" ) class AIOuputSpec(TraitedSpec): output_transform = File(exists=True, desc="output file name") class AI(ANTSCommand): """ Calculate the optimal linear transform parameters for aligning two images. Examples -------- >>> AI( ... fixed_image='structural.nii', ... moving_image='epi.nii', ... metric=('Mattes', 32, 'Regular', 1), ... ).cmdline 'antsAI -c [10,1e-06,10] -d 3 -m Mattes[structural.nii,epi.nii,32,Regular,1] -o initialization.mat -p 0 -s [20,0.12] -t Affine[0.1] -v 0' >>> AI(fixed_image='structural.nii', ... moving_image='epi.nii', ... metric=('Mattes', 32, 'Regular', 1), ... search_grid=(12, (1, 1, 1)), ... 
).cmdline 'antsAI -c [10,1e-06,10] -d 3 -m Mattes[structural.nii,epi.nii,32,Regular,1] -o initialization.mat -p 0 -s [20,0.12] -g [12.0,1x1x1] -t Affine[0.1] -v 0' """ _cmd = "antsAI" input_spec = AIInputSpec output_spec = AIOuputSpec def _run_interface(self, runtime, correct_return_codes=(0,)): runtime = super(AI, self)._run_interface(runtime, correct_return_codes) self._output = { "output_transform": os.path.join( runtime.cwd, os.path.basename(self.inputs.output_transform) ) } return runtime def _format_arg(self, opt, spec, val): if opt == "metric": val = "%s[{fixed_image},{moving_image},%d,%s,%g]" % val val = val.format( fixed_image=self.inputs.fixed_image, moving_image=self.inputs.moving_image, ) return spec.argstr % val if opt == "search_grid": fmtval = "[%s,%s]" % (val[0], "x".join("%g" % v for v in val[1])) return spec.argstr % fmtval if opt == "fixed_image_mask": if isdefined(self.inputs.moving_image_mask): return spec.argstr % ("[%s,%s]" % (val, self.inputs.moving_image_mask)) return super(AI, self)._format_arg(opt, spec, val) def _list_outputs(self): return getattr(self, "_output") class AverageAffineTransformInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, argstr="%d", mandatory=True, position=0, desc="image dimension (2 or 3)" ) output_affine_transform = File( argstr="%s", mandatory=True, position=1, desc="Outputfname.txt: the name of the resulting transform.", ) transforms = InputMultiObject( File(exists=True), argstr="%s", mandatory=True, position=3, desc="transforms to average", ) class AverageAffineTransformOutputSpec(TraitedSpec): affine_transform = File(exists=True, desc="average transform file") class AverageAffineTransform(ANTSCommand): """ Examples -------- >>> from nipype.interfaces.ants import AverageAffineTransform >>> avg = AverageAffineTransform() >>> avg.inputs.dimension = 3 >>> avg.inputs.transforms = ['trans.mat', 'func_to_struct.mat'] >>> avg.inputs.output_affine_transform = 'MYtemplatewarp.mat' >>> avg.cmdline 
'AverageAffineTransform 3 MYtemplatewarp.mat trans.mat func_to_struct.mat' """ _cmd = "AverageAffineTransform" input_spec = AverageAffineTransformInputSpec output_spec = AverageAffineTransformOutputSpec def _format_arg(self, opt, spec, val): return super(AverageAffineTransform, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() outputs["affine_transform"] = os.path.abspath( self.inputs.output_affine_transform ) return outputs class AverageImagesInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, argstr="%d", mandatory=True, position=0, desc="image dimension (2 or 3)" ) output_average_image = File( "average.nii", argstr="%s", position=1, usedefault=True, hash_files=False, desc="the name of the resulting image.", ) normalize = traits.Bool( argstr="%d", mandatory=True, position=2, desc="Normalize: if true, the 2nd image is divided by its mean. " "This will select the largest image to average into.", ) images = InputMultiObject( File(exists=True), argstr="%s", mandatory=True, position=3, desc="image to apply transformation to (generally a coregistered functional)", ) class AverageImagesOutputSpec(TraitedSpec): output_average_image = File(exists=True, desc="average image file") class AverageImages(ANTSCommand): """ Examples -------- >>> from nipype.interfaces.ants import AverageImages >>> avg = AverageImages() >>> avg.inputs.dimension = 3 >>> avg.inputs.output_average_image = "average.nii.gz" >>> avg.inputs.normalize = True >>> avg.inputs.images = ['rc1s1.nii', 'rc1s1.nii'] >>> avg.cmdline 'AverageImages 3 average.nii.gz 1 rc1s1.nii rc1s1.nii' """ _cmd = "AverageImages" input_spec = AverageImagesInputSpec output_spec = AverageImagesOutputSpec def _format_arg(self, opt, spec, val): return super(AverageImages, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() outputs["output_average_image"] = os.path.realpath( self.inputs.output_average_image ) return outputs class 
MultiplyImagesInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, argstr="%d", mandatory=True, position=0, desc="image dimension (2 or 3)" ) first_input = File( argstr="%s", exists=True, mandatory=True, position=1, desc="image 1" ) second_input = traits.Either( File(exists=True), traits.Float, argstr="%s", mandatory=True, position=2, desc="image 2 or multiplication weight", ) output_product_image = File( argstr="%s", mandatory=True, position=3, desc="Outputfname.nii.gz: the name of the resulting image.", ) class MultiplyImagesOutputSpec(TraitedSpec): output_product_image = File(exists=True, desc="average image file") class MultiplyImages(ANTSCommand): """ Examples -------- >>> from nipype.interfaces.ants import MultiplyImages >>> test = MultiplyImages() >>> test.inputs.dimension = 3 >>> test.inputs.first_input = 'moving2.nii' >>> test.inputs.second_input = 0.25 >>> test.inputs.output_product_image = "out.nii" >>> test.cmdline 'MultiplyImages 3 moving2.nii 0.25 out.nii' """ _cmd = "MultiplyImages" input_spec = MultiplyImagesInputSpec output_spec = MultiplyImagesOutputSpec def _format_arg(self, opt, spec, val): return super(MultiplyImages, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() outputs["output_product_image"] = os.path.abspath( self.inputs.output_product_image ) return outputs class CreateJacobianDeterminantImageInputSpec(ANTSCommandInputSpec): imageDimension = traits.Enum( 3, 2, argstr="%d", mandatory=True, position=0, desc="image dimension (2 or 3)" ) deformationField = File( argstr="%s", exists=True, mandatory=True, position=1, desc="deformation transformation file", ) outputImage = File(argstr="%s", mandatory=True, position=2, desc="output filename") doLogJacobian = traits.Enum( 0, 1, argstr="%d", position=3, desc="return the log jacobian" ) useGeometric = traits.Enum( 0, 1, argstr="%d", position=4, desc="return the geometric jacobian" ) class CreateJacobianDeterminantImageOutputSpec(TraitedSpec): 
jacobian_image = File(exists=True, desc="jacobian image") class CreateJacobianDeterminantImage(ANTSCommand): """ Examples -------- >>> from nipype.interfaces.ants import CreateJacobianDeterminantImage >>> jacobian = CreateJacobianDeterminantImage() >>> jacobian.inputs.imageDimension = 3 >>> jacobian.inputs.deformationField = 'ants_Warp.nii.gz' >>> jacobian.inputs.outputImage = 'out_name.nii.gz' >>> jacobian.cmdline 'CreateJacobianDeterminantImage 3 ants_Warp.nii.gz out_name.nii.gz' """ _cmd = "CreateJacobianDeterminantImage" input_spec = CreateJacobianDeterminantImageInputSpec output_spec = CreateJacobianDeterminantImageOutputSpec def _format_arg(self, opt, spec, val): return super(CreateJacobianDeterminantImage, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() outputs["jacobian_image"] = os.path.abspath(self.inputs.outputImage) return outputs class AffineInitializerInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, usedefault=True, position=0, argstr="%s", desc="dimension" ) fixed_image = File( exists=True, mandatory=True, position=1, argstr="%s", desc="reference image" ) moving_image = File( exists=True, mandatory=True, position=2, argstr="%s", desc="moving image" ) out_file = File( "transform.mat", usedefault=True, position=3, argstr="%s", desc="output transform file", ) # Defaults in antsBrainExtraction.sh -> 15 0.1 0 10 search_factor = traits.Float( 15.0, usedefault=True, position=4, argstr="%f", desc="increments (degrees) for affine search", ) radian_fraction = traits.Range( 0.0, 1.0, value=0.1, usedefault=True, position=5, argstr="%f", desc="search this arc +/- principal axes", ) principal_axes = traits.Bool( False, usedefault=True, position=6, argstr="%d", desc="whether the rotation is searched around an initial principal axis alignment.", ) local_search = traits.Int( 10, usedefault=True, position=7, argstr="%d", desc=" determines if a local optimization is run at each search point for the set " "number 
of iterations", ) class AffineInitializerOutputSpec(TraitedSpec): out_file = File(desc="output transform file") class AffineInitializer(ANTSCommand): """ Initialize an affine transform (as in antsBrainExtraction.sh) >>> from nipype.interfaces.ants import AffineInitializer >>> init = AffineInitializer() >>> init.inputs.fixed_image = 'fixed1.nii' >>> init.inputs.moving_image = 'moving1.nii' >>> init.cmdline 'antsAffineInitializer 3 fixed1.nii moving1.nii transform.mat 15.000000 0.100000 0 10' """ _cmd = "antsAffineInitializer" input_spec = AffineInitializerInputSpec output_spec = AffineInitializerOutputSpec def _list_outputs(self): return {"out_file": os.path.abspath(self.inputs.out_file)} class ComposeMultiTransformInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, argstr="%d", usedefault=True, position=0, desc="image dimension (2 or 3)" ) output_transform = File( argstr="%s", position=1, name_source=["transforms"], name_template="%s_composed", keep_extension=True, desc="the name of the resulting transform.", ) reference_image = File( argstr="%s", position=2, desc="Reference image (only necessary when output is warpfield)", ) transforms = InputMultiObject( File(exists=True), argstr="%s", mandatory=True, position=3, desc="transforms to average", ) class ComposeMultiTransformOutputSpec(TraitedSpec): output_transform = File(exists=True, desc="Composed transform file") class ComposeMultiTransform(ANTSCommand): """ Take a set of transformations and convert them to a single transformation matrix/warpfield. 
Examples -------- >>> from nipype.interfaces.ants import ComposeMultiTransform >>> compose_transform = ComposeMultiTransform() >>> compose_transform.inputs.dimension = 3 >>> compose_transform.inputs.transforms = ['struct_to_template.mat', 'func_to_struct.mat'] >>> compose_transform.cmdline 'ComposeMultiTransform 3 struct_to_template_composed.mat struct_to_template.mat func_to_struct.mat' """ _cmd = "ComposeMultiTransform" input_spec = ComposeMultiTransformInputSpec output_spec = ComposeMultiTransformOutputSpec class LabelGeometryInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, argstr="%d", usedefault=True, position=0, desc="image dimension (2 or 3)" ) label_image = File( argstr="%s", position=1, mandatory=True, desc="label image to use for extracting geometry measures", ) intensity_image = File( value="[]", exists=True, argstr="%s", mandatory=True, usedefault=True, position=2, desc="Intensity image to extract values from. " "This is an optional input", ) output_file = traits.Str( name_source=["label_image"], name_template="%s.csv", argstr="%s", position=3, desc="name of output file", ) class LabelGeometryOutputSpec(TraitedSpec): output_file = File(exists=True, desc="CSV file of geometry measures") class LabelGeometry(ANTSCommand): """ Extracts geometry measures using a label file and an optional image file Examples -------- >>> from nipype.interfaces.ants import LabelGeometry >>> label_extract = LabelGeometry() >>> label_extract.inputs.dimension = 3 >>> label_extract.inputs.label_image = 'atlas.nii.gz' >>> label_extract.cmdline 'LabelGeometryMeasures 3 atlas.nii.gz [] atlas.csv' >>> label_extract.inputs.intensity_image = 'ants_Warp.nii.gz' >>> label_extract.cmdline 'LabelGeometryMeasures 3 atlas.nii.gz ants_Warp.nii.gz atlas.csv' """ _cmd = "LabelGeometryMeasures" input_spec = LabelGeometryInputSpec output_spec = LabelGeometryOutputSpec 
nipype-1.7.0/nipype/interfaces/ants/visualization.py000066400000000000000000000156521413403311400226750ustar00rootroot00000000000000# -*- coding: utf-8 -*- """The ants visualisation module provides basic functions based on ITK. """ import os from ..base import TraitedSpec, File, traits from .base import ANTSCommand, ANTSCommandInputSpec class ConvertScalarImageToRGBInputSpec(ANTSCommandInputSpec): dimension = traits.Enum( 3, 2, argstr="%d", usedefault=True, desc="image dimension (2 or 3)", mandatory=True, position=0, ) input_image = File( argstr="%s", exists=True, desc="Main input is a 3-D grayscale image.", mandatory=True, position=1, ) output_image = traits.Str( "rgb.nii.gz", argstr="%s", usedefault=True, desc="rgb output image", position=2 ) mask_image = File( "none", argstr="%s", exists=True, desc="mask image", position=3, usedefault=True ) colormap = traits.Enum( "grey", "red", "green", "blue", "copper", "jet", "hsv", "spring", "summer", "autumn", "winter", "hot", "cool", "overunder", "custom", argstr="%s", desc="Select a colormap", mandatory=True, position=4, ) custom_color_map_file = traits.Str( "none", argstr="%s", usedefault=True, desc="custom color map file", position=5 ) minimum_input = traits.Int( argstr="%d", desc="minimum input", mandatory=True, position=6 ) maximum_input = traits.Int( argstr="%d", desc="maximum input", mandatory=True, position=7 ) minimum_RGB_output = traits.Int(0, usedefault=True, argstr="%d", position=8) maximum_RGB_output = traits.Int(255, usedefault=True, argstr="%d", position=9) class ConvertScalarImageToRGBOutputSpec(TraitedSpec): output_image = File(exists=True, desc="converted RGB image") class ConvertScalarImageToRGB(ANTSCommand): """ Convert scalar images to RGB. 
Examples -------- >>> from nipype.interfaces.ants.visualization import ConvertScalarImageToRGB >>> converter = ConvertScalarImageToRGB() >>> converter.inputs.dimension = 3 >>> converter.inputs.input_image = 'T1.nii.gz' >>> converter.inputs.colormap = 'jet' >>> converter.inputs.minimum_input = 0 >>> converter.inputs.maximum_input = 6 >>> converter.cmdline 'ConvertScalarImageToRGB 3 T1.nii.gz rgb.nii.gz none jet none 0 6 0 255' """ _cmd = "ConvertScalarImageToRGB" input_spec = ConvertScalarImageToRGBInputSpec output_spec = ConvertScalarImageToRGBOutputSpec def _format_arg(self, opt, spec, val): return super(ConvertScalarImageToRGB, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() outputs["output_image"] = os.path.join(os.getcwd(), self.inputs.output_image) return outputs class CreateTiledMosaicInputSpec(ANTSCommandInputSpec): input_image = File( argstr="-i %s", exists=True, desc="Main input is a 3-D grayscale image.", mandatory=True, ) rgb_image = File( argstr="-r %s", exists=True, desc=( "An optional Rgb image can be added as an overlay." "It must have the same image" "geometry as the input grayscale image." ), mandatory=True, ) mask_image = File( argstr="-x %s", exists=True, desc="Specifies the ROI of the RGB voxels used." ) alpha_value = traits.Float( argstr="-a %.2f", desc=( "If an Rgb image is provided, render the overlay " "using the specified alpha parameter." ), ) output_image = traits.Str( "output.png", argstr="-o %s", desc="The output consists of the tiled mosaic image.", usedefault=True, ) tile_geometry = traits.Str( argstr="-t %s", desc=( "The tile geometry specifies the number of rows and columns" 'in the output image. For example, if the user specifies "5x10", ' "then 5 rows by 10 columns of slices are rendered. If R < 0 and C > " "0 (or vice versa), the negative value is selected" "based on direction." ), ) direction = traits.Int( argstr="-d %d", desc=( "Specifies the direction of " "the slices. 
If no direction is specified, the " "direction with the coarsest spacing is chosen." ), ) pad_or_crop = traits.Str( argstr="-p %s", desc="argument passed to -p flag:" "[padVoxelWidth,]" "[lowerPadding[0]xlowerPadding[1],upperPadding[0]xupperPadding[1]," "constantValue]" "The user can specify whether to pad or crop a specified " "voxel-width boundary of each individual slice. For this " "program, cropping is simply padding with negative voxel-widths." "If one pads (+), the user can also specify a constant pad " "value (default = 0). If a mask is specified, the user can use " 'the mask to define the region, by using the keyword "mask"' ' plus an offset, e.g. "-p mask+3".', ) slices = traits.Str( argstr="-s %s", desc=( "Number of slices to increment Slice1xSlice2xSlice3" "[numberOfSlicesToIncrement,,]" ), ) flip_slice = traits.Str(argstr="-f %s", desc="flipXxflipY") permute_axes = traits.Bool(argstr="-g", desc="doPermute") class CreateTiledMosaicOutputSpec(TraitedSpec): output_image = File(exists=True, desc="image file") class CreateTiledMosaic(ANTSCommand): """The program CreateTiledMosaic in conjunction with ConvertScalarImageToRGB provides useful functionality for common image analysis tasks. The basic usage of CreateTiledMosaic is to tile a 3-D image volume slice-wise into a 2-D image. 
Examples -------- >>> from nipype.interfaces.ants.visualization import CreateTiledMosaic >>> mosaic_slicer = CreateTiledMosaic() >>> mosaic_slicer.inputs.input_image = 'T1.nii.gz' >>> mosaic_slicer.inputs.rgb_image = 'rgb.nii.gz' >>> mosaic_slicer.inputs.mask_image = 'mask.nii.gz' >>> mosaic_slicer.inputs.output_image = 'output.png' >>> mosaic_slicer.inputs.alpha_value = 0.5 >>> mosaic_slicer.inputs.direction = 2 >>> mosaic_slicer.inputs.pad_or_crop = '[ -15x -50 , -15x -30 ,0]' >>> mosaic_slicer.inputs.slices = '[2 ,100 ,160]' >>> mosaic_slicer.cmdline 'CreateTiledMosaic -a 0.50 -d 2 -i T1.nii.gz -x mask.nii.gz -o output.png -p [ -15x -50 , -15x -30 ,0] \ -r rgb.nii.gz -s [2 ,100 ,160]' """ _cmd = "CreateTiledMosaic" input_spec = CreateTiledMosaicInputSpec output_spec = CreateTiledMosaicOutputSpec def _list_outputs(self): outputs = self._outputs().get() outputs["output_image"] = os.path.join(os.getcwd(), self.inputs.output_image) return outputs nipype-1.7.0/nipype/interfaces/base/000077500000000000000000000000001413403311400173565ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/base/__init__.py000066400000000000000000000020301413403311400214620ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Nipype base interfaces ---------------------- This module defines the API of all nipype interfaces. 
""" from traits.trait_handlers import TraitDictObject, TraitListObject from traits.trait_errors import TraitError from .core import ( Interface, BaseInterface, SimpleInterface, CommandLine, StdOutCommandLine, MpiCommandLine, SEMLikeCommandLine, LibraryBaseInterface, PackageInfo, ) from .specs import ( BaseTraitedSpec, TraitedSpec, DynamicTraitedSpec, BaseInterfaceInputSpec, CommandLineInputSpec, StdOutCommandLineInputSpec, ) from .traits_extension import ( traits, Undefined, isdefined, has_metadata, File, ImageFile, Directory, Str, DictStrStr, OutputMultiObject, InputMultiObject, OutputMultiPath, InputMultiPath, ) from .support import Bunch, InterfaceResult, NipypeInterfaceError nipype-1.7.0/nipype/interfaces/base/core.py000066400000000000000000001107661413403311400206730ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Nipype interfaces core ...................... Defines the ``Interface`` API and the body of the most basic interfaces. The I/O specifications corresponding to these base interfaces are found in the ``specs`` module. """ import os import subprocess as sp import shlex import simplejson as json from traits.trait_errors import TraitError from ... 
import config, logging, LooseVersion from ...utils.provenance import write_provenance from ...utils.misc import str2bool from ...utils.filemanip import ( canonicalize_env, get_dependencies, indirectory, split_filename, which, ) from ...utils.subprocess import run_command from ...external.due import due from .traits_extension import traits, isdefined, Undefined from .specs import ( BaseInterfaceInputSpec, CommandLineInputSpec, StdOutCommandLineInputSpec, MpiCommandLineInputSpec, get_filecopy_info, ) from .support import ( RuntimeContext, InterfaceResult, NipypeInterfaceError, format_help, ) iflogger = logging.getLogger("nipype.interface") VALID_TERMINAL_OUTPUT = [ "stream", "allatonce", "file", "file_split", "file_stdout", "file_stderr", "none", ] __docformat__ = "restructuredtext" class Interface(object): """This is an abstract definition for Interface objects. It provides no functionality. It defines the necessary attributes and methods all Interface objects should have. """ input_spec = None """ The specification of the input, defined by a :py:class:`~traits.has_traits.HasTraits` class. """ output_spec = None """ The specification of the output, defined by a :py:class:`~traits.has_traits.HasTraits` class. """ _can_resume = False # See property below _always_run = False # See property below @property def can_resume(self): """Defines if the interface can reuse partial results after interruption. Only applies to interfaces being run within a workflow context.""" return self._can_resume @property def always_run(self): """Should the interface be always run even if the inputs were not changed? 
Only applies to interfaces being run within a workflow context.""" return self._always_run @property def version(self): """interfaces should implement a version property""" raise NotImplementedError @classmethod def _outputs(cls): """Initializes outputs""" raise NotImplementedError @classmethod def help(cls, returnhelp=False): """Prints class help""" allhelp = format_help(cls) if returnhelp: return allhelp print(allhelp) return None # R1710 def __init__(self): """Subclasses must implement __init__""" raise NotImplementedError def run(self): """Execute the command.""" raise NotImplementedError def aggregate_outputs(self, runtime=None, needed_outputs=None): """Called to populate outputs""" raise NotImplementedError def _list_outputs(self): """List expected outputs""" raise NotImplementedError @classmethod def _get_filecopy_info(cls): """Provides information about file inputs to copy or link to cwd. Necessary for pipeline operation """ iflogger.warning( "_get_filecopy_info member of Interface was deprecated " "in nipype-1.1.6 and will be removed in 1.2.0" ) return get_filecopy_info(cls) class BaseInterface(Interface): """Implement common interface functionality. * Initializes inputs/outputs from input_spec/output_spec * Provides help based on input_spec and output_spec * Checks for mandatory inputs before running an interface * Runs an interface and returns results * Determines which inputs should be copied or linked to cwd This class does not implement aggregate_outputs, input_spec or output_spec. These should be defined by derived classes. This class cannot be instantiated. Attributes ---------- input_spec: :obj:`nipype.interfaces.base.specs.TraitedSpec` points to the traited class for the inputs output_spec: :obj:`nipype.interfaces.base.specs.TraitedSpec` points to the traited class for the outputs _redirect_x: bool should be set to ``True`` when the interface requires connecting to a ``$DISPLAY`` (default is ``False``). 
resource_monitor: bool If ``False``, prevents resource-monitoring this interface If ``True`` monitoring will be enabled IFF the general Nipype config is set on (``resource_monitor = true``). """ input_spec = BaseInterfaceInputSpec _version = None _additional_metadata = [] _redirect_x = False _references = [] resource_monitor = True # Enabled for this interface IFF enabled in the config _etelemetry_version_data = None def __init__( self, from_file=None, resource_monitor=None, ignore_exception=False, **inputs ): if ( config.getboolean("execution", "check_version") and "NIPYPE_NO_ET" not in os.environ ): from ... import check_latest_version if BaseInterface._etelemetry_version_data is None: BaseInterface._etelemetry_version_data = check_latest_version() if not self.input_spec: raise Exception("No input_spec in class: %s" % self.__class__.__name__) # Create input spec, disable any defaults that are unavailable due to # version, and then apply the inputs that were passed. self.inputs = self.input_spec() unavailable_traits = self._check_version_requirements( self.inputs, permissive=True ) if unavailable_traits: self.inputs.trait_set(**{k: Undefined for k in unavailable_traits}) self.inputs.trait_set(**inputs) self.ignore_exception = ignore_exception if resource_monitor is not None: self.resource_monitor = resource_monitor if from_file is not None: self.load_inputs_from_json(from_file, overwrite=True) for name, value in list(inputs.items()): setattr(self.inputs, name, value) def _outputs(self): """Returns a bunch containing output fields for the class""" outputs = None if self.output_spec: outputs = self.output_spec() return outputs def _check_requires(self, spec, name, value): """check if required inputs are satisfied""" if spec.requires: values = [ not isdefined(getattr(self.inputs, field)) for field in spec.requires ] if any(values) and isdefined(value): if len(values) > 1: fmt = ( "%s requires values for inputs %s because '%s' is set. 
" "For a list of required inputs, see %s.help()" ) else: fmt = ( "%s requires a value for input %s because '%s' is set. " "For a list of required inputs, see %s.help()" ) msg = fmt % ( self.__class__.__name__, ", ".join("'%s'" % req for req in spec.requires), name, self.__class__.__name__, ) raise ValueError(msg) def _check_xor(self, spec, name, value): """check if mutually exclusive inputs are satisfied""" if spec.xor: values = [isdefined(getattr(self.inputs, field)) for field in spec.xor] if not any(values) and not isdefined(value): msg = ( "%s requires a value for one of the inputs '%s'. " "For a list of required inputs, see %s.help()" % ( self.__class__.__name__, ", ".join(spec.xor), self.__class__.__name__, ) ) raise ValueError(msg) def _check_mandatory_inputs(self): """Raises an exception if a mandatory input is Undefined""" for name, spec in list(self.inputs.traits(mandatory=True).items()): value = getattr(self.inputs, name) self._check_xor(spec, name, value) if not isdefined(value) and spec.xor is None: msg = ( "%s requires a value for input '%s'. " "For a list of required inputs, see %s.help()" % (self.__class__.__name__, name, self.__class__.__name__) ) raise ValueError(msg) if isdefined(value): self._check_requires(spec, name, value) for name, spec in list( self.inputs.traits(mandatory=None, transient=None).items() ): self._check_requires(spec, name, getattr(self.inputs, name)) def _check_version_requirements(self, trait_object, permissive=False): """Raises an exception on version mismatch Set the ``permissive`` attribute to True to suppress warnings and exceptions. This is currently only used in __init__ to silently identify unavailable traits. 
""" unavailable_traits = [] # check minimum version check = dict(min_ver=lambda t: t is not None) names = trait_object.trait_names(**check) if names and self.version: version = LooseVersion(str(self.version)) for name in names: min_ver = LooseVersion(str(trait_object.traits()[name].min_ver)) try: too_old = min_ver > version except TypeError as err: msg = ( f"Nipype cannot validate the package version {version!r} for " f"{self.__class__.__name__}. Trait {name} requires version >={min_ver}." ) if not permissive: iflogger.warning(f"{msg}. Please verify validity.") if config.getboolean("execution", "stop_on_unknown_version"): raise ValueError(msg) from err continue if too_old: unavailable_traits.append(name) if not isdefined(getattr(trait_object, name)): continue if not permissive: raise Exception( "Trait %s (%s) (version %s < required %s)" % (name, self.__class__.__name__, version, min_ver) ) # check maximum version check = dict(max_ver=lambda t: t is not None) names = trait_object.trait_names(**check) if names and self.version: version = LooseVersion(str(self.version)) for name in names: max_ver = LooseVersion(str(trait_object.traits()[name].max_ver)) try: too_new = max_ver < version except TypeError as err: msg = ( f"Nipype cannot validate the package version {version!r} for " f"{self.__class__.__name__}. Trait {name} requires version <={max_ver}." ) if not permissive: iflogger.warning(f"{msg}. 
Please verify validity.") if config.getboolean("execution", "stop_on_unknown_version"): raise ValueError(msg) from err continue if too_new: unavailable_traits.append(name) if not isdefined(getattr(trait_object, name)): continue if not permissive: raise Exception( "Trait %s (%s) (version %s > required %s)" % (name, self.__class__.__name__, version, max_ver) ) return unavailable_traits def _run_interface(self, runtime): """Core function that executes interface""" raise NotImplementedError def _duecredit_cite(self): """Add the interface references to the duecredit citations""" for r in self._references: r["path"] = self.__module__ due.cite(**r) def run(self, cwd=None, ignore_exception=None, **inputs): """Execute this interface. This interface will not raise an exception if runtime.returncode is non-zero. Parameters ---------- cwd : specify a folder where the interface should be run inputs : allows the interface settings to be updated Returns ------- results : :obj:`nipype.interfaces.base.support.InterfaceResult` A copy of the instance that was executed, provenance information and, if successful, results """ rtc = RuntimeContext( resource_monitor=config.resource_monitor and self.resource_monitor, ignore_exception=ignore_exception if ignore_exception is not None else self.ignore_exception, ) with indirectory(cwd or os.getcwd()): self.inputs.trait_set(**inputs) self._check_mandatory_inputs() self._check_version_requirements(self.inputs) with rtc(self, cwd=cwd, redirect_x=self._redirect_x) as runtime: # Grab inputs now, as they should not change during execution inputs = self.inputs.get_traitsfree() outputs = None # Run interface runtime = self._pre_run_hook(runtime) runtime = self._run_interface(runtime) runtime = self._post_run_hook(runtime) # Collect outputs outputs = self.aggregate_outputs(runtime) results = InterfaceResult( self.__class__, rtc.runtime, inputs=inputs, outputs=outputs, provenance=None, ) # Add provenance (if required) if 
str2bool(config.get("execution", "write_provenance", "false")): # Provenance will only throw a warning if something went wrong results.provenance = write_provenance(results) self._duecredit_cite() return results def _list_outputs(self): """List the expected outputs""" if self.output_spec: raise NotImplementedError else: return None def aggregate_outputs(self, runtime=None, needed_outputs=None): """Collate expected outputs and apply output traits validation.""" outputs = self._outputs() # Generate an empty output spec object predicted_outputs = self._list_outputs() # Predictions from _list_outputs if not predicted_outputs: return outputs # Precalculate the list of output trait names that should be aggregated aggregate_names = set(predicted_outputs) if needed_outputs is not None: aggregate_names = set(needed_outputs).intersection(aggregate_names) if aggregate_names: # Make sure outputs are compatible _na_outputs = self._check_version_requirements(outputs) na_names = aggregate_names.intersection(_na_outputs) if na_names: # XXX Change to TypeError in Nipype 2.0 raise KeyError( """\ Output trait(s) %s not available in version %s of interface %s.\ """ % (", ".join(na_names), self.version, self.__class__.__name__) ) for key in aggregate_names: # Final aggregation val = predicted_outputs[key] try: setattr(outputs, key, val) except TraitError as error: if "an existing" in getattr(error, "info", "default"): msg = ( "No such file or directory '%s' for output '%s' of a %s interface" % (val, key, self.__class__.__name__) ) raise FileNotFoundError(msg) raise error return outputs @property def version(self): if self._version is None: if str2bool(config.get("execution", "stop_on_unknown_version")): raise ValueError( "Interface %s has no version information" % self.__class__.__name__ ) return self._version def load_inputs_from_json(self, json_file, overwrite=True): """ A convenient way to load pre-set inputs from a JSON file. 
""" with open(json_file) as fhandle: inputs_dict = json.load(fhandle) def_inputs = [] if not overwrite: def_inputs = list(self.inputs.get_traitsfree().keys()) new_inputs = list(set(list(inputs_dict.keys())) - set(def_inputs)) for key in new_inputs: if hasattr(self.inputs, key): setattr(self.inputs, key, inputs_dict[key]) def save_inputs_to_json(self, json_file): """ A convenient way to save current inputs to a JSON file. """ inputs = self.inputs.get_traitsfree() iflogger.debug("saving inputs %s", inputs) with open(json_file, "w") as fhandle: json.dump(inputs, fhandle, indent=4, ensure_ascii=False) def _pre_run_hook(self, runtime): """ Perform any pre-_run_interface() processing Subclasses may override this function to modify ``runtime`` object or interface state MUST return runtime object """ return runtime def _post_run_hook(self, runtime): """ Perform any post-_run_interface() processing Subclasses may override this function to modify ``runtime`` object or interface state MUST return runtime object """ return runtime class SimpleInterface(BaseInterface): """An interface pattern that allows outputs to be set in a dictionary called ``_results`` that is automatically interpreted by ``_list_outputs()`` to find the outputs. When implementing ``_run_interface``, set outputs with:: self._results[out_name] = out_value This can be a way to upgrade a ``Function`` interface to do type checking. Examples -------- >>> from nipype.interfaces.base import ( ... SimpleInterface, BaseInterfaceInputSpec, TraitedSpec) >>> def double(x): ... return 2 * x ... >>> class DoubleInputSpec(BaseInterfaceInputSpec): ... x = traits.Float(mandatory=True) ... >>> class DoubleOutputSpec(TraitedSpec): ... doubled = traits.Float() ... >>> class Double(SimpleInterface): ... input_spec = DoubleInputSpec ... output_spec = DoubleOutputSpec ... ... def _run_interface(self, runtime): ... self._results['doubled'] = double(self.inputs.x) ... 
return runtime >>> dbl = Double() >>> dbl.inputs.x = 2 >>> dbl.run().outputs.doubled 4.0 """ def __init__(self, from_file=None, resource_monitor=None, **inputs): super(SimpleInterface, self).__init__( from_file=from_file, resource_monitor=resource_monitor, **inputs ) self._results = {} def _list_outputs(self): return self._results class CommandLine(BaseInterface): """Implements functionality to interact with command line programs class must be instantiated with a command argument Parameters ---------- command : str define base immutable `command` you wish to run args : str, optional optional arguments passed to base `command` Examples -------- >>> import pprint >>> from nipype.interfaces.base import CommandLine >>> cli = CommandLine(command='ls', environ={'DISPLAY': ':1'}) >>> cli.inputs.args = '-al' >>> cli.cmdline 'ls -al' >>> # Use get_traitsfree() to check all inputs set >>> pprint.pprint(cli.inputs.get_traitsfree()) # doctest: {'args': '-al', 'environ': {'DISPLAY': ':1'}} >>> cli.inputs.get_hashval()[0][0] ('args', '-al') >>> cli.inputs.get_hashval()[1] '11c37f97649cd61627f4afe5136af8c0' """ input_spec = CommandLineInputSpec _cmd_prefix = "" _cmd = None _version = None _terminal_output = "stream" @classmethod def set_default_terminal_output(cls, output_type): """Set the default terminal output for CommandLine Interfaces. This method is used to set default terminal output for CommandLine Interfaces. However, setting this will not update the output type for any existing instances. For these, assign the .terminal_output. """ if output_type in VALID_TERMINAL_OUTPUT: cls._terminal_output = output_type else: raise AttributeError("Invalid terminal output_type: %s" % output_type) def __init__(self, command=None, terminal_output=None, **inputs): super(CommandLine, self).__init__(**inputs) self._environ = None # Set command. 
Input argument takes precedence self._cmd = command or getattr(self, "_cmd", None) # Store dependencies in runtime object self._ldd = str2bool(config.get("execution", "get_linked_libs", "true")) if self._cmd is None: raise Exception("Missing command") if terminal_output is not None: self.terminal_output = terminal_output @property def cmd(self): """sets base command, immutable""" if not self._cmd: raise NotImplementedError( "CommandLineInterface should wrap an executable, but " "none has been set." ) return self._cmd @property def cmdline(self): """`command` plus any arguments (args) validates arguments and generates command line""" self._check_mandatory_inputs() allargs = [self._cmd_prefix + self.cmd] + self._parse_inputs() return " ".join(allargs) @property def terminal_output(self): return self._terminal_output @terminal_output.setter def terminal_output(self, value): if value not in VALID_TERMINAL_OUTPUT: raise RuntimeError( 'Setting invalid value "%s" for terminal_output. Valid values are ' "%s." 
% (value, ", ".join(['"%s"' % v for v in VALID_TERMINAL_OUTPUT])) ) self._terminal_output = value def raise_exception(self, runtime): raise RuntimeError( ( "Command:\n{cmdline}\nStandard output:\n{stdout}\n" "Standard error:\n{stderr}\nReturn code: {returncode}" ).format(**runtime.dictcopy()) ) def _get_environ(self): return getattr(self.inputs, "environ", {}) def version_from_command(self, flag="-v", cmd=None): iflogger.warning( "version_from_command member of CommandLine was " "Deprecated in nipype-1.0.0 and deleted in 1.1.0" ) if cmd is None: cmd = self.cmd.split()[0] env = dict(os.environ) if which(cmd, env=env): out_environ = self._get_environ() env.update(out_environ) proc = sp.Popen( " ".join((cmd, flag)), shell=True, env=canonicalize_env(env), stdout=sp.PIPE, stderr=sp.PIPE, ) o, e = proc.communicate() return o def _run_interface(self, runtime, correct_return_codes=(0,)): """Execute command via subprocess Parameters ---------- runtime : passed by the run function Returns ------- runtime : updated runtime information adds stdout, stderr, merged, cmdline, dependencies, command_path """ out_environ = self._get_environ() # Initialize runtime Bunch runtime.stdout = None runtime.stderr = None runtime.cmdline = self.cmdline runtime.environ.update(out_environ) runtime.success_codes = correct_return_codes # which $cmd executable_name = shlex.split(self._cmd_prefix + self.cmd)[0] cmd_path = which(executable_name, env=runtime.environ) if cmd_path is None: raise IOError( 'No command "%s" found on host %s. Please check that the ' "corresponding package is installed." 
% (executable_name, runtime.hostname) ) runtime.command_path = cmd_path runtime.dependencies = ( get_dependencies(executable_name, runtime.environ) if self._ldd else "" ) runtime = run_command(runtime, output=self.terminal_output) return runtime def _format_arg(self, name, trait_spec, value): """A helper function for _parse_inputs Formats a trait containing argstr metadata """ argstr = trait_spec.argstr iflogger.debug("%s_%s", name, value) if trait_spec.is_trait_type(traits.Bool) and "%" not in argstr: # Boolean options have no format string. Just append options if True. return argstr if value else None # traits.Either turns into traits.TraitCompound and does not have any # inner_traits elif trait_spec.is_trait_type(traits.List) or ( trait_spec.is_trait_type(traits.TraitCompound) and isinstance(value, list) ): # This is a bit simple-minded at present, and should be # construed as the default. If more sophisticated behavior # is needed, it can be accomplished with metadata (e.g. # format string for list member str'ification, specifying # the separator, etc.) # Depending on whether we stick with traitlets, and whether or # not we beef up traitlets.List, we may want to put some # type-checking code here as well sep = trait_spec.sep if trait_spec.sep is not None else " " if argstr.endswith("..."): # repeatable option # --id %d... will expand to # --id 1 --id 2 --id 3 etc.,. argstr = argstr.replace("...", "") return sep.join([argstr % elt for elt in value]) else: return argstr % sep.join(str(elt) for elt in value) else: # Append options using format string. 
return argstr % value def _filename_from_source(self, name, chain=None): if chain is None: chain = [] trait_spec = self.inputs.trait(name) retval = getattr(self.inputs, name) source_ext = None if not isdefined(retval) or "%s" in retval: if not trait_spec.name_source: return retval # Do not generate filename when excluded by other inputs if any( isdefined(getattr(self.inputs, field)) for field in trait_spec.xor or () ): return retval # Do not generate filename when required fields are missing if not all( isdefined(getattr(self.inputs, field)) for field in trait_spec.requires or () ): return retval if isdefined(retval) and "%s" in retval: name_template = retval else: name_template = trait_spec.name_template if not name_template: name_template = "%s_generated" ns = trait_spec.name_source while isinstance(ns, (list, tuple)): if len(ns) > 1: iflogger.warning("Only one name_source per trait is allowed") ns = ns[0] if not isinstance(ns, (str, bytes)): raise ValueError( "name_source of '{}' trait should be an input trait " "name, but a type {} object was found".format(name, type(ns)) ) if isdefined(getattr(self.inputs, ns)): name_source = ns source = getattr(self.inputs, name_source) while isinstance(source, list): source = source[0] # special treatment for files try: _, base, source_ext = split_filename(source) except (AttributeError, TypeError): base = source else: if name in chain: raise NipypeInterfaceError("Mutually pointing name_sources") chain.append(name) base = self._filename_from_source(ns, chain) if isdefined(base): _, _, source_ext = split_filename(base) else: # Do not generate filename when required fields are missing return retval chain = None retval = name_template % base _, _, ext = split_filename(retval) if trait_spec.keep_extension and (ext or source_ext): if (ext is None or not ext) and source_ext: retval = retval + source_ext else: retval = self._overload_extension(retval, name) return retval def _gen_filename(self, name): raise NotImplementedError def 
_overload_extension(self, value, name=None): return value def _list_outputs(self): metadata = dict(name_source=lambda t: t is not None) traits = self.inputs.traits(**metadata) if traits: outputs = self.output_spec().trait_get() for name, trait_spec in list(traits.items()): out_name = name if trait_spec.output_name is not None: out_name = trait_spec.output_name fname = self._filename_from_source(name) if isdefined(fname): outputs[out_name] = os.path.abspath(fname) return outputs def _parse_inputs(self, skip=None): """Parse all inputs using the ``argstr`` format string in the Trait. Any inputs that are assigned (not the default_value) are formatted to be added to the command line. Returns ------- all_args : list A list of all inputs formatted for the command line. """ all_args = [] initial_args = {} final_args = {} metadata = dict(argstr=lambda t: t is not None) for name, spec in sorted(self.inputs.traits(**metadata).items()): if skip and name in skip: continue value = getattr(self.inputs, name) if spec.name_source: value = self._filename_from_source(name) elif spec.genfile: if not isdefined(value) or value is None: value = self._gen_filename(name) if not isdefined(value): continue arg = self._format_arg(name, spec, value) if arg is None: continue pos = spec.position if pos is not None: if int(pos) >= 0: initial_args[pos] = arg else: final_args[pos] = arg else: all_args.append(arg) first_args = [el for _, el in sorted(initial_args.items())] last_args = [el for _, el in sorted(final_args.items())] return first_args + all_args + last_args class StdOutCommandLine(CommandLine): input_spec = StdOutCommandLineInputSpec def _gen_filename(self, name): return self._gen_outfilename() if name == "out_file" else None def _gen_outfilename(self): raise NotImplementedError class MpiCommandLine(CommandLine): """Implements functionality to interact with command line programs that can be run with MPI (i.e. using 'mpiexec'). 
Examples -------- >>> from nipype.interfaces.base import MpiCommandLine >>> mpi_cli = MpiCommandLine(command='my_mpi_prog') >>> mpi_cli.inputs.args = '-v' >>> mpi_cli.cmdline 'my_mpi_prog -v' >>> mpi_cli.inputs.use_mpi = True >>> mpi_cli.inputs.n_procs = 8 >>> mpi_cli.cmdline 'mpiexec -n 8 my_mpi_prog -v' """ input_spec = MpiCommandLineInputSpec @property def cmdline(self): """Adds 'mpiexec' to begining of command""" result = [] if self.inputs.use_mpi: result.append("mpiexec") if self.inputs.n_procs: result.append("-n %d" % self.inputs.n_procs) result.append(super(MpiCommandLine, self).cmdline) return " ".join(result) class SEMLikeCommandLine(CommandLine): """In SEM derived interface all outputs have corresponding inputs. However, some SEM commands create outputs that are not defined in the XML. In those cases one has to create a subclass of the autogenerated one and overload the _list_outputs method. _outputs_from_inputs should still be used but only for the reduced (by excluding those that do not have corresponding inputs list of outputs. 
""" def _list_outputs(self): outputs = self.output_spec().trait_get() return self._outputs_from_inputs(outputs) def _outputs_from_inputs(self, outputs): for name in list(outputs.keys()): corresponding_input = getattr(self.inputs, name) if isdefined(corresponding_input): if isinstance(corresponding_input, bool) and corresponding_input: outputs[name] = os.path.abspath(self._outputs_filenames[name]) else: if isinstance(corresponding_input, list): outputs[name] = [ os.path.abspath(inp) for inp in corresponding_input ] else: outputs[name] = os.path.abspath(corresponding_input) return outputs def _format_arg(self, name, spec, value): if name in list(self._outputs_filenames.keys()): if isinstance(value, bool): if value: value = os.path.abspath(self._outputs_filenames[name]) else: return "" return super(SEMLikeCommandLine, self)._format_arg(name, spec, value) class LibraryBaseInterface(BaseInterface): _pkg = None imports = () def __init__(self, check_import=True, *args, **kwargs): super(LibraryBaseInterface, self).__init__(*args, **kwargs) if check_import: import pkgutil failed_imports = [] for pkg in (self._pkg,) + tuple(self.imports): if pkgutil.find_loader(pkg) is None: failed_imports.append(pkg) if failed_imports: iflogger.warning( "Unable to import %s; %s interface may fail to " "run", failed_imports, self.__class__.__name__, ) @property def version(self): if self._version is None: import importlib try: self._version = importlib.import_module(self._pkg).__version__ except (ImportError, AttributeError): pass return super(LibraryBaseInterface, self).version class PackageInfo(object): _version = None version_cmd = None version_file = None @classmethod def version(klass): if klass._version is None: if klass.version_cmd is not None: try: clout = CommandLine( command=klass.version_cmd, resource_monitor=False, terminal_output="allatonce", ).run() except IOError: return None raw_info = clout.runtime.stdout elif klass.version_file is not None: try: with 
open(klass.version_file, "rt") as fobj: raw_info = fobj.read() except OSError: return None else: return None klass._version = klass.parse_version(raw_info) return klass._version @staticmethod def parse_version(raw_info): raise NotImplementedError nipype-1.7.0/nipype/interfaces/base/specs.py000066400000000000000000000360031413403311400210470ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Base I/O specifications for Nipype interfaces ............................................. Define the API for the I/O of interfaces """ import os from inspect import isclass from copy import deepcopy from warnings import warn from packaging.version import Version from traits.trait_errors import TraitError from traits.trait_handlers import TraitDictObject, TraitListObject from ...utils.filemanip import md5, hash_infile, hash_timestamp from .traits_extension import ( traits, File, Str, Undefined, isdefined, has_metadata, OutputMultiObject, ) from ... import config, __version__ _float_fmt = "{:.10f}".format nipype_version = Version(__version__) class BaseTraitedSpec(traits.HasTraits): """ Provide a few methods necessary to support nipype interface api The inputs attribute of interfaces call certain methods that are not available in traits.HasTraits. These are provided here. new metadata: * usedefault : set this to True if the default value of the trait should be used. Unless this is set, the attributes are set to traits.Undefined new attribute: * get_hashval : returns a tuple containing the state of the trait as a dict and hashvalue corresponding to dict. XXX Reconsider this in the long run, but it seems like the best solution to move forward on the refactoring. """ package_version = nipype_version def __init__(self, **kwargs): """Initialize handlers and inputs""" # NOTE: In python 2.6, object.__init__ no longer accepts input # arguments. 
HasTraits does not define an __init__ and # therefore these args were being ignored. # super(TraitedSpec, self).__init__(*args, **kwargs) super(BaseTraitedSpec, self).__init__(**kwargs) traits.push_exception_handler(reraise_exceptions=True) undefined_traits = {} for trait in self.copyable_trait_names(): if not self.traits()[trait].usedefault: undefined_traits[trait] = Undefined self.trait_set(trait_change_notify=False, **undefined_traits) self._generate_handlers() self.trait_set(**kwargs) def items(self): """Name, trait generator for user modifiable traits""" for name in sorted(self.copyable_trait_names()): yield name, self.traits()[name] def __repr__(self): """Return a well-formatted representation of the traits""" outstr = [] for name, value in sorted(self.trait_get().items()): outstr.append("%s = %s" % (name, value)) return "\n{}\n".format("\n".join(outstr)) def _generate_handlers(self): """Find all traits with the 'xor' metadata and attach an event handler to them. """ has_xor = dict(xor=lambda t: t is not None) xors = self.trait_names(**has_xor) for elem in xors: self.on_trait_change(self._xor_warn, elem) has_deprecation = dict(deprecated=lambda t: t is not None) deprecated = self.trait_names(**has_deprecation) for elem in deprecated: self.on_trait_change(self._deprecated_warn, elem) def _xor_warn(self, obj, name, old, new): """Generates warnings for xor traits""" if isdefined(new): trait_spec = self.traits()[name] # for each xor, set to default_value for trait_name in trait_spec.xor: if trait_name == name: # skip ourself continue if isdefined(getattr(self, trait_name)): self.trait_set( trait_change_notify=False, **{"%s" % name: Undefined} ) msg = ( 'Input "%s" is mutually exclusive with input "%s", ' "which is already set" ) % (name, trait_name) raise IOError(msg) def _deprecated_warn(self, obj, name, old, new): """Checks if a user assigns a value to a deprecated trait""" if isdefined(new): trait_spec = self.traits()[name] msg1 = "Input %s in interface %s is 
deprecated." % ( name, self.__class__.__name__.split("InputSpec")[0], ) msg2 = ( "Will be removed or raise an error as of release %s" % trait_spec.deprecated ) if trait_spec.new_name: if trait_spec.new_name not in self.copyable_trait_names(): raise TraitError( msg1 + " Replacement trait %s not found" % trait_spec.new_name ) msg3 = "It has been replaced by %s." % trait_spec.new_name else: msg3 = "" msg = " ".join((msg1, msg2, msg3)) if Version(str(trait_spec.deprecated)) < self.package_version: raise TraitError(msg) else: if trait_spec.new_name: msg += "Unsetting old value %s; setting new value %s." % ( name, trait_spec.new_name, ) warn(msg) if trait_spec.new_name: self.trait_set( trait_change_notify=False, **{"%s" % name: Undefined, "%s" % trait_spec.new_name: new} ) def trait_get(self, **kwargs): """Returns traited class as a dict Augments the trait get function to return a dictionary without notification handles """ out = super(BaseTraitedSpec, self).trait_get(**kwargs) out = self._clean_container(out, Undefined) return out get = trait_get def get_traitsfree(self, **kwargs): """Returns traited class as a dict Augments the trait get function to return a dictionary without any traits. 
The dictionary does not contain any attributes that were Undefined """ out = super(BaseTraitedSpec, self).trait_get(**kwargs) out = self._clean_container(out, skipundefined=True) return out def _clean_container(self, objekt, undefinedval=None, skipundefined=False): """Convert a traited obejct into a pure python representation.""" if isinstance(objekt, TraitDictObject) or isinstance(objekt, dict): out = {} for key, val in list(objekt.items()): if isdefined(val): out[key] = self._clean_container(val, undefinedval) else: if not skipundefined: out[key] = undefinedval elif ( isinstance(objekt, TraitListObject) or isinstance(objekt, list) or isinstance(objekt, tuple) ): out = [] for val in objekt: if isdefined(val): out.append(self._clean_container(val, undefinedval)) else: if not skipundefined: out.append(undefinedval) else: out.append(None) if isinstance(objekt, tuple): out = tuple(out) else: out = None if isdefined(objekt): out = objekt else: if not skipundefined: out = undefinedval return out def has_metadata(self, name, metadata, value=None, recursive=True): """ Return has_metadata for the requested trait name in this interface """ return has_metadata(self.trait(name).trait_type, metadata, value, recursive) def get_hashval(self, hash_method=None): """Return a dictionary of our items with hashes for each file. Searches through dictionary items and if an item is a file, it calculates the md5 hash of the file contents and stores the file name and hash value as the new key value. However, the overall bunch hash is calculated only on the hash value of a file. The path and name of the file are not used in the overall hash calculation. Returns ------- list_withhash : dict Copy of our dictionary with the new file hashes included with each file. 
hashvalue : str The md5 hash value of the traited spec """ list_withhash = [] list_nofilename = [] for name, val in sorted(self.trait_get().items()): if not isdefined(val) or self.has_metadata(name, "nohash", True): # skip undefined traits and traits with nohash=True continue hash_files = not self.has_metadata( name, "hash_files", False ) and not self.has_metadata(name, "name_source") list_nofilename.append( ( name, self._get_sorteddict( val, hash_method=hash_method, hash_files=hash_files ), ) ) list_withhash.append( ( name, self._get_sorteddict( val, True, hash_method=hash_method, hash_files=hash_files ), ) ) return list_withhash, md5(str(list_nofilename).encode()).hexdigest() def _get_sorteddict( self, objekt, dictwithhash=False, hash_method=None, hash_files=True ): if isinstance(objekt, dict): out = [] for key, val in sorted(objekt.items()): if isdefined(val): out.append( ( key, self._get_sorteddict( val, dictwithhash, hash_method=hash_method, hash_files=hash_files, ), ) ) elif isinstance(objekt, (list, tuple)): out = [] for val in objekt: if isdefined(val): out.append( self._get_sorteddict( val, dictwithhash, hash_method=hash_method, hash_files=hash_files, ) ) if isinstance(objekt, tuple): out = tuple(out) else: out = None if isdefined(objekt): if ( hash_files and isinstance(objekt, (str, bytes)) and os.path.isfile(objekt) ): if hash_method is None: hash_method = config.get("execution", "hash_method") if hash_method.lower() == "timestamp": hash = hash_timestamp(objekt) elif hash_method.lower() == "content": hash = hash_infile(objekt) else: raise Exception("Unknown hash method: %s" % hash_method) if dictwithhash: out = (objekt, hash) else: out = hash elif isinstance(objekt, float): out = _float_fmt(objekt) else: out = objekt return out @property def __all__(self): return self.copyable_trait_names() def __getstate__(self): """ Override __getstate__ so that OutputMultiObjects are correctly pickled. >>> class OutputSpec(TraitedSpec): ... 
out = OutputMultiObject(traits.List(traits.Int)) >>> spec = OutputSpec() >>> spec.out = [[4]] >>> spec.out [4] >>> spec.__getstate__()['out'] [[4]] >>> spec.__setstate__(spec.__getstate__()) >>> spec.out [4] """ state = super(BaseTraitedSpec, self).__getstate__() for key in self.__all__: _trait_spec = self.trait(key) if _trait_spec.is_trait_type(OutputMultiObject): state[key] = _trait_spec.handler.get_value(self, key) return state class TraitedSpec(BaseTraitedSpec): """Create a subclass with strict traits. This is used in 90% of the cases. """ _ = traits.Disallow class BaseInterfaceInputSpec(TraitedSpec): pass class DynamicTraitedSpec(BaseTraitedSpec): """A subclass to handle dynamic traits This class is a workaround for add_traits and clone_traits not functioning well together. """ def __deepcopy__(self, memo): """ Replace the ``__deepcopy__`` member with a traits-friendly implementation. A bug in ``__deepcopy__`` for ``HasTraits`` results in weird cloning behaviors. """ id_self = id(self) if id_self in memo: return memo[id_self] dup_dict = deepcopy(self.trait_get(), memo) # access all keys for key in self.copyable_trait_names(): if key in self.__dict__.keys(): _ = getattr(self, key) # clone once dup = self.clone_traits(memo=memo) for key in self.copyable_trait_names(): try: _ = getattr(dup, key) except: pass # clone twice dup = self.clone_traits(memo=memo) dup.trait_set(**dup_dict) return dup class CommandLineInputSpec(BaseInterfaceInputSpec): args = Str(argstr="%s", desc="Additional parameters to the command") environ = traits.DictStrStr( desc="Environment variables", usedefault=True, nohash=True ) class StdOutCommandLineInputSpec(CommandLineInputSpec): out_file = File(argstr="> %s", position=-1, genfile=True) class MpiCommandLineInputSpec(CommandLineInputSpec): use_mpi = traits.Bool( False, desc="Whether or not to run the command with mpiexec", usedefault=True ) n_procs = traits.Int( desc="Num processors to specify to mpiexec. 
Do not " "specify if this is managed externally (e.g. through " "SGE)" ) def get_filecopy_info(cls): """Provides information about file inputs to copy or link to cwd. Necessary for pipeline operation """ if cls.input_spec is None: return None # normalize_filenames is not a classmethod, hence check first if not isclass(cls) and hasattr(cls, "normalize_filenames"): cls.normalize_filenames() info = [] inputs = cls.input_spec() if isclass(cls) else cls.inputs metadata = dict(copyfile=lambda t: t is not None) for name, spec in sorted(inputs.traits(**metadata).items()): info.append(dict(key=name, copy=spec.copyfile)) return info nipype-1.7.0/nipype/interfaces/base/support.py000066400000000000000000000376631413403311400214630ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Miscellaneous tools to support Interface functionality ...................................................... """ import os from contextlib import AbstractContextManager from copy import deepcopy from textwrap import wrap import re from datetime import datetime as dt from dateutil.parser import parse as parseutc import platform from ... 
import logging, config from ...utils.misc import is_container, rgetcwd from ...utils.filemanip import md5, hash_infile iflogger = logging.getLogger("nipype.interface") HELP_LINEWIDTH = 70 class RuntimeContext(AbstractContextManager): """A context manager to run NiPype interfaces.""" __slots__ = ("_runtime", "_resmon", "_ignore_exc") def __init__(self, resource_monitor=False, ignore_exception=False): """Initialize the context manager object.""" self._ignore_exc = ignore_exception _proc_pid = os.getpid() if resource_monitor: from ...utils.profiler import ResourceMonitor else: from ...utils.profiler import ResourceMonitorMock as ResourceMonitor self._resmon = ResourceMonitor( _proc_pid, freq=float(config.get("execution", "resource_monitor_frequency", 1)), ) def __call__(self, interface, cwd=None, redirect_x=False): """Generate a new runtime object.""" # Tear-up: get current and prev directories _syscwd = rgetcwd(error=False) # Recover when wd does not exist if cwd is None: cwd = _syscwd self._runtime = Bunch( cwd=str(cwd), duration=None, endTime=None, environ=deepcopy(dict(os.environ)), hostname=platform.node(), interface=interface.__class__.__name__, platform=platform.platform(), prevcwd=str(_syscwd), redirect_x=redirect_x, resmon=self._resmon.fname or "off", returncode=None, startTime=None, version=interface.version, ) return self def __enter__(self): """Tear-up the execution of an interface.""" if self._runtime.redirect_x: self._runtime.environ["DISPLAY"] = config.get_display() self._runtime.startTime = dt.isoformat(dt.utcnow()) self._resmon.start() # TODO: Perhaps clean-up path and ensure it exists? 
os.chdir(self._runtime.cwd) return self._runtime def __exit__(self, exc_type, exc_value, exc_tb): """Tear-down interface execution.""" self._runtime.endTime = dt.isoformat(dt.utcnow()) timediff = parseutc(self._runtime.endTime) - parseutc(self._runtime.startTime) self._runtime.duration = ( timediff.days * 86400 + timediff.seconds + timediff.microseconds / 1e6 ) # Collect monitored data for k, v in self._resmon.stop(): setattr(self._runtime, k, v) os.chdir(self._runtime.prevcwd) if exc_type is not None or exc_value is not None or exc_tb is not None: import traceback # Retrieve the maximum info fast self._runtime.traceback = "".join( traceback.format_exception(exc_type, exc_value, exc_tb) ) # Gather up the exception arguments and append nipype info. exc_args = exc_value.args if getattr(exc_value, "args") else tuple() exc_args += ( f"An exception of type {exc_type.__name__} occurred while " f"running interface {self._runtime.interface}.", ) self._runtime.traceback_args = ("\n".join([f"{arg}" for arg in exc_args]),) if self._ignore_exc: return True if hasattr(self._runtime, "cmdline"): retcode = self._runtime.returncode if retcode not in self._runtime.success_codes: self._runtime.traceback = ( f"RuntimeError: subprocess exited with code {retcode}." ) @property def runtime(self): return self._runtime class NipypeInterfaceError(Exception): """Custom error for interfaces""" def __init__(self, value): self.value = value def __str__(self): return "{}".format(self.value) class Bunch(object): """ Dictionary-like class that provides attribute-style access to its items. A ``Bunch`` is a simple container that stores its items as class attributes [1]_. Internally all items are stored in a dictionary and the class exposes several of the dictionary methods. 
Examples -------- >>> from nipype.interfaces.base import Bunch >>> inputs = Bunch(infile='subj.nii', fwhm=6.0, register_to_mean=True) >>> inputs Bunch(fwhm=6.0, infile='subj.nii', register_to_mean=True) >>> inputs.register_to_mean = False >>> inputs Bunch(fwhm=6.0, infile='subj.nii', register_to_mean=False) References ---------- .. [1] A. Martelli, D. Hudgeon, "Collecting a Bunch of Named Items", Python Cookbook, 2nd Ed, Chapter 4.18, 2005. """ def __init__(self, *args, **kwargs): self.__dict__.update(*args, **kwargs) def update(self, *args, **kwargs): """update existing attribute, or create new attribute Note: update is very much like HasTraits.set""" self.__dict__.update(*args, **kwargs) def items(self): """iterates over bunch attributes as key, value pairs""" return list(self.__dict__.items()) def iteritems(self): """iterates over bunch attributes as key, value pairs""" iflogger.warning("iteritems is deprecated, use items instead") return list(self.items()) def get(self, *args): """Support dictionary get() functionality""" return self.__dict__.get(*args) def set(self, **kwargs): """Support dictionary get() functionality""" return self.__dict__.update(**kwargs) def dictcopy(self): """returns a deep copy of existing Bunch as a dictionary""" return deepcopy(self.__dict__) def __repr__(self): """representation of the sorted Bunch as a string Currently, this string representation of the `inputs` Bunch of interfaces is hashed to determine if the process' dirty-bit needs setting or not. Till that mechanism changes, only alter this after careful consideration. 
""" outstr = ["Bunch("] first = True for k, v in sorted(self.items()): if not first: outstr.append(", ") if isinstance(v, dict): pairs = [] for key, value in sorted(v.items()): pairs.append("'%s': %s" % (key, value)) v = "{" + ", ".join(pairs) + "}" outstr.append("%s=%s" % (k, v)) else: outstr.append("%s=%r" % (k, v)) first = False outstr.append(")") return "".join(outstr) def _get_bunch_hash(self): """Return a dictionary of our items with hashes for each file. Searches through dictionary items and if an item is a file, it calculates the md5 hash of the file contents and stores the file name and hash value as the new key value. However, the overall bunch hash is calculated only on the hash value of a file. The path and name of the file are not used in the overall hash calculation. Returns ------- dict_withhash : dict Copy of our dictionary with the new file hashes included with each file. hashvalue : str The md5 hash value of the `dict_withhash` """ infile_list = [] for key, val in list(self.items()): if is_container(val): # XXX - SG this probably doesn't catch numpy arrays # containing embedded file names either. if isinstance(val, dict): # XXX - SG should traverse dicts, but ignoring for now item = None else: if len(val) == 0: raise AttributeError("%s attribute is empty" % key) item = val[0] else: item = val try: if isinstance(item, str) and os.path.isfile(item): infile_list.append(key) except TypeError: # `item` is not a file or string. continue dict_withhash = self.dictcopy() dict_nofilename = self.dictcopy() for item in infile_list: dict_withhash[item] = _hash_bunch_dict(dict_withhash, item) dict_nofilename[item] = [val[1] for val in dict_withhash[item]] # Sort the items of the dictionary, before hashing the string # representation so we get a predictable order of the # dictionary. 
sorted_dict = str(sorted(dict_nofilename.items())) return dict_withhash, md5(sorted_dict.encode()).hexdigest() def _repr_pretty_(self, p, cycle): """Support for the pretty module from ipython.externals""" if cycle: p.text("Bunch(...)") else: p.begin_group(6, "Bunch(") first = True for k, v in sorted(self.items()): if not first: p.text(",") p.breakable() p.text(k + "=") p.pretty(v) first = False p.end_group(6, ")") def _hash_bunch_dict(adict, key): """Inject file hashes into adict[key]""" stuff = adict[key] if not is_container(stuff): stuff = [stuff] return [(afile, hash_infile(afile)) for afile in stuff] class InterfaceResult(object): """Object that contains the results of running a particular Interface. Attributes ---------- version : version of this Interface result object (a readonly property) interface : class type A copy of the `Interface` class that was run to generate this result. inputs : a traits free representation of the inputs outputs : Bunch An `Interface` specific Bunch that contains all possible files that are generated by the interface. The `outputs` are used as the `inputs` to another node when interfaces are used in the pipeline. runtime : Bunch Contains attributes that describe the runtime environment when the `Interface` was run. Contains the attributes: * cmdline : The command line string that was executed * cwd : The directory the ``cmdline`` was executed in. * stdout : The output of running the ``cmdline``. * stderr : Any error messages output from running ``cmdline``. * returncode : The code returned from running the ``cmdline``. 
""" def __init__(self, interface, runtime, inputs=None, outputs=None, provenance=None): self._version = 2.0 self.interface = interface self.runtime = runtime self.inputs = inputs self.outputs = outputs self.provenance = provenance @property def version(self): return self._version def format_help(cls): """ Prints help text of a Nipype interface >>> from nipype.interfaces.afni import GCOR >>> GCOR.help() # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE Wraps the executable command ``@compute_gcor``. Computes the average correlation between every voxel and ever other voxel, over any give mask. For complete details, ... """ from ...utils.misc import trim docstring = [] cmd = getattr(cls, "_cmd", None) if cmd: docstring += ["Wraps the executable command ``%s``." % cmd, ""] if cls.__doc__: docstring += trim(cls.__doc__).split("\n") + [""] allhelp = "\n".join( docstring + _inputs_help(cls) + [""] + _outputs_help(cls) + [""] + _refs_help(cls) ) return allhelp.expandtabs(8) def _inputs_help(cls): r""" Prints description for input parameters >>> from nipype.interfaces.afni import GCOR >>> _inputs_help(GCOR) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE ['Inputs::', '', '\t[Mandatory]', '\tin_file: (a pathlike object or string... 
""" helpstr = ["Inputs::"] mandatory_keys = [] optional_items = [] if cls.input_spec: inputs = cls.input_spec() mandatory_items = list(inputs.traits(mandatory=True).items()) if mandatory_items: helpstr += ["", "\t[Mandatory]"] for name, spec in mandatory_items: helpstr += get_trait_desc(inputs, name, spec) mandatory_keys = {item[0] for item in mandatory_items} optional_items = [ "\n".join(get_trait_desc(inputs, name, val)) for name, val in inputs.traits(transient=None).items() if name not in mandatory_keys ] if optional_items: helpstr += ["", "\t[Optional]"] + optional_items if not mandatory_keys and not optional_items: helpstr += ["", "\tNone"] return helpstr def _outputs_help(cls): r""" Prints description for output parameters >>> from nipype.interfaces.afni import GCOR >>> _outputs_help(GCOR) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE ['Outputs::', '', '\tout: (a float)\n\t\tglobal correlation value'] """ helpstr = ["Outputs::", "", "\tNone"] if cls.output_spec: outputs = cls.output_spec() outhelpstr = [ "\n".join(get_trait_desc(outputs, name, spec)) for name, spec in outputs.traits(transient=None).items() ] if outhelpstr: helpstr = helpstr[:-1] + outhelpstr return helpstr def _refs_help(cls): """Prints interface references.""" references = getattr(cls, "_references", None) if not references: return [] helpstr = ["References:", "-----------"] for r in references: helpstr += ["{}".format(r["entry"])] return helpstr def get_trait_desc(inputs, name, spec): """Parses a HasTraits object into a nipype documentation string""" desc = spec.desc xor = spec.xor requires = spec.requires argstr = spec.argstr manhelpstr = ["\t%s" % name] type_info = spec.full_info(inputs, name, None) default = "" if spec.usedefault: default = ", nipype default value: %s" % str(spec.default_value()[1]) line = "(%s%s)" % (type_info, default) manhelpstr = wrap( line, HELP_LINEWIDTH, initial_indent=manhelpstr[0] + ": ", subsequent_indent="\t\t ", ) if desc: for line in desc.split("\n"): line = 
re.sub(r"\s+", " ", line) manhelpstr += wrap( line, HELP_LINEWIDTH, initial_indent="\t\t", subsequent_indent="\t\t" ) if argstr: pos = spec.position if pos is not None: manhelpstr += wrap( "argument: ``%s``, position: %s" % (argstr, pos), HELP_LINEWIDTH, initial_indent="\t\t", subsequent_indent="\t\t", ) else: manhelpstr += wrap( "argument: ``%s``" % argstr, HELP_LINEWIDTH, initial_indent="\t\t", subsequent_indent="\t\t", ) if xor: line = "%s" % ", ".join(xor) manhelpstr += wrap( line, HELP_LINEWIDTH, initial_indent="\t\tmutually_exclusive: ", subsequent_indent="\t\t ", ) if requires: others = [field for field in requires if field != name] line = "%s" % ", ".join(others) manhelpstr += wrap( line, HELP_LINEWIDTH, initial_indent="\t\trequires: ", subsequent_indent="\t\t ", ) return manhelpstr nipype-1.7.0/nipype/interfaces/base/tests/000077500000000000000000000000001413403311400205205ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/base/tests/__init__.py000066400000000000000000000000301413403311400226220ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/base/tests/test_auto_BaseInterface.py000066400000000000000000000005451413403311400256600ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..core import BaseInterface def test_BaseInterface_inputs(): input_map = dict() inputs = BaseInterface.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/base/tests/test_auto_CommandLine.py000066400000000000000000000007621413403311400253540ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..core import CommandLine def test_CommandLine_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), ) inputs = CommandLine.input_spec() for key, metadata in list(input_map.items()): for metakey, 
value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/base/tests/test_auto_LibraryBaseInterface.py000066400000000000000000000005721413403311400272050ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..core import LibraryBaseInterface def test_LibraryBaseInterface_inputs(): input_map = dict() inputs = LibraryBaseInterface.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/base/tests/test_auto_MpiCommandLine.py000066400000000000000000000011211413403311400260100ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..core import MpiCommandLine def test_MpiCommandLine_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), n_procs=dict(), use_mpi=dict( usedefault=True, ), ) inputs = MpiCommandLine.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/base/tests/test_auto_SEMLikeCommandLine.py000066400000000000000000000010071413403311400265170ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..core import SEMLikeCommandLine def test_SEMLikeCommandLine_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), ) inputs = SEMLikeCommandLine.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/base/tests/test_auto_SimpleInterface.py000066400000000000000000000005531413403311400262360ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..core import SimpleInterface def 
test_SimpleInterface_inputs(): input_map = dict() inputs = SimpleInterface.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/base/tests/test_auto_StdOutCommandLine.py000066400000000000000000000012211413403311400265060ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..core import StdOutCommandLine def test_StdOutCommandLine_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), ) inputs = StdOutCommandLine.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/base/tests/test_core.py000066400000000000000000000441531413403311400230700ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import simplejson as json import logging import pytest from unittest import mock from .... import config from ....testing import example_data from ... 
import base as nib from ..support import _inputs_help def check_dict(ref_dict, tst_dict): """Compare dictionaries of inputs and and those loaded from json files""" def to_list(x): if isinstance(x, tuple): x = list(x) if isinstance(x, list): for i, xel in enumerate(x): x[i] = to_list(xel) return x failed_dict = {} for key, value in list(ref_dict.items()): newval = to_list(tst_dict[key]) if newval != value: failed_dict[key] = (value, newval) return failed_dict def test_Interface(): assert nib.Interface.input_spec is None assert nib.Interface.output_spec is None with pytest.raises(NotImplementedError): nib.Interface() class DerivedInterface(nib.Interface): def __init__(self): pass nif = DerivedInterface() with pytest.raises(NotImplementedError): nif.run() with pytest.raises(NotImplementedError): nif.aggregate_outputs() with pytest.raises(NotImplementedError): nif._list_outputs() def test_BaseInterface(): config.set("monitoring", "enable", "0") assert nib.BaseInterface.help() is None class InputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc="a random int") goo = nib.traits.Int(desc="a random int", mandatory=True) moo = nib.traits.Int(desc="a random int", mandatory=False) hoo = nib.traits.Int(desc="a random int", usedefault=True) zoo = nib.File(desc="a file", copyfile=False) woo = nib.File(desc="a file", copyfile=True) class OutputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc="a random int") class DerivedInterface(nib.BaseInterface): input_spec = InputSpec resource_monitor = False assert DerivedInterface.help() is None assert "moo" in "".join(_inputs_help(DerivedInterface)) assert DerivedInterface()._outputs() is None assert DerivedInterface().inputs.foo == nib.Undefined with pytest.raises(ValueError): DerivedInterface()._check_mandatory_inputs() assert DerivedInterface(goo=1)._check_mandatory_inputs() is None with pytest.raises(ValueError): DerivedInterface().run() with pytest.raises(NotImplementedError): DerivedInterface(goo=1).run() class 
DerivedInterface2(DerivedInterface): output_spec = OutputSpec def _run_interface(self, runtime): return runtime assert DerivedInterface2.help() is None assert DerivedInterface2()._outputs().foo == nib.Undefined with pytest.raises(NotImplementedError): DerivedInterface2(goo=1).run() default_inpu_spec = nib.BaseInterface.input_spec nib.BaseInterface.input_spec = None with pytest.raises(Exception): nib.BaseInterface() nib.BaseInterface.input_spec = default_inpu_spec def test_BaseInterface_load_save_inputs(tmpdir): tmp_json = tmpdir.join("settings.json").strpath class InputSpec(nib.TraitedSpec): input1 = nib.traits.Int() input2 = nib.traits.Float() input3 = nib.traits.Bool() input4 = nib.traits.Str() class DerivedInterface(nib.BaseInterface): input_spec = InputSpec def __init__(self, **inputs): super(DerivedInterface, self).__init__(**inputs) inputs_dict = {"input1": 12, "input3": True, "input4": "some string"} bif = DerivedInterface(**inputs_dict) bif.save_inputs_to_json(tmp_json) bif2 = DerivedInterface() bif2.load_inputs_from_json(tmp_json) assert bif2.inputs.get_traitsfree() == inputs_dict bif3 = DerivedInterface(from_file=tmp_json) assert bif3.inputs.get_traitsfree() == inputs_dict inputs_dict2 = inputs_dict.copy() inputs_dict2.update({"input4": "some other string"}) bif4 = DerivedInterface(from_file=tmp_json, input4=inputs_dict2["input4"]) assert bif4.inputs.get_traitsfree() == inputs_dict2 bif5 = DerivedInterface(input4=inputs_dict2["input4"]) bif5.load_inputs_from_json(tmp_json, overwrite=False) assert bif5.inputs.get_traitsfree() == inputs_dict2 bif6 = DerivedInterface(input4=inputs_dict2["input4"]) bif6.load_inputs_from_json(tmp_json) assert bif6.inputs.get_traitsfree() == inputs_dict # test get hashval in a complex interface from nipype.interfaces.ants import Registration settings = example_data(example_data("smri_ants_registration_settings.json")) with open(settings) as setf: data_dict = json.load(setf) tsthash = Registration() 
tsthash.load_inputs_from_json(settings) assert {} == check_dict(data_dict, tsthash.inputs.get_traitsfree()) tsthash2 = Registration(from_file=settings) assert {} == check_dict(data_dict, tsthash2.inputs.get_traitsfree()) _, hashvalue = tsthash.inputs.get_hashval(hash_method="timestamp") assert hashvalue == "e35bf07fea8049cc02de9235f85e8903" class MinVerInputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc="a random int", min_ver="0.9") class MaxVerInputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc="a random int", max_ver="0.7") def test_input_version_1(): class DerivedInterface1(nib.BaseInterface): input_spec = MinVerInputSpec obj = DerivedInterface1() obj._check_version_requirements(obj.inputs) config.set("execution", "stop_on_unknown_version", True) with pytest.raises(ValueError) as excinfo: obj._check_version_requirements(obj.inputs) assert "no version information" in str(excinfo.value) config.set_default_config() def test_input_version_2(): class DerivedInterface1(nib.BaseInterface): input_spec = MinVerInputSpec _version = "0.8" obj = DerivedInterface1() obj.inputs.foo = 1 with pytest.raises(Exception) as excinfo: obj._check_version_requirements(obj.inputs) assert "version 0.8 < required 0.9" in str(excinfo.value) def test_input_version_3(): class DerivedInterface1(nib.BaseInterface): input_spec = MinVerInputSpec _version = "0.10" obj = DerivedInterface1() obj._check_version_requirements(obj.inputs) def test_input_version_4(): class DerivedInterface1(nib.BaseInterface): input_spec = MinVerInputSpec _version = "0.9" obj = DerivedInterface1() obj.inputs.foo = 1 obj._check_version_requirements(obj.inputs) def test_input_version_5(): class DerivedInterface2(nib.BaseInterface): input_spec = MaxVerInputSpec _version = "0.8" obj = DerivedInterface2() obj.inputs.foo = 1 with pytest.raises(Exception) as excinfo: obj._check_version_requirements(obj.inputs) assert "version 0.8 > required 0.7" in str(excinfo.value) def test_input_version_6(): class 
DerivedInterface1(nib.BaseInterface): input_spec = MaxVerInputSpec _version = "0.7" obj = DerivedInterface1() obj.inputs.foo = 1 obj._check_version_requirements(obj.inputs) def test_input_version_missing(caplog): class DerivedInterface(nib.BaseInterface): class input_spec(nib.TraitedSpec): foo = nib.traits.Int(min_ver="0.9") bar = nib.traits.Int(max_ver="0.9") _version = "misparsed-garbage" obj = DerivedInterface() obj.inputs.foo = 1 obj.inputs.bar = 1 with caplog.at_level(logging.WARNING, logger="nipype.interface"): obj._check_version_requirements(obj.inputs) assert len(caplog.records) == 2 def test_input_version_missing_error(caplog): from nipype import config class DerivedInterface(nib.BaseInterface): class input_spec(nib.TraitedSpec): foo = nib.traits.Int(min_ver="0.9") bar = nib.traits.Int(max_ver="0.9") _version = "misparsed-garbage" obj1 = DerivedInterface(foo=1) obj2 = DerivedInterface(bar=1) with caplog.at_level(logging.WARNING, logger="nipype.interface"): with mock.patch.object(config, "getboolean", return_value=True): with pytest.raises(ValueError): obj1._check_version_requirements(obj1.inputs) with pytest.raises(ValueError): obj2._check_version_requirements(obj2.inputs) assert len(caplog.records) == 2 def test_unavailable_input(): class WithInput(nib.BaseInterface): class input_spec(nib.TraitedSpec): foo = nib.traits.Int(3, usedefault=True, max_ver="0.5") _version = "0.4" def _run_interface(self, runtime): return runtime class WithoutInput(WithInput): _version = "0.6" has = WithInput() hasnt = WithoutInput() trying_anyway = WithoutInput(foo=3) assert has.inputs.foo == 3 assert not nib.isdefined(hasnt.inputs.foo) assert trying_anyway.inputs.foo == 3 has.run() hasnt.run() with pytest.raises(Exception): trying_anyway.run() # Still settable has.inputs.foo = 4 hasnt.inputs.foo = 4 trying_anyway.inputs.foo = 4 assert has.inputs.foo == 4 assert hasnt.inputs.foo == 4 assert trying_anyway.inputs.foo == 4 has.run() with pytest.raises(Exception): hasnt.run() with 
pytest.raises(Exception): trying_anyway.run() def test_output_version(): class InputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc="a random int", min_ver="0.9") class DerivedInterface1(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec _version = "0.10" resource_monitor = False obj = DerivedInterface1() assert obj._check_version_requirements(obj._outputs()) == [] class InputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc="a random int", min_ver="0.11") class DerivedInterface1(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec _version = "0.10" resource_monitor = False obj = DerivedInterface1() assert obj._check_version_requirements(obj._outputs()) == ["foo"] class InputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc="a random int", min_ver="0.11") class DerivedInterface1(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec _version = "0.10" resource_monitor = False def _run_interface(self, runtime): return runtime def _list_outputs(self): return {"foo": 1} obj = DerivedInterface1() with pytest.raises(KeyError): obj.run() def test_Commandline(): with pytest.raises(Exception): nib.CommandLine() ci = nib.CommandLine(command="which") assert ci.cmd == "which" assert ci.inputs.args == nib.Undefined ci2 = nib.CommandLine(command="which", args="ls") assert ci2.cmdline == "which ls" ci3 = nib.CommandLine(command="echo") ci3.resource_monitor = False ci3.inputs.environ = {"MYENV": "foo"} res = ci3.run() assert res.runtime.environ["MYENV"] == "foo" assert res.outputs is None class CommandLineInputSpec1(nib.CommandLineInputSpec): foo = nib.Str(argstr="%s", desc="a str") goo = nib.traits.Bool(argstr="-g", desc="a bool", position=0) hoo = nib.traits.List(argstr="-l %s", desc="a list") moo = 
nib.traits.List(argstr="-i %d...", desc="a repeated list", position=-1) noo = nib.traits.Int(argstr="-x %d", desc="an int") roo = nib.traits.Str(desc="not on command line") soo = nib.traits.Bool(argstr="-soo") nib.CommandLine.input_spec = CommandLineInputSpec1 ci4 = nib.CommandLine(command="cmd") ci4.inputs.foo = "foo" ci4.inputs.goo = True ci4.inputs.hoo = ["a", "b"] ci4.inputs.moo = [1, 2, 3] ci4.inputs.noo = 0 ci4.inputs.roo = "hello" ci4.inputs.soo = False cmd = ci4._parse_inputs() assert cmd[0] == "-g" assert cmd[-1] == "-i 1 -i 2 -i 3" assert "hello" not in " ".join(cmd) assert "-soo" not in " ".join(cmd) ci4.inputs.soo = True cmd = ci4._parse_inputs() assert "-soo" in " ".join(cmd) class CommandLineInputSpec2(nib.CommandLineInputSpec): foo = nib.File(argstr="%s", desc="a str", genfile=True) nib.CommandLine.input_spec = CommandLineInputSpec2 ci5 = nib.CommandLine(command="cmd") with pytest.raises(NotImplementedError): ci5._parse_inputs() class DerivedClass(nib.CommandLine): input_spec = CommandLineInputSpec2 def _gen_filename(self, name): return "filename" ci6 = DerivedClass(command="cmd") assert ci6._parse_inputs()[0] == "filename" nib.CommandLine.input_spec = nib.CommandLineInputSpec def test_Commandline_environ(monkeypatch, tmpdir): from nipype import config config.set_default_config() tmpdir.chdir() monkeypatch.setitem(os.environ, "DISPLAY", ":1") # Test environment ci3 = nib.CommandLine(command="echo") res = ci3.run() assert res.runtime.environ["DISPLAY"] == ":1" # Test display_variable option monkeypatch.delitem(os.environ, "DISPLAY", raising=False) config.set("execution", "display_variable", ":3") res = ci3.run() assert "DISPLAY" not in ci3.inputs.environ assert "DISPLAY" not in res.runtime.environ # If the interface has _redirect_x then yes, it should be set ci3._redirect_x = True res = ci3.run() assert res.runtime.environ["DISPLAY"] == ":3" # Test overwrite monkeypatch.setitem(os.environ, "DISPLAY", ":1") ci3.inputs.environ = {"DISPLAY": ":2"} res = 
ci3.run() assert res.runtime.environ["DISPLAY"] == ":2" def test_CommandLine_output(tmpdir): # Create one file tmpdir.chdir() file = tmpdir.join("foo.txt") file.write("123456\n") name = os.path.basename(file.strpath) ci = nib.CommandLine(command="ls -l") ci.terminal_output = "allatonce" res = ci.run() assert res.runtime.merged == "" assert name in res.runtime.stdout # Check stdout is written ci = nib.CommandLine(command="ls -l") ci.terminal_output = "file_stdout" res = ci.run() assert os.path.isfile("stdout.nipype") assert name in res.runtime.stdout tmpdir.join("stdout.nipype").remove(ignore_errors=True) # Check stderr is written ci = nib.CommandLine(command="ls -l") ci.terminal_output = "file_stderr" res = ci.run() assert os.path.isfile("stderr.nipype") tmpdir.join("stderr.nipype").remove(ignore_errors=True) # Check outputs are thrown away ci = nib.CommandLine(command="ls -l") ci.terminal_output = "none" res = ci.run() assert ( res.runtime.stdout == "" and res.runtime.stderr == "" and res.runtime.merged == "" ) # Check that new interfaces are set to default 'stream' ci = nib.CommandLine(command="ls -l") res = ci.run() assert ci.terminal_output == "stream" assert name in res.runtime.stdout and res.runtime.stderr == "" # Check only one file is generated ci = nib.CommandLine(command="ls -l") ci.terminal_output = "file" res = ci.run() assert os.path.isfile("output.nipype") assert ( name in res.runtime.merged and res.runtime.stdout == "" and res.runtime.stderr == "" ) tmpdir.join("output.nipype").remove(ignore_errors=True) # Check split files are generated ci = nib.CommandLine(command="ls -l") ci.terminal_output = "file_split" res = ci.run() assert os.path.isfile("stdout.nipype") assert os.path.isfile("stderr.nipype") assert name in res.runtime.stdout def test_global_CommandLine_output(tmpdir): """Ensures CommandLine.set_default_terminal_output works""" from nipype.interfaces.fsl import BET ci = nib.CommandLine(command="ls -l") assert ci.terminal_output == "stream" # 
default case ci = BET() assert ci.terminal_output == "stream" # default case with mock.patch.object(nib.CommandLine, "_terminal_output"): nib.CommandLine.set_default_terminal_output("allatonce") ci = nib.CommandLine(command="ls -l") assert ci.terminal_output == "allatonce" nib.CommandLine.set_default_terminal_output("file") ci = nib.CommandLine(command="ls -l") assert ci.terminal_output == "file" # Check default affects derived interfaces ci = BET() assert ci.terminal_output == "file" def test_CommandLine_prefix(tmpdir): tmpdir.chdir() oop = "out/of/path" os.makedirs(oop) script_name = "test_script.sh" script_path = os.path.join(oop, script_name) with open(script_path, "w") as script_f: script_f.write("#!/usr/bin/env bash\necho Success!") os.chmod(script_path, 0o755) ci = nib.CommandLine(command=script_name) with pytest.raises(IOError): ci.run() class OOPCLI(nib.CommandLine): _cmd_prefix = oop + "/" ci = OOPCLI(command=script_name) ci.run() class OOPShell(nib.CommandLine): _cmd_prefix = "bash {}/".format(oop) ci = OOPShell(command=script_name) ci.run() class OOPBadShell(nib.CommandLine): _cmd_prefix = "shell_dne {}/".format(oop) ci = OOPBadShell(command=script_name) with pytest.raises(IOError): ci.run() def test_runtime_checks(): class TestInterface(nib.BaseInterface): class input_spec(nib.TraitedSpec): a = nib.traits.Any() class output_spec(nib.TraitedSpec): b = nib.traits.Any() def _run_interface(self, runtime): return runtime class NoRuntime(TestInterface): def _run_interface(self, runtime): return None class BrokenRuntime(TestInterface): def _run_interface(self, runtime): del runtime.__dict__["cwd"] return runtime with pytest.raises(RuntimeError): NoRuntime().run() with pytest.raises(RuntimeError): BrokenRuntime().run() nipype-1.7.0/nipype/interfaces/base/tests/test_resource_monitor.py000066400000000000000000000056701413403311400255370ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; 
indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Module to unit test the resource_monitor in nipype """ import os import pytest # Import packages from .... import config from ....utils.profiler import _use_resources from ...base import traits, CommandLine, CommandLineInputSpec from ... import utility as niu # Try to enable the resource monitor run_profile = config.resource_monitor @pytest.fixture(scope="module") def use_resource_monitor(): config.enable_resource_monitor() yield config.disable_resource_monitor() class UseResourcesInputSpec(CommandLineInputSpec): mem_gb = traits.Float( desc="Number of GB of RAM to use", argstr="-g %f", mandatory=True ) n_procs = traits.Int( desc="Number of threads to use", argstr="-p %d", mandatory=True ) class UseResources(CommandLine): """ use_resources cmd interface """ from nipype import __path__ # Init attributes input_spec = UseResourcesInputSpec # Get path of executable exec_dir = os.path.realpath(__path__[0]) exec_path = os.path.join(exec_dir, "utils", "tests", "use_resources") # Init cmd _cmd = exec_path _always_run = True @pytest.mark.skip(reason="inconsistent readings") @pytest.mark.skipif(os.getenv("CI_SKIP_TEST", False), reason="disabled in CI tests") @pytest.mark.parametrize("mem_gb,n_procs", [(0.5, 3), (2.2, 8), (0.8, 4), (1.5, 1)]) def test_cmdline_profiling(tmpdir, mem_gb, n_procs, use_resource_monitor): """ Test runtime profiler correctly records workflow RAM/CPUs consumption of a CommandLine-derived interface """ from nipype import config config.set("monitoring", "sample_frequency", "0.2") # Force sampling fast tmpdir.chdir() iface = UseResources(mem_gb=mem_gb, n_procs=n_procs) result = iface.run() assert ( abs(mem_gb - result.runtime.mem_peak_gb) < 0.3 ), "estimated memory error above .3GB" assert ( int(result.runtime.cpu_percent / 100 + 0.2) == n_procs ), "wrong number of threads estimated" @pytest.mark.skipif( True, reason="test disabled temporarily, until funcion profiling works" ) 
@pytest.mark.parametrize("mem_gb,n_procs", [(0.5, 3), (2.2, 8), (0.8, 4), (1.5, 1)]) def test_function_profiling(tmpdir, mem_gb, n_procs, use_resource_monitor): """ Test runtime profiler correctly records workflow RAM/CPUs consumption of a Function interface """ from nipype import config config.set("monitoring", "sample_frequency", "0.2") # Force sampling fast tmpdir.chdir() iface = niu.Function(function=_use_resources) iface.inputs.mem_gb = mem_gb iface.inputs.n_procs = n_procs result = iface.run() assert ( abs(mem_gb - result.runtime.mem_peak_gb) < 0.3 ), "estimated memory error above .3GB" assert int(result.runtime.cpu_percent / 100 + 0.2) >= n_procs nipype-1.7.0/nipype/interfaces/base/tests/test_specs.py000066400000000000000000000353141413403311400232540ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import warnings import pytest from ....utils.filemanip import split_filename from ... 
import base as nib from ...base import traits, Undefined from ....interfaces import fsl from ...utility.wrappers import Function from ....pipeline import Node from ..specs import get_filecopy_info @pytest.fixture(scope="module") def setup_file(request, tmpdir_factory): tmp_dir = tmpdir_factory.mktemp("files") tmp_infile = tmp_dir.join("foo.txt") with tmp_infile.open("w") as fp: fp.writelines(["123456789"]) tmp_dir.chdir() return tmp_infile.strpath def test_TraitedSpec(): assert nib.TraitedSpec().get_hashval() assert nib.TraitedSpec().__repr__() == "\n\n" class spec(nib.TraitedSpec): foo = nib.traits.Int goo = nib.traits.Float(usedefault=True) assert spec().foo == Undefined assert spec().goo == 0.0 specfunc = lambda x: spec(hoo=x) with pytest.raises(nib.traits.TraitError): specfunc(1) infields = spec(foo=1) hashval = ( [("foo", 1), ("goo", "0.0000000000")], "e89433b8c9141aa0fda2f8f4d662c047", ) assert infields.get_hashval() == hashval assert infields.__repr__() == "\nfoo = 1\ngoo = 0.0\n" def test_TraitedSpec_tab_completion(): bet_nd = Node(fsl.BET(), name="bet") bet_interface = fsl.BET() bet_inputs = bet_nd.inputs.class_editable_traits() bet_outputs = bet_nd.outputs.class_editable_traits() # Check __all__ for bet node and interface inputs assert set(bet_nd.inputs.__all__) == set(bet_inputs) assert set(bet_interface.inputs.__all__) == set(bet_inputs) # Check __all__ for bet node outputs assert set(bet_nd.outputs.__all__) == set(bet_outputs) @pytest.mark.skip def test_TraitedSpec_dynamic(): from pickle import dumps, loads a = nib.BaseTraitedSpec() a.add_trait("foo", nib.traits.Int) a.foo = 1 assign_a = lambda: setattr(a, "foo", "a") with pytest.raises(Exception): assign_a pkld_a = dumps(a) unpkld_a = loads(pkld_a) assign_a_again = lambda: setattr(unpkld_a, "foo", "a") with pytest.raises(Exception): assign_a_again def test_DynamicTraitedSpec_tab_completion(): def extract_func(list_out): return list_out[0] # Define interface func_interface = Function( 
input_names=["list_out"], output_names=["out_file", "another_file"], function=extract_func, ) # Define node list_extract = Node( Function( input_names=["list_out"], output_names=["out_file"], function=extract_func ), name="list_extract", ) # Check __all__ for interface inputs expected_input = set(list_extract.inputs.editable_traits()) assert set(func_interface.inputs.__all__) == expected_input # Check __all__ for node inputs assert set(list_extract.inputs.__all__) == expected_input # Check __all__ for node outputs expected_output = set(list_extract.outputs.editable_traits()) assert set(list_extract.outputs.__all__) == expected_output # Add trait and retest list_extract._interface._output_names.append("added_out_trait") expected_output.add("added_out_trait") assert set(list_extract.outputs.__all__) == expected_output def test_TraitedSpec_logic(): class spec3(nib.TraitedSpec): _xor_inputs = ("foo", "bar") foo = nib.traits.Int(xor=_xor_inputs, desc="foo or bar, not both") bar = nib.traits.Int(xor=_xor_inputs, desc="bar or foo, not both") kung = nib.traits.Float(requires=("foo",), position=0, desc="kung foo") class out3(nib.TraitedSpec): output = nib.traits.Int class MyInterface(nib.BaseInterface): input_spec = spec3 output_spec = out3 myif = MyInterface() # NOTE_dj, FAIL: I don't get a TypeError, only a UserWarning # with pytest.raises(TypeError): # setattr(myif.inputs, 'kung', 10.0) myif.inputs.foo = 1 assert myif.inputs.foo == 1 set_bar = lambda: setattr(myif.inputs, "bar", 1) with pytest.raises(IOError): set_bar() assert myif.inputs.foo == 1 myif.inputs.kung = 2 assert myif.inputs.kung == 2.0 def test_deprecation(): with warnings.catch_warnings(record=True) as w: warnings.filterwarnings("always", "", UserWarning) class DeprecationSpec1(nib.TraitedSpec): foo = nib.traits.Int(deprecated="0.1") spec_instance = DeprecationSpec1() set_foo = lambda: setattr(spec_instance, "foo", 1) with pytest.raises(nib.TraitError): set_foo() assert len(w) == 0, "no warnings, just 
errors" with warnings.catch_warnings(record=True) as w: warnings.filterwarnings("always", "", UserWarning) class DeprecationSpec2(nib.TraitedSpec): foo = nib.traits.Int(deprecated="100", new_name="bar") spec_instance = DeprecationSpec2() set_foo = lambda: setattr(spec_instance, "foo", 1) with pytest.raises(nib.TraitError): set_foo() assert len(w) == 0, "no warnings, just errors" with warnings.catch_warnings(record=True) as w: warnings.filterwarnings("always", "", UserWarning) class DeprecationSpec3(nib.TraitedSpec): foo = nib.traits.Int(deprecated="1000", new_name="bar") bar = nib.traits.Int() spec_instance = DeprecationSpec3() not_raised = True try: spec_instance.foo = 1 except nib.TraitError: not_raised = False assert not_raised assert len(w) == 1, f"deprecated warning 1 {[str(w1) for w1 in w]}" with warnings.catch_warnings(record=True) as w: warnings.filterwarnings("always", "", UserWarning) class DeprecationSpec3(nib.TraitedSpec): foo = nib.traits.Int(deprecated="1000", new_name="bar") bar = nib.traits.Int() spec_instance = DeprecationSpec3() not_raised = True try: spec_instance.foo = 1 except nib.TraitError: not_raised = False assert not_raised assert spec_instance.foo == Undefined assert spec_instance.bar == 1 assert len(w) == 1, f"deprecated warning 2 {[str(w1) for w1 in w]}" def test_namesource(setup_file): tmp_infile = setup_file tmpd, nme, ext = split_filename(tmp_infile) class spec2(nib.CommandLineInputSpec): moo = nib.File(name_source=["doo"], hash_files=False, argstr="%s", position=2) doo = nib.File(exists=True, argstr="%s", position=1) goo = traits.Int(argstr="%d", position=4) poo = nib.File(name_source=["goo"], hash_files=False, argstr="%s", position=3) class TestName(nib.CommandLine): _cmd = "mycommand" input_spec = spec2 testobj = TestName() testobj.inputs.doo = tmp_infile testobj.inputs.goo = 99 assert "%s_generated" % nme in testobj.cmdline assert "%d_generated" % testobj.inputs.goo in testobj.cmdline testobj.inputs.moo = "my_%s_template" assert 
"my_%s_template" % nme in testobj.cmdline def test_chained_namesource(setup_file): tmp_infile = setup_file tmpd, nme, ext = split_filename(tmp_infile) class spec2(nib.CommandLineInputSpec): doo = nib.File(exists=True, argstr="%s", position=1) moo = nib.File( name_source=["doo"], hash_files=False, argstr="%s", position=2, name_template="%s_mootpl", ) poo = nib.File(name_source=["moo"], hash_files=False, argstr="%s", position=3) class TestName(nib.CommandLine): _cmd = "mycommand" input_spec = spec2 testobj = TestName() testobj.inputs.doo = tmp_infile res = testobj.cmdline assert "%s" % tmp_infile in res assert "%s_mootpl " % nme in res assert "%s_mootpl_generated" % nme in res def test_cycle_namesource1(setup_file): tmp_infile = setup_file tmpd, nme, ext = split_filename(tmp_infile) class spec3(nib.CommandLineInputSpec): moo = nib.File( name_source=["doo"], hash_files=False, argstr="%s", position=1, name_template="%s_mootpl", ) poo = nib.File(name_source=["moo"], hash_files=False, argstr="%s", position=2) doo = nib.File(name_source=["poo"], hash_files=False, argstr="%s", position=3) class TestCycle(nib.CommandLine): _cmd = "mycommand" input_spec = spec3 # Check that an exception is raised to0 = TestCycle() not_raised = True try: to0.cmdline except nib.NipypeInterfaceError: not_raised = False assert not not_raised def test_cycle_namesource2(setup_file): tmp_infile = setup_file tmpd, nme, ext = split_filename(tmp_infile) class spec3(nib.CommandLineInputSpec): moo = nib.File( name_source=["doo"], hash_files=False, argstr="%s", position=1, name_template="%s_mootpl", ) poo = nib.File(name_source=["moo"], hash_files=False, argstr="%s", position=2) doo = nib.File(name_source=["poo"], hash_files=False, argstr="%s", position=3) class TestCycle(nib.CommandLine): _cmd = "mycommand" input_spec = spec3 # Check that loop can be broken by setting one of the inputs to1 = TestCycle() to1.inputs.poo = tmp_infile not_raised = True try: res = to1.cmdline except nib.NipypeInterfaceError: 
not_raised = False print(res) assert not_raised assert "%s" % tmp_infile in res assert "%s_generated" % nme in res assert "%s_generated_mootpl" % nme in res def test_namesource_constraints(setup_file): tmp_infile = setup_file tmpd, nme, ext = split_filename(tmp_infile) class constrained_spec(nib.CommandLineInputSpec): in_file = nib.File(argstr="%s", position=1) threshold = traits.Float(argstr="%g", xor=["mask_file"], position=2) mask_file = nib.File( argstr="%s", name_source=["in_file"], name_template="%s_mask", keep_extension=True, xor=["threshold"], position=2, ) out_file1 = nib.File( argstr="%s", name_source=["in_file"], name_template="%s_out1", keep_extension=True, position=3, ) out_file2 = nib.File( argstr="%s", name_source=["in_file"], name_template="%s_out2", keep_extension=True, requires=["threshold"], position=4, ) class TestConstrained(nib.CommandLine): _cmd = "mycommand" input_spec = constrained_spec tc = TestConstrained() # name_source undefined, so template traits remain undefined assert tc.cmdline == "mycommand" # mask_file and out_file1 enabled by name_source definition tc.inputs.in_file = os.path.basename(tmp_infile) assert tc.cmdline == "mycommand foo.txt foo_mask.txt foo_out1.txt" # mask_file disabled by threshold, out_file2 enabled by threshold tc.inputs.threshold = 10.0 assert tc.cmdline == "mycommand foo.txt 10 foo_out1.txt foo_out2.txt" def test_TraitedSpec_withFile(setup_file): tmp_infile = setup_file tmpd, nme = os.path.split(tmp_infile) assert os.path.exists(tmp_infile) class spec2(nib.TraitedSpec): moo = nib.File(exists=True) doo = nib.traits.List(nib.File(exists=True)) infields = spec2(moo=tmp_infile, doo=[tmp_infile]) hashval = infields.get_hashval(hash_method="content") assert hashval[1] == "a00e9ee24f5bfa9545a515b7a759886b" def test_TraitedSpec_withNoFileHashing(setup_file): tmp_infile = setup_file tmpd, nme = os.path.split(tmp_infile) assert os.path.exists(tmp_infile) class spec2(nib.TraitedSpec): moo = nib.File(exists=True, 
hash_files=False) doo = nib.traits.List(nib.File(exists=True)) infields = spec2(moo=nme, doo=[tmp_infile]) hashval = infields.get_hashval(hash_method="content") assert hashval[1] == "8da4669ff5d72f670a46ea3e7a203215" class spec3(nib.TraitedSpec): moo = nib.File(exists=True, name_source="doo") doo = nib.traits.List(nib.File(exists=True)) infields = spec3(moo=nme, doo=[tmp_infile]) hashval1 = infields.get_hashval(hash_method="content") class spec4(nib.TraitedSpec): moo = nib.File(exists=True) doo = nib.traits.List(nib.File(exists=True)) infields = spec4(moo=nme, doo=[tmp_infile]) hashval2 = infields.get_hashval(hash_method="content") assert hashval1[1] != hashval2[1] def test_ImageFile(): x = nib.BaseInterface().inputs # setup traits x.add_trait("nifti", nib.ImageFile(types=["nifti1", "dicom"])) x.add_trait("anytype", nib.ImageFile()) with pytest.raises(ValueError): x.add_trait("newtype", nib.ImageFile(types=["nifti10"])) x.add_trait("nocompress", nib.ImageFile(types=["mgh"], allow_compressed=False)) with pytest.raises(nib.TraitError): x.nifti = "test.mgz" x.nifti = "test.nii" x.anytype = "test.xml" with pytest.raises(nib.TraitError): x.nocompress = "test.mgz" x.nocompress = "test.mgh" def test_filecopy_info(): class InputSpec(nib.TraitedSpec): foo = nib.traits.Int(desc="a random int") goo = nib.traits.Int(desc="a random int", mandatory=True) moo = nib.traits.Int(desc="a random int", mandatory=False) hoo = nib.traits.Int(desc="a random int", usedefault=True) zoo = nib.File(desc="a file", copyfile=False) woo = nib.File(desc="a file", copyfile=True) class DerivedInterface(nib.BaseInterface): input_spec = InputSpec resource_monitor = False def normalize_filenames(self): """A mock normalize_filenames for freesurfer interfaces that have one""" self.inputs.zoo = "normalized_filename.ext" assert get_filecopy_info(nib.BaseInterface) == [] # Test on interface class, not instantiated info = get_filecopy_info(DerivedInterface) assert info[0]["key"] == "woo" assert 
info[0]["copy"] assert info[1]["key"] == "zoo" assert not info[1]["copy"] info = None # Test with instantiated interface derived = DerivedInterface() # First check that zoo is not defined assert derived.inputs.zoo == Undefined # After the first call to get_filecopy_info zoo is defined info = get_filecopy_info(derived) # Ensure that normalize_filenames was called assert derived.inputs.zoo == "normalized_filename.ext" # Check the results are consistent assert info[0]["key"] == "woo" assert info[0]["copy"] assert info[1]["key"] == "zoo" assert not info[1]["copy"] nipype-1.7.0/nipype/interfaces/base/tests/test_support.py000066400000000000000000000031431413403311400236460ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import pytest from pkg_resources import resource_filename as pkgrf from ....utils.filemanip import md5 from ... import base as nib @pytest.mark.parametrize("args", [{}, {"a": 1, "b": [2, 3]}]) def test_bunch(args): b = nib.Bunch(**args) assert b.__dict__ == args def test_bunch_attribute(): b = nib.Bunch(a=1, b=[2, 3], c=None) assert b.a == 1 assert b.b == [2, 3] assert b.c is None def test_bunch_repr(): b = nib.Bunch(b=2, c=3, a=dict(n=1, m=2)) assert repr(b) == "Bunch(a={'m': 2, 'n': 1}, b=2, c=3)" def test_bunch_methods(): b = nib.Bunch(a=2) b.update(a=3) newb = b.dictcopy() assert b.a == 3 assert b.get("a") == 3 assert b.get("badkey", "otherthing") == "otherthing" assert b != newb assert type(dict()) == type(newb) assert newb["a"] == 3 def test_bunch_hash(): # NOTE: Since the path to the json file is included in the Bunch, # the hash will be unique to each machine. 
json_pth = pkgrf("nipype", os.path.join("testing", "data", "realign_json.json")) b = nib.Bunch(infile=json_pth, otherthing="blue", yat=True) newbdict, bhash = b._get_bunch_hash() assert bhash == "d1f46750044c3de102efc847720fc35f" # Make sure the hash stored in the json file for `infile` is correct. jshash = md5() with open(json_pth, "r") as fp: jshash.update(fp.read().encode("utf-8")) assert newbdict["infile"][0][1] == jshash.hexdigest() assert newbdict["yat"] is True nipype-1.7.0/nipype/interfaces/base/tests/test_traits_extension.py000066400000000000000000000245031413403311400255370ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Check the resolving/rebasing feature of ``BasePath``s.""" from ... import base as nib from ..traits_extension import rebase_path_traits, resolve_path_traits, Path class _test_spec(nib.TraitedSpec): a = nib.File() b = nib.traits.Tuple(nib.File(), nib.File()) c = nib.traits.List(nib.File()) d = nib.traits.Either(nib.File(), nib.traits.Float()) e = nib.OutputMultiObject(nib.File()) ee = nib.OutputMultiObject(nib.Str) f = nib.traits.Dict(nib.Str, nib.File()) g = nib.traits.Either(nib.File, nib.Str) h = nib.Str i = nib.traits.Either(nib.File, nib.traits.Tuple(nib.File, nib.traits.Int)) j = nib.traits.Either( nib.File, nib.traits.Tuple(nib.File, nib.traits.Int), nib.traits.Dict(nib.Str, nib.File()), ) k = nib.DictStrStr def test_rebase_resolve_path_traits(): """Check rebase_path_traits and resolve_path_traits and idempotence.""" spec = _test_spec() v = "/some/path/f1.txt" a = rebase_path_traits(spec.trait("a"), v, "/some/path") assert a == Path("f1.txt") # Idempotence assert rebase_path_traits(spec.trait("a"), a, "/some/path") == a a = resolve_path_traits(spec.trait("a"), a, "/some/path") assert a == Path(v) # Idempotence assert resolve_path_traits(spec.trait("a"), a, "/some/path") == a a = rebase_path_traits(spec.trait("a"), v, "/some/other/path") 
assert a == Path(v) # Idempotence assert rebase_path_traits(spec.trait("a"), a, "/some/other/path") == a a = resolve_path_traits(spec.trait("a"), a, "/some/path") assert a == Path(v) # Idempotence assert resolve_path_traits(spec.trait("a"), a, "/some/path") == a v = ("/some/path/f1.txt", "/some/path/f2.txt") b = rebase_path_traits(spec.trait("b"), v, "/some/path") assert b == (Path("f1.txt"), Path("f2.txt")) # Idempotence assert rebase_path_traits(spec.trait("b"), b, "/some/path") == b b = resolve_path_traits(spec.trait("b"), b, "/some/path") assert b == (Path(v[0]), Path(v[1])) # Idempotence assert resolve_path_traits(spec.trait("b"), b, "/some/path") == b v = ["/some/path/f1.txt", "/some/path/f2.txt", "/some/path/f3.txt"] c = rebase_path_traits(spec.trait("c"), v, "/some/path") assert c == [Path("f1.txt"), Path("f2.txt"), Path("f3.txt")] # Idempotence assert rebase_path_traits(spec.trait("c"), c, "/some/path") == c c = resolve_path_traits(spec.trait("c"), c, "/some/path") assert c == [Path(vp) for vp in v] # Idempotence assert resolve_path_traits(spec.trait("c"), c, "/some/path") == c v = 2.0 d = rebase_path_traits(spec.trait("d"), v, "/some/path") assert d == v d = resolve_path_traits(spec.trait("d"), d, "/some/path") assert d == v v = "/some/path/either.txt" d = rebase_path_traits(spec.trait("d"), v, "/some/path") assert d == Path("either.txt") # Idempotence assert rebase_path_traits(spec.trait("d"), d, "/some/path") == d d = resolve_path_traits(spec.trait("d"), d, "/some/path") assert d == Path(v) # Idempotence assert resolve_path_traits(spec.trait("d"), d, "/some/path") == d v = ["/some/path/f1.txt", "/some/path/f2.txt", "/some/path/f3.txt"] e = rebase_path_traits(spec.trait("e"), v, "/some/path") assert e == [Path("f1.txt"), Path("f2.txt"), Path("f3.txt")] # Idempotence assert rebase_path_traits(spec.trait("e"), e, "/some/path") == e e = resolve_path_traits(spec.trait("e"), e, "/some/path") assert e == [Path(vp) for vp in v] # Idempotence assert 
resolve_path_traits(spec.trait("e"), e, "/some/path") == e v = [["/some/path/f1.txt", "/some/path/f2.txt"], [["/some/path/f3.txt"]]] e = rebase_path_traits(spec.trait("e"), v, "/some/path") assert e == [[Path("f1.txt"), Path("f2.txt")], [[Path("f3.txt")]]] # Idempotence assert rebase_path_traits(spec.trait("e"), e, "/some/path") == e e = resolve_path_traits(spec.trait("e"), e, "/some/path") assert e == [ [ [Path(vpp) for vpp in vp] if isinstance(vp, list) else Path(vp) for vp in inner ] for inner in v ] # Idempotence assert resolve_path_traits(spec.trait("e"), e, "/some/path") == e # These are Str - no rebasing/resolving should happen v = [["/some/path/f1.txt", "/some/path/f2.txt"], [["/some/path/f3.txt"]]] ee = rebase_path_traits(spec.trait("ee"), v, "/some/path") assert ee == v # Idempotence assert rebase_path_traits(spec.trait("ee"), ee, "/some/path") == ee ee = resolve_path_traits( spec.trait("ee"), [["f1.txt", "f2.txt"], [["f3.txt"]]], "/some/path" ) assert ee == [["f1.txt", "f2.txt"], [["f3.txt"]]] # Idempotence assert resolve_path_traits(spec.trait("ee"), ee, "/some/path") == ee v = {"1": "/some/path/f1.txt"} f = rebase_path_traits(spec.trait("f"), v, "/some") assert f == {"1": Path("path/f1.txt")} # Idempotence assert rebase_path_traits(spec.trait("f"), f, "/some") == f f = resolve_path_traits(spec.trait("f"), f, "/some") assert f == {k: Path(val) for k, val in v.items()} # Idempotence assert resolve_path_traits(spec.trait("f"), f, "/some") == f # Either(Str, File): passing in path-like apply manipulation v = "/some/path/either.txt" g = rebase_path_traits(spec.trait("g"), v, "/some/path") assert g == Path("either.txt") # Idempotence assert rebase_path_traits(spec.trait("g"), g, "/some/path") == g g = resolve_path_traits(spec.trait("g"), g, "/some/path") assert g == Path(v) # Idempotence assert resolve_path_traits(spec.trait("g"), g, "/some/path") == g g = rebase_path_traits(spec.trait("g"), v, "/some") assert g == Path("path/either.txt") # Idempotence 
assert rebase_path_traits(spec.trait("g"), g, "/some/path") == g g = resolve_path_traits(spec.trait("g"), g, "/some") assert g == Path(v) # Idempotence assert resolve_path_traits(spec.trait("g"), g, "/some/path") == g # Either(Str, File): passing str discards File v = "either.txt" g = rebase_path_traits(spec.trait("g"), v, "/some/path") assert g == v # Idempotence assert rebase_path_traits(spec.trait("g"), g, "/some/path") == g # This is a problematic case, it is impossible to know whether this # was meant to be a string or a file. # In this implementation, strings take precedence g = resolve_path_traits(spec.trait("g"), g, "/some/path") assert g == v # Idempotence assert resolve_path_traits(spec.trait("g"), g, "/some/path") == g v = "string" g = rebase_path_traits(spec.trait("g"), v, "/some") assert g == v # Idempotence assert rebase_path_traits(spec.trait("g"), g, "/some") == g # This is a problematic case, it is impossible to know whether this # was meant to be a string or a file. g = resolve_path_traits(spec.trait("g"), v, "/some") assert g == v # Idempotence assert resolve_path_traits(spec.trait("g"), g, "/some") == g g = rebase_path_traits(spec.trait("g"), v, "/some/path") assert g == v # You dont want this one to be a Path # Idempotence assert rebase_path_traits(spec.trait("g"), g, "/some/path") == g # This is a problematic case, it is impossible to know whether this # was meant to be a string or a file. 
g = resolve_path_traits(spec.trait("g"), g, "/some/path") assert g == v # You dont want this one to be a Path # Idempotence assert resolve_path_traits(spec.trait("g"), g, "/some/path") == g h = rebase_path_traits(spec.trait("h"), v, "/some/path") assert h == v # Idempotence assert rebase_path_traits(spec.trait("h"), h, "/some/path") == h h = resolve_path_traits(spec.trait("h"), h, "/some/path") assert h == v # Idempotence assert resolve_path_traits(spec.trait("h"), h, "/some/path") == h v = "/some/path/either/file.txt" i = rebase_path_traits(spec.trait("i"), v, "/some/path") assert i == Path("either/file.txt") # Idempotence assert rebase_path_traits(spec.trait("i"), i, "/some/path") == i i = resolve_path_traits(spec.trait("i"), i, "/some/path") assert i == Path(v) # Idempotence assert resolve_path_traits(spec.trait("i"), i, "/some/path") == i v = ("/some/path/either/tuple/file.txt", 2) i = rebase_path_traits(spec.trait("i"), v, "/some/path") assert i == (Path("either/tuple/file.txt"), 2) # Idempotence assert rebase_path_traits(spec.trait("i"), i, "/some/path") == i i = resolve_path_traits(spec.trait("i"), i, "/some/path") assert i == (Path(v[0]), v[1]) # Idempotence assert resolve_path_traits(spec.trait("i"), i, "/some/path") == i v = "/some/path/either/file.txt" j = rebase_path_traits(spec.trait("j"), v, "/some/path") assert j == Path("either/file.txt") # Idempotence assert rebase_path_traits(spec.trait("j"), j, "/some/path") == j j = resolve_path_traits(spec.trait("j"), j, "/some/path") assert j == Path(v) # Idempotence assert resolve_path_traits(spec.trait("j"), j, "/some/path") == j v = ("/some/path/either/tuple/file.txt", 2) j = rebase_path_traits( spec.trait("j"), ("/some/path/either/tuple/file.txt", 2), "/some/path" ) assert j == (Path("either/tuple/file.txt"), 2) # Idempotence assert rebase_path_traits(spec.trait("j"), j, "/some/path") == j j = resolve_path_traits(spec.trait("j"), j, "/some/path") assert j == (Path(v[0]), v[1]) # Idempotence assert 
resolve_path_traits(spec.trait("j"), j, "/some/path") == j v = {"a": "/some/path/either/dict/file.txt"} j = rebase_path_traits(spec.trait("j"), v, "/some/path") assert j == {"a": Path("either/dict/file.txt")} # Idempotence assert rebase_path_traits(spec.trait("j"), j, "/some/path") == j j = resolve_path_traits(spec.trait("j"), j, "/some/path") assert j == {k: Path(val) for k, val in v.items()} # Idempotence assert resolve_path_traits(spec.trait("j"), j, "/some/path") == j v = {"path": "/some/path/f1.txt"} k = rebase_path_traits(spec.trait("k"), v, "/some/path") assert k == v # Idempotence assert rebase_path_traits(spec.trait("k"), k, "/some/path") == k k = resolve_path_traits(spec.trait("k"), k, "/some/path") assert k == v nipype-1.7.0/nipype/interfaces/base/traits_extension.py000066400000000000000000000426071413403311400233430ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Traits extension ................ This module contains Trait classes that we've pulled from the traits source and fixed due to various bugs. File and Directory are redefined as the release version had dependencies on TraitsUI, which we do not want Nipype to depend on. At least not yet. Undefined class was missing the __len__ operator, causing edit_traits and configure_traits to fail on List objects. Even though we don't require TraitsUI, this bug was the only thing preventing us from popping up GUIs which users like. These bugs have been in Traits v3.3.0 and v3.2.1. We have reported all of these bugs and they've been fixed in enthought svn repository (usually by Robert Kern). 
""" from collections.abc import Sequence # perform all external trait imports here from traits import __version__ as traits_version import traits.api as traits from traits.api import TraitType, Unicode from traits.trait_base import _Undefined try: # Moved in traits 6.0 from traits.trait_type import NoDefaultSpecified except ImportError: # Pre-6.0 from traits.trait_handlers import NoDefaultSpecified from pathlib import Path from ...utils.filemanip import path_resolve if traits_version < "3.7.0": raise ImportError("Traits version 3.7.0 or higher must be installed") IMG_FORMATS = { "afni": (".HEAD", ".BRIK"), "cifti2": (".nii", ".nii.gz"), "dicom": (".dcm", ".IMA", ".tar", ".tar.gz"), "gifti": (".gii", ".gii.gz"), "mgh": (".mgh", ".mgz", ".mgh.gz"), "nifti1": (".nii", ".nii.gz", ".hdr", ".img", ".img.gz"), "nifti2": (".nii", ".nii.gz"), "nrrd": (".nrrd", ".nhdr"), } IMG_ZIP_FMT = set([".nii.gz", "tar.gz", ".gii.gz", ".mgz", ".mgh.gz", "img.gz"]) """ The functions that pop-up the Traits GUIs, edit_traits and configure_traits, were failing because all of our inputs default to Undefined deep and down in traits/ui/wx/list_editor.py it checks for the len() of the elements of the list. The _Undefined class in traits does not define the __len__ method and would error. I tried defining our own Undefined and even sublassing Undefined, but both of those failed with a TraitError in our initializer when we assign the Undefined to the inputs because of an incompatible type: TraitError: The 'vertical_gradient' trait of a BetInputSpec instance must be \ a float, but a value of was specified. So... in order to keep the same type but add the missing method, I monkey patched. 
""" def _length(self): return 0 ########################################################################## # Apply monkeypatch here _Undefined.__len__ = _length ########################################################################## Undefined = _Undefined() class Str(Unicode): """Replaces the default traits.Str based in bytes.""" # Monkeypatch Str and DictStrStr for Python 2 compatibility traits.Str = Str DictStrStr = traits.Dict((bytes, str), (bytes, str)) traits.DictStrStr = DictStrStr class BasePath(TraitType): """Defines a trait whose value must be a valid filesystem path.""" # A description of the type of value this trait accepts: exists = False resolve = False _is_file = False _is_dir = False @property def info_text(self): """Create the trait's general description.""" info_text = "a pathlike object or string" if any((self.exists, self._is_file, self._is_dir)): info_text += " representing a" if self.exists: info_text += "n existing" if self._is_file: info_text += " file" elif self._is_dir: info_text += " directory" else: info_text += " file or directory" return info_text def __init__(self, value=Undefined, exists=False, resolve=False, **metadata): """Create a BasePath trait.""" self.exists = exists self.resolve = resolve super(BasePath, self).__init__(value, **metadata) def validate(self, objekt, name, value, return_pathlike=False): """Validate a value change.""" try: value = Path(value) # Use pathlib's validation except Exception: self.error(objekt, name, str(value)) if self.exists: if not value.exists(): self.error(objekt, name, str(value)) if self._is_file and not value.is_file(): self.error(objekt, name, str(value)) if self._is_dir and not value.is_dir(): self.error(objekt, name, str(value)) if self.resolve: value = path_resolve(value, strict=self.exists) if not return_pathlike: value = str(value) return value class Directory(BasePath): """ Defines a trait whose value must be a directory path. 
>>> from nipype.interfaces.base import Directory, TraitedSpec, TraitError >>> class A(TraitedSpec): ... foo = Directory(exists=False) >>> a = A() >>> a.foo >>> a.foo = '/some/made/out/path' >>> a.foo '/some/made/out/path' >>> class A(TraitedSpec): ... foo = Directory(exists=False, resolve=True) >>> a = A(foo='relative_dir') >>> a.foo # doctest: +ELLIPSIS '.../relative_dir' >>> class A(TraitedSpec): ... foo = Directory(exists=True, resolve=True) >>> a = A() >>> a.foo = 'relative_dir' # doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): TraitError: >>> from os import mkdir >>> mkdir('relative_dir') >>> a.foo = 'relative_dir' >>> a.foo # doctest: +ELLIPSIS '.../relative_dir' >>> class A(TraitedSpec): ... foo = Directory(exists=True, resolve=False) >>> a = A(foo='relative_dir') >>> a.foo 'relative_dir' >>> class A(TraitedSpec): ... foo = Directory('tmpdir') >>> a = A() >>> a.foo # doctest: +ELLIPSIS >>> class A(TraitedSpec): ... foo = Directory('tmpdir', usedefault=True) >>> a = A() >>> a.foo # doctest: +ELLIPSIS 'tmpdir' """ _is_dir = True class File(BasePath): """ Defines a trait whose value must be a file path. >>> from nipype.interfaces.base import File, TraitedSpec, TraitError >>> class A(TraitedSpec): ... foo = File() >>> a = A() >>> a.foo >>> a.foo = '/some/made/out/path/to/file' >>> a.foo '/some/made/out/path/to/file' >>> class A(TraitedSpec): ... foo = File(exists=False, resolve=True) >>> a = A(foo='idontexist.txt') >>> a.foo # doctest: +ELLIPSIS '.../idontexist.txt' >>> class A(TraitedSpec): ... foo = File(exists=True, resolve=True) >>> a = A() >>> a.foo = 'idontexist.txt' # doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): TraitError: >>> open('idoexist.txt', 'w').close() >>> a.foo = 'idoexist.txt' >>> a.foo # doctest: +ELLIPSIS '.../idoexist.txt' >>> class A(TraitedSpec): ... foo = File('idoexist.txt') >>> a = A() >>> a.foo >>> class A(TraitedSpec): ... 
foo = File('idoexist.txt', usedefault=True) >>> a = A() >>> a.foo 'idoexist.txt' >>> class A(TraitedSpec): ... foo = File(exists=True, resolve=True, extensions=['.txt', 'txt.gz']) >>> a = A() >>> a.foo = 'idoexist.badtxt' # doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): TraitError: >>> a.foo = 'idoexist.txt' >>> a.foo # doctest: +ELLIPSIS '.../idoexist.txt' >>> class A(TraitedSpec): ... foo = File(extensions=['.nii', '.nii.gz']) >>> a = A() >>> a.foo = 'badext.txt' # doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): TraitError: >>> class A(TraitedSpec): ... foo = File(extensions=['.nii', '.nii.gz']) >>> a = A() >>> a.foo = 'goodext.nii' >>> a.foo 'goodext.nii' >>> a = A() >>> a.foo = 'idontexist.000.nii' >>> a.foo # doctest: +ELLIPSIS 'idontexist.000.nii' >>> a = A() >>> a.foo = 'idontexist.000.nii.gz' >>> a.foo # doctest: +ELLIPSIS 'idontexist.000.nii.gz' """ _is_file = True _exts = None def __init__( self, value=NoDefaultSpecified, exists=False, resolve=False, allow_compressed=True, extensions=None, **metadata ): """Create a File trait.""" if extensions is not None: if isinstance(extensions, (bytes, str)): extensions = [extensions] if allow_compressed is False: extensions = list(set(extensions) - IMG_ZIP_FMT) self._exts = sorted( set( [ ".%s" % ext if not ext.startswith(".") else ext for ext in extensions ] ) ) super(File, self).__init__( value=value, exists=exists, resolve=resolve, extensions=self._exts, **metadata ) def validate(self, objekt, name, value, return_pathlike=False): """Validate a value change.""" value = super(File, self).validate(objekt, name, value, return_pathlike=True) if self._exts: fname = value.name if not any((fname.endswith(e) for e in self._exts)): self.error(objekt, name, str(value)) if not return_pathlike: value = str(value) return value class ImageFile(File): """Defines a trait whose value must be a known neuroimaging file.""" def __init__( self, value=NoDefaultSpecified, exists=False, 
resolve=False, types=None, **metadata ): """Create an ImageFile trait.""" extensions = None if types is not None: if isinstance(types, (bytes, str)): types = [types] if set(types) - set(IMG_FORMATS.keys()): invalid = set(types) - set(IMG_FORMATS.keys()) raise ValueError( """\ Unknown value(s) %s for metadata type of an ImageFile input.\ """ % ", ".join(['"%s"' % t for t in invalid]) ) extensions = [ext for t in types for ext in IMG_FORMATS[t]] super(ImageFile, self).__init__( value=value, exists=exists, extensions=extensions, resolve=resolve, **metadata ) def isdefined(objekt): return not isinstance(objekt, _Undefined) def has_metadata(trait, metadata, value=None, recursive=True): """ Checks if a given trait has a metadata (and optionally if it is set to particular value) """ count = 0 if ( hasattr(trait, "_metadata") and metadata in list(trait._metadata.keys()) and (trait._metadata[metadata] == value or value is None) ): count += 1 if recursive: if hasattr(trait, "inner_traits"): for inner_trait in trait.inner_traits(): count += has_metadata(inner_trait.trait_type, metadata, recursive) if hasattr(trait, "handlers") and trait.handlers is not None: for handler in trait.handlers: count += has_metadata(handler, metadata, recursive) return count > 0 class MultiObject(traits.List): """Abstract class - shared functionality of input and output MultiObject""" def validate(self, objekt, name, value): # want to treat range and other sequences (except str) as list if not isinstance(value, (str, bytes)) and isinstance(value, Sequence): value = list(value) if not isdefined(value) or (isinstance(value, list) and len(value) == 0): return Undefined newvalue = value inner_trait = self.inner_traits()[0] if not isinstance(value, list) or ( isinstance(inner_trait.trait_type, traits.List) and not isinstance(inner_trait.trait_type, InputMultiObject) and not isinstance(value[0], list) ): newvalue = [value] value = super(MultiObject, self).validate(objekt, name, newvalue) if value: return 
value self.error(objekt, name, value) class OutputMultiObject(MultiObject): """Implements a user friendly traits that accepts one or more paths to files or directories. This is the output version which return a single string whenever possible (when it was set to a single value or a list of length 1). Default value of this trait is _Undefined. It does not accept empty lists. XXX This should only be used as a final resort. We should stick to established Traits to the extent possible. XXX This needs to be vetted by somebody who understands traits >>> from nipype.interfaces.base import OutputMultiObject, TraitedSpec >>> class A(TraitedSpec): ... foo = OutputMultiObject(File(exists=False)) >>> a = A() >>> a.foo >>> a.foo = '/software/temp/foo.txt' >>> a.foo '/software/temp/foo.txt' >>> a.foo = ['/software/temp/foo.txt'] >>> a.foo '/software/temp/foo.txt' >>> a.foo = ['/software/temp/foo.txt', '/software/temp/goo.txt'] >>> a.foo ['/software/temp/foo.txt', '/software/temp/goo.txt'] """ def get(self, objekt, name): value = self.get_value(objekt, name) if len(value) == 0: return Undefined elif len(value) == 1: return value[0] else: return value def set(self, objekt, name, value): self.set_value(objekt, name, value) class InputMultiObject(MultiObject): """Implements a user friendly traits that accepts one or more paths to files or directories. This is the input version which always returns a list. Default value of this trait is _Undefined. It does not accept empty lists. XXX This should only be used as a final resort. We should stick to established Traits to the extent possible. XXX This needs to be vetted by somebody who understands traits >>> from nipype.interfaces.base import InputMultiObject, TraitedSpec >>> class A(TraitedSpec): ... 
foo = InputMultiObject(File(exists=False)) >>> a = A() >>> a.foo >>> a.foo = '/software/temp/foo.txt' >>> a.foo ['/software/temp/foo.txt'] >>> a.foo = ['/software/temp/foo.txt'] >>> a.foo ['/software/temp/foo.txt'] >>> a.foo = ['/software/temp/foo.txt', '/software/temp/goo.txt'] >>> a.foo ['/software/temp/foo.txt', '/software/temp/goo.txt'] """ pass InputMultiPath = InputMultiObject OutputMultiPath = OutputMultiObject def _rebase_path(value, cwd): if isinstance(value, list): return [_rebase_path(v, cwd) for v in value] try: value = Path(value) except TypeError: pass else: try: value = value.relative_to(cwd) except ValueError: pass return value def _resolve_path(value, cwd): if isinstance(value, list): return [_resolve_path(v, cwd) for v in value] try: value = Path(value) except TypeError: pass else: if not value.is_absolute(): value = Path(cwd).absolute() / value return value def _recurse_on_path_traits(func, thistrait, value, cwd): """Run func recursively on BasePath-derived traits.""" if thistrait.is_trait_type(BasePath): value = func(value, cwd) elif thistrait.is_trait_type(traits.List): (innertrait,) = thistrait.inner_traits if not isinstance(value, (list, tuple)): return _recurse_on_path_traits(func, innertrait, value, cwd) value = [_recurse_on_path_traits(func, innertrait, v, cwd) for v in value] elif isinstance(value, dict) and thistrait.is_trait_type(traits.Dict): _, innertrait = thistrait.inner_traits value = { k: _recurse_on_path_traits(func, innertrait, v, cwd) for k, v in value.items() } elif isinstance(value, tuple) and thistrait.is_trait_type(traits.Tuple): value = tuple( [ _recurse_on_path_traits(func, subtrait, v, cwd) for subtrait, v in zip(thistrait.handler.types, value) ] ) elif thistrait.is_trait_type(traits.TraitCompound): is_str = [ isinstance(f, (traits.String, traits.BaseStr, traits.BaseBytes, Str)) for f in thistrait.handler.handlers ] if ( any(is_str) and isinstance(value, (bytes, str)) and not value.startswith("/") ): return value for 
subtrait in thistrait.handler.handlers: try: sb_instance = subtrait() except TypeError: return value else: value = _recurse_on_path_traits(func, sb_instance, value, cwd) return value def rebase_path_traits(thistrait, value, cwd): """Rebase a BasePath-derived trait given an interface spec.""" return _recurse_on_path_traits(_rebase_path, thistrait, value, cwd) def resolve_path_traits(thistrait, value, cwd): """Resolve a BasePath-derived trait given an interface spec.""" return _recurse_on_path_traits(_resolve_path, thistrait, value, cwd) nipype-1.7.0/nipype/interfaces/brainsuite/000077500000000000000000000000001413403311400206115ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/brainsuite/__init__.py000066400000000000000000000003511413403311400227210ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .brainsuite import ( Bse, Bfc, Pvc, Cerebro, Cortex, Scrubmask, Tca, Dewisp, Dfs, Pialmesh, Skullfinder, Hemisplit, SVReg, BDP, ThicknessPVC, ) nipype-1.7.0/nipype/interfaces/brainsuite/brainsuite.py000066400000000000000000002230551413403311400233370ustar00rootroot00000000000000# -*- coding: utf-8 -*- """This script provides interfaces for BrainSuite command line tools. Please see brainsuite.org for more information. Author: Jason Wong """ import os import re as regex from ..base import ( TraitedSpec, CommandLineInputSpec, CommandLine, File, traits, isdefined, ) class BseInputSpec(CommandLineInputSpec): inputMRIFile = File(mandatory=True, argstr="-i %s", desc="input MRI volume") outputMRIVolume = File( desc="output brain-masked MRI volume. If unspecified, output file name will be auto generated.", argstr="-o %s", hash_files=False, genfile=True, ) outputMaskFile = File( desc="save smooth brain mask. 
If unspecified, output file name will be auto generated.", argstr="--mask %s", hash_files=False, genfile=True, ) diffusionConstant = traits.Float( 25, usedefault=True, desc="diffusion constant", argstr="-d %f" ) diffusionIterations = traits.Int( 3, usedefault=True, desc="diffusion iterations", argstr="-n %d" ) edgeDetectionConstant = traits.Float( 0.64, usedefault=True, desc="edge detection constant", argstr="-s %f" ) radius = traits.Float( 1, usedefault=True, desc="radius of erosion/dilation filter", argstr="-r %f" ) dilateFinalMask = traits.Bool( True, usedefault=True, desc="dilate final mask", argstr="-p" ) trim = traits.Bool(True, usedefault=True, desc="trim brainstem", argstr="--trim") outputDiffusionFilter = File( desc="diffusion filter output", argstr="--adf %s", hash_files=False ) outputEdgeMap = File(desc="edge map output", argstr="--edge %s", hash_files=False) outputDetailedBrainMask = File( desc="save detailed brain mask", argstr="--hires %s", hash_files=False ) outputCortexFile = File(desc="cortex file", argstr="--cortex %s", hash_files=False) verbosityLevel = traits.Float( 1, usedefault=True, desc=" verbosity level (0=silent)", argstr="-v %f" ) noRotate = traits.Bool( desc="retain original orientation(default behavior will auto-rotate input NII files to LPI orientation)", argstr="--norotate", ) timer = traits.Bool(desc="show timing", argstr="--timer") class BseOutputSpec(TraitedSpec): outputMRIVolume = File(desc="path/name of brain-masked MRI volume") outputMaskFile = File(desc="path/name of smooth brain mask") outputDiffusionFilter = File(desc="path/name of diffusion filter output") outputEdgeMap = File(desc="path/name of edge map output") outputDetailedBrainMask = File(desc="path/name of detailed brain mask") outputCortexFile = File(desc="path/name of cortex file") class Bse(CommandLine): """ brain surface extractor (BSE) This program performs automated skull and scalp removal on T1-weighted MRI volumes. 
http://brainsuite.org/processing/surfaceextraction/bse/ Examples -------- >>> from nipype.interfaces import brainsuite >>> from nipype.testing import example_data >>> bse = brainsuite.Bse() >>> bse.inputs.inputMRIFile = example_data('structural.nii') >>> results = bse.run() #doctest: +SKIP """ input_spec = BseInputSpec output_spec = BseOutputSpec _cmd = "bse" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) fileToSuffixMap = { "outputMRIVolume": ".bse.nii.gz", "outputMaskFile": ".mask.nii.gz", } if name in fileToSuffixMap: return getFileName(self.inputs.inputMRIFile, fileToSuffixMap[name]) return None def _list_outputs(self): return l_outputs(self) class BfcInputSpec(CommandLineInputSpec): inputMRIFile = File( mandatory=True, desc="input skull-stripped MRI volume", argstr="-i %s" ) inputMaskFile = File(desc="mask file", argstr="-m %s", hash_files=False) outputMRIVolume = File( desc="output bias-corrected MRI volume. 
If unspecified, output file name will be auto generated.", argstr="-o %s", hash_files=False, genfile=True, ) outputBiasField = File( desc="save bias field estimate", argstr="--bias %s", hash_files=False ) outputMaskedBiasField = File( desc="save bias field estimate (masked)", argstr="--maskedbias %s", hash_files=False, ) histogramRadius = traits.Int(desc="histogram radius (voxels)", argstr="-r %d") biasEstimateSpacing = traits.Int( desc="bias sample spacing (voxels)", argstr="-s %d" ) controlPointSpacing = traits.Int( desc="control point spacing (voxels)", argstr="-c %d" ) splineLambda = traits.Float( desc="spline stiffness weighting parameter", argstr="-w %f" ) histogramType = traits.Enum( "ellipse", "block", desc="""\ Options for type of histogram: * ``ellipse``: use ellipsoid for ROI histogram * ``block``:use block for ROI histogram """, argstr="%s", ) iterativeMode = traits.Bool( desc="iterative mode (overrides -r, -s, -c, -w settings)", argstr="--iterate" ) correctionScheduleFile = File(desc="list of parameters ", argstr="--schedule %s") biasFieldEstimatesOutputPrefix = traits.Str( desc="save iterative bias field estimates as .n.field.nii.gz", argstr="--biasprefix %s", ) correctedImagesOutputPrefix = traits.Str( desc="save iterative corrected images as .n.bfc.nii.gz", argstr="--prefix %s", ) correctWholeVolume = traits.Bool( desc="apply correction field to entire volume", argstr="--extrapolate" ) minBias = traits.Float( 0.5, usedefault=True, desc="minimum allowed bias value", argstr="-L %f" ) maxBias = traits.Float( 1.5, usedefault=True, desc="maximum allowed bias value", argstr="-U %f" ) biasRange = traits.Enum( "low", "medium", "high", desc="""\ Preset options for bias_model * low: small bias model [0.95,1.05] * medium: medium bias model [0.90,1.10] * high: high bias model [0.80,1.20] """, argstr="%s", ) intermediate_file_type = traits.Enum( "analyze", "nifti", "gzippedAnalyze", "gzippedNifti", desc="Options for the format in which intermediate files are 
generated", argstr="%s", ) convergenceThreshold = traits.Float(desc="convergence threshold", argstr="--eps %f") biasEstimateConvergenceThreshold = traits.Float( desc="bias estimate convergence threshold (values > 0.1 disable)", argstr="--beps %f", ) verbosityLevel = traits.Int(desc="verbosity level (0=silent)", argstr="-v %d") timer = traits.Bool(desc="display timing information", argstr="--timer") class BfcOutputSpec(TraitedSpec): outputMRIVolume = File(desc="path/name of output file") outputBiasField = File(desc="path/name of bias field output file") outputMaskedBiasField = File(desc="path/name of masked bias field output") correctionScheduleFile = File(desc="path/name of schedule file") class Bfc(CommandLine): """ bias field corrector (BFC) This program corrects gain variation in T1-weighted MRI. http://brainsuite.org/processing/surfaceextraction/bfc/ Examples -------- >>> from nipype.interfaces import brainsuite >>> from nipype.testing import example_data >>> bfc = brainsuite.Bfc() >>> bfc.inputs.inputMRIFile = example_data('structural.nii') >>> bfc.inputs.inputMaskFile = example_data('mask.nii') >>> results = bfc.run() #doctest: +SKIP """ input_spec = BfcInputSpec output_spec = BfcOutputSpec _cmd = "bfc" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) fileToSuffixMap = {"outputMRIVolume": ".bfc.nii.gz"} if name in fileToSuffixMap: return getFileName(self.inputs.inputMRIFile, fileToSuffixMap[name]) return None def _format_arg(self, name, spec, value): if name == "histogramType": return spec.argstr % {"ellipse": "--ellipse", "block": "--block"}[value] if name == "biasRange": return ( spec.argstr % {"low": "--low", "medium": "--medium", "high": "--high"}[value] ) if name == "intermediate_file_type": return spec.argstr % { "analyze": "--analyze", "nifti": "--nifti", "gzippedAnalyze": "--analyzegz", "gzippedNifti": "--niftigz", }[value] return super(Bfc, self)._format_arg(name, spec, value) 
def _list_outputs(self): return l_outputs(self) class PvcInputSpec(CommandLineInputSpec): inputMRIFile = File(mandatory=True, desc="MRI file", argstr="-i %s") inputMaskFile = File(desc="brain mask file", argstr="-m %s") outputLabelFile = File( desc="output label file. If unspecified, output file name will be auto generated.", argstr="-o %s", genfile=True, ) outputTissueFractionFile = File( desc="output tissue fraction file", argstr="-f %s", genfile=True ) spatialPrior = traits.Float(desc="spatial prior strength", argstr="-l %f") verbosity = traits.Int(desc="verbosity level (0 = silent)", argstr="-v %d") threeClassFlag = traits.Bool( desc="use a three-class (CSF=0,GM=1,WM=2) labeling", argstr="-3" ) timer = traits.Bool(desc="time processing", argstr="--timer") class PvcOutputSpec(TraitedSpec): outputLabelFile = File(desc="path/name of label file") outputTissueFractionFile = File(desc="path/name of tissue fraction file") class Pvc(CommandLine): """ partial volume classifier (PVC) tool. This program performs voxel-wise tissue classification T1-weighted MRI. Image should be skull-stripped and bias-corrected before tissue classification. 
http://brainsuite.org/processing/surfaceextraction/pvc/ Examples -------- >>> from nipype.interfaces import brainsuite >>> from nipype.testing import example_data >>> pvc = brainsuite.Pvc() >>> pvc.inputs.inputMRIFile = example_data('structural.nii') >>> pvc.inputs.inputMaskFile = example_data('mask.nii') >>> results = pvc.run() #doctest: +SKIP """ input_spec = PvcInputSpec output_spec = PvcOutputSpec _cmd = "pvc" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) fileToSuffixMap = { "outputLabelFile": ".pvc.label.nii.gz", "outputTissueFractionFile": ".pvc.frac.nii.gz", } if name in fileToSuffixMap: return getFileName(self.inputs.inputMRIFile, fileToSuffixMap[name]) return None def _list_outputs(self): return l_outputs(self) class CerebroInputSpec(CommandLineInputSpec): inputMRIFile = File(mandatory=True, desc="input 3D MRI volume", argstr="-i %s") inputAtlasMRIFile = File( mandatory=True, desc="atlas MRI volume", argstr="--atlas %s" ) inputAtlasLabelFile = File( mandatory=True, desc="atlas labeling", argstr="--atlaslabels %s" ) inputBrainMaskFile = File(desc="brain mask file", argstr="-m %s") outputCerebrumMaskFile = File( desc="output cerebrum mask volume. If unspecified, output file name will be auto generated.", argstr="-o %s", genfile=True, ) outputLabelVolumeFile = File( desc="output labeled hemisphere/cerebrum volume. 
If unspecified, output file name will be auto generated.", argstr="-l %s", genfile=True, ) costFunction = traits.Int(2, usedefault=True, desc="0,1,2", argstr="-c %d") useCentroids = traits.Bool( desc="use centroids of data to initialize position", argstr="--centroids" ) outputAffineTransformFile = File( desc="save affine transform to file.", argstr="--air %s", genfile=True ) outputWarpTransformFile = File( desc="save warp transform to file.", argstr="--warp %s", genfile=True ) verbosity = traits.Int(desc="verbosity level (0=silent)", argstr="-v %d") linearConvergence = traits.Float(desc="linear convergence", argstr="--linconv %f") warpLabel = traits.Int(desc="warp order (2,3,4,5,6,7,8)", argstr="--warplevel %d") warpConvergence = traits.Float(desc="warp convergence", argstr="--warpconv %f") keepTempFiles = traits.Bool(desc="don't remove temporary files", argstr="--keep") tempDirectory = traits.Str( desc="specify directory to use for temporary files", argstr="--tempdir %s" ) tempDirectoryBase = traits.Str( desc="create a temporary directory within this directory", argstr="--tempdirbase %s", ) class CerebroOutputSpec(TraitedSpec): outputCerebrumMaskFile = File(desc="path/name of cerebrum mask file") outputLabelVolumeFile = File(desc="path/name of label mask file") outputAffineTransformFile = File(desc="path/name of affine transform file") outputWarpTransformFile = File(desc="path/name of warp transform file") class Cerebro(CommandLine): """ Cerebrum/cerebellum labeling tool This program performs automated labeling of cerebellum and cerebrum in T1 MRI. Input MRI should be skull-stripped or a brain-only mask should be provided. 
http://brainsuite.org/processing/surfaceextraction/cerebrum/ Examples -------- >>> from nipype.interfaces import brainsuite >>> from nipype.testing import example_data >>> cerebro = brainsuite.Cerebro() >>> cerebro.inputs.inputMRIFile = example_data('structural.nii') >>> cerebro.inputs.inputAtlasMRIFile = 'atlasMRIVolume.img' >>> cerebro.inputs.inputAtlasLabelFile = 'atlasLabels.img' >>> cerebro.inputs.inputBrainMaskFile = example_data('mask.nii') >>> results = cerebro.run() #doctest: +SKIP """ input_spec = CerebroInputSpec output_spec = CerebroOutputSpec _cmd = "cerebro" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) fileToSuffixMap = { "outputCerebrumMaskFile": ".cerebrum.mask.nii.gz", "outputLabelVolumeFile": ".hemi.label.nii.gz", "outputWarpTransformFile": ".warp", "outputAffineTransformFile": ".air", } if name in fileToSuffixMap: return getFileName(self.inputs.inputMRIFile, fileToSuffixMap[name]) return None def _list_outputs(self): return l_outputs(self) class CortexInputSpec(CommandLineInputSpec): inputHemisphereLabelFile = File( mandatory=True, desc="hemisphere / lobe label volume", argstr="-h %s" ) outputCerebrumMask = File( desc="output structure mask. 
If unspecified, output file name will be auto generated.", argstr="-o %s", genfile=True, ) inputTissueFractionFile = File( mandatory=True, desc="tissue fraction file (32-bit float)", argstr="-f %s" ) tissueFractionThreshold = traits.Float( 50.0, usedefault=True, desc="tissue fraction threshold (percentage)", argstr="-p %f", ) computeWGBoundary = traits.Bool( True, usedefault=True, desc="compute WM/GM boundary", argstr="-w" ) computeGCBoundary = traits.Bool(desc="compute GM/CSF boundary", argstr="-g") includeAllSubcorticalAreas = traits.Bool( True, usedefault=True, desc="include all subcortical areas in WM mask", argstr="-a", ) verbosity = traits.Int(desc="verbosity level", argstr="-v %d") timer = traits.Bool(desc="timing function", argstr="--timer") class CortexOutputSpec(TraitedSpec): outputCerebrumMask = File(desc="path/name of cerebrum mask") class Cortex(CommandLine): """ cortex extractor This program produces a cortical mask using tissue fraction estimates and a co-registered cerebellum/hemisphere mask. http://brainsuite.org/processing/surfaceextraction/cortex/ Examples -------- >>> from nipype.interfaces import brainsuite >>> from nipype.testing import example_data >>> cortex = brainsuite.Cortex() >>> cortex.inputs.inputHemisphereLabelFile = example_data('mask.nii') >>> cortex.inputs.inputTissueFractionFile = example_data('tissues.nii.gz') >>> results = cortex.run() #doctest: +SKIP """ input_spec = CortexInputSpec output_spec = CortexOutputSpec _cmd = "cortex" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) if name == "outputCerebrumMask": return getFileName( self.inputs.inputHemisphereLabelFile, ".init.cortex.mask.nii.gz" ) return None def _list_outputs(self): return l_outputs(self) class ScrubmaskInputSpec(CommandLineInputSpec): inputMaskFile = File( mandatory=True, desc="input structure mask file", argstr="-i %s" ) outputMaskFile = File( desc="output structure mask file. 
If unspecified, output file name will be auto generated.", argstr="-o %s", genfile=True, ) backgroundFillThreshold = traits.Int( 2, usedefault=True, desc="background fill threshold", argstr="-b %d" ) foregroundTrimThreshold = traits.Int( 0, usedefault=True, desc="foreground trim threshold", argstr="-f %d" ) numberIterations = traits.Int(desc="number of iterations", argstr="-n %d") verbosity = traits.Int(desc="verbosity (0=silent)", argstr="-v %d") timer = traits.Bool(desc="timing function", argstr="--timer") class ScrubmaskOutputSpec(TraitedSpec): outputMaskFile = File(desc="path/name of mask file") class Scrubmask(CommandLine): """ ScrubMask tool scrubmask filters binary masks to trim loosely connected voxels that may result from segmentation errors and produce bumps on tessellated surfaces. http://brainsuite.org/processing/surfaceextraction/scrubmask/ Examples -------- >>> from nipype.interfaces import brainsuite >>> from nipype.testing import example_data >>> scrubmask = brainsuite.Scrubmask() >>> scrubmask.inputs.inputMaskFile = example_data('mask.nii') >>> results = scrubmask.run() #doctest: +SKIP """ input_spec = ScrubmaskInputSpec output_spec = ScrubmaskOutputSpec _cmd = "scrubmask" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) if name == "outputMaskFile": return getFileName( self.inputs.inputMaskFile, ".cortex.scrubbed.mask.nii.gz" ) return None def _list_outputs(self): return l_outputs(self) class TcaInputSpec(CommandLineInputSpec): inputMaskFile = File(mandatory=True, desc="input mask volume", argstr="-i %s") outputMaskFile = File( desc="output mask volume. 
If unspecified, output file name will be auto generated.", argstr="-o %s", genfile=True, ) minCorrectionSize = traits.Int( 2500, usedefault=True, desc="maximum correction size", argstr="-m %d" ) maxCorrectionSize = traits.Int(desc="minimum correction size", argstr="-n %d") foregroundDelta = traits.Int( 20, usedefault=True, desc="foreground delta", argstr="--delta %d" ) verbosity = traits.Int(desc="verbosity (0 = quiet)", argstr="-v %d") timer = traits.Bool(desc="timing function", argstr="--timer") class TcaOutputSpec(TraitedSpec): outputMaskFile = File(desc="path/name of mask file") class Tca(CommandLine): """ topological correction algorithm (TCA) This program removes topological handles from a binary object. http://brainsuite.org/processing/surfaceextraction/tca/ Examples -------- >>> from nipype.interfaces import brainsuite >>> from nipype.testing import example_data >>> tca = brainsuite.Tca() >>> tca.inputs.inputMaskFile = example_data('mask.nii') >>> results = tca.run() #doctest: +SKIP """ input_spec = TcaInputSpec output_spec = TcaOutputSpec _cmd = "tca" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) if name == "outputMaskFile": return getFileName(self.inputs.inputMaskFile, ".cortex.tca.mask.nii.gz") return None def _list_outputs(self): return l_outputs(self) class DewispInputSpec(CommandLineInputSpec): inputMaskFile = File(mandatory=True, desc="input file", argstr="-i %s") outputMaskFile = File( desc="output file. 
If unspecified, output file name will be auto generated.", argstr="-o %s", genfile=True, ) verbosity = traits.Int(desc="verbosity", argstr="-v %d") sizeThreshold = traits.Int(desc="size threshold", argstr="-t %d") maximumIterations = traits.Int(desc="maximum number of iterations", argstr="-n %d") timer = traits.Bool(desc="time processing", argstr="--timer") class DewispOutputSpec(TraitedSpec): outputMaskFile = File(desc="path/name of mask file") class Dewisp(CommandLine): """ dewisp removes wispy tendril structures from cortex model binary masks. It does so based on graph theoretic analysis of connected components, similar to TCA. Each branch of the structure graph is analyzed to determine pinch points that indicate a likely error in segmentation that attaches noise to the image. The pinch threshold determines how many voxels the cross-section can be before it is considered part of the image. http://brainsuite.org/processing/surfaceextraction/dewisp/ Examples -------- >>> from nipype.interfaces import brainsuite >>> from nipype.testing import example_data >>> dewisp = brainsuite.Dewisp() >>> dewisp.inputs.inputMaskFile = example_data('mask.nii') >>> results = dewisp.run() #doctest: +SKIP """ input_spec = DewispInputSpec output_spec = DewispOutputSpec _cmd = "dewisp" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) if name == "outputMaskFile": return getFileName(self.inputs.inputMaskFile, ".cortex.dewisp.mask.nii.gz") return None def _list_outputs(self): return l_outputs(self) class DfsInputSpec(CommandLineInputSpec): inputVolumeFile = File(mandatory=True, desc="input 3D volume", argstr="-i %s") outputSurfaceFile = File( desc="output surface mesh file. 
If unspecified, output file name will be auto generated.", argstr="-o %s", genfile=True, ) inputShadingVolume = File( desc="shade surface model with data from image volume", argstr="-c %s" ) smoothingIterations = traits.Int( 10, usedefault=True, desc="number of smoothing iterations", argstr="-n %d" ) smoothingConstant = traits.Float( 0.5, usedefault=True, desc="smoothing constant", argstr="-a %f" ) curvatureWeighting = traits.Float( 5.0, usedefault=True, desc="curvature weighting", argstr="-w %f" ) scalingPercentile = traits.Float(desc="scaling percentile", argstr="-f %f") nonZeroTessellation = traits.Bool( desc="tessellate non-zero voxels", argstr="-nz", xor=("nonZeroTessellation", "specialTessellation"), ) tessellationThreshold = traits.Float( desc="To be used with specialTessellation. Set this value first, then set specialTessellation value.\nUsage: tessellate voxels greater_than, less_than, or equal_to ", argstr="%f", ) specialTessellation = traits.Enum( "greater_than", "less_than", "equal_to", desc="To avoid throwing a UserWarning, set tessellationThreshold first. Then set this attribute.\nUsage: tessellate voxels greater_than, less_than, or equal_to ", argstr="%s", xor=("nonZeroTessellation", "specialTessellation"), requires=["tessellationThreshold"], position=-1, ) zeroPadFlag = traits.Bool( desc="zero-pad volume (avoids clipping at edges)", argstr="-z" ) noNormalsFlag = traits.Bool( desc="do not compute vertex normals", argstr="--nonormals" ) postSmoothFlag = traits.Bool( desc="smooth vertices after coloring", argstr="--postsmooth" ) verbosity = traits.Int(desc="verbosity (0 = quiet)", argstr="-v %d") timer = traits.Bool(desc="timing function", argstr="--timer") class DfsOutputSpec(TraitedSpec): outputSurfaceFile = File(desc="path/name of surface file") class Dfs(CommandLine): """ Surface Generator Generates mesh surfaces using an isosurface algorithm. 
http://brainsuite.org/processing/surfaceextraction/inner-cortical-surface/ Examples -------- >>> from nipype.interfaces import brainsuite >>> from nipype.testing import example_data >>> dfs = brainsuite.Dfs() >>> dfs.inputs.inputVolumeFile = example_data('structural.nii') >>> results = dfs.run() #doctest: +SKIP """ input_spec = DfsInputSpec output_spec = DfsOutputSpec _cmd = "dfs" def _format_arg(self, name, spec, value): if name == "tessellationThreshold": return "" # blank argstr if name == "specialTessellation": threshold = self.inputs.tessellationThreshold return spec.argstr % { "greater_than": "".join(("-gt %f" % threshold)), "less_than": "".join(("-lt %f" % threshold)), "equal_to": "".join(("-eq %f" % threshold)), }[value] return super(Dfs, self)._format_arg(name, spec, value) def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) if name == "outputSurfaceFile": return getFileName(self.inputs.inputVolumeFile, ".inner.cortex.dfs") return None def _list_outputs(self): return l_outputs(self) class PialmeshInputSpec(CommandLineInputSpec): inputSurfaceFile = File(mandatory=True, desc="input file", argstr="-i %s") outputSurfaceFile = File( desc="output file. 
If unspecified, output file name will be auto generated.", argstr="-o %s", genfile=True, ) verbosity = traits.Int(desc="verbosity", argstr="-v %d") inputTissueFractionFile = File( mandatory=True, desc="floating point (32) tissue fraction image", argstr="-f %s" ) numIterations = traits.Int( 100, usedefault=True, desc="number of iterations", argstr="-n %d" ) searchRadius = traits.Float( 1, usedefault=True, desc="search radius", argstr="-r %f" ) stepSize = traits.Float(0.4, usedefault=True, desc="step size", argstr="-s %f") inputMaskFile = File( mandatory=True, desc="restrict growth to mask file region", argstr="-m %s" ) maxThickness = traits.Float( 20, usedefault=True, desc="maximum allowed tissue thickness", argstr="--max %f" ) tissueThreshold = traits.Float( 1.05, usedefault=True, desc="tissue threshold", argstr="-t %f" ) # output interval is not an output -- it specifies how frequently the # output surfaces are generated outputInterval = traits.Int( 10, usedefault=True, desc="output interval", argstr="--interval %d" ) exportPrefix = traits.Str( desc="prefix for exporting surfaces if interval is set", argstr="--prefix %s" ) laplacianSmoothing = traits.Float( 0.025, usedefault=True, desc="apply Laplacian smoothing", argstr="--smooth %f" ) timer = traits.Bool(desc="show timing", argstr="--timer") recomputeNormals = traits.Bool( desc="recompute normals at each iteration", argstr="--norm" ) normalSmoother = traits.Float( 0.2, usedefault=True, desc="strength of normal smoother.", argstr="--nc %f" ) tangentSmoother = traits.Float( desc="strength of tangential smoother.", argstr="--tc %f" ) class PialmeshOutputSpec(TraitedSpec): outputSurfaceFile = File(desc="path/name of surface file") class Pialmesh(CommandLine): """ pialmesh computes a pial surface model using an inner WM/GM mesh and a tissue fraction map. 
http://brainsuite.org/processing/surfaceextraction/pial/ Examples -------- >>> from nipype.interfaces import brainsuite >>> from nipype.testing import example_data >>> pialmesh = brainsuite.Pialmesh() >>> pialmesh.inputs.inputSurfaceFile = 'input_mesh.dfs' >>> pialmesh.inputs.inputTissueFractionFile = 'frac_file.nii.gz' >>> pialmesh.inputs.inputMaskFile = example_data('mask.nii') >>> results = pialmesh.run() #doctest: +SKIP """ input_spec = PialmeshInputSpec output_spec = PialmeshOutputSpec _cmd = "pialmesh" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) if name == "outputSurfaceFile": return getFileName(self.inputs.inputSurfaceFile, ".pial.cortex.dfs") return None def _list_outputs(self): return l_outputs(self) class HemisplitInputSpec(CommandLineInputSpec): inputSurfaceFile = File(mandatory=True, desc="input surface", argstr="-i %s") inputHemisphereLabelFile = File( mandatory=True, desc="input hemisphere label volume", argstr="-l %s" ) outputLeftHemisphere = File( desc="output surface file, left hemisphere. If unspecified, output file name will be auto generated.", argstr="--left %s", genfile=True, ) outputRightHemisphere = File( desc="output surface file, right hemisphere. If unspecified, output file name will be auto generated.", argstr="--right %s", genfile=True, ) pialSurfaceFile = File( desc="pial surface file -- must have same geometry as input surface", argstr="-p %s", ) outputLeftPialHemisphere = File( desc="output pial surface file, left hemisphere. If unspecified, output file name will be auto generated.", argstr="-pl %s", genfile=True, ) outputRightPialHemisphere = File( desc="output pial surface file, right hemisphere. 
If unspecified, output file name will be auto generated.", argstr="-pr %s", genfile=True, ) verbosity = traits.Int(desc="verbosity (0 = silent)", argstr="-v %d") timer = traits.Bool(desc="timing function", argstr="--timer") class HemisplitOutputSpec(TraitedSpec): outputLeftHemisphere = File(desc="path/name of left hemisphere") outputRightHemisphere = File(desc="path/name of right hemisphere") outputLeftPialHemisphere = File(desc="path/name of left pial hemisphere") outputRightPialHemisphere = File(desc="path/name of right pial hemisphere") class Hemisplit(CommandLine): """ Hemisphere splitter Splits a surface object into two separate surfaces given an input label volume. Each vertex is labeled left or right based on the labels being odd (left) or even (right). The largest contour on the split surface is then found and used as the separation between left and right. Examples -------- >>> from nipype.interfaces import brainsuite >>> from nipype.testing import example_data >>> hemisplit = brainsuite.Hemisplit() >>> hemisplit.inputs.inputSurfaceFile = 'input_surf.dfs' >>> hemisplit.inputs.inputHemisphereLabelFile = 'label.nii' >>> hemisplit.inputs.pialSurfaceFile = 'pial.dfs' >>> results = hemisplit.run() #doctest: +SKIP """ input_spec = HemisplitInputSpec output_spec = HemisplitOutputSpec _cmd = "hemisplit" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) fileToSuffixMap = { "outputLeftHemisphere": ".left.inner.cortex.dfs", "outputLeftPialHemisphere": ".left.pial.cortex.dfs", "outputRightHemisphere": ".right.inner.cortex.dfs", "outputRightPialHemisphere": ".right.pial.cortex.dfs", } if name in fileToSuffixMap: return getFileName(self.inputs.inputSurfaceFile, fileToSuffixMap[name]) return None def _list_outputs(self): return l_outputs(self) class SkullfinderInputSpec(CommandLineInputSpec): inputMRIFile = File(mandatory=True, desc="input file", argstr="-i %s") inputMaskFile = File( mandatory=True, 
desc="A brain mask file, 8-bit image (0=non-brain, 255=brain)", argstr="-m %s", ) outputLabelFile = File( desc="output multi-colored label volume segmenting brain, scalp, inner skull & outer skull " "If unspecified, output file name will be auto generated.", argstr="-o %s", genfile=True, ) verbosity = traits.Int(desc="verbosity", argstr="-v %d") lowerThreshold = traits.Int(desc="Lower threshold for segmentation", argstr="-l %d") upperThreshold = traits.Int(desc="Upper threshold for segmentation", argstr="-u %d") surfaceFilePrefix = traits.Str( desc="if specified, generate surface files for brain, skull, and scalp", argstr="-s %s", ) bgLabelValue = traits.Int( desc="background label value (0-255)", argstr="--bglabel %d" ) scalpLabelValue = traits.Int( desc="scalp label value (0-255)", argstr="--scalplabel %d" ) skullLabelValue = traits.Int( desc="skull label value (0-255)", argstr="--skulllabel %d" ) spaceLabelValue = traits.Int( desc="space label value (0-255)", argstr="--spacelabel %d" ) brainLabelValue = traits.Int( desc="brain label value (0-255)", argstr="--brainlabel %d" ) performFinalOpening = traits.Bool( desc="perform a final opening operation on the scalp mask", argstr="--finalOpening", ) class SkullfinderOutputSpec(TraitedSpec): outputLabelFile = File(desc="path/name of label file") class Skullfinder(CommandLine): """ Skull and scalp segmentation algorithm. 
Examples -------- >>> from nipype.interfaces import brainsuite >>> from nipype.testing import example_data >>> skullfinder = brainsuite.Skullfinder() >>> skullfinder.inputs.inputMRIFile = example_data('structural.nii') >>> skullfinder.inputs.inputMaskFile = example_data('mask.nii') >>> results = skullfinder.run() #doctest: +SKIP """ input_spec = SkullfinderInputSpec output_spec = SkullfinderOutputSpec _cmd = "skullfinder" def _gen_filename(self, name): inputs = self.inputs.get() if isdefined(inputs[name]): return os.path.abspath(inputs[name]) if name == "outputLabelFile": return getFileName(self.inputs.inputMRIFile, ".skullfinder.label.nii.gz") return None def _list_outputs(self): return l_outputs(self) class SVRegInputSpec(CommandLineInputSpec): subjectFilePrefix = traits.Str( argstr="'%s'", mandatory=True, position=0, desc="Absolute path and filename prefix of the subjects output from BrainSuite " "Cortical Surface Extraction Sequence", ) dataSinkDelay = traits.List( traits.Str, argstr="%s", desc="Connect datasink out_file to dataSinkDelay to delay execution of SVReg " "until dataSink has finished sinking CSE outputs." "For use with parallel processing workflows including Brainsuites Cortical " "Surface Extraction sequence (SVReg requires certain files from Brainsuite " "CSE, which must all be in the pathway specified by subjectFilePrefix. see " "http://brainsuite.org/processing/svreg/usage/ for list of required inputs ", ) atlasFilePrefix = traits.Str( position=1, argstr="'%s'", desc="Optional: Absolute Path and filename prefix of atlas files and labels to which " "the subject will be registered. If unspecified, SVReg" "will use its own included atlas files", ) iterations = traits.Int( argstr="'-H %d'", desc="Assigns a number of iterations in the intensity registration step." "if unspecified, performs 100 iterations", ) refineOutputs = traits.Bool( argstr="'-r'", desc="Refine outputs at the expense of more processing time." 
) skipToVolumeReg = traits.Bool( argstr="'-s'", desc="If surface registration was already performed at an earlier time and the " "user would not like to redo this step, then this flag may be used to skip " "ahead to the volumetric registration. Necessary input files will need to " "be present in the input directory called by the command.", ) skipToIntensityReg = traits.Bool( argstr="'-p'", desc="If the p-harmonic volumetric registration was already performed at an " "earlier time and the user would not like to redo this step, then this " "flag may be used to skip ahead to the intensity registration and " "label transfer step.", ) useManualMaskFile = traits.Bool( argstr="'-cbm'", desc="Can call a manually edited cerebrum mask to limit boundaries. Will " "use file: subbasename.cerebrum.mask.nii.gz Make sure to correctly " "replace your manually edited mask file in your input folder with the " "correct subbasename.", ) curveMatchingInstructions = traits.Str( argstr="'-cur %s'", desc="Used to take control of the curve matching process between the atlas " "and subject. One can specify the name of the .dfc file and " "the sulcal numbers <#sul> to be used as constraints. " 'example: curveMatchingInstructions = "subbasename.right.dfc 1 2 20"', ) useCerebrumMask = traits.Bool( argstr="'-C'", desc="The cerebrum mask will be used for " "masking the final labels instead of the default pial surface mask. " "Every voxel will be labeled within the cerebrum mask regardless of " "the boundaries of the pial surface.", ) pialSurfaceMaskDilation = traits.Int( argstr="'-D %d'", desc="Cortical volume labels found in file output subbasename.svreg.label.nii.gz " "find its boundaries by using the pial surface then dilating by 1 voxel. " "Use this flag in order to control the number of pial surface mask dilation. " "(ie. 
-D 0 will assign no voxel dilation)", ) keepIntermediates = traits.Bool( argstr="'-k'", desc="Keep the intermediate files after the svreg sequence is complete.", ) _XOR_verbosity = ("verbosity0", "verbosity1", "verbosity2") verbosity0 = traits.Bool( argstr="'-v0'", xor=_XOR_verbosity, desc="no messages will be reported" ) verbosity1 = traits.Bool( argstr="'-v1'", xor=_XOR_verbosity, desc="messages will be reported but not the iteration-wise detailed messages", ) verbosity2 = traits.Bool( argstr="'v2'", xor=_XOR_verbosity, desc="all the messages, including per-iteration, will be displayed", ) shortMessages = traits.Bool( argstr="'-gui'", desc="Short messages instead of detailed messages" ) displayModuleName = traits.Bool( argstr="'-m'", desc="Module name will be displayed in the messages" ) displayTimestamps = traits.Bool( argstr="'-t'", desc="Timestamps will be displayed in the messages" ) skipVolumetricProcessing = traits.Bool( argstr="'-S'", desc="Only surface registration and labeling will be performed. Volumetric " "processing will be skipped.", ) useMultiThreading = traits.Bool( argstr="'-P'", desc="If multiple CPUs are present on the system, the code will try to use " "multithreading to make the execution fast.", ) useSingleThreading = traits.Bool(argstr="'-U'", desc="Use single threaded mode.") class SVReg(CommandLine): """ surface and volume registration (svreg) This program registers a subject's BrainSuite-processed volume and surfaces to an atlas, allowing for automatic labelling of volume and surface ROIs. For more information, please see: http://brainsuite.org/processing/svreg/usage/ Examples -------- >>> from nipype.interfaces import brainsuite >>> svreg = brainsuite.SVReg() >>> svreg.inputs.subjectFilePrefix = 'home/user/btestsubject/testsubject' >>> svreg.inputs.refineOutputs = True >>> svreg.inputs.skipToVolumeReg = False >>> svreg.inputs. 
keepIntermediates = True >>> svreg.inputs.verbosity2 = True >>> svreg.inputs.displayTimestamps = True >>> svreg.inputs.useSingleThreading = True >>> results = svreg.run() #doctest: +SKIP """ input_spec = SVRegInputSpec _cmd = "svreg.sh" def _format_arg(self, name, spec, value): if ( name == "subjectFilePrefix" or name == "atlasFilePrefix" or name == "curveMatchingInstructions" ): return spec.argstr % os.path.expanduser(value) if name == "dataSinkDelay": return spec.argstr % "" return super(SVReg, self)._format_arg(name, spec, value) class BDPInputSpec(CommandLineInputSpec): bfcFile = File( argstr="%s", mandatory=True, position=0, xor=["noStructuralRegistration"], desc="Specify absolute path to file produced by bfc. By default, bfc produces the file in " "the format: prefix.bfc.nii.gz", ) noStructuralRegistration = traits.Bool( argstr="--no-structural-registration", mandatory=True, position=0, xor=["bfcFile"], desc="Allows BDP to work without any structural input. This can useful when " "one is only interested in diffusion modelling part of BDP. With this " "flag only fieldmap-based distortion correction is supported. " "outPrefix can be used to specify fileprefix of the output " "filenames. Change dwiMask to define region of interest " "for diffusion modelling.", ) inputDiffusionData = File( argstr="--nii %s", mandatory=True, position=-2, desc="Specifies the absolute path and filename of the input diffusion data in 4D NIfTI-1 " "format. The flag must be followed by the filename. Only NIfTI-1 files " "with extension .nii or .nii.gz are supported. Furthermore, either " "bMatrixFile, or a combination of both bValueFile and diffusionGradientFile " "must be used to provide the necessary b-matrices/b-values and gradient vectors. ", ) bMatrixFile = File( argstr="--bmat %s", mandatory=True, xor=["BVecBValPair"], position=-1, desc="Specifies the absolute path and filename of the file containing b-matrices for " "diffusion-weighted scans. 
The flag must be followed by the filename. " "This file must be a plain text file containing 3x3 matrices for each " "diffusion encoding direction. It should contain zero matrices " 'corresponding to b=0 images. This file usually has ".bmat" as its ' "extension, and can be used to provide BDP with the more-accurate " "b-matrices as saved by some proprietary scanners. The b-matrices " "specified by the file must be in the voxel coordinates of the input " "diffusion weighted image (NIfTI file). In case b-matrices are not known/calculated, " "bvec and .bval files can be used instead (see diffusionGradientFile and bValueFile). ", ) BVecBValPair = traits.List( traits.Str, minlen=2, maxlen=2, mandatory=True, position=-1, xor=["bMatrixFile"], argstr="--bvec %s --bval %s", desc="Must input a list containing first the BVector file, then the BValue file (both must be absolute paths)\n" "Example: bdp.inputs.BVecBValPair = ['/directory/subdir/prefix.dwi.bvec', '/directory/subdir/prefix.dwi.bval'] " "The first item in the list specifies the filename of the file containing b-values for the " "diffusion scan. The b-value file must be a plain-text file and usually has an " "extension of .bval\n" "The second item in the list specifies the filename of the file containing the diffusion gradient " "directions (specified in the voxel coordinates of the input " "diffusion-weighted image)The b-vectors file must be a plain text file and " "usually has an extension of .bvec ", ) dataSinkDelay = traits.List( traits.Str, argstr="%s", desc="For use in parallel processing workflows including Brainsuite Cortical " "Surface Extraction sequence. Connect datasink out_file to dataSinkDelay " "to delay execution of BDP until dataSink has finished sinking outputs. " "In particular, BDP may be run after BFC has finished. 
For more information " "see http://brainsuite.org/processing/diffusion/pipeline/", ) phaseEncodingDirection = traits.Enum( "x", "x-", "y", "y-", "z", "z-", argstr="--dir=%s", desc="Specifies the phase-encoding direction of the EPI (diffusion) images. " "It is same as the dominant direction of distortion in the images. This " "information is used to constrain the distortion correction along the " "specified direction. Directions are represented by any one of x, x-, y, " 'y-, z or z-. "x" direction increases towards the right side of the ' 'subject, while "x-" increases towards the left side of the subject. ' 'Similarly, "y" and "y-" are along the anterior-posterior direction of ' 'the subject, and "z" & "z-" are along the inferior-superior direction. ' 'When this flag is not used, BDP uses "y" as the default phase-encoding ' "direction. ", ) echoSpacing = traits.Float( argstr="--echo-spacing=%f", desc="Sets the echo spacing to t seconds, which is used for fieldmap-based " "distortion correction. This flag is required when using fieldmapCorrection", ) bValRatioThreshold = traits.Float( argstr="--bval-ratio-threshold %f", desc="Sets a threshold which is used to determine b=0 images. When there are " "no diffusion weighted image with b-value of zero, then BDP tries to use " "diffusion weighted images with a low b-value in place of b=0 image. The " "diffusion images with minimum b-value is used as b=0 image only if the " "ratio of the maximum and minimum b-value is more than the specified " "threshold. A lower value of threshold will allow diffusion images with " "higher b-value to be used as b=0 image. The default value of this " "threshold is set to 45, if this trait is not set. ", ) estimateTensors = traits.Bool( argstr="--tensors", desc="Estimates diffusion tensors using a weighted log-linear estimation and " "saves derived diffusion tensor parameters (FA, MD, axial, radial, L2, " "L3). This is the default behavior if no diffusion modeling flags are " "specified. 
The estimated diffusion tensors can be visualized by loading " "the saved ``*.eig.nii.gz`` file in BrainSuite. BDP reports diffusivity (MD, " "axial, radial, L2 and L3) in a unit which is reciprocal inverse of the " "unit of input b-value. ", ) estimateODF_FRACT = traits.Bool( argstr="--FRACT", desc="Estimates ODFs using the Funk-Radon and Cosine Transformation (FRACT). " 'The outputs are saved in a separate directory with name "FRACT" and the ' 'ODFs can be visualized by loading the saved ".odf" file in BrainSuite. ', ) estimateODF_FRT = traits.Bool( argstr="--FRT", desc="Estimates ODFs using Funk-Radon Transformation (FRT). The coefficient " 'maps for ODFs are saved in a separate directory with name "FRT" and the ' 'ODFs can be visualized by loading the saved ".odf" file in BrainSuite. ' "The derived generalized-FA (GFA) maps are also saved in the output " "directory. ", ) estimateODF_3DShore = traits.Float( argstr="--3dshore --diffusion_time_ms %f", desc="Estimates ODFs using 3Dshore. Pass in diffusion time, in ms", ) odfLambta = traits.Bool( argstr="--odf-lambda ", desc="Sets the regularization parameter, lambda, of the Laplace-Beltrami " "operator while estimating ODFs. The default value is set to 0.006 . This " "can be used to set the appropriate regularization for the input " "diffusion data. ", ) t1Mask = File( argstr="--t1-mask %s", desc="Specifies the filename of the brain-mask file for input T1-weighted " "image. This mask can be same as the brain mask generated during " "BrainSuite extraction sequence. For best results, the mask should not " "include any extra-meningial tissues from T1-weighted image. The mask " "must be in the same coordinates as input T1-weighted image (i.e. should " "overlay correctly with input .bfc.nii.gz file in " "BrainSuite). This mask is used for co-registration and defining brain " "boundary for statistics computation. The mask can be generated and/or " "edited in BrainSuite. 
In case outputDiffusionCoordinates is also " "used, this mask is first transformed to diffusion coordinate and the " "transformed mask is used for defining brain boundary in diffusion " "coordinates. When t1Mask is not set, BDP will try to use " "fileprefix>.mask.nii.gz as brain-mask. If .mask.nii.gz is " "not found, then BDP will use the input .bfc.nii.gz itself as " "mask (i.e. all non-zero voxels in .bfc.nii.gz is assumed to " "constitute brain mask). ", ) dwiMask = File( argstr="--dwi-mask %s", desc="Specifies the filename of the brain-mask file for diffusion data. This " "mask is used only for co-registration purposes and can affect overall " "quality of co-registration (see t1Mask for definition of brain mask " "for statistics computation). The mask must be a 3D volume and should be " "in the same coordinates as input Diffusion file/data (i.e. should " "overlay correctly with input diffusion data in BrainSuite). For best " "results, the mask should include only brain voxels (CSF voxels around " "brain is also acceptable). When this flag is not used, BDP will generate " "a pseudo mask using first b=0 image volume and would save it as " "fileprefix>.dwi.RSA.mask.nii.gz. In case co-registration is not " "accurate with automatically generated pseudo mask, BDP should be re-run " "with a refined diffusion mask. The mask can be generated and/or edited " "in BrainSuite. ", ) rigidRegMeasure = traits.Enum( "MI", "INVERSION", "BDP", argstr="--rigid-reg-measure %s", desc="Defines the similarity measure to be used for rigid registration. " 'Possible measures are "MI", "INVERSION" and "BDP". MI measure uses ' "normalized mutual information based cost function. INVERSION measure " "uses simpler cost function based on sum of squared difference by " "exploiting the approximate inverse-contrast relationship in T1- and " "T2-weighted images. BDP measure combines MI and INVERSION. It starts " "with INVERSION measure and refines the result with MI measure. 
BDP is " "the default measure when this trait is not set. ", ) dcorrRegMeasure = traits.Enum( "MI", "INVERSION-EPI", "INVERSION-T1", "INVERSION-BOTH", "BDP", argstr="--dcorr-reg-method %s", desc="Defines the method for registration-based distortion correction. " 'Possible methods are "MI", "INVERSION-EPI", "INVERSION-T1", ' 'INVERSION-BOTH", and "BDP". MI method uses normalized mutual ' "information based cost-function while estimating the distortion field. " "INVERSION-based method uses simpler cost function based on sum of " "squared difference by exploiting the known approximate contrast " "relationship in T1- and T2-weighted images. T2-weighted EPI is inverted " "when INVERSION-EPI is used; T1-image is inverted when INVERSION-T1 is " "used; and both are inverted when INVERSION-BOTH is used. BDP method add " "the MI-based refinement after the correction using INVERSION-BOTH " "method. BDP is the default method when this trait is not set. ", ) dcorrWeight = traits.Float( argstr="--dcorr-regularization-wt %f", desc="Sets the (scalar) weighting parameter for regularization penalty in " "registration-based distortion correction. Set this trait to a single, non-negative " "number which specifies the weight. A large regularization weight encourages " "smoother distortion field at the cost of low measure of image similarity " "after distortion correction. On the other hand, a smaller regularization " "weight can result into higher measure of image similarity but with " "unrealistic and unsmooth distortion field. A weight of 0.5 would reduce " "the penalty to half of the default regularization penalty (By default, this weight " "is set to 1.0). Similarly, a weight of 2.0 " "would increase the penalty to twice of the default penalty. ", ) skipDistortionCorr = traits.Bool( argstr="--no-distortion-correction", desc="Skips distortion correction completely and performs only a rigid " "registration of diffusion and T1-weighted image. 
This can be useful when " "the input diffusion images do not have any distortion or they have been " "corrected for distortion. ", ) skipNonuniformityCorr = traits.Bool( argstr="--no-nonuniformity-correction", desc="Skips intensity non-uniformity correction in b=0 image for " "registration-based distortion correction. The intensity non-uniformity " "correction does not affect any diffusion modeling. ", ) skipIntensityCorr = traits.Bool( argstr="--no-intensity-correction", xor=["fieldmapCorrectionMethod"], desc="Disables intensity correction when performing distortion correction. " "Intensity correction can change the noise distribution in the corrected " "image, but it does not affect estimated diffusion parameters like FA, " "etc. ", ) fieldmapCorrection = File( argstr="--fieldmap-correction %s", requires=["echoSpacing"], desc="Use an acquired fieldmap for distortion correction. The fieldmap must " "have units of radians/second. Specify the filename of the fieldmap file. " "The field of view (FOV) of the fieldmap scan must cover the FOV of the diffusion " "scan. BDP will try to check the overlap of the FOV of the two scans and " 'will issue a warning/error if the diffusion scan"s FOV is not fully ' 'covered by the fieldmap"s FOV. BDP uses all of the information saved in ' "the NIfTI header to compute the FOV. If you get this error and think " "that it is incorrect, then it can be suppressed using the flag " "ignore-fieldmap-FOV. Neither the image matrix size nor the imaging " "grid resolution of the fieldmap needs to be the same as that of the " "diffusion scan, but the fieldmap must be pre-registred to the diffusion " "scan. BDP does NOT align the fieldmap to the diffusion scan, nor does it " "check the alignment of the fieldmap and diffusion scans. Only NIfTI " "files with extension of .nii or .nii.gz are supported. Fieldmap-based " "distortion correction also requires the echoSpacing. 
Also " "fieldmapCorrectionMethod allows you to define method for " "distortion correction. least squares is the default method. ", ) fieldmapCorrectionMethod = traits.Enum( "pixelshift", "leastsq", xor=["skipIntensityCorr"], argstr="--fieldmap-correction-method %s", desc="Defines the distortion correction method while using fieldmap. " 'Possible methods are "pixelshift" and "leastsq". leastsq is the default ' "method when this flag is not used. Pixel-shift (pixelshift) method uses " "image interpolation to un-distort the distorted diffusion images. Least " "squares (leastsq) method uses a physical model of distortion which is " "more accurate (and more computationally expensive) than pixel-shift " "method.", ) ignoreFieldmapFOV = traits.Bool( argstr="--ignore-fieldmap-fov", desc="Supresses the error generated by an insufficient field of view of the " "input fieldmap and continues with the processing. It is useful only when " "used with fieldmap-based distortion correction. See " "fieldmap-correction for a detailed explanation. ", ) fieldmapSmooth = traits.Float( argstr="--fieldmap-smooth3=%f", desc="Applies 3D Gaussian smoothing with a standard deviation of S " "millimeters (mm) to the input fieldmap before applying distortion " "correction. This trait is only useful with " "fieldmapCorrection. Skip this trait for no smoothing. ", ) transformDiffusionVolume = File( argstr="--transform-diffusion-volume %s", desc="This flag allows to define custom volumes in diffusion coordinate " "which would be transformed into T1 coordinate in a rigid fashion. The " "flag must be followed by the name of either a NIfTI file or of a folder " "that contains one or more NIfTI files. All of the files must be in " "diffusion coordinate, i.e. the files should overlay correctly with the " "diffusion scan in BrainSuite. Only NIfTI files with an extension of .nii " "or .nii.gz are supported. 
The transformed files are written to the " 'output directory with suffix ".T1_coord" in the filename and will not be ' "corrected for distortion, if any. The trait transformInterpolation can " "be used to define the type of interpolation that would be used (default " "is set to linear). If you are attempting to transform a label file or " 'mask file, use "nearest" interpolation method with transformInterpolation. ' "See also transformT1Volume and transformInterpolation", ) transformT1Volume = File( argstr="--transform-t1-volume %s", desc="Same as transformDiffusionVolume except that files specified must " "be in T1 coordinate, i.e. the files should overlay correctly with the " "input .bfc.nii.gz files in BrainSuite. BDP transforms these " "data/images from T1 coordinate to diffusion coordinate. The transformed " 'files are written to the output directory with suffix ".D_coord" in the ' "filename. See also transformDiffusionVolume and transformInterpolation. ", ) transformInterpolation = traits.Enum( "linear", "nearest", "cubic", "spline", argstr="--transform-interpolation %s", desc="Defines the type of interpolation method which would be used while " "transforming volumes defined by transformT1Volume and " 'transformDiffusionVolume. Possible methods are "linear", "nearest", ' '"cubic" and "spline". By default, "linear" interpolation is used. ', ) transformT1Surface = File( argstr="--transform-t1-surface %s", desc="Similar to transformT1Volume, except that this flag allows " "transforming surfaces (instead of volumes) in T1 coordinate into " "diffusion coordinate in a rigid fashion. The flag must be followed by " "the name of either a .dfs file or of a folder that contains one or more " "dfs files. All of the files must be in T1 coordinate, i.e. the files " "should overlay correctly with the T1-weighted scan in BrainSuite. The " "transformed files are written to the output directory with suffix " 'D_coord" in the filename. 
', ) transformDiffusionSurface = File( argstr="--transform-diffusion-surface %s", desc="Same as transformT1Volume, except that the .dfs files specified " "must be in diffusion coordinate, i.e. the surface files should overlay " "correctly with the diffusion scan in BrainSuite. The transformed files " 'are written to the output directory with suffix ".T1_coord" in the ' "filename. See also transformT1Volume. ", ) transformDataOnly = traits.Bool( argstr="--transform-data-only", desc="Skip all of the processing (co-registration, distortion correction and " "tensor/ODF estimation) and directly start transformation of defined " "custom volumes, mask and labels (using transformT1Volume, " "transformDiffusionVolume, transformT1Surface, " "transformDiffusionSurface, customDiffusionLabel, " "customT1Label). This flag is useful when BDP was previously run on a " "subject (or ) and some more data (volumes, mask or labels) " "need to be transformed across the T1-diffusion coordinate spaces. This " "assumes that all the necessary files were generated earlier and all of " "the other flags MUST be used in the same way as they were in the initial " "BDP run that processed the data. ", ) generateStats = traits.Bool( argstr="--generate-stats", desc="Generate ROI-wise statistics of estimated diffusion tensor parameters. " "Units of the reported statistics are same as that of the estimated " "tensor parameters (see estimateTensors). Mean, variance, and voxel counts of " "white matter(WM), grey matter(GM), and both WM and GM combined are " "written for each estimated parameter in a separate comma-seperated value " "csv) file. BDP uses the ROI labels generated by Surface-Volume " "Registration (SVReg) in the BrainSuite extraction sequence. " "Specifically, it looks for labels saved in either " "fileprefix>.svreg.corr.label.nii.gz or .svreg.label.nii.gz. " "In case both files are present, only the first file is used. 
Also see " "customDiffusionLabel and customT1Label for specifying your own " "ROIs. It is also possible to forgo computing the SVReg ROI-wise " "statistics and only compute stats with custom labels if SVReg label is " "missing. BDP also transfers (and saves) the label/mask files to " "appropriate coordinates before computing statistics. Also see " "outputDiffusionCoordinates for outputs in diffusion coordinate and " "forcePartialROIStats for an important note about field of view of " "diffusion and T1-weighted scans. ", ) onlyStats = traits.Bool( argstr="--generate-only-stats", desc="Skip all of the processing (co-registration, distortion correction and " "tensor/ODF estimation) and directly start computation of statistics. " "This flag is useful when BDP was previously run on a subject (or " "fileprefix>) and statistics need to be (re-)computed later. This " "assumes that all the necessary files were generated earlier. All of the " "other flags MUST be used in the same way as they were in the initial BDP " "run that processed the data. ", ) forcePartialROIStats = traits.Bool( argstr="--force-partial-roi-stats", desc="The field of view (FOV) of the diffusion and T1-weighted scans may " "differ significantly in some situations. This may result in partial " "acquisitions of some ROIs in the diffusion scan. By default, BDP does " "not compute statistics for partially acquired ROIs and shows warnings. " "This flag forces computation of statistics for all ROIs, including those " "which are partially acquired. When this flag is used, number of missing " "voxels are also reported for each ROI in statistics files. Number of " "missing voxels are reported in the same coordinate system as the " "statistics file. ", ) customDiffusionLabel = File( argstr="--custom-diffusion-label %s", desc="BDP supports custom ROIs in addition to those generated by BrainSuite " "SVReg) for ROI-wise statistics calculation. 
The flag must be followed " "by the name of either a file (custom ROI file) or of a folder that " "contains one or more ROI files. All of the files must be in diffusion " "coordinate, i.e. the label files should overlay correctly with the " "diffusion scan in BrainSuite. These input label files are also " "transferred (and saved) to T1 coordinate for statistics in T1 " "coordinate. BDP uses nearest-neighborhood interpolation for this " "transformation. Only NIfTI files, with an extension of .nii or .nii.gz " "are supported. In order to avoid confusion with other ROI IDs in the " "statistic files, a 5-digit ROI ID is generated for each custom label " "found and the mapping of ID to label file is saved in the file " "fileprefix>.BDP_ROI_MAP.xml. Custom label files can also be generated " "by using the label painter tool in BrainSuite. See also " "customLabelXML", ) customT1Label = File( argstr="--custom-t1-label %s", desc="Same as customDiffusionLabelexcept that the label files specified " "must be in T1 coordinate, i.e. the label files should overlay correctly " "with the T1-weighted scan in BrainSuite. If the trait " "outputDiffusionCoordinates is also used then these input label files " "are also transferred (and saved) to diffusion coordinate for statistics " "in diffusion coordinate. BDP uses nearest-neighborhood interpolation for " "this transformation. See also customLabelXML. ", ) customLabelXML = File( argstr="--custom-label-xml %s", desc="BrainSuite saves a descriptions of the SVReg labels (ROI name, ID, " "color, and description) in an .xml file " 'brainsuite_labeldescription.xml). BDP uses the ROI ID"s from this xml ' "file to report statistics. This flag allows for the use of a custom " "label description xml file. The flag must be followed by an xml " "filename. This can be useful when you want to limit the ROIs for which " "you compute statistics. You can also use custom xml files to name your " 'own ROIs (assign ID"s) for custom labels. 
BrainSuite can save a label ' "description in .xml format after using the label painter tool to create " 'a ROI label. The xml file MUST be in the same format as BrainSuite"s ' "label description file (see brainsuite_labeldescription.xml for an " "example). When this flag is used, NO 5-digit ROI ID is generated for " "custom label files and NO Statistics will be calculated for ROIs not " "identified in the custom xml file. See also customDiffusionLabel and " "customT1Label.", ) outputSubdir = traits.Str( argstr="--output-subdir %s", desc="By default, BDP writes out all the output (and intermediate) files in " "the same directory (or folder) as the BFC file. This flag allows to " "specify a sub-directory name in which output (and intermediate) files " "would be written. BDP will create the sub-directory in the same " "directory as BFC file. should be the name of the " "sub-directory without any path. This can be useful to organize all " "outputs generated by BDP in a separate sub-directory. ", ) outputDiffusionCoordinates = traits.Bool( argstr="--output-diffusion-coordinate", desc="Enables estimation of diffusion tensors and/or ODFs (and statistics if " "applicable) in the native diffusion coordinate in addition to the " "default T1-coordinate. All native diffusion coordinate files are saved " 'in a seperate folder named "diffusion_coord_outputs". In case statistics ' "computation is required, it will also transform/save all label/mask " "files required to diffusion coordinate (see generateStats for " "details). ", ) flagConfigFile = File( argstr="--flag-conf-file %s", desc="Uses the defined file to specify BDP flags which can be useful for " "batch processing. A flag configuration file is a plain text file which " 'can contain any number of BDP"s optional flags (and their parameters) ' "separated by whitespace. Everything coming after # until end-of-line is " "treated as comment and is ignored. 
If a flag is defined in configuration " "file and is also specified in the command used to run BDP, then the " "later get preference and overrides the definition in configuration " "file. ", ) outPrefix = traits.Str( argstr="--output-fileprefix %s", desc="Specifies output fileprefix when noStructuralRegistration is " "used. The fileprefix can not start with a dash (-) and should be a " "simple string reflecting the absolute path to desired location, along with outPrefix. When this flag is " "not specified (and noStructuralRegistration is used) then the output " "files have same file-base as the input diffusion file. This trait is " "ignored when noStructuralRegistration is not used. ", ) threads = traits.Int( argstr="--threads=%d", desc="Sets the number of parallel process threads which can be used for " "computations to N, where N must be an integer. Default value of N is " " ", ) lowMemory = traits.Bool( argstr="--low-memory", desc="Activates low-memory mode. This will run the registration-based " "distortion correction at a lower resolution, which could result in a " "less-accurate correction. This should only be used when no other " "alternative is available. ", ) ignoreMemory = traits.Bool( argstr="--ignore-memory", desc="Deactivates the inbuilt memory checks and forces BDP to run " "registration-based distortion correction at its default resolution even " "on machines with a low amount of memory. This may result in an " "out-of-memory error when BDP cannot allocate sufficient memory. ", ) class BDP(CommandLine): """ BrainSuite Diffusion Pipeline (BDP) enables fusion of diffusion and structural MRI information for advanced image and connectivity analysis. It provides various methods for distortion correction, co-registration, diffusion modeling (DTI and ODF) and basic ROI-wise statistic. BDP is a flexible and diverse tool which supports wide variety of diffusion datasets. 
For more information, please see: http://brainsuite.org/processing/diffusion/ Examples -------- >>> from nipype.interfaces import brainsuite >>> bdp = brainsuite.BDP() >>> bdp.inputs.bfcFile = '/directory/subdir/prefix.bfc.nii.gz' >>> bdp.inputs.inputDiffusionData = '/directory/subdir/prefix.dwi.nii.gz' >>> bdp.inputs.BVecBValPair = ['/directory/subdir/prefix.dwi.bvec', '/directory/subdir/prefix.dwi.bval'] >>> results = bdp.run() #doctest: +SKIP """ input_spec = BDPInputSpec _cmd = "bdp.sh" def _format_arg(self, name, spec, value): if name == "BVecBValPair": return spec.argstr % (value[0], value[1]) if name == "dataSinkDelay": return spec.argstr % "" return super(BDP, self)._format_arg(name, spec, value) class ThicknessPVCInputSpec(CommandLineInputSpec): subjectFilePrefix = traits.Str( argstr="%s", mandatory=True, desc="Absolute path and filename prefix of the subject data", ) class ThicknessPVC(CommandLine): """ ThicknessPVC computes cortical thickness using partial tissue fractions. This thickness measure is then transferred to the atlas surface to facilitate population studies. It also stores the computed thickness into separate hemisphere files and subject thickness mapped to the atlas hemisphere surfaces. ThicknessPVC is not run through the main SVReg sequence, and should be used after executing the BrainSuite and SVReg sequence. 
For more informaction, please see: http://brainsuite.org/processing/svreg/svreg_modules/ Examples -------- >>> from nipype.interfaces import brainsuite >>> thicknessPVC = brainsuite.ThicknessPVC() >>> thicknessPVC.inputs.subjectFilePrefix = 'home/user/btestsubject/testsubject' >>> results = thicknessPVC.run() #doctest: +SKIP """ input_spec = ThicknessPVCInputSpec _cmd = "thicknessPVC.sh" # used to generate file names for outputs # removes pathway and extension of inputName, returns concatenation of: # inputName and suffix def getFileName(inputName, suffix): fullInput = os.path.basename(inputName) dotRegex = regex.compile("[^.]+") # extract between last slash and first period inputNoExtension = dotRegex.findall(fullInput)[0] return os.path.abspath("".join((inputNoExtension, suffix))) def l_outputs(self): outputs = self.output_spec().get() for key in outputs: name = self._gen_filename(key) if name is not None: outputs[key] = name return outputs nipype-1.7.0/nipype/interfaces/brainsuite/tests/000077500000000000000000000000001413403311400217535ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/brainsuite/tests/__init__.py000066400000000000000000000000301413403311400240550ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/brainsuite/tests/test_auto_BDP.py000066400000000000000000000121101413403311400250140ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsuite import BDP def test_BDP_inputs(): input_map = dict( BVecBValPair=dict( argstr="--bvec %s --bval %s", mandatory=True, position=-1, xor=["bMatrixFile"], ), args=dict( argstr="%s", ), bMatrixFile=dict( argstr="--bmat %s", extensions=None, mandatory=True, position=-1, xor=["BVecBValPair"], ), bValRatioThreshold=dict( argstr="--bval-ratio-threshold %f", ), bfcFile=dict( argstr="%s", extensions=None, mandatory=True, position=0, xor=["noStructuralRegistration"], ), customDiffusionLabel=dict( argstr="--custom-diffusion-label %s", 
extensions=None, ), customLabelXML=dict( argstr="--custom-label-xml %s", extensions=None, ), customT1Label=dict( argstr="--custom-t1-label %s", extensions=None, ), dataSinkDelay=dict( argstr="%s", ), dcorrRegMeasure=dict( argstr="--dcorr-reg-method %s", ), dcorrWeight=dict( argstr="--dcorr-regularization-wt %f", ), dwiMask=dict( argstr="--dwi-mask %s", extensions=None, ), echoSpacing=dict( argstr="--echo-spacing=%f", ), environ=dict( nohash=True, usedefault=True, ), estimateODF_3DShore=dict( argstr="--3dshore --diffusion_time_ms %f", ), estimateODF_FRACT=dict( argstr="--FRACT", ), estimateODF_FRT=dict( argstr="--FRT", ), estimateTensors=dict( argstr="--tensors", ), fieldmapCorrection=dict( argstr="--fieldmap-correction %s", extensions=None, requires=["echoSpacing"], ), fieldmapCorrectionMethod=dict( argstr="--fieldmap-correction-method %s", xor=["skipIntensityCorr"], ), fieldmapSmooth=dict( argstr="--fieldmap-smooth3=%f", ), flagConfigFile=dict( argstr="--flag-conf-file %s", extensions=None, ), forcePartialROIStats=dict( argstr="--force-partial-roi-stats", ), generateStats=dict( argstr="--generate-stats", ), ignoreFieldmapFOV=dict( argstr="--ignore-fieldmap-fov", ), ignoreMemory=dict( argstr="--ignore-memory", ), inputDiffusionData=dict( argstr="--nii %s", extensions=None, mandatory=True, position=-2, ), lowMemory=dict( argstr="--low-memory", ), noStructuralRegistration=dict( argstr="--no-structural-registration", mandatory=True, position=0, xor=["bfcFile"], ), odfLambta=dict( argstr="--odf-lambda ", ), onlyStats=dict( argstr="--generate-only-stats", ), outPrefix=dict( argstr="--output-fileprefix %s", ), outputDiffusionCoordinates=dict( argstr="--output-diffusion-coordinate", ), outputSubdir=dict( argstr="--output-subdir %s", ), phaseEncodingDirection=dict( argstr="--dir=%s", ), rigidRegMeasure=dict( argstr="--rigid-reg-measure %s", ), skipDistortionCorr=dict( argstr="--no-distortion-correction", ), skipIntensityCorr=dict( argstr="--no-intensity-correction", 
xor=["fieldmapCorrectionMethod"], ), skipNonuniformityCorr=dict( argstr="--no-nonuniformity-correction", ), t1Mask=dict( argstr="--t1-mask %s", extensions=None, ), threads=dict( argstr="--threads=%d", ), transformDataOnly=dict( argstr="--transform-data-only", ), transformDiffusionSurface=dict( argstr="--transform-diffusion-surface %s", extensions=None, ), transformDiffusionVolume=dict( argstr="--transform-diffusion-volume %s", extensions=None, ), transformInterpolation=dict( argstr="--transform-interpolation %s", ), transformT1Surface=dict( argstr="--transform-t1-surface %s", extensions=None, ), transformT1Volume=dict( argstr="--transform-t1-volume %s", extensions=None, ), ) inputs = BDP.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/brainsuite/tests/test_auto_Bfc.py000066400000000000000000000060501413403311400251070ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsuite import Bfc def test_Bfc_inputs(): input_map = dict( args=dict( argstr="%s", ), biasEstimateConvergenceThreshold=dict( argstr="--beps %f", ), biasEstimateSpacing=dict( argstr="-s %d", ), biasFieldEstimatesOutputPrefix=dict( argstr="--biasprefix %s", ), biasRange=dict( argstr="%s", ), controlPointSpacing=dict( argstr="-c %d", ), convergenceThreshold=dict( argstr="--eps %f", ), correctWholeVolume=dict( argstr="--extrapolate", ), correctedImagesOutputPrefix=dict( argstr="--prefix %s", ), correctionScheduleFile=dict( argstr="--schedule %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), histogramRadius=dict( argstr="-r %d", ), histogramType=dict( argstr="%s", ), inputMRIFile=dict( argstr="-i %s", extensions=None, mandatory=True, ), inputMaskFile=dict( argstr="-m %s", extensions=None, hash_files=False, ), intermediate_file_type=dict( argstr="%s", ), iterativeMode=dict( argstr="--iterate", ), 
maxBias=dict( argstr="-U %f", usedefault=True, ), minBias=dict( argstr="-L %f", usedefault=True, ), outputBiasField=dict( argstr="--bias %s", extensions=None, hash_files=False, ), outputMRIVolume=dict( argstr="-o %s", extensions=None, genfile=True, hash_files=False, ), outputMaskedBiasField=dict( argstr="--maskedbias %s", extensions=None, hash_files=False, ), splineLambda=dict( argstr="-w %f", ), timer=dict( argstr="--timer", ), verbosityLevel=dict( argstr="-v %d", ), ) inputs = Bfc.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Bfc_outputs(): output_map = dict( correctionScheduleFile=dict( extensions=None, ), outputBiasField=dict( extensions=None, ), outputMRIVolume=dict( extensions=None, ), outputMaskedBiasField=dict( extensions=None, ), ) outputs = Bfc.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/brainsuite/tests/test_auto_Bse.py000066400000000000000000000056111413403311400251300ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsuite import Bse def test_Bse_inputs(): input_map = dict( args=dict( argstr="%s", ), diffusionConstant=dict( argstr="-d %f", usedefault=True, ), diffusionIterations=dict( argstr="-n %d", usedefault=True, ), dilateFinalMask=dict( argstr="-p", usedefault=True, ), edgeDetectionConstant=dict( argstr="-s %f", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), inputMRIFile=dict( argstr="-i %s", extensions=None, mandatory=True, ), noRotate=dict( argstr="--norotate", ), outputCortexFile=dict( argstr="--cortex %s", extensions=None, hash_files=False, ), outputDetailedBrainMask=dict( argstr="--hires %s", extensions=None, hash_files=False, ), outputDiffusionFilter=dict( argstr="--adf %s", extensions=None, hash_files=False, 
), outputEdgeMap=dict( argstr="--edge %s", extensions=None, hash_files=False, ), outputMRIVolume=dict( argstr="-o %s", extensions=None, genfile=True, hash_files=False, ), outputMaskFile=dict( argstr="--mask %s", extensions=None, genfile=True, hash_files=False, ), radius=dict( argstr="-r %f", usedefault=True, ), timer=dict( argstr="--timer", ), trim=dict( argstr="--trim", usedefault=True, ), verbosityLevel=dict( argstr="-v %f", usedefault=True, ), ) inputs = Bse.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Bse_outputs(): output_map = dict( outputCortexFile=dict( extensions=None, ), outputDetailedBrainMask=dict( extensions=None, ), outputDiffusionFilter=dict( extensions=None, ), outputEdgeMap=dict( extensions=None, ), outputMRIVolume=dict( extensions=None, ), outputMaskFile=dict( extensions=None, ), ) outputs = Bse.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/brainsuite/tests/test_auto_Cerebro.py000066400000000000000000000053411413403311400260000ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsuite import Cerebro def test_Cerebro_inputs(): input_map = dict( args=dict( argstr="%s", ), costFunction=dict( argstr="-c %d", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), inputAtlasLabelFile=dict( argstr="--atlaslabels %s", extensions=None, mandatory=True, ), inputAtlasMRIFile=dict( argstr="--atlas %s", extensions=None, mandatory=True, ), inputBrainMaskFile=dict( argstr="-m %s", extensions=None, ), inputMRIFile=dict( argstr="-i %s", extensions=None, mandatory=True, ), keepTempFiles=dict( argstr="--keep", ), linearConvergence=dict( argstr="--linconv %f", ), outputAffineTransformFile=dict( argstr="--air %s", extensions=None, genfile=True, ), 
outputCerebrumMaskFile=dict( argstr="-o %s", extensions=None, genfile=True, ), outputLabelVolumeFile=dict( argstr="-l %s", extensions=None, genfile=True, ), outputWarpTransformFile=dict( argstr="--warp %s", extensions=None, genfile=True, ), tempDirectory=dict( argstr="--tempdir %s", ), tempDirectoryBase=dict( argstr="--tempdirbase %s", ), useCentroids=dict( argstr="--centroids", ), verbosity=dict( argstr="-v %d", ), warpConvergence=dict( argstr="--warpconv %f", ), warpLabel=dict( argstr="--warplevel %d", ), ) inputs = Cerebro.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Cerebro_outputs(): output_map = dict( outputAffineTransformFile=dict( extensions=None, ), outputCerebrumMaskFile=dict( extensions=None, ), outputLabelVolumeFile=dict( extensions=None, ), outputWarpTransformFile=dict( extensions=None, ), ) outputs = Cerebro.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/brainsuite/tests/test_auto_Cortex.py000066400000000000000000000032761413403311400256700ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsuite import Cortex def test_Cortex_inputs(): input_map = dict( args=dict( argstr="%s", ), computeGCBoundary=dict( argstr="-g", ), computeWGBoundary=dict( argstr="-w", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), includeAllSubcorticalAreas=dict( argstr="-a", usedefault=True, ), inputHemisphereLabelFile=dict( argstr="-h %s", extensions=None, mandatory=True, ), inputTissueFractionFile=dict( argstr="-f %s", extensions=None, mandatory=True, ), outputCerebrumMask=dict( argstr="-o %s", extensions=None, genfile=True, ), timer=dict( argstr="--timer", ), tissueFractionThreshold=dict( argstr="-p %f", usedefault=True, ), verbosity=dict( 
argstr="-v %d", ), ) inputs = Cortex.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Cortex_outputs(): output_map = dict( outputCerebrumMask=dict( extensions=None, ), ) outputs = Cortex.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/brainsuite/tests/test_auto_Dewisp.py000066400000000000000000000024661413403311400256570ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsuite import Dewisp def test_Dewisp_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputMaskFile=dict( argstr="-i %s", extensions=None, mandatory=True, ), maximumIterations=dict( argstr="-n %d", ), outputMaskFile=dict( argstr="-o %s", extensions=None, genfile=True, ), sizeThreshold=dict( argstr="-t %d", ), timer=dict( argstr="--timer", ), verbosity=dict( argstr="-v %d", ), ) inputs = Dewisp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Dewisp_outputs(): output_map = dict( outputMaskFile=dict( extensions=None, ), ) outputs = Dewisp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/brainsuite/tests/test_auto_Dfs.py000066400000000000000000000043601413403311400251330ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsuite import Dfs def test_Dfs_inputs(): input_map = dict( args=dict( argstr="%s", ), curvatureWeighting=dict( argstr="-w %f", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), inputShadingVolume=dict( argstr="-c 
%s", extensions=None, ), inputVolumeFile=dict( argstr="-i %s", extensions=None, mandatory=True, ), noNormalsFlag=dict( argstr="--nonormals", ), nonZeroTessellation=dict( argstr="-nz", xor=("nonZeroTessellation", "specialTessellation"), ), outputSurfaceFile=dict( argstr="-o %s", extensions=None, genfile=True, ), postSmoothFlag=dict( argstr="--postsmooth", ), scalingPercentile=dict( argstr="-f %f", ), smoothingConstant=dict( argstr="-a %f", usedefault=True, ), smoothingIterations=dict( argstr="-n %d", usedefault=True, ), specialTessellation=dict( argstr="%s", position=-1, requires=["tessellationThreshold"], xor=("nonZeroTessellation", "specialTessellation"), ), tessellationThreshold=dict( argstr="%f", ), timer=dict( argstr="--timer", ), verbosity=dict( argstr="-v %d", ), zeroPadFlag=dict( argstr="-z", ), ) inputs = Dfs.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Dfs_outputs(): output_map = dict( outputSurfaceFile=dict( extensions=None, ), ) outputs = Dfs.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/brainsuite/tests/test_auto_Hemisplit.py000066400000000000000000000040661413403311400263600ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsuite import Hemisplit def test_Hemisplit_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputHemisphereLabelFile=dict( argstr="-l %s", extensions=None, mandatory=True, ), inputSurfaceFile=dict( argstr="-i %s", extensions=None, mandatory=True, ), outputLeftHemisphere=dict( argstr="--left %s", extensions=None, genfile=True, ), outputLeftPialHemisphere=dict( argstr="-pl %s", extensions=None, genfile=True, ), outputRightHemisphere=dict( argstr="--right %s", 
extensions=None, genfile=True, ), outputRightPialHemisphere=dict( argstr="-pr %s", extensions=None, genfile=True, ), pialSurfaceFile=dict( argstr="-p %s", extensions=None, ), timer=dict( argstr="--timer", ), verbosity=dict( argstr="-v %d", ), ) inputs = Hemisplit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Hemisplit_outputs(): output_map = dict( outputLeftHemisphere=dict( extensions=None, ), outputLeftPialHemisphere=dict( extensions=None, ), outputRightHemisphere=dict( extensions=None, ), outputRightPialHemisphere=dict( extensions=None, ), ) outputs = Hemisplit.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/brainsuite/tests/test_auto_Pialmesh.py000066400000000000000000000046541413403311400261670ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsuite import Pialmesh def test_Pialmesh_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), exportPrefix=dict( argstr="--prefix %s", ), inputMaskFile=dict( argstr="-m %s", extensions=None, mandatory=True, ), inputSurfaceFile=dict( argstr="-i %s", extensions=None, mandatory=True, ), inputTissueFractionFile=dict( argstr="-f %s", extensions=None, mandatory=True, ), laplacianSmoothing=dict( argstr="--smooth %f", usedefault=True, ), maxThickness=dict( argstr="--max %f", usedefault=True, ), normalSmoother=dict( argstr="--nc %f", usedefault=True, ), numIterations=dict( argstr="-n %d", usedefault=True, ), outputInterval=dict( argstr="--interval %d", usedefault=True, ), outputSurfaceFile=dict( argstr="-o %s", extensions=None, genfile=True, ), recomputeNormals=dict( argstr="--norm", ), searchRadius=dict( argstr="-r %f", usedefault=True, ), stepSize=dict( argstr="-s %f", 
usedefault=True, ), tangentSmoother=dict( argstr="--tc %f", ), timer=dict( argstr="--timer", ), tissueThreshold=dict( argstr="-t %f", usedefault=True, ), verbosity=dict( argstr="-v %d", ), ) inputs = Pialmesh.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Pialmesh_outputs(): output_map = dict( outputSurfaceFile=dict( extensions=None, ), ) outputs = Pialmesh.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/brainsuite/tests/test_auto_Pvc.py000066400000000000000000000031251413403311400251450ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsuite import Pvc def test_Pvc_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputMRIFile=dict( argstr="-i %s", extensions=None, mandatory=True, ), inputMaskFile=dict( argstr="-m %s", extensions=None, ), outputLabelFile=dict( argstr="-o %s", extensions=None, genfile=True, ), outputTissueFractionFile=dict( argstr="-f %s", extensions=None, genfile=True, ), spatialPrior=dict( argstr="-l %f", ), threeClassFlag=dict( argstr="-3", ), timer=dict( argstr="--timer", ), verbosity=dict( argstr="-v %d", ), ) inputs = Pvc.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Pvc_outputs(): output_map = dict( outputLabelFile=dict( extensions=None, ), outputTissueFractionFile=dict( extensions=None, ), ) outputs = Pvc.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/brainsuite/tests/test_auto_SVReg.py000066400000000000000000000042341413403311400254050ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsuite import SVReg def test_SVReg_inputs(): input_map = dict( args=dict( argstr="%s", ), atlasFilePrefix=dict( argstr="'%s'", position=1, ), curveMatchingInstructions=dict( argstr="'-cur %s'", ), dataSinkDelay=dict( argstr="%s", ), displayModuleName=dict( argstr="'-m'", ), displayTimestamps=dict( argstr="'-t'", ), environ=dict( nohash=True, usedefault=True, ), iterations=dict( argstr="'-H %d'", ), keepIntermediates=dict( argstr="'-k'", ), pialSurfaceMaskDilation=dict( argstr="'-D %d'", ), refineOutputs=dict( argstr="'-r'", ), shortMessages=dict( argstr="'-gui'", ), skipToIntensityReg=dict( argstr="'-p'", ), skipToVolumeReg=dict( argstr="'-s'", ), skipVolumetricProcessing=dict( argstr="'-S'", ), subjectFilePrefix=dict( argstr="'%s'", mandatory=True, position=0, ), useCerebrumMask=dict( argstr="'-C'", ), useManualMaskFile=dict( argstr="'-cbm'", ), useMultiThreading=dict( argstr="'-P'", ), useSingleThreading=dict( argstr="'-U'", ), verbosity0=dict( argstr="'-v0'", xor=("verbosity0", "verbosity1", "verbosity2"), ), verbosity1=dict( argstr="'-v1'", xor=("verbosity0", "verbosity1", "verbosity2"), ), verbosity2=dict( argstr="'v2'", xor=("verbosity0", "verbosity1", "verbosity2"), ), ) inputs = SVReg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/brainsuite/tests/test_auto_Scrubmask.py000066400000000000000000000027251413403311400263540ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsuite import Scrubmask def test_Scrubmask_inputs(): input_map = dict( args=dict( argstr="%s", ), backgroundFillThreshold=dict( argstr="-b %d", usedefault=True, ), environ=dict( nohash=True, 
usedefault=True, ), foregroundTrimThreshold=dict( argstr="-f %d", usedefault=True, ), inputMaskFile=dict( argstr="-i %s", extensions=None, mandatory=True, ), numberIterations=dict( argstr="-n %d", ), outputMaskFile=dict( argstr="-o %s", extensions=None, genfile=True, ), timer=dict( argstr="--timer", ), verbosity=dict( argstr="-v %d", ), ) inputs = Scrubmask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Scrubmask_outputs(): output_map = dict( outputMaskFile=dict( extensions=None, ), ) outputs = Scrubmask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/brainsuite/tests/test_auto_Skullfinder.py000066400000000000000000000036531413403311400267050ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsuite import Skullfinder def test_Skullfinder_inputs(): input_map = dict( args=dict( argstr="%s", ), bgLabelValue=dict( argstr="--bglabel %d", ), brainLabelValue=dict( argstr="--brainlabel %d", ), environ=dict( nohash=True, usedefault=True, ), inputMRIFile=dict( argstr="-i %s", extensions=None, mandatory=True, ), inputMaskFile=dict( argstr="-m %s", extensions=None, mandatory=True, ), lowerThreshold=dict( argstr="-l %d", ), outputLabelFile=dict( argstr="-o %s", extensions=None, genfile=True, ), performFinalOpening=dict( argstr="--finalOpening", ), scalpLabelValue=dict( argstr="--scalplabel %d", ), skullLabelValue=dict( argstr="--skulllabel %d", ), spaceLabelValue=dict( argstr="--spacelabel %d", ), surfaceFilePrefix=dict( argstr="-s %s", ), upperThreshold=dict( argstr="-u %d", ), verbosity=dict( argstr="-v %d", ), ) inputs = Skullfinder.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value def test_Skullfinder_outputs(): output_map = dict( outputLabelFile=dict( extensions=None, ), ) outputs = Skullfinder.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/brainsuite/tests/test_auto_Tca.py000066400000000000000000000026571413403311400251350ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsuite import Tca def test_Tca_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), foregroundDelta=dict( argstr="--delta %d", usedefault=True, ), inputMaskFile=dict( argstr="-i %s", extensions=None, mandatory=True, ), maxCorrectionSize=dict( argstr="-n %d", ), minCorrectionSize=dict( argstr="-m %d", usedefault=True, ), outputMaskFile=dict( argstr="-o %s", extensions=None, genfile=True, ), timer=dict( argstr="--timer", ), verbosity=dict( argstr="-v %d", ), ) inputs = Tca.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Tca_outputs(): output_map = dict( outputMaskFile=dict( extensions=None, ), ) outputs = Tca.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/brainsuite/tests/test_auto_ThicknessPVC.py000066400000000000000000000011331413403311400267160ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsuite import ThicknessPVC def test_ThicknessPVC_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), subjectFilePrefix=dict( argstr="%s", mandatory=True, ), ) inputs = ThicknessPVC.input_spec() for key, metadata in list(input_map.items()): for 
metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/bru2nii.py000066400000000000000000000043521413403311400203740ustar00rootroot00000000000000# -*- coding: utf-8 -*- """The bru2nii module provides basic functions for dicom conversion """ import os from .base import ( CommandLine, CommandLineInputSpec, traits, TraitedSpec, isdefined, File, Directory, ) class Bru2InputSpec(CommandLineInputSpec): input_dir = Directory( desc="Input Directory", exists=True, mandatory=True, position=-1, argstr="%s" ) actual_size = traits.Bool( argstr="-a", desc="Keep actual size - otherwise x10 scale so animals match human.", ) force_conversion = traits.Bool( argstr="-f", desc="Force conversion of localizers images (multiple slice " "orientations).", ) compress = traits.Bool(argstr="-z", desc='gz compress images (".nii.gz").') append_protocol_name = traits.Bool( argstr="-p", desc="Append protocol name to output filename." ) output_filename = traits.Str( argstr="-o %s", desc='Output filename (".nii" will be appended, or ".nii.gz" if the "-z" compress option is selected)', genfile=True, ) class Bru2OutputSpec(TraitedSpec): nii_file = File(exists=True) class Bru2(CommandLine): """Uses bru2nii's Bru2 to convert Bruker files Examples ======== >>> from nipype.interfaces.bru2nii import Bru2 >>> converter = Bru2() >>> converter.inputs.input_dir = "brukerdir" >>> converter.cmdline # doctest: +ELLIPSIS 'Bru2 -o .../data/brukerdir brukerdir' """ input_spec = Bru2InputSpec output_spec = Bru2OutputSpec _cmd = "Bru2" def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.output_filename): output_filename1 = os.path.abspath(self.inputs.output_filename) else: output_filename1 = self._gen_filename("output_filename") if self.inputs.compress: outputs["nii_file"] = output_filename1 + ".nii.gz" else: outputs["nii_file"] = output_filename1 + ".nii" return outputs def _gen_filename(self, name): if name == 
"output_filename": outfile = os.path.join( os.getcwd(), os.path.basename(os.path.normpath(self.inputs.input_dir)) ) return outfile nipype-1.7.0/nipype/interfaces/c3.py000066400000000000000000000165361413403311400173360ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Convert3D is a command-line tool for converting 3D images between common file formats.""" import os from glob import glob from .base import ( CommandLineInputSpec, traits, TraitedSpec, File, SEMLikeCommandLine, InputMultiPath, OutputMultiPath, CommandLine, isdefined, ) from ..utils.filemanip import split_filename from .. import logging iflogger = logging.getLogger("interface") class C3dAffineToolInputSpec(CommandLineInputSpec): reference_file = File(exists=True, argstr="-ref %s", position=1) source_file = File(exists=True, argstr="-src %s", position=2) transform_file = File(exists=True, argstr="%s", position=3) itk_transform = traits.Either( traits.Bool, File(), hash_files=False, desc="Export ITK transform.", argstr="-oitk %s", position=5, ) fsl2ras = traits.Bool(argstr="-fsl2ras", position=4) class C3dAffineToolOutputSpec(TraitedSpec): itk_transform = File(exists=True) class C3dAffineTool(SEMLikeCommandLine): """Converts fsl-style Affine registration into ANTS compatible itk format Example ======= >>> from nipype.interfaces.c3 import C3dAffineTool >>> c3 = C3dAffineTool() >>> c3.inputs.source_file = 'cmatrix.mat' >>> c3.inputs.itk_transform = 'affine.txt' >>> c3.inputs.fsl2ras = True >>> c3.cmdline 'c3d_affine_tool -src cmatrix.mat -fsl2ras -oitk affine.txt' """ input_spec = C3dAffineToolInputSpec output_spec = C3dAffineToolOutputSpec _cmd = "c3d_affine_tool" _outputs_filenames = {"itk_transform": "affine.txt"} class C3dInputSpec(CommandLineInputSpec): in_file = InputMultiPath( File(), position=1, argstr="%s", mandatory=True, desc="Input file (wildcard and multiple are supported).", ) out_file = File( exists=False, argstr="-o %s", position=-1, xor=["out_files"], desc="Output file of last image 
on the stack.", ) out_files = InputMultiPath( File(), argstr="-oo %s", xor=["out_file"], position=-1, desc=( "Write all images on the convert3d stack as multiple files." " Supports both list of output files or a pattern for the output" " filenames (using %d substituion)." ), ) pix_type = traits.Enum( "float", "char", "uchar", "short", "ushort", "int", "uint", "double", argstr="-type %s", desc=( "Specifies the pixel type for the output image. By default," " images are written in floating point (float) format" ), ) scale = traits.Either( traits.Int(), traits.Float(), argstr="-scale %s", desc=( "Multiplies the intensity of each voxel in the last image on the" " stack by the given factor." ), ) shift = traits.Either( traits.Int(), traits.Float(), argstr="-shift %s", desc="Adds the given constant to every voxel.", ) interp = traits.Enum( "Linear", "NearestNeighbor", "Cubic", "Sinc", "Gaussian", argstr="-interpolation %s", desc=( "Specifies the interpolation used with -resample and other" " commands. Default is Linear." ), ) resample = traits.Str( argstr="-resample %s", desc=( "Resamples the image, keeping the bounding box the same, but" " changing the number of voxels in the image. The dimensions can be" " specified as a percentage, for example to double the number of voxels" " in each direction. The -interpolation flag affects how sampling is" " performed." ), ) smooth = traits.Str( argstr="-smooth %s", desc=( "Applies Gaussian smoothing to the image. The parameter vector" " specifies the standard deviation of the Gaussian kernel." 
), ) multicomp_split = traits.Bool( False, usedefault=True, argstr="-mcr", position=0, desc="Enable reading of multi-component images.", ) is_4d = traits.Bool( False, usedefault=True, desc=("Changes command to support 4D file operations (default is" " false)."), ) class C3dOutputSpec(TraitedSpec): out_files = OutputMultiPath(File(exists=False)) class C3d(CommandLine): """ Convert3d is a command-line tool for converting 3D (or 4D) images between common file formats. The tool also includes a growing list of commands for image manipulation, such as thresholding and resampling. The tool can also be used to obtain information about image files. More information on Convert3d can be found at: https://sourceforge.net/p/c3d/git/ci/master/tree/doc/c3d.md Example ======= >>> from nipype.interfaces.c3 import C3d >>> c3 = C3d() >>> c3.inputs.in_file = "T1.nii" >>> c3.inputs.pix_type = "short" >>> c3.inputs.out_file = "T1.img" >>> c3.cmdline 'c3d T1.nii -type short -o T1.img' >>> c3.inputs.is_4d = True >>> c3.inputs.in_file = "epi.nii" >>> c3.inputs.out_file = "epi.img" >>> c3.cmdline 'c4d epi.nii -type short -o epi.img' """ input_spec = C3dInputSpec output_spec = C3dOutputSpec _cmd = "c3d" def __init__(self, **inputs): super(C3d, self).__init__(**inputs) self.inputs.on_trait_change(self._is_4d, "is_4d") if self.inputs.is_4d: self._is_4d() def _is_4d(self): self._cmd = "c4d" if self.inputs.is_4d else "c3d" def _run_interface(self, runtime): cmd = self._cmd if not isdefined(self.inputs.out_file) and not isdefined(self.inputs.out_files): # Convert3d does not want to override file, by default # so we define a new output file self._gen_outfile() runtime = super(C3d, self)._run_interface(runtime) self._cmd = cmd return runtime def _gen_outfile(self): # if many infiles, raise exception if (len(self.inputs.in_file) > 1) or ("*" in self.inputs.in_file[0]): raise AttributeError( "Multiple in_files found - specify either" " `out_file` or `out_files`." 
) _, fn, ext = split_filename(self.inputs.in_file[0]) self.inputs.out_file = fn + "_generated" + ext # if generated file will overwrite, raise error if os.path.exists(os.path.abspath(self.inputs.out_file)): raise IOError("File already found - to overwrite, use `out_file`.") iflogger.info("Generating `out_file`.") def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_file): outputs["out_files"] = os.path.abspath(self.inputs.out_file) if isdefined(self.inputs.out_files): if len(self.inputs.out_files) == 1: _out_files = glob(os.path.abspath(self.inputs.out_files[0])) else: _out_files = [ os.path.abspath(f) for f in self.inputs.out_files if os.path.exists(os.path.abspath(f)) ] outputs["out_files"] = _out_files return outputs nipype-1.7.0/nipype/interfaces/camino/000077500000000000000000000000001413403311400177125ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/camino/__init__.py000066400000000000000000000015361413403311400220300ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Camino top level namespace """ from .connectivity import Conmat from .convert import ( Image2Voxel, FSL2Scheme, VtkStreamlines, ProcStreamlines, TractShredder, DT2NIfTI, NIfTIDT2Camino, AnalyzeHeader, Shredder, ) from .dti import ( DTIFit, ModelFit, DTLUTGen, PicoPDFs, Track, TrackPICo, TrackBayesDirac, TrackDT, TrackBallStick, TrackBootstrap, TrackBedpostxDeter, TrackBedpostxProba, ComputeFractionalAnisotropy, ComputeMeanDiffusivity, ComputeTensorTrace, ComputeEigensystem, DTMetric, ) from .calib import SFPICOCalibData, SFLUTGen from .odf import QBallMX, LinRecon, SFPeaks, MESD from .utils import ImageStats nipype-1.7.0/nipype/interfaces/camino/calib.py000066400000000000000000000262631413403311400213470ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os from ...utils.filemanip import split_filename from ..base import ( 
traits, TraitedSpec, File, StdOutCommandLine, StdOutCommandLineInputSpec, ) class SFPICOCalibDataInputSpec(StdOutCommandLineInputSpec): snr = traits.Float( argstr="-snr %f", units="NA", desc=( "Specifies the signal-to-noise ratio of the " "non-diffusion-weighted measurements to use in simulations." ), ) scheme_file = File( exists=True, argstr="-schemefile %s", mandatory=True, desc="Specifies the scheme file for the diffusion MRI data", ) info_file = File( desc="The name to be given to the information output filename.", argstr="-infooutputfile %s", mandatory=True, genfile=True, hash_files=False, ) # Genfile and hash_files? trace = traits.Float( argstr="-trace %f", units="NA", desc="Trace of the diffusion tensor(s) used in the test function.", ) onedtfarange = traits.List( traits.Float, argstr="-onedtfarange %s", minlen=2, maxlen=2, units="NA", desc=("Minimum and maximum FA for the single tensor " "synthetic data."), ) onedtfastep = traits.Float( argstr="-onedtfastep %f", units="NA", desc=( "FA step size controlling how many steps there are " "between the minimum and maximum FA settings." ), ) twodtfarange = traits.List( traits.Float, argstr="-twodtfarange %s", minlen=2, maxlen=2, units="NA", desc=( "Minimum and maximum FA for the two tensor " "synthetic data. FA is varied for both tensors " "to give all the different permutations." ), ) twodtfastep = traits.Float( argstr="-twodtfastep %f", units="NA", desc=( "FA step size controlling how many steps there are " "between the minimum and maximum FA settings " "for the two tensor cases." ), ) twodtanglerange = traits.List( traits.Float, argstr="-twodtanglerange %s", minlen=2, maxlen=2, units="NA", desc=("Minimum and maximum crossing angles " "between the two fibres."), ) twodtanglestep = traits.Float( argstr="-twodtanglestep %f", units="NA", desc=( "Angle step size controlling how many steps there are " "between the minimum and maximum crossing angles for " "the two tensor cases." 
), ) twodtmixmax = traits.Float( argstr="-twodtmixmax %f", units="NA", desc=( "Mixing parameter controlling the proportion of one fibre population " "to the other. The minimum mixing parameter is (1 - twodtmixmax)." ), ) twodtmixstep = traits.Float( argstr="-twodtmixstep %f", units="NA", desc=( "Mixing parameter step size for the two tensor cases. " "Specify how many mixing parameter increments to use." ), ) seed = traits.Float( argstr="-seed %f", units="NA", desc="Specifies the random seed to use for noise generation in simulation trials.", ) class SFPICOCalibDataOutputSpec(TraitedSpec): PICOCalib = File(exists=True, desc="Calibration dataset") calib_info = File(exists=True, desc="Calibration dataset") class SFPICOCalibData(StdOutCommandLine): """ Generates Spherical Function PICo Calibration Data. SFPICOCalibData creates synthetic data for use with SFLUTGen. The synthetic data is generated using a mixture of gaussians, in the same way datasynth generates data. Each voxel of data models a slightly different fibre configuration (varying FA and fibre- crossings) and undergoes a random rotation to help account for any directional bias in the chosen acquisition scheme. A second file, which stores information about the datafile, is generated along with the datafile. Examples -------- To create a calibration dataset using the default settings >>> import nipype.interfaces.camino as cam >>> calib = cam.SFPICOCalibData() >>> calib.inputs.scheme_file = 'A.scheme' >>> calib.inputs.snr = 20 >>> calib.inputs.info_file = 'PICO_calib.info' >>> calib.run() # doctest: +SKIP The default settings create a large dataset (249,231 voxels), of which 3401 voxels contain a single fibre population per voxel and the rest of the voxels contain two fibre-populations. The amount of data produced can be varied by specifying the ranges and steps of the parameters for both the one and two fibre datasets used. 
To create a custom calibration dataset >>> import nipype.interfaces.camino as cam >>> calib = cam.SFPICOCalibData() >>> calib.inputs.scheme_file = 'A.scheme' >>> calib.inputs.snr = 20 >>> calib.inputs.info_file = 'PICO_calib.info' >>> calib.inputs.twodtfarange = [0.3, 0.9] >>> calib.inputs.twodtfastep = 0.02 >>> calib.inputs.twodtanglerange = [0, 0.785] >>> calib.inputs.twodtanglestep = 0.03925 >>> calib.inputs.twodtmixmax = 0.8 >>> calib.inputs.twodtmixstep = 0.1 >>> calib.run() # doctest: +SKIP This would provide 76,313 voxels of synthetic data, where 3401 voxels simulate the one fibre cases and 72,912 voxels simulate the various two fibre cases. However, care should be taken to ensure that enough data is generated for calculating the LUT. # doctest: +SKIP """ _cmd = "sfpicocalibdata" input_spec = SFPICOCalibDataInputSpec output_spec = SFPICOCalibDataOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["PICOCalib"] = os.path.abspath(self._gen_outfilename()) outputs["calib_info"] = os.path.abspath(self.inputs.info_file) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.scheme_file) return name + "_PICOCalib.Bfloat" class SFLUTGenInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, argstr="-inputfile %s", mandatory=True, desc="Voxel-order data of the spherical functions peaks.", ) info_file = File( argstr="-infofile %s", mandatory=True, desc=( "The Info file that corresponds to the calibration " "datafile used in the reconstruction." ), ) outputstem = traits.Str( "LUT", argstr="-outputstem %s", desc=( "Define the name of the generated luts. The form of the filenames will be " "[outputstem]_oneFibreSurfaceCoeffs.Bdouble and " "[outputstem]_twoFibreSurfaceCoeffs.Bdouble" ), usedefault=True, ) pdf = traits.Enum( "bingham", "watson", argstr="-pdf %s", desc="""\ Sets the distribution to use for the calibration. 
The default is the Bingham distribution, which allows elliptical probability density contours. Currently supported options are: * bingham -- The Bingham distribution, which allows elliptical probability density contours. * watson -- The Watson distribution. This distribution is rotationally symmetric. """, usedefault=True, ) binincsize = traits.Int( argstr="-binincsize %d", units="NA", desc=( "Sets the size of the bins. In the case of 2D histograms such as the " "Bingham, the bins are always square. Default is 1." ), ) minvectsperbin = traits.Int( argstr="-minvectsperbin %d", units="NA", desc=( "Specifies the minimum number of fibre-orientation estimates a bin " "must contain before it is used in the lut line/surface generation. " 'Default is 50. If you get the error "no fibre-orientation estimates ' 'in histogram!", the calibration data set is too small to get enough ' "samples in any of the histogram bins. You can decrease the minimum " "number per bin to get things running in quick tests, but the sta- " "tistics will not be reliable and for serious applications, you need " "to increase the size of the calibration data set until the error goes." ), ) directmap = traits.Bool( argstr="-directmap", desc=( "Use direct mapping between the eigenvalues and the distribution parameters " "instead of the log of the eigenvalues." ), ) order = traits.Int( argstr="-order %d", units="NA", desc=( "The order of the polynomial fitting the surface. Order 1 is linear. " "Order 2 (default) is quadratic." ), ) class SFLUTGenOutputSpec(TraitedSpec): lut_one_fibre = File(exists=True, desc="PICo lut for one-fibre model") lut_two_fibres = File(exists=True, desc="PICo lut for two-fibre model") class SFLUTGen(StdOutCommandLine): """ Generates PICo lookup tables (LUT) for multi-fibre methods such as PASMRI and Q-Ball. SFLUTGen creates the lookup tables for the generalized multi-fibre implementation of the PICo tractography algorithm. 
The outputs of this utility are either surface or line coefficients up to a given order. The calibration can be performed for different distributions, such as the Bingham and Watson distributions. This utility uses calibration data generated from SFPICOCalibData and peak information created by SFPeaks. The utility outputs two lut's, ``*_oneFibreSurfaceCoeffs.Bdouble`` and ``*_twoFibreSurfaceCoeffs.Bdouble``. Each of these files contains big-endian doubles as standard. The format of the output is:: dimensions (1 for Watson, 2 for Bingham) order (the order of the polynomial) coefficient_1 coefficient_2 ... coefficient_N In the case of the Watson, there is a single set of coefficients, which are ordered:: constant, x, x^2, ..., x^order. In the case of the Bingham, there are two sets of coefficients (one for each surface), ordered so that:: for j = 1 to order for k = 1 to order coeff_i = x^j * y^k where j+k < order Example ------- To create a calibration dataset using the default settings >>> import nipype.interfaces.camino as cam >>> lutgen = cam.SFLUTGen() >>> lutgen.inputs.in_file = 'QSH_peaks.Bdouble' >>> lutgen.inputs.info_file = 'PICO_calib.info' >>> lutgen.run() # doctest: +SKIP """ _cmd = "sflutgen" input_spec = SFLUTGenInputSpec output_spec = SFLUTGenOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["lut_one_fibre"] = ( self.inputs.outputstem + "_oneFibreSurfaceCoeffs.Bdouble" ) outputs["lut_two_fibres"] = ( self.inputs.outputstem + "_twoFibreSurfaceCoeffs.Bdouble" ) return outputs def _gen_outfilename(self): return "/dev/null" nipype-1.7.0/nipype/interfaces/camino/connectivity.py000066400000000000000000000141701413403311400230050ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os from ...utils.filemanip import split_filename from ..base import ( traits, TraitedSpec, File, CommandLine, CommandLineInputSpec, isdefined, ) class ConmatInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="-inputfile %s", 
mandatory=True, desc="Streamlines as generated by the Track interface", ) target_file = File( exists=True, argstr="-targetfile %s", mandatory=True, desc="An image containing targets, as used in ProcStreamlines interface.", ) scalar_file = File( exists=True, argstr="-scalarfile %s", desc=( "Optional scalar file for computing tract-based statistics. " "Must be in the same space as the target file." ), requires=["tract_stat"], ) targetname_file = File( exists=True, argstr="-targetnamefile %s", desc=( "Optional names of targets. This file should contain one entry per line, " "with the target intensity followed by the name, separated by white space. " "For example: " " 1 some_brain_region " " 2 some_other_region " "These names will be used in the output. The names themselves should not " "contain spaces or commas. The labels may be in any order but the output " "matrices will be ordered by label intensity." ), ) tract_stat = traits.Enum( "mean", "min", "max", "sum", "median", "var", argstr="-tractstat %s", units="NA", desc=("Tract statistic to use. See TractStats for other options."), requires=["scalar_file"], xor=["tract_prop"], ) tract_prop = traits.Enum( "length", "endpointsep", argstr="-tractstat %s", units="NA", xor=["tract_stat"], desc=( "Tract property average to compute in the connectivity matrix. " "See TractStats for details." ), ) output_root = File( argstr="-outputroot %s", genfile=True, desc=( "filename root prepended onto the names of the output files. " "The extension will be determined from the input." ), ) class ConmatOutputSpec(TraitedSpec): conmat_sc = File(exists=True, desc="Connectivity matrix in CSV file.") conmat_ts = File(desc="Tract statistics in CSV file.") class Conmat(CommandLine): """ Creates a connectivity matrix using a 3D label image (the target image) and a set of streamlines. The connectivity matrix records how many stream- lines connect each pair of targets, and optionally the mean tractwise statistic (eg tract-averaged FA, or length). 
The output is a comma separated variable file or files. The first row of the output matrix is label names. Label names may be defined by the user, otherwise they are assigned based on label intensity. Starting from the seed point, we move along the streamline until we find a point in a labeled region. This is done in both directions from the seed point. Streamlines are counted if they connect two target regions, one on either side of the seed point. Only the labeled region closest to the seed is counted, for example if the input contains two streamlines: :: 1: A-----B------SEED---C 2: A--------SEED----------- then the output would be :: A,B,C 0,0,0 0,0,1 0,1,0 There are zero connections to A because in streamline 1, the connection to B is closer to the seed than the connection to A, and in streamline 2 there is no region reached in the other direction. The connected target regions can have the same label, as long as the seed point is outside of the labeled region and both ends connect to the same label (which may be in different locations). Therefore this is allowed: :: A------SEED-------A Such fibers will add to the diagonal elements of the matrix. To remove these entries, run procstreamlines with -endpointfile before running conmat. If the seed point is inside a labled region, it counts as one end of the connection. So :: ----[SEED inside A]---------B counts as a connection between A and B, while :: C----[SEED inside A]---------B counts as a connection between A and C, because C is closer to the seed point. In all cases, distance to the seed point is defined along the streamline path. Examples -------- To create a standard connectivity matrix based on streamline counts. >>> import nipype.interfaces.camino as cam >>> conmat = cam.Conmat() >>> conmat.inputs.in_file = 'tracts.Bdouble' >>> conmat.inputs.target_file = 'atlas.nii.gz' >>> conmat.run() # doctest: +SKIP To create a standard connectivity matrix and mean tractwise FA statistics. 
>>> import nipype.interfaces.camino as cam >>> conmat = cam.Conmat() >>> conmat.inputs.in_file = 'tracts.Bdouble' >>> conmat.inputs.target_file = 'atlas.nii.gz' >>> conmat.inputs.scalar_file = 'fa.nii.gz' >>> conmat.tract_stat = 'mean' >>> conmat.run() # doctest: +SKIP """ _cmd = "conmat" input_spec = ConmatInputSpec output_spec = ConmatOutputSpec def _list_outputs(self): outputs = self.output_spec().get() output_root = self._gen_outputroot() outputs["conmat_sc"] = os.path.abspath(output_root + "sc.csv") outputs["conmat_ts"] = os.path.abspath(output_root + "ts.csv") return outputs def _gen_outfilename(self): return self._gen_outputroot() def _gen_outputroot(self): output_root = self.inputs.output_root if not isdefined(output_root): output_root = self._gen_filename("output_root") return output_root def _gen_filename(self, name): if name == "output_root": _, filename, _ = split_filename(self.inputs.in_file) filename = filename + "_" return filename nipype-1.7.0/nipype/interfaces/camino/convert.py000066400000000000000000000770471413403311400217630ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os import glob from ...utils.filemanip import split_filename from ..base import ( CommandLineInputSpec, CommandLine, traits, TraitedSpec, File, StdOutCommandLine, OutputMultiPath, StdOutCommandLineInputSpec, isdefined, ) class Image2VoxelInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, argstr="-4dimage %s", mandatory=True, position=1, desc="4d image file", ) # TODO convert list of files on the fly # imagelist = File(exists=True, argstr='-imagelist %s', # mandatory=True, position=1, # desc='Name of a file containing a list of 3D images') # # imageprefix = traits.Str(argstr='-imageprefix %s', position=3, # desc='Path to prepend onto filenames in the imagelist.') out_type = traits.Enum( "float", "char", "short", "int", "long", "double", argstr="-outputdatatype %s", position=2, desc='"i.e. Bfloat". 
Can be "char", "short", "int", "long", "float" or "double"', usedefault=True, ) class Image2VoxelOutputSpec(TraitedSpec): voxel_order = File(exists=True, desc="path/name of 4D volume in voxel order") class Image2Voxel(StdOutCommandLine): """ Converts Analyze / NIFTI / MHA files to voxel order. Converts scanner-order data in a supported image format to voxel-order data. Either takes a 4D file (all measurements in single image) or a list of 3D images. Examples -------- >>> import nipype.interfaces.camino as cmon >>> img2vox = cmon.Image2Voxel() >>> img2vox.inputs.in_file = '4d_dwi.nii' >>> img2vox.run() # doctest: +SKIP """ _cmd = "image2voxel" input_spec = Image2VoxelInputSpec output_spec = Image2VoxelOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["voxel_order"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + ".B" + self.inputs.out_type class FSL2SchemeInputSpec(StdOutCommandLineInputSpec): bvec_file = File( exists=True, argstr="-bvecfile %s", mandatory=True, position=1, desc="b vector file", ) bval_file = File( exists=True, argstr="-bvalfile %s", mandatory=True, position=2, desc="b value file", ) numscans = traits.Int( argstr="-numscans %d", units="NA", desc="Output all measurements numerous (n) times, used when combining multiple scans from the same imaging session.", ) interleave = traits.Bool( argstr="-interleave", desc="Interleave repeated scans. Only used with -numscans.", ) bscale = traits.Float( argstr="-bscale %d", units="NA", desc="Scaling factor to convert the b-values into different units. Default is 10^6.", ) diffusiontime = traits.Float( argstr="-diffusiontime %f", units="NA", desc="Diffusion time" ) flipx = traits.Bool( argstr="-flipx", desc="Negate the x component of all the vectors." ) flipy = traits.Bool( argstr="-flipy", desc="Negate the y component of all the vectors." 
) flipz = traits.Bool( argstr="-flipz", desc="Negate the z component of all the vectors." ) usegradmod = traits.Bool( argstr="-usegradmod", desc="Use the gradient magnitude to scale b. This option has no effect if your gradient directions have unit magnitude.", ) class FSL2SchemeOutputSpec(TraitedSpec): scheme = File(exists=True, desc="Scheme file") class FSL2Scheme(StdOutCommandLine): """ Converts b-vectors and b-values from FSL format to a Camino scheme file. Examples -------- >>> import nipype.interfaces.camino as cmon >>> makescheme = cmon.FSL2Scheme() >>> makescheme.inputs.bvec_file = 'bvecs' >>> makescheme.inputs.bvec_file = 'bvals' >>> makescheme.run() # doctest: +SKIP """ _cmd = "fsl2scheme" input_spec = FSL2SchemeInputSpec output_spec = FSL2SchemeOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["scheme"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.bvec_file) return name + ".scheme" class VtkStreamlinesInputSpec(StdOutCommandLineInputSpec): inputmodel = traits.Enum( "raw", "voxels", argstr="-inputmodel %s", desc="input model type (raw or voxels)", usedefault=True, ) in_file = File( exists=True, argstr=" < %s", mandatory=True, position=-2, desc="data file" ) voxeldims = traits.List( traits.Int, desc="voxel dimensions in mm", argstr="-voxeldims %s", minlen=3, maxlen=3, position=4, units="mm", ) seed_file = File( exists=False, argstr="-seedfile %s", position=1, desc="image containing seed points", ) target_file = File( exists=False, argstr="-targetfile %s", position=2, desc="image containing integer-valued target regions", ) scalar_file = File( exists=False, argstr="-scalarfile %s", position=3, desc="image that is in the same physical space as the tracts", ) colourorient = traits.Bool( argstr="-colourorient", desc="Each point on the streamline is coloured by the local orientation.", ) interpolatescalars = traits.Bool( argstr="-interpolatescalars", 
desc="the scalar value at each point on the streamline is calculated by trilinear interpolation", ) interpolate = traits.Bool( argstr="-interpolate", desc="the scalar value at each point on the streamline is calculated by trilinear interpolation", ) class VtkStreamlinesOutputSpec(TraitedSpec): vtk = File(exists=True, desc="Streamlines in VTK format") class VtkStreamlines(StdOutCommandLine): """ Use vtkstreamlines to convert raw or voxel format streamlines to VTK polydata Examples -------- >>> import nipype.interfaces.camino as cmon >>> vtk = cmon.VtkStreamlines() >>> vtk.inputs.in_file = 'tract_data.Bfloat' >>> vtk.inputs.voxeldims = [1,1,1] >>> vtk.run() # doctest: +SKIP """ _cmd = "vtkstreamlines" input_spec = VtkStreamlinesInputSpec output_spec = VtkStreamlinesOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["vtk"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + ".vtk" class ProcStreamlinesInputSpec(StdOutCommandLineInputSpec): inputmodel = traits.Enum( "raw", "voxels", argstr="-inputmodel %s", desc="input model type (raw or voxels)", usedefault=True, ) in_file = File( exists=True, argstr="-inputfile %s", mandatory=True, position=1, desc="data file", ) maxtractpoints = traits.Int( argstr="-maxtractpoints %d", units="NA", desc="maximum number of tract points" ) mintractpoints = traits.Int( argstr="-mintractpoints %d", units="NA", desc="minimum number of tract points" ) maxtractlength = traits.Int( argstr="-maxtractlength %d", units="mm", desc="maximum length of tracts" ) mintractlength = traits.Int( argstr="-mintractlength %d", units="mm", desc="minimum length of tracts" ) datadims = traits.List( traits.Int, desc="data dimensions in voxels", argstr="-datadims %s", minlen=3, maxlen=3, units="voxels", ) voxeldims = traits.List( traits.Int, desc="voxel dimensions in mm", argstr="-voxeldims %s", minlen=3, maxlen=3, units="mm", ) 
seedpointmm = traits.List( traits.Int, desc="The coordinates of a single seed point for tractography in mm", argstr="-seedpointmm %s", minlen=3, maxlen=3, units="mm", ) seedpointvox = traits.List( traits.Int, desc="The coordinates of a single seed point for tractography in voxels", argstr="-seedpointvox %s", minlen=3, maxlen=3, units="voxels", ) seedfile = File( exists=False, argstr="-seedfile %s", desc="Image Containing Seed Points" ) regionindex = traits.Int( argstr="-regionindex %d", units="mm", desc="index of specific region to process" ) iterations = traits.Float( argstr="-iterations %d", units="NA", desc="Number of streamlines generated for each seed. Not required when outputting streamlines, but needed to create PICo images. The default is 1 if the output is streamlines, and 5000 if the output is connection probability images.", ) targetfile = File( exists=False, argstr="-targetfile %s", desc="Image containing target volumes." ) allowmultitargets = traits.Bool( argstr="-allowmultitargets", desc="Allows streamlines to connect to multiple target volumes.", ) directional = traits.List( traits.Int, desc="Splits the streamlines at the seed point and computes separate connection probabilities for each segment. Streamline segments are grouped according to their dot product with the vector (X, Y, Z). The ideal vector will be tangential to the streamline trajectory at the seed, such that the streamline projects from the seed along (X, Y, Z) and -(X, Y, Z). However, it is only necessary for the streamline trajectory to not be orthogonal to (X, Y, Z).", argstr="-directional %s", minlen=3, maxlen=3, units="NA", ) waypointfile = File( exists=False, argstr="-waypointfile %s", desc="Image containing waypoints. Waypoints are defined as regions of the image with the same intensity, where 0 is background and any value > 0 is a waypoint.", ) truncateloops = traits.Bool( argstr="-truncateloops", desc="This option allows streamlines to enter a waypoint exactly once. 
After the streamline leaves the waypoint, it is truncated upon a second entry to the waypoint.", ) discardloops = traits.Bool( argstr="-discardloops", desc="This option allows streamlines to enter a waypoint exactly once. After the streamline leaves the waypoint, the entire streamline is discarded upon a second entry to the waypoint.", ) exclusionfile = File( exists=False, argstr="-exclusionfile %s", desc="Image containing exclusion ROIs. This should be an Analyze 7.5 header / image file.hdr and file.img.", ) truncateinexclusion = traits.Bool( argstr="-truncateinexclusion", desc="Retain segments of a streamline before entry to an exclusion ROI.", ) endpointfile = File( exists=False, argstr="-endpointfile %s", desc="Image containing endpoint ROIs. This should be an Analyze 7.5 header / image file.hdr and file.img.", ) resamplestepsize = traits.Float( argstr="-resamplestepsize %d", units="NA", desc="Each point on a streamline is tested for entry into target, exclusion or waypoint volumes. If the length between points on a tract is not much smaller than the voxel length, then streamlines may pass through part of a voxel without being counted. To avoid this, the program resamples streamlines such that the step size is one tenth of the smallest voxel dimension in the image. This increases the size of raw or oogl streamline output and incurs some performance penalty. The resample resolution can be controlled with this option or disabled altogether by passing a negative step size or by passing the -noresample option.", ) noresample = traits.Bool( argstr="-noresample", desc="Disables resampling of input streamlines. Resampling is automatically disabled if the input model is voxels.", ) outputtracts = traits.Bool( argstr="-outputtracts", desc="Output streamlines in raw binary format." 
) outputroot = File( exists=False, argstr="-outputroot %s", desc="Prepended onto all output file names.", ) gzip = traits.Bool(argstr="-gzip", desc="save the output image in gzip format") outputcp = traits.Bool( argstr="-outputcp", desc="output the connection probability map (Analyze image, float)", requires=["outputroot", "seedfile"], ) outputsc = traits.Bool( argstr="-outputsc", desc="output the connection probability map (raw streamlines, int)", requires=["outputroot", "seedfile"], ) outputacm = traits.Bool( argstr="-outputacm", desc="output all tracts in a single connection probability map (Analyze image)", requires=["outputroot", "seedfile"], ) outputcbs = traits.Bool( argstr="-outputcbs", desc="outputs connectivity-based segmentation maps; requires target outputfile", requires=["outputroot", "targetfile", "seedfile"], ) class ProcStreamlinesOutputSpec(TraitedSpec): proc = File(exists=True, desc="Processed Streamlines") outputroot_files = OutputMultiPath(File(exists=True)) class ProcStreamlines(StdOutCommandLine): """ Process streamline data This program does post-processing of streamline output from track. It can either output streamlines or connection probability maps. 
* http://web4.cs.ucl.ac.uk/research/medic/camino/pmwiki/pmwiki.php?n=Man.procstreamlines Examples -------- >>> import nipype.interfaces.camino as cmon >>> proc = cmon.ProcStreamlines() >>> proc.inputs.in_file = 'tract_data.Bfloat' >>> proc.run() # doctest: +SKIP """ _cmd = "procstreamlines" input_spec = ProcStreamlinesInputSpec output_spec = ProcStreamlinesOutputSpec def _format_arg(self, name, spec, value): if name == "outputroot": return spec.argstr % self._get_actual_outputroot(value) return super(ProcStreamlines, self)._format_arg(name, spec, value) def __init__(self, *args, **kwargs): super(ProcStreamlines, self).__init__(*args, **kwargs) self.outputroot_files = [] def _run_interface(self, runtime): outputroot = self.inputs.outputroot if isdefined(outputroot): actual_outputroot = self._get_actual_outputroot(outputroot) base, filename, ext = split_filename(actual_outputroot) if not os.path.exists(base): os.makedirs(base) new_runtime = super(ProcStreamlines, self)._run_interface(runtime) self.outputroot_files = glob.glob( os.path.join(os.getcwd(), actual_outputroot + "*") ) return new_runtime else: new_runtime = super(ProcStreamlines, self)._run_interface(runtime) return new_runtime def _get_actual_outputroot(self, outputroot): actual_outputroot = os.path.join("procstream_outfiles", outputroot) return actual_outputroot def _list_outputs(self): outputs = self.output_spec().get() outputs["proc"] = os.path.abspath(self._gen_outfilename()) outputs["outputroot_files"] = self.outputroot_files return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_proc" class TractShredderInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, argstr="< %s", mandatory=True, position=-2, desc="tract file" ) offset = traits.Int( argstr="%d", units="NA", desc="initial offset of offset tracts", position=1 ) bunchsize = traits.Int( argstr="%d", units="NA", desc="reads and outputs a group of bunchsize tracts", position=2, ) 
space = traits.Int(argstr="%d", units="NA", desc="skips space tracts", position=3) class TractShredderOutputSpec(TraitedSpec): shredded = File(exists=True, desc="Shredded tract file") class TractShredder(StdOutCommandLine): """ Extracts bunches of streamlines. tractshredder works in a similar way to shredder, but processes streamlines instead of scalar data. The input is raw streamlines, in the format produced by track or procstreamlines. The program first makes an initial offset of offset tracts. It then reads and outputs a group of bunchsize tracts, skips space tracts, and repeats until there is no more input. Examples -------- >>> import nipype.interfaces.camino as cmon >>> shred = cmon.TractShredder() >>> shred.inputs.in_file = 'tract_data.Bfloat' >>> shred.inputs.offset = 0 >>> shred.inputs.bunchsize = 1 >>> shred.inputs.space = 2 >>> shred.run() # doctest: +SKIP """ _cmd = "tractshredder" input_spec = TractShredderInputSpec output_spec = TractShredderOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["shredded"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_shredded" class DT2NIfTIInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="-inputfile %s", mandatory=True, position=1, desc="tract file", ) output_root = File( argstr="-outputroot %s", position=2, genfile=True, desc="filename root prepended onto the names of three output files.", ) header_file = File( exists=True, argstr="-header %s", mandatory=True, position=3, desc=" A Nifti .nii or .hdr file containing the header information", ) class DT2NIfTIOutputSpec(TraitedSpec): dt = File(exists=True, desc="diffusion tensors in NIfTI format") exitcode = File( exists=True, desc="exit codes from Camino reconstruction in NIfTI format" ) lns0 = File( exists=True, desc="estimated lns0 from Camino reconstruction in NIfTI format" ) class DT2NIfTI(CommandLine): """ Converts 
camino tensor data to NIfTI format Reads Camino diffusion tensors, and converts them to NIFTI format as three .nii files. """ _cmd = "dt2nii" input_spec = DT2NIfTIInputSpec output_spec = DT2NIfTIOutputSpec def _list_outputs(self): outputs = self.output_spec().get() output_root = self._gen_outputroot() outputs["dt"] = os.path.abspath(output_root + "dt.nii") outputs["exitcode"] = os.path.abspath(output_root + "exitcode.nii") outputs["lns0"] = os.path.abspath(output_root + "lns0.nii") return outputs def _gen_outfilename(self): return self._gen_outputroot() def _gen_outputroot(self): output_root = self.inputs.output_root if not isdefined(output_root): output_root = self._gen_filename("output_root") return output_root def _gen_filename(self, name): if name == "output_root": _, filename, _ = split_filename(self.inputs.in_file) filename = filename + "_" return filename class NIfTIDT2CaminoInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, argstr="-inputfile %s", mandatory=True, position=1, desc="A NIFTI-1 dataset containing diffusion tensors. The tensors are assumed to be " "in lower-triangular order as specified by the NIFTI standard for the storage of " "symmetric matrices. This file should be either a .nii or a .hdr file.", ) s0_file = File( argstr="-s0 %s", exists=True, desc="File containing the unweighted signal for each voxel, may be a raw binary " "file (specify type with -inputdatatype) or a supported image file.", ) lns0_file = File( argstr="-lns0 %s", exists=True, desc="File containing the log of the unweighted signal for each voxel, may be a " "raw binary file (specify type with -inputdatatype) or a supported image file.", ) bgmask = File( argstr="-bgmask %s", exists=True, desc="Binary valued brain / background segmentation, may be a raw binary file " "(specify type with -maskdatatype) or a supported image file.", ) scaleslope = traits.Float( argstr="-scaleslope %s", desc="A value v in the diffusion tensor is scaled to v * s + i. 
This is " "applied after any scaling specified by the input image. Default is 1.0.", ) scaleinter = traits.Float( argstr="-scaleinter %s", desc="A value v in the diffusion tensor is scaled to v * s + i. This is " "applied after any scaling specified by the input image. Default is 0.0.", ) uppertriangular = traits.Bool( argstr="-uppertriangular %s", desc="Specifies input in upper-triangular (VTK style) order.", ) class NIfTIDT2CaminoOutputSpec(TraitedSpec): out_file = File(desc="diffusion tensors data in Camino format") class NIfTIDT2Camino(CommandLine): """ Converts NIFTI-1 diffusion tensors to Camino format. The program reads the NIFTI header but does not apply any spatial transformations to the data. The NIFTI intensity scaling parameters are applied. The output is the tensors in Camino voxel ordering: [exit, ln(S0), dxx, dxy, dxz, dyy, dyz, dzz]. The exit code is set to 0 unless a background mask is supplied, in which case the code is 0 in brain voxels and -1 in background voxels. The value of ln(S0) in the output is taken from a file if one is supplied, otherwise it is set to 0. NOTE FOR FSL USERS - FSL's dtifit can output NIFTI tensors, but they are not stored in the usual way (which is using NIFTI_INTENT_SYMMATRIX). FSL's tensors follow the ITK / VTK "upper-triangular" convention, so you will need to use the -uppertriangular option to convert these correctly. 
""" _cmd = "niftidt2camino" input_spec = NIfTIDT2CaminoInputSpec output_spec = NIfTIDT2CaminoOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = self._gen_filename("out_file") return outputs def _gen_filename(self, name): if name == "out_file": _, filename, _ = split_filename(self.inputs.in_file) return filename class AnalyzeHeaderInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, argstr="< %s", mandatory=True, position=1, desc="Tensor-fitted data filename", ) scheme_file = File( exists=True, argstr="%s", position=2, desc=("Camino scheme file (b values / vectors, " "see camino.fsl2scheme)"), ) readheader = File( exists=True, argstr="-readheader %s", position=3, desc=( "Reads header information from file and prints to " "stdout. If this option is not specified, then the " "program writes a header based on the other " "arguments." ), ) printimagedims = File( exists=True, argstr="-printimagedims %s", position=3, desc=( "Prints image data and voxel dimensions as " "Camino arguments and exits." ), ) # How do we implement both file and enum (for the program) in one argument? # Is this option useful anyway? # -printprogargs # Prints data dimension (and type, if relevant) arguments for a specific # Camino program, where prog is one of shredder, scanner2voxel, # vcthreshselect, pdview, track. printprogargs = File( exists=True, argstr="-printprogargs %s", position=3, desc=( "Prints data dimension (and type, if relevant) " "arguments for a specific Camino program, where " "prog is one of shredder, scanner2voxel, " "vcthreshselect, pdview, track." 
), ) printintelbyteorder = File( exists=True, argstr="-printintelbyteorder %s", position=3, desc=("Prints 1 if the header is little-endian, " "0 otherwise."), ) printbigendian = File( exists=True, argstr="-printbigendian %s", position=3, desc=("Prints 1 if the header is big-endian, 0 " "otherwise."), ) initfromheader = File( exists=True, argstr="-initfromheader %s", position=3, desc=( "Reads header information from file and " "intializes a new header with the values read " "from the file. You may replace any " "combination of fields in the new header by " "specifying subsequent options." ), ) data_dims = traits.List( traits.Int, desc="data dimensions in voxels", argstr="-datadims %s", minlen=3, maxlen=3, units="voxels", ) voxel_dims = traits.List( traits.Float, desc="voxel dimensions in mm", argstr="-voxeldims %s", minlen=3, maxlen=3, units="mm", ) centre = traits.List( traits.Int, argstr="-centre %s", minlen=3, maxlen=3, units="mm", desc=( "Voxel specifying origin of Talairach " "coordinate system for SPM, default [0 0 0]." ), ) picoseed = traits.List( traits.Int, argstr="-picoseed %s", minlen=3, maxlen=3, desc=("Voxel specifying the seed (for PICo maps), " "default [0 0 0]."), units="mm", ) nimages = traits.Int( argstr="-nimages %d", units="NA", desc="Number of images in the img file. Default 1.", ) datatype = traits.Enum( "byte", "char", "[u]short", "[u]int", "float", "complex", "double", argstr="-datatype %s", desc=( "The char datatype is 8 bit (not the 16 bit " "char of Java), as specified by the Analyze " "7.5 standard. The byte, ushort and uint " "types are not part of the Analyze " "specification but are supported by SPM." ), mandatory=True, ) offset = traits.Int( argstr="-offset %d", units="NA", desc=( "According to the Analyze 7.5 standard, this is " "the byte offset in the .img file at which " "voxels start. This value can be negative to " "specify that the absolute value is applied for " "every image in the file." 
), ) greylevels = traits.List( traits.Int, argstr="-gl %s", minlen=2, maxlen=2, desc=("Minimum and maximum greylevels. Stored as " "shorts in the header."), units="NA", ) scaleslope = traits.Float( argstr="-scaleslope %d", units="NA", desc=( "Intensities in the image are scaled by " "this factor by SPM and MRICro. Default is " "1.0." ), ) scaleinter = traits.Float( argstr="-scaleinter %d", units="NA", desc=("Constant to add to the image intensities. " "Used by SPM and MRIcro."), ) description = traits.String( argstr="-description %s", desc=( "Short description - No spaces, max " "length 79 bytes. Will be null " "terminated automatically." ), ) intelbyteorder = traits.Bool( argstr="-intelbyteorder", desc=("Write header in intel byte order " "(little-endian)."), ) networkbyteorder = traits.Bool( argstr="-networkbyteorder", desc=( "Write header in network byte order " "(big-endian). This is the default " "for new headers." ), ) class AnalyzeHeaderOutputSpec(TraitedSpec): header = File(exists=True, desc="Analyze header") class AnalyzeHeader(StdOutCommandLine): """ Create or read an Analyze 7.5 header file. Analyze image header, provides support for the most common header fields. Some fields, such as patient_id, are not currently supported. The program allows three nonstandard options: the field image_dimension.funused1 is the image scale. The intensity of each pixel in the associated .img file is (image value from file) * scale. Also, the origin of the Talairach coordinates (midline of the anterior commisure) are encoded in the field data_history.originator. These changes are included for compatibility with SPM. All headers written with this program are big endian by default. 
Example ------- >>> import nipype.interfaces.camino as cmon >>> hdr = cmon.AnalyzeHeader() >>> hdr.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> hdr.inputs.scheme_file = 'A.scheme' >>> hdr.inputs.data_dims = [256,256,256] >>> hdr.inputs.voxel_dims = [1,1,1] >>> hdr.run() # doctest: +SKIP """ _cmd = "analyzeheader" input_spec = AnalyzeHeaderInputSpec output_spec = AnalyzeHeaderOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["header"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + ".hdr" class ShredderInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, argstr="< %s", mandatory=True, position=-2, desc="raw binary data file", ) offset = traits.Int( argstr="%d", units="NA", desc="initial offset of offset bytes", position=1 ) chunksize = traits.Int( argstr="%d", units="NA", desc="reads and outputs a chunk of chunksize bytes", position=2, ) space = traits.Int(argstr="%d", units="NA", desc="skips space bytes", position=3) class ShredderOutputSpec(TraitedSpec): shredded = File(exists=True, desc="Shredded binary data file") class Shredder(StdOutCommandLine): """ Extracts periodic chunks from a data stream. Shredder makes an initial offset of offset bytes. It then reads and outputs chunksize bytes, skips space bytes, and repeats until there is no more input. If the chunksize is negative, chunks of size chunksize are read and the byte ordering of each chunk is reversed. The whole chunk will be reversed, so the chunk must be the same size as the data type, otherwise the order of the values in the chunk, as well as their endianness, will be reversed. 
Examples -------- >>> import nipype.interfaces.camino as cam >>> shred = cam.Shredder() >>> shred.inputs.in_file = 'SubjectA.Bfloat' >>> shred.inputs.offset = 0 >>> shred.inputs.chunksize = 1 >>> shred.inputs.space = 2 >>> shred.run() # doctest: +SKIP """ _cmd = "shredder" input_spec = ShredderInputSpec output_spec = ShredderOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["shredded_file"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_shredded" nipype-1.7.0/nipype/interfaces/camino/dti.py000066400000000000000000001430701413403311400210510ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os from ...utils.filemanip import split_filename from ..base import ( CommandLineInputSpec, CommandLine, traits, TraitedSpec, File, Directory, StdOutCommandLine, StdOutCommandLineInputSpec, isdefined, InputMultiPath, ) class DTIFitInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=1, desc="voxel-order data filename", ) bgmask = File( argstr="-bgmask %s", exists=True, desc=( "Provides the name of a file containing a background mask computed using, " "for example, FSL bet2 program. The mask file contains zero in background " "voxels and non-zero in foreground." ), ) scheme_file = File( exists=True, argstr="%s", mandatory=True, position=2, desc="Camino scheme file (b values / vectors, see camino.fsl2scheme)", ) non_linear = traits.Bool( argstr="-nonlinear", position=3, desc="Use non-linear fitting instead of the default linear regression " "to the log measurements. ", ) class DTIFitOutputSpec(TraitedSpec): tensor_fitted = File(exists=True, desc="path/name of 4D volume in voxel order") class DTIFit(StdOutCommandLine): """ Reads diffusion MRI data, acquired using the acquisition scheme detailed in the scheme file, from the data file. 
Use non-linear fitting instead of the default linear regression to the log measurements. The data file stores the diffusion MRI data in voxel order with the measurements stored in big-endian format and ordered as in the scheme file. The default input data type is four-byte float. The default output data type is eight-byte double. See modelfit and camino for the format of the data file and scheme file. The program fits the diffusion tensor to each voxel and outputs the results, in voxel order and as big-endian eight-byte doubles, to the standard output. The program outputs eight values in each voxel: [exit code, ln(S(0)), D_xx, D_xy, D_xz, D_yy, D_yz, D_zz]. An exit code of zero indicates no problems. For a list of other exit codes, see modelfit(1). The entry S(0) is an estimate of the signal at q=0. Example ------- >>> import nipype.interfaces.camino as cmon >>> fit = cmon.DTIFit() >>> fit.inputs.scheme_file = 'A.scheme' >>> fit.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> fit.run() # doctest: +SKIP """ _cmd = "dtfit" input_spec = DTIFitInputSpec output_spec = DTIFitOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["tensor_fitted"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_DT.Bdouble" class DTMetricInputSpec(CommandLineInputSpec): eigen_data = File( exists=True, argstr="-inputfile %s", mandatory=True, desc="voxel-order data filename", ) metric = traits.Enum( "fa", "md", "rd", "l1", "l2", "l3", "tr", "ra", "2dfa", "cl", "cp", "cs", argstr="-stat %s", mandatory=True, desc="Specifies the metric to compute.", ) inputdatatype = traits.Enum( "double", "float", "long", "int", "short", "char", argstr="-inputdatatype %s", usedefault=True, desc="Specifies the data type of the input data.", ) outputdatatype = traits.Enum( "double", "float", "long", "int", "short", "char", argstr="-outputdatatype %s", usedefault=True, desc="Specifies 
the data type of the output data.",
    )
    data_header = File(
        argstr="-header %s",
        exists=True,
        desc=(
            "A Nifti .nii or .nii.gz file containing the header information. "
            "Usually this will be the header of the raw data file from which "
            "the diffusion tensors were reconstructed."
        ),
    )
    outputfile = File(
        argstr="-outputfile %s",
        genfile=True,
        desc=(
            "Output name. Output will be a .nii.gz file if data_header is provided and"
            "in voxel order with outputdatatype datatype (default: double) otherwise."
        ),
    )


class DTMetricOutputSpec(TraitedSpec):
    metric_stats = File(
        exists=True, desc="Diffusion Tensor statistics of the chosen metric"
    )


class DTMetric(CommandLine):
    """
    Computes tensor metric statistics based on the eigenvalues l1 >= l2 >= l3
    typically obtained from ComputeEigensystem.

    The full list of statistics is:

     - <cl> = (l1 - l2) / l1 , a measure of linearity
     - <cp> = (l2 - l3) / l1 , a measure of planarity
     - <cs> = l3 / l1 , a measure of isotropy
       with: cl + cp + cs = 1
     - <l1> = first eigenvalue
     - <l2> = second eigenvalue
     - <l3> = third eigenvalue
     - <tr> = l1 + l2 + l3
     - <md> = tr / 3
     - <rd> = (l2 + l3) / 2
     - <fa> = fractional anisotropy. (Basser et al, J Magn Reson B 1996)
     - <ra> = relative anisotropy (Basser et al, J Magn Reson B 1996)
     - <2dfa> = 2D FA of the two minor eigenvalues l2 and l3
       i.e. sqrt( 2 * [(l2 - <l>)^2 + (l3 - <l>)^2] / (l2^2 + l3^2) )
       with: <l> = (l2 + l3) / 2

    Example
    -------
    Compute the CP planar metric as float data type.

    >>> import nipype.interfaces.camino as cam
    >>> dtmetric = cam.DTMetric()
    >>> dtmetric.inputs.eigen_data = 'dteig.Bdouble'
    >>> dtmetric.inputs.metric = 'cp'
    >>> dtmetric.inputs.outputdatatype = 'float'
    >>> dtmetric.run()                 # doctest: +SKIP

    """

    _cmd = "dtshape"
    input_spec = DTMetricInputSpec
    output_spec = DTMetricOutputSpec

    def _list_outputs(self):
        # Single output: the per-voxel statistic map for the chosen metric.
        outputs = self.output_spec().get()
        outputs["metric_stats"] = os.path.abspath(self._gen_outfilename())
        return outputs

    def _gen_outfilename(self):
        return self._gen_outputfile()

    def _gen_outputfile(self):
        # Honour an explicit outputfile; otherwise generate one from the inputs.
        outputfile = self.inputs.outputfile
        if not isdefined(outputfile):
            outputfile = self._gen_filename("outputfile")
        return outputfile

    def _gen_filename(self, name):
        if name == "outputfile":
            _, name, _ = split_filename(self.inputs.eigen_data)
            metric = self.inputs.metric
            datatype = self.inputs.outputdatatype
            # NIfTI output when a header is supplied; raw voxel-order otherwise.
            if isdefined(self.inputs.data_header):
                filename = name + "_" + metric + ".nii.gz"
            else:
                filename = name + "_" + metric + ".B" + datatype
        return filename


class ModelFitInputSpec(StdOutCommandLineInputSpec):
    def _gen_model_options():  # @NoSelf
        """
        Generate all possible permutations of < multi - tensor > < single - tensor >
        options
        """
        single_tensor = ["dt", "restore", "algdt", "nldt_pos", "nldt", "ldt_wtd"]
        multi_tensor = [
            "cylcyl",
            "cylcyl_eq",
            "pospos",
            "pospos_eq",
            "poscyl",
            "poscyl_eq",
            "cylcylcyl",
            "cylcylcyl_eq",
            "pospospos",
            "pospospos_eq",
            "posposcyl",
            "posposcyl_eq",
            "poscylcyl",
            "poscylcyl_eq",
        ]
        other = ["adc", "ball_stick"]

        model_list = single_tensor
        model_list.extend(other)
        model_list.extend(
            [multi + " " + single for multi in multi_tensor for single in single_tensor]
        )
        return model_list

    model = traits.Enum(
        _gen_model_options(),
        argstr="-model %s",
        mandatory=True,
        desc="Specifies the model to be fit to the data.",
    )
    in_file = File(
        exists=True,
        argstr="-inputfile %s",
        mandatory=True,
        desc="voxel-order data filename",
    )
    inputdatatype = traits.Enum(
        "float",
        "char",
        "short",
        "int",
        "long",
        "double",
argstr="-inputdatatype %s", desc="Specifies the data type of the input file. " "The input file must have BIG-ENDIAN ordering. " "By default, the input type is ``float``.", ) scheme_file = File( exists=True, argstr="-schemefile %s", mandatory=True, desc="Camino scheme file (b values / vectors, see camino.fsl2scheme)", ) outputfile = File(argstr="-outputfile %s", desc="Filename of the output file.") outlier = File( argstr="-outliermap %s", exists=True, desc="Specifies the name of the file to contain the outlier map generated by " "the RESTORE algorithm.", ) noisemap = File( argstr="-noisemap %s", exists=True, desc="Specifies the name of the file to contain the estimated noise variance on the " "diffusion-weighted signal, generated by a weighted tensor fit. " "The data type of this file is big-endian double.", ) residualmap = File( argstr="-residualmap %s", exists=True, desc="Specifies the name of the file to contain the weighted residual errors after " "computing a weighted linear tensor fit. " "One value is produced per measurement, in voxel order. " "The data type of this file is big-endian double. " "Images of the residuals for each measurement can be extracted with shredder.", ) sigma = traits.Float( argstr="-sigma %G", desc="Specifies the standard deviation of the noise in the data. " "Required by the RESTORE algorithm.", ) bgthresh = traits.Float( argstr="-bgthresh %G", desc="Sets a threshold on the average q=0 measurement to separate " "foreground and background. The program does not process background voxels, " "but outputs the same number of values in background voxels and foreground voxels. " "Each value is zero in background voxels apart from the exit code which is -1.", ) bgmask = File( argstr="-bgmask %s", exists=True, desc="Provides the name of a file containing a background mask computed using, " "for example, FSL's bet2 program. 
The mask file contains zero in background voxels " "and non-zero in foreground.", ) cfthresh = traits.Float( argstr="-csfthresh %G", desc="Sets a threshold on the average q=0 measurement to determine which voxels " "are CSF. This program does not treat CSF voxels any different to other voxels.", ) fixedmodq = traits.List( traits.Float, argstr="-fixedmod %s", minlen=4, maxlen=4, desc="Specifies a spherical acquisition scheme with M measurements " "with q=0 and N measurements with :math:`|q|=Q` and diffusion time tau. " "The N measurements with :math:`|q|=Q` have unique directions. The program reads in " "the directions from the files in directory PointSets.", ) fixedbvalue = traits.List( traits.Float, argstr="-fixedbvalue %s", minlen=3, maxlen=3, desc="As above, but specifies . The resulting scheme is the same whether " "you specify b directly or indirectly using -fixedmodq.", ) tau = traits.Float( argstr="-tau %G", desc="Sets the diffusion time separately. This overrides the diffusion time " "specified in a scheme file or by a scheme index for both the acquisition scheme " "and in the data synthesis.", ) class ModelFitOutputSpec(TraitedSpec): fitted_data = File(exists=True, desc="output file of 4D volume in voxel order") class ModelFit(StdOutCommandLine): """ Fits models of the spin-displacement density to diffusion MRI measurements. This is an interface to various model fitting routines for diffusion MRI data that fit models of the spin-displacement density function. In particular, it will fit the diffusion tensor to a set of measurements as well as various other models including two or three-tensor models. The program can read input data from a file or can generate synthetic data using various test functions for testing and simulations. 
Example ------- >>> import nipype.interfaces.camino as cmon >>> fit = cmon.ModelFit() >>> fit.model = 'dt' >>> fit.inputs.scheme_file = 'A.scheme' >>> fit.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> fit.run() # doctest: +SKIP """ _cmd = "modelfit" input_spec = ModelFitInputSpec output_spec = ModelFitOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["fitted_data"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_fit.Bdouble" class DTLUTGenInputSpec(StdOutCommandLineInputSpec): lrange = traits.List( traits.Float, desc="Index to one-tensor LUTs. This is the ratio L1/L3 and L2 / L3." "The LUT is square, with half the values calculated (because L2 / L3 cannot be " "less than L1 / L3 by definition)." "The minimum must be >= 1. For comparison, a ratio L1 / L3 = 10 with L2 / L3 = 1 " "corresponds to an FA of 0.891, and L1 / L3 = 15 with L2 / L3 = 1 corresponds " "to an FA of 0.929. The default range is 1 to 10.", argstr="-lrange %s", minlen=2, maxlen=2, position=1, units="NA", ) frange = traits.List( traits.Float, desc="Index to two-tensor LUTs. This is the fractional anisotropy" " of the two tensors. The default is 0.3 to 0.94", argstr="-frange %s", minlen=2, maxlen=2, position=1, units="NA", ) step = traits.Float( argstr="-step %f", units="NA", desc="Distance between points in the LUT." "For example, if lrange is 1 to 10 and the step is 0.1, LUT entries will be computed " "at L1 / L3 = 1, 1.1, 1.2 ... 10.0 and at L2 / L3 = 1.0, 1.1 ... L1 / L3." "For single tensor LUTs, the default step is 0.2, for two-tensor LUTs it is 0.02.", ) samples = traits.Int( argstr="-samples %d", units="NA", desc="The number of synthetic measurements to generate at each point in the LUT. " "The default is 2000.", ) snr = traits.Float( argstr="-snr %f", units="NA", desc="The signal to noise ratio of the unweighted (q = 0) measurements." 
"This should match the SNR (in white matter) of the images that the LUTs are used with.", ) bingham = traits.Bool( argstr="-bingham", desc="Compute a LUT for the Bingham PDF. This is the default.", ) acg = traits.Bool(argstr="-acg", desc="Compute a LUT for the ACG PDF.") watson = traits.Bool(argstr="-watson", desc="Compute a LUT for the Watson PDF.") inversion = traits.Int( argstr="-inversion %d", units="NA", desc="Index of the inversion to use. The default is 1 (linear single tensor inversion).", ) trace = traits.Float( argstr="-trace %G", units="NA", desc="Trace of the diffusion tensor(s) used in the test function in the LUT generation. " "The default is 2100E-12 m^2 s^-1.", ) scheme_file = File( argstr="-schemefile %s", mandatory=True, position=2, desc="The scheme file of the images to be processed using this LUT.", ) class DTLUTGenOutputSpec(TraitedSpec): dtLUT = File(exists=True, desc="Lookup Table") class DTLUTGen(StdOutCommandLine): """ Calibrates the PDFs for PICo probabilistic tractography. This program needs to be run once for every acquisition scheme. It outputs a lookup table that is used by the dtpicoparams program to find PICo PDF parameters for an image. The default single tensor LUT contains parameters of the Bingham distribution and is generated by supplying a scheme file and an estimated signal to noise in white matter regions of the (q=0) image. The default inversion is linear (inversion index 1). Advanced users can control several options, including the extent and resolution of the LUT, the inversion index, and the type of PDF. See dtlutgen(1) for details. 
Example ------- >>> import nipype.interfaces.camino as cmon >>> dtl = cmon.DTLUTGen() >>> dtl.inputs.snr = 16 >>> dtl.inputs.scheme_file = 'A.scheme' >>> dtl.run() # doctest: +SKIP """ _cmd = "dtlutgen" input_spec = DTLUTGenInputSpec output_spec = DTLUTGenOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["dtLUT"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.scheme_file) return name + ".dat" class PicoPDFsInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, argstr="< %s", mandatory=True, position=1, desc="voxel-order data filename", ) inputmodel = traits.Enum( "dt", "multitensor", "pds", argstr="-inputmodel %s", position=2, desc="input model type", usedefault=True, ) luts = InputMultiPath( File(exists=True), argstr="-luts %s", mandatory=True, desc="Files containing the lookup tables." "For tensor data, one lut must be specified for each type of inversion used in the " "image (one-tensor, two-tensor, three-tensor)." "For pds, the number of LUTs must match -numpds (it is acceptable to use the same " "LUT several times - see example, above)." "These LUTs may be generated with dtlutgen.", ) pdf = traits.Enum( "bingham", "watson", "acg", argstr="-pdf %s", position=4, desc="""\ Specifies the PDF to use. There are three choices: * watson - The Watson distribution. This distribution is rotationally symmetric. * bingham - The Bingham distributionn, which allows elliptical probability density contours. * acg - The Angular Central Gaussian distribution, which also allows elliptical probability density contours. """, usedefault=True, ) directmap = traits.Bool( argstr="-directmap", desc="Only applicable when using pds as the inputmodel. 
Use direct mapping between " "the eigenvalues and the distribution parameters instead of the log of the eigenvalues.", ) maxcomponents = traits.Int( argstr="-maxcomponents %d", units="NA", desc="The maximum number of tensor components in a voxel (default 2) for multitensor data." "Currently, only the default is supported, but future releases may allow the input " "of three-tensor data using this option.", ) numpds = traits.Int( argstr="-numpds %d", units="NA", desc="The maximum number of PDs in a voxel (default 3) for PD data." "This option determines the size of the input and output voxels." "This means that the data file may be large enough to accomodate three or more PDs," "but does not mean that any of the voxels are classified as containing three or more PDs.", ) class PicoPDFsOutputSpec(TraitedSpec): pdfs = File(exists=True, desc="path/name of 4D volume in voxel order") class PicoPDFs(StdOutCommandLine): """ Constructs a spherical PDF in each voxel for probabilistic tractography. Example ------- >>> import nipype.interfaces.camino as cmon >>> pdf = cmon.PicoPDFs() >>> pdf.inputs.inputmodel = 'dt' >>> pdf.inputs.luts = ['lut_file'] >>> pdf.inputs.in_file = 'voxel-order_data.Bfloat' >>> pdf.run() # doctest: +SKIP """ _cmd = "picopdfs" input_spec = PicoPDFsInputSpec output_spec = PicoPDFsOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["pdfs"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_pdfs.Bdouble" class TrackInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="-inputfile %s", position=1, desc="input data file" ) seed_file = File(exists=True, argstr="-seedfile %s", position=2, desc="seed file") inputmodel = traits.Enum( "dt", "multitensor", "sfpeak", "pico", "repbs_dt", "repbs_multitensor", "ballstick", "wildbs_dt", "bayesdirac", "bayesdirac_dt", "bedpostx_dyad", "bedpostx", argstr="-inputmodel %s", 
desc="input model type", usedefault=True, ) tracker = traits.Enum( "fact", "euler", "rk4", argstr="-tracker %s", desc=( "The tracking algorithm controls streamlines are " "generated from the data. The choices are: " "- FACT, which follows the local fibre orientation " "in each voxel. No interpolation is used." "- EULER, which uses a fixed step size along the " "local fibre orientation. With nearest-neighbour " "interpolation, this method may be very similar to " "FACT, except that the step size is fixed, whereas " "FACT steps extend to the boundary of the next voxel " "(distance variable depending on the entry and exit " "points to the voxel)." "- RK4: Fourth-order Runge-Kutta method. The step " "size is fixed, however the eventual direction of " "the step is determined by taking and averaging a " "series of partial steps." ), usedefault=True, ) interpolator = traits.Enum( "nn", "prob_nn", "linear", argstr="-interpolator %s", desc=( "The interpolation algorithm determines how " "the fiber orientation(s) are defined at a given " "continuous point within the input image. " "Interpolators are only used when the tracking " "algorithm is not FACT. The choices are: " "- NN: Nearest-neighbour interpolation, just " "uses the local voxel data directly." "- PROB_NN: Probabilistic nearest-neighbor " "interpolation, similar to the method pro- " "posed by Behrens et al [Magnetic Resonance " "in Medicine, 50:1077-1088, 2003]. The data " "is not interpolated, but at each point we " "randomly choose one of the 8 voxels sur- " "rounding a point. The probability of choosing " "a particular voxel is based on how close the " "point is to the centre of that voxel." "- LINEAR: Linear interpolation of the vector " "field containing the principal directions at " "each point." ), ) stepsize = traits.Float( argstr="-stepsize %f", requires=["tracker"], desc=("Step size for EULER and RK4 tracking. 
" "The default is 1mm."), ) inputdatatype = traits.Enum( "float", "double", argstr="-inputdatatype %s", desc="input file type" ) gzip = traits.Bool(argstr="-gzip", desc="save the output image in gzip format") maxcomponents = traits.Int( argstr="-maxcomponents %d", units="NA", desc=( "The maximum number of tensor components in a " "voxel. This determines the size of the input " "file and does not say anything about the " "voxel classification. The default is 2 if " "the input model is multitensor and 1 if the " "input model is dt." ), ) numpds = traits.Int( argstr="-numpds %d", units="NA", desc=( "The maximum number of PDs in a voxel for input " "models sfpeak and pico. The default is 3 for input " "model sfpeak and 1 for input model pico. This option " "determines the size of the voxels in the input file " "and does not affect tracking. For tensor data, use " "the -maxcomponents option." ), ) data_dims = traits.List( traits.Int, desc="data dimensions in voxels", argstr="-datadims %s", minlen=3, maxlen=3, units="voxels", ) voxel_dims = traits.List( traits.Float, desc="voxel dimensions in mm", argstr="-voxeldims %s", minlen=3, maxlen=3, units="mm", ) ipthresh = traits.Float( argstr="-ipthresh %f", desc=( "Curvature threshold for tracking, expressed as " "the minimum dot product between two streamline " "orientations calculated over the length of a " "voxel. If the dot product between the previous " "and current directions is less than this " "threshold, then the streamline terminates. The " "default setting will terminate fibres that curve " "by more than 80 degrees. Set this to -1.0 to " "disable curvature checking completely." ), ) curvethresh = traits.Float( argstr="-curvethresh %f", desc=( "Curvature threshold for tracking, expressed " "as the maximum angle (in degrees) between " "between two streamline orientations " "calculated over the length of a voxel. If " "the angle is greater than this, then the " "streamline terminates." 
), ) curveinterval = traits.Float( argstr="-curveinterval %f", requires=["curvethresh"], desc=( "Interval over which the curvature threshold " "should be evaluated, in mm. The default is " "5mm. When using the default curvature " "threshold of 90 degrees, this means that " "streamlines will terminate if they curve by " "more than 90 degrees over a path length " "of 5mm." ), ) anisthresh = traits.Float( argstr="-anisthresh %f", desc=( "Terminate fibres that enter a voxel with lower " "anisotropy than the threshold." ), ) anisfile = File( argstr="-anisfile %s", exists=True, desc=( "File containing the anisotropy map. This is required to " "apply an anisotropy threshold with non tensor data. If " "the map issupplied it is always used, even in tensor " "data." ), ) outputtracts = traits.Enum( "float", "double", "oogl", argstr="-outputtracts %s", desc="output tract file type", ) out_file = File( argstr="-outputfile %s", position=-1, genfile=True, desc="output data file" ) output_root = File( exists=False, argstr="-outputroot %s", position=-1, desc="root directory for output", ) class TrackOutputSpec(TraitedSpec): tracked = File(exists=True, desc="output file containing reconstructed tracts") class Track(CommandLine): """ Performs tractography using one of the following models: dt', 'multitensor', 'pds', 'pico', 'bootstrap', 'ballstick', 'bayesdirac' Example ------- >>> import nipype.interfaces.camino as cmon >>> track = cmon.Track() >>> track.inputs.inputmodel = 'dt' >>> track.inputs.in_file = 'data.Bfloat' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP """ _cmd = "track" input_spec = TrackInputSpec output_spec = TrackOutputSpec def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_file): out_file_path = os.path.abspath(self.inputs.out_file) else: out_file_path = os.path.abspath(self._gen_outfilename()) outputs["tracked"] = out_file_path return outputs def _gen_filename(self, name): if name == "out_file": 
return self._gen_outfilename() else: return None def _gen_outfilename(self): # Currently in_file is only undefined for bedpostx input if not isdefined(self.inputs.in_file): name = "bedpostx" else: _, name, _ = split_filename(self.inputs.in_file) return name + "_tracked" class TrackDT(Track): """ Performs streamline tractography using tensor data Example ------- >>> import nipype.interfaces.camino as cmon >>> track = cmon.TrackDT() >>> track.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP """ def __init__(self, command=None, **inputs): inputs["inputmodel"] = "dt" return super(TrackDT, self).__init__(command, **inputs) class TrackPICoInputSpec(TrackInputSpec): pdf = traits.Enum( "bingham", "watson", "acg", argstr="-pdf %s", desc='Specifies the model for PICo parameters. The default is "bingham.', ) iterations = traits.Int( argstr="-iterations %d", units="NA", desc="Number of streamlines to generate at each seed point. The default is 5000.", ) class TrackPICo(Track): """ Performs streamline tractography using Probabilistic Index of Connectivity (PICo). Example ------- >>> import nipype.interfaces.camino as cmon >>> track = cmon.TrackPICo() >>> track.inputs.in_file = 'pdfs.Bfloat' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP """ input_spec = TrackPICoInputSpec def __init__(self, command=None, **inputs): inputs["inputmodel"] = "pico" return super(TrackPICo, self).__init__(command, **inputs) class TrackBedpostxDeterInputSpec(TrackInputSpec): bedpostxdir = Directory( argstr="-bedpostxdir %s", mandatory=True, exists=True, desc=("Directory containing bedpostx output"), ) min_vol_frac = traits.Float( argstr="-bedpostxminf %d", units="NA", desc=( "Zeros out compartments in bedpostx data " "with a mean volume fraction f of less than " "min_vol_frac. The default is 0.01." 
), ) class TrackBedpostxDeter(Track): """ Data from FSL's bedpostx can be imported into Camino for deterministic tracking. (Use TrackBedpostxProba for bedpostx probabilistic tractography.) The tracking is based on the vector images dyads1.nii.gz, ... , dyadsN.nii.gz, where there are a maximum of N compartments (corresponding to each fiber population) in each voxel. It also uses the N images mean_f1samples.nii.gz, ..., mean_fNsamples.nii.gz, normalized such that the sum of all compartments is 1. Compartments where the mean_f is less than a threshold are discarded and not used for tracking. The default value is 0.01. This can be changed with the min_vol_frac option. Example ------- >>> import nipype.interfaces.camino as cam >>> track = cam.TrackBedpostxDeter() >>> track.inputs.bedpostxdir = 'bedpostxout' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP """ input_spec = TrackBedpostxDeterInputSpec def __init__(self, command=None, **inputs): inputs["inputmodel"] = "bedpostx_dyad" return super(TrackBedpostxDeter, self).__init__(command, **inputs) class TrackBedpostxProbaInputSpec(TrackInputSpec): bedpostxdir = Directory( argstr="-bedpostxdir %s", mandatory=True, exists=True, desc=("Directory containing bedpostx output"), ) min_vol_frac = traits.Float( argstr="-bedpostxminf %d", units="NA", desc=( "Zeros out compartments in bedpostx data " "with a mean volume fraction f of less than " "min_vol_frac. The default is 0.01." ), ) iterations = traits.Int( argstr="-iterations %d", units="NA", desc="Number of streamlines to generate at each seed point. The default is 1.", ) class TrackBedpostxProba(Track): """ Data from FSL's bedpostx can be imported into Camino for probabilistic tracking. (Use TrackBedpostxDeter for bedpostx deterministic tractography.) The tracking uses the files merged_th1samples.nii.gz, merged_ph1samples.nii.gz, ... 
, merged_thNsamples.nii.gz, merged_phNsamples.nii.gz where there are a maximum of N compartments (corresponding to each fiber population) in each voxel. These images contain M samples of theta and phi, the polar coordinates describing the "stick" for each compartment. At each iteration, a random number X between 1 and M is drawn and the Xth samples of theta and phi become the principal directions in the voxel. It also uses the N images mean_f1samples.nii.gz, ..., mean_fNsamples.nii.gz, normalized such that the sum of all compartments is 1. Compartments where the mean_f is less than a threshold are discarded and not used for tracking. The default value is 0.01. This can be changed with the min_vol_frac option. Example ------- >>> import nipype.interfaces.camino as cam >>> track = cam.TrackBedpostxProba() >>> track.inputs.bedpostxdir = 'bedpostxout' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.inputs.iterations = 100 >>> track.run() # doctest: +SKIP """ input_spec = TrackBedpostxProbaInputSpec def __init__(self, command=None, **inputs): inputs["inputmodel"] = "bedpostx" return super(TrackBedpostxProba, self).__init__(command, **inputs) class TrackBayesDiracInputSpec(TrackInputSpec): scheme_file = File( argstr="-schemefile %s", mandatory=True, exists=True, desc=("The scheme file corresponding to the data being " "processed."), ) iterations = traits.Int( argstr="-iterations %d", units="NA", desc=( "Number of streamlines to generate at each " "seed point. The default is 5000." ), ) pdf = traits.Enum( "bingham", "watson", "acg", argstr="-pdf %s", desc="Specifies the model for PICo priors (not the curvature priors). " "The default is 'bingham'.", ) pointset = traits.Int( argstr="-pointset %s", desc="""\ Index to the point set to use for Bayesian likelihood calculation. The index specifies a set of evenly distributed points on the unit sphere, where each point x defines two possible step directions (x or -x) for the streamline path. 
A larger number indexes a larger point set, which gives higher angular resolution at the expense of computation time. The default is index 1, which gives 1922 points, index 0 gives 1082 points, index 2 gives 3002 points.""", ) datamodel = traits.Enum( "cylsymmdt", "ballstick", argstr="-datamodel %s", desc="""\ Model of the data for Bayesian tracking. The default model is "cylsymmdt", a diffusion tensor with cylindrical symmetry about e_1, ie L1 >= L_2 = L_3. The other model is "ballstick", the partial volume model (see ballstickfit).""", ) curvepriork = traits.Float( argstr="-curvepriork %G", desc="""\ Concentration parameter for the prior distribution on fibre orientations given the fibre orientation at the previous step. Larger values of k make curvature less likely.""", ) curvepriorg = traits.Float( argstr="-curvepriorg %G", desc="""\ Concentration parameter for the prior distribution on fibre orientations given the fibre orientation at the previous step. Larger values of g make curvature less likely.""", ) extpriorfile = File( exists=True, argstr="-extpriorfile %s", desc="""\ Path to a PICo image produced by picopdfs. The PDF in each voxel is used as a prior for the fibre orientation in Bayesian tracking. The prior image must be in the same space as the diffusion data.""", ) extpriordatatype = traits.Enum( "float", "double", argstr="-extpriordatatype %s", desc='Datatype of the prior image. The default is "double".', ) class TrackBayesDirac(Track): """ Perform streamline tractography using a Bayesian tracking with Dirac priors. 
Example ------- >>> import nipype.interfaces.camino as cmon >>> track = cmon.TrackBayesDirac() >>> track.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.inputs.scheme_file = 'bvecs.scheme' >>> track.run() # doctest: +SKIP """ input_spec = TrackBayesDiracInputSpec def __init__(self, command=None, **inputs): inputs["inputmodel"] = "bayesdirac" return super(TrackBayesDirac, self).__init__(command, **inputs) class TrackBallStick(Track): """ Performs streamline tractography using ball-stick fitted data Example ------- >>> import nipype.interfaces.camino as cmon >>> track = cmon.TrackBallStick() >>> track.inputs.in_file = 'ballstickfit_data.Bfloat' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP """ def __init__(self, command=None, **inputs): inputs["inputmodel"] = "ballstick" return super(TrackBallStick, self).__init__(command, **inputs) class TrackBootstrapInputSpec(TrackInputSpec): scheme_file = File( argstr="-schemefile %s", mandatory=True, exists=True, desc="The scheme file corresponding to the data being processed.", ) iterations = traits.Int( argstr="-iterations %d", units="NA", desc="Number of streamlines to generate at each seed point.", ) inversion = traits.Int( argstr="-inversion %s", desc="""\ Tensor reconstruction algorithm for repetition bootstrapping. Default is 1 (linear reconstruction, single tensor).""", ) bsdatafiles = traits.List( File(exists=True), mandatory=True, argstr="-bsdatafile %s", desc="""\ Specifies files containing raw data for repetition bootstrapping. Use -inputfile for wild bootstrap data.""", ) bgmask = File( argstr="-bgmask %s", exists=True, desc="""\ Provides the name of a file containing a background mask computed using, for example, FSL's bet2 program. 
The mask file contains zero in background voxels and non-zero in foreground.""", ) class TrackBootstrap(Track): """ Performs bootstrap streamline tractography using mulitple scans of the same subject Example ------- >>> import nipype.interfaces.camino as cmon >>> track = cmon.TrackBootstrap() >>> track.inputs.inputmodel='repbs_dt' >>> track.inputs.scheme_file = 'bvecs.scheme' >>> track.inputs.bsdatafiles = ['fitted_data1.Bfloat', 'fitted_data2.Bfloat'] >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP """ input_spec = TrackBootstrapInputSpec def __init__(self, command=None, **inputs): return super(TrackBootstrap, self).__init__(command, **inputs) class ComputeMeanDiffusivityInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="< %s", mandatory=True, position=1, desc="Tensor-fitted data filename", ) scheme_file = File( exists=True, argstr="%s", position=2, desc="Camino scheme file (b values / vectors, see camino.fsl2scheme)", ) out_file = File(argstr="> %s", position=-1, genfile=True) inputmodel = traits.Enum( "dt", "twotensor", "threetensor", argstr="-inputmodel %s", desc="""\ Specifies the model that the input tensor data contains parameters for. By default, the program assumes that the input data contains a single diffusion tensor in each voxel.""", ) inputdatatype = traits.Enum( "char", "short", "int", "long", "float", "double", argstr="-inputdatatype %s", desc="Specifies the data type of the input file.", ) outputdatatype = traits.Enum( "char", "short", "int", "long", "float", "double", argstr="-outputdatatype %s", desc="Specifies the data type of the output data.", ) class ComputeMeanDiffusivityOutputSpec(TraitedSpec): md = File(exists=True, desc="Mean Diffusivity Map") class ComputeMeanDiffusivity(StdOutCommandLine): """ Computes the mean diffusivity (trace/3) from diffusion tensors. 
Example ------- >>> import nipype.interfaces.camino as cmon >>> md = cmon.ComputeMeanDiffusivity() >>> md.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> md.inputs.scheme_file = 'A.scheme' >>> md.run() # doctest: +SKIP """ _cmd = "md" input_spec = ComputeMeanDiffusivityInputSpec output_spec = ComputeMeanDiffusivityOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["md"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_MD.img" # Need to change to self.inputs.outputdatatype class ComputeFractionalAnisotropyInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, argstr="< %s", mandatory=True, position=1, desc="Tensor-fitted data filename", ) scheme_file = File( exists=True, argstr="%s", position=2, desc="Camino scheme file (b values / vectors, see camino.fsl2scheme)", ) inputmodel = traits.Enum( "dt", "twotensor", "threetensor", "multitensor", argstr="-inputmodel %s", desc="""\ Specifies the model that the input tensor data contains parameters for. By default, the program assumes that the input data contains a single diffusion tensor in each voxel.""", ) inputdatatype = traits.Enum( "char", "short", "int", "long", "float", "double", argstr="-inputdatatype %s", desc="Specifies the data type of the input file.", ) outputdatatype = traits.Enum( "char", "short", "int", "long", "float", "double", argstr="-outputdatatype %s", desc="Specifies the data type of the output data.", ) class ComputeFractionalAnisotropyOutputSpec(TraitedSpec): fa = File(exists=True, desc="Fractional Anisotropy Map") class ComputeFractionalAnisotropy(StdOutCommandLine): """ Computes the fractional anisotropy of tensors. Reads diffusion tensor (single, two-tensor or three-tensor) data from the standard input, computes the fractional anisotropy (FA) of each tensor and outputs the results to the standard output. 
For multiple-tensor data the program outputs the FA of each tensor, so for three-tensor data, for example, the output contains three fractional anisotropy values per voxel. Example ------- >>> import nipype.interfaces.camino as cmon >>> fa = cmon.ComputeFractionalAnisotropy() >>> fa.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> fa.inputs.scheme_file = 'A.scheme' >>> fa.run() # doctest: +SKIP """ _cmd = "fa" input_spec = ComputeFractionalAnisotropyInputSpec output_spec = ComputeFractionalAnisotropyOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["fa"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_FA.Bdouble" # Need to change to self.inputs.outputdatatype class ComputeTensorTraceInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, argstr="< %s", mandatory=True, position=1, desc="Tensor-fitted data filename", ) scheme_file = File( exists=True, argstr="%s", position=2, desc="Camino scheme file (b values / vectors, see camino.fsl2scheme)", ) inputmodel = traits.Enum( "dt", "twotensor", "threetensor", "multitensor", argstr="-inputmodel %s", desc="""\ Specifies the model that the input tensor data contains parameters for. By default, the program assumes that the input data contains a single diffusion tensor in each voxel.""", ) inputdatatype = traits.Enum( "char", "short", "int", "long", "float", "double", argstr="-inputdatatype %s", desc="Specifies the data type of the input file.", ) outputdatatype = traits.Enum( "char", "short", "int", "long", "float", "double", argstr="-outputdatatype %s", desc="Specifies the data type of the output data.", ) class ComputeTensorTraceOutputSpec(TraitedSpec): trace = File(exists=True, desc="Trace of the diffusion tensor") class ComputeTensorTrace(StdOutCommandLine): """ Computes the trace of tensors. 
Reads diffusion tensor (single, two-tensor or three-tensor) data from the standard input, computes the trace of each tensor, i.e., three times the mean diffusivity, and outputs the results to the standard output. For multiple-tensor data the program outputs the trace of each tensor, so for three-tensor data, for example, the output contains three values per voxel. Divide the output by three to get the mean diffusivity. Example ------- >>> import nipype.interfaces.camino as cmon >>> trace = cmon.ComputeTensorTrace() >>> trace.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> trace.inputs.scheme_file = 'A.scheme' >>> trace.run() # doctest: +SKIP """ _cmd = "trd" input_spec = ComputeTensorTraceInputSpec output_spec = ComputeTensorTraceOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["trace"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_TrD.img" # Need to change to self.inputs.outputdatatype class ComputeEigensystemInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, argstr="< %s", mandatory=True, position=1, desc="Tensor-fitted data filename", ) inputmodel = traits.Enum( "dt", "multitensor", argstr="-inputmodel %s", desc="Specifies the model that the input data contains parameters for", ) maxcomponents = traits.Int( argstr="-maxcomponents %d", desc="The maximum number of tensor components in a voxel of the input data.", ) inputdatatype = traits.Enum( "double", "float", "long", "int", "short", "char", argstr="-inputdatatype %s", usedefault=True, desc=( "Specifies the data type of the input data. " "The data type can be any of the following strings: " '"char", "short", "int", "long", "float" or "double".' 
"Default is double data type" ), ) outputdatatype = traits.Enum( "double", "float", "long", "int", "short", "char", argstr="-outputdatatype %s", usedefault=True, desc="Specifies the data type of the output data.", ) class ComputeEigensystemOutputSpec(TraitedSpec): eigen = File(exists=True, desc="Trace of the diffusion tensor") class ComputeEigensystem(StdOutCommandLine): """ Computes the eigensystem from tensor fitted data. Reads diffusion tensor (single, two-tensor, three-tensor or multitensor) data from the standard input, computes the eigenvalues and eigenvectors of each tensor and outputs the results to the standard output. For multiple-tensor data the program outputs the eigensystem of each tensor. For each tensor the program outputs: {l_1, e_11, e_12, e_13, l_2, e_21, e_22, e_33, l_3, e_31, e_32, e_33}, where l_1 >= l_2 >= l_3 and e_i = (e_i1, e_i2, e_i3) is the eigenvector with eigenvalue l_i. For three-tensor data, for example, the output contains thirty-six values per voxel. Example ------- >>> import nipype.interfaces.camino as cmon >>> dteig = cmon.ComputeEigensystem() >>> dteig.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> dteig.run() # doctest: +SKIP """ _cmd = "dteig" input_spec = ComputeEigensystemInputSpec output_spec = ComputeEigensystemOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["eigen"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) datatype = self.inputs.outputdatatype return name + "_eig.B" + datatype nipype-1.7.0/nipype/interfaces/camino/odf.py000066400000000000000000000541541413403311400210450ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os from ...utils.filemanip import split_filename from ..base import ( traits, TraitedSpec, File, StdOutCommandLine, StdOutCommandLineInputSpec, ) class QBallMXInputSpec(StdOutCommandLineInputSpec): basistype = traits.Enum( "rbf", "sh", argstr="-basistype %s", desc=( 
'Basis function type. "rbf" to use radial basis functions ' '"sh" to use spherical harmonics' ), usedefault=True, ) scheme_file = File( exists=True, argstr="-schemefile %s", mandatory=True, desc="Specifies the scheme file for the diffusion MRI data", ) order = traits.Int( argstr="-order %d", units="NA", desc=( "Specific to sh. Maximum order of the spherical harmonic series. " "Default is 4." ), ) rbfpointset = traits.Int( argstr="-rbfpointset %d", units="NA", desc=( "Specific to rbf. Sets the number of radial basis functions to use. " "The value specified must be present in the Pointsets directory. " "The default value is 246." ), ) rbfsigma = traits.Float( argstr="-rbfsigma %f", units="NA", desc=( "Specific to rbf. Sets the width of the interpolating basis functions. " "The default value is 0.2618 (15 degrees)." ), ) smoothingsigma = traits.Float( argstr="-smoothingsigma %f", units="NA", desc=( "Specific to rbf. Sets the width of the smoothing basis functions. " "The default value is 0.1309 (7.5 degrees)." ), ) class QBallMXOutputSpec(TraitedSpec): qmat = File(exists=True, desc="Q-Ball reconstruction matrix") class QBallMX(StdOutCommandLine): """ Generates a reconstruction matrix for Q-Ball. Used in LinRecon with the same scheme file to reconstruct data. Examples -------- To create a linear transform matrix using Spherical Harmonics (sh). >>> import nipype.interfaces.camino as cam >>> qballmx = cam.QBallMX() >>> qballmx.inputs.scheme_file = 'A.scheme' >>> qballmx.inputs.basistype = 'sh' >>> qballmx.inputs.order = 6 >>> qballmx.run() # doctest: +SKIP To create a linear transform matrix using Radial Basis Functions (rbf). 
This command uses the default setting of rbf sigma = 0.2618 (15 degrees), data smoothing sigma = 0.1309 (7.5 degrees), rbf pointset 246 >>> import nipype.interfaces.camino as cam >>> qballmx = cam.QBallMX() >>> qballmx.inputs.scheme_file = 'A.scheme' >>> qballmx.run() # doctest: +SKIP The linear transform matrix from any of these two examples can then be run over each voxel using LinRecon >>> qballcoeffs = cam.LinRecon() >>> qballcoeffs.inputs.in_file = 'SubjectA.Bfloat' >>> qballcoeffs.inputs.scheme_file = 'A.scheme' >>> qballcoeffs.inputs.qball_mat = 'A_qmat.Bdouble' >>> qballcoeffs.inputs.normalize = True >>> qballcoeffs.inputs.bgmask = 'brain_mask.nii' >>> qballcoeffs.run() # doctest: +SKIP """ _cmd = "qballmx" input_spec = QBallMXInputSpec output_spec = QBallMXOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["qmat"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.scheme_file) return name + "_qmat.Bdouble" class LinReconInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=1, desc="voxel-order data filename", ) scheme_file = File( exists=True, argstr="%s", mandatory=True, position=2, desc="Specifies the scheme file for the diffusion MRI data", ) qball_mat = File( exists=True, argstr="%s", mandatory=True, position=3, desc="Linear transformation matrix.", ) normalize = traits.Bool( argstr="-normalize", desc=( "Normalize the measurements and discard " "the zero measurements before the linear transform." ), ) log = traits.Bool( argstr="-log", desc=( "Transform the log measurements rather than the " "measurements themselves" ), ) bgmask = File(exists=True, argstr="-bgmask %s", desc="background mask") class LinReconOutputSpec(TraitedSpec): recon_data = File(exists=True, desc="Transformed data") class LinRecon(StdOutCommandLine): """ Runs a linear transformation in each voxel. 
Reads a linear transformation from the matrix file assuming the imaging scheme specified in the scheme file. Performs the linear transformation on the data in every voxel and outputs the result to the standard output. The ouput in every voxel is actually: :: [exit code, ln(S(0)), p1, ..., pR] where p1, ..., pR are the parameters of the reconstruction. Possible exit codes are: - 0. No problems. - 6. Bad data replaced by substitution of zero. The matrix must be R by N+M where N+M is the number of measurements and R is the number of parameters of the reconstruction. The matrix file contains binary double-precision floats. The matrix elements are stored row by row. Example ------- First run QBallMX and create a linear transform matrix using Spherical Harmonics (sh). >>> import nipype.interfaces.camino as cam >>> qballmx = cam.QBallMX() >>> qballmx.inputs.scheme_file = 'A.scheme' >>> qballmx.inputs.basistype = 'sh' >>> qballmx.inputs.order = 4 >>> qballmx.run() # doctest: +SKIP Then run it over each voxel using LinRecon >>> qballcoeffs = cam.LinRecon() >>> qballcoeffs.inputs.in_file = 'SubjectA.Bfloat' >>> qballcoeffs.inputs.scheme_file = 'A.scheme' >>> qballcoeffs.inputs.qball_mat = 'A_qmat.Bdouble' >>> qballcoeffs.inputs.normalize = True >>> qballcoeffs.run() # doctest: +SKIP """ _cmd = "linrecon" input_spec = LinReconInputSpec output_spec = LinReconOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["recon_data"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.scheme_file) return name + "_recondata.Bdouble" class MESDInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, argstr="-inputfile %s", mandatory=True, position=1, desc="voxel-order data filename", ) inverter = traits.Enum( "SPIKE", "PAS", argstr="-filter %s", position=2, mandatory=True, desc=""" The inversion index specifies the type of inversion to perform on the data. 
The currently available choices are: +----------------+---------------------------------------------+ | Inverter name | Inverter parameters | +================+=============================================+ | SPIKE | bd (b-value x diffusivity along the fibre.) | +----------------+---------------------------------------------+ | PAS | r | +----------------+---------------------------------------------+ """, ) inverter_param = traits.Float( argstr="%f", units="NA", position=3, mandatory=True, desc=( "Parameter associated with the inverter. Cf. inverter description for" "more information." ), ) fastmesd = traits.Bool( argstr="-fastmesd", requires=["mepointset"], desc=( "Turns off numerical integration checks and fixes the integration point set size at that of" "the index specified by -basepointset.." ), ) mepointset = traits.Int( argstr="-mepointset %d", units="NA", desc=( "Use a set of directions other than those in the scheme file for the deconvolution kernel." "The number refers to the number of directions on the unit sphere. For example, " '"-mepointset 54" uses the directions in "camino/PointSets/Elec054.txt".' ), ) scheme_file = File( exists=True, argstr="-schemefile %s", mandatory=True, desc="Specifies the scheme file for the diffusion MRI data", ) bgmask = File(exists=True, argstr="-bgmask %s", desc="background mask") inputdatatype = traits.Enum( "float", "char", "short", "int", "long", "double", argstr="-inputdatatype %s", desc=( 'Specifies the data type of the input file: "char", "short", "int", "long",' '"float" or "double". The input file must have BIG-ENDIAN ordering.' 'By default, the input type is "float".' ), ) class MESDOutputSpec(TraitedSpec): mesd_data = File(exists=True, desc="MESD data") class MESD(StdOutCommandLine): """ MESD is a general program for maximum entropy spherical deconvolution. It also runs PASMRI, which is a special case of spherical deconvolution. The input data must be in voxel order. 
The format of the output in each voxel is: { exitcode, ln(A^star(0)), lambda_0, lambda_1, ..., lambda_N } The exitcode contains the results of three tests. The first test thresholds the maximum relative error between the numerical integrals computed at con- vergence and those computed using a larger test point set; if the error is greater than a threshold the exitcode is increased from zero to one as a warning; if it is greater than a larger threshold the exitcode is increased to two to suggest failure. The second test thresholds the predicted error in numerical integrals computed using the test point set; if the predicted error is greater than a threshold the exitcode is increased by 10. The third test thresholds the RMS error between the measurements and their predictions from the fitted deconvolution; if the errors are greater than a threshold, the exit code is increased by 100. An exitcode of 112 means that all three tests were failed and the result is likely to be unreliable. If all is well the exitcode is zero. Results are often still reliable even if one or two of the tests are failed. Other possible exitcodes are: - 5 - The optimization failed to converge - -1 - Background - -100 - Something wrong in the MRI data, e.g. negative or zero measurements, so that the optimization could not run. The standard MESD implementation is computationally demanding, particularly as the number of measurements increases (computation is approximately O(N^2), where N is the number of measurements). There are two ways to obtain significant computational speed-up: i) Turn off error checks and use a small point set for computing numerical integrals in the algorithm by adding the flag -fastmesd. Sakaie CDMRI 2008 shows that using the smallest point set (-basepointset 0) with no error checks usually has only a minor effect on the output of the algorithm, but provides a major reduction in computation time. 
You can increase the point set size using -basepointset with an argument higher than 0, which may produce better results in some voxels, but will increase computation time, which approximately doubles every time the point set index increases by 1. ii) Reduce the complexity of the maximum entropy encoding using -mepointset . By default = N, the number of measurements, and is the number of parameters in the max. ent. representation of the output function, ie the number of lambda parameters, as described in Jansons and Alexander Inverse Problems 2003. However, we can represent the function using less components and here specifies the number of lambda parameters. To obtain speed-up, set < N; complexity become O(^2) rather than O(N^2). Note that must be chosen so that the camino/PointSets directory contains a point set with that number of elements. When -mepointset decreases, the numerical integration checks make less and less of a difference and smaller point sets for numerical integration (see -basepointset) become adequate. So when is low -fastmesd is worth using to get even more speed-up. The choice of is a parameter of the technique. Too low and you lose angular resoloution; too high and you see no computational benefit and may even suffer from overfitting. Empirically, we have found that =16 often gives good results and good speed up, but it is worth trying a few values a comparing performance. The reduced encoding is described in the following ISMRM abstract: Sweet and Alexander "Reduced Encoding Persistent Angular Structure" 572 ISMRM 2010. Example ------- Run MESD on every voxel of the data file SubjectA.Bfloat using the PASMRI kernel. 
>>> import nipype.interfaces.camino as cam >>> mesd = cam.MESD() >>> mesd.inputs.in_file = 'SubjectA.Bfloat' >>> mesd.inputs.scheme_file = 'A.scheme' >>> mesd.inputs.inverter = 'PAS' >>> mesd.inputs.inverter_param = 1.4 >>> mesd.run() # doctest: +SKIP """ _cmd = "mesd" input_spec = MESDInputSpec output_spec = MESDOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["mesd_data"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.scheme_file) return name + "_MESD.Bdouble" class SFPeaksInputSpec(StdOutCommandLineInputSpec): in_file = File( exists=True, argstr="-inputfile %s", mandatory=True, desc="Voxel-order data of spherical functions", ) inputmodel = traits.Enum( "sh", "maxent", "rbf", argstr="-inputmodel %s", mandatory=True, desc=( "Type of functions input via in_file. Currently supported options are: " " sh - Spherical harmonic series. Specify the maximum order of the SH series " ' with the "order" attribute if different from the default of 4. ' " maxent - Maximum entropy representations output by MESD. The reconstruction " " directions input to MESD must be specified. By default this is the " " same set of gradient directions (excluding zero gradients) in the " ' scheme file, so specify the "schemefile" attribute unless the ' ' "mepointset" attribute was set in MESD. ' " rbf - Sums of radial basis functions. Specify the pointset with the attribute " ' "rbfpointset" if different from the default. See QBallMX.' ), ) order = traits.Int( argstr="-order %d", units="NA", desc="Specific to sh. Maximum order of the spherical harmonic series.", ) scheme_file = File( exists=True, argstr="%s", desc="Specific to maxent. Specifies the scheme file." ) rbfpointset = traits.Int( argstr="-rbfpointset %d", units="NA", desc=( "Specific to rbf. Sets the number of radial basis functions to use. " "The value specified must be present in the Pointsets directory. 
" "The default value is 246." ), ) mepointset = traits.Int( argstr="-mepointset %d", units="NA", desc=( "Use a set of directions other than those in the scheme file for the deconvolution " "kernel. The number refers to the number of directions on the unit sphere. " 'For example, "mepointset = 54" uses the directions in "camino/PointSets/Elec054.txt" ' "Use this option only if you told MESD to use a custom set of directions with the same " 'option. Otherwise, specify the scheme file with the "schemefile" attribute.' ), ) numpds = traits.Int( argstr="-numpds %d", units="NA", desc="The largest number of peak directions to output in each voxel.", ) noconsistencycheck = traits.Bool( argstr="-noconsistencycheck", desc="Turns off the consistency check. The output shows all consistencies as true.", ) searchradius = traits.Float( argstr="-searchradius %f", units="NA", desc='The search radius in the peak finding algorithm. The default is 0.4 (cf. "density")', ) density = traits.Int( argstr="-density %d", units="NA", desc=( "The number of randomly rotated icosahedra to use in constructing the set of points for " "random sampling in the peak finding algorithm. Default is 1000, which works well for very " "spiky maxent functions. For other types of function, it is reasonable to set the density " "much lower and increase the search radius slightly, which speeds up the computation." ), ) pointset = traits.Int( argstr="-pointset %d", units="NA", desc=( "To sample using an evenly distributed set of points instead. The integer can be " "0, 1, ..., 7. Index 0 gives 1082 points, 1 gives 1922, 2 gives 3002, 3 gives 4322, " "4 gives 5882, 5 gives 8672, 6 gives 12002, 7 gives 15872." ), ) pdthresh = traits.Float( argstr="-pdthresh %f", units="NA", desc=( "Base threshold on the actual peak direction strength divided by the mean of the " "function. The default is 1.0 (the peak must be equal or greater than the mean)." 
), ) stdsfrommean = traits.Float( argstr="-stdsfrommean %f", units="NA", desc=( "This is the number of standard deviations of the function to be added to the " '"pdthresh" attribute in the peak directions pruning.' ), ) class SFPeaksOutputSpec(TraitedSpec): peaks = File(exists=True, desc="Peaks of the spherical functions.") class SFPeaks(StdOutCommandLine): """ Finds the peaks of spherical functions. This utility reads coefficients of the spherical functions and outputs a list of peak directions of the function. It computes the value of the function at each of a set of sample points. Then it finds local maxima by finding all points at which the function is larger than for any other point within a fixed search radius (the default is 0.4). The utility then uses Powell's algorithm to optimize the position of each local maximum. Finally the utility removes duplicates and tiny peaks with function value smaller than some threshold, which is the mean of the function plus some number of standard deviations. By default the program checks for con- sistency with a second set of starting points, but skips the optimization step. To speed up execution, you can turn off the con- sistency check by setting the noconsistencycheck flag to True. By default, the utility constructs a set of sample points by randomly rotating a unit icosahedron repeatedly (the default is 1000 times, which produces a set of 6000 points) and concatenating the lists of vertices. The 'pointset = ' attribute can tell the utility to use an evenly distributed set of points (index 0 gives 1082 points, 1 gives 1922, 2 gives 4322, 3 gives 8672, 4 gives 15872, 5 gives 32762, 6 gives 72032), which is quicker, because you can get away with fewer points. We estimate that you can use a factor of 2.5 less evenly distributed points than randomly distributed points and still expect similar performance levels. The output for each voxel is: - exitcode (inherited from the input data). - ln(A(0)) - number of peaks found. 
- flag for consistency with a repeated run (number of directions is the same and the directions are the same to within a threshold.) - mean(f). - std(f). - direction 1 (x, y, z, f, H00, H01, H10, H11). - direction 2 (x, y, z, f, H00, H01, H10, H11). - direction 3 (x, y, z, f, H00, H01, H10, H11). H is the Hessian of f at the peak. It is the matrix: :: [d^2f/ds^2 d^2f/dsdt] [d^2f/dtds d^2f/dt^2] = [H00 H01] [H10 H11] where s and t are orthogonal coordinates local to the peak. By default the maximum number of peak directions output in each voxel is three. If less than three directions are found, zeros are output for later directions. The peaks are ordered by the value of the function at the peak. If more than the maximum number of directions are found only the strongest ones are output. The maximum number can be changed setting the 'numpds' attribute. The utility can read various kinds of spherical function, but must be told what kind of function is input using the 'inputmodel' attribute. The description of the 'inputmodel' attribute lists additional information required by SFPeaks for each input model. Example ------- First run QBallMX and create a linear transform matrix using Spherical Harmonics (sh). 
>>> import nipype.interfaces.camino as cam >>> sf_peaks = cam.SFPeaks() >>> sf_peaks.inputs.in_file = 'A_recon_params.Bdouble' >>> sf_peaks.inputs.inputmodel = 'sh' >>> sf_peaks.inputs.order = 4 >>> sf_peaks.inputs.density = 100 >>> sf_peaks.inputs.searchradius = 1.0 >>> sf_peaks.run() # doctest: +SKIP """ _cmd = "sfpeaks" input_spec = SFPeaksInputSpec output_spec = SFPeaksOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["peaks"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_peaks.Bdouble" nipype-1.7.0/nipype/interfaces/camino/tests/000077500000000000000000000000001413403311400210545ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/camino/tests/__init__.py000066400000000000000000000002121413403311400231600ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_AnalyzeHeader.py000066400000000000000000000062251413403311400262360ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..convert import AnalyzeHeader def test_AnalyzeHeader_inputs(): input_map = dict( args=dict( argstr="%s", ), centre=dict( argstr="-centre %s", units="mm", ), data_dims=dict( argstr="-datadims %s", units="voxels", ), datatype=dict( argstr="-datatype %s", mandatory=True, ), description=dict( argstr="-description %s", ), environ=dict( nohash=True, usedefault=True, ), greylevels=dict( argstr="-gl %s", units="NA", ), in_file=dict( argstr="< %s", extensions=None, mandatory=True, position=1, ), initfromheader=dict( argstr="-initfromheader %s", extensions=None, position=3, ), intelbyteorder=dict( argstr="-intelbyteorder", ), networkbyteorder=dict( argstr="-networkbyteorder", ), nimages=dict( argstr="-nimages %d", units="NA", ), offset=dict( argstr="-offset %d", 
units="NA", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), picoseed=dict( argstr="-picoseed %s", units="mm", ), printbigendian=dict( argstr="-printbigendian %s", extensions=None, position=3, ), printimagedims=dict( argstr="-printimagedims %s", extensions=None, position=3, ), printintelbyteorder=dict( argstr="-printintelbyteorder %s", extensions=None, position=3, ), printprogargs=dict( argstr="-printprogargs %s", extensions=None, position=3, ), readheader=dict( argstr="-readheader %s", extensions=None, position=3, ), scaleinter=dict( argstr="-scaleinter %d", units="NA", ), scaleslope=dict( argstr="-scaleslope %d", units="NA", ), scheme_file=dict( argstr="%s", extensions=None, position=2, ), voxel_dims=dict( argstr="-voxeldims %s", units="mm", ), ) inputs = AnalyzeHeader.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AnalyzeHeader_outputs(): output_map = dict( header=dict( extensions=None, ), ) outputs = AnalyzeHeader.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_ComputeEigensystem.py000066400000000000000000000027611413403311400273540ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import ComputeEigensystem def test_ComputeEigensystem_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="< %s", extensions=None, mandatory=True, position=1, ), inputdatatype=dict( argstr="-inputdatatype %s", usedefault=True, ), inputmodel=dict( argstr="-inputmodel %s", ), maxcomponents=dict( argstr="-maxcomponents %d", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), outputdatatype=dict( argstr="-outputdatatype %s", 
usedefault=True, ), ) inputs = ComputeEigensystem.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ComputeEigensystem_outputs(): output_map = dict( eigen=dict( extensions=None, ), ) outputs = ComputeEigensystem.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_ComputeFractionalAnisotropy.py000066400000000000000000000030051413403311400312220ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import ComputeFractionalAnisotropy def test_ComputeFractionalAnisotropy_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="< %s", extensions=None, mandatory=True, position=1, ), inputdatatype=dict( argstr="-inputdatatype %s", ), inputmodel=dict( argstr="-inputmodel %s", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), outputdatatype=dict( argstr="-outputdatatype %s", ), scheme_file=dict( argstr="%s", extensions=None, position=2, ), ) inputs = ComputeFractionalAnisotropy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ComputeFractionalAnisotropy_outputs(): output_map = dict( fa=dict( extensions=None, ), ) outputs = ComputeFractionalAnisotropy.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_ComputeMeanDiffusivity.py000066400000000000000000000027541413403311400301700ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import 
ComputeMeanDiffusivity def test_ComputeMeanDiffusivity_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="< %s", extensions=None, mandatory=True, position=1, ), inputdatatype=dict( argstr="-inputdatatype %s", ), inputmodel=dict( argstr="-inputmodel %s", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), outputdatatype=dict( argstr="-outputdatatype %s", ), scheme_file=dict( argstr="%s", extensions=None, position=2, ), ) inputs = ComputeMeanDiffusivity.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ComputeMeanDiffusivity_outputs(): output_map = dict( md=dict( extensions=None, ), ) outputs = ComputeMeanDiffusivity.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_ComputeTensorTrace.py000066400000000000000000000027331413403311400273100ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import ComputeTensorTrace def test_ComputeTensorTrace_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="< %s", extensions=None, mandatory=True, position=1, ), inputdatatype=dict( argstr="-inputdatatype %s", ), inputmodel=dict( argstr="-inputmodel %s", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), outputdatatype=dict( argstr="-outputdatatype %s", ), scheme_file=dict( argstr="%s", extensions=None, position=2, ), ) inputs = ComputeTensorTrace.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ComputeTensorTrace_outputs(): output_map = dict( 
trace=dict( extensions=None, ), ) outputs = ComputeTensorTrace.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_Conmat.py000066400000000000000000000034311413403311400247370ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..connectivity import Conmat def test_Conmat_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-inputfile %s", extensions=None, mandatory=True, ), output_root=dict( argstr="-outputroot %s", extensions=None, genfile=True, ), scalar_file=dict( argstr="-scalarfile %s", extensions=None, requires=["tract_stat"], ), target_file=dict( argstr="-targetfile %s", extensions=None, mandatory=True, ), targetname_file=dict( argstr="-targetnamefile %s", extensions=None, ), tract_prop=dict( argstr="-tractstat %s", units="NA", xor=["tract_stat"], ), tract_stat=dict( argstr="-tractstat %s", requires=["scalar_file"], units="NA", xor=["tract_prop"], ), ) inputs = Conmat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Conmat_outputs(): output_map = dict( conmat_sc=dict( extensions=None, ), conmat_ts=dict( extensions=None, ), ) outputs = Conmat.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_DT2NIfTI.py000066400000000000000000000025641413403311400247470ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..convert import DT2NIfTI def test_DT2NIfTI_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), header_file=dict( 
argstr="-header %s", extensions=None, mandatory=True, position=3, ), in_file=dict( argstr="-inputfile %s", extensions=None, mandatory=True, position=1, ), output_root=dict( argstr="-outputroot %s", extensions=None, genfile=True, position=2, ), ) inputs = DT2NIfTI.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DT2NIfTI_outputs(): output_map = dict( dt=dict( extensions=None, ), exitcode=dict( extensions=None, ), lns0=dict( extensions=None, ), ) outputs = DT2NIfTI.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_DTIFit.py000066400000000000000000000026231413403311400246030ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import DTIFit def test_DTIFit_inputs(): input_map = dict( args=dict( argstr="%s", ), bgmask=dict( argstr="-bgmask %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), non_linear=dict( argstr="-nonlinear", position=3, ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), scheme_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), ) inputs = DTIFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DTIFit_outputs(): output_map = dict( tensor_fitted=dict( extensions=None, ), ) outputs = DTIFit.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_DTLUTGen.py000066400000000000000000000036541413403311400250530ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import DTLUTGen def test_DTLUTGen_inputs(): input_map = dict( acg=dict( argstr="-acg", ), args=dict( argstr="%s", ), bingham=dict( argstr="-bingham", ), environ=dict( nohash=True, usedefault=True, ), frange=dict( argstr="-frange %s", position=1, units="NA", ), inversion=dict( argstr="-inversion %d", units="NA", ), lrange=dict( argstr="-lrange %s", position=1, units="NA", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), samples=dict( argstr="-samples %d", units="NA", ), scheme_file=dict( argstr="-schemefile %s", extensions=None, mandatory=True, position=2, ), snr=dict( argstr="-snr %f", units="NA", ), step=dict( argstr="-step %f", units="NA", ), trace=dict( argstr="-trace %G", units="NA", ), watson=dict( argstr="-watson", ), ) inputs = DTLUTGen.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DTLUTGen_outputs(): output_map = dict( dtLUT=dict( extensions=None, ), ) outputs = DTLUTGen.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_DTMetric.py000066400000000000000000000027231413403311400251740ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import DTMetric def test_DTMetric_inputs(): input_map = dict( args=dict( argstr="%s", ), data_header=dict( argstr="-header %s", extensions=None, ), eigen_data=dict( argstr="-inputfile %s", extensions=None, mandatory=True, ), environ=dict( nohash=True, usedefault=True, ), inputdatatype=dict( argstr="-inputdatatype %s", usedefault=True, ), metric=dict( argstr="-stat 
%s", mandatory=True, ), outputdatatype=dict( argstr="-outputdatatype %s", usedefault=True, ), outputfile=dict( argstr="-outputfile %s", extensions=None, genfile=True, ), ) inputs = DTMetric.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DTMetric_outputs(): output_map = dict( metric_stats=dict( extensions=None, ), ) outputs = DTMetric.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_FSL2Scheme.py000066400000000000000000000035321413403311400253530ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..convert import FSL2Scheme def test_FSL2Scheme_inputs(): input_map = dict( args=dict( argstr="%s", ), bscale=dict( argstr="-bscale %d", units="NA", ), bval_file=dict( argstr="-bvalfile %s", extensions=None, mandatory=True, position=2, ), bvec_file=dict( argstr="-bvecfile %s", extensions=None, mandatory=True, position=1, ), diffusiontime=dict( argstr="-diffusiontime %f", units="NA", ), environ=dict( nohash=True, usedefault=True, ), flipx=dict( argstr="-flipx", ), flipy=dict( argstr="-flipy", ), flipz=dict( argstr="-flipz", ), interleave=dict( argstr="-interleave", ), numscans=dict( argstr="-numscans %d", units="NA", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), usegradmod=dict( argstr="-usegradmod", ), ) inputs = FSL2Scheme.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FSL2Scheme_outputs(): output_map = dict( scheme=dict( extensions=None, ), ) outputs = FSL2Scheme.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_Image2Voxel.py000066400000000000000000000023551413403311400256440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..convert import Image2Voxel def test_Image2Voxel_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-4dimage %s", extensions=None, mandatory=True, position=1, ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), out_type=dict( argstr="-outputdatatype %s", position=2, usedefault=True, ), ) inputs = Image2Voxel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Image2Voxel_outputs(): output_map = dict( voxel_order=dict( extensions=None, ), ) outputs = Image2Voxel.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_ImageStats.py000066400000000000000000000024261413403311400255620ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ImageStats def test_ImageStats_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( argstr="-images %s", mandatory=True, position=-1, ), out_type=dict( argstr="-outputdatatype %s", usedefault=True, ), output_root=dict( argstr="-outputroot %s", extensions=None, mandatory=True, ), stat=dict( argstr="-stat %s", mandatory=True, units="NA", ), ) inputs = ImageStats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ImageStats_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) 
outputs = ImageStats.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_LinRecon.py000066400000000000000000000031061413403311400252260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..odf import LinRecon def test_LinRecon_inputs(): input_map = dict( args=dict( argstr="%s", ), bgmask=dict( argstr="-bgmask %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), log=dict( argstr="-log", ), normalize=dict( argstr="-normalize", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), qball_mat=dict( argstr="%s", extensions=None, mandatory=True, position=3, ), scheme_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), ) inputs = LinRecon.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_LinRecon_outputs(): output_map = dict( recon_data=dict( extensions=None, ), ) outputs = LinRecon.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_MESD.py000066400000000000000000000035021413403311400242450ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..odf import MESD def test_MESD_inputs(): input_map = dict( args=dict( argstr="%s", ), bgmask=dict( argstr="-bgmask %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), fastmesd=dict( argstr="-fastmesd", requires=["mepointset"], ), in_file=dict( argstr="-inputfile %s", extensions=None, mandatory=True, position=1, ), inputdatatype=dict( argstr="-inputdatatype %s", ), 
inverter=dict( argstr="-filter %s", mandatory=True, position=2, ), inverter_param=dict( argstr="%f", mandatory=True, position=3, units="NA", ), mepointset=dict( argstr="-mepointset %d", units="NA", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), scheme_file=dict( argstr="-schemefile %s", extensions=None, mandatory=True, ), ) inputs = MESD.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MESD_outputs(): output_map = dict( mesd_data=dict( extensions=None, ), ) outputs = MESD.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_ModelFit.py000066400000000000000000000043701413403311400252240ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import ModelFit def test_ModelFit_inputs(): input_map = dict( args=dict( argstr="%s", ), bgmask=dict( argstr="-bgmask %s", extensions=None, ), bgthresh=dict( argstr="-bgthresh %G", ), cfthresh=dict( argstr="-csfthresh %G", ), environ=dict( nohash=True, usedefault=True, ), fixedbvalue=dict( argstr="-fixedbvalue %s", ), fixedmodq=dict( argstr="-fixedmod %s", ), in_file=dict( argstr="-inputfile %s", extensions=None, mandatory=True, ), inputdatatype=dict( argstr="-inputdatatype %s", ), model=dict( argstr="-model %s", mandatory=True, ), noisemap=dict( argstr="-noisemap %s", extensions=None, ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), outlier=dict( argstr="-outliermap %s", extensions=None, ), outputfile=dict( argstr="-outputfile %s", extensions=None, ), residualmap=dict( argstr="-residualmap %s", extensions=None, ), scheme_file=dict( argstr="-schemefile %s", extensions=None, mandatory=True, ), sigma=dict( argstr="-sigma %G", ), tau=dict( argstr="-tau 
%G", ), ) inputs = ModelFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ModelFit_outputs(): output_map = dict( fitted_data=dict( extensions=None, ), ) outputs = ModelFit.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_NIfTIDT2Camino.py000066400000000000000000000031701413403311400260700ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..convert import NIfTIDT2Camino def test_NIfTIDT2Camino_inputs(): input_map = dict( args=dict( argstr="%s", ), bgmask=dict( argstr="-bgmask %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-inputfile %s", extensions=None, mandatory=True, position=1, ), lns0_file=dict( argstr="-lns0 %s", extensions=None, ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), s0_file=dict( argstr="-s0 %s", extensions=None, ), scaleinter=dict( argstr="-scaleinter %s", ), scaleslope=dict( argstr="-scaleslope %s", ), uppertriangular=dict( argstr="-uppertriangular %s", ), ) inputs = NIfTIDT2Camino.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_NIfTIDT2Camino_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = NIfTIDT2Camino.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_PicoPDFs.py000066400000000000000000000032271413403311400251300ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import PicoPDFs def 
test_PicoPDFs_inputs(): input_map = dict( args=dict( argstr="%s", ), directmap=dict( argstr="-directmap", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="< %s", extensions=None, mandatory=True, position=1, ), inputmodel=dict( argstr="-inputmodel %s", position=2, usedefault=True, ), luts=dict( argstr="-luts %s", mandatory=True, ), maxcomponents=dict( argstr="-maxcomponents %d", units="NA", ), numpds=dict( argstr="-numpds %d", units="NA", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), pdf=dict( argstr="-pdf %s", position=4, usedefault=True, ), ) inputs = PicoPDFs.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_PicoPDFs_outputs(): output_map = dict( pdfs=dict( extensions=None, ), ) outputs = PicoPDFs.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_ProcStreamlines.py000066400000000000000000000100371413403311400266300ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..convert import ProcStreamlines def test_ProcStreamlines_inputs(): input_map = dict( allowmultitargets=dict( argstr="-allowmultitargets", ), args=dict( argstr="%s", ), datadims=dict( argstr="-datadims %s", units="voxels", ), directional=dict( argstr="-directional %s", units="NA", ), discardloops=dict( argstr="-discardloops", ), endpointfile=dict( argstr="-endpointfile %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), exclusionfile=dict( argstr="-exclusionfile %s", extensions=None, ), gzip=dict( argstr="-gzip", ), in_file=dict( argstr="-inputfile %s", extensions=None, mandatory=True, position=1, ), inputmodel=dict( argstr="-inputmodel %s", usedefault=True, ), iterations=dict( argstr="-iterations %d", 
units="NA", ), maxtractlength=dict( argstr="-maxtractlength %d", units="mm", ), maxtractpoints=dict( argstr="-maxtractpoints %d", units="NA", ), mintractlength=dict( argstr="-mintractlength %d", units="mm", ), mintractpoints=dict( argstr="-mintractpoints %d", units="NA", ), noresample=dict( argstr="-noresample", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), outputacm=dict( argstr="-outputacm", requires=["outputroot", "seedfile"], ), outputcbs=dict( argstr="-outputcbs", requires=["outputroot", "targetfile", "seedfile"], ), outputcp=dict( argstr="-outputcp", requires=["outputroot", "seedfile"], ), outputroot=dict( argstr="-outputroot %s", extensions=None, ), outputsc=dict( argstr="-outputsc", requires=["outputroot", "seedfile"], ), outputtracts=dict( argstr="-outputtracts", ), regionindex=dict( argstr="-regionindex %d", units="mm", ), resamplestepsize=dict( argstr="-resamplestepsize %d", units="NA", ), seedfile=dict( argstr="-seedfile %s", extensions=None, ), seedpointmm=dict( argstr="-seedpointmm %s", units="mm", ), seedpointvox=dict( argstr="-seedpointvox %s", units="voxels", ), targetfile=dict( argstr="-targetfile %s", extensions=None, ), truncateinexclusion=dict( argstr="-truncateinexclusion", ), truncateloops=dict( argstr="-truncateloops", ), voxeldims=dict( argstr="-voxeldims %s", units="mm", ), waypointfile=dict( argstr="-waypointfile %s", extensions=None, ), ) inputs = ProcStreamlines.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ProcStreamlines_outputs(): output_map = dict( outputroot_files=dict(), proc=dict( extensions=None, ), ) outputs = ProcStreamlines.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_QBallMX.py000066400000000000000000000030411413403311400247530ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..odf import QBallMX def test_QBallMX_inputs(): input_map = dict( args=dict( argstr="%s", ), basistype=dict( argstr="-basistype %s", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), order=dict( argstr="-order %d", units="NA", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), rbfpointset=dict( argstr="-rbfpointset %d", units="NA", ), rbfsigma=dict( argstr="-rbfsigma %f", units="NA", ), scheme_file=dict( argstr="-schemefile %s", extensions=None, mandatory=True, ), smoothingsigma=dict( argstr="-smoothingsigma %f", units="NA", ), ) inputs = QBallMX.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_QBallMX_outputs(): output_map = dict( qmat=dict( extensions=None, ), ) outputs = QBallMX.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_SFLUTGen.py000066400000000000000000000034571413403311400250550ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..calib import SFLUTGen def test_SFLUTGen_inputs(): input_map = dict( args=dict( argstr="%s", ), binincsize=dict( argstr="-binincsize %d", units="NA", ), directmap=dict( argstr="-directmap", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-inputfile %s", extensions=None, mandatory=True, ), info_file=dict( argstr="-infofile %s", extensions=None, mandatory=True, ), minvectsperbin=dict( argstr="-minvectsperbin %d", units="NA", ), order=dict( argstr="-order %d", units="NA", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, 
position=-1, ), outputstem=dict( argstr="-outputstem %s", usedefault=True, ), pdf=dict( argstr="-pdf %s", usedefault=True, ), ) inputs = SFLUTGen.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SFLUTGen_outputs(): output_map = dict( lut_one_fibre=dict( extensions=None, ), lut_two_fibres=dict( extensions=None, ), ) outputs = SFLUTGen.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_SFPICOCalibData.py000066400000000000000000000046251413403311400262340ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..calib import SFPICOCalibData def test_SFPICOCalibData_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), info_file=dict( argstr="-infooutputfile %s", extensions=None, genfile=True, hash_files=False, mandatory=True, ), onedtfarange=dict( argstr="-onedtfarange %s", units="NA", ), onedtfastep=dict( argstr="-onedtfastep %f", units="NA", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), scheme_file=dict( argstr="-schemefile %s", extensions=None, mandatory=True, ), seed=dict( argstr="-seed %f", units="NA", ), snr=dict( argstr="-snr %f", units="NA", ), trace=dict( argstr="-trace %f", units="NA", ), twodtanglerange=dict( argstr="-twodtanglerange %s", units="NA", ), twodtanglestep=dict( argstr="-twodtanglestep %f", units="NA", ), twodtfarange=dict( argstr="-twodtfarange %s", units="NA", ), twodtfastep=dict( argstr="-twodtfastep %f", units="NA", ), twodtmixmax=dict( argstr="-twodtmixmax %f", units="NA", ), twodtmixstep=dict( argstr="-twodtmixstep %f", units="NA", ), ) inputs = SFPICOCalibData.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SFPICOCalibData_outputs(): output_map = dict( PICOCalib=dict( extensions=None, ), calib_info=dict( extensions=None, ), ) outputs = SFPICOCalibData.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_SFPeaks.py000066400000000000000000000042421413403311400250130ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..odf import SFPeaks def test_SFPeaks_inputs(): input_map = dict( args=dict( argstr="%s", ), density=dict( argstr="-density %d", units="NA", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-inputfile %s", extensions=None, mandatory=True, ), inputmodel=dict( argstr="-inputmodel %s", mandatory=True, ), mepointset=dict( argstr="-mepointset %d", units="NA", ), noconsistencycheck=dict( argstr="-noconsistencycheck", ), numpds=dict( argstr="-numpds %d", units="NA", ), order=dict( argstr="-order %d", units="NA", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), pdthresh=dict( argstr="-pdthresh %f", units="NA", ), pointset=dict( argstr="-pointset %d", units="NA", ), rbfpointset=dict( argstr="-rbfpointset %d", units="NA", ), scheme_file=dict( argstr="%s", extensions=None, ), searchradius=dict( argstr="-searchradius %f", units="NA", ), stdsfrommean=dict( argstr="-stdsfrommean %f", units="NA", ), ) inputs = SFPeaks.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SFPeaks_outputs(): output_map = dict( peaks=dict( extensions=None, ), ) outputs = SFPeaks.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_Shredder.py000066400000000000000000000026221413403311400252570ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..convert import Shredder def test_Shredder_inputs(): input_map = dict( args=dict( argstr="%s", ), chunksize=dict( argstr="%d", position=2, units="NA", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="< %s", extensions=None, mandatory=True, position=-2, ), offset=dict( argstr="%d", position=1, units="NA", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), space=dict( argstr="%d", position=3, units="NA", ), ) inputs = Shredder.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Shredder_outputs(): output_map = dict( shredded=dict( extensions=None, ), ) outputs = Shredder.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_Track.py000066400000000000000000000052641413403311400245700ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import Track def test_Track_inputs(): input_map = dict( anisfile=dict( argstr="-anisfile %s", extensions=None, ), anisthresh=dict( argstr="-anisthresh %f", ), args=dict( argstr="%s", ), curveinterval=dict( argstr="-curveinterval %f", requires=["curvethresh"], ), curvethresh=dict( argstr="-curvethresh %f", ), data_dims=dict( argstr="-datadims %s", units="voxels", ), environ=dict( nohash=True, usedefault=True, ), gzip=dict( argstr="-gzip", ), in_file=dict( argstr="-inputfile %s", extensions=None, position=1, ), inputdatatype=dict( argstr="-inputdatatype %s", ), inputmodel=dict( argstr="-inputmodel %s", usedefault=True, ), interpolator=dict( argstr="-interpolator %s", ), 
ipthresh=dict( argstr="-ipthresh %f", ), maxcomponents=dict( argstr="-maxcomponents %d", units="NA", ), numpds=dict( argstr="-numpds %d", units="NA", ), out_file=dict( argstr="-outputfile %s", extensions=None, genfile=True, position=-1, ), output_root=dict( argstr="-outputroot %s", extensions=None, position=-1, ), outputtracts=dict( argstr="-outputtracts %s", ), seed_file=dict( argstr="-seedfile %s", extensions=None, position=2, ), stepsize=dict( argstr="-stepsize %f", requires=["tracker"], ), tracker=dict( argstr="-tracker %s", usedefault=True, ), voxel_dims=dict( argstr="-voxeldims %s", units="mm", ), ) inputs = Track.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Track_outputs(): output_map = dict( tracked=dict( extensions=None, ), ) outputs = Track.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_TrackBallStick.py000066400000000000000000000053411413403311400263550ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import TrackBallStick def test_TrackBallStick_inputs(): input_map = dict( anisfile=dict( argstr="-anisfile %s", extensions=None, ), anisthresh=dict( argstr="-anisthresh %f", ), args=dict( argstr="%s", ), curveinterval=dict( argstr="-curveinterval %f", requires=["curvethresh"], ), curvethresh=dict( argstr="-curvethresh %f", ), data_dims=dict( argstr="-datadims %s", units="voxels", ), environ=dict( nohash=True, usedefault=True, ), gzip=dict( argstr="-gzip", ), in_file=dict( argstr="-inputfile %s", extensions=None, position=1, ), inputdatatype=dict( argstr="-inputdatatype %s", ), inputmodel=dict( argstr="-inputmodel %s", usedefault=True, ), interpolator=dict( argstr="-interpolator %s", ), ipthresh=dict( argstr="-ipthresh %f", ), 
maxcomponents=dict( argstr="-maxcomponents %d", units="NA", ), numpds=dict( argstr="-numpds %d", units="NA", ), out_file=dict( argstr="-outputfile %s", extensions=None, genfile=True, position=-1, ), output_root=dict( argstr="-outputroot %s", extensions=None, position=-1, ), outputtracts=dict( argstr="-outputtracts %s", ), seed_file=dict( argstr="-seedfile %s", extensions=None, position=2, ), stepsize=dict( argstr="-stepsize %f", requires=["tracker"], ), tracker=dict( argstr="-tracker %s", usedefault=True, ), voxel_dims=dict( argstr="-voxeldims %s", units="mm", ), ) inputs = TrackBallStick.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TrackBallStick_outputs(): output_map = dict( tracked=dict( extensions=None, ), ) outputs = TrackBallStick.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_TrackBayesDirac.py000066400000000000000000000067461413403311400265250ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import TrackBayesDirac def test_TrackBayesDirac_inputs(): input_map = dict( anisfile=dict( argstr="-anisfile %s", extensions=None, ), anisthresh=dict( argstr="-anisthresh %f", ), args=dict( argstr="%s", ), curveinterval=dict( argstr="-curveinterval %f", requires=["curvethresh"], ), curvepriorg=dict( argstr="-curvepriorg %G", ), curvepriork=dict( argstr="-curvepriork %G", ), curvethresh=dict( argstr="-curvethresh %f", ), data_dims=dict( argstr="-datadims %s", units="voxels", ), datamodel=dict( argstr="-datamodel %s", ), environ=dict( nohash=True, usedefault=True, ), extpriordatatype=dict( argstr="-extpriordatatype %s", ), extpriorfile=dict( argstr="-extpriorfile %s", extensions=None, ), gzip=dict( argstr="-gzip", ), in_file=dict( 
argstr="-inputfile %s", extensions=None, position=1, ), inputdatatype=dict( argstr="-inputdatatype %s", ), inputmodel=dict( argstr="-inputmodel %s", usedefault=True, ), interpolator=dict( argstr="-interpolator %s", ), ipthresh=dict( argstr="-ipthresh %f", ), iterations=dict( argstr="-iterations %d", units="NA", ), maxcomponents=dict( argstr="-maxcomponents %d", units="NA", ), numpds=dict( argstr="-numpds %d", units="NA", ), out_file=dict( argstr="-outputfile %s", extensions=None, genfile=True, position=-1, ), output_root=dict( argstr="-outputroot %s", extensions=None, position=-1, ), outputtracts=dict( argstr="-outputtracts %s", ), pdf=dict( argstr="-pdf %s", ), pointset=dict( argstr="-pointset %s", ), scheme_file=dict( argstr="-schemefile %s", extensions=None, mandatory=True, ), seed_file=dict( argstr="-seedfile %s", extensions=None, position=2, ), stepsize=dict( argstr="-stepsize %f", requires=["tracker"], ), tracker=dict( argstr="-tracker %s", usedefault=True, ), voxel_dims=dict( argstr="-voxeldims %s", units="mm", ), ) inputs = TrackBayesDirac.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TrackBayesDirac_outputs(): output_map = dict( tracked=dict( extensions=None, ), ) outputs = TrackBayesDirac.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_TrackBedpostxDeter.py000066400000000000000000000057011413403311400272610ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import TrackBedpostxDeter def test_TrackBedpostxDeter_inputs(): input_map = dict( anisfile=dict( argstr="-anisfile %s", extensions=None, ), anisthresh=dict( argstr="-anisthresh %f", ), args=dict( argstr="%s", ), bedpostxdir=dict( argstr="-bedpostxdir %s", mandatory=True, ), 
curveinterval=dict( argstr="-curveinterval %f", requires=["curvethresh"], ), curvethresh=dict( argstr="-curvethresh %f", ), data_dims=dict( argstr="-datadims %s", units="voxels", ), environ=dict( nohash=True, usedefault=True, ), gzip=dict( argstr="-gzip", ), in_file=dict( argstr="-inputfile %s", extensions=None, position=1, ), inputdatatype=dict( argstr="-inputdatatype %s", ), inputmodel=dict( argstr="-inputmodel %s", usedefault=True, ), interpolator=dict( argstr="-interpolator %s", ), ipthresh=dict( argstr="-ipthresh %f", ), maxcomponents=dict( argstr="-maxcomponents %d", units="NA", ), min_vol_frac=dict( argstr="-bedpostxminf %d", units="NA", ), numpds=dict( argstr="-numpds %d", units="NA", ), out_file=dict( argstr="-outputfile %s", extensions=None, genfile=True, position=-1, ), output_root=dict( argstr="-outputroot %s", extensions=None, position=-1, ), outputtracts=dict( argstr="-outputtracts %s", ), seed_file=dict( argstr="-seedfile %s", extensions=None, position=2, ), stepsize=dict( argstr="-stepsize %f", requires=["tracker"], ), tracker=dict( argstr="-tracker %s", usedefault=True, ), voxel_dims=dict( argstr="-voxeldims %s", units="mm", ), ) inputs = TrackBedpostxDeter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TrackBedpostxDeter_outputs(): output_map = dict( tracked=dict( extensions=None, ), ) outputs = TrackBedpostxDeter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_TrackBedpostxProba.py000066400000000000000000000060421413403311400272600ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import TrackBedpostxProba def test_TrackBedpostxProba_inputs(): input_map = dict( anisfile=dict( argstr="-anisfile %s", extensions=None, ), 
anisthresh=dict( argstr="-anisthresh %f", ), args=dict( argstr="%s", ), bedpostxdir=dict( argstr="-bedpostxdir %s", mandatory=True, ), curveinterval=dict( argstr="-curveinterval %f", requires=["curvethresh"], ), curvethresh=dict( argstr="-curvethresh %f", ), data_dims=dict( argstr="-datadims %s", units="voxels", ), environ=dict( nohash=True, usedefault=True, ), gzip=dict( argstr="-gzip", ), in_file=dict( argstr="-inputfile %s", extensions=None, position=1, ), inputdatatype=dict( argstr="-inputdatatype %s", ), inputmodel=dict( argstr="-inputmodel %s", usedefault=True, ), interpolator=dict( argstr="-interpolator %s", ), ipthresh=dict( argstr="-ipthresh %f", ), iterations=dict( argstr="-iterations %d", units="NA", ), maxcomponents=dict( argstr="-maxcomponents %d", units="NA", ), min_vol_frac=dict( argstr="-bedpostxminf %d", units="NA", ), numpds=dict( argstr="-numpds %d", units="NA", ), out_file=dict( argstr="-outputfile %s", extensions=None, genfile=True, position=-1, ), output_root=dict( argstr="-outputroot %s", extensions=None, position=-1, ), outputtracts=dict( argstr="-outputtracts %s", ), seed_file=dict( argstr="-seedfile %s", extensions=None, position=2, ), stepsize=dict( argstr="-stepsize %f", requires=["tracker"], ), tracker=dict( argstr="-tracker %s", usedefault=True, ), voxel_dims=dict( argstr="-voxeldims %s", units="mm", ), ) inputs = TrackBedpostxProba.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TrackBedpostxProba_outputs(): output_map = dict( tracked=dict( extensions=None, ), ) outputs = TrackBedpostxProba.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_TrackBootstrap.py000066400000000000000000000063201413403311400264600ustar00rootroot00000000000000# AUTO-GENERATED 
by tools/checkspecs.py - DO NOT EDIT from ..dti import TrackBootstrap def test_TrackBootstrap_inputs(): input_map = dict( anisfile=dict( argstr="-anisfile %s", extensions=None, ), anisthresh=dict( argstr="-anisthresh %f", ), args=dict( argstr="%s", ), bgmask=dict( argstr="-bgmask %s", extensions=None, ), bsdatafiles=dict( argstr="-bsdatafile %s", mandatory=True, ), curveinterval=dict( argstr="-curveinterval %f", requires=["curvethresh"], ), curvethresh=dict( argstr="-curvethresh %f", ), data_dims=dict( argstr="-datadims %s", units="voxels", ), environ=dict( nohash=True, usedefault=True, ), gzip=dict( argstr="-gzip", ), in_file=dict( argstr="-inputfile %s", extensions=None, position=1, ), inputdatatype=dict( argstr="-inputdatatype %s", ), inputmodel=dict( argstr="-inputmodel %s", usedefault=True, ), interpolator=dict( argstr="-interpolator %s", ), inversion=dict( argstr="-inversion %s", ), ipthresh=dict( argstr="-ipthresh %f", ), iterations=dict( argstr="-iterations %d", units="NA", ), maxcomponents=dict( argstr="-maxcomponents %d", units="NA", ), numpds=dict( argstr="-numpds %d", units="NA", ), out_file=dict( argstr="-outputfile %s", extensions=None, genfile=True, position=-1, ), output_root=dict( argstr="-outputroot %s", extensions=None, position=-1, ), outputtracts=dict( argstr="-outputtracts %s", ), scheme_file=dict( argstr="-schemefile %s", extensions=None, mandatory=True, ), seed_file=dict( argstr="-seedfile %s", extensions=None, position=2, ), stepsize=dict( argstr="-stepsize %f", requires=["tracker"], ), tracker=dict( argstr="-tracker %s", usedefault=True, ), voxel_dims=dict( argstr="-voxeldims %s", units="mm", ), ) inputs = TrackBootstrap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TrackBootstrap_outputs(): output_map = dict( tracked=dict( extensions=None, ), ) outputs = TrackBootstrap.output_spec() for key, metadata in 
list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_TrackDT.py000066400000000000000000000052761413403311400250230ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import TrackDT def test_TrackDT_inputs(): input_map = dict( anisfile=dict( argstr="-anisfile %s", extensions=None, ), anisthresh=dict( argstr="-anisthresh %f", ), args=dict( argstr="%s", ), curveinterval=dict( argstr="-curveinterval %f", requires=["curvethresh"], ), curvethresh=dict( argstr="-curvethresh %f", ), data_dims=dict( argstr="-datadims %s", units="voxels", ), environ=dict( nohash=True, usedefault=True, ), gzip=dict( argstr="-gzip", ), in_file=dict( argstr="-inputfile %s", extensions=None, position=1, ), inputdatatype=dict( argstr="-inputdatatype %s", ), inputmodel=dict( argstr="-inputmodel %s", usedefault=True, ), interpolator=dict( argstr="-interpolator %s", ), ipthresh=dict( argstr="-ipthresh %f", ), maxcomponents=dict( argstr="-maxcomponents %d", units="NA", ), numpds=dict( argstr="-numpds %d", units="NA", ), out_file=dict( argstr="-outputfile %s", extensions=None, genfile=True, position=-1, ), output_root=dict( argstr="-outputroot %s", extensions=None, position=-1, ), outputtracts=dict( argstr="-outputtracts %s", ), seed_file=dict( argstr="-seedfile %s", extensions=None, position=2, ), stepsize=dict( argstr="-stepsize %f", requires=["tracker"], ), tracker=dict( argstr="-tracker %s", usedefault=True, ), voxel_dims=dict( argstr="-voxeldims %s", units="mm", ), ) inputs = TrackDT.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TrackDT_outputs(): output_map = dict( tracked=dict( extensions=None, ), ) outputs = TrackDT.output_spec() for key, metadata in list(output_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_TrackPICo.py000066400000000000000000000055441413403311400253040ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import TrackPICo def test_TrackPICo_inputs(): input_map = dict( anisfile=dict( argstr="-anisfile %s", extensions=None, ), anisthresh=dict( argstr="-anisthresh %f", ), args=dict( argstr="%s", ), curveinterval=dict( argstr="-curveinterval %f", requires=["curvethresh"], ), curvethresh=dict( argstr="-curvethresh %f", ), data_dims=dict( argstr="-datadims %s", units="voxels", ), environ=dict( nohash=True, usedefault=True, ), gzip=dict( argstr="-gzip", ), in_file=dict( argstr="-inputfile %s", extensions=None, position=1, ), inputdatatype=dict( argstr="-inputdatatype %s", ), inputmodel=dict( argstr="-inputmodel %s", usedefault=True, ), interpolator=dict( argstr="-interpolator %s", ), ipthresh=dict( argstr="-ipthresh %f", ), iterations=dict( argstr="-iterations %d", units="NA", ), maxcomponents=dict( argstr="-maxcomponents %d", units="NA", ), numpds=dict( argstr="-numpds %d", units="NA", ), out_file=dict( argstr="-outputfile %s", extensions=None, genfile=True, position=-1, ), output_root=dict( argstr="-outputroot %s", extensions=None, position=-1, ), outputtracts=dict( argstr="-outputtracts %s", ), pdf=dict( argstr="-pdf %s", ), seed_file=dict( argstr="-seedfile %s", extensions=None, position=2, ), stepsize=dict( argstr="-stepsize %f", requires=["tracker"], ), tracker=dict( argstr="-tracker %s", usedefault=True, ), voxel_dims=dict( argstr="-voxeldims %s", units="mm", ), ) inputs = TrackPICo.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TrackPICo_outputs(): output_map = dict( tracked=dict( extensions=None, ), ) outputs = TrackPICo.output_spec() for key, metadata in 
list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_TractShredder.py000066400000000000000000000026531413403311400262610ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..convert import TractShredder def test_TractShredder_inputs(): input_map = dict( args=dict( argstr="%s", ), bunchsize=dict( argstr="%d", position=2, units="NA", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="< %s", extensions=None, mandatory=True, position=-2, ), offset=dict( argstr="%d", position=1, units="NA", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), space=dict( argstr="%d", position=3, units="NA", ), ) inputs = TractShredder.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TractShredder_outputs(): output_map = dict( shredded=dict( extensions=None, ), ) outputs = TractShredder.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/tests/test_auto_VtkStreamlines.py000066400000000000000000000036551413403311400265010ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..convert import VtkStreamlines def test_VtkStreamlines_inputs(): input_map = dict( args=dict( argstr="%s", ), colourorient=dict( argstr="-colourorient", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr=" < %s", extensions=None, mandatory=True, position=-2, ), inputmodel=dict( argstr="-inputmodel %s", usedefault=True, ), interpolate=dict( argstr="-interpolate", ), interpolatescalars=dict( argstr="-interpolatescalars", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, 
position=-1, ), scalar_file=dict( argstr="-scalarfile %s", extensions=None, position=3, ), seed_file=dict( argstr="-seedfile %s", extensions=None, position=1, ), target_file=dict( argstr="-targetfile %s", extensions=None, position=2, ), voxeldims=dict( argstr="-voxeldims %s", position=4, units="mm", ), ) inputs = VtkStreamlines.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_VtkStreamlines_outputs(): output_map = dict( vtk=dict( extensions=None, ), ) outputs = VtkStreamlines.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino/utils.py000066400000000000000000000046531413403311400214340ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os from ..base import ( traits, TraitedSpec, File, CommandLine, CommandLineInputSpec, InputMultiPath, ) from ...utils.filemanip import split_filename class ImageStatsInputSpec(CommandLineInputSpec): in_files = InputMultiPath( File(exists=True), argstr="-images %s", mandatory=True, position=-1, desc=( "List of images to process. They must " "be in the same space and have the same " "dimensions." ), ) stat = traits.Enum( "min", "max", "mean", "median", "sum", "std", "var", argstr="-stat %s", units="NA", mandatory=True, desc="The statistic to compute.", ) out_type = traits.Enum( "float", "char", "short", "int", "long", "double", argstr="-outputdatatype %s", usedefault=True, desc=('A Camino data type string, default is "float". ' "Type must be signed."), ) output_root = File( argstr="-outputroot %s", mandatory=True, desc=( "Filename root prepended onto the names of the output " " files. The extension will be determined from the input." 
), ) class ImageStatsOutputSpec(TraitedSpec): out_file = File( exists=True, desc="Path of the file computed with the statistic chosen" ) class ImageStats(CommandLine): """ This program computes voxelwise statistics on a series of 3D images. The images must be in the same space; the operation is performed voxelwise and one output is produced per voxel. Examples -------- >>> import nipype.interfaces.camino as cam >>> imstats = cam.ImageStats() >>> imstats.inputs.in_files = ['im1.nii','im2.nii','im3.nii'] >>> imstats.inputs.stat = 'max' >>> imstats.run() # doctest: +SKIP """ _cmd = "imagestats" input_spec = ImageStatsInputSpec output_spec = ImageStatsOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): output_root = self.inputs.output_root first_file = self.inputs.in_files[0] _, _, ext = split_filename(first_file) return output_root + ext nipype-1.7.0/nipype/interfaces/camino2trackvis/000077500000000000000000000000001413403311400215435ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/camino2trackvis/__init__.py000066400000000000000000000004131413403311400236520ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Camino-Trackvis allows interoperability between Camino and TrackVis.""" from .convert import Camino2Trackvis, Trackvis2Camino nipype-1.7.0/nipype/interfaces/camino2trackvis/convert.py000066400000000000000000000117551413403311400236060ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Provides interfaces to various commands provided by Camino-Trackvis.""" import os from ...utils.filemanip import split_filename from ..base import CommandLineInputSpec, CommandLine, traits, TraitedSpec, File class Camino2TrackvisInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="-i %s", mandatory=True, 
position=1, desc="The input .Bfloat (camino) file.", ) out_file = File( argstr="-o %s", genfile=True, position=2, desc="The filename to which to write the .trk (trackvis) file.", ) min_length = traits.Float( argstr="-l %d", position=3, units="mm", desc="The minimum length of tracts to output", ) data_dims = traits.List( traits.Int, argstr="-d %s", sep=",", mandatory=True, position=4, minlen=3, maxlen=3, desc="Three comma-separated integers giving the number of voxels along each dimension of the source scans.", ) voxel_dims = traits.List( traits.Float, argstr="-x %s", sep=",", mandatory=True, position=5, minlen=3, maxlen=3, desc="Three comma-separated numbers giving the size of each voxel in mm.", ) # Change to enum with all combinations? i.e. LAS, LPI, RAS, etc.. voxel_order = File( argstr="--voxel-order %s", mandatory=True, position=6, desc="Set the order in which various directions were stored.\ Specify with three letters consisting of one each \ from the pairs LR, AP, and SI. These stand for Left-Right, \ Anterior-Posterior, and Superior-Inferior. \ Whichever is specified in each position will \ be the direction of increasing order. \ Read coordinate system from a NIfTI file.", ) nifti_file = File( argstr="--nifti %s", exists=True, position=7, desc="Read coordinate system from a NIfTI file.", ) class Camino2TrackvisOutputSpec(TraitedSpec): trackvis = File( exists=True, desc="The filename to which to write the .trk (trackvis) file." ) class Camino2Trackvis(CommandLine): """Wraps camino_to_trackvis from Camino-Trackvis Convert files from camino .Bfloat format to trackvis .trk format. 
Example ------- >>> import nipype.interfaces.camino2trackvis as cam2trk >>> c2t = cam2trk.Camino2Trackvis() >>> c2t.inputs.in_file = 'data.Bfloat' >>> c2t.inputs.out_file = 'streamlines.trk' >>> c2t.inputs.min_length = 30 >>> c2t.inputs.data_dims = [128, 104, 64] >>> c2t.inputs.voxel_dims = [2.0, 2.0, 2.0] >>> c2t.inputs.voxel_order = 'LAS' >>> c2t.run() # doctest: +SKIP """ _cmd = "camino_to_trackvis" input_spec = Camino2TrackvisInputSpec output_spec = Camino2TrackvisOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["trackvis"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): if name == "out_file": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + ".trk" class Trackvis2CaminoInputSpec(CommandLineInputSpec): """Wraps trackvis_to_camino from Camino-Trackvis Convert files from camino .Bfloat format to trackvis .trk format. Example ------- >>> import nipype.interfaces.camino2trackvis as cam2trk >>> t2c = cam2trk.Trackvis2Camino() >>> t2c.inputs.in_file = 'streamlines.trk' >>> t2c.inputs.out_file = 'streamlines.Bfloat' >>> t2c.run() # doctest: +SKIP """ in_file = File( exists=True, argstr="-i %s", mandatory=True, position=1, desc="The input .trk (trackvis) file.", ) out_file = File( argstr="-o %s", genfile=True, position=2, desc="The filename to which to write the .Bfloat (camino).", ) append_file = File( exists=True, argstr="-a %s", position=2, desc="A file to which the append the .Bfloat data. ", ) class Trackvis2CaminoOutputSpec(TraitedSpec): camino = File( exists=True, desc="The filename to which to write the .Bfloat (camino)." 
) class Trackvis2Camino(CommandLine): _cmd = "trackvis_to_camino" input_spec = Trackvis2CaminoInputSpec output_spec = Trackvis2CaminoOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["camino"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): if name == "out_file": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + ".Bfloat" nipype-1.7.0/nipype/interfaces/camino2trackvis/tests/000077500000000000000000000000001413403311400227055ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/camino2trackvis/tests/__init__.py000066400000000000000000000000301413403311400250070ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/camino2trackvis/tests/test_auto_Camino2Trackvis.py000066400000000000000000000034201413403311400303440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..convert import Camino2Trackvis def test_Camino2Trackvis_inputs(): input_map = dict( args=dict( argstr="%s", ), data_dims=dict( argstr="-d %s", mandatory=True, position=4, sep=",", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-i %s", extensions=None, mandatory=True, position=1, ), min_length=dict( argstr="-l %d", position=3, units="mm", ), nifti_file=dict( argstr="--nifti %s", extensions=None, position=7, ), out_file=dict( argstr="-o %s", extensions=None, genfile=True, position=2, ), voxel_dims=dict( argstr="-x %s", mandatory=True, position=5, sep=",", ), voxel_order=dict( argstr="--voxel-order %s", extensions=None, mandatory=True, position=6, ), ) inputs = Camino2Trackvis.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Camino2Trackvis_outputs(): output_map = dict( trackvis=dict( extensions=None, ), ) outputs = 
Camino2Trackvis.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/camino2trackvis/tests/test_auto_Trackvis2Camino.py000066400000000000000000000023541413403311400303510ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..convert import Trackvis2Camino def test_Trackvis2Camino_inputs(): input_map = dict( append_file=dict( argstr="-a %s", extensions=None, position=2, ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-i %s", extensions=None, mandatory=True, position=1, ), out_file=dict( argstr="-o %s", extensions=None, genfile=True, position=2, ), ) inputs = Trackvis2Camino.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Trackvis2Camino_outputs(): output_map = dict( camino=dict( extensions=None, ), ) outputs = Trackvis2Camino.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/cat12/000077500000000000000000000000001413403311400173565ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/cat12/__init__.py000066400000000000000000000002371413403311400214710ustar00rootroot00000000000000from .preprocess import CAT12Segment, CAT12SANLMDenoising from .surface import ( ExtractAdditionalSurfaceParameters, ExtractROIBasedSurfaceMeasures, ) nipype-1.7.0/nipype/interfaces/cat12/base.py000066400000000000000000000005171413403311400206450ustar00rootroot00000000000000class Cell: def __init__(self, arg): self.arg = arg def to_string(self): if isinstance(self.arg, list): v = "\n".join([f"'{el}'" for el in self.arg]) else: v = self.arg return v class NestedCell(Cell): def __str__(self): return 
"{{%s}}" % self.to_string() nipype-1.7.0/nipype/interfaces/cat12/preprocess.py000066400000000000000000000766731413403311400221400ustar00rootroot00000000000000import os from pathlib import Path from nipype.interfaces.base import ( InputMultiPath, TraitedSpec, traits, isdefined, File, Str, ) from nipype.interfaces.cat12.base import Cell from nipype.interfaces.spm import SPMCommand from nipype.interfaces.spm.base import ( SPMCommandInputSpec, ImageFileSPM, scans_for_fnames, scans_for_fname, ) from nipype.utils.filemanip import split_filename, fname_presuffix class CAT12SegmentInputSpec(SPMCommandInputSpec): in_files = InputMultiPath( ImageFileSPM(exists=True), field="data", desc="file to segment", mandatory=True, copyfile=False, ) _help_tpm = ( "Tissue Probability Map. Select the tissue probability image that includes 6 tissue probability " "classes for (1) grey matter, (2) white matter, (3) cerebrospinal fluid, (4) bone, (5) non-brain " "soft tissue, and (6) the background. CAT uses the TPM only for the initial SPM segmentation." ) tpm = InputMultiPath( ImageFileSPM(exists=True), field="tpm", desc=_help_tpm, mandatory=False, copyfile=False, ) _help_shoots_tpm = ( "Shooting Template %d. The Shooting template must be in multi-volume nifti format and should contain GM," " WM, and background segmentations and have to be saved with at least 16 bit. 
" ) shooting_tpm = ImageFileSPM( exists=True, field="extopts.registration.shooting.shootingtpm", desc=_help_shoots_tpm % 0, mandatory=False, copyfile=False, ) shooting_tpm_template_1 = ImageFileSPM( exists=True, desc=_help_shoots_tpm % 1, mandatory=False, copyfile=False ) shooting_tpm_template_2 = ImageFileSPM( exists=True, desc=_help_shoots_tpm % 2, mandatory=False, copyfile=False ) shooting_tpm_template_3 = ImageFileSPM( exists=True, desc=_help_shoots_tpm % 3, mandatory=False, copyfile=False ) shooting_tpm_template_4 = ImageFileSPM( exists=True, desc=_help_shoots_tpm % 4, mandatory=False, copyfile=False ) n_jobs = traits.Int( 1, usedefault=True, mandatory=True, field="nproc", desc="Number of threads" ) _help_affine_reg = ( "Affine Regularization. The procedure is a local optimisation, so it needs reasonable initial " "starting estimates. Images should be placed in approximate alignment using the Display " "function of SPM before beginning. A Mutual Information affine registration with the tissue " "probability maps (D" "Agostino et al, 2004) is used to achieve approximate alignment." ) affine_regularization = Str( default_value="mni", field="opts.affreg", usedefault=True, desc=_help_affine_reg ) _help_bias_acc = ( "Strength of the SPM inhomogeneity (bias) correction that simultaneously controls the SPM " "biasreg, biasfwhm, samp (resolution), and tol (iteration) parameter." ) power_spm_inhomogeneity_correction = traits.Float( default_value=0.5, field="opts.biasacc", usedefault=True, desc=_help_bias_acc ) # Extended options for CAT12 preprocessing _help_app = ( "Affine registration and SPM preprocessing can fail in some subjects with deviating anatomy (e.g. " "other species/neonates) or in images with strong signal inhomogeneities, or untypical intensities " "(e.g. synthetic images). An initial bias correction can help to reduce such problems (see details " 'below). Recommended are the "default" and "full" option.' 
) affine_preprocessing = traits.Int( 1070, field="extopts.APP", desc=_help_app, usedefault=True ) _help_initial_seg = ( "In rare cases the Unified Segmentation can fail in highly abnormal brains, where e.g. the " "cerebrospinal fluid of superlarge ventricles (hydrocephalus) were classified as white " "matter. However, if the affine registration is correct, the AMAP segmentation with an " "prior-independent k-means initialization can be used to replace the SPM brain tissue " "classification. Moreover, if the default Dartel and Shooting registrations will fail then " 'rhe "Optimized Shooting - superlarge ventricles" option for "Spatial registration" is ! ' "required Values: \nnone: 0;\nlight: 1;\nfull: 2;\ndefault: 1070." ) initial_segmentation = traits.Int( 0, field="extopts.spm_kamap", desc=_help_initial_seg, usedefault=True ) _help_las = ( "Additionally to WM-inhomogeneities, GM intensity can vary across different regions such as the motor" " cortex, the basal ganglia, or the occipital lobe. These changes have an anatomical background " "(e.g. iron content, myelinization), but are dependent on the MR-protocol and often lead to " "underestimation of GM at higher intensities and overestimation of CSF at lower intensities. " "Therefore, a local intensity transformation of all tissue classes is used to reduce these effects in" " the image. This local adaptive segmentation (LAS) is applied before the final AMAP segmentation." "Possible Values: \nSPM Unified Segmentation: 0 \nk-means AMAP: 2" ) local_adaptive_seg = traits.Float( 0.5, field="extopts.LASstr", usedefault=True, desc=_help_las ) _help_gcutstr = ( "Method of initial skull-stripping before AMAP segmentation. The SPM approach works quite stable " "for the majority of data. However, in some rare cases parts of GM (i.e. in frontal lobe) might " "be cut. If this happens the GCUT approach is a good alternative. GCUT is a graph-cut/region-" "growing approach starting from the WM area. 
APRG (adaptive probability region-growing) is a new" " method that refines the probability maps of the SPM approach by region-growing techniques of " "the gcut approach with a final surface-based optimization strategy. This is currently the method" " with the most accurate and reliable results. If you use already skull-stripped data you can " "turn off skull-stripping although this is automaticaly detected in most cases. Please note that " "the choice of the skull-stripping method will also influence the estimation of TIV, because the" " methods mainly differ in the handling of the outer CSF around the cortical surface. " "\nPossible Values:\n - none (already skull-stripped): -1;\n - SPM approach: 0; " "\n - GCUT approach: 0.50; \n - APRG approach: 2" ) skull_strip = traits.Float( 2, field="extopts.gcutstr", desc=_help_gcutstr, usedefault=True ) _help_wmhc = ( "WARNING: Please note that the detection of WM hyperintensies is still under development and does " "not have the same accuracy as approaches that additionally consider FLAIR images (e.g. Lesion " "Segmentation Toolbox)! In aging or (neurodegenerative) diseases WM intensity can be reduced " "locally in T1 or increased in T2/PD images. These so-called WM hyperintensies (WMHs) can lead to " "preprocessing errors. Large GM areas next to the ventricle can cause normalization problems. " "Therefore, a temporary correction for normalization is useful if WMHs are expected. CAT allows " "different ways to handle WMHs: " "\n0) No Correction (handled as GM). \n1) Temporary (internal) correction as WM for spatial " "normalization and estimation of cortical thickness. \n2) Permanent correction to WM. " ) wm_hyper_intensity_correction = traits.Int( 1, field="extopts.WMHC", desc=_help_wmhc, usedefault=True ) _help_vox = ( "The (isotropic) voxel sizes of any spatially normalised written images. A non-finite value will be " "replaced by the average voxel size of the tissue probability maps used by the segmentation." 
) voxel_size = traits.Float(1.5, field="extopts.vox", desc=_help_vox, usedefault=True) _help_resampling = ( "Internal resampling for preprocessing.\n The default fixed image resolution offers a good " "trade-off between optimal quality and preprocessing time and memory demands. Standard " "structural data with a voxel resolution around 1mm or even data with high in-plane resolution" " and large slice thickness (e.g. 0.5x0.5x1.5 mm) will benefit from this setting. If you have" ' higher native resolutions the highres option "Fixed 0.8 mm" will sometimes offer slightly' " better preprocessing quality with an increase of preprocessing time and memory demands. In" " case of even higher resolutions and high signal-to-noise ratio (e.g. for 7 T data) the " '"Best native" option will process the data on the highest native resolution. A resolution' " of 0.4x0.7x1.0 mm will be interpolated to 0.4x0.4x0.4 mm. A tolerance range of 0.1 mm is " "used to avoid interpolation artifacts, i.e. a resolution of 0.95x1.01x1.08 mm will not be " 'interpolated in case of the "Fixed 1.0 mm"! This "optimal" option prefers an isotropic voxel ' "size with at least 1.1 mm that is controlled by the median voxel size and a volume term that " "penalizes highly anisotropic voxels." "Values:\nOptimal: [1.0 0.1]\nFixed 1.0 mm: [1.0 0.1];\nFixed 0.8 mm:[0.8 0.1]" "\nBest native: [0.5 0.1]" ) internal_resampling_process = traits.Tuple( traits.Float(1), traits.Float(0.1), minlen=2, maxlen=2, usedefault=True, field="extopts.restypes.optimal", desc="help_resampling", ) _errors_help = ( "Error handling.\nTry to catch preprocessing errors and continue with the next data set or ignore " "all warnings (e.g., bad intensities) and use an experimental pipeline which is still in " "development. In case of errors, CAT continues with the next subject if this option is enabled. 
If " "the experimental option with backup functions is selected and warnings occur, CAT will try to use" " backup routines and skip some processing steps which require good T1 contrasts (e.g., LAS). If " "you want to avoid processing of critical data and ensure that only the main pipeline is used then" ' select the option "Ignore errors (continue with the next subject)". It is strongly recommended ' "to check for preprocessing problems, especially with non-T1 contrasts. " "\nValues:\nnone: 0,\ndefault: 1,\ndetails: 2." ) ignore_errors = traits.Int( 1, field="extopts.ignoreErrors", desc=_errors_help, usedefault=True ) # Writing options _help_surf = ( "Surface and thickness estimation. \nUse projection-based thickness (PBT) (Dahnke et al. 2012) to" " estimate cortical thickness and to create the central cortical surface for left and right " "hemisphere. Surface reconstruction includes topology correction (Yotter et al. 2011), spherical " "inflation (Yotter et al.) and spherical registration. Additionally you can also estimate surface " "parameters such as gyrification, cortical complexity or sulcal depth that can be subsequently " "analyzed at each vertex of the surface. Please note, that surface reconstruction and spherical " "registration additionally requires about 20-60 min of computation time. A fast (1-3 min) surface " "pipeline is available for visual preview (e.g., to check preprocessing quality) in the " "cross-sectional, but not in the longitudinal pipeline. Only the initial surfaces are created with " "a lower resolution and without topology correction, spherical mapping and surface registration. " "Please note that the files with the estimated surface thickness can therefore not be used for " 'further analysis! For distinction, these files contain "preview" in their filename and they' " are not available as batch dependencies objects. 
" ) surface_and_thickness_estimation = traits.Int( 1, field="surface", desc=_help_surf, usedefault=True ) surface_measures = traits.Int( 1, field="output.surf_measures", usedefault=True, desc="Extract surface measures", ) # Templates neuromorphometrics = traits.Bool( True, field="output.ROImenu.atlases.neuromorphometrics", usedefault=True, desc="Extract brain measures for Neuromorphometrics template", ) lpba40 = traits.Bool( True, field="output.ROImenu.atlases.lpba40", usedefault=True, desc="Extract brain measures for LPBA40 template", ) cobra = traits.Bool( True, field="output.ROImenu.atlases.hammers", usedefault=True, desc="Extract brain measures for COBRA template", ) hammers = traits.Bool( True, field="output.ROImenu.atlases.cobra", usedefault=True, desc="Extract brain measures for Hammers template", ) own_atlas = InputMultiPath( ImageFileSPM(exists=True), field="output.ROImenu.atlases.ownatlas", desc="Extract brain measures for a given template", mandatory=False, copyfile=False, ) # Grey matter gm_output_native = traits.Bool( False, field="output.GM.native", usedefault=True, desc="Save modulated grey matter images.", ) gm_output_modulated = traits.Bool( True, field="output.GM.mod", usedefault=True, desc="Save native grey matter images.", ) gm_output_dartel = traits.Bool( False, field="output.GM.dartel", usedefault=True, desc="Save dartel grey matter images.", ) # White matter _wm_desc = "Options to save white matter images." wm_output_native = traits.Bool( False, field="output.WM.native", usedefault=True, desc="Save dartel white matter images.", ) wm_output_modulated = traits.Bool( True, field="output.WM.mod", usedefault=True, desc="Save dartel white matter images.", ) wm_output_dartel = traits.Bool( False, field="output.WM.dartel", usedefault=True, desc="Save dartel white matter images.", ) # CSF matter _csf_desc = "Options to save CSF images." 
csf_output_native = traits.Bool( False, field="output.CSF.native", usedefault=True, desc="Save dartel CSF images.", ) csf_output_modulated = traits.Bool( True, field="output.CSF.mod", usedefault=True, desc="Save dartel CSF images." ) csf_output_dartel = traits.Bool( False, field="output.CSF.dartel", usedefault=True, desc="Save dartel CSF images.", ) # Labels _help_label_desc = ( "This is the option to save a labeled version of your segmentations in the %s space for fast visual " "comparision. Labels are saved as Partial Volume Estimation (PVE) values with different mix " "classes for GM-WM (2.5) and GM-CSF (1.5). BG=0, CSF=1, GM=2, WM=3, WMH=4 (if WMHC=3), " "SL=1.5 (if SLC)" ) label_native = traits.Bool( False, field="output.label.native", usedefault=True, desc=_help_label_desc % "native", ) label_warped = traits.Bool( True, field="output.label.warped", usedefault=True, desc=_help_label_desc % "warped", ) label_dartel = traits.Bool( False, field="output.label.dartel", usedefault=True, desc=_help_label_desc % "dartel", ) output_labelnative = traits.Bool( False, field="output.labelnative", usedefault=True, desc=_help_label_desc % "native", ) # Bias save_bias_corrected = traits.Bool( True, field="output.bias.warped", usedefault=True, desc="Save bias corrected image", ) # las _las_desc = ( "This is the option to save a bias, noise, and local intensity corrected version of the original T1" " image in the %s space. MR images are usually corrupted by a smooth, spatially varying artifact that modulates the" " intensity of the image (bias). These artifacts, although not usually a problem for visual " "inspection, can impede automated processing of the images. The bias corrected version should have " "more uniform intensities within the different types of tissues and can be saved in native space " "and/or normalised. Noise is corrected by an adaptive non-local mean (NLM) filter (Manjon 2008, " "Medical Image Analysis 12)." 
) las_native = traits.Bool( False, field="output.las.native", usedefault=True, desc=_las_desc % "native" ) las_warped = traits.Bool( True, field="output.las.warped", usedefault=True, desc=_las_desc % "warped" ) las_dartel = traits.Bool( False, field="output.las.dartel", usedefault=True, desc=_las_desc % "dartel" ) # Jacobian Warped _help_jacobian = ( "This is the option to save the Jacobian determinant, which expresses local volume changes. This" " image can be used in a pure deformation based morphometry (DBM) design. Please note that the" " affine part of the deformation field is ignored. Thus, there is no need for any additional" " correction for different brain sizes using ICV." ) jacobianwarped = traits.Bool( True, field="output.jacobianwarped", usedefault=True, desc=_help_jacobian ) # Deformation Fields _help_warp = ( "Deformation fields can be saved to disk, and used by the Deformations Utility and/or applied to " "coregistered data from other modalities (e.g. fMRI). For spatially normalising images to MNI space," " you will need the forward deformation, whereas for spatially normalising (eg) GIFTI surface files," " you" "ll need the inverse. It is also possible to transform data in MNI space on to the individual" " subject, which also requires the inverse transform. Deformations are saved as .nii files, which" " contain three volumes to encode the x, y and z coordinates." 
"\nValues: No:[0 0];\nImage->Template (forward): [1 0];\nTemplate->Image (inverse): [0 1]; " "\ninverse + forward: [1 1]" ) warps = traits.Tuple( traits.Int(1), traits.Int(0), minlen=2, maxlen=2, field="output.warps", usedefault=True, desc=_help_warp, ) class CAT12SegmentOutputSpec(TraitedSpec): ########################################## # Label XML files ########################################## label_files = traits.List( File(exists=True), desc="Files with the measures extracted for OI ands ROIs" ) label_rois = File(exists=True, desc="Files with thickness values of ROIs.") label_roi = File(exists=True, desc="Files with thickness values of ROI.") ########################################## # MRI .nii files ########################################## mri_images = traits.List(File(exists=True), desc="Different segmented images.") # Grey Matter gm_modulated_image = File(exists=True, desc="Grey matter modulated image.") gm_dartel_image = File(exists=True, desc="Grey matter dartel image.") gm_native_image = File(exists=True, desc="Grey matter native space.") # White Matter wm_modulated_image = File(exists=True, desc="White matter modulated image.") wm_dartel_image = File(exists=True, desc="White matter dartel image.") wm_native_image = File(exists=True, desc="White matter in native space.") # CSF csf_modulated_image = File(exists=True, desc="CSF modulated image.") csf_dartel_image = File(exists=True, desc="CSF dartel image.") csf_native_image = File(exists=True, desc="CSF in native space.") bias_corrected_image = File(exists=True, desc="Bias corrected image") ########################################## # Surface files ########################################## surface_files = traits.List(File(exists=True), desc="Surface files") # Right hemisphere rh_central_surface = File(exists=True, desc="Central right hemisphere files") rh_sphere_surface = File(exists=True, desc="Sphere right hemisphere files") # Left hemisphere lh_central_surface = File(exists=True, desc="Central 
left hemisphere files") lh_sphere_surface = File(exists=True, desc="Sphere left hemisphere files") # Report files report_files = traits.List(File(exists=True), desc="Report files.") report = File(exists=True, desc="Report file.") class CAT12Segment(SPMCommand): """ CAT12: Segmentation This toolbox is an extension to the default segmentation in SPM12, but uses a completely different segmentation approach. The segmentation approach is based on an Adaptive Maximum A Posterior (MAP) technique without the need for a priori information about tissue probabilities. That is, the Tissue Probability Maps (TPM) are not used constantly in the sense of the classical Unified Segmentation approach (Ashburner et. al. 2005), but just for spatial normalization. The following AMAP estimation is adaptive in the sense that local variations of the parameters (i.e., means and variance) are modeled as slowly varying spatial functions (Rajapakse et al. 1997). This not only accounts for intensity inhomogeneities but also for other local variations of intensity. Additionally, the segmentation approach uses a Partial Volume Estimation (PVE) with a simplified mixed model of at most two tissue types (Tohka et al. 2004). We start with an initial segmentation into three pure classes: gray matter (GM), white matter (WM), and cerebrospinal fluid (CSF) based on the above described AMAP estimation. The initial segmentation is followed by a PVE of two additional mixed classes: GM-WM and GM-CSF. This results in an estimation of the amount (or fraction) of each pure tissue type present in every voxel (as single voxels - given by Another important extension to the SPM12 segmentation is the integration of the Dartel or Geodesic Shooting registration into the toolbox by an already existing Dartel/Shooting template in MNI space. This template was derived from 555 healthy control subjects of the IXI-database (http://www.brain-development.org) and provides the several Dartel or Shooting iterations. 
Thus, for the majority of studies the creation of sample-specific templates is not necessary anymore and is mainly recommended for children data.'}; http://www.neuro.uni-jena.de/cat12/CAT12-Manual.pdf#page=15 Examples -------- >>> path_mr = 'structural.nii' >>> cat = CAT12Segment(in_files=path_mr) >>> cat.run() # doctest: +SKIP """ input_spec = CAT12SegmentInputSpec output_spec = CAT12SegmentOutputSpec def __init__(self, **inputs): _local_version = SPMCommand().version if _local_version and "12." in _local_version: self._jobtype = "tools" self._jobname = "cat.estwrite" SPMCommand.__init__(self, **inputs) def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt == "in_files": if isinstance(val, list): return scans_for_fnames(val) else: return scans_for_fname(val) elif opt in ["tpm", "shooting_tpm"]: return Cell2Str(val) return super(CAT12Segment, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() f = self.inputs.in_files[0] pth, base, ext = split_filename(f) outputs["mri_images"] = [ str(mri) for mri in Path(pth).glob("mri/*") if mri.is_file() ] for tidx, tissue in enumerate(["gm", "wm", "csf"]): for image, prefix in [("modulated", "mw"), ("dartel", "r"), ("native", "")]: outtype = f"{tissue}_output_{image}" if isdefined(getattr(self.inputs, outtype)) and getattr( self.inputs, outtype ): outfield = f"{tissue}_{image}_image" prefix = os.path.join("mri", f"{prefix}p{tidx + 1}") if image != "dartel": outputs[outfield] = fname_presuffix(f, prefix=prefix) else: outputs[outfield] = fname_presuffix( f, prefix=prefix, suffix="_rigid" ) if self.inputs.save_bias_corrected: outputs["bias_corrected_image"] = fname_presuffix( f, prefix=os.path.join("mri", "wmi") ) outputs["surface_files"] = [ str(surf) for surf in Path(pth).glob("surf/*") if surf.is_file() ] for hemisphere in ["rh", "lh"]: for suffix in ["central", "sphere"]: outfield = f"{hemisphere}_{suffix}_surface" outputs[outfield] = 
fname_presuffix( f, prefix=os.path.join("surf", f"{hemisphere}.{suffix}."), suffix=".gii", use_ext=False, ) outputs["report_files"] = outputs["report_files"] = [ str(report) for report in Path(pth).glob("report/*") if report.is_file() ] outputs["report"] = fname_presuffix( f, prefix=os.path.join("report", f"cat_"), suffix=".xml", use_ext=False ) outputs["label_files"] = [ str(label) for label in Path(pth).glob("label/*") if label.is_file() ] outputs["label_rois"] = fname_presuffix( f, prefix=os.path.join("label", "catROIs_"), suffix=".xml", use_ext=False ) outputs["label_roi"] = fname_presuffix( f, prefix=os.path.join("label", "catROI_"), suffix=".xml", use_ext=False ) return outputs class CAT12SANLMDenoisingInputSpec(SPMCommandInputSpec): in_files = InputMultiPath( ImageFileSPM(exists=True), field="data", desc="Images for filtering.", mandatory=True, copyfile=False, ) spm_type = traits.Enum( "float32", "uint16", "uint8", "same", field="spm_type", usedefault=True, desc="Data type of the output images. 'same' matches the input image type.", ) intlim = traits.Int( field="intlim", default_value=100, usedefault=True, desc="intensity limitation (default = 100)", ) filename_prefix = traits.Str( field="prefix", default_value="sanlm_", usedefault=True, desc="Filename prefix. Specify the string to be prepended to the filenames of the filtered image file(s).", ) filename_suffix = traits.Str( field="suffix", default_value="", usedefault=True, desc="Filename suffix. Specify the string to be appended to the filenames of the filtered image file(s).", ) addnoise = traits.Float( default_value=0.5, usedefault=True, field="addnoise", desc="""Strength of additional noise in noise-free regions. Add minimal amount of noise in regions without any noise to avoid image segmentation problems. 
This parameter defines the strength of additional noise as percentage of the average signal intensity.""", ) rician = traits.Bool( True, field="rician", usedefault=True, desc="""Rician noise MRIs can have Gaussian or Rician distributed noise with uniform or nonuniform variance across the image. If SNR is high enough (>3) noise can be well approximated by Gaussian noise in the foreground. However, for SENSE reconstruction or DTI data a Rician distribution is expected. Please note that the Rician noise estimation is sensitive for large signals in the neighbourhood and can lead to artefacts, e.g. cortex can be affected by very high values in the scalp or in blood vessels.""", ) replace_nan_and_inf = traits.Bool( True, field="replaceNANandINF", usedefault=True, desc="Replace NAN by 0, -INF by the minimum and INF by the maximum of the image.", ) noisecorr_strength = traits.Enum( "-Inf", 2, 4, field="nlmfilter.optimized.NCstr", usedefault=True, desc="""Strength of Noise Corrections Strength of the (sub-resolution) spatial adaptive non local means (SANLM) noise correction. Please note that the filter strength is automatically estimated. Change this parameter only for specific conditions. The "light" option applies half of the filter strength of the adaptive "medium" cases, whereas the "strong" option uses the full filter strength, force sub-resolution filtering and applies an additional iteration. Sub-resolution filtering is only used in case of high image resolution below 0.8 mm or in case of the "strong" option. light = 2, medium = -Inf, strong = 4""", ) class CAT12SANLMDenoisingOutputSpec(TraitedSpec): out_file = File(desc="out file") class CAT12SANLMDenoising(SPMCommand): """ Spatially adaptive non-local means (SANLM) denoising filter This function applies an spatial adaptive (sub-resolution) non-local means denoising filter to the data. This filter will remove noise while preserving edges. 
The filter strength is automatically estimated based on the standard deviation of the noise. This filter is internally used in the segmentation procedure anyway. Thus, it is not necessary (and not recommended) to apply the filter before segmentation. ______________________________________________________________________ Christian Gaser, Robert Dahnke Structural Brain Mapping Group (http://www.neuro.uni-jena.de) Departments of Neurology and Psychiatry Jena University Hospital ______________________________________________________________________ Examples -------- >>> from nipype.interfaces import cat12 >>> c = cat12.CAT12SANLMDenoising() >>> c.inputs.in_files = 'anatomical.nii' >>> c.run() # doctest: +SKIP """ input_spec = CAT12SANLMDenoisingInputSpec output_spec = CAT12SANLMDenoisingOutputSpec def __init__(self, **inputs): _local_version = SPMCommand().version if _local_version and "12." in _local_version: self._jobtype = "tools" self._jobname = "cat.tools.sanlm" SPMCommand.__init__(self, **inputs) def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt == "in_files": if isinstance(val, list): return scans_for_fnames(val) else: return scans_for_fname(val) if opt == "spm_type": type_map = {"same": 0, "uint8": 2, "uint16": 512, "float32": 16} val = type_map[val] return super(CAT12SANLMDenoising, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = fname_presuffix( self.inputs.in_files[0], newpath=os.getcwd(), prefix=self.inputs.filename_prefix, suffix=self.inputs.filename_suffix, ) return outputs class Cell2Str(Cell): def __str__(self): """Convert input to appropriate format for cat12""" return "{'%s'}" % self.to_string() nipype-1.7.0/nipype/interfaces/cat12/surface.py000066400000000000000000000243301413403311400213620ustar00rootroot00000000000000import os from pathlib import Path from nipype.interfaces.base import File, InputMultiPath, TraitedSpec, traits, isdefined 
from nipype.interfaces.cat12.base import NestedCell, Cell from nipype.interfaces.spm import SPMCommand from nipype.interfaces.spm.base import SPMCommandInputSpec from nipype.utils.filemanip import split_filename class ExtractAdditionalSurfaceParametersInputSpec(SPMCommandInputSpec): left_central_surfaces = InputMultiPath( File(exists=True), field="data_surf", desc="Left and central surfaces files", mandatory=True, copyfile=False, ) surface_files = InputMultiPath( File(exists=True), desc="All surface files", mandatory=False, copyfile=False ) gyrification = traits.Bool( True, field="GI", usedefault=True, desc="Extract gyrification index (GI) based on absolute mean curvature. The" " method is described in Luders et al. Neuroimage, 29:1224-1230, 2006", ) gmv = traits.Bool(True, field="gmv", usedefault=True, desc="Extract volume") area = traits.Bool(True, field="area", usedefault=True, desc="Extract area surface") depth = traits.Bool( False, field="SD", usedefault=True, desc="Extract sulcus depth based on euclidian distance between the central " "surface anf its convex hull.", ) fractal_dimension = traits.Bool( False, field="FD", usedefault=True, desc="Extract cortical complexity (fractal dimension) which is " "described in Yotter ar al. 
Neuroimage, 56(3): 961-973, 2011", ) class ExtractAdditionalSurfaceParametersOutputSpec(TraitedSpec): lh_extracted_files = traits.List( File(exists=True), desc="Files of left Hemisphere extracted measures" ) rh_extracted_files = traits.List( File(exists=True), desc="Files of right Hemisphere extracted measures" ) lh_gyrification = traits.List( File(exists=True), desc="Gyrification of left Hemisphere" ) rh_gyrification = traits.List( File(exists=True), desc="Gyrification of right Hemisphere" ) lh_gmv = traits.List( File(exists=True), desc="Grey matter volume of left Hemisphere" ) rh_gmv = traits.List( File(exists=True), desc="Grey matter volume of right Hemisphere" ) lh_area = traits.List(File(exists=True), desc="Area of left Hemisphere") rh_area = traits.List(File(exists=True), desc="Area of right Hemisphere") lh_depth = traits.List(File(exists=True), desc="Depth of left Hemisphere") rh_depth = traits.List(File(exists=True), desc="Depth of right Hemisphere") lh_fractaldimension = traits.List( File(exists=True), desc="Fractal Dimension of left Hemisphere" ) rh_fractaldimension = traits.List( File(exists=True), desc="Fractal Dimension of right Hemisphere" ) class ExtractAdditionalSurfaceParameters(SPMCommand): """ Additional surface parameters can be extracted that can be used for statistical analysis, such as: * Central surfaces * Surface area * Surface GM volume * Gyrification Index * Sulcus depth * Toro's gyrification index * Shaer's local gyrification index * Laplacian gyrification indeces * Addicional surfaces * Measure normalization * Lazy processing http://www.neuro.uni-jena.de/cat12/CAT12-Manual.pdf#page=53 Examples -------- >>> # Set the left surface files, both will be processed >>> lh_path_central = 'lh.central.structural.gii' >>> # Put here all surface files generated by CAT12 Segment, this is only required if the this approach is putted in >>> surf_files = ['lh.sphere.reg.structural.gii', 'rh.sphere.reg.structural.gii', 'lh.sphere.structural.gii', 
'rh.sphere.structural.gii', 'rh.central.structural.gii', 'lh.pbt.structural', 'rh.pbt.structural'] >>> extract_additional_measures = ExtractAdditionalSurfaceParameters(left_central_surfaces=lh_path_central, surface_files=surf_files) >>> extract_additional_measures.run() # doctest: +SKIP """ input_spec = ExtractAdditionalSurfaceParametersInputSpec output_spec = ExtractAdditionalSurfaceParametersOutputSpec def __init__(self, **inputs): _local_version = SPMCommand().version if _local_version and "12." in _local_version: self._jobtype = "tools" self._jobname = "cat.stools.surfextract" super().__init__(**inputs) def _list_outputs(self): outputs = self._outputs().get() names_outputs = [ (self.inputs.gyrification, "gyrification"), (self.inputs.gmv, "gmv"), (self.inputs.area, "area"), (self.inputs.depth, "depth"), (self.inputs.fractal_dimension, "fractaldimension"), ] for filename in self.inputs.left_central_surfaces: pth, base, ext = split_filename(filename) # The first part of the filename is rh.central or lh.central original_filename = base.split(".", 2)[-1] for extracted_parameter, parameter_name in names_outputs: if extracted_parameter: for hemisphere in ["rh", "lh"]: all_files_hemisphere = hemisphere + "_extracted_files" name_hemisphere = hemisphere + "_" + parameter_name if not isdefined(outputs[name_hemisphere]): outputs[name_hemisphere] = [] if not isdefined(outputs[all_files_hemisphere]): outputs[all_files_hemisphere] = [] generated_filename = ".".join( [hemisphere, parameter_name, original_filename] ) outputs[name_hemisphere].append( os.path.join(pth, generated_filename) ) # Add all hemisphere files into one list, this is important because only the left hemisphere # files are used as input in the Surface ROI Tools, fpr instance. 
outputs[all_files_hemisphere].append( os.path.join(pth, generated_filename) ) return outputs def _format_arg(self, opt, spec, val): if opt == "left_central_surfaces": return Cell2Str(val) return super(ExtractAdditionalSurfaceParameters, self)._format_arg( opt, spec, val ) class ExtractROIBasedSurfaceMeasuresInputSpec(SPMCommandInputSpec): # Only these files are given as input, yet the right hemisphere (rh) files should also be on the processing # directory. surface_files = InputMultiPath( File(exists=True), desc="Surface data files. This variable should be a list " "with all", mandatory=False, copyfile=False, ) lh_roi_atlas = InputMultiPath( File(exists=True), field="rdata", desc="(Left) ROI Atlas. These are the ROI's ", mandatory=True, copyfile=False, ) rh_roi_atlas = InputMultiPath( File(exists=True), desc="(Right) ROI Atlas. These are the ROI's ", mandatory=False, copyfile=False, ) lh_surface_measure = InputMultiPath( File(exists=True), field="cdata", desc="(Left) Surface data files. ", mandatory=True, copyfile=False, ) rh_surface_measure = InputMultiPath( File(exists=True), desc="(Right) Surface data files.", mandatory=False, copyfile=False, ) class ExtractROIBasedSurfaceMeasuresOutputSpec(TraitedSpec): label_files = traits.List( File(exists=True), desc="Files with the measures extracted for ROIs." ) class ExtractROIBasedSurfaceMeasures(SPMCommand): """ Extract ROI-based surface values While ROI-based values for VBM (volume) data are automatically saved in the ``label`` folder as XML file it is necessary to additionally extract these values for surface data (except for thickness which is automatically extracted during segmentation). This has to be done after preprocessing the data and creating cortical surfaces. You can extract ROI-based values for cortical thickness but also for any other surface parameter that was extracted using the Extract Additional Surface Parameters such as volume, area, depth, gyrification and fractal dimension. 
http://www.neuro.uni-jena.de/cat12/CAT12-Manual.pdf#page=53 Examples -------- >>> # Template surface files >>> lh_atlas = 'lh.aparc_a2009s.freesurfer.annot' >>> rh_atlas = 'rh.aparc_a2009s.freesurfer.annot' >>> surf_files = ['lh.sphere.reg.structural.gii', 'rh.sphere.reg.structural.gii', 'lh.sphere.structural.gii', 'rh.sphere.structural.gii', 'lh.central.structural.gii', 'rh.central.structural.gii', 'lh.pbt.structural', 'rh.pbt.structural'] >>> lh_measure = 'lh.area.structural' >>> extract_additional_measures = ExtractROIBasedSurfaceMeasures(surface_files=surf_files, lh_surface_measure=lh_measure, lh_roi_atlas=lh_atlas, rh_roi_atlas=rh_atlas) >>> extract_additional_measures.run() # doctest: +SKIP """ input_spec = ExtractROIBasedSurfaceMeasuresInputSpec output_spec = ExtractROIBasedSurfaceMeasuresOutputSpec def __init__(self, **inputs): _local_version = SPMCommand().version if _local_version and "12." in _local_version: self._jobtype = "tools" self._jobname = "cat.stools.surf2roi" SPMCommand.__init__(self, **inputs) def _format_arg(self, opt, spec, val): if opt == "lh_surface_measure": return NestedCell(val) elif opt == "lh_roi_atlas": return Cell2Str(val) return super(ExtractROIBasedSurfaceMeasures, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() pth, base, ext = split_filename(self.inputs.lh_surface_measure[0]) outputs["label_files"] = [ str(label) for label in Path(pth).glob("label/*") if label.is_file() ] return outputs class Cell2Str(Cell): def __str__(self): """Convert input to appropriate format for cat12""" return "{%s}" % self.to_string() 
nipype-1.7.0/nipype/interfaces/cat12/tests/000077500000000000000000000000001413403311400205205ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/cat12/tests/__init__.py000066400000000000000000000000001413403311400226170ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/cat12/tests/test_auto_CAT12SANLMDenoising.py000066400000000000000000000035231413403311400263710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import CAT12SANLMDenoising def test_CAT12SANLMDenoising_inputs(): input_map = dict( addnoise=dict( field="addnoise", usedefault=True, ), filename_prefix=dict( field="prefix", usedefault=True, ), filename_suffix=dict( field="suffix", usedefault=True, ), in_files=dict( copyfile=False, field="data", mandatory=True, ), intlim=dict( field="intlim", usedefault=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), noisecorr_strength=dict( field="nlmfilter.optimized.NCstr", usedefault=True, ), paths=dict(), replace_nan_and_inf=dict( field="replaceNANandINF", usedefault=True, ), rician=dict( field="rician", usedefault=True, ), spm_type=dict( field="spm_type", usedefault=True, ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = CAT12SANLMDenoising.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CAT12SANLMDenoising_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = CAT12SANLMDenoising.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/cat12/tests/test_auto_CAT12Segment.py000066400000000000000000000165331413403311400252660ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import CAT12Segment def test_CAT12Segment_inputs(): input_map 
= dict( affine_preprocessing=dict( field="extopts.APP", usedefault=True, ), affine_regularization=dict( field="opts.affreg", usedefault=True, ), cobra=dict( field="output.ROImenu.atlases.hammers", usedefault=True, ), csf_output_dartel=dict( field="output.CSF.dartel", usedefault=True, ), csf_output_modulated=dict( field="output.CSF.mod", usedefault=True, ), csf_output_native=dict( field="output.CSF.native", usedefault=True, ), gm_output_dartel=dict( field="output.GM.dartel", usedefault=True, ), gm_output_modulated=dict( field="output.GM.mod", usedefault=True, ), gm_output_native=dict( field="output.GM.native", usedefault=True, ), hammers=dict( field="output.ROImenu.atlases.cobra", usedefault=True, ), ignore_errors=dict( field="extopts.ignoreErrors", usedefault=True, ), in_files=dict( copyfile=False, field="data", mandatory=True, ), initial_segmentation=dict( field="extopts.spm_kamap", usedefault=True, ), internal_resampling_process=dict( field="extopts.restypes.optimal", maxlen=2, minlen=2, usedefault=True, ), jacobianwarped=dict( field="output.jacobianwarped", usedefault=True, ), label_dartel=dict( field="output.label.dartel", usedefault=True, ), label_native=dict( field="output.label.native", usedefault=True, ), label_warped=dict( field="output.label.warped", usedefault=True, ), las_dartel=dict( field="output.las.dartel", usedefault=True, ), las_native=dict( field="output.las.native", usedefault=True, ), las_warped=dict( field="output.las.warped", usedefault=True, ), local_adaptive_seg=dict( field="extopts.LASstr", usedefault=True, ), lpba40=dict( field="output.ROImenu.atlases.lpba40", usedefault=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), n_jobs=dict( field="nproc", mandatory=True, usedefault=True, ), neuromorphometrics=dict( field="output.ROImenu.atlases.neuromorphometrics", usedefault=True, ), output_labelnative=dict( field="output.labelnative", usedefault=True, ), own_atlas=dict( copyfile=False, field="output.ROImenu.atlases.ownatlas", 
mandatory=False, ), paths=dict(), power_spm_inhomogeneity_correction=dict( field="opts.biasacc", usedefault=True, ), save_bias_corrected=dict( field="output.bias.warped", usedefault=True, ), shooting_tpm=dict( copyfile=False, extensions=[".hdr", ".img", ".img.gz", ".nii"], field="extopts.registration.shooting.shootingtpm", mandatory=False, ), shooting_tpm_template_1=dict( copyfile=False, extensions=[".hdr", ".img", ".img.gz", ".nii"], mandatory=False, ), shooting_tpm_template_2=dict( copyfile=False, extensions=[".hdr", ".img", ".img.gz", ".nii"], mandatory=False, ), shooting_tpm_template_3=dict( copyfile=False, extensions=[".hdr", ".img", ".img.gz", ".nii"], mandatory=False, ), shooting_tpm_template_4=dict( copyfile=False, extensions=[".hdr", ".img", ".img.gz", ".nii"], mandatory=False, ), skull_strip=dict( field="extopts.gcutstr", usedefault=True, ), surface_and_thickness_estimation=dict( field="surface", usedefault=True, ), surface_measures=dict( field="output.surf_measures", usedefault=True, ), tpm=dict( copyfile=False, field="tpm", mandatory=False, ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), voxel_size=dict( field="extopts.vox", usedefault=True, ), warps=dict( field="output.warps", maxlen=2, minlen=2, usedefault=True, ), wm_hyper_intensity_correction=dict( field="extopts.WMHC", usedefault=True, ), wm_output_dartel=dict( field="output.WM.dartel", usedefault=True, ), wm_output_modulated=dict( field="output.WM.mod", usedefault=True, ), wm_output_native=dict( field="output.WM.native", usedefault=True, ), ) inputs = CAT12Segment.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CAT12Segment_outputs(): output_map = dict( bias_corrected_image=dict( extensions=None, ), csf_dartel_image=dict( extensions=None, ), csf_modulated_image=dict( extensions=None, ), csf_native_image=dict( extensions=None, ), gm_dartel_image=dict( 
extensions=None, ), gm_modulated_image=dict( extensions=None, ), gm_native_image=dict( extensions=None, ), label_files=dict(), label_roi=dict( extensions=None, ), label_rois=dict( extensions=None, ), lh_central_surface=dict( extensions=None, ), lh_sphere_surface=dict( extensions=None, ), mri_images=dict(), report=dict( extensions=None, ), report_files=dict(), rh_central_surface=dict( extensions=None, ), rh_sphere_surface=dict( extensions=None, ), surface_files=dict(), wm_dartel_image=dict( extensions=None, ), wm_modulated_image=dict( extensions=None, ), wm_native_image=dict( extensions=None, ), ) outputs = CAT12Segment.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/cat12/tests/test_auto_ExtractAdditionalSurfaceParameters.py000066400000000000000000000037131413403311400321250ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..surface import ExtractAdditionalSurfaceParameters def test_ExtractAdditionalSurfaceParameters_inputs(): input_map = dict( area=dict( field="area", usedefault=True, ), depth=dict( field="SD", usedefault=True, ), fractal_dimension=dict( field="FD", usedefault=True, ), gmv=dict( field="gmv", usedefault=True, ), gyrification=dict( field="GI", usedefault=True, ), left_central_surfaces=dict( copyfile=False, field="data_surf", mandatory=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), paths=dict(), surface_files=dict( copyfile=False, mandatory=False, ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = ExtractAdditionalSurfaceParameters.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ExtractAdditionalSurfaceParameters_outputs(): output_map = dict( lh_area=dict(), lh_depth=dict(), lh_extracted_files=dict(), 
lh_fractaldimension=dict(), lh_gmv=dict(), lh_gyrification=dict(), rh_area=dict(), rh_depth=dict(), rh_extracted_files=dict(), rh_fractaldimension=dict(), rh_gmv=dict(), rh_gyrification=dict(), ) outputs = ExtractAdditionalSurfaceParameters.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/cat12/tests/test_auto_ExtractROIBasedSurfaceMeasures.py000066400000000000000000000027641413403311400311330ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..surface import ExtractROIBasedSurfaceMeasures def test_ExtractROIBasedSurfaceMeasures_inputs(): input_map = dict( lh_roi_atlas=dict( copyfile=False, field="rdata", mandatory=True, ), lh_surface_measure=dict( copyfile=False, field="cdata", mandatory=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), paths=dict(), rh_roi_atlas=dict( copyfile=False, mandatory=False, ), rh_surface_measure=dict( copyfile=False, mandatory=False, ), surface_files=dict( copyfile=False, mandatory=False, ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = ExtractROIBasedSurfaceMeasures.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ExtractROIBasedSurfaceMeasures_outputs(): output_map = dict( label_files=dict(), ) outputs = ExtractROIBasedSurfaceMeasures.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/cmtk/000077500000000000000000000000001413403311400174025ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/cmtk/__init__.py000066400000000000000000000005241413403311400215140ustar00rootroot00000000000000# -*- coding: utf-8 -*- """CMP implements a full processing 
pipeline for creating connectomes with dMRI data.""" from .cmtk import ROIGen, CreateMatrix, CreateNodes from .nx import NetworkXMetrics, AverageNetworks from .parcellation import Parcellate from .convert import CFFConverter, MergeCNetworks from .nbs import NetworkBasedStatistic nipype-1.7.0/nipype/interfaces/cmtk/base.py000066400000000000000000000013311413403311400206640ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Base interface for cmtk """ from ..base import LibraryBaseInterface from ...utils.misc import package_check class CFFBaseInterface(LibraryBaseInterface): _pkg = "cfflib" # Originally set in convert, nbs, nx, parcellation # Set here to be imported, in case anybody depends on its presence # Remove in 2.0 have_cmp = True try: package_check("cmp") except ImportError: have_cmp = False have_cfflib = True try: package_check("cfflib") except ImportError: have_cfflib = False have_cv = True try: package_check("cviewer") except ImportError: have_cv = False nipype-1.7.0/nipype/interfaces/cmtk/cmtk.py000066400000000000000000001175321413403311400207230ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import pickle import os.path as op import numpy as np import nibabel as nb import networkx as nx from ... 
import logging from ...utils.filemanip import split_filename from ..base import ( BaseInterface, BaseInterfaceInputSpec, traits, File, TraitedSpec, Directory, OutputMultiPath, isdefined, ) iflogger = logging.getLogger("nipype.interface") def length(xyz, along=False): """ Euclidean length of track line Parameters ---------- xyz : array-like shape (N,3) array representing x,y,z of N points in a track along : bool, optional If True, return array giving cumulative length along track, otherwise (default) return scalar giving total length. Returns ------- L : scalar or array shape (N-1,) scalar in case of `along` == False, giving total length, array if `along` == True, giving cumulative lengths. Examples -------- >>> xyz = np.array([[1,1,1],[2,3,4],[0,0,0]]) >>> expected_lens = np.sqrt([1+2**2+3**2, 2**2+3**2+4**2]) >>> length(xyz) == expected_lens.sum() True >>> len_along = length(xyz, along=True) >>> np.allclose(len_along, expected_lens.cumsum()) True >>> length([]) 0 >>> length([[1, 2, 3]]) 0 >>> length([], along=True) array([0]) """ xyz = np.asarray(xyz) if xyz.shape[0] < 2: if along: return np.array([0]) return 0 dists = np.sqrt((np.diff(xyz, axis=0) ** 2).sum(axis=1)) if along: return np.cumsum(dists) return np.sum(dists) def get_rois_crossed(pointsmm, roiData, voxelSize): n_points = len(pointsmm) rois_crossed = [] for j in range(0, n_points): # store point x = int(pointsmm[j, 0] / float(voxelSize[0])) y = int(pointsmm[j, 1] / float(voxelSize[1])) z = int(pointsmm[j, 2] / float(voxelSize[2])) if not roiData[x, y, z] == 0: rois_crossed.append(roiData[x, y, z]) rois_crossed = list( dict.fromkeys(rois_crossed).keys() ) # Removed duplicates from the list return rois_crossed def get_connectivity_matrix(n_rois, list_of_roi_crossed_lists): connectivity_matrix = np.zeros((n_rois, n_rois), dtype=np.uint) for rois_crossed in list_of_roi_crossed_lists: for idx_i, roi_i in enumerate(rois_crossed): for idx_j, roi_j in enumerate(rois_crossed): if idx_i > idx_j: if not roi_i == 
roi_j: connectivity_matrix[roi_i - 1, roi_j - 1] += 1 connectivity_matrix = connectivity_matrix + connectivity_matrix.T return connectivity_matrix def create_allpoints_cmat(streamlines, roiData, voxelSize, n_rois): """Create the intersection arrays for each fiber""" n_fib = len(streamlines) pc = -1 # Computation for each fiber final_fiber_ids = [] list_of_roi_crossed_lists = [] for i, fiber in enumerate(streamlines): pcN = int(round(float(100 * i) / n_fib)) if pcN > pc and pcN % 1 == 0: pc = pcN print("%4.0f%%" % (pc)) rois_crossed = get_rois_crossed(fiber[0], roiData, voxelSize) if len(rois_crossed) > 0: list_of_roi_crossed_lists.append(list(rois_crossed)) final_fiber_ids.append(i) connectivity_matrix = get_connectivity_matrix(n_rois, list_of_roi_crossed_lists) dis = n_fib - len(final_fiber_ids) iflogger.info( "Found %i (%f percent out of %i fibers) fibers that start or " "terminate in a voxel which is not labeled. (orphans)", dis, dis * 100.0 / n_fib, n_fib, ) iflogger.info( "Valid fibers: %i (%f percent)", n_fib - dis, 100 - dis * 100.0 / n_fib ) iflogger.info("Returning the intersecting point connectivity matrix") return connectivity_matrix, final_fiber_ids def create_endpoints_array(fib, voxelSize): """Create the endpoints arrays for each fiber. 
Parameters ---------- fib : array-like the fibers data voxelSize : tuple 3-tuple containing the voxel size of the ROI image Returns ------- endpoints : ndarray of size [#fibers, 2, 3] containing for each fiber the index of its first and last point in the voxelSize volume endpointsmm : ndarray of size [#fibers, 2, 3] endpoints in millimeter coordinates """ # Init n = len(fib) endpoints = np.zeros((n, 2, 3)) endpointsmm = np.zeros((n, 2, 3)) # Computation for each fiber for i, fi in enumerate(fib): f = fi[0] # store startpoint endpoints[i, 0, :] = f[0, :] # store endpoint endpoints[i, 1, :] = f[-1, :] # store startpoint endpointsmm[i, 0, :] = f[0, :] # store endpoint endpointsmm[i, 1, :] = f[-1, :] # Translate from mm to index endpoints[i, 0, 0] = int(endpoints[i, 0, 0] / float(voxelSize[0])) endpoints[i, 0, 1] = int(endpoints[i, 0, 1] / float(voxelSize[1])) endpoints[i, 0, 2] = int(endpoints[i, 0, 2] / float(voxelSize[2])) endpoints[i, 1, 0] = int(endpoints[i, 1, 0] / float(voxelSize[0])) endpoints[i, 1, 1] = int(endpoints[i, 1, 1] / float(voxelSize[1])) endpoints[i, 1, 2] = int(endpoints[i, 1, 2] / float(voxelSize[2])) # Return the matrices iflogger.info("Returning the endpoint matrix") return (endpoints, endpointsmm) def cmat( track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_name, endpoint_name, intersections=False, ): """Create the connection matrix for each resolution using fibers and ROIs.""" import scipy.io as sio stats = {} iflogger.info("Running cmat function") # Identify the endpoints of each fiber en_fname = op.abspath(endpoint_name + "_endpoints.npy") en_fnamemm = op.abspath(endpoint_name + "_endpointsmm.npy") iflogger.info("Reading Trackvis file %s", track_file) fib, hdr = nb.trackvis.read(track_file, False) stats["orig_n_fib"] = len(fib) roi = nb.load(roi_file) # Preserve on-disk type unless scaled roiData = np.asanyarray(roi.dataobj) roiVoxelSize = roi.header.get_zooms() (endpoints, endpointsmm) = create_endpoints_array(fib, 
roiVoxelSize) # Output endpoint arrays iflogger.info("Saving endpoint array: %s", en_fname) np.save(en_fname, endpoints) iflogger.info("Saving endpoint array in mm: %s", en_fnamemm) np.save(en_fnamemm, endpointsmm) n = len(fib) iflogger.info("Number of fibers: %i", n) # Create empty fiber label array fiberlabels = np.zeros((n, 2)) final_fiberlabels = [] final_fibers_idx = [] # Add node information from specified parcellation scheme path, name, ext = split_filename(resolution_network_file) if ext == ".pck": gp = nx.read_gpickle(resolution_network_file) elif ext == ".graphml": gp = nx.read_graphml(resolution_network_file) else: raise TypeError("Unable to read file:", resolution_network_file) nROIs = len(gp.nodes()) # add node information from parcellation if "dn_position" in gp.nodes[list(gp.nodes())[0]]: G = gp.copy() else: G = nx.Graph() for u, d in gp.nodes(data=True): G.add_node(int(u), **d) # compute a position for the node based on the mean position of the # ROI in voxel coordinates (segmentation volume ) xyz = tuple( np.mean( np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), axis=1, ) ) G.nodes[int(u)]["dn_position"] = tuple([xyz[0], xyz[2], -xyz[1]]) if intersections: iflogger.info("Filtering tractography from intersections") intersection_matrix, final_fiber_ids = create_allpoints_cmat( fib, roiData, roiVoxelSize, nROIs ) finalfibers_fname = op.abspath( endpoint_name + "_intersections_streamline_final.trk" ) stats["intersections_n_fib"] = save_fibers( hdr, fib, finalfibers_fname, final_fiber_ids ) intersection_matrix = np.matrix(intersection_matrix) I = G.copy() H = nx.from_numpy_matrix(np.matrix(intersection_matrix)) H = nx.relabel_nodes(H, lambda x: x + 1) # relabel nodes so they start at 1 I.add_weighted_edges_from( ((u, v, d["weight"]) for u, v, d in H.edges(data=True)) ) dis = 0 for i in range(endpoints.shape[0]): # ROI start => ROI end try: startROI = int( roiData[endpoints[i, 0, 0], endpoints[i, 0, 1], endpoints[i, 0, 2]] ) endROI = int( 
roiData[endpoints[i, 1, 0], endpoints[i, 1, 1], endpoints[i, 1, 2]] ) except IndexError: iflogger.error( "AN INDEXERROR EXCEPTION OCCURED FOR FIBER %s. " "PLEASE CHECK ENDPOINT GENERATION", i, ) break # Filter if startROI == 0 or endROI == 0: dis += 1 fiberlabels[i, 0] = -1 continue if startROI > nROIs or endROI > nROIs: iflogger.error( "Start or endpoint of fiber terminate in a voxel which is labeled higher" ) iflogger.error("than is expected by the parcellation node information.") iflogger.error("Start ROI: %i, End ROI: %i", startROI, endROI) iflogger.error("This needs bugfixing!") continue # Update fiber label # switch the rois in order to enforce startROI < endROI if endROI < startROI: tmp = startROI startROI = endROI endROI = tmp fiberlabels[i, 0] = startROI fiberlabels[i, 1] = endROI final_fiberlabels.append([startROI, endROI]) final_fibers_idx.append(i) # Add edge to graph if G.has_edge(startROI, endROI) and "fiblist" in G.edge[startROI][endROI]: G.edge[startROI][endROI]["fiblist"].append(i) else: G.add_edge(startROI, endROI, fiblist=[i]) # create a final fiber length array finalfiberlength = [] if intersections: final_fibers_indices = final_fiber_ids else: final_fibers_indices = final_fibers_idx for idx in final_fibers_indices: # compute length of fiber finalfiberlength.append(length(fib[idx][0])) # convert to array final_fiberlength_array = np.array(finalfiberlength) # make final fiber labels as array final_fiberlabels_array = np.array(final_fiberlabels, dtype=int) iflogger.info( "Found %i (%f percent out of %i fibers) fibers that start or " "terminate in a voxel which is not labeled. 
(orphans)", dis, dis * 100.0 / n, n, ) iflogger.info("Valid fibers: %i (%f%%)", n - dis, 100 - dis * 100.0 / n) numfib = nx.Graph() numfib.add_nodes_from(G) fibmean = numfib.copy() fibmedian = numfib.copy() fibdev = numfib.copy() for u, v, d in G.edges(data=True): G.remove_edge(u, v) di = {} if "fiblist" in d: di["number_of_fibers"] = len(d["fiblist"]) idx = np.where( (final_fiberlabels_array[:, 0] == int(u)) & (final_fiberlabels_array[:, 1] == int(v)) )[0] di["fiber_length_mean"] = float(np.mean(final_fiberlength_array[idx])) di["fiber_length_median"] = float(np.median(final_fiberlength_array[idx])) di["fiber_length_std"] = float(np.std(final_fiberlength_array[idx])) else: di["number_of_fibers"] = 0 di["fiber_length_mean"] = 0 di["fiber_length_median"] = 0 di["fiber_length_std"] = 0 if not u == v: # Fix for self loop problem G.add_edge(u, v, **di) if "fiblist" in d: numfib.add_edge(u, v, weight=di["number_of_fibers"]) fibmean.add_edge(u, v, weight=di["fiber_length_mean"]) fibmedian.add_edge(u, v, weight=di["fiber_length_median"]) fibdev.add_edge(u, v, weight=di["fiber_length_std"]) iflogger.info("Writing network as %s", matrix_name) nx.write_gpickle(G, op.abspath(matrix_name)) numfib_mlab = nx.to_numpy_matrix(numfib, dtype=int) numfib_dict = {"number_of_fibers": numfib_mlab} fibmean_mlab = nx.to_numpy_matrix(fibmean, dtype=np.float64) fibmean_dict = {"mean_fiber_length": fibmean_mlab} fibmedian_mlab = nx.to_numpy_matrix(fibmedian, dtype=np.float64) fibmedian_dict = {"median_fiber_length": fibmedian_mlab} fibdev_mlab = nx.to_numpy_matrix(fibdev, dtype=np.float64) fibdev_dict = {"fiber_length_std": fibdev_mlab} if intersections: path, name, ext = split_filename(matrix_name) intersection_matrix_name = op.abspath(name + "_intersections") + ext iflogger.info("Writing intersection network as %s", intersection_matrix_name) nx.write_gpickle(I, intersection_matrix_name) path, name, ext = split_filename(matrix_mat_name) if not ext == ".mat": ext = ".mat" matrix_mat_name = 
matrix_mat_name + ext iflogger.info("Writing matlab matrix as %s", matrix_mat_name) sio.savemat(matrix_mat_name, numfib_dict) if intersections: intersect_dict = {"intersections": intersection_matrix} intersection_matrix_mat_name = op.abspath(name + "_intersections") + ext iflogger.info("Writing intersection matrix as %s", intersection_matrix_mat_name) sio.savemat(intersection_matrix_mat_name, intersect_dict) mean_fiber_length_matrix_name = op.abspath(name + "_mean_fiber_length") + ext iflogger.info( "Writing matlab mean fiber length matrix as %s", mean_fiber_length_matrix_name ) sio.savemat(mean_fiber_length_matrix_name, fibmean_dict) median_fiber_length_matrix_name = op.abspath(name + "_median_fiber_length") + ext iflogger.info( "Writing matlab median fiber length matrix as %s", median_fiber_length_matrix_name, ) sio.savemat(median_fiber_length_matrix_name, fibmedian_dict) fiber_length_std_matrix_name = op.abspath(name + "_fiber_length_std") + ext iflogger.info( "Writing matlab fiber length deviation matrix as %s", fiber_length_std_matrix_name, ) sio.savemat(fiber_length_std_matrix_name, fibdev_dict) fiberlengths_fname = op.abspath(endpoint_name + "_final_fiberslength.npy") iflogger.info("Storing final fiber length array as %s", fiberlengths_fname) np.save(fiberlengths_fname, final_fiberlength_array) fiberlabels_fname = op.abspath(endpoint_name + "_filtered_fiberslabel.npy") iflogger.info("Storing all fiber labels (with orphans) as %s", fiberlabels_fname) np.save(fiberlabels_fname, np.array(fiberlabels, dtype=np.int32)) fiberlabels_noorphans_fname = op.abspath(endpoint_name + "_final_fiberslabels.npy") iflogger.info( "Storing final fiber labels (no orphans) as %s", fiberlabels_noorphans_fname ) np.save(fiberlabels_noorphans_fname, final_fiberlabels_array) iflogger.info("Filtering tractography - keeping only no orphan fibers") finalfibers_fname = op.abspath(endpoint_name + "_streamline_final.trk") stats["endpoint_n_fib"] = save_fibers(hdr, fib, finalfibers_fname, 
final_fibers_idx) stats["endpoints_percent"] = ( float(stats["endpoint_n_fib"]) / float(stats["orig_n_fib"]) * 100 ) stats["intersections_percent"] = ( float(stats["intersections_n_fib"]) / float(stats["orig_n_fib"]) * 100 ) out_stats_file = op.abspath(endpoint_name + "_statistics.mat") iflogger.info("Saving matrix creation statistics as %s", out_stats_file) sio.savemat(out_stats_file, stats) def save_fibers(oldhdr, oldfib, fname, indices): """Stores a new trackvis file fname using only given indices""" hdrnew = oldhdr.copy() outstreams = [] for i in indices: outstreams.append(oldfib[i]) n_fib_out = len(outstreams) hdrnew["n_count"] = n_fib_out iflogger.info("Writing final non-orphan fibers as %s", fname) nb.trackvis.write(fname, outstreams, hdrnew) return n_fib_out class CreateMatrixInputSpec(TraitedSpec): roi_file = File(exists=True, mandatory=True, desc="Freesurfer aparc+aseg file") tract_file = File(exists=True, mandatory=True, desc="Trackvis tract file") resolution_network_file = File( exists=True, mandatory=True, desc="Parcellation files from Connectome Mapping Toolkit", ) count_region_intersections = traits.Bool( False, usedefault=True, desc="Counts all of the fiber-region traversals in the connectivity matrix (requires significantly more computational time)", ) out_matrix_file = File( genfile=True, desc="NetworkX graph describing the connectivity" ) out_matrix_mat_file = File( "cmatrix.mat", usedefault=True, desc="Matlab matrix describing the connectivity" ) out_mean_fiber_length_matrix_mat_file = File( genfile=True, desc="Matlab matrix describing the mean fiber lengths between each node.", ) out_median_fiber_length_matrix_mat_file = File( genfile=True, desc="Matlab matrix describing the mean fiber lengths between each node.", ) out_fiber_length_std_matrix_mat_file = File( genfile=True, desc="Matlab matrix describing the deviation in fiber lengths connecting each node.", ) out_intersection_matrix_mat_file = File( genfile=True, desc="Matlab connectivity 
matrix if all region/fiber intersections are counted.", ) out_endpoint_array_name = File( genfile=True, desc="Name for the generated endpoint arrays" ) class CreateMatrixOutputSpec(TraitedSpec): matrix_file = File(desc="NetworkX graph describing the connectivity", exists=True) intersection_matrix_file = File( desc="NetworkX graph describing the connectivity", exists=True ) matrix_files = OutputMultiPath( File( desc="All of the gpickled network files output by this interface", exists=True, ) ) matlab_matrix_files = OutputMultiPath( File(desc="All of the MATLAB .mat files output by this interface", exists=True) ) matrix_mat_file = File( desc="Matlab matrix describing the connectivity", exists=True ) intersection_matrix_mat_file = File( desc="Matlab matrix describing the mean fiber lengths between each node.", exists=True, ) mean_fiber_length_matrix_mat_file = File( desc="Matlab matrix describing the mean fiber lengths between each node.", exists=True, ) median_fiber_length_matrix_mat_file = File( desc="Matlab matrix describing the median fiber lengths between each node.", exists=True, ) fiber_length_std_matrix_mat_file = File( desc="Matlab matrix describing the deviation in fiber lengths connecting each node.", exists=True, ) endpoint_file = File( desc="Saved Numpy array with the endpoints of each fiber", exists=True ) endpoint_file_mm = File( desc="Saved Numpy array with the endpoints of each fiber (in millimeters)", exists=True, ) fiber_length_file = File( desc="Saved Numpy array with the lengths of each fiber", exists=True ) fiber_label_file = File( desc="Saved Numpy array with the labels for each fiber", exists=True ) fiber_labels_noorphans = File( desc="Saved Numpy array with the labels for each non-orphan fiber", exists=True ) filtered_tractography = File( desc="TrackVis file containing only those fibers originate in one and terminate in another region", exists=True, ) filtered_tractography_by_intersections = File( desc="TrackVis file containing all fibers 
which connect two regions", exists=True, ) filtered_tractographies = OutputMultiPath( File( desc="TrackVis file containing only those fibers originate in one and terminate in another region", exists=True, ) ) stats_file = File( desc="Saved Matlab .mat file with the number of fibers saved at each stage", exists=True, ) class CreateMatrix(BaseInterface): """ Performs connectivity mapping and outputs the result as a NetworkX graph and a Matlab matrix Example ------- >>> import nipype.interfaces.cmtk as cmtk >>> conmap = cmtk.CreateMatrix() >>> conmap.roi_file = 'fsLUT_aparc+aseg.nii' >>> conmap.tract_file = 'fibers.trk' >>> conmap.run() # doctest: +SKIP """ input_spec = CreateMatrixInputSpec output_spec = CreateMatrixOutputSpec def _run_interface(self, runtime): if isdefined(self.inputs.out_matrix_file): path, name, _ = split_filename(self.inputs.out_matrix_file) matrix_file = op.abspath(name + ".pck") else: matrix_file = self._gen_outfilename(".pck") matrix_mat_file = op.abspath(self.inputs.out_matrix_mat_file) path, name, ext = split_filename(matrix_mat_file) if not ext == ".mat": ext = ".mat" matrix_mat_file = matrix_mat_file + ext if isdefined(self.inputs.out_mean_fiber_length_matrix_mat_file): mean_fiber_length_matrix_mat_file = op.abspath( self.inputs.out_mean_fiber_length_matrix_mat_file ) else: mean_fiber_length_matrix_name = op.abspath( self._gen_outfilename("_mean_fiber_length.mat") ) if isdefined(self.inputs.out_median_fiber_length_matrix_mat_file): median_fiber_length_matrix_mat_file = op.abspath( self.inputs.out_median_fiber_length_matrix_mat_file ) else: median_fiber_length_matrix_name = op.abspath( self._gen_outfilename("_median_fiber_length.mat") ) if isdefined(self.inputs.out_fiber_length_std_matrix_mat_file): fiber_length_std_matrix_mat_file = op.abspath( self.inputs.out_fiber_length_std_matrix_mat_file ) else: fiber_length_std_matrix_name = op.abspath( self._gen_outfilename("_fiber_length_std.mat") ) if not 
isdefined(self.inputs.out_endpoint_array_name): _, endpoint_name, _ = split_filename(self.inputs.tract_file) endpoint_name = op.abspath(endpoint_name) else: endpoint_name = op.abspath(self.inputs.out_endpoint_array_name) cmat( self.inputs.tract_file, self.inputs.roi_file, self.inputs.resolution_network_file, matrix_file, matrix_mat_file, endpoint_name, self.inputs.count_region_intersections, ) return runtime def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_matrix_file): path, name, _ = split_filename(self.inputs.out_matrix_file) out_matrix_file = op.abspath(name + ".pck") out_intersection_matrix_file = op.abspath(name + "_intersections.pck") else: out_matrix_file = op.abspath(self._gen_outfilename(".pck")) out_intersection_matrix_file = op.abspath( self._gen_outfilename("_intersections.pck") ) outputs["matrix_file"] = out_matrix_file outputs["intersection_matrix_file"] = out_intersection_matrix_file matrix_mat_file = op.abspath(self.inputs.out_matrix_mat_file) path, name, ext = split_filename(matrix_mat_file) if not ext == ".mat": ext = ".mat" matrix_mat_file = matrix_mat_file + ext outputs["matrix_mat_file"] = matrix_mat_file if isdefined(self.inputs.out_mean_fiber_length_matrix_mat_file): outputs["mean_fiber_length_matrix_mat_file"] = op.abspath( self.inputs.out_mean_fiber_length_matrix_mat_file ) else: outputs["mean_fiber_length_matrix_mat_file"] = op.abspath( self._gen_outfilename("_mean_fiber_length.mat") ) if isdefined(self.inputs.out_median_fiber_length_matrix_mat_file): outputs["median_fiber_length_matrix_mat_file"] = op.abspath( self.inputs.out_median_fiber_length_matrix_mat_file ) else: outputs["median_fiber_length_matrix_mat_file"] = op.abspath( self._gen_outfilename("_median_fiber_length.mat") ) if isdefined(self.inputs.out_fiber_length_std_matrix_mat_file): outputs["fiber_length_std_matrix_mat_file"] = op.abspath( self.inputs.out_fiber_length_std_matrix_mat_file ) else: outputs["fiber_length_std_matrix_mat_file"] 
= op.abspath( self._gen_outfilename("_fiber_length_std.mat") ) if isdefined(self.inputs.out_intersection_matrix_mat_file): outputs["intersection_matrix_mat_file"] = op.abspath( self.inputs.out_intersection_matrix_mat_file ) else: outputs["intersection_matrix_mat_file"] = op.abspath( self._gen_outfilename("_intersections.mat") ) if isdefined(self.inputs.out_endpoint_array_name): endpoint_name = self.inputs.out_endpoint_array_name outputs["endpoint_file"] = op.abspath( self.inputs.out_endpoint_array_name + "_endpoints.npy" ) outputs["endpoint_file_mm"] = op.abspath( self.inputs.out_endpoint_array_name + "_endpointsmm.npy" ) outputs["fiber_length_file"] = op.abspath( self.inputs.out_endpoint_array_name + "_final_fiberslength.npy" ) outputs["fiber_label_file"] = op.abspath( self.inputs.out_endpoint_array_name + "_filtered_fiberslabel.npy" ) outputs["fiber_labels_noorphans"] = op.abspath( self.inputs.out_endpoint_array_name + "_final_fiberslabels.npy" ) else: _, endpoint_name, _ = split_filename(self.inputs.tract_file) outputs["endpoint_file"] = op.abspath(endpoint_name + "_endpoints.npy") outputs["endpoint_file_mm"] = op.abspath(endpoint_name + "_endpointsmm.npy") outputs["fiber_length_file"] = op.abspath( endpoint_name + "_final_fiberslength.npy" ) outputs["fiber_label_file"] = op.abspath( endpoint_name + "_filtered_fiberslabel.npy" ) outputs["fiber_labels_noorphans"] = op.abspath( endpoint_name + "_final_fiberslabels.npy" ) if self.inputs.count_region_intersections: outputs["matrix_files"] = [out_matrix_file, out_intersection_matrix_file] outputs["matlab_matrix_files"] = [ outputs["matrix_mat_file"], outputs["mean_fiber_length_matrix_mat_file"], outputs["median_fiber_length_matrix_mat_file"], outputs["fiber_length_std_matrix_mat_file"], outputs["intersection_matrix_mat_file"], ] else: outputs["matrix_files"] = [out_matrix_file] outputs["matlab_matrix_files"] = [ outputs["matrix_mat_file"], outputs["mean_fiber_length_matrix_mat_file"], 
outputs["median_fiber_length_matrix_mat_file"], outputs["fiber_length_std_matrix_mat_file"], ] outputs["filtered_tractography"] = op.abspath( endpoint_name + "_streamline_final.trk" ) outputs["filtered_tractography_by_intersections"] = op.abspath( endpoint_name + "_intersections_streamline_final.trk" ) outputs["filtered_tractographies"] = [ outputs["filtered_tractography"], outputs["filtered_tractography_by_intersections"], ] outputs["stats_file"] = op.abspath(endpoint_name + "_statistics.mat") return outputs def _gen_outfilename(self, ext): if ext.endswith("mat") and isdefined(self.inputs.out_matrix_mat_file): _, name, _ = split_filename(self.inputs.out_matrix_mat_file) elif isdefined(self.inputs.out_matrix_file): _, name, _ = split_filename(self.inputs.out_matrix_file) else: _, name, _ = split_filename(self.inputs.tract_file) return name + ext class ROIGenInputSpec(BaseInterfaceInputSpec): aparc_aseg_file = File( exists=True, mandatory=True, desc="Freesurfer aparc+aseg file" ) LUT_file = File( exists=True, xor=["use_freesurfer_LUT"], desc="Custom lookup table (cf. 
FreeSurferColorLUT.txt)", ) use_freesurfer_LUT = traits.Bool( xor=["LUT_file"], desc="Boolean value; Set to True to use default Freesurfer LUT, False for custom LUT", ) freesurfer_dir = Directory( requires=["use_freesurfer_LUT"], desc="Freesurfer main directory" ) out_roi_file = File( genfile=True, desc="Region of Interest file for connectivity mapping" ) out_dict_file = File(genfile=True, desc="Label dictionary saved in Pickle format") class ROIGenOutputSpec(TraitedSpec): roi_file = File(desc="Region of Interest file for connectivity mapping") dict_file = File(desc="Label dictionary saved in Pickle format") class ROIGen(BaseInterface): """ Generates a ROI file for connectivity mapping and a dictionary file containing relevant node information Example ------- >>> import nipype.interfaces.cmtk as cmtk >>> rg = cmtk.ROIGen() >>> rg.inputs.aparc_aseg_file = 'aparc+aseg.nii' >>> rg.inputs.use_freesurfer_LUT = True >>> rg.inputs.freesurfer_dir = '/usr/local/freesurfer' >>> rg.run() # doctest: +SKIP The label dictionary is written to disk using Pickle. 
Resulting data can be loaded using: >>> file = open("FreeSurferColorLUT_adapted_aparc+aseg_out.pck", "r") >>> file = open("fsLUT_aparc+aseg.pck", "r") >>> labelDict = pickle.load(file) # doctest: +SKIP >>> labelDict # doctest: +SKIP """ input_spec = ROIGenInputSpec output_spec = ROIGenOutputSpec def _run_interface(self, runtime): aparc_aseg_file = self.inputs.aparc_aseg_file aparcpath, aparcname, aparcext = split_filename(aparc_aseg_file) iflogger.info("Using Aparc+Aseg file: %s", aparcname + aparcext) niiAPARCimg = nb.load(aparc_aseg_file) # Preserve on-disk type niiAPARCdata = np.asanyarray(niiAPARCimg.dataobj) niiDataLabels = np.unique(niiAPARCdata) numDataLabels = np.size(niiDataLabels) iflogger.info("Number of labels in image: %s", numDataLabels) write_dict = True if self.inputs.use_freesurfer_LUT: self.LUT_file = self.inputs.freesurfer_dir + "/FreeSurferColorLUT.txt" iflogger.info("Using Freesurfer LUT: %s", self.LUT_file) prefix = "fsLUT" elif not self.inputs.use_freesurfer_LUT and isdefined(self.inputs.LUT_file): self.LUT_file = op.abspath(self.inputs.LUT_file) lutpath, lutname, lutext = split_filename(self.LUT_file) iflogger.info("Using Custom LUT file: %s", lutname + lutext) prefix = lutname else: prefix = "hardcoded" write_dict = False if isdefined(self.inputs.out_roi_file): roi_file = op.abspath(self.inputs.out_roi_file) else: roi_file = op.abspath(prefix + "_" + aparcname + ".nii") if isdefined(self.inputs.out_dict_file): dict_file = op.abspath(self.inputs.out_dict_file) else: dict_file = op.abspath(prefix + "_" + aparcname + ".pck") if write_dict: iflogger.info("Lookup table: %s", op.abspath(self.LUT_file)) LUTlabelsRGBA = np.loadtxt( self.LUT_file, skiprows=4, usecols=[0, 1, 2, 3, 4, 5], comments="#", dtype={ "names": ("index", "label", "R", "G", "B", "A"), "formats": ("int", "|S30", "int", "int", "int", "int"), }, ) numLUTLabels = np.size(LUTlabelsRGBA) if numLUTLabels < numDataLabels: iflogger.error( "LUT file provided does not contain all of the 
regions in the image" ) iflogger.error("Removing unmapped regions") iflogger.info("Number of labels in LUT: %s", numLUTLabels) LUTlabelDict = {} """ Create dictionary for input LUT table""" for labels in range(0, numLUTLabels): LUTlabelDict[LUTlabelsRGBA[labels][0]] = [ LUTlabelsRGBA[labels][1], LUTlabelsRGBA[labels][2], LUTlabelsRGBA[labels][3], LUTlabelsRGBA[labels][4], LUTlabelsRGBA[labels][5], ] iflogger.info("Printing LUT label dictionary") iflogger.info(LUTlabelDict) mapDict = {} MAPPING = [ [1, 2012], [2, 2019], [3, 2032], [4, 2014], [5, 2020], [6, 2018], [7, 2027], [8, 2028], [9, 2003], [10, 2024], [11, 2017], [12, 2026], [13, 2002], [14, 2023], [15, 2010], [16, 2022], [17, 2031], [18, 2029], [19, 2008], [20, 2025], [21, 2005], [22, 2021], [23, 2011], [24, 2013], [25, 2007], [26, 2016], [27, 2006], [28, 2033], [29, 2009], [30, 2015], [31, 2001], [32, 2030], [33, 2034], [34, 2035], [35, 49], [36, 50], [37, 51], [38, 52], [39, 58], [40, 53], [41, 54], [42, 1012], [43, 1019], [44, 1032], [45, 1014], [46, 1020], [47, 1018], [48, 1027], [49, 1028], [50, 1003], [51, 1024], [52, 1017], [53, 1026], [54, 1002], [55, 1023], [56, 1010], [57, 1022], [58, 1031], [59, 1029], [60, 1008], [61, 1025], [62, 1005], [63, 1021], [64, 1011], [65, 1013], [66, 1007], [67, 1016], [68, 1006], [69, 1033], [70, 1009], [71, 1015], [72, 1001], [73, 1030], [74, 1034], [75, 1035], [76, 10], [77, 11], [78, 12], [79, 13], [80, 26], [81, 17], [82, 18], [83, 16], ] """ Create empty grey matter mask, Populate with only those regions defined in the mapping.""" niiGM = np.zeros(niiAPARCdata.shape, dtype=np.uint) for ma in MAPPING: niiGM[niiAPARCdata == ma[1]] = ma[0] mapDict[ma[0]] = ma[1] iflogger.info("Grey matter mask created") greyMaskLabels = np.unique(niiGM) numGMLabels = np.size(greyMaskLabels) iflogger.info("Number of grey matter labels: %s", numGMLabels) labelDict = {} GMlabelDict = {} for label in greyMaskLabels: try: mapDict[label] if write_dict: GMlabelDict["originalID"] = 
mapDict[label] except: iflogger.info("Label %s not in provided mapping", label) if write_dict: del GMlabelDict GMlabelDict = {} GMlabelDict["labels"] = LUTlabelDict[label][0] GMlabelDict["colors"] = [ LUTlabelDict[label][1], LUTlabelDict[label][2], LUTlabelDict[label][3], ] GMlabelDict["a"] = LUTlabelDict[label][4] labelDict[label] = GMlabelDict roi_image = nb.Nifti1Image(niiGM, niiAPARCimg.affine, niiAPARCimg.header) iflogger.info("Saving ROI File to %s", roi_file) nb.save(roi_image, roi_file) if write_dict: iflogger.info("Saving Dictionary File to %s in Pickle format", dict_file) with open(dict_file, "w") as f: pickle.dump(labelDict, f) return runtime def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.out_roi_file): outputs["roi_file"] = op.abspath(self.inputs.out_roi_file) else: outputs["roi_file"] = op.abspath(self._gen_outfilename("nii")) if isdefined(self.inputs.out_dict_file): outputs["dict_file"] = op.abspath(self.inputs.out_dict_file) else: outputs["dict_file"] = op.abspath(self._gen_outfilename("pck")) return outputs def _gen_outfilename(self, ext): _, name, _ = split_filename(self.inputs.aparc_aseg_file) if self.inputs.use_freesurfer_LUT: prefix = "fsLUT" elif not self.inputs.use_freesurfer_LUT and isdefined(self.inputs.LUT_file): lutpath, lutname, lutext = split_filename(self.inputs.LUT_file) prefix = lutname else: prefix = "hardcoded" return prefix + "_" + name + "." 
+ ext def create_nodes(roi_file, resolution_network_file, out_filename): G = nx.Graph() gp = nx.read_graphml(resolution_network_file) roi_image = nb.load(roi_file) # Preserve on-disk type unless scaled roiData = np.asanyarray(roi_image.dataobj) for u, d in gp.nodes(data=True): G.add_node(int(u), **d) xyz = tuple( np.mean( np.where(np.flipud(roiData) == int(d["dn_correspondence_id"])), axis=1 ) ) G.nodes[int(u)]["dn_position"] = tuple([xyz[0], xyz[2], -xyz[1]]) nx.write_gpickle(G, out_filename) return out_filename class CreateNodesInputSpec(BaseInterfaceInputSpec): roi_file = File(exists=True, mandatory=True, desc="Region of interest file") resolution_network_file = File( exists=True, mandatory=True, desc="Parcellation file from Connectome Mapping Toolkit", ) out_filename = File( "nodenetwork.pck", usedefault=True, desc="Output gpickled network with the nodes defined.", ) class CreateNodesOutputSpec(TraitedSpec): node_network = File(desc="Output gpickled network with the nodes defined.") class CreateNodes(BaseInterface): """ Generates a NetworkX graph containing nodes at the centroid of each region in the input ROI file. Node data is added from the resolution network file. 
Example ------- >>> import nipype.interfaces.cmtk as cmtk >>> mknode = cmtk.CreateNodes() >>> mknode.inputs.roi_file = 'ROI_scale500.nii.gz' >>> mknode.run() # doctest: +SKIP """ input_spec = CreateNodesInputSpec output_spec = CreateNodesOutputSpec def _run_interface(self, runtime): iflogger.info("Creating nodes...") create_nodes( self.inputs.roi_file, self.inputs.resolution_network_file, self.inputs.out_filename, ) iflogger.info("Saving node network to %s", op.abspath(self.inputs.out_filename)) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["node_network"] = op.abspath(self.inputs.out_filename) return outputs nipype-1.7.0/nipype/interfaces/cmtk/convert.py000066400000000000000000000240521413403311400214370ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os import os.path as op import datetime import string import networkx as nx from ...utils.filemanip import split_filename from ..base import ( BaseInterfaceInputSpec, traits, File, TraitedSpec, InputMultiPath, isdefined, ) from .base import CFFBaseInterface, have_cfflib class CFFConverterInputSpec(BaseInterfaceInputSpec): graphml_networks = InputMultiPath( File(exists=True), desc="list of graphML networks" ) gpickled_networks = InputMultiPath( File(exists=True), desc="list of gpickled Networkx graphs" ) gifti_surfaces = InputMultiPath(File(exists=True), desc="list of GIFTI surfaces") gifti_labels = InputMultiPath(File(exists=True), desc="list of GIFTI labels") nifti_volumes = InputMultiPath(File(exists=True), desc="list of NIFTI volumes") tract_files = InputMultiPath(File(exists=True), desc="list of Trackvis fiber files") timeseries_files = InputMultiPath( File(exists=True), desc="list of HDF5 timeseries files" ) script_files = InputMultiPath( File(exists=True), desc="list of script files to include" ) data_files = InputMultiPath( File(exists=True), desc="list of external data files (i.e. 
Numpy, HD5, XML) " ) title = traits.Str(desc="Connectome Title") creator = traits.Str(desc="Creator") email = traits.Str(desc="Email address") publisher = traits.Str(desc="Publisher") license = traits.Str(desc="License") rights = traits.Str(desc="Rights") references = traits.Str(desc="References") relation = traits.Str(desc="Relation") species = traits.Str("Homo sapiens", desc="Species", usedefault=True) description = traits.Str( "Created with the Nipype CFF converter", desc="Description", usedefault=True ) out_file = File("connectome.cff", usedefault=True, desc="Output connectome file") class CFFConverterOutputSpec(TraitedSpec): connectome_file = File(exists=True, desc="Output connectome file") class CFFConverter(CFFBaseInterface): """ Creates a Connectome File Format (CFF) file from input networks, surfaces, volumes, tracts, etcetera.... Example ------- >>> import nipype.interfaces.cmtk as cmtk >>> cvt = cmtk.CFFConverter() >>> cvt.inputs.title = 'subject 1' >>> cvt.inputs.gifti_surfaces = ['lh.pial_converted.gii', 'rh.pial_converted.gii'] >>> cvt.inputs.tract_files = ['streamlines.trk'] >>> cvt.inputs.gpickled_networks = ['network0.gpickle'] >>> cvt.run() # doctest: +SKIP """ input_spec = CFFConverterInputSpec output_spec = CFFConverterOutputSpec def _run_interface(self, runtime): import cfflib as cf a = cf.connectome() if isdefined(self.inputs.title): a.connectome_meta.set_title(self.inputs.title) else: a.connectome_meta.set_title(self.inputs.out_file) if isdefined(self.inputs.creator): a.connectome_meta.set_creator(self.inputs.creator) else: # Probably only works on some OSes... 
a.connectome_meta.set_creator(os.getenv("USER")) if isdefined(self.inputs.email): a.connectome_meta.set_email(self.inputs.email) if isdefined(self.inputs.publisher): a.connectome_meta.set_publisher(self.inputs.publisher) if isdefined(self.inputs.license): a.connectome_meta.set_license(self.inputs.license) if isdefined(self.inputs.rights): a.connectome_meta.set_rights(self.inputs.rights) if isdefined(self.inputs.references): a.connectome_meta.set_references(self.inputs.references) if isdefined(self.inputs.relation): a.connectome_meta.set_relation(self.inputs.relation) if isdefined(self.inputs.species): a.connectome_meta.set_species(self.inputs.species) if isdefined(self.inputs.description): a.connectome_meta.set_description(self.inputs.description) a.connectome_meta.set_created(datetime.date.today()) count = 0 if isdefined(self.inputs.graphml_networks): for ntwk in self.inputs.graphml_networks: # There must be a better way to deal with the unique name problem # (i.e. tracks and networks can't use the same name, and previously we were pulling them both from the input files) ntwk_name = "Network {cnt}".format(cnt=count) a.add_connectome_network_from_graphml(ntwk_name, ntwk) count += 1 if isdefined(self.inputs.gpickled_networks): unpickled = [] for ntwk in self.inputs.gpickled_networks: _, ntwk_name, _ = split_filename(ntwk) unpickled = nx.read_gpickle(ntwk) cnet = cf.CNetwork(name=ntwk_name) cnet.set_with_nxgraph(unpickled) a.add_connectome_network(cnet) count += 1 count = 0 if isdefined(self.inputs.tract_files): for trk in self.inputs.tract_files: _, trk_name, _ = split_filename(trk) ctrack = cf.CTrack(trk_name, trk) a.add_connectome_track(ctrack) count += 1 count = 0 if isdefined(self.inputs.gifti_surfaces): for surf in self.inputs.gifti_surfaces: _, surf_name, _ = split_filename(surf) csurf = cf.CSurface.create_from_gifti( "Surface %d - %s" % (count, surf_name), surf ) csurf.fileformat = "Gifti" csurf.dtype = "Surfaceset" a.add_connectome_surface(csurf) count += 1 
count = 0 if isdefined(self.inputs.gifti_labels): for label in self.inputs.gifti_labels: _, label_name, _ = split_filename(label) csurf = cf.CSurface.create_from_gifti( "Surface Label %d - %s" % (count, label_name), label ) csurf.fileformat = "Gifti" csurf.dtype = "Labels" a.add_connectome_surface(csurf) count += 1 if isdefined(self.inputs.nifti_volumes): for vol in self.inputs.nifti_volumes: _, vol_name, _ = split_filename(vol) cvol = cf.CVolume.create_from_nifti(vol_name, vol) a.add_connectome_volume(cvol) if isdefined(self.inputs.script_files): for script in self.inputs.script_files: _, script_name, _ = split_filename(script) cscript = cf.CScript.create_from_file(script_name, script) a.add_connectome_script(cscript) if isdefined(self.inputs.data_files): for data in self.inputs.data_files: _, data_name, _ = split_filename(data) cda = cf.CData(name=data_name, src=data, fileformat="NumPy") if not string.find(data_name, "lengths") == -1: cda.dtype = "FinalFiberLengthArray" if not string.find(data_name, "endpoints") == -1: cda.dtype = "FiberEndpoints" if not string.find(data_name, "labels") == -1: cda.dtype = "FinalFiberLabels" a.add_connectome_data(cda) a.print_summary() _, name, ext = split_filename(self.inputs.out_file) if not ext == ".cff": ext = ".cff" cf.save_to_cff(a, op.abspath(name + ext)) return runtime def _list_outputs(self): outputs = self._outputs().get() _, name, ext = split_filename(self.inputs.out_file) if not ext == ".cff": ext = ".cff" outputs["connectome_file"] = op.abspath(name + ext) return outputs class MergeCNetworksInputSpec(BaseInterfaceInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, desc="List of CFF files to extract networks from", ) out_file = File( "merged_network_connectome.cff", usedefault=True, desc="Output CFF file with all the networks added", ) class MergeCNetworksOutputSpec(TraitedSpec): connectome_file = File( exists=True, desc="Output CFF file with all the networks added" ) class 
MergeCNetworks(CFFBaseInterface): """Merges networks from multiple CFF files into one new CFF file. Example ------- >>> import nipype.interfaces.cmtk as cmtk >>> mrg = cmtk.MergeCNetworks() >>> mrg.inputs.in_files = ['subj1.cff','subj2.cff'] >>> mrg.run() # doctest: +SKIP """ input_spec = MergeCNetworksInputSpec output_spec = MergeCNetworksOutputSpec def _run_interface(self, runtime): import cfflib as cf extracted_networks = [] for i, con in enumerate(self.inputs.in_files): mycon = cf.load(con) nets = mycon.get_connectome_network() for ne in nets: # here, you might want to skip networks with a given # metadata information ne.load() contitle = mycon.get_connectome_meta().get_title() ne.set_name(str(i) + ": " + contitle + " - " + ne.get_name()) ne.set_src(ne.get_name()) extracted_networks.append(ne) # Add networks to new connectome newcon = cf.connectome( title="All CNetworks", connectome_network=extracted_networks ) # Setting additional metadata metadata = newcon.get_connectome_meta() metadata.set_creator("My Name") metadata.set_email("My Email") _, name, ext = split_filename(self.inputs.out_file) if not ext == ".cff": ext = ".cff" cf.save_to_cff(newcon, op.abspath(name + ext)) return runtime def _list_outputs(self): outputs = self._outputs().get() _, name, ext = split_filename(self.inputs.out_file) if not ext == ".cff": ext = ".cff" outputs["connectome_file"] = op.abspath(name + ext) return outputs nipype-1.7.0/nipype/interfaces/cmtk/nbs.py000066400000000000000000000151171413403311400205430ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os.path as op import numpy as np import networkx as nx from ... 
import logging from ..base import ( LibraryBaseInterface, BaseInterfaceInputSpec, traits, File, TraitedSpec, InputMultiPath, OutputMultiPath, isdefined, ) from .base import have_cv iflogger = logging.getLogger("nipype.interface") def ntwks_to_matrices(in_files, edge_key): first = nx.read_gpickle(in_files[0]) files = len(in_files) nodes = len(first.nodes()) matrix = np.zeros((nodes, nodes, files)) for idx, name in enumerate(in_files): graph = nx.read_gpickle(name) for u, v, d in graph.edges(data=True): try: graph[u][v]["weight"] = d[ edge_key ] # Setting the edge requested edge value as weight value except: raise KeyError( "the graph edges do not have {} attribute".format(edge_key) ) matrix[:, :, idx] = nx.to_numpy_matrix(graph) # Retrieve the matrix return matrix class NetworkBasedStatisticInputSpec(BaseInterfaceInputSpec): in_group1 = InputMultiPath( File(exists=True), mandatory=True, desc="Networks for the first group of subjects", ) in_group2 = InputMultiPath( File(exists=True), mandatory=True, desc="Networks for the second group of subjects", ) node_position_network = File( desc="An optional network used to position the nodes for the output networks" ) number_of_permutations = traits.Int( 1000, usedefault=True, desc="Number of permutations to perform" ) threshold = traits.Float(3, usedefault=True, desc="T-statistic threshold") t_tail = traits.Enum( "left", "right", "both", usedefault=True, desc='Can be one of "left", "right", or "both"', ) edge_key = traits.Str( "number_of_fibers", usedefault=True, desc='Usually "number_of_fibers, "fiber_length_mean", "fiber_length_std" for matrices made with CMTK' 'Sometimes "weight" or "value" for functional networks.', ) out_nbs_network = File(desc="Output network with edges identified by the NBS") out_nbs_pval_network = File( desc="Output network with p-values to weight the edges identified by the NBS" ) class NetworkBasedStatisticOutputSpec(TraitedSpec): nbs_network = File( exists=True, desc="Output network with edges 
identified by the NBS" ) nbs_pval_network = File( exists=True, desc="Output network with p-values to weight the edges identified by the NBS", ) network_files = OutputMultiPath( File(exists=True), desc="Output network with edges identified by the NBS" ) class NetworkBasedStatistic(LibraryBaseInterface): """ Calculates and outputs the average network given a set of input NetworkX gpickle files See Also -------- For documentation of Network-based statistic parameters: https://github.com/LTS5/connectomeviewer/blob/master/cviewer/libs/pyconto/groupstatistics/nbs/_nbs.py Example ------- >>> import nipype.interfaces.cmtk as cmtk >>> nbs = cmtk.NetworkBasedStatistic() >>> nbs.inputs.in_group1 = ['subj1.pck', 'subj2.pck'] # doctest: +SKIP >>> nbs.inputs.in_group2 = ['pat1.pck', 'pat2.pck'] # doctest: +SKIP >>> nbs.run() # doctest: +SKIP """ input_spec = NetworkBasedStatisticInputSpec output_spec = NetworkBasedStatisticOutputSpec _pkg = "cviewer" def _run_interface(self, runtime): from cviewer.libs.pyconto.groupstatistics import nbs THRESH = self.inputs.threshold K = self.inputs.number_of_permutations TAIL = self.inputs.t_tail edge_key = self.inputs.edge_key details = ( edge_key + "-thresh-" + str(THRESH) + "-k-" + str(K) + "-tail-" + TAIL + ".pck" ) # Fill in the data from the networks X = ntwks_to_matrices(self.inputs.in_group1, edge_key) Y = ntwks_to_matrices(self.inputs.in_group2, edge_key) PVAL, ADJ, _ = nbs.compute_nbs(X, Y, THRESH, K, TAIL) iflogger.info("p-values:") iflogger.info(PVAL) pADJ = ADJ.copy() for idx, _ in enumerate(PVAL): x, y = np.where(ADJ == idx + 1) pADJ[x, y] = PVAL[idx] # Create networkx graphs from the adjacency matrix nbsgraph = nx.from_numpy_matrix(ADJ) nbs_pval_graph = nx.from_numpy_matrix(pADJ) # Relabel nodes because they should not start at zero for our convention nbsgraph = nx.relabel_nodes(nbsgraph, lambda x: x + 1) nbs_pval_graph = nx.relabel_nodes(nbs_pval_graph, lambda x: x + 1) if isdefined(self.inputs.node_position_network): 
node_ntwk_name = self.inputs.node_position_network else: node_ntwk_name = self.inputs.in_group1[0] node_network = nx.read_gpickle(node_ntwk_name) iflogger.info( "Populating node dictionaries with attributes from %s", node_ntwk_name ) for nid, ndata in node_network.nodes(data=True): nbsgraph.nodes[nid] = ndata nbs_pval_graph.nodes[nid] = ndata path = op.abspath("NBS_Result_" + details) iflogger.info(path) nx.write_gpickle(nbsgraph, path) iflogger.info("Saving output NBS edge network as %s", path) pval_path = op.abspath("NBS_P_vals_" + details) iflogger.info(pval_path) nx.write_gpickle(nbs_pval_graph, pval_path) iflogger.info("Saving output p-value network as %s", pval_path) return runtime def _list_outputs(self): outputs = self.output_spec().get() THRESH = self.inputs.threshold K = self.inputs.number_of_permutations TAIL = self.inputs.t_tail edge_key = self.inputs.edge_key details = ( edge_key + "-thresh-" + str(THRESH) + "-k-" + str(K) + "-tail-" + TAIL + ".pck" ) path = op.abspath("NBS_Result_" + details) pval_path = op.abspath("NBS_P_vals_" + details) outputs["nbs_network"] = path outputs["nbs_pval_network"] = pval_path outputs["network_files"] = [path, pval_path] return outputs def _gen_outfilename(self, name, ext): return name + "." + ext nipype-1.7.0/nipype/interfaces/cmtk/nx.py000066400000000000000000000633201413403311400204050ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os.path as op import pickle import numpy as np import networkx as nx from ... 
import logging from ...utils.filemanip import split_filename from ..base import ( BaseInterface, BaseInterfaceInputSpec, traits, File, TraitedSpec, InputMultiPath, OutputMultiPath, isdefined, ) from .base import have_cmp iflogger = logging.getLogger("nipype.interface") def read_unknown_ntwk(ntwk): if not isinstance(ntwk, nx.classes.graph.Graph): _, _, ext = split_filename(ntwk) if ext == ".pck": ntwk = nx.read_gpickle(ntwk) elif ext == ".graphml": ntwk = nx.read_graphml(ntwk) return ntwk def remove_all_edges(ntwk): ntwktmp = ntwk.copy() edges = list(ntwktmp.edges()) for edge in edges: ntwk.remove_edge(edge[0], edge[1]) return ntwk def fix_keys_for_gexf(orig): """ GEXF Networks can be read in Gephi, however, the keys for the node and edge IDs must be converted to strings """ import networkx as nx ntwk = nx.Graph() nodes = list(orig.nodes()) edges = list(orig.edges()) for node in nodes: newnodedata = {} newnodedata.update(orig.nodes[node]) if "dn_fsname" in orig.nodes[node]: newnodedata["label"] = orig.nodes[node]["dn_fsname"] ntwk.add_node(str(node), **newnodedata) if "dn_position" in ntwk.nodes[str(node)] and "dn_position" in newnodedata: ntwk.nodes[str(node)]["dn_position"] = str(newnodedata["dn_position"]) for edge in edges: data = {} data = orig.edge[edge[0]][edge[1]] ntwk.add_edge(str(edge[0]), str(edge[1]), **data) if "fiber_length_mean" in ntwk.edge[str(edge[0])][str(edge[1])]: ntwk.edge[str(edge[0])][str(edge[1])]["fiber_length_mean"] = str( data["fiber_length_mean"] ) if "fiber_length_std" in ntwk.edge[str(edge[0])][str(edge[1])]: ntwk.edge[str(edge[0])][str(edge[1])]["fiber_length_std"] = str( data["fiber_length_std"] ) if "number_of_fibers" in ntwk.edge[str(edge[0])][str(edge[1])]: ntwk.edge[str(edge[0])][str(edge[1])]["number_of_fibers"] = str( data["number_of_fibers"] ) if "value" in ntwk.edge[str(edge[0])][str(edge[1])]: ntwk.edge[str(edge[0])][str(edge[1])]["value"] = str(data["value"]) return ntwk def add_dicts_by_key(in_dict1, in_dict2): """ 
Combines two dictionaries and adds the values for those keys that are shared """ both = {} for key1 in in_dict1: for key2 in in_dict2: if key1 == key2: both[key1] = in_dict1[key1] + in_dict2[key2] return both def average_networks(in_files, ntwk_res_file, group_id): """ Sums the edges of input networks and divides by the number of networks Writes the average network as .pck and .gexf and returns the name of the written networks """ import networkx as nx import os.path as op import scipy.io as sio iflogger.info("Creating average network for group: %s", group_id) matlab_network_list = [] if len(in_files) == 1: avg_ntwk = read_unknown_ntwk(in_files[0]) else: count_to_keep_edge = np.round(len(in_files) / 2.0) iflogger.info( "Number of networks: %i, an edge must occur in at " "least %i to remain in the average network", len(in_files), count_to_keep_edge, ) ntwk_res_file = read_unknown_ntwk(ntwk_res_file) iflogger.info( "%i nodes found in network resolution file", ntwk_res_file.number_of_nodes() ) ntwk = remove_all_edges(ntwk_res_file) counting_ntwk = ntwk.copy() # Sums all the relevant variables for index, subject in enumerate(in_files): tmp = nx.read_gpickle(subject) iflogger.info("File %s has %i edges", subject, tmp.number_of_edges()) edges = list(tmp.edges()) for edge in edges: data = {} data = tmp.edge[edge[0]][edge[1]] data["count"] = 1 if ntwk.has_edge(edge[0], edge[1]): current = {} current = ntwk.edge[edge[0]][edge[1]] data = add_dicts_by_key(current, data) ntwk.add_edge(edge[0], edge[1], **data) nodes = list(tmp.nodes()) for node in nodes: data = {} data = ntwk.nodes[node] if "value" in tmp.nodes[node]: data["value"] = data["value"] + tmp.nodes[node]["value"] ntwk.add_node(node, **data) # Divides each value by the number of files nodes = list(ntwk.nodes()) edges = list(ntwk.edges()) iflogger.info("Total network has %i edges", ntwk.number_of_edges()) avg_ntwk = nx.Graph() newdata = {} for node in nodes: data = ntwk.nodes[node] newdata = data if "value" in data: 
newdata["value"] = data["value"] / len(in_files) ntwk.nodes[node]["value"] = newdata avg_ntwk.add_node(node, **newdata) edge_dict = {} edge_dict["count"] = np.zeros( (avg_ntwk.number_of_nodes(), avg_ntwk.number_of_nodes()) ) for edge in edges: data = ntwk.edge[edge[0]][edge[1]] if ntwk.edge[edge[0]][edge[1]]["count"] >= count_to_keep_edge: for key in list(data.keys()): if not key == "count": data[key] = data[key] / len(in_files) ntwk.edge[edge[0]][edge[1]] = data avg_ntwk.add_edge(edge[0], edge[1], **data) edge_dict["count"][edge[0] - 1][edge[1] - 1] = ntwk.edge[edge[0]][edge[1]][ "count" ] iflogger.info( "After thresholding, the average network has %i edges", avg_ntwk.number_of_edges(), ) avg_edges = avg_ntwk.edges() for edge in avg_edges: data = avg_ntwk.edge[edge[0]][edge[1]] for key in list(data.keys()): if not key == "count": edge_dict[key] = np.zeros( (avg_ntwk.number_of_nodes(), avg_ntwk.number_of_nodes()) ) edge_dict[key][edge[0] - 1][edge[1] - 1] = data[key] for key in list(edge_dict.keys()): tmp = {} network_name = group_id + "_" + key + "_average.mat" matlab_network_list.append(op.abspath(network_name)) tmp[key] = edge_dict[key] sio.savemat(op.abspath(network_name), tmp) iflogger.info( "Saving average network for key: %s as %s", key, op.abspath(network_name), ) # Writes the networks and returns the name network_name = group_id + "_average.pck" nx.write_gpickle(avg_ntwk, op.abspath(network_name)) iflogger.info("Saving average network as %s", op.abspath(network_name)) avg_ntwk = fix_keys_for_gexf(avg_ntwk) network_name = group_id + "_average.gexf" nx.write_gexf(avg_ntwk, op.abspath(network_name)) iflogger.info("Saving average network as %s", op.abspath(network_name)) return network_name, matlab_network_list def compute_node_measures(ntwk, calculate_cliques=False): """ These return node-based measures """ iflogger.info("Computing node measures:") measures = {} iflogger.info("...Computing degree...") measures["degree"] = 
np.array(list(ntwk.degree().values())) iflogger.info("...Computing load centrality...") measures["load_centrality"] = np.array(list(nx.load_centrality(ntwk).values())) iflogger.info("...Computing betweenness centrality...") measures["betweenness_centrality"] = np.array( list(nx.betweenness_centrality(ntwk).values()) ) iflogger.info("...Computing degree centrality...") measures["degree_centrality"] = np.array(list(nx.degree_centrality(ntwk).values())) iflogger.info("...Computing closeness centrality...") measures["closeness_centrality"] = np.array( list(nx.closeness_centrality(ntwk).values()) ) # iflogger.info('...Computing eigenvector centrality...') # measures['eigenvector_centrality'] = np.array(nx.eigenvector_centrality(ntwk, max_iter=100000).values()) iflogger.info("...Computing triangles...") measures["triangles"] = np.array(list(nx.triangles(ntwk).values())) iflogger.info("...Computing clustering...") measures["clustering"] = np.array(list(nx.clustering(ntwk).values())) iflogger.info("...Computing k-core number") measures["core_number"] = np.array(list(nx.core_number(ntwk).values())) iflogger.info("...Identifying network isolates...") isolate_list = nx.isolates(ntwk) binarized = np.zeros((ntwk.number_of_nodes(), 1)) for value in isolate_list: value = value - 1 # Zero indexing binarized[value] = 1 measures["isolates"] = binarized if calculate_cliques: iflogger.info("...Calculating node clique number") measures["node_clique_number"] = np.array( list(nx.node_clique_number(ntwk).values()) ) iflogger.info("...Computing number of cliques for each node...") measures["number_of_cliques"] = np.array( list(nx.number_of_cliques(ntwk).values()) ) return measures def compute_edge_measures(ntwk): """ These return edge-based measures """ iflogger.info("Computing edge measures:") measures = {} # iflogger.info('...Computing google matrix...' 
#Makes really large networks (500k+ edges)) # measures['google_matrix'] = nx.google_matrix(ntwk) # iflogger.info('...Computing hub matrix...') # measures['hub_matrix'] = nx.hub_matrix(ntwk) # iflogger.info('...Computing authority matrix...') # measures['authority_matrix'] = nx.authority_matrix(ntwk) return measures def compute_dict_measures(ntwk): """ Returns a dictionary """ iflogger.info("Computing measures which return a dictionary:") measures = {} iflogger.info("...Computing rich club coefficient...") measures["rich_club_coef"] = nx.rich_club_coefficient(ntwk) return measures def compute_singlevalued_measures(ntwk, weighted=True, calculate_cliques=False): """ Returns a single value per network """ iflogger.info("Computing single valued measures:") measures = {} iflogger.info("...Computing degree assortativity (pearson number) ...") measures["degree_pearsonr"] = nx.degree_pearson_correlation_coefficient(ntwk) iflogger.info("...Computing degree assortativity...") measures["degree_assortativity"] = nx.degree_assortativity_coefficient(ntwk) iflogger.info("...Computing transitivity...") measures["transitivity"] = nx.transitivity(ntwk) iflogger.info("...Computing number of connected_components...") measures["number_connected_components"] = nx.number_connected_components(ntwk) iflogger.info("...Computing graph density...") measures["graph_density"] = nx.density(ntwk) iflogger.info("...Recording number of edges...") measures["number_of_edges"] = nx.number_of_edges(ntwk) iflogger.info("...Recording number of nodes...") measures["number_of_nodes"] = nx.number_of_nodes(ntwk) iflogger.info("...Computing average clustering...") measures["average_clustering"] = nx.average_clustering(ntwk) if nx.is_connected(ntwk): iflogger.info("...Calculating average shortest path length...") measures["average_shortest_path_length"] = nx.average_shortest_path_length( ntwk, weighted ) else: iflogger.info("...Calculating average shortest path length...") 
measures["average_shortest_path_length"] = nx.average_shortest_path_length( nx.connected_component_subgraphs(ntwk)[0], weighted ) if calculate_cliques: iflogger.info("...Computing graph clique number...") measures["graph_clique_number"] = nx.graph_clique_number( ntwk ) # out of memory error return measures def compute_network_measures(ntwk): measures = {} # iflogger.info('Identifying k-core') # measures['k_core'] = nx.k_core(ntwk) # iflogger.info('Identifying k-shell') # measures['k_shell'] = nx.k_shell(ntwk) # iflogger.info('Identifying k-crust') # measures['k_crust'] = nx.k_crust(ntwk) return measures def add_node_data(node_array, ntwk): node_ntwk = nx.Graph() newdata = {} for idx, data in ntwk.nodes(data=True): if not int(idx) == 0: newdata["value"] = node_array[int(idx) - 1] data.update(newdata) node_ntwk.add_node(int(idx), **data) return node_ntwk def add_edge_data(edge_array, ntwk, above=0, below=0): edge_ntwk = ntwk.copy() data = {} for x, row in enumerate(edge_array): for y in range(0, np.max(np.shape(edge_array[x]))): if not edge_array[x, y] == 0: data["value"] = edge_array[x, y] if data["value"] <= below or data["value"] >= above: if edge_ntwk.has_edge(x + 1, y + 1): old_edge_dict = edge_ntwk.edge[x + 1][y + 1] edge_ntwk.remove_edge(x + 1, y + 1) data.update(old_edge_dict) edge_ntwk.add_edge(x + 1, y + 1, **data) return edge_ntwk class NetworkXMetricsInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc="Input network") out_k_core = File( "k_core", usedefault=True, desc="Computed k-core network stored as a NetworkX pickle.", ) out_k_shell = File( "k_shell", usedefault=True, desc="Computed k-shell network stored as a NetworkX pickle.", ) out_k_crust = File( "k_crust", usedefault=True, desc="Computed k-crust network stored as a NetworkX pickle.", ) treat_as_weighted_graph = traits.Bool( True, usedefault=True, desc="Some network metrics can be calculated while considering only a binarized version of the graph", ) 
compute_clique_related_measures = traits.Bool( False, usedefault=True, desc="Computing clique-related measures (e.g. node clique number) can be very time consuming", ) out_global_metrics_matlab = File( genfile=True, desc="Output node metrics in MATLAB .mat format" ) out_node_metrics_matlab = File( genfile=True, desc="Output node metrics in MATLAB .mat format" ) out_edge_metrics_matlab = File( genfile=True, desc="Output edge metrics in MATLAB .mat format" ) out_pickled_extra_measures = File( "extra_measures", usedefault=True, desc="Network measures for group 1 that return dictionaries stored as a Pickle.", ) class NetworkXMetricsOutputSpec(TraitedSpec): gpickled_network_files = OutputMultiPath(File(desc="Output gpickled network files")) matlab_matrix_files = OutputMultiPath( File(desc="Output network metrics in MATLAB .mat format") ) global_measures_matlab = File(desc="Output global metrics in MATLAB .mat format") node_measures_matlab = File(desc="Output node metrics in MATLAB .mat format") edge_measures_matlab = File(desc="Output edge metrics in MATLAB .mat format") node_measure_networks = OutputMultiPath( File(desc="Output gpickled network files for all node-based measures") ) edge_measure_networks = OutputMultiPath( File(desc="Output gpickled network files for all edge-based measures") ) k_networks = OutputMultiPath( File( desc="Output gpickled network files for the k-core, k-shell, and k-crust networks" ) ) k_core = File(desc="Computed k-core network stored as a NetworkX pickle.") k_shell = File(desc="Computed k-shell network stored as a NetworkX pickle.") k_crust = File(desc="Computed k-crust network stored as a NetworkX pickle.") pickled_extra_measures = File( desc="Network measures for the group that return dictionaries, stored as a Pickle." ) matlab_dict_measures = OutputMultiPath( File( desc="Network measures for the group that return dictionaries, stored as matlab matrices." 
) ) class NetworkXMetrics(BaseInterface): """ Calculates and outputs NetworkX-based measures for an input network Example ------- >>> import nipype.interfaces.cmtk as cmtk >>> nxmetrics = cmtk.NetworkXMetrics() >>> nxmetrics.inputs.in_file = 'subj1.pck' >>> nxmetrics.run() # doctest: +SKIP """ input_spec = NetworkXMetricsInputSpec output_spec = NetworkXMetricsOutputSpec def _run_interface(self, runtime): import scipy.io as sio global gpickled, nodentwks, edgentwks, kntwks, matlab gpickled = list() nodentwks = list() edgentwks = list() kntwks = list() matlab = list() ntwk = nx.read_gpickle(self.inputs.in_file) # Each block computes, writes, and saves a measure # The names are then added to the output .pck file list # In the case of the degeneracy networks, they are given specified output names calculate_cliques = self.inputs.compute_clique_related_measures weighted = self.inputs.treat_as_weighted_graph global_measures = compute_singlevalued_measures( ntwk, weighted, calculate_cliques ) if isdefined(self.inputs.out_global_metrics_matlab): global_out_file = op.abspath(self.inputs.out_global_metrics_matlab) else: global_out_file = op.abspath(self._gen_outfilename("globalmetrics", "mat")) sio.savemat(global_out_file, global_measures, oned_as="column") matlab.append(global_out_file) node_measures = compute_node_measures(ntwk, calculate_cliques) for key in list(node_measures.keys()): newntwk = add_node_data(node_measures[key], ntwk) out_file = op.abspath(self._gen_outfilename(key, "pck")) nx.write_gpickle(newntwk, out_file) nodentwks.append(out_file) if isdefined(self.inputs.out_node_metrics_matlab): node_out_file = op.abspath(self.inputs.out_node_metrics_matlab) else: node_out_file = op.abspath(self._gen_outfilename("nodemetrics", "mat")) sio.savemat(node_out_file, node_measures, oned_as="column") matlab.append(node_out_file) gpickled.extend(nodentwks) edge_measures = compute_edge_measures(ntwk) for key in list(edge_measures.keys()): newntwk = 
add_edge_data(edge_measures[key], ntwk) out_file = op.abspath(self._gen_outfilename(key, "pck")) nx.write_gpickle(newntwk, out_file) edgentwks.append(out_file) if isdefined(self.inputs.out_edge_metrics_matlab): edge_out_file = op.abspath(self.inputs.out_edge_metrics_matlab) else: edge_out_file = op.abspath(self._gen_outfilename("edgemetrics", "mat")) sio.savemat(edge_out_file, edge_measures, oned_as="column") matlab.append(edge_out_file) gpickled.extend(edgentwks) ntwk_measures = compute_network_measures(ntwk) for key in list(ntwk_measures.keys()): if key == "k_core": out_file = op.abspath( self._gen_outfilename(self.inputs.out_k_core, "pck") ) if key == "k_shell": out_file = op.abspath( self._gen_outfilename(self.inputs.out_k_shell, "pck") ) if key == "k_crust": out_file = op.abspath( self._gen_outfilename(self.inputs.out_k_crust, "pck") ) nx.write_gpickle(ntwk_measures[key], out_file) kntwks.append(out_file) gpickled.extend(kntwks) out_pickled_extra_measures = op.abspath( self._gen_outfilename(self.inputs.out_pickled_extra_measures, "pck") ) dict_measures = compute_dict_measures(ntwk) iflogger.info( "Saving extra measure file to %s in Pickle format", op.abspath(out_pickled_extra_measures), ) with open(out_pickled_extra_measures, "w") as fo: pickle.dump(dict_measures, fo) iflogger.info("Saving MATLAB measures as %s", matlab) # Loops through the measures which return a dictionary, # converts the keys and values to a Numpy array, # stacks them together, and saves them in a MATLAB .mat file via Scipy global dicts dicts = list() for idx, key in enumerate(dict_measures.keys()): for idxd, keyd in enumerate(dict_measures[key].keys()): if idxd == 0: nparraykeys = np.array(keyd) nparrayvalues = np.array(dict_measures[key][keyd]) else: nparraykeys = np.append(nparraykeys, np.array(keyd)) values = np.array(dict_measures[key][keyd]) nparrayvalues = np.append(nparrayvalues, values) nparray = np.vstack((nparraykeys, nparrayvalues)) out_file = 
op.abspath(self._gen_outfilename(key, "mat")) npdict = {} npdict[key] = nparray sio.savemat(out_file, npdict, oned_as="column") dicts.append(out_file) return runtime def _list_outputs(self): outputs = self.output_spec().get() outputs["k_core"] = op.abspath( self._gen_outfilename(self.inputs.out_k_core, "pck") ) outputs["k_shell"] = op.abspath( self._gen_outfilename(self.inputs.out_k_shell, "pck") ) outputs["k_crust"] = op.abspath( self._gen_outfilename(self.inputs.out_k_crust, "pck") ) outputs["gpickled_network_files"] = gpickled outputs["k_networks"] = kntwks outputs["node_measure_networks"] = nodentwks outputs["edge_measure_networks"] = edgentwks outputs["matlab_dict_measures"] = dicts outputs["global_measures_matlab"] = op.abspath( self._gen_outfilename("globalmetrics", "mat") ) outputs["node_measures_matlab"] = op.abspath( self._gen_outfilename("nodemetrics", "mat") ) outputs["edge_measures_matlab"] = op.abspath( self._gen_outfilename("edgemetrics", "mat") ) outputs["matlab_matrix_files"] = [ outputs["global_measures_matlab"], outputs["node_measures_matlab"], outputs["edge_measures_matlab"], ] outputs["pickled_extra_measures"] = op.abspath( self._gen_outfilename(self.inputs.out_pickled_extra_measures, "pck") ) return outputs def _gen_outfilename(self, name, ext): return name + "." + ext class AverageNetworksInputSpec(BaseInterfaceInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, desc="Networks for a group of subjects" ) resolution_network_file = File( exists=True, desc="Parcellation files from Connectome Mapping Toolkit. 
class AverageNetworks(BaseInterface):
    """
    Calculates and outputs the average network given a set of input NetworkX gpickle files

    This interface will only keep an edge in the averaged network if that edge is present in
    at least half of the input networks.

    Example
    -------
    >>> import nipype.interfaces.cmtk as cmtk
    >>> avg = cmtk.AverageNetworks()
    >>> avg.inputs.in_files = ['subj1.pck', 'subj2.pck']
    >>> avg.run()                 # doctest: +SKIP
    """

    input_spec = AverageNetworksInputSpec
    output_spec = AverageNetworksOutputSpec

    def _run_interface(self, runtime):
        # Prefer the explicit parcellation file; otherwise fall back to the
        # first input network as the node-information template.
        if isdefined(self.inputs.resolution_network_file):
            ntwk_res_file = self.inputs.resolution_network_file
        else:
            ntwk_res_file = self.inputs.in_files[0]

        # NOTE(review): module-level global consumed later by _list_outputs.
        global matlab_network_list
        network_name, matlab_network_list = average_networks(
            self.inputs.in_files, ntwk_res_file, self.inputs.group_id
        )
        return runtime

    def _list_outputs(self):
        outputs = self.output_spec().get()
        stem = self.inputs.group_id + "_average"

        # User-provided output names win; otherwise derive "<group>_average.*".
        if isdefined(self.inputs.out_gpickled_groupavg):
            outputs["gpickled_groupavg"] = op.abspath(self.inputs.out_gpickled_groupavg)
        else:
            outputs["gpickled_groupavg"] = op.abspath(
                self._gen_outfilename(stem, "pck")
            )

        if isdefined(self.inputs.out_gexf_groupavg):
            outputs["gexf_groupavg"] = op.abspath(self.inputs.out_gexf_groupavg)
        else:
            outputs["gexf_groupavg"] = op.abspath(
                self._gen_outfilename(stem, "gexf")
            )

        outputs["matlab_groupavgs"] = matlab_network_list
        return outputs

    def _gen_outfilename(self, name, ext):
        """Join a file stem and an extension with a dot."""
        return name + "." + ext
def create_annot_label(subject_id, subjects_dir, fs_dir, parcellation_name):
    """Generate the FreeSurfer annotation and label files needed for the
    Lausanne2008 ROIs of ``parcellation_name``.

    For every atlas the requested scale depends on, runs ``mris_ca_label``
    (surface classification) and ``mri_annotation2label`` (annotation to
    per-region labels), then rasterizes the ``unknown`` and
    ``corpuscallosum`` labels — taken from the scale-60 output — into
    ``cc_unknown.nii.gz`` and converts ``ribbon``/``aseg`` to NIfTI.

    Parameters mirror the Connectome Mapper pipeline: FreeSurfer
    ``subjects_dir``/``subject_id`` identify the subject, ``fs_dir`` is the
    FreeSurfer installation (unused here), ``parcellation_name`` is one of
    the Lausanne2008 scales (scale33/60/125/250/500).
    """
    import cmp
    from cmp.util import runCmd

    iflogger.info("Create the cortical labels necessary for our ROIs")
    iflogger.info("=================================================")
    fs_label_dir = op.join(op.join(subjects_dir, subject_id), "label")
    output_dir = op.abspath(op.curdir)
    paths = []
    cmp_config = cmp.configuration.PipelineConfiguration()
    cmp_config.parcellation_scheme = "Lausanne2008"
    for hemi in ["lh", "rh"]:
        spath = (
            cmp_config._get_lausanne_parcellation("Lausanne2008")[parcellation_name][
                "fs_label_subdir_name"
            ]
            % hemi
        )
        paths.append(spath)
    for p in paths:
        try:
            os.makedirs(op.join(".", p))
        except OSError:
            # Directory already exists; was a bare ``except`` before.
            pass

    # Atlas dependency table: each entry is (tag, scale_subdir). ``tag``
    # parameterizes the .gcs/.annot names ("_36" -> "myatlas_36_rh.gcs",
    # "P1_16" -> "myatlasP1_16_rh.gcs"); ``scale_subdir`` is the
    # "regenerated_<hemi>_<scale>" output folder. This replaces the
    # previous copy-pasted per-scale tuple tables; entry order (rh block
    # first, then lh, same atlas order) is preserved.
    if "33" in parcellation_name:
        specs = [("_36", "36"), ("_60", "60")]
    elif "60" in parcellation_name:
        specs = [("_60", "60")]
    elif "125" in parcellation_name:
        specs = [("_125", "125"), ("_60", "60")]
    elif "250" in parcellation_name:
        specs = [("_250", "250"), ("_60", "60")]
    else:
        # scale500: all atlases, including the three partitions P1-P36.
        specs = [
            ("_36", "36"),
            ("P1_16", "500"),
            ("P17_28", "500"),
            ("P29_36", "500"),
            ("_60", "60"),
            ("_125", "125"),
            ("_250", "250"),
        ]
    comp = [
        (
            hemi,
            "myatlas%s_%s.gcs" % (tag, hemi),
            "%s.myaparc%s.annot" % (hemi, tag),
            "regenerated_%s_%s" % (hemi, scale),
            "myaparc%s" % tag,
        )
        for hemi in ("rh", "lh")
        for tag, scale in specs
    ]

    log = cmp_config.get_logger()

    for out in comp:
        # Classify the cortical surface with the atlas .gcs file.
        mris_cmd = 'mris_ca_label %s %s "%s/surf/%s.sphere.reg" "%s" "%s" ' % (
            subject_id,
            out[0],
            op.join(subjects_dir, subject_id),
            out[0],
            cmp_config.get_lausanne_atlas(out[1]),
            op.join(fs_label_dir, out[2]),
        )
        runCmd(mris_cmd, log)
        iflogger.info("-----------")

        # Split the annotation into one .label file per region.
        annot = '--annotation "%s"' % out[4]
        mri_an_cmd = 'mri_annotation2label --subject %s --hemi %s --outdir "%s" %s' % (
            subject_id,
            out[0],
            op.join(output_dir, out[3]),
            annot,
        )
        iflogger.info(mri_an_cmd)
        runCmd(mri_an_cmd, log)
        iflogger.info("-----------")
        iflogger.info(os.environ["SUBJECTS_DIR"])

    # extract cc and unknown to add to tractography mask, we do not want this
    # as a region of interest. In FS 5.0, unknown and corpuscallosum are not
    # available for the 35 scale, but for the other scales only, so take the
    # ones from the scale-60 output (always generated above).
    rhun = op.join(output_dir, "rh.unknown.label")
    lhun = op.join(output_dir, "lh.unknown.label")
    rhco = op.join(output_dir, "rh.corpuscallosum.label")
    lhco = op.join(output_dir, "lh.corpuscallosum.label")
    shutil.copy(op.join(output_dir, "regenerated_rh_60", "rh.unknown.label"), rhun)
    shutil.copy(op.join(output_dir, "regenerated_lh_60", "lh.unknown.label"), lhun)
    shutil.copy(
        op.join(output_dir, "regenerated_rh_60", "rh.corpuscallosum.label"), rhco
    )
    shutil.copy(
        op.join(output_dir, "regenerated_lh_60", "lh.corpuscallosum.label"), lhco
    )

    # Rasterize the four labels into a single volume in orig space.
    mri_cmd = """mri_label2vol --label "%s" --label "%s" --label "%s" --label "%s" --temp "%s" --o "%s" --identity """ % (
        rhun,
        lhun,
        rhco,
        lhco,
        op.join(op.join(subjects_dir, subject_id), "mri", "orig.mgz"),
        op.join(fs_label_dir, "cc_unknown.nii.gz"),
    )
    runCmd(mri_cmd, log)
    runCmd("mris_volmask %s" % subject_id, log)

    # Convert ribbon and aseg to NIfTI for the downstream numpy steps.
    mri_cmd = 'mri_convert -i "%s/mri/ribbon.mgz" -o "%s/mri/ribbon.nii.gz"' % (
        op.join(subjects_dir, subject_id),
        op.join(subjects_dir, subject_id),
    )
    runCmd(mri_cmd, log)
    mri_cmd = 'mri_convert -i "%s/mri/aseg.mgz" -o "%s/mri/aseg.nii.gz"' % (
        op.join(subjects_dir, subject_id),
        op.join(subjects_dir, subject_id),
    )
    runCmd(mri_cmd, log)

    iflogger.info("[ DONE ]")
def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation):
    """Creates the ROI_%s.nii.gz files using the given parcellation information
    from networks. Iteratively create volume.

    Reads the Lausanne2008 node-information GraphML for ``parcellation_name``
    and paints each region into a 256^3 int16 volume: subcortical regions are
    copied from ``aseg``, cortical regions are rasterized from their
    FreeSurfer .label files via ``mri_label2vol``. Saves
    ``ROI_<scale>.nii.gz`` and, when ``dilation`` is True, a dilated
    ``ROIv_<scale>.nii.gz`` where unlabelled cortical voxels take the label
    of the nearest labelled voxel in a 25^3 neighbourhood.
    """
    import cmp
    from cmp.util import runCmd

    iflogger.info("Create the ROIs:")
    output_dir = op.abspath(op.curdir)
    # NOTE(review): the ``fs_dir`` argument is shadowed here — the function
    # always uses <subjects_dir>/<subject_id>.
    fs_dir = op.join(subjects_dir, subject_id)
    cmp_config = cmp.configuration.PipelineConfiguration()
    cmp_config.parcellation_scheme = "Lausanne2008"
    log = cmp_config.get_logger()
    parval = cmp_config._get_lausanne_parcellation("Lausanne2008")[parcellation_name]
    pgpath = parval["node_information_graphml"]
    aseg = nb.load(op.join(fs_dir, "mri", "aseg.nii.gz"))
    asegd = np.asanyarray(aseg.dataobj)

    # Cortical voxels from aseg labels 3 and 42. NOTE(review): per the
    # FreeSurfer LUT, 3 is Left- and 42 is Right-Cerebral-Cortex (the
    # original comment had the hemispheres swapped); the union is what is
    # used below, so the mix-up has no behavioral effect.
    idxr = np.where(asegd == 3)
    idxl = np.where(asegd == 42)
    xx = np.concatenate((idxr[0], idxl[0]))
    yy = np.concatenate((idxr[1], idxl[1]))
    zz = np.concatenate((idxr[2], idxl[2]))

    # initialize variables necessary for cortical ROIs dilation
    # dimensions of the neighbourhood for rois labels assignment (choose odd dimensions!)
    shape = (25, 25, 25)
    center = np.array(shape) // 2
    # dist: distances from the center of the neighbourhood
    dist = np.zeros(shape, dtype="float32")
    for x in range(shape[0]):
        for y in range(shape[1]):
            for z in range(shape[2]):
                distxyz = center - [x, y, z]
                dist[x, y, z] = np.sqrt(np.sum(np.multiply(distxyz, distxyz)))

    iflogger.info("Working on parcellation: ")
    iflogger.info(
        cmp_config._get_lausanne_parcellation("Lausanne2008")[parcellation_name]
    )
    iflogger.info("========================")
    pg = nx.read_graphml(pgpath)
    # each node represents a brain region
    # create a big 256^3 volume for storage of all ROIs
    rois = np.zeros((256, 256, 256), dtype=np.int16)

    count = 0
    for brk, brv in pg.nodes(data=True):
        count = count + 1
        iflogger.info(brv)
        iflogger.info(brk)
        if brv["dn_hemisphere"] == "left":
            hemi = "lh"
        elif brv["dn_hemisphere"] == "right":
            hemi = "rh"
        # NOTE(review): ``hemi`` stays unset when dn_hemisphere is neither
        # "left" nor "right"; a cortical node with another value would raise
        # NameError below — presumably the GraphML guarantees one of the two.
        if brv["dn_region"] == "subcortical":
            iflogger.info(brv)
            iflogger.info("---------------------")
            iflogger.info("Work on brain region: %s", brv["dn_region"])
            iflogger.info("Freesurfer Name: %s", brv["dn_fsname"])
            iflogger.info("Region %s of %s", count, pg.number_of_nodes())
            iflogger.info("---------------------")
            # if it is subcortical, retrieve roi from aseg
            idx = np.where(asegd == int(brv["dn_fs_aseg_val"]))
            rois[idx] = int(brv["dn_correspondence_id"])

        elif brv["dn_region"] == "cortical":
            iflogger.info(brv)
            iflogger.info("---------------------")
            iflogger.info("Work on brain region: %s", brv["dn_region"])
            iflogger.info("Freesurfer Name: %s", brv["dn_fsname"])
            iflogger.info("Region %s of %s", count, pg.number_of_nodes())
            iflogger.info("---------------------")

            labelpath = op.join(output_dir, parval["fs_label_subdir_name"] % hemi)
            # construct .label file name
            fname = "%s.%s.label" % (hemi, brv["dn_fsname"])

            # execute fs mri_label2vol to generate volume roi from the label file
            # store it in temporary file to be overwritten for each region
            mri_cmd = 'mri_label2vol --label "%s" --temp "%s" --o "%s" --identity' % (
                op.join(labelpath, fname),
                op.join(fs_dir, "mri", "orig.mgz"),
                op.join(output_dir, "tmp.nii.gz"),
            )
            runCmd(mri_cmd, log)

            tmp = nb.load(op.join(output_dir, "tmp.nii.gz"))
            tmpd = np.asanyarray(tmp.dataobj)

            # find voxel and set them to intensityvalue in rois
            idx = np.where(tmpd == 1)
            rois[idx] = int(brv["dn_correspondence_id"])

    # store volume eg in ROI_scale33.nii.gz
    out_roi = op.abspath("ROI_%s.nii.gz" % parcellation_name)

    # update the header
    hdr = aseg.header
    hdr2 = hdr.copy()
    hdr2.set_data_dtype(np.uint16)

    log.info("Save output image to %s" % out_roi)
    img = nb.Nifti1Image(rois, aseg.affine, hdr2)
    nb.save(img, out_roi)

    iflogger.info("[ DONE ]")
    # dilate cortical regions
    if dilation is True:
        iflogger.info("Dilating cortical regions...")
        # loop throughout all the voxels belonging to the aseg GM volume
        for j in range(xx.size):
            if rois[xx[j], yy[j], zz[j]] == 0:
                local = extract(rois, shape, position=(xx[j], yy[j], zz[j]), fill=0)
                mask = local.copy()
                mask[np.nonzero(local > 0)] = 1
                # weight distances by the labelled mask; zeros (unlabelled or
                # center) are pushed to the maximum so argmin finds the
                # nearest labelled voxel
                thisdist = np.multiply(dist, mask)
                thisdist[np.nonzero(thisdist == 0)] = np.amax(thisdist)
                value = np.int_(local[np.nonzero(thisdist == np.amin(thisdist))])
                if value.size > 1:
                    # tie: pick the most frequent label among the nearest voxels
                    counts = np.bincount(value)
                    value = np.argmax(counts)
                rois[xx[j], yy[j], zz[j]] = value

        # store volume eg in ROIv_scale33.nii.gz
        out_roi = op.abspath("ROIv_%s.nii.gz" % parcellation_name)
        iflogger.info("Save output image to %s", out_roi)
        img = nb.Nifti1Image(rois, aseg.affine, hdr2)
        nb.save(img, out_roi)

        iflogger.info("[ DONE ]")
def create_wm_mask(subject_id, subjects_dir, fs_dir, parcellation_name):
    """Build the white-matter mask ``fsmask_1mm.nii.gz`` for tractography.

    Starts from the FreeSurfer ribbon WM labels, then removes eroded
    ventricles/CSF, (eroded) grey nuclei and the brainstem, adds back the
    corpus-callosum/unknown labels, and finally subtracts the cortical ROIs
    of ``parcellation_name``. The result is written to
    ``<subjects_dir>/<subject_id>/mri/fsmask_1mm.nii.gz``.
    """
    import cmp
    # scipy.ndimage.morphology is deprecated; binary_erosion lives in
    # scipy.ndimage proper on every supported SciPy version.
    import scipy.ndimage as nd

    iflogger.info("Create white matter mask")

    fs_dir = op.join(subjects_dir, subject_id)
    cmp_config = cmp.configuration.PipelineConfiguration()
    cmp_config.parcellation_scheme = "Lausanne2008"
    pgpath = cmp_config._get_lausanne_parcellation("Lausanne2008")[parcellation_name][
        "node_information_graphml"
    ]

    # load ribbon as basis for white matter mask
    fsmask = nb.load(op.join(fs_dir, "mri", "ribbon.nii.gz"))
    fsmaskd = np.asanyarray(fsmask.dataobj)

    wmmask = np.zeros(fsmaskd.shape)

    # extract right and left white matter (ribbon values 120/20)
    idx_lh = np.where(fsmaskd == 120)
    idx_rh = np.where(fsmaskd == 20)

    wmmask[idx_lh] = 1
    wmmask[idx_rh] = 1

    # remove subcortical nuclei from white matter mask
    aseg = nb.load(op.join(fs_dir, "mri", "aseg.nii.gz"))
    asegd = np.asanyarray(aseg.dataobj)

    # need binary erosion function
    imerode = nd.binary_erosion

    # ventricle erosion
    csfA = np.zeros(asegd.shape)
    csfB = np.zeros(asegd.shape)

    # structuring elements for erosion
    se1 = np.zeros((3, 3, 5))
    se1[1, :, 2] = 1
    se1[:, 1, 2] = 1
    se1[1, 1, :] = 1
    se = np.zeros((3, 3, 3))
    se[1, :, 1] = 1
    se[:, 1, 1] = 1
    se[1, 1, :] = 1

    # lateral ventricles, thalamus proper and caudate
    # the latter two removed for better erosion, but put back afterwards
    idx = np.where(
        (asegd == 4)
        | (asegd == 43)
        | (asegd == 11)
        | (asegd == 50)
        | (asegd == 31)
        | (asegd == 63)
        | (asegd == 10)
        | (asegd == 49)
    )
    csfA[idx] = 1
    csfA = imerode(imerode(csfA, se1), se)

    # thalmus proper and cuadate are put back because they are not lateral ventricles
    idx = np.where((asegd == 11) | (asegd == 50) | (asegd == 10) | (asegd == 49))
    csfA[idx] = 0

    # rest of CSF: 3rd and 4th ventricle and extracerebral CSF.
    # NOTE(review): the original code also computed an unused np.where over
    # the same label set right before this loop; that dead statement has
    # been removed — csfB is built exclusively by the loop below.
    for i in [5, 14, 15, 24, 44, 72, 75, 76, 213, 221]:
        idx = np.where(asegd == i)
        csfB[idx] = 1

    # do not remove the subthalamic nucleus for now from the wm mask
    # 23, 60
    # would stop the fiber going to the segmented "brainstem"

    # grey nuclei, either with or without erosion
    gr_ncl = np.zeros(asegd.shape)

    # with erosion
    for i in [10, 11, 12, 49, 50, 51]:
        idx = np.where(asegd == i)
        # temporary volume
        tmp = np.zeros(asegd.shape)
        tmp[idx] = 1
        tmp = imerode(tmp, se)
        idx = np.where(tmp == 1)
        gr_ncl[idx] = 1

    # without erosion
    for i in [13, 17, 18, 26, 52, 53, 54, 58]:
        idx = np.where(asegd == i)
        gr_ncl[idx] = 1

    # remove remaining structure, e.g. brainstem
    remaining = np.zeros(asegd.shape)
    idx = np.where(asegd == 16)
    remaining[idx] = 1

    # now remove all the structures from the white matter
    idx = np.where((csfA != 0) | (csfB != 0) | (gr_ncl != 0) | (remaining != 0))
    wmmask[idx] = 0
    iflogger.info(
        "Removing lateral ventricles and eroded grey nuclei and brainstem from white matter mask"
    )

    # ADD voxels from 'cc_unknown.nii.gz' dataset
    ccun = nb.load(op.join(fs_dir, "label", "cc_unknown.nii.gz"))
    ccund = np.asanyarray(ccun.dataobj)
    idx = np.where(ccund != 0)
    iflogger.info("Add corpus callosum and unknown to wm mask")
    wmmask[idx] = 1

    # check if we should subtract the cortical rois from this parcellation
    iflogger.info(
        "Loading ROI_%s.nii.gz to subtract cortical ROIs from white "
        "matter mask",
        parcellation_name,
    )
    roi = nb.load(op.join(op.curdir, "ROI_%s.nii.gz" % parcellation_name))
    roid = np.asanyarray(roi.dataobj)
    assert roid.shape[0] == wmmask.shape[0]
    pg = nx.read_graphml(pgpath)
    for brk, brv in pg.nodes(data=True):
        if brv["dn_region"] == "cortical":
            iflogger.info(
                "Subtracting region %s with intensity value %s",
                brv["dn_region"],
                brv["dn_correspondence_id"],
            )
            idx = np.where(roid == int(brv["dn_correspondence_id"]))
            wmmask[idx] = 0

    # output white matter mask. crop and move it afterwards
    wm_out = op.join(fs_dir, "mri", "fsmask_1mm.nii.gz")
    img = nb.Nifti1Image(wmmask, fsmask.affine, fsmask.header)
    iflogger.info("Save white matter mask: %s", wm_out)
    nb.save(img, wm_out)
def crop_and_move_datasets(
    subject_id, subjects_dir, fs_dir, parcellation_name, out_roi_file, dilation
):
    """Reslice the generated ROI/mask volumes back to the subject's original
    structural resolution (``mri/orig/001.mgz``) and copy them into the
    current working directory under their canonical names.

    Raises
    ------
    IOError
        If any expected input volume is missing.
    """
    # BUGFIX(review): the original only did ``from cmp.util import runCmd``
    # but then referenced ``cmp.configuration`` — a NameError at runtime.
    import cmp
    from cmp.util import runCmd

    fs_dir = op.join(subjects_dir, subject_id)
    cmp_config = cmp.configuration.PipelineConfiguration()
    cmp_config.parcellation_scheme = "Lausanne2008"
    log = cmp_config.get_logger()
    output_dir = op.abspath(op.curdir)

    iflogger.info("Cropping and moving datasets to %s", output_dir)
    # (source, destination) pairs; sources were produced by the earlier steps.
    ds = [
        (op.join(fs_dir, "mri", "aseg.nii.gz"), op.abspath("aseg.nii.gz")),
        (op.join(fs_dir, "mri", "ribbon.nii.gz"), op.abspath("ribbon.nii.gz")),
        (op.join(fs_dir, "mri", "fsmask_1mm.nii.gz"), op.abspath("fsmask_1mm.nii.gz")),
        (
            op.join(fs_dir, "label", "cc_unknown.nii.gz"),
            op.abspath("cc_unknown.nii.gz"),
        ),
    ]

    ds.append(
        (
            op.abspath("ROI_%s.nii.gz" % parcellation_name),
            op.abspath("ROI_HR_th.nii.gz"),
        )
    )
    if dilation is True:
        ds.append(
            (
                op.abspath("ROIv_%s.nii.gz" % parcellation_name),
                op.abspath("ROIv_HR_th.nii.gz"),
            )
        )

    orig = op.join(fs_dir, "mri", "orig", "001.mgz")

    for d in ds:
        iflogger.info("Processing %s:", d[0])
        if not op.exists(d[0]):
            # More specific than the previous bare ``Exception``; message kept.
            raise IOError("File %s does not exist." % d[0])
        # reslice to original volume because the roi creation with freesurfer
        # changed to 256x256x256 resolution
        mri_cmd = 'mri_convert -rl "%s" -rt nearest "%s" -nc "%s"' % (orig, d[0], d[1])
        runCmd(mri_cmd, log)
def extract(Z, shape, position, fill):
    """Extract voxel neighbourhood

    Return the neighbourhood of ``position`` in ``Z`` as an array of
    ``shape``; voxels that fall outside ``Z`` are padded with ``fill``.

    Parameters
    ----------
    Z : array-like
        the original data
    shape : tuple
        tuple containing neighbourhood dimensions
    position : tuple
        tuple containing central point indexes
    fill : float
        value for the padding of Z

    Returns
    -------
    R : ndarray
        the neighbourhood of the specified point in Z
    """
    # Output block, pre-filled with the padding value.
    block = np.full(shape, fill, dtype=Z.dtype)

    # Per-axis slices: ``src`` into Z, ``dst`` into the output block,
    # clipped at the volume borders.
    src, dst = [], []
    for size, pos, zdim in zip(shape, position, Z.shape):
        lo = int(pos) - size // 2          # unclipped window start in Z
        hi = lo + size                     # unclipped window end in Z
        lo_clip = max(lo, 0)
        hi_clip = min(hi, zdim)
        src.append(slice(lo_clip, hi_clip))
        dst.append(slice(lo_clip - lo, size - (hi - hi_clip)))

    block[tuple(dst)] = Z[tuple(src)]
    return block
class Parcellate(LibraryBaseInterface):
    """Subdivides segmented ROI file into smaller subregions

    This interface implements the same procedure as in the ConnectomeMapper's parcellation stage
    (cmp/stages/parcellation/maskcreation.py) for a single parcellation scheme (e.g. 'scale500').

    Example
    -------
    >>> import nipype.interfaces.cmtk as cmtk
    >>> parcellate = cmtk.Parcellate()
    >>> parcellate.inputs.freesurfer_dir = '.'
    >>> parcellate.inputs.subjects_dir = '.'
    >>> parcellate.inputs.subject_id = 'subj1'
    >>> parcellate.inputs.dilation = True
    >>> parcellate.inputs.parcellation_name = 'scale500'
    >>> parcellate.run()                 # doctest: +SKIP
    """

    input_spec = ParcellateInputSpec
    output_spec = ParcellateOutputSpec
    _pkg = "cmp"
    imports = ("scipy",)

    def _run_interface(self, runtime):
        """Run the four-stage Lausanne2008 parcellation pipeline:
        annot/label creation, ROI volume, WM mask, and reslicing."""
        if self.inputs.subjects_dir:
            os.environ.update({"SUBJECTS_DIR": self.inputs.subjects_dir})

        subject_dir = op.join(self.inputs.subjects_dir, self.inputs.subject_id)
        if not os.path.exists(subject_dir):
            # Fail early with a useful message (was a bare ``raise Exception``).
            raise Exception("Subject directory %s does not exist" % subject_dir)

        iflogger.info("ROI_HR_th.nii.gz / fsmask_1mm.nii.gz CREATION")
        iflogger.info("=============================================")
        create_annot_label(
            self.inputs.subject_id,
            self.inputs.subjects_dir,
            self.inputs.freesurfer_dir,
            self.inputs.parcellation_name,
        )
        create_roi(
            self.inputs.subject_id,
            self.inputs.subjects_dir,
            self.inputs.freesurfer_dir,
            self.inputs.parcellation_name,
            self.inputs.dilation,
        )
        create_wm_mask(
            self.inputs.subject_id,
            self.inputs.subjects_dir,
            self.inputs.freesurfer_dir,
            self.inputs.parcellation_name,
        )
        crop_and_move_datasets(
            self.inputs.subject_id,
            self.inputs.subjects_dir,
            self.inputs.freesurfer_dir,
            self.inputs.parcellation_name,
            self.inputs.out_roi_file,
            self.inputs.dilation,
        )
        return runtime

    def _list_outputs(self):
        outputs = self._outputs().get()
        if isdefined(self.inputs.out_roi_file):
            outputs["roi_file"] = op.abspath(self.inputs.out_roi_file)
        else:
            outputs["roi_file"] = op.abspath(self._gen_outfilename("nii.gz", "ROI"))
        if self.inputs.dilation is True:
            outputs["roiv_file"] = op.abspath(self._gen_outfilename("nii.gz", "ROIv"))
        # Fixed file names produced by crop_and_move_datasets in the cwd.
        outputs["white_matter_mask_file"] = op.abspath("fsmask_1mm.nii.gz")
        outputs["cc_unknown_file"] = op.abspath("cc_unknown.nii.gz")
        outputs["ribbon_file"] = op.abspath("ribbon.nii.gz")
        outputs["aseg_file"] = op.abspath("aseg.nii.gz")
        outputs["roi_file_in_structural_space"] = op.abspath("ROI_HR_th.nii.gz")
        if self.inputs.dilation is True:
            outputs["dilated_roi_file_in_structural_space"] = op.abspath(
                "ROIv_HR_th.nii.gz"
            )
        return outputs

    def _gen_outfilename(self, ext, prefix="ROI"):
        """Build '<prefix>_<parcellation_name>.<ext>'."""
        return prefix + "_" + self.inputs.parcellation_name + "." + ext
nipype-1.7.0/nipype/interfaces/cmtk/tests/test_auto_CFFConverter.py000066400000000000000000000025661413403311400255040ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..convert import CFFConverter def test_CFFConverter_inputs(): input_map = dict( creator=dict(), data_files=dict(), description=dict( usedefault=True, ), email=dict(), gifti_labels=dict(), gifti_surfaces=dict(), gpickled_networks=dict(), graphml_networks=dict(), license=dict(), nifti_volumes=dict(), out_file=dict( extensions=None, usedefault=True, ), publisher=dict(), references=dict(), relation=dict(), rights=dict(), script_files=dict(), species=dict( usedefault=True, ), timeseries_files=dict(), title=dict(), tract_files=dict(), ) inputs = CFFConverter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CFFConverter_outputs(): output_map = dict( connectome_file=dict( extensions=None, ), ) outputs = CFFConverter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/cmtk/tests/test_auto_CreateMatrix.py000066400000000000000000000057411413403311400256040ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..cmtk import CreateMatrix def test_CreateMatrix_inputs(): input_map = dict( count_region_intersections=dict( usedefault=True, ), out_endpoint_array_name=dict( extensions=None, genfile=True, ), out_fiber_length_std_matrix_mat_file=dict( extensions=None, genfile=True, ), out_intersection_matrix_mat_file=dict( extensions=None, genfile=True, ), out_matrix_file=dict( extensions=None, genfile=True, ), out_matrix_mat_file=dict( extensions=None, usedefault=True, ), out_mean_fiber_length_matrix_mat_file=dict( extensions=None, genfile=True, ), out_median_fiber_length_matrix_mat_file=dict( 
extensions=None, genfile=True, ), resolution_network_file=dict( extensions=None, mandatory=True, ), roi_file=dict( extensions=None, mandatory=True, ), tract_file=dict( extensions=None, mandatory=True, ), ) inputs = CreateMatrix.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CreateMatrix_outputs(): output_map = dict( endpoint_file=dict( extensions=None, ), endpoint_file_mm=dict( extensions=None, ), fiber_label_file=dict( extensions=None, ), fiber_labels_noorphans=dict( extensions=None, ), fiber_length_file=dict( extensions=None, ), fiber_length_std_matrix_mat_file=dict( extensions=None, ), filtered_tractographies=dict(), filtered_tractography=dict( extensions=None, ), filtered_tractography_by_intersections=dict( extensions=None, ), intersection_matrix_file=dict( extensions=None, ), intersection_matrix_mat_file=dict( extensions=None, ), matlab_matrix_files=dict(), matrix_file=dict( extensions=None, ), matrix_files=dict(), matrix_mat_file=dict( extensions=None, ), mean_fiber_length_matrix_mat_file=dict( extensions=None, ), median_fiber_length_matrix_mat_file=dict( extensions=None, ), stats_file=dict( extensions=None, ), ) outputs = CreateMatrix.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/cmtk/tests/test_auto_CreateNodes.py000066400000000000000000000017411413403311400254040ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..cmtk import CreateNodes def test_CreateNodes_inputs(): input_map = dict( out_filename=dict( extensions=None, usedefault=True, ), resolution_network_file=dict( extensions=None, mandatory=True, ), roi_file=dict( extensions=None, mandatory=True, ), ) inputs = CreateNodes.input_spec() for key, metadata in list(input_map.items()): for metakey, value 
in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CreateNodes_outputs(): output_map = dict( node_network=dict( extensions=None, ), ) outputs = CreateNodes.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/cmtk/tests/test_auto_MergeCNetworks.py000066400000000000000000000015531413403311400261100ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..convert import MergeCNetworks def test_MergeCNetworks_inputs(): input_map = dict( in_files=dict( mandatory=True, ), out_file=dict( extensions=None, usedefault=True, ), ) inputs = MergeCNetworks.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MergeCNetworks_outputs(): output_map = dict( connectome_file=dict( extensions=None, ), ) outputs = MergeCNetworks.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/cmtk/tests/test_auto_NetworkBasedStatistic.py000066400000000000000000000026651413403311400274760ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..nbs import NetworkBasedStatistic def test_NetworkBasedStatistic_inputs(): input_map = dict( edge_key=dict( usedefault=True, ), in_group1=dict( mandatory=True, ), in_group2=dict( mandatory=True, ), node_position_network=dict( extensions=None, ), number_of_permutations=dict( usedefault=True, ), out_nbs_network=dict( extensions=None, ), out_nbs_pval_network=dict( extensions=None, ), t_tail=dict( usedefault=True, ), threshold=dict( usedefault=True, ), ) inputs = NetworkBasedStatistic.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_NetworkBasedStatistic_outputs(): output_map = dict( nbs_network=dict( extensions=None, ), nbs_pval_network=dict( extensions=None, ), network_files=dict(), ) outputs = NetworkBasedStatistic.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/cmtk/tests/test_auto_NetworkXMetrics.py000066400000000000000000000044151413403311400263210ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..nx import NetworkXMetrics def test_NetworkXMetrics_inputs(): input_map = dict( compute_clique_related_measures=dict( usedefault=True, ), in_file=dict( extensions=None, mandatory=True, ), out_edge_metrics_matlab=dict( extensions=None, genfile=True, ), out_global_metrics_matlab=dict( extensions=None, genfile=True, ), out_k_core=dict( extensions=None, usedefault=True, ), out_k_crust=dict( extensions=None, usedefault=True, ), out_k_shell=dict( extensions=None, usedefault=True, ), out_node_metrics_matlab=dict( extensions=None, genfile=True, ), out_pickled_extra_measures=dict( extensions=None, usedefault=True, ), treat_as_weighted_graph=dict( usedefault=True, ), ) inputs = NetworkXMetrics.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_NetworkXMetrics_outputs(): output_map = dict( edge_measure_networks=dict(), edge_measures_matlab=dict( extensions=None, ), global_measures_matlab=dict( extensions=None, ), gpickled_network_files=dict(), k_core=dict( extensions=None, ), k_crust=dict( extensions=None, ), k_networks=dict(), k_shell=dict( extensions=None, ), matlab_dict_measures=dict(), matlab_matrix_files=dict(), node_measure_networks=dict(), node_measures_matlab=dict( extensions=None, ), pickled_extra_measures=dict( 
extensions=None, ), ) outputs = NetworkXMetrics.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/cmtk/tests/test_auto_Parcellate.py000066400000000000000000000030361413403311400252630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..parcellation import Parcellate def test_Parcellate_inputs(): input_map = dict( dilation=dict( usedefault=True, ), freesurfer_dir=dict(), out_roi_file=dict( extensions=None, genfile=True, ), parcellation_name=dict( usedefault=True, ), subject_id=dict( mandatory=True, ), subjects_dir=dict(), ) inputs = Parcellate.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Parcellate_outputs(): output_map = dict( aseg_file=dict( extensions=None, ), cc_unknown_file=dict( extensions=None, ), dilated_roi_file_in_structural_space=dict( extensions=None, ), ribbon_file=dict( extensions=None, ), roi_file=dict( extensions=None, ), roi_file_in_structural_space=dict( extensions=None, ), roiv_file=dict( extensions=None, ), white_matter_mask_file=dict( extensions=None, ), ) outputs = Parcellate.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/cmtk/tests/test_auto_ROIGen.py000066400000000000000000000024021413403311400242660ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..cmtk import ROIGen def test_ROIGen_inputs(): input_map = dict( LUT_file=dict( extensions=None, xor=["use_freesurfer_LUT"], ), aparc_aseg_file=dict( extensions=None, mandatory=True, ), freesurfer_dir=dict( requires=["use_freesurfer_LUT"], ), out_dict_file=dict( extensions=None, genfile=True, ), out_roi_file=dict( 
extensions=None, genfile=True, ), use_freesurfer_LUT=dict( xor=["LUT_file"], ), ) inputs = ROIGen.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ROIGen_outputs(): output_map = dict( dict_file=dict( extensions=None, ), roi_file=dict( extensions=None, ), ) outputs = ROIGen.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/cmtk/tests/test_nbs.py000066400000000000000000000030311413403311400227340ustar00rootroot00000000000000from ..nbs import NetworkBasedStatistic from ....utils.misc import package_check import numpy as np import networkx as nx import pytest have_cv = True try: package_check("cviewer") except Exception as e: have_cv = False @pytest.fixture() def creating_graphs(tmpdir): graphlist = [] graphnames = ["name" + str(i) for i in range(6)] for idx, name in enumerate(graphnames): graph = np.random.rand(10, 10) G = nx.from_numpy_matrix(graph) out_file = tmpdir.strpath + graphnames[idx] + ".pck" # Save as pck file nx.write_gpickle(G, out_file) graphlist.append(out_file) return graphlist @pytest.mark.skipif(have_cv, reason="tests for import error, cviewer available") def test_importerror(creating_graphs, tmpdir): tmpdir.chdir() graphlist = creating_graphs group1 = graphlist[:3] group2 = graphlist[3:] nbs = NetworkBasedStatistic() nbs.inputs.in_group1 = group1 nbs.inputs.in_group2 = group2 nbs.inputs.edge_key = "weight" with pytest.raises(ImportError) as e: nbs.run() @pytest.mark.skipif(not have_cv, reason="cviewer has to be available") def test_keyerror(creating_graphs): graphlist = creating_graphs group1 = graphlist[:3] group2 = graphlist[3:] nbs = NetworkBasedStatistic() nbs.inputs.in_group1 = group1 nbs.inputs.in_group2 = group2 nbs.inputs.edge_key = "Your_edge" with pytest.raises(KeyError) as e: 
nbs.run() assert "the graph edges do not have Your_edge attribute" in str(e.value) nipype-1.7.0/nipype/interfaces/dcm2nii.py000066400000000000000000000411251413403311400203460ustar00rootroot00000000000000# -*- coding: utf-8 -*- """dcm2nii converts images from the proprietary scanner DICOM format to NIfTI.""" import os import re from copy import deepcopy import itertools as it from glob import iglob from ..utils.filemanip import split_filename from .base import ( CommandLine, CommandLineInputSpec, InputMultiPath, traits, TraitedSpec, OutputMultiPath, isdefined, File, Directory, PackageInfo, ) class Info(PackageInfo): """Handle dcm2niix version information""" version_cmd = "dcm2niix" @staticmethod def parse_version(raw_info): m = re.search(r"version (\S+)", raw_info) return m.groups()[0] if m else None class Dcm2niiInputSpec(CommandLineInputSpec): source_names = InputMultiPath( File(exists=True), argstr="%s", position=-1, copyfile=False, mandatory=True, xor=["source_dir"], ) source_dir = Directory( exists=True, argstr="%s", position=-1, mandatory=True, xor=["source_names"] ) anonymize = traits.Bool( True, argstr="-a", usedefault=True, desc="Remove identifying information" ) config_file = File( exists=True, argstr="-b %s", genfile=True, desc="Load settings from specified inifile", ) collapse_folders = traits.Bool( True, argstr="-c", usedefault=True, desc="Collapse input folders" ) date_in_filename = traits.Bool( True, argstr="-d", usedefault=True, desc="Date in filename" ) events_in_filename = traits.Bool( True, argstr="-e", usedefault=True, desc="Events (series/acq) in filename" ) source_in_filename = traits.Bool( False, argstr="-f", usedefault=True, desc="Source filename" ) gzip_output = traits.Bool( False, argstr="-g", usedefault=True, desc="Gzip output (.gz)" ) id_in_filename = traits.Bool( False, argstr="-i", usedefault=True, desc="ID in filename" ) nii_output = traits.Bool( True, argstr="-n", usedefault=True, desc="Save as .nii - if no, create .hdr/.img pair", ) 
output_dir = Directory( exists=True, argstr="-o %s", genfile=True, desc="Output dir - if unspecified, source directory is used", ) protocol_in_filename = traits.Bool( True, argstr="-p", usedefault=True, desc="Protocol in filename" ) reorient = traits.Bool(argstr="-r", desc="Reorient image to nearest orthogonal") spm_analyze = traits.Bool( argstr="-s", xor=["nii_output"], desc="SPM2/Analyze not SPM5/NIfTI" ) convert_all_pars = traits.Bool( True, argstr="-v", usedefault=True, desc="Convert every image in directory" ) reorient_and_crop = traits.Bool( False, argstr="-x", usedefault=True, desc="Reorient and crop 3D images" ) class Dcm2niiOutputSpec(TraitedSpec): converted_files = OutputMultiPath(File(exists=True)) reoriented_files = OutputMultiPath(File(exists=True)) reoriented_and_cropped_files = OutputMultiPath(File(exists=True)) bvecs = OutputMultiPath(File(exists=True)) bvals = OutputMultiPath(File(exists=True)) class Dcm2nii(CommandLine): """Uses MRIcron's dcm2nii to convert dicom files Examples ======== >>> from nipype.interfaces.dcm2nii import Dcm2nii >>> converter = Dcm2nii() >>> converter.inputs.source_names = ['functional_1.dcm', 'functional_2.dcm'] >>> converter.inputs.gzip_output = True >>> converter.inputs.output_dir = '.' >>> converter.cmdline # doctest: +ELLIPSIS 'dcm2nii -a y -c y -b config.ini -v y -d y -e y -g y -i n -n y -o . 
-p y -x n -f n functional_1.dcm'""" input_spec = Dcm2niiInputSpec output_spec = Dcm2niiOutputSpec _cmd = "dcm2nii" def _format_arg(self, opt, spec, val): if opt in [ "anonymize", "collapse_folders", "date_in_filename", "events_in_filename", "source_in_filename", "gzip_output", "id_in_filename", "nii_output", "protocol_in_filename", "reorient", "spm_analyze", "convert_all_pars", "reorient_and_crop", ]: spec = deepcopy(spec) if val: spec.argstr += " y" else: spec.argstr += " n" val = True if opt == "source_names": return spec.argstr % val[0] return super(Dcm2nii, self)._format_arg(opt, spec, val) def _run_interface(self, runtime): self._config_created = False new_runtime = super(Dcm2nii, self)._run_interface(runtime) ( self.output_files, self.reoriented_files, self.reoriented_and_cropped_files, self.bvecs, self.bvals, ) = self._parse_stdout(new_runtime.stdout) if self._config_created: os.remove("config.ini") return new_runtime def _parse_stdout(self, stdout): files = [] reoriented_files = [] reoriented_and_cropped_files = [] bvecs = [] bvals = [] skip = False last_added_file = None for line in stdout.split("\n"): if not skip: out_file = None if line.startswith("Saving "): out_file = line[len("Saving ") :] elif line.startswith("GZip..."): # for gzipped output files are not absolute fname = line[len("GZip...") :] if len(files) and os.path.basename(files[-1]) == fname[:-3]: # we are seeing a previously reported conversion # as being saved in gzipped form -- remove the # obsolete, uncompressed file files.pop() if isdefined(self.inputs.output_dir): output_dir = self.inputs.output_dir else: output_dir = self._gen_filename("output_dir") out_file = os.path.abspath(os.path.join(output_dir, fname)) elif line.startswith("Number of diffusion directions "): if last_added_file: base, filename, ext = split_filename(last_added_file) bvecs.append(os.path.join(base, filename + ".bvec")) bvals.append(os.path.join(base, filename + ".bval")) elif line.startswith("Removed DWI from DTI 
scan"): # such line can only follow the 'diffusion' case handled # just above for l in (bvecs, bvals): l[-1] = os.path.join( os.path.dirname(l[-1]), "x%s" % (os.path.basename(l[-1]),) ) elif re.search(".*->(.*)", line): val = re.search(".*->(.*)", line) val = val.groups()[0] if isdefined(self.inputs.output_dir): output_dir = self.inputs.output_dir else: output_dir = self._gen_filename("output_dir") val = os.path.join(output_dir, val) if os.path.exists(val): out_file = val if out_file: if out_file not in files: files.append(out_file) last_added_file = out_file continue if line.startswith("Reorienting as "): reoriented_files.append(line[len("Reorienting as ") :]) skip = True continue elif line.startswith("Cropping NIfTI/Analyze image "): base, filename = os.path.split( line[len("Cropping NIfTI/Analyze image ") :] ) filename = "c" + filename if ( os.path.exists(os.path.join(base, filename)) or self.inputs.reorient_and_crop ): # if reorient&crop is true but the file doesn't exist, this errors when setting outputs reoriented_and_cropped_files.append( os.path.join(base, filename) ) skip = True continue skip = False return files, reoriented_files, reoriented_and_cropped_files, bvecs, bvals def _list_outputs(self): outputs = self.output_spec().get() outputs["converted_files"] = self.output_files outputs["reoriented_files"] = self.reoriented_files outputs["reoriented_and_cropped_files"] = self.reoriented_and_cropped_files outputs["bvecs"] = self.bvecs outputs["bvals"] = self.bvals return outputs def _gen_filename(self, name): if name == "output_dir": return os.getcwd() elif name == "config_file": self._config_created = True config_file = "config.ini" with open(config_file, "w") as f: # disable interactive mode f.write("[BOOL]\nManualNIfTIConv=0\n") return config_file return None class Dcm2niixInputSpec(CommandLineInputSpec): source_names = InputMultiPath( File(exists=True), argstr="%s", position=-1, copyfile=False, mandatory=True, desc=( "A set of filenames to be converted. 
Note that the current " "version (1.0.20180328) of dcm2niix converts any files in the " "directory. To only convert specific files they should be in an " "isolated directory" ), xor=["source_dir"], ) source_dir = Directory( exists=True, argstr="%s", position=-1, mandatory=True, desc="A directory containing dicom files to be converted", xor=["source_names"], ) out_filename = traits.Str( argstr="-f %s", desc="Output filename template (" "%a=antenna (coil) number, " "%c=comments, " "%d=description, " "%e=echo number, " "%f=folder name, " "%i=ID of patient, " "%j=seriesInstanceUID, " "%k=studyInstanceUID, " "%m=manufacturer, " "%n=name of patient, " "%p=protocol, " "%s=series number, " "%t=time, " "%u=acquisition number, " "%v=vendor, " "%x=study ID; " "%z=sequence name)", ) output_dir = Directory( ".", usedefault=True, exists=True, argstr="-o %s", desc="Output directory" ) bids_format = traits.Bool( True, argstr="-b", usedefault=True, desc="Create a BIDS sidecar file" ) anon_bids = traits.Bool( argstr="-ba", requires=["bids_format"], desc="Anonymize BIDS" ) compress = traits.Enum( "y", "i", "n", "3", argstr="-z %s", usedefault=True, desc="Gzip compress images - [y=pigz, i=internal, n=no, 3=no,3D]", ) merge_imgs = traits.Bool( False, argstr="-m", usedefault=True, desc="merge 2D slices from same series" ) single_file = traits.Bool( False, argstr="-s", usedefault=True, desc="Single file mode" ) verbose = traits.Bool(False, argstr="-v", usedefault=True, desc="Verbose output") crop = traits.Bool( False, argstr="-x", usedefault=True, desc="Crop 3D T1 acquisitions" ) has_private = traits.Bool( False, argstr="-t", usedefault=True, desc="Text notes including private patient details", ) compression = traits.Enum( 1, 2, 3, 4, 5, 6, 7, 8, 9, argstr="-%d", desc="Gz compression level (1=fastest, 9=smallest)", ) comment = traits.Str(argstr="-c %s", desc="Comment stored as NIfTI aux_file") ignore_deriv = traits.Bool( argstr="-i", desc="Ignore derived, localizer and 2D images" ) 
series_numbers = InputMultiPath( traits.Str(), argstr="-n %s...", desc="Selectively convert by series number - can be used up to 16 times", ) philips_float = traits.Bool( argstr="-p", desc="Philips precise float (not display) scaling" ) to_nrrd = traits.Bool(argstr="-e", desc="Export as NRRD instead of NIfTI") class Dcm2niixOutputSpec(TraitedSpec): converted_files = OutputMultiPath(File(exists=True)) bvecs = OutputMultiPath(File(exists=True)) bvals = OutputMultiPath(File(exists=True)) bids = OutputMultiPath(File(exists=True)) class Dcm2niix(CommandLine): """Uses Chris Rorden's dcm2niix to convert dicom files Examples ======== >>> from nipype.interfaces.dcm2nii import Dcm2niix >>> converter = Dcm2niix() >>> converter.inputs.source_dir = 'dicomdir' >>> converter.inputs.compression = 5 >>> converter.inputs.output_dir = 'ds005' >>> converter.cmdline 'dcm2niix -b y -z y -5 -x n -t n -m n -o ds005 -s n -v n dicomdir' >>> converter.run() # doctest: +SKIP In the example below, we note that the current version of dcm2niix converts any files in the directory containing the files in the list. We also do not support nested filenames with this option. **Thus all files must have a common root directory.** >>> converter = Dcm2niix() >>> converter.inputs.source_names = ['functional_1.dcm', 'functional_2.dcm'] >>> converter.inputs.compression = 5 >>> converter.inputs.output_dir = 'ds005' >>> converter.cmdline 'dcm2niix -b y -z y -5 -x n -t n -m n -o ds005 -s n -v n .' 
>>> converter.run() # doctest: +SKIP """ input_spec = Dcm2niixInputSpec output_spec = Dcm2niixOutputSpec _cmd = "dcm2niix" @property def version(self): return Info.version() def _format_arg(self, opt, spec, val): bools = [ "bids_format", "merge_imgs", "single_file", "verbose", "crop", "has_private", "anon_bids", "ignore_deriv", "philips_float", "to_nrrd", ] if opt in bools: spec = deepcopy(spec) if val: spec.argstr += " y" else: spec.argstr += " n" val = True if opt == "source_names": return spec.argstr % (os.path.dirname(val[0]) or ".") return super(Dcm2niix, self)._format_arg(opt, spec, val) def _run_interface(self, runtime): # may use return code 1 despite conversion runtime = super(Dcm2niix, self)._run_interface( runtime, correct_return_codes=(0, 1) ) self._parse_files(self._parse_stdout(runtime.stdout)) return runtime def _parse_stdout(self, stdout): filenames = [] for line in stdout.split("\n"): if line.startswith("Convert "): # output fname = str(re.search(r"\S+/\S+", line).group(0)) filenames.append(os.path.abspath(fname)) return filenames def _parse_files(self, filenames): outfiles, bvals, bvecs, bids = [], [], [], [] outtypes = [".bval", ".bvec", ".json", ".txt"] if self.inputs.to_nrrd: outtypes += [".nrrd", ".nhdr", ".raw.gz"] else: outtypes += [".nii", ".nii.gz"] for filename in filenames: # search for relevant files, and sort accordingly for fl in search_files(filename, outtypes): if ( fl.endswith(".nii") or fl.endswith(".gz") or fl.endswith(".nrrd") or fl.endswith(".nhdr") ): outfiles.append(fl) elif fl.endswith(".bval"): bvals.append(fl) elif fl.endswith(".bvec"): bvecs.append(fl) elif fl.endswith(".json") or fl.endswith(".txt"): bids.append(fl) self.output_files = outfiles self.bvecs = bvecs self.bvals = bvals self.bids = bids def _list_outputs(self): outputs = self.output_spec().get() outputs["converted_files"] = self.output_files outputs["bvecs"] = self.bvecs outputs["bvals"] = self.bvals outputs["bids"] = self.bids return outputs # 
https://stackoverflow.com/a/4829130 def search_files(prefix, outtypes): return it.chain.from_iterable(iglob(prefix + outtype) for outtype in outtypes) nipype-1.7.0/nipype/interfaces/dcmstack.py000066400000000000000000000334221413403311400206130ustar00rootroot00000000000000# -*- coding: utf-8 -*- """dcmstack allows series of DICOM images to be stacked into multi-dimensional arrays.""" import os from os import path as op import string import errno from glob import glob import nibabel as nb import imghdr from .base import ( TraitedSpec, DynamicTraitedSpec, InputMultiPath, File, Directory, traits, BaseInterface, isdefined, Undefined, ) have_dcmstack = True try: import dicom import dcmstack from dcmstack.dcmmeta import NiftiWrapper except ImportError: have_dcmstack = False def sanitize_path_comp(path_comp): result = [] for char in path_comp: if char not in string.letters + string.digits + "-_.": result.append("_") else: result.append(char) return "".join(result) class NiftiGeneratorBaseInputSpec(TraitedSpec): out_format = traits.Str( desc="String which can be formatted with " "meta data to create the output filename(s)" ) out_ext = traits.Str(".nii.gz", usedefault=True, desc="Determines output file type") out_path = Directory(desc="output path, current working directory if not set") class NiftiGeneratorBase(BaseInterface): """Base class for interfaces that produce Nifti files, potentially with embedded meta data.""" def _get_out_path(self, meta, idx=None): """Return the output path for the gernerated Nifti.""" if self.inputs.out_format: out_fmt = self.inputs.out_format else: # If no out_format is specified, use a sane default that will work # with the provided meta data. 
out_fmt = [] if idx is not None: out_fmt.append("%03d" % idx) if "SeriesNumber" in meta: out_fmt.append("%(SeriesNumber)03d") if "ProtocolName" in meta: out_fmt.append("%(ProtocolName)s") elif "SeriesDescription" in meta: out_fmt.append("%(SeriesDescription)s") else: out_fmt.append("sequence") out_fmt = "-".join(out_fmt) out_fn = (out_fmt % meta) + self.inputs.out_ext out_fn = sanitize_path_comp(out_fn) out_path = os.getcwd() if isdefined(self.inputs.out_path): out_path = op.abspath(self.inputs.out_path) # now, mkdir -p $out_path try: os.makedirs(out_path) except OSError as exc: # Python >2.5 if exc.errno == errno.EEXIST and op.isdir(out_path): pass else: raise return op.join(out_path, out_fn) class DcmStackInputSpec(NiftiGeneratorBaseInputSpec): dicom_files = traits.Either( InputMultiPath(File(exists=True)), Directory(exists=True), traits.Str(), mandatory=True, ) embed_meta = traits.Bool(desc="Embed DICOM meta data into result") exclude_regexes = traits.List( desc="Meta data to exclude, suplementing " "any default exclude filters" ) include_regexes = traits.List( desc="Meta data to include, overriding any " "exclude filters" ) force_read = traits.Bool( True, usedefault=True, desc=("Force reading files without DICM marker") ) class DcmStackOutputSpec(TraitedSpec): out_file = File(exists=True) class DcmStack(NiftiGeneratorBase): """Create one Nifti file from a set of DICOM files. Can optionally embed meta data. 
Example ------- >>> from nipype.interfaces.dcmstack import DcmStack >>> stacker = DcmStack() >>> stacker.inputs.dicom_files = 'path/to/series/' >>> stacker.run() # doctest: +SKIP >>> result.outputs.out_file # doctest: +SKIP '/path/to/cwd/sequence.nii.gz' """ input_spec = DcmStackInputSpec output_spec = DcmStackOutputSpec def _get_filelist(self, trait_input): if isinstance(trait_input, (str, bytes)): if op.isdir(trait_input): return glob(op.join(trait_input, "*.dcm")) else: return glob(trait_input) return trait_input def _run_interface(self, runtime): src_paths = self._get_filelist(self.inputs.dicom_files) include_regexes = dcmstack.default_key_incl_res if isdefined(self.inputs.include_regexes): include_regexes += self.inputs.include_regexes exclude_regexes = dcmstack.default_key_excl_res if isdefined(self.inputs.exclude_regexes): exclude_regexes += self.inputs.exclude_regexes meta_filter = dcmstack.make_key_regex_filter(exclude_regexes, include_regexes) stack = dcmstack.DicomStack(meta_filter=meta_filter) for src_path in src_paths: if not imghdr.what(src_path) == "gif": src_dcm = dicom.read_file(src_path, force=self.inputs.force_read) stack.add_dcm(src_dcm) nii = stack.to_nifti(embed_meta=True) nw = NiftiWrapper(nii) self.out_path = self._get_out_path( nw.meta_ext.get_class_dict(("global", "const")) ) if not self.inputs.embed_meta: nw.remove_extension() nb.save(nii, self.out_path) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = self.out_path return outputs class GroupAndStackOutputSpec(TraitedSpec): out_list = traits.List(desc="List of output nifti files") class GroupAndStack(DcmStack): """Create (potentially) multiple Nifti files for a set of DICOM files.""" input_spec = DcmStackInputSpec output_spec = GroupAndStackOutputSpec def _run_interface(self, runtime): src_paths = self._get_filelist(self.inputs.dicom_files) stacks = dcmstack.parse_and_stack(src_paths) self.out_list = [] for key, stack in list(stacks.items()): 
nw = NiftiWrapper(stack.to_nifti(embed_meta=True)) const_meta = nw.meta_ext.get_class_dict(("global", "const")) out_path = self._get_out_path(const_meta) if not self.inputs.embed_meta: nw.remove_extension() nb.save(nw.nii_img, out_path) self.out_list.append(out_path) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["out_list"] = self.out_list return outputs class LookupMetaInputSpec(TraitedSpec): in_file = File(mandatory=True, exists=True, desc="The input Nifti file") meta_keys = traits.Either( traits.List(), traits.Dict(), mandatory=True, desc=( "List of meta data keys to lookup, or a " "dict where keys specify the meta data " "keys to lookup and the values specify " "the output names" ), ) class LookupMeta(BaseInterface): """Lookup meta data values from a Nifti with embedded meta data. Example ------- >>> from nipype.interfaces import dcmstack >>> lookup = dcmstack.LookupMeta() >>> lookup.inputs.in_file = 'functional.nii' >>> lookup.inputs.meta_keys = {'RepetitionTime' : 'TR', \ 'EchoTime' : 'TE'} >>> result = lookup.run() # doctest: +SKIP >>> result.outputs.TR # doctest: +SKIP 9500.0 >>> result.outputs.TE # doctest: +SKIP 95.0 """ input_spec = LookupMetaInputSpec output_spec = DynamicTraitedSpec def _make_name_map(self): if isinstance(self.inputs.meta_keys, list): self._meta_keys = {} for key in self.inputs.meta_keys: self._meta_keys[key] = key else: self._meta_keys = self.inputs.meta_keys def _outputs(self): self._make_name_map() outputs = super(LookupMeta, self)._outputs() undefined_traits = {} for out_name in list(self._meta_keys.values()): outputs.add_trait(out_name, traits.Any) undefined_traits[out_name] = Undefined outputs.trait_set(trait_change_notify=False, **undefined_traits) # Not sure why this is needed for out_name in list(self._meta_keys.values()): _ = getattr(outputs, out_name) return outputs def _run_interface(self, runtime): # If the 'meta_keys' input is a list, covert it to a dict self._make_name_map() nw = 
NiftiWrapper.from_filename(self.inputs.in_file) self.result = {} for meta_key, out_name in list(self._meta_keys.items()): self.result[out_name] = nw.meta_ext.get_values(meta_key) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs.update(self.result) return outputs class CopyMetaInputSpec(TraitedSpec): src_file = File(mandatory=True, exists=True) dest_file = File(mandatory=True, exists=True) include_classes = traits.List( desc="List of specific meta data " "classifications to include. If not " "specified include everything." ) exclude_classes = traits.List( desc="List of meta data " "classifications to exclude" ) class CopyMetaOutputSpec(TraitedSpec): dest_file = File(exists=True) class CopyMeta(BaseInterface): """Copy meta data from one Nifti file to another. Useful for preserving meta data after some processing steps.""" input_spec = CopyMetaInputSpec output_spec = CopyMetaOutputSpec def _run_interface(self, runtime): src_nii = nb.load(self.inputs.src_file) src = NiftiWrapper(src_nii, make_empty=True) dest_nii = nb.load(self.inputs.dest_file) dest = NiftiWrapper(dest_nii, make_empty=True) classes = src.meta_ext.get_valid_classes() if self.inputs.include_classes: classes = [cls for cls in classes if cls in self.inputs.include_classes] if self.inputs.exclude_classes: classes = [cls for cls in classes if cls not in self.inputs.exclude_classes] for cls in classes: src_dict = src.meta_ext.get_class_dict(cls) dest_dict = dest.meta_ext.get_class_dict(cls) dest_dict.update(src_dict) # Update the shape and slice dimension to reflect the meta extension # update. 
dest.meta_ext.slice_dim = src.meta_ext.slice_dim dest.meta_ext.shape = src.meta_ext.shape self.out_path = op.join(os.getcwd(), op.basename(self.inputs.dest_file)) dest.to_filename(self.out_path) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["dest_file"] = self.out_path return outputs class MergeNiftiInputSpec(NiftiGeneratorBaseInputSpec): in_files = traits.List(mandatory=True, desc="List of Nifti files to merge") sort_order = traits.Either( traits.Str(), traits.List(), desc="One or more meta data keys to " "sort files by.", ) merge_dim = traits.Int( desc="Dimension to merge along. If not " "specified, the last singular or " "non-existant dimension is used." ) class MergeNiftiOutputSpec(TraitedSpec): out_file = File(exists=True, desc="Merged Nifti file") def make_key_func(meta_keys, index=None): def key_func(src_nii): result = [src_nii.get_meta(key, index) for key in meta_keys] return result return key_func class MergeNifti(NiftiGeneratorBase): """Merge multiple Nifti files into one. 
Merges together meta data extensions as well.""" input_spec = MergeNiftiInputSpec output_spec = MergeNiftiOutputSpec def _run_interface(self, runtime): niis = [nb.load(fn) for fn in self.inputs.in_files] nws = [NiftiWrapper(nii, make_empty=True) for nii in niis] if self.inputs.sort_order: sort_order = self.inputs.sort_order if isinstance(sort_order, (str, bytes)): sort_order = [sort_order] nws.sort(key=make_key_func(sort_order)) if self.inputs.merge_dim == traits.Undefined: merge_dim = None else: merge_dim = self.inputs.merge_dim merged = NiftiWrapper.from_sequence(nws, merge_dim) const_meta = merged.meta_ext.get_class_dict(("global", "const")) self.out_path = self._get_out_path(const_meta) nb.save(merged.nii_img, self.out_path) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = self.out_path return outputs class SplitNiftiInputSpec(NiftiGeneratorBaseInputSpec): in_file = File(exists=True, mandatory=True, desc="Nifti file to split") split_dim = traits.Int( desc="Dimension to split along. If not " "specified, the last dimension is used." ) class SplitNiftiOutputSpec(TraitedSpec): out_list = traits.List(File(exists=True), desc="Split Nifti files") class SplitNifti(NiftiGeneratorBase): """ Split one Nifti file into many along the specified dimension. Each result has an updated meta data extension as well. 
""" input_spec = SplitNiftiInputSpec output_spec = SplitNiftiOutputSpec def _run_interface(self, runtime): self.out_list = [] nii = nb.load(self.inputs.in_file) nw = NiftiWrapper(nii, make_empty=True) split_dim = None if self.inputs.split_dim == traits.Undefined: split_dim = None else: split_dim = self.inputs.split_dim for split_idx, split_nw in enumerate(nw.split(split_dim)): const_meta = split_nw.meta_ext.get_class_dict(("global", "const")) out_path = self._get_out_path(const_meta, idx=split_idx) nb.save(split_nw.nii_img, out_path) self.out_list.append(out_path) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["out_list"] = self.out_list return outputs nipype-1.7.0/nipype/interfaces/diffusion_toolkit/000077500000000000000000000000001413403311400221775ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/diffusion_toolkit/__init__.py000066400000000000000000000004241413403311400243100ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Diffusion Toolkit performs data reconstruction and fiber tracking on diffusion MR images.""" from .base import Info from .postproc import SplineFilter, TrackMerge from .dti import DTIRecon, DTITracker from .odf import HARDIMat, ODFRecon, ODFTracker nipype-1.7.0/nipype/interfaces/diffusion_toolkit/base.py000066400000000000000000000024101413403311400234600ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The dtk module provides basic functions for interfacing with Diffusion Toolkit tools. Currently these tools are supported: * TODO Examples -------- See the docstrings for the individual classes for 'working' examples. """ import re from ..base import CommandLine __docformat__ = "restructuredtext" class Info(object): """Handle dtk output type and version information. 
Examples -------- >>> from nipype.interfaces.diffusion_toolkit import Info >>> Info.version() # doctest: +SKIP >>> Info.subjectsdir() # doctest: +SKIP """ @staticmethod def version(): """Check for dtk version on system Parameters ---------- None Returns ------- version : str Version number as string or None if FSL not found """ clout = CommandLine(command="dti_recon", terminal_output="allatonce").run() if clout.runtime.returncode != 0: return None dtirecon = clout.runtime.stdout result = re.search("dti_recon (.*)\n", dtirecon) version = result.group(0).split()[1] return version nipype-1.7.0/nipype/interfaces/diffusion_toolkit/dti.py000066400000000000000000000232451413403311400233370ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various commands provided by diffusion toolkit """ import os import re from ...utils.filemanip import fname_presuffix, split_filename, copyfile from ..base import ( TraitedSpec, File, traits, CommandLine, CommandLineInputSpec, isdefined, ) __docformat__ = "restructuredtext" class DTIReconInputSpec(CommandLineInputSpec): DWI = File( desc="Input diffusion volume", argstr="%s", exists=True, mandatory=True, position=1, ) out_prefix = traits.Str( "dti", desc="Output file prefix", argstr="%s", usedefault=True, position=2 ) output_type = traits.Enum( "nii", "analyze", "ni1", "nii.gz", argstr="-ot %s", desc="output file type", usedefault=True, ) bvecs = File(exists=True, desc="b vectors file", argstr="-gm %s", mandatory=True) bvals = File(exists=True, desc="b values file", mandatory=True) n_averages = traits.Int(desc="Number of averages", argstr="-nex %s") image_orientation_vectors = traits.List( traits.Float(), minlen=6, maxlen=6, desc="""\ Specify image orientation vectors. if just one argument given, will treat it as filename and read the orientation vectors from the file. 
If 6 arguments are given, will treat them as 6 float numbers and construct the 1st and 2nd vector and calculate the 3rd one automatically. This information will be used to determine image orientation, as well as to adjust gradient vectors with oblique angle when.""", argstr="-iop %f", ) oblique_correction = traits.Bool( desc="""\ When oblique angle(s) applied, some SIEMENS DTI protocols do not adjust gradient accordingly, thus it requires adjustment for correct diffusion tensor calculation""", argstr="-oc", ) b0_threshold = traits.Float( desc="""\ Program will use b0 image with the given threshold to mask out high background of fa/adc maps. by default it will calculate threshold automatically. but if it failed, you need to set it manually.""", argstr="-b0_th", ) class DTIReconOutputSpec(TraitedSpec): ADC = File(exists=True) B0 = File(exists=True) L1 = File(exists=True) L2 = File(exists=True) L3 = File(exists=True) exp = File(exists=True) FA = File(exists=True) FA_color = File(exists=True) tensor = File(exists=True) V1 = File(exists=True) V2 = File(exists=True) V3 = File(exists=True) class DTIRecon(CommandLine): """Use dti_recon to generate tensors and other maps""" input_spec = DTIReconInputSpec output_spec = DTIReconOutputSpec _cmd = "dti_recon" def _create_gradient_matrix(self, bvecs_file, bvals_file): _gradient_matrix_file = "gradient_matrix.txt" with open(bvals_file) as fbvals: bvals = [val for val in re.split(r"\s+", fbvals.readline().strip())] with open(bvecs_file) as fbvecs: bvecs_x = fbvecs.readline().split() bvecs_y = fbvecs.readline().split() bvecs_z = fbvecs.readline().split() with open(_gradient_matrix_file, "w") as gradient_matrix_f: for i in range(len(bvals)): gradient_matrix_f.write( "%s, %s, %s, %s\n" % (bvecs_x[i], bvecs_y[i], bvecs_z[i], bvals[i]) ) return _gradient_matrix_file def _format_arg(self, name, spec, value): if name == "bvecs": new_val = self._create_gradient_matrix(self.inputs.bvecs, self.inputs.bvals) return super(DTIRecon, 
self)._format_arg("bvecs", spec, new_val) return super(DTIRecon, self)._format_arg(name, spec, value) def _list_outputs(self): out_prefix = self.inputs.out_prefix output_type = self.inputs.output_type outputs = self.output_spec().get() outputs["ADC"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_adc." + output_type) ) outputs["B0"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_b0." + output_type) ) outputs["L1"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_e1." + output_type) ) outputs["L2"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_e2." + output_type) ) outputs["L3"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_e3." + output_type) ) outputs["exp"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_exp." + output_type) ) outputs["FA"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_fa." + output_type) ) outputs["FA_color"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_fa_color." + output_type) ) outputs["tensor"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_tensor." + output_type) ) outputs["V1"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_v1." + output_type) ) outputs["V2"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_v2." + output_type) ) outputs["V3"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_v3." + output_type) ) return outputs class DTITrackerInputSpec(CommandLineInputSpec): tensor_file = File(exists=True, desc="reconstructed tensor file") input_type = traits.Enum( "nii", "analyze", "ni1", "nii.gz", desc="""\ Input and output file type. 
Accepted values are: * analyze -> analyze format 7.5 * ni1 -> nifti format saved in seperate .hdr and .img file * nii -> nifti format with one .nii file * nii.gz -> nifti format with compression Default type is 'nii' """, argstr="-it %s", ) tracking_method = traits.Enum( "fact", "rk2", "tl", "sl", desc="""\ Tracking algorithm. * fact -> use FACT method for tracking. This is the default method. * rk2 -> use 2nd order Runge-Kutta method for tracking. * tl -> use tensorline method for tracking. * sl -> use interpolated streamline method with fixed step-length """, argstr="-%s", ) step_length = traits.Float( desc="""\ Step length, in the unit of minimum voxel size. default value is 0.5 for interpolated streamline method and 0.1 for other methods""", argstr="-l %f", ) angle_threshold = traits.Float( desc="set angle threshold. default value is 35 degree", argstr="-at %f" ) angle_threshold_weight = traits.Float( desc="set angle threshold weighting factor. weighting will be be applied " "on top of the angle_threshold", argstr="-atw %f", ) random_seed = traits.Int( desc="use random location in a voxel instead of the center of the voxel " "to seed. can also define number of seed per voxel. 
default is 1", argstr="-rseed %d", ) invert_x = traits.Bool(desc="invert x component of the vector", argstr="-ix") invert_y = traits.Bool(desc="invert y component of the vector", argstr="-iy") invert_z = traits.Bool(desc="invert z component of the vector", argstr="-iz") swap_xy = traits.Bool(desc="swap x & y vectors while tracking", argstr="-sxy") swap_yz = traits.Bool(desc="swap y & z vectors while tracking", argstr="-syz") swap_zx = traits.Bool(desc="swap x & z vectors while tracking", argstr="-szx") mask1_file = File( desc="first mask image", mandatory=True, argstr="-m %s", position=2 ) mask1_threshold = traits.Float( desc="threshold value for the first mask image, if not given, the program will " "try automatically find the threshold", position=3, ) mask2_file = File(desc="second mask image", argstr="-m2 %s", position=4) mask2_threshold = traits.Float( desc="threshold value for the second mask image, if not given, the program will " "try automatically find the threshold", position=5, ) input_data_prefix = traits.Str( "dti", desc="for internal naming use only", position=0, argstr="%s", usedefault=True, ) output_file = File( "tracks.trk", "file containing tracks", argstr="%s", position=1, usedefault=True ) output_mask = File( desc="output a binary mask file in analyze format", argstr="-om %s" ) primary_vector = traits.Enum( "v2", "v3", desc="which vector to use for fibre tracking: v2 or v3. 
If not set use v1", argstr="-%s", ) class DTITrackerOutputSpec(TraitedSpec): track_file = File(exists=True) mask_file = File(exists=True) class DTITracker(CommandLine): input_spec = DTITrackerInputSpec output_spec = DTITrackerOutputSpec _cmd = "dti_tracker" def _run_interface(self, runtime): _, _, ext = split_filename(self.inputs.tensor_file) copyfile( self.inputs.tensor_file, os.path.abspath(self.inputs.input_data_prefix + "_tensor" + ext), copy=False, ) return super(DTITracker, self)._run_interface(runtime) def _list_outputs(self): outputs = self.output_spec().get() outputs["track_file"] = os.path.abspath(self.inputs.output_file) if isdefined(self.inputs.output_mask) and self.inputs.output_mask: outputs["mask_file"] = os.path.abspath(self.inputs.output_mask) return outputs nipype-1.7.0/nipype/interfaces/diffusion_toolkit/odf.py000066400000000000000000000327361413403311400233340ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various commands provided by diffusion toolkit """ import os import re from ...utils.filemanip import fname_presuffix, split_filename, copyfile from ..base import ( TraitedSpec, File, traits, CommandLine, CommandLineInputSpec, isdefined, ) __docformat__ = "restructuredtext" class HARDIMatInputSpec(CommandLineInputSpec): bvecs = File( exists=True, desc="b vectors file", argstr="%s", position=1, mandatory=True ) bvals = File(exists=True, desc="b values file", mandatory=True) out_file = File( "recon_mat.dat", desc="output matrix file", argstr="%s", usedefault=True, position=2, ) order = traits.Int( argstr="-order %s", desc="""maximum order of spherical harmonics. must be even number. default is 4""", ) odf_file = File( exists=True, argstr="-odf %s", desc="""\ Filename that contains the reconstruction points on a HEMI-sphere. 
Use the pre-set 181 points by default""", ) reference_file = File( exists=True, argstr="-ref %s", desc="""\ Provide a dicom or nifti image as the reference for the program to figure out the image orientation information. if no such info was found in the given image header, the next 5 options -info, etc., will be used if provided. if image orientation info can be found in the given reference, all other 5 image orientation options will be IGNORED""", ) image_info = File( exists=True, argstr="-info %s", desc="""\ specify image information file. the image info file is generated from original dicom image by diff_unpack program and contains image orientation and other information needed for reconstruction and tracking. by default will look into the image folder for .info file""", ) image_orientation_vectors = traits.List( traits.Float(), minlen=6, maxlen=6, desc="""\ specify image orientation vectors. if just one argument given, will treat it as filename and read the orientation vectors from the file. if 6 arguments are given, will treat them as 6 float numbers and construct the 1st and 2nd vector and calculate the 3rd one automatically. 
this information will be used to determine image orientation, as well as to adjust gradient vectors with oblique angle when""", argstr="-iop %f", ) oblique_correction = traits.Bool( desc="""\ when oblique angle(s) applied, some SIEMENS dti protocols do not adjust gradient accordingly, thus it requires adjustment for correct diffusion tensor calculation""", argstr="-oc", ) class HARDIMatOutputSpec(TraitedSpec): out_file = File(exists=True, desc="output matrix file") class HARDIMat(CommandLine): """Use hardi_mat to calculate a reconstruction matrix from a gradient table""" input_spec = HARDIMatInputSpec output_spec = HARDIMatOutputSpec _cmd = "hardi_mat" def _create_gradient_matrix(self, bvecs_file, bvals_file): _gradient_matrix_file = "gradient_matrix.txt" bvals = [val for val in re.split(r"\s+", open(bvals_file).readline().strip())] bvecs_f = open(bvecs_file) bvecs_x = [val for val in re.split(r"\s+", bvecs_f.readline().strip())] bvecs_y = [val for val in re.split(r"\s+", bvecs_f.readline().strip())] bvecs_z = [val for val in re.split(r"\s+", bvecs_f.readline().strip())] bvecs_f.close() gradient_matrix_f = open(_gradient_matrix_file, "w") for i in range(len(bvals)): if int(bvals[i]) == 0: continue gradient_matrix_f.write("%s %s %s\n" % (bvecs_x[i], bvecs_y[i], bvecs_z[i])) gradient_matrix_f.close() return _gradient_matrix_file def _format_arg(self, name, spec, value): if name == "bvecs": new_val = self._create_gradient_matrix(self.inputs.bvecs, self.inputs.bvals) return super(HARDIMat, self)._format_arg("bvecs", spec, new_val) return super(HARDIMat, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class ODFReconInputSpec(CommandLineInputSpec): DWI = File( desc="Input raw data", argstr="%s", exists=True, mandatory=True, position=1 ) n_directions = traits.Int( desc="Number of directions", argstr="%s", mandatory=True, position=2 ) 
n_output_directions = traits.Int( desc="Number of output directions", argstr="%s", mandatory=True, position=3 ) out_prefix = traits.Str( "odf", desc="Output file prefix", argstr="%s", usedefault=True, position=4 ) matrix = File( argstr="-mat %s", exists=True, desc="""use given file as reconstruction matrix.""", mandatory=True, ) n_b0 = traits.Int( argstr="-b0 %s", desc="""\ number of b0 scans. by default the program gets this information from the number of directions and number of volumes in the raw data. useful when dealing with incomplete raw data set or only using part of raw data set to reconstruct""", mandatory=True, ) output_type = traits.Enum( "nii", "analyze", "ni1", "nii.gz", argstr="-ot %s", desc="output file type", usedefault=True, ) sharpness = traits.Float( desc="""\ smooth or sharpen the raw data. factor > 0 is smoothing. factor < 0 is sharpening. default value is 0 NOTE: this option applies to DSI study only""", argstr="-s %f", ) filter = traits.Bool( desc="""apply a filter (e.g. high pass) to the raw image""", argstr="-f" ) subtract_background = traits.Bool( desc="""subtract the background value before reconstruction""", argstr="-bg" ) dsi = traits.Bool(desc="""indicates that the data is dsi""", argstr="-dsi") output_entropy = traits.Bool(desc="""output entropy map""", argstr="-oe") image_orientation_vectors = traits.List( traits.Float(), minlen=6, maxlen=6, desc="""\ specify image orientation vectors. if just one argument given, will treat it as filename and read the orientation vectors from the file. if 6 arguments are given, will treat them as 6 float numbers and construct the 1st and 2nd vector and calculate the 3rd one automatically. 
this information will be used to determine image orientation, as well as to adjust gradient vectors with oblique angle when""", argstr="-iop %f", ) oblique_correction = traits.Bool( desc="""\ when oblique angle(s) applied, some SIEMENS dti protocols do not adjust gradient accordingly, thus it requires adjustment for correct diffusion tensor calculation""", argstr="-oc", ) class ODFReconOutputSpec(TraitedSpec): B0 = File(exists=True) DWI = File(exists=True) max = File(exists=True) ODF = File(exists=True) entropy = File() class ODFRecon(CommandLine): """Use odf_recon to generate tensors and other maps""" input_spec = ODFReconInputSpec output_spec = ODFReconOutputSpec _cmd = "odf_recon" def _list_outputs(self): out_prefix = self.inputs.out_prefix output_type = self.inputs.output_type outputs = self.output_spec().get() outputs["B0"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_b0." + output_type) ) outputs["DWI"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_dwi." + output_type) ) outputs["max"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_max." + output_type) ) outputs["ODF"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_odf." + output_type) ) if isdefined(self.inputs.output_entropy): outputs["entropy"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_entropy." + output_type) ) return outputs class ODFTrackerInputSpec(CommandLineInputSpec): max = File(exists=True, mandatory=True) ODF = File(exists=True, mandatory=True) input_data_prefix = traits.Str( "odf", desc="recon data prefix", argstr="%s", usedefault=True, position=0 ) out_file = File( "tracks.trk", desc="output track file", argstr="%s", usedefault=True, position=1 ) input_output_type = traits.Enum( "nii", "analyze", "ni1", "nii.gz", argstr="-it %s", desc="input and output file type", usedefault=True, ) runge_kutta2 = traits.Bool( argstr="-rk2", desc="""\ use 2nd order Runge-Kutta method for tracking. 
default tracking method is non-interpolate streamline""", ) step_length = traits.Float( argstr="-l %f", desc="""\ set step length, in the unit of minimum voxel size. default value is 0.1.""", ) angle_threshold = traits.Float( argstr="-at %f", desc="""\ set angle threshold. default value is 35 degree for default tracking method and 25 for rk2""", ) random_seed = traits.Int( argstr="-rseed %s", desc="""\ use random location in a voxel instead of the center of the voxel to seed. can also define number of seed per voxel. default is 1""", ) invert_x = traits.Bool(argstr="-ix", desc="invert x component of the vector") invert_y = traits.Bool(argstr="-iy", desc="invert y component of the vector") invert_z = traits.Bool(argstr="-iz", desc="invert z component of the vector") swap_xy = traits.Bool(argstr="-sxy", desc="swap x and y vectors while tracking") swap_yz = traits.Bool(argstr="-syz", desc="swap y and z vectors while tracking") swap_zx = traits.Bool(argstr="-szx", desc="swap x and z vectors while tracking") disc = traits.Bool(argstr="-disc", desc="use disc tracking") mask1_file = File( desc="first mask image", mandatory=True, argstr="-m %s", position=2 ) mask1_threshold = traits.Float( desc="threshold value for the first mask image, if not given, the program will " "try automatically find the threshold", position=3, ) mask2_file = File(desc="second mask image", argstr="-m2 %s", position=4) mask2_threshold = traits.Float( desc="threshold value for the second mask image, if not given, the program will " "try automatically find the threshold", position=5, ) limit = traits.Int( argstr="-limit %d", desc="""\ in some special case, such as heart data, some track may go into infinite circle and take long time to stop. this option allows setting a limit for the longest tracking steps (voxels)""", ) dsi = traits.Bool( argstr="-dsi", desc="""\ specify the input odf data is dsi. 
because dsi recon uses fixed pre-calculated matrix, some special orientation patch needs to be applied to keep dti/dsi/q-ball consistent.""", ) image_orientation_vectors = traits.List( traits.Float(), minlen=6, maxlen=6, desc="""\ specify image orientation vectors. if just one argument given, will treat it as filename and read the orientation vectors from the file. if 6 arguments are given, will treat them as 6 float numbers and construct the 1st and 2nd vector and calculate the 3rd one automatically. this information will be used to determine image orientation, as well as to adjust gradient vectors with oblique angle when""", argstr="-iop %f", ) slice_order = traits.Int( argstr="-sorder %d", desc="set the slice order. 1 means normal, -1 means reversed. default value is 1", ) voxel_order = traits.Enum( "RAS", "RPS", "RAI", "RPI", "LAI", "LAS", "LPS", "LPI", argstr="-vorder %s", desc="""\ specify the voxel order in RL/AP/IS (human brain) reference. must be 3 letters with no space in between. for example, RAS means the voxel row is from L->R, the column is from P->A and the slice order is from I->S. by default voxel order is determined by the image orientation (but NOT guaranteed to be correct because of various standards). for example, siemens axial image is LPS, coronal image is LIP and sagittal image is PIL. 
this information also is NOT needed for tracking but will be saved in the track file and is essential for track display to map onto the right coordinates""", ) class ODFTrackerOutputSpec(TraitedSpec): track_file = File(exists=True, desc="output track file") class ODFTracker(CommandLine): """Use odf_tracker to generate track file""" input_spec = ODFTrackerInputSpec output_spec = ODFTrackerOutputSpec _cmd = "odf_tracker" def _run_interface(self, runtime): _, _, ext = split_filename(self.inputs.max) copyfile( self.inputs.max, os.path.abspath(self.inputs.input_data_prefix + "_max" + ext), copy=False, ) _, _, ext = split_filename(self.inputs.ODF) copyfile( self.inputs.ODF, os.path.abspath(self.inputs.input_data_prefix + "_odf" + ext), copy=False, ) return super(ODFTracker, self)._run_interface(runtime) def _list_outputs(self): outputs = self.output_spec().get() outputs["track_file"] = os.path.abspath(self.inputs.out_file) return outputs nipype-1.7.0/nipype/interfaces/diffusion_toolkit/postproc.py000066400000000000000000000064721413403311400244330ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various commands provided by diffusion toolkit """ import os from ..base import ( TraitedSpec, File, traits, CommandLine, InputMultiPath, CommandLineInputSpec, ) __docformat__ = "restructuredtext" class SplineFilterInputSpec(CommandLineInputSpec): track_file = File( exists=True, desc="file containing tracks to be filtered", position=0, argstr="%s", mandatory=True, ) step_length = traits.Float( desc="in the unit of minimum voxel size", position=1, argstr="%f", mandatory=True, ) output_file = File( "spline_tracks.trk", desc="target file for smoothed tracks", position=2, argstr="%s", usedefault=True, ) class SplineFilterOutputSpec(TraitedSpec): smoothed_track_file = File(exists=True) class SplineFilter(CommandLine): """ Smoothes TrackVis 
track files with a B-Spline filter. Helps remove redundant track points and segments (thus reducing the size of the track file) and also make tracks nicely smoothed. It will NOT change the quality of the tracks or lose any original information. Example ------- >>> import nipype.interfaces.diffusion_toolkit as dtk >>> filt = dtk.SplineFilter() >>> filt.inputs.track_file = 'tracks.trk' >>> filt.inputs.step_length = 0.5 >>> filt.run() # doctest: +SKIP """ input_spec = SplineFilterInputSpec output_spec = SplineFilterOutputSpec _cmd = "spline_filter" def _list_outputs(self): outputs = self.output_spec().get() outputs["smoothed_track_file"] = os.path.abspath(self.inputs.output_file) return outputs class TrackMergeInputSpec(CommandLineInputSpec): track_files = InputMultiPath( File(exists=True), desc="file containing tracks to be filtered", position=0, argstr="%s...", mandatory=True, ) output_file = File( "merged_tracks.trk", desc="target file for merged tracks", position=-1, argstr="%s", usedefault=True, ) class TrackMergeOutputSpec(TraitedSpec): track_file = File(exists=True) class TrackMerge(CommandLine): """ Merges several TrackVis track files into a single track file. An id type property tag is added to each track in the newly merged file, with each unique id representing where the track was originally from. When the merged file is loaded in TrackVis, a property filter will show up in Track Property panel. Users can adjust that to distinguish and sub-group tracks by its id (origin). 
Example ------- >>> import nipype.interfaces.diffusion_toolkit as dtk >>> mrg = dtk.TrackMerge() >>> mrg.inputs.track_files = ['track1.trk','track2.trk'] >>> mrg.run() # doctest: +SKIP """ input_spec = TrackMergeInputSpec output_spec = TrackMergeOutputSpec _cmd = "track_merge" def _list_outputs(self): outputs = self.output_spec().get() outputs["track_file"] = os.path.abspath(self.inputs.output_file) return outputs nipype-1.7.0/nipype/interfaces/diffusion_toolkit/tests/000077500000000000000000000000001413403311400233415ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/diffusion_toolkit/tests/__init__.py000066400000000000000000000000301413403311400254430ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTIRecon.py000066400000000000000000000043601413403311400274140ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import DTIRecon def test_DTIRecon_inputs(): input_map = dict( DWI=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), args=dict( argstr="%s", ), b0_threshold=dict( argstr="-b0_th", ), bvals=dict( extensions=None, mandatory=True, ), bvecs=dict( argstr="-gm %s", extensions=None, mandatory=True, ), environ=dict( nohash=True, usedefault=True, ), image_orientation_vectors=dict( argstr="-iop %f", ), n_averages=dict( argstr="-nex %s", ), oblique_correction=dict( argstr="-oc", ), out_prefix=dict( argstr="%s", position=2, usedefault=True, ), output_type=dict( argstr="-ot %s", usedefault=True, ), ) inputs = DTIRecon.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DTIRecon_outputs(): output_map = dict( ADC=dict( extensions=None, ), B0=dict( extensions=None, ), FA=dict( extensions=None, ), FA_color=dict( extensions=None, ), L1=dict( extensions=None, ), L2=dict( extensions=None, ), L3=dict( extensions=None, ), V1=dict( 
extensions=None, ), V2=dict( extensions=None, ), V3=dict( extensions=None, ), exp=dict( extensions=None, ), tensor=dict( extensions=None, ), ) outputs = DTIRecon.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/diffusion_toolkit/tests/test_auto_DTITracker.py000066400000000000000000000047751413403311400277530ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import DTITracker def test_DTITracker_inputs(): input_map = dict( angle_threshold=dict( argstr="-at %f", ), angle_threshold_weight=dict( argstr="-atw %f", ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), input_data_prefix=dict( argstr="%s", position=0, usedefault=True, ), input_type=dict( argstr="-it %s", ), invert_x=dict( argstr="-ix", ), invert_y=dict( argstr="-iy", ), invert_z=dict( argstr="-iz", ), mask1_file=dict( argstr="-m %s", extensions=None, mandatory=True, position=2, ), mask1_threshold=dict( position=3, ), mask2_file=dict( argstr="-m2 %s", extensions=None, position=4, ), mask2_threshold=dict( position=5, ), output_file=dict( argstr="%s", extensions=None, position=1, usedefault=True, ), output_mask=dict( argstr="-om %s", extensions=None, ), primary_vector=dict( argstr="-%s", ), random_seed=dict( argstr="-rseed %d", ), step_length=dict( argstr="-l %f", ), swap_xy=dict( argstr="-sxy", ), swap_yz=dict( argstr="-syz", ), swap_zx=dict( argstr="-szx", ), tensor_file=dict( extensions=None, ), tracking_method=dict( argstr="-%s", ), ) inputs = DTITracker.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DTITracker_outputs(): output_map = dict( mask_file=dict( extensions=None, ), track_file=dict( extensions=None, ), ) outputs = DTITracker.output_spec() for key, metadata in 
list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/diffusion_toolkit/tests/test_auto_HARDIMat.py000066400000000000000000000032321413403311400272730ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..odf import HARDIMat def test_HARDIMat_inputs(): input_map = dict( args=dict( argstr="%s", ), bvals=dict( extensions=None, mandatory=True, ), bvecs=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), environ=dict( nohash=True, usedefault=True, ), image_info=dict( argstr="-info %s", extensions=None, ), image_orientation_vectors=dict( argstr="-iop %f", ), oblique_correction=dict( argstr="-oc", ), odf_file=dict( argstr="-odf %s", extensions=None, ), order=dict( argstr="-order %s", ), out_file=dict( argstr="%s", extensions=None, position=2, usedefault=True, ), reference_file=dict( argstr="-ref %s", extensions=None, ), ) inputs = HARDIMat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_HARDIMat_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = HARDIMat.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFRecon.py000066400000000000000000000043771413403311400274140ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..odf import ODFRecon def test_ODFRecon_inputs(): input_map = dict( DWI=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), args=dict( argstr="%s", ), dsi=dict( argstr="-dsi", ), environ=dict( nohash=True, usedefault=True, ), filter=dict( argstr="-f", ), image_orientation_vectors=dict( argstr="-iop %f", ), matrix=dict( argstr="-mat %s", 
extensions=None, mandatory=True, ), n_b0=dict( argstr="-b0 %s", mandatory=True, ), n_directions=dict( argstr="%s", mandatory=True, position=2, ), n_output_directions=dict( argstr="%s", mandatory=True, position=3, ), oblique_correction=dict( argstr="-oc", ), out_prefix=dict( argstr="%s", position=4, usedefault=True, ), output_entropy=dict( argstr="-oe", ), output_type=dict( argstr="-ot %s", usedefault=True, ), sharpness=dict( argstr="-s %f", ), subtract_background=dict( argstr="-bg", ), ) inputs = ODFRecon.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ODFRecon_outputs(): output_map = dict( B0=dict( extensions=None, ), DWI=dict( extensions=None, ), ODF=dict( extensions=None, ), entropy=dict( extensions=None, ), max=dict( extensions=None, ), ) outputs = ODFRecon.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/diffusion_toolkit/tests/test_auto_ODFTracker.py000066400000000000000000000053451413403311400277350ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..odf import ODFTracker def test_ODFTracker_inputs(): input_map = dict( ODF=dict( extensions=None, mandatory=True, ), angle_threshold=dict( argstr="-at %f", ), args=dict( argstr="%s", ), disc=dict( argstr="-disc", ), dsi=dict( argstr="-dsi", ), environ=dict( nohash=True, usedefault=True, ), image_orientation_vectors=dict( argstr="-iop %f", ), input_data_prefix=dict( argstr="%s", position=0, usedefault=True, ), input_output_type=dict( argstr="-it %s", usedefault=True, ), invert_x=dict( argstr="-ix", ), invert_y=dict( argstr="-iy", ), invert_z=dict( argstr="-iz", ), limit=dict( argstr="-limit %d", ), mask1_file=dict( argstr="-m %s", extensions=None, mandatory=True, position=2, ), mask1_threshold=dict( position=3, ), 
mask2_file=dict( argstr="-m2 %s", extensions=None, position=4, ), mask2_threshold=dict( position=5, ), max=dict( extensions=None, mandatory=True, ), out_file=dict( argstr="%s", extensions=None, position=1, usedefault=True, ), random_seed=dict( argstr="-rseed %s", ), runge_kutta2=dict( argstr="-rk2", ), slice_order=dict( argstr="-sorder %d", ), step_length=dict( argstr="-l %f", ), swap_xy=dict( argstr="-sxy", ), swap_yz=dict( argstr="-syz", ), swap_zx=dict( argstr="-szx", ), voxel_order=dict( argstr="-vorder %s", ), ) inputs = ODFTracker.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ODFTracker_outputs(): output_map = dict( track_file=dict( extensions=None, ), ) outputs = ODFTracker.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/diffusion_toolkit/tests/test_auto_SplineFilter.py000066400000000000000000000023521413403311400304040ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..postproc import SplineFilter def test_SplineFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), output_file=dict( argstr="%s", extensions=None, position=2, usedefault=True, ), step_length=dict( argstr="%f", mandatory=True, position=1, ), track_file=dict( argstr="%s", extensions=None, mandatory=True, position=0, ), ) inputs = SplineFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SplineFilter_outputs(): output_map = dict( smoothed_track_file=dict( extensions=None, ), ) outputs = SplineFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/diffusion_toolkit/tests/test_auto_TrackMerge.py000066400000000000000000000021151413403311400300250ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..postproc import TrackMerge def test_TrackMerge_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), output_file=dict( argstr="%s", extensions=None, position=-1, usedefault=True, ), track_files=dict( argstr="%s...", mandatory=True, position=0, ), ) inputs = TrackMerge.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TrackMerge_outputs(): output_map = dict( track_file=dict( extensions=None, ), ) outputs = TrackMerge.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dipy/000077500000000000000000000000001413403311400174115ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/dipy/__init__.py000066400000000000000000000005651413403311400215300ustar00rootroot00000000000000# -*- coding: utf-8 -*- """DIPY is a computational neuroimaging tool for diffusion MRI.""" from .tracks import StreamlineTractography, TrackDensityMap from .tensors import TensorMode, DTI from .preprocess import Resample, Denoise from .reconstruction import RESTORE, EstimateResponseSH, CSD from .simulate import SimulateMultiTensor from .anisotropic_power import APMQball nipype-1.7.0/nipype/interfaces/dipy/anisotropic_power.py000066400000000000000000000041031413403311400235270ustar00rootroot00000000000000# -*- coding: utf-8 -*- import numpy as np import nibabel as nb from ... 
import logging from ..base import TraitedSpec, File, isdefined from .base import DipyDiffusionInterface, DipyBaseInterfaceInputSpec IFLOGGER = logging.getLogger("nipype.interface") class APMQballInputSpec(DipyBaseInterfaceInputSpec): mask_file = File(exists=True, desc="An optional brain mask") class APMQballOutputSpec(TraitedSpec): out_file = File(exists=True) class APMQball(DipyDiffusionInterface): """ Calculates the anisotropic power map Example ------- >>> import nipype.interfaces.dipy as dipy >>> apm = dipy.APMQball() >>> apm.inputs.in_file = 'diffusion.nii' >>> apm.inputs.in_bvec = 'bvecs' >>> apm.inputs.in_bval = 'bvals' >>> apm.run() # doctest: +SKIP """ input_spec = APMQballInputSpec output_spec = APMQballOutputSpec def _run_interface(self, runtime): from dipy.reconst import shm from dipy.data import get_sphere from dipy.reconst.peaks import peaks_from_model gtab = self._get_gradient_table() img = nb.load(self.inputs.in_file) data = np.asanyarray(img.dataobj) affine = img.affine mask = None if isdefined(self.inputs.mask_file): mask = np.asanyarray(nb.load(self.inputs.mask_file).dataobj) # Fit it model = shm.QballModel(gtab, 8) sphere = get_sphere("symmetric724") peaks = peaks_from_model( model=model, data=data, relative_peak_threshold=0.5, min_separation_angle=25, sphere=sphere, mask=mask, ) apm = shm.anisotropic_power(peaks.shm_coeff) out_file = self._gen_filename("apm") nb.Nifti1Image(apm.astype("float32"), affine).to_filename(out_file) IFLOGGER.info("APM qball image saved as %s", out_file) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = self._gen_filename("apm") return outputs nipype-1.7.0/nipype/interfaces/dipy/base.py000066400000000000000000000167521413403311400207100ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ Base interfaces for dipy """ import os.path as op import inspect import numpy as np from ... 
import logging from ..base import ( traits, File, isdefined, LibraryBaseInterface, BaseInterfaceInputSpec, TraitedSpec, ) # List of workflows to ignore SKIP_WORKFLOWS_LIST = ["Workflow", "CombinedWorkflow"] HAVE_DIPY = True try: import dipy from dipy.workflows.base import IntrospectiveArgumentParser except ImportError: HAVE_DIPY = False def no_dipy(): """Check if dipy is available""" global HAVE_DIPY return not HAVE_DIPY def dipy_version(): """Check dipy version""" if no_dipy(): return None return dipy.__version__ class DipyBaseInterface(LibraryBaseInterface): """ A base interface for py:mod:`dipy` computations """ _pkg = "dipy" class DipyBaseInterfaceInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc=("input diffusion data")) in_bval = File(exists=True, mandatory=True, desc=("input b-values table")) in_bvec = File(exists=True, mandatory=True, desc=("input b-vectors table")) b0_thres = traits.Int(700, usedefault=True, desc=("b0 threshold")) out_prefix = traits.Str(desc=("output prefix for file names")) class DipyDiffusionInterface(DipyBaseInterface): """ A base interface for py:mod:`dipy` computations """ input_spec = DipyBaseInterfaceInputSpec def _get_gradient_table(self): bval = np.loadtxt(self.inputs.in_bval) bvec = np.loadtxt(self.inputs.in_bvec).T from dipy.core.gradients import gradient_table gtab = gradient_table(bval, bvec) gtab.b0_threshold = self.inputs.b0_thres return gtab def _gen_filename(self, name, ext=None): fname, fext = op.splitext(op.basename(self.inputs.in_file)) if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext if not isdefined(self.inputs.out_prefix): out_prefix = op.abspath(fname) else: out_prefix = self.inputs.out_prefix if ext is None: ext = fext return out_prefix + "_" + name + ext def convert_to_traits_type(dipy_type, is_file=False): """Convert DIPY type to Traits type.""" dipy_type = dipy_type.lower() is_mandatory = bool("optional" not in dipy_type) if "variable" in dipy_type and 
"string" in dipy_type: return traits.ListStr, is_mandatory elif "variable" in dipy_type and "int" in dipy_type: return traits.ListInt, is_mandatory elif "variable" in dipy_type and "float" in dipy_type: return traits.ListFloat, is_mandatory elif "variable" in dipy_type and "bool" in dipy_type: return traits.ListBool, is_mandatory elif "variable" in dipy_type and "complex" in dipy_type: return traits.ListComplex, is_mandatory elif "string" in dipy_type and not is_file: return traits.Str, is_mandatory elif "string" in dipy_type and is_file: return File, is_mandatory elif "int" in dipy_type: return traits.Int, is_mandatory elif "float" in dipy_type: return traits.Float, is_mandatory elif "bool" in dipy_type: return traits.Bool, is_mandatory elif "complex" in dipy_type: return traits.Complex, is_mandatory else: msg = ( "Error during convert_to_traits_type({0}).".format(dipy_type) + "Unknown DIPY type." ) raise IOError(msg) def create_interface_specs(class_name, params=None, BaseClass=TraitedSpec): """Create IN/Out interface specifications dynamically. 
Parameters ---------- class_name: str The future class name(e.g, (MyClassInSpec)) params: list of tuple dipy argument list BaseClass: TraitedSpec object parent class Returns ------- newclass: object new nipype interface specification class """ attr = {} if params is not None: for p in params: name, dipy_type, desc = p[0], p[1], p[2] is_file = bool("files" in name or "out_" in name) traits_type, is_mandatory = convert_to_traits_type(dipy_type, is_file) # print(name, dipy_type, desc, is_file, traits_type, is_mandatory) if BaseClass.__name__ == BaseInterfaceInputSpec.__name__: if len(p) > 3: attr[name] = traits_type( p[3], desc=desc[-1], usedefault=True, mandatory=is_mandatory ) else: attr[name] = traits_type(desc=desc[-1], mandatory=is_mandatory) else: attr[name] = traits_type( p[3], desc=desc[-1], exists=True, usedefault=True ) newclass = type(str(class_name), (BaseClass,), attr) return newclass def dipy_to_nipype_interface(cls_name, dipy_flow, BaseClass=DipyBaseInterface): """Construct a class in order to respect nipype interface specifications. This convenient class factory convert a DIPY Workflow to a nipype interface. Parameters ---------- cls_name: string new class name dipy_flow: Workflow class type. 
It should be any children class of `dipy.workflows.workflow.Worflow` BaseClass: object nipype instance object Returns ------- newclass: object new nipype interface specification class """ parser = IntrospectiveArgumentParser() flow = dipy_flow() parser.add_workflow(flow) default_values = inspect.getfullargspec(flow.run).defaults optional_params = [ args + (val,) for args, val in zip(parser.optional_parameters, default_values) ] start = len(parser.optional_parameters) - len(parser.output_parameters) output_parameters = [ args + (val,) for args, val in zip(parser.output_parameters, default_values[start:]) ] input_parameters = parser.positional_parameters + optional_params input_spec = create_interface_specs( "{}InputSpec".format(cls_name), input_parameters, BaseClass=BaseInterfaceInputSpec, ) output_spec = create_interface_specs( "{}OutputSpec".format(cls_name), output_parameters, BaseClass=TraitedSpec ) def _run_interface(self, runtime): flow = dipy_flow() args = self.inputs.get() flow.run(**args) def _list_outputs(self): outputs = self._outputs().get() out_dir = outputs.get("out_dir", ".") for key, values in outputs.items(): outputs[key] = op.join(out_dir, values) return outputs newclass = type( str(cls_name), (BaseClass,), { "input_spec": input_spec, "output_spec": output_spec, "_run_interface": _run_interface, "_list_outputs:": _list_outputs, }, ) return newclass def get_dipy_workflows(module): """Search for DIPY workflow class. 
Parameters ---------- module : object module object Returns ------- l_wkflw : list of tuple This a list of tuple containing 2 elements: Worflow name, Workflow class obj Examples -------- >>> from dipy.workflows import align # doctest: +SKIP >>> get_dipy_workflows(align) # doctest: +SKIP """ return [ (m, obj) for m, obj in inspect.getmembers(module) if inspect.isclass(obj) and issubclass(obj, module.Workflow) and m not in SKIP_WORKFLOWS_LIST ] nipype-1.7.0/nipype/interfaces/dipy/preprocess.py000066400000000000000000000231121413403311400221470ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os.path as op import nibabel as nb import numpy as np from distutils.version import LooseVersion from ... import logging from ..base import traits, TraitedSpec, File, isdefined from .base import ( HAVE_DIPY, dipy_version, dipy_to_nipype_interface, get_dipy_workflows, DipyBaseInterface, ) IFLOGGER = logging.getLogger("nipype.interface") if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion("0.15"): from dipy.workflows import denoise, mask l_wkflw = get_dipy_workflows(denoise) + get_dipy_workflows(mask) for name, obj in l_wkflw: new_name = name.replace("Flow", "") globals()[new_name] = dipy_to_nipype_interface(new_name, obj) del l_wkflw else: IFLOGGER.info( "We advise you to upgrade DIPY version. This upgrade will" " open access to more function" ) class ResampleInputSpec(TraitedSpec): in_file = File( exists=True, mandatory=True, desc="The input 4D diffusion-weighted image file" ) vox_size = traits.Tuple( traits.Float, traits.Float, traits.Float, desc=( "specify the new voxel zooms. If no vox_size" " is set, then isotropic regridding will " "be performed, with spacing equal to the " "smallest current zoom." 
), ) interp = traits.Int( 1, mandatory=True, usedefault=True, desc=("order of the interpolator (0 = nearest, 1 = linear, etc."), ) class ResampleOutputSpec(TraitedSpec): out_file = File(exists=True) class Resample(DipyBaseInterface): """ An interface to reslicing diffusion datasets. See http://nipy.org/dipy/examples_built/reslice_datasets.html#example-reslice-datasets. Example ------- >>> import nipype.interfaces.dipy as dipy >>> reslice = dipy.Resample() >>> reslice.inputs.in_file = 'diffusion.nii' >>> reslice.run() # doctest: +SKIP """ input_spec = ResampleInputSpec output_spec = ResampleOutputSpec def _run_interface(self, runtime): order = self.inputs.interp vox_size = None if isdefined(self.inputs.vox_size): vox_size = self.inputs.vox_size out_file = op.abspath(self._gen_outfilename()) resample_proxy( self.inputs.in_file, order=order, new_zooms=vox_size, out_file=out_file ) IFLOGGER.info("Resliced image saved as %s", out_file) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = op.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): fname, fext = op.splitext(op.basename(self.inputs.in_file)) if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext return op.abspath("%s_reslice%s" % (fname, fext)) class DenoiseInputSpec(TraitedSpec): in_file = File( exists=True, mandatory=True, desc="The input 4D diffusion-weighted image file" ) in_mask = File(exists=True, desc="brain mask") noise_model = traits.Enum( "rician", "gaussian", mandatory=True, usedefault=True, desc=("noise distribution model"), ) signal_mask = File( desc=("mask in which the mean signal " "will be computed"), exists=True ) noise_mask = File( desc=("mask in which the standard deviation of noise " "will be computed"), exists=True, ) patch_radius = traits.Int(1, usedefault=True, desc="patch radius") block_radius = traits.Int(5, usedefault=True, desc="block_radius") snr = traits.Float(desc="manually set an SNR") class 
DenoiseOutputSpec(TraitedSpec): out_file = File(exists=True) class Denoise(DipyBaseInterface): """ An interface to denoising diffusion datasets [Coupe2008]_. See http://nipy.org/dipy/examples_built/denoise_nlmeans.html#example-denoise-nlmeans. .. [Coupe2008] Coupe P et al., `An Optimized Blockwise Non Local Means Denoising Filter for 3D Magnetic Resonance Images `_, IEEE Transactions on Medical Imaging, 27(4):425-441, 2008. Example ------- >>> import nipype.interfaces.dipy as dipy >>> denoise = dipy.Denoise() >>> denoise.inputs.in_file = 'diffusion.nii' >>> denoise.run() # doctest: +SKIP """ input_spec = DenoiseInputSpec output_spec = DenoiseOutputSpec def _run_interface(self, runtime): out_file = op.abspath(self._gen_outfilename()) settings = dict(mask=None, rician=(self.inputs.noise_model == "rician")) if isdefined(self.inputs.in_mask): settings["mask"] = np.asanyarray(nb.load(self.inputs.in_mask).dataobj) if isdefined(self.inputs.patch_radius): settings["patch_radius"] = self.inputs.patch_radius if isdefined(self.inputs.block_radius): settings["block_radius"] = self.inputs.block_radius snr = None if isdefined(self.inputs.snr): snr = self.inputs.snr signal_mask = None if isdefined(self.inputs.signal_mask): signal_mask = np.asanyarray(nb.load(self.inputs.signal_mask).dataobj) noise_mask = None if isdefined(self.inputs.noise_mask): noise_mask = np.asanyarray(nb.load(self.inputs.noise_mask).dataobj) _, s = nlmeans_proxy( self.inputs.in_file, settings, snr=snr, smask=signal_mask, nmask=noise_mask, out_file=out_file, ) IFLOGGER.info("Denoised image saved as %s, estimated SNR=%s", out_file, str(s)) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = op.abspath(self._gen_outfilename()) return outputs def _gen_outfilename(self): fname, fext = op.splitext(op.basename(self.inputs.in_file)) if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext return op.abspath("%s_denoise%s" % (fname, fext)) def 
resample_proxy(in_file, order=3, new_zooms=None, out_file=None): """ Performs regridding of an image to set isotropic voxel sizes using dipy. """ from dipy.align.reslice import reslice if out_file is None: fname, fext = op.splitext(op.basename(in_file)) if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext out_file = op.abspath("./%s_reslice%s" % (fname, fext)) img = nb.load(in_file) hdr = img.header.copy() data = img.get_fdata(dtype=np.float32) affine = img.affine im_zooms = hdr.get_zooms()[:3] if new_zooms is None: minzoom = np.array(im_zooms).min() new_zooms = tuple(np.ones((3,)) * minzoom) if np.all(im_zooms == new_zooms): return in_file data2, affine2 = reslice(data, affine, im_zooms, new_zooms, order=order) tmp_zooms = np.array(hdr.get_zooms()) tmp_zooms[:3] = new_zooms[0] hdr.set_zooms(tuple(tmp_zooms)) hdr.set_data_shape(data2.shape) hdr.set_xyzt_units("mm") nb.Nifti1Image(data2.astype(hdr.get_data_dtype()), affine2, hdr).to_filename( out_file ) return out_file, new_zooms def nlmeans_proxy(in_file, settings, snr=None, smask=None, nmask=None, out_file=None): """ Uses non-local means to denoise 4D datasets """ from dipy.denoise.nlmeans import nlmeans from scipy.ndimage.morphology import binary_erosion from scipy import ndimage if out_file is None: fname, fext = op.splitext(op.basename(in_file)) if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext out_file = op.abspath("./%s_denoise%s" % (fname, fext)) img = nb.load(in_file) hdr = img.header data = img.get_fdata() aff = img.affine if data.ndim < 4: data = data[..., np.newaxis] data = np.nan_to_num(data) if data.max() < 1.0e-4: raise RuntimeError("There is no signal in the image") df = 1.0 if data.max() < 1000.0: df = 1000.0 / data.max() data *= df b0 = data[..., 0] if smask is None: smask = np.zeros_like(b0) smask[b0 > np.percentile(b0, 85.0)] = 1 smask = binary_erosion(smask.astype(np.uint8), iterations=2).astype(np.uint8) if nmask is None: nmask = np.ones_like(b0, 
dtype=np.uint8) bmask = settings["mask"] if bmask is None: bmask = np.zeros_like(b0) bmask[b0 > np.percentile(b0[b0 > 0], 10)] = 1 label_im, nb_labels = ndimage.label(bmask) sizes = ndimage.sum(bmask, label_im, range(nb_labels + 1)) maxidx = np.argmax(sizes) bmask = np.zeros_like(b0, dtype=np.uint8) bmask[label_im == maxidx] = 1 nmask[bmask > 0] = 0 else: nmask = np.squeeze(nmask) nmask[nmask > 0.0] = 1 nmask[nmask < 1] = 0 nmask = nmask.astype(bool) nmask = binary_erosion(nmask, iterations=1).astype(np.uint8) den = np.zeros_like(data) est_snr = True if snr is not None: snr = [snr] * data.shape[-1] est_snr = False else: snr = [] for i in range(data.shape[-1]): d = data[..., i] if est_snr: s = np.mean(d[smask > 0]) n = np.std(d[nmask > 0]) snr.append(s / n) den[..., i] = nlmeans(d, snr[i], **settings) den = np.squeeze(den) den /= df nb.Nifti1Image(den.astype(hdr.get_data_dtype()), aff, hdr).to_filename(out_file) return out_file, snr nipype-1.7.0/nipype/interfaces/dipy/reconstruction.py000066400000000000000000000325071413403311400230530ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ Interfaces to the reconstruction algorithms in dipy """ import os.path as op import numpy as np import nibabel as nb from distutils.version import LooseVersion from ... import logging from ..base import TraitedSpec, File, traits, isdefined from .base import ( DipyDiffusionInterface, DipyBaseInterfaceInputSpec, HAVE_DIPY, dipy_version, dipy_to_nipype_interface, get_dipy_workflows, ) IFLOGGER = logging.getLogger("nipype.interface") if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion("0.15"): from dipy.workflows import reconst l_wkflw = get_dipy_workflows(reconst) for name, obj in l_wkflw: new_name = name.replace("Flow", "") globals()[new_name] = dipy_to_nipype_interface(new_name, obj) del l_wkflw else: IFLOGGER.info( "We advise you to upgrade DIPY version. 
This upgrade will" " open access to more models" ) class RESTOREInputSpec(DipyBaseInterfaceInputSpec): in_mask = File(exists=True, desc=("input mask in which compute tensors")) noise_mask = File(exists=True, desc=("input mask in which compute noise variance")) class RESTOREOutputSpec(TraitedSpec): fa = File( desc="output fractional anisotropy (FA) map computed from " "the fitted DTI" ) md = File(desc="output mean diffusivity (MD) map computed from the " "fitted DTI") rd = File(desc="output radial diffusivity (RD) map computed from " "the fitted DTI") mode = File(desc=("output mode (MO) map computed from the fitted DTI")) trace = File(desc=("output the tensor trace map computed from the " "fitted DTI")) evals = File(desc=("output the eigenvalues of the fitted DTI")) evecs = File(desc=("output the eigenvectors of the fitted DTI")) class RESTORE(DipyDiffusionInterface): """ Uses RESTORE [Chang2005]_ to perform DTI fitting with outlier detection. The interface uses :py:mod:`dipy`, as explained in `dipy's documentation`_. .. [Chang2005] Chang, LC, Jones, DK and Pierpaoli, C. RESTORE: robust \ estimation of tensors by outlier rejection. MRM, 53:1088-95, (2005). .. 
_dipy's documentation: \ http://nipy.org/dipy/examples_built/restore_dti.html Example ------- >>> from nipype.interfaces import dipy as ndp >>> dti = ndp.RESTORE() >>> dti.inputs.in_file = '4d_dwi.nii' >>> dti.inputs.in_bval = 'bvals' >>> dti.inputs.in_bvec = 'bvecs' >>> res = dti.run() # doctest: +SKIP """ input_spec = RESTOREInputSpec output_spec = RESTOREOutputSpec def _run_interface(self, runtime): from scipy.special import gamma from dipy.reconst.dti import TensorModel import gc img = nb.load(self.inputs.in_file) hdr = img.header.copy() affine = img.affine data = img.get_fdata() gtab = self._get_gradient_table() if isdefined(self.inputs.in_mask): msk = np.asanyarray(nb.load(self.inputs.in_mask).dataobj).astype(np.uint8) else: msk = np.ones(data.shape[:3], dtype=np.uint8) try_b0 = True if isdefined(self.inputs.noise_mask): noise_msk = ( nb.load(self.inputs.noise_mask).get_fdata(dtype=np.float32).reshape(-1) ) noise_msk[noise_msk > 0.5] = 1 noise_msk[noise_msk < 1.0] = 0 noise_msk = noise_msk.astype(np.uint8) try_b0 = False elif np.all(data[msk == 0, 0] == 0): IFLOGGER.info("Input data are masked.") noise_msk = msk.reshape(-1).astype(np.uint8) else: noise_msk = (1 - msk).reshape(-1).astype(np.uint8) nb0 = np.sum(gtab.b0s_mask) dsample = data.reshape(-1, data.shape[-1]) if try_b0 and (nb0 > 1): noise_data = dsample.take(np.where(gtab.b0s_mask), axis=-1)[ noise_msk == 0, ... ] n = nb0 else: nodiff = np.where(~gtab.b0s_mask) nodiffidx = nodiff[0].tolist() n = 20 if len(nodiffidx) >= 20 else len(nodiffidx) idxs = np.random.choice(nodiffidx, size=n, replace=False) noise_data = dsample.take(idxs, axis=-1)[noise_msk == 1, ...] # Estimate sigma required by RESTORE mean_std = np.median(noise_data.std(-1)) try: bias = 1.0 - np.sqrt(2.0 / (n - 1)) * ( gamma(n / 2.0) / gamma((n - 1) / 2.0) ) except: bias = 0.0 pass sigma = mean_std * (1 + bias) if sigma == 0: IFLOGGER.warning( "Noise std is 0.0, looks like data was masked and " "noise cannot be estimated correctly. 
Using default " "tensor model instead of RESTORE." ) dti = TensorModel(gtab) else: IFLOGGER.info("Performing RESTORE with noise std=%.4f.", sigma) dti = TensorModel(gtab, fit_method="RESTORE", sigma=sigma) try: fit_restore = dti.fit(data, msk) except TypeError: dti = TensorModel(gtab) fit_restore = dti.fit(data, msk) hdr.set_data_dtype(np.float32) hdr["data_type"] = 16 for k in self._outputs().get(): scalar = getattr(fit_restore, k) hdr.set_data_shape(np.shape(scalar)) nb.Nifti1Image(scalar.astype(np.float32), affine, hdr).to_filename( self._gen_filename(k) ) return runtime def _list_outputs(self): outputs = self._outputs().get() for k in list(outputs.keys()): outputs[k] = self._gen_filename(k) return outputs class EstimateResponseSHInputSpec(DipyBaseInterfaceInputSpec): in_evals = File(exists=True, mandatory=True, desc=("input eigenvalues file")) in_mask = File(exists=True, desc=("input mask in which we find single fibers")) fa_thresh = traits.Float(0.7, usedefault=True, desc=("FA threshold")) roi_radius = traits.Int( 10, usedefault=True, desc=("ROI radius to be used in auto_response") ) auto = traits.Bool( xor=["recursive"], desc="use the auto_response estimator from dipy" ) recursive = traits.Bool( xor=["auto"], desc="use the recursive response estimator from dipy" ) response = File("response.txt", usedefault=True, desc=("the output response file")) out_mask = File("wm_mask.nii.gz", usedefault=True, desc="computed wm mask") class EstimateResponseSHOutputSpec(TraitedSpec): response = File(exists=True, desc=("the response file")) out_mask = File(exists=True, desc=("output wm mask")) class EstimateResponseSH(DipyDiffusionInterface): """ Uses dipy to compute the single fiber response to be used in spherical deconvolution methods, in a similar way to MRTrix's command ``estimate_response``. 
Example ------- >>> from nipype.interfaces import dipy as ndp >>> dti = ndp.EstimateResponseSH() >>> dti.inputs.in_file = '4d_dwi.nii' >>> dti.inputs.in_bval = 'bvals' >>> dti.inputs.in_bvec = 'bvecs' >>> dti.inputs.in_evals = 'dwi_evals.nii' >>> res = dti.run() # doctest: +SKIP """ input_spec = EstimateResponseSHInputSpec output_spec = EstimateResponseSHOutputSpec def _run_interface(self, runtime): from dipy.core.gradients import GradientTable from dipy.reconst.dti import fractional_anisotropy, mean_diffusivity from dipy.reconst.csdeconv import recursive_response, auto_response img = nb.load(self.inputs.in_file) imref = nb.four_to_three(img)[0] affine = img.affine if isdefined(self.inputs.in_mask): msk = np.asanyarray(nb.load(self.inputs.in_mask).dataobj) msk[msk > 0] = 1 msk[msk < 0] = 0 else: msk = np.ones(imref.shape) data = img.get_fdata(dtype=np.float32) gtab = self._get_gradient_table() evals = np.nan_to_num(nb.load(self.inputs.in_evals).dataobj) FA = np.nan_to_num(fractional_anisotropy(evals)) * msk indices = np.where(FA > self.inputs.fa_thresh) S0s = data[indices][:, np.nonzero(gtab.b0s_mask)[0]] S0 = np.mean(S0s) if self.inputs.auto: response, ratio = auto_response( gtab, data, roi_radius=self.inputs.roi_radius, fa_thr=self.inputs.fa_thresh, ) response = response[0].tolist() + [S0] elif self.inputs.recursive: MD = np.nan_to_num(mean_diffusivity(evals)) * msk indices = np.logical_or( FA >= 0.4, (np.logical_and(FA >= 0.15, MD >= 0.0011)) ) data = np.asanyarray(nb.load(self.inputs.in_file).dataobj) response = recursive_response( gtab, data, mask=indices, sh_order=8, peak_thr=0.01, init_fa=0.08, init_trace=0.0021, iter=8, convergence=0.001, parallel=True, ) ratio = abs(response[1] / response[0]) else: lambdas = evals[indices] l01 = np.sort(np.mean(lambdas, axis=0)) response = np.array([l01[-1], l01[-2], l01[-2], S0]) ratio = abs(response[1] / response[0]) if ratio > 0.25: IFLOGGER.warning( "Estimated response is not prolate enough. 
" "Ratio=%0.3f.", ratio ) elif ratio < 1.0e-5 or np.any(np.isnan(response)): response = np.array([1.8e-3, 3.6e-4, 3.6e-4, S0]) IFLOGGER.warning("Estimated response is not valid, using a default one") else: IFLOGGER.info("Estimated response: %s", str(response[:3])) np.savetxt(op.abspath(self.inputs.response), response) wm_mask = np.zeros_like(FA) wm_mask[indices] = 1 nb.Nifti1Image(wm_mask.astype(np.uint8), affine, None).to_filename( op.abspath(self.inputs.out_mask) ) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["response"] = op.abspath(self.inputs.response) outputs["out_mask"] = op.abspath(self.inputs.out_mask) return outputs class CSDInputSpec(DipyBaseInterfaceInputSpec): in_mask = File(exists=True, desc=("input mask in which compute tensors")) response = File(exists=True, desc=("single fiber estimated response")) sh_order = traits.Int( 8, usedefault=True, desc=("maximal shperical harmonics order") ) save_fods = traits.Bool(True, usedefault=True, desc=("save fODFs in file")) out_fods = File(desc=("fODFs output file name")) class CSDOutputSpec(TraitedSpec): model = File(desc="Python pickled object of the CSD model fitted.") out_fods = File(desc=("fODFs output file name")) class CSD(DipyDiffusionInterface): """ Uses CSD [Tournier2007]_ to generate the fODF of DWIs. The interface uses :py:mod:`dipy`, as explained in `dipy's CSD example `_. .. [Tournier2007] Tournier, J.D., et al. NeuroImage 2007. 
Robust determination of the fibre orientation distribution in diffusion MRI: Non-negativity constrained super-resolved spherical deconvolution Example ------- >>> from nipype.interfaces import dipy as ndp >>> csd = ndp.CSD() >>> csd.inputs.in_file = '4d_dwi.nii' >>> csd.inputs.in_bval = 'bvals' >>> csd.inputs.in_bvec = 'bvecs' >>> res = csd.run() # doctest: +SKIP """ input_spec = CSDInputSpec output_spec = CSDOutputSpec def _run_interface(self, runtime): from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel from dipy.data import get_sphere # import marshal as pickle import pickle as pickle import gzip img = nb.load(self.inputs.in_file) imref = nb.four_to_three(img)[0] if isdefined(self.inputs.in_mask): msk = np.asanyarray(nb.load(self.inputs.in_mask).dataobj) else: msk = np.ones(imref.shape) data = img.get_fdata(dtype=np.float32) gtab = self._get_gradient_table() resp_file = np.loadtxt(self.inputs.response) response = (np.array(resp_file[0:3]), resp_file[-1]) ratio = response[0][1] / response[0][0] if abs(ratio - 0.2) > 0.1: IFLOGGER.warning( "Estimated response is not prolate enough. 
" "Ratio=%0.3f.", ratio ) csd_model = ConstrainedSphericalDeconvModel( gtab, response, sh_order=self.inputs.sh_order ) IFLOGGER.info("Fitting CSD model") csd_fit = csd_model.fit(data, msk) f = gzip.open(self._gen_filename("csdmodel", ext=".pklz"), "wb") pickle.dump(csd_model, f, -1) f.close() if self.inputs.save_fods: sphere = get_sphere("symmetric724") fods = csd_fit.odf(sphere) nb.Nifti1Image(fods.astype(np.float32), img.affine, None).to_filename( self._gen_filename("fods") ) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["model"] = self._gen_filename("csdmodel", ext=".pklz") if self.inputs.save_fods: outputs["out_fods"] = self._gen_filename("fods") return outputs nipype-1.7.0/nipype/interfaces/dipy/registration.py000066400000000000000000000012171413403311400224760ustar00rootroot00000000000000from distutils.version import LooseVersion from ... import logging from .base import HAVE_DIPY, dipy_version, dipy_to_nipype_interface, get_dipy_workflows IFLOGGER = logging.getLogger("nipype.interface") if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion("0.15"): from dipy.workflows import align l_wkflw = get_dipy_workflows(align) for name, obj in l_wkflw: new_name = name.replace("Flow", "") globals()[new_name] = dipy_to_nipype_interface(new_name, obj) del l_wkflw else: IFLOGGER.info( "We advise you to upgrade DIPY version. 
This upgrade will" " open access to more function" ) nipype-1.7.0/nipype/interfaces/dipy/setup.py000066400000000000000000000007471413403311400211330ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: def configuration(parent_package="", top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration("dipy", parent_package, top_path) # config.add_data_dir('tests') return config if __name__ == "__main__": from numpy.distutils.core import setup setup(**configuration(top_path="").todict()) nipype-1.7.0/nipype/interfaces/dipy/simulate.py000066400000000000000000000267721413403311400216240ustar00rootroot00000000000000# -*- coding: utf-8 -*- from multiprocessing import Pool, cpu_count import os.path as op import numpy as np import nibabel as nb from ... import logging from ..base import ( traits, TraitedSpec, BaseInterfaceInputSpec, File, InputMultiPath, isdefined, ) from .base import DipyBaseInterface IFLOGGER = logging.getLogger("nipype.interface") class SimulateMultiTensorInputSpec(BaseInterfaceInputSpec): in_dirs = InputMultiPath( File(exists=True), mandatory=True, desc="list of fibers (principal directions)" ) in_frac = InputMultiPath( File(exists=True), mandatory=True, desc=("volume fraction of each fiber") ) in_vfms = InputMultiPath( File(exists=True), mandatory=True, desc=("volume fractions of isotropic " "compartiments"), ) in_mask = File(exists=True, desc="mask to simulate data") diff_iso = traits.List( [3000e-6, 960e-6, 680e-6], traits.Float, usedefault=True, desc="Diffusivity of isotropic compartments", ) diff_sf = traits.Tuple( (1700e-6, 200e-6, 200e-6), traits.Float, traits.Float, traits.Float, usedefault=True, desc="Single fiber tensor", ) n_proc = traits.Int(0, usedefault=True, desc="number of processes") baseline = File(exists=True, mandatory=True, desc="baseline T2 signal") gradients = File(exists=True, 
desc="gradients file") in_bvec = File(exists=True, desc="input bvecs file") in_bval = File(exists=True, desc="input bvals file") num_dirs = traits.Int( 32, usedefault=True, desc=( "number of gradient directions (when table " "is automatically generated)" ), ) bvalues = traits.List( traits.Int, value=[1000, 3000], usedefault=True, desc=("list of b-values (when table " "is automatically generated)"), ) out_file = File( "sim_dwi.nii.gz", usedefault=True, desc="output file with fractions to be simluated", ) out_mask = File( "sim_msk.nii.gz", usedefault=True, desc="file with the mask simulated" ) out_bvec = File("bvec.sim", usedefault=True, desc="simulated b vectors") out_bval = File("bval.sim", usedefault=True, desc="simulated b values") snr = traits.Int(0, usedefault=True, desc="signal-to-noise ratio (dB)") class SimulateMultiTensorOutputSpec(TraitedSpec): out_file = File(exists=True, desc="simulated DWIs") out_mask = File(exists=True, desc="mask file") out_bvec = File(exists=True, desc="simulated b vectors") out_bval = File(exists=True, desc="simulated b values") class SimulateMultiTensor(DipyBaseInterface): """ Interface to MultiTensor model simulator in dipy http://nipy.org/dipy/examples_built/simulate_multi_tensor.html Example ------- >>> import nipype.interfaces.dipy as dipy >>> sim = dipy.SimulateMultiTensor() >>> sim.inputs.in_dirs = ['fdir00.nii', 'fdir01.nii'] >>> sim.inputs.in_frac = ['ffra00.nii', 'ffra01.nii'] >>> sim.inputs.in_vfms = ['tpm_00.nii.gz', 'tpm_01.nii.gz', ... 
'tpm_02.nii.gz'] >>> sim.inputs.baseline = 'b0.nii' >>> sim.inputs.in_bvec = 'bvecs' >>> sim.inputs.in_bval = 'bvals' >>> sim.run() # doctest: +SKIP """ input_spec = SimulateMultiTensorInputSpec output_spec = SimulateMultiTensorOutputSpec def _run_interface(self, runtime): from dipy.core.gradients import gradient_table # Gradient table if isdefined(self.inputs.in_bval) and isdefined(self.inputs.in_bvec): # Load the gradient strengths and directions bvals = np.loadtxt(self.inputs.in_bval) bvecs = np.loadtxt(self.inputs.in_bvec).T gtab = gradient_table(bvals, bvecs) else: gtab = _generate_gradients(self.inputs.num_dirs, self.inputs.bvalues) ndirs = len(gtab.bvals) np.savetxt(op.abspath(self.inputs.out_bvec), gtab.bvecs.T) np.savetxt(op.abspath(self.inputs.out_bval), gtab.bvals) # Load the baseline b0 signal b0_im = nb.load(self.inputs.baseline) hdr = b0_im.header shape = b0_im.shape aff = b0_im.affine # Check and load sticks and their volume fractions nsticks = len(self.inputs.in_dirs) if len(self.inputs.in_frac) != nsticks: raise RuntimeError( ("Number of sticks and their volume fractions" " must match.") ) # Volume fractions of isotropic compartments nballs = len(self.inputs.in_vfms) vfs = np.squeeze(nb.concat_images(self.inputs.in_vfms).dataobj) if nballs == 1: vfs = vfs[..., np.newaxis] total_vf = np.sum(vfs, axis=3) # Generate a mask if isdefined(self.inputs.in_mask): msk = np.asanyarray(nb.load(self.inputs.in_mask).dataobj) msk[msk > 0.0] = 1.0 msk[msk < 1.0] = 0.0 else: msk = np.zeros(shape) msk[total_vf > 0.0] = 1.0 msk = np.clip(msk, 0.0, 1.0) nvox = len(msk[msk > 0]) # Fiber fractions ffsim = nb.concat_images(self.inputs.in_frac) ffs = np.nan_to_num(np.squeeze(ffsim.dataobj)) # fiber fractions ffs = np.clip(ffs, 0.0, 1.0) if nsticks == 1: ffs = ffs[..., np.newaxis] for i in range(nsticks): ffs[..., i] *= msk total_ff = np.sum(ffs, axis=3) # Fix incongruencies in fiber fractions for i in range(1, nsticks): if np.any(total_ff > 1.0): errors = 
np.zeros_like(total_ff) errors[total_ff > 1.0] = total_ff[total_ff > 1.0] - 1.0 ffs[..., i] -= errors ffs[ffs < 0.0] = 0.0 total_ff = np.sum(ffs, axis=3) for i in range(vfs.shape[-1]): vfs[..., i] -= total_ff vfs = np.clip(vfs, 0.0, 1.0) fractions = np.concatenate((ffs, vfs), axis=3) nb.Nifti1Image(fractions, aff, None).to_filename("fractions.nii.gz") nb.Nifti1Image(np.sum(fractions, axis=3), aff, None).to_filename( "total_vf.nii.gz" ) mhdr = hdr.copy() mhdr.set_data_dtype(np.uint8) mhdr.set_xyzt_units("mm", "sec") nb.Nifti1Image(msk, aff, mhdr).to_filename(op.abspath(self.inputs.out_mask)) # Initialize stack of args fracs = fractions[msk > 0] # Stack directions dirs = None for i in range(nsticks): f = self.inputs.in_dirs[i] fd = np.nan_to_num(nb.load(f).dataobj) w = np.linalg.norm(fd, axis=3)[..., np.newaxis] w[w < np.finfo(float).eps] = 1.0 fd /= w if dirs is None: dirs = fd[msk > 0].copy() else: dirs = np.hstack((dirs, fd[msk > 0])) # Add random directions for isotropic components for d in range(nballs): fd = np.random.randn(nvox, 3) w = np.linalg.norm(fd, axis=1) fd[w < np.finfo(float).eps, ...] 
= np.array([1.0, 0.0, 0.0]) w[w < np.finfo(float).eps] = 1.0 fd /= w[..., np.newaxis] dirs = np.hstack((dirs, fd)) sf_evals = list(self.inputs.diff_sf) ba_evals = list(self.inputs.diff_iso) mevals = [sf_evals] * nsticks + [[ba_evals[d]] * 3 for d in range(nballs)] b0 = b0_im.get_fdata()[msk > 0] args = [] for i in range(nvox): args.append( { "fractions": fracs[i, ...].tolist(), "sticks": [ tuple(dirs[i, j : j + 3]) for j in range(nsticks + nballs) ], "gradients": gtab, "mevals": mevals, "S0": b0[i], "snr": self.inputs.snr, } ) n_proc = self.inputs.n_proc if n_proc == 0: n_proc = cpu_count() try: pool = Pool(processes=n_proc, maxtasksperchild=50) except TypeError: pool = Pool(processes=n_proc) # Simulate sticks using dipy IFLOGGER.info( "Starting simulation of %d voxels, %d diffusion directions.", len(args), ndirs, ) result = np.array(pool.map(_compute_voxel, args)) if np.shape(result)[1] != ndirs: raise RuntimeError( ("Computed directions do not match number" "of b-values.") ) signal = np.zeros((shape[0], shape[1], shape[2], ndirs)) signal[msk > 0] = result simhdr = hdr.copy() simhdr.set_data_dtype(np.float32) simhdr.set_xyzt_units("mm", "sec") nb.Nifti1Image(signal.astype(np.float32), aff, simhdr).to_filename( op.abspath(self.inputs.out_file) ) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = op.abspath(self.inputs.out_file) outputs["out_mask"] = op.abspath(self.inputs.out_mask) outputs["out_bvec"] = op.abspath(self.inputs.out_bvec) outputs["out_bval"] = op.abspath(self.inputs.out_bval) return outputs def _compute_voxel(args): """ Simulate DW signal for one voxel. Uses the multi-tensor model and three isotropic compartments. Apparent diffusivity tensors are taken from [Alexander2002]_ and [Pierpaoli1996]_. .. [Alexander2002] Alexander et al., Detection and modeling of non-Gaussian apparent diffusion coefficient profiles in human brain data, MRM 48(2):331-340, 2002, doi: `10.1002/mrm.10209 `_. .. 
[Pierpaoli1996] Pierpaoli et al., Diffusion tensor MR imaging of the human brain, Radiology 201:637-648. 1996. """ from dipy.sims.voxel import multi_tensor ffs = args["fractions"] gtab = args["gradients"] signal = np.zeros_like(gtab.bvals, dtype=np.float32) # Simulate dwi signal sf_vf = np.sum(ffs) if sf_vf > 0.0: ffs = (np.array(ffs) / sf_vf) * 100 snr = args["snr"] if args["snr"] > 0 else None try: signal, _ = multi_tensor( gtab, args["mevals"], S0=args["S0"], angles=args["sticks"], fractions=ffs, snr=snr, ) except Exception: pass return signal.tolist() def _generate_gradients(ndirs=64, values=[1000, 3000], nb0s=1): """ Automatically generate a `gradient table `_ """ import numpy as np from dipy.core.sphere import disperse_charges, Sphere, HemiSphere from dipy.core.gradients import gradient_table theta = np.pi * np.random.rand(ndirs) phi = 2 * np.pi * np.random.rand(ndirs) hsph_initial = HemiSphere(theta=theta, phi=phi) hsph_updated, potential = disperse_charges(hsph_initial, 5000) values = np.atleast_1d(values).tolist() vertices = hsph_updated.vertices bvecs = vertices.copy() bvals = np.ones(vertices.shape[0]) * values[0] for v in values[1:]: bvecs = np.vstack((bvecs, vertices)) bvals = np.hstack((bvals, v * np.ones(vertices.shape[0]))) for i in range(0, nb0s): bvals = bvals.tolist() bvals.insert(0, 0) bvecs = bvecs.tolist() bvecs.insert(0, np.zeros(3)) return gradient_table(bvals, bvecs) nipype-1.7.0/nipype/interfaces/dipy/stats.py000066400000000000000000000012161413403311400211210ustar00rootroot00000000000000from distutils.version import LooseVersion from ... 
import logging from .base import HAVE_DIPY, dipy_version, dipy_to_nipype_interface, get_dipy_workflows IFLOGGER = logging.getLogger("nipype.interface") if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion("0.16"): from dipy.workflows import stats l_wkflw = get_dipy_workflows(stats) for name, obj in l_wkflw: new_name = name.replace("Flow", "") globals()[new_name] = dipy_to_nipype_interface(new_name, obj) del l_wkflw else: IFLOGGER.info( "We advise you to upgrade DIPY version. This upgrade will" " open access to more function" ) nipype-1.7.0/nipype/interfaces/dipy/tensors.py000066400000000000000000000110741413403311400214630ustar00rootroot00000000000000# -*- coding: utf-8 -*- import numpy as np import nibabel as nb from ... import logging from ..base import TraitedSpec, File, isdefined from .base import DipyDiffusionInterface, DipyBaseInterfaceInputSpec IFLOGGER = logging.getLogger("nipype.interface") class DTIInputSpec(DipyBaseInterfaceInputSpec): mask_file = File(exists=True, desc="An optional white matter mask") class DTIOutputSpec(TraitedSpec): out_file = File(exists=True) fa_file = File(exists=True) md_file = File(exists=True) rd_file = File(exists=True) ad_file = File(exists=True) color_fa_file = File(exists=True) class DTI(DipyDiffusionInterface): """ Calculates the diffusion tensor model parameters Example ------- >>> import nipype.interfaces.dipy as dipy >>> dti = dipy.DTI() >>> dti.inputs.in_file = 'diffusion.nii' >>> dti.inputs.in_bvec = 'bvecs' >>> dti.inputs.in_bval = 'bvals' >>> dti.run() # doctest: +SKIP """ input_spec = DTIInputSpec output_spec = DTIOutputSpec def _run_interface(self, runtime): from dipy.reconst import dti from dipy.io.utils import nifti1_symmat gtab = self._get_gradient_table() img = nb.load(self.inputs.in_file) data = img.get_fdata() affine = img.affine mask = None if isdefined(self.inputs.mask_file): mask = np.asanyarray(nb.load(self.inputs.mask_file).dataobj) # Fit it tenmodel = dti.TensorModel(gtab) ten_fit = 
tenmodel.fit(data, mask) lower_triangular = ten_fit.lower_triangular() img = nifti1_symmat(lower_triangular, affine) out_file = self._gen_filename("dti") nb.save(img, out_file) IFLOGGER.info("DTI parameters image saved as %s", out_file) # FA MD RD and AD for metric in ["fa", "md", "rd", "ad", "color_fa"]: data = getattr(ten_fit, metric).astype("float32") out_name = self._gen_filename(metric) nb.Nifti1Image(data, affine).to_filename(out_name) IFLOGGER.info("DTI %s image saved as %s", metric, out_name) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = self._gen_filename("dti") for metric in ["fa", "md", "rd", "ad", "color_fa"]: outputs["{}_file".format(metric)] = self._gen_filename(metric) return outputs class TensorModeInputSpec(DipyBaseInterfaceInputSpec): mask_file = File(exists=True, desc="An optional white matter mask") class TensorModeOutputSpec(TraitedSpec): out_file = File(exists=True) class TensorMode(DipyDiffusionInterface): """ Creates a map of the mode of the diffusion tensors given a set of diffusion-weighted images, as well as their associated b-values and b-vectors [1]_. Fits the diffusion tensors and calculates tensor mode with Dipy. Example ------- >>> import nipype.interfaces.dipy as dipy >>> mode = dipy.TensorMode() >>> mode.inputs.in_file = 'diffusion.nii' >>> mode.inputs.in_bvec = 'bvecs' >>> mode.inputs.in_bval = 'bvals' >>> mode.run() # doctest: +SKIP References ---------- .. [1] Daniel B. Ennis and G. Kindlmann, "Orthogonal Tensor Invariants and the Analysis of Diffusion Tensor Magnetic Resonance Images", Magnetic Resonance in Medicine, vol. 55, no. 1, pp. 136-146, 2006. 
""" input_spec = TensorModeInputSpec output_spec = TensorModeOutputSpec def _run_interface(self, runtime): from dipy.reconst import dti # Load the 4D image files img = nb.load(self.inputs.in_file) data = img.get_fdata() affine = img.affine # Load the gradient strengths and directions gtab = self._get_gradient_table() # Mask the data so that tensors are not fit for # unnecessary voxels mask = data[..., 0] > 50 # Fit the tensors to the data tenmodel = dti.TensorModel(gtab) tenfit = tenmodel.fit(data, mask) # Calculate the mode of each voxel's tensor mode_data = tenfit.mode # Write as a 3D Nifti image with the original affine img = nb.Nifti1Image(mode_data, affine) out_file = self._gen_filename("mode") nb.save(img, out_file) IFLOGGER.info("Tensor mode image saved as %s", out_file) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = self._gen_filename("mode") return outputs nipype-1.7.0/nipype/interfaces/dipy/tests/000077500000000000000000000000001413403311400205535ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/dipy/tests/__init__.py000066400000000000000000000000301413403311400226550ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/dipy/tests/test_auto_APMQball.py000066400000000000000000000021361413403311400246070ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..anisotropic_power import APMQball def test_APMQball_inputs(): input_map = dict( b0_thres=dict( usedefault=True, ), in_bval=dict( extensions=None, mandatory=True, ), in_bvec=dict( extensions=None, mandatory=True, ), in_file=dict( extensions=None, mandatory=True, ), mask_file=dict( extensions=None, ), out_prefix=dict(), ) inputs = APMQball.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_APMQball_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) 
outputs = APMQball.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dipy/tests/test_auto_CSD.py000066400000000000000000000025711413403311400236320ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..reconstruction import CSD def test_CSD_inputs(): input_map = dict( b0_thres=dict( usedefault=True, ), in_bval=dict( extensions=None, mandatory=True, ), in_bvec=dict( extensions=None, mandatory=True, ), in_file=dict( extensions=None, mandatory=True, ), in_mask=dict( extensions=None, ), out_fods=dict( extensions=None, ), out_prefix=dict(), response=dict( extensions=None, ), save_fods=dict( usedefault=True, ), sh_order=dict( usedefault=True, ), ) inputs = CSD.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CSD_outputs(): output_map = dict( model=dict( extensions=None, ), out_fods=dict( extensions=None, ), ) outputs = CSD.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dipy/tests/test_auto_DTI.py000066400000000000000000000025671413403311400236460ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tensors import DTI def test_DTI_inputs(): input_map = dict( b0_thres=dict( usedefault=True, ), in_bval=dict( extensions=None, mandatory=True, ), in_bvec=dict( extensions=None, mandatory=True, ), in_file=dict( extensions=None, mandatory=True, ), mask_file=dict( extensions=None, ), out_prefix=dict(), ) inputs = DTI.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DTI_outputs(): output_map 
= dict( ad_file=dict( extensions=None, ), color_fa_file=dict( extensions=None, ), fa_file=dict( extensions=None, ), md_file=dict( extensions=None, ), out_file=dict( extensions=None, ), rd_file=dict( extensions=None, ), ) outputs = DTI.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dipy/tests/test_auto_Denoise.py000066400000000000000000000022751413403311400246100ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Denoise def test_Denoise_inputs(): input_map = dict( block_radius=dict( usedefault=True, ), in_file=dict( extensions=None, mandatory=True, ), in_mask=dict( extensions=None, ), noise_mask=dict( extensions=None, ), noise_model=dict( mandatory=True, usedefault=True, ), patch_radius=dict( usedefault=True, ), signal_mask=dict( extensions=None, ), snr=dict(), ) inputs = Denoise.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Denoise_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Denoise.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dipy/tests/test_auto_DipyBaseInterface.py000066400000000000000000000005611413403311400265370ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import DipyBaseInterface def test_DipyBaseInterface_inputs(): input_map = dict() inputs = DipyBaseInterface.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/dipy/tests/test_auto_DipyDiffusionInterface.py000066400000000000000000000013551413403311400276150ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import DipyDiffusionInterface def test_DipyDiffusionInterface_inputs(): input_map = dict( b0_thres=dict( usedefault=True, ), in_bval=dict( extensions=None, mandatory=True, ), in_bvec=dict( extensions=None, mandatory=True, ), in_file=dict( extensions=None, mandatory=True, ), out_prefix=dict(), ) inputs = DipyDiffusionInterface.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dipy/tests/test_auto_EstimateResponseSH.py000066400000000000000000000033301413403311400267400ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..reconstruction import EstimateResponseSH def test_EstimateResponseSH_inputs(): input_map = dict( auto=dict( xor=["recursive"], ), b0_thres=dict( usedefault=True, ), fa_thresh=dict( usedefault=True, ), in_bval=dict( extensions=None, mandatory=True, ), in_bvec=dict( extensions=None, mandatory=True, ), in_evals=dict( extensions=None, mandatory=True, ), in_file=dict( extensions=None, mandatory=True, ), in_mask=dict( extensions=None, ), out_mask=dict( extensions=None, usedefault=True, ), out_prefix=dict(), recursive=dict( xor=["auto"], ), response=dict( extensions=None, usedefault=True, ), roi_radius=dict( usedefault=True, ), ) inputs = EstimateResponseSH.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_EstimateResponseSH_outputs(): output_map = dict( out_mask=dict( extensions=None, ), response=dict( extensions=None, ), ) outputs = EstimateResponseSH.output_spec() for key, metadata in list(output_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dipy/tests/test_auto_RESTORE.py000066400000000000000000000027601413403311400243440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..reconstruction import RESTORE def test_RESTORE_inputs(): input_map = dict( b0_thres=dict( usedefault=True, ), in_bval=dict( extensions=None, mandatory=True, ), in_bvec=dict( extensions=None, mandatory=True, ), in_file=dict( extensions=None, mandatory=True, ), in_mask=dict( extensions=None, ), noise_mask=dict( extensions=None, ), out_prefix=dict(), ) inputs = RESTORE.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RESTORE_outputs(): output_map = dict( evals=dict( extensions=None, ), evecs=dict( extensions=None, ), fa=dict( extensions=None, ), md=dict( extensions=None, ), mode=dict( extensions=None, ), rd=dict( extensions=None, ), trace=dict( extensions=None, ), ) outputs = RESTORE.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dipy/tests/test_auto_Resample.py000066400000000000000000000015731413403311400247720ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Resample def test_Resample_inputs(): input_map = dict( in_file=dict( extensions=None, mandatory=True, ), interp=dict( mandatory=True, usedefault=True, ), vox_size=dict(), ) inputs = Resample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Resample_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Resample.output_spec() for key, metadata in list(output_map.items()): for 
metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dipy/tests/test_auto_SimulateMultiTensor.py000066400000000000000000000042161413403311400272100ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..simulate import SimulateMultiTensor def test_SimulateMultiTensor_inputs(): input_map = dict( baseline=dict( extensions=None, mandatory=True, ), bvalues=dict( usedefault=True, ), diff_iso=dict( usedefault=True, ), diff_sf=dict( usedefault=True, ), gradients=dict( extensions=None, ), in_bval=dict( extensions=None, ), in_bvec=dict( extensions=None, ), in_dirs=dict( mandatory=True, ), in_frac=dict( mandatory=True, ), in_mask=dict( extensions=None, ), in_vfms=dict( mandatory=True, ), n_proc=dict( usedefault=True, ), num_dirs=dict( usedefault=True, ), out_bval=dict( extensions=None, usedefault=True, ), out_bvec=dict( extensions=None, usedefault=True, ), out_file=dict( extensions=None, usedefault=True, ), out_mask=dict( extensions=None, usedefault=True, ), snr=dict( usedefault=True, ), ) inputs = SimulateMultiTensor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SimulateMultiTensor_outputs(): output_map = dict( out_bval=dict( extensions=None, ), out_bvec=dict( extensions=None, ), out_file=dict( extensions=None, ), out_mask=dict( extensions=None, ), ) outputs = SimulateMultiTensor.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dipy/tests/test_auto_StreamlineTractography.py000066400000000000000000000036231413403311400277130ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tracks import StreamlineTractography def test_StreamlineTractography_inputs(): input_map = 
dict( gfa_thresh=dict( mandatory=True, usedefault=True, ), in_file=dict( extensions=None, mandatory=True, ), in_model=dict( extensions=None, ), in_peaks=dict( extensions=None, ), min_angle=dict( mandatory=True, usedefault=True, ), multiprocess=dict( mandatory=True, usedefault=True, ), num_seeds=dict( mandatory=True, usedefault=True, ), out_prefix=dict(), peak_threshold=dict( mandatory=True, usedefault=True, ), save_seeds=dict( mandatory=True, usedefault=True, ), seed_coord=dict( extensions=None, ), seed_mask=dict( extensions=None, ), tracking_mask=dict( extensions=None, ), ) inputs = StreamlineTractography.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_StreamlineTractography_outputs(): output_map = dict( gfa=dict( extensions=None, ), odf_peaks=dict( extensions=None, ), out_seeds=dict( extensions=None, ), tracks=dict( extensions=None, ), ) outputs = StreamlineTractography.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dipy/tests/test_auto_TensorMode.py000066400000000000000000000021361413403311400252750ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tensors import TensorMode def test_TensorMode_inputs(): input_map = dict( b0_thres=dict( usedefault=True, ), in_bval=dict( extensions=None, mandatory=True, ), in_bvec=dict( extensions=None, mandatory=True, ), in_file=dict( extensions=None, mandatory=True, ), mask_file=dict( extensions=None, ), out_prefix=dict(), ) inputs = TensorMode.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TensorMode_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TensorMode.output_spec() for 
key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dipy/tests/test_auto_TrackDensityMap.py000066400000000000000000000021001413403311400262470ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tracks import TrackDensityMap def test_TrackDensityMap_inputs(): input_map = dict( data_dims=dict(), in_file=dict( extensions=None, mandatory=True, ), out_filename=dict( extensions=None, usedefault=True, ), points_space=dict( usedefault=True, ), reference=dict( extensions=None, ), voxel_dims=dict(), ) inputs = TrackDensityMap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TrackDensityMap_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TrackDensityMap.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dipy/tests/test_base.py000066400000000000000000000137271413403311400231100ustar00rootroot00000000000000import pytest from collections import namedtuple from ...base import traits, File, TraitedSpec, BaseInterfaceInputSpec from ..base import ( convert_to_traits_type, create_interface_specs, dipy_to_nipype_interface, DipyBaseInterface, no_dipy, get_dipy_workflows, ) def test_convert_to_traits_type(): Params = namedtuple("Params", "traits_type is_file") Res = namedtuple("Res", "traits_type is_mandatory") l_entries = [ Params("variable string", False), Params("variable int", False), Params("variable float", False), Params("variable bool", False), Params("variable complex", False), Params("variable int, optional", False), Params("variable string, optional", False), Params("variable float, optional", False), Params("variable bool, 
optional", False), Params("variable complex, optional", False), Params("string", False), Params("int", False), Params("string", True), Params("float", False), Params("bool", False), Params("complex", False), Params("string, optional", False), Params("int, optional", False), Params("string, optional", True), Params("float, optional", False), Params("bool, optional", False), Params("complex, optional", False), ] l_expected = [ Res(traits.ListStr, True), Res(traits.ListInt, True), Res(traits.ListFloat, True), Res(traits.ListBool, True), Res(traits.ListComplex, True), Res(traits.ListInt, False), Res(traits.ListStr, False), Res(traits.ListFloat, False), Res(traits.ListBool, False), Res(traits.ListComplex, False), Res(traits.Str, True), Res(traits.Int, True), Res(File, True), Res(traits.Float, True), Res(traits.Bool, True), Res(traits.Complex, True), Res(traits.Str, False), Res(traits.Int, False), Res(File, False), Res(traits.Float, False), Res(traits.Bool, False), Res(traits.Complex, False), ] for entry, res in zip(l_entries, l_expected): traits_type, is_mandatory = convert_to_traits_type( entry.traits_type, entry.is_file ) assert traits_type == res.traits_type assert is_mandatory == res.is_mandatory with pytest.raises(IOError): convert_to_traits_type("file, optional") def test_create_interface_specs(): new_interface = create_interface_specs("MyInterface") assert new_interface.__base__ == TraitedSpec assert isinstance(new_interface(), TraitedSpec) assert new_interface.__name__ == "MyInterface" assert not new_interface().get() new_interface = create_interface_specs( "MyInterface", BaseClass=BaseInterfaceInputSpec ) assert new_interface.__base__ == BaseInterfaceInputSpec assert isinstance(new_interface(), BaseInterfaceInputSpec) assert new_interface.__name__ == "MyInterface" assert not new_interface().get() params = [ ("params1", "string", ["my description"]), ("params2_files", "string", ["my description @"]), ("params3", "int, optional", ["useful option"]), 
("out_params", "string", ["my out description"]), ] new_interface = create_interface_specs( "MyInterface", params=params, BaseClass=BaseInterfaceInputSpec ) assert new_interface.__base__ == BaseInterfaceInputSpec assert isinstance(new_interface(), BaseInterfaceInputSpec) assert new_interface.__name__ == "MyInterface" current_params = new_interface().get() assert len(current_params) == 4 assert "params1" in current_params.keys() assert "params2_files" in current_params.keys() assert "params3" in current_params.keys() assert "out_params" in current_params.keys() @pytest.mark.skipif(no_dipy(), reason="DIPY is not installed") def test_dipy_to_nipype_interface(): from dipy.workflows.workflow import Workflow class DummyWorkflow(Workflow): @classmethod def get_short_name(cls): return "dwf1" def run(self, in_files, param1=1, out_dir="", out_ref="out1.txt"): """Workflow used to test basic workflows. Parameters ---------- in_files : string fake input string param param1 : int, optional fake positional param (default 1) out_dir : string, optional fake output directory (default '') out_ref : string, optional fake out file (default out1.txt) References ----------- dummy references """ return param1 new_specs = dipy_to_nipype_interface("MyModelSpec", DummyWorkflow) assert new_specs.__base__ == DipyBaseInterface assert isinstance(new_specs(), DipyBaseInterface) assert new_specs.__name__ == "MyModelSpec" assert hasattr(new_specs, "input_spec") assert new_specs().input_spec.__base__ == BaseInterfaceInputSpec assert hasattr(new_specs, "output_spec") assert new_specs().output_spec.__base__ == TraitedSpec assert hasattr(new_specs, "_run_interface") assert hasattr(new_specs, "_list_outputs") params_in = new_specs().inputs.get() params_out = new_specs()._outputs().get() assert len(params_in) == 4 assert "in_files" in params_in.keys() assert "param1" in params_in.keys() assert "out_dir" in params_out.keys() assert "out_ref" in params_out.keys() with pytest.raises(ValueError): 
new_specs().run() @pytest.mark.skipif(no_dipy(), reason="DIPY is not installed") def test_get_dipy_workflows(): from dipy.workflows import align l_wkflw = get_dipy_workflows(align) for name, obj in l_wkflw: assert name.endswith("Flow") assert issubclass(obj, align.Workflow) if __name__ == "__main__": test_convert_to_traits_type() test_create_interface_specs() test_dipy_to_nipype_interface() nipype-1.7.0/nipype/interfaces/dipy/tracks.py000066400000000000000000000277261413403311400212700ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os.path as op import numpy as np import nibabel as nb from distutils.version import LooseVersion from ... import logging from ..base import TraitedSpec, BaseInterfaceInputSpec, File, isdefined, traits from .base import ( DipyBaseInterface, HAVE_DIPY, dipy_version, dipy_to_nipype_interface, get_dipy_workflows, ) IFLOGGER = logging.getLogger("nipype.interface") if HAVE_DIPY and ( LooseVersion("0.15") >= LooseVersion(dipy_version()) >= LooseVersion("0.16") ): try: from dipy.workflows.tracking import LocalFiberTrackingPAMFlow as DetTrackFlow except ImportError: # different name in 0.15 from dipy.workflows.tracking import DetTrackPAMFlow as DetTrackFlow DeterministicTracking = dipy_to_nipype_interface( "DeterministicTracking", DetTrackFlow ) if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion("0.15"): from dipy.workflows import segment, tracking l_wkflw = get_dipy_workflows(segment) + get_dipy_workflows(tracking) for name, obj in l_wkflw: new_name = name.replace("Flow", "") globals()[new_name] = dipy_to_nipype_interface(new_name, obj) del l_wkflw else: IFLOGGER.info( "We advise you to upgrade DIPY version. 
This upgrade will" " open access to more function" ) class TrackDensityMapInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc="The input TrackVis track file") reference = File( exists=True, desc="A reference file to define RAS coordinates space" ) points_space = traits.Enum( "rasmm", "voxel", None, usedefault=True, desc="coordinates of trk file" ) voxel_dims = traits.List( traits.Float, minlen=3, maxlen=3, desc="The size of each voxel in mm." ) data_dims = traits.List( traits.Int, minlen=3, maxlen=3, desc="The size of the image in voxels." ) out_filename = File( "tdi.nii", usedefault=True, desc="The output filename for the tracks in TrackVis " "(.trk) format", ) class TrackDensityMapOutputSpec(TraitedSpec): out_file = File(exists=True) class TrackDensityMap(DipyBaseInterface): """ Creates a tract density image from a TrackVis track file using functions from dipy Example ------- >>> import nipype.interfaces.dipy as dipy >>> trk2tdi = dipy.TrackDensityMap() >>> trk2tdi.inputs.in_file = 'converted.trk' >>> trk2tdi.run() # doctest: +SKIP """ input_spec = TrackDensityMapInputSpec output_spec = TrackDensityMapOutputSpec def _run_interface(self, runtime): from numpy import min_scalar_type from dipy.tracking.utils import density_map import nibabel.trackvis as nbt tracks, header = nbt.read(self.inputs.in_file) streams = ((ii[0]) for ii in tracks) if isdefined(self.inputs.reference): refnii = nb.load(self.inputs.reference) affine = refnii.affine data_dims = refnii.shape[:3] kwargs = dict(affine=affine) else: IFLOGGER.warning( "voxel_dims and data_dims are deprecated as of dipy " "0.7.1. 
Please use reference input instead" ) if not isdefined(self.inputs.data_dims): data_dims = header["dim"] else: data_dims = self.inputs.data_dims if not isdefined(self.inputs.voxel_dims): voxel_size = header["voxel_size"] else: voxel_size = self.inputs.voxel_dims affine = header["vox_to_ras"] kwargs = dict(voxel_size=voxel_size) data = density_map(streams, data_dims, **kwargs) data = data.astype(min_scalar_type(data.max())) img = nb.Nifti1Image(data, affine) out_file = op.abspath(self.inputs.out_filename) nb.save(img, out_file) IFLOGGER.info( "Track density map saved as %s, size=%s, dimensions=%s", out_file, img.shape, img.header.get_zooms(), ) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = op.abspath(self.inputs.out_filename) return outputs class StreamlineTractographyInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc=("input diffusion data")) in_model = File(exists=True, desc=("input f/d-ODF model extracted from.")) tracking_mask = File(exists=True, desc=("input mask within which perform tracking")) seed_mask = File(exists=True, desc=("input mask within which perform seeding")) in_peaks = File(exists=True, desc=("peaks computed from the odf")) seed_coord = File( exists=True, desc=("file containing the list of seed voxel " "coordinates (N,3)"), ) gfa_thresh = traits.Float( 0.2, mandatory=True, usedefault=True, desc=("GFA threshold to compute tracking mask"), ) peak_threshold = traits.Float( 0.5, mandatory=True, usedefault=True, desc=("threshold to consider peaks from model"), ) min_angle = traits.Float( 25.0, mandatory=True, usedefault=True, desc=("minimum separation angle") ) multiprocess = traits.Bool( True, mandatory=True, usedefault=True, desc=("use multiprocessing") ) save_seeds = traits.Bool( False, mandatory=True, usedefault=True, desc=("save seeding voxels coordinates") ) num_seeds = traits.Int( 10000, mandatory=True, usedefault=True, desc=("desired number of tracks in 
tractography"), ) out_prefix = traits.Str(desc=("output prefix for file names")) class StreamlineTractographyOutputSpec(TraitedSpec): tracks = File(desc="TrackVis file containing extracted streamlines") gfa = File( desc=( "The resulting GFA (generalized FA) computed using the " "peaks of the ODF" ) ) odf_peaks = File(desc=("peaks computed from the odf")) out_seeds = File( desc=("file containing the (N,3) *voxel* coordinates used" " in seeding.") ) class StreamlineTractography(DipyBaseInterface): """ Streamline tractography using EuDX [Garyfallidis12]_. .. [Garyfallidis12] Garyfallidis E., “Towards an accurate brain tractography”, PhD thesis, University of Cambridge, 2012 Example ------- >>> from nipype.interfaces import dipy as ndp >>> track = ndp.StreamlineTractography() >>> track.inputs.in_file = '4d_dwi.nii' >>> track.inputs.in_model = 'model.pklz' >>> track.inputs.tracking_mask = 'dilated_wm_mask.nii' >>> res = track.run() # doctest: +SKIP """ input_spec = StreamlineTractographyInputSpec output_spec = StreamlineTractographyOutputSpec def _run_interface(self, runtime): from dipy.reconst.peaks import peaks_from_model from dipy.tracking.eudx import EuDX from dipy.data import get_sphere # import marshal as pickle import pickle as pickle import gzip if not (isdefined(self.inputs.in_model) or isdefined(self.inputs.in_peaks)): raise RuntimeError( ("At least one of in_model or in_peaks should " "be supplied") ) img = nb.load(self.inputs.in_file) imref = nb.four_to_three(img)[0] affine = img.affine data = img.get_fdata(dtype=np.float32) hdr = imref.header.copy() hdr.set_data_dtype(np.float32) hdr["data_type"] = 16 sphere = get_sphere("symmetric724") self._save_peaks = False if isdefined(self.inputs.in_peaks): IFLOGGER.info("Peaks file found, skipping ODF peaks search...") f = gzip.open(self.inputs.in_peaks, "rb") peaks = pickle.load(f) f.close() else: self._save_peaks = True IFLOGGER.info("Loading model and computing ODF peaks") f = gzip.open(self.inputs.in_model, "rb") 
odf_model = pickle.load(f) f.close() peaks = peaks_from_model( model=odf_model, data=data, sphere=sphere, relative_peak_threshold=self.inputs.peak_threshold, min_separation_angle=self.inputs.min_angle, parallel=self.inputs.multiprocess, ) f = gzip.open(self._gen_filename("peaks", ext=".pklz"), "wb") pickle.dump(peaks, f, -1) f.close() hdr.set_data_shape(peaks.gfa.shape) nb.Nifti1Image(peaks.gfa.astype(np.float32), affine, hdr).to_filename( self._gen_filename("gfa") ) IFLOGGER.info("Performing tractography") if isdefined(self.inputs.tracking_mask): msk = np.asanyarray(nb.load(self.inputs.tracking_mask).dataobj) msk[msk > 0] = 1 msk[msk < 0] = 0 else: msk = np.ones(imref.shape) gfa = peaks.gfa * msk seeds = self.inputs.num_seeds if isdefined(self.inputs.seed_coord): seeds = np.loadtxt(self.inputs.seed_coord) elif isdefined(self.inputs.seed_mask): seedmsk = np.asanyarray(nb.load(self.inputs.seed_mask).dataobj) assert seedmsk.shape == data.shape[:3] seedmsk[seedmsk > 0] = 1 seedmsk[seedmsk < 1] = 0 seedps = np.array(np.where(seedmsk == 1), dtype=np.float32).T vseeds = seedps.shape[0] nsperv = (seeds // vseeds) + 1 IFLOGGER.info( "Seed mask is provided (%d voxels inside " "mask), computing seeds (%d seeds/voxel).", vseeds, nsperv, ) if nsperv > 1: IFLOGGER.info( "Needed %d seeds per selected voxel (total %d).", nsperv, vseeds ) seedps = np.vstack(np.array([seedps] * nsperv)) voxcoord = seedps + np.random.uniform(-1, 1, size=seedps.shape) nseeds = voxcoord.shape[0] seeds = affine.dot(np.vstack((voxcoord.T, np.ones((1, nseeds)))))[ :3, : ].T if self.inputs.save_seeds: np.savetxt(self._gen_filename("seeds", ext=".txt"), seeds) if isdefined(self.inputs.tracking_mask): tmask = msk a_low = 0.1 else: tmask = gfa a_low = self.inputs.gfa_thresh eu = EuDX( tmask, peaks.peak_indices[..., 0], seeds=seeds, affine=affine, odf_vertices=sphere.vertices, a_low=a_low, ) ss_mm = [np.array(s) for s in eu] trkfilev = nb.trackvis.TrackvisFile( [(s, None, None) for s in ss_mm], 
points_space="rasmm", affine=np.eye(4) ) trkfilev.to_file(self._gen_filename("tracked", ext=".trk")) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["tracks"] = self._gen_filename("tracked", ext=".trk") outputs["gfa"] = self._gen_filename("gfa") if self._save_peaks: outputs["odf_peaks"] = self._gen_filename("peaks", ext=".pklz") if self.inputs.save_seeds: if isdefined(self.inputs.seed_coord): outputs["out_seeds"] = self.inputs.seed_coord else: outputs["out_seeds"] = self._gen_filename("seeds", ext=".txt") return outputs def _gen_filename(self, name, ext=None): fname, fext = op.splitext(op.basename(self.inputs.in_file)) if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext if not isdefined(self.inputs.out_prefix): out_prefix = op.abspath(fname) else: out_prefix = self.inputs.out_prefix if ext is None: ext = fext return out_prefix + "_" + name + ext nipype-1.7.0/nipype/interfaces/dtitk/000077500000000000000000000000001413403311400175635ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/dtitk/__init__.py000066400000000000000000000007731413403311400217030ustar00rootroot00000000000000""" DTI-TK is a spatial normalization and atlas construction toolkit for DTI. Interfaces for the `Diffusion Tensor Imaging Toolkit (DTI-TK) `_ command line tools. """ from .registration import ( Rigid, Affine, Diffeo, ComposeXfm, DiffeoSymTensor3DVol, AffSymTensor3DVol, AffScalarVol, DiffeoScalarVol, ) from .utils import ( TVAdjustVoxSp, SVAdjustVoxSp, TVResample, SVResample, TVtool, BinThresh, ) nipype-1.7.0/nipype/interfaces/dtitk/base.py000066400000000000000000000063641413403311400210600ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The dtitk module provides classes for interfacing with the `DTITK `_ command line tools. These are the base tools for working with DTITK. 
Preprocessing tools are found in dtitk/preprocess.py Registration tools are found in dtitk/registration.py Currently these tools are supported: * Rigid Tensor Registration * Affine Tensor Registration * Diffeomorphic Tensor Registration * Combine affiine and diffeomorphic transforms * Application of transform to tensor and scalar volumes * Threshold and Binarize * Adjusting the voxel space of tensor and scalar volumes * Resampling tensor and scalar volumes * Calculation of tensor metrics from tensor volume Examples -------- See the docstrings of the individual classes for examples. """ import os from ... import logging from ...utils.filemanip import fname_presuffix from ..base import CommandLine from nipype.interfaces.fsl.base import Info import warnings LOGGER = logging.getLogger("nipype.interface") class DTITKRenameMixin(object): def __init__(self, *args, **kwargs): classes = [cls.__name__ for cls in self.__class__.mro()] dep_name = classes[0] rename_idx = classes.index("DTITKRenameMixin") new_name = classes[rename_idx + 1] warnings.warn( "The {} interface has been renamed to {}\n" "Please see the documentation for DTI-TK " "interfaces, as some inputs have been " "added or renamed for clarity." "".format(dep_name, new_name), DeprecationWarning, ) super(DTITKRenameMixin, self).__init__(*args, **kwargs) class CommandLineDtitk(CommandLine): def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, ext=None): """Generate a filename based on the given parameters. The filename will take the form: cwd/basename. If change_ext is True, it will use the extentions specified in intputs.output_type. Parameters ---------- basename : str Filename to base the new filename on. cwd : str Path to prefix to the new filename. (default is os.getcwd()) suffix : str Suffix to add to the `basename`. (defaults is '' ) change_ext : bool Flag to change the filename extension to the FSL output type. 
(default True) Returns ------- fname : str New filename based on given parameters. """ if basename == "": msg = "Unable to generate filename for command %s. " % self.cmd msg += "basename is not set!" raise ValueError(msg) if cwd is None: cwd = os.getcwd() if ext is None: ext = Info.output_type_to_ext(self.inputs.output_type) if change_ext: if suffix: suffix = "".join((suffix, ext)) else: suffix = ext if suffix is None: suffix = "" fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) return fname nipype-1.7.0/nipype/interfaces/dtitk/registration.py000066400000000000000000000437051413403311400226600ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """DTITK registration interfaces DTI-TK developed by Gary Hui Zhang, gary.zhang@ucl.ac.uk For additional help, visit http://dti-tk.sf.net The high-dimensional tensor-based DTI registration algorithm Zhang, H., Avants, B.B, Yushkevich, P.A., Woo, J.H., Wang, S., McCluskey, L.H., Elman, L.B., Melhem, E.R., Gee, J.C., High-dimensional spatial normalization of diffusion tensor images improves the detection of white matter differences in amyotrophic lateral sclerosis, IEEE Transactions on Medical Imaging, 26(11):1585-1597, November 2007. PMID: 18041273. The original piecewise-affine tensor-based DTI registration algorithm at the core of DTI-TK Zhang, H., Yushkevich, P.A., Alexander, D.C., Gee, J.C., Deformable registration of diffusion tensor MR images with explicit orientation optimization, Medical Image Analysis, 10(5):764-785, October 2006. PMID: 16899392. 
""" from ..base import TraitedSpec, CommandLineInputSpec, traits, File, isdefined from ...utils.filemanip import fname_presuffix, split_filename from .base import CommandLineDtitk, DTITKRenameMixin import os __docformat__ = "restructuredtext" class RigidInputSpec(CommandLineInputSpec): fixed_file = File( desc="fixed tensor volume", exists=True, mandatory=True, position=0, argstr="%s", copyfile=False, ) moving_file = File( desc="moving tensor volume", exists=True, mandatory=True, position=1, argstr="%s", copyfile=False, ) similarity_metric = traits.Enum( "EDS", "GDS", "DDS", "NMI", mandatory=True, position=2, argstr="%s", desc="similarity metric", usedefault=True, ) sampling_xyz = traits.Tuple( (4, 4, 4), mandatory=True, position=3, argstr="%g %g %g", usedefault=True, desc="dist between samp points (mm) (x,y,z)", ) ftol = traits.Float( mandatory=True, position=4, argstr="%g", desc="cost function tolerance", default_value=0.01, usedefault=True, ) initialize_xfm = File( copyfile=True, desc="Initialize w/DTITK-FORMAT" "affine", position=5, argstr="%s", exists=True, ) class RigidOutputSpec(TraitedSpec): out_file = File(exists=True) out_file_xfm = File(exists=True) class Rigid(CommandLineDtitk): """Performs rigid registration between two tensor volumes Example ------- >>> from nipype.interfaces import dtitk >>> node = dtitk.Rigid() >>> node.inputs.fixed_file = 'im1.nii' >>> node.inputs.moving_file = 'im2.nii' >>> node.inputs.similarity_metric = 'EDS' >>> node.inputs.sampling_xyz = (4,4,4) >>> node.inputs.ftol = 0.01 >>> node.cmdline 'dti_rigid_reg im1.nii im2.nii EDS 4 4 4 0.01' >>> node.run() # doctest: +SKIP """ input_spec = RigidInputSpec output_spec = RigidOutputSpec _cmd = "dti_rigid_reg" """def _format_arg(self, name, spec, value): if name == 'initialize_xfm': value = 1 return super(Rigid, self)._format_arg(name, spec, value)""" def _run_interface(self, runtime): runtime = super(Rigid, self)._run_interface(runtime) if """.aff doesn't exist or can't be opened""" in 
runtime.stderr: self.raise_exception(runtime) return runtime def _list_outputs(self): outputs = self.output_spec().get() moving = self.inputs.moving_file outputs["out_file_xfm"] = fname_presuffix(moving, suffix=".aff", use_ext=False) outputs["out_file"] = fname_presuffix(moving, suffix="_aff") return outputs class Affine(Rigid): """Performs affine registration between two tensor volumes Example ------- >>> from nipype.interfaces import dtitk >>> node = dtitk.Affine() >>> node.inputs.fixed_file = 'im1.nii' >>> node.inputs.moving_file = 'im2.nii' >>> node.inputs.similarity_metric = 'EDS' >>> node.inputs.sampling_xyz = (4,4,4) >>> node.inputs.ftol = 0.01 >>> node.inputs.initialize_xfm = 'im_affine.aff' >>> node.cmdline 'dti_affine_reg im1.nii im2.nii EDS 4 4 4 0.01 im_affine.aff' >>> node.run() # doctest: +SKIP """ _cmd = "dti_affine_reg" class DiffeoInputSpec(CommandLineInputSpec): fixed_file = File(desc="fixed tensor volume", exists=True, position=0, argstr="%s") moving_file = File( desc="moving tensor volume", exists=True, position=1, argstr="%s", copyfile=False, ) mask_file = File(desc="mask", exists=True, position=2, argstr="%s") legacy = traits.Enum( 1, desc="legacy parameter; always set to 1", usedefault=True, mandatory=True, position=3, argstr="%d", ) n_iters = traits.Int( 6, desc="number of iterations", mandatory=True, position=4, argstr="%d", usedefault=True, ) ftol = traits.Float( 0.002, desc="iteration for the optimization to stop", mandatory=True, position=5, argstr="%g", usedefault=True, ) class DiffeoOutputSpec(TraitedSpec): out_file = File(exists=True) out_file_xfm = File(exists=True) class Diffeo(CommandLineDtitk): """Performs diffeomorphic registration between two tensor volumes Example ------- >>> from nipype.interfaces import dtitk >>> node = dtitk.Diffeo() >>> node.inputs.fixed_file = 'im1.nii' >>> node.inputs.moving_file = 'im2.nii' >>> node.inputs.mask_file = 'mask.nii' >>> node.inputs.legacy = 1 >>> node.inputs.n_iters = 6 >>> node.inputs.ftol 
= 0.002 >>> node.cmdline 'dti_diffeomorphic_reg im1.nii im2.nii mask.nii 1 6 0.002' >>> node.run() # doctest: +SKIP """ input_spec = DiffeoInputSpec output_spec = DiffeoOutputSpec _cmd = "dti_diffeomorphic_reg" def _list_outputs(self): outputs = self.output_spec().get() moving = self.inputs.moving_file outputs["out_file_xfm"] = fname_presuffix(moving, suffix="_diffeo.df") outputs["out_file"] = fname_presuffix(moving, suffix="_diffeo") return outputs class ComposeXfmInputSpec(CommandLineInputSpec): in_df = File( desc="diffeomorphic warp file", exists=True, argstr="-df %s", mandatory=True ) in_aff = File( desc="affine transform file", exists=True, argstr="-aff %s", mandatory=True ) out_file = File(desc="output path", argstr="-out %s", genfile=True) class ComposeXfmOutputSpec(TraitedSpec): out_file = File(exists=True) class ComposeXfm(CommandLineDtitk): """ Combines diffeomorphic and affine transforms Example ------- >>> from nipype.interfaces import dtitk >>> node = dtitk.ComposeXfm() >>> node.inputs.in_df = 'im_warp.df.nii' >>> node.inputs.in_aff= 'im_affine.aff' >>> node.cmdline 'dfRightComposeAffine -aff im_affine.aff -df im_warp.df.nii -out im_warp_affdf.df.nii' >>> node.run() # doctest: +SKIP """ input_spec = ComposeXfmInputSpec output_spec = ComposeXfmOutputSpec _cmd = "dfRightComposeAffine" def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.out_file if not isdefined(out_file): out_file = self._gen_filename("out_file") outputs["out_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): if name != "out_file": return path, base, ext = split_filename(self.inputs.in_df) suffix = "_affdf" if base.endswith(".df"): suffix += ".df" base = base[:-3] return fname_presuffix(base, suffix=suffix + ext, use_ext=False) class AffSymTensor3DVolInputSpec(CommandLineInputSpec): in_file = File( desc="moving tensor volume", exists=True, argstr="-in %s", mandatory=True ) out_file = File( desc="output filename", argstr="-out %s", 
name_source="in_file", name_template="%s_affxfmd", keep_extension=True, ) transform = File( exists=True, argstr="-trans %s", xor=["target", "translation", "euler", "deformation"], desc="transform to apply: specify an input transformation" " file; parameters input will be ignored", ) interpolation = traits.Enum( "LEI", "EI", usedefault=True, argstr="-interp %s", desc="Log Euclidean/Euclidean Interpolation", ) reorient = traits.Enum( "PPD", "NO", "FS", argstr="-reorient %s", usedefault=True, desc="Reorientation strategy: " "preservation of principal direction, no " "reorientation, or finite strain", ) target = File( exists=True, argstr="-target %s", xor=["transform"], desc="output volume specification read from the target " "volume if specified", ) translation = traits.Tuple( (traits.Float(), traits.Float(), traits.Float()), desc="translation (x,y,z) in mm", argstr="-translation %g %g %g", xor=["transform"], ) euler = traits.Tuple( (traits.Float(), traits.Float(), traits.Float()), desc="(theta, phi, psi) in degrees", xor=["transform"], argstr="-euler %g %g %g", ) deformation = traits.Tuple( (traits.Float(),) * 6, desc="(xx,yy,zz,xy,yz,xz)", xor=["transform"], argstr="-deformation %g %g %g %g %g %g", ) class AffSymTensor3DVolOutputSpec(TraitedSpec): out_file = File(exists=True) class AffSymTensor3DVol(CommandLineDtitk): """ Applies affine transform to a tensor volume Example ------- >>> from nipype.interfaces import dtitk >>> node = dtitk.AffSymTensor3DVol() >>> node.inputs.in_file = 'im1.nii' >>> node.inputs.transform = 'im_affine.aff' >>> node.cmdline 'affineSymTensor3DVolume -in im1.nii -interp LEI -out im1_affxfmd.nii -reorient PPD -trans im_affine.aff' >>> node.run() # doctest: +SKIP """ input_spec = AffSymTensor3DVolInputSpec output_spec = AffSymTensor3DVolOutputSpec _cmd = "affineSymTensor3DVolume" class AffScalarVolInputSpec(CommandLineInputSpec): in_file = File( desc="moving scalar volume", exists=True, argstr="-in %s", mandatory=True ) out_file = File( 
desc="output filename", argstr="-out %s", name_source="in_file", name_template="%s_affxfmd", keep_extension=True, ) transform = File( exists=True, argstr="-trans %s", xor=["target", "translation", "euler", "deformation"], desc="transform to apply: specify an input transformation" " file; parameters input will be ignored", ) interpolation = traits.Enum( "trilinear", "NN", usedefault=True, argstr="-interp %s", desc="trilinear or nearest neighbor" " interpolation", ) target = File( exists=True, argstr="-target %s", xor=["transform"], desc="output volume specification read from the target " "volume if specified", ) translation = traits.Tuple( (traits.Float(), traits.Float(), traits.Float()), desc="translation (x,y,z) in mm", argstr="-translation %g %g %g", xor=["transform"], ) euler = traits.Tuple( (traits.Float(), traits.Float(), traits.Float()), desc="(theta, phi, psi) in degrees", xor=["transform"], argstr="-euler %g %g %g", ) deformation = traits.Tuple( (traits.Float(),) * 6, desc="(xx,yy,zz,xy,yz,xz)", xor=["transform"], argstr="-deformation %g %g %g %g %g %g", ) class AffScalarVolOutputSpec(TraitedSpec): out_file = File(desc="moved volume", exists=True) class AffScalarVol(CommandLineDtitk): """ Applies affine transform to a scalar volume Example ------- >>> from nipype.interfaces import dtitk >>> node = dtitk.AffScalarVol() >>> node.inputs.in_file = 'im1.nii' >>> node.inputs.transform = 'im_affine.aff' >>> node.cmdline 'affineScalarVolume -in im1.nii -interp 0 -out im1_affxfmd.nii -trans im_affine.aff' >>> node.run() # doctest: +SKIP """ input_spec = AffScalarVolInputSpec output_spec = AffScalarVolOutputSpec _cmd = "affineScalarVolume" def _format_arg(self, name, spec, value): if name == "interpolation": value = {"trilinear": 0, "NN": 1}[value] return super(AffScalarVol, self)._format_arg(name, spec, value) class DiffeoSymTensor3DVolInputSpec(CommandLineInputSpec): in_file = File( desc="moving tensor volume", exists=True, argstr="-in %s", mandatory=True ) 
out_file = File( desc="output filename", argstr="-out %s", name_source="in_file", name_template="%s_diffeoxfmd", keep_extension=True, ) transform = File( exists=True, argstr="-trans %s", mandatory=True, desc="transform to apply" ) df = traits.Str("FD", argstr="-df %s", usedefault=True) interpolation = traits.Enum( "LEI", "EI", usedefault=True, argstr="-interp %s", desc="Log Euclidean/Euclidean Interpolation", ) reorient = traits.Enum( "PPD", "FS", argstr="-reorient %s", usedefault=True, desc="Reorientation strategy: " "preservation of principal direction or finite " "strain", ) target = File( exists=True, argstr="-target %s", xor=["voxel_size"], desc="output volume specification read from the target " "volume if specified", ) voxel_size = traits.Tuple( (traits.Float(), traits.Float(), traits.Float()), desc="xyz voxel size (superseded by target)", argstr="-vsize %g %g %g", xor=["target"], ) flip = traits.Tuple( (traits.Int(), traits.Int(), traits.Int()), argstr="-flip %d %d %d" ) resampling_type = traits.Enum( "backward", "forward", desc="use backward or forward resampling", argstr="-type %s", ) class DiffeoSymTensor3DVolOutputSpec(TraitedSpec): out_file = File(exists=True) class DiffeoSymTensor3DVol(CommandLineDtitk): """ Applies diffeomorphic transform to a tensor volume Example ------- >>> from nipype.interfaces import dtitk >>> node = dtitk.DiffeoSymTensor3DVol() >>> node.inputs.in_file = 'im1.nii' >>> node.inputs.transform = 'im_warp.df.nii' >>> node.cmdline 'deformationSymTensor3DVolume -df FD -in im1.nii -interp LEI -out im1_diffeoxfmd.nii -reorient PPD -trans im_warp.df.nii' >>> node.run() # doctest: +SKIP """ input_spec = DiffeoSymTensor3DVolInputSpec output_spec = DiffeoSymTensor3DVolOutputSpec _cmd = "deformationSymTensor3DVolume" def _format_arg(self, name, spec, value): if name == "resampling_type": value = {"forward": 0, "backward": 1}[value] return super(DiffeoSymTensor3DVol, self)._format_arg(name, spec, value) class 
DiffeoScalarVolInputSpec(CommandLineInputSpec): in_file = File( desc="moving scalar volume", exists=True, argstr="-in %s", mandatory=True ) out_file = File( desc="output filename", argstr="-out %s", name_source="in_file", name_template="%s_diffeoxfmd", keep_extension=True, ) transform = File( exists=True, argstr="-trans %s", mandatory=True, desc="transform to apply" ) target = File( exists=True, argstr="-target %s", xor=["voxel_size"], desc="output volume specification read from the target " "volume if specified", ) voxel_size = traits.Tuple( (traits.Float(), traits.Float(), traits.Float()), desc="xyz voxel size (superseded by target)", argstr="-vsize %g %g %g", xor=["target"], ) flip = traits.Tuple( (traits.Int(), traits.Int(), traits.Int()), argstr="-flip %d %d %d" ) resampling_type = traits.Enum( "backward", "forward", desc="use backward or forward resampling", argstr="-type %s", ) interpolation = traits.Enum( "trilinear", "NN", desc="trilinear, or nearest neighbor", argstr="-interp %s", usedefault=True, ) class DiffeoScalarVolOutputSpec(TraitedSpec): out_file = File(desc="moved volume", exists=True) class DiffeoScalarVol(CommandLineDtitk): """ Applies diffeomorphic transform to a scalar volume Example ------- >>> from nipype.interfaces import dtitk >>> node = dtitk.DiffeoScalarVol() >>> node.inputs.in_file = 'im1.nii' >>> node.inputs.transform = 'im_warp.df.nii' >>> node.cmdline 'deformationScalarVolume -in im1.nii -interp 0 -out im1_diffeoxfmd.nii -trans im_warp.df.nii' >>> node.run() # doctest: +SKIP """ input_spec = DiffeoScalarVolInputSpec output_spec = DiffeoScalarVolOutputSpec _cmd = "deformationScalarVolume" def _format_arg(self, name, spec, value): if name == "resampling_type": value = {"forward": 0, "backward": 1}[value] elif name == "interpolation": value = {"trilinear": 0, "NN": 1}[value] return super(DiffeoScalarVol, self)._format_arg(name, spec, value) class RigidTask(DTITKRenameMixin, Rigid): pass class AffineTask(DTITKRenameMixin, Affine): pass 
class DiffeoTask(DTITKRenameMixin, Diffeo): pass class ComposeXfmTask(DTITKRenameMixin, ComposeXfm): pass class affScalarVolTask(DTITKRenameMixin, AffScalarVol): pass class affSymTensor3DVolTask(DTITKRenameMixin, AffSymTensor3DVol): pass class diffeoScalarVolTask(DTITKRenameMixin, DiffeoScalarVol): pass class diffeoSymTensor3DVolTask(DTITKRenameMixin, DiffeoSymTensor3DVol): pass nipype-1.7.0/nipype/interfaces/dtitk/tests/000077500000000000000000000000001413403311400207255ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/dtitk/tests/__init__.py000066400000000000000000000002121413403311400230310ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_AffScalarVol.py000066400000000000000000000035451413403311400257000ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import AffScalarVol def test_AffScalarVol_inputs(): input_map = dict( args=dict( argstr="%s", ), deformation=dict( argstr="-deformation %g %g %g %g %g %g", xor=["transform"], ), environ=dict( nohash=True, usedefault=True, ), euler=dict( argstr="-euler %g %g %g", xor=["transform"], ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), interpolation=dict( argstr="-interp %s", usedefault=True, ), out_file=dict( argstr="-out %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_affxfmd", ), target=dict( argstr="-target %s", extensions=None, xor=["transform"], ), transform=dict( argstr="-trans %s", extensions=None, xor=["target", "translation", "euler", "deformation"], ), translation=dict( argstr="-translation %g %g %g", xor=["transform"], ), ) inputs = AffScalarVol.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def 
test_AffScalarVol_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = AffScalarVol.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_AffSymTensor3DVol.py000066400000000000000000000037401413403311400266220ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import AffSymTensor3DVol def test_AffSymTensor3DVol_inputs(): input_map = dict( args=dict( argstr="%s", ), deformation=dict( argstr="-deformation %g %g %g %g %g %g", xor=["transform"], ), environ=dict( nohash=True, usedefault=True, ), euler=dict( argstr="-euler %g %g %g", xor=["transform"], ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), interpolation=dict( argstr="-interp %s", usedefault=True, ), out_file=dict( argstr="-out %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_affxfmd", ), reorient=dict( argstr="-reorient %s", usedefault=True, ), target=dict( argstr="-target %s", extensions=None, xor=["transform"], ), transform=dict( argstr="-trans %s", extensions=None, xor=["target", "translation", "euler", "deformation"], ), translation=dict( argstr="-translation %g %g %g", xor=["transform"], ), ) inputs = AffSymTensor3DVol.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AffSymTensor3DVol_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = AffSymTensor3DVol.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_Affine.py000066400000000000000000000034211413403311400245560ustar00rootroot00000000000000# AUTO-GENERATED by 
tools/checkspecs.py - DO NOT EDIT from ..registration import Affine def test_Affine_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fixed_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=0, ), ftol=dict( argstr="%g", mandatory=True, position=4, usedefault=True, ), initialize_xfm=dict( argstr="%s", copyfile=True, extensions=None, position=5, ), moving_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=1, ), sampling_xyz=dict( argstr="%g %g %g", mandatory=True, position=3, usedefault=True, ), similarity_metric=dict( argstr="%s", mandatory=True, position=2, usedefault=True, ), ) inputs = Affine.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Affine_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_file_xfm=dict( extensions=None, ), ) outputs = Affine.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_AffineTask.py000066400000000000000000000034451413403311400254070ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import AffineTask def test_AffineTask_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fixed_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=0, ), ftol=dict( argstr="%g", mandatory=True, position=4, usedefault=True, ), initialize_xfm=dict( argstr="%s", copyfile=True, extensions=None, position=5, ), moving_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=1, ), sampling_xyz=dict( argstr="%g %g %g", mandatory=True, position=3, usedefault=True, ), similarity_metric=dict( 
argstr="%s", mandatory=True, position=2, usedefault=True, ), ) inputs = AffineTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AffineTask_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_file_xfm=dict( extensions=None, ), ) outputs = AffineTask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_BinThresh.py000066400000000000000000000033421413403311400252560ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import BinThresh def test_BinThresh_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=0, ), inside_value=dict( argstr="%g", mandatory=True, position=4, usedefault=True, ), lower_bound=dict( argstr="%g", mandatory=True, position=2, usedefault=True, ), out_file=dict( argstr="%s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_thrbin", position=1, ), outside_value=dict( argstr="%g", mandatory=True, position=5, usedefault=True, ), upper_bound=dict( argstr="%g", mandatory=True, position=3, usedefault=True, ), ) inputs = BinThresh.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BinThresh_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = BinThresh.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_BinThreshTask.py000066400000000000000000000033661413403311400261070ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import BinThreshTask def test_BinThreshTask_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=0, ), inside_value=dict( argstr="%g", mandatory=True, position=4, usedefault=True, ), lower_bound=dict( argstr="%g", mandatory=True, position=2, usedefault=True, ), out_file=dict( argstr="%s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_thrbin", position=1, ), outside_value=dict( argstr="%g", mandatory=True, position=5, usedefault=True, ), upper_bound=dict( argstr="%g", mandatory=True, position=3, usedefault=True, ), ) inputs = BinThreshTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BinThreshTask_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = BinThreshTask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_CommandLineDtitk.py000066400000000000000000000010011413403311400265440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import CommandLineDtitk def test_CommandLineDtitk_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), ) inputs = CommandLineDtitk.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_ComposeXfm.py000066400000000000000000000022541413403311400254510ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import ComposeXfm def test_ComposeXfm_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_aff=dict( argstr="-aff %s", extensions=None, mandatory=True, ), in_df=dict( argstr="-df %s", extensions=None, mandatory=True, ), out_file=dict( argstr="-out %s", extensions=None, genfile=True, ), ) inputs = ComposeXfm.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ComposeXfm_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ComposeXfm.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_ComposeXfmTask.py000066400000000000000000000023001413403311400262640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import ComposeXfmTask def test_ComposeXfmTask_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_aff=dict( argstr="-aff %s", extensions=None, mandatory=True, ), in_df=dict( argstr="-df %s", extensions=None, mandatory=True, ), out_file=dict( argstr="-out %s", extensions=None, genfile=True, ), ) inputs = ComposeXfmTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ComposeXfmTask_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ComposeXfmTask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_Diffeo.py000066400000000000000000000032071413403311400245640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import Diffeo def test_Diffeo_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fixed_file=dict( argstr="%s", extensions=None, position=0, ), ftol=dict( argstr="%g", mandatory=True, position=5, usedefault=True, ), legacy=dict( argstr="%d", mandatory=True, position=3, usedefault=True, ), mask_file=dict( argstr="%s", extensions=None, position=2, ), moving_file=dict( argstr="%s", copyfile=False, extensions=None, position=1, ), n_iters=dict( argstr="%d", mandatory=True, position=4, usedefault=True, ), ) inputs = Diffeo.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Diffeo_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_file_xfm=dict( extensions=None, ), ) outputs = Diffeo.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_DiffeoScalarVol.py000066400000000000000000000033651413403311400264000ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import DiffeoScalarVol def test_DiffeoScalarVol_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), flip=dict( argstr="-flip %d %d %d", ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), interpolation=dict( argstr="-interp %s", usedefault=True, ), out_file=dict( argstr="-out %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_diffeoxfmd", ), resampling_type=dict( 
argstr="-type %s", ), target=dict( argstr="-target %s", extensions=None, xor=["voxel_size"], ), transform=dict( argstr="-trans %s", extensions=None, mandatory=True, ), voxel_size=dict( argstr="-vsize %g %g %g", xor=["target"], ), ) inputs = DiffeoScalarVol.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DiffeoScalarVol_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = DiffeoScalarVol.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_DiffeoSymTensor3DVol.py000066400000000000000000000037061413403311400273240ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import DiffeoSymTensor3DVol def test_DiffeoSymTensor3DVol_inputs(): input_map = dict( args=dict( argstr="%s", ), df=dict( argstr="-df %s", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), flip=dict( argstr="-flip %d %d %d", ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), interpolation=dict( argstr="-interp %s", usedefault=True, ), out_file=dict( argstr="-out %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_diffeoxfmd", ), reorient=dict( argstr="-reorient %s", usedefault=True, ), resampling_type=dict( argstr="-type %s", ), target=dict( argstr="-target %s", extensions=None, xor=["voxel_size"], ), transform=dict( argstr="-trans %s", extensions=None, mandatory=True, ), voxel_size=dict( argstr="-vsize %g %g %g", xor=["target"], ), ) inputs = DiffeoSymTensor3DVol.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DiffeoSymTensor3DVol_outputs(): output_map = dict( 
out_file=dict( extensions=None, ), ) outputs = DiffeoSymTensor3DVol.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_DiffeoTask.py000066400000000000000000000032331413403311400254060ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import DiffeoTask def test_DiffeoTask_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fixed_file=dict( argstr="%s", extensions=None, position=0, ), ftol=dict( argstr="%g", mandatory=True, position=5, usedefault=True, ), legacy=dict( argstr="%d", mandatory=True, position=3, usedefault=True, ), mask_file=dict( argstr="%s", extensions=None, position=2, ), moving_file=dict( argstr="%s", copyfile=False, extensions=None, position=1, ), n_iters=dict( argstr="%d", mandatory=True, position=4, usedefault=True, ), ) inputs = DiffeoTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DiffeoTask_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_file_xfm=dict( extensions=None, ), ) outputs = DiffeoTask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_Rigid.py000066400000000000000000000034141413403311400244260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import Rigid def test_Rigid_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fixed_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=0, ), ftol=dict( argstr="%g", mandatory=True, position=4, 
usedefault=True, ), initialize_xfm=dict( argstr="%s", copyfile=True, extensions=None, position=5, ), moving_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=1, ), sampling_xyz=dict( argstr="%g %g %g", mandatory=True, position=3, usedefault=True, ), similarity_metric=dict( argstr="%s", mandatory=True, position=2, usedefault=True, ), ) inputs = Rigid.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Rigid_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_file_xfm=dict( extensions=None, ), ) outputs = Rigid.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_RigidTask.py000066400000000000000000000034401413403311400252500ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import RigidTask def test_RigidTask_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fixed_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=0, ), ftol=dict( argstr="%g", mandatory=True, position=4, usedefault=True, ), initialize_xfm=dict( argstr="%s", copyfile=True, extensions=None, position=5, ), moving_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=1, ), sampling_xyz=dict( argstr="%g %g %g", mandatory=True, position=3, usedefault=True, ), similarity_metric=dict( argstr="%s", mandatory=True, position=2, usedefault=True, ), ) inputs = RigidTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RigidTask_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_file_xfm=dict( 
extensions=None, ), ) outputs = RigidTask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSp.py000066400000000000000000000027551413403311400261020ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import SVAdjustVoxSp def test_SVAdjustVoxSp_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), origin=dict( argstr="-origin %g %g %g", xor=["target_file"], ), out_file=dict( argstr="-out %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_avs", ), target_file=dict( argstr="-target %s", extensions=None, xor=["voxel_size", "origin"], ), voxel_size=dict( argstr="-vsize %g %g %g", xor=["target_file"], ), ) inputs = SVAdjustVoxSp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SVAdjustVoxSp_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = SVAdjustVoxSp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_SVAdjustVoxSpTask.py000066400000000000000000000030011413403311400267060ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import SVAdjustVoxSpTask def test_SVAdjustVoxSpTask_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), origin=dict( argstr="-origin %g %g %g", xor=["target_file"], ), out_file=dict( argstr="-out %s", extensions=None, 
keep_extension=True, name_source="in_file", name_template="%s_avs", ), target_file=dict( argstr="-target %s", extensions=None, xor=["voxel_size", "origin"], ), voxel_size=dict( argstr="-vsize %g %g %g", xor=["target_file"], ), ) inputs = SVAdjustVoxSpTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SVAdjustVoxSpTask_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = SVAdjustVoxSpTask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_SVResample.py000066400000000000000000000032331413403311400254100ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import SVResample def test_SVResample_inputs(): input_map = dict( align=dict( argstr="-align %s", ), args=dict( argstr="%s", ), array_size=dict( argstr="-size %d %d %d", xor=["target_file"], ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), origin=dict( argstr="-origin %g %g %g", xor=["target_file"], ), out_file=dict( argstr="-out %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_resampled", ), target_file=dict( argstr="-target %s", extensions=None, xor=["array_size", "voxel_size", "origin"], ), voxel_size=dict( argstr="-vsize %g %g %g", xor=["target_file"], ), ) inputs = SVResample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SVResample_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = SVResample.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_SVResampleTask.py000066400000000000000000000032571413403311400262410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import SVResampleTask def test_SVResampleTask_inputs(): input_map = dict( align=dict( argstr="-align %s", ), args=dict( argstr="%s", ), array_size=dict( argstr="-size %d %d %d", xor=["target_file"], ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), origin=dict( argstr="-origin %g %g %g", xor=["target_file"], ), out_file=dict( argstr="-out %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_resampled", ), target_file=dict( argstr="-target %s", extensions=None, xor=["array_size", "voxel_size", "origin"], ), voxel_size=dict( argstr="-vsize %g %g %g", xor=["target_file"], ), ) inputs = SVResampleTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SVResampleTask_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = SVResampleTask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_TVAdjustOriginTask.py000066400000000000000000000030061413403311400270640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import TVAdjustOriginTask def test_TVAdjustOriginTask_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), origin=dict( argstr="-origin %g %g %g", xor=["target_file"], ), out_file=dict( argstr="-out %s", extensions=None, keep_extension=True, 
name_source="in_file", name_template="%s_avs", ), target_file=dict( argstr="-target %s", extensions=None, xor=["voxel_size", "origin"], ), voxel_size=dict( argstr="-vsize %g %g %g", xor=["target_file"], ), ) inputs = TVAdjustOriginTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TVAdjustOriginTask_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TVAdjustOriginTask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSp.py000066400000000000000000000027551413403311400261030ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import TVAdjustVoxSp def test_TVAdjustVoxSp_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), origin=dict( argstr="-origin %g %g %g", xor=["target_file"], ), out_file=dict( argstr="-out %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_avs", ), target_file=dict( argstr="-target %s", extensions=None, xor=["voxel_size", "origin"], ), voxel_size=dict( argstr="-vsize %g %g %g", xor=["target_file"], ), ) inputs = TVAdjustVoxSp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TVAdjustVoxSp_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TVAdjustVoxSp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_TVAdjustVoxSpTask.py000066400000000000000000000030011413403311400267070ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import TVAdjustVoxSpTask def test_TVAdjustVoxSpTask_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), origin=dict( argstr="-origin %g %g %g", xor=["target_file"], ), out_file=dict( argstr="-out %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_avs", ), target_file=dict( argstr="-target %s", extensions=None, xor=["voxel_size", "origin"], ), voxel_size=dict( argstr="-vsize %g %g %g", xor=["target_file"], ), ) inputs = TVAdjustVoxSpTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TVAdjustVoxSpTask_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TVAdjustVoxSpTask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_TVResample.py000066400000000000000000000033431413403311400254130ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import TVResample def test_TVResample_inputs(): input_map = dict( align=dict( argstr="-align %s", ), args=dict( argstr="%s", ), array_size=dict( argstr="-size %d %d %d", xor=["target_file"], ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), interpolation=dict( argstr="-interp %s", ), origin=dict( argstr="-origin %g %g %g", xor=["target_file"], ), out_file=dict( argstr="-out %s", extensions=None, keep_extension=True, name_source="in_file", 
name_template="%s_resampled", ), target_file=dict( argstr="-target %s", extensions=None, xor=["array_size", "voxel_size", "origin"], ), voxel_size=dict( argstr="-vsize %g %g %g", xor=["target_file"], ), ) inputs = TVResample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TVResample_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TVResample.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_TVResampleTask.py000066400000000000000000000033671413403311400262440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import TVResampleTask def test_TVResampleTask_inputs(): input_map = dict( align=dict( argstr="-align %s", ), args=dict( argstr="%s", ), array_size=dict( argstr="-size %d %d %d", xor=["target_file"], ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), interpolation=dict( argstr="-interp %s", ), origin=dict( argstr="-origin %g %g %g", xor=["target_file"], ), out_file=dict( argstr="-out %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_resampled", ), target_file=dict( argstr="-target %s", extensions=None, xor=["array_size", "voxel_size", "origin"], ), voxel_size=dict( argstr="-vsize %g %g %g", xor=["target_file"], ), ) inputs = TVResampleTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TVResampleTask_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TVResampleTask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_TVtool.py000066400000000000000000000021271413403311400246170ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import TVtool def test_TVtool_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), in_flag=dict( argstr="-%s", ), out_file=dict( argstr="-out %s", extensions=None, genfile=True, ), ) inputs = TVtool.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TVtool_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TVtool.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_TVtoolTask.py000066400000000000000000000021531413403311400254410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import TVtoolTask def test_TVtoolTask_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), in_flag=dict( argstr="-%s", ), out_file=dict( argstr="-out %s", extensions=None, genfile=True, ), ) inputs = TVtoolTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TVtoolTask_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TVtoolTask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_affScalarVolTask.py000066400000000000000000000035711413403311400265620ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import affScalarVolTask def test_affScalarVolTask_inputs(): input_map = dict( args=dict( argstr="%s", ), deformation=dict( argstr="-deformation %g %g %g %g %g %g", xor=["transform"], ), environ=dict( nohash=True, usedefault=True, ), euler=dict( argstr="-euler %g %g %g", xor=["transform"], ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), interpolation=dict( argstr="-interp %s", usedefault=True, ), out_file=dict( argstr="-out %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_affxfmd", ), target=dict( argstr="-target %s", extensions=None, xor=["transform"], ), transform=dict( argstr="-trans %s", extensions=None, xor=["target", "translation", "euler", "deformation"], ), translation=dict( argstr="-translation %g %g %g", xor=["transform"], ), ) inputs = affScalarVolTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_affScalarVolTask_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = affScalarVolTask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_affSymTensor3DVolTask.py000066400000000000000000000037641413403311400275130ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import affSymTensor3DVolTask def test_affSymTensor3DVolTask_inputs(): input_map = dict( args=dict( argstr="%s", ), deformation=dict( argstr="-deformation %g %g %g %g %g %g", xor=["transform"], ), environ=dict( nohash=True, usedefault=True, ), euler=dict( argstr="-euler 
%g %g %g", xor=["transform"], ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), interpolation=dict( argstr="-interp %s", usedefault=True, ), out_file=dict( argstr="-out %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_affxfmd", ), reorient=dict( argstr="-reorient %s", usedefault=True, ), target=dict( argstr="-target %s", extensions=None, xor=["transform"], ), transform=dict( argstr="-trans %s", extensions=None, xor=["target", "translation", "euler", "deformation"], ), translation=dict( argstr="-translation %g %g %g", xor=["transform"], ), ) inputs = affSymTensor3DVolTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_affSymTensor3DVolTask_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = affSymTensor3DVolTask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_diffeoScalarVolTask.py000066400000000000000000000034111413403311400272530ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import diffeoScalarVolTask def test_diffeoScalarVolTask_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), flip=dict( argstr="-flip %d %d %d", ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), interpolation=dict( argstr="-interp %s", usedefault=True, ), out_file=dict( argstr="-out %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_diffeoxfmd", ), resampling_type=dict( argstr="-type %s", ), target=dict( argstr="-target %s", extensions=None, xor=["voxel_size"], ), transform=dict( argstr="-trans %s", extensions=None, mandatory=True, ), voxel_size=dict( argstr="-vsize %g %g 
%g", xor=["target"], ), ) inputs = diffeoScalarVolTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_diffeoScalarVolTask_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = diffeoScalarVolTask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/tests/test_auto_diffeoSymTensor3DVolTask.py000066400000000000000000000037321413403311400302060ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import diffeoSymTensor3DVolTask def test_diffeoSymTensor3DVolTask_inputs(): input_map = dict( args=dict( argstr="%s", ), df=dict( argstr="-df %s", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), flip=dict( argstr="-flip %d %d %d", ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), interpolation=dict( argstr="-interp %s", usedefault=True, ), out_file=dict( argstr="-out %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_diffeoxfmd", ), reorient=dict( argstr="-reorient %s", usedefault=True, ), resampling_type=dict( argstr="-type %s", ), target=dict( argstr="-target %s", extensions=None, xor=["voxel_size"], ), transform=dict( argstr="-trans %s", extensions=None, mandatory=True, ), voxel_size=dict( argstr="-vsize %g %g %g", xor=["target"], ), ) inputs = diffeoSymTensor3DVolTask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_diffeoSymTensor3DVolTask_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = diffeoSymTensor3DVolTask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): 
assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/dtitk/utils.py000066400000000000000000000266701413403311400213100ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """DTITK utility interfaces DTI-TK developed by Gary Hui Zhang, gary.zhang@ucl.ac.uk For additional help, visit http://dti-tk.sf.net The high-dimensional tensor-based DTI registration algorithm Zhang, H., Avants, B.B, Yushkevich, P.A., Woo, J.H., Wang, S., McCluskey, L.H., Elman, L.B., Melhem, E.R., Gee, J.C., High-dimensional spatial normalization of diffusion tensor images improves the detection of white matter differences in amyotrophic lateral sclerosis, IEEE Transactions on Medical Imaging, 26(11):1585-1597, November 2007. PMID: 18041273. The original piecewise-affine tensor-based DTI registration algorithm at the core of DTI-TK Zhang, H., Yushkevich, P.A., Alexander, D.C., Gee, J.C., Deformable registration of diffusion tensor MR images with explicit orientation optimization, Medical Image Analysis, 10(5):764-785, October 2006. PMID: 16899392. 
""" from ..base import TraitedSpec, CommandLineInputSpec, File, traits, isdefined from ...utils.filemanip import fname_presuffix from .base import CommandLineDtitk, DTITKRenameMixin import os __docformat__ = "restructuredtext" class TVAdjustVoxSpInputSpec(CommandLineInputSpec): in_file = File( desc="tensor volume to modify", exists=True, mandatory=True, argstr="-in %s" ) out_file = File( desc="output path", argstr="-out %s", name_source="in_file", name_template="%s_avs", keep_extension=True, ) target_file = File( desc="target volume to match", argstr="-target %s", xor=["voxel_size", "origin"] ) voxel_size = traits.Tuple( (traits.Float(), traits.Float(), traits.Float()), desc="xyz voxel size (superseded by target)", argstr="-vsize %g %g %g", xor=["target_file"], ) origin = traits.Tuple( (traits.Float(), traits.Float(), traits.Float()), desc="xyz origin (superseded by target)", argstr="-origin %g %g %g", xor=["target_file"], ) class TVAdjustVoxSpOutputSpec(TraitedSpec): out_file = File(exists=True) class TVAdjustVoxSp(CommandLineDtitk): """ Adjusts the voxel space of a tensor volume. 
Example ------- >>> from nipype.interfaces import dtitk >>> node = dtitk.TVAdjustVoxSp() >>> node.inputs.in_file = 'im1.nii' >>> node.inputs.target_file = 'im2.nii' >>> node.cmdline 'TVAdjustVoxelspace -in im1.nii -out im1_avs.nii -target im2.nii' >>> node.run() # doctest: +SKIP """ input_spec = TVAdjustVoxSpInputSpec output_spec = TVAdjustVoxSpOutputSpec _cmd = "TVAdjustVoxelspace" class SVAdjustVoxSpInputSpec(CommandLineInputSpec): in_file = File( desc="scalar volume to modify", exists=True, mandatory=True, argstr="-in %s" ) out_file = File( desc="output path", argstr="-out %s", name_source="in_file", name_template="%s_avs", keep_extension=True, ) target_file = File( desc="target volume to match", argstr="-target %s", xor=["voxel_size", "origin"] ) voxel_size = traits.Tuple( (traits.Float(), traits.Float(), traits.Float()), desc="xyz voxel size (superseded by target)", argstr="-vsize %g %g %g", xor=["target_file"], ) origin = traits.Tuple( (traits.Float(), traits.Float(), traits.Float()), desc="xyz origin (superseded by target)", argstr="-origin %g %g %g", xor=["target_file"], ) class SVAdjustVoxSpOutputSpec(TraitedSpec): out_file = File(exists=True) class SVAdjustVoxSp(CommandLineDtitk): """ Adjusts the voxel space of a scalar volume. 
Example ------- >>> from nipype.interfaces import dtitk >>> node = dtitk.SVAdjustVoxSp() >>> node.inputs.in_file = 'im1.nii' >>> node.inputs.target_file = 'im2.nii' >>> node.cmdline 'SVAdjustVoxelspace -in im1.nii -out im1_avs.nii -target im2.nii' >>> node.run() # doctest: +SKIP """ input_spec = SVAdjustVoxSpInputSpec output_spec = SVAdjustVoxSpOutputSpec _cmd = "SVAdjustVoxelspace" class TVResampleInputSpec(CommandLineInputSpec): in_file = File( desc="tensor volume to resample", exists=True, mandatory=True, argstr="-in %s" ) out_file = File( desc="output path", name_source="in_file", name_template="%s_resampled", keep_extension=True, argstr="-out %s", ) target_file = File( desc="specs read from the target volume", argstr="-target %s", xor=["array_size", "voxel_size", "origin"], ) align = traits.Enum( "center", "origin", argstr="-align %s", desc="how to align output volume to input volume", ) interpolation = traits.Enum( "LEI", "EI", argstr="-interp %s", desc="Log Euclidean Euclidean Interpolation" ) array_size = traits.Tuple( (traits.Int(), traits.Int(), traits.Int()), desc="resampled array size", xor=["target_file"], argstr="-size %d %d %d", ) voxel_size = traits.Tuple( (traits.Float(), traits.Float(), traits.Float()), desc="resampled voxel size", xor=["target_file"], argstr="-vsize %g %g %g", ) origin = traits.Tuple( (traits.Float(), traits.Float(), traits.Float()), desc="xyz origin", xor=["target_file"], argstr="-origin %g %g %g", ) class TVResampleOutputSpec(TraitedSpec): out_file = File(exists=True) class TVResample(CommandLineDtitk): """ Resamples a tensor volume. 
Example ------- >>> from nipype.interfaces import dtitk >>> node = dtitk.TVResample() >>> node.inputs.in_file = 'im1.nii' >>> node.inputs.target_file = 'im2.nii' >>> node.cmdline 'TVResample -in im1.nii -out im1_resampled.nii -target im2.nii' >>> node.run() # doctest: +SKIP """ input_spec = TVResampleInputSpec output_spec = TVResampleOutputSpec _cmd = "TVResample" class SVResampleInputSpec(CommandLineInputSpec): in_file = File( desc="image to resample", exists=True, mandatory=True, argstr="-in %s" ) out_file = File( desc="output path", name_source="in_file", name_template="%s_resampled", keep_extension=True, argstr="-out %s", ) target_file = File( desc="specs read from the target volume", argstr="-target %s", xor=["array_size", "voxel_size", "origin"], ) align = traits.Enum( "center", "origin", argstr="-align %s", desc="how to align output volume to input volume", ) array_size = traits.Tuple( (traits.Int(), traits.Int(), traits.Int()), desc="resampled array size", xor=["target_file"], argstr="-size %d %d %d", ) voxel_size = traits.Tuple( (traits.Float(), traits.Float(), traits.Float()), desc="resampled voxel size", xor=["target_file"], argstr="-vsize %g %g %g", ) origin = traits.Tuple( (traits.Float(), traits.Float(), traits.Float()), desc="xyz origin", xor=["target_file"], argstr="-origin %g %g %g", ) class SVResampleOutputSpec(TraitedSpec): out_file = File(exists=True) class SVResample(CommandLineDtitk): """ Resamples a scalar volume. 
Example ------- >>> from nipype.interfaces import dtitk >>> node = dtitk.SVResample() >>> node.inputs.in_file = 'im1.nii' >>> node.inputs.target_file = 'im2.nii' >>> node.cmdline 'SVResample -in im1.nii -out im1_resampled.nii -target im2.nii' >>> node.run() # doctest: +SKIP """ input_spec = SVResampleInputSpec output_spec = SVResampleOutputSpec _cmd = "SVResample" class TVtoolInputSpec(CommandLineInputSpec): in_file = File( desc="scalar volume to resample", exists=True, argstr="-in %s", mandatory=True ) """NOTE: there are a lot more options here; not implementing all of them""" in_flag = traits.Enum("fa", "tr", "ad", "rd", "pd", "rgb", argstr="-%s", desc="") out_file = File(argstr="-out %s", genfile=True) class TVtoolOutputSpec(TraitedSpec): out_file = File() class TVtool(CommandLineDtitk): """ Calculates a tensor metric volume from a tensor volume. Example ------- >>> from nipype.interfaces import dtitk >>> node = dtitk.TVtool() >>> node.inputs.in_file = 'im1.nii' >>> node.inputs.in_flag = 'fa' >>> node.cmdline 'TVtool -in im1.nii -fa -out im1_fa.nii' >>> node.run() # doctest: +SKIP """ input_spec = TVtoolInputSpec output_spec = TVtoolOutputSpec _cmd = "TVtool" def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.out_file if not isdefined(out_file): out_file = self._gen_filename("out_file") outputs["out_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): if name != "out_file": return return fname_presuffix( os.path.basename(self.inputs.in_file), suffix="_" + self.inputs.in_flag ) """Note: SVTool not implemented at this time""" class BinThreshInputSpec(CommandLineInputSpec): in_file = File( desc="Image to threshold/binarize", exists=True, position=0, argstr="%s", mandatory=True, ) out_file = File( desc="output path", position=1, argstr="%s", keep_extension=True, name_source="in_file", name_template="%s_thrbin", ) lower_bound = traits.Float( 0.01, usedefault=True, position=2, argstr="%g", mandatory=True, 
desc="lower bound of binarization range", ) upper_bound = traits.Float( 100, usedefault=True, position=3, argstr="%g", mandatory=True, desc="upper bound of binarization range", ) inside_value = traits.Float( 1, position=4, argstr="%g", usedefault=True, mandatory=True, desc="value for voxels in " "binarization range", ) outside_value = traits.Float( 0, position=5, argstr="%g", usedefault=True, mandatory=True, desc="value for voxels" "outside of binarization range", ) class BinThreshOutputSpec(TraitedSpec): out_file = File(exists=True) class BinThresh(CommandLineDtitk): """ Binarizes an image. Example ------- >>> from nipype.interfaces import dtitk >>> node = dtitk.BinThresh() >>> node.inputs.in_file = 'im1.nii' >>> node.inputs.lower_bound = 0 >>> node.inputs.upper_bound = 100 >>> node.inputs.inside_value = 1 >>> node.inputs.outside_value = 0 >>> node.cmdline 'BinaryThresholdImageFilter im1.nii im1_thrbin.nii 0 100 1 0' >>> node.run() # doctest: +SKIP """ input_spec = BinThreshInputSpec output_spec = BinThreshOutputSpec _cmd = "BinaryThresholdImageFilter" class BinThreshTask(DTITKRenameMixin, BinThresh): pass class SVAdjustVoxSpTask(DTITKRenameMixin, SVAdjustVoxSp): pass class SVResampleTask(DTITKRenameMixin, SVResample): pass class TVAdjustOriginTask(DTITKRenameMixin, TVAdjustVoxSp): pass class TVAdjustVoxSpTask(DTITKRenameMixin, TVAdjustVoxSp): pass class TVResampleTask(DTITKRenameMixin, TVResample): pass class TVtoolTask(DTITKRenameMixin, TVtool): pass nipype-1.7.0/nipype/interfaces/dynamic_slicer.py000066400000000000000000000204061413403311400220050ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Experimental Slicer wrapper - Work in progress.""" import os import warnings import xml.dom.minidom from .base import ( CommandLine, CommandLineInputSpec, DynamicTraitedSpec, traits, Undefined, File, isdefined, ) class 
SlicerCommandLineInputSpec(DynamicTraitedSpec, CommandLineInputSpec): module = traits.Str(desc="name of the Slicer command line module you want to use") class SlicerCommandLine(CommandLine): """Experimental Slicer wrapper. Work in progress.""" _cmd = "Slicer3" input_spec = SlicerCommandLineInputSpec output_spec = DynamicTraitedSpec def _grab_xml(self, module): cmd = CommandLine( command="Slicer3", resource_monitor=False, args="--launch %s --xml" % module ) ret = cmd.run() if ret.runtime.returncode == 0: return xml.dom.minidom.parseString(ret.runtime.stdout) else: raise Exception(cmd.cmdline + " failed:\n%s" % ret.runtime.stderr) def _outputs(self): base = super(SlicerCommandLine, self)._outputs() undefined_output_traits = {} for key in [ node.getElementsByTagName("name")[0].firstChild.nodeValue for node in self._outputs_nodes ]: base.add_trait(key, File(exists=True)) undefined_output_traits[key] = Undefined base.trait_set(trait_change_notify=False, **undefined_output_traits) return base def __init__(self, module, **inputs): warnings.warn("slicer is Not fully implemented", RuntimeWarning) super(SlicerCommandLine, self).__init__( command="Slicer3 --launch %s " % module, name=module, **inputs ) dom = self._grab_xml(module) self._outputs_filenames = {} self._outputs_nodes = [] undefined_traits = {} for paramGroup in dom.getElementsByTagName("parameters"): for param in paramGroup.childNodes: if param.nodeName in ["label", "description", "#text", "#comment"]: continue traitsParams = {} name = param.getElementsByTagName("name")[0].firstChild.nodeValue longFlagNode = param.getElementsByTagName("longflag") if longFlagNode: traitsParams["argstr"] = ( "--" + longFlagNode[0].firstChild.nodeValue + " " ) else: traitsParams["argstr"] = "--" + name + " " argsDict = { "file": "%s", "integer": "%d", "double": "%f", "float": "%f", "image": "%s", "transform": "%s", "boolean": "", "string-enumeration": "%s", "string": "%s", } if param.nodeName.endswith("-vector"): 
traitsParams["argstr"] += argsDict[param.nodeName[:-7]] else: traitsParams["argstr"] += argsDict[param.nodeName] index = param.getElementsByTagName("index") if index: traitsParams["position"] = index[0].firstChild.nodeValue desc = param.getElementsByTagName("description") if index: traitsParams["desc"] = desc[0].firstChild.nodeValue name = param.getElementsByTagName("name")[0].firstChild.nodeValue typesDict = { "integer": traits.Int, "double": traits.Float, "float": traits.Float, "image": File, "transform": File, "boolean": traits.Bool, "string": traits.Str, "file": File, } if param.nodeName == "string-enumeration": type = traits.Enum values = [ el.firstChild.nodeValue for el in param.getElementsByTagName("element") ] elif param.nodeName.endswith("-vector"): type = traits.List values = [typesDict[param.nodeName[:-7]]] traitsParams["sep"] = "," else: values = [] type = typesDict[param.nodeName] if ( param.nodeName in ["file", "directory", "image", "transform"] and param.getElementsByTagName("channel")[0].firstChild.nodeValue == "output" ): self.inputs.add_trait( name, traits.Either(traits.Bool, File, **traitsParams) ) undefined_traits[name] = Undefined # traitsParams["exists"] = True self._outputs_filenames[name] = self._gen_filename_from_param(param) # undefined_output_traits[name] = Undefined # self._outputs().add_trait(name, File(*values, **traitsParams)) self._outputs_nodes.append(param) else: if param.nodeName in ["file", "directory", "image", "transform"]: traitsParams["exists"] = True self.inputs.add_trait(name, type(*values, **traitsParams)) undefined_traits[name] = Undefined self.inputs.trait_set(trait_change_notify=False, **undefined_traits) for name in list(undefined_traits.keys()): _ = getattr(self.inputs, name) # self._outputs().trait_set(trait_change_notify=False, **undefined_output_traits) def _gen_filename(self, name): if name in self._outputs_filenames: return os.path.join(os.getcwd(), self._outputs_filenames[name]) return None def 
_gen_filename_from_param(self, param): base = param.getElementsByTagName("name")[0].firstChild.nodeValue fileExtensions = param.getAttribute("fileExtensions") if fileExtensions: ext = fileExtensions else: ext = {"image": ".nii", "transform": ".txt", "file": ""}[param.nodeName] return base + ext def _list_outputs(self): outputs = self.output_spec().get() for output_node in self._outputs_nodes: name = output_node.getElementsByTagName("name")[0].firstChild.nodeValue outputs[name] = getattr(self.inputs, name) if isdefined(outputs[name]) and isinstance(outputs[name], bool): if outputs[name]: outputs[name] = self._gen_filename(name) else: outputs[name] = Undefined return outputs def _format_arg(self, name, spec, value): if name in [ output_node.getElementsByTagName("name")[0].firstChild.nodeValue for output_node in self._outputs_nodes ]: if isinstance(value, bool): fname = self._gen_filename(name) else: fname = value return spec.argstr % fname return super(SlicerCommandLine, self)._format_arg(name, spec, value) # test = SlicerCommandLine(module="BRAINSFit") # test.inputs.fixedVolume = "/home/filo/workspace/fmri_tumour/data/pilot1/10_co_COR_3D_IR_PREP.nii" # test.inputs.movingVolume = "/home/filo/workspace/fmri_tumour/data/pilot1/2_line_bisection.nii" # test.inputs.outputTransform = True # test.inputs.transformType = ["Affine"] # print test.cmdline # print test.inputs # print test._outputs() # ret = test.run() # test = SlicerCommandLine(name="BRAINSResample") # test.inputs.referenceVolume = "/home/filo/workspace/fmri_tumour/data/pilot1/10_co_COR_3D_IR_PREP.nii" # test.inputs.inputVolume = "/home/filo/workspace/fmri_tumour/data/pilot1/2_line_bisection.nii" # test.inputs.outputVolume = True # test.inputs.warpTransform = "/home/filo/workspace/nipype/nipype/interfaces/outputTransform.mat" # print test.cmdline # ret = test.run() # print ret.runtime.stderr # print ret.runtime.returncode 
nipype-1.7.0/nipype/interfaces/elastix/000077500000000000000000000000001413403311400201155ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/elastix/__init__.py000066400000000000000000000005201413403311400222230ustar00rootroot00000000000000# -*- coding: utf-8 -*- # coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """elastix is a toolbox for rigid and nonrigid registration of images.""" from .registration import Registration, ApplyWarp, AnalyzeWarp, PointsWarp from .utils import EditTransform nipype-1.7.0/nipype/interfaces/elastix/base.py000066400000000000000000000015231413403311400214020ustar00rootroot00000000000000# -*- coding: utf-8 -*- # coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The :py:mod:`nipype.interfaces.elastix` provides the interface to the elastix registration software. .. note:: http://elastix.isi.uu.nl/ """ from ... import logging from ..base import CommandLineInputSpec, Directory, traits iflogger = logging.getLogger("nipype.interface") class ElastixBaseInputSpec(CommandLineInputSpec): output_path = Directory( "./", exists=True, mandatory=True, usedefault=True, argstr="-out %s", desc="output directory", ) num_threads = traits.Int( 1, usedefault=True, argstr="-threads %01d", nohash=True, desc="set the maximum number of threads of elastix", ) nipype-1.7.0/nipype/interfaces/elastix/registration.py000066400000000000000000000206041413403311400232030ustar00rootroot00000000000000# -*- coding: utf-8 -*- # coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Interfaces to perform image registrations and to apply the resulting displacement maps to images and points. """ import os.path as op import re from ... 
import logging from .base import ElastixBaseInputSpec from ..base import CommandLine, TraitedSpec, File, traits, InputMultiPath iflogger = logging.getLogger("nipype.interface") class RegistrationInputSpec(ElastixBaseInputSpec): fixed_image = File(exists=True, mandatory=True, argstr="-f %s", desc="fixed image") moving_image = File( exists=True, mandatory=True, argstr="-m %s", desc="moving image" ) parameters = InputMultiPath( File(exists=True), mandatory=True, argstr="-p %s...", desc="parameter file, elastix handles 1 or more -p", ) fixed_mask = File(exists=True, argstr="-fMask %s", desc="mask for fixed image") moving_mask = File(exists=True, argstr="-mMask %s", desc="mask for moving image") initial_transform = File( exists=True, argstr="-t0 %s", desc="parameter file for initial transform" ) class RegistrationOutputSpec(TraitedSpec): transform = InputMultiPath(File(exists=True), desc="output transform") warped_file = File(desc="input moving image warped to fixed image") warped_files = InputMultiPath( File(exists=False), desc=("input moving image warped to fixed image at each level"), ) warped_files_flags = traits.List( traits.Bool(False), desc="flag indicating if warped image was generated" ) class Registration(CommandLine): """ Elastix nonlinear registration interface Example ------- >>> from nipype.interfaces.elastix import Registration >>> reg = Registration() >>> reg.inputs.fixed_image = 'fixed1.nii' >>> reg.inputs.moving_image = 'moving1.nii' >>> reg.inputs.parameters = ['elastix.txt'] >>> reg.cmdline 'elastix -f fixed1.nii -m moving1.nii -threads 1 -out ./ -p elastix.txt' """ _cmd = "elastix" input_spec = RegistrationInputSpec output_spec = RegistrationOutputSpec def _list_outputs(self): outputs = self._outputs().get() out_dir = op.abspath(self.inputs.output_path) regex = re.compile(r"^\((\w+)\s(.+)\)$") outputs["transform"] = [] outputs["warped_files"] = [] outputs["warped_files_flags"] = [] for i, params in enumerate(self.inputs.parameters): config = {} with 
open(params, "r") as f: for line in f.readlines(): line = line.strip() if not line.startswith("//") and line: m = regex.search(line) if m: value = self._cast(m.group(2).strip()) config[m.group(1).strip()] = value outputs["transform"].append( op.join(out_dir, "TransformParameters.%01d.txt" % i) ) warped_file = None if config["WriteResultImage"]: warped_file = op.join( out_dir, "result.%01d.%s" % (i, config["ResultImageFormat"]) ) outputs["warped_files"].append(warped_file) outputs["warped_files_flags"].append(config["WriteResultImage"]) if outputs["warped_files_flags"][-1]: outputs["warped_file"] = outputs["warped_files"][-1] return outputs def _cast(self, val): if val.startswith('"') and val.endswith('"'): if val == '"true"': return True elif val == '"false"': return False else: return val[1:-1] try: return int(val) except ValueError: try: return float(val) except ValueError: return val class ApplyWarpInputSpec(ElastixBaseInputSpec): transform_file = File( exists=True, mandatory=True, argstr="-tp %s", desc="transform-parameter file, only 1", ) moving_image = File( exists=True, argstr="-in %s", mandatory=True, desc="input image to deform" ) class ApplyWarpOutputSpec(TraitedSpec): warped_file = File(desc="input moving image warped to fixed image") class ApplyWarp(CommandLine): """ Use ``transformix`` to apply a transform on an input image. The transform is specified in the transform-parameter file. 
Example ------- >>> from nipype.interfaces.elastix import ApplyWarp >>> reg = ApplyWarp() >>> reg.inputs.moving_image = 'moving1.nii' >>> reg.inputs.transform_file = 'TransformParameters.0.txt' >>> reg.cmdline 'transformix -in moving1.nii -threads 1 -out ./ -tp TransformParameters.0.txt' """ _cmd = "transformix" input_spec = ApplyWarpInputSpec output_spec = ApplyWarpOutputSpec def _list_outputs(self): outputs = self._outputs().get() out_dir = op.abspath(self.inputs.output_path) outputs["warped_file"] = op.join(out_dir, "result.nii.gz") return outputs class AnalyzeWarpInputSpec(ApplyWarpInputSpec): points = traits.Enum( "all", usedefault=True, position=0, argstr="-def %s", desc="transform all points from the input-image, which effectively" " generates a deformation field.", ) jac = traits.Enum( "all", usedefault=True, argstr="-jac %s", desc="generate an image with the determinant of the spatial Jacobian", ) jacmat = traits.Enum( "all", usedefault=True, argstr="-jacmat %s", desc="generate an image with the spatial Jacobian matrix at each voxel", ) moving_image = File( exists=True, argstr="-in %s", desc="input image to deform (not used)" ) class AnalyzeWarpOutputSpec(TraitedSpec): disp_field = File(desc="displacements field") jacdet_map = File(desc="det(Jacobian) map") jacmat_map = File(desc="Jacobian matrix map") class AnalyzeWarp(ApplyWarp): """ Use transformix to get details from the input transform (generate the corresponding deformation field, generate the determinant of the Jacobian map or the Jacobian map itself) Example ------- >>> from nipype.interfaces.elastix import AnalyzeWarp >>> reg = AnalyzeWarp() >>> reg.inputs.transform_file = 'TransformParameters.0.txt' >>> reg.cmdline 'transformix -def all -jac all -jacmat all -threads 1 -out ./ -tp TransformParameters.0.txt' """ input_spec = AnalyzeWarpInputSpec output_spec = AnalyzeWarpOutputSpec def _list_outputs(self): outputs = self._outputs().get() out_dir = op.abspath(self.inputs.output_path) 
outputs["disp_field"] = op.join(out_dir, "deformationField.nii.gz") outputs["jacdet_map"] = op.join(out_dir, "spatialJacobian.nii.gz") outputs["jacmat_map"] = op.join(out_dir, "fullSpatialJacobian.nii.gz") return outputs class PointsWarpInputSpec(ElastixBaseInputSpec): points_file = File( exists=True, argstr="-def %s", mandatory=True, desc="input points (accepts .vtk triangular meshes).", ) transform_file = File( exists=True, mandatory=True, argstr="-tp %s", desc="transform-parameter file, only 1", ) class PointsWarpOutputSpec(TraitedSpec): warped_file = File(desc="input points displaced in fixed image domain") class PointsWarp(CommandLine): """Use ``transformix`` to apply a transform on an input point set. The transform is specified in the transform-parameter file. Example ------- >>> from nipype.interfaces.elastix import PointsWarp >>> reg = PointsWarp() >>> reg.inputs.points_file = 'surf1.vtk' >>> reg.inputs.transform_file = 'TransformParameters.0.txt' >>> reg.cmdline 'transformix -threads 1 -out ./ -def surf1.vtk -tp TransformParameters.0.txt' """ _cmd = "transformix" input_spec = PointsWarpInputSpec output_spec = PointsWarpOutputSpec def _list_outputs(self): outputs = self._outputs().get() out_dir = op.abspath(self.inputs.output_path) fname, ext = op.splitext(op.basename(self.inputs.points_file)) outputs["warped_file"] = op.join(out_dir, "outputpoints%s" % ext) return outputs nipype-1.7.0/nipype/interfaces/elastix/tests/000077500000000000000000000000001413403311400212575ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/elastix/tests/__init__.py000066400000000000000000000000301413403311400233610ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/elastix/tests/test_auto_AnalyzeWarp.py000066400000000000000000000033241413403311400261570ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import AnalyzeWarp def test_AnalyzeWarp_inputs(): input_map = dict( args=dict( 
argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), jac=dict( argstr="-jac %s", usedefault=True, ), jacmat=dict( argstr="-jacmat %s", usedefault=True, ), moving_image=dict( argstr="-in %s", extensions=None, ), num_threads=dict( argstr="-threads %01d", nohash=True, usedefault=True, ), output_path=dict( argstr="-out %s", mandatory=True, usedefault=True, ), points=dict( argstr="-def %s", position=0, usedefault=True, ), transform_file=dict( argstr="-tp %s", extensions=None, mandatory=True, ), ) inputs = AnalyzeWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AnalyzeWarp_outputs(): output_map = dict( disp_field=dict( extensions=None, ), jacdet_map=dict( extensions=None, ), jacmat_map=dict( extensions=None, ), ) outputs = AnalyzeWarp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/elastix/tests/test_auto_ApplyWarp.py000066400000000000000000000024741413403311400256460ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import ApplyWarp def test_ApplyWarp_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), moving_image=dict( argstr="-in %s", extensions=None, mandatory=True, ), num_threads=dict( argstr="-threads %01d", nohash=True, usedefault=True, ), output_path=dict( argstr="-out %s", mandatory=True, usedefault=True, ), transform_file=dict( argstr="-tp %s", extensions=None, mandatory=True, ), ) inputs = ApplyWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ApplyWarp_outputs(): output_map = dict( warped_file=dict( extensions=None, ), ) outputs = ApplyWarp.output_spec() for 
key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/elastix/tests/test_auto_EditTransform.py000066400000000000000000000023071413403311400265030ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import EditTransform def test_EditTransform_inputs(): input_map = dict( interpolation=dict( argstr="FinalBSplineInterpolationOrder", usedefault=True, ), output_file=dict( extensions=None, ), output_format=dict( argstr="ResultImageFormat", ), output_type=dict( argstr="ResultImagePixelType", ), reference_image=dict( extensions=None, ), transform_file=dict( extensions=None, mandatory=True, ), ) inputs = EditTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_EditTransform_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = EditTransform.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/elastix/tests/test_auto_PointsWarp.py000066400000000000000000000025011413403311400260240ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import PointsWarp def test_PointsWarp_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), num_threads=dict( argstr="-threads %01d", nohash=True, usedefault=True, ), output_path=dict( argstr="-out %s", mandatory=True, usedefault=True, ), points_file=dict( argstr="-def %s", extensions=None, mandatory=True, ), transform_file=dict( argstr="-tp %s", extensions=None, mandatory=True, ), ) inputs = PointsWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_PointsWarp_outputs(): output_map = dict( warped_file=dict( extensions=None, ), ) outputs = PointsWarp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/elastix/tests/test_auto_Registration.py000066400000000000000000000034471413403311400264020ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import Registration def test_Registration_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fixed_image=dict( argstr="-f %s", extensions=None, mandatory=True, ), fixed_mask=dict( argstr="-fMask %s", extensions=None, ), initial_transform=dict( argstr="-t0 %s", extensions=None, ), moving_image=dict( argstr="-m %s", extensions=None, mandatory=True, ), moving_mask=dict( argstr="-mMask %s", extensions=None, ), num_threads=dict( argstr="-threads %01d", nohash=True, usedefault=True, ), output_path=dict( argstr="-out %s", mandatory=True, usedefault=True, ), parameters=dict( argstr="-p %s...", mandatory=True, ), ) inputs = Registration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Registration_outputs(): output_map = dict( transform=dict(), warped_file=dict( extensions=None, ), warped_files=dict(), warped_files_flags=dict(), ) outputs = Registration.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/elastix/utils.py000066400000000000000000000132321413403311400216300ustar00rootroot00000000000000# -*- coding: utf-8 -*- # coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; 
indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Generic interfaces to manipulate registration parameters files, including transform files (to configure warpings) """ import os.path as op from ... import logging from ..base import ( BaseInterface, BaseInterfaceInputSpec, isdefined, TraitedSpec, File, traits, ) iflogger = logging.getLogger("nipype.interface") class EditTransformInputSpec(BaseInterfaceInputSpec): transform_file = File( exists=True, mandatory=True, desc="transform-parameter file, only 1" ) reference_image = File( exists=True, desc=("set a new reference image to change the " "target coordinate system."), ) interpolation = traits.Enum( "cubic", "linear", "nearest", usedefault=True, argstr="FinalBSplineInterpolationOrder", desc="set a new interpolator for transformation", ) output_type = traits.Enum( "float", "unsigned char", "unsigned short", "short", "unsigned long", "long", "double", argstr="ResultImagePixelType", desc="set a new output pixel type for resampled images", ) output_format = traits.Enum( "nii.gz", "nii", "mhd", "hdr", "vtk", argstr="ResultImageFormat", desc="set a new image format for resampled images", ) output_file = File(desc="the filename for the resulting transform file") class EditTransformOutputSpec(TraitedSpec): output_file = File(exists=True, desc="output transform file") class EditTransform(BaseInterface): """ Manipulates an existing transform file generated with elastix Example ------- >>> from nipype.interfaces.elastix import EditTransform >>> tfm = EditTransform() >>> tfm.inputs.transform_file = 'TransformParameters.0.txt' # doctest: +SKIP >>> tfm.inputs.reference_image = 'fixed1.nii' # doctest: +SKIP >>> tfm.inputs.output_type = 'unsigned char' >>> tfm.run() # doctest: +SKIP """ input_spec = EditTransformInputSpec output_spec = EditTransformOutputSpec _out_file = "" _pattern = r'\((?P%s\s"?)([-\.\s\w]+)("?\))' _interp = {"nearest": 0, "linear": 1, "cubic": 3} def _run_interface(self, runtime): import re 
import nibabel as nb import numpy as np contents = "" with open(self.inputs.transform_file, "r") as f: contents = f.read() if isdefined(self.inputs.output_type): p = re.compile( (self._pattern % "ResultImagePixelType").decode("string-escape") ) rep = r"(\g%s\g<3>" % self.inputs.output_type contents = p.sub(rep, contents) if isdefined(self.inputs.output_format): p = re.compile( (self._pattern % "ResultImageFormat").decode("string-escape") ) rep = r"(\g%s\g<3>" % self.inputs.output_format contents = p.sub(rep, contents) if isdefined(self.inputs.interpolation): p = re.compile( (self._pattern % "FinalBSplineInterpolationOrder").decode( "string-escape" ) ) rep = r"(\g%s\g<3>" % self._interp[self.inputs.interpolation] contents = p.sub(rep, contents) if isdefined(self.inputs.reference_image): im = nb.load(self.inputs.reference_image) if len(im.header.get_zooms()) == 4: im = nb.func.four_to_three(im)[0] size = " ".join(["%01d" % s for s in im.shape]) p = re.compile((self._pattern % "Size").decode("string-escape")) rep = r"(\g%s\g<3>" % size contents = p.sub(rep, contents) index = " ".join(["0" for s in im.shape]) p = re.compile((self._pattern % "Index").decode("string-escape")) rep = r"(\g%s\g<3>" % index contents = p.sub(rep, contents) spacing = " ".join(["%0.4f" % f for f in im.header.get_zooms()]) p = re.compile((self._pattern % "Spacing").decode("string-escape")) rep = r"(\g%s\g<3>" % spacing contents = p.sub(rep, contents) itkmat = np.eye(4) itkmat[0, 0] = -1 itkmat[1, 1] = -1 affine = np.dot(itkmat, im.affine) dirs = " ".join(["%0.4f" % f for f in affine[0:3, 0:3].reshape(-1)]) orig = " ".join(["%0.4f" % f for f in affine[0:3, 3].reshape(-1)]) # p = re.compile((self._pattern % 'Direction').decode('string-escape')) # rep = '(\g%s\g<3>' % dirs # contents = p.sub(rep, contents) p = re.compile((self._pattern % "Origin").decode("string-escape")) rep = r"(\g%s\g<3>" % orig contents = p.sub(rep, contents) with open(self._get_outfile(), "w") as of: of.write(contents) return 
runtime def _list_outputs(self): outputs = self.output_spec().get() outputs["output_file"] = getattr(self, "_out_file") return outputs def _get_outfile(self): val = getattr(self, "_out_file") if val is not None and val != "": return val if isdefined(self.inputs.output_file): setattr(self, "_out_file", self.inputs.output_file) return self.inputs.output_file out_file = op.abspath(op.basename(self.inputs.transform_file)) setattr(self, "_out_file", out_file) return out_file nipype-1.7.0/nipype/interfaces/freesurfer/000077500000000000000000000000001413403311400206145ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/freesurfer/__init__.py000066400000000000000000000035521413403311400227320ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """FreeSurfer is an open source software suite for processing and analyzing brain MRI images.""" from .base import Info, FSCommand, no_freesurfer from .preprocess import ( ParseDICOMDir, UnpackSDICOMDir, MRIConvert, Resample, ReconAll, BBRegister, ApplyVolTransform, Smooth, DICOMConvert, RobustRegister, FitMSParams, SynthesizeFLASH, MNIBiasCorrection, WatershedSkullStrip, Normalize, CANormalize, CARegister, CALabel, MRIsCALabel, SegmentCC, SegmentWM, EditWMwithAseg, ConcatenateLTA, ) from .model import ( MRISPreproc, MRISPreprocReconAll, GLMFit, OneSampleTTest, Binarize, Concatenate, SegStats, SegStatsReconAll, Label2Vol, MS_LDA, Label2Label, Label2Annot, SphericalAverage, ) from .utils import ( SampleToSurface, SurfaceSmooth, SurfaceTransform, Surface2VolTransform, SurfaceSnapshots, ApplyMask, MRIsConvert, MRITessellate, MRIPretess, MRIMarchingCubes, SmoothTessellation, MakeAverageSubject, ExtractMainComponent, Tkregister2, AddXFormToHeader, CheckTalairachAlignment, TalairachAVI, TalairachQC, RemoveNeck, MRIFill, MRIsInflate, Sphere, FixTopology, EulerNumber, RemoveIntersection, MakeSurfaces, Curvature, 
CurvatureStats, Jacobian, MRIsCalc, VolumeMask, ParcellationStats, Contrast, RelabelHypointensities, Aparc2Aseg, Apas2Aseg, MRIsExpand, MRIsCombine, ) from .longitudinal import RobustTemplate, FuseSegmentations from .registration import ( MPRtoMNI305, RegisterAVItoTalairach, EMRegister, Register, Paint, MRICoreg, ) nipype-1.7.0/nipype/interfaces/freesurfer/base.py000066400000000000000000000202021413403311400220740ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The freesurfer module provides basic functions for interfacing with freesurfer tools. Currently these tools are supported: * Dicom2Nifti: using mri_convert * Resample: using mri_convert Examples -------- See the docstrings for the individual classes for 'working' examples. """ import os from ... import LooseVersion from ...utils.filemanip import fname_presuffix from ..base import ( CommandLine, Directory, CommandLineInputSpec, isdefined, traits, TraitedSpec, File, PackageInfo, ) __docformat__ = "restructuredtext" class Info(PackageInfo): """Freesurfer subject directory and version information. Examples -------- >>> from nipype.interfaces.freesurfer import Info >>> Info.version() # doctest: +SKIP >>> Info.subjectsdir() # doctest: +SKIP """ if os.getenv("FREESURFER_HOME"): version_file = os.path.join(os.getenv("FREESURFER_HOME"), "build-stamp.txt") @staticmethod def parse_version(raw_info): return raw_info.splitlines()[0] @classmethod def looseversion(cls): """Return a comparable version object If no version found, use LooseVersion('0.0.0') """ ver = cls.version() if ver is None: return LooseVersion("0.0.0") vinfo = ver.rstrip().split("-") try: int(vinfo[-1], 16) except ValueError: githash = "" else: githash = "." 
+ vinfo[-1] # As of FreeSurfer v6.0.0, the final component is a githash if githash: if vinfo[3] == "dev": # This will need updating when v6.0.1 comes out vstr = "6.0.0-dev" + githash elif vinfo[5][0] == "v": vstr = vinfo[5][1:] elif len([1 for val in vinfo[3] if val == "."]) == 2: "version string: freesurfer-linux-centos7_x86_64-7.1.0-20200511-813297b" vstr = vinfo[3] else: raise RuntimeError("Unknown version string: " + ver) # Retain pre-6.0.0 heuristics elif "dev" in ver: vstr = vinfo[-1] + "-dev" else: vstr = ver.rstrip().split("-v")[-1] return LooseVersion(vstr) @classmethod def subjectsdir(cls): """Check the global SUBJECTS_DIR Parameters ---------- subjects_dir : string The system defined subjects directory Returns ------- subject_dir : string Represents the current environment setting of SUBJECTS_DIR """ if cls.version(): return os.environ["SUBJECTS_DIR"] return None class FSTraitedSpec(CommandLineInputSpec): subjects_dir = Directory(exists=True, desc="subjects directory") class FSCommand(CommandLine): """General support for FreeSurfer commands. Every FS command accepts 'subjects_dir' input. 
""" input_spec = FSTraitedSpec _subjects_dir = None def __init__(self, **inputs): super(FSCommand, self).__init__(**inputs) self.inputs.on_trait_change(self._subjects_dir_update, "subjects_dir") if not self._subjects_dir: self._subjects_dir = Info.subjectsdir() if not isdefined(self.inputs.subjects_dir) and self._subjects_dir: self.inputs.subjects_dir = self._subjects_dir self._subjects_dir_update() def _subjects_dir_update(self): if self.inputs.subjects_dir: self.inputs.environ.update({"SUBJECTS_DIR": self.inputs.subjects_dir}) @classmethod def set_default_subjects_dir(cls, subjects_dir): cls._subjects_dir = subjects_dir def run(self, **inputs): if "subjects_dir" in inputs: self.inputs.subjects_dir = inputs["subjects_dir"] self._subjects_dir_update() return super(FSCommand, self).run(**inputs) def _gen_fname(self, basename, fname=None, cwd=None, suffix="_fs", use_ext=True): """Define a generic mapping for a single outfile The filename is potentially autogenerated by suffixing inputs.infile Parameters ---------- basename : string (required) filename to base the new filename on fname : string if not None, just use this fname cwd : string prefix paths with cwd, otherwise os.getcwd() suffix : string default suffix """ if basename == "": msg = "Unable to generate filename for command %s. " % self.cmd msg += "basename is not set!" raise ValueError(msg) if cwd is None: cwd = os.getcwd() fname = fname_presuffix(basename, suffix=suffix, use_ext=use_ext, newpath=cwd) return fname @property def version(self): ver = Info.looseversion() if ver > LooseVersion("0.0.0"): return ver.vstring class FSSurfaceCommand(FSCommand): """Support for FreeSurfer surface-related functions. For some functions, if the output file is not specified starting with 'lh.' or 'rh.', FreeSurfer prepends the prefix from the input file to the output filename. Output out_file must be adjusted to accommodate this. By including the full path in the filename, we can also avoid this behavior. 
""" @staticmethod def _associated_file(in_file, out_name): """Based on MRIsBuildFileName in freesurfer/utils/mrisurf.c If no path information is provided for out_name, use path and hemisphere (if also unspecified) from in_file to determine the path of the associated file. Use in_file prefix to indicate hemisphere for out_name, rather than inspecting the surface data structure. """ path, base = os.path.split(out_name) if path == "": path, in_file = os.path.split(in_file) hemis = ("lh.", "rh.") if in_file[:3] in hemis and base[:3] not in hemis: base = in_file[:3] + base return os.path.join(path, base) class FSScriptCommand(FSCommand): """Support for Freesurfer script commands with log terminal_output""" _terminal_output = "file" _always_run = False def _list_outputs(self): outputs = self._outputs().get() outputs["log_file"] = os.path.abspath("output.nipype") return outputs class FSScriptOutputSpec(TraitedSpec): log_file = File( "output.nipype", usedefault=True, exists=True, desc="The output log" ) class FSTraitedSpecOpenMP(FSTraitedSpec): num_threads = traits.Int(desc="allows for specifying more threads") class FSCommandOpenMP(FSCommand): """Support for FS commands that utilize OpenMP Sets the environment variable 'OMP_NUM_THREADS' to the number of threads specified by the input num_threads. 
""" input_spec = FSTraitedSpecOpenMP _num_threads = None def __init__(self, **inputs): super(FSCommandOpenMP, self).__init__(**inputs) self.inputs.on_trait_change(self._num_threads_update, "num_threads") if not self._num_threads: self._num_threads = os.environ.get("OMP_NUM_THREADS", None) if not self._num_threads: self._num_threads = os.environ.get("NSLOTS", None) if not isdefined(self.inputs.num_threads) and self._num_threads: self.inputs.num_threads = int(self._num_threads) self._num_threads_update() def _num_threads_update(self): if self.inputs.num_threads: self.inputs.environ.update( {"OMP_NUM_THREADS": str(self.inputs.num_threads)} ) def run(self, **inputs): if "num_threads" in inputs: self.inputs.num_threads = inputs["num_threads"] self._num_threads_update() return super(FSCommandOpenMP, self).run(**inputs) def no_freesurfer(): """Checks if FreeSurfer is NOT installed used with skipif to skip tests that will fail if FreeSurfer is not installed""" if Info.version() is None: return True else: return False nipype-1.7.0/nipype/interfaces/freesurfer/longitudinal.py000066400000000000000000000230351413403311400236620ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various longitudinal commands provided by freesurfer """ import os from ... 
import logging from ..base import TraitedSpec, File, traits, InputMultiPath, OutputMultiPath, isdefined from .base import FSCommand, FSTraitedSpec, FSCommandOpenMP, FSTraitedSpecOpenMP __docformat__ = "restructuredtext" iflogger = logging.getLogger("nipype.interface") class RobustTemplateInputSpec(FSTraitedSpecOpenMP): # required in_files = InputMultiPath( File(exists=True), mandatory=True, argstr="--mov %s", desc="input movable volumes to be aligned to common mean/median " "template", ) out_file = File( "mri_robust_template_out.mgz", mandatory=True, usedefault=True, argstr="--template %s", desc="output template volume (final mean/median image)", ) auto_detect_sensitivity = traits.Bool( argstr="--satit", xor=["outlier_sensitivity"], mandatory=True, desc="auto-detect good sensitivity (recommended for head or full " "brain scans)", ) outlier_sensitivity = traits.Float( argstr="--sat %.4f", xor=["auto_detect_sensitivity"], mandatory=True, desc='set outlier sensitivity manually (e.g. "--sat 4.685" ). 
Higher ' "values mean less sensitivity.", ) # optional transform_outputs = traits.Either( InputMultiPath(File(exists=False)), traits.Bool, argstr="--lta %s", desc="output xforms to template (for each input)", ) intensity_scaling = traits.Bool( default_value=False, argstr="--iscale", desc="allow also intensity scaling (default off)", ) scaled_intensity_outputs = traits.Either( InputMultiPath(File(exists=False)), traits.Bool, argstr="--iscaleout %s", desc="final intensity scales (will activate --iscale)", ) subsample_threshold = traits.Int( argstr="--subsample %d", desc="subsample if dim > # on all axes (default no subs.)", ) average_metric = traits.Enum( "median", "mean", argstr="--average %d", desc="construct template from: 0 Mean, 1 Median (default)", ) initial_timepoint = traits.Int( argstr="--inittp %d", desc="use TP# for spacial init (default random), 0: no init", ) fixed_timepoint = traits.Bool( default_value=False, argstr="--fixtp", desc="map everthing to init TP# (init TP is not resampled)", ) no_iteration = traits.Bool( default_value=False, argstr="--noit", desc="do not iterate, just create first template", ) initial_transforms = InputMultiPath( File(exists=True), argstr="--ixforms %s", desc="use initial transforms (lta) on source", ) in_intensity_scales = InputMultiPath( File(exists=True), argstr="--iscalein %s", desc="use initial intensity scales" ) class RobustTemplateOutputSpec(TraitedSpec): out_file = File( exists=True, desc="output template volume (final mean/median image)" ) transform_outputs = OutputMultiPath( File(exists=True), desc="output xform files from moving to template" ) scaled_intensity_outputs = OutputMultiPath( File(exists=True), desc="output final intensity scales" ) class RobustTemplate(FSCommandOpenMP): """construct an unbiased robust template for longitudinal volumes Examples -------- >>> from nipype.interfaces.freesurfer import RobustTemplate >>> template = RobustTemplate() >>> template.inputs.in_files = ['structural.nii', 
'functional.nii'] >>> template.inputs.auto_detect_sensitivity = True >>> template.inputs.average_metric = 'mean' >>> template.inputs.initial_timepoint = 1 >>> template.inputs.fixed_timepoint = True >>> template.inputs.no_iteration = True >>> template.inputs.subsample_threshold = 200 >>> template.cmdline #doctest: 'mri_robust_template --satit --average 0 --fixtp --mov structural.nii functional.nii --inittp 1 --noit --template mri_robust_template_out.mgz --subsample 200' >>> template.inputs.out_file = 'T1.nii' >>> template.cmdline #doctest: 'mri_robust_template --satit --average 0 --fixtp --mov structural.nii functional.nii --inittp 1 --noit --template T1.nii --subsample 200' >>> template.inputs.transform_outputs = ['structural.lta', ... 'functional.lta'] >>> template.inputs.scaled_intensity_outputs = ['structural-iscale.txt', ... 'functional-iscale.txt'] >>> template.cmdline #doctest: +ELLIPSIS 'mri_robust_template --satit --average 0 --fixtp --mov structural.nii functional.nii --inittp 1 --noit --template T1.nii --iscaleout .../structural-iscale.txt .../functional-iscale.txt --subsample 200 --lta .../structural.lta .../functional.lta' >>> template.inputs.transform_outputs = True >>> template.inputs.scaled_intensity_outputs = True >>> template.cmdline #doctest: +ELLIPSIS 'mri_robust_template --satit --average 0 --fixtp --mov structural.nii functional.nii --inittp 1 --noit --template T1.nii --iscaleout .../is1.txt .../is2.txt --subsample 200 --lta .../tp1.lta .../tp2.lta' >>> template.run() #doctest: +SKIP References ---------- [https://surfer.nmr.mgh.harvard.edu/fswiki/mri_robust_template] """ _cmd = "mri_robust_template" input_spec = RobustTemplateInputSpec output_spec = RobustTemplateOutputSpec def _format_arg(self, name, spec, value): if name == "average_metric": # return enumeration value return spec.argstr % {"mean": 0, "median": 1}[value] if name in ("transform_outputs", "scaled_intensity_outputs"): value = self._list_outputs()[name] return 
super(RobustTemplate, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) n_files = len(self.inputs.in_files) fmt = "{}{:02d}.{}" if n_files > 9 else "{}{:d}.{}" if isdefined(self.inputs.transform_outputs): fnames = self.inputs.transform_outputs if fnames is True: fnames = [fmt.format("tp", i + 1, "lta") for i in range(n_files)] outputs["transform_outputs"] = [os.path.abspath(x) for x in fnames] if isdefined(self.inputs.scaled_intensity_outputs): fnames = self.inputs.scaled_intensity_outputs if fnames is True: fnames = [fmt.format("is", i + 1, "txt") for i in range(n_files)] outputs["scaled_intensity_outputs"] = [os.path.abspath(x) for x in fnames] return outputs class FuseSegmentationsInputSpec(FSTraitedSpec): # required subject_id = traits.String( argstr="%s", position=-3, desc="subject_id being processed" ) timepoints = InputMultiPath( traits.String(), mandatory=True, argstr="%s", position=-2, desc="subject_ids or timepoints to be processed", ) out_file = File( exists=False, mandatory=True, position=-1, desc="output fused segmentation file" ) in_segmentations = InputMultiPath( File(exists=True), argstr="-a %s", mandatory=True, desc="name of aseg file to use (default: aseg.mgz) \ must include the aseg files for all the given timepoints", ) in_segmentations_noCC = InputMultiPath( File(exists=True), argstr="-c %s", mandatory=True, desc="name of aseg file w/o CC labels (default: aseg.auto_noCCseg.mgz) \ must include the corresponding file for all the given timepoints", ) in_norms = InputMultiPath( File(exists=True), argstr="-n %s", mandatory=True, desc="-n - name of norm file to use (default: norm.mgs) \ must include the corresponding norm file for all given timepoints \ as well as for the current subject", ) class FuseSegmentationsOutputSpec(TraitedSpec): out_file = File(exists=False, desc="output fused segmentation file") class FuseSegmentations(FSCommand): 
"""fuse segmentations together from multiple timepoints Examples -------- >>> from nipype.interfaces.freesurfer import FuseSegmentations >>> fuse = FuseSegmentations() >>> fuse.inputs.subject_id = 'tp.long.A.template' >>> fuse.inputs.timepoints = ['tp1', 'tp2'] >>> fuse.inputs.out_file = 'aseg.fused.mgz' >>> fuse.inputs.in_segmentations = ['aseg.mgz', 'aseg.mgz'] >>> fuse.inputs.in_segmentations_noCC = ['aseg.mgz', 'aseg.mgz'] >>> fuse.inputs.in_norms = ['norm.mgz', 'norm.mgz', 'norm.mgz'] >>> fuse.cmdline 'mri_fuse_segmentations -n norm.mgz -a aseg.mgz -c aseg.mgz tp.long.A.template tp1 tp2' """ _cmd = "mri_fuse_segmentations" input_spec = FuseSegmentationsInputSpec output_spec = FuseSegmentationsOutputSpec def _format_arg(self, name, spec, value): if name in ("in_segmentations", "in_segmentations_noCC", "in_norms"): # return enumeration value return spec.argstr % os.path.basename(value[0]) return super(FuseSegmentations, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs nipype-1.7.0/nipype/interfaces/freesurfer/model.py000066400000000000000000001702541413403311400222770ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The freesurfer module provides basic functions for interfacing with freesurfer tools. 
""" import os from ...utils.filemanip import fname_presuffix, split_filename from ..base import ( TraitedSpec, File, traits, InputMultiPath, OutputMultiPath, Directory, isdefined, ) from .base import FSCommand, FSTraitedSpec from .utils import copy2subjdir __docformat__ = "restructuredtext" class MRISPreprocInputSpec(FSTraitedSpec): out_file = File(argstr="--out %s", genfile=True, desc="output filename") target = traits.Str( argstr="--target %s", mandatory=True, desc="target subject name" ) hemi = traits.Enum( "lh", "rh", argstr="--hemi %s", mandatory=True, desc="hemisphere for source and target", ) surf_measure = traits.Str( argstr="--meas %s", xor=("surf_measure", "surf_measure_file", "surf_area"), desc="Use subject/surf/hemi.surf_measure as input", ) surf_area = traits.Str( argstr="--area %s", xor=("surf_measure", "surf_measure_file", "surf_area"), desc="Extract vertex area from subject/surf/hemi.surfname to use as input.", ) subjects = traits.List( argstr="--s %s...", xor=("subjects", "fsgd_file", "subject_file"), desc="subjects from who measures are calculated", ) fsgd_file = File( exists=True, argstr="--fsgd %s", xor=("subjects", "fsgd_file", "subject_file"), desc="specify subjects using fsgd file", ) subject_file = File( exists=True, argstr="--f %s", xor=("subjects", "fsgd_file", "subject_file"), desc="file specifying subjects separated by white space", ) surf_measure_file = InputMultiPath( File(exists=True), argstr="--is %s...", xor=("surf_measure", "surf_measure_file", "surf_area"), desc="file alternative to surfmeas, still requires list of subjects", ) source_format = traits.Str(argstr="--srcfmt %s", desc="source format") surf_dir = traits.Str( argstr="--surfdir %s", desc="alternative directory (instead of surf)" ) vol_measure_file = InputMultiPath( traits.Tuple(File(exists=True), File(exists=True)), argstr="--iv %s %s...", desc="list of volume measure and reg file tuples", ) proj_frac = traits.Float( argstr="--projfrac %s", desc="projection fraction for 
vol2surf" ) fwhm = traits.Float( argstr="--fwhm %f", xor=["num_iters"], desc="smooth by fwhm mm on the target surface", ) num_iters = traits.Int( argstr="--niters %d", xor=["fwhm"], desc="niters : smooth by niters on the target surface", ) fwhm_source = traits.Float( argstr="--fwhm-src %f", xor=["num_iters_source"], desc="smooth by fwhm mm on the source surface", ) num_iters_source = traits.Int( argstr="--niterssrc %d", xor=["fwhm_source"], desc="niters : smooth by niters on the source surface", ) smooth_cortex_only = traits.Bool( argstr="--smooth-cortex-only", desc="only smooth cortex (ie, exclude medial wall)", ) class MRISPreprocOutputSpec(TraitedSpec): out_file = File(desc="preprocessed output file") class MRISPreproc(FSCommand): """Use FreeSurfer mris_preproc to prepare a group of contrasts for a second level analysis Examples -------- >>> preproc = MRISPreproc() >>> preproc.inputs.target = 'fsaverage' >>> preproc.inputs.hemi = 'lh' >>> preproc.inputs.vol_measure_file = [('cont1.nii', 'register.dat'), \ ('cont1a.nii', 'register.dat')] >>> preproc.inputs.out_file = 'concatenated_file.mgz' >>> preproc.cmdline 'mris_preproc --hemi lh --out concatenated_file.mgz --target fsaverage --iv cont1.nii register.dat --iv cont1a.nii register.dat' """ _cmd = "mris_preproc" input_spec = MRISPreprocInputSpec output_spec = MRISPreprocOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outfile = self.inputs.out_file outputs["out_file"] = outfile if not isdefined(outfile): outputs["out_file"] = os.path.join( os.getcwd(), "concat_%s_%s.mgz" % (self.inputs.hemi, self.inputs.target) ) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()[name] return None class MRISPreprocReconAllInputSpec(MRISPreprocInputSpec): surf_measure_file = File( exists=True, argstr="--meas %s", xor=("surf_measure", "surf_measure_file", "surf_area"), desc="file necessary for surfmeas", ) surfreg_files = InputMultiPath( File(exists=True), 
argstr="--surfreg %s", requires=["lh_surfreg_target", "rh_surfreg_target"], desc="lh and rh input surface registration files", ) lh_surfreg_target = File( desc="Implicit target surface registration file", requires=["surfreg_files"] ) rh_surfreg_target = File( desc="Implicit target surface registration file", requires=["surfreg_files"] ) subject_id = traits.String( "subject_id", argstr="--s %s", usedefault=True, xor=("subjects", "fsgd_file", "subject_file", "subject_id"), desc="subject from whom measures are calculated", ) copy_inputs = traits.Bool( desc="If running as a node, set this to True " "this will copy some implicit inputs to the " "node directory." ) class MRISPreprocReconAll(MRISPreproc): """Extends MRISPreproc to allow it to be used in a recon-all workflow Examples -------- >>> preproc = MRISPreprocReconAll() >>> preproc.inputs.target = 'fsaverage' >>> preproc.inputs.hemi = 'lh' >>> preproc.inputs.vol_measure_file = [('cont1.nii', 'register.dat'), \ ('cont1a.nii', 'register.dat')] >>> preproc.inputs.out_file = 'concatenated_file.mgz' >>> preproc.cmdline 'mris_preproc --hemi lh --out concatenated_file.mgz --s subject_id --target fsaverage --iv cont1.nii register.dat --iv cont1a.nii register.dat' """ input_spec = MRISPreprocReconAllInputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() if "subjects_dir" in inputs: inputs["subjects_dir"] = self.inputs.subjects_dir if isdefined(self.inputs.surf_dir): folder = self.inputs.surf_dir else: folder = "surf" if isdefined(self.inputs.surfreg_files): for surfreg in self.inputs.surfreg_files: basename = os.path.basename(surfreg) copy2subjdir(self, surfreg, folder, basename) if basename.startswith("lh."): copy2subjdir( self, self.inputs.lh_surfreg_target, folder, basename, subject_id=self.inputs.target, ) else: copy2subjdir( self, self.inputs.rh_surfreg_target, folder, basename, subject_id=self.inputs.target, ) if isdefined(self.inputs.surf_measure_file): 
copy2subjdir(self, self.inputs.surf_measure_file, folder) return super(MRISPreprocReconAll, self).run(**inputs) def _format_arg(self, name, spec, value): # mris_preproc looks for these files in the surf dir if name == "surfreg_files": basename = os.path.basename(value[0]) return spec.argstr % basename.lstrip("rh.").lstrip("lh.") if name == "surf_measure_file": basename = os.path.basename(value) return spec.argstr % basename.lstrip("rh.").lstrip("lh.") return super(MRISPreprocReconAll, self)._format_arg(name, spec, value) class GLMFitInputSpec(FSTraitedSpec): glm_dir = traits.Str(argstr="--glmdir %s", desc="save outputs to dir", genfile=True) in_file = File( desc="input 4D file", argstr="--y %s", mandatory=True, copyfile=False ) _design_xor = ("fsgd", "design", "one_sample") fsgd = traits.Tuple( File(exists=True), traits.Enum("doss", "dods"), argstr="--fsgd %s %s", xor=_design_xor, desc="freesurfer descriptor file", ) design = File( exists=True, argstr="--X %s", xor=_design_xor, desc="design matrix file" ) contrast = InputMultiPath( File(exists=True), argstr="--C %s...", desc="contrast file" ) one_sample = traits.Bool( argstr="--osgm", xor=("one_sample", "fsgd", "design", "contrast"), desc="construct X and C as a one-sample group mean", ) no_contrast_ok = traits.Bool( argstr="--no-contrasts-ok", desc="do not fail if no contrasts specified" ) per_voxel_reg = InputMultiPath( File(exists=True), argstr="--pvr %s...", desc="per-voxel regressors" ) self_reg = traits.Tuple( traits.Int, traits.Int, traits.Int, argstr="--selfreg %d %d %d", desc="self-regressor from index col row slice", ) weighted_ls = File( exists=True, argstr="--wls %s", xor=("weight_file", "weight_inv", "weight_sqrt"), desc="weighted least squares", ) fixed_fx_var = File( exists=True, argstr="--yffxvar %s", desc="for fixed effects analysis" ) fixed_fx_dof = traits.Int( argstr="--ffxdof %d", xor=["fixed_fx_dof_file"], desc="dof for fixed effects analysis", ) fixed_fx_dof_file = File( argstr="--ffxdofdat 
%d", xor=["fixed_fx_dof"], desc="text file with dof for fixed effects analysis", ) weight_file = File( exists=True, xor=["weighted_ls"], desc="weight for each input at each voxel" ) weight_inv = traits.Bool( argstr="--w-inv", desc="invert weights", xor=["weighted_ls"] ) weight_sqrt = traits.Bool( argstr="--w-sqrt", desc="sqrt of weights", xor=["weighted_ls"] ) fwhm = traits.Range(low=0.0, argstr="--fwhm %f", desc="smooth input by fwhm") var_fwhm = traits.Range( low=0.0, argstr="--var-fwhm %f", desc="smooth variance by fwhm" ) no_mask_smooth = traits.Bool( argstr="--no-mask-smooth", desc="do not mask when smoothing" ) no_est_fwhm = traits.Bool( argstr="--no-est-fwhm", desc="turn off FWHM output estimation" ) mask_file = File(exists=True, argstr="--mask %s", desc="binary mask") label_file = File( exists=True, argstr="--label %s", xor=["cortex"], desc="use label as mask, surfaces only", ) cortex = traits.Bool( argstr="--cortex", xor=["label_file"], desc="use subjects ?h.cortex.label as label", ) invert_mask = traits.Bool(argstr="--mask-inv", desc="invert mask") prune = traits.Bool( argstr="--prune", desc="remove voxels that do not have a non-zero value at each frame (def)", ) no_prune = traits.Bool( argstr="--no-prune", xor=["prunethresh"], desc="do not prune" ) prune_thresh = traits.Float( argstr="--prune_thr %f", xor=["noprune"], desc="prune threshold. Default is FLT_MIN", ) compute_log_y = traits.Bool( argstr="--logy", desc="compute natural log of y prior to analysis" ) save_estimate = traits.Bool( argstr="--yhat-save", desc="save signal estimate (yhat)" ) save_residual = traits.Bool(argstr="--eres-save", desc="save residual error (eres)") save_res_corr_mtx = traits.Bool( argstr="--eres-scm", desc="save residual error spatial correlation matrix (eres.scm). 
Big!", ) surf = traits.Bool( argstr="--surf %s %s %s", requires=["subject_id", "hemi"], desc="analysis is on a surface mesh", ) subject_id = traits.Str(desc="subject id for surface geometry") hemi = traits.Enum("lh", "rh", desc="surface hemisphere") surf_geo = traits.Str( "white", usedefault=True, desc="surface geometry name (e.g. white, pial)" ) simulation = traits.Tuple( traits.Enum("perm", "mc-full", "mc-z"), traits.Int(min=1), traits.Float, traits.Str, argstr="--sim %s %d %f %s", desc="nulltype nsim thresh csdbasename", ) sim_sign = traits.Enum( "abs", "pos", "neg", argstr="--sim-sign %s", desc="abs, pos, or neg" ) uniform = traits.Tuple( traits.Float, traits.Float, argstr="--uniform %f %f", desc="use uniform distribution instead of gaussian", ) pca = traits.Bool(argstr="--pca", desc="perform pca/svd analysis on residual") calc_AR1 = traits.Bool( argstr="--tar1", desc="compute and save temporal AR1 of residual" ) save_cond = traits.Bool( argstr="--save-cond", desc="flag to save design matrix condition at each voxel" ) vox_dump = traits.Tuple( traits.Int, traits.Int, traits.Int, argstr="--voxdump %d %d %d", desc="dump voxel GLM and exit", ) seed = traits.Int(argstr="--seed %d", desc="used for synthesizing noise") synth = traits.Bool(argstr="--synth", desc="replace input with gaussian") resynth_test = traits.Int(argstr="--resynthtest %d", desc="test GLM by resynthsis") profile = traits.Int(argstr="--profile %d", desc="niters : test speed") force_perm = traits.Bool( argstr="--perm-force", desc="force perumtation test, even when design matrix is not orthog", ) diag = traits.Int(argstr="--diag %d", desc="Gdiag_no : set diagnositc level") diag_cluster = traits.Bool( argstr="--diag-cluster", desc="save sig volume and exit from first sim loop" ) debug = traits.Bool(argstr="--debug", desc="turn on debugging") check_opts = traits.Bool( argstr="--checkopts", desc="don't run anything, just check options and exit" ) allow_repeated_subjects = traits.Bool( 
argstr="--allowsubjrep", desc="allow subject names to repeat in the fsgd file (must appear before --fsgd", ) allow_ill_cond = traits.Bool( argstr="--illcond", desc="allow ill-conditioned design matrices" ) sim_done_file = File( argstr="--sim-done %s", desc="create file when simulation finished" ) class GLMFitOutputSpec(TraitedSpec): glm_dir = Directory(exists=True, desc="output directory") beta_file = File(exists=True, desc="map of regression coefficients") error_file = File(desc="map of residual error") error_var_file = File(desc="map of residual error variance") error_stddev_file = File(desc="map of residual error standard deviation") estimate_file = File(desc="map of the estimated Y values") mask_file = File(desc="map of the mask used in the analysis") fwhm_file = File(desc="text file with estimated smoothness") dof_file = File(desc="text file with effective degrees-of-freedom for the analysis") gamma_file = OutputMultiPath(desc="map of contrast of regression coefficients") gamma_var_file = OutputMultiPath(desc="map of regression contrast variance") sig_file = OutputMultiPath(desc="map of F-test significance (in -log10p)") ftest_file = OutputMultiPath(desc="map of test statistic values") spatial_eigenvectors = File(desc="map of spatial eigenvectors from residual PCA") frame_eigenvectors = File(desc="matrix of frame eigenvectors from residual PCA") singular_values = File(desc="matrix singular values from residual PCA") svd_stats_file = File(desc="text file summarizing the residual PCA") class GLMFit(FSCommand): """Use FreeSurfer's mri_glmfit to specify and estimate a general linear model. 
Examples -------- >>> glmfit = GLMFit() >>> glmfit.inputs.in_file = 'functional.nii' >>> glmfit.inputs.one_sample = True >>> glmfit.cmdline == 'mri_glmfit --glmdir %s --y functional.nii --osgm'%os.getcwd() True """ _cmd = "mri_glmfit" input_spec = GLMFitInputSpec output_spec = GLMFitOutputSpec def _format_arg(self, name, spec, value): if name == "surf": _si = self.inputs return spec.argstr % (_si.subject_id, _si.hemi, _si.surf_geo) return super(GLMFit, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() # Get the top-level output directory if not isdefined(self.inputs.glm_dir): glmdir = os.getcwd() else: glmdir = os.path.abspath(self.inputs.glm_dir) outputs["glm_dir"] = glmdir # Assign the output files that always get created outputs["beta_file"] = os.path.join(glmdir, "beta.mgh") outputs["error_var_file"] = os.path.join(glmdir, "rvar.mgh") outputs["error_stddev_file"] = os.path.join(glmdir, "rstd.mgh") outputs["mask_file"] = os.path.join(glmdir, "mask.mgh") outputs["fwhm_file"] = os.path.join(glmdir, "fwhm.dat") outputs["dof_file"] = os.path.join(glmdir, "dof.dat") # Assign the conditional outputs if isdefined(self.inputs.save_residual) and self.inputs.save_residual: outputs["error_file"] = os.path.join(glmdir, "eres.mgh") if isdefined(self.inputs.save_estimate) and self.inputs.save_estimate: outputs["estimate_file"] = os.path.join(glmdir, "yhat.mgh") # Get the contrast directory name(s) if isdefined(self.inputs.contrast): contrasts = [] for c in self.inputs.contrast: if split_filename(c)[2] in [".mat", ".dat", ".mtx", ".con"]: contrasts.append(split_filename(c)[1]) else: contrasts.append(os.path.split(c)[1]) elif isdefined(self.inputs.one_sample) and self.inputs.one_sample: contrasts = ["osgm"] # Add in the contrast images outputs["sig_file"] = [os.path.join(glmdir, c, "sig.mgh") for c in contrasts] outputs["ftest_file"] = [os.path.join(glmdir, c, "F.mgh") for c in contrasts] outputs["gamma_file"] = [ 
os.path.join(glmdir, c, "gamma.mgh") for c in contrasts ] outputs["gamma_var_file"] = [ os.path.join(glmdir, c, "gammavar.mgh") for c in contrasts ] # Add in the PCA results, if relevant if isdefined(self.inputs.pca) and self.inputs.pca: pcadir = os.path.join(glmdir, "pca-eres") outputs["spatial_eigenvectors"] = os.path.join(pcadir, "v.mgh") outputs["frame_eigenvectors"] = os.path.join(pcadir, "u.mtx") outputs["singluar_values"] = os.path.join(pcadir, "sdiag.mat") outputs["svd_stats_file"] = os.path.join(pcadir, "stats.dat") return outputs def _gen_filename(self, name): if name == "glm_dir": return os.getcwd() return None class OneSampleTTest(GLMFit): def __init__(self, **kwargs): super(OneSampleTTest, self).__init__(**kwargs) self.inputs.one_sample = True class BinarizeInputSpec(FSTraitedSpec): in_file = File( exists=True, argstr="--i %s", mandatory=True, copyfile=False, desc="input volume", ) min = traits.Float(argstr="--min %f", xor=["wm_ven_csf"], desc="min thresh") max = traits.Float(argstr="--max %f", xor=["wm_ven_csf"], desc="max thresh") rmin = traits.Float(argstr="--rmin %f", desc="compute min based on rmin*globalmean") rmax = traits.Float(argstr="--rmax %f", desc="compute max based on rmax*globalmean") match = traits.List( traits.Int, argstr="--match %d...", desc="match instead of threshold" ) wm = traits.Bool( argstr="--wm", desc="set match vals to 2 and 41 (aseg for cerebral WM)" ) ventricles = traits.Bool( argstr="--ventricles", desc="set match vals those for aseg ventricles+choroid (not 4th)", ) wm_ven_csf = traits.Bool( argstr="--wm+vcsf", xor=["min", "max"], desc="WM and ventricular CSF, including choroid (not 4th)", ) binary_file = File(argstr="--o %s", genfile=True, desc="binary output volume") out_type = traits.Enum("nii", "nii.gz", "mgz", argstr="", desc="output file type") count_file = traits.Either( traits.Bool, File, argstr="--count %s", desc="save number of hits in ascii file (hits, ntotvox, pct)", ) bin_val = traits.Int( argstr="--binval 
%d", desc="set vox within thresh to val (default is 1)" ) bin_val_not = traits.Int( argstr="--binvalnot %d", desc="set vox outside range to val (default is 0)" ) invert = traits.Bool(argstr="--inv", desc="set binval=0, binvalnot=1") frame_no = traits.Int( argstr="--frame %s", desc="use 0-based frame of input (default is 0)" ) merge_file = File(exists=True, argstr="--merge %s", desc="merge with mergevol") mask_file = File(exists=True, argstr="--mask maskvol", desc="must be within mask") mask_thresh = traits.Float(argstr="--mask-thresh %f", desc="set thresh for mask") abs = traits.Bool( argstr="--abs", desc="take abs of invol first (ie, make unsigned)" ) bin_col_num = traits.Bool( argstr="--bincol", desc="set binarized voxel value to its column number" ) zero_edges = traits.Bool(argstr="--zero-edges", desc="zero the edge voxels") zero_slice_edge = traits.Bool( argstr="--zero-slice-edges", desc="zero the edge slice voxels" ) dilate = traits.Int(argstr="--dilate %d", desc="niters: dilate binarization in 3D") erode = traits.Int( argstr="--erode %d", desc="nerode: erode binarization in 3D (after any dilation)", ) erode2d = traits.Int( argstr="--erode2d %d", desc="nerode2d: erode binarization in 2D (after any 3D erosion)", ) class BinarizeOutputSpec(TraitedSpec): binary_file = File(exists=True, desc="binarized output volume") count_file = File(desc="ascii file containing number of hits") class Binarize(FSCommand): """Use FreeSurfer mri_binarize to threshold an input volume Examples -------- >>> binvol = Binarize(in_file='structural.nii', min=10, binary_file='foo_out.nii') >>> binvol.cmdline 'mri_binarize --o foo_out.nii --i structural.nii --min 10.000000' """ _cmd = "mri_binarize" input_spec = BinarizeInputSpec output_spec = BinarizeOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outfile = self.inputs.binary_file if not isdefined(outfile): if isdefined(self.inputs.out_type): outfile = fname_presuffix( self.inputs.in_file, newpath=os.getcwd(), 
suffix=".".join(("_thresh", self.inputs.out_type)), use_ext=False, ) else: outfile = fname_presuffix( self.inputs.in_file, newpath=os.getcwd(), suffix="_thresh" ) outputs["binary_file"] = os.path.abspath(outfile) value = self.inputs.count_file if isdefined(value): if isinstance(value, bool): if value: outputs["count_file"] = fname_presuffix( self.inputs.in_file, suffix="_count.txt", newpath=os.getcwd(), use_ext=False, ) else: outputs["count_file"] = value return outputs def _format_arg(self, name, spec, value): if name == "count_file": if isinstance(value, bool): fname = self._list_outputs()[name] else: fname = value return spec.argstr % fname if name == "out_type": return "" return super(Binarize, self)._format_arg(name, spec, value) def _gen_filename(self, name): if name == "binary_file": return self._list_outputs()[name] return None class ConcatenateInputSpec(FSTraitedSpec): in_files = InputMultiPath( File(exists=True), desc="Individual volumes to be concatenated", argstr="--i %s...", mandatory=True, ) concatenated_file = File(desc="Output volume", argstr="--o %s", genfile=True) sign = traits.Enum( "abs", "pos", "neg", argstr="--%s", desc="Take only pos or neg voxles from input, or take abs", ) stats = traits.Enum( "sum", "var", "std", "max", "min", "mean", argstr="--%s", desc="Compute the sum, var, std, max, min or mean of the input volumes", ) paired_stats = traits.Enum( "sum", "avg", "diff", "diff-norm", "diff-norm1", "diff-norm2", argstr="--paired-%s", desc="Compute paired sum, avg, or diff", ) gmean = traits.Int( argstr="--gmean %d", desc="create matrix to average Ng groups, Nper=Ntot/Ng" ) mean_div_n = traits.Bool( argstr="--mean-div-n", desc="compute mean/nframes (good for var)" ) multiply_by = traits.Float( argstr="--mul %f", desc="Multiply input volume by some amount" ) add_val = traits.Float( argstr="--add %f", desc="Add some amount to the input volume" ) multiply_matrix_file = File( exists=True, argstr="--mtx %s", desc="Multiply input by an ascii 
matrix in file" ) combine = traits.Bool( argstr="--combine", desc="Combine non-zero values into single frame volume" ) keep_dtype = traits.Bool( argstr="--keep-datatype", desc="Keep voxelwise precision type (default is float" ) max_bonfcor = traits.Bool( argstr="--max-bonfcor", desc="Compute max and bonferroni correct (assumes -log10(ps))", ) max_index = traits.Bool( argstr="--max-index", desc="Compute the index of max voxel in concatenated volumes", ) mask_file = File(exists=True, argstr="--mask %s", desc="Mask input with a volume") vote = traits.Bool( argstr="--vote", desc="Most frequent value at each voxel and fraction of occurances", ) sort = traits.Bool(argstr="--sort", desc="Sort each voxel by ascending frame value") class ConcatenateOutputSpec(TraitedSpec): concatenated_file = File(exists=True, desc="Path/name of the output volume") class Concatenate(FSCommand): """Use Freesurfer mri_concat to combine several input volumes into one output volume. Can concatenate by frames, or compute a variety of statistics on the input volumes. 
Examples -------- Combine two input volumes into one volume with two frames >>> concat = Concatenate() >>> concat.inputs.in_files = ['cont1.nii', 'cont2.nii'] >>> concat.inputs.concatenated_file = 'bar.nii' >>> concat.cmdline 'mri_concat --o bar.nii --i cont1.nii --i cont2.nii' """ _cmd = "mri_concat" input_spec = ConcatenateInputSpec output_spec = ConcatenateOutputSpec def _list_outputs(self): outputs = self.output_spec().get() fname = self.inputs.concatenated_file if not isdefined(fname): fname = "concat_output.nii.gz" outputs["concatenated_file"] = os.path.join(os.getcwd(), fname) return outputs def _gen_filename(self, name): if name == "concatenated_file": return self._list_outputs()[name] return None class SegStatsInputSpec(FSTraitedSpec): _xor_inputs = ("segmentation_file", "annot", "surf_label") segmentation_file = File( exists=True, argstr="--seg %s", xor=_xor_inputs, mandatory=True, desc="segmentation volume path", ) annot = traits.Tuple( traits.Str, traits.Enum("lh", "rh"), traits.Str, argstr="--annot %s %s %s", xor=_xor_inputs, mandatory=True, desc="subject hemi parc : use surface parcellation", ) surf_label = traits.Tuple( traits.Str, traits.Enum("lh", "rh"), traits.Str, argstr="--slabel %s %s %s", xor=_xor_inputs, mandatory=True, desc="subject hemi label : use surface label", ) summary_file = File( argstr="--sum %s", genfile=True, position=-1, desc="Segmentation stats summary table file", ) partial_volume_file = File( exists=True, argstr="--pv %s", desc="Compensate for partial voluming" ) in_file = File( exists=True, argstr="--i %s", desc="Use the segmentation to report stats on this volume", ) frame = traits.Int( argstr="--frame %d", desc="Report stats on nth frame of input volume" ) multiply = traits.Float(argstr="--mul %f", desc="multiply input by val") calc_snr = traits.Bool( argstr="--snr", desc="save mean/std as extra column in output table" ) calc_power = traits.Enum( "sqr", "sqrt", argstr="--%s", desc="Compute either the sqr or the sqrt of the 
input", ) _ctab_inputs = ("color_table_file", "default_color_table", "gca_color_table") color_table_file = File( exists=True, argstr="--ctab %s", xor=_ctab_inputs, desc="color table file with seg id names", ) default_color_table = traits.Bool( argstr="--ctab-default", xor=_ctab_inputs, desc="use $FREESURFER_HOME/FreeSurferColorLUT.txt", ) gca_color_table = File( exists=True, argstr="--ctab-gca %s", xor=_ctab_inputs, desc="get color table from GCA (CMA)", ) segment_id = traits.List( argstr="--id %s...", desc="Manually specify segmentation ids" ) exclude_id = traits.Int(argstr="--excludeid %d", desc="Exclude seg id from report") exclude_ctx_gm_wm = traits.Bool( argstr="--excl-ctxgmwm", desc="exclude cortical gray and white matter" ) wm_vol_from_surf = traits.Bool( argstr="--surf-wm-vol", desc="Compute wm volume from surf" ) cortex_vol_from_surf = traits.Bool( argstr="--surf-ctx-vol", desc="Compute cortex volume from surf" ) non_empty_only = traits.Bool( argstr="--nonempty", desc="Only report nonempty segmentations" ) empty = traits.Bool( argstr="--empty", desc="Report on segmentations listed in the color table" ) mask_file = File( exists=True, argstr="--mask %s", desc="Mask volume (same size as seg" ) mask_thresh = traits.Float( argstr="--maskthresh %f", desc="binarize mask with this threshold <0.5>" ) mask_sign = traits.Enum( "abs", "pos", "neg", "--masksign %s", desc="Sign for mask threshold: pos, neg, or abs", ) mask_frame = traits.Int( "--maskframe %d", requires=["mask_file"], desc="Mask with this (0 based) frame of the mask volume", ) mask_invert = traits.Bool( argstr="--maskinvert", desc="Invert binarized mask volume" ) mask_erode = traits.Int(argstr="--maskerode %d", desc="Erode mask by some amount") brain_vol = traits.Enum( "brain-vol-from-seg", "brainmask", argstr="--%s", desc="Compute brain volume either with ``brainmask`` or ``brain-vol-from-seg``", ) brainmask_file = File( argstr="--brainmask %s", exists=True, desc="Load brain mask and compute the volume 
of the brain as the non-zero voxels in this volume", ) etiv = traits.Bool(argstr="--etiv", desc="Compute ICV from talairach transform") etiv_only = traits.Enum( "etiv", "old-etiv", "--%s-only", desc="Compute etiv and exit. Use ``etiv`` or ``old-etiv``", ) avgwf_txt_file = traits.Either( traits.Bool, File, argstr="--avgwf %s", desc="Save average waveform into file (bool or filename)", ) avgwf_file = traits.Either( traits.Bool, File, argstr="--avgwfvol %s", desc="Save as binary volume (bool or filename)", ) sf_avg_file = traits.Either( traits.Bool, File, argstr="--sfavg %s", desc="Save mean across space and time" ) vox = traits.List( traits.Int, argstr="--vox %s", desc="Replace seg with all 0s except at C R S (three int inputs)", ) supratent = traits.Bool(argstr="--supratent", desc="Undocumented input flag") subcort_gm = traits.Bool( argstr="--subcortgray", desc="Compute volume of subcortical gray matter" ) total_gray = traits.Bool( argstr="--totalgray", desc="Compute volume of total gray matter" ) euler = traits.Bool( argstr="--euler", desc="Write out number of defect holes in orig.nofix based on the euler number", ) in_intensity = File( argstr="--in %s --in-intensity-name %s", desc="Undocumented input norm.mgz file" ) intensity_units = traits.Enum( "MR", argstr="--in-intensity-units %s", requires=["in_intensity"], desc="Intensity units", ) class SegStatsOutputSpec(TraitedSpec): summary_file = File(exists=True, desc="Segmentation summary statistics table") avgwf_txt_file = File( desc="Text file with functional statistics averaged over segs" ) avgwf_file = File(desc="Volume with functional statistics averaged over segs") sf_avg_file = File( desc="Text file with func statistics averaged over segs and framss" ) class SegStats(FSCommand): """Use FreeSurfer mri_segstats for ROI analysis Examples -------- >>> import nipype.interfaces.freesurfer as fs >>> ss = fs.SegStats() >>> ss.inputs.annot = ('PWS04', 'lh', 'aparc') >>> ss.inputs.in_file = 'functional.nii' >>> 
ss.inputs.subjects_dir = '.' >>> ss.inputs.avgwf_txt_file = 'avgwf.txt' >>> ss.inputs.summary_file = 'summary.stats' >>> ss.cmdline 'mri_segstats --annot PWS04 lh aparc --avgwf ./avgwf.txt --i functional.nii --sum ./summary.stats' """ _cmd = "mri_segstats" input_spec = SegStatsInputSpec output_spec = SegStatsOutputSpec def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.summary_file): outputs["summary_file"] = os.path.abspath(self.inputs.summary_file) else: outputs["summary_file"] = os.path.join(os.getcwd(), "summary.stats") suffices = dict( avgwf_txt_file="_avgwf.txt", avgwf_file="_avgwf.nii.gz", sf_avg_file="sfavg.txt", ) if isdefined(self.inputs.segmentation_file): _, src = os.path.split(self.inputs.segmentation_file) if isdefined(self.inputs.annot): src = "_".join(self.inputs.annot) if isdefined(self.inputs.surf_label): src = "_".join(self.inputs.surf_label) for name, suffix in list(suffices.items()): value = getattr(self.inputs, name) if isdefined(value): if isinstance(value, bool): outputs[name] = fname_presuffix( src, suffix=suffix, newpath=os.getcwd(), use_ext=False ) else: outputs[name] = os.path.abspath(value) return outputs def _format_arg(self, name, spec, value): if name in ("summary_file", "avgwf_txt_file"): if not isinstance(value, bool): if not os.path.isabs(value): value = os.path.join(".", value) if name in ["avgwf_txt_file", "avgwf_file", "sf_avg_file"]: if isinstance(value, bool): fname = self._list_outputs()[name] else: fname = value return spec.argstr % fname elif name == "in_intensity": intensity_name = os.path.basename(self.inputs.in_intensity).replace( ".mgz", "" ) return spec.argstr % (value, intensity_name) return super(SegStats, self)._format_arg(name, spec, value) def _gen_filename(self, name): if name == "summary_file": return self._list_outputs()[name] return None class SegStatsReconAllInputSpec(SegStatsInputSpec): # recon-all input requirements subject_id = traits.String( "subject_id", 
usedefault=True, argstr="--subject %s", mandatory=True, desc="Subject id being processed", ) # implicit ribbon = File(mandatory=True, exists=True, desc="Input file mri/ribbon.mgz") presurf_seg = File(exists=True, desc="Input segmentation volume") transform = File(mandatory=True, exists=True, desc="Input transform file") lh_orig_nofix = File(mandatory=True, exists=True, desc="Input lh.orig.nofix") rh_orig_nofix = File(mandatory=True, exists=True, desc="Input rh.orig.nofix") lh_white = File( mandatory=True, exists=True, desc="Input file must be /surf/lh.white", ) rh_white = File( mandatory=True, exists=True, desc="Input file must be /surf/rh.white", ) lh_pial = File( mandatory=True, exists=True, desc="Input file must be /surf/lh.pial" ) rh_pial = File( mandatory=True, exists=True, desc="Input file must be /surf/rh.pial" ) aseg = File(exists=True, desc="Mandatory implicit input in 5.3") copy_inputs = traits.Bool( desc="If running as a node, set this to True " "otherwise, this will copy the implicit inputs " "to the node directory." ) class SegStatsReconAll(SegStats): """ This class inherits SegStats and modifies it for use in a recon-all workflow. This implementation mandates implicit inputs that SegStats. To ensure backwards compatability of SegStats, this class was created. 
Examples -------- >>> from nipype.interfaces.freesurfer import SegStatsReconAll >>> segstatsreconall = SegStatsReconAll() >>> segstatsreconall.inputs.annot = ('PWS04', 'lh', 'aparc') >>> segstatsreconall.inputs.avgwf_txt_file = 'avgwf.txt' >>> segstatsreconall.inputs.summary_file = 'summary.stats' >>> segstatsreconall.inputs.subject_id = '10335' >>> segstatsreconall.inputs.ribbon = 'wm.mgz' >>> segstatsreconall.inputs.transform = 'trans.mat' >>> segstatsreconall.inputs.presurf_seg = 'wm.mgz' >>> segstatsreconall.inputs.lh_orig_nofix = 'lh.pial' >>> segstatsreconall.inputs.rh_orig_nofix = 'lh.pial' >>> segstatsreconall.inputs.lh_pial = 'lh.pial' >>> segstatsreconall.inputs.rh_pial = 'lh.pial' >>> segstatsreconall.inputs.lh_white = 'lh.pial' >>> segstatsreconall.inputs.rh_white = 'lh.pial' >>> segstatsreconall.inputs.empty = True >>> segstatsreconall.inputs.brain_vol = 'brain-vol-from-seg' >>> segstatsreconall.inputs.exclude_ctx_gm_wm = True >>> segstatsreconall.inputs.supratent = True >>> segstatsreconall.inputs.subcort_gm = True >>> segstatsreconall.inputs.etiv = True >>> segstatsreconall.inputs.wm_vol_from_surf = True >>> segstatsreconall.inputs.cortex_vol_from_surf = True >>> segstatsreconall.inputs.total_gray = True >>> segstatsreconall.inputs.euler = True >>> segstatsreconall.inputs.exclude_id = 0 >>> segstatsreconall.cmdline 'mri_segstats --annot PWS04 lh aparc --avgwf ./avgwf.txt --brain-vol-from-seg --surf-ctx-vol --empty --etiv --euler --excl-ctxgmwm --excludeid 0 --subcortgray --subject 10335 --supratent --totalgray --surf-wm-vol --sum ./summary.stats' """ input_spec = SegStatsReconAllInputSpec output_spec = SegStatsOutputSpec def _format_arg(self, name, spec, value): if name == "brainmask_file": return spec.argstr % os.path.basename(value) return super(SegStatsReconAll, self)._format_arg(name, spec, value) def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() if "subjects_dir" in inputs: inputs["subjects_dir"] = 
self.inputs.subjects_dir copy2subjdir(self, self.inputs.lh_orig_nofix, "surf", "lh.orig.nofix") copy2subjdir(self, self.inputs.rh_orig_nofix, "surf", "rh.orig.nofix") copy2subjdir(self, self.inputs.lh_white, "surf", "lh.white") copy2subjdir(self, self.inputs.rh_white, "surf", "rh.white") copy2subjdir(self, self.inputs.lh_pial, "surf", "lh.pial") copy2subjdir(self, self.inputs.rh_pial, "surf", "rh.pial") copy2subjdir(self, self.inputs.ribbon, "mri", "ribbon.mgz") copy2subjdir(self, self.inputs.presurf_seg, "mri", "aseg.presurf.mgz") copy2subjdir(self, self.inputs.aseg, "mri", "aseg.mgz") copy2subjdir( self, self.inputs.transform, os.path.join("mri", "transforms"), "talairach.xfm", ) copy2subjdir(self, self.inputs.in_intensity, "mri") copy2subjdir(self, self.inputs.brainmask_file, "mri") return super(SegStatsReconAll, self).run(**inputs) class Label2VolInputSpec(FSTraitedSpec): label_file = InputMultiPath( File(exists=True), argstr="--label %s...", xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), copyfile=False, mandatory=True, desc="list of label files", ) annot_file = File( exists=True, argstr="--annot %s", xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), requires=("subject_id", "hemi"), mandatory=True, copyfile=False, desc="surface annotation file", ) seg_file = File( exists=True, argstr="--seg %s", xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), mandatory=True, copyfile=False, desc="segmentation file", ) aparc_aseg = traits.Bool( argstr="--aparc+aseg", xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), mandatory=True, desc="use aparc+aseg.mgz in subjectdir as seg", ) template_file = File( exists=True, argstr="--temp %s", mandatory=True, desc="output template volume" ) reg_file = File( exists=True, argstr="--reg %s", xor=("reg_file", "reg_header", "identity"), desc="tkregister style matrix VolXYZ = R*LabelXYZ", ) reg_header = File( exists=True, argstr="--regheader %s", xor=("reg_file", "reg_header", "identity"), 
desc="label template volume", ) identity = traits.Bool( argstr="--identity", xor=("reg_file", "reg_header", "identity"), desc="set R=I" ) invert_mtx = traits.Bool( argstr="--invertmtx", desc="Invert the registration matrix" ) fill_thresh = traits.Range( 0.0, 1.0, argstr="--fillthresh %g", desc="thresh : between 0 and 1" ) label_voxel_volume = traits.Float( argstr="--labvoxvol %f", desc="volume of each label point (def 1mm3)" ) proj = traits.Tuple( traits.Enum("abs", "frac"), traits.Float, traits.Float, traits.Float, argstr="--proj %s %f %f %f", requires=("subject_id", "hemi"), desc="project along surface normal", ) subject_id = traits.Str(argstr="--subject %s", desc="subject id") hemi = traits.Enum( "lh", "rh", argstr="--hemi %s", desc="hemisphere to use lh or rh" ) surface = traits.Str(argstr="--surf %s", desc="use surface instead of white") vol_label_file = File(argstr="--o %s", genfile=True, desc="output volume") label_hit_file = File( argstr="--hits %s", desc="file with each frame is nhits for a label" ) map_label_stat = File( argstr="--label-stat %s", desc="map the label stats field into the vol" ) native_vox2ras = traits.Bool( argstr="--native-vox2ras", desc="use native vox2ras xform instead of tkregister-style", ) class Label2VolOutputSpec(TraitedSpec): vol_label_file = File(exists=True, desc="output volume") class Label2Vol(FSCommand): """Make a binary volume from a Freesurfer label Examples -------- >>> binvol = Label2Vol(label_file='cortex.label', template_file='structural.nii', reg_file='register.dat', fill_thresh=0.5, vol_label_file='foo_out.nii') >>> binvol.cmdline 'mri_label2vol --fillthresh 0.5 --label cortex.label --reg register.dat --temp structural.nii --o foo_out.nii' """ _cmd = "mri_label2vol" input_spec = Label2VolInputSpec output_spec = Label2VolOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outfile = self.inputs.vol_label_file if not isdefined(outfile): for key in ["label_file", "annot_file", "seg_file"]: if 
isdefined(getattr(self.inputs, key)): path = getattr(self.inputs, key) if isinstance(path, list): path = path[0] _, src = os.path.split(path) if isdefined(self.inputs.aparc_aseg): src = "aparc+aseg.mgz" outfile = fname_presuffix( src, suffix="_vol.nii.gz", newpath=os.getcwd(), use_ext=False ) outputs["vol_label_file"] = outfile return outputs def _gen_filename(self, name): if name == "vol_label_file": return self._list_outputs()[name] return None class MS_LDAInputSpec(FSTraitedSpec): lda_labels = traits.List( traits.Int(), argstr="-lda %s", mandatory=True, minlen=2, maxlen=2, sep=" ", desc="pair of class labels to optimize", ) weight_file = File( argstr="-weight %s", mandatory=True, desc="filename for the LDA weights (input or output)", ) vol_synth_file = File( exists=False, argstr="-synth %s", mandatory=True, desc=("filename for the synthesized output " "volume"), ) label_file = File( exists=True, argstr="-label %s", desc="filename of the label volume" ) mask_file = File( exists=True, argstr="-mask %s", desc="filename of the brain mask volume" ) shift = traits.Int( argstr="-shift %d", desc="shift all values equal to the given value to zero" ) conform = traits.Bool( argstr="-conform", desc=("Conform the input volumes (brain mask " "typically already conformed)"), ) use_weights = traits.Bool( argstr="-W", desc=("Use the weights from a previously " "generated weight file") ) images = InputMultiPath( File(exists=True), argstr="%s", mandatory=True, copyfile=False, desc="list of input FLASH images", position=-1, ) class MS_LDAOutputSpec(TraitedSpec): weight_file = File(exists=True, desc="") vol_synth_file = File(exists=True, desc="") class MS_LDA(FSCommand): """Perform LDA reduction on the intensity space of an arbitrary # of FLASH images Examples -------- >>> grey_label = 2 >>> white_label = 3 >>> zero_value = 1 >>> optimalWeights = MS_LDA(lda_labels=[grey_label, white_label], \ label_file='label.mgz', weight_file='weights.txt', \ shift=zero_value, 
vol_synth_file='synth_out.mgz', \ conform=True, use_weights=True, \ images=['FLASH1.mgz', 'FLASH2.mgz', 'FLASH3.mgz']) >>> optimalWeights.cmdline 'mri_ms_LDA -conform -label label.mgz -lda 2 3 -shift 1 -W -synth synth_out.mgz -weight weights.txt FLASH1.mgz FLASH2.mgz FLASH3.mgz' """ _cmd = "mri_ms_LDA" input_spec = MS_LDAInputSpec output_spec = MS_LDAOutputSpec def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.output_synth): outputs["vol_synth_file"] = os.path.abspath(self.inputs.output_synth) else: outputs["vol_synth_file"] = os.path.abspath(self.inputs.vol_synth_file) if not isdefined(self.inputs.use_weights) or self.inputs.use_weights is False: outputs["weight_file"] = os.path.abspath(self.inputs.weight_file) return outputs def _verify_weights_file_exists(self): if not os.path.exists(os.path.abspath(self.inputs.weight_file)): raise traits.TraitError( "MS_LDA: use_weights must accompany an existing weights file" ) def _format_arg(self, name, spec, value): if name == "use_weights": if self.inputs.use_weights is True: self._verify_weights_file_exists() else: return "" # TODO: Fix bug when boolean values are set explicitly to false return super(MS_LDA, self)._format_arg(name, spec, value) def _gen_filename(self, name): pass class Label2LabelInputSpec(FSTraitedSpec): hemisphere = traits.Enum( "lh", "rh", argstr="--hemi %s", mandatory=True, desc="Input hemisphere" ) subject_id = traits.String( "subject_id", usedefault=True, argstr="--trgsubject %s", mandatory=True, desc="Target subject", ) sphere_reg = File( mandatory=True, exists=True, desc="Implicit input .sphere.reg" ) white = File(mandatory=True, exists=True, desc="Implicit input .white") source_sphere_reg = File( mandatory=True, exists=True, desc="Implicit input .sphere.reg" ) source_white = File( mandatory=True, exists=True, desc="Implicit input .white" ) source_label = File( argstr="--srclabel %s", mandatory=True, exists=True, desc="Source label" ) source_subject = traits.String( 
argstr="--srcsubject %s", mandatory=True, desc="Source subject name" ) # optional out_file = File( argstr="--trglabel %s", name_source=["source_label"], name_template="%s_converted", hash_files=False, keep_extension=True, desc="Target label", ) registration_method = traits.Enum( "surface", "volume", usedefault=True, argstr="--regmethod %s", desc="Registration method", ) copy_inputs = traits.Bool( desc="If running as a node, set this to True." + "This will copy the input files to the node " + "directory." ) class Label2LabelOutputSpec(TraitedSpec): out_file = File(exists=True, desc="Output label") class Label2Label(FSCommand): """ Converts a label in one subject's space to a label in another subject's space using either talairach or spherical as an intermediate registration space. If a source mask is used, then the input label must have been created from a surface (ie, the vertex numbers are valid). The format can be anything supported by mri_convert or curv or paint. Vertices in the source label that do not meet threshold in the mask will be removed from the label. 
Examples -------- >>> from nipype.interfaces.freesurfer import Label2Label >>> l2l = Label2Label() >>> l2l.inputs.hemisphere = 'lh' >>> l2l.inputs.subject_id = '10335' >>> l2l.inputs.sphere_reg = 'lh.pial' >>> l2l.inputs.white = 'lh.pial' >>> l2l.inputs.source_subject = 'fsaverage' >>> l2l.inputs.source_label = 'lh-pial.stl' >>> l2l.inputs.source_white = 'lh.pial' >>> l2l.inputs.source_sphere_reg = 'lh.pial' >>> l2l.cmdline 'mri_label2label --hemi lh --trglabel lh-pial_converted.stl --regmethod surface --srclabel lh-pial.stl --srcsubject fsaverage --trgsubject 10335' """ _cmd = "mri_label2label" input_spec = Label2LabelInputSpec output_spec = Label2LabelOutputSpec def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.join( self.inputs.subjects_dir, self.inputs.subject_id, "label", self.inputs.out_file, ) return outputs def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() if "subjects_dir" in inputs: inputs["subjects_dir"] = self.inputs.subjects_dir hemi = self.inputs.hemisphere copy2subjdir( self, self.inputs.sphere_reg, "surf", "{0}.sphere.reg".format(hemi) ) copy2subjdir(self, self.inputs.white, "surf", "{0}.white".format(hemi)) copy2subjdir( self, self.inputs.source_sphere_reg, "surf", "{0}.sphere.reg".format(hemi), subject_id=self.inputs.source_subject, ) copy2subjdir( self, self.inputs.source_white, "surf", "{0}.white".format(hemi), subject_id=self.inputs.source_subject, ) # label dir must exist in order for output file to be written label_dir = os.path.join( self.inputs.subjects_dir, self.inputs.subject_id, "label" ) if not os.path.isdir(label_dir): os.makedirs(label_dir) return super(Label2Label, self).run(**inputs) class Label2AnnotInputSpec(FSTraitedSpec): # required hemisphere = traits.Enum( "lh", "rh", argstr="--hemi %s", mandatory=True, desc="Input hemisphere" ) subject_id = traits.String( "subject_id", usedefault=True, argstr="--s %s", mandatory=True, desc="Subject name/ID", ) 
in_labels = traits.List( argstr="--l %s...", mandatory=True, desc="List of input label files" ) out_annot = traits.String( argstr="--a %s", mandatory=True, desc="Name of the annotation to create" ) orig = File(exists=True, mandatory=True, desc="implicit {hemisphere}.orig") # optional keep_max = traits.Bool( argstr="--maxstatwinner", desc="Keep label with highest 'stat' value" ) verbose_off = traits.Bool( argstr="--noverbose", desc="Turn off overlap and stat override messages" ) color_table = File( argstr="--ctab %s", exists=True, desc="File that defines the structure names, their indices, and their color", ) copy_inputs = traits.Bool( desc="copy implicit inputs and create a temp subjects_dir" ) class Label2AnnotOutputSpec(TraitedSpec): out_file = File(exists=True, desc="Output annotation file") class Label2Annot(FSCommand): """ Converts a set of surface labels to an annotation file Examples -------- >>> from nipype.interfaces.freesurfer import Label2Annot >>> l2a = Label2Annot() >>> l2a.inputs.hemisphere = 'lh' >>> l2a.inputs.subject_id = '10335' >>> l2a.inputs.in_labels = ['lh.aparc.label'] >>> l2a.inputs.orig = 'lh.pial' >>> l2a.inputs.out_annot = 'test' >>> l2a.cmdline 'mris_label2annot --hemi lh --l lh.aparc.label --a test --s 10335' """ _cmd = "mris_label2annot" input_spec = Label2AnnotInputSpec output_spec = Label2AnnotOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() if "subjects_dir" in inputs: inputs["subjects_dir"] = self.inputs.subjects_dir copy2subjdir( self, self.inputs.orig, folder="surf", basename="{0}.orig".format(self.inputs.hemisphere), ) # label dir must exist in order for output file to be written label_dir = os.path.join( self.inputs.subjects_dir, self.inputs.subject_id, "label" ) if not os.path.isdir(label_dir): os.makedirs(label_dir) return super(Label2Annot, self).run(**inputs) def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.join( 
str(self.inputs.subjects_dir), str(self.inputs.subject_id), "label", str(self.inputs.hemisphere) + "." + str(self.inputs.out_annot) + ".annot", ) return outputs class SphericalAverageInputSpec(FSTraitedSpec): out_file = File( argstr="%s", genfile=True, exists=False, position=-1, desc="Output filename" ) in_average = Directory( argstr="%s", exists=True, genfile=True, position=-2, desc="Average subject" ) in_surf = File( argstr="%s", mandatory=True, exists=True, position=-3, desc="Input surface file" ) hemisphere = traits.Enum( "lh", "rh", argstr="%s", mandatory=True, position=-4, desc="Input hemisphere" ) fname = traits.String( argstr="%s", mandatory=True, position=-5, desc="""\ Filename from the average subject directory. Example: to use rh.entorhinal.label as the input label filename, set fname to 'rh.entorhinal' and which to 'label'. The program will then search for ``/label/rh.entorhinal.label``""", ) which = traits.Enum( "coords", "label", "vals", "curv", "area", argstr="%s", mandatory=True, position=-6, desc="No documentation", ) subject_id = traits.String(argstr="-o %s", mandatory=True, desc="Output subject id") # optional erode = traits.Int(argstr="-erode %d", desc="Undocumented") in_orig = File(argstr="-orig %s", exists=True, desc="Original surface filename") threshold = traits.Float(argstr="-t %.1f", desc="Undocumented") class SphericalAverageOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output label") class SphericalAverage(FSCommand): """ This program will add a template into an average surface. Examples -------- >>> from nipype.interfaces.freesurfer import SphericalAverage >>> sphericalavg = SphericalAverage() >>> sphericalavg.inputs.out_file = 'test.out' >>> sphericalavg.inputs.in_average = '.' 
>>> sphericalavg.inputs.in_surf = 'lh.pial' >>> sphericalavg.inputs.hemisphere = 'lh' >>> sphericalavg.inputs.fname = 'lh.entorhinal' >>> sphericalavg.inputs.which = 'label' >>> sphericalavg.inputs.subject_id = '10335' >>> sphericalavg.inputs.erode = 2 >>> sphericalavg.inputs.threshold = 5 >>> sphericalavg.cmdline 'mris_spherical_average -erode 2 -o 10335 -t 5.0 label lh.entorhinal lh pial . test.out' """ _cmd = "mris_spherical_average" input_spec = SphericalAverageInputSpec output_spec = SphericalAverageOutputSpec def _format_arg(self, name, spec, value): if name == "in_orig" or name == "in_surf": surf = os.path.basename(value) for item in ["lh.", "rh."]: surf = surf.replace(item, "") return spec.argstr % surf return super(SphericalAverage, self)._format_arg(name, spec, value) def _gen_filename(self, name): if name == "in_average": avg_subject = str(self.inputs.hemisphere) + ".EC_average" avg_directory = os.path.join(self.inputs.subjects_dir, avg_subject) if not os.path.isdir(avg_directory): fs_home = os.path.abspath(os.environ.get("FREESURFER_HOME")) return avg_subject elif name == "out_file": return self._list_outputs()[name] else: return None def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.out_file): outputs["out_file"] = os.path.abspath(self.inputs.out_file) else: out_dir = os.path.join( self.inputs.subjects_dir, self.inputs.subject_id, "label" ) if isdefined(self.inputs.in_average): basename = os.path.basename(self.inputs.in_average) basename = basename.replace("_", "_exvivo_") + ".label" else: basename = str(self.inputs.hemisphere) + ".EC_exvivo_average.label" outputs["out_file"] = os.path.join(out_dir, basename) return outputs nipype-1.7.0/nipype/interfaces/freesurfer/preprocess.py000066400000000000000000003427431413403311400233700ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to 
various commands provided by FreeSurfer """ import os import os.path as op from glob import glob import shutil import sys import numpy as np from nibabel import load from ... import logging, LooseVersion from ...utils.filemanip import fname_presuffix, check_depends from ..io import FreeSurferSource from ..base import ( TraitedSpec, File, traits, Directory, InputMultiPath, OutputMultiPath, CommandLine, CommandLineInputSpec, isdefined, ) from .base import FSCommand, FSTraitedSpec, FSTraitedSpecOpenMP, FSCommandOpenMP, Info from .utils import copy2subjdir __docformat__ = "restructuredtext" iflogger = logging.getLogger("nipype.interface") # Keeping this to avoid breaking external programs that depend on it, but # this should not be used internally FSVersion = Info.looseversion().vstring class ParseDICOMDirInputSpec(FSTraitedSpec): dicom_dir = Directory( exists=True, argstr="--d %s", mandatory=True, desc="path to siemens dicom directory", ) dicom_info_file = File( "dicominfo.txt", argstr="--o %s", usedefault=True, desc="file to which results are written", ) sortbyrun = traits.Bool(argstr="--sortbyrun", desc="assign run numbers") summarize = traits.Bool( argstr="--summarize", desc="only print out info for run leaders" ) class ParseDICOMDirOutputSpec(TraitedSpec): dicom_info_file = File(exists=True, desc="text file containing dicom information") class ParseDICOMDir(FSCommand): """Uses mri_parse_sdcmdir to get information from dicom directories Examples -------- >>> from nipype.interfaces.freesurfer import ParseDICOMDir >>> dcminfo = ParseDICOMDir() >>> dcminfo.inputs.dicom_dir = '.' >>> dcminfo.inputs.sortbyrun = True >>> dcminfo.inputs.summarize = True >>> dcminfo.cmdline 'mri_parse_sdcmdir --d . 
--o dicominfo.txt --sortbyrun --summarize' """ _cmd = "mri_parse_sdcmdir" input_spec = ParseDICOMDirInputSpec output_spec = ParseDICOMDirOutputSpec def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.dicom_info_file): outputs["dicom_info_file"] = os.path.join( os.getcwd(), self.inputs.dicom_info_file ) return outputs class UnpackSDICOMDirInputSpec(FSTraitedSpec): source_dir = Directory( exists=True, argstr="-src %s", mandatory=True, desc="directory with the DICOM files", ) output_dir = Directory( argstr="-targ %s", desc="top directory into which the files will be unpacked" ) run_info = traits.Tuple( traits.Int, traits.Str, traits.Str, traits.Str, mandatory=True, argstr="-run %d %s %s %s", xor=("run_info", "config", "seq_config"), desc="runno subdir format name : spec unpacking rules on cmdline", ) config = File( exists=True, argstr="-cfg %s", mandatory=True, xor=("run_info", "config", "seq_config"), desc="specify unpacking rules in file", ) seq_config = File( exists=True, argstr="-seqcfg %s", mandatory=True, xor=("run_info", "config", "seq_config"), desc="specify unpacking rules based on sequence", ) dir_structure = traits.Enum( "fsfast", "generic", argstr="-%s", desc="unpack to specified directory structures", ) no_info_dump = traits.Bool(argstr="-noinfodump", desc="do not create infodump file") scan_only = File( exists=True, argstr="-scanonly %s", desc="only scan the directory and put result in file", ) log_file = File(exists=True, argstr="-log %s", desc="explicilty set log file") spm_zeropad = traits.Int( argstr="-nspmzeropad %d", desc="set frame number zero padding width for SPM" ) no_unpack_err = traits.Bool( argstr="-no-unpackerr", desc="do not try to unpack runs with errors" ) class UnpackSDICOMDir(FSCommand): """Use unpacksdcmdir to convert dicom files Call unpacksdcmdir -help from the command line to see more information on using this command. 
Examples -------- >>> from nipype.interfaces.freesurfer import UnpackSDICOMDir >>> unpack = UnpackSDICOMDir() >>> unpack.inputs.source_dir = '.' >>> unpack.inputs.output_dir = '.' >>> unpack.inputs.run_info = (5, 'mprage', 'nii', 'struct') >>> unpack.inputs.dir_structure = 'generic' >>> unpack.cmdline 'unpacksdcmdir -generic -targ . -run 5 mprage nii struct -src .' """ _cmd = "unpacksdcmdir" input_spec = UnpackSDICOMDirInputSpec class MRIConvertInputSpec(FSTraitedSpec): read_only = traits.Bool(argstr="--read_only", desc="read the input volume") no_write = traits.Bool(argstr="--no_write", desc="do not write output") in_info = traits.Bool(argstr="--in_info", desc="display input info") out_info = traits.Bool(argstr="--out_info", desc="display output info") in_stats = traits.Bool(argstr="--in_stats", desc="display input stats") out_stats = traits.Bool(argstr="--out_stats", desc="display output stats") in_matrix = traits.Bool(argstr="--in_matrix", desc="display input matrix") out_matrix = traits.Bool(argstr="--out_matrix", desc="display output matrix") in_i_size = traits.Int(argstr="--in_i_size %d", desc="input i size") in_j_size = traits.Int(argstr="--in_j_size %d", desc="input j size") in_k_size = traits.Int(argstr="--in_k_size %d", desc="input k size") force_ras = traits.Bool( argstr="--force_ras_good", desc="use default when orientation info absent" ) in_i_dir = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="--in_i_direction %f %f %f", desc=" ", ) in_j_dir = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="--in_j_direction %f %f %f", desc=" ", ) in_k_dir = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="--in_k_direction %f %f %f", desc=" ", ) _orientations = [ "LAI", "LIA", "ALI", "AIL", "ILA", "IAL", "LAS", "LSA", "ALS", "ASL", "SLA", "SAL", "LPI", "LIP", "PLI", "PIL", "ILP", "IPL", "LPS", "LSP", "PLS", "PSL", "SLP", "SPL", "RAI", "RIA", "ARI", "AIR", "IRA", "IAR", "RAS", "RSA", "ARS", "ASR", "SRA", "SAR", "RPI", 
"RIP", "PRI", "PIR", "IRP", "IPR", "RPS", "RSP", "PRS", "PSR", "SRP", "SPR", ] # _orientations = [comb for comb in itertools.chain(*[[''.join(c) for c in itertools.permutations(s)] for s in [a+b+c for a in 'LR' for b in 'AP' for c in 'IS']])] in_orientation = traits.Enum( _orientations, argstr="--in_orientation %s", desc="specify the input orientation", ) in_center = traits.List( traits.Float, maxlen=3, argstr="--in_center %s", desc=" ", ) sphinx = traits.Bool(argstr="--sphinx", desc="change orientation info to sphinx") out_i_count = traits.Int( argstr="--out_i_count %d", desc="some count ?? in i direction" ) out_j_count = traits.Int( argstr="--out_j_count %d", desc="some count ?? in j direction" ) out_k_count = traits.Int( argstr="--out_k_count %d", desc="some count ?? in k direction" ) vox_size = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="-voxsize %f %f %f", desc=" specify the size (mm) - useful for upsampling or downsampling", ) out_i_size = traits.Int(argstr="--out_i_size %d", desc="output i size") out_j_size = traits.Int(argstr="--out_j_size %d", desc="output j size") out_k_size = traits.Int(argstr="--out_k_size %d", desc="output k size") out_i_dir = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="--out_i_direction %f %f %f", desc=" ", ) out_j_dir = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="--out_j_direction %f %f %f", desc=" ", ) out_k_dir = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="--out_k_direction %f %f %f", desc=" ", ) out_orientation = traits.Enum( _orientations, argstr="--out_orientation %s", desc="specify the output orientation", ) out_center = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="--out_center %f %f %f", desc=" ", ) out_datatype = traits.Enum( "uchar", "short", "int", "float", argstr="--out_data_type %s", desc="output data type ", ) resample_type = traits.Enum( "interpolate", "weighted", "nearest", "sinc", "cubic", argstr="--resample_type %s", 
desc=" (default is interpolate)", ) no_scale = traits.Bool(argstr="--no_scale 1", desc="dont rescale values for COR") no_change = traits.Bool( argstr="--nochange", desc="don't change type of input to that of template" ) tr = traits.Int(argstr="-tr %d", desc="TR in msec") te = traits.Int(argstr="-te %d", desc="TE in msec") ti = traits.Int(argstr="-ti %d", desc="TI in msec (note upper case flag)") autoalign_matrix = File( exists=True, argstr="--autoalign %s", desc="text file with autoalign matrix" ) unwarp_gradient = traits.Bool( argstr="--unwarp_gradient_nonlinearity", desc="unwarp gradient nonlinearity" ) apply_transform = File( exists=True, argstr="--apply_transform %s", desc="apply xfm file" ) apply_inv_transform = File( exists=True, argstr="--apply_inverse_transform %s", desc="apply inverse transformation xfm file", ) devolve_transform = traits.Str(argstr="--devolvexfm %s", desc="subject id") crop_center = traits.Tuple( traits.Int, traits.Int, traits.Int, argstr="--crop %d %d %d", desc=" crop to 256 around center (x, y, z)", ) crop_size = traits.Tuple( traits.Int, traits.Int, traits.Int, argstr="--cropsize %d %d %d", desc=" crop to size ", ) cut_ends = traits.Int( argstr="--cutends %d", desc="remove ncut slices from the ends" ) slice_crop = traits.Tuple( traits.Int, traits.Int, argstr="--slice-crop %d %d", desc="s_start s_end : keep slices s_start to s_end", ) slice_reverse = traits.Bool( argstr="--slice-reverse", desc="reverse order of slices, update vox2ras" ) slice_bias = traits.Float( argstr="--slice-bias %f", desc="apply half-cosine bias field" ) fwhm = traits.Float(argstr="--fwhm %f", desc="smooth input volume by fwhm mm") _filetypes = [ "cor", "mgh", "mgz", "minc", "analyze", "analyze4d", "spm", "afni", "brik", "bshort", "bfloat", "sdt", "outline", "otl", "gdf", "nifti1", "nii", "niigz", ] _infiletypes = ["ge", "gelx", "lx", "ximg", "siemens", "dicom", "siemens_dicom"] in_type = traits.Enum( _filetypes + _infiletypes, argstr="--in_type %s", desc="input 
file type" ) out_type = traits.Enum(_filetypes, argstr="--out_type %s", desc="output file type") ascii = traits.Bool( argstr="--ascii", desc="save output as ascii col>row>slice>frame" ) reorder = traits.Tuple( traits.Int, traits.Int, traits.Int, argstr="--reorder %d %d %d", desc="olddim1 olddim2 olddim3", ) invert_contrast = traits.Float( argstr="--invert_contrast %f", desc="threshold for inversting contrast" ) in_file = File( exists=True, mandatory=True, position=-2, argstr="--input_volume %s", desc="File to read/convert", ) out_file = File( argstr="--output_volume %s", position=-1, genfile=True, desc="output filename or True to generate one", ) conform = traits.Bool( argstr="--conform", desc="conform to 1mm voxel size in coronal slice direction with 256^3 or more", ) conform_min = traits.Bool(argstr="--conform_min", desc="conform to smallest size") conform_size = traits.Float( argstr="--conform_size %s", desc="conform to size_in_mm" ) cw256 = traits.Bool(argstr="--cw256", desc="confrom to dimensions of 256^3") parse_only = traits.Bool(argstr="--parse_only", desc="parse input only") subject_name = traits.Str(argstr="--subject_name %s", desc="subject name ???") reslice_like = File( exists=True, argstr="--reslice_like %s", desc="reslice output to match file" ) template_type = traits.Enum( _filetypes + _infiletypes, argstr="--template_type %s", desc="template file type", ) split = traits.Bool( argstr="--split", desc="split output frames into separate output files." 
) frame = traits.Int(argstr="--frame %d", desc="keep only 0-based frame number") midframe = traits.Bool(argstr="--mid-frame", desc="keep only the middle frame") skip_n = traits.Int(argstr="--nskip %d", desc="skip the first n frames") drop_n = traits.Int(argstr="--ndrop %d", desc="drop the last n frames") frame_subsample = traits.Tuple( traits.Int, traits.Int, traits.Int, argstr="--fsubsample %d %d %d", desc="start delta end : frame subsampling (end = -1 for end)", ) in_scale = traits.Float(argstr="--scale %f", desc="input intensity scale factor") out_scale = traits.Float( argstr="--out-scale %d", desc="output intensity scale factor" ) in_like = File(exists=True, argstr="--in_like %s", desc="input looks like") fill_parcellation = traits.Bool( argstr="--fill_parcellation", desc="fill parcellation" ) smooth_parcellation = traits.Bool( argstr="--smooth_parcellation", desc="smooth parcellation" ) zero_outlines = traits.Bool(argstr="--zero_outlines", desc="zero outlines") color_file = File(exists=True, argstr="--color_file %s", desc="color file") no_translate = traits.Bool(argstr="--no_translate", desc="???") status_file = File(argstr="--status %s", desc="status file for DICOM conversion") sdcm_list = File( exists=True, argstr="--sdcmlist %s", desc="list of DICOM files for conversion" ) template_info = traits.Bool( argstr="--template_info", desc="dump info about template" ) crop_gdf = traits.Bool(argstr="--crop_gdf", desc="apply GDF cropping") zero_ge_z_offset = traits.Bool( argstr="--zero_ge_z_offset", desc="zero ge z offset ???" ) class MRIConvertOutputSpec(TraitedSpec): out_file = OutputMultiPath(File(exists=True), desc="converted output file") class MRIConvert(FSCommand): """use fs mri_convert to manipulate files .. 
note:: Adds niigz as an output type option Examples -------- >>> mc = MRIConvert() >>> mc.inputs.in_file = 'structural.nii' >>> mc.inputs.out_file = 'outfile.mgz' >>> mc.inputs.out_type = 'mgz' >>> mc.cmdline 'mri_convert --out_type mgz --input_volume structural.nii --output_volume outfile.mgz' """ _cmd = "mri_convert" input_spec = MRIConvertInputSpec output_spec = MRIConvertOutputSpec filemap = dict( cor="cor", mgh="mgh", mgz="mgz", minc="mnc", afni="brik", brik="brik", bshort="bshort", spm="img", analyze="img", analyze4d="img", bfloat="bfloat", nifti1="img", nii="nii", niigz="nii.gz", ) def _format_arg(self, name, spec, value): if name in ["in_type", "out_type", "template_type"]: if value == "niigz": return spec.argstr % "nii" return super(MRIConvert, self)._format_arg(name, spec, value) def _get_outfilename(self): outfile = self.inputs.out_file if not isdefined(outfile): if isdefined(self.inputs.out_type): suffix = "_out." + self.filemap[self.inputs.out_type] else: suffix = "_out.nii.gz" outfile = fname_presuffix( self.inputs.in_file, newpath=os.getcwd(), suffix=suffix, use_ext=False ) return os.path.abspath(outfile) def _list_outputs(self): outputs = self.output_spec().get() outfile = self._get_outfilename() if isdefined(self.inputs.split) and self.inputs.split: size = load(self.inputs.in_file).shape if len(size) == 3: tp = 1 else: tp = size[-1] if outfile.endswith(".mgz"): stem = outfile.split(".mgz")[0] ext = ".mgz" elif outfile.endswith(".nii.gz"): stem = outfile.split(".nii.gz")[0] ext = ".nii.gz" else: stem = ".".join(outfile.split(".")[:-1]) ext = "." 
+ outfile.split(".")[-1] outfile = [] for idx in range(0, tp): outfile.append(stem + "%04d" % idx + ext) if isdefined(self.inputs.out_type): if self.inputs.out_type in ["spm", "analyze"]: # generate all outputs size = load(self.inputs.in_file).shape if len(size) == 3: tp = 1 else: tp = size[-1] # have to take care of all the frame manipulations raise Exception( "Not taking frame manipulations into account- please warn the developers" ) outfiles = [] outfile = self._get_outfilename() for i in range(tp): outfiles.append(fname_presuffix(outfile, suffix="%03d" % (i + 1))) outfile = outfiles outputs["out_file"] = outfile return outputs def _gen_filename(self, name): if name == "out_file": return self._get_outfilename() return None class DICOMConvertInputSpec(FSTraitedSpec): dicom_dir = Directory( exists=True, mandatory=True, desc="dicom directory from which to convert dicom files", ) base_output_dir = Directory( mandatory=True, desc="directory in which subject directories are created" ) subject_dir_template = traits.Str( "S.%04d", usedefault=True, desc="template for subject directory name" ) subject_id = traits.Any(desc="subject identifier to insert into template") file_mapping = traits.List( traits.Tuple(traits.Str, traits.Str), desc="defines the output fields of interface", ) out_type = traits.Enum( "niigz", MRIConvertInputSpec._filetypes, usedefault=True, desc="defines the type of output file produced", ) dicom_info = File( exists=True, desc="File containing summary information from mri_parse_sdcmdir" ) seq_list = traits.List( traits.Str, requires=["dicom_info"], desc="list of pulse sequence names to be converted.", ) ignore_single_slice = traits.Bool( requires=["dicom_info"], desc="ignore volumes containing a single slice" ) class DICOMConvert(FSCommand): """use fs mri_convert to convert dicom files Examples -------- >>> from nipype.interfaces.freesurfer import DICOMConvert >>> cvt = DICOMConvert() >>> cvt.inputs.dicom_dir = 'dicomdir' >>> cvt.inputs.file_mapping = 
[('nifti', '*.nii'), ('info', 'dicom*.txt'), ('dti', '*dti.bv*')] """ _cmd = "mri_convert" input_spec = DICOMConvertInputSpec def _get_dicomfiles(self): """validate fsl bet options if set to None ignore """ return glob(os.path.abspath(os.path.join(self.inputs.dicom_dir, "*-1.dcm"))) def _get_outdir(self): """returns output directory""" subjid = self.inputs.subject_id if not isdefined(subjid): path, fname = os.path.split(self._get_dicomfiles()[0]) subjid = int(fname.split("-")[0]) if isdefined(self.inputs.subject_dir_template): subjid = self.inputs.subject_dir_template % subjid basedir = self.inputs.base_output_dir if not isdefined(basedir): basedir = os.path.abspath(".") outdir = os.path.abspath(os.path.join(basedir, subjid)) return outdir def _get_runs(self): """Returns list of dicom series that should be converted. Requires a dicom info summary file generated by ``DicomDirInfo`` """ seq = np.genfromtxt(self.inputs.dicom_info, dtype=object) runs = [] for s in seq: if self.inputs.seq_list: if self.inputs.ignore_single_slice: if (int(s[8]) > 1) and any( [s[12].startswith(sn) for sn in self.inputs.seq_list] ): runs.append(int(s[2])) else: if any([s[12].startswith(sn) for sn in self.inputs.seq_list]): runs.append(int(s[2])) else: runs.append(int(s[2])) return runs def _get_filelist(self, outdir): """Returns list of files to be converted""" filemap = {} for f in self._get_dicomfiles(): head, fname = os.path.split(f) fname, ext = os.path.splitext(fname) fileparts = fname.split("-") runno = int(fileparts[1]) out_type = MRIConvert.filemap[self.inputs.out_type] outfile = os.path.join( outdir, ".".join(("%s-%02d" % (fileparts[0], runno), out_type)) ) filemap[runno] = (f, outfile) if self.inputs.dicom_info: files = [filemap[r] for r in self._get_runs()] else: files = [filemap[r] for r in list(filemap.keys())] return files @property def cmdline(self): """`command` plus any arguments (args) validates arguments and generates command line""" self._check_mandatory_inputs() outdir 
= self._get_outdir() cmd = [] if not os.path.exists(outdir): cmdstr = "%s -c \"import os; os.makedirs('%s')\"" % ( op.basename(sys.executable), outdir, ) cmd.extend([cmdstr]) infofile = os.path.join(outdir, "shortinfo.txt") if not os.path.exists(infofile): cmdstr = "dcmdir-info-mgh %s > %s" % (self.inputs.dicom_dir, infofile) cmd.extend([cmdstr]) files = self._get_filelist(outdir) for infile, outfile in files: if not os.path.exists(outfile): single_cmd = "%s%s %s %s" % ( self._cmd_prefix, self.cmd, infile, os.path.join(outdir, outfile), ) cmd.extend([single_cmd]) return "; ".join(cmd) class ResampleInputSpec(FSTraitedSpec): in_file = File( exists=True, argstr="-i %s", mandatory=True, desc="file to resample", position=-2, ) resampled_file = File( argstr="-o %s", desc="output filename", genfile=True, position=-1 ) voxel_size = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="-vs %.2f %.2f %.2f", desc="triplet of output voxel sizes", mandatory=True, ) class ResampleOutputSpec(TraitedSpec): resampled_file = File(exists=True, desc="output filename") class Resample(FSCommand): """Use FreeSurfer mri_convert to up or down-sample image files Examples -------- >>> from nipype.interfaces import freesurfer >>> resampler = freesurfer.Resample() >>> resampler.inputs.in_file = 'structural.nii' >>> resampler.inputs.resampled_file = 'resampled.nii' >>> resampler.inputs.voxel_size = (2.1, 2.1, 2.1) >>> resampler.cmdline 'mri_convert -vs 2.10 2.10 2.10 -i structural.nii -o resampled.nii' """ _cmd = "mri_convert" input_spec = ResampleInputSpec output_spec = ResampleOutputSpec def _get_outfilename(self): if isdefined(self.inputs.resampled_file): outfile = self.inputs.resampled_file else: outfile = fname_presuffix( self.inputs.in_file, newpath=os.getcwd(), suffix="_resample" ) return outfile def _list_outputs(self): outputs = self.output_spec().get() outputs["resampled_file"] = self._get_outfilename() return outputs def _gen_filename(self, name): if name == 
"resampled_file": return self._get_outfilename() return None class ReconAllInputSpec(CommandLineInputSpec): subject_id = traits.Str( "recon_all", argstr="-subjid %s", desc="subject name", usedefault=True ) directive = traits.Enum( "all", "autorecon1", # autorecon2 variants "autorecon2", "autorecon2-volonly", "autorecon2-perhemi", "autorecon2-inflate1", "autorecon2-cp", "autorecon2-wm", # autorecon3 variants "autorecon3", "autorecon3-T2pial", # Mix of autorecon2 and autorecon3 steps "autorecon-pial", "autorecon-hemi", # Not "multi-stage flags" "localGI", "qcache", argstr="-%s", desc="process directive", usedefault=True, position=0, ) hemi = traits.Enum("lh", "rh", desc="hemisphere to process", argstr="-hemi %s") T1_files = InputMultiPath( File(exists=True), argstr="-i %s...", desc="name of T1 file to process" ) T2_file = File( exists=True, argstr="-T2 %s", min_ver="5.3.0", desc="Convert T2 image to orig directory", ) FLAIR_file = File( exists=True, argstr="-FLAIR %s", min_ver="5.3.0", desc="Convert FLAIR image to orig directory", ) use_T2 = traits.Bool( argstr="-T2pial", min_ver="5.3.0", xor=["use_FLAIR"], desc="Use T2 image to refine the pial surface", ) use_FLAIR = traits.Bool( argstr="-FLAIRpial", min_ver="5.3.0", xor=["use_T2"], desc="Use FLAIR image to refine the pial surface", ) openmp = traits.Int( argstr="-openmp %d", desc="Number of processors to use in parallel" ) parallel = traits.Bool(argstr="-parallel", desc="Enable parallel execution") hires = traits.Bool( argstr="-hires", min_ver="6.0.0", desc="Conform to minimum voxel size (for voxels < 1mm)", ) mprage = traits.Bool( argstr="-mprage", desc=( "Assume scan parameters are MGH MP-RAGE " "protocol, which produces darker gray matter" ), ) big_ventricles = traits.Bool( argstr="-bigventricles", desc=("For use in subjects with enlarged " "ventricles"), ) brainstem = traits.Bool( argstr="-brainstem-structures", desc="Segment brainstem structures" ) hippocampal_subfields_T1 = traits.Bool( 
argstr="-hippocampal-subfields-T1", min_ver="6.0.0", desc="segment hippocampal subfields using input T1 scan", ) hippocampal_subfields_T2 = traits.Tuple( File(exists=True), traits.Str(), argstr="-hippocampal-subfields-T2 %s %s", min_ver="6.0.0", desc=( "segment hippocampal subfields using T2 scan, identified by " "ID (may be combined with hippocampal_subfields_T1)" ), ) expert = File( exists=True, argstr="-expert %s", desc="Set parameters using expert file" ) xopts = traits.Enum( "use", "clean", "overwrite", argstr="-xopts-%s", desc="Use, delete or overwrite existing expert options file", ) subjects_dir = Directory( exists=True, argstr="-sd %s", hash_files=False, desc="path to subjects directory", genfile=True, ) flags = InputMultiPath(traits.Str, argstr="%s", desc="additional parameters") # Expert options talairach = traits.Str(desc="Flags to pass to talairach commands", xor=["expert"]) mri_normalize = traits.Str( desc="Flags to pass to mri_normalize commands", xor=["expert"] ) mri_watershed = traits.Str( desc="Flags to pass to mri_watershed commands", xor=["expert"] ) mri_em_register = traits.Str( desc="Flags to pass to mri_em_register commands", xor=["expert"] ) mri_ca_normalize = traits.Str( desc="Flags to pass to mri_ca_normalize commands", xor=["expert"] ) mri_ca_register = traits.Str( desc="Flags to pass to mri_ca_register commands", xor=["expert"] ) mri_remove_neck = traits.Str( desc="Flags to pass to mri_remove_neck commands", xor=["expert"] ) mri_ca_label = traits.Str( desc="Flags to pass to mri_ca_label commands", xor=["expert"] ) mri_segstats = traits.Str( desc="Flags to pass to mri_segstats commands", xor=["expert"] ) mri_mask = traits.Str(desc="Flags to pass to mri_mask commands", xor=["expert"]) mri_segment = traits.Str( desc="Flags to pass to mri_segment commands", xor=["expert"] ) mri_edit_wm_with_aseg = traits.Str( desc="Flags to pass to mri_edit_wm_with_aseg commands", xor=["expert"] ) mri_pretess = traits.Str( desc="Flags to pass to mri_pretess 
commands", xor=["expert"] ) mri_fill = traits.Str(desc="Flags to pass to mri_fill commands", xor=["expert"]) mri_tessellate = traits.Str( desc="Flags to pass to mri_tessellate commands", xor=["expert"] ) mris_smooth = traits.Str( desc="Flags to pass to mri_smooth commands", xor=["expert"] ) mris_inflate = traits.Str( desc="Flags to pass to mri_inflate commands", xor=["expert"] ) mris_sphere = traits.Str( desc="Flags to pass to mris_sphere commands", xor=["expert"] ) mris_fix_topology = traits.Str( desc="Flags to pass to mris_fix_topology commands", xor=["expert"] ) mris_make_surfaces = traits.Str( desc="Flags to pass to mris_make_surfaces commands", xor=["expert"] ) mris_surf2vol = traits.Str( desc="Flags to pass to mris_surf2vol commands", xor=["expert"] ) mris_register = traits.Str( desc="Flags to pass to mris_register commands", xor=["expert"] ) mrisp_paint = traits.Str( desc="Flags to pass to mrisp_paint commands", xor=["expert"] ) mris_ca_label = traits.Str( desc="Flags to pass to mris_ca_label commands", xor=["expert"] ) mris_anatomical_stats = traits.Str( desc="Flags to pass to mris_anatomical_stats commands", xor=["expert"] ) mri_aparc2aseg = traits.Str( desc="Flags to pass to mri_aparc2aseg commands", xor=["expert"] ) class ReconAllOutputSpec(FreeSurferSource.output_spec): subjects_dir = Directory(exists=True, desc="Freesurfer subjects directory.") subject_id = traits.Str(desc="Subject name for whom to retrieve data") class ReconAll(CommandLine): """Uses recon-all to generate surfaces and parcellations of structural data from anatomical images of a subject. Examples -------- >>> from nipype.interfaces.freesurfer import ReconAll >>> reconall = ReconAll() >>> reconall.inputs.subject_id = 'foo' >>> reconall.inputs.directive = 'all' >>> reconall.inputs.subjects_dir = '.' >>> reconall.inputs.T1_files = 'structural.nii' >>> reconall.cmdline 'recon-all -all -i structural.nii -subjid foo -sd .' 
>>> reconall.inputs.flags = "-qcache" >>> reconall.cmdline 'recon-all -all -i structural.nii -qcache -subjid foo -sd .' >>> reconall.inputs.flags = ["-cw256", "-qcache"] >>> reconall.cmdline 'recon-all -all -i structural.nii -cw256 -qcache -subjid foo -sd .' Hemisphere may be specified regardless of directive: >>> reconall.inputs.flags = [] >>> reconall.inputs.hemi = 'lh' >>> reconall.cmdline 'recon-all -all -i structural.nii -hemi lh -subjid foo -sd .' ``-autorecon-hemi`` uses the ``-hemi`` input to specify the hemisphere to operate upon: >>> reconall.inputs.directive = 'autorecon-hemi' >>> reconall.cmdline 'recon-all -autorecon-hemi lh -i structural.nii -subjid foo -sd .' Hippocampal subfields can accept T1 and T2 images: >>> reconall_subfields = ReconAll() >>> reconall_subfields.inputs.subject_id = 'foo' >>> reconall_subfields.inputs.directive = 'all' >>> reconall_subfields.inputs.subjects_dir = '.' >>> reconall_subfields.inputs.T1_files = 'structural.nii' >>> reconall_subfields.inputs.hippocampal_subfields_T1 = True >>> reconall_subfields.cmdline 'recon-all -all -i structural.nii -hippocampal-subfields-T1 -subjid foo -sd .' >>> reconall_subfields.inputs.hippocampal_subfields_T2 = ( ... 'structural.nii', 'test') >>> reconall_subfields.cmdline 'recon-all -all -i structural.nii -hippocampal-subfields-T1T2 structural.nii test -subjid foo -sd .' >>> reconall_subfields.inputs.hippocampal_subfields_T1 = False >>> reconall_subfields.cmdline 'recon-all -all -i structural.nii -hippocampal-subfields-T2 structural.nii test -subjid foo -sd .' """ _cmd = "recon-all" _additional_metadata = ["loc", "altkey"] input_spec = ReconAllInputSpec output_spec = ReconAllOutputSpec _can_resume = True force_run = False # Steps are based off of the recon-all tables [0,1] describing, inputs, # commands, and outputs of each step of the recon-all process, # controlled by flags. 
# # Each step is a 3-tuple containing (flag, [outputs], [inputs]) # A step is considered complete if all of its outputs exist and are newer # than the inputs. An empty input list indicates input mtimes will not # be checked. This may need updating, if users are working with manually # edited files. # # [0] https://surfer.nmr.mgh.harvard.edu/fswiki/ReconAllTableStableV5.3 # [1] https://surfer.nmr.mgh.harvard.edu/fswiki/ReconAllTableStableV6.0 # [2] https://surfer.nmr.mgh.harvard.edu/fswiki/ReconAllTableStableV6.0#ReconAllTableStable7.1.1 _autorecon1_steps = [ ("motioncor", ["mri/rawavg.mgz", "mri/orig.mgz"], []), ( "talairach", [ "mri/orig_nu.mgz", "mri/transforms/talairach.auto.xfm", "mri/transforms/talairach.xfm", # 'mri/transforms/talairach_avi.log', ], [], ), ("nuintensitycor", ["mri/nu.mgz"], []), ("normalization", ["mri/T1.mgz"], []), ( "skullstrip", [ "mri/transforms/talairach_with_skull.lta", "mri/brainmask.auto.mgz", "mri/brainmask.mgz", ], [], ), ] if Info.looseversion() < LooseVersion("6.0.0"): _autorecon2_volonly_steps = [ ("gcareg", ["mri/transforms/talairach.lta"], []), ("canorm", ["mri/norm.mgz"], []), ("careg", ["mri/transforms/talairach.m3z"], []), ( "careginv", [ "mri/transforms/talairach.m3z.inv.x.mgz", "mri/transforms/talairach.m3z.inv.y.mgz", "mri/transforms/talairach.m3z.inv.z.mgz", ], [], ), ("rmneck", ["mri/nu_noneck.mgz"], []), ("skull-lta", ["mri/transforms/talairach_with_skull_2.lta"], []), ( "calabel", ["mri/aseg.auto_noCCseg.mgz", "mri/aseg.auto.mgz", "mri/aseg.mgz"], [], ), ("normalization2", ["mri/brain.mgz"], []), ("maskbfs", ["mri/brain.finalsurfs.mgz"], []), ( "segmentation", ["mri/wm.seg.mgz", "mri/wm.asegedit.mgz", "mri/wm.mgz"], [], ), ( "fill", [ "mri/filled.mgz", # 'scripts/ponscc.cut.log', ], [], ), ] _autorecon2_lh_steps = [ ("tessellate", ["surf/lh.orig.nofix"], []), ("smooth1", ["surf/lh.smoothwm.nofix"], []), ("inflate1", ["surf/lh.inflated.nofix"], []), ("qsphere", ["surf/lh.qsphere.nofix"], []), ("fix", ["surf/lh.orig"], 
[]), ( "white", [ "surf/lh.white", "surf/lh.curv", "surf/lh.area", "label/lh.cortex.label", ], [], ), ("smooth2", ["surf/lh.smoothwm"], []), ( "inflate2", [ "surf/lh.inflated", "surf/lh.sulc", "surf/lh.inflated.H", "surf/lh.inflated.K", ], [], ), # Undocumented in ReconAllTableStableV5.3 ("curvstats", ["stats/lh.curv.stats"], []), ] _autorecon3_lh_steps = [ ("sphere", ["surf/lh.sphere"], []), ("surfreg", ["surf/lh.sphere.reg"], []), ("jacobian_white", ["surf/lh.jacobian_white"], []), ("avgcurv", ["surf/lh.avg_curv"], []), ("cortparc", ["label/lh.aparc.annot"], []), ( "pial", [ "surf/lh.pial", "surf/lh.curv.pial", "surf/lh.area.pial", "surf/lh.thickness", ], [], ), # Misnamed outputs in ReconAllTableStableV5.3: ?h.w-c.pct.mgz ("pctsurfcon", ["surf/lh.w-g.pct.mgh"], []), ("parcstats", ["stats/lh.aparc.stats"], []), ("cortparc2", ["label/lh.aparc.a2009s.annot"], []), ("parcstats2", ["stats/lh.aparc.a2009s.stats"], []), # Undocumented in ReconAllTableStableV5.3 ("cortparc3", ["label/lh.aparc.DKTatlas40.annot"], []), # Undocumented in ReconAllTableStableV5.3 ("parcstats3", ["stats/lh.aparc.a2009s.stats"], []), ("label-exvivo-ec", ["label/lh.entorhinal_exvivo.label"], []), ] _autorecon3_added_steps = [ ( "cortribbon", ["mri/lh.ribbon.mgz", "mri/rh.ribbon.mgz", "mri/ribbon.mgz"], [], ), ("segstats", ["stats/aseg.stats"], []), ("aparc2aseg", ["mri/aparc+aseg.mgz", "mri/aparc.a2009s+aseg.mgz"], []), ("wmparc", ["mri/wmparc.mgz", "stats/wmparc.stats"], []), ("balabels", ["label/BA.ctab", "label/BA.thresh.ctab"], []), ] elif Info.looseversion() < LooseVersion("7.0.0"): _autorecon2_volonly_steps = [ ("gcareg", ["mri/transforms/talairach.lta"], []), ("canorm", ["mri/norm.mgz"], []), ("careg", ["mri/transforms/talairach.m3z"], []), ( "calabel", ["mri/aseg.auto_noCCseg.mgz", "mri/aseg.auto.mgz", "mri/aseg.mgz"], [], ), ("normalization2", ["mri/brain.mgz"], []), ("maskbfs", ["mri/brain.finalsurfs.mgz"], []), ( "segmentation", ["mri/wm.seg.mgz", "mri/wm.asegedit.mgz", 
"mri/wm.mgz"], [], ), ( "fill", [ "mri/filled.mgz", # 'scripts/ponscc.cut.log', ], [], ), ] _autorecon2_lh_steps = [ ("tessellate", ["surf/lh.orig.nofix"], []), ("smooth1", ["surf/lh.smoothwm.nofix"], []), ("inflate1", ["surf/lh.inflated.nofix"], []), ("qsphere", ["surf/lh.qsphere.nofix"], []), ("fix", ["surf/lh.orig"], []), ( "white", [ "surf/lh.white.preaparc", "surf/lh.curv", "surf/lh.area", "label/lh.cortex.label", ], [], ), ("smooth2", ["surf/lh.smoothwm"], []), ("inflate2", ["surf/lh.inflated", "surf/lh.sulc"], []), ( "curvHK", [ "surf/lh.white.H", "surf/lh.white.K", "surf/lh.inflated.H", "surf/lh.inflated.K", ], [], ), ("curvstats", ["stats/lh.curv.stats"], []), ] _autorecon3_lh_steps = [ ("sphere", ["surf/lh.sphere"], []), ("surfreg", ["surf/lh.sphere.reg"], []), ("jacobian_white", ["surf/lh.jacobian_white"], []), ("avgcurv", ["surf/lh.avg_curv"], []), ("cortparc", ["label/lh.aparc.annot"], []), ( "pial", [ "surf/lh.pial", "surf/lh.curv.pial", "surf/lh.area.pial", "surf/lh.thickness", "surf/lh.white", ], [], ), ("parcstats", ["stats/lh.aparc.stats"], []), ("cortparc2", ["label/lh.aparc.a2009s.annot"], []), ("parcstats2", ["stats/lh.aparc.a2009s.stats"], []), ("cortparc3", ["label/lh.aparc.DKTatlas.annot"], []), ("parcstats3", ["stats/lh.aparc.DKTatlas.stats"], []), ("pctsurfcon", ["surf/lh.w-g.pct.mgh"], []), ] _autorecon3_added_steps = [ ( "cortribbon", ["mri/lh.ribbon.mgz", "mri/rh.ribbon.mgz", "mri/ribbon.mgz"], [], ), ("hyporelabel", ["mri/aseg.presurf.hypos.mgz"], []), ( "aparc2aseg", [ "mri/aparc+aseg.mgz", "mri/aparc.a2009s+aseg.mgz", "mri/aparc.DKTatlas+aseg.mgz", ], [], ), ("apas2aseg", ["mri/aseg.mgz"], ["mri/aparc+aseg.mgz"]), ("segstats", ["stats/aseg.stats"], []), ("wmparc", ["mri/wmparc.mgz", "stats/wmparc.stats"], []), # Note that this is a very incomplete list; however the ctab # files are last to be touched, so this should be reasonable ( "balabels", [ "label/BA_exvivo.ctab", "label/BA_exvivo.thresh.ctab", 
"label/lh.entorhinal_exvivo.label", "label/rh.entorhinal_exvivo.label", ], [], ), ] else: _autorecon2_volonly_steps = [ ("gcareg", ["mri/transforms/talairach.lta"], []), ("canorm", ["mri/norm.mgz"], []), ("careg", ["mri/transforms/talairach.m3z"], []), ( "calabel", [ "mri/aseg.auto_noCCseg.mgz", "mri/aseg.auto.mgz", "mri/aseg.presurf.mgz", ], [], ), ("normalization2", ["mri/brain.mgz"], []), ("maskbfs", ["mri/brain.finalsurfs.mgz"], []), ( "segmentation", ["mri/wm.seg.mgz", "mri/wm.asegedit.mgz", "mri/wm.mgz"], [], ), ( "fill", [ "mri/filled.mgz", # 'scripts/ponscc.cut.log', ], [], ), ] _autorecon2_lh_steps = [ ("tessellate", ["surf/lh.orig.nofix"], []), ("smooth1", ["surf/lh.smoothwm.nofix"], []), ("inflate1", ["surf/lh.inflated.nofix"], []), ("qsphere", ["surf/lh.qsphere.nofix"], []), ("fix", ["surf/lh.inflated", "surf/lh.orig"], []), ( "white", [ "surf/lh.white.preaparc", "surf/lh.curv", "surf/lh.area", "label/lh.cortex.label", ], [], ), ("smooth2", ["surf/lh.smoothwm"], []), ("inflate2", ["surf/lh.inflated", "surf/lh.sulc"], []), ( "curvHK", [ "surf/lh.white.H", "surf/lh.white.K", "surf/lh.inflated.H", "surf/lh.inflated.K", ], [], ), ("curvstats", ["stats/lh.curv.stats"], []), ] _autorecon3_lh_steps = [ ("sphere", ["surf/lh.sphere"], []), ("surfreg", ["surf/lh.sphere.reg"], []), ("jacobian_white", ["surf/lh.jacobian_white"], []), ("avgcurv", ["surf/lh.avg_curv"], []), ("cortparc", ["label/lh.aparc.annot"], []), ( "pial", [ "surf/lh.pial", "surf/lh.curv.pial", "surf/lh.area.pial", "surf/lh.thickness", "surf/lh.white", ], [], ), ("parcstats", ["stats/lh.aparc.stats"], []), ("cortparc2", ["label/lh.aparc.a2009s.annot"], []), ("parcstats2", ["stats/lh.aparc.a2009s.stats"], []), ("cortparc3", ["label/lh.aparc.DKTatlas.annot"], []), ("parcstats3", ["stats/lh.aparc.DKTatlas.stats"], []), ("pctsurfcon", ["surf/lh.w-g.pct.mgh", "stats/lh.w-g.pct.stats"], []), ] _autorecon3_added_steps = [ ( "cortribbon", ["mri/lh.ribbon.mgz", "mri/rh.ribbon.mgz", "mri/ribbon.mgz"], [], 
), ("hyporelabel", ["mri/aseg.presurf.hypos.mgz"], []), ( "aparc2aseg", [ "mri/aparc+aseg.mgz", "mri/aparc.a2009s+aseg.mgz", "mri/aparc.DKTatlas+aseg.mgz", ], [], ), ("apas2aseg", ["mri/aseg.mgz"], ["mri/aparc+aseg.mgz"]), ("segstats", ["stats/aseg.stats"], []), ("wmparc", ["mri/wmparc.mgz", "stats/wmparc.stats"], []), # Note that this is a very incomplete list; however the ctab # files are last to be touched, so this should be reasonable ( "balabels", [ "label/BA_exvivo.ctab", "label/BA_exvivo.thresh.ctab", "label/lh.entorhinal_exvivo.label", "label/rh.entorhinal_exvivo.label", "label/lh.perirhinal_exvivo.label", "label/rh.perirhinal_exvivo.label", ], [], ), ] # Fill out autorecon2 steps _autorecon2_rh_steps = [ (step, [out.replace("lh", "rh") for out in outs], ins) for step, outs, ins in _autorecon2_lh_steps ] _autorecon2_perhemi_steps = [ (step, [of for out in outs for of in (out, out.replace("lh", "rh"))], ins) for step, outs, ins in _autorecon2_lh_steps ] _autorecon2_steps = _autorecon2_volonly_steps + _autorecon2_perhemi_steps # Fill out autorecon3 steps _autorecon3_rh_steps = [ (step, [out.replace("lh", "rh") for out in outs], ins) for step, outs, ins in _autorecon3_lh_steps ] _autorecon3_perhemi_steps = [ (step, [of for out in outs for of in (out, out.replace("lh", "rh"))], ins) for step, outs, ins in _autorecon3_lh_steps ] _autorecon3_steps = _autorecon3_perhemi_steps + _autorecon3_added_steps # Fill out autorecon-hemi lh/rh steps _autorecon_lh_steps = _autorecon2_lh_steps + _autorecon3_lh_steps _autorecon_rh_steps = _autorecon2_rh_steps + _autorecon3_rh_steps _steps = _autorecon1_steps + _autorecon2_steps + _autorecon3_steps _binaries = [ "talairach", "mri_normalize", "mri_watershed", "mri_em_register", "mri_ca_normalize", "mri_ca_register", "mri_remove_neck", "mri_ca_label", "mri_segstats", "mri_mask", "mri_segment", "mri_edit_wm_with_aseg", "mri_pretess", "mri_fill", "mri_tessellate", "mris_smooth", "mris_inflate", "mris_sphere", "mris_fix_topology", 
"mris_make_surfaces", "mris_surf2vol", "mris_register", "mrisp_paint", "mris_ca_label", "mris_anatomical_stats", "mri_aparc2aseg", ] def _gen_subjects_dir(self): return os.getcwd() def _gen_filename(self, name): if name == "subjects_dir": return self._gen_subjects_dir() return None def _list_outputs(self): """ See io.FreeSurferSource.outputs for the list of outputs returned """ if isdefined(self.inputs.subjects_dir): subjects_dir = self.inputs.subjects_dir else: subjects_dir = self._gen_subjects_dir() if isdefined(self.inputs.hemi): hemi = self.inputs.hemi else: hemi = "both" outputs = self._outputs().get() outputs.update( FreeSurferSource( subject_id=self.inputs.subject_id, subjects_dir=subjects_dir, hemi=hemi )._list_outputs() ) outputs["subject_id"] = self.inputs.subject_id outputs["subjects_dir"] = subjects_dir return outputs def _is_resuming(self): subjects_dir = self.inputs.subjects_dir if not isdefined(subjects_dir): subjects_dir = self._gen_subjects_dir() if os.path.isdir(os.path.join(subjects_dir, self.inputs.subject_id, "mri")): return True return False def _format_arg(self, name, trait_spec, value): if name == "T1_files": if self._is_resuming(): return None if name == "hippocampal_subfields_T1" and isdefined( self.inputs.hippocampal_subfields_T2 ): return None if all( ( name == "hippocampal_subfields_T2", isdefined(self.inputs.hippocampal_subfields_T1) and self.inputs.hippocampal_subfields_T1, ) ): argstr = trait_spec.argstr.replace("T2", "T1T2") return argstr % value if name == "directive" and value == "autorecon-hemi": if not isdefined(self.inputs.hemi): raise ValueError( "Directive 'autorecon-hemi' requires hemi " "input to be set" ) value += " " + self.inputs.hemi if all( ( name == "hemi", isdefined(self.inputs.directive) and self.inputs.directive == "autorecon-hemi", ) ): return None return super(ReconAll, self)._format_arg(name, trait_spec, value) @property def cmdline(self): cmd = super(ReconAll, self).cmdline # Adds '-expert' flag if expert flags 
are passed # Mutually exclusive with 'expert' input parameter cmd += self._prep_expert_file() if not self._is_resuming(): return cmd subjects_dir = self.inputs.subjects_dir if not isdefined(subjects_dir): subjects_dir = self._gen_subjects_dir() # Check only relevant steps directive = self.inputs.directive if not isdefined(directive): steps = [] elif directive == "autorecon1": steps = self._autorecon1_steps elif directive == "autorecon2-volonly": steps = self._autorecon2_volonly_steps elif directive == "autorecon2-perhemi": steps = self._autorecon2_perhemi_steps elif directive.startswith("autorecon2"): if isdefined(self.inputs.hemi): if self.inputs.hemi == "lh": steps = self._autorecon2_volonly_steps + self._autorecon2_lh_steps else: steps = self._autorecon2_volonly_steps + self._autorecon2_rh_steps else: steps = self._autorecon2_steps elif directive == "autorecon-hemi": if self.inputs.hemi == "lh": steps = self._autorecon_lh_steps else: steps = self._autorecon_rh_steps elif directive == "autorecon3": steps = self._autorecon3_steps else: steps = self._steps no_run = True flags = [] for step, outfiles, infiles in steps: flag = "-{}".format(step) noflag = "-no{}".format(step) if noflag in cmd: continue elif flag in cmd: no_run = False continue subj_dir = os.path.join(subjects_dir, self.inputs.subject_id) if check_depends( [os.path.join(subj_dir, f) for f in outfiles], [os.path.join(subj_dir, f) for f in infiles], ): flags.append(noflag) else: no_run = False if no_run and not self.force_run: iflogger.info("recon-all complete : Not running") return "echo recon-all: nothing to do" cmd += " " + " ".join(flags) iflogger.info("resume recon-all : %s", cmd) return cmd def _prep_expert_file(self): if isdefined(self.inputs.expert): return "" lines = [] for binary in self._binaries: args = getattr(self.inputs, binary) if isdefined(args): lines.append("{} {}\n".format(binary, args)) if lines == []: return "" contents = "".join(lines) if not isdefined(self.inputs.xopts) and 
self._get_expert_file() == contents: return " -xopts-use" expert_fname = os.path.abspath("expert.opts") with open(expert_fname, "w") as fobj: fobj.write(contents) return " -expert {}".format(expert_fname) def _get_expert_file(self): # Read pre-existing options file, if it exists if isdefined(self.inputs.subjects_dir): subjects_dir = self.inputs.subjects_dir else: subjects_dir = self._gen_subjects_dir() xopts_file = os.path.join( subjects_dir, self.inputs.subject_id, "scripts", "expert-options" ) if not os.path.exists(xopts_file): return "" with open(xopts_file, "r") as fobj: return fobj.read() @property def version(self): ver = Info.looseversion() if ver > LooseVersion("0.0.0"): return ver.vstring class BBRegisterInputSpec(FSTraitedSpec): subject_id = traits.Str( argstr="--s %s", desc="freesurfer subject id", mandatory=True ) source_file = File( argstr="--mov %s", desc="source file to be registered", mandatory=True, copyfile=False, ) init = traits.Enum( "spm", "fsl", "header", argstr="--init-%s", mandatory=True, xor=["init_reg_file"], desc="initialize registration spm, fsl, header", ) init_reg_file = File( exists=True, argstr="--init-reg %s", desc="existing registration file", xor=["init"], mandatory=True, ) contrast_type = traits.Enum( "t1", "t2", "bold", "dti", argstr="--%s", desc="contrast type of image", mandatory=True, ) intermediate_file = File( exists=True, argstr="--int %s", desc="Intermediate image, e.g. 
in case of partial FOV", ) reg_frame = traits.Int( argstr="--frame %d", xor=["reg_middle_frame"], desc="0-based frame index for 4D source file", ) reg_middle_frame = traits.Bool( argstr="--mid-frame", xor=["reg_frame"], desc="Register middle frame of 4D source file", ) out_reg_file = File( argstr="--reg %s", desc="output registration file", genfile=True ) spm_nifti = traits.Bool( argstr="--spm-nii", desc="force use of nifti rather than analyze with SPM" ) epi_mask = traits.Bool( argstr="--epi-mask", desc="mask out B0 regions in stages 1 and 2" ) dof = traits.Enum( 6, 9, 12, argstr="--%d", desc="number of transform degrees of freedom" ) fsldof = traits.Int( argstr="--fsl-dof %d", desc="degrees of freedom for initial registration (FSL)" ) out_fsl_file = traits.Either( traits.Bool, File, argstr="--fslmat %s", desc="write the transformation matrix in FSL FLIRT format", ) out_lta_file = traits.Either( traits.Bool, File, argstr="--lta %s", min_ver="5.2.0", desc="write the transformation matrix in LTA format", ) registered_file = traits.Either( traits.Bool, File, argstr="--o %s", desc="output warped sourcefile either True or filename", ) init_cost_file = traits.Either( traits.Bool, File, argstr="--initcost %s", desc="output initial registration cost file", ) class BBRegisterInputSpec6(BBRegisterInputSpec): init = traits.Enum( "coreg", "rr", "spm", "fsl", "header", "best", argstr="--init-%s", xor=["init_reg_file"], desc="initialize registration with mri_coreg, spm, fsl, or header", ) init_reg_file = File( exists=True, argstr="--init-reg %s", desc="existing registration file", xor=["init"], ) class BBRegisterOutputSpec(TraitedSpec): out_reg_file = File(exists=True, desc="Output registration file") out_fsl_file = File(exists=True, desc="Output FLIRT-style registration file") out_lta_file = File(exists=True, desc="Output LTA-style registration file") min_cost_file = File(exists=True, desc="Output registration minimum cost file") init_cost_file = File(exists=True, desc="Output 
initial registration cost file") registered_file = File(exists=True, desc="Registered and resampled source file") class BBRegister(FSCommand): """Use FreeSurfer bbregister to register a volume to the Freesurfer anatomical. This program performs within-subject, cross-modal registration using a boundary-based cost function. It is required that you have an anatomical scan of the subject that has already been recon-all-ed using freesurfer. Examples -------- >>> from nipype.interfaces.freesurfer import BBRegister >>> bbreg = BBRegister(subject_id='me', source_file='structural.nii', init='header', contrast_type='t2') >>> bbreg.cmdline 'bbregister --t2 --init-header --reg structural_bbreg_me.dat --mov structural.nii --s me' """ _cmd = "bbregister" if LooseVersion("0.0.0") < Info.looseversion() < LooseVersion("6.0.0"): input_spec = BBRegisterInputSpec else: input_spec = BBRegisterInputSpec6 output_spec = BBRegisterOutputSpec def _list_outputs(self): outputs = self.output_spec().get() _in = self.inputs if isdefined(_in.out_reg_file): outputs["out_reg_file"] = op.abspath(_in.out_reg_file) elif _in.source_file: suffix = "_bbreg_%s.dat" % _in.subject_id outputs["out_reg_file"] = fname_presuffix( _in.source_file, suffix=suffix, use_ext=False ) if isdefined(_in.registered_file): if isinstance(_in.registered_file, bool): outputs["registered_file"] = fname_presuffix( _in.source_file, suffix="_bbreg" ) else: outputs["registered_file"] = op.abspath(_in.registered_file) if isdefined(_in.out_lta_file): if isinstance(_in.out_lta_file, bool): suffix = "_bbreg_%s.lta" % _in.subject_id out_lta_file = fname_presuffix( _in.source_file, suffix=suffix, use_ext=False ) outputs["out_lta_file"] = out_lta_file else: outputs["out_lta_file"] = op.abspath(_in.out_lta_file) if isdefined(_in.out_fsl_file): if isinstance(_in.out_fsl_file, bool): suffix = "_bbreg_%s.mat" % _in.subject_id out_fsl_file = fname_presuffix( _in.source_file, suffix=suffix, use_ext=False ) outputs["out_fsl_file"] = 
out_fsl_file else: outputs["out_fsl_file"] = op.abspath(_in.out_fsl_file) if isdefined(_in.init_cost_file): if isinstance(_in.out_fsl_file, bool): outputs["init_cost_file"] = outputs["out_reg_file"] + ".initcost" else: outputs["init_cost_file"] = op.abspath(_in.init_cost_file) outputs["min_cost_file"] = outputs["out_reg_file"] + ".mincost" return outputs def _format_arg(self, name, spec, value): if ( name in ( "registered_file", "out_fsl_file", "out_lta_file", "init_cost_file", ) and isinstance(value, bool) ): value = self._list_outputs()[name] return super(BBRegister, self)._format_arg(name, spec, value) def _gen_filename(self, name): if name == "out_reg_file": return self._list_outputs()[name] return None class ApplyVolTransformInputSpec(FSTraitedSpec): source_file = File( exists=True, argstr="--mov %s", copyfile=False, mandatory=True, desc="Input volume you wish to transform", ) transformed_file = File(desc="Output volume", argstr="--o %s", genfile=True) _targ_xor = ("target_file", "tal", "fs_target") target_file = File( exists=True, argstr="--targ %s", xor=_targ_xor, desc="Output template volume", mandatory=True, ) tal = traits.Bool( argstr="--tal", xor=_targ_xor, mandatory=True, desc="map to a sub FOV of MNI305 (with --reg only)", ) tal_resolution = traits.Float( argstr="--talres %.10f", desc="Resolution to sample when using tal" ) fs_target = traits.Bool( argstr="--fstarg", xor=_targ_xor, mandatory=True, requires=["reg_file"], desc="use orig.mgz from subject in regfile as target", ) _reg_xor = ( "reg_file", "lta_file", "lta_inv_file", "fsl_reg_file", "xfm_reg_file", "reg_header", "mni_152_reg", "subject", ) reg_file = File( exists=True, xor=_reg_xor, argstr="--reg %s", mandatory=True, desc="tkRAS-to-tkRAS matrix (tkregister2 format)", ) lta_file = File( exists=True, xor=_reg_xor, argstr="--lta %s", mandatory=True, desc="Linear Transform Array file", ) lta_inv_file = File( exists=True, xor=_reg_xor, argstr="--lta-inv %s", mandatory=True, desc="LTA, invert", ) 
reg_file = File( exists=True, xor=_reg_xor, argstr="--reg %s", mandatory=True, desc="tkRAS-to-tkRAS matrix (tkregister2 format)", ) fsl_reg_file = File( exists=True, xor=_reg_xor, argstr="--fsl %s", mandatory=True, desc="fslRAS-to-fslRAS matrix (FSL format)", ) xfm_reg_file = File( exists=True, xor=_reg_xor, argstr="--xfm %s", mandatory=True, desc="ScannerRAS-to-ScannerRAS matrix (MNI format)", ) reg_header = traits.Bool( xor=_reg_xor, argstr="--regheader", mandatory=True, desc="ScannerRAS-to-ScannerRAS matrix = identity", ) mni_152_reg = traits.Bool( xor=_reg_xor, argstr="--regheader", mandatory=True, desc="target MNI152 space" ) subject = traits.Str( xor=_reg_xor, argstr="--s %s", mandatory=True, desc="set matrix = identity and use subject for any templates", ) inverse = traits.Bool(desc="sample from target to source", argstr="--inv") interp = traits.Enum( "trilin", "nearest", "cubic", argstr="--interp %s", desc="Interpolation method ( or nearest)", ) no_resample = traits.Bool( desc="Do not resample; just change vox2ras matrix", argstr="--no-resample" ) m3z_file = File( argstr="--m3z %s", desc=( "This is the morph to be applied to the volume. " "Unless the morph is in mri/transforms (eg.: for " "talairach.m3z computed by reconall), you will need " "to specify the full path to this morph and use the " "--noDefM3zPath flag." ), ) no_ded_m3z_path = traits.Bool( argstr="--noDefM3zPath", requires=["m3z_file"], desc=( "To be used with the m3z flag. " "Instructs the code not to look for the" "m3z morph in the default location " "(SUBJECTS_DIR/subj/mri/transforms), " "but instead just use the path " "indicated in --m3z." ), ) invert_morph = traits.Bool( argstr="--inv-morph", requires=["m3z_file"], desc=( "Compute and use the inverse of the " "non-linear morph to resample the input " "volume. To be used by --m3z." 
), ) class ApplyVolTransformOutputSpec(TraitedSpec): transformed_file = File(exists=True, desc="Path to output file if used normally") class ApplyVolTransform(FSCommand): """Use FreeSurfer mri_vol2vol to apply a transform. Examples -------- >>> from nipype.interfaces.freesurfer import ApplyVolTransform >>> applyreg = ApplyVolTransform() >>> applyreg.inputs.source_file = 'structural.nii' >>> applyreg.inputs.reg_file = 'register.dat' >>> applyreg.inputs.transformed_file = 'struct_warped.nii' >>> applyreg.inputs.fs_target = True >>> applyreg.cmdline 'mri_vol2vol --fstarg --reg register.dat --mov structural.nii --o struct_warped.nii' """ _cmd = "mri_vol2vol" input_spec = ApplyVolTransformInputSpec output_spec = ApplyVolTransformOutputSpec def _get_outfile(self): outfile = self.inputs.transformed_file if not isdefined(outfile): if self.inputs.inverse is True: if self.inputs.fs_target is True: src = "orig.mgz" else: src = self.inputs.target_file else: src = self.inputs.source_file outfile = fname_presuffix(src, newpath=os.getcwd(), suffix="_warped") return outfile def _list_outputs(self): outputs = self.output_spec().get() outputs["transformed_file"] = os.path.abspath(self._get_outfile()) return outputs def _gen_filename(self, name): if name == "transformed_file": return self._get_outfile() return None class SmoothInputSpec(FSTraitedSpec): in_file = File(exists=True, desc="source volume", argstr="--i %s", mandatory=True) reg_file = File( desc="registers volume to surface anatomical ", argstr="--reg %s", mandatory=True, exists=True, ) smoothed_file = File(desc="output volume", argstr="--o %s", genfile=True) proj_frac_avg = traits.Tuple( traits.Float, traits.Float, traits.Float, xor=["proj_frac"], desc="average a long normal min max delta", argstr="--projfrac-avg %.2f %.2f %.2f", ) proj_frac = traits.Float( desc="project frac of thickness a long surface normal", xor=["proj_frac_avg"], argstr="--projfrac %s", ) surface_fwhm = traits.Range( low=0.0, requires=["reg_file"], 
mandatory=True, xor=["num_iters"], desc="surface FWHM in mm", argstr="--fwhm %f", ) num_iters = traits.Range( low=1, xor=["surface_fwhm"], mandatory=True, argstr="--niters %d", desc="number of iterations instead of fwhm", ) vol_fwhm = traits.Range( low=0.0, argstr="--vol-fwhm %f", desc="volume smoothing outside of surface" ) class SmoothOutputSpec(TraitedSpec): smoothed_file = File(exists=True, desc="smoothed input volume") class Smooth(FSCommand): """Use FreeSurfer mris_volsmooth to smooth a volume This function smoothes cortical regions on a surface and non-cortical regions in volume. .. note:: Cortical voxels are mapped to the surface (3D->2D) and then the smoothed values from the surface are put back into the volume to fill the cortical ribbon. If data is smoothed with this algorithm, one has to be careful about how further processing is interpreted. Examples -------- >>> from nipype.interfaces.freesurfer import Smooth >>> smoothvol = Smooth(in_file='functional.nii', smoothed_file = 'foo_out.nii', reg_file='register.dat', surface_fwhm=10, vol_fwhm=6) >>> smoothvol.cmdline 'mris_volsmooth --i functional.nii --reg register.dat --o foo_out.nii --fwhm 10.000000 --vol-fwhm 6.000000' """ _cmd = "mris_volsmooth" input_spec = SmoothInputSpec output_spec = SmoothOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outfile = self.inputs.smoothed_file if not isdefined(outfile): outfile = self._gen_fname(self.inputs.in_file, suffix="_smooth") outputs["smoothed_file"] = outfile return outputs def _gen_filename(self, name): if name == "smoothed_file": return self._list_outputs()[name] return None class RobustRegisterInputSpec(FSTraitedSpec): source_file = File( exists=True, mandatory=True, argstr="--mov %s", desc="volume to be registered" ) target_file = File( exists=True, mandatory=True, argstr="--dst %s", desc="target volume for the registration", ) out_reg_file = traits.Either( True, File, default=True, usedefault=True, argstr="--lta %s", 
desc="registration file; either True or filename", ) registered_file = traits.Either( traits.Bool, File, argstr="--warp %s", desc="registered image; either True or filename", ) weights_file = traits.Either( traits.Bool, File, argstr="--weights %s", desc="weights image to write; either True or filename", ) est_int_scale = traits.Bool( argstr="--iscale", desc="estimate intensity scale (recommended for unnormalized images)", ) trans_only = traits.Bool( argstr="--transonly", desc="find 3 parameter translation only" ) in_xfm_file = File( exists=True, argstr="--transform", desc="use initial transform on source" ) half_source = traits.Either( traits.Bool, File, argstr="--halfmov %s", desc="write source volume mapped to halfway space", ) half_targ = traits.Either( traits.Bool, File, argstr="--halfdst %s", desc="write target volume mapped to halfway space", ) half_weights = traits.Either( traits.Bool, File, argstr="--halfweights %s", desc="write weights volume mapped to halfway space", ) half_source_xfm = traits.Either( traits.Bool, File, argstr="--halfmovlta %s", desc="write transform from source to halfway space", ) half_targ_xfm = traits.Either( traits.Bool, File, argstr="--halfdstlta %s", desc="write transform from target to halfway space", ) auto_sens = traits.Bool( argstr="--satit", xor=["outlier_sens"], mandatory=True, desc="auto-detect good sensitivity", ) outlier_sens = traits.Float( argstr="--sat %.4f", xor=["auto_sens"], mandatory=True, desc="set outlier sensitivity explicitly", ) least_squares = traits.Bool( argstr="--leastsquares", desc="use least squares instead of robust estimator" ) no_init = traits.Bool(argstr="--noinit", desc="skip transform init") init_orient = traits.Bool( argstr="--initorient", desc="use moments for initial orient (recommended for stripped brains)", ) max_iterations = traits.Int( argstr="--maxit %d", desc="maximum # of times on each resolution" ) high_iterations = traits.Int( argstr="--highit %d", desc="max # of times on highest 
resolution" ) iteration_thresh = traits.Float( argstr="--epsit %.3f", desc="stop iterations when below threshold" ) subsample_thresh = traits.Int( argstr="--subsample %d", desc="subsample if dimension is above threshold size" ) outlier_limit = traits.Float( argstr="--wlimit %.3f", desc="set maximal outlier limit in satit" ) write_vo2vox = traits.Bool( argstr="--vox2vox", desc="output vox2vox matrix (default is RAS2RAS)" ) no_multi = traits.Bool(argstr="--nomulti", desc="work on highest resolution") mask_source = File( exists=True, argstr="--maskmov %s", desc="image to mask source volume with" ) mask_target = File( exists=True, argstr="--maskdst %s", desc="image to mask target volume with" ) force_double = traits.Bool( argstr="--doubleprec", desc="use double-precision intensities" ) force_float = traits.Bool(argstr="--floattype", desc="use float intensities") class RobustRegisterOutputSpec(TraitedSpec): out_reg_file = File(exists=True, desc="output registration file") registered_file = File(exists=True, desc="output image with registration applied") weights_file = File(exists=True, desc="image of weights used") half_source = File(exists=True, desc="source image mapped to halfway space") half_targ = File(exists=True, desc="target image mapped to halfway space") half_weights = File(exists=True, desc="weights image mapped to halfway space") half_source_xfm = File( exists=True, desc="transform file to map source image to halfway space" ) half_targ_xfm = File( exists=True, desc="transform file to map target image to halfway space" ) class RobustRegister(FSCommand): """Perform intramodal linear registration (translation and rotation) using robust statistics. 
Examples -------- >>> from nipype.interfaces.freesurfer import RobustRegister >>> reg = RobustRegister() >>> reg.inputs.source_file = 'structural.nii' >>> reg.inputs.target_file = 'T1.nii' >>> reg.inputs.auto_sens = True >>> reg.inputs.init_orient = True >>> reg.cmdline # doctest: +ELLIPSIS 'mri_robust_register --satit --initorient --lta .../structural_robustreg.lta --mov structural.nii --dst T1.nii' References ---------- Reuter, M, Rosas, HD, and Fischl, B, (2010). Highly Accurate Inverse Consistent Registration: A Robust Approach. Neuroimage 53(4) 1181-96. """ _cmd = "mri_robust_register" input_spec = RobustRegisterInputSpec output_spec = RobustRegisterOutputSpec def _format_arg(self, name, spec, value): options = ( "out_reg_file", "registered_file", "weights_file", "half_source", "half_targ", "half_weights", "half_source_xfm", "half_targ_xfm", ) if name in options and isinstance(value, bool): value = self._list_outputs()[name] return super(RobustRegister, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() cwd = os.getcwd() prefices = dict(src=self.inputs.source_file, trg=self.inputs.target_file) suffices = dict( out_reg_file=("src", "_robustreg.lta", False), registered_file=("src", "_robustreg", True), weights_file=("src", "_robustweights", True), half_source=("src", "_halfway", True), half_targ=("trg", "_halfway", True), half_weights=("src", "_halfweights", True), half_source_xfm=("src", "_robustxfm.lta", False), half_targ_xfm=("trg", "_robustxfm.lta", False), ) for name, sufftup in list(suffices.items()): value = getattr(self.inputs, name) if value: if value is True: outputs[name] = fname_presuffix( prefices[sufftup[0]], suffix=sufftup[1], newpath=cwd, use_ext=sufftup[2], ) else: outputs[name] = os.path.abspath(value) return outputs class FitMSParamsInputSpec(FSTraitedSpec): in_files = traits.List( File(exists=True), argstr="%s", position=-2, mandatory=True, desc="list of FLASH images (must be in mgh format)", ) 
tr_list = traits.List(traits.Int, desc="list of TRs of the input files (in msec)") te_list = traits.List(traits.Float, desc="list of TEs of the input files (in msec)") flip_list = traits.List(traits.Int, desc="list of flip angles of the input files") xfm_list = traits.List( File(exists=True), desc="list of transform files to apply to each FLASH image" ) out_dir = Directory( argstr="%s", position=-1, genfile=True, desc="directory to store output in" ) class FitMSParamsOutputSpec(TraitedSpec): t1_image = File(exists=True, desc="image of estimated T1 relaxation values") pd_image = File(exists=True, desc="image of estimated proton density values") t2star_image = File(exists=True, desc="image of estimated T2* values") class FitMSParams(FSCommand): """Estimate tissue paramaters from a set of FLASH images. Examples -------- >>> from nipype.interfaces.freesurfer import FitMSParams >>> msfit = FitMSParams() >>> msfit.inputs.in_files = ['flash_05.mgz', 'flash_30.mgz'] >>> msfit.inputs.out_dir = 'flash_parameters' >>> msfit.cmdline 'mri_ms_fitparms flash_05.mgz flash_30.mgz flash_parameters' """ _cmd = "mri_ms_fitparms" input_spec = FitMSParamsInputSpec output_spec = FitMSParamsOutputSpec def _format_arg(self, name, spec, value): if name == "in_files": cmd = "" for i, file in enumerate(value): if isdefined(self.inputs.tr_list): cmd = " ".join((cmd, "-tr %.1f" % self.inputs.tr_list[i])) if isdefined(self.inputs.te_list): cmd = " ".join((cmd, "-te %.3f" % self.inputs.te_list[i])) if isdefined(self.inputs.flip_list): cmd = " ".join((cmd, "-fa %.1f" % self.inputs.flip_list[i])) if isdefined(self.inputs.xfm_list): cmd = " ".join((cmd, "-at %s" % self.inputs.xfm_list[i])) cmd = " ".join((cmd, file)) return cmd return super(FitMSParams, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() if not isdefined(self.inputs.out_dir): out_dir = self._gen_filename("out_dir") else: out_dir = self.inputs.out_dir outputs["t1_image"] = 
os.path.join(out_dir, "T1.mgz") outputs["pd_image"] = os.path.join(out_dir, "PD.mgz") outputs["t2star_image"] = os.path.join(out_dir, "T2star.mgz") return outputs def _gen_filename(self, name): if name == "out_dir": return os.getcwd() return None class SynthesizeFLASHInputSpec(FSTraitedSpec): fixed_weighting = traits.Bool( position=1, argstr="-w", desc="use a fixed weighting to generate optimal gray/white contrast", ) tr = traits.Float( mandatory=True, position=2, argstr="%.2f", desc="repetition time (in msec)" ) flip_angle = traits.Float( mandatory=True, position=3, argstr="%.2f", desc="flip angle (in degrees)" ) te = traits.Float( mandatory=True, position=4, argstr="%.3f", desc="echo time (in msec)" ) t1_image = File( exists=True, mandatory=True, position=5, argstr="%s", desc="image of T1 values" ) pd_image = File( exists=True, mandatory=True, position=6, argstr="%s", desc="image of proton density values", ) out_file = File(genfile=True, argstr="%s", desc="image to write") class SynthesizeFLASHOutputSpec(TraitedSpec): out_file = File(exists=True, desc="synthesized FLASH acquisition") class SynthesizeFLASH(FSCommand): """Synthesize a FLASH acquisition from T1 and proton density maps. 
Examples -------- >>> from nipype.interfaces.freesurfer import SynthesizeFLASH >>> syn = SynthesizeFLASH(tr=20, te=3, flip_angle=30) >>> syn.inputs.t1_image = 'T1.mgz' >>> syn.inputs.pd_image = 'PD.mgz' >>> syn.inputs.out_file = 'flash_30syn.mgz' >>> syn.cmdline 'mri_synthesize 20.00 30.00 3.000 T1.mgz PD.mgz flash_30syn.mgz' """ _cmd = "mri_synthesize" input_spec = SynthesizeFLASHInputSpec output_spec = SynthesizeFLASHOutputSpec def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_file): outputs["out_file"] = self.inputs.out_file else: outputs["out_file"] = self._gen_fname( "synth-flash_%02d.mgz" % self.inputs.flip_angle, suffix="" ) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()["out_file"] return None class MNIBiasCorrectionInputSpec(FSTraitedSpec): # mandatory in_file = File( exists=True, mandatory=True, argstr="--i %s", desc="input volume. Input can be any format accepted by mri_convert.", ) # optional out_file = File( argstr="--o %s", name_source=["in_file"], name_template="%s_output", hash_files=False, keep_extension=True, desc="output volume. Output can be any format accepted by mri_convert. " + "If the output format is COR, then the directory must exist.", ) iterations = traits.Int( 4, usedefault=True, argstr="--n %d", desc="Number of iterations to run nu_correct. Default is 4. This is the number of times " + "that nu_correct is repeated (ie, using the output from the previous run as the input for " + "the next). This is different than the -iterations option to nu_correct.", ) protocol_iterations = traits.Int( argstr="--proto-iters %d", desc="Passes Np as argument of the -iterations flag of nu_correct. This is different " + "than the --n flag above. 
Default is not to pass nu_correct the -iterations flag.", ) distance = traits.Int(argstr="--distance %d", desc="N3 -distance option") no_rescale = traits.Bool( argstr="--no-rescale", desc="do not rescale so that global mean of output == input global mean", ) mask = File( exists=True, argstr="--mask %s", desc="brainmask volume. Input can be any format accepted by mri_convert.", ) transform = File( exists=True, argstr="--uchar %s", desc="tal.xfm. Use mri_make_uchar instead of conforming", ) stop = traits.Float( argstr="--stop %f", desc="Convergence threshold below which iteration stops (suggest 0.01 to 0.0001)", ) shrink = traits.Int( argstr="--shrink %d", desc="Shrink parameter for finer sampling (default is 4)" ) class MNIBiasCorrectionOutputSpec(TraitedSpec): out_file = File(exists=True, desc="output volume") class MNIBiasCorrection(FSCommand): """Wrapper for nu_correct, a program from the Montreal Neurological Insitute (MNI) used for correcting intensity non-uniformity (ie, bias fields). You must have the MNI software installed on your system to run this. See [www.bic.mni.mcgill.ca/software/N3] for more info. mri_nu_correct.mni uses float internally instead of uchar. It also rescales the output so that the global mean is the same as that of the input. 
These two changes are linked and can be turned off with --no-float Examples -------- >>> from nipype.interfaces.freesurfer import MNIBiasCorrection >>> correct = MNIBiasCorrection() >>> correct.inputs.in_file = "norm.mgz" >>> correct.inputs.iterations = 6 >>> correct.inputs.protocol_iterations = 1000 >>> correct.inputs.distance = 50 >>> correct.cmdline 'mri_nu_correct.mni --distance 50 --i norm.mgz --n 6 --o norm_output.mgz --proto-iters 1000' References ---------- [http://freesurfer.net/fswiki/mri_nu_correct.mni] [http://www.bic.mni.mcgill.ca/software/N3] [https://github.com/BIC-MNI/N3] """ _cmd = "mri_nu_correct.mni" input_spec = MNIBiasCorrectionInputSpec output_spec = MNIBiasCorrectionOutputSpec class WatershedSkullStripInputSpec(FSTraitedSpec): # required in_file = File( argstr="%s", exists=True, mandatory=True, position=-2, desc="input volume" ) out_file = File( "brainmask.auto.mgz", argstr="%s", exists=False, mandatory=True, position=-1, usedefault=True, desc="output volume", ) # optional t1 = traits.Bool(argstr="-T1", desc="specify T1 input volume (T1 grey value = 110)") brain_atlas = File(argstr="-brain_atlas %s", exists=True, position=-4, desc="") transform = File(argstr="%s", exists=False, position=-3, desc="undocumented") class WatershedSkullStripOutputSpec(TraitedSpec): out_file = File(exists=False, desc="skull stripped brain volume") class WatershedSkullStrip(FSCommand): """This program strips skull and other outer non-brain tissue and produces the brain volume from T1 volume or the scanned volume. The "watershed" segmentation algorithm was used to dertermine the intensity values for white matter, grey matter, and CSF. A force field was then used to fit a spherical surface to the brain. The shape of the surface fit was then evaluated against a previously derived template. 
The default parameters are: -w 0.82 -b 0.32 -h 10 -seedpt -ta -wta (Segonne 2004) Examples ======== >>> from nipype.interfaces.freesurfer import WatershedSkullStrip >>> skullstrip = WatershedSkullStrip() >>> skullstrip.inputs.in_file = "T1.mgz" >>> skullstrip.inputs.t1 = True >>> skullstrip.inputs.transform = "transforms/talairach_with_skull.lta" >>> skullstrip.inputs.out_file = "brainmask.auto.mgz" >>> skullstrip.cmdline 'mri_watershed -T1 transforms/talairach_with_skull.lta T1.mgz brainmask.auto.mgz' """ _cmd = "mri_watershed" input_spec = WatershedSkullStripInputSpec output_spec = WatershedSkullStripOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class NormalizeInputSpec(FSTraitedSpec): # required in_file = File( argstr="%s", exists=True, mandatory=True, position=-2, desc="The input file for Normalize", ) out_file = File( argstr="%s", position=-1, name_source=["in_file"], name_template="%s_norm", hash_files=False, keep_extension=True, desc="The output file for Normalize", ) # optional gradient = traits.Int( argstr="-g %d", desc="use max intensity/mm gradient g (default=1)" ) mask = File( argstr="-mask %s", exists=True, desc="The input mask file for Normalize" ) segmentation = File( argstr="-aseg %s", exists=True, desc="The input segmentation for Normalize" ) transform = File( exists=True, desc="Tranform file from the header of the input file" ) class NormalizeOutputSpec(TraitedSpec): out_file = File(exists=False, desc="The output file for Normalize") class Normalize(FSCommand): """ Normalize the white-matter, optionally based on control points. The input volume is converted into a new volume where white matter image values all range around 110. 
Examples ======== >>> from nipype.interfaces import freesurfer >>> normalize = freesurfer.Normalize() >>> normalize.inputs.in_file = "T1.mgz" >>> normalize.inputs.gradient = 1 >>> normalize.cmdline 'mri_normalize -g 1 T1.mgz T1_norm.mgz' """ _cmd = "mri_normalize" input_spec = NormalizeInputSpec output_spec = NormalizeOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class CANormalizeInputSpec(FSTraitedSpec): in_file = File( argstr="%s", exists=True, mandatory=True, position=-4, desc="The input file for CANormalize", ) out_file = File( argstr="%s", position=-1, name_source=["in_file"], name_template="%s_norm", hash_files=False, keep_extension=True, desc="The output file for CANormalize", ) atlas = File( argstr="%s", exists=True, mandatory=True, position=-3, desc="The atlas file in gca format", ) transform = File( argstr="%s", exists=True, mandatory=True, position=-2, desc="The tranform file in lta format", ) # optional mask = File(argstr="-mask %s", exists=True, desc="Specifies volume to use as mask") control_points = File( argstr="-c %s", desc="File name for the output control points" ) long_file = File( argstr="-long %s", desc="undocumented flag used in longitudinal processing" ) class CANormalizeOutputSpec(TraitedSpec): out_file = File(exists=False, desc="The output file for Normalize") control_points = File(exists=False, desc="The output control points for Normalize") class CANormalize(FSCommand): """This program creates a normalized volume using the brain volume and an input gca file. See Also -------- For complete details, see the `FS Documentation `__. 
Examples -------- >>> from nipype.interfaces import freesurfer >>> ca_normalize = freesurfer.CANormalize() >>> ca_normalize.inputs.in_file = "T1.mgz" >>> ca_normalize.inputs.atlas = "atlas.nii.gz" # in practice use .gca atlases >>> ca_normalize.inputs.transform = "trans.mat" # in practice use .lta transforms >>> ca_normalize.cmdline 'mri_ca_normalize T1.mgz atlas.nii.gz trans.mat T1_norm.mgz' """ _cmd = "mri_ca_normalize" input_spec = CANormalizeInputSpec output_spec = CANormalizeOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) outputs["control_points"] = os.path.abspath(self.inputs.control_points) return outputs class CARegisterInputSpec(FSTraitedSpecOpenMP): # required in_file = File( argstr="%s", exists=True, mandatory=True, position=-3, desc="The input volume for CARegister", ) out_file = File( argstr="%s", position=-1, genfile=True, desc="The output volume for CARegister" ) template = File( argstr="%s", exists=True, position=-2, desc="The template file in gca format" ) # optional mask = File(argstr="-mask %s", exists=True, desc="Specifies volume to use as mask") invert_and_save = traits.Bool( argstr="-invert-and-save", position=-4, desc="Invert and save the .m3z multi-dimensional talaraich transform to x, y, and z .mgz files", ) no_big_ventricles = traits.Bool(argstr="-nobigventricles", desc="No big ventricles") transform = File( argstr="-T %s", exists=True, desc="Specifies transform in lta format" ) align = traits.String( argstr="-align-%s", desc="Specifies when to perform alignment" ) levels = traits.Int( argstr="-levels %d", desc="defines how many surrounding voxels will be used in interpolations, default is 6", ) A = traits.Int( argstr="-A %d", desc="undocumented flag used in longitudinal processing" ) l_files = InputMultiPath( File(exists=False), argstr="-l %s", desc="undocumented flag used in longitudinal processing", ) class CARegisterOutputSpec(TraitedSpec): out_file = 
File(exists=False, desc="The output file for CARegister") class CARegister(FSCommandOpenMP): """Generates a multi-dimensional talairach transform from a gca file and talairach.lta file See Also -------- For complete details, see the `FS Documentation `__ Examples -------- >>> from nipype.interfaces import freesurfer >>> ca_register = freesurfer.CARegister() >>> ca_register.inputs.in_file = "norm.mgz" >>> ca_register.inputs.out_file = "talairach.m3z" >>> ca_register.cmdline 'mri_ca_register norm.mgz talairach.m3z' """ _cmd = "mri_ca_register" input_spec = CARegisterInputSpec output_spec = CARegisterOutputSpec def _format_arg(self, name, spec, value): if name == "l_files" and len(value) == 1: value.append("identity.nofile") return super(CARegister, self)._format_arg(name, spec, value) def _gen_fname(self, name): if name == "out_file": return os.path.abspath("talairach.m3z") return None def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class CALabelInputSpec(FSTraitedSpecOpenMP): # required in_file = File( argstr="%s", position=-4, mandatory=True, exists=True, desc="Input volume for CALabel", ) out_file = File( argstr="%s", position=-1, mandatory=True, exists=False, desc="Output file for CALabel", ) transform = File( argstr="%s", position=-3, mandatory=True, exists=True, desc="Input transform for CALabel", ) template = File( argstr="%s", position=-2, mandatory=True, exists=True, desc="Input template for CALabel", ) # optional in_vol = File(argstr="-r %s", exists=True, desc="set input volume") intensities = File( argstr="-r %s", exists=True, desc="input label intensities file(used in longitudinal processing)", ) no_big_ventricles = traits.Bool(argstr="-nobigventricles", desc="No big ventricles") align = traits.Bool(argstr="-align", desc="Align CALabel") prior = traits.Float(argstr="-prior %.1f", desc="Prior for CALabel") relabel_unlikely = traits.Tuple( traits.Int, traits.Float, 
argstr="-relabel_unlikely %d %.1f", desc=( "Reclassify voxels at least some std" " devs from the mean using some size" " Gaussian window" ), ) label = File( argstr="-l %s", exists=True, desc="Undocumented flag. Autorecon3 uses ../label/{hemisphere}.cortex.label as input file", ) aseg = File( argstr="-aseg %s", exists=True, desc="Undocumented flag. Autorecon3 uses ../mri/aseg.presurf.mgz as input file", ) class CALabelOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output volume from CALabel") class CALabel(FSCommandOpenMP): """Label subcortical structures based in GCA model. See Also -------- For complete details, see the `FS Documentation `__ Examples -------- >>> from nipype.interfaces import freesurfer >>> ca_label = freesurfer.CALabel() >>> ca_label.inputs.in_file = "norm.mgz" >>> ca_label.inputs.out_file = "out.mgz" >>> ca_label.inputs.transform = "trans.mat" >>> ca_label.inputs.template = "Template_6.nii" # in practice use .gcs extension >>> ca_label.cmdline 'mri_ca_label norm.mgz trans.mat Template_6.nii out.mgz' """ _cmd = "mri_ca_label" input_spec = CALabelInputSpec output_spec = CALabelOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class MRIsCALabelInputSpec(FSTraitedSpecOpenMP): # required subject_id = traits.String( "subject_id", argstr="%s", position=-5, usedefault=True, mandatory=True, desc="Subject name or ID", ) hemisphere = traits.Enum( "lh", "rh", argstr="%s", position=-4, mandatory=True, desc="Hemisphere ('lh' or 'rh')", ) canonsurf = File( argstr="%s", position=-3, mandatory=True, exists=True, desc="Input canonical surface file", ) classifier = File( argstr="%s", position=-2, mandatory=True, exists=True, desc="Classifier array input file", ) smoothwm = File( mandatory=True, exists=True, desc="implicit input {hemisphere}.smoothwm" ) curv = File(mandatory=True, exists=True, desc="implicit input {hemisphere}.curv") sulc = 
File(mandatory=True, exists=True, desc="implicit input {hemisphere}.sulc") out_file = File( argstr="%s", position=-1, exists=False, name_source=["hemisphere"], keep_extension=True, hash_files=False, name_template="%s.aparc.annot", desc="Annotated surface output file", ) # optional label = File( argstr="-l %s", exists=True, desc="Undocumented flag. Autorecon3 uses ../label/{hemisphere}.cortex.label as input file", ) aseg = File( argstr="-aseg %s", exists=True, desc="Undocumented flag. Autorecon3 uses ../mri/aseg.presurf.mgz as input file", ) seed = traits.Int(argstr="-seed %d", desc="") copy_inputs = traits.Bool( desc="Copies implicit inputs to node directory " + "and creates a temp subjects_directory. " + "Use this when running as a node" ) class MRIsCALabelOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output volume from MRIsCALabel") class MRIsCALabel(FSCommandOpenMP): """ For a single subject, produces an annotation file, in which each cortical surface vertex is assigned a neuroanatomical label.This automatic procedure employs data from a previously-prepared atlas file. An atlas file is created from a training set, capturing region data manually drawn by neuroanatomists combined with statistics on variability correlated to geometric information derived from the cortical model (sulcus and curvature). Besides the atlases provided with FreeSurfer, new ones can be prepared using mris_ca_train). 
Examples ======== >>> from nipype.interfaces import freesurfer >>> ca_label = freesurfer.MRIsCALabel() >>> ca_label.inputs.subject_id = "test" >>> ca_label.inputs.hemisphere = "lh" >>> ca_label.inputs.canonsurf = "lh.pial" >>> ca_label.inputs.curv = "lh.pial" >>> ca_label.inputs.sulc = "lh.pial" >>> ca_label.inputs.classifier = "im1.nii" # in pracice, use .gcs extension >>> ca_label.inputs.smoothwm = "lh.pial" >>> ca_label.cmdline 'mris_ca_label test lh lh.pial im1.nii lh.aparc.annot' """ _cmd = "mris_ca_label" input_spec = MRIsCALabelInputSpec output_spec = MRIsCALabelOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() if "subjects_dir" in inputs: inputs["subjects_dir"] = self.inputs.subjects_dir copy2subjdir(self, self.inputs.canonsurf, folder="surf") copy2subjdir( self, self.inputs.smoothwm, folder="surf", basename="{0}.smoothwm".format(self.inputs.hemisphere), ) copy2subjdir( self, self.inputs.curv, folder="surf", basename="{0}.curv".format(self.inputs.hemisphere), ) copy2subjdir( self, self.inputs.sulc, folder="surf", basename="{0}.sulc".format(self.inputs.hemisphere), ) # The label directory must exist in order for an output to be written label_dir = os.path.join( self.inputs.subjects_dir, self.inputs.subject_id, "label" ) if not os.path.isdir(label_dir): os.makedirs(label_dir) return super(MRIsCALabel, self).run(**inputs) def _list_outputs(self): outputs = self.output_spec().get() out_basename = os.path.basename(self.inputs.out_file) outputs["out_file"] = os.path.join( self.inputs.subjects_dir, self.inputs.subject_id, "label", out_basename ) return outputs class SegmentCCInputSpec(FSTraitedSpec): in_file = File( argstr="-aseg %s", mandatory=True, exists=True, desc="Input aseg file to read from subjects directory", ) in_norm = File( mandatory=True, exists=True, desc="Required undocumented input {subject}/mri/norm.mgz", ) out_file = File( argstr="-o %s", exists=False, name_source=["in_file"], 
name_template="%s.auto.mgz", hash_files=False, keep_extension=False, desc="Filename to write aseg including CC", ) out_rotation = File( argstr="-lta %s", mandatory=True, exists=False, desc="Global filepath for writing rotation lta", ) subject_id = traits.String( "subject_id", argstr="%s", mandatory=True, position=-1, usedefault=True, desc="Subject name", ) copy_inputs = traits.Bool( desc="If running as a node, set this to True." + "This will copy the input files to the node " + "directory." ) class SegmentCCOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output segmentation uncluding corpus collosum") out_rotation = File(exists=False, desc="Output lta rotation file") class SegmentCC(FSCommand): """ This program segments the corpus callosum into five separate labels in the subcortical segmentation volume 'aseg.mgz'. The divisions of the cc are equally spaced in terms of distance along the primary eigendirection (pretty much the long axis) of the cc. The lateral extent can be changed with the -T parameter, where is the distance off the midline (so -T 1 would result in the who CC being 3mm thick). The default is 2 so it's 5mm thick. The aseg.stats values should be volume. 
Examples ======== >>> from nipype.interfaces import freesurfer >>> SegmentCC_node = freesurfer.SegmentCC() >>> SegmentCC_node.inputs.in_file = "aseg.mgz" >>> SegmentCC_node.inputs.in_norm = "norm.mgz" >>> SegmentCC_node.inputs.out_rotation = "cc.lta" >>> SegmentCC_node.inputs.subject_id = "test" >>> SegmentCC_node.cmdline 'mri_cc -aseg aseg.mgz -o aseg.auto.mgz -lta cc.lta test' """ _cmd = "mri_cc" input_spec = SegmentCCInputSpec output_spec = SegmentCCOutputSpec # mri_cc does not take absolute paths and will look for the # input files in //mri/ # So, if the files are not there, they will be copied to that # location def _format_arg(self, name, spec, value): if name in ["in_file", "in_norm", "out_file"]: # mri_cc can't use abspaths just the basename basename = os.path.basename(value) return spec.argstr % basename return super(SegmentCC, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) outputs["out_rotation"] = os.path.abspath(self.inputs.out_rotation) return outputs def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() if "subjects_dir" in inputs: inputs["subjects_dir"] = self.inputs.subjects_dir for originalfile in [self.inputs.in_file, self.inputs.in_norm]: copy2subjdir(self, originalfile, folder="mri") return super(SegmentCC, self).run(**inputs) def aggregate_outputs(self, runtime=None, needed_outputs=None): # it is necessary to find the output files and move # them to the correct loacation predicted_outputs = self._list_outputs() for name in ["out_file", "out_rotation"]: out_file = predicted_outputs[name] if not os.path.isfile(out_file): out_base = os.path.basename(out_file) if isdefined(self.inputs.subjects_dir): subj_dir = os.path.join( self.inputs.subjects_dir, self.inputs.subject_id ) else: subj_dir = os.path.join(os.getcwd(), self.inputs.subject_id) if name == "out_file": out_tmp = os.path.join(subj_dir, "mri", 
out_base) elif name == "out_rotation": out_tmp = os.path.join(subj_dir, "mri", "transforms", out_base) else: out_tmp = None # move the file to correct location if out_tmp and os.path.isfile(out_tmp): if not os.path.isdir(os.path.dirname(out_tmp)): os.makedirs(os.path.dirname(out_tmp)) shutil.move(out_tmp, out_file) return super(SegmentCC, self).aggregate_outputs(runtime, needed_outputs) class SegmentWMInputSpec(FSTraitedSpec): in_file = File( argstr="%s", exists=True, mandatory=True, position=-2, desc="Input file for SegmentWM", ) out_file = File( argstr="%s", exists=False, mandatory=True, position=-1, desc="File to be written as output for SegmentWM", ) class SegmentWMOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output white matter segmentation") class SegmentWM(FSCommand): """ This program segments white matter from the input volume. The input volume should be normalized such that white matter voxels are ~110-valued, and the volume is conformed to 256^3. Examples ======== >>> from nipype.interfaces import freesurfer >>> SegmentWM_node = freesurfer.SegmentWM() >>> SegmentWM_node.inputs.in_file = "norm.mgz" >>> SegmentWM_node.inputs.out_file = "wm.seg.mgz" >>> SegmentWM_node.cmdline 'mri_segment norm.mgz wm.seg.mgz' """ _cmd = "mri_segment" input_spec = SegmentWMInputSpec output_spec = SegmentWMOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class EditWMwithAsegInputSpec(FSTraitedSpec): in_file = File( argstr="%s", position=-4, mandatory=True, exists=True, desc="Input white matter segmentation file", ) brain_file = File( argstr="%s", position=-3, mandatory=True, exists=True, desc="Input brain/T1 file", ) seg_file = File( argstr="%s", position=-2, mandatory=True, exists=True, desc="Input presurf segmentation file", ) out_file = File( argstr="%s", position=-1, mandatory=True, exists=False, desc="File to be written as output", ) # optional keep_in = 
traits.Bool(argstr="-keep-in", desc="Keep edits as found in input volume") class EditWMwithAsegOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output edited WM file") class EditWMwithAseg(FSCommand): """ Edits a wm file using a segmentation Examples ======== >>> from nipype.interfaces.freesurfer import EditWMwithAseg >>> editwm = EditWMwithAseg() >>> editwm.inputs.in_file = "T1.mgz" >>> editwm.inputs.brain_file = "norm.mgz" >>> editwm.inputs.seg_file = "aseg.mgz" >>> editwm.inputs.out_file = "wm.asegedit.mgz" >>> editwm.inputs.keep_in = True >>> editwm.cmdline 'mri_edit_wm_with_aseg -keep-in T1.mgz norm.mgz aseg.mgz wm.asegedit.mgz' """ _cmd = "mri_edit_wm_with_aseg" input_spec = EditWMwithAsegInputSpec output_spec = EditWMwithAsegOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class ConcatenateLTAInputSpec(FSTraitedSpec): # required in_lta1 = File( exists=True, mandatory=True, argstr="%s", position=-3, desc="maps some src1 to dst1", ) in_lta2 = traits.Either( File(exists=True), "identity.nofile", argstr="%s", position=-2, mandatory=True, desc="maps dst1(src2) to dst2", ) out_file = File( position=-1, argstr="%s", hash_files=False, name_source=["in_lta1"], name_template="%s_concat", keep_extension=True, desc="the combined LTA maps: src1 to dst2 = LTA2*LTA1", ) # Inversion and transform type invert_1 = traits.Bool(argstr="-invert1", desc="invert in_lta1 before applying it") invert_2 = traits.Bool(argstr="-invert2", desc="invert in_lta2 before applying it") invert_out = traits.Bool(argstr="-invertout", desc="invert output LTA") out_type = traits.Enum( "VOX2VOX", "RAS2RAS", argstr="-out_type %d", desc="set final LTA type" ) # Talairach options tal_source_file = File( exists=True, argstr="-tal %s", position=-5, requires=["tal_template_file"], desc="if in_lta2 is talairach.xfm, specify source for talairach", ) tal_template_file = File( exists=True, argstr="%s", 
position=-4, requires=["tal_source_file"], desc="if in_lta2 is talairach.xfm, specify template for talairach", ) subject = traits.Str(argstr="-subject %s", desc="set subject in output LTA") # Note rmsdiff would be xor out_file, and would be most easily dealt with # in a new interface. -CJM 2017.10.05 class ConcatenateLTAOutputSpec(TraitedSpec): out_file = File( exists=False, desc="the combined LTA maps: src1 to dst2 = LTA2*LTA1" ) class ConcatenateLTA(FSCommand): """Concatenates two consecutive LTA transformations into one overall transformation Out = LTA2*LTA1 Examples -------- >>> from nipype.interfaces.freesurfer import ConcatenateLTA >>> conc_lta = ConcatenateLTA() >>> conc_lta.inputs.in_lta1 = 'lta1.lta' >>> conc_lta.inputs.in_lta2 = 'lta2.lta' >>> conc_lta.cmdline 'mri_concatenate_lta lta1.lta lta2.lta lta1_concat.lta' You can use 'identity.nofile' as the filename for in_lta2, e.g.: >>> conc_lta.inputs.in_lta2 = 'identity.nofile' >>> conc_lta.inputs.invert_1 = True >>> conc_lta.inputs.out_file = 'inv1.lta' >>> conc_lta.cmdline 'mri_concatenate_lta -invert1 lta1.lta identity.nofile inv1.lta' To create a RAS2RAS transform: >>> conc_lta.inputs.out_type = 'RAS2RAS' >>> conc_lta.cmdline 'mri_concatenate_lta -invert1 -out_type 1 lta1.lta identity.nofile inv1.lta' """ _cmd = "mri_concatenate_lta" input_spec = ConcatenateLTAInputSpec output_spec = ConcatenateLTAOutputSpec def _format_arg(self, name, spec, value): if name == "out_type": value = {"VOX2VOX": 0, "RAS2RAS": 1}[value] return super(ConcatenateLTA, self)._format_arg(name, spec, value) nipype-1.7.0/nipype/interfaces/freesurfer/registration.py000066400000000000000000000472331413403311400237110ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various longitudinal commands provided by freesurfer """ import os import os.path from ... 
import logging from ...utils.filemanip import split_filename, copyfile from .base import ( FSCommand, FSTraitedSpec, FSScriptCommand, FSScriptOutputSpec, FSCommandOpenMP, FSTraitedSpecOpenMP, ) from ..base import isdefined, TraitedSpec, File, traits, Directory __docformat__ = "restructuredtext" iflogger = logging.getLogger("nipype.interface") class MPRtoMNI305InputSpec(FSTraitedSpec): # environment variables, required # usedefault=True is hack for on_trait_change in __init__ reference_dir = Directory( "", exists=True, mandatory=True, usedefault=True, desc="TODO" ) target = traits.String("", mandatory=True, usedefault=True, desc="input atlas file") # required in_file = File( argstr="%s", usedefault=True, desc="the input file prefix for MPRtoMNI305" ) class MPRtoMNI305OutputSpec(FSScriptOutputSpec): out_file = File( exists=False, desc="The output file '_to__t4_vox2vox.txt'" ) class MPRtoMNI305(FSScriptCommand): """ For complete details, see FreeSurfer documentation Examples -------- >>> from nipype.interfaces.freesurfer import MPRtoMNI305, Info >>> mprtomni305 = MPRtoMNI305() >>> mprtomni305.inputs.target = 'structural.nii' >>> mprtomni305.inputs.reference_dir = '.' 
# doctest: +SKIP >>> mprtomni305.cmdline # doctest: +SKIP 'mpr2mni305 output' >>> mprtomni305.inputs.out_file = 'struct_out' # doctest: +SKIP >>> mprtomni305.cmdline # doctest: +SKIP 'mpr2mni305 struct_out' # doctest: +SKIP >>> mprtomni305.inputs.environ['REFDIR'] == os.path.join(Info.home(), 'average') # doctest: +SKIP True >>> mprtomni305.inputs.environ['MPR2MNI305_TARGET'] # doctest: +SKIP 'structural' >>> mprtomni305.run() # doctest: +SKIP """ _cmd = "mpr2mni305" input_spec = MPRtoMNI305InputSpec output_spec = MPRtoMNI305OutputSpec def __init__(self, **inputs): super(MPRtoMNI305, self).__init__(**inputs) self.inputs.on_trait_change(self._environ_update, "target") self.inputs.on_trait_change(self._environ_update, "reference_dir") def _format_arg(self, opt, spec, val): if opt in ["target", "reference_dir"]: return "" elif opt == "in_file": _, retval, ext = split_filename(val) # Need to copy file to working cache directory! copyfile( val, os.path.abspath(retval + ext), copy=True, hashmethod="content" ) return retval return super(MPRtoMNI305, self)._format_arg(opt, spec, val) def _environ_update(self): # refdir = os.path.join(Info.home(), val) refdir = self.inputs.reference_dir target = self.inputs.target self.inputs.environ["MPR2MNI305_TARGET"] = target self.inputs.environ["REFDIR"] = refdir def _get_fname(self, fname): return split_filename(fname)[1] def _list_outputs(self): outputs = super(MPRtoMNI305, self)._list_outputs() fullname = "_".join( [ self._get_fname(self.inputs.in_file), "to", self.inputs.target, "t4", "vox2vox.txt", ] ) outputs["out_file"] = os.path.abspath(fullname) return outputs class RegisterAVItoTalairachInputSpec(FSTraitedSpec): in_file = File( argstr="%s", exists=True, mandatory=True, position=0, desc="The input file" ) target = File( argstr="%s", exists=True, mandatory=True, position=1, desc="The target file" ) vox2vox = File( argstr="%s", exists=True, mandatory=True, position=2, desc="The vox2vox file" ) out_file = File( 
"talairach.auto.xfm", usedefault=True, argstr="%s", position=3, desc="The transform output", ) class RegisterAVItoTalairachOutputSpec(FSScriptOutputSpec): out_file = File(exists=False, desc="The output file for RegisterAVItoTalairach") class RegisterAVItoTalairach(FSScriptCommand): """ converts the vox2vox from talairach_avi to a talairach.xfm file This is a script that converts the vox2vox from talairach_avi to a talairach.xfm file. It is meant to replace the following cmd line: tkregister2_cmdl \ --mov $InVol \ --targ $FREESURFER_HOME/average/mni305.cor.mgz \ --xfmout ${XFM} \ --vox2vox talsrcimg_to_${target}_t4_vox2vox.txt \ --noedit \ --reg talsrcimg.reg.tmp.dat set targ = $FREESURFER_HOME/average/mni305.cor.mgz set subject = mgh-02407836-v2 set InVol = $SUBJECTS_DIR/$subject/mri/orig.mgz set vox2vox = $SUBJECTS_DIR/$subject/mri/transforms/talsrcimg_to_711-2C_as_mni_average_305_t4_vox2vox.txt Examples ======== >>> from nipype.interfaces.freesurfer import RegisterAVItoTalairach >>> register = RegisterAVItoTalairach() >>> register.inputs.in_file = 'structural.mgz' # doctest: +SKIP >>> register.inputs.target = 'mni305.cor.mgz' # doctest: +SKIP >>> register.inputs.vox2vox = 'talsrcimg_to_structural_t4_vox2vox.txt' # doctest: +SKIP >>> register.cmdline # doctest: +SKIP 'avi2talxfm structural.mgz mni305.cor.mgz talsrcimg_to_structural_t4_vox2vox.txt talairach.auto.xfm' >>> register.run() # doctest: +SKIP """ _cmd = "avi2talxfm" input_spec = RegisterAVItoTalairachInputSpec output_spec = RegisterAVItoTalairachOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class EMRegisterInputSpec(FSTraitedSpecOpenMP): # required in_file = File( argstr="%s", exists=True, mandatory=True, position=-3, desc="in brain volume" ) template = File( argstr="%s", exists=True, mandatory=True, position=-2, desc="template gca" ) out_file = File( argstr="%s", exists=False, name_source=["in_file"], 
name_template="%s_transform.lta", hash_files=False, keep_extension=False, position=-1, desc="output transform", ) # optional skull = traits.Bool(argstr="-skull", desc="align to atlas containing skull (uns=5)") mask = File(argstr="-mask %s", exists=True, desc="use volume as a mask") nbrspacing = traits.Int( argstr="-uns %d", desc="align to atlas containing skull setting unknown_nbr_spacing = nbrspacing", ) transform = File(argstr="-t %s", exists=True, desc="Previously computed transform") class EMRegisterOutputSpec(TraitedSpec): out_file = File(exists=False, desc="output transform") class EMRegister(FSCommandOpenMP): """This program creates a tranform in lta format Examples ======== >>> from nipype.interfaces.freesurfer import EMRegister >>> register = EMRegister() >>> register.inputs.in_file = 'norm.mgz' >>> register.inputs.template = 'aseg.mgz' >>> register.inputs.out_file = 'norm_transform.lta' >>> register.inputs.skull = True >>> register.inputs.nbrspacing = 9 >>> register.cmdline 'mri_em_register -uns 9 -skull norm.mgz aseg.mgz norm_transform.lta' """ _cmd = "mri_em_register" input_spec = EMRegisterInputSpec output_spec = EMRegisterOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class RegisterInputSpec(FSTraitedSpec): # required in_surf = File( argstr="%s", exists=True, mandatory=True, position=-3, copyfile=True, desc="Surface to register, often {hemi}.sphere", ) target = File( argstr="%s", exists=True, mandatory=True, position=-2, desc="The data to register to. 
In normal recon-all usage, " + "this is a template file for average surface.", ) in_sulc = File( exists=True, mandatory=True, copyfile=True, desc="Undocumented mandatory input file ${SUBJECTS_DIR}/surf/{hemisphere}.sulc ", ) out_file = File( argstr="%s", exists=False, position=-1, genfile=True, desc="Output surface file to capture registration", ) # optional curv = traits.Bool( argstr="-curv", requires=["in_smoothwm"], desc="Use smoothwm curvature for final alignment", ) in_smoothwm = File( exists=True, copyfile=True, desc="Undocumented input file ${SUBJECTS_DIR}/surf/{hemisphere}.smoothwm ", ) class RegisterOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output surface file to capture registration") class Register(FSCommand): """This program registers a surface to an average surface template. Examples ======== >>> from nipype.interfaces.freesurfer import Register >>> register = Register() >>> register.inputs.in_surf = 'lh.pial' >>> register.inputs.in_smoothwm = 'lh.pial' >>> register.inputs.in_sulc = 'lh.pial' >>> register.inputs.target = 'aseg.mgz' >>> register.inputs.out_file = 'lh.pial.reg' >>> register.inputs.curv = True >>> register.cmdline 'mris_register -curv lh.pial aseg.mgz lh.pial.reg' """ _cmd = "mris_register" input_spec = RegisterInputSpec output_spec = RegisterOutputSpec def _format_arg(self, opt, spec, val): if opt == "curv": return spec.argstr return super(Register, self)._format_arg(opt, spec, val) def _gen_filename(self, name): if name == "out_file": return self._list_outputs()[name] return None def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_file): outputs["out_file"] = os.path.abspath(self.inputs.out_file) else: outputs["out_file"] = os.path.abspath(self.inputs.in_surf) + ".reg" return outputs class PaintInputSpec(FSTraitedSpec): # required in_surf = File( argstr="%s", exists=True, mandatory=True, position=-2, desc="Surface file with grid (vertices) onto which the " + "template data is to be 
sampled or 'painted'", ) template = File( argstr="%s", exists=True, mandatory=True, position=-3, desc="Template file" ) # optional template_param = traits.Int(desc="Frame number of the input template") averages = traits.Int(argstr="-a %d", desc="Average curvature patterns") out_file = File( argstr="%s", exists=False, position=-1, name_template="%s.avg_curv", hash_files=False, name_source=["in_surf"], keep_extension=False, desc="File containing a surface-worth of per-vertex values, " + "saved in 'curvature' format.", ) class PaintOutputSpec(TraitedSpec): out_file = File( exists=False, desc="File containing a surface-worth of per-vertex values, saved in 'curvature' format.", ) class Paint(FSCommand): """ This program is useful for extracting one of the arrays ("a variable") from a surface-registration template file. The output is a file containing a surface-worth of per-vertex values, saved in "curvature" format. Because the template data is sampled to a particular surface mesh, this conjures the idea of "painting to a surface". 
Examples ======== >>> from nipype.interfaces.freesurfer import Paint >>> paint = Paint() >>> paint.inputs.in_surf = 'lh.pial' >>> paint.inputs.template = 'aseg.mgz' >>> paint.inputs.averages = 5 >>> paint.inputs.out_file = 'lh.avg_curv' >>> paint.cmdline 'mrisp_paint -a 5 aseg.mgz lh.pial lh.avg_curv' """ _cmd = "mrisp_paint" input_spec = PaintInputSpec output_spec = PaintOutputSpec def _format_arg(self, opt, spec, val): if opt == "template": if isdefined(self.inputs.template_param): return spec.argstr % (val + "#" + str(self.inputs.template_param)) return super(Paint, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class MRICoregInputSpec(FSTraitedSpec): source_file = File( argstr="--mov %s", desc="source file to be registered", mandatory=True, copyfile=False, ) reference_file = File( argstr="--ref %s", desc="reference (target) file", mandatory=True, copyfile=False, xor=["subject_id"], ) out_lta_file = traits.Either( True, File, argstr="--lta %s", default=True, usedefault=True, desc="output registration file (LTA format)", ) out_reg_file = traits.Either( True, File, argstr="--regdat %s", desc="output registration file (REG format)" ) out_params_file = traits.Either( True, File, argstr="--params %s", desc="output parameters file" ) subjects_dir = Directory( exists=True, argstr="--sd %s", desc="FreeSurfer SUBJECTS_DIR" ) subject_id = traits.Str( argstr="--s %s", position=1, mandatory=True, xor=["reference_file"], requires=["subjects_dir"], desc="freesurfer subject ID (implies ``reference_mask == " "aparc+aseg.mgz`` unless otherwise specified)", ) dof = traits.Enum( 6, 9, 12, argstr="--dof %d", desc="number of transform degrees of freedom" ) reference_mask = traits.Either( False, traits.Str, argstr="--ref-mask %s", position=2, desc="mask reference volume with given mask, or None if ``False``", ) source_mask = traits.Str( argstr="--mov-mask", 
desc="mask source file with given mask" ) num_threads = traits.Int(argstr="--threads %d", desc="number of OpenMP threads") no_coord_dithering = traits.Bool( argstr="--no-coord-dither", desc="turn off coordinate dithering" ) no_intensity_dithering = traits.Bool( argstr="--no-intensity-dither", desc="turn off intensity dithering" ) sep = traits.List( argstr="--sep %s...", minlen=1, maxlen=2, desc="set spatial scales, in voxels (default [2, 4])", ) initial_translation = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="--trans %g %g %g", desc="initial translation in mm (implies no_cras0)", ) initial_rotation = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="--rot %g %g %g", desc="initial rotation in degrees", ) initial_scale = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="--scale %g %g %g", desc="initial scale", ) initial_shear = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="--shear %g %g %g", desc="initial shear (Hxy, Hxz, Hyz)", ) no_cras0 = traits.Bool( argstr="--no-cras0", desc="do not set translation parameters to align " "centers of source and reference files", ) max_iters = traits.Range( low=1, argstr="--nitersmax %d", desc="maximum iterations (default: 4)" ) ftol = traits.Float( argstr="--ftol %e", desc="floating-point tolerance (default=1e-7)" ) linmintol = traits.Float(argstr="--linmintol %e") saturation_threshold = traits.Range( low=0.0, high=100.0, argstr="--sat %g", desc="saturation threshold (default=9.999)", ) conform_reference = traits.Bool( argstr="--conf-ref", desc="conform reference without rescaling" ) no_brute_force = traits.Bool(argstr="--no-bf", desc="do not brute force search") brute_force_limit = traits.Float( argstr="--bf-lim %g", xor=["no_brute_force"], desc="constrain brute force search to +/- lim", ) brute_force_samples = traits.Int( argstr="--bf-nsamp %d", xor=["no_brute_force"], desc="number of samples in brute force search", ) no_smooth = traits.Bool( 
argstr="--no-smooth", desc="do not apply smoothing to either reference or source file", ) ref_fwhm = traits.Float( argstr="--ref-fwhm", desc="apply smoothing to reference file" ) source_oob = traits.Bool( argstr="--mov-oob", desc="count source voxels that are out-of-bounds as 0" ) # Skipping mat2par class MRICoregOutputSpec(TraitedSpec): out_reg_file = File(exists=True, desc="output registration file") out_lta_file = File(exists=True, desc="output LTA-style registration file") out_params_file = File(exists=True, desc="output parameters file") class MRICoreg(FSCommand): """This program registers one volume to another mri_coreg is a C reimplementation of spm_coreg in FreeSurfer Examples ======== >>> from nipype.interfaces.freesurfer import MRICoreg >>> coreg = MRICoreg() >>> coreg.inputs.source_file = 'moving1.nii' >>> coreg.inputs.reference_file = 'fixed1.nii' >>> coreg.inputs.subjects_dir = '.' >>> coreg.cmdline # doctest: +ELLIPSIS 'mri_coreg --lta .../registration.lta --ref fixed1.nii --mov moving1.nii --sd .' If passing a subject ID, the reference mask may be disabled: >>> coreg = MRICoreg() >>> coreg.inputs.source_file = 'moving1.nii' >>> coreg.inputs.subjects_dir = '.' >>> coreg.inputs.subject_id = 'fsaverage' >>> coreg.inputs.reference_mask = False >>> coreg.cmdline # doctest: +ELLIPSIS 'mri_coreg --s fsaverage --no-ref-mask --lta .../registration.lta --mov moving1.nii --sd .' Spatial scales may be specified as a list of one or two separations: >>> coreg.inputs.sep = [4] >>> coreg.cmdline # doctest: +ELLIPSIS 'mri_coreg --s fsaverage --no-ref-mask --lta .../registration.lta --sep 4 --mov moving1.nii --sd .' >>> coreg.inputs.sep = [4, 5] >>> coreg.cmdline # doctest: +ELLIPSIS 'mri_coreg --s fsaverage --no-ref-mask --lta .../registration.lta --sep 4 --sep 5 --mov moving1.nii --sd .' 
""" _cmd = "mri_coreg" input_spec = MRICoregInputSpec output_spec = MRICoregOutputSpec def _format_arg(self, opt, spec, val): if opt in ("out_reg_file", "out_lta_file", "out_params_file") and val is True: val = self._list_outputs()[opt] elif opt == "reference_mask" and val is False: return "--no-ref-mask" return super(MRICoreg, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self.output_spec().get() out_lta_file = self.inputs.out_lta_file if isdefined(out_lta_file): if out_lta_file is True: out_lta_file = "registration.lta" outputs["out_lta_file"] = os.path.abspath(out_lta_file) out_reg_file = self.inputs.out_reg_file if isdefined(out_reg_file): if out_reg_file is True: out_reg_file = "registration.dat" outputs["out_reg_file"] = os.path.abspath(out_reg_file) out_params_file = self.inputs.out_params_file if isdefined(out_params_file): if out_params_file is True: out_params_file = "registration.par" outputs["out_params_file"] = os.path.abspath(out_params_file) return outputs nipype-1.7.0/nipype/interfaces/freesurfer/tests/000077500000000000000000000000001413403311400217565ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/freesurfer/tests/__init__.py000066400000000000000000000000301413403311400240600ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_BBRegister.py000066400000000000000000000060571413403311400253670ustar00rootroot00000000000000from ..preprocess import BBRegister, BBRegisterInputSpec6 def test_BBRegister_inputs(): input_map_5_3 = dict( args=dict(argstr="%s"), contrast_type=dict(argstr="--%s", mandatory=True), dof=dict(argstr="--%d"), environ=dict(nohash=True, usedefault=True), epi_mask=dict(argstr="--epi-mask"), fsldof=dict(argstr="--fsl-dof %d"), init=dict(argstr="--init-%s", mandatory=True, xor=["init_reg_file"]), init_cost_file=dict(argstr="--initcost %s"), init_reg_file=dict(argstr="--init-reg %s", mandatory=True, xor=["init"]), intermediate_file=dict(argstr="--int 
%s"), out_fsl_file=dict(argstr="--fslmat %s"), out_lta_file=dict(argstr="--lta %s", min_ver="5.2.0"), out_reg_file=dict(argstr="--reg %s", genfile=True), reg_frame=dict(argstr="--frame %d", xor=["reg_middle_frame"]), reg_middle_frame=dict(argstr="--mid-frame", xor=["reg_frame"]), registered_file=dict(argstr="--o %s"), source_file=dict(argstr="--mov %s", copyfile=False, mandatory=True), spm_nifti=dict(argstr="--spm-nii"), subject_id=dict(argstr="--s %s", mandatory=True), subjects_dir=dict(), ) input_map_6_0 = dict( args=dict(argstr="%s"), contrast_type=dict(argstr="--%s", mandatory=True), dof=dict(argstr="--%d"), environ=dict(nohash=True, usedefault=True), epi_mask=dict(argstr="--epi-mask"), fsldof=dict(argstr="--fsl-dof %d"), init=dict(argstr="--init-%s", xor=["init_reg_file"]), init_reg_file=dict(argstr="--init-reg %s", xor=["init"]), init_cost_file=dict(argstr="--initcost %s"), intermediate_file=dict(argstr="--int %s"), out_fsl_file=dict(argstr="--fslmat %s"), out_lta_file=dict(argstr="--lta %s", min_ver="5.2.0"), out_reg_file=dict(argstr="--reg %s", genfile=True), reg_frame=dict(argstr="--frame %d", xor=["reg_middle_frame"]), reg_middle_frame=dict(argstr="--mid-frame", xor=["reg_frame"]), registered_file=dict(argstr="--o %s"), source_file=dict(argstr="--mov %s", copyfile=False, mandatory=True), spm_nifti=dict(argstr="--spm-nii"), subject_id=dict(argstr="--s %s", mandatory=True), subjects_dir=dict(), ) instance = BBRegister() if isinstance(instance.inputs, BBRegisterInputSpec6): input_map = input_map_6_0 else: input_map = input_map_5_3 for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(instance.inputs.traits()[key], metakey) == value def test_BBRegister_outputs(): output_map = dict( init_cost_file=dict(), min_cost_file=dict(), out_fsl_file=dict(), out_lta_file=dict(), out_reg_file=dict(), registered_file=dict(), ) outputs = BBRegister.output_spec() for key, metadata in list(output_map.items()): for metakey, 
value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_FSSurfaceCommand.py000066400000000000000000000030321413403311400265050ustar00rootroot00000000000000# Modified 2017.04.21 by Chris Markiewicz import pytest from ..base import FSSurfaceCommand from ... import freesurfer as fs from ...io import FreeSurferSource def test_FSSurfaceCommand_inputs(): input_map = dict( args=dict(argstr="%s"), environ=dict(nohash=True, usedefault=True), subjects_dir=dict(), ) inputs = FSSurfaceCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value @pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed") def test_associated_file(tmpdir): fssrc = FreeSurferSource( subjects_dir=fs.Info.subjectsdir(), subject_id="fsaverage", hemi="lh" ) fssrc.base_dir = tmpdir.strpath fssrc.resource_monitor = False fsavginfo = fssrc.run().outputs.get() # Pairs of white/pial files in the same directories for white, pial in [ ("lh.white", "lh.pial"), ("./lh.white", "./lh.pial"), (fsavginfo["white"], fsavginfo["pial"]), ]: # Unspecified paths, possibly with missing hemisphere information, # are equivalent to using the same directory and hemisphere for name in ("pial", "lh.pial", pial): assert FSSurfaceCommand._associated_file(white, name) == pial # With path information, no changes are made for name in ("./pial", "./lh.pial", fsavginfo["pial"]): assert FSSurfaceCommand._associated_file(white, name) == name nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_AddXFormToHeader.py000066400000000000000000000026331413403311400275030ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import AddXFormToHeader def test_AddXFormToHeader_inputs(): input_map = dict( args=dict( argstr="%s", ), copy_name=dict( argstr="-c", ), environ=dict( nohash=True, 
usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), out_file=dict( argstr="%s", extensions=None, position=-1, usedefault=True, ), subjects_dir=dict(), transform=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), verbose=dict( argstr="-v", ), ) inputs = AddXFormToHeader.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AddXFormToHeader_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = AddXFormToHeader.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Aparc2Aseg.py000066400000000000000000000050071413403311400263310ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Aparc2Aseg def test_Aparc2Aseg_inputs(): input_map = dict( a2009s=dict( argstr="--a2009s", ), args=dict( argstr="%s", ), aseg=dict( argstr="--aseg %s", extensions=None, ), copy_inputs=dict(), ctxseg=dict( argstr="--ctxseg %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), filled=dict( extensions=None, ), hypo_wm=dict( argstr="--hypo-as-wm", ), label_wm=dict( argstr="--labelwm", ), lh_annotation=dict( extensions=None, mandatory=True, ), lh_pial=dict( extensions=None, mandatory=True, ), lh_ribbon=dict( extensions=None, mandatory=True, ), lh_white=dict( extensions=None, mandatory=True, ), out_file=dict( argstr="--o %s", extensions=None, mandatory=True, ), rh_annotation=dict( extensions=None, mandatory=True, ), rh_pial=dict( extensions=None, mandatory=True, ), rh_ribbon=dict( extensions=None, mandatory=True, ), rh_white=dict( extensions=None, mandatory=True, ), ribbon=dict( extensions=None, mandatory=True, ), rip_unknown=dict( argstr="--rip-unknown", ), 
subject_id=dict( argstr="--s %s", mandatory=True, usedefault=True, ), subjects_dir=dict(), volmask=dict( argstr="--volmask", ), ) inputs = Aparc2Aseg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Aparc2Aseg_outputs(): output_map = dict( out_file=dict( argstr="%s", extensions=None, ), ) outputs = Aparc2Aseg.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Apas2Aseg.py000066400000000000000000000021421413403311400261640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Apas2Aseg def test_Apas2Aseg_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="--i %s", extensions=None, mandatory=True, ), out_file=dict( argstr="--o %s", extensions=None, mandatory=True, ), subjects_dir=dict(), ) inputs = Apas2Aseg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Apas2Aseg_outputs(): output_map = dict( out_file=dict( argstr="%s", extensions=None, ), ) outputs = Apas2Aseg.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_ApplyMask.py000066400000000000000000000037741413403311400263330ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ApplyMask def test_ApplyMask_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), 
invert_xfm=dict( argstr="-invert", ), keep_mask_deletion_edits=dict( argstr="-keep_mask_deletion_edits", ), mask_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), mask_thresh=dict( argstr="-T %.4f", ), out_file=dict( argstr="%s", extensions=None, hash_files=True, keep_extension=True, name_source=["in_file"], name_template="%s_masked", position=-1, ), subjects_dir=dict(), transfer=dict( argstr="-transfer %d", ), use_abs=dict( argstr="-abs", ), xfm_file=dict( argstr="-xform %s", extensions=None, ), xfm_source=dict( argstr="-lta_src %s", extensions=None, ), xfm_target=dict( argstr="-lta_dst %s", extensions=None, ), ) inputs = ApplyMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ApplyMask_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ApplyMask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_ApplyVolTransform.py000066400000000000000000000124011413403311400300570ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import ApplyVolTransform def test_ApplyVolTransform_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fs_target=dict( argstr="--fstarg", mandatory=True, requires=["reg_file"], xor=("target_file", "tal", "fs_target"), ), fsl_reg_file=dict( argstr="--fsl %s", extensions=None, mandatory=True, xor=( "reg_file", "lta_file", "lta_inv_file", "fsl_reg_file", "xfm_reg_file", "reg_header", "mni_152_reg", "subject", ), ), interp=dict( argstr="--interp %s", ), inverse=dict( argstr="--inv", ), invert_morph=dict( argstr="--inv-morph", requires=["m3z_file"], ), lta_file=dict( argstr="--lta %s", extensions=None, mandatory=True, 
xor=( "reg_file", "lta_file", "lta_inv_file", "fsl_reg_file", "xfm_reg_file", "reg_header", "mni_152_reg", "subject", ), ), lta_inv_file=dict( argstr="--lta-inv %s", extensions=None, mandatory=True, xor=( "reg_file", "lta_file", "lta_inv_file", "fsl_reg_file", "xfm_reg_file", "reg_header", "mni_152_reg", "subject", ), ), m3z_file=dict( argstr="--m3z %s", extensions=None, ), mni_152_reg=dict( argstr="--regheader", mandatory=True, xor=( "reg_file", "lta_file", "lta_inv_file", "fsl_reg_file", "xfm_reg_file", "reg_header", "mni_152_reg", "subject", ), ), no_ded_m3z_path=dict( argstr="--noDefM3zPath", requires=["m3z_file"], ), no_resample=dict( argstr="--no-resample", ), reg_file=dict( argstr="--reg %s", extensions=None, mandatory=True, xor=( "reg_file", "lta_file", "lta_inv_file", "fsl_reg_file", "xfm_reg_file", "reg_header", "mni_152_reg", "subject", ), ), reg_header=dict( argstr="--regheader", mandatory=True, xor=( "reg_file", "lta_file", "lta_inv_file", "fsl_reg_file", "xfm_reg_file", "reg_header", "mni_152_reg", "subject", ), ), source_file=dict( argstr="--mov %s", copyfile=False, extensions=None, mandatory=True, ), subject=dict( argstr="--s %s", mandatory=True, xor=( "reg_file", "lta_file", "lta_inv_file", "fsl_reg_file", "xfm_reg_file", "reg_header", "mni_152_reg", "subject", ), ), subjects_dir=dict(), tal=dict( argstr="--tal", mandatory=True, xor=("target_file", "tal", "fs_target"), ), tal_resolution=dict( argstr="--talres %.10f", ), target_file=dict( argstr="--targ %s", extensions=None, mandatory=True, xor=("target_file", "tal", "fs_target"), ), transformed_file=dict( argstr="--o %s", extensions=None, genfile=True, ), xfm_reg_file=dict( argstr="--xfm %s", extensions=None, mandatory=True, xor=( "reg_file", "lta_file", "lta_inv_file", "fsl_reg_file", "xfm_reg_file", "reg_header", "mni_152_reg", "subject", ), ), ) inputs = ApplyVolTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value def test_ApplyVolTransform_outputs(): output_map = dict( transformed_file=dict( extensions=None, ), ) outputs = ApplyVolTransform.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Binarize.py000066400000000000000000000055701413403311400261710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import Binarize def test_Binarize_inputs(): input_map = dict( abs=dict( argstr="--abs", ), args=dict( argstr="%s", ), bin_col_num=dict( argstr="--bincol", ), bin_val=dict( argstr="--binval %d", ), bin_val_not=dict( argstr="--binvalnot %d", ), binary_file=dict( argstr="--o %s", extensions=None, genfile=True, ), count_file=dict( argstr="--count %s", ), dilate=dict( argstr="--dilate %d", ), environ=dict( nohash=True, usedefault=True, ), erode=dict( argstr="--erode %d", ), erode2d=dict( argstr="--erode2d %d", ), frame_no=dict( argstr="--frame %s", ), in_file=dict( argstr="--i %s", copyfile=False, extensions=None, mandatory=True, ), invert=dict( argstr="--inv", ), mask_file=dict( argstr="--mask maskvol", extensions=None, ), mask_thresh=dict( argstr="--mask-thresh %f", ), match=dict( argstr="--match %d...", ), max=dict( argstr="--max %f", xor=["wm_ven_csf"], ), merge_file=dict( argstr="--merge %s", extensions=None, ), min=dict( argstr="--min %f", xor=["wm_ven_csf"], ), out_type=dict( argstr="", ), rmax=dict( argstr="--rmax %f", ), rmin=dict( argstr="--rmin %f", ), subjects_dir=dict(), ventricles=dict( argstr="--ventricles", ), wm=dict( argstr="--wm", ), wm_ven_csf=dict( argstr="--wm+vcsf", xor=["min", "max"], ), zero_edges=dict( argstr="--zero-edges", ), zero_slice_edge=dict( argstr="--zero-slice-edges", ), ) inputs = Binarize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Binarize_outputs(): output_map = dict( binary_file=dict( extensions=None, ), count_file=dict( extensions=None, ), ) outputs = Binarize.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_CALabel.py000066400000000000000000000040671413403311400256510ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import CALabel def test_CALabel_inputs(): input_map = dict( align=dict( argstr="-align", ), args=dict( argstr="%s", ), aseg=dict( argstr="-aseg %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-4, ), in_vol=dict( argstr="-r %s", extensions=None, ), intensities=dict( argstr="-r %s", extensions=None, ), label=dict( argstr="-l %s", extensions=None, ), no_big_ventricles=dict( argstr="-nobigventricles", ), num_threads=dict(), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, ), prior=dict( argstr="-prior %.1f", ), relabel_unlikely=dict( argstr="-relabel_unlikely %d %.1f", ), subjects_dir=dict(), template=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), transform=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), ) inputs = CALabel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CALabel_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = CALabel.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_CANormalize.py000066400000000000000000000035461413403311400265730ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import CANormalize def test_CANormalize_inputs(): input_map = dict( args=dict( argstr="%s", ), atlas=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), control_points=dict( argstr="-c %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-4, ), long_file=dict( argstr="-long %s", extensions=None, ), mask=dict( argstr="-mask %s", extensions=None, ), out_file=dict( argstr="%s", extensions=None, hash_files=False, keep_extension=True, name_source=["in_file"], name_template="%s_norm", position=-1, ), subjects_dir=dict(), transform=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), ) inputs = CANormalize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CANormalize_outputs(): output_map = dict( control_points=dict( extensions=None, ), out_file=dict( extensions=None, ), ) outputs = CANormalize.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_CARegister.py000066400000000000000000000035551413403311400264170ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import CARegister def test_CARegister_inputs(): input_map = dict( A=dict( argstr="-A %d", ), align=dict( argstr="-align-%s", ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), invert_and_save=dict( argstr="-invert-and-save", position=-4, ), 
l_files=dict( argstr="-l %s", ), levels=dict( argstr="-levels %d", ), mask=dict( argstr="-mask %s", extensions=None, ), no_big_ventricles=dict( argstr="-nobigventricles", ), num_threads=dict(), out_file=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), subjects_dir=dict(), template=dict( argstr="%s", extensions=None, position=-2, ), transform=dict( argstr="-T %s", extensions=None, ), ) inputs = CARegister.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CARegister_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = CARegister.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_CheckTalairachAlignment.py000066400000000000000000000024761413403311400311150ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import CheckTalairachAlignment def test_CheckTalairachAlignment_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-xfm %s", extensions=None, mandatory=True, position=-1, xor=["subject"], ), subject=dict( argstr="-subj %s", mandatory=True, position=-1, xor=["in_file"], ), subjects_dir=dict(), threshold=dict( argstr="-T %.3f", usedefault=True, ), ) inputs = CheckTalairachAlignment.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CheckTalairachAlignment_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = CheckTalairachAlignment.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Concatenate.py000066400000000000000000000041511413403311400266440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import Concatenate def test_Concatenate_inputs(): input_map = dict( add_val=dict( argstr="--add %f", ), args=dict( argstr="%s", ), combine=dict( argstr="--combine", ), concatenated_file=dict( argstr="--o %s", extensions=None, genfile=True, ), environ=dict( nohash=True, usedefault=True, ), gmean=dict( argstr="--gmean %d", ), in_files=dict( argstr="--i %s...", mandatory=True, ), keep_dtype=dict( argstr="--keep-datatype", ), mask_file=dict( argstr="--mask %s", extensions=None, ), max_bonfcor=dict( argstr="--max-bonfcor", ), max_index=dict( argstr="--max-index", ), mean_div_n=dict( argstr="--mean-div-n", ), multiply_by=dict( argstr="--mul %f", ), multiply_matrix_file=dict( argstr="--mtx %s", extensions=None, ), paired_stats=dict( argstr="--paired-%s", ), sign=dict( argstr="--%s", ), sort=dict( argstr="--sort", ), stats=dict( argstr="--%s", ), subjects_dir=dict(), vote=dict( argstr="--vote", ), ) inputs = Concatenate.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Concatenate_outputs(): output_map = dict( concatenated_file=dict( extensions=None, ), ) outputs = Concatenate.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_ConcatenateLTA.py000066400000000000000000000040131413403311400272020ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import ConcatenateLTA def test_ConcatenateLTA_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_lta1=dict( argstr="%s", 
extensions=None, mandatory=True, position=-3, ), in_lta2=dict( argstr="%s", mandatory=True, position=-2, ), invert_1=dict( argstr="-invert1", ), invert_2=dict( argstr="-invert2", ), invert_out=dict( argstr="-invertout", ), out_file=dict( argstr="%s", extensions=None, hash_files=False, keep_extension=True, name_source=["in_lta1"], name_template="%s_concat", position=-1, ), out_type=dict( argstr="-out_type %d", ), subject=dict( argstr="-subject %s", ), subjects_dir=dict(), tal_source_file=dict( argstr="-tal %s", extensions=None, position=-5, requires=["tal_template_file"], ), tal_template_file=dict( argstr="%s", extensions=None, position=-4, requires=["tal_source_file"], ), ) inputs = ConcatenateLTA.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ConcatenateLTA_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ConcatenateLTA.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Contrast.py000066400000000000000000000033471413403311400262230ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Contrast def test_Contrast_inputs(): input_map = dict( annotation=dict( extensions=None, mandatory=True, ), args=dict( argstr="%s", ), copy_inputs=dict(), cortex=dict( extensions=None, mandatory=True, ), environ=dict( nohash=True, usedefault=True, ), hemisphere=dict( argstr="--%s-only", mandatory=True, ), orig=dict( extensions=None, mandatory=True, ), rawavg=dict( extensions=None, mandatory=True, ), subject_id=dict( argstr="--s %s", mandatory=True, usedefault=True, ), subjects_dir=dict(), thickness=dict( extensions=None, mandatory=True, ), white=dict( extensions=None, mandatory=True, ), ) inputs = Contrast.input_spec() for 
key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Contrast_outputs(): output_map = dict( out_contrast=dict( extensions=None, ), out_log=dict( extensions=None, ), out_stats=dict( extensions=None, ), ) outputs = Contrast.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Curvature.py000066400000000000000000000026251413403311400264040ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Curvature def test_Curvature_inputs(): input_map = dict( args=dict( argstr="%s", ), averages=dict( argstr="-a %d", ), copy_input=dict(), distances=dict( argstr="-distances %d %d", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-2, ), n=dict( argstr="-n", ), save=dict( argstr="-w", ), subjects_dir=dict(), threshold=dict( argstr="-thresh %.3f", ), ) inputs = Curvature.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Curvature_outputs(): output_map = dict( out_gauss=dict( extensions=None, ), out_mean=dict( extensions=None, ), ) outputs = Curvature.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_CurvatureStats.py000066400000000000000000000036201413403311400274170ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import CurvatureStats def test_CurvatureStats_inputs(): input_map = dict( args=dict( argstr="%s", ), copy_inputs=dict(), curvfile1=dict( 
argstr="%s", extensions=None, mandatory=True, position=-2, ), curvfile2=dict( argstr="%s", extensions=None, mandatory=True, position=-1, ), environ=dict( nohash=True, usedefault=True, ), hemisphere=dict( argstr="%s", mandatory=True, position=-3, ), min_max=dict( argstr="-m", ), out_file=dict( argstr="-o %s", extensions=None, hash_files=False, name_source=["hemisphere"], name_template="%s.curv.stats", ), subject_id=dict( argstr="%s", mandatory=True, position=-4, usedefault=True, ), subjects_dir=dict(), surface=dict( argstr="-F %s", extensions=None, ), values=dict( argstr="-G", ), write=dict( argstr="--writeCurvatureFiles", ), ) inputs = CurvatureStats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CurvatureStats_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = CurvatureStats.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_DICOMConvert.py000066400000000000000000000020701413403311400266120ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import DICOMConvert def test_DICOMConvert_inputs(): input_map = dict( args=dict( argstr="%s", ), base_output_dir=dict( mandatory=True, ), dicom_dir=dict( mandatory=True, ), dicom_info=dict( extensions=None, ), environ=dict( nohash=True, usedefault=True, ), file_mapping=dict(), ignore_single_slice=dict( requires=["dicom_info"], ), out_type=dict( usedefault=True, ), seq_list=dict( requires=["dicom_info"], ), subject_dir_template=dict( usedefault=True, ), subject_id=dict(), subjects_dir=dict(), ) inputs = DICOMConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == 
value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_EMRegister.py000066400000000000000000000033231413403311400264260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import EMRegister def test_EMRegister_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), mask=dict( argstr="-mask %s", extensions=None, ), nbrspacing=dict( argstr="-uns %d", ), num_threads=dict(), out_file=dict( argstr="%s", extensions=None, hash_files=False, keep_extension=False, name_source=["in_file"], name_template="%s_transform.lta", position=-1, ), skull=dict( argstr="-skull", ), subjects_dir=dict(), template=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), transform=dict( argstr="-t %s", extensions=None, ), ) inputs = EMRegister.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_EMRegister_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = EMRegister.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_EditWMwithAseg.py000066400000000000000000000027551413403311400272550ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import EditWMwithAseg def test_EditWMwithAseg_inputs(): input_map = dict( args=dict( argstr="%s", ), brain_file=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-4, ), keep_in=dict( argstr="-keep-in", ), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, ), 
seg_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), subjects_dir=dict(), ) inputs = EditWMwithAseg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_EditWMwithAseg_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = EditWMwithAseg.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_EulerNumber.py000066400000000000000000000017371413403311400266540ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import EulerNumber def test_EulerNumber_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, ), subjects_dir=dict(), ) inputs = EulerNumber.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_EulerNumber_outputs(): output_map = dict( defects=dict(), euler=dict(), ) outputs = EulerNumber.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_ExtractMainComponent.py000066400000000000000000000022721413403311400305240ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ExtractMainComponent def test_ExtractMainComponent_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), out_file=dict( argstr="%s", extensions=None, 
name_source="in_file", name_template="%s.maincmp", position=2, ), ) inputs = ExtractMainComponent.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ExtractMainComponent_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ExtractMainComponent.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_FSCommand.py000066400000000000000000000010111413403311400262170ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import FSCommand def test_FSCommand_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), subjects_dir=dict(), ) inputs = FSCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_FSCommandOpenMP.py000066400000000000000000000010671413403311400273110ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import FSCommandOpenMP def test_FSCommandOpenMP_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), num_threads=dict(), subjects_dir=dict(), ) inputs = FSCommandOpenMP.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_FSScriptCommand.py000066400000000000000000000010331413403311400274100ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import FSScriptCommand def test_FSScriptCommand_inputs(): input_map = 
dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), subjects_dir=dict(), ) inputs = FSScriptCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_FitMSParams.py000066400000000000000000000024531413403311400265510ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import FitMSParams def test_FitMSParams_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), flip_list=dict(), in_files=dict( argstr="%s", mandatory=True, position=-2, ), out_dir=dict( argstr="%s", genfile=True, position=-1, ), subjects_dir=dict(), te_list=dict(), tr_list=dict(), xfm_list=dict(), ) inputs = FitMSParams.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FitMSParams_outputs(): output_map = dict( pd_image=dict( extensions=None, ), t1_image=dict( extensions=None, ), t2star_image=dict( extensions=None, ), ) outputs = FitMSParams.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_FixTopology.py000066400000000000000000000034321413403311400267040ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import FixTopology def test_FixTopology_inputs(): input_map = dict( args=dict( argstr="%s", ), copy_inputs=dict( mandatory=True, ), environ=dict( nohash=True, usedefault=True, ), ga=dict( argstr="-ga", ), hemisphere=dict( argstr="%s", mandatory=True, position=-1, ), in_brain=dict( extensions=None, mandatory=True, ), in_inflated=dict( extensions=None, mandatory=True, ), in_orig=dict( 
extensions=None, mandatory=True, ), in_wm=dict( extensions=None, mandatory=True, ), mgz=dict( argstr="-mgz", ), seed=dict( argstr="-seed %d", ), sphere=dict( argstr="-sphere %s", extensions=None, ), subject_id=dict( argstr="%s", mandatory=True, position=-2, usedefault=True, ), subjects_dir=dict(), ) inputs = FixTopology.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FixTopology_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = FixTopology.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_FuseSegmentations.py000066400000000000000000000027501413403311400300660ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..longitudinal import FuseSegmentations def test_FuseSegmentations_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_norms=dict( argstr="-n %s", mandatory=True, ), in_segmentations=dict( argstr="-a %s", mandatory=True, ), in_segmentations_noCC=dict( argstr="-c %s", mandatory=True, ), out_file=dict( extensions=None, mandatory=True, position=-1, ), subject_id=dict( argstr="%s", position=-3, ), subjects_dir=dict(), timepoints=dict( argstr="%s", mandatory=True, position=-2, ), ) inputs = FuseSegmentations.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FuseSegmentations_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = FuseSegmentations.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_GLMFit.py000066400000000000000000000141641413403311400255070ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import GLMFit def test_GLMFit_inputs(): input_map = dict( allow_ill_cond=dict( argstr="--illcond", ), allow_repeated_subjects=dict( argstr="--allowsubjrep", ), args=dict( argstr="%s", ), calc_AR1=dict( argstr="--tar1", ), check_opts=dict( argstr="--checkopts", ), compute_log_y=dict( argstr="--logy", ), contrast=dict( argstr="--C %s...", ), cortex=dict( argstr="--cortex", xor=["label_file"], ), debug=dict( argstr="--debug", ), design=dict( argstr="--X %s", extensions=None, xor=("fsgd", "design", "one_sample"), ), diag=dict( argstr="--diag %d", ), diag_cluster=dict( argstr="--diag-cluster", ), environ=dict( nohash=True, usedefault=True, ), fixed_fx_dof=dict( argstr="--ffxdof %d", xor=["fixed_fx_dof_file"], ), fixed_fx_dof_file=dict( argstr="--ffxdofdat %d", extensions=None, xor=["fixed_fx_dof"], ), fixed_fx_var=dict( argstr="--yffxvar %s", extensions=None, ), force_perm=dict( argstr="--perm-force", ), fsgd=dict( argstr="--fsgd %s %s", xor=("fsgd", "design", "one_sample"), ), fwhm=dict( argstr="--fwhm %f", ), glm_dir=dict( argstr="--glmdir %s", genfile=True, ), hemi=dict(), in_file=dict( argstr="--y %s", copyfile=False, extensions=None, mandatory=True, ), invert_mask=dict( argstr="--mask-inv", ), label_file=dict( argstr="--label %s", extensions=None, xor=["cortex"], ), mask_file=dict( argstr="--mask %s", extensions=None, ), no_contrast_ok=dict( argstr="--no-contrasts-ok", ), no_est_fwhm=dict( argstr="--no-est-fwhm", ), no_mask_smooth=dict( argstr="--no-mask-smooth", ), no_prune=dict( argstr="--no-prune", xor=["prunethresh"], ), one_sample=dict( argstr="--osgm", xor=("one_sample", "fsgd", "design", "contrast"), ), pca=dict( argstr="--pca", ), per_voxel_reg=dict( argstr="--pvr %s...", ), profile=dict( argstr="--profile %d", ), prune=dict( argstr="--prune", ), 
prune_thresh=dict( argstr="--prune_thr %f", xor=["noprune"], ), resynth_test=dict( argstr="--resynthtest %d", ), save_cond=dict( argstr="--save-cond", ), save_estimate=dict( argstr="--yhat-save", ), save_res_corr_mtx=dict( argstr="--eres-scm", ), save_residual=dict( argstr="--eres-save", ), seed=dict( argstr="--seed %d", ), self_reg=dict( argstr="--selfreg %d %d %d", ), sim_done_file=dict( argstr="--sim-done %s", extensions=None, ), sim_sign=dict( argstr="--sim-sign %s", ), simulation=dict( argstr="--sim %s %d %f %s", ), subject_id=dict(), subjects_dir=dict(), surf=dict( argstr="--surf %s %s %s", requires=["subject_id", "hemi"], ), surf_geo=dict( usedefault=True, ), synth=dict( argstr="--synth", ), uniform=dict( argstr="--uniform %f %f", ), var_fwhm=dict( argstr="--var-fwhm %f", ), vox_dump=dict( argstr="--voxdump %d %d %d", ), weight_file=dict( extensions=None, xor=["weighted_ls"], ), weight_inv=dict( argstr="--w-inv", xor=["weighted_ls"], ), weight_sqrt=dict( argstr="--w-sqrt", xor=["weighted_ls"], ), weighted_ls=dict( argstr="--wls %s", extensions=None, xor=("weight_file", "weight_inv", "weight_sqrt"), ), ) inputs = GLMFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GLMFit_outputs(): output_map = dict( beta_file=dict( extensions=None, ), dof_file=dict( extensions=None, ), error_file=dict( extensions=None, ), error_stddev_file=dict( extensions=None, ), error_var_file=dict( extensions=None, ), estimate_file=dict( extensions=None, ), frame_eigenvectors=dict( extensions=None, ), ftest_file=dict(), fwhm_file=dict( extensions=None, ), gamma_file=dict(), gamma_var_file=dict(), glm_dir=dict(), mask_file=dict( extensions=None, ), sig_file=dict(), singular_values=dict( extensions=None, ), spatial_eigenvectors=dict( extensions=None, ), svd_stats_file=dict( extensions=None, ), ) outputs = GLMFit.output_spec() for key, metadata in 
list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_ImageInfo.py000066400000000000000000000022711413403311400262570ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ImageInfo def test_ImageInfo_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, position=1, ), subjects_dir=dict(), ) inputs = ImageInfo.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ImageInfo_outputs(): output_map = dict( TE=dict(), TI=dict(), TR=dict(), data_type=dict(), dimensions=dict(), file_format=dict(), info=dict(), orientation=dict(), out_file=dict( extensions=None, ), ph_enc_dir=dict(), vox_sizes=dict(), ) outputs = ImageInfo.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Jacobian.py000066400000000000000000000025721413403311400261330ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Jacobian def test_Jacobian_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_mappedsurf=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), in_origsurf=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), out_file=dict( argstr="%s", extensions=None, hash_files=False, keep_extension=False, name_source=["in_origsurf"], name_template="%s.jacobian", position=-1, ), subjects_dir=dict(), ) inputs = Jacobian.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value def test_Jacobian_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Jacobian.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_LTAConvert.py000066400000000000000000000057701413403311400264110ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import LTAConvert def test_LTAConvert_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_fsl=dict( argstr="--infsl %s", extensions=None, mandatory=True, xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), ), in_itk=dict( argstr="--initk %s", extensions=None, mandatory=True, xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), ), in_lta=dict( argstr="--inlta %s", mandatory=True, xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), ), in_mni=dict( argstr="--inmni %s", extensions=None, mandatory=True, xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), ), in_niftyreg=dict( argstr="--inniftyreg %s", extensions=None, mandatory=True, xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), ), in_reg=dict( argstr="--inreg %s", extensions=None, mandatory=True, xor=("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk"), ), invert=dict( argstr="--invert", ), ltavox2vox=dict( argstr="--ltavox2vox", requires=["out_lta"], ), out_fsl=dict( argstr="--outfsl %s", ), out_itk=dict( argstr="--outitk %s", ), out_lta=dict( argstr="--outlta %s", ), out_mni=dict( argstr="--outmni %s", ), out_reg=dict( argstr="--outreg %s", ), source_file=dict( argstr="--src %s", extensions=None, ), target_conform=dict( argstr="--trgconform", ), target_file=dict( argstr="--trg %s", extensions=None, ), ) inputs = 
LTAConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_LTAConvert_outputs(): output_map = dict( out_fsl=dict( extensions=None, ), out_itk=dict( extensions=None, ), out_lta=dict( extensions=None, ), out_mni=dict( extensions=None, ), out_reg=dict( extensions=None, ), ) outputs = LTAConvert.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Label2Annot.py000066400000000000000000000031351413403311400265220ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import Label2Annot def test_Label2Annot_inputs(): input_map = dict( args=dict( argstr="%s", ), color_table=dict( argstr="--ctab %s", extensions=None, ), copy_inputs=dict(), environ=dict( nohash=True, usedefault=True, ), hemisphere=dict( argstr="--hemi %s", mandatory=True, ), in_labels=dict( argstr="--l %s...", mandatory=True, ), keep_max=dict( argstr="--maxstatwinner", ), orig=dict( extensions=None, mandatory=True, ), out_annot=dict( argstr="--a %s", mandatory=True, ), subject_id=dict( argstr="--s %s", mandatory=True, usedefault=True, ), subjects_dir=dict(), verbose_off=dict( argstr="--noverbose", ), ) inputs = Label2Annot.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Label2Annot_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Label2Annot.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Label2Label.py000066400000000000000000000040551413403311400264640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import Label2Label def test_Label2Label_inputs(): input_map = dict( args=dict( argstr="%s", ), copy_inputs=dict(), environ=dict( nohash=True, usedefault=True, ), hemisphere=dict( argstr="--hemi %s", mandatory=True, ), out_file=dict( argstr="--trglabel %s", extensions=None, hash_files=False, keep_extension=True, name_source=["source_label"], name_template="%s_converted", ), registration_method=dict( argstr="--regmethod %s", usedefault=True, ), source_label=dict( argstr="--srclabel %s", extensions=None, mandatory=True, ), source_sphere_reg=dict( extensions=None, mandatory=True, ), source_subject=dict( argstr="--srcsubject %s", mandatory=True, ), source_white=dict( extensions=None, mandatory=True, ), sphere_reg=dict( extensions=None, mandatory=True, ), subject_id=dict( argstr="--trgsubject %s", mandatory=True, usedefault=True, ), subjects_dir=dict(), white=dict( extensions=None, mandatory=True, ), ) inputs = Label2Label.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Label2Label_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Label2Label.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Label2Vol.py000066400000000000000000000062461413403311400262110ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import Label2Vol def test_Label2Vol_inputs(): input_map = dict( annot_file=dict( argstr="--annot %s", copyfile=False, extensions=None, mandatory=True, requires=("subject_id", "hemi"), 
xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), ), aparc_aseg=dict( argstr="--aparc+aseg", mandatory=True, xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fill_thresh=dict( argstr="--fillthresh %g", ), hemi=dict( argstr="--hemi %s", ), identity=dict( argstr="--identity", xor=("reg_file", "reg_header", "identity"), ), invert_mtx=dict( argstr="--invertmtx", ), label_file=dict( argstr="--label %s...", copyfile=False, mandatory=True, xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), ), label_hit_file=dict( argstr="--hits %s", extensions=None, ), label_voxel_volume=dict( argstr="--labvoxvol %f", ), map_label_stat=dict( argstr="--label-stat %s", extensions=None, ), native_vox2ras=dict( argstr="--native-vox2ras", ), proj=dict( argstr="--proj %s %f %f %f", requires=("subject_id", "hemi"), ), reg_file=dict( argstr="--reg %s", extensions=None, xor=("reg_file", "reg_header", "identity"), ), reg_header=dict( argstr="--regheader %s", extensions=None, xor=("reg_file", "reg_header", "identity"), ), seg_file=dict( argstr="--seg %s", copyfile=False, extensions=None, mandatory=True, xor=("label_file", "annot_file", "seg_file", "aparc_aseg"), ), subject_id=dict( argstr="--subject %s", ), subjects_dir=dict(), surface=dict( argstr="--surf %s", ), template_file=dict( argstr="--temp %s", extensions=None, mandatory=True, ), vol_label_file=dict( argstr="--o %s", extensions=None, genfile=True, ), ) inputs = Label2Vol.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Label2Vol_outputs(): output_map = dict( vol_label_file=dict( extensions=None, ), ) outputs = Label2Vol.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MNIBiasCorrection.py000066400000000000000000000035401413403311400276730ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import MNIBiasCorrection def test_MNIBiasCorrection_inputs(): input_map = dict( args=dict( argstr="%s", ), distance=dict( argstr="--distance %d", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="--i %s", extensions=None, mandatory=True, ), iterations=dict( argstr="--n %d", usedefault=True, ), mask=dict( argstr="--mask %s", extensions=None, ), no_rescale=dict( argstr="--no-rescale", ), out_file=dict( argstr="--o %s", extensions=None, hash_files=False, keep_extension=True, name_source=["in_file"], name_template="%s_output", ), protocol_iterations=dict( argstr="--proto-iters %d", ), shrink=dict( argstr="--shrink %d", ), stop=dict( argstr="--stop %f", ), subjects_dir=dict(), transform=dict( argstr="--uchar %s", extensions=None, ), ) inputs = MNIBiasCorrection.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MNIBiasCorrection_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MNIBiasCorrection.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MPRtoMNI305.py000066400000000000000000000023641413403311400262210ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import MPRtoMNI305 def test_MPRtoMNI305_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, usedefault=True, ), reference_dir=dict( mandatory=True, usedefault=True, ), subjects_dir=dict(), target=dict( 
mandatory=True, usedefault=True, ), ) inputs = MPRtoMNI305.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MPRtoMNI305_outputs(): output_map = dict( log_file=dict( extensions=None, usedefault=True, ), out_file=dict( extensions=None, ), ) outputs = MPRtoMNI305.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MRIConvert.py000066400000000000000000000162621413403311400264160ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import MRIConvert def test_MRIConvert_inputs(): input_map = dict( apply_inv_transform=dict( argstr="--apply_inverse_transform %s", extensions=None, ), apply_transform=dict( argstr="--apply_transform %s", extensions=None, ), args=dict( argstr="%s", ), ascii=dict( argstr="--ascii", ), autoalign_matrix=dict( argstr="--autoalign %s", extensions=None, ), color_file=dict( argstr="--color_file %s", extensions=None, ), conform=dict( argstr="--conform", ), conform_min=dict( argstr="--conform_min", ), conform_size=dict( argstr="--conform_size %s", ), crop_center=dict( argstr="--crop %d %d %d", ), crop_gdf=dict( argstr="--crop_gdf", ), crop_size=dict( argstr="--cropsize %d %d %d", ), cut_ends=dict( argstr="--cutends %d", ), cw256=dict( argstr="--cw256", ), devolve_transform=dict( argstr="--devolvexfm %s", ), drop_n=dict( argstr="--ndrop %d", ), environ=dict( nohash=True, usedefault=True, ), fill_parcellation=dict( argstr="--fill_parcellation", ), force_ras=dict( argstr="--force_ras_good", ), frame=dict( argstr="--frame %d", ), frame_subsample=dict( argstr="--fsubsample %d %d %d", ), fwhm=dict( argstr="--fwhm %f", ), in_center=dict( argstr="--in_center %s", ), in_file=dict( argstr="--input_volume %s", extensions=None, 
mandatory=True, position=-2, ), in_i_dir=dict( argstr="--in_i_direction %f %f %f", ), in_i_size=dict( argstr="--in_i_size %d", ), in_info=dict( argstr="--in_info", ), in_j_dir=dict( argstr="--in_j_direction %f %f %f", ), in_j_size=dict( argstr="--in_j_size %d", ), in_k_dir=dict( argstr="--in_k_direction %f %f %f", ), in_k_size=dict( argstr="--in_k_size %d", ), in_like=dict( argstr="--in_like %s", extensions=None, ), in_matrix=dict( argstr="--in_matrix", ), in_orientation=dict( argstr="--in_orientation %s", ), in_scale=dict( argstr="--scale %f", ), in_stats=dict( argstr="--in_stats", ), in_type=dict( argstr="--in_type %s", ), invert_contrast=dict( argstr="--invert_contrast %f", ), midframe=dict( argstr="--mid-frame", ), no_change=dict( argstr="--nochange", ), no_scale=dict( argstr="--no_scale 1", ), no_translate=dict( argstr="--no_translate", ), no_write=dict( argstr="--no_write", ), out_center=dict( argstr="--out_center %f %f %f", ), out_datatype=dict( argstr="--out_data_type %s", ), out_file=dict( argstr="--output_volume %s", extensions=None, genfile=True, position=-1, ), out_i_count=dict( argstr="--out_i_count %d", ), out_i_dir=dict( argstr="--out_i_direction %f %f %f", ), out_i_size=dict( argstr="--out_i_size %d", ), out_info=dict( argstr="--out_info", ), out_j_count=dict( argstr="--out_j_count %d", ), out_j_dir=dict( argstr="--out_j_direction %f %f %f", ), out_j_size=dict( argstr="--out_j_size %d", ), out_k_count=dict( argstr="--out_k_count %d", ), out_k_dir=dict( argstr="--out_k_direction %f %f %f", ), out_k_size=dict( argstr="--out_k_size %d", ), out_matrix=dict( argstr="--out_matrix", ), out_orientation=dict( argstr="--out_orientation %s", ), out_scale=dict( argstr="--out-scale %d", ), out_stats=dict( argstr="--out_stats", ), out_type=dict( argstr="--out_type %s", ), parse_only=dict( argstr="--parse_only", ), read_only=dict( argstr="--read_only", ), reorder=dict( argstr="--reorder %d %d %d", ), resample_type=dict( argstr="--resample_type %s", ), 
reslice_like=dict( argstr="--reslice_like %s", extensions=None, ), sdcm_list=dict( argstr="--sdcmlist %s", extensions=None, ), skip_n=dict( argstr="--nskip %d", ), slice_bias=dict( argstr="--slice-bias %f", ), slice_crop=dict( argstr="--slice-crop %d %d", ), slice_reverse=dict( argstr="--slice-reverse", ), smooth_parcellation=dict( argstr="--smooth_parcellation", ), sphinx=dict( argstr="--sphinx", ), split=dict( argstr="--split", ), status_file=dict( argstr="--status %s", extensions=None, ), subject_name=dict( argstr="--subject_name %s", ), subjects_dir=dict(), te=dict( argstr="-te %d", ), template_info=dict( argstr="--template_info", ), template_type=dict( argstr="--template_type %s", ), ti=dict( argstr="-ti %d", ), tr=dict( argstr="-tr %d", ), unwarp_gradient=dict( argstr="--unwarp_gradient_nonlinearity", ), vox_size=dict( argstr="-voxsize %f %f %f", ), zero_ge_z_offset=dict( argstr="--zero_ge_z_offset", ), zero_outlines=dict( argstr="--zero_outlines", ), ) inputs = MRIConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRIConvert_outputs(): output_map = dict( out_file=dict(), ) outputs = MRIConvert.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MRICoreg.py000066400000000000000000000070301413403311400260260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import MRICoreg def test_MRICoreg_inputs(): input_map = dict( args=dict( argstr="%s", ), brute_force_limit=dict( argstr="--bf-lim %g", xor=["no_brute_force"], ), brute_force_samples=dict( argstr="--bf-nsamp %d", xor=["no_brute_force"], ), conform_reference=dict( argstr="--conf-ref", ), dof=dict( argstr="--dof %d", ), environ=dict( nohash=True, usedefault=True, ), 
ftol=dict( argstr="--ftol %e", ), initial_rotation=dict( argstr="--rot %g %g %g", ), initial_scale=dict( argstr="--scale %g %g %g", ), initial_shear=dict( argstr="--shear %g %g %g", ), initial_translation=dict( argstr="--trans %g %g %g", ), linmintol=dict( argstr="--linmintol %e", ), max_iters=dict( argstr="--nitersmax %d", ), no_brute_force=dict( argstr="--no-bf", ), no_coord_dithering=dict( argstr="--no-coord-dither", ), no_cras0=dict( argstr="--no-cras0", ), no_intensity_dithering=dict( argstr="--no-intensity-dither", ), no_smooth=dict( argstr="--no-smooth", ), num_threads=dict( argstr="--threads %d", ), out_lta_file=dict( argstr="--lta %s", usedefault=True, ), out_params_file=dict( argstr="--params %s", ), out_reg_file=dict( argstr="--regdat %s", ), ref_fwhm=dict( argstr="--ref-fwhm", ), reference_file=dict( argstr="--ref %s", copyfile=False, extensions=None, mandatory=True, xor=["subject_id"], ), reference_mask=dict( argstr="--ref-mask %s", position=2, ), saturation_threshold=dict( argstr="--sat %g", ), sep=dict( argstr="--sep %s...", ), source_file=dict( argstr="--mov %s", copyfile=False, extensions=None, mandatory=True, ), source_mask=dict( argstr="--mov-mask", ), source_oob=dict( argstr="--mov-oob", ), subject_id=dict( argstr="--s %s", mandatory=True, position=1, requires=["subjects_dir"], xor=["reference_file"], ), subjects_dir=dict( argstr="--sd %s", ), ) inputs = MRICoreg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRICoreg_outputs(): output_map = dict( out_lta_file=dict( extensions=None, ), out_params_file=dict( extensions=None, ), out_reg_file=dict( extensions=None, ), ) outputs = MRICoreg.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MRIFill.py000066400000000000000000000027151413403311400256620ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import MRIFill def test_MRIFill_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), log_file=dict( argstr="-a %s", extensions=None, ), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, ), segmentation=dict( argstr="-segmentation %s", extensions=None, ), subjects_dir=dict(), transform=dict( argstr="-xform %s", extensions=None, ), ) inputs = MRIFill.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRIFill_outputs(): output_map = dict( log_file=dict( extensions=None, ), out_file=dict( extensions=None, ), ) outputs = MRIFill.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MRIMarchingCubes.py000066400000000000000000000026011413403311400275000ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import MRIMarchingCubes def test_MRIMarchingCubes_inputs(): input_map = dict( args=dict( argstr="%s", ), connectivity_value=dict( argstr="%d", position=-1, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), label_value=dict( argstr="%d", mandatory=True, position=2, ), out_file=dict( argstr="./%s", extensions=None, genfile=True, position=-2, ), subjects_dir=dict(), ) inputs = MRIMarchingCubes.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value def test_MRIMarchingCubes_outputs(): output_map = dict( surface=dict( extensions=None, ), ) outputs = MRIMarchingCubes.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MRIPretess.py000066400000000000000000000032361413403311400264200ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import MRIPretess def test_MRIPretess_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_filled=dict( argstr="%s", extensions=None, mandatory=True, position=-4, ), in_norm=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), keep=dict( argstr="-keep", ), label=dict( argstr="%s", mandatory=True, position=-3, usedefault=True, ), nocorners=dict( argstr="-nocorners", ), out_file=dict( argstr="%s", extensions=None, keep_extension=True, name_source=["in_filled"], name_template="%s_pretesswm", position=-1, ), subjects_dir=dict(), test=dict( argstr="-test", ), ) inputs = MRIPretess.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRIPretess_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MRIPretess.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MRISPreproc.py000066400000000000000000000053751413403311400265360ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import MRISPreproc def test_MRISPreproc_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), 
fsgd_file=dict( argstr="--fsgd %s", extensions=None, xor=("subjects", "fsgd_file", "subject_file"), ), fwhm=dict( argstr="--fwhm %f", xor=["num_iters"], ), fwhm_source=dict( argstr="--fwhm-src %f", xor=["num_iters_source"], ), hemi=dict( argstr="--hemi %s", mandatory=True, ), num_iters=dict( argstr="--niters %d", xor=["fwhm"], ), num_iters_source=dict( argstr="--niterssrc %d", xor=["fwhm_source"], ), out_file=dict( argstr="--out %s", extensions=None, genfile=True, ), proj_frac=dict( argstr="--projfrac %s", ), smooth_cortex_only=dict( argstr="--smooth-cortex-only", ), source_format=dict( argstr="--srcfmt %s", ), subject_file=dict( argstr="--f %s", extensions=None, xor=("subjects", "fsgd_file", "subject_file"), ), subjects=dict( argstr="--s %s...", xor=("subjects", "fsgd_file", "subject_file"), ), subjects_dir=dict(), surf_area=dict( argstr="--area %s", xor=("surf_measure", "surf_measure_file", "surf_area"), ), surf_dir=dict( argstr="--surfdir %s", ), surf_measure=dict( argstr="--meas %s", xor=("surf_measure", "surf_measure_file", "surf_area"), ), surf_measure_file=dict( argstr="--is %s...", xor=("surf_measure", "surf_measure_file", "surf_area"), ), target=dict( argstr="--target %s", mandatory=True, ), vol_measure_file=dict( argstr="--iv %s %s...", ), ) inputs = MRISPreproc.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRISPreproc_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MRISPreproc.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MRISPreprocReconAll.py000066400000000000000000000065571413403311400301610ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import MRISPreprocReconAll def 
test_MRISPreprocReconAll_inputs(): input_map = dict( args=dict( argstr="%s", ), copy_inputs=dict(), environ=dict( nohash=True, usedefault=True, ), fsgd_file=dict( argstr="--fsgd %s", extensions=None, xor=("subjects", "fsgd_file", "subject_file"), ), fwhm=dict( argstr="--fwhm %f", xor=["num_iters"], ), fwhm_source=dict( argstr="--fwhm-src %f", xor=["num_iters_source"], ), hemi=dict( argstr="--hemi %s", mandatory=True, ), lh_surfreg_target=dict( extensions=None, requires=["surfreg_files"], ), num_iters=dict( argstr="--niters %d", xor=["fwhm"], ), num_iters_source=dict( argstr="--niterssrc %d", xor=["fwhm_source"], ), out_file=dict( argstr="--out %s", extensions=None, genfile=True, ), proj_frac=dict( argstr="--projfrac %s", ), rh_surfreg_target=dict( extensions=None, requires=["surfreg_files"], ), smooth_cortex_only=dict( argstr="--smooth-cortex-only", ), source_format=dict( argstr="--srcfmt %s", ), subject_file=dict( argstr="--f %s", extensions=None, xor=("subjects", "fsgd_file", "subject_file"), ), subject_id=dict( argstr="--s %s", usedefault=True, xor=("subjects", "fsgd_file", "subject_file", "subject_id"), ), subjects=dict( argstr="--s %s...", xor=("subjects", "fsgd_file", "subject_file"), ), subjects_dir=dict(), surf_area=dict( argstr="--area %s", xor=("surf_measure", "surf_measure_file", "surf_area"), ), surf_dir=dict( argstr="--surfdir %s", ), surf_measure=dict( argstr="--meas %s", xor=("surf_measure", "surf_measure_file", "surf_area"), ), surf_measure_file=dict( argstr="--meas %s", extensions=None, xor=("surf_measure", "surf_measure_file", "surf_area"), ), surfreg_files=dict( argstr="--surfreg %s", requires=["lh_surfreg_target", "rh_surfreg_target"], ), target=dict( argstr="--target %s", mandatory=True, ), vol_measure_file=dict( argstr="--iv %s %s...", ), ) inputs = MRISPreprocReconAll.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def 
test_MRISPreprocReconAll_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MRISPreprocReconAll.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MRITessellate.py000066400000000000000000000026111413403311400270740ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import MRITessellate def test_MRITessellate_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), label_value=dict( argstr="%d", mandatory=True, position=-2, ), out_file=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), subjects_dir=dict(), tesselate_all_voxels=dict( argstr="-a", ), use_real_RAS_coordinates=dict( argstr="-n", ), ) inputs = MRITessellate.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRITessellate_outputs(): output_map = dict( surface=dict( extensions=None, ), ) outputs = MRITessellate.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MRIsCALabel.py000066400000000000000000000042731413403311400264030ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import MRIsCALabel def test_MRIsCALabel_inputs(): input_map = dict( args=dict( argstr="%s", ), aseg=dict( argstr="-aseg %s", extensions=None, ), canonsurf=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), classifier=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), copy_inputs=dict(), curv=dict( 
extensions=None, mandatory=True, ), environ=dict( nohash=True, usedefault=True, ), hemisphere=dict( argstr="%s", mandatory=True, position=-4, ), label=dict( argstr="-l %s", extensions=None, ), num_threads=dict(), out_file=dict( argstr="%s", extensions=None, hash_files=False, keep_extension=True, name_source=["hemisphere"], name_template="%s.aparc.annot", position=-1, ), seed=dict( argstr="-seed %d", ), smoothwm=dict( extensions=None, mandatory=True, ), subject_id=dict( argstr="%s", mandatory=True, position=-5, usedefault=True, ), subjects_dir=dict(), sulc=dict( extensions=None, mandatory=True, ), ) inputs = MRIsCALabel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRIsCALabel_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MRIsCALabel.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MRIsCalc.py000066400000000000000000000031301413403311400260110ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import MRIsCalc def test_MRIsCalc_inputs(): input_map = dict( action=dict( argstr="%s", mandatory=True, position=-2, ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file1=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), in_file2=dict( argstr="%s", extensions=None, position=-1, xor=["in_float", "in_int"], ), in_float=dict( argstr="%f", position=-1, xor=["in_file2", "in_int"], ), in_int=dict( argstr="%d", position=-1, xor=["in_file2", "in_float"], ), out_file=dict( argstr="-o %s", extensions=None, mandatory=True, ), subjects_dir=dict(), ) inputs = MRIsCalc.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value def test_MRIsCalc_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MRIsCalc.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MRIsCombine.py000066400000000000000000000022111413403311400265220ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import MRIsCombine def test_MRIsCombine_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( argstr="--combinesurfs %s", mandatory=True, position=1, ), out_file=dict( argstr="%s", extensions=None, genfile=True, mandatory=True, position=-1, ), subjects_dir=dict(), ) inputs = MRIsCombine.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRIsCombine_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MRIsCombine.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MRIsConvert.py000066400000000000000000000050561413403311400266000ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import MRIsConvert def test_MRIsConvert_inputs(): input_map = dict( annot_file=dict( argstr="--annot %s", extensions=None, ), args=dict( argstr="%s", ), dataarray_num=dict( argstr="--da_num %d", ), environ=dict( nohash=True, usedefault=True, ), functional_file=dict( argstr="-f %s", extensions=None, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), label_file=dict( argstr="--label %s", extensions=None, ), labelstats_outfile=dict( 
argstr="--labelstats %s", extensions=None, ), normal=dict( argstr="-n", ), origname=dict( argstr="-o %s", ), out_datatype=dict( mandatory=True, xor=["out_file"], ), out_file=dict( argstr="%s", extensions=None, genfile=True, mandatory=True, position=-1, xor=["out_datatype"], ), parcstats_file=dict( argstr="--parcstats %s", extensions=None, ), patch=dict( argstr="-p", ), rescale=dict( argstr="-r", ), scalarcurv_file=dict( argstr="-c %s", extensions=None, ), scale=dict( argstr="-s %.3f", ), subjects_dir=dict(), talairachxfm_subjid=dict( argstr="-t %s", ), to_scanner=dict( argstr="--to-scanner", ), to_tkr=dict( argstr="--to-tkr", ), vertex=dict( argstr="-v", ), xyz_ascii=dict( argstr="-a", ), ) inputs = MRIsConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRIsConvert_outputs(): output_map = dict( converted=dict( extensions=None, ), ) outputs = MRIsConvert.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MRIsExpand.py000066400000000000000000000036111413403311400263720ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import MRIsExpand def test_MRIsExpand_inputs(): input_map = dict( args=dict( argstr="%s", ), distance=dict( argstr="%g", mandatory=True, position=-2, ), dt=dict( argstr="-T %g", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=-3, ), nsurfaces=dict( argstr="-N %d", ), out_name=dict( argstr="%s", position=-1, usedefault=True, ), pial=dict( argstr="-pial %s", copyfile=False, ), smooth_averages=dict( argstr="-A %d", ), sphere=dict( copyfile=False, usedefault=True, ), spring=dict( argstr="-S %g", ), subjects_dir=dict(), thickness=dict( 
argstr="-thickness", ), thickness_name=dict( argstr="-thickness_name %s", copyfile=False, ), write_iterations=dict( argstr="-W %d", ), ) inputs = MRIsExpand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRIsExpand_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MRIsExpand.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MRIsInflate.py000066400000000000000000000030211413403311400265300ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import MRIsInflate def test_MRIsInflate_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-2, ), no_save_sulc=dict( argstr="-no-save-sulc", xor=["out_sulc"], ), out_file=dict( argstr="%s", extensions=None, hash_files=False, keep_extension=True, name_source=["in_file"], name_template="%s.inflated", position=-1, ), out_sulc=dict( extensions=None, xor=["no_save_sulc"], ), subjects_dir=dict(), ) inputs = MRIsInflate.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRIsInflate_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_sulc=dict( extensions=None, ), ) outputs = MRIsInflate.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MS_LDA.py000066400000000000000000000034151413403311400254210ustar00rootroot00000000000000# AUTO-GENERATED by 
tools/checkspecs.py - DO NOT EDIT from ..model import MS_LDA def test_MS_LDA_inputs(): input_map = dict( args=dict( argstr="%s", ), conform=dict( argstr="-conform", ), environ=dict( nohash=True, usedefault=True, ), images=dict( argstr="%s", copyfile=False, mandatory=True, position=-1, ), label_file=dict( argstr="-label %s", extensions=None, ), lda_labels=dict( argstr="-lda %s", mandatory=True, sep=" ", ), mask_file=dict( argstr="-mask %s", extensions=None, ), shift=dict( argstr="-shift %d", ), subjects_dir=dict(), use_weights=dict( argstr="-W", ), vol_synth_file=dict( argstr="-synth %s", extensions=None, mandatory=True, ), weight_file=dict( argstr="-weight %s", extensions=None, mandatory=True, ), ) inputs = MS_LDA.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MS_LDA_outputs(): output_map = dict( vol_synth_file=dict( extensions=None, ), weight_file=dict( extensions=None, ), ) outputs = MS_LDA.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MakeAverageSubject.py000066400000000000000000000021431413403311400301070ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import MakeAverageSubject def test_MakeAverageSubject_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), out_name=dict( argstr="--out %s", extensions=None, usedefault=True, ), subjects_dir=dict(), subjects_ids=dict( argstr="--subjects %s", mandatory=True, sep=" ", ), ) inputs = MakeAverageSubject.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MakeAverageSubject_outputs(): output_map = dict( 
average_subject_name=dict(), ) outputs = MakeAverageSubject.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_MakeSurfaces.py000066400000000000000000000055221413403311400267740ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import MakeSurfaces def test_MakeSurfaces_inputs(): input_map = dict( args=dict( argstr="%s", ), copy_inputs=dict(), environ=dict( nohash=True, usedefault=True, ), fix_mtl=dict( argstr="-fix_mtl", ), hemisphere=dict( argstr="%s", mandatory=True, position=-1, ), in_T1=dict( argstr="-T1 %s", extensions=None, ), in_aseg=dict( argstr="-aseg %s", extensions=None, ), in_filled=dict( extensions=None, mandatory=True, ), in_label=dict( extensions=None, xor=["noaparc"], ), in_orig=dict( argstr="-orig %s", extensions=None, mandatory=True, ), in_white=dict( extensions=None, ), in_wm=dict( extensions=None, mandatory=True, ), longitudinal=dict( argstr="-long", ), maximum=dict( argstr="-max %.1f", ), mgz=dict( argstr="-mgz", ), no_white=dict( argstr="-nowhite", ), noaparc=dict( argstr="-noaparc", xor=["in_label"], ), orig_pial=dict( argstr="-orig_pial %s", extensions=None, requires=["in_label"], ), orig_white=dict( argstr="-orig_white %s", extensions=None, ), subject_id=dict( argstr="%s", mandatory=True, position=-2, usedefault=True, ), subjects_dir=dict(), white=dict( argstr="-white %s", ), white_only=dict( argstr="-whiteonly", ), ) inputs = MakeSurfaces.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MakeSurfaces_outputs(): output_map = dict( out_area=dict( extensions=None, ), out_cortex=dict( extensions=None, ), out_curv=dict( extensions=None, ), out_pial=dict( extensions=None, ), out_thickness=dict( extensions=None, ), 
out_white=dict( extensions=None, ), ) outputs = MakeSurfaces.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Normalize.py000066400000000000000000000030371413403311400263620ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Normalize def test_Normalize_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), gradient=dict( argstr="-g %d", ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), mask=dict( argstr="-mask %s", extensions=None, ), out_file=dict( argstr="%s", extensions=None, hash_files=False, keep_extension=True, name_source=["in_file"], name_template="%s_norm", position=-1, ), segmentation=dict( argstr="-aseg %s", extensions=None, ), subjects_dir=dict(), transform=dict( extensions=None, ), ) inputs = Normalize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Normalize_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Normalize.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_OneSampleTTest.py000066400000000000000000000142341413403311400272720ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import OneSampleTTest def test_OneSampleTTest_inputs(): input_map = dict( allow_ill_cond=dict( argstr="--illcond", ), allow_repeated_subjects=dict( argstr="--allowsubjrep", ), args=dict( argstr="%s", ), calc_AR1=dict( argstr="--tar1", ), check_opts=dict( argstr="--checkopts", ), compute_log_y=dict( argstr="--logy", 
), contrast=dict( argstr="--C %s...", ), cortex=dict( argstr="--cortex", xor=["label_file"], ), debug=dict( argstr="--debug", ), design=dict( argstr="--X %s", extensions=None, xor=("fsgd", "design", "one_sample"), ), diag=dict( argstr="--diag %d", ), diag_cluster=dict( argstr="--diag-cluster", ), environ=dict( nohash=True, usedefault=True, ), fixed_fx_dof=dict( argstr="--ffxdof %d", xor=["fixed_fx_dof_file"], ), fixed_fx_dof_file=dict( argstr="--ffxdofdat %d", extensions=None, xor=["fixed_fx_dof"], ), fixed_fx_var=dict( argstr="--yffxvar %s", extensions=None, ), force_perm=dict( argstr="--perm-force", ), fsgd=dict( argstr="--fsgd %s %s", xor=("fsgd", "design", "one_sample"), ), fwhm=dict( argstr="--fwhm %f", ), glm_dir=dict( argstr="--glmdir %s", genfile=True, ), hemi=dict(), in_file=dict( argstr="--y %s", copyfile=False, extensions=None, mandatory=True, ), invert_mask=dict( argstr="--mask-inv", ), label_file=dict( argstr="--label %s", extensions=None, xor=["cortex"], ), mask_file=dict( argstr="--mask %s", extensions=None, ), no_contrast_ok=dict( argstr="--no-contrasts-ok", ), no_est_fwhm=dict( argstr="--no-est-fwhm", ), no_mask_smooth=dict( argstr="--no-mask-smooth", ), no_prune=dict( argstr="--no-prune", xor=["prunethresh"], ), one_sample=dict( argstr="--osgm", xor=("one_sample", "fsgd", "design", "contrast"), ), pca=dict( argstr="--pca", ), per_voxel_reg=dict( argstr="--pvr %s...", ), profile=dict( argstr="--profile %d", ), prune=dict( argstr="--prune", ), prune_thresh=dict( argstr="--prune_thr %f", xor=["noprune"], ), resynth_test=dict( argstr="--resynthtest %d", ), save_cond=dict( argstr="--save-cond", ), save_estimate=dict( argstr="--yhat-save", ), save_res_corr_mtx=dict( argstr="--eres-scm", ), save_residual=dict( argstr="--eres-save", ), seed=dict( argstr="--seed %d", ), self_reg=dict( argstr="--selfreg %d %d %d", ), sim_done_file=dict( argstr="--sim-done %s", extensions=None, ), sim_sign=dict( argstr="--sim-sign %s", ), simulation=dict( argstr="--sim %s %d 
%f %s", ), subject_id=dict(), subjects_dir=dict(), surf=dict( argstr="--surf %s %s %s", requires=["subject_id", "hemi"], ), surf_geo=dict( usedefault=True, ), synth=dict( argstr="--synth", ), uniform=dict( argstr="--uniform %f %f", ), var_fwhm=dict( argstr="--var-fwhm %f", ), vox_dump=dict( argstr="--voxdump %d %d %d", ), weight_file=dict( extensions=None, xor=["weighted_ls"], ), weight_inv=dict( argstr="--w-inv", xor=["weighted_ls"], ), weight_sqrt=dict( argstr="--w-sqrt", xor=["weighted_ls"], ), weighted_ls=dict( argstr="--wls %s", extensions=None, xor=("weight_file", "weight_inv", "weight_sqrt"), ), ) inputs = OneSampleTTest.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_OneSampleTTest_outputs(): output_map = dict( beta_file=dict( extensions=None, ), dof_file=dict( extensions=None, ), error_file=dict( extensions=None, ), error_stddev_file=dict( extensions=None, ), error_var_file=dict( extensions=None, ), estimate_file=dict( extensions=None, ), frame_eigenvectors=dict( extensions=None, ), ftest_file=dict(), fwhm_file=dict( extensions=None, ), gamma_file=dict(), gamma_var_file=dict(), glm_dir=dict(), mask_file=dict( extensions=None, ), sig_file=dict(), singular_values=dict( extensions=None, ), spatial_eigenvectors=dict( extensions=None, ), svd_stats_file=dict( extensions=None, ), ) outputs = OneSampleTTest.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Paint.py000066400000000000000000000027021413403311400254730ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import Paint def test_Paint_inputs(): input_map = dict( args=dict( argstr="%s", ), averages=dict( argstr="-a %d", ), environ=dict( nohash=True, usedefault=True, ), 
in_surf=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), out_file=dict( argstr="%s", extensions=None, hash_files=False, keep_extension=False, name_source=["in_surf"], name_template="%s.avg_curv", position=-1, ), subjects_dir=dict(), template=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), template_param=dict(), ) inputs = Paint.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Paint_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Paint.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_ParcellationStats.py000066400000000000000000000061561413403311400300630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ParcellationStats def test_ParcellationStats_inputs(): input_map = dict( args=dict( argstr="%s", ), aseg=dict( extensions=None, mandatory=True, ), brainmask=dict( extensions=None, mandatory=True, ), copy_inputs=dict(), cortex_label=dict( extensions=None, ), environ=dict( nohash=True, usedefault=True, ), hemisphere=dict( argstr="%s", mandatory=True, position=-2, ), in_annotation=dict( argstr="-a %s", extensions=None, xor=["in_label"], ), in_cortex=dict( argstr="-cortex %s", extensions=None, ), in_label=dict( argstr="-l %s", extensions=None, xor=["in_annotatoin", "out_color"], ), lh_pial=dict( extensions=None, mandatory=True, ), lh_white=dict( extensions=None, mandatory=True, ), mgz=dict( argstr="-mgz", ), out_color=dict( argstr="-c %s", extensions=None, genfile=True, xor=["in_label"], ), out_table=dict( argstr="-f %s", extensions=None, genfile=True, requires=["tabular_output"], ), rh_pial=dict( extensions=None, mandatory=True, ), rh_white=dict( extensions=None, 
mandatory=True, ), ribbon=dict( extensions=None, mandatory=True, ), subject_id=dict( argstr="%s", mandatory=True, position=-3, usedefault=True, ), subjects_dir=dict(), surface=dict( argstr="%s", position=-1, ), tabular_output=dict( argstr="-b", ), th3=dict( argstr="-th3", requires=["cortex_label"], ), thickness=dict( extensions=None, mandatory=True, ), transform=dict( extensions=None, mandatory=True, ), wm=dict( extensions=None, mandatory=True, ), ) inputs = ParcellationStats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ParcellationStats_outputs(): output_map = dict( out_color=dict( extensions=None, ), out_table=dict( extensions=None, ), ) outputs = ParcellationStats.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_ParseDICOMDir.py000066400000000000000000000023401413403311400267030ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import ParseDICOMDir def test_ParseDICOMDir_inputs(): input_map = dict( args=dict( argstr="%s", ), dicom_dir=dict( argstr="--d %s", mandatory=True, ), dicom_info_file=dict( argstr="--o %s", extensions=None, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), sortbyrun=dict( argstr="--sortbyrun", ), subjects_dir=dict(), summarize=dict( argstr="--summarize", ), ) inputs = ParseDICOMDir.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ParseDICOMDir_outputs(): output_map = dict( dicom_info_file=dict( extensions=None, ), ) outputs = ParseDICOMDir.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_ReconAll.py000066400000000000000000000162771413403311400261330ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import ReconAll def test_ReconAll_inputs(): input_map = dict( FLAIR_file=dict( argstr="-FLAIR %s", extensions=None, min_ver="5.3.0", ), T1_files=dict( argstr="-i %s...", ), T2_file=dict( argstr="-T2 %s", extensions=None, min_ver="5.3.0", ), args=dict( argstr="%s", ), big_ventricles=dict( argstr="-bigventricles", ), brainstem=dict( argstr="-brainstem-structures", ), directive=dict( argstr="-%s", position=0, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), expert=dict( argstr="-expert %s", extensions=None, ), flags=dict( argstr="%s", ), hemi=dict( argstr="-hemi %s", ), hippocampal_subfields_T1=dict( argstr="-hippocampal-subfields-T1", min_ver="6.0.0", ), hippocampal_subfields_T2=dict( argstr="-hippocampal-subfields-T2 %s %s", min_ver="6.0.0", ), hires=dict( argstr="-hires", min_ver="6.0.0", ), mprage=dict( argstr="-mprage", ), mri_aparc2aseg=dict( xor=["expert"], ), mri_ca_label=dict( xor=["expert"], ), mri_ca_normalize=dict( xor=["expert"], ), mri_ca_register=dict( xor=["expert"], ), mri_edit_wm_with_aseg=dict( xor=["expert"], ), mri_em_register=dict( xor=["expert"], ), mri_fill=dict( xor=["expert"], ), mri_mask=dict( xor=["expert"], ), mri_normalize=dict( xor=["expert"], ), mri_pretess=dict( xor=["expert"], ), mri_remove_neck=dict( xor=["expert"], ), mri_segment=dict( xor=["expert"], ), mri_segstats=dict( xor=["expert"], ), mri_tessellate=dict( xor=["expert"], ), mri_watershed=dict( xor=["expert"], ), mris_anatomical_stats=dict( xor=["expert"], ), mris_ca_label=dict( xor=["expert"], ), mris_fix_topology=dict( xor=["expert"], ), mris_inflate=dict( xor=["expert"], ), mris_make_surfaces=dict( xor=["expert"], ), mris_register=dict( xor=["expert"], ), mris_smooth=dict( xor=["expert"], ), 
mris_sphere=dict( xor=["expert"], ), mris_surf2vol=dict( xor=["expert"], ), mrisp_paint=dict( xor=["expert"], ), openmp=dict( argstr="-openmp %d", ), parallel=dict( argstr="-parallel", ), subject_id=dict( argstr="-subjid %s", usedefault=True, ), subjects_dir=dict( argstr="-sd %s", genfile=True, hash_files=False, ), talairach=dict( xor=["expert"], ), use_FLAIR=dict( argstr="-FLAIRpial", min_ver="5.3.0", xor=["use_T2"], ), use_T2=dict( argstr="-T2pial", min_ver="5.3.0", xor=["use_FLAIR"], ), xopts=dict( argstr="-xopts-%s", ), ) inputs = ReconAll.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ReconAll_outputs(): output_map = dict( BA_stats=dict( altkey="BA", loc="stats", ), T1=dict( extensions=None, loc="mri", ), annot=dict( altkey="*annot", loc="label", ), aparc_a2009s_stats=dict( altkey="aparc.a2009s", loc="stats", ), aparc_aseg=dict( altkey="aparc*aseg", loc="mri", ), aparc_stats=dict( altkey="aparc", loc="stats", ), area_pial=dict( altkey="area.pial", loc="surf", ), aseg=dict( extensions=None, loc="mri", ), aseg_stats=dict( altkey="aseg", loc="stats", ), avg_curv=dict( loc="surf", ), brain=dict( extensions=None, loc="mri", ), brainmask=dict( extensions=None, loc="mri", ), curv=dict( loc="surf", ), curv_pial=dict( altkey="curv.pial", loc="surf", ), curv_stats=dict( altkey="curv", loc="stats", ), entorhinal_exvivo_stats=dict( altkey="entorhinal_exvivo", loc="stats", ), filled=dict( extensions=None, loc="mri", ), graymid=dict( altkey=["graymid", "midthickness"], loc="surf", ), inflated=dict( loc="surf", ), jacobian_white=dict( loc="surf", ), label=dict( altkey="*label", loc="label", ), norm=dict( extensions=None, loc="mri", ), nu=dict( extensions=None, loc="mri", ), orig=dict( extensions=None, loc="mri", ), pial=dict( loc="surf", ), rawavg=dict( extensions=None, loc="mri", ), ribbon=dict( altkey="*ribbon", loc="mri", ), smoothwm=dict( loc="surf", 
), sphere=dict( loc="surf", ), sphere_reg=dict( altkey="sphere.reg", loc="surf", ), subject_id=dict(), subjects_dir=dict(), sulc=dict( loc="surf", ), thickness=dict( loc="surf", ), volume=dict( loc="surf", ), white=dict( loc="surf", ), wm=dict( extensions=None, loc="mri", ), wmparc=dict( extensions=None, loc="mri", ), wmparc_stats=dict( altkey="wmparc", loc="stats", ), ) outputs = ReconAll.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Register.py000066400000000000000000000031131413403311400262010ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import Register def test_Register_inputs(): input_map = dict( args=dict( argstr="%s", ), curv=dict( argstr="-curv", requires=["in_smoothwm"], ), environ=dict( nohash=True, usedefault=True, ), in_smoothwm=dict( copyfile=True, extensions=None, ), in_sulc=dict( copyfile=True, extensions=None, mandatory=True, ), in_surf=dict( argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-3, ), out_file=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), subjects_dir=dict(), target=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), ) inputs = Register.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Register_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Register.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_RegisterAVItoTalairach.py000066400000000000000000000030531413403311400307200ustar00rootroot00000000000000# AUTO-GENERATED by 
tools/checkspecs.py - DO NOT EDIT from ..registration import RegisterAVItoTalairach def test_RegisterAVItoTalairach_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=0, ), out_file=dict( argstr="%s", extensions=None, position=3, usedefault=True, ), subjects_dir=dict(), target=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), vox2vox=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), ) inputs = RegisterAVItoTalairach.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RegisterAVItoTalairach_outputs(): output_map = dict( log_file=dict( extensions=None, usedefault=True, ), out_file=dict( extensions=None, ), ) outputs = RegisterAVItoTalairach.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_RelabelHypointensities.py000066400000000000000000000032351413403311400311070ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import RelabelHypointensities def test_RelabelHypointensities_inputs(): input_map = dict( args=dict( argstr="%s", ), aseg=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), environ=dict( nohash=True, usedefault=True, ), lh_white=dict( copyfile=True, extensions=None, mandatory=True, ), out_file=dict( argstr="%s", extensions=None, hash_files=False, keep_extension=False, name_source=["aseg"], name_template="%s.hypos.mgz", position=-1, ), rh_white=dict( copyfile=True, extensions=None, mandatory=True, ), subjects_dir=dict(), surf_directory=dict( argstr="%s", position=-2, usedefault=True, ), ) inputs = RelabelHypointensities.input_spec() for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RelabelHypointensities_outputs(): output_map = dict( out_file=dict( argstr="%s", extensions=None, ), ) outputs = RelabelHypointensities.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_RemoveIntersection.py000066400000000000000000000024431413403311400302460ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import RemoveIntersection def test_RemoveIntersection_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-2, ), out_file=dict( argstr="%s", extensions=None, hash_files=False, keep_extension=True, name_source=["in_file"], name_template="%s", position=-1, ), subjects_dir=dict(), ) inputs = RemoveIntersection.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RemoveIntersection_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = RemoveIntersection.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_RemoveNeck.py000066400000000000000000000031031413403311400264520ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import RemoveNeck def test_RemoveNeck_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-4, ), out_file=dict( 
argstr="%s", extensions=None, hash_files=False, keep_extension=True, name_source=["in_file"], name_template="%s_noneck", position=-1, ), radius=dict( argstr="-radius %d", ), subjects_dir=dict(), template=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), transform=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), ) inputs = RemoveNeck.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RemoveNeck_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = RemoveNeck.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Resample.py000066400000000000000000000023541413403311400261730ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Resample def test_Resample_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-i %s", extensions=None, mandatory=True, position=-2, ), resampled_file=dict( argstr="-o %s", extensions=None, genfile=True, position=-1, ), subjects_dir=dict(), voxel_size=dict( argstr="-vs %.2f %.2f %.2f", mandatory=True, ), ) inputs = Resample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Resample_outputs(): output_map = dict( resampled_file=dict( extensions=None, ), ) outputs = Resample.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_RobustRegister.py000066400000000000000000000073331413403311400274100ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import RobustRegister def test_RobustRegister_inputs(): input_map = dict( args=dict( argstr="%s", ), auto_sens=dict( argstr="--satit", mandatory=True, xor=["outlier_sens"], ), environ=dict( nohash=True, usedefault=True, ), est_int_scale=dict( argstr="--iscale", ), force_double=dict( argstr="--doubleprec", ), force_float=dict( argstr="--floattype", ), half_source=dict( argstr="--halfmov %s", ), half_source_xfm=dict( argstr="--halfmovlta %s", ), half_targ=dict( argstr="--halfdst %s", ), half_targ_xfm=dict( argstr="--halfdstlta %s", ), half_weights=dict( argstr="--halfweights %s", ), high_iterations=dict( argstr="--highit %d", ), in_xfm_file=dict( argstr="--transform", extensions=None, ), init_orient=dict( argstr="--initorient", ), iteration_thresh=dict( argstr="--epsit %.3f", ), least_squares=dict( argstr="--leastsquares", ), mask_source=dict( argstr="--maskmov %s", extensions=None, ), mask_target=dict( argstr="--maskdst %s", extensions=None, ), max_iterations=dict( argstr="--maxit %d", ), no_init=dict( argstr="--noinit", ), no_multi=dict( argstr="--nomulti", ), out_reg_file=dict( argstr="--lta %s", usedefault=True, ), outlier_limit=dict( argstr="--wlimit %.3f", ), outlier_sens=dict( argstr="--sat %.4f", mandatory=True, xor=["auto_sens"], ), registered_file=dict( argstr="--warp %s", ), source_file=dict( argstr="--mov %s", extensions=None, mandatory=True, ), subjects_dir=dict(), subsample_thresh=dict( argstr="--subsample %d", ), target_file=dict( argstr="--dst %s", extensions=None, mandatory=True, ), trans_only=dict( argstr="--transonly", ), weights_file=dict( argstr="--weights %s", ), write_vo2vox=dict( argstr="--vox2vox", ), ) inputs = RobustRegister.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RobustRegister_outputs(): output_map = dict( half_source=dict( extensions=None, ), half_source_xfm=dict( extensions=None, ), half_targ=dict( extensions=None, ), half_targ_xfm=dict( extensions=None, ), half_weights=dict( extensions=None, ), out_reg_file=dict( extensions=None, ), registered_file=dict( extensions=None, ), weights_file=dict( extensions=None, ), ) outputs = RobustRegister.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_RobustTemplate.py000066400000000000000000000044041413403311400273730ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..longitudinal import RobustTemplate def test_RobustTemplate_inputs(): input_map = dict( args=dict( argstr="%s", ), auto_detect_sensitivity=dict( argstr="--satit", mandatory=True, xor=["outlier_sensitivity"], ), average_metric=dict( argstr="--average %d", ), environ=dict( nohash=True, usedefault=True, ), fixed_timepoint=dict( argstr="--fixtp", ), in_files=dict( argstr="--mov %s", mandatory=True, ), in_intensity_scales=dict( argstr="--iscalein %s", ), initial_timepoint=dict( argstr="--inittp %d", ), initial_transforms=dict( argstr="--ixforms %s", ), intensity_scaling=dict( argstr="--iscale", ), no_iteration=dict( argstr="--noit", ), num_threads=dict(), out_file=dict( argstr="--template %s", extensions=None, mandatory=True, usedefault=True, ), outlier_sensitivity=dict( argstr="--sat %.4f", mandatory=True, xor=["auto_detect_sensitivity"], ), scaled_intensity_outputs=dict( argstr="--iscaleout %s", ), subjects_dir=dict(), subsample_threshold=dict( argstr="--subsample %d", ), transform_outputs=dict( argstr="--lta %s", ), ) inputs = RobustTemplate.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RobustTemplate_outputs(): output_map = dict( out_file=dict( extensions=None, ), scaled_intensity_outputs=dict(), transform_outputs=dict(), ) outputs = RobustTemplate.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_SampleToSurface.py000066400000000000000000000102671413403311400274620ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import SampleToSurface def test_SampleToSurface_inputs(): input_map = dict( apply_rot=dict( argstr="--rot %.3f %.3f %.3f", ), apply_trans=dict( argstr="--trans %.3f %.3f %.3f", ), args=dict( argstr="%s", ), cortex_mask=dict( argstr="--cortex", xor=["mask_label"], ), environ=dict( nohash=True, usedefault=True, ), fix_tk_reg=dict( argstr="--fixtkreg", ), float2int_method=dict( argstr="--float2int %s", ), frame=dict( argstr="--frame %d", ), hemi=dict( argstr="--hemi %s", mandatory=True, ), hits_file=dict( argstr="--srchit %s", ), hits_type=dict( argstr="--srchit_type", ), ico_order=dict( argstr="--icoorder %d", requires=["target_subject"], ), interp_method=dict( argstr="--interp %s", ), mask_label=dict( argstr="--mask %s", extensions=None, xor=["cortex_mask"], ), mni152reg=dict( argstr="--mni152reg", mandatory=True, xor=["reg_file", "reg_header", "mni152reg"], ), no_reshape=dict( argstr="--noreshape", xor=["reshape"], ), out_file=dict( argstr="--o %s", extensions=None, genfile=True, ), out_type=dict( argstr="--out_type %s", ), override_reg_subj=dict( argstr="--srcsubject %s", requires=["subject_id"], ), projection_stem=dict( mandatory=True, xor=["sampling_method"], ), reference_file=dict( argstr="--ref %s", extensions=None, ), reg_file=dict( argstr="--reg %s", extensions=None, mandatory=True, xor=["reg_file", "reg_header", "mni152reg"], ), 
reg_header=dict( argstr="--regheader %s", mandatory=True, requires=["subject_id"], xor=["reg_file", "reg_header", "mni152reg"], ), reshape=dict( argstr="--reshape", xor=["no_reshape"], ), reshape_slices=dict( argstr="--rf %d", ), sampling_method=dict( argstr="%s", mandatory=True, requires=["sampling_range", "sampling_units"], xor=["projection_stem"], ), sampling_range=dict(), sampling_units=dict(), scale_input=dict( argstr="--scale %.3f", ), smooth_surf=dict( argstr="--surf-fwhm %.3f", ), smooth_vol=dict( argstr="--fwhm %.3f", ), source_file=dict( argstr="--mov %s", extensions=None, mandatory=True, ), subject_id=dict(), subjects_dir=dict(), surf_reg=dict( argstr="--surfreg %s", requires=["target_subject"], ), surface=dict( argstr="--surf %s", ), target_subject=dict( argstr="--trgsubject %s", ), vox_file=dict( argstr="--nvox %s", ), ) inputs = SampleToSurface.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SampleToSurface_outputs(): output_map = dict( hits_file=dict( extensions=None, ), out_file=dict( extensions=None, ), vox_file=dict( extensions=None, ), ) outputs = SampleToSurface.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_SegStats.py000066400000000000000000000111121413403311400261500ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import SegStats def test_SegStats_inputs(): input_map = dict( annot=dict( argstr="--annot %s %s %s", mandatory=True, xor=("segmentation_file", "annot", "surf_label"), ), args=dict( argstr="%s", ), avgwf_file=dict( argstr="--avgwfvol %s", ), avgwf_txt_file=dict( argstr="--avgwf %s", ), brain_vol=dict( argstr="--%s", ), brainmask_file=dict( argstr="--brainmask %s", extensions=None, ), 
calc_power=dict( argstr="--%s", ), calc_snr=dict( argstr="--snr", ), color_table_file=dict( argstr="--ctab %s", extensions=None, xor=("color_table_file", "default_color_table", "gca_color_table"), ), cortex_vol_from_surf=dict( argstr="--surf-ctx-vol", ), default_color_table=dict( argstr="--ctab-default", xor=("color_table_file", "default_color_table", "gca_color_table"), ), empty=dict( argstr="--empty", ), environ=dict( nohash=True, usedefault=True, ), etiv=dict( argstr="--etiv", ), etiv_only=dict(), euler=dict( argstr="--euler", ), exclude_ctx_gm_wm=dict( argstr="--excl-ctxgmwm", ), exclude_id=dict( argstr="--excludeid %d", ), frame=dict( argstr="--frame %d", ), gca_color_table=dict( argstr="--ctab-gca %s", extensions=None, xor=("color_table_file", "default_color_table", "gca_color_table"), ), in_file=dict( argstr="--i %s", extensions=None, ), in_intensity=dict( argstr="--in %s --in-intensity-name %s", extensions=None, ), intensity_units=dict( argstr="--in-intensity-units %s", requires=["in_intensity"], ), mask_erode=dict( argstr="--maskerode %d", ), mask_file=dict( argstr="--mask %s", extensions=None, ), mask_frame=dict( requires=["mask_file"], ), mask_invert=dict( argstr="--maskinvert", ), mask_sign=dict(), mask_thresh=dict( argstr="--maskthresh %f", ), multiply=dict( argstr="--mul %f", ), non_empty_only=dict( argstr="--nonempty", ), partial_volume_file=dict( argstr="--pv %s", extensions=None, ), segment_id=dict( argstr="--id %s...", ), segmentation_file=dict( argstr="--seg %s", extensions=None, mandatory=True, xor=("segmentation_file", "annot", "surf_label"), ), sf_avg_file=dict( argstr="--sfavg %s", ), subcort_gm=dict( argstr="--subcortgray", ), subjects_dir=dict(), summary_file=dict( argstr="--sum %s", extensions=None, genfile=True, position=-1, ), supratent=dict( argstr="--supratent", ), surf_label=dict( argstr="--slabel %s %s %s", mandatory=True, xor=("segmentation_file", "annot", "surf_label"), ), total_gray=dict( argstr="--totalgray", ), vox=dict( 
argstr="--vox %s", ), wm_vol_from_surf=dict( argstr="--surf-wm-vol", ), ) inputs = SegStats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SegStats_outputs(): output_map = dict( avgwf_file=dict( extensions=None, ), avgwf_txt_file=dict( extensions=None, ), sf_avg_file=dict( extensions=None, ), summary_file=dict( extensions=None, ), ) outputs = SegStats.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_SegStatsReconAll.py000066400000000000000000000131521413403311400275760ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import SegStatsReconAll def test_SegStatsReconAll_inputs(): input_map = dict( annot=dict( argstr="--annot %s %s %s", mandatory=True, xor=("segmentation_file", "annot", "surf_label"), ), args=dict( argstr="%s", ), aseg=dict( extensions=None, ), avgwf_file=dict( argstr="--avgwfvol %s", ), avgwf_txt_file=dict( argstr="--avgwf %s", ), brain_vol=dict( argstr="--%s", ), brainmask_file=dict( argstr="--brainmask %s", extensions=None, ), calc_power=dict( argstr="--%s", ), calc_snr=dict( argstr="--snr", ), color_table_file=dict( argstr="--ctab %s", extensions=None, xor=("color_table_file", "default_color_table", "gca_color_table"), ), copy_inputs=dict(), cortex_vol_from_surf=dict( argstr="--surf-ctx-vol", ), default_color_table=dict( argstr="--ctab-default", xor=("color_table_file", "default_color_table", "gca_color_table"), ), empty=dict( argstr="--empty", ), environ=dict( nohash=True, usedefault=True, ), etiv=dict( argstr="--etiv", ), etiv_only=dict(), euler=dict( argstr="--euler", ), exclude_ctx_gm_wm=dict( argstr="--excl-ctxgmwm", ), exclude_id=dict( argstr="--excludeid %d", ), frame=dict( argstr="--frame %d", ), 
gca_color_table=dict( argstr="--ctab-gca %s", extensions=None, xor=("color_table_file", "default_color_table", "gca_color_table"), ), in_file=dict( argstr="--i %s", extensions=None, ), in_intensity=dict( argstr="--in %s --in-intensity-name %s", extensions=None, ), intensity_units=dict( argstr="--in-intensity-units %s", requires=["in_intensity"], ), lh_orig_nofix=dict( extensions=None, mandatory=True, ), lh_pial=dict( extensions=None, mandatory=True, ), lh_white=dict( extensions=None, mandatory=True, ), mask_erode=dict( argstr="--maskerode %d", ), mask_file=dict( argstr="--mask %s", extensions=None, ), mask_frame=dict( requires=["mask_file"], ), mask_invert=dict( argstr="--maskinvert", ), mask_sign=dict(), mask_thresh=dict( argstr="--maskthresh %f", ), multiply=dict( argstr="--mul %f", ), non_empty_only=dict( argstr="--nonempty", ), partial_volume_file=dict( argstr="--pv %s", extensions=None, ), presurf_seg=dict( extensions=None, ), rh_orig_nofix=dict( extensions=None, mandatory=True, ), rh_pial=dict( extensions=None, mandatory=True, ), rh_white=dict( extensions=None, mandatory=True, ), ribbon=dict( extensions=None, mandatory=True, ), segment_id=dict( argstr="--id %s...", ), segmentation_file=dict( argstr="--seg %s", extensions=None, mandatory=True, xor=("segmentation_file", "annot", "surf_label"), ), sf_avg_file=dict( argstr="--sfavg %s", ), subcort_gm=dict( argstr="--subcortgray", ), subject_id=dict( argstr="--subject %s", mandatory=True, usedefault=True, ), subjects_dir=dict(), summary_file=dict( argstr="--sum %s", extensions=None, genfile=True, position=-1, ), supratent=dict( argstr="--supratent", ), surf_label=dict( argstr="--slabel %s %s %s", mandatory=True, xor=("segmentation_file", "annot", "surf_label"), ), total_gray=dict( argstr="--totalgray", ), transform=dict( extensions=None, mandatory=True, ), vox=dict( argstr="--vox %s", ), wm_vol_from_surf=dict( argstr="--surf-wm-vol", ), ) inputs = SegStatsReconAll.input_spec() for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SegStatsReconAll_outputs(): output_map = dict( avgwf_file=dict( extensions=None, ), avgwf_txt_file=dict( extensions=None, ), sf_avg_file=dict( extensions=None, ), summary_file=dict( extensions=None, ), ) outputs = SegStatsReconAll.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_SegmentCC.py000066400000000000000000000032061413403311400262300ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import SegmentCC def test_SegmentCC_inputs(): input_map = dict( args=dict( argstr="%s", ), copy_inputs=dict(), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-aseg %s", extensions=None, mandatory=True, ), in_norm=dict( extensions=None, mandatory=True, ), out_file=dict( argstr="-o %s", extensions=None, hash_files=False, keep_extension=False, name_source=["in_file"], name_template="%s.auto.mgz", ), out_rotation=dict( argstr="-lta %s", extensions=None, mandatory=True, ), subject_id=dict( argstr="%s", mandatory=True, position=-1, usedefault=True, ), subjects_dir=dict(), ) inputs = SegmentCC.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SegmentCC_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_rotation=dict( extensions=None, ), ) outputs = SegmentCC.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_SegmentWM.py000066400000000000000000000021701413403311400262650ustar00rootroot00000000000000# 
AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import SegmentWM def test_SegmentWM_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, ), subjects_dir=dict(), ) inputs = SegmentWM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SegmentWM_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = SegmentWM.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Smooth.py000066400000000000000000000034221413403311400256710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Smooth def test_Smooth_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="--i %s", extensions=None, mandatory=True, ), num_iters=dict( argstr="--niters %d", mandatory=True, xor=["surface_fwhm"], ), proj_frac=dict( argstr="--projfrac %s", xor=["proj_frac_avg"], ), proj_frac_avg=dict( argstr="--projfrac-avg %.2f %.2f %.2f", xor=["proj_frac"], ), reg_file=dict( argstr="--reg %s", extensions=None, mandatory=True, ), smoothed_file=dict( argstr="--o %s", extensions=None, genfile=True, ), subjects_dir=dict(), surface_fwhm=dict( argstr="--fwhm %f", mandatory=True, requires=["reg_file"], xor=["num_iters"], ), vol_fwhm=dict( argstr="--vol-fwhm %f", ), ) inputs = Smooth.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Smooth_outputs(): output_map = 
dict( smoothed_file=dict( extensions=None, ), ) outputs = Smooth.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_SmoothTessellation.py000066400000000000000000000041521413403311400302610ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import SmoothTessellation def test_SmoothTessellation_inputs(): input_map = dict( args=dict( argstr="%s", ), curvature_averaging_iterations=dict( argstr="-a %d", ), disable_estimates=dict( argstr="-nw", ), environ=dict( nohash=True, usedefault=True, ), gaussian_curvature_norm_steps=dict( argstr="%d", ), gaussian_curvature_smoothing_steps=dict( argstr=" %d", ), in_file=dict( argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-2, ), normalize_area=dict( argstr="-area", ), out_area_file=dict( argstr="-b %s", extensions=None, ), out_curvature_file=dict( argstr="-c %s", extensions=None, ), out_file=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), seed=dict( argstr="-seed %d", ), smoothing_iterations=dict( argstr="-n %d", ), snapshot_writing_iterations=dict( argstr="-w %d", ), subjects_dir=dict(), use_gaussian_curvature_smoothing=dict( argstr="-g", ), use_momentum=dict( argstr="-m", ), ) inputs = SmoothTessellation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SmoothTessellation_outputs(): output_map = dict( surface=dict( extensions=None, ), ) outputs = SmoothTessellation.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Sphere.py000066400000000000000000000026731413403311400256550ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Sphere def test_Sphere_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-2, ), in_smoothwm=dict( copyfile=True, extensions=None, ), magic=dict( argstr="-q", ), num_threads=dict(), out_file=dict( argstr="%s", extensions=None, hash_files=False, name_source=["in_file"], name_template="%s.sphere", position=-1, ), seed=dict( argstr="-seed %d", ), subjects_dir=dict(), ) inputs = Sphere.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Sphere_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Sphere.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_SphericalAverage.py000066400000000000000000000036111413403311400276250ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import SphericalAverage def test_SphericalAverage_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), erode=dict( argstr="-erode %d", ), fname=dict( argstr="%s", mandatory=True, position=-5, ), hemisphere=dict( argstr="%s", mandatory=True, position=-4, ), in_average=dict( argstr="%s", genfile=True, position=-2, ), in_orig=dict( argstr="-orig %s", extensions=None, ), in_surf=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), out_file=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), subject_id=dict( argstr="-o %s", 
mandatory=True, ), subjects_dir=dict(), threshold=dict( argstr="-t %.1f", ), which=dict( argstr="%s", mandatory=True, position=-6, ), ) inputs = SphericalAverage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SphericalAverage_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = SphericalAverage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Surface2VolTransform.py000066400000000000000000000044451413403311400304550ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Surface2VolTransform def test_Surface2VolTransform_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), hemi=dict( argstr="--hemi %s", mandatory=True, ), mkmask=dict( argstr="--mkmask", xor=["source_file"], ), projfrac=dict( argstr="--projfrac %s", ), reg_file=dict( argstr="--volreg %s", extensions=None, mandatory=True, xor=["subject_id"], ), source_file=dict( argstr="--surfval %s", copyfile=False, extensions=None, mandatory=True, xor=["mkmask"], ), subject_id=dict( argstr="--identity %s", xor=["reg_file"], ), subjects_dir=dict( argstr="--sd %s", ), surf_name=dict( argstr="--surf %s", ), template_file=dict( argstr="--template %s", extensions=None, ), transformed_file=dict( argstr="--outvol %s", extensions=None, hash_files=False, name_source=["source_file"], name_template="%s_asVol.nii", ), vertexvol_file=dict( argstr="--vtxvol %s", extensions=None, hash_files=False, name_source=["source_file"], name_template="%s_asVol_vertex.nii", ), ) inputs = Surface2VolTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value def test_Surface2VolTransform_outputs(): output_map = dict( transformed_file=dict( extensions=None, ), vertexvol_file=dict( extensions=None, ), ) outputs = Surface2VolTransform.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSmooth.py000066400000000000000000000031711413403311400272030ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import SurfaceSmooth def test_SurfaceSmooth_inputs(): input_map = dict( args=dict( argstr="%s", ), cortex=dict( argstr="--cortex", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), fwhm=dict( argstr="--fwhm %.4f", xor=["smooth_iters"], ), hemi=dict( argstr="--hemi %s", mandatory=True, ), in_file=dict( argstr="--sval %s", extensions=None, mandatory=True, ), out_file=dict( argstr="--tval %s", extensions=None, genfile=True, ), reshape=dict( argstr="--reshape", ), smooth_iters=dict( argstr="--smooth %d", xor=["fwhm"], ), subject_id=dict( argstr="--s %s", mandatory=True, ), subjects_dir=dict(), ) inputs = SurfaceSmooth.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SurfaceSmooth_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = SurfaceSmooth.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_SurfaceSnapshots.py000066400000000000000000000073221413403311400277160ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import SurfaceSnapshots def test_SurfaceSnapshots_inputs(): input_map = dict( 
annot_file=dict( argstr="-annotation %s", extensions=None, xor=["annot_name"], ), annot_name=dict( argstr="-annotation %s", xor=["annot_file"], ), args=dict( argstr="%s", ), colortable=dict( argstr="-colortable %s", extensions=None, ), demean_overlay=dict( argstr="-zm", ), environ=dict( nohash=True, usedefault=True, ), hemi=dict( argstr="%s", mandatory=True, position=2, ), identity_reg=dict( argstr="-overlay-reg-identity", xor=["overlay_reg", "identity_reg", "mni152_reg"], ), invert_overlay=dict( argstr="-invphaseflag 1", ), label_file=dict( argstr="-label %s", extensions=None, xor=["label_name"], ), label_name=dict( argstr="-label %s", xor=["label_file"], ), label_outline=dict( argstr="-label-outline", ), label_under=dict( argstr="-labels-under", ), mni152_reg=dict( argstr="-mni152reg", xor=["overlay_reg", "identity_reg", "mni152_reg"], ), orig_suffix=dict( argstr="-orig %s", ), overlay=dict( argstr="-overlay %s", extensions=None, requires=["overlay_range"], ), overlay_range=dict( argstr="%s", ), overlay_range_offset=dict( argstr="-foffset %.3f", ), overlay_reg=dict( argstr="-overlay-reg %s", extensions=None, xor=["overlay_reg", "identity_reg", "mni152_reg"], ), patch_file=dict( argstr="-patch %s", extensions=None, ), reverse_overlay=dict( argstr="-revphaseflag 1", ), screenshot_stem=dict(), show_color_scale=dict( argstr="-colscalebarflag 1", ), show_color_text=dict( argstr="-colscaletext 1", ), show_curv=dict( argstr="-curv", xor=["show_gray_curv"], ), show_gray_curv=dict( argstr="-gray", xor=["show_curv"], ), six_images=dict(), sphere_suffix=dict( argstr="-sphere %s", ), stem_template_args=dict( requires=["screenshot_stem"], ), subject_id=dict( argstr="%s", mandatory=True, position=1, ), subjects_dir=dict(), surface=dict( argstr="%s", mandatory=True, position=3, ), tcl_script=dict( argstr="%s", extensions=None, genfile=True, ), truncate_overlay=dict( argstr="-truncphaseflag 1", ), ) inputs = SurfaceSnapshots.input_spec() for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SurfaceSnapshots_outputs(): output_map = dict( snapshots=dict(), ) outputs = SurfaceSnapshots.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_SurfaceTransform.py000066400000000000000000000040051413403311400277020ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import SurfaceTransform def test_SurfaceTransform_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), hemi=dict( argstr="--hemi %s", mandatory=True, ), out_file=dict( argstr="--tval %s", extensions=None, genfile=True, ), reshape=dict( argstr="--reshape", ), reshape_factor=dict( argstr="--reshape-factor", ), source_annot_file=dict( argstr="--sval-annot %s", extensions=None, mandatory=True, xor=["source_file"], ), source_file=dict( argstr="--sval %s", extensions=None, mandatory=True, xor=["source_annot_file"], ), source_subject=dict( argstr="--srcsubject %s", mandatory=True, ), source_type=dict( argstr="--sfmt %s", requires=["source_file"], ), subjects_dir=dict(), target_ico_order=dict( argstr="--trgicoorder %d", ), target_subject=dict( argstr="--trgsubject %s", mandatory=True, ), target_type=dict( argstr="--tfmt %s", ), ) inputs = SurfaceTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SurfaceTransform_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = SurfaceTransform.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_SynthesizeFLASH.py000066400000000000000000000032521413403311400273440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import SynthesizeFLASH def test_SynthesizeFLASH_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fixed_weighting=dict( argstr="-w", position=1, ), flip_angle=dict( argstr="%.2f", mandatory=True, position=3, ), out_file=dict( argstr="%s", extensions=None, genfile=True, ), pd_image=dict( argstr="%s", extensions=None, mandatory=True, position=6, ), subjects_dir=dict(), t1_image=dict( argstr="%s", extensions=None, mandatory=True, position=5, ), te=dict( argstr="%.3f", mandatory=True, position=4, ), tr=dict( argstr="%.2f", mandatory=True, position=2, ), ) inputs = SynthesizeFLASH.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SynthesizeFLASH_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = SynthesizeFLASH.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_TalairachAVI.py000066400000000000000000000024261413403311400266530ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import TalairachAVI def test_TalairachAVI_inputs(): input_map = dict( args=dict( argstr="%s", ), atlas=dict( argstr="--atlas %s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="--i %s", extensions=None, mandatory=True, ), out_file=dict( argstr="--xfm %s", extensions=None, mandatory=True, ), subjects_dir=dict(), ) inputs = TalairachAVI.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value def test_TalairachAVI_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_log=dict( extensions=None, ), out_txt=dict( extensions=None, ), ) outputs = TalairachAVI.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_TalairachQC.py000066400000000000000000000020151413403311400265310ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import TalairachQC def test_TalairachQC_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), log_file=dict( argstr="%s", extensions=None, mandatory=True, position=0, ), subjects_dir=dict(), ) inputs = TalairachQC.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TalairachQC_outputs(): output_map = dict( log_file=dict( extensions=None, usedefault=True, ), ) outputs = TalairachQC.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_Tkregister2.py000066400000000000000000000047201413403311400266270ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Tkregister2 def test_Tkregister2_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fsl_in_matrix=dict( argstr="--fsl %s", extensions=None, ), fsl_out=dict( argstr="--fslregout %s", ), fstal=dict( argstr="--fstal", xor=["target_image", "moving_image", "reg_file"], ), fstarg=dict( argstr="--fstarg", xor=["target_image"], ), invert_lta_in=dict( requires=["lta_in"], ), invert_lta_out=dict( 
argstr="--ltaout-inv", requires=["lta_in"], ), lta_in=dict( argstr="--lta %s", extensions=None, ), lta_out=dict( argstr="--ltaout %s", ), moving_image=dict( argstr="--mov %s", extensions=None, mandatory=True, ), movscale=dict( argstr="--movscale %f", ), noedit=dict( argstr="--noedit", usedefault=True, ), reg_file=dict( argstr="--reg %s", extensions=None, mandatory=True, usedefault=True, ), reg_header=dict( argstr="--regheader", ), subject_id=dict( argstr="--s %s", ), subjects_dir=dict(), target_image=dict( argstr="--targ %s", extensions=None, xor=["fstarg"], ), xfm=dict( argstr="--xfm %s", extensions=None, ), ) inputs = Tkregister2.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Tkregister2_outputs(): output_map = dict( fsl_file=dict( extensions=None, ), lta_file=dict( extensions=None, ), reg_file=dict( extensions=None, ), ) outputs = Tkregister2.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_UnpackSDICOMDir.py000066400000000000000000000032351413403311400272010ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import UnpackSDICOMDir def test_UnpackSDICOMDir_inputs(): input_map = dict( args=dict( argstr="%s", ), config=dict( argstr="-cfg %s", extensions=None, mandatory=True, xor=("run_info", "config", "seq_config"), ), dir_structure=dict( argstr="-%s", ), environ=dict( nohash=True, usedefault=True, ), log_file=dict( argstr="-log %s", extensions=None, ), no_info_dump=dict( argstr="-noinfodump", ), no_unpack_err=dict( argstr="-no-unpackerr", ), output_dir=dict( argstr="-targ %s", ), run_info=dict( argstr="-run %d %s %s %s", mandatory=True, xor=("run_info", "config", "seq_config"), ), scan_only=dict( argstr="-scanonly %s", 
extensions=None, ), seq_config=dict( argstr="-seqcfg %s", extensions=None, mandatory=True, xor=("run_info", "config", "seq_config"), ), source_dir=dict( argstr="-src %s", mandatory=True, ), spm_zeropad=dict( argstr="-nspmzeropad %d", ), subjects_dir=dict(), ) inputs = UnpackSDICOMDir.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_VolumeMask.py000066400000000000000000000043401413403311400265030ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import VolumeMask def test_VolumeMask_inputs(): input_map = dict( args=dict( argstr="%s", ), aseg=dict( extensions=None, xor=["in_aseg"], ), copy_inputs=dict(), environ=dict( nohash=True, usedefault=True, ), in_aseg=dict( argstr="--aseg_name %s", extensions=None, xor=["aseg"], ), left_ribbonlabel=dict( argstr="--label_left_ribbon %d", mandatory=True, ), left_whitelabel=dict( argstr="--label_left_white %d", mandatory=True, ), lh_pial=dict( extensions=None, mandatory=True, ), lh_white=dict( extensions=None, mandatory=True, ), rh_pial=dict( extensions=None, mandatory=True, ), rh_white=dict( extensions=None, mandatory=True, ), right_ribbonlabel=dict( argstr="--label_right_ribbon %d", mandatory=True, ), right_whitelabel=dict( argstr="--label_right_white %d", mandatory=True, ), save_ribbon=dict( argstr="--save_ribbon", ), subject_id=dict( argstr="%s", mandatory=True, position=-1, usedefault=True, ), subjects_dir=dict(), ) inputs = VolumeMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_VolumeMask_outputs(): output_map = dict( lh_ribbon=dict( extensions=None, ), out_ribbon=dict( extensions=None, ), rh_ribbon=dict( extensions=None, ), ) outputs = VolumeMask.output_spec() for key, metadata in 
list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_auto_WatershedSkullStrip.py000066400000000000000000000027601413403311400304070ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import WatershedSkullStrip def test_WatershedSkullStrip_inputs(): input_map = dict( args=dict( argstr="%s", ), brain_atlas=dict( argstr="-brain_atlas %s", extensions=None, position=-4, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), subjects_dir=dict(), t1=dict( argstr="-T1", ), transform=dict( argstr="%s", extensions=None, position=-3, ), ) inputs = WatershedSkullStrip.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_WatershedSkullStrip_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = WatershedSkullStrip.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_model.py000066400000000000000000000036121413403311400244710ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import numpy as np import nibabel as nb import pytest from nipype.interfaces.freesurfer import model, no_freesurfer import nipype.pipeline.engine as pe @pytest.mark.skipif(no_freesurfer(), reason="freesurfer is not installed") def test_concatenate(tmpdir): tmpdir.chdir() in1 = tmpdir.join("cont1.nii").strpath in2 = tmpdir.join("cont2.nii").strpath out = 
"bar.nii" data1 = np.zeros((3, 3, 3, 1), dtype=np.float32) data2 = np.ones((3, 3, 3, 5), dtype=np.float32) out_data = np.concatenate((data1, data2), axis=3) mean_data = np.mean(out_data, axis=3) nb.Nifti1Image(data1, affine=np.eye(4)).to_filename(in1) nb.Nifti1Image(data2, affine=np.eye(4)).to_filename(in2) # Test default behavior res = model.Concatenate(in_files=[in1, in2]).run() assert res.outputs.concatenated_file == tmpdir.join("concat_output.nii.gz").strpath assert np.allclose(nb.load("concat_output.nii.gz").get_fdata(), out_data) # Test specified concatenated_file res = model.Concatenate(in_files=[in1, in2], concatenated_file=out).run() assert res.outputs.concatenated_file == tmpdir.join(out).strpath assert np.allclose(nb.load(out).get_fdata(), out_data) # Test in workflow wf = pe.Workflow("test_concatenate", base_dir=tmpdir.strpath) concat = pe.Node( model.Concatenate(in_files=[in1, in2], concatenated_file=out), name="concat" ) wf.add_nodes([concat]) wf.run() assert np.allclose( nb.load(tmpdir.join("test_concatenate", "concat", out).strpath).get_fdata(), out_data, ) # Test a simple statistic res = model.Concatenate( in_files=[in1, in2], concatenated_file=out, stats="mean" ).run() assert np.allclose(nb.load(out).get_fdata(), mean_data) nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_preprocess.py000066400000000000000000000141201413403311400255520ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import pytest from nipype.testing.fixtures import create_files_in_directory from nipype.interfaces import freesurfer from nipype.interfaces.freesurfer import Info from nipype import LooseVersion @pytest.mark.skipif(freesurfer.no_freesurfer(), reason="freesurfer is not installed") def test_robustregister(create_files_in_directory): filelist, outdir = create_files_in_directory reg = freesurfer.RobustRegister() cwd = os.getcwd() # make sure 
command gets called assert reg.cmd == "mri_robust_register" # test raising error with mandatory args absent with pytest.raises(ValueError): reg.run() # .inputs based parameters setting reg.inputs.source_file = filelist[0] reg.inputs.target_file = filelist[1] reg.inputs.auto_sens = True assert reg.cmdline == ( "mri_robust_register --satit --lta " "%s/%s_robustreg.lta --mov %s --dst %s" % (cwd, filelist[0][:-4], filelist[0], filelist[1]) ) # constructor based parameter setting reg2 = freesurfer.RobustRegister( source_file=filelist[0], target_file=filelist[1], outlier_sens=3.0, out_reg_file="foo.lta", half_targ=True, ) assert reg2.cmdline == ( "mri_robust_register --halfdst %s_halfway.nii --lta foo.lta " "--sat 3.0000 --mov %s --dst %s" % (os.path.join(outdir, filelist[1][:-4]), filelist[0], filelist[1]) ) @pytest.mark.skipif(freesurfer.no_freesurfer(), reason="freesurfer is not installed") def test_fitmsparams(create_files_in_directory): filelist, outdir = create_files_in_directory fit = freesurfer.FitMSParams() # make sure command gets called assert fit.cmd == "mri_ms_fitparms" # test raising error with mandatory args absent with pytest.raises(ValueError): fit.run() # .inputs based parameters setting fit.inputs.in_files = filelist fit.inputs.out_dir = outdir assert fit.cmdline == "mri_ms_fitparms %s %s %s" % ( filelist[0], filelist[1], outdir, ) # constructor based parameter setting fit2 = freesurfer.FitMSParams( in_files=filelist, te_list=[1.5, 3.5], flip_list=[20, 30], out_dir=outdir ) assert fit2.cmdline == ( "mri_ms_fitparms -te %.3f -fa %.1f %s -te %.3f -fa %.1f %s %s" % (1.500, 20.0, filelist[0], 3.500, 30.0, filelist[1], outdir) ) @pytest.mark.skipif(freesurfer.no_freesurfer(), reason="freesurfer is not installed") def test_synthesizeflash(create_files_in_directory): filelist, outdir = create_files_in_directory syn = freesurfer.SynthesizeFLASH() # make sure command gets called assert syn.cmd == "mri_synthesize" # test raising error with mandatory args absent 
with pytest.raises(ValueError): syn.run() # .inputs based parameters setting syn.inputs.t1_image = filelist[0] syn.inputs.pd_image = filelist[1] syn.inputs.flip_angle = 30 syn.inputs.te = 4.5 syn.inputs.tr = 20 assert syn.cmdline == ( "mri_synthesize 20.00 30.00 4.500 %s %s %s" % (filelist[0], filelist[1], os.path.join(outdir, "synth-flash_30.mgz")) ) # constructor based parameters setting syn2 = freesurfer.SynthesizeFLASH( t1_image=filelist[0], pd_image=filelist[1], flip_angle=20, te=5, tr=25 ) assert syn2.cmdline == ( "mri_synthesize 25.00 20.00 5.000 %s %s %s" % (filelist[0], filelist[1], os.path.join(outdir, "synth-flash_20.mgz")) ) @pytest.mark.skipif(freesurfer.no_freesurfer(), reason="freesurfer is not installed") def test_mandatory_outvol(create_files_in_directory): filelist, outdir = create_files_in_directory mni = freesurfer.MNIBiasCorrection() # make sure command gets called assert mni.cmd == "mri_nu_correct.mni" # test raising error with mandatory args absent with pytest.raises(ValueError): mni.cmdline # test with minimal args mni.inputs.in_file = filelist[0] base, ext = os.path.splitext(os.path.basename(filelist[0])) if ext == ".gz": base, ext2 = os.path.splitext(base) ext = ext2 + ext assert mni.cmdline == ( "mri_nu_correct.mni --i %s --n 4 --o %s_output%s" % (filelist[0], base, ext) ) # test with custom outfile mni.inputs.out_file = "new_corrected_file.mgz" assert mni.cmdline == ( "mri_nu_correct.mni --i %s --n 4 --o new_corrected_file.mgz" % (filelist[0]) ) # constructor based tests mni2 = freesurfer.MNIBiasCorrection( in_file=filelist[0], out_file="bias_corrected_output", iterations=2 ) assert mni2.cmdline == ( "mri_nu_correct.mni --i %s --n 2 --o bias_corrected_output" % filelist[0] ) @pytest.mark.skipif(freesurfer.no_freesurfer(), reason="freesurfer is not installed") def test_bbregister(create_files_in_directory): filelist, outdir = create_files_in_directory bbr = freesurfer.BBRegister() # make sure command gets called assert bbr.cmd == 
"bbregister" # test raising error with mandatory args absent with pytest.raises(ValueError): bbr.cmdline bbr.inputs.subject_id = "fsaverage" bbr.inputs.source_file = filelist[0] bbr.inputs.contrast_type = "t2" # Check that 'init' is mandatory in FS < 6, but not in 6+ if Info.looseversion() < LooseVersion("6.0.0"): with pytest.raises(ValueError): bbr.cmdline else: bbr.cmdline bbr.inputs.init = "fsl" base, ext = os.path.splitext(os.path.basename(filelist[0])) if ext == ".gz": base, _ = os.path.splitext(base) assert bbr.cmdline == ( "bbregister --t2 --init-fsl " "--reg {base}_bbreg_fsaverage.dat " "--mov {full} --s fsaverage".format(full=filelist[0], base=base) ) def test_FSVersion(): """Check that FSVersion is a string that can be compared with LooseVersion""" assert isinstance(freesurfer.preprocess.FSVersion, str) assert LooseVersion(freesurfer.preprocess.FSVersion) >= LooseVersion("0") nipype-1.7.0/nipype/interfaces/freesurfer/tests/test_utils.py000066400000000000000000000163551413403311400245410ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import os.path as op import pytest from nipype.testing.fixtures import ( create_files_in_directory_plus_dummy_file, create_surf_file_in_directory, ) from nipype.pipeline import engine as pe from nipype.interfaces import freesurfer as fs from nipype.interfaces.base import TraitError from nipype.interfaces.io import FreeSurferSource @pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed") def test_sample2surf(create_files_in_directory_plus_dummy_file): s2s = fs.SampleToSurface() # Test underlying command assert s2s.cmd == "mri_vol2surf" # Test mandatory args exception with pytest.raises(ValueError): s2s.run() # Create testing files files, cwd = create_files_in_directory_plus_dummy_file # Test input settings s2s.inputs.source_file = files[0] s2s.inputs.reference_file = files[1] 
s2s.inputs.hemi = "lh" s2s.inputs.reg_file = files[2] s2s.inputs.sampling_range = 0.5 s2s.inputs.sampling_units = "frac" s2s.inputs.sampling_method = "point" # Test a basic command line assert s2s.cmdline == ( "mri_vol2surf " "--hemi lh --o %s --ref %s --reg reg.dat --projfrac 0.500 --mov %s" % (os.path.join(cwd, "lh.a.mgz"), files[1], files[0]) ) # Test identity s2sish = fs.SampleToSurface( source_file=files[1], reference_file=files[0], hemi="rh" ) assert s2s != s2sish # Test hits file name creation s2s.inputs.hits_file = True assert s2s._get_outfilename("hits_file") == os.path.join(cwd, "lh.a_hits.mgz") # Test that a 2-tuple range raises an error def set_illegal_range(): s2s.inputs.sampling_range = (0.2, 0.5) with pytest.raises(TraitError): set_illegal_range() @pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed") def test_surfsmooth(create_surf_file_in_directory): smooth = fs.SurfaceSmooth() # Test underlying command assert smooth.cmd == "mri_surf2surf" # Test mandatory args exception with pytest.raises(ValueError): smooth.run() # Create testing files surf, cwd = create_surf_file_in_directory # Test input settings smooth.inputs.in_file = surf smooth.inputs.subject_id = "fsaverage" fwhm = 5 smooth.inputs.fwhm = fwhm smooth.inputs.hemi = "lh" # Test the command line assert smooth.cmdline == ( "mri_surf2surf --cortex --fwhm 5.0000 --hemi lh --sval %s --tval %s/lh.a_smooth%d.nii --s fsaverage" % (surf, cwd, fwhm) ) # Test identity shmooth = fs.SurfaceSmooth( subject_id="fsaverage", fwhm=6, in_file=surf, hemi="lh", out_file="lh.a_smooth.nii", ) assert smooth != shmooth @pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed") def test_surfxfm(create_surf_file_in_directory): xfm = fs.SurfaceTransform() # Test underlying command assert xfm.cmd == "mri_surf2surf" # Test mandatory args exception with pytest.raises(ValueError): xfm.run() # Create testing files surf, cwd = create_surf_file_in_directory # Test input settings 
xfm.inputs.source_file = surf xfm.inputs.source_subject = "my_subject" xfm.inputs.target_subject = "fsaverage" xfm.inputs.hemi = "lh" # Test the command line assert xfm.cmdline == ( "mri_surf2surf --hemi lh --tval %s/lh.a.fsaverage.nii --sval %s --srcsubject my_subject --trgsubject fsaverage" % (cwd, surf) ) # Test identity xfmish = fs.SurfaceTransform( source_subject="fsaverage", target_subject="my_subject", source_file=surf, hemi="lh", ) assert xfm != xfmish @pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed") def test_surfshots(create_files_in_directory_plus_dummy_file): fotos = fs.SurfaceSnapshots() # Test underlying command assert fotos.cmd == "tksurfer" # Test mandatory args exception with pytest.raises(ValueError): fotos.run() # Create testing files files, cwd = create_files_in_directory_plus_dummy_file # Test input settins fotos.inputs.subject_id = "fsaverage" fotos.inputs.hemi = "lh" fotos.inputs.surface = "pial" # Test a basic command line assert fotos.cmdline == "tksurfer fsaverage lh pial -tcl snapshots.tcl" # Test identity schmotos = fs.SurfaceSnapshots(subject_id="mysubject", hemi="rh", surface="white") assert fotos != schmotos # Test that the tcl script gets written fotos._write_tcl_script() assert os.path.exists("snapshots.tcl") # Test that we can use a different tcl script foo = open("other.tcl", "w").close() fotos.inputs.tcl_script = "other.tcl" assert fotos.cmdline == "tksurfer fsaverage lh pial -tcl other.tcl" # Test that the interface crashes politely if graphics aren't enabled try: hold_display = os.environ["DISPLAY"] del os.environ["DISPLAY"] with pytest.raises(RuntimeError): fotos.run() os.environ["DISPLAY"] = hold_display except KeyError: pass @pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed") def test_mrisexpand(tmpdir): fssrc = FreeSurferSource( subjects_dir=fs.Info.subjectsdir(), subject_id="fsaverage", hemi="lh" ) fsavginfo = fssrc.run().outputs.get() # dt=60 to ensure very short 
runtime expand_if = fs.MRIsExpand( in_file=fsavginfo["smoothwm"], out_name="expandtmp", distance=1, dt=60 ) expand_nd = pe.Node( fs.MRIsExpand( in_file=fsavginfo["smoothwm"], out_name="expandtmp", distance=1, dt=60 ), name="expand_node", ) # Interfaces should have same command line at instantiation orig_cmdline = "mris_expand -T 60 {} 1 expandtmp".format(fsavginfo["smoothwm"]) assert expand_if.cmdline == orig_cmdline assert expand_nd.interface.cmdline == orig_cmdline # Run Node interface nd_res = expand_nd.run() # Commandlines differ node_cmdline = ( "mris_expand -T 60 -pial {cwd}/lh.pial {cwd}/lh.smoothwm " "1 expandtmp".format(cwd=nd_res.runtime.cwd) ) assert nd_res.runtime.cmdline == node_cmdline # Check output if_out_file = expand_if._list_outputs()["out_file"] nd_out_file = nd_res.outputs.get()["out_file"] # Same filename assert op.basename(if_out_file) == op.basename(nd_out_file) # Interface places output in source directory assert op.dirname(if_out_file) == op.dirname(fsavginfo["smoothwm"]) # Node places output in working directory assert op.dirname(nd_out_file) == nd_res.runtime.cwd @pytest.mark.skipif(fs.no_freesurfer(), reason="freesurfer is not installed") def test_eulernumber(tmpdir): # grab a surface from fsaverage fssrc = FreeSurferSource( subjects_dir=fs.Info.subjectsdir(), subject_id="fsaverage", hemi="lh" ) pial = fssrc.run().outputs.pial assert isinstance(pial, str), "Problem when fetching surface file" eu = fs.EulerNumber() eu.inputs.in_file = pial res = eu.run() assert res.outputs.defects == 0 assert res.outputs.euler == 2 nipype-1.7.0/nipype/interfaces/freesurfer/utils.py000066400000000000000000004176641413403311400223500ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Interfaces to assorted Freesurfer utility programs. """ import os import re import shutil from ... 
import logging from ...utils.filemanip import fname_presuffix, split_filename from ..base import ( TraitedSpec, Directory, File, traits, OutputMultiPath, isdefined, CommandLine, CommandLineInputSpec, ) from .base import ( FSCommand, FSTraitedSpec, FSSurfaceCommand, FSScriptCommand, FSScriptOutputSpec, FSTraitedSpecOpenMP, FSCommandOpenMP, ) __docformat__ = "restructuredtext" filemap = dict( cor="cor", mgh="mgh", mgz="mgz", minc="mnc", afni="brik", brik="brik", bshort="bshort", spm="img", analyze="img", analyze4d="img", bfloat="bfloat", nifti1="img", nii="nii", niigz="nii.gz", gii="gii", ) filetypes = [ "cor", "mgh", "mgz", "minc", "analyze", "analyze4d", "spm", "afni", "brik", "bshort", "bfloat", "sdt", "outline", "otl", "gdf", "nifti1", "nii", "niigz", ] implicit_filetypes = ["gii"] logger = logging.getLogger("nipype.interface") def copy2subjdir(cls, in_file, folder=None, basename=None, subject_id=None): """Method to copy an input to the subjects directory""" # check that the input is defined if not isdefined(in_file): return in_file # check that subjects_dir is defined if isdefined(cls.inputs.subjects_dir): subjects_dir = cls.inputs.subjects_dir else: subjects_dir = os.getcwd() # if not use cwd # check for subject_id if not subject_id: if isdefined(cls.inputs.subject_id): subject_id = cls.inputs.subject_id else: subject_id = "subject_id" # default # check for basename if basename is None: basename = os.path.basename(in_file) # check which folder to put the file in if folder is not None: out_dir = os.path.join(subjects_dir, subject_id, folder) else: out_dir = os.path.join(subjects_dir, subject_id) # make the output folder if it does not exist if not os.path.isdir(out_dir): os.makedirs(out_dir) out_file = os.path.join(out_dir, basename) if not os.path.isfile(out_file): shutil.copy(in_file, out_file) return out_file def createoutputdirs(outputs): """create all output directories. 
If not created, some freesurfer interfaces fail""" for output in list(outputs.values()): dirname = os.path.dirname(output) if not os.path.isdir(dirname): os.makedirs(dirname) class SampleToSurfaceInputSpec(FSTraitedSpec): source_file = File( exists=True, mandatory=True, argstr="--mov %s", desc="volume to sample values from", ) reference_file = File( exists=True, argstr="--ref %s", desc="reference volume (default is orig.mgz)" ) hemi = traits.Enum( "lh", "rh", mandatory=True, argstr="--hemi %s", desc="target hemisphere" ) surface = traits.String( argstr="--surf %s", desc="target surface (default is white)" ) reg_xors = ["reg_file", "reg_header", "mni152reg"] reg_file = File( exists=True, argstr="--reg %s", mandatory=True, xor=reg_xors, desc="source-to-reference registration file", ) reg_header = traits.Bool( argstr="--regheader %s", requires=["subject_id"], mandatory=True, xor=reg_xors, desc="register based on header geometry", ) mni152reg = traits.Bool( argstr="--mni152reg", mandatory=True, xor=reg_xors, desc="source volume is in MNI152 space", ) apply_rot = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="--rot %.3f %.3f %.3f", desc="rotation angles (in degrees) to apply to reg matrix", ) apply_trans = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="--trans %.3f %.3f %.3f", desc="translation (in mm) to apply to reg matrix", ) override_reg_subj = traits.Bool( argstr="--srcsubject %s", requires=["subject_id"], desc="override the subject in the reg file header", ) sampling_method = traits.Enum( "point", "max", "average", mandatory=True, argstr="%s", xor=["projection_stem"], requires=["sampling_range", "sampling_units"], desc="how to sample -- at a point or at the max or average over a range", ) sampling_range = traits.Either( traits.Float, traits.Tuple(traits.Float, traits.Float, traits.Float), desc="sampling range - a point or a tuple of (min, max, step)", ) sampling_units = traits.Enum( "mm", "frac", desc="sampling range type -- 
either 'mm' or 'frac'" ) projection_stem = traits.String( mandatory=True, xor=["sampling_method"], desc="stem for precomputed linear estimates and volume fractions", ) smooth_vol = traits.Float( argstr="--fwhm %.3f", desc="smooth input volume (mm fwhm)" ) smooth_surf = traits.Float( argstr="--surf-fwhm %.3f", desc="smooth output surface (mm fwhm)" ) interp_method = traits.Enum( "nearest", "trilinear", argstr="--interp %s", desc="interpolation method" ) cortex_mask = traits.Bool( argstr="--cortex", xor=["mask_label"], desc="mask the target surface with hemi.cortex.label", ) mask_label = File( exists=True, argstr="--mask %s", xor=["cortex_mask"], desc="label file to mask output with", ) float2int_method = traits.Enum( "round", "tkregister", argstr="--float2int %s", desc="method to convert reg matrix values (default is round)", ) fix_tk_reg = traits.Bool( argstr="--fixtkreg", desc="make reg matrix round-compatible" ) subject_id = traits.String(desc="subject id") target_subject = traits.String( argstr="--trgsubject %s", desc="sample to surface of different subject than source", ) surf_reg = traits.Either( traits.Bool, traits.Str(), argstr="--surfreg %s", requires=["target_subject"], desc="use surface registration to target subject", ) ico_order = traits.Int( argstr="--icoorder %d", requires=["target_subject"], desc="icosahedron order when target_subject is 'ico'", ) reshape = traits.Bool( argstr="--reshape", xor=["no_reshape"], desc="reshape surface vector to fit in non-mgh format", ) no_reshape = traits.Bool( argstr="--noreshape", xor=["reshape"], desc="do not reshape surface vector (default)", ) reshape_slices = traits.Int( argstr="--rf %d", desc="number of 'slices' for reshaping" ) scale_input = traits.Float( argstr="--scale %.3f", desc="multiple all intensities by scale factor" ) frame = traits.Int(argstr="--frame %d", desc="save only one frame (0-based)") out_file = File(argstr="--o %s", genfile=True, desc="surface file to write") out_type = traits.Enum( filetypes 
+ implicit_filetypes, argstr="--out_type %s", desc="output file type" ) hits_file = traits.Either( traits.Bool, File(exists=True), argstr="--srchit %s", desc="save image with number of hits at each voxel", ) hits_type = traits.Enum(filetypes, argstr="--srchit_type", desc="hits file type") vox_file = traits.Either( traits.Bool, File, argstr="--nvox %s", desc="text file with the number of voxels intersecting the surface", ) class SampleToSurfaceOutputSpec(TraitedSpec): out_file = File(exists=True, desc="surface file") hits_file = File(exists=True, desc="image with number of hits at each voxel") vox_file = File( exists=True, desc="text file with the number of voxels intersecting the surface" ) class SampleToSurface(FSCommand): """Sample a volume to the cortical surface using Freesurfer's mri_vol2surf. You must supply a sampling method, range, and units. You can project either a given distance (in mm) or a given fraction of the cortical thickness at that vertex along the surface normal from the target surface, and then set the value of that vertex to be either the value at that point or the average or maximum value found along the projection vector. By default, the surface will be saved as a vector with a length equal to the number of vertices on the target surface. This is not a problem for Freesurfer programs, but if you intend to use the file with interfaces to another package, you must set the ``reshape`` input to True, which will factor the surface vector into a matrix with dimensions compatible with proper Nifti files. 
Examples -------- >>> import nipype.interfaces.freesurfer as fs >>> sampler = fs.SampleToSurface(hemi="lh") >>> sampler.inputs.source_file = "cope1.nii.gz" >>> sampler.inputs.reg_file = "register.dat" >>> sampler.inputs.sampling_method = "average" >>> sampler.inputs.sampling_range = 1 >>> sampler.inputs.sampling_units = "frac" >>> sampler.cmdline # doctest: +ELLIPSIS 'mri_vol2surf --hemi lh --o ...lh.cope1.mgz --reg register.dat --projfrac-avg 1.000 --mov cope1.nii.gz' >>> res = sampler.run() # doctest: +SKIP """ _cmd = "mri_vol2surf" input_spec = SampleToSurfaceInputSpec output_spec = SampleToSurfaceOutputSpec def _format_arg(self, name, spec, value): if name == "sampling_method": range = self.inputs.sampling_range units = self.inputs.sampling_units if units == "mm": units = "dist" if isinstance(range, tuple): range = "%.3f %.3f %.3f" % range else: range = "%.3f" % range method = dict(point="", max="-max", average="-avg")[value] return "--proj%s%s %s" % (units, method, range) if name == "reg_header": return spec.argstr % self.inputs.subject_id if name == "override_reg_subj": return spec.argstr % self.inputs.subject_id if name in ["hits_file", "vox_file"]: return spec.argstr % self._get_outfilename(name) if name == "out_type": if isdefined(self.inputs.out_file): _, base, ext = split_filename(self._get_outfilename()) if ext != filemap[value]: if ext in filemap.values(): raise ValueError( "Cannot create {} file with extension " "{}".format(value, ext) ) else: logger.warning( "Creating %s file with extension %s: %s%s", value, ext, base, ext, ) if value in implicit_filetypes: return "" if name == "surf_reg": if value is True: return spec.argstr % "sphere.reg" return super(SampleToSurface, self)._format_arg(name, spec, value) def _get_outfilename(self, opt="out_file"): outfile = getattr(self.inputs, opt) if not isdefined(outfile) or isinstance(outfile, bool): if isdefined(self.inputs.out_type): if opt == "hits_file": suffix = "_hits." 
+ filemap[self.inputs.out_type] else: suffix = "." + filemap[self.inputs.out_type] elif opt == "hits_file": suffix = "_hits.mgz" else: suffix = ".mgz" outfile = fname_presuffix( self.inputs.source_file, newpath=os.getcwd(), prefix=self.inputs.hemi + ".", suffix=suffix, use_ext=False, ) return outfile def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.abspath(self._get_outfilename()) hitsfile = self.inputs.hits_file if isdefined(hitsfile): outputs["hits_file"] = hitsfile if isinstance(hitsfile, bool): hitsfile = self._get_outfilename("hits_file") voxfile = self.inputs.vox_file if isdefined(voxfile): if isinstance(voxfile, bool): voxfile = fname_presuffix( self.inputs.source_file, newpath=os.getcwd(), prefix=self.inputs.hemi + ".", suffix="_vox.txt", use_ext=False, ) outputs["vox_file"] = voxfile return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()[name] return None class SurfaceSmoothInputSpec(FSTraitedSpec): in_file = File(mandatory=True, argstr="--sval %s", desc="source surface file") subject_id = traits.String( mandatory=True, argstr="--s %s", desc="subject id of surface file" ) hemi = traits.Enum( "lh", "rh", argstr="--hemi %s", mandatory=True, desc="hemisphere to operate on" ) fwhm = traits.Float( argstr="--fwhm %.4f", xor=["smooth_iters"], desc="effective FWHM of the smoothing process", ) smooth_iters = traits.Int( argstr="--smooth %d", xor=["fwhm"], desc="iterations of the smoothing process" ) cortex = traits.Bool( True, argstr="--cortex", usedefault=True, desc="only smooth within ``$hemi.cortex.label``", ) reshape = traits.Bool( argstr="--reshape", desc="reshape surface vector to fit in non-mgh format" ) out_file = File(argstr="--tval %s", genfile=True, desc="surface file to write") class SurfaceSmoothOutputSpec(TraitedSpec): out_file = File(exists=True, desc="smoothed surface file") class SurfaceSmooth(FSCommand): """Smooth a surface image with mri_surf2surf. 
The surface is smoothed by an interative process of averaging the value at each vertex with those of its adjacent neighbors. You may supply either the number of iterations to run or a desired effective FWHM of the smoothing process. If the latter, the underlying program will calculate the correct number of iterations internally. See Also -------- `nipype.interfaces.freesurfer.utils.SmoothTessellation`_ interface for smoothing a tessellated surface (e.g. in gifti or .stl) Examples -------- >>> import nipype.interfaces.freesurfer as fs >>> smoother = fs.SurfaceSmooth() >>> smoother.inputs.in_file = "lh.cope1.mgz" >>> smoother.inputs.subject_id = "subj_1" >>> smoother.inputs.hemi = "lh" >>> smoother.inputs.fwhm = 5 >>> smoother.cmdline # doctest: +ELLIPSIS 'mri_surf2surf --cortex --fwhm 5.0000 --hemi lh --sval lh.cope1.mgz --tval ...lh.cope1_smooth5.mgz --s subj_1' >>> smoother.run() # doctest: +SKIP """ _cmd = "mri_surf2surf" input_spec = SurfaceSmoothInputSpec output_spec = SurfaceSmoothOutputSpec def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = self.inputs.out_file if not isdefined(outputs["out_file"]): in_file = self.inputs.in_file if isdefined(self.inputs.fwhm): kernel = self.inputs.fwhm else: kernel = self.inputs.smooth_iters outputs["out_file"] = fname_presuffix( in_file, suffix="_smooth%d" % kernel, newpath=os.getcwd() ) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()[name] return None class SurfaceTransformInputSpec(FSTraitedSpec): source_file = File( exists=True, mandatory=True, argstr="--sval %s", xor=["source_annot_file"], desc="surface file with source values", ) source_annot_file = File( exists=True, mandatory=True, argstr="--sval-annot %s", xor=["source_file"], desc="surface annotation file", ) source_subject = traits.String( mandatory=True, argstr="--srcsubject %s", desc="subject id for source surface" ) hemi = traits.Enum( "lh", "rh", argstr="--hemi %s", mandatory=True, 
desc="hemisphere to transform" ) target_subject = traits.String( mandatory=True, argstr="--trgsubject %s", desc="subject id of target surface" ) target_ico_order = traits.Enum( 1, 2, 3, 4, 5, 6, 7, argstr="--trgicoorder %d", desc=("order of the icosahedron if " "target_subject is 'ico'"), ) source_type = traits.Enum( filetypes, argstr="--sfmt %s", requires=["source_file"], desc="source file format", ) target_type = traits.Enum( filetypes + implicit_filetypes, argstr="--tfmt %s", desc="output format" ) reshape = traits.Bool( argstr="--reshape", desc="reshape output surface to conform with Nifti" ) reshape_factor = traits.Int( argstr="--reshape-factor", desc="number of slices in reshaped image" ) out_file = File(argstr="--tval %s", genfile=True, desc="surface file to write") class SurfaceTransformOutputSpec(TraitedSpec): out_file = File(exists=True, desc="transformed surface file") class SurfaceTransform(FSCommand): """Transform a surface file from one subject to another via a spherical registration. Both the source and target subject must reside in your Subjects Directory, and they must have been processed with recon-all, unless you are transforming to one of the icosahedron meshes. 
Examples -------- >>> from nipype.interfaces.freesurfer import SurfaceTransform >>> sxfm = SurfaceTransform() >>> sxfm.inputs.source_file = "lh.cope1.nii.gz" >>> sxfm.inputs.source_subject = "my_subject" >>> sxfm.inputs.target_subject = "fsaverage" >>> sxfm.inputs.hemi = "lh" >>> sxfm.run() # doctest: +SKIP """ _cmd = "mri_surf2surf" input_spec = SurfaceTransformInputSpec output_spec = SurfaceTransformOutputSpec def _format_arg(self, name, spec, value): if name == "target_type": if isdefined(self.inputs.out_file): _, base, ext = split_filename(self._list_outputs()["out_file"]) if ext != filemap[value]: if ext in filemap.values(): raise ValueError( "Cannot create {} file with extension " "{}".format(value, ext) ) else: logger.warning( "Creating %s file with extension %s: %s%s", value, ext, base, ext, ) if value in implicit_filetypes: return "" return super(SurfaceTransform, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = self.inputs.out_file if not isdefined(outputs["out_file"]): if isdefined(self.inputs.source_file): source = self.inputs.source_file else: source = self.inputs.source_annot_file # Some recon-all files don't have a proper extension (e.g. "lh.thickness") # so we have to account for that here bad_extensions = [ ".%s" % e for e in [ "area", "mid", "pial", "avg_curv", "curv", "inflated", "jacobian_white", "orig", "nofix", "smoothwm", "crv", "sphere", "sulc", "thickness", "volume", "white", ] ] use_ext = True if split_filename(source)[2] in bad_extensions: source = source + ".stripme" use_ext = False ext = "" if isdefined(self.inputs.target_type): ext = "." 
+ filemap[self.inputs.target_type] use_ext = False outputs["out_file"] = fname_presuffix( source, suffix=".%s%s" % (self.inputs.target_subject, ext), newpath=os.getcwd(), use_ext=use_ext, ) else: outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()[name] return None class Surface2VolTransformInputSpec(FSTraitedSpec): source_file = File( exists=True, argstr="--surfval %s", copyfile=False, mandatory=True, xor=["mkmask"], desc="This is the source of the surface values", ) hemi = traits.Str(argstr="--hemi %s", mandatory=True, desc="hemisphere of data") transformed_file = File( name_template="%s_asVol.nii", desc="Output volume", argstr="--outvol %s", name_source=["source_file"], hash_files=False, ) reg_file = File( exists=True, argstr="--volreg %s", mandatory=True, desc="tkRAS-to-tkRAS matrix (tkregister2 format)", xor=["subject_id"], ) template_file = File( exists=True, argstr="--template %s", desc="Output template volume" ) mkmask = traits.Bool( desc="make a mask instead of loading surface values", argstr="--mkmask", xor=["source_file"], ) vertexvol_file = File( name_template="%s_asVol_vertex.nii", desc=( "Path name of the vertex output volume, which " "is the same as output volume except that the " "value of each voxel is the vertex-id that is " "mapped to that voxel." 
), argstr="--vtxvol %s", name_source=["source_file"], hash_files=False, ) surf_name = traits.Str(argstr="--surf %s", desc="surfname (default is white)") projfrac = traits.Float(argstr="--projfrac %s", desc="thickness fraction") subjects_dir = traits.Str( argstr="--sd %s", desc=("freesurfer subjects directory defaults to " "$SUBJECTS_DIR"), ) subject_id = traits.Str(argstr="--identity %s", desc="subject id", xor=["reg_file"]) class Surface2VolTransformOutputSpec(TraitedSpec): transformed_file = File(exists=True, desc="Path to output file if used normally") vertexvol_file = File(desc="vertex map volume path id. Optional") class Surface2VolTransform(FSCommand): """Use FreeSurfer mri_surf2vol to apply a transform. Examples -------- >>> from nipype.interfaces.freesurfer import Surface2VolTransform >>> xfm2vol = Surface2VolTransform() >>> xfm2vol.inputs.source_file = 'lh.cope1.mgz' >>> xfm2vol.inputs.reg_file = 'register.mat' >>> xfm2vol.inputs.hemi = 'lh' >>> xfm2vol.inputs.template_file = 'cope1.nii.gz' >>> xfm2vol.inputs.subjects_dir = '.' >>> xfm2vol.cmdline 'mri_surf2vol --hemi lh --volreg register.mat --surfval lh.cope1.mgz --sd . 
--template cope1.nii.gz --outvol lh.cope1_asVol.nii --vtxvol lh.cope1_asVol_vertex.nii' >>> res = xfm2vol.run()# doctest: +SKIP """ _cmd = "mri_surf2vol" input_spec = Surface2VolTransformInputSpec output_spec = Surface2VolTransformOutputSpec class ApplyMaskInputSpec(FSTraitedSpec): in_file = File( exists=True, mandatory=True, position=-3, argstr="%s", desc="input image (will be masked)", ) mask_file = File( exists=True, mandatory=True, position=-2, argstr="%s", desc="image defining mask space", ) out_file = File( name_source=["in_file"], name_template="%s_masked", hash_files=True, keep_extension=True, position=-1, argstr="%s", desc="final image to write", ) xfm_file = File( exists=True, argstr="-xform %s", desc="LTA-format transformation matrix to align mask with input", ) invert_xfm = traits.Bool(argstr="-invert", desc="invert transformation") xfm_source = File( exists=True, argstr="-lta_src %s", desc="image defining transform source space" ) xfm_target = File( exists=True, argstr="-lta_dst %s", desc="image defining transform target space" ) use_abs = traits.Bool( argstr="-abs", desc="take absolute value of mask before applying" ) mask_thresh = traits.Float(argstr="-T %.4f", desc="threshold mask before applying") keep_mask_deletion_edits = traits.Bool( argstr="-keep_mask_deletion_edits", desc="transfer voxel-deletion edits (voxels=1) from mask to out vol", ) transfer = traits.Int( argstr="-transfer %d", desc="transfer only voxel value # from mask to out" ) class ApplyMaskOutputSpec(TraitedSpec): out_file = File(exists=True, desc="masked image") class ApplyMask(FSCommand): """Use Freesurfer's mri_mask to apply a mask to an image. The mask file need not be binarized; it can be thresholded above a given value before application. It can also optionally be transformed into input space with an LTA matrix. 
""" _cmd = "mri_mask" input_spec = ApplyMaskInputSpec output_spec = ApplyMaskOutputSpec class SurfaceSnapshotsInputSpec(FSTraitedSpec): subject_id = traits.String( position=1, argstr="%s", mandatory=True, desc="subject to visualize" ) hemi = traits.Enum( "lh", "rh", position=2, argstr="%s", mandatory=True, desc="hemisphere to visualize", ) surface = traits.String( position=3, argstr="%s", mandatory=True, desc="surface to visualize" ) show_curv = traits.Bool( argstr="-curv", desc="show curvature", xor=["show_gray_curv"] ) show_gray_curv = traits.Bool( argstr="-gray", desc="show curvature in gray", xor=["show_curv"] ) overlay = File( exists=True, argstr="-overlay %s", desc="load an overlay volume/surface", requires=["overlay_range"], ) reg_xors = ["overlay_reg", "identity_reg", "mni152_reg"] overlay_reg = File( exists=True, argstr="-overlay-reg %s", xor=reg_xors, desc="registration matrix file to register overlay to surface", ) identity_reg = traits.Bool( argstr="-overlay-reg-identity", xor=reg_xors, desc="use the identity matrix to register the overlay to the surface", ) mni152_reg = traits.Bool( argstr="-mni152reg", xor=reg_xors, desc="use to display a volume in MNI152 space on the average subject", ) overlay_range = traits.Either( traits.Float, traits.Tuple(traits.Float, traits.Float), traits.Tuple(traits.Float, traits.Float, traits.Float), desc="overlay range--either min, (min, max) or (min, mid, max)", argstr="%s", ) overlay_range_offset = traits.Float( argstr="-foffset %.3f", desc="overlay range will be symettric around offset value", ) truncate_overlay = traits.Bool( argstr="-truncphaseflag 1", desc="truncate the overlay display" ) reverse_overlay = traits.Bool( argstr="-revphaseflag 1", desc="reverse the overlay display" ) invert_overlay = traits.Bool( argstr="-invphaseflag 1", desc="invert the overlay display" ) demean_overlay = traits.Bool(argstr="-zm", desc="remove mean from overlay") annot_file = File( exists=True, argstr="-annotation %s", 
xor=["annot_name"], desc="path to annotation file to display", ) annot_name = traits.String( argstr="-annotation %s", xor=["annot_file"], desc="name of annotation to display (must be in $subject/label directory", ) label_file = File( exists=True, argstr="-label %s", xor=["label_name"], desc="path to label file to display", ) label_name = traits.String( argstr="-label %s", xor=["label_file"], desc="name of label to display (must be in $subject/label directory", ) colortable = File(exists=True, argstr="-colortable %s", desc="load colortable file") label_under = traits.Bool( argstr="-labels-under", desc="draw label/annotation under overlay" ) label_outline = traits.Bool( argstr="-label-outline", desc="draw label/annotation as outline" ) patch_file = File(exists=True, argstr="-patch %s", desc="load a patch") orig_suffix = traits.String( argstr="-orig %s", desc="set the orig surface suffix string" ) sphere_suffix = traits.String( argstr="-sphere %s", desc="set the sphere.reg suffix string" ) show_color_scale = traits.Bool( argstr="-colscalebarflag 1", desc="display the color scale bar" ) show_color_text = traits.Bool( argstr="-colscaletext 1", desc="display text in the color scale bar" ) six_images = traits.Bool(desc="also take anterior and posterior snapshots") screenshot_stem = traits.String(desc="stem to use for screenshot file names") stem_template_args = traits.List( traits.String, requires=["screenshot_stem"], desc="input names to use as arguments for a string-formated stem template", ) tcl_script = File( exists=True, argstr="%s", genfile=True, desc="override default screenshot script", ) class SurfaceSnapshotsOutputSpec(TraitedSpec): snapshots = OutputMultiPath( File(exists=True), desc="tiff images of the surface from different perspectives" ) class SurfaceSnapshots(FSCommand): """Use Tksurfer to save pictures of the cortical surface. By default, this takes snapshots of the lateral, medial, ventral, and dorsal surfaces. 
See the ``six_images`` option to add the anterior and posterior surfaces. You may also supply your own tcl script (see the Freesurfer wiki for information on scripting tksurfer). The screenshot stem is set as the environment variable "_SNAPSHOT_STEM", which you can use in your own scripts. Node that this interface will not run if you do not have graphics enabled on your system. Examples -------- >>> import nipype.interfaces.freesurfer as fs >>> shots = fs.SurfaceSnapshots(subject_id="fsaverage", hemi="lh", surface="pial") >>> shots.inputs.overlay = "zstat1.nii.gz" >>> shots.inputs.overlay_range = (2.3, 6) >>> shots.inputs.overlay_reg = "register.dat" >>> res = shots.run() # doctest: +SKIP """ _cmd = "tksurfer" input_spec = SurfaceSnapshotsInputSpec output_spec = SurfaceSnapshotsOutputSpec def _format_arg(self, name, spec, value): if name == "tcl_script": if not isdefined(value): return "-tcl snapshots.tcl" else: return "-tcl %s" % value elif name == "overlay_range": if isinstance(value, float): return "-fthresh %.3f" % value else: if len(value) == 2: return "-fminmax %.3f %.3f" % value else: return "-fminmax %.3f %.3f -fmid %.3f" % ( value[0], value[2], value[1], ) elif name == "annot_name" and isdefined(value): # Matching annot by name needs to strip the leading hemi and trailing # extension strings if value.endswith(".annot"): value = value[:-6] if re.match(r"%s[\.\-_]" % self.inputs.hemi, value[:3]): value = value[3:] return "-annotation %s" % value return super(SurfaceSnapshots, self)._format_arg(name, spec, value) def _run_interface(self, runtime): if not isdefined(self.inputs.screenshot_stem): stem = "%s_%s_%s" % ( self.inputs.subject_id, self.inputs.hemi, self.inputs.surface, ) else: stem = self.inputs.screenshot_stem stem_args = self.inputs.stem_template_args if isdefined(stem_args): args = tuple([getattr(self.inputs, arg) for arg in stem_args]) stem = stem % args # Check if the DISPLAY variable is set -- should avoid crashes (might not?) 
if "DISPLAY" not in os.environ: raise RuntimeError("Graphics are not enabled -- cannot run tksurfer") runtime.environ["_SNAPSHOT_STEM"] = stem self._write_tcl_script() runtime = super(SurfaceSnapshots, self)._run_interface(runtime) # If a display window can't be opened, this will crash on # aggregate_outputs. Let's try to parse stderr and raise a # better exception here if that happened. errors = [ "surfer: failed, no suitable display found", "Fatal Error in tksurfer.bin: could not open display", ] for err in errors: if err in runtime.stderr: self.raise_exception(runtime) # Tksurfer always (or at least always when you run a tcl script) # exits with a nonzero returncode. We have to force it to 0 here. runtime.returncode = 0 return runtime def _write_tcl_script(self): fid = open("snapshots.tcl", "w") script = [ "save_tiff $env(_SNAPSHOT_STEM)-lat.tif", "make_lateral_view", "rotate_brain_y 180", "redraw", "save_tiff $env(_SNAPSHOT_STEM)-med.tif", "make_lateral_view", "rotate_brain_x 90", "redraw", "save_tiff $env(_SNAPSHOT_STEM)-ven.tif", "make_lateral_view", "rotate_brain_x -90", "redraw", "save_tiff $env(_SNAPSHOT_STEM)-dor.tif", ] if isdefined(self.inputs.six_images) and self.inputs.six_images: script.extend( [ "make_lateral_view", "rotate_brain_y 90", "redraw", "save_tiff $env(_SNAPSHOT_STEM)-pos.tif", "make_lateral_view", "rotate_brain_y -90", "redraw", "save_tiff $env(_SNAPSHOT_STEM)-ant.tif", ] ) script.append("exit") fid.write("\n".join(script)) fid.close() def _list_outputs(self): outputs = self._outputs().get() if not isdefined(self.inputs.screenshot_stem): stem = "%s_%s_%s" % ( self.inputs.subject_id, self.inputs.hemi, self.inputs.surface, ) else: stem = self.inputs.screenshot_stem stem_args = self.inputs.stem_template_args if isdefined(stem_args): args = tuple([getattr(self.inputs, arg) for arg in stem_args]) stem = stem % args snapshots = ["%s-lat.tif", "%s-med.tif", "%s-dor.tif", "%s-ven.tif"] if self.inputs.six_images: snapshots.extend(["%s-pos.tif", 
"%s-ant.tif"]) snapshots = [self._gen_fname(f % stem, suffix="") for f in snapshots] outputs["snapshots"] = snapshots return outputs def _gen_filename(self, name): if name == "tcl_script": return "snapshots.tcl" return None class ImageInfoInputSpec(FSTraitedSpec): in_file = File(exists=True, position=1, argstr="%s", desc="image to query") class ImageInfoOutputSpec(TraitedSpec): info = traits.Any(desc="output of mri_info") out_file = File(exists=True, desc="text file with image information") data_type = traits.String(desc="image data type") file_format = traits.String(desc="file format") TE = traits.String(desc="echo time (msec)") TR = traits.String(desc="repetition time(msec)") TI = traits.String(desc="inversion time (msec)") dimensions = traits.Tuple(desc="image dimensions (voxels)") vox_sizes = traits.Tuple(desc="voxel sizes (mm)") orientation = traits.String(desc="image orientation") ph_enc_dir = traits.String(desc="phase encode direction") class ImageInfo(FSCommand): _cmd = "mri_info" input_spec = ImageInfoInputSpec output_spec = ImageInfoOutputSpec def info_regexp(self, info, field, delim="\n"): m = re.search(r"%s\s*:\s+(.+?)%s" % (field, delim), info) if m: return m.group(1) else: return None def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() info = runtime.stdout outputs.info = info # Pulse sequence parameters for field in ["TE", "TR", "TI"]: fieldval = self.info_regexp(info, field, ", ") if fieldval.endswith(" msec"): fieldval = fieldval[:-5] setattr(outputs, field, fieldval) # Voxel info vox = self.info_regexp(info, "voxel sizes") vox = tuple(vox.split(", ")) outputs.vox_sizes = vox dim = self.info_regexp(info, "dimensions") dim = tuple([int(d) for d in dim.split(" x ")]) outputs.dimensions = dim outputs.orientation = self.info_regexp(info, "Orientation") outputs.ph_enc_dir = self.info_regexp(info, "PhEncDir") # File format and datatype are both keyed by "type" ftype, dtype = re.findall(r"%s\s*:\s+(.+?)\n" % "type", 
info) outputs.file_format = ftype outputs.data_type = dtype return outputs class MRIsConvertInputSpec(FSTraitedSpec): """ Uses Freesurfer's mris_convert to convert surface files to various formats """ annot_file = File( exists=True, argstr="--annot %s", desc="input is annotation or gifti label data" ) parcstats_file = File( exists=True, argstr="--parcstats %s", desc="infile is name of text file containing label/val pairs", ) label_file = File( exists=True, argstr="--label %s", desc="infile is .label file, label is name of this label", ) scalarcurv_file = File( exists=True, argstr="-c %s", desc="input is scalar curv overlay file (must still specify surface)", ) functional_file = File( exists=True, argstr="-f %s", desc="input is functional time-series or other multi-frame data (must specify surface)", ) labelstats_outfile = File( exists=False, argstr="--labelstats %s", desc="outfile is name of gifti file to which label stats will be written", ) patch = traits.Bool(argstr="-p", desc="input is a patch, not a full surface") rescale = traits.Bool( argstr="-r", desc="rescale vertex xyz so total area is same as group average" ) normal = traits.Bool(argstr="-n", desc="output is an ascii file where vertex data") xyz_ascii = traits.Bool(argstr="-a", desc="Print only surface xyz to ascii file") vertex = traits.Bool( argstr="-v", desc="Writes out neighbors of a vertex in each row" ) scale = traits.Float(argstr="-s %.3f", desc="scale vertex xyz by scale") dataarray_num = traits.Int( argstr="--da_num %d", desc="if input is gifti, 'num' specifies which data array to use", ) talairachxfm_subjid = traits.String( argstr="-t %s", desc="apply talairach xfm of subject to vertex xyz" ) origname = traits.String(argstr="-o %s", desc="read orig positions") in_file = File( exists=True, mandatory=True, position=-2, argstr="%s", desc="File to read/convert", ) out_file = File( argstr="%s", position=-1, genfile=True, xor=["out_datatype"], mandatory=True, desc="output filename or True to generate 
one", ) out_datatype = traits.Enum( "asc", "ico", "tri", "stl", "vtk", "gii", "mgh", "mgz", xor=["out_file"], mandatory=True, desc="These file formats are supported: ASCII: .asc" "ICO: .ico, .tri GEO: .geo STL: .stl VTK: .vtk GIFTI: .gii MGH surface-encoded 'volume': .mgh, .mgz", ) to_scanner = traits.Bool( argstr="--to-scanner", desc="convert coordinates from native FS (tkr) coords to scanner coords", ) to_tkr = traits.Bool( argstr="--to-tkr", desc="convert coordinates from scanner coords to native FS (tkr) coords", ) class MRIsConvertOutputSpec(TraitedSpec): """ Uses Freesurfer's mris_convert to convert surface files to various formats """ converted = File(exists=True, desc="converted output surface") class MRIsConvert(FSCommand): """ Uses Freesurfer's mris_convert to convert surface files to various formats Example ------- >>> import nipype.interfaces.freesurfer as fs >>> mris = fs.MRIsConvert() >>> mris.inputs.in_file = 'lh.pial' >>> mris.inputs.out_datatype = 'gii' >>> mris.run() # doctest: +SKIP """ _cmd = "mris_convert" input_spec = MRIsConvertInputSpec output_spec = MRIsConvertOutputSpec def _format_arg(self, name, spec, value): if name == "out_file" and not os.path.isabs(value): value = os.path.abspath(value) return super(MRIsConvert, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() outputs["converted"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): if name == "out_file": return os.path.abspath(self._gen_outfilename()) else: return None def _gen_outfilename(self): if isdefined(self.inputs.out_file): return self.inputs.out_file elif isdefined(self.inputs.annot_file): _, name, ext = split_filename(self.inputs.annot_file) elif isdefined(self.inputs.parcstats_file): _, name, ext = split_filename(self.inputs.parcstats_file) elif isdefined(self.inputs.label_file): _, name, ext = split_filename(self.inputs.label_file) elif isdefined(self.inputs.scalarcurv_file): _, name, ext 
= split_filename(self.inputs.scalarcurv_file) elif isdefined(self.inputs.functional_file): _, name, ext = split_filename(self.inputs.functional_file) elif isdefined(self.inputs.in_file): _, name, ext = split_filename(self.inputs.in_file) return name + ext + "_converted." + self.inputs.out_datatype class MRIsCombineInputSpec(FSTraitedSpec): """ Uses Freesurfer's mris_convert to combine two surface files into one. """ in_files = traits.List( File(Exists=True), maxlen=2, minlen=2, mandatory=True, position=1, argstr="--combinesurfs %s", desc="Two surfaces to be combined.", ) out_file = File( argstr="%s", position=-1, genfile=True, mandatory=True, desc="Output filename. Combined surfaces from in_files.", ) class MRIsCombineOutputSpec(TraitedSpec): """ Uses Freesurfer's mris_convert to combine two surface files into one. """ out_file = File( exists=True, desc="Output filename. Combined surfaces from " "in_files." ) class MRIsCombine(FSSurfaceCommand): """ Uses Freesurfer's ``mris_convert`` to combine two surface files into one. For complete details, see the `mris_convert Documentation. `_ If given an ``out_file`` that does not begin with ``'lh.'`` or ``'rh.'``, ``mris_convert`` will prepend ``'lh.'`` to the file name. To avoid this behavior, consider setting ``out_file = './'``, or leaving out_file blank. In a Node/Workflow, ``out_file`` is interpreted literally. Example ------- >>> import nipype.interfaces.freesurfer as fs >>> mris = fs.MRIsCombine() >>> mris.inputs.in_files = ['lh.pial', 'rh.pial'] >>> mris.inputs.out_file = 'bh.pial' >>> mris.cmdline 'mris_convert --combinesurfs lh.pial rh.pial bh.pial' >>> mris.run() # doctest: +SKIP """ _cmd = "mris_convert" input_spec = MRIsCombineInputSpec output_spec = MRIsCombineOutputSpec def _list_outputs(self): outputs = self._outputs().get() # mris_convert --combinesurfs uses lh. 
as the default prefix # regardless of input file names, except when path info is # specified path, base = os.path.split(self.inputs.out_file) if path == "" and base[:3] not in ("lh.", "rh."): base = "lh." + base outputs["out_file"] = os.path.abspath(os.path.join(path, base)) return outputs def normalize_filenames(self): """ Filename normalization routine to perform only when run in Node context. Interpret out_file as a literal path to reduce surprise. """ if isdefined(self.inputs.out_file): self.inputs.out_file = os.path.abspath(self.inputs.out_file) class MRITessellateInputSpec(FSTraitedSpec): """ Uses Freesurfer's mri_tessellate to create surfaces by tessellating a given input volume """ in_file = File( exists=True, mandatory=True, position=-3, argstr="%s", desc="Input volume to tesselate voxels from.", ) label_value = traits.Int( position=-2, argstr="%d", mandatory=True, desc='Label value which to tesselate from the input volume. (integer, if input is "filled.mgz" volume, 127 is rh, 255 is lh)', ) out_file = File( argstr="%s", position=-1, genfile=True, desc="output filename or True to generate one", ) tesselate_all_voxels = traits.Bool( argstr="-a", desc="Tessellate the surface of all voxels with different labels" ) use_real_RAS_coordinates = traits.Bool( argstr="-n", desc="Saves surface with real RAS coordinates where c_(r,a,s) != 0" ) class MRITessellateOutputSpec(TraitedSpec): """ Uses Freesurfer's mri_tessellate to create surfaces by tessellating a given input volume """ surface = File(exists=True, desc="binary surface of the tessellation ") class MRITessellate(FSCommand): """ Uses Freesurfer's mri_tessellate to create surfaces by tessellating a given input volume Example ------- >>> import nipype.interfaces.freesurfer as fs >>> tess = fs.MRITessellate() >>> tess.inputs.in_file = 'aseg.mgz' >>> tess.inputs.label_value = 17 >>> tess.inputs.out_file = 'lh.hippocampus' >>> tess.run() # doctest: +SKIP """ _cmd = "mri_tessellate" input_spec = 
MRITessellateInputSpec output_spec = MRITessellateOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["surface"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): if name == "out_file": return self._gen_outfilename() else: return None def _gen_outfilename(self): if isdefined(self.inputs.out_file): return self.inputs.out_file else: _, name, ext = split_filename(self.inputs.in_file) return name + ext + "_" + str(self.inputs.label_value) class MRIPretessInputSpec(FSTraitedSpec): in_filled = File( exists=True, mandatory=True, position=-4, argstr="%s", desc=("filled volume, usually wm.mgz"), ) label = traits.Either( traits.Str("wm"), traits.Int(1), argstr="%s", default="wm", mandatory=True, usedefault=True, position=-3, desc=( "label to be picked up, can be a Freesurfer's string like " "'wm' or a label value (e.g. 127 for rh or 255 for lh)" ), ) in_norm = File( exists=True, mandatory=True, position=-2, argstr="%s", desc=("the normalized, brain-extracted T1w image. Usually norm.mgz"), ) out_file = File( position=-1, argstr="%s", name_source=["in_filled"], name_template="%s_pretesswm", keep_extension=True, desc="the output file after mri_pretess.", ) nocorners = traits.Bool( False, argstr="-nocorners", desc=("do not remove corner configurations" " in addition to edge ones."), ) keep = traits.Bool(False, argstr="-keep", desc=("keep WM edits")) test = traits.Bool( False, argstr="-test", desc=( "adds a voxel that should be removed by " "mri_pretess. The value of the voxel is set to that of an ON-edited WM, " "so it should be kept with -keep. The output will NOT be saved." ), ) class MRIPretessOutputSpec(TraitedSpec): out_file = File(exists=True, desc="output file after mri_pretess") class MRIPretess(FSCommand): """ Uses Freesurfer's mri_pretess to prepare volumes to be tessellated. 
Changes white matter (WM) segmentation so that the neighbors of all voxels labeled as WM have a face in common - no edges or corners allowed. Example ------- >>> import nipype.interfaces.freesurfer as fs >>> pretess = fs.MRIPretess() >>> pretess.inputs.in_filled = 'wm.mgz' >>> pretess.inputs.in_norm = 'norm.mgz' >>> pretess.inputs.nocorners = True >>> pretess.cmdline 'mri_pretess -nocorners wm.mgz wm norm.mgz wm_pretesswm.mgz' >>> pretess.run() # doctest: +SKIP """ _cmd = "mri_pretess" input_spec = MRIPretessInputSpec output_spec = MRIPretessOutputSpec class MRIMarchingCubesInputSpec(FSTraitedSpec): """ Uses Freesurfer's mri_mc to create surfaces by tessellating a given input volume """ in_file = File( exists=True, mandatory=True, position=1, argstr="%s", desc="Input volume to tesselate voxels from.", ) label_value = traits.Int( position=2, argstr="%d", mandatory=True, desc='Label value which to tesselate from the input volume. (integer, if input is "filled.mgz" volume, 127 is rh, 255 is lh)', ) connectivity_value = traits.Int( 1, position=-1, argstr="%d", usedefault=True, desc="Alter the marching cubes connectivity: 1=6+,2=18,3=6,4=26 (default=1)", ) out_file = File( argstr="./%s", position=-2, genfile=True, desc="output filename or True to generate one", ) class MRIMarchingCubesOutputSpec(TraitedSpec): """ Uses Freesurfer's mri_mc to create surfaces by tessellating a given input volume """ surface = File(exists=True, desc="binary surface of the tessellation ") class MRIMarchingCubes(FSCommand): """ Uses Freesurfer's mri_mc to create surfaces by tessellating a given input volume Example ------- >>> import nipype.interfaces.freesurfer as fs >>> mc = fs.MRIMarchingCubes() >>> mc.inputs.in_file = 'aseg.mgz' >>> mc.inputs.label_value = 17 >>> mc.inputs.out_file = 'lh.hippocampus' >>> mc.run() # doctest: +SKIP """ _cmd = "mri_mc" input_spec = MRIMarchingCubesInputSpec output_spec = MRIMarchingCubesOutputSpec def _list_outputs(self): outputs = self.output_spec().get() 
outputs["surface"] = self._gen_outfilename() return outputs def _gen_filename(self, name): if name == "out_file": return self._gen_outfilename() else: return None def _gen_outfilename(self): if isdefined(self.inputs.out_file): return os.path.abspath(self.inputs.out_file) else: _, name, ext = split_filename(self.inputs.in_file) return os.path.abspath(name + ext + "_" + str(self.inputs.label_value)) class SmoothTessellationInputSpec(FSTraitedSpec): in_file = File( exists=True, mandatory=True, argstr="%s", position=-2, copyfile=True, desc="Input volume to tesselate voxels from.", ) curvature_averaging_iterations = traits.Int( argstr="-a %d", desc="Number of curvature averaging iterations (default=10)" ) smoothing_iterations = traits.Int( argstr="-n %d", desc="Number of smoothing iterations (default=10)" ) snapshot_writing_iterations = traits.Int( argstr="-w %d", desc="Write snapshot every *n* iterations" ) use_gaussian_curvature_smoothing = traits.Bool( argstr="-g", desc="Use Gaussian curvature smoothing" ) gaussian_curvature_norm_steps = traits.Int( argstr="%d", desc="Use Gaussian curvature smoothing" ) gaussian_curvature_smoothing_steps = traits.Int( argstr=" %d", desc="Use Gaussian curvature smoothing" ) disable_estimates = traits.Bool( argstr="-nw", desc="Disables the writing of curvature and area estimates" ) normalize_area = traits.Bool( argstr="-area", desc="Normalizes the area after smoothing" ) use_momentum = traits.Bool(argstr="-m", desc="Uses momentum") out_file = File( argstr="%s", position=-1, genfile=True, desc="output filename or True to generate one", ) out_curvature_file = File( argstr="-c %s", desc='Write curvature to ``?h.curvname`` (default "curv")' ) out_area_file = File( argstr="-b %s", desc='Write area to ``?h.areaname`` (default "area")' ) seed = traits.Int( argstr="-seed %d", desc="Seed for setting random number generator" ) class SmoothTessellationOutputSpec(TraitedSpec): """ This program smooths the tessellation of a surface using 
'mris_smooth' """ surface = File(exists=True, desc="Smoothed surface file.") class SmoothTessellation(FSCommand): """ Smooth a tessellated surface. See Also -------- `nipype.interfaces.freesurfer.utils.SurfaceSmooth`_ interface for smoothing a scalar field along a surface manifold Example ------- >>> import nipype.interfaces.freesurfer as fs >>> smooth = fs.SmoothTessellation() >>> smooth.inputs.in_file = 'lh.hippocampus.stl' >>> smooth.run() # doctest: +SKIP """ _cmd = "mris_smooth" input_spec = SmoothTessellationInputSpec output_spec = SmoothTessellationOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["surface"] = self._gen_outfilename() return outputs def _gen_filename(self, name): if name == "out_file": return self._gen_outfilename() else: return None def _gen_outfilename(self): if isdefined(self.inputs.out_file): return os.path.abspath(self.inputs.out_file) else: _, name, ext = split_filename(self.inputs.in_file) return os.path.abspath(name + "_smoothed" + ext) def _run_interface(self, runtime): # The returncode is meaningless in BET. So check the output # in stderr and if it's set, then update the returncode # accordingly. 
runtime = super(SmoothTessellation, self)._run_interface(runtime) if "failed" in runtime.stderr: self.raise_exception(runtime) return runtime class MakeAverageSubjectInputSpec(FSTraitedSpec): subjects_ids = traits.List( traits.Str(), argstr="--subjects %s", desc="freesurfer subjects ids to average", mandatory=True, sep=" ", ) out_name = File( "average", argstr="--out %s", desc="name for the average subject", usedefault=True, ) class MakeAverageSubjectOutputSpec(TraitedSpec): average_subject_name = traits.Str(desc="Output registration file") class MakeAverageSubject(FSCommand): """Make an average freesurfer subject Examples -------- >>> from nipype.interfaces.freesurfer import MakeAverageSubject >>> avg = MakeAverageSubject(subjects_ids=['s1', 's2']) >>> avg.cmdline 'make_average_subject --out average --subjects s1 s2' """ _cmd = "make_average_subject" input_spec = MakeAverageSubjectInputSpec output_spec = MakeAverageSubjectOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["average_subject_name"] = self.inputs.out_name return outputs class ExtractMainComponentInputSpec(CommandLineInputSpec): in_file = File( exists=True, mandatory=True, argstr="%s", position=1, desc="input surface file" ) out_file = File( name_template="%s.maincmp", name_source="in_file", argstr="%s", position=2, desc="surface containing main component", ) class ExtractMainComponentOutputSpec(TraitedSpec): out_file = File(exists=True, desc="surface containing main component") class ExtractMainComponent(CommandLine): """Extract the main component of a tesselated surface Examples -------- >>> from nipype.interfaces.freesurfer import ExtractMainComponent >>> mcmp = ExtractMainComponent(in_file='lh.pial') >>> mcmp.cmdline 'mris_extract_main_component lh.pial lh.maincmp' """ _cmd = "mris_extract_main_component" input_spec = ExtractMainComponentInputSpec output_spec = ExtractMainComponentOutputSpec class Tkregister2InputSpec(FSTraitedSpec): target_image = File( exists=True, 
argstr="--targ %s", xor=["fstarg"], desc="target volume" ) fstarg = traits.Bool( False, argstr="--fstarg", xor=["target_image"], desc="use subject's T1 as reference", ) moving_image = File( exists=True, mandatory=True, argstr="--mov %s", desc="moving volume" ) # Input registration file options fsl_in_matrix = File( exists=True, argstr="--fsl %s", desc="fsl-style registration input matrix" ) xfm = File( exists=True, argstr="--xfm %s", desc="use a matrix in MNI coordinates as initial registration", ) lta_in = File( exists=True, argstr="--lta %s", desc="use a matrix in MNI coordinates as initial registration", ) invert_lta_in = traits.Bool( requires=["lta_in"], desc="Invert input LTA before applying" ) # Output registration file options fsl_out = traits.Either( True, File, argstr="--fslregout %s", desc="compute an FSL-compatible resgitration matrix", ) lta_out = traits.Either( True, File, argstr="--ltaout %s", desc="output registration file (LTA format)" ) invert_lta_out = traits.Bool( argstr="--ltaout-inv", requires=["lta_in"], desc="Invert input LTA before applying", ) subject_id = traits.String(argstr="--s %s", desc="freesurfer subject ID") noedit = traits.Bool( True, argstr="--noedit", usedefault=True, desc="do not open edit window (exit)" ) reg_file = File( "register.dat", usedefault=True, mandatory=True, argstr="--reg %s", desc="freesurfer-style registration file", ) reg_header = traits.Bool( False, argstr="--regheader", desc="compute regstration from headers" ) fstal = traits.Bool( False, argstr="--fstal", xor=["target_image", "moving_image", "reg_file"], desc="set mov to be tal and reg to be tal xfm", ) movscale = traits.Float( argstr="--movscale %f", desc="adjust registration matrix to scale mov" ) class Tkregister2OutputSpec(TraitedSpec): reg_file = File(exists=True, desc="freesurfer-style registration file") fsl_file = File(desc="FSL-style registration file") lta_file = File(desc="LTA-style registration file") class Tkregister2(FSCommand): """ Examples 
-------- Get transform matrix between orig (*tkRAS*) and native (*scannerRAS*) coordinates in Freesurfer. Implements the first step of mapping surfaces to native space in `this guide `__. >>> from nipype.interfaces.freesurfer import Tkregister2 >>> tk2 = Tkregister2(reg_file='T1_to_native.dat') >>> tk2.inputs.moving_image = 'T1.mgz' >>> tk2.inputs.target_image = 'structural.nii' >>> tk2.inputs.reg_header = True >>> tk2.cmdline 'tkregister2 --mov T1.mgz --noedit --reg T1_to_native.dat --regheader \ --targ structural.nii' >>> tk2.run() # doctest: +SKIP The example below uses tkregister2 without the manual editing stage to convert FSL-style registration matrix (.mat) to FreeSurfer-style registration matrix (.dat) >>> from nipype.interfaces.freesurfer import Tkregister2 >>> tk2 = Tkregister2() >>> tk2.inputs.moving_image = 'epi.nii' >>> tk2.inputs.fsl_in_matrix = 'flirt.mat' >>> tk2.cmdline 'tkregister2 --fsl flirt.mat --mov epi.nii --noedit --reg register.dat' >>> tk2.run() # doctest: +SKIP """ _cmd = "tkregister2" input_spec = Tkregister2InputSpec output_spec = Tkregister2OutputSpec def _format_arg(self, name, spec, value): if name == "lta_in" and self.inputs.invert_lta_in: spec = "--lta-inv %s" if name in ("fsl_out", "lta_out") and value is True: value = self._list_outputs()[name] return super(Tkregister2, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() reg_file = os.path.abspath(self.inputs.reg_file) outputs["reg_file"] = reg_file cwd = os.getcwd() fsl_out = self.inputs.fsl_out if isdefined(fsl_out): if fsl_out is True: outputs["fsl_file"] = fname_presuffix( reg_file, suffix=".mat", newpath=cwd, use_ext=False ) else: outputs["fsl_file"] = os.path.abspath(self.inputs.fsl_out) lta_out = self.inputs.lta_out if isdefined(lta_out): if lta_out is True: outputs["lta_file"] = fname_presuffix( reg_file, suffix=".lta", newpath=cwd, use_ext=False ) else: outputs["lta_file"] = os.path.abspath(self.inputs.lta_out) return outputs def 
_gen_outfilename(self): if isdefined(self.inputs.out_file): return os.path.abspath(self.inputs.out_file) else: _, name, ext = split_filename(self.inputs.in_file) return os.path.abspath(name + "_smoothed" + ext) class AddXFormToHeaderInputSpec(FSTraitedSpec): # required in_file = File( exists=True, mandatory=True, position=-2, argstr="%s", desc="input volume" ) # transform file does NOT need to exist at the time if using copy_name transform = File( exists=False, mandatory=True, position=-3, argstr="%s", desc="xfm file" ) out_file = File( "output.mgz", position=-1, argstr="%s", usedefault=True, desc="output volume" ) # optional copy_name = traits.Bool( argstr="-c", desc="do not try to load the xfmfile, just copy name" ) verbose = traits.Bool(argstr="-v", desc="be verbose") class AddXFormToHeaderOutputSpec(TraitedSpec): out_file = File(exists=True, desc="output volume") class AddXFormToHeader(FSCommand): """ Just adds specified xform to the volume header. .. danger :: Input transform **MUST** be an absolute path to a DataSink'ed transform or the output will reference a transform in the workflow cache directory! 
Examples -------- >>> from nipype.interfaces.freesurfer import AddXFormToHeader >>> adder = AddXFormToHeader() >>> adder.inputs.in_file = 'norm.mgz' >>> adder.inputs.transform = 'trans.mat' >>> adder.cmdline 'mri_add_xform_to_header trans.mat norm.mgz output.mgz' >>> adder.inputs.copy_name = True >>> adder.cmdline 'mri_add_xform_to_header -c trans.mat norm.mgz output.mgz' >>> adder.run() # doctest: +SKIP References ---------- [https://surfer.nmr.mgh.harvard.edu/fswiki/mri_add_xform_to_header] """ _cmd = "mri_add_xform_to_header" input_spec = AddXFormToHeaderInputSpec output_spec = AddXFormToHeaderOutputSpec def _format_arg(self, name, spec, value): if name == "transform": return value # os.path.abspath(value) # if name == 'copy_name' and value: # self.input_spec.transform return super(AddXFormToHeader, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class CheckTalairachAlignmentInputSpec(FSTraitedSpec): in_file = File( argstr="-xfm %s", xor=["subject"], exists=True, mandatory=True, position=-1, desc="specify the talairach.xfm file to check", ) subject = traits.String( argstr="-subj %s", xor=["in_file"], mandatory=True, position=-1, desc="specify subject's name", ) # optional threshold = traits.Float( default_value=0.010, usedefault=True, argstr="-T %.3f", desc="Talairach transforms for subjects with p-values <= T " + "are considered as very unlikely default=0.010", ) class CheckTalairachAlignmentOutputSpec(TraitedSpec): out_file = File(exists=True, desc="The input file for CheckTalairachAlignment") class CheckTalairachAlignment(FSCommand): """ This program detects Talairach alignment failures Examples ======== >>> from nipype.interfaces.freesurfer import CheckTalairachAlignment >>> checker = CheckTalairachAlignment() >>> checker.inputs.in_file = 'trans.mat' >>> checker.inputs.threshold = 0.005 >>> checker.cmdline 'talairach_afd -T 0.005 -xfm 
trans.mat' >>> checker.run() # doctest: +SKIP """ _cmd = "talairach_afd" input_spec = CheckTalairachAlignmentInputSpec output_spec = CheckTalairachAlignmentOutputSpec def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = self.inputs.in_file return outputs class TalairachAVIInputSpec(FSTraitedSpec): in_file = File(argstr="--i %s", exists=True, mandatory=True, desc="input volume") out_file = File( argstr="--xfm %s", mandatory=True, exists=False, desc="output xfm file" ) # optional atlas = traits.String( argstr="--atlas %s", desc="alternate target atlas (in freesurfer/average dir)" ) class TalairachAVIOutputSpec(TraitedSpec): out_file = File(exists=False, desc="The output transform for TalairachAVI") out_log = File(exists=False, desc="The output log file for TalairachAVI") out_txt = File(exists=False, desc="The output text file for TaliarachAVI") class TalairachAVI(FSCommand): """ Front-end for Avi Snyders image registration tool. Computes the talairach transform that maps the input volume to the MNI average_305. This does not add the xfm to the header of the input file. When called by recon-all, the xfm is added to the header after the transform is computed. 
Examples ======== >>> from nipype.interfaces.freesurfer import TalairachAVI >>> example = TalairachAVI() >>> example.inputs.in_file = 'norm.mgz' >>> example.inputs.out_file = 'trans.mat' >>> example.cmdline 'talairach_avi --i norm.mgz --xfm trans.mat' >>> example.run() # doctest: +SKIP """ _cmd = "talairach_avi" input_spec = TalairachAVIInputSpec output_spec = TalairachAVIOutputSpec def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) outputs["out_log"] = os.path.abspath("talairach_avi.log") outputs["out_txt"] = os.path.join( os.path.dirname(self.inputs.out_file), "talsrcimg_to_" + str(self.inputs.atlas) + "t4_vox2vox.txt", ) return outputs class TalairachQCInputSpec(FSTraitedSpec): log_file = File( argstr="%s", mandatory=True, exists=True, position=0, desc="The log file for TalairachQC", ) class TalairachQC(FSScriptCommand): """ Examples ======== >>> from nipype.interfaces.freesurfer import TalairachQC >>> qc = TalairachQC() >>> qc.inputs.log_file = 'dirs.txt' >>> qc.cmdline 'tal_QC_AZS dirs.txt' """ _cmd = "tal_QC_AZS" input_spec = TalairachQCInputSpec output_spec = FSScriptOutputSpec class RemoveNeckInputSpec(FSTraitedSpec): in_file = File( argstr="%s", exists=True, mandatory=True, position=-4, desc="Input file for RemoveNeck", ) out_file = File( argstr="%s", exists=False, name_source=["in_file"], name_template="%s_noneck", hash_files=False, keep_extension=True, position=-1, desc="Output file for RemoveNeck", ) transform = File( argstr="%s", exists=True, mandatory=True, position=-3, desc="Input transform file for RemoveNeck", ) template = File( argstr="%s", exists=True, mandatory=True, position=-2, desc="Input template file for RemoveNeck", ) # optional radius = traits.Int(argstr="-radius %d", desc="Radius") class RemoveNeckOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output file with neck removed") class RemoveNeck(FSCommand): """ Crops the neck out of the mri image Examples 
======== >>> from nipype.interfaces.freesurfer import TalairachQC >>> remove_neck = RemoveNeck() >>> remove_neck.inputs.in_file = 'norm.mgz' >>> remove_neck.inputs.transform = 'trans.mat' >>> remove_neck.inputs.template = 'trans.mat' >>> remove_neck.cmdline 'mri_remove_neck norm.mgz trans.mat trans.mat norm_noneck.mgz' """ _cmd = "mri_remove_neck" input_spec = RemoveNeckInputSpec output_spec = RemoveNeckOutputSpec def _gen_fname(self, name): if name == "out_file": return os.path.abspath("nu_noneck.mgz") return None def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class MRIFillInputSpec(FSTraitedSpec): in_file = File( argstr="%s", mandatory=True, exists=True, position=-2, desc="Input white matter file", ) out_file = File( argstr="%s", mandatory=True, exists=False, position=-1, desc="Output filled volume file name for MRIFill", ) # optional segmentation = File( argstr="-segmentation %s", exists=True, desc="Input segmentation file for MRIFill", ) transform = File( argstr="-xform %s", exists=True, desc="Input transform file for MRIFill" ) log_file = File(argstr="-a %s", desc="Output log file for MRIFill") class MRIFillOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output file from MRIFill") log_file = File(desc="Output log file from MRIFill") class MRIFill(FSCommand): """ This program creates hemispheric cutting planes and fills white matter with specific values for subsequent surface tesselation. 
Examples ======== >>> from nipype.interfaces.freesurfer import MRIFill >>> fill = MRIFill() >>> fill.inputs.in_file = 'wm.mgz' # doctest: +SKIP >>> fill.inputs.out_file = 'filled.mgz' # doctest: +SKIP >>> fill.cmdline # doctest: +SKIP 'mri_fill wm.mgz filled.mgz' """ _cmd = "mri_fill" input_spec = MRIFillInputSpec output_spec = MRIFillOutputSpec def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) if isdefined(self.inputs.log_file): outputs["log_file"] = os.path.abspath(self.inputs.log_file) return outputs class MRIsInflateInputSpec(FSTraitedSpec): in_file = File( argstr="%s", position=-2, mandatory=True, exists=True, copyfile=True, desc="Input file for MRIsInflate", ) out_file = File( argstr="%s", position=-1, exists=False, name_source=["in_file"], name_template="%s.inflated", hash_files=False, keep_extension=True, desc="Output file for MRIsInflate", ) # optional out_sulc = File(exists=False, xor=["no_save_sulc"], desc="Output sulc file") no_save_sulc = traits.Bool( argstr="-no-save-sulc", xor=["out_sulc"], desc="Do not save sulc file as output" ) class MRIsInflateOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output file for MRIsInflate") out_sulc = File(exists=False, desc="Output sulc file") class MRIsInflate(FSCommand): """ This program will inflate a cortical surface. 
Examples ======== >>> from nipype.interfaces.freesurfer import MRIsInflate >>> inflate = MRIsInflate() >>> inflate.inputs.in_file = 'lh.pial' >>> inflate.inputs.no_save_sulc = True >>> inflate.cmdline # doctest: +SKIP 'mris_inflate -no-save-sulc lh.pial lh.inflated' """ _cmd = "mris_inflate" input_spec = MRIsInflateInputSpec output_spec = MRIsInflateOutputSpec def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) if not self.inputs.no_save_sulc: # if the sulc file will be saved outputs["out_sulc"] = os.path.abspath(self.inputs.out_sulc) return outputs class SphereInputSpec(FSTraitedSpecOpenMP): in_file = File( argstr="%s", position=-2, copyfile=True, mandatory=True, exists=True, desc="Input file for Sphere", ) out_file = File( argstr="%s", position=-1, exists=False, name_source=["in_file"], hash_files=False, name_template="%s.sphere", desc="Output file for Sphere", ) # optional seed = traits.Int( argstr="-seed %d", desc="Seed for setting random number generator" ) magic = traits.Bool( argstr="-q", desc="No documentation. 
Direct questions to analysis-bugs@nmr.mgh.harvard.edu", ) in_smoothwm = File( exists=True, copyfile=True, desc="Input surface required when -q flag is not selected", ) class SphereOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output file for Sphere") class Sphere(FSCommandOpenMP): """ This program will add a template into an average surface Examples ======== >>> from nipype.interfaces.freesurfer import Sphere >>> sphere = Sphere() >>> sphere.inputs.in_file = 'lh.pial' >>> sphere.cmdline 'mris_sphere lh.pial lh.sphere' """ _cmd = "mris_sphere" input_spec = SphereInputSpec output_spec = SphereOutputSpec def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class FixTopologyInputSpec(FSTraitedSpec): in_orig = File( exists=True, mandatory=True, desc="Undocumented input file .orig" ) in_inflated = File( exists=True, mandatory=True, desc="Undocumented input file .inflated", ) in_brain = File(exists=True, mandatory=True, desc="Implicit input brain.mgz") in_wm = File(exists=True, mandatory=True, desc="Implicit input wm.mgz") hemisphere = traits.String( position=-1, argstr="%s", mandatory=True, desc="Hemisphere being processed" ) subject_id = traits.String( "subject_id", position=-2, argstr="%s", mandatory=True, usedefault=True, desc="Subject being processed", ) copy_inputs = traits.Bool( mandatory=True, desc="If running as a node, set this to True " + "otherwise, the topology fixing will be done " + "in place.", ) # optional seed = traits.Int( argstr="-seed %d", desc="Seed for setting random number generator" ) ga = traits.Bool( argstr="-ga", desc="No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu", ) mgz = traits.Bool( argstr="-mgz", desc="No documentation. 
Direct questions to analysis-bugs@nmr.mgh.harvard.edu", ) sphere = File(argstr="-sphere %s", desc="Sphere input file") class FixTopologyOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output file for FixTopology") class FixTopology(FSCommand): """ This program computes a mapping from the unit sphere onto the surface of the cortex from a previously generated approximation of the cortical surface, thus guaranteeing a topologically correct surface. Examples ======== >>> from nipype.interfaces.freesurfer import FixTopology >>> ft = FixTopology() >>> ft.inputs.in_orig = 'lh.orig' # doctest: +SKIP >>> ft.inputs.in_inflated = 'lh.inflated' # doctest: +SKIP >>> ft.inputs.sphere = 'lh.qsphere.nofix' # doctest: +SKIP >>> ft.inputs.hemisphere = 'lh' >>> ft.inputs.subject_id = '10335' >>> ft.inputs.mgz = True >>> ft.inputs.ga = True >>> ft.cmdline # doctest: +SKIP 'mris_fix_topology -ga -mgz -sphere qsphere.nofix 10335 lh' """ _cmd = "mris_fix_topology" input_spec = FixTopologyInputSpec output_spec = FixTopologyOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() if "subjects_dir" in inputs: inputs["subjects_dir"] = self.inputs.subjects_dir hemi = self.inputs.hemisphere copy2subjdir(self, self.inputs.sphere, folder="surf") # the orig file is edited in place self.inputs.in_orig = copy2subjdir( self, self.inputs.in_orig, folder="surf", basename="{0}.orig".format(hemi), ) copy2subjdir( self, self.inputs.in_inflated, folder="surf", basename="{0}.inflated".format(hemi), ) copy2subjdir(self, self.inputs.in_brain, folder="mri", basename="brain.mgz") copy2subjdir(self, self.inputs.in_wm, folder="mri", basename="wm.mgz") return super(FixTopology, self).run(**inputs) def _format_arg(self, name, spec, value): if name == "sphere": # get the basename and take out the hemisphere suffix = os.path.basename(value).split(".", 1)[1] return spec.argstr % suffix return super(FixTopology, self)._format_arg(name, spec, value) def 
_list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.abspath(self.inputs.in_orig) return outputs class EulerNumberInputSpec(FSTraitedSpec): in_file = File( argstr="%s", position=-1, mandatory=True, exists=True, desc="Input file for EulerNumber", ) class EulerNumberOutputSpec(TraitedSpec): euler = traits.Int( desc="Euler number of cortical surface. A value of 2 signals a " "topologically correct surface model with no holes" ) defects = traits.Int(desc="Number of defects") class EulerNumber(FSCommand): """ This program computes EulerNumber for a cortical surface Examples ======== >>> from nipype.interfaces.freesurfer import EulerNumber >>> ft = EulerNumber() >>> ft.inputs.in_file = 'lh.pial' >>> ft.cmdline 'mris_euler_number lh.pial' """ _cmd = "mris_euler_number" input_spec = EulerNumberInputSpec output_spec = EulerNumberOutputSpec def _run_interface(self, runtime): runtime = super()._run_interface(runtime) self._parse_output(runtime.stdout, runtime.stderr) return runtime def _parse_output(self, stdout, stderr): """Parse stdout / stderr and extract defects""" m = re.search(r"(?<=total defect index = )\d+", stdout or stderr) if m is None: raise RuntimeError("Could not fetch defect index") self._defects = int(m.group()) def _list_outputs(self): outputs = self._outputs().get() outputs["defects"] = self._defects outputs["euler"] = 2 - (2 * self._defects) return outputs class RemoveIntersectionInputSpec(FSTraitedSpec): in_file = File( argstr="%s", position=-2, mandatory=True, exists=True, copyfile=True, desc="Input file for RemoveIntersection", ) out_file = File( argstr="%s", position=-1, exists=False, name_source=["in_file"], name_template="%s", hash_files=False, keep_extension=True, desc="Output file for RemoveIntersection", ) class RemoveIntersectionOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output file for RemoveIntersection") class RemoveIntersection(FSCommand): """ This program removes the intersection of the given 
MRI Examples ======== >>> from nipype.interfaces.freesurfer import RemoveIntersection >>> ri = RemoveIntersection() >>> ri.inputs.in_file = 'lh.pial' >>> ri.cmdline 'mris_remove_intersection lh.pial lh.pial' """ _cmd = "mris_remove_intersection" input_spec = RemoveIntersectionInputSpec output_spec = RemoveIntersectionOutputSpec def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class MakeSurfacesInputSpec(FSTraitedSpec): # required hemisphere = traits.Enum( "lh", "rh", position=-1, argstr="%s", mandatory=True, desc="Hemisphere being processed", ) subject_id = traits.String( "subject_id", usedefault=True, position=-2, argstr="%s", mandatory=True, desc="Subject being processed", ) # implicit in_orig = File( exists=True, mandatory=True, argstr="-orig %s", desc="Implicit input file .orig", ) in_wm = File(exists=True, mandatory=True, desc="Implicit input file wm.mgz") in_filled = File(exists=True, mandatory=True, desc="Implicit input file filled.mgz") # optional in_white = File(exists=True, desc="Implicit input that is sometimes used") in_label = File( exists=True, xor=["noaparc"], desc="Implicit input label/.aparc.annot", ) orig_white = File( argstr="-orig_white %s", exists=True, desc="Specify a white surface to start with", ) orig_pial = File( argstr="-orig_pial %s", exists=True, requires=["in_label"], desc="Specify a pial surface to start with", ) fix_mtl = traits.Bool(argstr="-fix_mtl", desc="Undocumented flag") no_white = traits.Bool(argstr="-nowhite", desc="Undocumented flag") white_only = traits.Bool(argstr="-whiteonly", desc="Undocumented flage") in_aseg = File(argstr="-aseg %s", exists=True, desc="Input segmentation file") in_T1 = File(argstr="-T1 %s", exists=True, desc="Input brain or T1 file") mgz = traits.Bool( argstr="-mgz", desc="No documentation. 
Direct questions to analysis-bugs@nmr.mgh.harvard.edu", ) noaparc = traits.Bool( argstr="-noaparc", xor=["in_label"], desc="No documentation. Direct questions to analysis-bugs@nmr.mgh.harvard.edu", ) maximum = traits.Float( argstr="-max %.1f", desc="No documentation (used for longitudinal processing)" ) longitudinal = traits.Bool( argstr="-long", desc="No documentation (used for longitudinal processing)" ) white = traits.String(argstr="-white %s", desc="White surface name") copy_inputs = traits.Bool( desc="If running as a node, set this to True." + "This will copy the input files to the node " + "directory." ) class MakeSurfacesOutputSpec(TraitedSpec): out_white = File(exists=False, desc="Output white matter hemisphere surface") out_curv = File(exists=False, desc="Output curv file for MakeSurfaces") out_area = File(exists=False, desc="Output area file for MakeSurfaces") out_cortex = File(exists=False, desc="Output cortex file for MakeSurfaces") out_pial = File(exists=False, desc="Output pial surface for MakeSurfaces") out_thickness = File(exists=False, desc="Output thickness file for MakeSurfaces") class MakeSurfaces(FSCommand): """ This program positions the tessellation of the cortical surface at the white matter surface, then the gray matter surface and generate surface files for these surfaces as well as a 'curvature' file for the cortical thickness, and a surface file which approximates layer IV of the cortical sheet. 
Examples ======== >>> from nipype.interfaces.freesurfer import MakeSurfaces >>> makesurfaces = MakeSurfaces() >>> makesurfaces.inputs.hemisphere = 'lh' >>> makesurfaces.inputs.subject_id = '10335' >>> makesurfaces.inputs.in_orig = 'lh.pial' >>> makesurfaces.inputs.in_wm = 'wm.mgz' >>> makesurfaces.inputs.in_filled = 'norm.mgz' >>> makesurfaces.inputs.in_label = 'aparc+aseg.nii' >>> makesurfaces.inputs.in_T1 = 'T1.mgz' >>> makesurfaces.inputs.orig_pial = 'lh.pial' >>> makesurfaces.cmdline 'mris_make_surfaces -T1 T1.mgz -orig pial -orig_pial pial 10335 lh' """ _cmd = "mris_make_surfaces" input_spec = MakeSurfacesInputSpec output_spec = MakeSurfacesOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() if "subjects_dir" in inputs: inputs["subjects_dir"] = self.inputs.subjects_dir copy2subjdir(self, self.inputs.in_wm, folder="mri", basename="wm.mgz") copy2subjdir( self, self.inputs.in_filled, folder="mri", basename="filled.mgz" ) copy2subjdir( self, self.inputs.in_white, "surf", "{0}.white".format(self.inputs.hemisphere), ) for originalfile in [self.inputs.in_aseg, self.inputs.in_T1]: copy2subjdir(self, originalfile, folder="mri") for originalfile in [ self.inputs.orig_white, self.inputs.orig_pial, self.inputs.in_orig, ]: copy2subjdir(self, originalfile, folder="surf") if isdefined(self.inputs.in_label): copy2subjdir( self, self.inputs.in_label, "label", "{0}.aparc.annot".format(self.inputs.hemisphere), ) else: os.makedirs( os.path.join( self.inputs.subjects_dir, self.inputs.subject_id, "label" ) ) return super(MakeSurfaces, self).run(**inputs) def _format_arg(self, name, spec, value): if name in ["in_T1", "in_aseg"]: # These inputs do not take full paths as inputs or even basenames basename = os.path.basename(value) # whent the -mgz flag is specified, it assumes the mgz extension if self.inputs.mgz: prefix = os.path.splitext(basename)[0] else: prefix = basename if prefix == "aseg": return # aseg is already the default 
return spec.argstr % prefix elif name in ["orig_white", "orig_pial"]: # these inputs do take full file paths or even basenames basename = os.path.basename(value) suffix = basename.split(".")[1] return spec.argstr % suffix elif name == "in_orig": if value.endswith("lh.orig") or value.endswith("rh.orig"): # {lh,rh}.orig inputs are not sepcified on command line return else: # if the input orig file is different than lh.orig or rh.orig # these inputs do take full file paths or even basenames basename = os.path.basename(value) suffix = basename.split(".")[1] return spec.argstr % suffix return super(MakeSurfaces, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() # Outputs are saved in the surf directory dest_dir = os.path.join( self.inputs.subjects_dir, self.inputs.subject_id, "surf" ) # labels are saved in the label directory label_dir = os.path.join( self.inputs.subjects_dir, self.inputs.subject_id, "label" ) if not self.inputs.no_white: outputs["out_white"] = os.path.join( dest_dir, str(self.inputs.hemisphere) + ".white" ) # The curv and area files must have the hemisphere names as a prefix outputs["out_curv"] = os.path.join( dest_dir, str(self.inputs.hemisphere) + ".curv" ) outputs["out_area"] = os.path.join( dest_dir, str(self.inputs.hemisphere) + ".area" ) # Something determines when a pial surface and thickness file is generated # but documentation doesn't say what. 
# The orig_pial input is just a guess if isdefined(self.inputs.orig_pial) or self.inputs.white == "NOWRITE": outputs["out_curv"] = outputs["out_curv"] + ".pial" outputs["out_area"] = outputs["out_area"] + ".pial" outputs["out_pial"] = os.path.join( dest_dir, str(self.inputs.hemisphere) + ".pial" ) outputs["out_thickness"] = os.path.join( dest_dir, str(self.inputs.hemisphere) + ".thickness" ) else: # when a pial surface is generated, the cortex label file is not # generated outputs["out_cortex"] = os.path.join( label_dir, str(self.inputs.hemisphere) + ".cortex.label" ) return outputs class CurvatureInputSpec(FSTraitedSpec): in_file = File( argstr="%s", position=-2, mandatory=True, exists=True, copyfile=True, desc="Input file for Curvature", ) # optional threshold = traits.Float(argstr="-thresh %.3f", desc="Undocumented input threshold") n = traits.Bool(argstr="-n", desc="Undocumented boolean flag") averages = traits.Int( argstr="-a %d", desc="Perform this number iterative averages of curvature measure before saving", ) save = traits.Bool( argstr="-w", desc="Save curvature files (will only generate screen output without this option)", ) distances = traits.Tuple( traits.Int, traits.Int, argstr="-distances %d %d", desc="Undocumented input integer distances", ) copy_input = traits.Bool(desc="Copy input file to current directory") class CurvatureOutputSpec(TraitedSpec): out_mean = File(exists=False, desc="Mean curvature output file") out_gauss = File(exists=False, desc="Gaussian curvature output file") class Curvature(FSCommand): """ This program will compute the second fundamental form of a cortical surface. It will create two new files ..H and ..K with the mean and Gaussian curvature respectively. 
Examples ======== >>> from nipype.interfaces.freesurfer import Curvature >>> curv = Curvature() >>> curv.inputs.in_file = 'lh.pial' >>> curv.inputs.save = True >>> curv.cmdline 'mris_curvature -w lh.pial' """ _cmd = "mris_curvature" input_spec = CurvatureInputSpec output_spec = CurvatureOutputSpec def _format_arg(self, name, spec, value): if self.inputs.copy_input: if name == "in_file": basename = os.path.basename(value) return spec.argstr % basename return super(Curvature, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() if self.inputs.copy_input: in_file = os.path.basename(self.inputs.in_file) else: in_file = self.inputs.in_file outputs["out_mean"] = os.path.abspath(in_file) + ".H" outputs["out_gauss"] = os.path.abspath(in_file) + ".K" return outputs class CurvatureStatsInputSpec(FSTraitedSpec): surface = File( argstr="-F %s", exists=True, desc="Specify surface file for CurvatureStats" ) curvfile1 = File( argstr="%s", position=-2, mandatory=True, exists=True, desc="Input file for CurvatureStats", ) curvfile2 = File( argstr="%s", position=-1, mandatory=True, exists=True, desc="Input file for CurvatureStats", ) hemisphere = traits.Enum( "lh", "rh", position=-3, argstr="%s", mandatory=True, desc="Hemisphere being processed", ) subject_id = traits.String( "subject_id", usedefault=True, position=-4, argstr="%s", mandatory=True, desc="Subject being processed", ) out_file = File( argstr="-o %s", exists=False, name_source=["hemisphere"], name_template="%s.curv.stats", hash_files=False, desc="Output curvature stats file", ) # optional min_max = traits.Bool( argstr="-m", desc="Output min / max information for the processed curvature." ) values = traits.Bool( argstr="-G", desc="Triggers a series of derived curvature values" ) write = traits.Bool(argstr="--writeCurvatureFiles", desc="Write curvature files") copy_inputs = traits.Bool( desc="If running as a node, set this to True." 
+ "This will copy the input files to the node " + "directory." ) class CurvatureStatsOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output curvature stats file") class CurvatureStats(FSCommand): """ In its simplest usage, 'mris_curvature_stats' will compute a set of statistics on its input . These statistics are the mean and standard deviation of the particular curvature on the surface, as well as the results from several surface-based integrals. Additionally, 'mris_curvature_stats' can report the max/min curvature values, and compute a simple histogram based on all curvature values. Curvatures can also be normalised and constrained to a given range before computation. Principal curvature (K, H, k1 and k2) calculations on a surface structure can also be performed, as well as several functions derived from k1 and k2. Finally, all output to the console, as well as any new curvatures that result from the above calculations can be saved to a series of text and binary-curvature files. 
Examples ======== >>> from nipype.interfaces.freesurfer import CurvatureStats >>> curvstats = CurvatureStats() >>> curvstats.inputs.hemisphere = 'lh' >>> curvstats.inputs.curvfile1 = 'lh.pial' >>> curvstats.inputs.curvfile2 = 'lh.pial' >>> curvstats.inputs.surface = 'lh.pial' >>> curvstats.inputs.out_file = 'lh.curv.stats' >>> curvstats.inputs.values = True >>> curvstats.inputs.min_max = True >>> curvstats.inputs.write = True >>> curvstats.cmdline 'mris_curvature_stats -m -o lh.curv.stats -F pial -G --writeCurvatureFiles subject_id lh pial pial' """ _cmd = "mris_curvature_stats" input_spec = CurvatureStatsInputSpec output_spec = CurvatureStatsOutputSpec def _format_arg(self, name, spec, value): if name in ["surface", "curvfile1", "curvfile2"]: prefix = os.path.basename(value).split(".")[1] return spec.argstr % prefix return super(CurvatureStats, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() if "subjects_dir" in inputs: inputs["subjects_dir"] = self.inputs.subjects_dir copy2subjdir(self, self.inputs.surface, "surf") copy2subjdir(self, self.inputs.curvfile1, "surf") copy2subjdir(self, self.inputs.curvfile2, "surf") return super(CurvatureStats, self).run(**inputs) class JacobianInputSpec(FSTraitedSpec): # required in_origsurf = File( argstr="%s", position=-3, mandatory=True, exists=True, desc="Original surface" ) in_mappedsurf = File( argstr="%s", position=-2, mandatory=True, exists=True, desc="Mapped surface" ) # optional out_file = File( argstr="%s", exists=False, position=-1, name_source=["in_origsurf"], hash_files=False, name_template="%s.jacobian", keep_extension=False, desc="Output Jacobian of the surface mapping", ) class JacobianOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output Jacobian of the surface mapping") class 
Jacobian(FSCommand): """ This program computes the Jacobian of a surface mapping. Examples ======== >>> from nipype.interfaces.freesurfer import Jacobian >>> jacobian = Jacobian() >>> jacobian.inputs.in_origsurf = 'lh.pial' >>> jacobian.inputs.in_mappedsurf = 'lh.pial' >>> jacobian.cmdline 'mris_jacobian lh.pial lh.pial lh.jacobian' """ _cmd = "mris_jacobian" input_spec = JacobianInputSpec output_spec = JacobianOutputSpec def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class MRIsCalcInputSpec(FSTraitedSpec): # required in_file1 = File( argstr="%s", position=-3, mandatory=True, exists=True, desc="Input file 1" ) action = traits.String( argstr="%s", position=-2, mandatory=True, desc="Action to perform on input file(s)", ) out_file = File( argstr="-o %s", mandatory=True, desc="Output file after calculation" ) # optional in_file2 = File( argstr="%s", exists=True, position=-1, xor=["in_float", "in_int"], desc="Input file 2", ) in_float = traits.Float( argstr="%f", position=-1, xor=["in_file2", "in_int"], desc="Input float" ) in_int = traits.Int( argstr="%d", position=-1, xor=["in_file2", "in_float"], desc="Input integer" ) class MRIsCalcOutputSpec(TraitedSpec): out_file = File(exists=False, desc="Output file after calculation") class MRIsCalc(FSCommand): """ 'mris_calc' is a simple calculator that operates on FreeSurfer curvatures and volumes. In most cases, the calculator functions with three arguments: two inputs and an linking them. Some actions, however, operate with only one input . In all cases, the first input is the name of a FreeSurfer curvature overlay (e.g. rh.curv) or volume file (e.g. orig.mgz). For two inputs, the calculator first assumes that the second input is a file. If, however, this second input file doesn't exist, the calculator assumes it refers to a float number, which is then processed according to .Note: and should typically be generated on the same subject. 
Examples ======== >>> from nipype.interfaces.freesurfer import MRIsCalc >>> example = MRIsCalc() >>> example.inputs.in_file1 = 'lh.area' # doctest: +SKIP >>> example.inputs.in_file2 = 'lh.area.pial' # doctest: +SKIP >>> example.inputs.action = 'add' >>> example.inputs.out_file = 'area.mid' >>> example.cmdline # doctest: +SKIP 'mris_calc -o lh.area.mid lh.area add lh.area.pial' """ _cmd = "mris_calc" input_spec = MRIsCalcInputSpec output_spec = MRIsCalcOutputSpec def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class VolumeMaskInputSpec(FSTraitedSpec): left_whitelabel = traits.Int( argstr="--label_left_white %d", mandatory=True, desc="Left white matter label" ) left_ribbonlabel = traits.Int( argstr="--label_left_ribbon %d", mandatory=True, desc="Left cortical ribbon label", ) right_whitelabel = traits.Int( argstr="--label_right_white %d", mandatory=True, desc="Right white matter label" ) right_ribbonlabel = traits.Int( argstr="--label_right_ribbon %d", mandatory=True, desc="Right cortical ribbon label", ) lh_pial = File(mandatory=True, exists=True, desc="Implicit input left pial surface") rh_pial = File( mandatory=True, exists=True, desc="Implicit input right pial surface" ) lh_white = File( mandatory=True, exists=True, desc="Implicit input left white matter surface" ) rh_white = File( mandatory=True, exists=True, desc="Implicit input right white matter surface" ) aseg = File( exists=True, xor=["in_aseg"], desc="Implicit aseg.mgz segmentation. 
" + "Specify a different aseg by using the 'in_aseg' input.", ) subject_id = traits.String( "subject_id", usedefault=True, position=-1, argstr="%s", mandatory=True, desc="Subject being processed", ) # optional in_aseg = File( argstr="--aseg_name %s", exists=True, xor=["aseg"], desc="Input aseg file for VolumeMask", ) save_ribbon = traits.Bool( argstr="--save_ribbon", desc="option to save just the ribbon for the " + "hemispheres in the format ?h.ribbon.mgz", ) copy_inputs = traits.Bool( desc="If running as a node, set this to True." + "This will copy the implicit input files to the " + "node directory." ) class VolumeMaskOutputSpec(TraitedSpec): out_ribbon = File(exists=False, desc="Output cortical ribbon mask") lh_ribbon = File(exists=False, desc="Output left cortical ribbon mask") rh_ribbon = File(exists=False, desc="Output right cortical ribbon mask") class VolumeMask(FSCommand): """ Computes a volume mask, at the same resolution as the /mri/brain.mgz. The volume mask contains 4 values: LH_WM (default 10), LH_GM (default 100), RH_WM (default 20), RH_GM (default 200). The algorithm uses the 4 surfaces situated in /surf/ [lh|rh].[white|pial] and labels voxels based on the signed-distance function from the surface. 
Examples ======== >>> from nipype.interfaces.freesurfer import VolumeMask >>> volmask = VolumeMask() >>> volmask.inputs.left_whitelabel = 2 >>> volmask.inputs.left_ribbonlabel = 3 >>> volmask.inputs.right_whitelabel = 41 >>> volmask.inputs.right_ribbonlabel = 42 >>> volmask.inputs.lh_pial = 'lh.pial' >>> volmask.inputs.rh_pial = 'lh.pial' >>> volmask.inputs.lh_white = 'lh.pial' >>> volmask.inputs.rh_white = 'lh.pial' >>> volmask.inputs.subject_id = '10335' >>> volmask.inputs.save_ribbon = True >>> volmask.cmdline 'mris_volmask --label_left_ribbon 3 --label_left_white 2 --label_right_ribbon 42 --label_right_white 41 --save_ribbon 10335' """ _cmd = "mris_volmask" input_spec = VolumeMaskInputSpec output_spec = VolumeMaskOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() if "subjects_dir" in inputs: inputs["subjects_dir"] = self.inputs.subjects_dir copy2subjdir(self, self.inputs.lh_pial, "surf", "lh.pial") copy2subjdir(self, self.inputs.rh_pial, "surf", "rh.pial") copy2subjdir(self, self.inputs.lh_white, "surf", "lh.white") copy2subjdir(self, self.inputs.rh_white, "surf", "rh.white") copy2subjdir(self, self.inputs.in_aseg, "mri") copy2subjdir(self, self.inputs.aseg, "mri", "aseg.mgz") return super(VolumeMask, self).run(**inputs) def _format_arg(self, name, spec, value): if name == "in_aseg": return spec.argstr % os.path.basename(value).rstrip(".mgz") return super(VolumeMask, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() out_dir = os.path.join(self.inputs.subjects_dir, self.inputs.subject_id, "mri") outputs["out_ribbon"] = os.path.join(out_dir, "ribbon.mgz") if self.inputs.save_ribbon: outputs["rh_ribbon"] = os.path.join(out_dir, "rh.ribbon.mgz") outputs["lh_ribbon"] = os.path.join(out_dir, "lh.ribbon.mgz") return outputs class ParcellationStatsInputSpec(FSTraitedSpec): # required subject_id = traits.String( "subject_id", usedefault=True, position=-3, argstr="%s", 
mandatory=True, desc="Subject being processed", ) hemisphere = traits.Enum( "lh", "rh", position=-2, argstr="%s", mandatory=True, desc="Hemisphere being processed", ) # implicit wm = File( mandatory=True, exists=True, desc="Input file must be /mri/wm.mgz" ) lh_white = File( mandatory=True, exists=True, desc="Input file must be /surf/lh.white", ) rh_white = File( mandatory=True, exists=True, desc="Input file must be /surf/rh.white", ) lh_pial = File( mandatory=True, exists=True, desc="Input file must be /surf/lh.pial" ) rh_pial = File( mandatory=True, exists=True, desc="Input file must be /surf/rh.pial" ) transform = File( mandatory=True, exists=True, desc="Input file must be /mri/transforms/talairach.xfm", ) thickness = File( mandatory=True, exists=True, desc="Input file must be /surf/?h.thickness", ) brainmask = File( mandatory=True, exists=True, desc="Input file must be /mri/brainmask.mgz", ) aseg = File( mandatory=True, exists=True, desc="Input file must be /mri/aseg.presurf.mgz", ) ribbon = File( mandatory=True, exists=True, desc="Input file must be /mri/ribbon.mgz", ) cortex_label = File(exists=True, desc="implicit input file {hemi}.cortex.label") # optional surface = traits.String( position=-1, argstr="%s", desc="Input surface (e.g. 
'white')" ) mgz = traits.Bool(argstr="-mgz", desc="Look for mgz files") in_cortex = File(argstr="-cortex %s", exists=True, desc="Input cortex label") in_annotation = File( argstr="-a %s", exists=True, xor=["in_label"], desc="compute properties for each label in the annotation file separately", ) in_label = File( argstr="-l %s", exists=True, xor=["in_annotatoin", "out_color"], desc="limit calculations to specified label", ) tabular_output = traits.Bool(argstr="-b", desc="Tabular output") out_table = File( argstr="-f %s", exists=False, genfile=True, requires=["tabular_output"], desc="Table output to tablefile", ) out_color = File( argstr="-c %s", exists=False, genfile=True, xor=["in_label"], desc="Output annotation files's colortable to text file", ) copy_inputs = traits.Bool( desc="If running as a node, set this to True." + "This will copy the input files to the node " + "directory." ) th3 = traits.Bool( argstr="-th3", requires=["cortex_label"], desc="turns on new vertex-wise volume calc for mris_anat_stats", ) class ParcellationStatsOutputSpec(TraitedSpec): out_table = File(exists=False, desc="Table output to tablefile") out_color = File( exists=False, desc="Output annotation files's colortable to text file" ) class ParcellationStats(FSCommand): """ This program computes a number of anatomical properties. 
Examples ======== >>> from nipype.interfaces.freesurfer import ParcellationStats >>> import os >>> parcstats = ParcellationStats() >>> parcstats.inputs.subject_id = '10335' >>> parcstats.inputs.hemisphere = 'lh' >>> parcstats.inputs.wm = './../mri/wm.mgz' # doctest: +SKIP >>> parcstats.inputs.transform = './../mri/transforms/talairach.xfm' # doctest: +SKIP >>> parcstats.inputs.brainmask = './../mri/brainmask.mgz' # doctest: +SKIP >>> parcstats.inputs.aseg = './../mri/aseg.presurf.mgz' # doctest: +SKIP >>> parcstats.inputs.ribbon = './../mri/ribbon.mgz' # doctest: +SKIP >>> parcstats.inputs.lh_pial = 'lh.pial' # doctest: +SKIP >>> parcstats.inputs.rh_pial = 'lh.pial' # doctest: +SKIP >>> parcstats.inputs.lh_white = 'lh.white' # doctest: +SKIP >>> parcstats.inputs.rh_white = 'rh.white' # doctest: +SKIP >>> parcstats.inputs.thickness = 'lh.thickness' # doctest: +SKIP >>> parcstats.inputs.surface = 'white' >>> parcstats.inputs.out_table = 'lh.test.stats' >>> parcstats.inputs.out_color = 'test.ctab' >>> parcstats.cmdline # doctest: +SKIP 'mris_anatomical_stats -c test.ctab -f lh.test.stats 10335 lh white' """ _cmd = "mris_anatomical_stats" input_spec = ParcellationStatsInputSpec output_spec = ParcellationStatsOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() if "subjects_dir" in inputs: inputs["subjects_dir"] = self.inputs.subjects_dir copy2subjdir(self, self.inputs.lh_white, "surf", "lh.white") copy2subjdir(self, self.inputs.lh_pial, "surf", "lh.pial") copy2subjdir(self, self.inputs.rh_white, "surf", "rh.white") copy2subjdir(self, self.inputs.rh_pial, "surf", "rh.pial") copy2subjdir(self, self.inputs.wm, "mri", "wm.mgz") copy2subjdir( self, self.inputs.transform, os.path.join("mri", "transforms"), "talairach.xfm", ) copy2subjdir(self, self.inputs.brainmask, "mri", "brainmask.mgz") copy2subjdir(self, self.inputs.aseg, "mri", "aseg.presurf.mgz") copy2subjdir(self, self.inputs.ribbon, "mri", "ribbon.mgz") copy2subjdir( 
self, self.inputs.thickness, "surf", "{0}.thickness".format(self.inputs.hemisphere), ) if isdefined(self.inputs.cortex_label): copy2subjdir( self, self.inputs.cortex_label, "label", "{0}.cortex.label".format(self.inputs.hemisphere), ) createoutputdirs(self._list_outputs()) return super(ParcellationStats, self).run(**inputs) def _gen_filename(self, name): if name in ["out_table", "out_color"]: return self._list_outputs()[name] return None def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.out_table): outputs["out_table"] = os.path.abspath(self.inputs.out_table) else: # subject stats directory stats_dir = os.path.join( self.inputs.subjects_dir, self.inputs.subject_id, "stats" ) if isdefined(self.inputs.in_annotation): # if out_table is not defined just tag .stats on the end # instead of .annot if self.inputs.surface == "pial": basename = os.path.basename(self.inputs.in_annotation).replace( ".annot", ".pial.stats" ) else: basename = os.path.basename(self.inputs.in_annotation).replace( ".annot", ".stats" ) elif isdefined(self.inputs.in_label): # if out_table is not defined just tag .stats on the end # instead of .label if self.inputs.surface == "pial": basename = os.path.basename(self.inputs.in_label).replace( ".label", ".pial.stats" ) else: basename = os.path.basename(self.inputs.in_label).replace( ".label", ".stats" ) else: basename = str(self.inputs.hemisphere) + ".aparc.annot.stats" outputs["out_table"] = os.path.join(stats_dir, basename) if isdefined(self.inputs.out_color): outputs["out_color"] = os.path.abspath(self.inputs.out_color) else: # subject label directory out_dir = os.path.join( self.inputs.subjects_dir, self.inputs.subject_id, "label" ) if isdefined(self.inputs.in_annotation): # find the annotation name (if it exists) basename = os.path.basename(self.inputs.in_annotation) for item in ["lh.", "rh.", "aparc.", "annot"]: basename = basename.replace(item, "") annot = basename # if the out_color table is not defined, one with 
the annotation # name will be created if "BA" in annot: outputs["out_color"] = os.path.join(out_dir, annot + "ctab") else: outputs["out_color"] = os.path.join( out_dir, "aparc.annot." + annot + "ctab" ) else: outputs["out_color"] = os.path.join(out_dir, "aparc.annot.ctab") return outputs class ContrastInputSpec(FSTraitedSpec): # required subject_id = traits.String( "subject_id", argstr="--s %s", usedefault=True, mandatory=True, desc="Subject being processed", ) hemisphere = traits.Enum( "lh", "rh", argstr="--%s-only", mandatory=True, desc="Hemisphere being processed", ) # implicit thickness = File( mandatory=True, exists=True, desc="Input file must be /surf/?h.thickness", ) white = File( mandatory=True, exists=True, desc="Input file must be /surf/.white", ) annotation = File( mandatory=True, exists=True, desc="Input annotation file must be /label/.aparc.annot", ) cortex = File( mandatory=True, exists=True, desc="Input cortex label must be /label/.cortex.label", ) orig = File(exists=True, mandatory=True, desc="Implicit input file mri/orig.mgz") rawavg = File( exists=True, mandatory=True, desc="Implicit input file mri/rawavg.mgz" ) copy_inputs = traits.Bool( desc="If running as a node, set this to True." + "This will copy the input files to the node " + "directory." 
) class ContrastOutputSpec(TraitedSpec): out_contrast = File(exists=False, desc="Output contrast file from Contrast") out_stats = File(exists=False, desc="Output stats file from Contrast") out_log = File(exists=True, desc="Output log from Contrast") class Contrast(FSCommand): """ Compute surface-wise gray/white contrast Examples ======== >>> from nipype.interfaces.freesurfer import Contrast >>> contrast = Contrast() >>> contrast.inputs.subject_id = '10335' >>> contrast.inputs.hemisphere = 'lh' >>> contrast.inputs.white = 'lh.white' # doctest: +SKIP >>> contrast.inputs.thickness = 'lh.thickness' # doctest: +SKIP >>> contrast.inputs.annotation = '../label/lh.aparc.annot' # doctest: +SKIP >>> contrast.inputs.cortex = '../label/lh.cortex.label' # doctest: +SKIP >>> contrast.inputs.rawavg = '../mri/rawavg.mgz' # doctest: +SKIP >>> contrast.inputs.orig = '../mri/orig.mgz' # doctest: +SKIP >>> contrast.cmdline # doctest: +SKIP 'pctsurfcon --lh-only --s 10335' """ _cmd = "pctsurfcon" input_spec = ContrastInputSpec output_spec = ContrastOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() if "subjects_dir" in inputs: inputs["subjects_dir"] = self.inputs.subjects_dir hemi = self.inputs.hemisphere copy2subjdir( self, self.inputs.annotation, "label", "{0}.aparc.annot".format(hemi) ) copy2subjdir( self, self.inputs.cortex, "label", "{0}.cortex.label".format(hemi) ) copy2subjdir(self, self.inputs.white, "surf", "{0}.white".format(hemi)) copy2subjdir( self, self.inputs.thickness, "surf", "{0}.thickness".format(hemi) ) copy2subjdir(self, self.inputs.orig, "mri", "orig.mgz") copy2subjdir(self, self.inputs.rawavg, "mri", "rawavg.mgz") # need to create output directories createoutputdirs(self._list_outputs()) return super(Contrast, self).run(**inputs) def _list_outputs(self): outputs = self._outputs().get() subject_dir = os.path.join(self.inputs.subjects_dir, self.inputs.subject_id) outputs["out_contrast"] = os.path.join( subject_dir, 
"surf", str(self.inputs.hemisphere) + ".w-g.pct.mgh" ) outputs["out_stats"] = os.path.join( subject_dir, "stats", str(self.inputs.hemisphere) + ".w-g.pct.stats" ) outputs["out_log"] = os.path.join(subject_dir, "scripts", "pctsurfcon.log") return outputs class RelabelHypointensitiesInputSpec(FSTraitedSpec): # required lh_white = File( mandatory=True, exists=True, copyfile=True, desc="Implicit input file must be lh.white", ) rh_white = File( mandatory=True, exists=True, copyfile=True, desc="Implicit input file must be rh.white", ) aseg = File( argstr="%s", position=-3, mandatory=True, exists=True, desc="Input aseg file" ) surf_directory = Directory( ".", argstr="%s", position=-2, exists=True, usedefault=True, desc="Directory containing lh.white and rh.white", ) out_file = File( argstr="%s", position=-1, exists=False, name_source=["aseg"], name_template="%s.hypos.mgz", hash_files=False, keep_extension=False, desc="Output aseg file", ) class RelabelHypointensitiesOutputSpec(TraitedSpec): out_file = File(argstr="%s", exists=False, desc="Output aseg file") class RelabelHypointensities(FSCommand): """ Relabel Hypointensities Examples ======== >>> from nipype.interfaces.freesurfer import RelabelHypointensities >>> relabelhypos = RelabelHypointensities() >>> relabelhypos.inputs.lh_white = 'lh.pial' >>> relabelhypos.inputs.rh_white = 'lh.pial' >>> relabelhypos.inputs.surf_directory = '.' >>> relabelhypos.inputs.aseg = 'aseg.mgz' >>> relabelhypos.cmdline 'mri_relabel_hypointensities aseg.mgz . 
aseg.hypos.mgz' """ _cmd = "mri_relabel_hypointensities" input_spec = RelabelHypointensitiesInputSpec output_spec = RelabelHypointensitiesOutputSpec def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class Aparc2AsegInputSpec(FSTraitedSpec): # required subject_id = traits.String( "subject_id", argstr="--s %s", usedefault=True, mandatory=True, desc="Subject being processed", ) out_file = File( argstr="--o %s", exists=False, mandatory=True, desc="Full path of file to save the output segmentation in", ) # implicit lh_white = File( mandatory=True, exists=True, desc="Input file must be /surf/lh.white", ) rh_white = File( mandatory=True, exists=True, desc="Input file must be /surf/rh.white", ) lh_pial = File( mandatory=True, exists=True, desc="Input file must be /surf/lh.pial" ) rh_pial = File( mandatory=True, exists=True, desc="Input file must be /surf/rh.pial" ) lh_ribbon = File( mandatory=True, exists=True, desc="Input file must be /mri/lh.ribbon.mgz", ) rh_ribbon = File( mandatory=True, exists=True, desc="Input file must be /mri/rh.ribbon.mgz", ) ribbon = File( mandatory=True, exists=True, desc="Input file must be /mri/ribbon.mgz", ) lh_annotation = File( mandatory=True, exists=True, desc="Input file must be /label/lh.aparc.annot", ) rh_annotation = File( mandatory=True, exists=True, desc="Input file must be /label/rh.aparc.annot", ) # optional filled = File( exists=True, desc="Implicit input filled file. Only required with FS v5.3." 
) aseg = File(argstr="--aseg %s", exists=True, desc="Input aseg file") volmask = traits.Bool(argstr="--volmask", desc="Volume mask flag") ctxseg = File(argstr="--ctxseg %s", exists=True, desc="") label_wm = traits.Bool( argstr="--labelwm", desc="""\ For each voxel labeled as white matter in the aseg, re-assign its label to be that of the closest cortical point if its distance is less than dmaxctx.""", ) hypo_wm = traits.Bool(argstr="--hypo-as-wm", desc="Label hypointensities as WM") rip_unknown = traits.Bool( argstr="--rip-unknown", desc="Do not label WM based on 'unknown' corical label" ) a2009s = traits.Bool(argstr="--a2009s", desc="Using the a2009s atlas") copy_inputs = traits.Bool( desc="If running as a node, set this to True." "This will copy the input files to the node " "directory." ) class Aparc2AsegOutputSpec(TraitedSpec): out_file = File(argstr="%s", desc="Output aseg file") class Aparc2Aseg(FSCommand): """ Maps the cortical labels from the automatic cortical parcellation (aparc) to the automatic segmentation volume (aseg). The result can be used as the aseg would. The algorithm is to find each aseg voxel labeled as cortex (3 and 42) and assign it the label of the closest cortical vertex. If the voxel is not in the ribbon (as defined by mri/ lh.ribbon and rh.ribbon), then the voxel is marked as unknown (0). This can be turned off with ``--noribbon``. The cortical parcellation is obtained from subject/label/hemi.aparc.annot which should be based on the curvature.buckner40.filled.desikan_killiany.gcs atlas. The aseg is obtained from subject/mri/aseg.mgz and should be based on the RB40_talairach_2005-07-20.gca atlas. If these atlases are used, then the segmentations can be viewed with tkmedit and the FreeSurferColorLUT.txt color table found in ``$FREESURFER_HOME``. These are the default atlases used by ``recon-all``. 
Examples -------- >>> from nipype.interfaces.freesurfer import Aparc2Aseg >>> aparc2aseg = Aparc2Aseg() >>> aparc2aseg.inputs.lh_white = 'lh.pial' >>> aparc2aseg.inputs.rh_white = 'lh.pial' >>> aparc2aseg.inputs.lh_pial = 'lh.pial' >>> aparc2aseg.inputs.rh_pial = 'lh.pial' >>> aparc2aseg.inputs.lh_ribbon = 'label.mgz' >>> aparc2aseg.inputs.rh_ribbon = 'label.mgz' >>> aparc2aseg.inputs.ribbon = 'label.mgz' >>> aparc2aseg.inputs.lh_annotation = 'lh.pial' >>> aparc2aseg.inputs.rh_annotation = 'lh.pial' >>> aparc2aseg.inputs.out_file = 'aparc+aseg.mgz' >>> aparc2aseg.inputs.label_wm = True >>> aparc2aseg.inputs.rip_unknown = True >>> aparc2aseg.cmdline # doctest: +SKIP 'mri_aparc2aseg --labelwm --o aparc+aseg.mgz --rip-unknown --s subject_id' """ _cmd = "mri_aparc2aseg" input_spec = Aparc2AsegInputSpec output_spec = Aparc2AsegOutputSpec def run(self, **inputs): if self.inputs.copy_inputs: self.inputs.subjects_dir = os.getcwd() if "subjects_dir" in inputs: inputs["subjects_dir"] = self.inputs.subjects_dir copy2subjdir(self, self.inputs.lh_white, "surf", "lh.white") copy2subjdir(self, self.inputs.lh_pial, "surf", "lh.pial") copy2subjdir(self, self.inputs.rh_white, "surf", "rh.white") copy2subjdir(self, self.inputs.rh_pial, "surf", "rh.pial") copy2subjdir(self, self.inputs.lh_ribbon, "mri", "lh.ribbon.mgz") copy2subjdir(self, self.inputs.rh_ribbon, "mri", "rh.ribbon.mgz") copy2subjdir(self, self.inputs.ribbon, "mri", "ribbon.mgz") copy2subjdir(self, self.inputs.aseg, "mri") copy2subjdir(self, self.inputs.filled, "mri", "filled.mgz") copy2subjdir(self, self.inputs.lh_annotation, "label") copy2subjdir(self, self.inputs.rh_annotation, "label") return super(Aparc2Aseg, self).run(**inputs) def _format_arg(self, name, spec, value): if name == "aseg": # aseg does not take a full filename basename = os.path.basename(value).replace(".mgz", "") return spec.argstr % basename elif name == "out_file": return spec.argstr % os.path.abspath(value) return super(Aparc2Aseg, 
self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class Apas2AsegInputSpec(FSTraitedSpec): # required in_file = File( argstr="--i %s", mandatory=True, exists=True, desc="Input aparc+aseg.mgz" ) out_file = File(argstr="--o %s", mandatory=True, desc="Output aseg file") class Apas2AsegOutputSpec(TraitedSpec): out_file = File(argstr="%s", exists=False, desc="Output aseg file") class Apas2Aseg(FSCommand): """ Converts aparc+aseg.mgz into something like aseg.mgz by replacing the cortical segmentations 1000-1035 with 3 and 2000-2035 with 42. The advantage of this output is that the cortical label conforms to the actual surface (this is not the case with aseg.mgz). Examples -------- >>> from nipype.interfaces.freesurfer import Apas2Aseg >>> apas2aseg = Apas2Aseg() >>> apas2aseg.inputs.in_file = 'aseg.mgz' >>> apas2aseg.inputs.out_file = 'output.mgz' >>> apas2aseg.cmdline 'apas2aseg --i aseg.mgz --o output.mgz' """ _cmd = "apas2aseg" input_spec = Apas2AsegInputSpec output_spec = Apas2AsegOutputSpec def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class MRIsExpandInputSpec(FSTraitedSpec): # Input spec derived from # https://github.com/freesurfer/freesurfer/blob/102e053/mris_expand/mris_expand.c in_file = File( exists=True, mandatory=True, argstr="%s", position=-3, copyfile=False, desc="Surface to expand", ) distance = traits.Float( mandatory=True, argstr="%g", position=-2, desc="Distance in mm or fraction of cortical thickness", ) out_name = traits.Str( "expanded", argstr="%s", position=-1, usedefault=True, desc=( "Output surface file. " "If no path, uses directory of ``in_file``. " 'If no path AND missing "lh." 
or "rh.", derive from ``in_file``' ), ) thickness = traits.Bool( argstr="-thickness", desc="Expand by fraction of cortical thickness, not mm" ) thickness_name = traits.Str( argstr="-thickness_name %s", copyfile=False, desc=( 'Name of thickness file (implicit: "thickness")\n' "If no path, uses directory of ``in_file``\n" 'If no path AND missing "lh." or "rh.", derive from `in_file`' ), ) pial = traits.Str( argstr="-pial %s", copyfile=False, desc=( 'Name of pial file (implicit: "pial")\n' "If no path, uses directory of ``in_file``\n" 'If no path AND missing "lh." or "rh.", derive from ``in_file``' ), ) sphere = traits.Str( "sphere", copyfile=False, usedefault=True, desc="WARNING: Do not change this trait", ) spring = traits.Float(argstr="-S %g", desc="Spring term (implicit: 0.05)") dt = traits.Float(argstr="-T %g", desc="dt (implicit: 0.25)") write_iterations = traits.Int( argstr="-W %d", desc="Write snapshots of expansion every N iterations" ) smooth_averages = traits.Int( argstr="-A %d", desc="Smooth surface with N iterations after expansion" ) nsurfaces = traits.Int( argstr="-N %d", desc="Number of surfacces to write during expansion" ) # # Requires dev version - Re-add when min_ver/max_ver support this # # https://github.com/freesurfer/freesurfer/blob/9730cb9/mris_expand/mris_expand.c # navgs = traits.Tuple( # traits.Int, traits.Int, # argstr='-navgs %d %d', # desc=('Tuple of (n_averages, min_averages) parameters ' # '(implicit: (16, 0))')) # target_intensity = traits.Tuple( # traits.Float, File(exists=True), # argstr='-intensity %g %s', # desc='Tuple of intensity and brain volume to crop to target intensity') class MRIsExpandOutputSpec(TraitedSpec): out_file = File(desc="Output surface file") class MRIsExpand(FSSurfaceCommand): """ Expands a surface (typically ?h.white) outwards while maintaining smoothness and self-intersection constraints. 
Examples ======== >>> from nipype.interfaces.freesurfer import MRIsExpand >>> mris_expand = MRIsExpand(thickness=True, distance=0.5) >>> mris_expand.inputs.in_file = 'lh.white' >>> mris_expand.cmdline 'mris_expand -thickness lh.white 0.5 expanded' >>> mris_expand.inputs.out_name = 'graymid' >>> mris_expand.cmdline 'mris_expand -thickness lh.white 0.5 graymid' """ _cmd = "mris_expand" input_spec = MRIsExpandInputSpec output_spec = MRIsExpandOutputSpec def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = self._associated_file( self.inputs.in_file, self.inputs.out_name ) return outputs def normalize_filenames(self): """ Filename normalization routine to perform only when run in Node context. Find full paths for pial, thickness and sphere files for copying. """ in_file = self.inputs.in_file pial = self.inputs.pial if not isdefined(pial): pial = "pial" self.inputs.pial = self._associated_file(in_file, pial) if isdefined(self.inputs.thickness) and self.inputs.thickness: thickness_name = self.inputs.thickness_name if not isdefined(thickness_name): thickness_name = "thickness" self.inputs.thickness_name = self._associated_file(in_file, thickness_name) self.inputs.sphere = self._associated_file(in_file, self.inputs.sphere) class LTAConvertInputSpec(CommandLineInputSpec): # Inputs _in_xor = ("in_lta", "in_fsl", "in_mni", "in_reg", "in_niftyreg", "in_itk") in_lta = traits.Either( File(exists=True), "identity.nofile", argstr="--inlta %s", mandatory=True, xor=_in_xor, desc="input transform of LTA type", ) in_fsl = File( exists=True, argstr="--infsl %s", mandatory=True, xor=_in_xor, desc="input transform of FSL type", ) in_mni = File( exists=True, argstr="--inmni %s", mandatory=True, xor=_in_xor, desc="input transform of MNI/XFM type", ) in_reg = File( exists=True, argstr="--inreg %s", mandatory=True, xor=_in_xor, desc="input transform of TK REG type (deprecated format)", ) in_niftyreg = File( exists=True, argstr="--inniftyreg %s", mandatory=True, 
xor=_in_xor, desc="input transform of Nifty Reg type (inverse RAS2RAS)", ) in_itk = File( exists=True, argstr="--initk %s", mandatory=True, xor=_in_xor, desc="input transform of ITK type", ) # Outputs out_lta = traits.Either( traits.Bool, File, argstr="--outlta %s", desc="output linear transform (LTA Freesurfer format)", ) out_fsl = traits.Either( traits.Bool, File, argstr="--outfsl %s", desc="output transform in FSL format" ) out_mni = traits.Either( traits.Bool, File, argstr="--outmni %s", desc="output transform in MNI/XFM format", ) out_reg = traits.Either( traits.Bool, File, argstr="--outreg %s", desc="output transform in reg dat format", ) out_itk = traits.Either( traits.Bool, File, argstr="--outitk %s", desc="output transform in ITK format" ) # Optional flags invert = traits.Bool(argstr="--invert") ltavox2vox = traits.Bool(argstr="--ltavox2vox", requires=["out_lta"]) source_file = File(exists=True, argstr="--src %s") target_file = File(exists=True, argstr="--trg %s") target_conform = traits.Bool(argstr="--trgconform") class LTAConvertOutputSpec(TraitedSpec): out_lta = File(exists=True, desc="output linear transform (LTA Freesurfer format)") out_fsl = File(exists=True, desc="output transform in FSL format") out_mni = File(exists=True, desc="output transform in MNI/XFM format") out_reg = File(exists=True, desc="output transform in reg dat format") out_itk = File(exists=True, desc="output transform in ITK format") class LTAConvert(CommandLine): """Convert different transformation formats. Some formats may require you to pass an image if the geometry information is missing form the transform file format. For complete details, see the `lta_convert documentation. 
`_ """ input_spec = LTAConvertInputSpec output_spec = LTAConvertOutputSpec _cmd = "lta_convert" def _format_arg(self, name, spec, value): if name.startswith("out_") and value is True: value = self._list_outputs()[name] return super(LTAConvert, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() for name, default in ( ("out_lta", "out.lta"), ("out_fsl", "out.mat"), ("out_mni", "out.xfm"), ("out_reg", "out.dat"), ("out_itk", "out.txt"), ): attr = getattr(self.inputs, name) if attr: fname = default if attr is True else attr outputs[name] = os.path.abspath(fname) return outputs nipype-1.7.0/nipype/interfaces/fsl/000077500000000000000000000000001413403311400172305ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/fsl/__init__.py000066400000000000000000000042461413403311400213470ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ FSL is a comprehensive library of analysis tools for fMRI, MRI and DTI brain imaging data. The fsl module provides classes for interfacing with the `FSL `_ command line tools. 
""" from .base import FSLCommand, Info, check_fsl, no_fsl, no_fsl_course_data from .preprocess import ( FAST, FLIRT, ApplyXFM, BET, MCFLIRT, FNIRT, ApplyWarp, SliceTimer, SUSAN, PRELUDE, FUGUE, FIRST, ) from .model import ( Level1Design, FEAT, FEATModel, FILMGLS, FEATRegister, FLAMEO, ContrastMgr, MultipleRegressDesign, L2Model, SMM, MELODIC, SmoothEstimate, Cluster, Randomise, GLM, ) from .utils import ( AvScale, Smooth, Slice, Merge, ExtractROI, Split, ImageMaths, ImageMeants, ImageStats, FilterRegressor, Overlay, Slicer, PlotTimeSeries, PlotMotionParams, ConvertXFM, SwapDimensions, PowerSpectrum, Reorient2Std, Complex, InvWarp, WarpUtils, ConvertWarp, WarpPoints, WarpPointsToStd, WarpPointsFromStd, RobustFOV, CopyGeom, MotionOutliers, ) from .epi import ( PrepareFieldmap, TOPUP, ApplyTOPUP, Eddy, EPIDeWarp, SigLoss, EddyCorrect, EpiReg, EddyQuad, ) from .dti import ( BEDPOSTX, XFibres, DTIFit, ProbTrackX, ProbTrackX2, VecReg, ProjThresh, FindTheBiggest, DistanceMap, TractSkeleton, MakeDyadicVectors, BEDPOSTX5, XFibres5, ) from .maths import ( ChangeDataType, Threshold, MeanImage, ApplyMask, IsotropicSmooth, TemporalFilter, DilateImage, ErodeImage, SpatialFilter, UnaryMaths, BinaryMaths, MultiImageMaths, MaxnImage, MinImage, MedianImage, PercentileImage, AR1Image, ) from .possum import B0Calc from .fix import ( AccuracyTester, Classifier, Cleaner, FeatureExtractor, Training, TrainingSetCreator, ) from .aroma import ICA_AROMA nipype-1.7.0/nipype/interfaces/fsl/aroma.py000066400000000000000000000130621413403311400207030ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """This commandline module provides classes for interfacing with the `ICA-AROMA.py `__ command line tool. 
""" from ..base import ( TraitedSpec, CommandLineInputSpec, CommandLine, File, Directory, traits, isdefined, ) import os class ICA_AROMAInputSpec(CommandLineInputSpec): feat_dir = Directory( exists=True, mandatory=True, argstr="-feat %s", xor=["in_file", "mat_file", "fnirt_warp_file", "motion_parameters"], desc="If a feat directory exists and temporal filtering " "has not been run yet, ICA_AROMA can use the files in " "this directory.", ) in_file = File( exists=True, mandatory=True, argstr="-i %s", xor=["feat_dir"], desc="volume to be denoised", ) out_dir = Directory( "out", usedefault=True, mandatory=True, argstr="-o %s", desc="output directory" ) mask = File( exists=True, argstr="-m %s", xor=["feat_dir"], desc="path/name volume mask" ) dim = traits.Int( argstr="-dim %d", desc="Dimensionality reduction when running " "MELODIC (defualt is automatic estimation)", ) TR = traits.Float( argstr="-tr %.3f", desc="TR in seconds. If this is not specified " "the TR will be extracted from the " "header of the fMRI nifti file.", ) melodic_dir = Directory( exists=True, argstr="-meldir %s", desc="path to MELODIC directory if MELODIC has already been run", ) mat_file = File( exists=True, argstr="-affmat %s", xor=["feat_dir"], desc="path/name of the mat-file describing the " "affine registration (e.g. FSL FLIRT) of the " "functional data to structural space (.mat file)", ) fnirt_warp_file = File( exists=True, argstr="-warp %s", xor=["feat_dir"], desc="File name of the warp-file describing " "the non-linear registration (e.g. FSL FNIRT) " "of the structural data to MNI152 space (.nii.gz)", ) motion_parameters = File( exists=True, mandatory=True, argstr="-mc %s", xor=["feat_dir"], desc="motion parameters file", ) denoise_type = traits.Enum( "nonaggr", "aggr", "both", "no", usedefault=True, mandatory=True, argstr="-den %s", desc="Type of denoising strategy:\n" "-no: only classification, no denoising\n" "-nonaggr (default): non-aggresssive denoising, i.e. 
partial component regression\n" "-aggr: aggressive denoising, i.e. full component regression\n" "-both: both aggressive and non-aggressive denoising (two outputs)", ) class ICA_AROMAOutputSpec(TraitedSpec): aggr_denoised_file = File( exists=True, desc="if generated: aggressively denoised volume" ) nonaggr_denoised_file = File( exists=True, desc="if generated: non aggressively denoised volume" ) out_dir = Directory( exists=True, desc="directory contains (in addition to the denoised files): " "melodic.ica + classified_motion_components + " "classification_overview + feature_scores + melodic_ic_mni)", ) class ICA_AROMA(CommandLine): """ Interface for the ICA_AROMA.py script. ICA-AROMA (i.e. 'ICA-based Automatic Removal Of Motion Artifacts') concerns a data-driven method to identify and remove motion-related independent components from fMRI data. To that end it exploits a small, but robust set of theoretically motivated features, preventing the need for classifier re-training and therefore providing direct and easy applicability. 
See link for further documentation: https://github.com/rhr-pruim/ICA-AROMA Example ------- >>> from nipype.interfaces.fsl import ICA_AROMA >>> from nipype.testing import example_data >>> AROMA_obj = ICA_AROMA() >>> AROMA_obj.inputs.in_file = 'functional.nii' >>> AROMA_obj.inputs.mat_file = 'func_to_struct.mat' >>> AROMA_obj.inputs.fnirt_warp_file = 'warpfield.nii' >>> AROMA_obj.inputs.motion_parameters = 'fsl_mcflirt_movpar.txt' >>> AROMA_obj.inputs.mask = 'mask.nii.gz' >>> AROMA_obj.inputs.denoise_type = 'both' >>> AROMA_obj.inputs.out_dir = 'ICA_testout' >>> AROMA_obj.cmdline # doctest: +ELLIPSIS 'ICA_AROMA.py -den both -warp warpfield.nii -i functional.nii -m mask.nii.gz -affmat func_to_struct.mat -mc fsl_mcflirt_movpar.txt -o .../ICA_testout' """ _cmd = "ICA_AROMA.py" input_spec = ICA_AROMAInputSpec output_spec = ICA_AROMAOutputSpec def _format_arg(self, name, trait_spec, value): if name == "out_dir": return trait_spec.argstr % os.path.abspath(value) return super(ICA_AROMA, self)._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = self.output_spec().get() outputs["out_dir"] = os.path.abspath(self.inputs.out_dir) out_dir = outputs["out_dir"] if self.inputs.denoise_type in ("aggr", "both"): outputs["aggr_denoised_file"] = os.path.join( out_dir, "denoised_func_data_aggr.nii.gz" ) if self.inputs.denoise_type in ("nonaggr", "both"): outputs["nonaggr_denoised_file"] = os.path.join( out_dir, "denoised_func_data_nonaggr.nii.gz" ) return outputs nipype-1.7.0/nipype/interfaces/fsl/base.py000066400000000000000000000175511413403311400205250ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL `_ command line tools. This was written to work with FSL version 4.1.4. These are the base tools for working with FSL. 
Preprocessing tools are found in fsl/preprocess.py Model tools are found in fsl/model.py DTI tools are found in fsl/dti.py XXX Make this doc current! Currently these tools are supported: * BET v2.1: brain extraction * FAST v4.1: segmentation and bias correction * FLIRT v5.5: linear registration * MCFLIRT: motion correction * FNIRT v1.0: non-linear warp Examples -------- See the docstrings of the individual classes for examples. """ from glob import glob import os from ... import logging from ...utils.filemanip import fname_presuffix from ..base import traits, isdefined, CommandLine, CommandLineInputSpec, PackageInfo from ...external.due import BibTeX IFLOGGER = logging.getLogger("nipype.interface") class Info(PackageInfo): """ Handle FSL ``output_type`` and version information. output type refers to the type of file fsl defaults to writing eg, NIFTI, NIFTI_GZ Examples -------- >>> from nipype.interfaces.fsl import Info >>> Info.version() # doctest: +SKIP >>> Info.output_type() # doctest: +SKIP """ ftypes = { "NIFTI": ".nii", "NIFTI_PAIR": ".img", "NIFTI_GZ": ".nii.gz", "NIFTI_PAIR_GZ": ".img.gz", } if os.getenv("FSLDIR"): version_file = os.path.join(os.getenv("FSLDIR"), "etc", "fslversion") @staticmethod def parse_version(raw_info): return raw_info.splitlines()[0] @classmethod def output_type_to_ext(cls, output_type): """Get the file extension for the given output type. Parameters ---------- output_type : {'NIFTI', 'NIFTI_GZ', 'NIFTI_PAIR', 'NIFTI_PAIR_GZ'} String specifying the output type. Returns ------- extension : str The file extension for the output type. """ try: return cls.ftypes[output_type] except KeyError: msg = "Invalid FSLOUTPUTTYPE: ", output_type raise KeyError(msg) @classmethod def output_type(cls): """Get the global FSL output file type FSLOUTPUTTYPE. This returns the value of the environment variable FSLOUTPUTTYPE. An exception is raised if it is not defined. 
Returns ------- fsl_ftype : string Represents the current environment setting of FSLOUTPUTTYPE """ try: return os.environ["FSLOUTPUTTYPE"] except KeyError: IFLOGGER.warning( "FSLOUTPUTTYPE environment variable is not set. " "Setting FSLOUTPUTTYPE=NIFTI" ) return "NIFTI" @staticmethod def standard_image(img_name=None): """Grab an image from the standard location. Returns a list of standard images if called without arguments. Could be made more fancy to allow for more relocatability""" try: fsldir = os.environ["FSLDIR"] except KeyError: raise Exception("FSL environment variables not set") stdpath = os.path.join(fsldir, "data", "standard") if img_name is None: return [ filename.replace(stdpath + "/", "") for filename in glob(os.path.join(stdpath, "*nii*")) ] return os.path.join(stdpath, img_name) class FSLCommandInputSpec(CommandLineInputSpec): """ Base Input Specification for all FSL Commands All command support specifying FSLOUTPUTTYPE dynamically via output_type. Example ------- fsl.ExtractRoi(tmin=42, tsize=1, output_type='NIFTI') """ output_type = traits.Enum("NIFTI", list(Info.ftypes.keys()), desc="FSL output type") class FSLCommand(CommandLine): """Base support for FSL commands.""" input_spec = FSLCommandInputSpec _output_type = None _references = [ { "entry": BibTeX( "@article{JenkinsonBeckmannBehrensWoolrichSmith2012," "author={M. Jenkinson, C.F. Beckmann, T.E. Behrens, " "M.W. Woolrich, and S.M. 
Smith}," "title={FSL}," "journal={NeuroImage}," "volume={62}," "pages={782-790}," "year={2012}," "}" ), "tags": ["implementation"], } ] def __init__(self, **inputs): super(FSLCommand, self).__init__(**inputs) self.inputs.on_trait_change(self._output_update, "output_type") if self._output_type is None: self._output_type = Info.output_type() if not isdefined(self.inputs.output_type): self.inputs.output_type = self._output_type else: self._output_update() def _output_update(self): self._output_type = self.inputs.output_type self.inputs.environ.update({"FSLOUTPUTTYPE": self.inputs.output_type}) @classmethod def set_default_output_type(cls, output_type): """Set the default output type for FSL classes. This method is used to set the default output type for all fSL subclasses. However, setting this will not update the output type for any existing instances. For these, assign the .inputs.output_type. """ if output_type in Info.ftypes: cls._output_type = output_type else: raise AttributeError("Invalid FSL output_type: %s" % output_type) @property def version(self): return Info.version() def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, ext=None): """Generate a filename based on the given parameters. The filename will take the form: cwd/basename. If change_ext is True, it will use the extentions specified in intputs.output_type. Parameters ---------- basename : str Filename to base the new filename on. cwd : str Path to prefix to the new filename. (default is os.getcwd()) suffix : str Suffix to add to the `basename`. (defaults is '' ) change_ext : bool Flag to change the filename extension to the FSL output type. (default True) Returns ------- fname : str New filename based on given parameters. """ if basename == "": msg = "Unable to generate filename for command %s. " % self.cmd msg += "basename is not set!" 
raise ValueError(msg) if cwd is None: cwd = os.getcwd() if ext is None: ext = Info.output_type_to_ext(self.inputs.output_type) if change_ext: if suffix: suffix = "".join((suffix, ext)) else: suffix = ext if suffix is None: suffix = "" fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) return fname def _overload_extension(self, value, name=None): return value + Info.output_type_to_ext(self.inputs.output_type) def check_fsl(): ver = Info.version() if ver: return 0 else: return 1 def no_fsl(): """Checks if FSL is NOT installed used with skipif to skip tests that will fail if FSL is not installed""" if Info.version() is None: return True else: return False def no_fsl_course_data(): """check if fsl_course data is present""" return not ( "FSL_COURSE_DATA" in os.environ and os.path.isdir(os.path.abspath(os.environ["FSL_COURSE_DATA"])) ) nipype-1.7.0/nipype/interfaces/fsl/dti.py000066400000000000000000001473271413403311400204000ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL `_ command line tools. This was written to work with FSL version 4.1.4. 
""" import os import warnings from ...utils.filemanip import fname_presuffix, split_filename, copyfile from ..base import ( TraitedSpec, isdefined, File, Directory, InputMultiPath, OutputMultiPath, traits, ) from .base import FSLCommand, FSLCommandInputSpec, Info class DTIFitInputSpec(FSLCommandInputSpec): dwi = File( exists=True, desc="diffusion weighted image data file", argstr="-k %s", position=0, mandatory=True, ) base_name = traits.Str( "dtifit_", desc=("base_name that all output files " "will start with"), argstr="-o %s", position=1, usedefault=True, ) mask = File( exists=True, desc="bet binary mask file", argstr="-m %s", position=2, mandatory=True, ) bvecs = File( exists=True, desc="b vectors file", argstr="-r %s", position=3, mandatory=True ) bvals = File( exists=True, desc="b values file", argstr="-b %s", position=4, mandatory=True ) min_z = traits.Int(argstr="-z %d", desc="min z") max_z = traits.Int(argstr="-Z %d", desc="max z") min_y = traits.Int(argstr="-y %d", desc="min y") max_y = traits.Int(argstr="-Y %d", desc="max y") min_x = traits.Int(argstr="-x %d", desc="min x") max_x = traits.Int(argstr="-X %d", desc="max x") save_tensor = traits.Bool( desc="save the elements of the tensor", argstr="--save_tensor" ) sse = traits.Bool(desc="output sum of squared errors", argstr="--sse") cni = File(exists=True, desc="input counfound regressors", argstr="--cni=%s") little_bit = traits.Bool( desc="only process small area of brain", argstr="--littlebit" ) gradnonlin = File( exists=True, argstr="--gradnonlin=%s", desc="gradient non linearities" ) class DTIFitOutputSpec(TraitedSpec): V1 = File(exists=True, desc="path/name of file with the 1st eigenvector") V2 = File(exists=True, desc="path/name of file with the 2nd eigenvector") V3 = File(exists=True, desc="path/name of file with the 3rd eigenvector") L1 = File(exists=True, desc="path/name of file with the 1st eigenvalue") L2 = File(exists=True, desc="path/name of file with the 2nd eigenvalue") L3 = File(exists=True, 
desc="path/name of file with the 3rd eigenvalue") MD = File(exists=True, desc="path/name of file with the mean diffusivity") FA = File(exists=True, desc="path/name of file with the fractional anisotropy") MO = File(exists=True, desc="path/name of file with the mode of anisotropy") S0 = File( exists=True, desc=( "path/name of file with the raw T2 signal with no " "diffusion weighting" ), ) tensor = File(exists=True, desc="path/name of file with the 4D tensor volume") sse = File(exists=True, desc="path/name of file with the summed squared error") class DTIFit(FSLCommand): """Use FSL dtifit command for fitting a diffusion tensor model at each voxel Example ------- >>> from nipype.interfaces import fsl >>> dti = fsl.DTIFit() >>> dti.inputs.dwi = 'diffusion.nii' >>> dti.inputs.bvecs = 'bvecs' >>> dti.inputs.bvals = 'bvals' >>> dti.inputs.base_name = 'TP' >>> dti.inputs.mask = 'mask.nii' >>> dti.cmdline 'dtifit -k diffusion.nii -o TP -m mask.nii -r bvecs -b bvals' """ _cmd = "dtifit" input_spec = DTIFitInputSpec output_spec = DTIFitOutputSpec def _list_outputs(self): keys_to_ignore = {"outputtype", "environ", "args"} # Optional output: Map output name to input flag opt_output = {"tensor": self.inputs.save_tensor, "sse": self.inputs.sse} # Ignore optional output, whose corresponding input-flag is not defined # or set to False for output, input_flag in opt_output.items(): if isdefined(input_flag) and input_flag: # this is wanted output, do not ignore continue keys_to_ignore.add(output) outputs = self.output_spec().get() for k in set(outputs.keys()) - keys_to_ignore: outputs[k] = self._gen_fname(self.inputs.base_name, suffix="_" + k) return outputs class FSLXCommandInputSpec(FSLCommandInputSpec): dwi = File( exists=True, argstr="--data=%s", mandatory=True, desc="diffusion weighted image data file", ) mask = File( exists=True, argstr="--mask=%s", mandatory=True, desc="brain binary mask file (i.e. 
from BET)", ) bvecs = File( exists=True, argstr="--bvecs=%s", mandatory=True, desc="b vectors file" ) bvals = File(exists=True, argstr="--bvals=%s", mandatory=True, desc="b values file") logdir = Directory(".", argstr="--logdir=%s", usedefault=True) n_fibres = traits.Range( usedefault=True, low=1, value=2, argstr="--nfibres=%d", desc=("Maximum number of fibres to fit in each voxel"), mandatory=True, ) model = traits.Enum( 1, 2, 3, argstr="--model=%d", desc=( "use monoexponential (1, default, required for " "single-shell) or multiexponential (2, multi-" "shell) model" ), ) fudge = traits.Int(argstr="--fudge=%d", desc="ARD fudge factor") n_jumps = traits.Int( 5000, usedefault=True, argstr="--njumps=%d", desc="Num of jumps to be made by MCMC", ) burn_in = traits.Range( low=0, value=0, usedefault=True, argstr="--burnin=%d", desc=("Total num of jumps at start of MCMC to be " "discarded"), ) burn_in_no_ard = traits.Range( low=0, value=0, usedefault=True, argstr="--burnin_noard=%d", desc=("num of burnin jumps before the ard is" " imposed"), ) sample_every = traits.Range( low=0, value=1, usedefault=True, argstr="--sampleevery=%d", desc="Num of jumps for each sample (MCMC)", ) update_proposal_every = traits.Range( low=1, value=40, usedefault=True, argstr="--updateproposalevery=%d", desc=("Num of jumps for each update " "to the proposal density std " "(MCMC)"), ) seed = traits.Int( argstr="--seed=%d", desc="seed for pseudo random number generator" ) _xor_inputs1 = ("no_ard", "all_ard") no_ard = traits.Bool( argstr="--noard", xor=_xor_inputs1, desc="Turn ARD off on all fibres" ) all_ard = traits.Bool( argstr="--allard", xor=_xor_inputs1, desc="Turn ARD on on all fibres" ) _xor_inputs2 = ("no_spat", "non_linear", "cnlinear") no_spat = traits.Bool( argstr="--nospat", xor=_xor_inputs2, desc="Initialise with tensor, not spatially", ) non_linear = traits.Bool( argstr="--nonlinear", xor=_xor_inputs2, desc="Initialise with nonlinear fitting" ) cnlinear = traits.Bool( 
argstr="--cnonlinear", xor=_xor_inputs2, desc=("Initialise with constrained nonlinear " "fitting"), ) rician = traits.Bool(argstr="--rician", desc=("use Rician noise modeling")) _xor_inputs3 = ["f0_noard", "f0_ard"] f0_noard = traits.Bool( argstr="--f0", xor=_xor_inputs3, desc=( "Noise floor model: add to the model an " "unattenuated signal compartment f0" ), ) f0_ard = traits.Bool( argstr="--f0 --ardf0", xor=_xor_inputs3 + ["all_ard"], desc=( "Noise floor model: add to the model an " "unattenuated signal compartment f0" ), ) force_dir = traits.Bool( True, argstr="--forcedir", usedefault=True, desc=( "use the actual directory name given " "(do not add + to make a new directory)" ), ) class FSLXCommandOutputSpec(TraitedSpec): dyads = OutputMultiPath( File(exists=True), desc=("Mean of PDD distribution" " in vector form.") ) fsamples = OutputMultiPath( File(exists=True), desc=("Samples from the " "distribution on f " "anisotropy") ) mean_dsamples = File(exists=True, desc="Mean of distribution on diffusivity d") mean_fsamples = OutputMultiPath( File(exists=True), desc=("Mean of distribution on f " "anisotropy") ) mean_S0samples = File( exists=True, desc=("Mean of distribution on T2w" "baseline signal intensity S0") ) mean_tausamples = File( exists=True, desc=("Mean of distribution on " "tau samples (only with rician " "noise)"), ) phsamples = OutputMultiPath(File(exists=True), desc=("phi samples, per fiber")) thsamples = OutputMultiPath(File(exists=True), desc=("theta samples, per fiber")) class FSLXCommand(FSLCommand): """ Base support for ``xfibres`` and ``bedpostx`` """ input_spec = FSLXCommandInputSpec output_spec = FSLXCommandOutputSpec def _run_interface(self, runtime): self._out_dir = os.getcwd() runtime = super(FSLXCommand, self)._run_interface(runtime) if runtime.stderr: self.raise_exception(runtime) return runtime def _list_outputs(self, out_dir=None): outputs = self.output_spec().get() n_fibres = self.inputs.n_fibres if not out_dir: if 
isdefined(self.inputs.logdir): out_dir = os.path.abspath(self.inputs.logdir) else: out_dir = os.path.abspath("logdir") multi_out = ["dyads", "fsamples", "mean_fsamples", "phsamples", "thsamples"] single_out = ["mean_dsamples", "mean_S0samples"] for k in single_out: outputs[k] = self._gen_fname(k, cwd=out_dir) if isdefined(self.inputs.rician) and self.inputs.rician: outputs["mean_tausamples"] = self._gen_fname("mean_tausamples", cwd=out_dir) for k in multi_out: outputs[k] = [] for i in range(1, n_fibres + 1): outputs["fsamples"].append(self._gen_fname("f%dsamples" % i, cwd=out_dir)) outputs["mean_fsamples"].append( self._gen_fname("mean_f%dsamples" % i, cwd=out_dir) ) for i in range(1, n_fibres + 1): outputs["dyads"].append(self._gen_fname("dyads%d" % i, cwd=out_dir)) outputs["phsamples"].append(self._gen_fname("ph%dsamples" % i, cwd=out_dir)) outputs["thsamples"].append(self._gen_fname("th%dsamples" % i, cwd=out_dir)) return outputs class BEDPOSTX5InputSpec(FSLXCommandInputSpec): dwi = File(exists=True, desc="diffusion weighted image data file", mandatory=True) mask = File(exists=True, desc="bet binary mask file", mandatory=True) bvecs = File(exists=True, desc="b vectors file", mandatory=True) bvals = File(exists=True, desc="b values file", mandatory=True) logdir = Directory(argstr="--logdir=%s") n_fibres = traits.Range( usedefault=True, low=1, value=2, argstr="-n %d", desc=("Maximum number of fibres to fit in each voxel"), mandatory=True, ) model = traits.Enum( 1, 2, 3, argstr="-model %d", desc=( "use monoexponential (1, default, required for " "single-shell) or multiexponential (2, multi-" "shell) model" ), ) fudge = traits.Int(argstr="-w %d", desc="ARD fudge factor") n_jumps = traits.Int( 5000, usedefault=True, argstr="-j %d", desc="Num of jumps to be made by MCMC" ) burn_in = traits.Range( low=0, value=0, usedefault=True, argstr="-b %d", desc=("Total num of jumps at start of MCMC to be " "discarded"), ) sample_every = traits.Range( low=0, value=1, 
usedefault=True, argstr="-s %d", desc="Num of jumps for each sample (MCMC)", ) out_dir = Directory( "bedpostx", mandatory=True, desc="output directory", usedefault=True, position=1, argstr="%s", ) gradnonlin = traits.Bool( False, argstr="-g", desc=("consider gradient nonlinearities, " "default off") ) grad_dev = File(exists=True, desc="grad_dev file, if gradnonlin, -g is True") use_gpu = traits.Bool(False, desc="Use the GPU version of bedpostx") class BEDPOSTX5OutputSpec(TraitedSpec): mean_dsamples = File(exists=True, desc="Mean of distribution on diffusivity d") mean_fsamples = OutputMultiPath( File(exists=True), desc=("Mean of distribution on f " "anisotropy") ) mean_S0samples = File( exists=True, desc=("Mean of distribution on T2w" "baseline signal intensity S0") ) mean_phsamples = OutputMultiPath( File(exists=True), desc="Mean of distribution on phi" ) mean_thsamples = OutputMultiPath( File(exists=True), desc="Mean of distribution on theta" ) merged_thsamples = OutputMultiPath( File(exists=True), desc=("Samples from the distribution " "on theta") ) merged_phsamples = OutputMultiPath( File(exists=True), desc=("Samples from the distribution " "on phi") ) merged_fsamples = OutputMultiPath( File(exists=True), desc=("Samples from the distribution on " "anisotropic volume fraction"), ) dyads = OutputMultiPath( File(exists=True), desc="Mean of PDD distribution in vector form." ) dyads_dispersion = OutputMultiPath(File(exists=True), desc=("Dispersion")) class BEDPOSTX5(FSLXCommand): """ BEDPOSTX stands for Bayesian Estimation of Diffusion Parameters Obtained using Sampling Techniques. The X stands for modelling Crossing Fibres. bedpostx runs Markov Chain Monte Carlo sampling to build up distributions on diffusion parameters at each voxel. It creates all the files necessary for running probabilistic tractography. For an overview of the modelling carried out within bedpostx see this `technical report `_. .. 
note:: Consider using :func:`niflow.nipype1.workflows.fsl.dmri.create_bedpostx_pipeline` instead. Example ------- >>> from nipype.interfaces import fsl >>> bedp = fsl.BEDPOSTX5(bvecs='bvecs', bvals='bvals', dwi='diffusion.nii', ... mask='mask.nii', n_fibres=1) >>> bedp.cmdline 'bedpostx bedpostx -b 0 --burnin_noard=0 --forcedir -n 1 -j 5000 \ -s 1 --updateproposalevery=40' """ _cmd = "bedpostx" _default_cmd = _cmd input_spec = BEDPOSTX5InputSpec output_spec = BEDPOSTX5OutputSpec _can_resume = True def __init__(self, **inputs): super(BEDPOSTX5, self).__init__(**inputs) self.inputs.on_trait_change(self._cuda_update, "use_gpu") def _cuda_update(self): if isdefined(self.inputs.use_gpu) and self.inputs.use_gpu: self._cmd = "bedpostx_gpu" else: self._cmd = self._default_cmd def _run_interface(self, runtime): subjectdir = os.path.abspath(self.inputs.out_dir) if not os.path.exists(subjectdir): os.makedirs(subjectdir) _, _, ext = split_filename(self.inputs.mask) copyfile(self.inputs.mask, os.path.join(subjectdir, "nodif_brain_mask" + ext)) _, _, ext = split_filename(self.inputs.dwi) copyfile(self.inputs.dwi, os.path.join(subjectdir, "data" + ext)) copyfile(self.inputs.bvals, os.path.join(subjectdir, "bvals")) copyfile(self.inputs.bvecs, os.path.join(subjectdir, "bvecs")) if isdefined(self.inputs.grad_dev): _, _, ext = split_filename(self.inputs.grad_dev) copyfile(self.inputs.grad_dev, os.path.join(subjectdir, "grad_dev" + ext)) retval = super(BEDPOSTX5, self)._run_interface(runtime) self._out_dir = subjectdir + ".bedpostX" return retval def _list_outputs(self): outputs = self.output_spec().get() n_fibres = self.inputs.n_fibres multi_out = [ "merged_thsamples", "merged_fsamples", "merged_phsamples", "mean_phsamples", "mean_thsamples", "mean_fsamples", "dyads_dispersion", "dyads", ] single_out = ["mean_dsamples", "mean_S0samples"] for k in single_out: outputs[k] = self._gen_fname(k, cwd=self._out_dir) for k in multi_out: outputs[k] = [] for i in range(1, n_fibres + 1): 
outputs["merged_thsamples"].append( self._gen_fname("merged_th%dsamples" % i, cwd=self._out_dir) ) outputs["merged_fsamples"].append( self._gen_fname("merged_f%dsamples" % i, cwd=self._out_dir) ) outputs["merged_phsamples"].append( self._gen_fname("merged_ph%dsamples" % i, cwd=self._out_dir) ) outputs["mean_thsamples"].append( self._gen_fname("mean_th%dsamples" % i, cwd=self._out_dir) ) outputs["mean_phsamples"].append( self._gen_fname("mean_ph%dsamples" % i, cwd=self._out_dir) ) outputs["mean_fsamples"].append( self._gen_fname("mean_f%dsamples" % i, cwd=self._out_dir) ) outputs["dyads"].append(self._gen_fname("dyads%d" % i, cwd=self._out_dir)) outputs["dyads_dispersion"].append( self._gen_fname("dyads%d_dispersion" % i, cwd=self._out_dir) ) return outputs class XFibres5InputSpec(FSLXCommandInputSpec): gradnonlin = File( exists=True, argstr="--gradnonlin=%s", desc="gradient file corresponding to slice", ) class XFibres5(FSLXCommand): """ Perform model parameters estimation for local (voxelwise) diffusion parameters """ _cmd = "xfibres" input_spec = XFibres5InputSpec output_spec = FSLXCommandOutputSpec XFibres = XFibres5 BEDPOSTX = BEDPOSTX5 class ProbTrackXBaseInputSpec(FSLCommandInputSpec): thsamples = InputMultiPath(File(exists=True), mandatory=True) phsamples = InputMultiPath(File(exists=True), mandatory=True) fsamples = InputMultiPath(File(exists=True), mandatory=True) samples_base_name = traits.Str( "merged", desc=("the rootname/base_name for samples " "files"), argstr="--samples=%s", usedefault=True, ) mask = File( exists=True, desc="bet binary mask file in diffusion space", argstr="-m %s", mandatory=True, ) seed = traits.Either( File(exists=True), traits.List(File(exists=True)), traits.List(traits.List(traits.Int(), minlen=3, maxlen=3)), desc=("seed volume(s), or voxel(s) or freesurfer " "label file"), argstr="--seed=%s", mandatory=True, ) target_masks = InputMultiPath( File(exits=True), desc=("list of target masks - required for " "seeds_to_targets 
classification"), argstr="--targetmasks=%s", ) waypoints = File( exists=True, desc=( "waypoint mask or ascii list of waypoint masks - " "only keep paths going through ALL the masks" ), argstr="--waypoints=%s", ) network = traits.Bool( desc=( "activate network mode - only keep paths " "going through at least one seed mask " "(required if multiple seed masks)" ), argstr="--network", ) seed_ref = File( exists=True, desc=( "reference vol to define seed space in simple mode " "- diffusion space assumed if absent" ), argstr="--seedref=%s", ) out_dir = Directory( exists=True, argstr="--dir=%s", desc="directory to put the final volumes in", genfile=True, ) force_dir = traits.Bool( True, desc=( "use the actual directory name given - i.e. " "do not add + to make a new directory" ), argstr="--forcedir", usedefault=True, ) opd = traits.Bool( True, desc="outputs path distributions", argstr="--opd", usedefault=True ) correct_path_distribution = traits.Bool( desc=("correct path distribution " "for the length of the " "pathways"), argstr="--pd", ) os2t = traits.Bool(desc="Outputs seeds to targets", argstr="--os2t") # paths_file = File('nipype_fdtpaths', usedefault=True, argstr='--out=%s', # desc='produces an output file (default is fdt_paths)') avoid_mp = File( exists=True, desc=("reject pathways passing through locations given by " "this mask"), argstr="--avoid=%s", ) stop_mask = File( exists=True, argstr="--stop=%s", desc="stop tracking at locations given by this mask file", ) xfm = File( exists=True, argstr="--xfm=%s", desc=( "transformation matrix taking seed space to DTI space " "(either FLIRT matrix or FNIRT warp_field) - default is " "identity" ), ) inv_xfm = File( argstr="--invxfm=%s", desc=( "transformation matrix taking DTI space to seed " "space (compulsory when using a warp_field for " "seeds_to_dti)" ), ) n_samples = traits.Int( 5000, argstr="--nsamples=%d", desc="number of samples - default=5000", usedefault=True, ) n_steps = traits.Int( argstr="--nsteps=%d", 
desc="number of steps per sample - default=2000" ) dist_thresh = traits.Float( argstr="--distthresh=%.3f", desc=("discards samples shorter than this " "threshold (in mm - default=0)"), ) c_thresh = traits.Float( argstr="--cthr=%.3f", desc="curvature threshold - default=0.2" ) sample_random_points = traits.Bool( argstr="--sampvox", desc=("sample random points within " "seed voxels") ) step_length = traits.Float( argstr="--steplength=%.3f", desc="step_length in mm - default=0.5" ) loop_check = traits.Bool( argstr="--loopcheck", desc=( "perform loop_checks on paths - slower, " "but allows lower curvature threshold" ), ) use_anisotropy = traits.Bool( argstr="--usef", desc="use anisotropy to constrain tracking" ) rand_fib = traits.Enum( 0, 1, 2, 3, argstr="--randfib=%d", desc=( "options: 0 - default, 1 - to randomly " "sample initial fibres (with f > fibthresh), " "2 - to sample in proportion fibres (with " "f>fibthresh) to f, 3 - to sample ALL " "populations at random (even if " "f>> from nipype.interfaces import fsl >>> pbx = fsl.ProbTrackX(samples_base_name='merged', mask='mask.nii', \ seed='MASK_average_thal_right.nii', mode='seedmask', \ xfm='trans.mat', n_samples=3, n_steps=10, force_dir=True, opd=True, \ os2t=True, target_masks = ['targets_MASK1.nii', 'targets_MASK2.nii'], \ thsamples='merged_thsamples.nii', fsamples='merged_fsamples.nii', \ phsamples='merged_phsamples.nii', out_dir='.') >>> pbx.cmdline 'probtrackx --forcedir -m mask.nii --mode=seedmask --nsamples=3 --nsteps=10 --opd --os2t --dir=. 
--samples=merged --seed=MASK_average_thal_right.nii --targetmasks=targets.txt --xfm=trans.mat' """ _cmd = "probtrackx" input_spec = ProbTrackXInputSpec output_spec = ProbTrackXOutputSpec def __init__(self, **inputs): warnings.warn( ("Deprecated: Please use create_bedpostx_pipeline " "instead"), DeprecationWarning, ) return super(ProbTrackX, self).__init__(**inputs) def _run_interface(self, runtime): for i in range(1, len(self.inputs.thsamples) + 1): _, _, ext = split_filename(self.inputs.thsamples[i - 1]) copyfile( self.inputs.thsamples[i - 1], self.inputs.samples_base_name + "_th%dsamples" % i + ext, copy=False, ) _, _, ext = split_filename(self.inputs.thsamples[i - 1]) copyfile( self.inputs.phsamples[i - 1], self.inputs.samples_base_name + "_ph%dsamples" % i + ext, copy=False, ) _, _, ext = split_filename(self.inputs.thsamples[i - 1]) copyfile( self.inputs.fsamples[i - 1], self.inputs.samples_base_name + "_f%dsamples" % i + ext, copy=False, ) if isdefined(self.inputs.target_masks): f = open("targets.txt", "w") for target in self.inputs.target_masks: f.write("%s\n" % target) f.close() if isinstance(self.inputs.seed, list): f = open("seeds.txt", "w") for seed in self.inputs.seed: if isinstance(seed, list): f.write("%s\n" % (" ".join([str(s) for s in seed]))) else: f.write("%s\n" % seed) f.close() runtime = super(ProbTrackX, self)._run_interface(runtime) if runtime.stderr: self.raise_exception(runtime) return runtime def _format_arg(self, name, spec, value): if name == "target_masks" and isdefined(value): fname = "targets.txt" return super(ProbTrackX, self)._format_arg(name, spec, [fname]) elif name == "seed" and isinstance(value, list): fname = "seeds.txt" return super(ProbTrackX, self)._format_arg(name, spec, fname) else: return super(ProbTrackX, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() if not isdefined(self.inputs.out_dir): out_dir = self._gen_filename("out_dir") else: out_dir = self.inputs.out_dir 
outputs["log"] = os.path.abspath(os.path.join(out_dir, "probtrackx.log")) # utputs['way_total'] = os.path.abspath(os.path.join(out_dir, # 'waytotal')) if isdefined(self.inputs.opd is True): if isinstance(self.inputs.seed, list) and isinstance( self.inputs.seed[0], list ): outputs["fdt_paths"] = [] for seed in self.inputs.seed: outputs["fdt_paths"].append( os.path.abspath( self._gen_fname( ("fdt_paths_%s" % ("_".join([str(s) for s in seed]))), cwd=out_dir, suffix="", ) ) ) else: outputs["fdt_paths"] = os.path.abspath( self._gen_fname("fdt_paths", cwd=out_dir, suffix="") ) # handle seeds-to-target output files if isdefined(self.inputs.target_masks): outputs["targets"] = [] for target in self.inputs.target_masks: outputs["targets"].append( os.path.abspath( self._gen_fname( "seeds_to_" + os.path.split(target)[1], cwd=out_dir, suffix="", ) ) ) if isdefined(self.inputs.verbose) and self.inputs.verbose == 2: outputs["particle_files"] = [ os.path.abspath(os.path.join(out_dir, "particle%d" % i)) for i in range(self.inputs.n_samples) ] return outputs def _gen_filename(self, name): if name == "out_dir": return os.getcwd() elif name == "mode": if isinstance(self.inputs.seed, list) and isinstance( self.inputs.seed[0], list ): return "simple" else: return "seedmask" class ProbTrackX2InputSpec(ProbTrackXBaseInputSpec): simple = traits.Bool( desc=( "rack from a list of voxels (seed must be a " "ASCII list of coordinates)" ), argstr="--simple", ) fopd = File( exists=True, desc="Other mask for binning tract distribution", argstr="--fopd=%s", ) waycond = traits.Enum( "OR", "AND", argstr="--waycond=%s", desc=('Waypoint condition. Either "AND" (default) ' 'or "OR"'), ) wayorder = traits.Bool( desc=( "Reject streamlines that do not hit " "waypoints in given order. 
Only valid if " "waycond=AND" ), argstr="--wayorder", ) onewaycondition = traits.Bool( desc=("Apply waypoint conditions to each " "half tract separately"), argstr="--onewaycondition", ) omatrix1 = traits.Bool( desc="Output matrix1 - SeedToSeed Connectivity", argstr="--omatrix1" ) distthresh1 = traits.Float( argstr="--distthresh1=%.3f", desc=( "Discards samples (in matrix1) shorter " "than this threshold (in mm - " "default=0)" ), ) omatrix2 = traits.Bool( desc="Output matrix2 - SeedToLowResMask", argstr="--omatrix2", requires=["target2"], ) target2 = File( exists=True, desc=( "Low resolution binary brain mask for storing " "connectivity distribution in matrix2 mode" ), argstr="--target2=%s", ) omatrix3 = traits.Bool( desc="Output matrix3 (NxN connectivity matrix)", argstr="--omatrix3", requires=["target3", "lrtarget3"], ) target3 = File( exists=True, desc=("Mask used for NxN connectivity matrix (or Nxn if " "lrtarget3 is set)"), argstr="--target3=%s", ) lrtarget3 = File( exists=True, desc="Column-space mask used for Nxn connectivity matrix", argstr="--lrtarget3=%s", ) distthresh3 = traits.Float( argstr="--distthresh3=%.3f", desc=( "Discards samples (in matrix3) shorter " "than this threshold (in mm - " "default=0)" ), ) omatrix4 = traits.Bool( desc=("Output matrix4 - DtiMaskToSeed (special " "Oxford Sparse Format)"), argstr="--omatrix4", ) colmask4 = File( exists=True, desc="Mask for columns of matrix4 (default=seed mask)", argstr="--colmask4=%s", ) target4 = File(exists=True, desc="Brain mask in DTI space", argstr="--target4=%s") meshspace = traits.Enum( "caret", "freesurfer", "first", "vox", argstr="--meshspace=%s", desc=( 'Mesh reference space - either "caret" ' '(default) or "freesurfer" or "first" or ' '"vox"' ), ) class ProbTrackX2OutputSpec(ProbTrackXOutputSpec): network_matrix = File( exists=True, desc=("the network matrix generated by --omatrix1 " "option") ) matrix1_dot = File(exists=True, desc="Output matrix1.dot - SeedToSeed Connectivity") 
lookup_tractspace = File( exists=True, desc=("lookup_tractspace generated by " "--omatrix2 option") ) matrix2_dot = File(exists=True, desc="Output matrix2.dot - SeedToLowResMask") matrix3_dot = File(exists=True, desc="Output matrix3 - NxN connectivity matrix") class ProbTrackX2(ProbTrackX): """Use FSL probtrackx2 for tractography on bedpostx results Examples -------- >>> from nipype.interfaces import fsl >>> pbx2 = fsl.ProbTrackX2() >>> pbx2.inputs.seed = 'seed_source.nii.gz' >>> pbx2.inputs.thsamples = 'merged_th1samples.nii.gz' >>> pbx2.inputs.fsamples = 'merged_f1samples.nii.gz' >>> pbx2.inputs.phsamples = 'merged_ph1samples.nii.gz' >>> pbx2.inputs.mask = 'nodif_brain_mask.nii.gz' >>> pbx2.inputs.out_dir = '.' >>> pbx2.inputs.n_samples = 3 >>> pbx2.inputs.n_steps = 10 >>> pbx2.cmdline 'probtrackx2 --forcedir -m nodif_brain_mask.nii.gz --nsamples=3 --nsteps=10 --opd --dir=. --samples=merged --seed=seed_source.nii.gz' """ _cmd = "probtrackx2" input_spec = ProbTrackX2InputSpec output_spec = ProbTrackX2OutputSpec def _list_outputs(self): outputs = super(ProbTrackX2, self)._list_outputs() if not isdefined(self.inputs.out_dir): out_dir = os.getcwd() else: out_dir = self.inputs.out_dir outputs["way_total"] = os.path.abspath(os.path.join(out_dir, "waytotal")) if isdefined(self.inputs.omatrix1): outputs["network_matrix"] = os.path.abspath( os.path.join(out_dir, "matrix_seeds_to_all_targets") ) outputs["matrix1_dot"] = os.path.abspath( os.path.join(out_dir, "fdt_matrix1.dot") ) if isdefined(self.inputs.omatrix2): outputs["lookup_tractspace"] = os.path.abspath( os.path.join(out_dir, "lookup_tractspace_fdt_matrix2.nii.gz") ) outputs["matrix2_dot"] = os.path.abspath( os.path.join(out_dir, "fdt_matrix2.dot") ) if isdefined(self.inputs.omatrix3): outputs["matrix3_dot"] = os.path.abspath( os.path.join(out_dir, "fdt_matrix3.dot") ) return outputs class VecRegInputSpec(FSLCommandInputSpec): in_file = File( exists=True, argstr="-i %s", desc="filename for input vector or tensor 
field", mandatory=True, ) out_file = File( argstr="-o %s", desc=("filename for output registered vector or tensor " "field"), genfile=True, hash_files=False, ) ref_vol = File( exists=True, argstr="-r %s", desc="filename for reference (target) volume", mandatory=True, ) affine_mat = File( exists=True, argstr="-t %s", desc="filename for affine transformation matrix" ) warp_field = File( exists=True, argstr="-w %s", desc=("filename for 4D warp field for nonlinear " "registration"), ) rotation_mat = File( exists=True, argstr="--rotmat=%s", desc=( "filename for secondary affine matrix if set, " "this will be used for the rotation of the " "vector/tensor field" ), ) rotation_warp = File( exists=True, argstr="--rotwarp=%s", desc=( "filename for secondary warp field if set, " "this will be used for the rotation of the " "vector/tensor field" ), ) interpolation = traits.Enum( "nearestneighbour", "trilinear", "sinc", "spline", argstr="--interp=%s", desc=( "interpolation method : " "nearestneighbour, trilinear (default), " "sinc or spline" ), ) mask = File(exists=True, argstr="-m %s", desc="brain mask in input space") ref_mask = File( exists=True, argstr="--refmask=%s", desc=("brain mask in output space (useful for speed up of " "nonlinear reg)"), ) class VecRegOutputSpec(TraitedSpec): out_file = File( exists=True, desc=("path/name of filename for the registered vector or " "tensor field"), ) class VecReg(FSLCommand): """Use FSL vecreg for registering vector data For complete details, see the FDT Documentation Example ------- >>> from nipype.interfaces import fsl >>> vreg = fsl.VecReg(in_file='diffusion.nii', \ affine_mat='trans.mat', \ ref_vol='mni.nii', \ out_file='diffusion_vreg.nii') >>> vreg.cmdline 'vecreg -t trans.mat -i diffusion.nii -o diffusion_vreg.nii -r mni.nii' """ _cmd = "vecreg" input_spec = VecRegInputSpec output_spec = VecRegOutputSpec def _run_interface(self, runtime): if not isdefined(self.inputs.out_file): pth, base_name = 
os.path.split(self.inputs.in_file) self.inputs.out_file = self._gen_fname( base_name, cwd=os.path.abspath(pth), suffix="_vreg" ) return super(VecReg, self)._run_interface(runtime) def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = self.inputs.out_file if not isdefined(outputs["out_file"]) and isdefined(self.inputs.in_file): pth, base_name = os.path.split(self.inputs.in_file) outputs["out_file"] = self._gen_fname( base_name, cwd=os.path.abspath(pth), suffix="_vreg" ) outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()[name] else: return None class ProjThreshInputSpec(FSLCommandInputSpec): in_files = traits.List( File(exists=True), argstr="%s", desc="a list of input volumes", mandatory=True, position=0, ) threshold = traits.Int( argstr="%d", desc=( "threshold indicating minimum number of seed " "voxels entering this mask region" ), mandatory=True, position=1, ) class ProjThreshOuputSpec(TraitedSpec): out_files = traits.List( File(exists=True), desc=("path/name of output volume after " "thresholding") ) class ProjThresh(FSLCommand): """Use FSL proj_thresh for thresholding some outputs of probtrack For complete details, see the FDT Documentation Example ------- >>> from nipype.interfaces import fsl >>> ldir = ['seeds_to_M1.nii', 'seeds_to_M2.nii'] >>> pThresh = fsl.ProjThresh(in_files=ldir, threshold=3) >>> pThresh.cmdline 'proj_thresh seeds_to_M1.nii seeds_to_M2.nii 3' """ _cmd = "proj_thresh" input_spec = ProjThreshInputSpec output_spec = ProjThreshOuputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_files"] = [] for name in self.inputs.in_files: cwd, base_name = os.path.split(name) outputs["out_files"].append( self._gen_fname( base_name, cwd=cwd, suffix="_proj_seg_thr_{}".format(self.inputs.threshold), ) ) return outputs class FindTheBiggestInputSpec(FSLCommandInputSpec): in_files = traits.List( 
File(exists=True), argstr="%s", desc=("a list of input volumes or a " "singleMatrixFile"), position=0, mandatory=True, ) out_file = File( argstr="%s", desc="file with the resulting segmentation", position=2, genfile=True, hash_files=False, ) class FindTheBiggestOutputSpec(TraitedSpec): out_file = File( exists=True, argstr="%s", desc="output file indexed in order of input files" ) class FindTheBiggest(FSLCommand): """ Use FSL find_the_biggest for performing hard segmentation on the outputs of connectivity-based thresholding in probtrack. For complete details, see the `FDT Documentation. `_ Example ------- >>> from nipype.interfaces import fsl >>> ldir = ['seeds_to_M1.nii', 'seeds_to_M2.nii'] >>> fBig = fsl.FindTheBiggest(in_files=ldir, out_file='biggestSegmentation') >>> fBig.cmdline 'find_the_biggest seeds_to_M1.nii seeds_to_M2.nii biggestSegmentation' """ _cmd = "find_the_biggest" input_spec = FindTheBiggestInputSpec output_spec = FindTheBiggestOutputSpec def _run_interface(self, runtime): if not isdefined(self.inputs.out_file): self.inputs.out_file = self._gen_fname("biggestSegmentation", suffix="") return super(FindTheBiggest, self)._run_interface(runtime) def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = self.inputs.out_file if not isdefined(outputs["out_file"]): outputs["out_file"] = self._gen_fname("biggestSegmentation", suffix="") outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()[name] else: return None class TractSkeletonInputSpec(FSLCommandInputSpec): in_file = File( exists=True, mandatory=True, argstr="-i %s", desc="input image (typcially mean FA volume)", ) _proj_inputs = ["threshold", "distance_map", "data_file"] project_data = traits.Bool( argstr="-p %.3f %s %s %s %s", requires=_proj_inputs, desc="project data onto skeleton", ) threshold = traits.Float(desc="skeleton threshold value") distance_map = File(exists=True, 
desc="distance map image") search_mask_file = File( exists=True, xor=["use_cingulum_mask"], desc="mask in which to use alternate search rule", ) use_cingulum_mask = traits.Bool( True, usedefault=True, xor=["search_mask_file"], desc=("perform alternate search using " "built-in cingulum mask"), ) data_file = File(exists=True, desc="4D data to project onto skeleton (usually FA)") alt_data_file = File( exists=True, argstr="-a %s", desc="4D non-FA data to project onto skeleton" ) alt_skeleton = File(exists=True, argstr="-s %s", desc="alternate skeleton to use") projected_data = File(desc="input data projected onto skeleton") skeleton_file = traits.Either( traits.Bool, File, argstr="-o %s", desc="write out skeleton image" ) class TractSkeletonOutputSpec(TraitedSpec): projected_data = File(desc="input data projected onto skeleton") skeleton_file = File(desc="tract skeleton image") class TractSkeleton(FSLCommand): """Use FSL's tbss_skeleton to skeletonise an FA image or project arbitrary values onto a skeleton. There are two ways to use this interface. To create a skeleton from an FA image, just supply the ``in_file`` and set ``skeleton_file`` to True (or specify a skeleton filename. To project values onto a skeleton, you must set ``project_data`` to True, and then also supply values for ``threshold``, ``distance_map``, and ``data_file``. The ``search_mask_file`` and ``use_cingulum_mask`` inputs are also used in data projection, but ``use_cingulum_mask`` is set to True by default. This mask controls where the projection algorithm searches within a circular space around a tract, rather than in a single perpindicular direction. 
Example ------- >>> import nipype.interfaces.fsl as fsl >>> skeletor = fsl.TractSkeleton() >>> skeletor.inputs.in_file = "all_FA.nii.gz" >>> skeletor.inputs.skeleton_file = True >>> skeletor.run() # doctest: +SKIP """ _cmd = "tbss_skeleton" input_spec = TractSkeletonInputSpec output_spec = TractSkeletonOutputSpec def _format_arg(self, name, spec, value): if name == "project_data": if isdefined(value) and value: _si = self.inputs if isdefined(_si.use_cingulum_mask) and _si.use_cingulum_mask: mask_file = Info.standard_image("LowerCingulum_1mm.nii.gz") else: mask_file = _si.search_mask_file if not isdefined(_si.projected_data): proj_file = self._list_outputs()["projected_data"] else: proj_file = _si.projected_data return spec.argstr % ( _si.threshold, _si.distance_map, mask_file, _si.data_file, proj_file, ) elif name == "skeleton_file": if isinstance(value, bool): return spec.argstr % self._list_outputs()["skeleton_file"] else: return spec.argstr % value return super(TractSkeleton, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() _si = self.inputs if isdefined(_si.project_data) and _si.project_data: proj_data = _si.projected_data outputs["projected_data"] = proj_data if not isdefined(proj_data): stem = _si.data_file if isdefined(_si.alt_data_file): stem = _si.alt_data_file outputs["projected_data"] = fname_presuffix( stem, suffix="_skeletonised", newpath=os.getcwd(), use_ext=True ) if isdefined(_si.skeleton_file) and _si.skeleton_file: outputs["skeleton_file"] = _si.skeleton_file if isinstance(_si.skeleton_file, bool): outputs["skeleton_file"] = fname_presuffix( _si.in_file, suffix="_skeleton", newpath=os.getcwd(), use_ext=True ) return outputs class DistanceMapInputSpec(FSLCommandInputSpec): in_file = File( exists=True, mandatory=True, argstr="--in=%s", desc="image to calculate distance values for", ) mask_file = File( exists=True, argstr="--mask=%s", desc="binary mask to contrain calculations" ) invert_input = 
traits.Bool(argstr="--invert", desc="invert input image") local_max_file = traits.Either( traits.Bool, File, argstr="--localmax=%s", desc="write an image of the local maxima", hash_files=False, ) distance_map = File( genfile=True, argstr="--out=%s", desc="distance map to write", hash_files=False ) class DistanceMapOutputSpec(TraitedSpec): distance_map = File(exists=True, desc="value is distance to nearest nonzero voxels") local_max_file = File(desc="image of local maxima") class DistanceMap(FSLCommand): """Use FSL's distancemap to generate a map of the distance to the nearest nonzero voxel. Example ------- >>> import nipype.interfaces.fsl as fsl >>> mapper = fsl.DistanceMap() >>> mapper.inputs.in_file = "skeleton_mask.nii.gz" >>> mapper.run() # doctest: +SKIP """ _cmd = "distancemap" input_spec = DistanceMapInputSpec output_spec = DistanceMapOutputSpec def _format_arg(self, name, spec, value): if name == "local_max_file": if isinstance(value, bool): return spec.argstr % self._list_outputs()["local_max_file"] return super(DistanceMap, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() _si = self.inputs outputs["distance_map"] = _si.distance_map if not isdefined(_si.distance_map): outputs["distance_map"] = fname_presuffix( _si.in_file, suffix="_dstmap", use_ext=True, newpath=os.getcwd() ) outputs["distance_map"] = os.path.abspath(outputs["distance_map"]) if isdefined(_si.local_max_file): outputs["local_max_file"] = _si.local_max_file if isinstance(_si.local_max_file, bool): outputs["local_max_file"] = fname_presuffix( _si.in_file, suffix="_lclmax", use_ext=True, newpath=os.getcwd() ) outputs["local_max_file"] = os.path.abspath(outputs["local_max_file"]) return outputs def _gen_filename(self, name): if name == "distance_map": return self._list_outputs()["distance_map"] return None class MakeDyadicVectorsInputSpec(FSLCommandInputSpec): theta_vol = File(exists=True, mandatory=True, position=0, argstr="%s") phi_vol = 
File(exists=True, mandatory=True, position=1, argstr="%s") mask = File(exists=True, position=2, argstr="%s") output = File("dyads", position=3, usedefault=True, argstr="%s", hash_files=False) perc = traits.Float( desc=( "the {perc}% angle of the output cone of " "uncertainty (output will be in degrees)" ), position=4, argstr="%f", ) class MakeDyadicVectorsOutputSpec(TraitedSpec): dyads = File(exists=True) dispersion = File(exists=True) class MakeDyadicVectors(FSLCommand): """Create vector volume representing mean principal diffusion direction and its uncertainty (dispersion)""" _cmd = "make_dyadic_vectors" input_spec = MakeDyadicVectorsInputSpec output_spec = MakeDyadicVectorsOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["dyads"] = self._gen_fname(self.inputs.output) outputs["dispersion"] = self._gen_fname( self.inputs.output, suffix="_dispersion" ) return outputs nipype-1.7.0/nipype/interfaces/fsl/epi.py000066400000000000000000001621411413403311400203640ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL `_ command line tools. This was written to work with FSL version 5.0.4. 
""" import os import numpy as np import nibabel as nb import warnings from ...utils.filemanip import split_filename, fname_presuffix from ..base import traits, TraitedSpec, InputMultiPath, File, isdefined from .base import FSLCommand, FSLCommandInputSpec, Info class PrepareFieldmapInputSpec(FSLCommandInputSpec): scanner = traits.String( "SIEMENS", argstr="%s", position=1, desc="must be SIEMENS", usedefault=True ) in_phase = File( exists=True, argstr="%s", position=2, mandatory=True, desc=( "Phase difference map, in SIEMENS format range from " "0-4096 or 0-8192)" ), ) in_magnitude = File( exists=True, argstr="%s", position=3, mandatory=True, desc="Magnitude difference map, brain extracted", ) delta_TE = traits.Float( 2.46, usedefault=True, mandatory=True, argstr="%f", position=-2, desc=( "echo time difference of the " "fieldmap sequence in ms. (usually 2.46ms in" " Siemens)" ), ) nocheck = traits.Bool( False, position=-1, argstr="--nocheck", usedefault=True, desc=("do not perform sanity checks for image " "size/range/dimensions"), ) out_fieldmap = File( argstr="%s", position=4, desc="output name for prepared fieldmap" ) class PrepareFieldmapOutputSpec(TraitedSpec): out_fieldmap = File(exists=True, desc="output name for prepared fieldmap") class PrepareFieldmap(FSLCommand): """ Interface for the fsl_prepare_fieldmap script (FSL 5.0) Prepares a fieldmap suitable for FEAT from SIEMENS data - saves output in rad/s format (e.g. ```fsl_prepare_fieldmap SIEMENS images_3_gre_field_mapping images_4_gre_field_mapping fmap_rads 2.65```). 
Examples -------- >>> from nipype.interfaces.fsl import PrepareFieldmap >>> prepare = PrepareFieldmap() >>> prepare.inputs.in_phase = "phase.nii" >>> prepare.inputs.in_magnitude = "magnitude.nii" >>> prepare.inputs.output_type = "NIFTI_GZ" >>> prepare.cmdline # doctest: +ELLIPSIS 'fsl_prepare_fieldmap SIEMENS phase.nii magnitude.nii \ .../phase_fslprepared.nii.gz 2.460000' >>> res = prepare.run() # doctest: +SKIP """ _cmd = "fsl_prepare_fieldmap" input_spec = PrepareFieldmapInputSpec output_spec = PrepareFieldmapOutputSpec def _parse_inputs(self, skip=None): if skip is None: skip = [] if not isdefined(self.inputs.out_fieldmap): self.inputs.out_fieldmap = self._gen_fname( self.inputs.in_phase, suffix="_fslprepared" ) if not isdefined(self.inputs.nocheck) or not self.inputs.nocheck: skip += ["nocheck"] return super(PrepareFieldmap, self)._parse_inputs(skip=skip) def _list_outputs(self): outputs = self.output_spec().get() outputs["out_fieldmap"] = self.inputs.out_fieldmap return outputs def _run_interface(self, runtime): runtime = super(PrepareFieldmap, self)._run_interface(runtime) if runtime.returncode == 0: out_file = self.inputs.out_fieldmap im = nb.load(out_file) dumb_img = nb.Nifti1Image(np.zeros(im.shape), im.affine, im.header) out_nii = nb.funcs.concat_images((im, dumb_img)) nb.save(out_nii, out_file) return runtime class TOPUPInputSpec(FSLCommandInputSpec): in_file = File( exists=True, mandatory=True, desc="name of 4D file with images", argstr="--imain=%s", ) encoding_file = File( exists=True, mandatory=True, xor=["encoding_direction"], desc="name of text file with PE directions/times", argstr="--datain=%s", ) encoding_direction = traits.List( traits.Enum("y", "x", "z", "x-", "y-", "z-"), mandatory=True, xor=["encoding_file"], requires=["readout_times"], argstr="--datain=%s", desc=("encoding direction for automatic " "generation of encoding_file"), ) readout_times = InputMultiPath( traits.Float, requires=["encoding_direction"], xor=["encoding_file"], 
mandatory=True, desc=("readout times (dwell times by # " "phase-encode steps minus 1)"), ) out_base = File( desc=( "base-name of output files (spline " "coefficients (Hz) and movement parameters)" ), name_source=["in_file"], name_template="%s_base", argstr="--out=%s", hash_files=False, ) out_field = File( argstr="--fout=%s", hash_files=False, name_source=["in_file"], name_template="%s_field", desc="name of image file with field (Hz)", ) out_warp_prefix = traits.Str( "warpfield", argstr="--dfout=%s", hash_files=False, desc="prefix for the warpfield images (in mm)", usedefault=True, ) out_mat_prefix = traits.Str( "xfm", argstr="--rbmout=%s", hash_files=False, desc="prefix for the realignment matrices", usedefault=True, ) out_jac_prefix = traits.Str( "jac", argstr="--jacout=%s", hash_files=False, desc="prefix for the warpfield images", usedefault=True, ) out_corrected = File( argstr="--iout=%s", hash_files=False, name_source=["in_file"], name_template="%s_corrected", desc="name of 4D image file with unwarped images", ) out_logfile = File( argstr="--logout=%s", desc="name of log-file", name_source=["in_file"], name_template="%s_topup.log", keep_extension=True, hash_files=False, ) # TODO: the following traits admit values separated by commas, one value # per registration level inside topup. warp_res = traits.Float( argstr="--warpres=%f", desc=( "(approximate) resolution (in mm) of warp " "basis for the different sub-sampling levels" ), ) subsamp = traits.Int(argstr="--subsamp=%d", desc="sub-sampling scheme") fwhm = traits.Float( argstr="--fwhm=%f", desc="FWHM (in mm) of gaussian smoothing kernel" ) config = traits.String( "b02b0.cnf", argstr="--config=%s", usedefault=True, desc=("Name of config file specifying command line " "arguments"), ) max_iter = traits.Int(argstr="--miter=%d", desc="max # of non-linear iterations") reg_lambda = traits.Float( argstr="--lambda=%0.f", desc=( "Weight of regularisation, default " "depending on --ssqlambda and --regmod switches." 
), ) ssqlambda = traits.Enum( 1, 0, argstr="--ssqlambda=%d", desc=( "Weight lambda by the current value of the " "ssd. If used (=1), the effective weight of " "regularisation term becomes higher for the " "initial iterations, therefore initial steps" " are a little smoother than they would " "without weighting. This reduces the " "risk of finding a local minimum." ), ) regmod = traits.Enum( "bending_energy", "membrane_energy", argstr="--regmod=%s", desc=( "Regularisation term implementation. Defaults " "to bending_energy. Note that the two functions" " have vastly different scales. The membrane " "energy is based on the first derivatives and " "the bending energy on the second derivatives. " "The second derivatives will typically be much " "smaller than the first derivatives, so input " "lambda will have to be larger for " "bending_energy to yield approximately the same" " level of regularisation." ), ) estmov = traits.Enum(1, 0, argstr="--estmov=%d", desc="estimate movements if set") minmet = traits.Enum( 0, 1, argstr="--minmet=%d", desc=( "Minimisation method 0=Levenberg-Marquardt, " "1=Scaled Conjugate Gradient" ), ) splineorder = traits.Int( argstr="--splineorder=%d", desc=("order of spline, 2->Qadratic spline, " "3->Cubic spline"), ) numprec = traits.Enum( "double", "float", argstr="--numprec=%s", desc=("Precision for representing Hessian, double " "or float."), ) interp = traits.Enum( "spline", "linear", argstr="--interp=%s", desc="Image interpolation model, linear or spline.", ) scale = traits.Enum( 0, 1, argstr="--scale=%d", desc=("If set (=1), the images are individually scaled" " to a common mean"), ) regrid = traits.Enum( 1, 0, argstr="--regrid=%d", desc=("If set (=1), the calculations are done in a " "different grid"), ) class TOPUPOutputSpec(TraitedSpec): out_fieldcoef = File(exists=True, desc="file containing the field coefficients") out_movpar = File(exists=True, desc="movpar.txt output file") out_enc_file = File(desc="encoding directions file output 
for applytopup") out_field = File(desc="name of image file with field (Hz)") out_warps = traits.List(File(exists=True), desc="warpfield images") out_jacs = traits.List(File(exists=True), desc="Jacobian images") out_mats = traits.List(File(exists=True), desc="realignment matrices") out_corrected = File(desc="name of 4D image file with unwarped images") out_logfile = File(desc="name of log-file") class TOPUP(FSLCommand): """ Interface for FSL topup, a tool for estimating and correcting susceptibility induced distortions. See FSL documentation for `reference `_, `usage examples `_, and `exemplary config files `_. Examples -------- >>> from nipype.interfaces.fsl import TOPUP >>> topup = TOPUP() >>> topup.inputs.in_file = "b0_b0rev.nii" >>> topup.inputs.encoding_file = "topup_encoding.txt" >>> topup.inputs.output_type = "NIFTI_GZ" >>> topup.cmdline # doctest: +ELLIPSIS 'topup --config=b02b0.cnf --datain=topup_encoding.txt \ --imain=b0_b0rev.nii --out=b0_b0rev_base --iout=b0_b0rev_corrected.nii.gz \ --fout=b0_b0rev_field.nii.gz --jacout=jac --logout=b0_b0rev_topup.log \ --rbmout=xfm --dfout=warpfield' >>> res = topup.run() # doctest: +SKIP """ _cmd = "topup" input_spec = TOPUPInputSpec output_spec = TOPUPOutputSpec def _format_arg(self, name, trait_spec, value): if name == "encoding_direction": return trait_spec.argstr % self._generate_encfile() if name == "out_base": path, name, ext = split_filename(value) if path != "": if not os.path.exists(path): raise ValueError("out_base path must exist if provided") return super(TOPUP, self)._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = super(TOPUP, self)._list_outputs() del outputs["out_base"] base_path = None if isdefined(self.inputs.out_base): base_path, base, _ = split_filename(self.inputs.out_base) if base_path == "": base_path = None else: base = split_filename(self.inputs.in_file)[1] + "_base" outputs["out_fieldcoef"] = self._gen_fname( base, suffix="_fieldcoef", cwd=base_path ) 
outputs["out_movpar"] = self._gen_fname( base, suffix="_movpar", ext=".txt", cwd=base_path ) n_vols = nb.load(self.inputs.in_file).shape[-1] ext = Info.output_type_to_ext(self.inputs.output_type) fmt = os.path.abspath("{prefix}_{i:02d}{ext}").format outputs["out_warps"] = [ fmt(prefix=self.inputs.out_warp_prefix, i=i, ext=ext) for i in range(1, n_vols + 1) ] outputs["out_jacs"] = [ fmt(prefix=self.inputs.out_jac_prefix, i=i, ext=ext) for i in range(1, n_vols + 1) ] outputs["out_mats"] = [ fmt(prefix=self.inputs.out_mat_prefix, i=i, ext=".mat") for i in range(1, n_vols + 1) ] if isdefined(self.inputs.encoding_direction): outputs["out_enc_file"] = self._get_encfilename() return outputs def _get_encfilename(self): out_file = os.path.join( os.getcwd(), ("%s_encfile.txt" % split_filename(self.inputs.in_file)[1]) ) return out_file def _generate_encfile(self): """Generate a topup compatible encoding file based on given directions""" out_file = self._get_encfilename() durations = self.inputs.readout_times if len(self.inputs.encoding_direction) != len(durations): if len(self.inputs.readout_times) != 1: raise ValueError( ( "Readout time must be a float or match the" "length of encoding directions" ) ) durations = durations * len(self.inputs.encoding_direction) lines = [] for idx, encdir in enumerate(self.inputs.encoding_direction): direction = 1.0 if encdir.endswith("-"): direction = -1.0 line = [ float(val[0] == encdir[0]) * direction for val in ["x", "y", "z"] ] + [durations[idx]] lines.append(line) np.savetxt(out_file, np.array(lines), fmt=b"%d %d %d %.8f") return out_file def _overload_extension(self, value, name=None): if name == "out_base": return value return super(TOPUP, self)._overload_extension(value, name) class ApplyTOPUPInputSpec(FSLCommandInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, desc="name of file with images", argstr="--imain=%s", sep=",", ) encoding_file = File( exists=True, mandatory=True, desc="name of text file with PE 
directions/times", argstr="--datain=%s", ) in_index = traits.List( traits.Int, argstr="--inindex=%s", sep=",", desc="comma separated list of indices corresponding to --datain", ) in_topup_fieldcoef = File( exists=True, argstr="--topup=%s", copyfile=False, requires=["in_topup_movpar"], desc=("topup file containing the field " "coefficients"), ) in_topup_movpar = File( exists=True, requires=["in_topup_fieldcoef"], copyfile=False, desc="topup movpar.txt file", ) out_corrected = File( desc="output (warped) image", name_source=["in_files"], name_template="%s_corrected", argstr="--out=%s", ) method = traits.Enum( "jac", "lsr", argstr="--method=%s", desc=("use jacobian modulation (jac) or least-squares" " resampling (lsr)"), ) interp = traits.Enum( "trilinear", "spline", argstr="--interp=%s", desc="interpolation method" ) datatype = traits.Enum( "char", "short", "int", "float", "double", argstr="-d=%s", desc="force output data type", ) class ApplyTOPUPOutputSpec(TraitedSpec): out_corrected = File( exists=True, desc=("name of 4D image file with " "unwarped images") ) class ApplyTOPUP(FSLCommand): """ Interface for FSL topup, a tool for estimating and correcting susceptibility induced distortions. `General reference `_ and `use example `_. 
Examples -------- >>> from nipype.interfaces.fsl import ApplyTOPUP >>> applytopup = ApplyTOPUP() >>> applytopup.inputs.in_files = ["epi.nii", "epi_rev.nii"] >>> applytopup.inputs.encoding_file = "topup_encoding.txt" >>> applytopup.inputs.in_topup_fieldcoef = "topup_fieldcoef.nii.gz" >>> applytopup.inputs.in_topup_movpar = "topup_movpar.txt" >>> applytopup.inputs.output_type = "NIFTI_GZ" >>> applytopup.cmdline # doctest: +ELLIPSIS 'applytopup --datain=topup_encoding.txt --imain=epi.nii,epi_rev.nii \ --inindex=1,2 --topup=topup --out=epi_corrected.nii.gz' >>> res = applytopup.run() # doctest: +SKIP """ _cmd = "applytopup" input_spec = ApplyTOPUPInputSpec output_spec = ApplyTOPUPOutputSpec def _parse_inputs(self, skip=None): if skip is None: skip = [] # If not defined, assume index are the first N entries in the # parameters file, for N input images. if not isdefined(self.inputs.in_index): self.inputs.in_index = list(range(1, len(self.inputs.in_files) + 1)) return super(ApplyTOPUP, self)._parse_inputs(skip=skip) def _format_arg(self, name, spec, value): if name == "in_topup_fieldcoef": return spec.argstr % value.split("_fieldcoef")[0] return super(ApplyTOPUP, self)._format_arg(name, spec, value) class EddyInputSpec(FSLCommandInputSpec): in_file = File( exists=True, mandatory=True, argstr="--imain=%s", desc="File containing all the images to estimate distortions for", ) in_mask = File( exists=True, mandatory=True, argstr="--mask=%s", desc="Mask to indicate brain" ) in_index = File( exists=True, mandatory=True, argstr="--index=%s", desc="File containing indices for all volumes in --imain " "into --acqp and --topup", ) in_acqp = File( exists=True, mandatory=True, argstr="--acqp=%s", desc="File containing acquisition parameters", ) in_bvec = File( exists=True, mandatory=True, argstr="--bvecs=%s", desc="File containing the b-vectors for all volumes in --imain", ) in_bval = File( exists=True, mandatory=True, argstr="--bvals=%s", desc="File containing the b-values for all 
volumes in --imain", ) out_base = traits.Str( default_value="eddy_corrected", usedefault=True, argstr="--out=%s", desc="Basename for output image", ) session = File( exists=True, argstr="--session=%s", desc="File containing session indices for all volumes in --imain", ) in_topup_fieldcoef = File( exists=True, argstr="--topup=%s", requires=["in_topup_movpar"], desc="Topup results file containing the field coefficients", ) in_topup_movpar = File( exists=True, requires=["in_topup_fieldcoef"], desc="Topup results file containing the movement parameters (movpar.txt)", ) field = File( exists=True, argstr="--field=%s", desc="Non-topup derived fieldmap scaled in Hz" ) field_mat = File( exists=True, argstr="--field_mat=%s", desc="Matrix specifying the relative positions of the fieldmap, " "--field, and the first volume of the input file, --imain", ) flm = traits.Enum( "quadratic", "linear", "cubic", usedefault=True, argstr="--flm=%s", desc="First level EC model", ) slm = traits.Enum( "none", "linear", "quadratic", usedefault=True, argstr="--slm=%s", desc="Second level EC model", ) fep = traits.Bool( False, argstr="--fep", desc="Fill empty planes in x- or y-directions" ) initrand = traits.Bool( False, argstr="--initrand", desc="Resets rand for when selecting voxels", min_ver="5.0.10", ) interp = traits.Enum( "spline", "trilinear", usedefault=True, argstr="--interp=%s", desc="Interpolation model for estimation step", ) nvoxhp = traits.Int( default_value=1000, usedefault=True, argstr="--nvoxhp=%s", desc="# of voxels used to estimate the hyperparameters", ) fudge_factor = traits.Float( default_value=10.0, usedefault=True, argstr="--ff=%s", desc="Fudge factor for hyperparameter error variance", ) dont_sep_offs_move = traits.Bool( False, argstr="--dont_sep_offs_move", desc="Do NOT attempt to separate field offset from subject movement", ) dont_peas = traits.Bool( False, argstr="--dont_peas", desc="Do NOT perform a post-eddy alignment of shells", ) fwhm = traits.Float( desc="FWHM 
for conditioning filter when estimating the parameters", argstr="--fwhm=%s", ) niter = traits.Int( 5, usedefault=True, argstr="--niter=%s", desc="Number of iterations" ) method = traits.Enum( "jac", "lsr", usedefault=True, argstr="--resamp=%s", desc="Final resampling method (jacobian/least squares)", ) repol = traits.Bool( False, argstr="--repol", desc="Detect and replace outlier slices" ) outlier_nstd = traits.Int( argstr="--ol_nstd", desc="Number of std off to qualify as outlier", requires=["repol"], min_ver="5.0.10", ) outlier_nvox = traits.Int( argstr="--ol_nvox", desc="Min # of voxels in a slice for inclusion in outlier detection", requires=["repol"], min_ver="5.0.10", ) outlier_type = traits.Enum( "sw", "gw", "both", argstr="--ol_type", desc="Type of outliers, slicewise (sw), groupwise (gw) or both (both)", requires=["repol"], min_ver="5.0.10", ) outlier_pos = traits.Bool( False, argstr="--ol_pos", desc="Consider both positive and negative outliers if set", requires=["repol"], min_ver="5.0.10", ) outlier_sqr = traits.Bool( False, argstr="--ol_sqr", desc="Consider outliers among sums-of-squared differences if set", requires=["repol"], min_ver="5.0.10", ) multiband_factor = traits.Int( argstr="--mb=%s", desc="Multi-band factor", min_ver="5.0.10" ) multiband_offset = traits.Enum( 0, 1, -1, argstr="--mb_offs=%d", desc="Multi-band offset (-1 if bottom slice removed, 1 if top slice removed", requires=["multiband_factor"], min_ver="5.0.10", ) mporder = traits.Int( argstr="--mporder=%s", desc="Order of slice-to-vol movement model", requires=["use_cuda"], min_ver="5.0.11", ) slice2vol_niter = traits.Int( argstr="--s2v_niter=%d", desc="Number of iterations for slice-to-vol", requires=["mporder"], min_ver="5.0.11", ) slice2vol_lambda = traits.Int( argstr="--s2v_lambda=%d", desc="Regularisation weight for slice-to-vol movement (reasonable range 1-10)", requires=["mporder"], min_ver="5.0.11", ) slice2vol_interp = traits.Enum( "trilinear", "spline", 
argstr="--s2v_interp=%s", desc="Slice-to-vol interpolation model for estimation step", requires=["mporder"], min_ver="5.0.11", ) slice_order = traits.File( exists=True, argstr="--slspec=%s", desc="Name of text file completely specifying slice/group acquisition", requires=["mporder"], xor=["json"], min_ver="5.0.11", ) json = traits.File( exists=True, argstr="--json=%s", desc="Name of .json text file with information about slice timing", requires=["mporder"], xor=["slice_order"], min_ver="6.0.1", ) estimate_move_by_susceptibility = traits.Bool( False, argstr="--estimate_move_by_susceptibility", desc="Estimate how susceptibility field changes with subject movement", min_ver="6.0.1", ) mbs_niter = traits.Int( argstr="--mbs_niter=%s", desc="Number of iterations for MBS estimation", requires=["estimate_move_by_susceptibility"], min_ver="6.0.1", ) mbs_lambda = traits.Int( argstr="--mbs_lambda=%s", desc="Weighting of regularisation for MBS estimation", requires=["estimate_move_by_susceptibility"], min_ver="6.0.1", ) mbs_ksp = traits.Int( argstr="--mbs_ksp=%smm", desc="Knot-spacing for MBS field estimation", requires=["estimate_move_by_susceptibility"], min_ver="6.0.1", ) num_threads = traits.Int( 1, usedefault=True, nohash=True, desc="Number of openmp threads to use" ) is_shelled = traits.Bool( False, argstr="--data_is_shelled", desc="Override internal check to ensure that date are acquired " "on a set of b-value shells", ) use_cuda = traits.Bool(False, desc="Run eddy using cuda gpu") cnr_maps = traits.Bool( False, desc="Output CNR-Maps", argstr="--cnr_maps", min_ver="5.0.10" ) residuals = traits.Bool( False, desc="Output Residuals", argstr="--residuals", min_ver="5.0.10" ) class EddyOutputSpec(TraitedSpec): out_corrected = File( exists=True, desc="4D image file containing all the corrected volumes" ) out_parameter = File( exists=True, desc="Text file with parameters defining the field and movement for each scan", ) out_rotated_bvecs = File( exists=True, desc="File 
containing rotated b-values for all volumes" ) out_movement_rms = File( exists=True, desc="Summary of the 'total movement' in each volume" ) out_restricted_movement_rms = File( exists=True, desc="Summary of the 'total movement' in each volume " "disregarding translation in the PE direction", ) out_shell_alignment_parameters = File( exists=True, desc="Text file containing rigid body movement parameters " "between the different shells as estimated by a " "post-hoc mutual information based registration", ) out_shell_pe_translation_parameters = File( exists=True, desc="Text file containing translation along the PE-direction " "between the different shells as estimated by a " "post-hoc mutual information based registration", ) out_shell_pe_translation_parameters = File( exists=True, desc="Text file containing translation along the PE-direction " "between the different shells as estimated by a " "post-hoc mutual information based registration", ) out_outlier_map = File( exists=True, desc="Matrix where rows represent volumes and columns represent " 'slices. "0" indicates that scan-slice is not an outlier ' 'and "1" indicates that it is', ) out_outlier_n_stdev_map = File( exists=True, desc="Matrix where rows represent volumes and columns represent " "slices. Values indicate number of standard deviations off the " "mean difference between observation and prediction is", ) out_outlier_n_sqr_stdev_map = File( exists=True, desc="Matrix where rows represent volumes and columns represent " "slices. 
Values indicate number of standard deivations off the " "square root of the mean squared difference between observation " "and prediction is", ) out_outlier_report = File( exists=True, desc="Text file with a plain language report on what " "outlier slices eddy has found", ) out_outlier_free = File( exists=True, desc="4D image file not corrected for susceptibility or eddy-" "current distortions or subject movement but with outlier " "slices replaced", ) out_movement_over_time = File( exists=True, desc="Text file containing translations (mm) and rotations " "(radians) for each excitation", ) out_cnr_maps = File(exists=True, desc="path/name of file with the cnr_maps") out_residuals = File(exists=True, desc="path/name of file with the residuals") class Eddy(FSLCommand): """ Interface for FSL eddy, a tool for estimating and correcting eddy currents induced distortions. `User guide `__ and `more info regarding acqp file `_. Examples -------- >>> from nipype.interfaces.fsl import Eddy Running eddy on a CPU using OpenMP: >>> eddy = Eddy() >>> eddy.inputs.in_file = 'epi.nii' >>> eddy.inputs.in_mask = 'epi_mask.nii' >>> eddy.inputs.in_index = 'epi_index.txt' >>> eddy.inputs.in_acqp = 'epi_acqp.txt' >>> eddy.inputs.in_bvec = 'bvecs.scheme' >>> eddy.inputs.in_bval = 'bvals.scheme' >>> eddy.cmdline # doctest: +ELLIPSIS 'eddy_openmp --flm=quadratic --ff=10.0 \ --acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme \ --imain=epi.nii --index=epi_index.txt --mask=epi_mask.nii \ --interp=spline --resamp=jac --niter=5 --nvoxhp=1000 \ --out=.../eddy_corrected --slm=none' Running eddy on an Nvidia GPU using cuda: >>> eddy.inputs.use_cuda = True >>> eddy.cmdline # doctest: +ELLIPSIS 'eddy_cuda --flm=quadratic --ff=10.0 \ --acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme \ --imain=epi.nii --index=epi_index.txt --mask=epi_mask.nii \ --interp=spline --resamp=jac --niter=5 --nvoxhp=1000 \ --out=.../eddy_corrected --slm=none' Running eddy with slice-to-volume motion 
correction: >>> eddy.inputs.mporder = 6 >>> eddy.inputs.slice2vol_niter = 5 >>> eddy.inputs.slice2vol_lambda = 1 >>> eddy.inputs.slice2vol_interp = 'trilinear' >>> eddy.inputs.slice_order = 'epi_slspec.txt' >>> eddy.cmdline # doctest: +ELLIPSIS 'eddy_cuda --flm=quadratic --ff=10.0 \ --acqp=epi_acqp.txt --bvals=bvals.scheme --bvecs=bvecs.scheme \ --imain=epi.nii --index=epi_index.txt --mask=epi_mask.nii \ --interp=spline --resamp=jac --mporder=6 --niter=5 --nvoxhp=1000 \ --out=.../eddy_corrected --s2v_interp=trilinear --s2v_lambda=1 \ --s2v_niter=5 --slspec=epi_slspec.txt --slm=none' >>> res = eddy.run() # doctest: +SKIP """ _cmd = "eddy_openmp" input_spec = EddyInputSpec output_spec = EddyOutputSpec _num_threads = 1 def __init__(self, **inputs): super(Eddy, self).__init__(**inputs) self.inputs.on_trait_change(self._num_threads_update, "num_threads") if not isdefined(self.inputs.num_threads): self.inputs.num_threads = self._num_threads else: self._num_threads_update() self.inputs.on_trait_change(self._use_cuda, "use_cuda") if isdefined(self.inputs.use_cuda): self._use_cuda() def _num_threads_update(self): self._num_threads = self.inputs.num_threads if not isdefined(self.inputs.num_threads): if "OMP_NUM_THREADS" in self.inputs.environ: del self.inputs.environ["OMP_NUM_THREADS"] else: self.inputs.environ["OMP_NUM_THREADS"] = str(self.inputs.num_threads) def _use_cuda(self): self._cmd = "eddy_cuda" if self.inputs.use_cuda else "eddy_openmp" def _run_interface(self, runtime): # If 'eddy_openmp' is missing, use 'eddy' FSLDIR = os.getenv("FSLDIR", "") cmd = self._cmd if all( ( FSLDIR != "", cmd == "eddy_openmp", not os.path.exists(os.path.join(FSLDIR, "bin", cmd)), ) ): self._cmd = "eddy" runtime = super(Eddy, self)._run_interface(runtime) # Restore command to avoid side-effects self._cmd = cmd return runtime def _format_arg(self, name, spec, value): if name == "in_topup_fieldcoef": return spec.argstr % value.split("_fieldcoef")[0] if name == "field": return spec.argstr % 
fname_presuffix(value, use_ext=False) if name == "out_base": return spec.argstr % os.path.abspath(value) return super(Eddy, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() outputs["out_corrected"] = os.path.abspath("%s.nii.gz" % self.inputs.out_base) outputs["out_parameter"] = os.path.abspath( "%s.eddy_parameters" % self.inputs.out_base ) # File generation might depend on the version of EDDY out_rotated_bvecs = os.path.abspath( "%s.eddy_rotated_bvecs" % self.inputs.out_base ) out_movement_rms = os.path.abspath( "%s.eddy_movement_rms" % self.inputs.out_base ) out_restricted_movement_rms = os.path.abspath( "%s.eddy_restricted_movement_rms" % self.inputs.out_base ) out_shell_alignment_parameters = os.path.abspath( "%s.eddy_post_eddy_shell_alignment_parameters" % self.inputs.out_base ) out_shell_pe_translation_parameters = os.path.abspath( "%s.eddy_post_eddy_shell_PE_translation_parameters" % self.inputs.out_base ) out_outlier_map = os.path.abspath("%s.eddy_outlier_map" % self.inputs.out_base) out_outlier_n_stdev_map = os.path.abspath( "%s.eddy_outlier_n_stdev_map" % self.inputs.out_base ) out_outlier_n_sqr_stdev_map = os.path.abspath( "%s.eddy_outlier_n_sqr_stdev_map" % self.inputs.out_base ) out_outlier_report = os.path.abspath( "%s.eddy_outlier_report" % self.inputs.out_base ) if isdefined(self.inputs.repol) and self.inputs.repol: out_outlier_free = os.path.abspath( "%s.eddy_outlier_free_data" % self.inputs.out_base ) if os.path.exists(out_outlier_free): outputs["out_outlier_free"] = out_outlier_free if isdefined(self.inputs.mporder) and self.inputs.mporder > 0: out_movement_over_time = os.path.abspath( "%s.eddy_movement_over_time" % self.inputs.out_base ) if os.path.exists(out_movement_over_time): outputs["out_movement_over_time"] = out_movement_over_time if isdefined(self.inputs.cnr_maps) and self.inputs.cnr_maps: out_cnr_maps = os.path.abspath( "%s.eddy_cnr_maps.nii.gz" % self.inputs.out_base ) if 
os.path.exists(out_cnr_maps): outputs["out_cnr_maps"] = out_cnr_maps if isdefined(self.inputs.residuals) and self.inputs.residuals: out_residuals = os.path.abspath( "%s.eddy_residuals.nii.gz" % self.inputs.out_base ) if os.path.exists(out_residuals): outputs["out_residuals"] = out_residuals if os.path.exists(out_rotated_bvecs): outputs["out_rotated_bvecs"] = out_rotated_bvecs if os.path.exists(out_movement_rms): outputs["out_movement_rms"] = out_movement_rms if os.path.exists(out_restricted_movement_rms): outputs["out_restricted_movement_rms"] = out_restricted_movement_rms if os.path.exists(out_shell_alignment_parameters): outputs["out_shell_alignment_parameters"] = out_shell_alignment_parameters if os.path.exists(out_shell_pe_translation_parameters): outputs[ "out_shell_pe_translation_parameters" ] = out_shell_pe_translation_parameters if os.path.exists(out_outlier_map): outputs["out_outlier_map"] = out_outlier_map if os.path.exists(out_outlier_n_stdev_map): outputs["out_outlier_n_stdev_map"] = out_outlier_n_stdev_map if os.path.exists(out_outlier_n_sqr_stdev_map): outputs["out_outlier_n_sqr_stdev_map"] = out_outlier_n_sqr_stdev_map if os.path.exists(out_outlier_report): outputs["out_outlier_report"] = out_outlier_report return outputs class SigLossInputSpec(FSLCommandInputSpec): in_file = File(mandatory=True, exists=True, argstr="-i %s", desc="b0 fieldmap file") out_file = File( argstr="-s %s", desc="output signal loss estimate file", genfile=True ) mask_file = File(exists=True, argstr="-m %s", desc="brain mask file") echo_time = traits.Float(argstr="--te=%f", desc="echo time in seconds") slice_direction = traits.Enum( "x", "y", "z", argstr="-d %s", desc="slicing direction" ) class SigLossOuputSpec(TraitedSpec): out_file = File(exists=True, desc="signal loss estimate file") class SigLoss(FSLCommand): """ Estimates signal loss from a field map (in rad/s) Examples -------- >>> from nipype.interfaces.fsl import SigLoss >>> sigloss = SigLoss() >>> 
sigloss.inputs.in_file = "phase.nii" >>> sigloss.inputs.echo_time = 0.03 >>> sigloss.inputs.output_type = "NIFTI_GZ" >>> sigloss.cmdline # doctest: +ELLIPSIS 'sigloss --te=0.030000 -i phase.nii -s .../phase_sigloss.nii.gz' >>> res = sigloss.run() # doctest: +SKIP """ input_spec = SigLossInputSpec output_spec = SigLossOuputSpec _cmd = "sigloss" def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = self.inputs.out_file if (not isdefined(outputs["out_file"])) and (isdefined(self.inputs.in_file)): outputs["out_file"] = self._gen_fname( self.inputs.in_file, suffix="_sigloss" ) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()["out_file"] return None class EpiRegInputSpec(FSLCommandInputSpec): epi = File( exists=True, argstr="--epi=%s", mandatory=True, position=-4, desc="EPI image" ) t1_head = File( exists=True, argstr="--t1=%s", mandatory=True, position=-3, desc="wholehead T1 image", ) t1_brain = File( exists=True, argstr="--t1brain=%s", mandatory=True, position=-2, desc="brain extracted T1 image", ) out_base = traits.String( "epi2struct", desc="output base name", argstr="--out=%s", position=-1, usedefault=True, ) fmap = File(exists=True, argstr="--fmap=%s", desc="fieldmap image (in rad/s)") fmapmag = File( exists=True, argstr="--fmapmag=%s", desc="fieldmap magnitude image - wholehead" ) fmapmagbrain = File( exists=True, argstr="--fmapmagbrain=%s", desc="fieldmap magnitude image - brain extracted", ) wmseg = File( exists=True, argstr="--wmseg=%s", desc="white matter segmentation of T1 image, has to be named \ like the t1brain and end on _wmseg", ) echospacing = traits.Float( argstr="--echospacing=%f", desc="Effective EPI echo spacing \ (sometimes called dwell time) - in seconds", ) pedir = traits.Enum( "x", "y", "z", "-x", "-y", "-z", argstr="--pedir=%s", desc="phase encoding direction, dir = x/y/z/-x/-y/-z", ) weight_image = File( exists=True, argstr="--weight=%s", desc="weighting image (in T1 
space)" ) no_fmapreg = traits.Bool( False, argstr="--nofmapreg", desc="do not perform registration of fmap to T1 \ (use if fmap already registered)", ) no_clean = traits.Bool( True, argstr="--noclean", usedefault=True, desc="do not clean up intermediate files", ) class EpiRegOutputSpec(TraitedSpec): out_file = File(exists=True, desc="unwarped and coregistered epi input") out_1vol = File(exists=True, desc="unwarped and coregistered single volume") fmap2str_mat = File(exists=True, desc="rigid fieldmap-to-structural transform") fmap2epi_mat = File(exists=True, desc="rigid fieldmap-to-epi transform") fmap_epi = File(exists=True, desc="fieldmap in epi space") fmap_str = File(exists=True, desc="fieldmap in structural space") fmapmag_str = File(exists=True, desc="fieldmap magnitude image in structural space") epi2str_inv = File(exists=True, desc="rigid structural-to-epi transform") epi2str_mat = File(exists=True, desc="rigid epi-to-structural transform") shiftmap = File(exists=True, desc="shiftmap in epi space") fullwarp = File( exists=True, desc="warpfield to unwarp epi and transform into \ structural space", ) wmseg = File(exists=True, desc="white matter segmentation used in flirt bbr") seg = File(exists=True, desc="white matter, gray matter, csf segmentation") wmedge = File(exists=True, desc="white matter edges for visualization") class EpiReg(FSLCommand): """ Runs FSL epi_reg script for simultaneous coregistration and fieldmap unwarping. 
Examples -------- >>> from nipype.interfaces.fsl import EpiReg >>> epireg = EpiReg() >>> epireg.inputs.epi='epi.nii' >>> epireg.inputs.t1_head='T1.nii' >>> epireg.inputs.t1_brain='T1_brain.nii' >>> epireg.inputs.out_base='epi2struct' >>> epireg.inputs.fmap='fieldmap_phase_fslprepared.nii' >>> epireg.inputs.fmapmag='fieldmap_mag.nii' >>> epireg.inputs.fmapmagbrain='fieldmap_mag_brain.nii' >>> epireg.inputs.echospacing=0.00067 >>> epireg.inputs.pedir='y' >>> epireg.cmdline # doctest: +ELLIPSIS 'epi_reg --echospacing=0.000670 --fmap=fieldmap_phase_fslprepared.nii \ --fmapmag=fieldmap_mag.nii --fmapmagbrain=fieldmap_mag_brain.nii --noclean \ --pedir=y --epi=epi.nii --t1=T1.nii --t1brain=T1_brain.nii --out=epi2struct' >>> epireg.run() # doctest: +SKIP """ _cmd = "epi_reg" input_spec = EpiRegInputSpec output_spec = EpiRegOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.join( os.getcwd(), self.inputs.out_base + ".nii.gz" ) if not ( isdefined(self.inputs.no_fmapreg) and self.inputs.no_fmapreg ) and isdefined(self.inputs.fmap): outputs["out_1vol"] = os.path.join( os.getcwd(), self.inputs.out_base + "_1vol.nii.gz" ) outputs["fmap2str_mat"] = os.path.join( os.getcwd(), self.inputs.out_base + "_fieldmap2str.mat" ) outputs["fmap2epi_mat"] = os.path.join( os.getcwd(), self.inputs.out_base + "_fieldmaprads2epi.mat" ) outputs["fmap_epi"] = os.path.join( os.getcwd(), self.inputs.out_base + "_fieldmaprads2epi.nii.gz" ) outputs["fmap_str"] = os.path.join( os.getcwd(), self.inputs.out_base + "_fieldmaprads2str.nii.gz" ) outputs["fmapmag_str"] = os.path.join( os.getcwd(), self.inputs.out_base + "_fieldmap2str.nii.gz" ) outputs["shiftmap"] = os.path.join( os.getcwd(), self.inputs.out_base + "_fieldmaprads2epi_shift.nii.gz" ) outputs["fullwarp"] = os.path.join( os.getcwd(), self.inputs.out_base + "_warp.nii.gz" ) outputs["epi2str_inv"] = os.path.join( os.getcwd(), self.inputs.out_base + "_inv.mat" ) if not isdefined(self.inputs.wmseg): 
outputs["wmedge"] = os.path.join( os.getcwd(), self.inputs.out_base + "_fast_wmedge.nii.gz" ) outputs["wmseg"] = os.path.join( os.getcwd(), self.inputs.out_base + "_fast_wmseg.nii.gz" ) outputs["seg"] = os.path.join( os.getcwd(), self.inputs.out_base + "_fast_seg.nii.gz" ) outputs["epi2str_mat"] = os.path.join( os.getcwd(), self.inputs.out_base + ".mat" ) return outputs ####################################### # deprecated interfaces ####################################### class EPIDeWarpInputSpec(FSLCommandInputSpec): mag_file = File( exists=True, desc="Magnitude file", argstr="--mag %s", position=0, mandatory=True, ) dph_file = File( exists=True, desc="Phase file assumed to be scaled from 0 to 4095", argstr="--dph %s", mandatory=True, ) exf_file = File( exists=True, desc="example func volume (or use epi)", argstr="--exf %s" ) epi_file = File(exists=True, desc="EPI volume to unwarp", argstr="--epi %s") tediff = traits.Float( 2.46, usedefault=True, desc="difference in B0 field map TEs", argstr="--tediff %s", ) esp = traits.Float( 0.58, desc="EPI echo spacing", argstr="--esp %s", usedefault=True ) sigma = traits.Int( 2, usedefault=True, argstr="--sigma %s", desc="2D spatial gaussing smoothing \ stdev (default = 2mm)", ) vsm = traits.String(genfile=True, desc="voxel shift map", argstr="--vsm %s") exfdw = traits.String( desc="dewarped example func volume", genfile=True, argstr="--exfdw %s" ) epidw = traits.String( desc="dewarped epi volume", genfile=False, argstr="--epidw %s" ) tmpdir = traits.String(genfile=True, desc="tmpdir", argstr="--tmpdir %s") nocleanup = traits.Bool( True, usedefault=True, desc="no cleanup", argstr="--nocleanup" ) cleanup = traits.Bool(desc="cleanup", argstr="--cleanup") class EPIDeWarpOutputSpec(TraitedSpec): unwarped_file = File(desc="unwarped epi file") vsm_file = File(desc="voxel shift map") exfdw = File(desc="dewarped functional volume example") exf_mask = File(desc="Mask from example functional volume") class EPIDeWarp(FSLCommand): """ 
Wraps the unwarping script `epidewarp.fsl `_. .. warning:: deprecated in FSL, please use :func:`niflow.nipype1.workflows.dmri.preprocess.epi.sdc_fmb` instead. Examples -------- >>> from nipype.interfaces.fsl import EPIDeWarp >>> dewarp = EPIDeWarp() >>> dewarp.inputs.epi_file = "functional.nii" >>> dewarp.inputs.mag_file = "magnitude.nii" >>> dewarp.inputs.dph_file = "phase.nii" >>> dewarp.inputs.output_type = "NIFTI_GZ" >>> dewarp.cmdline # doctest: +ELLIPSIS 'epidewarp.fsl --mag magnitude.nii --dph phase.nii --epi functional.nii \ --esp 0.58 --exfdw .../exfdw.nii.gz --nocleanup --sigma 2 --tediff 2.46 \ --tmpdir .../temp --vsm .../vsm.nii.gz' >>> res = dewarp.run() # doctest: +SKIP """ _cmd = "epidewarp.fsl" input_spec = EPIDeWarpInputSpec output_spec = EPIDeWarpOutputSpec def __init__(self, **inputs): warnings.warn( ( "Deprecated: Please use " "niflow.nipype1.workflows.dmri.preprocess.epi.sdc_fmb instead" ), DeprecationWarning, ) return super(EPIDeWarp, self).__init__(**inputs) def _run_interface(self, runtime): runtime = super(EPIDeWarp, self)._run_interface(runtime) if runtime.stderr: self.raise_exception(runtime) return runtime def _gen_filename(self, name): if name == "exfdw": if isdefined(self.inputs.exf_file): return self._gen_fname(self.inputs.exf_file, suffix="_exfdw") else: return self._gen_fname("exfdw") if name == "epidw": if isdefined(self.inputs.epi_file): return self._gen_fname(self.inputs.epi_file, suffix="_epidw") if name == "vsm": return self._gen_fname("vsm") if name == "tmpdir": return os.path.join(os.getcwd(), "temp") return None def _list_outputs(self): outputs = self.output_spec().get() if not isdefined(self.inputs.exfdw): outputs["exfdw"] = self._gen_filename("exfdw") else: outputs["exfdw"] = self.inputs.exfdw if isdefined(self.inputs.epi_file): if isdefined(self.inputs.epidw): outputs["unwarped_file"] = self.inputs.epidw else: outputs["unwarped_file"] = self._gen_filename("epidw") if not isdefined(self.inputs.vsm): outputs["vsm_file"] = 
self._gen_filename("vsm") else: outputs["vsm_file"] = self._gen_fname(self.inputs.vsm) if not isdefined(self.inputs.tmpdir): outputs["exf_mask"] = self._gen_fname( cwd=self._gen_filename("tmpdir"), basename="maskexf" ) else: outputs["exf_mask"] = self._gen_fname( cwd=self.inputs.tmpdir, basename="maskexf" ) return outputs class EddyCorrectInputSpec(FSLCommandInputSpec): in_file = File( exists=True, desc="4D input file", argstr="%s", position=0, mandatory=True ) out_file = File( desc="4D output file", argstr="%s", position=1, name_source=["in_file"], name_template="%s_edc", output_name="eddy_corrected", ) ref_num = traits.Int( 0, argstr="%d", position=2, desc="reference number", mandatory=True, usedefault=True, ) class EddyCorrectOutputSpec(TraitedSpec): eddy_corrected = File( exists=True, desc="path/name of 4D eddy corrected output file" ) class EddyCorrect(FSLCommand): """ .. warning:: Deprecated in FSL. Please use :class:`nipype.interfaces.fsl.epi.Eddy` instead Example ------- >>> from nipype.interfaces.fsl import EddyCorrect >>> eddyc = EddyCorrect(in_file='diffusion.nii', ... 
out_file="diffusion_edc.nii", ref_num=0) >>> eddyc.cmdline 'eddy_correct diffusion.nii diffusion_edc.nii 0' """ _cmd = "eddy_correct" input_spec = EddyCorrectInputSpec output_spec = EddyCorrectOutputSpec def __init__(self, **inputs): warnings.warn( ("Deprecated: Please use nipype.interfaces.fsl.epi.Eddy " "instead"), DeprecationWarning, ) return super(EddyCorrect, self).__init__(**inputs) def _run_interface(self, runtime): runtime = super(EddyCorrect, self)._run_interface(runtime) if runtime.stderr: self.raise_exception(runtime) return runtime class EddyQuadInputSpec(FSLCommandInputSpec): base_name = traits.Str( "eddy_corrected", usedefault=True, argstr="%s", desc=( "Basename (including path) for EDDY output files, i.e., " "corrected images and QC files" ), position=0, ) idx_file = File( exists=True, mandatory=True, argstr="--eddyIdx %s", desc=("File containing indices for all volumes into acquisition " "parameters"), ) param_file = File( exists=True, mandatory=True, argstr="--eddyParams %s", desc="File containing acquisition parameters", ) mask_file = File( exists=True, mandatory=True, argstr="--mask %s", desc="Binary mask file" ) bval_file = File( exists=True, mandatory=True, argstr="--bvals %s", desc="b-values file" ) bvec_file = File( exists=True, argstr="--bvecs %s", desc=( "b-vectors file - only used when .eddy_residuals " "file is present" ), ) output_dir = traits.Str( name_template="%s.qc", name_source=["base_name"], argstr="--output-dir %s", desc="Output directory - default = '.qc'", ) field = File(exists=True, argstr="--field %s", desc="TOPUP estimated field (in Hz)") slice_spec = File( exists=True, argstr="--slspec %s", desc="Text file specifying slice/group acquisition", ) verbose = traits.Bool(argstr="--verbose", desc="Display debug messages") class EddyQuadOutputSpec(TraitedSpec): qc_json = File( exists=True, desc=("Single subject database containing quality metrics and data " "info."), ) qc_pdf = File(exists=True, desc="Single subject QC report.") 
avg_b_png = traits.List( File(exists=True), desc=( "Image showing mid-sagittal, -coronal and -axial slices of " "each averaged b-shell volume." ), ) avg_b0_pe_png = traits.List( File(exists=True), desc=( "Image showing mid-sagittal, -coronal and -axial slices of " "each averaged pe-direction b0 volume. Generated when using " "the -f option." ), ) cnr_png = traits.List( File(exists=True), desc=( "Image showing mid-sagittal, -coronal and -axial slices of " "each b-shell CNR volume. Generated when CNR maps are " "available." ), ) vdm_png = File( exists=True, desc=( "Image showing mid-sagittal, -coronal and -axial slices of " "the voxel displacement map. Generated when using the -f " "option." ), ) residuals = File( exists=True, desc=( "Text file containing the volume-wise mask-averaged squared " "residuals. Generated when residual maps are available." ), ) clean_volumes = File( exists=True, desc=( "Text file containing a list of clean volumes, based on " "the eddy squared residuals. To generate a version of the " "pre-processed dataset without outlier volumes, use: " "`fslselectvols -i -o " "eddy_corrected_data_clean --vols=vols_no_outliers.txt`" ), ) class EddyQuad(FSLCommand): """ Interface for FSL eddy_quad, a tool for generating single subject reports and storing the quality assessment indices for each subject. 
`User guide `__ Examples -------- >>> from nipype.interfaces.fsl import EddyQuad >>> quad = EddyQuad() >>> quad.inputs.base_name = 'eddy_corrected' >>> quad.inputs.idx_file = 'epi_index.txt' >>> quad.inputs.param_file = 'epi_acqp.txt' >>> quad.inputs.mask_file = 'epi_mask.nii' >>> quad.inputs.bval_file = 'bvals.scheme' >>> quad.inputs.bvec_file = 'bvecs.scheme' >>> quad.inputs.output_dir = 'eddy_corrected.qc' >>> quad.inputs.field = 'fieldmap_phase_fslprepared.nii' >>> quad.inputs.verbose = True >>> quad.cmdline 'eddy_quad eddy_corrected --bvals bvals.scheme --bvecs bvecs.scheme \ --field fieldmap_phase_fslprepared.nii --eddyIdx epi_index.txt \ --mask epi_mask.nii --output-dir eddy_corrected.qc --eddyParams epi_acqp.txt \ --verbose' >>> res = quad.run() # doctest: +SKIP """ _cmd = "eddy_quad" input_spec = EddyQuadInputSpec output_spec = EddyQuadOutputSpec def _list_outputs(self): from glob import glob outputs = self.output_spec().get() # If the output directory isn't defined, the interface seems to use # the default but not set its value in `self.inputs.output_dir` if not isdefined(self.inputs.output_dir): out_dir = os.path.abspath(os.path.basename(self.inputs.base_name) + ".qc") else: out_dir = os.path.abspath(self.inputs.output_dir) outputs["qc_json"] = os.path.join(out_dir, "qc.json") outputs["qc_pdf"] = os.path.join(out_dir, "qc.pdf") # Grab all b* files here. This will also grab the b0_pe* files # as well, but only if the field input was provided. So we'll remove # them later in the next conditional. outputs["avg_b_png"] = sorted(glob(os.path.join(out_dir, "avg_b*.png"))) if isdefined(self.inputs.field): outputs["avg_b0_pe_png"] = sorted( glob(os.path.join(out_dir, "avg_b0_pe*.png")) ) # The previous glob for `avg_b_png` also grabbed the # `avg_b0_pe_png` files so we have to remove them # from `avg_b_png`. 
for fname in outputs["avg_b0_pe_png"]: outputs["avg_b_png"].remove(fname) outputs["vdm_png"] = os.path.join(out_dir, "vdm.png") outputs["cnr_png"] = sorted(glob(os.path.join(out_dir, "cnr*.png"))) residuals = os.path.join(out_dir, "eddy_msr.txt") if os.path.isfile(residuals): outputs["residuals"] = residuals clean_volumes = os.path.join(out_dir, "vols_no_outliers.txt") if os.path.isfile(clean_volumes): outputs["clean_volumes"] = clean_volumes return outputs nipype-1.7.0/nipype/interfaces/fsl/fix.py000066400000000000000000000275271413403311400204050ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fix module provides classes for interfacing with the `FSL FIX ` command line tools. This was written to work with FSL version v5.0 The following example assumes that melodic has already been run, so the datagrabber is configured to start from there (a list of melodic output directories). If no hand_labels_noise.txt exists already, this will fail and comment on that. 
EXAMPLE: subject_list = ['1', '2', '3'] fix_pipeline = pe.Workflow(name='fix_pipeline') fix_pipeline.base_dir = os.path.abspath('./') info = dict(mel_ica=[['subject_id']]) datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=['mel_ica']), name='datasource') datasource.inputs.base_directory = os.path.abspath('') datasource.inputs.template = '%s/' datasource.inputs.template_args = info datasource.inputs.subject_id = subject_list datasource.inputs.sort_filelist = True datasource.iterables = ('subject_id', subject_list) # create training set by looking into which mel_icas have hand_labels_noise.txt files in them create_training_set = pe.JoinNode(interface=fix.TrainingSetCreator(), joinfield=['mel_icas_in'], joinsource='datasource', name='trainingset_creator') # train the classifier train_node = pe.Node(interface=fix.Training(trained_wts_filestem='foo'), name='train_node') # test accuracy. Probably not necessary, and also failing on my setup because of fix itself (no error msg) accuracy_tester = pe.Node(interface=fix.AccuracyTester(output_directory='more_foo'), name='accuracy_tester') # classify components classify_node = pe.Node(interface=fix.Classifier(), name='classify') # apply cleanup cleaner_node = pe.Node(interface=fix.Cleaner(), name='cleaner') fix_pipeline.connect(datasource, 'mel_ica', create_training_set, 'mel_icas_in') fix_pipeline.connect(create_training_set, 'mel_icas_out', train_node, 'mel_icas') fix_pipeline.connect(train_node, 'trained_wts_file', accuracy_tester, 'trained_wts_file') fix_pipeline.connect(datasource, 'mel_ica', accuracy_tester, 'mel_icas') fix_pipeline.connect(train_node, 'trained_wts_file', classify_node, 'trained_wts_file') fix_pipeline.connect(datasource, 'mel_ica', classify_node, 'mel_ica') fix_pipeline.connect(classify_node, 'artifacts_list_file', cleaner_node, 'artifacts_list_file') fix_pipeline.write_graph() outgraph = fix_pipeline.run() """ from ..base import ( TraitedSpec, CommandLineInputSpec, 
CommandLine, InputMultiPath, OutputMultiPath, BaseInterface, BaseInterfaceInputSpec, traits, Directory, File, isdefined, ) import os class TrainingSetCreatorInputSpec(BaseInterfaceInputSpec): mel_icas_in = InputMultiPath( Directory(exists=True), copyfile=False, desc="Melodic output directories", argstr="%s", position=-1, ) class TrainingSetCreatorOutputSpec(TraitedSpec): mel_icas_out = OutputMultiPath( Directory(exists=True), copyfile=False, desc="Hand labels for noise vs signal", argstr="%s", position=-1, ) class TrainingSetCreator(BaseInterface): """Goes through set of provided melodic output directories, to find all the ones that have a hand_labels_noise.txt file in them. This is outsourced as a separate class, so that the pipeline is rerun everytime a handlabeled file has been changed, or a new one created. """ input_spec = TrainingSetCreatorInputSpec output_spec = TrainingSetCreatorOutputSpec _always_run = True def _run_interface(self, runtime): mel_icas = [] for item in self.inputs.mel_icas_in: if os.path.exists(os.path.join(item, "hand_labels_noise.txt")): mel_icas.append(item) if len(mel_icas) == 0: raise Exception( "%s did not find any hand_labels_noise.txt files in the following directories: %s" % (self.__class__.__name__, mel_icas) ) return runtime def _list_outputs(self): mel_icas = [] for item in self.inputs.mel_icas_in: if os.path.exists(os.path.join(item, "hand_labels_noise.txt")): mel_icas.append(item) outputs = self._outputs().get() outputs["mel_icas_out"] = mel_icas return outputs class FeatureExtractorInputSpec(CommandLineInputSpec): mel_ica = Directory( exists=True, copyfile=False, desc="Melodic output directory or directories", argstr="%s", position=-1, ) class FeatureExtractorOutputSpec(TraitedSpec): mel_ica = Directory( exists=True, copyfile=False, desc="Melodic output directory or directories", argstr="%s", position=-1, ) class FeatureExtractor(CommandLine): """ Extract features (for later training and/or classifying) """ input_spec = 
FeatureExtractorInputSpec output_spec = FeatureExtractorOutputSpec cmd = "fix -f" def _list_outputs(self): outputs = self.output_spec().get() outputs["mel_ica"] = self.inputs.mel_ica return outputs class TrainingInputSpec(CommandLineInputSpec): mel_icas = InputMultiPath( Directory(exists=True), copyfile=False, desc="Melodic output directories", argstr="%s", position=-1, ) trained_wts_filestem = traits.Str( desc="trained-weights filestem, used for trained_wts_file and output directories", argstr="%s", position=1, ) loo = traits.Bool( argstr="-l", desc="full leave-one-out test with classifier training", position=2 ) class TrainingOutputSpec(TraitedSpec): trained_wts_file = File(exists=True, desc="Trained-weights file") class Training(CommandLine): """ Train the classifier based on your own FEAT/MELODIC output directory. """ input_spec = TrainingInputSpec output_spec = TrainingOutputSpec cmd = "fix -t" def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.trained_wts_filestem): outputs["trained_wts_file"] = os.path.abspath( self.inputs.trained_wts_filestem + ".RData" ) else: outputs["trained_wts_file"] = os.path.abspath("trained_wts_file.RData") return outputs class AccuracyTesterInputSpec(CommandLineInputSpec): mel_icas = InputMultiPath( Directory(exists=True), copyfile=False, desc="Melodic output directories", argstr="%s", position=3, mandatory=True, ) trained_wts_file = File( desc="trained-weights file", argstr="%s", position=1, mandatory=True ) output_directory = Directory( desc="Path to folder in which to store the results of the accuracy test.", argstr="%s", position=2, mandatory=True, ) class AccuracyTesterOutputSpec(TraitedSpec): output_directory = Directory( desc="Path to folder in which to store the results of the accuracy test.", argstr="%s", position=1, ) class AccuracyTester(CommandLine): """ Test the accuracy of an existing training dataset on a set of hand-labelled subjects. Note: This may or may not be working. 
Couldn't presently not confirm because fix fails on this (even outside of nipype) without leaving an error msg. """ input_spec = AccuracyTesterInputSpec output_spec = AccuracyTesterOutputSpec cmd = "fix -C" def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.output_directory): outputs["output_directory"] = Directory( exists=False, value=self.inputs.output_directory ) else: outputs["output_directory"] = Directory(exists=False, value="accuracy_test") return outputs class ClassifierInputSpec(CommandLineInputSpec): mel_ica = Directory( exists=True, copyfile=False, desc="Melodic output directory or directories", argstr="%s", position=1, ) trained_wts_file = File( exists=True, desc="trained-weights file", argstr="%s", position=2, mandatory=True, copyfile=False, ) thresh = traits.Int( argstr="%d", desc="Threshold for cleanup.", position=-1, mandatory=True ) artifacts_list_file = File( desc="Text file listing which ICs are artifacts; can be the output from classification or can be created manually" ) class ClassifierOutputSpec(TraitedSpec): artifacts_list_file = File( desc="Text file listing which ICs are artifacts; can be the output from classification or can be created manually" ) class Classifier(CommandLine): """ Classify ICA components using a specific training dataset ( is in the range 0-100, typically 5-20). 
""" input_spec = ClassifierInputSpec output_spec = ClassifierOutputSpec cmd = "fix -c" def _gen_artifacts_list_file(self, mel_ica, thresh): _, trained_wts_file = os.path.split(self.inputs.trained_wts_file) trained_wts_filestem = trained_wts_file.split(".")[0] filestem = "fix4melview_" + trained_wts_filestem + "_thr" fname = os.path.join(mel_ica, filestem + str(thresh) + ".txt") return fname def _list_outputs(self): outputs = self.output_spec().get() outputs["artifacts_list_file"] = self._gen_artifacts_list_file( self.inputs.mel_ica, self.inputs.thresh ) return outputs class CleanerInputSpec(CommandLineInputSpec): artifacts_list_file = File( exists=True, argstr="%s", position=1, mandatory=True, desc="Text file listing which ICs are artifacts; can be the output from classification or can be created manually", ) cleanup_motion = traits.Bool( argstr="-m", desc="cleanup motion confounds, looks for design.fsf for highpass filter cut-off", position=2, ) highpass = traits.Float( 100, argstr="-m -h %f", usedefault=True, desc="cleanup motion confounds", position=2, ) aggressive = traits.Bool( argstr="-A", desc="Apply aggressive (full variance) cleanup, instead of the default less-aggressive (unique variance) cleanup.", position=3, ) confound_file = File( argstr="-x %s", desc="Include additional confound file.", position=4 ) confound_file_1 = File( argstr="-x %s", desc="Include additional confound file.", position=5 ) confound_file_2 = File( argstr="-x %s", desc="Include additional confound file.", position=6 ) class CleanerOutputSpec(TraitedSpec): cleaned_functional_file = File(exists=True, desc="Cleaned session data") class Cleaner(CommandLine): """ Extract features (for later training and/or classifying) """ input_spec = CleanerInputSpec output_spec = CleanerOutputSpec cmd = "fix -a" def _get_cleaned_functional_filename(self, artifacts_list_filename): """extract the proper filename from the first line of the artifacts file""" artifacts_list_file = 
open(artifacts_list_filename, "r") functional_filename, extension = artifacts_list_file.readline().split(".") artifacts_list_file_path, artifacts_list_filename = os.path.split( artifacts_list_filename ) return os.path.join( artifacts_list_file_path, functional_filename + "_clean.nii.gz" ) def _list_outputs(self): outputs = self.output_spec().get() outputs["cleaned_functional_file"] = self._get_cleaned_functional_filename( self.inputs.artifacts_list_file ) return outputs nipype-1.7.0/nipype/interfaces/fsl/maths.py000066400000000000000000000350341413403311400207230ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ The maths module provides higher-level interfaces to some of the operations that can be performed with the fslmaths command-line program. """ import os import numpy as np from ..base import TraitedSpec, File, traits, InputMultiPath, isdefined from .base import FSLCommand, FSLCommandInputSpec class MathsInput(FSLCommandInputSpec): in_file = File( position=2, argstr="%s", exists=True, mandatory=True, desc="image to operate on" ) out_file = File( genfile=True, position=-2, argstr="%s", desc="image to write", hash_files=False ) _dtypes = ["float", "char", "int", "short", "double", "input"] internal_datatype = traits.Enum( *_dtypes, position=1, argstr="-dt %s", desc=("datatype to use for calculations " "(default is float)") ) output_datatype = traits.Enum( *_dtypes, position=-1, argstr="-odt %s", desc=("datatype to use for output (default " "uses input type)") ) nan2zeros = traits.Bool( position=3, argstr="-nan", desc="change NaNs to zeros before doing anything" ) class MathsOutput(TraitedSpec): out_file = File(desc="image written after calculations") class MathsCommand(FSLCommand): _cmd = "fslmaths" input_spec = MathsInput output_spec = MathsOutput _suffix = "_maths" def _list_outputs(self): outputs = self.output_spec().get() 
outputs["out_file"] = self.inputs.out_file if not isdefined(self.inputs.out_file): outputs["out_file"] = self._gen_fname( self.inputs.in_file, suffix=self._suffix ) outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()["out_file"] return None class ChangeDataTypeInput(MathsInput): _dtypes = ["float", "char", "int", "short", "double", "input"] output_datatype = traits.Enum( *_dtypes, position=-1, argstr="-odt %s", mandatory=True, desc="output data type" ) class ChangeDataType(MathsCommand): """Use fslmaths to change the datatype of an image.""" input_spec = ChangeDataTypeInput _suffix = "_chdt" class ThresholdInputSpec(MathsInput): thresh = traits.Float( mandatory=True, position=4, argstr="%s", desc="threshold value" ) direction = traits.Enum( "below", "above", usedefault=True, desc="zero-out either below or above thresh value", ) use_robust_range = traits.Bool( desc="interpret thresh as percentage (0-100) of robust range" ) use_nonzero_voxels = traits.Bool( desc="use nonzero voxels to calculate robust range", requires=["use_robust_range"], ) class Threshold(MathsCommand): """Use fslmaths to apply a threshold to an image in a variety of ways.""" input_spec = ThresholdInputSpec _suffix = "_thresh" def _format_arg(self, name, spec, value): if name == "thresh": arg = "-" _si = self.inputs if self.inputs.direction == "above": arg += "u" arg += "thr" if isdefined(_si.use_robust_range) and _si.use_robust_range: if isdefined(_si.use_nonzero_voxels) and _si.use_nonzero_voxels: arg += "P" else: arg += "p" arg += " %.10f" % value return arg return super(Threshold, self)._format_arg(name, spec, value) class StdImageInput(MathsInput): dimension = traits.Enum( "T", "X", "Y", "Z", usedefault=True, argstr="-%sstd", position=4, desc="dimension to standard deviate across", ) class StdImage(MathsCommand): """Use fslmaths to generate a standard deviation in an image across a given 
dimension. """ input_spec = StdImageInput _suffix = "_std" class MeanImageInput(MathsInput): dimension = traits.Enum( "T", "X", "Y", "Z", usedefault=True, argstr="-%smean", position=4, desc="dimension to mean across", ) class MeanImage(MathsCommand): """Use fslmaths to generate a mean image across a given dimension.""" input_spec = MeanImageInput _suffix = "_mean" class MaxImageInput(MathsInput): dimension = traits.Enum( "T", "X", "Y", "Z", usedefault=True, argstr="-%smax", position=4, desc="dimension to max across", ) class MaxImage(MathsCommand): """Use fslmaths to generate a max image across a given dimension. Examples -------- >>> from nipype.interfaces.fsl.maths import MaxImage >>> maxer = MaxImage() >>> maxer.inputs.in_file = "functional.nii" # doctest: +SKIP >>> maxer.dimension = "T" >>> maxer.cmdline # doctest: +SKIP 'fslmaths functional.nii -Tmax functional_max.nii' """ input_spec = MaxImageInput _suffix = "_max" class PercentileImageInput(MathsInput): dimension = traits.Enum( "T", "X", "Y", "Z", usedefault=True, argstr="-%sperc", position=4, desc="dimension to percentile across", ) perc = traits.Range( low=0, high=100, argstr="%f", position=5, desc=("nth percentile (0-100) of FULL RANGE " "across dimension"), ) class PercentileImage(MathsCommand): """Use fslmaths to generate a percentile image across a given dimension. Examples -------- >>> from nipype.interfaces.fsl.maths import MaxImage >>> percer = PercentileImage() >>> percer.inputs.in_file = "functional.nii" # doctest: +SKIP >>> percer.dimension = "T" >>> percer.perc = 90 >>> percer.cmdline # doctest: +SKIP 'fslmaths functional.nii -Tperc 90 functional_perc.nii' """ input_spec = PercentileImageInput _suffix = "_perc" class MaxnImageInput(MathsInput): dimension = traits.Enum( "T", "X", "Y", "Z", usedefault=True, argstr="-%smaxn", position=4, desc="dimension to index max across", ) class MaxnImage(MathsCommand): """Use fslmaths to generate an image of index of max across a given dimension. 
""" input_spec = MaxnImageInput _suffix = "_maxn" class MinImageInput(MathsInput): dimension = traits.Enum( "T", "X", "Y", "Z", usedefault=True, argstr="-%smin", position=4, desc="dimension to min across", ) class MinImage(MathsCommand): """Use fslmaths to generate a minimum image across a given dimension.""" input_spec = MinImageInput _suffix = "_min" class MedianImageInput(MathsInput): dimension = traits.Enum( "T", "X", "Y", "Z", usedefault=True, argstr="-%smedian", position=4, desc="dimension to median across", ) class MedianImage(MathsCommand): """Use fslmaths to generate a median image across a given dimension.""" input_spec = MedianImageInput _suffix = "_median" class AR1ImageInput(MathsInput): dimension = traits.Enum( "T", "X", "Y", "Z", usedefault=True, argstr="-%sar1", position=4, desc=("dimension to find AR(1) coefficient" "across"), ) class AR1Image(MathsCommand): """Use fslmaths to generate an AR1 coefficient image across a given dimension. (Should use -odt float and probably demean first) """ input_spec = AR1ImageInput _suffix = "_ar1" class IsotropicSmoothInput(MathsInput): fwhm = traits.Float( mandatory=True, xor=["sigma"], position=4, argstr="-s %.5f", desc="fwhm of smoothing kernel [mm]", ) sigma = traits.Float( mandatory=True, xor=["fwhm"], position=4, argstr="-s %.5f", desc="sigma of smoothing kernel [mm]", ) class IsotropicSmooth(MathsCommand): """Use fslmaths to spatially smooth an image with a gaussian kernel.""" input_spec = IsotropicSmoothInput _suffix = "_smooth" def _format_arg(self, name, spec, value): if name == "fwhm": sigma = float(value) / np.sqrt(8 * np.log(2)) return spec.argstr % sigma return super(IsotropicSmooth, self)._format_arg(name, spec, value) class ApplyMaskInput(MathsInput): mask_file = File( exists=True, mandatory=True, argstr="-mas %s", position=4, desc="binary image defining mask space", ) class ApplyMask(MathsCommand): """Use fslmaths to apply a binary mask to another image.""" input_spec = ApplyMaskInput _suffix = 
"_masked" class KernelInput(MathsInput): kernel_shape = traits.Enum( "3D", "2D", "box", "boxv", "gauss", "sphere", "file", argstr="-kernel %s", position=4, desc="kernel shape to use", ) kernel_size = traits.Float( argstr="%.4f", position=5, xor=["kernel_file"], desc=( "kernel size - voxels for box/boxv, mm " "for sphere, mm sigma for gauss" ), ) kernel_file = File( exists=True, argstr="%s", position=5, xor=["kernel_size"], desc="use external file for kernel", ) class DilateInput(KernelInput): operation = traits.Enum( "mean", "modal", "max", argstr="-dil%s", position=6, mandatory=True, desc="filtering operation to perfoem in dilation", ) class DilateImage(MathsCommand): """Use fslmaths to perform a spatial dilation of an image.""" input_spec = DilateInput _suffix = "_dil" def _format_arg(self, name, spec, value): if name == "operation": return spec.argstr % dict(mean="M", modal="D", max="F")[value] return super(DilateImage, self)._format_arg(name, spec, value) class ErodeInput(KernelInput): minimum_filter = traits.Bool( argstr="%s", position=6, usedefault=True, default_value=False, desc=("if true, minimum filter rather than " "erosion by zeroing-out"), ) class ErodeImage(MathsCommand): """Use fslmaths to perform a spatial erosion of an image.""" input_spec = ErodeInput _suffix = "_ero" def _format_arg(self, name, spec, value): if name == "minimum_filter": if value: return "-eroF" return "-ero" return super(ErodeImage, self)._format_arg(name, spec, value) class SpatialFilterInput(KernelInput): operation = traits.Enum( "mean", "median", "meanu", argstr="-f%s", position=6, mandatory=True, desc="operation to filter with", ) class SpatialFilter(MathsCommand): """Use fslmaths to spatially filter an image.""" input_spec = SpatialFilterInput _suffix = "_filt" class UnaryMathsInput(MathsInput): operation = traits.Enum( "exp", "log", "sin", "cos", "tan", "asin", "acos", "atan", "sqr", "sqrt", "recip", "abs", "bin", "binv", "fillh", "fillh26", "index", "edge", "nan", "nanm", 
"rand", "randn", "range", argstr="-%s", position=4, mandatory=True, desc="operation to perform", ) class UnaryMaths(MathsCommand): """Use fslmaths to perorm a variety of mathematical operations on an image.""" input_spec = UnaryMathsInput def _list_outputs(self): self._suffix = "_" + self.inputs.operation return super(UnaryMaths, self)._list_outputs() class BinaryMathsInput(MathsInput): operation = traits.Enum( "add", "sub", "mul", "div", "rem", "max", "min", mandatory=True, argstr="-%s", position=4, desc="operation to perform", ) operand_file = File( exists=True, argstr="%s", mandatory=True, position=5, xor=["operand_value"], desc="second image to perform operation with", ) operand_value = traits.Float( argstr="%.8f", mandatory=True, position=5, xor=["operand_file"], desc="value to perform operation with", ) class BinaryMaths(MathsCommand): """Use fslmaths to perform mathematical operations using a second image or a numeric value. """ input_spec = BinaryMathsInput class MultiImageMathsInput(MathsInput): op_string = traits.String( position=4, argstr="%s", mandatory=True, desc=("python formatted string of operations " "to perform"), ) operand_files = InputMultiPath( File(exists=True), mandatory=True, desc=("list of file names to plug into op " "string"), ) class MultiImageMaths(MathsCommand): """Use fslmaths to perform a sequence of mathematical operations. 
Examples -------- >>> from nipype.interfaces.fsl import MultiImageMaths >>> maths = MultiImageMaths() >>> maths.inputs.in_file = "functional.nii" >>> maths.inputs.op_string = "-add %s -mul -1 -div %s" >>> maths.inputs.operand_files = ["functional2.nii", "functional3.nii"] >>> maths.inputs.out_file = "functional4.nii" >>> maths.cmdline 'fslmaths functional.nii -add functional2.nii -mul -1 -div functional3.nii functional4.nii' """ input_spec = MultiImageMathsInput def _format_arg(self, name, spec, value): if name == "op_string": return value % tuple(self.inputs.operand_files) return super(MultiImageMaths, self)._format_arg(name, spec, value) class TemporalFilterInput(MathsInput): lowpass_sigma = traits.Float( -1, argstr="%.6f", position=5, usedefault=True, desc="lowpass filter sigma (in volumes)", ) highpass_sigma = traits.Float( -1, argstr="-bptf %.6f", position=4, usedefault=True, desc="highpass filter sigma (in volumes)", ) class TemporalFilter(MathsCommand): """Use fslmaths to apply a low, high, or bandpass temporal filter to a timeseries. """ input_spec = TemporalFilterInput _suffix = "_filt" nipype-1.7.0/nipype/interfaces/fsl/model.py000066400000000000000000002612221413403311400207070ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL `_ command line tools. This was written to work with FSL version 4.1.4. """ import os from glob import glob from shutil import rmtree from string import Template import numpy as np from nibabel import load from ... 
import LooseVersion from ...utils.filemanip import simplify_list, ensure_list from ...utils.misc import human_order_sorted from ...external.due import BibTeX from ..base import ( File, traits, isdefined, TraitedSpec, BaseInterface, Directory, InputMultiPath, OutputMultiPath, BaseInterfaceInputSpec, ) from .base import FSLCommand, FSLCommandInputSpec, Info class Level1DesignInputSpec(BaseInterfaceInputSpec): interscan_interval = traits.Float( mandatory=True, desc="Interscan interval (in secs)" ) session_info = traits.Any( mandatory=True, desc=("Session specific information generated " "by ``modelgen.SpecifyModel``"), ) bases = traits.Either( traits.Dict( traits.Enum("dgamma"), traits.Dict(traits.Enum("derivs"), traits.Bool) ), traits.Dict( traits.Enum("gamma"), traits.Dict(traits.Enum("derivs", "gammasigma", "gammadelay")), ), traits.Dict( traits.Enum("custom"), traits.Dict(traits.Enum("bfcustompath"), traits.Str) ), traits.Dict(traits.Enum("none"), traits.Dict()), traits.Dict(traits.Enum("none"), traits.Enum(None)), mandatory=True, desc=( "name of basis function and options e.g., " "{'dgamma': {'derivs': True}}" ), ) orthogonalization = traits.Dict( traits.Int, traits.Dict(traits.Int, traits.Either(traits.Bool, traits.Int)), desc=( "which regressors to make orthogonal e.g., " "{1: {0:0,1:0,2:0}, 2: {0:1,1:1,2:0}} to make the second " "regressor in a 2-regressor model orthogonal to the first." ), usedefault=True, ) model_serial_correlations = traits.Bool( desc="Option to model serial correlations using an \ autoregressive estimator (order 1). Setting this option is only \ useful in the context of the fsf file. 
If you set this to False, you need to \ repeat this option for FILMGLS by setting autocorr_noestimate to True", mandatory=True, ) contrasts = traits.List( traits.Either( traits.Tuple( traits.Str, traits.Enum("T"), traits.List(traits.Str), traits.List(traits.Float), ), traits.Tuple( traits.Str, traits.Enum("T"), traits.List(traits.Str), traits.List(traits.Float), traits.List(traits.Float), ), traits.Tuple( traits.Str, traits.Enum("F"), traits.List( traits.Either( traits.Tuple( traits.Str, traits.Enum("T"), traits.List(traits.Str), traits.List(traits.Float), ), traits.Tuple( traits.Str, traits.Enum("T"), traits.List(traits.Str), traits.List(traits.Float), traits.List(traits.Float), ), ) ), ), ), desc="List of contrasts with each contrast being a list of the form - \ [('name', 'stat', [condition list], [weight list], [session list])]. if \ session list is None or not provided, all sessions are used. For F \ contrasts, the condition list should contain previously defined \ T-contrasts.", ) class Level1DesignOutputSpec(TraitedSpec): fsf_files = OutputMultiPath(File(exists=True), desc="FSL feat specification files") ev_files = OutputMultiPath( traits.List(File(exists=True)), desc="condition information files" ) class Level1Design(BaseInterface): """Generate FEAT specific files Examples -------- >>> level1design = Level1Design() >>> level1design.inputs.interscan_interval = 2.5 >>> level1design.inputs.bases = {'dgamma':{'derivs': False}} >>> level1design.inputs.session_info = 'session_info.npz' >>> level1design.run() # doctest: +SKIP """ input_spec = Level1DesignInputSpec output_spec = Level1DesignOutputSpec def _create_ev_file(self, evfname, evinfo): f = open(evfname, "wt") for i in evinfo: if len(i) == 3: f.write("%f %f %f\n" % (i[0], i[1], i[2])) else: f.write("%f\n" % i[0]) f.close() def _create_ev_files( self, cwd, runinfo, runidx, ev_parameters, orthogonalization, contrasts, do_tempfilter, basis_key, ): """Creates EV files from condition and regressor information. 
Parameters: ----------- runinfo : dict Generated by `SpecifyModel` and contains information about events and other regressors. runidx : int Index to run number ev_parameters : dict A dictionary containing the model parameters for the given design type. orthogonalization : dict A dictionary of dictionaries specifying orthogonal EVs. contrasts : list of lists Information on contrasts to be evaluated """ conds = {} evname = [] if basis_key == "dgamma": basis_key = "hrf" elif basis_key == "gamma": try: _ = ev_parameters["gammasigma"] except KeyError: ev_parameters["gammasigma"] = 3 try: _ = ev_parameters["gammadelay"] except KeyError: ev_parameters["gammadelay"] = 6 ev_template = load_template("feat_ev_" + basis_key + ".tcl") ev_none = load_template("feat_ev_none.tcl") ev_ortho = load_template("feat_ev_ortho.tcl") ev_txt = "" # generate sections for conditions and other nuisance # regressors num_evs = [0, 0] for field in ["cond", "regress"]: for i, cond in enumerate(runinfo[field]): name = cond["name"] evname.append(name) evfname = os.path.join( cwd, "ev_%s_%d_%d.txt" % (name, runidx, len(evname)) ) evinfo = [] num_evs[0] += 1 num_evs[1] += 1 if field == "cond": for j, onset in enumerate(cond["onset"]): try: amplitudes = cond["amplitudes"] if len(amplitudes) > 1: amp = amplitudes[j] else: amp = amplitudes[0] except KeyError: amp = 1 if len(cond["duration"]) > 1: evinfo.insert(j, [onset, cond["duration"][j], amp]) else: evinfo.insert(j, [onset, cond["duration"][0], amp]) ev_parameters["cond_file"] = evfname ev_parameters["ev_num"] = num_evs[0] ev_parameters["ev_name"] = name ev_parameters["tempfilt_yn"] = do_tempfilter if "basisorth" not in ev_parameters: ev_parameters["basisorth"] = 1 if "basisfnum" not in ev_parameters: ev_parameters["basisfnum"] = 1 try: ev_parameters["fsldir"] = os.environ["FSLDIR"] except KeyError: if basis_key == "flobs": raise Exception("FSL environment variables not set") else: ev_parameters["fsldir"] = "/usr/share/fsl" 
ev_parameters["temporalderiv"] = int( bool(ev_parameters.get("derivs", False)) ) if ev_parameters["temporalderiv"]: evname.append(name + "TD") num_evs[1] += 1 ev_txt += ev_template.substitute(ev_parameters) elif field == "regress": evinfo = [[j] for j in cond["val"]] ev_txt += ev_none.substitute( ev_num=num_evs[0], ev_name=name, tempfilt_yn=do_tempfilter, cond_file=evfname, ) ev_txt += "\n" conds[name] = evfname self._create_ev_file(evfname, evinfo) # add ev orthogonalization for i in range(1, num_evs[0] + 1): initial = ev_ortho.substitute(c0=i, c1=0, orthogonal=1) for j in range(0, num_evs[0] + 1): try: orthogonal = int(orthogonalization[i][j]) except (KeyError, TypeError, ValueError, IndexError): orthogonal = 0 if orthogonal == 1 and initial not in ev_txt: ev_txt += initial + "\n" ev_txt += ev_ortho.substitute(c0=i, c1=j, orthogonal=orthogonal) ev_txt += "\n" # add contrast info to fsf file if isdefined(contrasts): contrast_header = load_template("feat_contrast_header.tcl") contrast_prolog = load_template("feat_contrast_prolog.tcl") contrast_element = load_template("feat_contrast_element.tcl") contrast_ftest_element = load_template("feat_contrast_ftest_element.tcl") contrastmask_header = load_template("feat_contrastmask_header.tcl") contrastmask_footer = load_template("feat_contrastmask_footer.tcl") contrastmask_element = load_template("feat_contrastmask_element.tcl") # add t/f contrast info ev_txt += contrast_header.substitute() con_names = [] for j, con in enumerate(contrasts): con_names.append(con[0]) con_map = {} ftest_idx = [] ttest_idx = [] for j, con in enumerate(contrasts): if con[1] == "F": ftest_idx.append(j) for c in con[2]: if c[0] not in list(con_map.keys()): con_map[c[0]] = [] con_map[c[0]].append(j) else: ttest_idx.append(j) for ctype in ["real", "orig"]: for j, con in enumerate(contrasts): if con[1] == "F": continue tidx = ttest_idx.index(j) + 1 ev_txt += contrast_prolog.substitute( cnum=tidx, ctype=ctype, cname=con[0] ) count = 0 for c in 
range(1, len(evname) + 1): if evname[c - 1].endswith("TD") and ctype == "orig": continue count = count + 1 if evname[c - 1] in con[2]: val = con[3][con[2].index(evname[c - 1])] else: val = 0.0 ev_txt += contrast_element.substitute( cnum=tidx, element=count, ctype=ctype, val=val ) ev_txt += "\n" for fconidx in ftest_idx: fval = 0 if con[0] in con_map.keys() and fconidx in con_map[con[0]]: fval = 1 ev_txt += contrast_ftest_element.substitute( cnum=ftest_idx.index(fconidx) + 1, element=tidx, ctype=ctype, val=fval, ) ev_txt += "\n" # add contrast mask info ev_txt += contrastmask_header.substitute() for j, _ in enumerate(contrasts): for k, _ in enumerate(contrasts): if j != k: ev_txt += contrastmask_element.substitute(c1=j + 1, c2=k + 1) ev_txt += contrastmask_footer.substitute() return num_evs, ev_txt def _format_session_info(self, session_info): if isinstance(session_info, dict): session_info = [session_info] return session_info def _get_func_files(self, session_info): """Returns functional files in the order of runs""" func_files = [] for i, info in enumerate(session_info): func_files.insert(i, info["scans"]) return func_files def _run_interface(self, runtime): cwd = os.getcwd() fsf_header = load_template("feat_header_l1.tcl") fsf_postscript = load_template("feat_nongui.tcl") prewhiten = 0 if isdefined(self.inputs.model_serial_correlations): prewhiten = int(self.inputs.model_serial_correlations) basis_key = list(self.inputs.bases.keys())[0] ev_parameters = dict(self.inputs.bases[basis_key]) session_info = self._format_session_info(self.inputs.session_info) func_files = self._get_func_files(session_info) n_tcon = 0 n_fcon = 0 if isdefined(self.inputs.contrasts): for i, c in enumerate(self.inputs.contrasts): if c[1] == "T": n_tcon += 1 elif c[1] == "F": n_fcon += 1 for i, info in enumerate(session_info): do_tempfilter = 1 if info["hpf"] == np.inf: do_tempfilter = 0 num_evs, cond_txt = self._create_ev_files( cwd, info, i, ev_parameters, self.inputs.orthogonalization, 
self.inputs.contrasts, do_tempfilter, basis_key, ) nim = load(func_files[i]) (_, _, _, timepoints) = nim.shape fsf_txt = fsf_header.substitute( run_num=i, interscan_interval=self.inputs.interscan_interval, num_vols=timepoints, prewhiten=prewhiten, num_evs=num_evs[0], num_evs_real=num_evs[1], num_tcon=n_tcon, num_fcon=n_fcon, high_pass_filter_cutoff=info["hpf"], temphp_yn=do_tempfilter, func_file=func_files[i], ) fsf_txt += cond_txt fsf_txt += fsf_postscript.substitute(overwrite=1) f = open(os.path.join(cwd, "run%d.fsf" % i), "w") f.write(fsf_txt) f.close() return runtime def _list_outputs(self): outputs = self.output_spec().get() cwd = os.getcwd() outputs["fsf_files"] = [] outputs["ev_files"] = [] basis_key = list(self.inputs.bases.keys())[0] ev_parameters = dict(self.inputs.bases[basis_key]) for runno, runinfo in enumerate( self._format_session_info(self.inputs.session_info) ): outputs["fsf_files"].append(os.path.join(cwd, "run%d.fsf" % runno)) outputs["ev_files"].insert(runno, []) evname = [] for field in ["cond", "regress"]: for i, cond in enumerate(runinfo[field]): name = cond["name"] evname.append(name) evfname = os.path.join( cwd, "ev_%s_%d_%d.txt" % (name, runno, len(evname)) ) if field == "cond": ev_parameters["temporalderiv"] = int( bool(ev_parameters.get("derivs", False)) ) if ev_parameters["temporalderiv"]: evname.append(name + "TD") outputs["ev_files"][runno].append(os.path.join(cwd, evfname)) return outputs class FEATInputSpec(FSLCommandInputSpec): fsf_file = File( exists=True, mandatory=True, argstr="%s", position=0, desc="File specifying the feat design spec file", ) class FEATOutputSpec(TraitedSpec): feat_dir = Directory(exists=True) class FEAT(FSLCommand): """Uses FSL feat to calculate first level stats""" _cmd = "feat" input_spec = FEATInputSpec output_spec = FEATOutputSpec def _list_outputs(self): outputs = self._outputs().get() is_ica = False outputs["feat_dir"] = None with open(self.inputs.fsf_file, "rt") as fp: text = fp.read() if "set 
fmri(inmelodic) 1" in text: is_ica = True for line in text.split("\n"): if line.find("set fmri(outputdir)") > -1: try: outputdir_spec = line.split('"')[-2] if os.path.exists(outputdir_spec): outputs["feat_dir"] = outputdir_spec except: pass if not outputs["feat_dir"]: if is_ica: outputs["feat_dir"] = glob(os.path.join(os.getcwd(), "*ica"))[0] else: outputs["feat_dir"] = glob(os.path.join(os.getcwd(), "*feat"))[0] print("Outputs from FEATmodel:", outputs) return outputs class FEATModelInputSpec(FSLCommandInputSpec): fsf_file = File( exists=True, mandatory=True, argstr="%s", position=0, desc="File specifying the feat design spec file", copyfile=False, ) ev_files = traits.List( File(exists=True), mandatory=True, argstr="%s", desc="Event spec files generated by level1design", position=1, copyfile=False, ) class FEATModelOutpuSpec(TraitedSpec): design_file = File(exists=True, desc="Mat file containing ascii matrix for design") design_image = File(exists=True, desc="Graphical representation of design matrix") design_cov = File(exists=True, desc="Graphical representation of design covariance") con_file = File(exists=True, desc="Contrast file containing contrast vectors") fcon_file = File(desc="Contrast file containing contrast vectors") class FEATModel(FSLCommand): """Uses FSL feat_model to generate design.mat files""" _cmd = "feat_model" input_spec = FEATModelInputSpec output_spec = FEATModelOutpuSpec def _format_arg(self, name, trait_spec, value): if name == "fsf_file": return super(FEATModel, self)._format_arg( name, trait_spec, self._get_design_root(value) ) elif name == "ev_files": return "" else: return super(FEATModel, self)._format_arg(name, trait_spec, value) def _get_design_root(self, infile): _, fname = os.path.split(infile) return fname.split(".")[0] def _list_outputs(self): # TODO: figure out file names and get rid off the globs outputs = self._outputs().get() root = self._get_design_root(simplify_list(self.inputs.fsf_file)) design_file = 
glob(os.path.join(os.getcwd(), "%s*.mat" % root)) assert len(design_file) == 1, "No mat file generated by FEAT Model" outputs["design_file"] = design_file[0] design_image = glob(os.path.join(os.getcwd(), "%s.png" % root)) assert len(design_image) == 1, "No design image generated by FEAT Model" outputs["design_image"] = design_image[0] design_cov = glob(os.path.join(os.getcwd(), "%s_cov.png" % root)) assert len(design_cov) == 1, "No covariance image generated by FEAT Model" outputs["design_cov"] = design_cov[0] con_file = glob(os.path.join(os.getcwd(), "%s*.con" % root)) assert len(con_file) == 1, "No con file generated by FEAT Model" outputs["con_file"] = con_file[0] fcon_file = glob(os.path.join(os.getcwd(), "%s*.fts" % root)) if fcon_file: assert len(fcon_file) == 1, "No fts file generated by FEAT Model" outputs["fcon_file"] = fcon_file[0] return outputs class FILMGLSInputSpec(FSLCommandInputSpec): in_file = File( exists=True, mandatory=True, position=-3, argstr="%s", desc="input data file" ) design_file = File(exists=True, position=-2, argstr="%s", desc="design matrix file") threshold = traits.Range( value=1000.0, low=0.0, argstr="%f", position=-1, usedefault=True, desc="threshold", ) smooth_autocorr = traits.Bool(argstr="-sa", desc="Smooth auto corr estimates") mask_size = traits.Int(argstr="-ms %d", desc="susan mask size") brightness_threshold = traits.Range( low=0, argstr="-epith %d", desc=("susan brightness threshold, " "otherwise it is estimated"), ) full_data = traits.Bool(argstr="-v", desc="output full data") _estimate_xor = [ "autocorr_estimate_only", "fit_armodel", "tukey_window", "multitaper_product", "use_pava", "autocorr_noestimate", ] autocorr_estimate_only = traits.Bool( argstr="-ac", xor=_estimate_xor, desc=("perform autocorrelation " "estimatation only"), ) fit_armodel = traits.Bool( argstr="-ar", xor=_estimate_xor, desc=( "fits autoregressive model - default is " "to use tukey with M=sqrt(numvols)" ), ) tukey_window = traits.Int( argstr="-tukey 
%d", xor=_estimate_xor, desc="tukey window size to estimate autocorr", ) multitaper_product = traits.Int( argstr="-mt %d", xor=_estimate_xor, desc=( "multitapering with slepian tapers " "and num is the time-bandwidth " "product" ), ) use_pava = traits.Bool(argstr="-pava", desc="estimates autocorr using PAVA") autocorr_noestimate = traits.Bool( argstr="-noest", xor=_estimate_xor, desc="do not estimate autocorrs" ) output_pwdata = traits.Bool( argstr="-output_pwdata", desc=("output prewhitened data and average " "design matrix"), ) results_dir = Directory( "results", argstr="-rn %s", usedefault=True, desc="directory to store results in", ) class FILMGLSInputSpec505(FSLCommandInputSpec): in_file = File( exists=True, mandatory=True, position=-3, argstr="--in=%s", desc="input data file", ) design_file = File( exists=True, position=-2, argstr="--pd=%s", desc="design matrix file" ) threshold = traits.Range( value=1000.0, low=0.0, argstr="--thr=%f", position=-1, usedefault=True, desc="threshold", ) smooth_autocorr = traits.Bool(argstr="--sa", desc="Smooth auto corr estimates") mask_size = traits.Int(argstr="--ms=%d", desc="susan mask size") brightness_threshold = traits.Range( low=0, argstr="--epith=%d", desc=("susan brightness threshold, " "otherwise it is estimated"), ) full_data = traits.Bool(argstr="-v", desc="output full data") _estimate_xor = [ "autocorr_estimate_only", "fit_armodel", "tukey_window", "multitaper_product", "use_pava", "autocorr_noestimate", ] autocorr_estimate_only = traits.Bool( argstr="--ac", xor=_estimate_xor, desc=("perform autocorrelation " "estimation only"), ) fit_armodel = traits.Bool( argstr="--ar", xor=_estimate_xor, desc=( "fits autoregressive model - default is " "to use tukey with M=sqrt(numvols)" ), ) tukey_window = traits.Int( argstr="--tukey=%d", xor=_estimate_xor, desc="tukey window size to estimate autocorr", ) multitaper_product = traits.Int( argstr="--mt=%d", xor=_estimate_xor, desc=( "multitapering with slepian tapers " "and num 
is the time-bandwidth " "product" ), ) use_pava = traits.Bool(argstr="--pava", desc="estimates autocorr using PAVA") autocorr_noestimate = traits.Bool( argstr="--noest", xor=_estimate_xor, desc="do not estimate autocorrs" ) output_pwdata = traits.Bool( argstr="--outputPWdata", desc=("output prewhitened data and average " "design matrix"), ) results_dir = Directory( "results", argstr="--rn=%s", usedefault=True, desc="directory to store results in", ) class FILMGLSInputSpec507(FILMGLSInputSpec505): threshold = traits.Float( default_value=-1000.0, argstr="--thr=%f", position=-1, usedefault=True, desc="threshold", ) tcon_file = File( exists=True, argstr="--con=%s", desc="contrast file containing T-contrasts" ) fcon_file = File( exists=True, argstr="--fcon=%s", desc="contrast file containing F-contrasts" ) mode = traits.Enum( "volumetric", "surface", argstr="--mode=%s", desc="Type of analysis to be done" ) surface = File( exists=True, argstr="--in2=%s", desc=("input surface for autocorr smoothing in " "surface-based analyses"), ) class FILMGLSOutputSpec(TraitedSpec): param_estimates = OutputMultiPath( File(exists=True), desc=("Parameter estimates for each " "column of the design matrix"), ) residual4d = File( exists=True, desc=("Model fit residual mean-squared error for each " "time point"), ) dof_file = File(exists=True, desc="degrees of freedom") sigmasquareds = File( exists=True, desc="summary of residuals, See Woolrich, et. 
class FILMGLS(FSLCommand):
    """Use FSL film_gls command to fit a design matrix to voxel timeseries

    Examples
    --------

    Initialize with no options, assigning them when calling run:

    >>> from nipype.interfaces import fsl
    >>> fgls = fsl.FILMGLS()
    >>> res = fgls.run('in_file', 'design_file', 'thresh', rn='stats') #doctest: +SKIP

    Assign options through the ``inputs`` attribute:

    >>> fgls = fsl.FILMGLS()
    >>> fgls.inputs.in_file = 'functional.nii'
    >>> fgls.inputs.design_file = 'design.mat'
    >>> fgls.inputs.threshold = 10
    >>> fgls.inputs.results_dir = 'stats'
    >>> res = fgls.run() #doctest: +SKIP

    Specify options when creating an instance:

    >>> fgls = fsl.FILMGLS(in_file='functional.nii', \
design_file='design.mat', \
threshold=10, results_dir='stats')
    >>> res = fgls.run() #doctest: +SKIP

    """

    _cmd = "film_gls"
    input_spec = FILMGLSInputSpec
    output_spec = FILMGLSOutputSpec
    # FSL changed film_gls' command line and outputs across versions, so the
    # spec classes are chosen at import time based on the detected version.
    if Info.version() and LooseVersion(Info.version()) > LooseVersion("5.0.6"):
        input_spec = FILMGLSInputSpec507
        output_spec = FILMGLSOutputSpec507
    elif Info.version() and LooseVersion(Info.version()) > LooseVersion("5.0.4"):
        input_spec = FILMGLSInputSpec505

    def _get_pe_files(self, cwd):
        """Predict parameter-estimate filenames from the design matrix.

        Reads the ``/NumWaves`` header of the FSL design file and returns one
        ``pe<i>.nii`` filename per design column (rooted at ``cwd``), or None
        if no design file is set.
        """
        files = None
        if isdefined(self.inputs.design_file):
            # ``with`` guarantees the handle is closed even if parsing fails.
            with open(self.inputs.design_file, "rt") as fp:
                for line in fp:
                    if line.startswith("/NumWaves"):
                        numpes = int(line.split()[-1])
                        files = [
                            self._gen_fname("pe%d.nii" % (i + 1), cwd=cwd)
                            for i in range(numpes)
                        ]
                        break
        return files

    def _get_numcons(self):
        """Count T- and F-contrasts declared in the contrast files.

        Parses the ``/NumContrasts`` header of ``tcon_file`` and ``fcon_file``
        (when defined) and returns ``(numtcons, numfcons)``.
        """
        numtcons = 0
        numfcons = 0
        if isdefined(self.inputs.tcon_file):
            with open(self.inputs.tcon_file, "rt") as fp:
                for line in fp:
                    if line.startswith("/NumContrasts"):
                        numtcons = int(line.split()[-1])
                        break
        if isdefined(self.inputs.fcon_file):
            with open(self.inputs.fcon_file, "rt") as fp:
                for line in fp:
                    if line.startswith("/NumContrasts"):
                        numfcons = int(line.split()[-1])
                        break
        return numtcons, numfcons

    def _list_outputs(self):
        """Build the expected output filenames inside ``results_dir``."""
        outputs = self._outputs().get()
        cwd = os.getcwd()
        results_dir = os.path.join(cwd, self.inputs.results_dir)
        outputs["results_dir"] = results_dir
        pe_files = self._get_pe_files(results_dir)
        if pe_files:
            outputs["param_estimates"] = pe_files
        outputs["residual4d"] = self._gen_fname("res4d.nii", cwd=results_dir)
        outputs["dof_file"] = os.path.join(results_dir, "dof")
        outputs["sigmasquareds"] = self._gen_fname(
            "sigmasquareds.nii", cwd=results_dir
        )
        outputs["thresholdac"] = self._gen_fname("threshac1.nii", cwd=results_dir)
        # corrections.nii is only produced by FSL < 5.0.7.
        if Info.version() and LooseVersion(Info.version()) < LooseVersion("5.0.7"):
            outputs["corrections"] = self._gen_fname(
                "corrections.nii", cwd=results_dir
            )
        outputs["logfile"] = self._gen_fname(
            "logfile", change_ext=False, cwd=results_dir
        )

        # FSL > 5.0.6 computes contrasts within film_gls itself, so the
        # contrast outputs (copes/zstats/...) appear in the results directory.
        if Info.version() and LooseVersion(Info.version()) > LooseVersion("5.0.6"):
            pth = results_dir
            numtcons, numfcons = self._get_numcons()
            base_contrast = 1
            copes = []
            varcopes = []
            zstats = []
            tstats = []
            for i in range(numtcons):
                copes.append(
                    self._gen_fname("cope%d.nii" % (base_contrast + i), cwd=pth)
                )
                varcopes.append(
                    self._gen_fname("varcope%d.nii" % (base_contrast + i), cwd=pth)
                )
                zstats.append(
                    self._gen_fname("zstat%d.nii" % (base_contrast + i), cwd=pth)
                )
                tstats.append(
                    self._gen_fname("tstat%d.nii" % (base_contrast + i), cwd=pth)
                )
            if copes:
                outputs["copes"] = copes
                outputs["varcopes"] = varcopes
                outputs["zstats"] = zstats
                outputs["tstats"] = tstats
            fstats = []
            zfstats = []
            for i in range(numfcons):
                fstats.append(
                    self._gen_fname("fstat%d.nii" % (base_contrast + i), cwd=pth)
                )
                zfstats.append(
                    self._gen_fname("zfstat%d.nii" % (base_contrast + i), cwd=pth)
                )
            if fstats:
                outputs["fstats"] = fstats
                outputs["zfstats"] = zfstats
        return outputs
fsf_header.substitute( num_runs=num_runs, regimage=self.inputs.reg_image, regdof=self.inputs.reg_dof, ) for i, rundir in enumerate(ensure_list(self.inputs.feat_dirs)): fsf_txt += fsf_dirs.substitute(runno=i + 1, rundir=os.path.abspath(rundir)) fsf_txt += fsf_footer.substitute() f = open(os.path.join(os.getcwd(), "register.fsf"), "wt") f.write(fsf_txt) f.close() return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["fsf_file"] = os.path.abspath(os.path.join(os.getcwd(), "register.fsf")) return outputs class FLAMEOInputSpec(FSLCommandInputSpec): cope_file = File( exists=True, argstr="--copefile=%s", mandatory=True, desc="cope regressor data file", ) var_cope_file = File( exists=True, argstr="--varcopefile=%s", desc="varcope weightings data file" ) dof_var_cope_file = File( exists=True, argstr="--dofvarcopefile=%s", desc="dof data file for varcope data" ) mask_file = File( exists=True, argstr="--maskfile=%s", mandatory=True, desc="mask file" ) design_file = File( exists=True, argstr="--designfile=%s", mandatory=True, desc="design matrix file" ) t_con_file = File( exists=True, argstr="--tcontrastsfile=%s", mandatory=True, desc="ascii matrix specifying t-contrasts", ) f_con_file = File( exists=True, argstr="--fcontrastsfile=%s", desc="ascii matrix specifying f-contrasts", ) cov_split_file = File( exists=True, argstr="--covsplitfile=%s", mandatory=True, desc="ascii matrix specifying the groups the covariance is split into", ) run_mode = traits.Enum( "fe", "ols", "flame1", "flame12", argstr="--runmode=%s", mandatory=True, desc="inference to perform", ) n_jumps = traits.Int(argstr="--njumps=%d", desc="number of jumps made by mcmc") burnin = traits.Int( argstr="--burnin=%d", desc=("number of jumps at start of mcmc to be " "discarded"), ) sample_every = traits.Int( argstr="--sampleevery=%d", desc="number of jumps for each sample" ) fix_mean = traits.Bool(argstr="--fixmean", desc="fix mean for tfit") infer_outliers = traits.Bool( 
argstr="--inferoutliers", desc="infer outliers - not for fe" ) no_pe_outputs = traits.Bool(argstr="--nopeoutput", desc="do not output pe files") sigma_dofs = traits.Int( argstr="--sigma_dofs=%d", desc=( "sigma (in mm) to use for Gaussian " "smoothing the DOFs in FLAME 2. Default is " "1mm, -1 indicates no smoothing" ), ) outlier_iter = traits.Int( argstr="--ioni=%d", desc=( "Number of max iterations to use when " "inferring outliers. Default is 12." ), ) log_dir = Directory("stats", argstr="--ld=%s", usedefault=True) # ohinds # no support for ven, vef class FLAMEOOutputSpec(TraitedSpec): pes = OutputMultiPath( File(exists=True), desc=( "Parameter estimates for each column of the " "design matrix for each voxel" ), ) res4d = OutputMultiPath( File(exists=True), desc=("Model fit residual mean-squared error for " "each time point"), ) copes = OutputMultiPath( File(exists=True), desc="Contrast estimates for each contrast" ) var_copes = OutputMultiPath( File(exists=True), desc="Variance estimates for each contrast" ) zstats = OutputMultiPath(File(exists=True), desc="z-stat file for each contrast") tstats = OutputMultiPath(File(exists=True), desc="t-stat file for each contrast") zfstats = OutputMultiPath(File(exists=True), desc="z stat file for each f contrast") fstats = OutputMultiPath(File(exists=True), desc="f-stat file for each contrast") mrefvars = OutputMultiPath( File(exists=True), desc=("mean random effect variances for each " "contrast") ) tdof = OutputMultiPath( File(exists=True), desc="temporal dof file for each contrast" ) weights = OutputMultiPath(File(exists=True), desc="weights file for each contrast") stats_dir = Directory( File(exists=True), desc="directory storing model estimation output" ) class FLAMEO(FSLCommand): """Use FSL flameo command to perform higher level model fits Examples -------- Initialize FLAMEO with no options, assigning them when calling run: >>> from nipype.interfaces import fsl >>> flameo = fsl.FLAMEO() >>> flameo.inputs.cope_file = 
'cope.nii.gz' >>> flameo.inputs.var_cope_file = 'varcope.nii.gz' >>> flameo.inputs.cov_split_file = 'cov_split.mat' >>> flameo.inputs.design_file = 'design.mat' >>> flameo.inputs.t_con_file = 'design.con' >>> flameo.inputs.mask_file = 'mask.nii' >>> flameo.inputs.run_mode = 'fe' >>> flameo.cmdline 'flameo --copefile=cope.nii.gz --covsplitfile=cov_split.mat --designfile=design.mat --ld=stats --maskfile=mask.nii --runmode=fe --tcontrastsfile=design.con --varcopefile=varcope.nii.gz' """ _cmd = "flameo" input_spec = FLAMEOInputSpec output_spec = FLAMEOOutputSpec _references = [ { "entry": BibTeX( "@article{BeckmannJenkinsonSmith2003," "author={C.F. Beckmann, M. Jenkinson, and S.M. Smith}," "title={General multilevel linear modeling for group analysis in FMRI.}," "journal={NeuroImage}," "volume={20}," "pages={1052-1063}," "year={2003}," "}" ), "tags": ["method"], }, { "entry": BibTeX( "@article{WoolrichBehrensBeckmannJenkinsonSmith2004," "author={M.W. Woolrich, T.E. Behrens, " "C.F. Beckmann, M. Jenkinson, and S.M. 
Smith}," "title={Multilevel linear modelling for FMRI group analysis using Bayesian inference.}," "journal={NeuroImage}," "volume={21}," "pages={1732-1747}," "year={2004}," "}" ), "tags": ["method"], }, ] # ohinds: 2010-04-06 def _run_interface(self, runtime): log_dir = self.inputs.log_dir cwd = os.getcwd() if os.access(os.path.join(cwd, log_dir), os.F_OK): rmtree(os.path.join(cwd, log_dir)) return super(FLAMEO, self)._run_interface(runtime) # ohinds: 2010-04-06 # made these compatible with flameo def _list_outputs(self): outputs = self._outputs().get() pth = os.path.join(os.getcwd(), self.inputs.log_dir) pes = human_order_sorted(glob(os.path.join(pth, "pe[0-9]*.*"))) assert len(pes) >= 1, "No pe volumes generated by FSL Estimate" outputs["pes"] = pes res4d = human_order_sorted(glob(os.path.join(pth, "res4d.*"))) assert len(res4d) == 1, "No residual volume generated by FSL Estimate" outputs["res4d"] = res4d[0] copes = human_order_sorted(glob(os.path.join(pth, "cope[0-9]*.*"))) assert len(copes) >= 1, "No cope volumes generated by FSL CEstimate" outputs["copes"] = copes var_copes = human_order_sorted(glob(os.path.join(pth, "varcope[0-9]*.*"))) assert len(var_copes) >= 1, "No varcope volumes generated by FSL CEstimate" outputs["var_copes"] = var_copes zstats = human_order_sorted(glob(os.path.join(pth, "zstat[0-9]*.*"))) assert len(zstats) >= 1, "No zstat volumes generated by FSL CEstimate" outputs["zstats"] = zstats if isdefined(self.inputs.f_con_file): zfstats = human_order_sorted(glob(os.path.join(pth, "zfstat[0-9]*.*"))) assert len(zfstats) >= 1, "No zfstat volumes generated by FSL CEstimate" outputs["zfstats"] = zfstats fstats = human_order_sorted(glob(os.path.join(pth, "fstat[0-9]*.*"))) assert len(fstats) >= 1, "No fstat volumes generated by FSL CEstimate" outputs["fstats"] = fstats tstats = human_order_sorted(glob(os.path.join(pth, "tstat[0-9]*.*"))) assert len(tstats) >= 1, "No tstat volumes generated by FSL CEstimate" outputs["tstats"] = tstats mrefs = 
human_order_sorted( glob(os.path.join(pth, "mean_random_effects_var[0-9]*.*")) ) assert len(mrefs) >= 1, "No mean random effects volumes generated by FLAMEO" outputs["mrefvars"] = mrefs tdof = human_order_sorted(glob(os.path.join(pth, "tdof_t[0-9]*.*"))) assert len(tdof) >= 1, "No T dof volumes generated by FLAMEO" outputs["tdof"] = tdof weights = human_order_sorted(glob(os.path.join(pth, "weights[0-9]*.*"))) assert len(weights) >= 1, "No weight volumes generated by FLAMEO" outputs["weights"] = weights outputs["stats_dir"] = pth return outputs class ContrastMgrInputSpec(FSLCommandInputSpec): tcon_file = File( exists=True, mandatory=True, argstr="%s", position=-1, desc="contrast file containing T-contrasts", ) fcon_file = File( exists=True, argstr="-f %s", desc="contrast file containing F-contrasts" ) param_estimates = InputMultiPath( File(exists=True), argstr="", copyfile=False, mandatory=True, desc=("Parameter estimates for each " "column of the design matrix"), ) corrections = File( exists=True, copyfile=False, mandatory=True, desc="statistical corrections used within FILM modelling", ) dof_file = File( exists=True, argstr="", copyfile=False, mandatory=True, desc="degrees of freedom", ) sigmasquareds = File( exists=True, argstr="", position=-2, copyfile=False, mandatory=True, desc=("summary of residuals, See Woolrich, et. 
class ContrastMgr(FSLCommand):
    """Use FSL contrast_mgr command to evaluate contrasts

    In interface mode this file assumes that all the required inputs are in the
    same location. This has deprecated for FSL versions 5.0.7+ as the necessary
    corrections file is no longer generated by FILMGLS.
    """

    if Info.version() and LooseVersion(Info.version()) >= LooseVersion("5.0.7"):
        # The original code constructed a DeprecationWarning object without
        # raising or emitting it, so users never saw the message. Emit a real
        # warning at import time instead.
        import warnings

        warnings.warn(
            "ContrastMgr is deprecated in FSL 5.0.7+", DeprecationWarning
        )

    _cmd = "contrast_mgr"
    input_spec = ContrastMgrInputSpec
    output_spec = ContrastMgrOutputSpec

    def _run_interface(self, runtime):
        # The returncode is meaningless in ContrastMgr. So check the output
        # in stderr and if it's set, then update the returncode
        # accordingly.
        runtime = super(ContrastMgr, self)._run_interface(runtime)
        if runtime.stderr:
            self.raise_exception(runtime)
        return runtime

    def _format_arg(self, name, trait_spec, value):
        """Suppress file-valued args passed only for staging; pass the
        directory (not the file) for ``sigmasquareds``."""
        if name in ["param_estimates", "corrections", "dof_file"]:
            return ""
        elif name in ["sigmasquareds"]:
            path, _ = os.path.split(value)
            return path
        else:
            return super(ContrastMgr, self)._format_arg(name, trait_spec, value)

    def _get_design_root(self, infile):
        """Return the filename stem (text before the first dot) of infile."""
        _, fname = os.path.split(infile)
        return fname.split(".")[0]

    def _get_numcons(self):
        """Count T- and F-contrasts from the ``/NumContrasts`` headers of the
        contrast files; returns ``(numtcons, numfcons)``."""
        numtcons = 0
        numfcons = 0
        if isdefined(self.inputs.tcon_file):
            # ``with`` ensures the handle is closed even on a parse error.
            with open(self.inputs.tcon_file, "rt") as fp:
                for line in fp:
                    if line.startswith("/NumContrasts"):
                        numtcons = int(line.split()[-1])
                        break
        if isdefined(self.inputs.fcon_file):
            with open(self.inputs.fcon_file, "rt") as fp:
                for line in fp:
                    if line.startswith("/NumContrasts"):
                        numfcons = int(line.split()[-1])
                        break
        return numtcons, numfcons

    def _list_outputs(self):
        """Predict contrast output filenames next to ``sigmasquareds``."""
        outputs = self._outputs().get()
        pth, _ = os.path.split(self.inputs.sigmasquareds)
        numtcons, numfcons = self._get_numcons()
        base_contrast = 1
        if isdefined(self.inputs.contrast_num):
            base_contrast = self.inputs.contrast_num
        copes = []
        varcopes = []
        zstats = []
        tstats = []
        neffs = []
        for i in range(numtcons):
            copes.append(
                self._gen_fname("cope%d.nii" % (base_contrast + i), cwd=pth)
            )
            varcopes.append(
                self._gen_fname("varcope%d.nii" % (base_contrast + i), cwd=pth)
            )
            zstats.append(
                self._gen_fname("zstat%d.nii" % (base_contrast + i), cwd=pth)
            )
            tstats.append(
                self._gen_fname("tstat%d.nii" % (base_contrast + i), cwd=pth)
            )
            neffs.append(
                self._gen_fname("neff%d.nii" % (base_contrast + i), cwd=pth)
            )
        if copes:
            outputs["copes"] = copes
            outputs["varcopes"] = varcopes
            outputs["zstats"] = zstats
            outputs["tstats"] = tstats
            outputs["neffs"] = neffs
        fstats = []
        zfstats = []
        for i in range(numfcons):
            fstats.append(
                self._gen_fname("fstat%d.nii" % (base_contrast + i), cwd=pth)
            )
            zfstats.append(
                self._gen_fname("zfstat%d.nii" % (base_contrast + i), cwd=pth)
            )
        if fstats:
            outputs["fstats"] = fstats
            outputs["zfstats"] = zfstats
        return outputs
class MultipleRegressDesign(BaseInterface):
    """Generate multiple regression design

    .. note::
      FSL does not demean columns for higher level analysis.

    Please see `FSL documentation
    <http://www.fmrib.ox.ac.uk/fsl/feat5/detail.html#higher>`_
    for more details on model specification for higher level analysis.

    Examples
    --------

    >>> from nipype.interfaces.fsl import MultipleRegressDesign
    >>> model = MultipleRegressDesign()
    >>> model.inputs.contrasts = [['group mean', 'T',['reg1'],[1]]]
    >>> model.inputs.regressors = dict(reg1=[1, 1, 1], reg2=[2.,-4, 3])
    >>> model.run() # doctest: +SKIP

    """

    input_spec = MultipleRegressDesignInputSpec
    output_spec = MultipleRegressDesignOutputSpec

    def _run_interface(self, runtime):
        """Write FSL design files (design.mat/.con/.fts/.grp) into the cwd."""
        cwd = os.getcwd()
        # Regressors are emitted in sorted-key order so output is stable.
        regs = sorted(self.inputs.regressors.keys())
        nwaves = len(regs)
        npoints = len(self.inputs.regressors[regs[0]])
        ntcons = sum([1 for con in self.inputs.contrasts if con[1] == "T"])
        nfcons = sum([1 for con in self.inputs.contrasts if con[1] == "F"])
        # write mat file
        mat_txt = ["/NumWaves %d" % nwaves, "/NumPoints %d" % npoints]
        ppheights = []
        for reg in regs:
            maxreg = np.max(self.inputs.regressors[reg])
            minreg = np.min(self.inputs.regressors[reg])
            # PPheight: the regressor's peak-to-peak height if it crosses
            # zero, else its largest magnitude.
            if np.sign(maxreg) == np.sign(minreg):
                regheight = max([abs(minreg), abs(maxreg)])
            else:
                regheight = abs(maxreg - minreg)
            ppheights.append("%e" % regheight)
        mat_txt += ["/PPheights " + " ".join(ppheights)]
        mat_txt += ["", "/Matrix"]
        for cidx in range(npoints):
            mat_txt.append(
                " ".join(["%e" % self.inputs.regressors[key][cidx] for key in regs])
            )
        mat_txt = "\n".join(mat_txt) + "\n"
        # write t-con file
        con_txt = []
        counter = 0
        # tconmap: index into inputs.contrasts -> T-contrast ordinal.
        tconmap = {}
        for conidx, con in enumerate(self.inputs.contrasts):
            if con[1] == "T":
                tconmap[conidx] = counter
                counter += 1
                con_txt += ["/ContrastName%d %s" % (counter, con[0])]
        con_txt += [
            "/NumWaves %d" % nwaves,
            "/NumContrasts %d" % ntcons,
            "/PPheights %s" % " ".join(["%e" % 1 for i in range(counter)]),
            "/RequiredEffect %s" % " ".join(["%.3f" % 100 for i in range(counter)]),
            "",
            "/Matrix",
        ]
        for idx in sorted(tconmap.keys()):
            convals = np.zeros((nwaves, 1))
            for regidx, reg in enumerate(self.inputs.contrasts[idx][2]):
                convals[regs.index(reg)] = self.inputs.contrasts[idx][3][regidx]
            con_txt.append(" ".join(["%e" % val for val in convals]))
        con_txt = "\n".join(con_txt) + "\n"
        # write f-con file
        fcon_txt = ""
        if nfcons:
            fcon_txt = [
                "/NumWaves %d" % ntcons,
                "/NumContrasts %d" % nfcons,
                "",
                "/Matrix",
            ]
            for conidx, con in enumerate(self.inputs.contrasts):
                if con[1] == "F":
                    convals = np.zeros((ntcons, 1))
                    # con[2] holds previously-defined T-contrast tuples; mark
                    # each referenced T-contrast with a 1 in the F row.
                    for tcon in con[2]:
                        convals[tconmap[self.inputs.contrasts.index(tcon)]] = 1
                    fcon_txt.append(" ".join(["%d" % val for val in convals]))
            fcon_txt = "\n".join(fcon_txt) + "\n"
        # write group file
        grp_txt = ["/NumWaves 1", "/NumPoints %d" % npoints, "", "/Matrix"]
        for i in range(npoints):
            if isdefined(self.inputs.groups):
                grp_txt += ["%d" % self.inputs.groups[i]]
            else:
                grp_txt += ["1"]
        grp_txt = "\n".join(grp_txt) + "\n"
        txt = {
            "design.mat": mat_txt,
            "design.con": con_txt,
            "design.fts": fcon_txt,
            "design.grp": grp_txt,
        }
        # write design files
        for key, val in list(txt.items()):
            # Skip the F-contrast file when no F-contrasts were specified.
            if ("fts" in key) and (nfcons == 0):
                continue
            filename = key.replace("_", ".")
            # ``with`` guarantees the file is flushed and closed even if
            # the write raises.
            with open(os.path.join(cwd, filename), "wt") as f:
                f.write(val)
        return runtime

    def _list_outputs(self):
        """Return absolute paths of the generated design files."""
        outputs = self._outputs().get()
        nfcons = sum([1 for con in self.inputs.contrasts if con[1] == "F"])
        for field in list(outputs.keys()):
            if ("fts" in field) and (nfcons == 0):
                continue
            outputs[field] = os.path.join(os.getcwd(), field.replace("_", "."))
        return outputs
For more detail on the spatial mixture modelling see Mixture Models with Adaptive Spatial Regularisation for Segmentation with an Application to FMRI Data; Woolrich, M., Behrens, T., Beckmann, C., and Smith, S.; IEEE Trans. Medical Imaging, 24(1):1-11, 2005. """ _cmd = "mm --ld=logdir" input_spec = SMMInputSpec output_spec = SMMOutputSpec def _list_outputs(self): outputs = self._outputs().get() # TODO get the true logdir from the stdout outputs["null_p_map"] = self._gen_fname(basename="w1_mean", cwd="logdir") outputs["activation_p_map"] = self._gen_fname(basename="w2_mean", cwd="logdir") if ( not isdefined(self.inputs.no_deactivation_class) or not self.inputs.no_deactivation_class ): outputs["deactivation_p_map"] = self._gen_fname( basename="w3_mean", cwd="logdir" ) return outputs class MELODICInputSpec(FSLCommandInputSpec): in_files = InputMultiPath( File(exists=True), argstr="-i %s", mandatory=True, position=0, desc="input file names (either single file name or a list)", sep=",", ) out_dir = Directory(argstr="-o %s", desc="output directory name", genfile=True) mask = File(exists=True, argstr="-m %s", desc="file name of mask for thresholding") no_mask = traits.Bool(argstr="--nomask", desc="switch off masking") update_mask = traits.Bool(argstr="--update_mask", desc="switch off mask updating") no_bet = traits.Bool(argstr="--nobet", desc="switch off BET") bg_threshold = traits.Float( argstr="--bgthreshold=%f", desc=( "brain/non-brain threshold used to mask non-brain voxels, as a " "percentage (only if --nobet selected)" ), ) dim = traits.Int( argstr="-d %d", desc=( "dimensionality reduction into #num dimensions (default: " "automatic estimation)" ), ) dim_est = traits.Str( argstr="--dimest=%s", desc=( "use specific dim. 
estimation technique: lap, " "bic, mdl, aic, mean (default: lap)" ), ) sep_whiten = traits.Bool(argstr="--sep_whiten", desc="switch on separate whitening") sep_vn = traits.Bool( argstr="--sep_vn", desc="switch off joined variance normalization" ) migp = traits.Bool(argstr="--migp", desc="switch on MIGP data reduction") migpN = traits.Int(argstr="--migpN %d", desc="number of internal Eigenmaps") migp_shuffle = traits.Bool( argstr="--migp_shuffle", desc="randomise MIGP file order (default: TRUE)" ) migp_factor = traits.Int( argstr="--migp_factor %d", desc="Internal Factor of mem-threshold relative to number of Eigenmaps (default: 2)", ) num_ICs = traits.Int( argstr="-n %d", desc="number of IC's to extract (for deflation approach)" ) approach = traits.Str( argstr="-a %s", desc="approach for decomposition, 2D: defl, symm (default), 3D: tica " "(default), concat", ) non_linearity = traits.Str( argstr="--nl=%s", desc="nonlinearity: gauss, tanh, pow3, pow4" ) var_norm = traits.Bool(argstr="--vn", desc="switch off variance normalization") pbsc = traits.Bool( argstr="--pbsc", desc="switch off conversion to percent BOLD signal change" ) cov_weight = traits.Float( argstr="--covarweight=%f", desc=( "voxel-wise weights for the covariance matrix (e.g. 
" "segmentation information)" ), ) epsilon = traits.Float(argstr="--eps=%f", desc="minimum error change") epsilonS = traits.Float( argstr="--epsS=%f", desc="minimum error change for rank-1 approximation in TICA" ) maxit = traits.Int( argstr="--maxit=%d", desc="maximum number of iterations before restart" ) max_restart = traits.Int( argstr="--maxrestart=%d", desc="maximum number of restarts" ) mm_thresh = traits.Float( argstr="--mmthresh=%f", desc="threshold for Mixture Model based inference" ) no_mm = traits.Bool( argstr="--no_mm", desc="switch off mixture modelling on IC maps" ) ICs = File( exists=True, argstr="--ICs=%s", desc="filename of the IC components file for mixture modelling", ) mix = File( exists=True, argstr="--mix=%s", desc="mixing matrix for mixture modelling / filtering", ) smode = File( exists=True, argstr="--smode=%s", desc="matrix of session modes for report generation", ) rem_cmp = traits.List( traits.Int, argstr="-f %d", desc="component numbers to remove" ) report = traits.Bool(argstr="--report", desc="generate Melodic web report") bg_image = File( exists=True, argstr="--bgimage=%s", desc="specify background image for report (default: mean image)", ) tr_sec = traits.Float(argstr="--tr=%f", desc="TR in seconds") log_power = traits.Bool( argstr="--logPower", desc="calculate log of power for frequency spectrum" ) t_des = File( exists=True, argstr="--Tdes=%s", desc="design matrix across time-domain" ) t_con = File( exists=True, argstr="--Tcon=%s", desc="t-contrast matrix across time-domain" ) s_des = File( exists=True, argstr="--Sdes=%s", desc="design matrix across subject-domain" ) s_con = File( exists=True, argstr="--Scon=%s", desc="t-contrast matrix across subject-domain" ) out_all = traits.Bool(argstr="--Oall", desc="output everything") out_unmix = traits.Bool(argstr="--Ounmix", desc="output unmixing matrix") out_stats = traits.Bool( argstr="--Ostats", desc="output thresholded maps and probability maps" ) out_pca = traits.Bool(argstr="--Opca", 
desc="output PCA results") out_white = traits.Bool( argstr="--Owhite", desc="output whitening/dewhitening matrices" ) out_orig = traits.Bool(argstr="--Oorig", desc="output the original ICs") out_mean = traits.Bool(argstr="--Omean", desc="output mean volume") report_maps = traits.Str( argstr="--report_maps=%s", desc="control string for spatial map images (see slicer)", ) remove_deriv = traits.Bool( argstr="--remove_deriv", desc="removes every second entry in paradigm file (EV derivatives)", ) class MELODICOutputSpec(TraitedSpec): out_dir = Directory(exists=True) report_dir = Directory(exists=True) class MELODIC(FSLCommand): """Multivariate Exploratory Linear Optimised Decomposition into Independent Components Examples -------- >>> melodic_setup = MELODIC() >>> melodic_setup.inputs.approach = 'tica' >>> melodic_setup.inputs.in_files = ['functional.nii', 'functional2.nii', 'functional3.nii'] >>> melodic_setup.inputs.no_bet = True >>> melodic_setup.inputs.bg_threshold = 10 >>> melodic_setup.inputs.tr_sec = 1.5 >>> melodic_setup.inputs.mm_thresh = 0.5 >>> melodic_setup.inputs.out_stats = True >>> melodic_setup.inputs.t_des = 'timeDesign.mat' >>> melodic_setup.inputs.t_con = 'timeDesign.con' >>> melodic_setup.inputs.s_des = 'subjectDesign.mat' >>> melodic_setup.inputs.s_con = 'subjectDesign.con' >>> melodic_setup.inputs.out_dir = 'groupICA.out' >>> melodic_setup.cmdline 'melodic -i functional.nii,functional2.nii,functional3.nii -a tica --bgthreshold=10.000000 --mmthresh=0.500000 --nobet -o groupICA.out --Ostats --Scon=subjectDesign.con --Sdes=subjectDesign.mat --Tcon=timeDesign.con --Tdes=timeDesign.mat --tr=1.500000' >>> melodic_setup.run() # doctest: +SKIP """ input_spec = MELODICInputSpec output_spec = MELODICOutputSpec _cmd = "melodic" def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_dir): outputs["out_dir"] = os.path.abspath(self.inputs.out_dir) else: outputs["out_dir"] = self._gen_filename("out_dir") if 
isdefined(self.inputs.report) and self.inputs.report: outputs["report_dir"] = os.path.join(outputs["out_dir"], "report") return outputs def _gen_filename(self, name): if name == "out_dir": return os.getcwd() class SmoothEstimateInputSpec(FSLCommandInputSpec): dof = traits.Int( argstr="--dof=%d", mandatory=True, xor=["zstat_file"], desc="number of degrees of freedom", ) mask_file = File( argstr="--mask=%s", exists=True, mandatory=True, desc="brain mask volume" ) residual_fit_file = File( argstr="--res=%s", exists=True, requires=["dof"], desc="residual-fit image file" ) zstat_file = File( argstr="--zstat=%s", exists=True, xor=["dof"], desc="zstat image file" ) class SmoothEstimateOutputSpec(TraitedSpec): dlh = traits.Float(desc="smoothness estimate sqrt(det(Lambda))") volume = traits.Int(desc="number of voxels in mask") resels = traits.Float( desc="volume of resel, in voxels, defined as FWHM_x * FWHM_y * FWHM_z" ) class SmoothEstimate(FSLCommand): """Estimates the smoothness of an image Examples -------- >>> est = SmoothEstimate() >>> est.inputs.zstat_file = 'zstat1.nii.gz' >>> est.inputs.mask_file = 'mask.nii' >>> est.cmdline 'smoothest --mask=mask.nii --zstat=zstat1.nii.gz' """ input_spec = SmoothEstimateInputSpec output_spec = SmoothEstimateOutputSpec _cmd = "smoothest" def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() stdout = runtime.stdout.split("\n") outputs.dlh = float(stdout[0].split()[1]) outputs.volume = int(stdout[1].split()[1]) outputs.resels = float(stdout[2].split()[1]) return outputs class ClusterInputSpec(FSLCommandInputSpec): in_file = File(argstr="--in=%s", mandatory=True, exists=True, desc="input volume") threshold = traits.Float( argstr="--thresh=%.10f", mandatory=True, desc="threshold for input volume" ) out_index_file = traits.Either( traits.Bool, File, argstr="--oindex=%s", desc="output of cluster index (in size order)", hash_files=False, ) out_threshold_file = traits.Either( traits.Bool, File, 
argstr="--othresh=%s", desc="thresholded image", hash_files=False, ) out_localmax_txt_file = traits.Either( traits.Bool, File, argstr="--olmax=%s", desc="local maxima text file", hash_files=False, ) out_localmax_vol_file = traits.Either( traits.Bool, File, argstr="--olmaxim=%s", desc="output of local maxima volume", hash_files=False, ) out_size_file = traits.Either( traits.Bool, File, argstr="--osize=%s", desc="filename for output of size image", hash_files=False, ) out_max_file = traits.Either( traits.Bool, File, argstr="--omax=%s", desc="filename for output of max image", hash_files=False, ) out_mean_file = traits.Either( traits.Bool, File, argstr="--omean=%s", desc="filename for output of mean image", hash_files=False, ) out_pval_file = traits.Either( traits.Bool, File, argstr="--opvals=%s", desc="filename for image output of log pvals", hash_files=False, ) pthreshold = traits.Float( argstr="--pthresh=%.10f", requires=["dlh", "volume"], desc="p-threshold for clusters", ) peak_distance = traits.Float( argstr="--peakdist=%.10f", desc="minimum distance between local maxima/minima, in mm (default 0)", ) cope_file = File(argstr="--cope=%s", desc="cope volume") volume = traits.Int(argstr="--volume=%d", desc="number of voxels in the mask") dlh = traits.Float( argstr="--dlh=%.10f", desc="smoothness estimate = sqrt(det(Lambda))" ) fractional = traits.Bool( False, usedefault=True, argstr="--fractional", desc="interprets the threshold as a fraction of the robust range", ) connectivity = traits.Int( argstr="--connectivity=%d", desc="the connectivity of voxels (default 26)" ) use_mm = traits.Bool( False, usedefault=True, argstr="--mm", desc="use mm, not voxel, coordinates" ) find_min = traits.Bool( False, usedefault=True, argstr="--min", desc="find minima instead of maxima" ) no_table = traits.Bool( False, usedefault=True, argstr="--no_table", desc="suppresses printing of the table info", ) minclustersize = traits.Bool( False, usedefault=True, argstr="--minclustersize", 
desc="prints out minimum significant cluster size", ) xfm_file = File( argstr="--xfm=%s", desc=( "filename for Linear: input->standard-space " "transform. Non-linear: input->highres transform" ), ) std_space_file = File( argstr="--stdvol=%s", desc="filename for standard-space volume" ) num_maxima = traits.Int(argstr="--num=%d", desc="no of local maxima to report") warpfield_file = File(argstr="--warpvol=%s", desc="file contining warpfield") class ClusterOutputSpec(TraitedSpec): index_file = File(desc="output of cluster index (in size order)") threshold_file = File(desc="thresholded image") localmax_txt_file = File(desc="local maxima text file") localmax_vol_file = File(desc="output of local maxima volume") size_file = File(desc="filename for output of size image") max_file = File(desc="filename for output of max image") mean_file = File(desc="filename for output of mean image") pval_file = File(desc="filename for image output of log pvals") class Cluster(FSLCommand): """Uses FSL cluster to perform clustering on statistical output Examples -------- >>> cl = Cluster() >>> cl.inputs.threshold = 2.3 >>> cl.inputs.in_file = 'zstat1.nii.gz' >>> cl.inputs.out_localmax_txt_file = 'stats.txt' >>> cl.inputs.use_mm = True >>> cl.cmdline 'cluster --in=zstat1.nii.gz --olmax=stats.txt --thresh=2.3000000000 --mm' """ input_spec = ClusterInputSpec output_spec = ClusterOutputSpec _cmd = "cluster" filemap = { "out_index_file": "index", "out_threshold_file": "threshold", "out_localmax_txt_file": "localmax.txt", "out_localmax_vol_file": "localmax", "out_size_file": "size", "out_max_file": "max", "out_mean_file": "mean", "out_pval_file": "pval", } def _list_outputs(self): outputs = self.output_spec().get() for key, suffix in list(self.filemap.items()): outkey = key[4:] inval = getattr(self.inputs, key) if isdefined(inval): if isinstance(inval, bool): if inval: change_ext = True if suffix.endswith(".txt"): change_ext = False outputs[outkey] = self._gen_fname( self.inputs.in_file, 
suffix="_" + suffix, change_ext=change_ext, ) else: outputs[outkey] = os.path.abspath(inval) return outputs def _format_arg(self, name, spec, value): if name in list(self.filemap.keys()): if isinstance(value, bool): fname = self._list_outputs()[name[4:]] else: fname = value return spec.argstr % fname return super(Cluster, self)._format_arg(name, spec, value) class DualRegressionInputSpec(FSLCommandInputSpec): in_files = InputMultiPath( File(exists=True), argstr="%s", mandatory=True, position=-1, sep=" ", desc="List all subjects' preprocessed, standard-space 4D datasets", ) group_IC_maps_4D = File( exists=True, argstr="%s", mandatory=True, position=1, desc="4D image containing spatial IC maps (melodic_IC) from the " "whole-group ICA analysis", ) des_norm = traits.Bool( True, argstr="%i", position=2, usedefault=True, desc="Whether to variance-normalise the timecourses used as the " "stage-2 regressors; True is default and recommended", ) one_sample_group_mean = traits.Bool( argstr="-1", position=3, desc="perform 1-sample group-mean test instead of generic " "permutation test", ) design_file = File( exists=True, argstr="%s", position=3, desc="Design matrix for final cross-subject modelling with " "randomise", ) con_file = File( exists=True, argstr="%s", position=4, desc="Design contrasts for final cross-subject modelling with " "randomise", ) n_perm = traits.Int( argstr="%i", mandatory=True, position=5, desc="Number of permutations for randomise; set to 1 for just raw " "tstat output, set to 0 to not run randomise at all.", ) out_dir = Directory( "output", argstr="%s", usedefault=True, position=6, desc="This directory will be created to hold all output and logfiles", genfile=True, ) class DualRegressionOutputSpec(TraitedSpec): out_dir = Directory(exists=True) class DualRegression(FSLCommand): """Wrapper Script for Dual Regression Workflow Examples -------- >>> dual_regression = DualRegression() >>> dual_regression.inputs.in_files = ["functional.nii", 
"functional2.nii", "functional3.nii"] >>> dual_regression.inputs.group_IC_maps_4D = "allFA.nii" >>> dual_regression.inputs.des_norm = False >>> dual_regression.inputs.one_sample_group_mean = True >>> dual_regression.inputs.n_perm = 10 >>> dual_regression.inputs.out_dir = "my_output_directory" >>> dual_regression.cmdline 'dual_regression allFA.nii 0 -1 10 my_output_directory functional.nii functional2.nii functional3.nii' >>> dual_regression.run() # doctest: +SKIP """ input_spec = DualRegressionInputSpec output_spec = DualRegressionOutputSpec _cmd = "dual_regression" def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_dir): outputs["out_dir"] = os.path.abspath(self.inputs.out_dir) else: outputs["out_dir"] = self._gen_filename("out_dir") return outputs def _gen_filename(self, name): if name == "out_dir": return os.getcwd() class RandomiseInputSpec(FSLCommandInputSpec): in_file = File( exists=True, desc="4D input file", argstr="-i %s", position=0, mandatory=True ) base_name = traits.Str( "randomise", desc="the rootname that all generated files will have", argstr='-o "%s"', position=1, usedefault=True, ) design_mat = File( exists=True, desc="design matrix file", argstr="-d %s", position=2 ) tcon = File(exists=True, desc="t contrasts file", argstr="-t %s", position=3) fcon = File(exists=True, desc="f contrasts file", argstr="-f %s") mask = File(exists=True, desc="mask image", argstr="-m %s") x_block_labels = File( exists=True, desc="exchangeability block labels file", argstr="-e %s" ) demean = traits.Bool( desc="demean data temporally before model fitting", argstr="-D" ) one_sample_group_mean = traits.Bool( desc=( "perform 1-sample group-mean test instead of generic " "permutation test" ), argstr="-1", ) show_total_perms = traits.Bool( desc=("print out how many unique permutations would be generated " "and exit"), argstr="-q", ) show_info_parallel_mode = traits.Bool( desc="print out information required for parallel mode and exit", 
argstr="-Q" ) vox_p_values = traits.Bool( desc="output voxelwise (corrected and uncorrected) p-value images", argstr="-x" ) tfce = traits.Bool(desc="carry out Threshold-Free Cluster Enhancement", argstr="-T") tfce2D = traits.Bool( desc=("carry out Threshold-Free Cluster Enhancement with 2D " "optimisation"), argstr="--T2", ) f_only = traits.Bool(desc="calculate f-statistics only", argstr="--fonly") raw_stats_imgs = traits.Bool( desc="output raw ( unpermuted ) statistic images", argstr="-R" ) p_vec_n_dist_files = traits.Bool( desc="output permutation vector and null distribution text files", argstr="-P" ) num_perm = traits.Int( argstr="-n %d", desc="number of permutations (default 5000, set to 0 for exhaustive)", ) seed = traits.Int( argstr="--seed=%d", desc="specific integer seed for random number generator" ) var_smooth = traits.Int( argstr="-v %d", desc="use variance smoothing (std is in mm)" ) c_thresh = traits.Float( argstr="-c %.1f", desc="carry out cluster-based thresholding" ) cm_thresh = traits.Float( argstr="-C %.1f", desc="carry out cluster-mass-based thresholding" ) f_c_thresh = traits.Float(argstr="-F %.2f", desc="carry out f cluster thresholding") f_cm_thresh = traits.Float( argstr="-S %.2f", desc="carry out f cluster-mass thresholding" ) tfce_H = traits.Float( argstr="--tfce_H=%.2f", desc="TFCE height parameter (default=2)" ) tfce_E = traits.Float( argstr="--tfce_E=%.2f", desc="TFCE extent parameter (default=0.5)" ) tfce_C = traits.Float( argstr="--tfce_C=%.2f", desc="TFCE connectivity (6 or 26; default=6)" ) class RandomiseOutputSpec(TraitedSpec): tstat_files = traits.List(File(exists=True), desc="t contrast raw statistic") fstat_files = traits.List(File(exists=True), desc="f contrast raw statistic") t_p_files = traits.List( File(exists=True), desc="f contrast uncorrected p values files" ) f_p_files = traits.List( File(exists=True), desc="f contrast uncorrected p values files" ) t_corrected_p_files = traits.List( File(exists=True), desc="t contrast 
FWE (Family-wise error) corrected p values files", ) f_corrected_p_files = traits.List( File(exists=True), desc="f contrast FWE (Family-wise error) corrected p values files", ) class Randomise(FSLCommand): """FSL Randomise: feeds the 4D projected FA data into GLM modelling and thresholding in order to find voxels which correlate with your model Example ------- >>> import nipype.interfaces.fsl as fsl >>> rand = fsl.Randomise(in_file='allFA.nii', mask = 'mask.nii', tcon='design.con', design_mat='design.mat') >>> rand.cmdline 'randomise -i allFA.nii -o "randomise" -d design.mat -t design.con -m mask.nii' """ _cmd = "randomise" input_spec = RandomiseInputSpec output_spec = RandomiseOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["tstat_files"] = glob( self._gen_fname("%s_tstat*.nii" % self.inputs.base_name) ) outputs["fstat_files"] = glob( self._gen_fname("%s_fstat*.nii" % self.inputs.base_name) ) prefix = False if self.inputs.tfce or self.inputs.tfce2D: prefix = "tfce" elif self.inputs.vox_p_values: prefix = "vox" elif self.inputs.c_thresh or self.inputs.f_c_thresh: prefix = "clustere" elif self.inputs.cm_thresh or self.inputs.f_cm_thresh: prefix = "clusterm" if prefix: outputs["t_p_files"] = glob( self._gen_fname("%s_%s_p_tstat*" % (self.inputs.base_name, prefix)) ) outputs["t_corrected_p_files"] = glob( self._gen_fname( "%s_%s_corrp_tstat*.nii" % (self.inputs.base_name, prefix) ) ) outputs["f_p_files"] = glob( self._gen_fname("%s_%s_p_fstat*.nii" % (self.inputs.base_name, prefix)) ) outputs["f_corrected_p_files"] = glob( self._gen_fname( "%s_%s_corrp_fstat*.nii" % (self.inputs.base_name, prefix) ) ) return outputs class GLMInputSpec(FSLCommandInputSpec): in_file = File( exists=True, argstr="-i %s", mandatory=True, position=1, desc="input file name (text matrix or 3D/4D image file)", ) out_file = File( name_template="%s_glm", argstr="-o %s", position=3, desc=("filename for GLM parameter estimates" + " (GLM betas)"), 
name_source="in_file", keep_extension=True, ) design = File( exists=True, argstr="-d %s", mandatory=True, position=2, desc=( "file name of the GLM design matrix (text time" + " courses for temporal regression or an image" + " file for spatial regression)" ), ) contrasts = File( exists=True, argstr="-c %s", desc=("matrix of t-statics contrasts") ) mask = File( exists=True, argstr="-m %s", desc=("mask image file name if input is image") ) dof = traits.Int(argstr="--dof=%d", desc=("set degrees of freedom" + " explicitly")) des_norm = traits.Bool( argstr="--des_norm", desc=( "switch on normalization of the design" + " matrix columns to unit std deviation" ), ) dat_norm = traits.Bool( argstr="--dat_norm", desc=( "switch on normalization of the data time series to unit std " "deviation" ), ) var_norm = traits.Bool( argstr="--vn", desc=("perform MELODIC variance-normalisation on data") ) demean = traits.Bool( argstr="--demean", desc=("switch on demeaining of design and data") ) out_cope = File( argstr="--out_cope=%s", desc="output file name for COPE (either as txt or image" ) out_z_name = File( argstr="--out_z=%s", desc="output file name for Z-stats (either as txt or image" ) out_t_name = File( argstr="--out_t=%s", desc="output file name for t-stats (either as txt or image" ) out_p_name = File( argstr="--out_p=%s", desc=( "output file name for p-values of Z-stats (either as text file " "or image)" ), ) out_f_name = File( argstr="--out_f=%s", desc="output file name for F-value of full model fit" ) out_pf_name = File( argstr="--out_pf=%s", desc="output file name for p-value for full model fit" ) out_res_name = File(argstr="--out_res=%s", desc="output file name for residuals") out_varcb_name = File( argstr="--out_varcb=%s", desc="output file name for variance of COPEs" ) out_sigsq_name = File( argstr="--out_sigsq=%s", desc=("output file name for residual noise variance sigma-square"), ) out_data_name = File( argstr="--out_data=%s", desc="output file name for pre-processed 
data" ) out_vnscales_name = File( argstr="--out_vnscales=%s", desc=("output file name for scaling factors for variance " "normalisation"), ) class GLMOutputSpec(TraitedSpec): out_file = File(exists=True, desc=("file name of GLM parameters (if generated)")) out_cope = OutputMultiPath( File(exists=True), desc=("output file name for COPEs (either as text file or image)"), ) out_z = OutputMultiPath( File(exists=True), desc=("output file name for COPEs (either as text file or image)"), ) out_t = OutputMultiPath( File(exists=True), desc=("output file name for t-stats (either as text file or image)"), ) out_p = OutputMultiPath( File(exists=True), desc=( "output file name for p-values of Z-stats (either as text file " "or image)" ), ) out_f = OutputMultiPath( File(exists=True), desc=("output file name for F-value of full model fit") ) out_pf = OutputMultiPath( File(exists=True), desc=("output file name for p-value for full model fit") ) out_res = OutputMultiPath(File(exists=True), desc="output file name for residuals") out_varcb = OutputMultiPath( File(exists=True), desc="output file name for variance of COPEs" ) out_sigsq = OutputMultiPath( File(exists=True), desc=("output file name for residual noise variance sigma-square"), ) out_data = OutputMultiPath( File(exists=True), desc="output file for preprocessed data" ) out_vnscales = OutputMultiPath( File(exists=True), desc=("output file name for scaling factors for variance " "normalisation"), ) class GLM(FSLCommand): """ FSL GLM: Example ------- >>> import nipype.interfaces.fsl as fsl >>> glm = fsl.GLM(in_file='functional.nii', design='maps.nii', output_type='NIFTI') >>> glm.cmdline 'fsl_glm -i functional.nii -d maps.nii -o functional_glm.nii' """ _cmd = "fsl_glm" input_spec = GLMInputSpec output_spec = GLMOutputSpec def _list_outputs(self): outputs = super(GLM, self)._list_outputs() if isdefined(self.inputs.out_cope): outputs["out_cope"] = os.path.abspath(self.inputs.out_cope) if isdefined(self.inputs.out_z_name): 
outputs["out_z"] = os.path.abspath(self.inputs.out_z_name) if isdefined(self.inputs.out_t_name): outputs["out_t"] = os.path.abspath(self.inputs.out_t_name) if isdefined(self.inputs.out_p_name): outputs["out_p"] = os.path.abspath(self.inputs.out_p_name) if isdefined(self.inputs.out_f_name): outputs["out_f"] = os.path.abspath(self.inputs.out_f_name) if isdefined(self.inputs.out_pf_name): outputs["out_pf"] = os.path.abspath(self.inputs.out_pf_name) if isdefined(self.inputs.out_res_name): outputs["out_res"] = os.path.abspath(self.inputs.out_res_name) if isdefined(self.inputs.out_varcb_name): outputs["out_varcb"] = os.path.abspath(self.inputs.out_varcb_name) if isdefined(self.inputs.out_sigsq_name): outputs["out_sigsq"] = os.path.abspath(self.inputs.out_sigsq_name) if isdefined(self.inputs.out_data_name): outputs["out_data"] = os.path.abspath(self.inputs.out_data_name) if isdefined(self.inputs.out_vnscales_name): outputs["out_vnscales"] = os.path.abspath(self.inputs.out_vnscales_name) return outputs def load_template(name): """Load a template from the model_templates directory Parameters ---------- name : str The name of the file to load Returns ------- template : string.Template """ from pkg_resources import resource_filename as pkgrf full_fname = pkgrf( "nipype", os.path.join("interfaces", "fsl", "model_templates", name) ) with open(full_fname) as template_file: template = Template(template_file.read()) return template nipype-1.7.0/nipype/interfaces/fsl/model_templates/000077500000000000000000000000001413403311400224065ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_contrast_element.tcl000066400000000000000000000001351413403311400274560ustar00rootroot00000000000000# Real contrast_$ctype vector $cnum element $element set fmri(con_$ctype$cnum.$element) $val nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_contrast_ftest_element.tcl000066400000000000000000000001121413403311400306560ustar00rootroot00000000000000# F-test $cnum 
element $element set fmri(ftest_$ctype$cnum.$element) $val nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_contrast_header.tcl000066400000000000000000000002061413403311400272540ustar00rootroot00000000000000# Contrast & F-tests mode # real : control real EVs # orig : control original EVs set fmri(con_mode_old) orig set fmri(con_mode) orig nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_contrast_prolog.tcl000066400000000000000000000002261413403311400273300ustar00rootroot00000000000000# Display images for contrast_$ctype $cnum set fmri(conpic_$ctype.$cnum) 1 # Title for contrast_$ctype $cnum set fmri(conname_$ctype.$cnum) "$cname" nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_contrastmask_element.tcl000066400000000000000000000001371413403311400303340ustar00rootroot00000000000000# Mask real contrast/F-test $c1 with real contrast/F-test $c2? set fmri(conmask${c1}_${c2}) 0 nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_contrastmask_footer.tcl000066400000000000000000000000651413403311400302010ustar00rootroot00000000000000# Do contrast masking at all? set fmri(conmask1_1) 0 nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_contrastmask_header.tcl000066400000000000000000000001271413403311400301320ustar00rootroot00000000000000# Contrast masking - use >0 instead of thresholding? 
set fmri(conmask_zerothresh_yn) 0 nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_contrasts.tcl000066400000000000000000000377351413403311400261500ustar00rootroot00000000000000# Contrast & F-tests mode # real : control real EVs # orig : control original EVs set fmri(con_mode_old) orig set fmri(con_mode) orig ### Needs iteration # Display images for contrast_real 1 set fmri(conpic_real.1) 1 # Title for contrast_real 1 set fmri(conname_real.1) "left>right" # Real contrast_real vector 1 element 1 set fmri(con_real1.1) 1 # Real contrast_real vector 1 element 2 set fmri(con_real1.2) -1.0 # Real contrast_real vector 1 element 3 set fmri(con_real1.3) 1.0 # Real contrast_real vector 1 element 4 set fmri(con_real1.4) -1.0 # Real contrast_real vector 1 element 5 set fmri(con_real1.5) 1.0 # Real contrast_real vector 1 element 6 set fmri(con_real1.6) -1.0 # Real contrast_real vector 1 element 7 set fmri(con_real1.7) 1.0 # Real contrast_real vector 1 element 8 set fmri(con_real1.8) -1.0 # Display images for contrast_real 2 set fmri(conpic_real.2) 1 # Title for contrast_real 2 set fmri(conname_real.2) "visual>vibe" # Real contrast_real vector 2 element 1 set fmri(con_real2.1) -1.0 # Real contrast_real vector 2 element 2 set fmri(con_real2.2) -1.0 # Real contrast_real vector 2 element 3 set fmri(con_real2.3) -1.0 # Real contrast_real vector 2 element 4 set fmri(con_real2.4) -1.0 # Real contrast_real vector 2 element 5 set fmri(con_real2.5) 1.0 # Real contrast_real vector 2 element 6 set fmri(con_real2.6) 1.0 # Real contrast_real vector 2 element 7 set fmri(con_real2.7) 1.0 # Real contrast_real vector 2 element 8 set fmri(con_real2.8) 1.0 # Display images for contrast_real 3 set fmri(conpic_real.3) 1 # Title for contrast_real 3 set fmri(conname_real.3) "seq>all" # Real contrast_real vector 3 element 1 set fmri(con_real3.1) -1.0 # Real contrast_real vector 3 element 2 set fmri(con_real3.2) -1.0 # Real contrast_real vector 3 element 3 set fmri(con_real3.3) 1.0 # Real 
contrast_real vector 3 element 4 set fmri(con_real3.4) 1.0 # Real contrast_real vector 3 element 5 set fmri(con_real3.5) -1.0 # Real contrast_real vector 3 element 6 set fmri(con_real3.6) -1.0 # Real contrast_real vector 3 element 7 set fmri(con_real3.7) 1.0 # Real contrast_real vector 3 element 8 set fmri(con_real3.8) 1.0 # Display images for contrast_real 4 set fmri(conpic_real.4) 1 # Title for contrast_real 4 set fmri(conname_real.4) "visual seq>all" # Real contrast_real vector 4 element 1 set fmri(con_real4.1) 0 # Real contrast_real vector 4 element 2 set fmri(con_real4.2) 0 # Real contrast_real vector 4 element 3 set fmri(con_real4.3) 0 # Real contrast_real vector 4 element 4 set fmri(con_real4.4) 0 # Real contrast_real vector 4 element 5 set fmri(con_real4.5) -1.0 # Real contrast_real vector 4 element 6 set fmri(con_real4.6) -1.0 # Real contrast_real vector 4 element 7 set fmri(con_real4.7) 1.0 # Real contrast_real vector 4 element 8 set fmri(con_real4.8) 1.0 # Display images for contrast_real 5 set fmri(conpic_real.5) 1 # Title for contrast_real 5 set fmri(conname_real.5) "vibe seq>all" # Real contrast_real vector 5 element 1 set fmri(con_real5.1) -1.0 # Real contrast_real vector 5 element 2 set fmri(con_real5.2) -1.0 # Real contrast_real vector 5 element 3 set fmri(con_real5.3) 1.0 # Real contrast_real vector 5 element 4 set fmri(con_real5.4) 1.0 # Real contrast_real vector 5 element 5 set fmri(con_real5.5) 0 # Real contrast_real vector 5 element 6 set fmri(con_real5.6) 0 # Real contrast_real vector 5 element 7 set fmri(con_real5.7) 0 # Real contrast_real vector 5 element 8 set fmri(con_real5.8) 0 # Display images for contrast_real 6 set fmri(conpic_real.6) 1 # Title for contrast_real 6 set fmri(conname_real.6) "visual seq>vibe seq" # Real contrast_real vector 6 element 1 set fmri(con_real6.1) 0 # Real contrast_real vector 6 element 2 set fmri(con_real6.2) 0 # Real contrast_real vector 6 element 3 set fmri(con_real6.3) -1.0 # Real contrast_real vector 6 
element 4 set fmri(con_real6.4) -1.0 # Real contrast_real vector 6 element 5 set fmri(con_real6.5) 0 # Real contrast_real vector 6 element 6 set fmri(con_real6.6) 0 # Real contrast_real vector 6 element 7 set fmri(con_real6.7) 1.0 # Real contrast_real vector 6 element 8 set fmri(con_real6.8) 1.0 # Display images for contrast_real 7 set fmri(conpic_real.7) 1 # Title for contrast_real 7 set fmri(conname_real.7) "visual all>vibe all" # Real contrast_real vector 7 element 1 set fmri(con_real7.1) -1.0 # Real contrast_real vector 7 element 2 set fmri(con_real7.2) -1.0 # Real contrast_real vector 7 element 3 set fmri(con_real7.3) 0 # Real contrast_real vector 7 element 4 set fmri(con_real7.4) 0 # Real contrast_real vector 7 element 5 set fmri(con_real7.5) 1.0 # Real contrast_real vector 7 element 6 set fmri(con_real7.6) 1.0 # Real contrast_real vector 7 element 7 set fmri(con_real7.7) 0 # Real contrast_real vector 7 element 8 set fmri(con_real7.8) 0 # Display images for contrast_real 8 set fmri(conpic_real.8) 1 # Title for contrast_real 8 set fmri(conname_real.8) "mode x complexity" # Real contrast_real vector 8 element 1 set fmri(con_real8.1) -1.0 # Real contrast_real vector 8 element 2 set fmri(con_real8.2) -1.0 # Real contrast_real vector 8 element 3 set fmri(con_real8.3) 1.0 # Real contrast_real vector 8 element 4 set fmri(con_real8.4) 1.0 # Real contrast_real vector 8 element 5 set fmri(con_real8.5) 1.0 # Real contrast_real vector 8 element 6 set fmri(con_real8.6) 1.0 # Real contrast_real vector 8 element 7 set fmri(con_real8.7) -1.0 # Real contrast_real vector 8 element 8 set fmri(con_real8.8) -1.0 # Display images for contrast_orig 1 set fmri(conpic_orig.1) 1 # Title for contrast_orig 1 set fmri(conname_orig.1) "left>right" # Real contrast_orig vector 1 element 1 set fmri(con_orig1.1) 1 # Real contrast_orig vector 1 element 2 set fmri(con_orig1.2) -1.0 # Real contrast_orig vector 1 element 3 set fmri(con_orig1.3) 1.0 # Real contrast_orig vector 1 element 4 set 
fmri(con_orig1.4) -1.0 # Real contrast_orig vector 1 element 5 set fmri(con_orig1.5) 1.0 # Real contrast_orig vector 1 element 6 set fmri(con_orig1.6) -1.0 # Real contrast_orig vector 1 element 7 set fmri(con_orig1.7) 1.0 # Real contrast_orig vector 1 element 8 set fmri(con_orig1.8) -1.0 # Display images for contrast_orig 2 set fmri(conpic_orig.2) 1 # Title for contrast_orig 2 set fmri(conname_orig.2) "visual>vibe" # Real contrast_orig vector 2 element 1 set fmri(con_orig2.1) -1.0 # Real contrast_orig vector 2 element 2 set fmri(con_orig2.2) -1.0 # Real contrast_orig vector 2 element 3 set fmri(con_orig2.3) -1.0 # Real contrast_orig vector 2 element 4 set fmri(con_orig2.4) -1.0 # Real contrast_orig vector 2 element 5 set fmri(con_orig2.5) 1.0 # Real contrast_orig vector 2 element 6 set fmri(con_orig2.6) 1.0 # Real contrast_orig vector 2 element 7 set fmri(con_orig2.7) 1.0 # Real contrast_orig vector 2 element 8 set fmri(con_orig2.8) 1.0 # Display images for contrast_orig 3 set fmri(conpic_orig.3) 1 # Title for contrast_orig 3 set fmri(conname_orig.3) "seq>all" # Real contrast_orig vector 3 element 1 set fmri(con_orig3.1) -1.0 # Real contrast_orig vector 3 element 2 set fmri(con_orig3.2) -1.0 # Real contrast_orig vector 3 element 3 set fmri(con_orig3.3) 1.0 # Real contrast_orig vector 3 element 4 set fmri(con_orig3.4) 1.0 # Real contrast_orig vector 3 element 5 set fmri(con_orig3.5) -1.0 # Real contrast_orig vector 3 element 6 set fmri(con_orig3.6) -1.0 # Real contrast_orig vector 3 element 7 set fmri(con_orig3.7) 1.0 # Real contrast_orig vector 3 element 8 set fmri(con_orig3.8) 1.0 # Display images for contrast_orig 4 set fmri(conpic_orig.4) 1 # Title for contrast_orig 4 set fmri(conname_orig.4) "visual seq>all" # Real contrast_orig vector 4 element 1 set fmri(con_orig4.1) 0 # Real contrast_orig vector 4 element 2 set fmri(con_orig4.2) 0 # Real contrast_orig vector 4 element 3 set fmri(con_orig4.3) 0 # Real contrast_orig vector 4 element 4 set fmri(con_orig4.4) 0 # 
Real contrast_orig vector 4 element 5 set fmri(con_orig4.5) -1.0 # Real contrast_orig vector 4 element 6 set fmri(con_orig4.6) -1.0 # Real contrast_orig vector 4 element 7 set fmri(con_orig4.7) 1.0 # Real contrast_orig vector 4 element 8 set fmri(con_orig4.8) 1.0 # Display images for contrast_orig 5 set fmri(conpic_orig.5) 1 # Title for contrast_orig 5 set fmri(conname_orig.5) "vibe seq>all" # Real contrast_orig vector 5 element 1 set fmri(con_orig5.1) -1.0 # Real contrast_orig vector 5 element 2 set fmri(con_orig5.2) -1.0 # Real contrast_orig vector 5 element 3 set fmri(con_orig5.3) 1.0 # Real contrast_orig vector 5 element 4 set fmri(con_orig5.4) 1.0 # Real contrast_orig vector 5 element 5 set fmri(con_orig5.5) 0 # Real contrast_orig vector 5 element 6 set fmri(con_orig5.6) 0 # Real contrast_orig vector 5 element 7 set fmri(con_orig5.7) 0 # Real contrast_orig vector 5 element 8 set fmri(con_orig5.8) 0 # Display images for contrast_orig 6 set fmri(conpic_orig.6) 1 # Title for contrast_orig 6 set fmri(conname_orig.6) "visual seq>vibe seq" # Real contrast_orig vector 6 element 1 set fmri(con_orig6.1) 0 # Real contrast_orig vector 6 element 2 set fmri(con_orig6.2) 0 # Real contrast_orig vector 6 element 3 set fmri(con_orig6.3) -1.0 # Real contrast_orig vector 6 element 4 set fmri(con_orig6.4) -1.0 # Real contrast_orig vector 6 element 5 set fmri(con_orig6.5) 0 # Real contrast_orig vector 6 element 6 set fmri(con_orig6.6) 0 # Real contrast_orig vector 6 element 7 set fmri(con_orig6.7) 1.0 # Real contrast_orig vector 6 element 8 set fmri(con_orig6.8) 1.0 # Display images for contrast_orig 7 set fmri(conpic_orig.7) 1 # Title for contrast_orig 7 set fmri(conname_orig.7) "visual all>vibe all" # Real contrast_orig vector 7 element 1 set fmri(con_orig7.1) -1.0 # Real contrast_orig vector 7 element 2 set fmri(con_orig7.2) -1.0 # Real contrast_orig vector 7 element 3 set fmri(con_orig7.3) 0 # Real contrast_orig vector 7 element 4 set fmri(con_orig7.4) 0 # Real contrast_orig 
vector 7 element 5 set fmri(con_orig7.5) 1.0 # Real contrast_orig vector 7 element 6 set fmri(con_orig7.6) 1.0 # Real contrast_orig vector 7 element 7 set fmri(con_orig7.7) 0 # Real contrast_orig vector 7 element 8 set fmri(con_orig7.8) 0 # Display images for contrast_orig 8 set fmri(conpic_orig.8) 1 # Title for contrast_orig 8 set fmri(conname_orig.8) "mode x complexity" # Real contrast_orig vector 8 element 1 set fmri(con_orig8.1) -1.0 # Real contrast_orig vector 8 element 2 set fmri(con_orig8.2) -1.0 # Real contrast_orig vector 8 element 3 set fmri(con_orig8.3) 1.0 # Real contrast_orig vector 8 element 4 set fmri(con_orig8.4) 1.0 # Real contrast_orig vector 8 element 5 set fmri(con_orig8.5) 1.0 # Real contrast_orig vector 8 element 6 set fmri(con_orig8.6) 1.0 # Real contrast_orig vector 8 element 7 set fmri(con_orig8.7) -1.0 # Real contrast_orig vector 8 element 8 set fmri(con_orig8.8) -1.0 ### This is fixed # Contrast masking - use >0 instead of thresholding? set fmri(conmask_zerothresh_yn) 0 ### These are set for the full combo of contrasts - needs iteration # Mask real contrast/F-test 1 with real contrast/F-test 2? set fmri(conmask1_2) 0 # Mask real contrast/F-test 1 with real contrast/F-test 3? set fmri(conmask1_3) 0 # Mask real contrast/F-test 1 with real contrast/F-test 4? set fmri(conmask1_4) 0 # Mask real contrast/F-test 1 with real contrast/F-test 5? set fmri(conmask1_5) 0 # Mask real contrast/F-test 1 with real contrast/F-test 6? set fmri(conmask1_6) 0 # Mask real contrast/F-test 1 with real contrast/F-test 7? set fmri(conmask1_7) 0 # Mask real contrast/F-test 1 with real contrast/F-test 8? set fmri(conmask1_8) 0 # Mask real contrast/F-test 2 with real contrast/F-test 1? set fmri(conmask2_1) 0 # Mask real contrast/F-test 2 with real contrast/F-test 3? set fmri(conmask2_3) 0 # Mask real contrast/F-test 2 with real contrast/F-test 4? set fmri(conmask2_4) 0 # Mask real contrast/F-test 2 with real contrast/F-test 5? 
set fmri(conmask2_5) 0 # Mask real contrast/F-test 2 with real contrast/F-test 6? set fmri(conmask2_6) 0 # Mask real contrast/F-test 2 with real contrast/F-test 7? set fmri(conmask2_7) 0 # Mask real contrast/F-test 2 with real contrast/F-test 8? set fmri(conmask2_8) 0 # Mask real contrast/F-test 3 with real contrast/F-test 1? set fmri(conmask3_1) 0 # Mask real contrast/F-test 3 with real contrast/F-test 2? set fmri(conmask3_2) 0 # Mask real contrast/F-test 3 with real contrast/F-test 4? set fmri(conmask3_4) 0 # Mask real contrast/F-test 3 with real contrast/F-test 5? set fmri(conmask3_5) 0 # Mask real contrast/F-test 3 with real contrast/F-test 6? set fmri(conmask3_6) 0 # Mask real contrast/F-test 3 with real contrast/F-test 7? set fmri(conmask3_7) 0 # Mask real contrast/F-test 3 with real contrast/F-test 8? set fmri(conmask3_8) 0 # Mask real contrast/F-test 4 with real contrast/F-test 1? set fmri(conmask4_1) 0 # Mask real contrast/F-test 4 with real contrast/F-test 2? set fmri(conmask4_2) 0 # Mask real contrast/F-test 4 with real contrast/F-test 3? set fmri(conmask4_3) 0 # Mask real contrast/F-test 4 with real contrast/F-test 5? set fmri(conmask4_5) 0 # Mask real contrast/F-test 4 with real contrast/F-test 6? set fmri(conmask4_6) 0 # Mask real contrast/F-test 4 with real contrast/F-test 7? set fmri(conmask4_7) 0 # Mask real contrast/F-test 4 with real contrast/F-test 8? set fmri(conmask4_8) 0 # Mask real contrast/F-test 5 with real contrast/F-test 1? set fmri(conmask5_1) 0 # Mask real contrast/F-test 5 with real contrast/F-test 2? set fmri(conmask5_2) 0 # Mask real contrast/F-test 5 with real contrast/F-test 3? set fmri(conmask5_3) 0 # Mask real contrast/F-test 5 with real contrast/F-test 4? set fmri(conmask5_4) 0 # Mask real contrast/F-test 5 with real contrast/F-test 6? set fmri(conmask5_6) 0 # Mask real contrast/F-test 5 with real contrast/F-test 7? set fmri(conmask5_7) 0 # Mask real contrast/F-test 5 with real contrast/F-test 8? 
set fmri(conmask5_8) 0 # Mask real contrast/F-test 6 with real contrast/F-test 1? set fmri(conmask6_1) 0 # Mask real contrast/F-test 6 with real contrast/F-test 2? set fmri(conmask6_2) 0 # Mask real contrast/F-test 6 with real contrast/F-test 3? set fmri(conmask6_3) 0 # Mask real contrast/F-test 6 with real contrast/F-test 4? set fmri(conmask6_4) 0 # Mask real contrast/F-test 6 with real contrast/F-test 5? set fmri(conmask6_5) 0 # Mask real contrast/F-test 6 with real contrast/F-test 7? set fmri(conmask6_7) 0 # Mask real contrast/F-test 6 with real contrast/F-test 8? set fmri(conmask6_8) 0 # Mask real contrast/F-test 7 with real contrast/F-test 1? set fmri(conmask7_1) 0 # Mask real contrast/F-test 7 with real contrast/F-test 2? set fmri(conmask7_2) 0 # Mask real contrast/F-test 7 with real contrast/F-test 3? set fmri(conmask7_3) 0 # Mask real contrast/F-test 7 with real contrast/F-test 4? set fmri(conmask7_4) 0 # Mask real contrast/F-test 7 with real contrast/F-test 5? set fmri(conmask7_5) 0 # Mask real contrast/F-test 7 with real contrast/F-test 6? set fmri(conmask7_6) 0 # Mask real contrast/F-test 7 with real contrast/F-test 8? set fmri(conmask7_8) 0 # Mask real contrast/F-test 8 with real contrast/F-test 1? set fmri(conmask8_1) 0 # Mask real contrast/F-test 8 with real contrast/F-test 2? set fmri(conmask8_2) 0 # Mask real contrast/F-test 8 with real contrast/F-test 3? set fmri(conmask8_3) 0 # Mask real contrast/F-test 8 with real contrast/F-test 4? set fmri(conmask8_4) 0 # Mask real contrast/F-test 8 with real contrast/F-test 5? set fmri(conmask8_5) 0 # Mask real contrast/F-test 8 with real contrast/F-test 6? set fmri(conmask8_6) 0 # Mask real contrast/F-test 8 with real contrast/F-test 7? set fmri(conmask8_7) 0 ### The rest is just fixed # Do contrast masking at all? 
set fmri(conmask1_1) 0 # Now options that don't appear in the GUI # Alternative example_func image (not derived from input 4D dataset) set fmri(alternative_example_func) "" # Alternative (to BETting) mask image set fmri(alternative_mask) "" # Initial structural space registration initialisation transform set fmri(init_initial_highres) "" # Structural space registration initialisation transform set fmri(init_highres) "" # Standard space registration initialisation transform set fmri(init_standard) "" # For full FEAT analysis: overwrite existing .feat output dir? set fmri(overwrite_yn) 1 nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_ev_custom.tcl000066400000000000000000000017651413403311400261260ustar00rootroot00000000000000# EV title set fmri(evtitle$ev_num) "$ev_name" # Basic waveform shape (EV $ev_num) # 0 : Square # 1 : Sinusoid # 2 : Custom (1 entry per volume) # 3 : Custom (3 column format) # 4 : Interaction # 10 : Empty (all zeros) set fmri(shape$ev_num) 3 # Convolution (EV $ev_num) # 0 : None # 1 : Gaussian # 2 : Gamma # 3 : Double-Gamma HRF # 4 : Gamma basis functions # 5 : Sine basis functions # 6 : FIR basis functions # 7 : Optimal/custom basis functions set fmri(convolve$ev_num) 7 # Convolve phase (EV $ev_num) set fmri(convolve_phase$ev_num) 0 # Apply temporal filtering (EV $ev_num) set fmri(tempfilt_yn$ev_num) $tempfilt_yn # Add temporal derivative (EV $ev_num) set fmri(deriv_yn$ev_num) $temporalderiv # Custom EV file (EV $ev_num) set fmri(custom$ev_num) "$cond_file" # Optimal/custom HRF convolution file set fmri(default_bfcustom) "${fsldir}/etc/default_flobs.flobs/hrfbasisfns.txt" set fmri(basisorth$ev_num) $basisorth set fmri(basisfnum$ev_num) $basisfnum set fmri(bfcustom$ev_num) "$bfcustompath" nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_ev_gamma.tcl000066400000000000000000000014361413403311400256710ustar00rootroot00000000000000# EV title set fmri(evtitle$ev_num) "$ev_name" # Basic waveform shape # 0 : Square # 1 : Sinusoid # 2 : 
Custom (1 entry per volume) # 3 : Custom (3 column format) # 4 : Interaction # 10 : Empty (all zeros) set fmri(shape$ev_num) 3 # Convolution # 0 : None # 1 : Gaussian # 2 : Gamma # 3 : Double-Gamma HRF # 4 : Gamma basis functions # 5 : Sine basis functions # 6 : FIR basis functions # 7 : Optimal/custom basis functions set fmri(convolve$ev_num) 2 # Convolve phase set fmri(convolve_phase$ev_num) 0 # Apply temporal filtering set fmri(tempfilt_yn$ev_num) 1 # Add temporal derivative set fmri(deriv_yn$ev_num) $temporalderiv # Custom EV file set fmri(custom$ev_num) "$cond_file" # Gamma sigma set fmri(gammasigma$ev_num) $gammasigma # Gamma delay set fmri(gammadelay$ev_num) $gammadelay nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_ev_hrf.tcl000066400000000000000000000014111413403311400253570ustar00rootroot00000000000000# EV title set fmri(evtitle$ev_num) "$ev_name" # Basic waveform shape (EV $ev_num) # 0 : Square # 1 : Sinusoid # 2 : Custom (1 entry per volume) # 3 : Custom (3 column format) # 4 : Interaction # 10 : Empty (all zeros) set fmri(shape$ev_num) 3 # Convolution (EV $ev_num) # 0 : None # 1 : Gaussian # 2 : Gamma # 3 : Double-Gamma HRF # 4 : Gamma basis functions # 5 : Sine basis functions # 6 : FIR basis functions # 7 : Optimal/custom basis functions set fmri(convolve$ev_num) 3 # Convolve phase (EV $ev_num) set fmri(convolve_phase$ev_num) 0 # Apply temporal filtering (EV $ev_num) set fmri(tempfilt_yn$ev_num) $tempfilt_yn # Add temporal derivative (EV $ev_num) set fmri(deriv_yn$ev_num) $temporalderiv # Custom EV file (EV $ev_num) set fmri(custom$ev_num) "$cond_file" nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_ev_none.tcl000066400000000000000000000011721413403311400255430ustar00rootroot00000000000000# EV title set fmri(evtitle$ev_num) "$ev_name" # Basic waveform shape # 0 : Square # 1 : Sinusoid # 2 : Custom (1 entry per volume) # 3 : Custom (3 column format) # 4 : Interaction # 10 : Empty (all zeros) set fmri(shape$ev_num) 2 # Convolution # 0 
: None # 1 : Gaussian # 2 : Gamma # 3 : Double-Gamma HRF # 4 : Gamma basis functions # 5 : Sine basis functions # 6 : FIR basis functions # 7 : Optimal/custom basis functions set fmri(convolve$ev_num) 0 # Apply temporal filtering set fmri(tempfilt_yn$ev_num) $tempfilt_yn # Add temporal derivative set fmri(deriv_yn$ev_num) 0 # Custom EV file set fmri(custom$ev_num) "$cond_file" nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_ev_ortho.tcl000066400000000000000000000001051413403311400257320ustar00rootroot00000000000000# Orthogonalise EV $c0 wrt EV $c1 set fmri(ortho$c0.$c1) $orthogonal nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_fe_copes.tcl000066400000000000000000000001321413403311400256700ustar00rootroot00000000000000# Use lower-level cope $copeno for higher-level analysis set fmri(copeinput.${copeno}) 1 nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_fe_ev_element.tcl000066400000000000000000000002171413403311400267060ustar00rootroot00000000000000# Higher-level EV value for EV 1 and input $input set fmri(evg${input}.1) 1 # Group membership for input $input set fmri(groupmem.${input}) 1 nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_fe_ev_header.tcl000066400000000000000000000014241413403311400265060ustar00rootroot00000000000000# Add confound EVs text file set fmri(confoundevs) 0 # EV 1 title set fmri(evtitle1) "" # Basic waveform shape (EV 1) # 0 : Square # 1 : Sinusoid # 2 : Custom (1 entry per volume) # 3 : Custom (3 column format) # 4 : Interaction # 10 : Empty (all zeros) set fmri(shape1) 2 # Convolution (EV 1) # 0 : None # 1 : Gaussian # 2 : Gamma # 3 : Double-Gamma HRF # 4 : Gamma basis functions # 5 : Sine basis functions # 6 : FIR basis functions set fmri(convolve1) 0 # Convolve phase (EV 1) set fmri(convolve_phase1) 0 # Apply temporal filtering (EV 1) set fmri(tempfilt_yn1) 0 # Add temporal derivative (EV 1) set fmri(deriv_yn1) 0 # Custom EV file (EV 1) set fmri(custom1) "dummy" # Orthogonalise EV 1 wrt EV 0 set 
fmri(ortho1.0) 0 # Orthogonalise EV 1 wrt EV 1 set fmri(ortho1.1) 0 nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_fe_featdirs.tcl000066400000000000000000000001151413403311400263610ustar00rootroot00000000000000# 4D AVW data or FEAT directory ($runno) set feat_files($runno) "${rundir}" nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_fe_footer.tcl000066400000000000000000000021351413403311400260620ustar00rootroot00000000000000# Contrast & F-tests mode # real : control real EVs # orig : control original EVs set fmri(con_mode_old) real set fmri(con_mode) real # Display images for contrast_real 1 set fmri(conpic_real.1) 1 # Title for contrast_real 1 set fmri(conname_real.1) "group mean" # Real contrast_real vector 1 element 1 set fmri(con_real1.1) 1 # Contrast masking - use >0 instead of thresholding? set fmri(conmask_zerothresh_yn) 0 # Do contrast masking at all? set fmri(conmask1_1) 0 ########################################################## # Now options that don't appear in the GUI # Alternative example_func image (not derived from input 4D dataset) set fmri(alternative_example_func) "" # Alternative (to BETting) mask image set fmri(alternative_mask) "" # Initial structural space registration initialisation transform set fmri(init_initial_highres) "" # Structural space registration initialisation transform set fmri(init_highres) "" # Standard space registration initialisation transform set fmri(init_standard) "" # For full FEAT analysis: overwrite existing .feat output dir? set fmri(overwrite_yn) $overwrite nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_fe_header.tcl000066400000000000000000000122511413403311400260140ustar00rootroot00000000000000# FEAT version number set fmri(version) 5.98 # Are we in MELODIC? 
set fmri(inmelodic) 0 # Analysis level # 1 : First-level analysis # 2 : Higher-level analysis set fmri(level) 2 # Which stages to run # 0 : No first-level analysis (registration and/or group stats only) # 7 : Full first-level analysis # 1 : Pre-Stats # 3 : Pre-Stats + Stats # 2 : Stats # 6 : Stats + Post-stats # 4 : Post-stats set fmri(analysis) 6 # Use relative filenames set fmri(relative_yn) 0 # Balloon help set fmri(help_yn) 1 # Run Featwatcher set fmri(featwatcher_yn) 1 # Cleanup first-level standard-space images set fmri(sscleanup_yn) 0 # Output directory set fmri(outputdir) "./output" # TR(s) set fmri(tr) 3 # Total volumes set fmri(npts) ${num_runs} # Delete volumes set fmri(ndelete) 0 # Perfusion tag/control order set fmri(tagfirst) 1 # Number of first-level analyses set fmri(multiple) ${num_runs} # Higher-level input type # 1 : Inputs are lower-level FEAT directories # 2 : Inputs are cope images from FEAT directories set fmri(inputtype) 1 # Carry out pre-stats processing? set fmri(filtering_yn) 0 # Brain/background threshold, % set fmri(brain_thresh) 10 # Critical z for design efficiency calculation set fmri(critical_z) 5.3 # Noise level set fmri(noise) 0.66 # Noise AR(1) set fmri(noisear) 0.34 # Post-stats-only directory copying # 0 : Overwrite original post-stats results # 1 : Copy original FEAT directory for new Contrasts, Thresholding, Rendering set fmri(newdir_yn) 0 # Motion correction # 0 : None # 1 : MCFLIRT set fmri(mc) 1 # Spin-history (currently obsolete) set fmri(sh_yn) 0 # B0 fieldmap unwarping? set fmri(regunwarp_yn) 0 # EPI dwell time (ms) set fmri(dwell) 0.7 # EPI TE (ms) set fmri(te) 35 # % Signal loss threshold set fmri(signallossthresh) 10 # Unwarp direction set fmri(unwarp_dir) y- # Slice timing correction # 0 : None # 1 : Regular up (0, 1, 2, 3, ...) # 2 : Regular down # 3 : Use slice order file # 4 : Use slice timings file # 5 : Interleaved (0, 2, 4 ... 1, 3, 5 ... 
) set fmri(st) 0 # Slice timings file set fmri(st_file) "" # BET brain extraction set fmri(bet_yn) 1 # Spatial smoothing FWHM (mm) set fmri(smooth) 5 # Intensity normalization set fmri(norm_yn) 0 # Perfusion subtraction set fmri(perfsub_yn) 0 # Highpass temporal filtering set fmri(temphp_yn) 1 # Lowpass temporal filtering set fmri(templp_yn) 0 # MELODIC ICA data exploration set fmri(melodic_yn) 0 # Carry out main stats? set fmri(stats_yn) 1 # Carry out prewhitening? set fmri(prewhiten_yn) 1 # Add motion parameters to model # 0 : No # 1 : Yes set fmri(motionevs) 0 # Robust outlier detection in FLAME? set fmri(robust_yn) 0 # Higher-level modelling # 3 : Fixed effects # 0 : Mixed Effects: Simple OLS # 2 : Mixed Effects: FLAME 1 # 1 : Mixed Effects: FLAME 1+2 set fmri(mixed_yn) 3 # Number of EVs set fmri(evs_orig) 1 set fmri(evs_real) 1 set fmri(evs_vox) 0 # Number of contrasts set fmri(ncon_orig) 1 set fmri(ncon_real) 1 # Number of F-tests set fmri(nftests_orig) 0 set fmri(nftests_real) 0 # Add constant column to design matrix? (obsolete) set fmri(constcol) 0 # Carry out post-stats steps? set fmri(poststats_yn) 1 # Pre-threshold masking? set fmri(threshmask) "" # Thresholding # 0 : None # 1 : Uncorrected # 2 : Voxel # 3 : Cluster set fmri(thresh) 3 # P threshold set fmri(prob_thresh) 0.05 # Z threshold set fmri(z_thresh) 2.3 # Z min/max for colour rendering # 0 : Use actual Z min/max # 1 : Use preset Z min/max set fmri(zdisplay) 0 # Z min in colour rendering set fmri(zmin) 2 # Z max in colour rendering set fmri(zmax) 8 # Colour rendering type # 0 : Solid blobs # 1 : Transparent blobs set fmri(rendertype) 1 # Background image for higher-level stats overlays # 1 : Mean highres # 2 : First highres # 3 : Mean functional # 4 : First functional # 5 : Standard space template set fmri(bgimage) 1 # Create time series plots set fmri(tsplot_yn) 1 # Registration? 
set fmri(reg_yn) 0 # Registration to initial structural set fmri(reginitial_highres_yn) 0 # Search space for registration to initial structural # 0 : No search # 90 : Normal search # 180 : Full search set fmri(reginitial_highres_search) 90 # Degrees of Freedom for registration to initial structural set fmri(reginitial_highres_dof) 3 # Registration to main structural set fmri(reghighres_yn) 0 # Search space for registration to main structural # 0 : No search # 90 : Normal search # 180 : Full search set fmri(reghighres_search) 90 # Degrees of Freedom for registration to main structural set fmri(reghighres_dof) 6 # Registration to standard image? set fmri(regstandard_yn) 0 # Standard image set fmri(regstandard) "regimage" # Search space for registration to standard space # 0 : No search # 90 : Normal search # 180 : Full search set fmri(regstandard_search) 90 # Degrees of Freedom for registration to standard space set fmri(regstandard_dof) 12 # Do nonlinear registration from structural to standard space? set fmri(regstandard_nonlinear_yn) 0 # Control nonlinear warp field resolution set fmri(regstandard_nonlinear_warpres) 10 # High pass filter cutoff set fmri(paradigm_hp) 100 # Number of lower-level copes feeding into higher-level analysis set fmri(ncopeinputs) ${num_copes} nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_header.tcl000066400000000000000000000124221413403311400253420ustar00rootroot00000000000000# FEAT version number set fmri(version) 5.98 # Are we in MELODIC? 
set fmri(inmelodic) 0 # Analysis level # 1 : First-level analysis # 2 : Higher-level analysis set fmri(level) 1 # Which stages to run # 0 : No first-level analysis (registration and/or group stats only) # 7 : Full first-level analysis # 1 : Pre-Stats # 3 : Pre-Stats + Stats # 2 : Stats # 6 : Stats + Post-stats # 4 : Post-stats set fmri(analysis) $analysis_stages # Use relative filenames set fmri(relative_yn) 0 # Balloon help set fmri(help_yn) 1 # Run Featwatcher set fmri(featwatcher_yn) 0 # Cleanup first-level standard-space images set fmri(sscleanup_yn) 0 # Output directory set fmri(outputdir) "scan$scan_num" # TR(s) set fmri(tr) 2.0 # Total volumes set fmri(npts) $num_vols # Delete volumes set fmri(ndelete) 0 # Perfusion tag/control order set fmri(tagfirst) 1 # Number of first-level analyses set fmri(multiple) 1 # Higher-level input type # 1 : Inputs are lower-level FEAT directories # 2 : Inputs are cope images from FEAT directories set fmri(inputtype) 1 # Carry out pre-stats processing? set fmri(filtering_yn) 0 # Brain/background threshold, set fmri(brain_thresh) 10 # Critical z for design efficiency calculation set fmri(critical_z) 5.3 # Noise level set fmri(noise) 0.66 # Noise AR(1) set fmri(noisear) 0.34 # Post-stats-only directory copying # 0 : Overwrite original post-stats results # 1 : Copy original FEAT directory for new Contrasts, Thresholding, Rendering set fmri(newdir_yn) 0 # Motion correction # 0 : None # 1 : MCFLIRT set fmri(mc) 0 # Spin-history (currently obsolete) set fmri(sh_yn) 0 # B0 fieldmap unwarping? set fmri(regunwarp_yn) 0 # EPI dwell time (ms) set fmri(dwell) 0.7 # EPI TE (ms) set fmri(te) 35 # Signal loss threshold set fmri(signallossthresh) 10 # Unwarp direction set fmri(unwarp_dir) y- # Slice timing correction # 0 : None # 1 : Regular up (0, 1, 2, 3, ...) # 2 : Regular down # 3 : Use slice order file # 4 : Use slice timings file # 5 : Interleaved (0, 2, 4 ... 1, 3, 5 ... 
) set fmri(st) 0 # Slice timings file set fmri(st_file) "" # BET brain extraction set fmri(bet_yn) 0 # Spatial smoothing FWHM (mm) set fmri(smooth) 5 # Intensity normalization set fmri(norm_yn) 0 # Perfusion subtraction set fmri(perfsub_yn) 0 # Highpass temporal filtering set fmri(temphp_yn) 1 # Lowpass temporal filtering set fmri(templp_yn) 0 # MELODIC ICA data exploration set fmri(melodic_yn) 0 # Carry out main stats? set fmri(stats_yn) 1 # Carry out prewhitening? set fmri(prewhiten_yn) 1 # Add motion parameters to model # 0 : No # 1 : Yes set fmri(motionevs) 0 # Robust outlier detection in FLAME? set fmri(robust_yn) 0 # Higher-level modelling # 3 : Fixed effects # 0 : Mixed Effects: Simple OLS # 2 : Mixed Effects: FLAME 1 # 1 : Mixed Effects: FLAME 1+2 set fmri(mixed_yn) 2 # Number of EVs set fmri(evs_orig) $num_evs set fmri(evs_real) $num_evs set fmri(evs_vox) 0 # Number of contrasts set fmri(ncon_orig) $num_contrasts set fmri(ncon_real) $num_contrasts # Number of F-tests set fmri(nftests_orig) 0 set fmri(nftests_real) 0 # Add constant column to design matrix? (obsolete) set fmri(constcol) 0 # Carry out post-stats steps? set fmri(poststats_yn) $do_contrasts # Pre-threshold masking? set fmri(threshmask) "" # Thresholding # 0 : None # 1 : Uncorrected # 2 : Voxel # 3 : Cluster set fmri(thresh) 3 # P threshold set fmri(prob_thresh) 0.05 # Z threshold set fmri(z_thresh) 2.3 # Z min/max for colour rendering # 0 : Use actual Z min/max # 1 : Use preset Z min/max set fmri(zdisplay) 0 # Z min in colour rendering set fmri(zmin) 2 # Z max in colour rendering set fmri(zmax) 8 # Colour rendering type # 0 : Solid blobs # 1 : Transparent blobs set fmri(rendertype) 1 # Background image for higher-level stats overlays # 1 : Mean highres # 2 : First highres # 3 : Mean functional # 4 : First functional # 5 : Standard space template set fmri(bgimage) 1 # Create time series plots set fmri(tsplot_yn) 1 #Registration? 
set fmri(reg_yn) 0 # Registration to initial structural set fmri(reginitial_highres_yn) 0 # Search space for registration to initial structural # 0 : No search # 90 : Normal search # 180 : Full search set fmri(reginitial_highres_search) 90 # Degrees of Freedom for registration to initial structural set fmri(reginitial_highres_dof) 3 # Registration to main structural set fmri(reghighres_yn) 0 # Search space for registration to main structural # 0 : No search # 90 : Normal search # 180 : Full search set fmri(reghighres_search) 90 # Degrees of Freedom for registration to main structural set fmri(reghighres_dof) 6 # Registration to standard image? set fmri(regstandard_yn) 0 # Standard image set fmri(regstandard) "standard_image" # Search space for registration to standard space # 0 : No search # 90 : Normal search # 180 : Full search set fmri(regstandard_search) 90 # Degrees of Freedom for registration to standard space set fmri(regstandard_dof) 12 # Do nonlinear registration from structural to standard space? set fmri(regstandard_nonlinear_yn) 0 # Control nonlinear warp field resolution set fmri(regstandard_nonlinear_warpres) 10 # High pass filter cutoff set fmri(paradigm_hp) 100 # 4D AVW data or FEAT directory (1) set feat_files(1) "$func_file" # Subject's structural for analysis 1 set highres_files(1) "$struct_file" nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_header_l1.tcl000066400000000000000000000124421413403311400257400ustar00rootroot00000000000000# FEAT version number set fmri(version) 5.98 # Are we in MELODIC? 
set fmri(inmelodic) 0 # Analysis level # 1 : First-level analysis # 2 : Higher-level analysis set fmri(level) 1 # Which stages to run # 0 : No first-level analysis (registration and/or group stats only) # 7 : Full first-level analysis # 1 : Pre-Stats # 3 : Pre-Stats + Stats # 2 : Stats # 6 : Stats + Post-stats # 4 : Post-stats set fmri(analysis) 6 # Use relative filenames set fmri(relative_yn) 0 # Balloon help set fmri(help_yn) 1 # Run Featwatcher set fmri(featwatcher_yn) 0 # Cleanup first-level standard-space images set fmri(sscleanup_yn) 0 # Output directory set fmri(outputdir) "run$run_num" # TR(s) set fmri(tr) $interscan_interval # Total volumes set fmri(npts) $num_vols # Delete volumes set fmri(ndelete) 0 # Perfusion tag/control order set fmri(tagfirst) 1 # Number of first-level analyses set fmri(multiple) 1 # Higher-level input type # 1 : Inputs are lower-level FEAT directories # 2 : Inputs are cope images from FEAT directories set fmri(inputtype) 2 # Carry out pre-stats processing? set fmri(filtering_yn) 0 # Brain/background threshold, set fmri(brain_thresh) 10 # Critical z for design efficiency calculation set fmri(critical_z) 5.3 # Noise level set fmri(noise) 0.66 # Noise AR(1) set fmri(noisear) 0.34 # Post-stats-only directory copying # 0 : Overwrite original post-stats results # 1 : Copy original FEAT directory for new Contrasts, Thresholding, Rendering set fmri(newdir_yn) 0 # Motion correction # 0 : None # 1 : MCFLIRT set fmri(mc) 0 # Spin-history (currently obsolete) set fmri(sh_yn) 0 # B0 fieldmap unwarping? set fmri(regunwarp_yn) 0 # EPI dwell time (ms) set fmri(dwell) 0.7 # EPI TE (ms) set fmri(te) 35 # Signal loss threshold set fmri(signallossthresh) 10 # Unwarp direction set fmri(unwarp_dir) y- # Slice timing correction # 0 : None # 1 : Regular up (0, 1, 2, 3, ...) # 2 : Regular down # 3 : Use slice order file # 4 : Use slice timings file # 5 : Interleaved (0, 2, 4 ... 1, 3, 5 ... 
) set fmri(st) 0 # Slice timings file set fmri(st_file) "" # BET brain extraction set fmri(bet_yn) 0 # Spatial smoothing FWHM (mm) set fmri(smooth) 0 # Intensity normalization set fmri(norm_yn) 0 # Perfusion subtraction set fmri(perfsub_yn) 0 # Highpass temporal filtering set fmri(temphp_yn) $temphp_yn # Lowpass temporal filtering set fmri(templp_yn) 0 # MELODIC ICA data exploration set fmri(melodic_yn) 0 # Carry out main stats? set fmri(stats_yn) 1 # Carry out prewhitening? set fmri(prewhiten_yn) $prewhiten # Add motion parameters to model # 0 : No # 1 : Yes set fmri(motionevs) 0 # Robust outlier detection in FLAME? set fmri(robust_yn) 0 # Higher-level modelling # 3 : Fixed effects # 0 : Mixed Effects: Simple OLS # 2 : Mixed Effects: FLAME 1 # 1 : Mixed Effects: FLAME 1+2 set fmri(mixed_yn) 2 # Number of EVs set fmri(evs_orig) $num_evs set fmri(evs_real) $num_evs_real set fmri(evs_vox) 0 # Number of contrasts set fmri(ncon_orig) $num_tcon set fmri(ncon_real) $num_tcon # Number of F-tests set fmri(nftests_orig) $num_fcon set fmri(nftests_real) $num_fcon # Add constant column to design matrix? (obsolete) set fmri(constcol) 0 # Carry out post-stats steps? set fmri(poststats_yn) 1 # Pre-threshold masking? set fmri(threshmask) "" # Thresholding # 0 : None # 1 : Uncorrected # 2 : Voxel # 3 : Cluster set fmri(thresh) 3 # P threshold set fmri(prob_thresh) 0.05 # Z threshold set fmri(z_thresh) 2.3 # Z min/max for colour rendering # 0 : Use actual Z min/max # 1 : Use preset Z min/max set fmri(zdisplay) 0 # Z min in colour rendering set fmri(zmin) 2 # Z max in colour rendering set fmri(zmax) 8 # Colour rendering type # 0 : Solid blobs # 1 : Transparent blobs set fmri(rendertype) 1 # Background image for higher-level stats overlays # 1 : Mean highres # 2 : First highres # 3 : Mean functional # 4 : First functional # 5 : Standard space template set fmri(bgimage) 1 # Create time series plots set fmri(tsplot_yn) 1 #Registration? 
set fmri(reg_yn) 0 # Registration to initial structural set fmri(reginitial_highres_yn) 0 # Search space for registration to initial structural # 0 : No search # 90 : Normal search # 180 : Full search set fmri(reginitial_highres_search) 90 # Degrees of Freedom for registration to initial structural set fmri(reginitial_highres_dof) 3 # Registration to main structural set fmri(reghighres_yn) 0 # Search space for registration to main structural # 0 : No search # 90 : Normal search # 180 : Full search set fmri(reghighres_search) 90 # Degrees of Freedom for registration to main structural set fmri(reghighres_dof) 6 # Registration to standard image? set fmri(regstandard_yn) 0 # Standard image set fmri(regstandard) "MNI152" # Search space for registration to standard space # 0 : No search # 90 : Normal search # 180 : Full search set fmri(regstandard_search) 90 # Degrees of Freedom for registration to standard space set fmri(regstandard_dof) 0 # Do nonlinear registration from structural to standard space? 
set fmri(regstandard_nonlinear_yn) 0 # Control nonlinear warp field resolution set fmri(regstandard_nonlinear_warpres) 10 # High pass filter cutoff set fmri(paradigm_hp) $high_pass_filter_cutoff # 4D AVW data or FEAT directory (1) set feat_files(1) "$func_file" # Subject's structural for analysis 1 set highres_files(1) "" nipype-1.7.0/nipype/interfaces/fsl/model_templates/feat_nongui.tcl000066400000000000000000000012041413403311400254050ustar00rootroot00000000000000########################################################## # Now options that don't appear in the GUI # Alternative example_func image (not derived from input 4D dataset) set fmri(alternative_example_func) "" # Alternative (to BETting) mask image set fmri(alternative_mask) "" # Initial structural space registration initialisation transform set fmri(init_initial_highres) "" # Structural space registration initialisation transform set fmri(init_highres) "" # Standard space registration initialisation transform set fmri(init_standard) "" # For full FEAT analysis: overwrite existing .feat output dir? set fmri(overwrite_yn) $overwrite nipype-1.7.0/nipype/interfaces/fsl/model_templates/featreg_header.tcl000066400000000000000000000122341413403311400260410ustar00rootroot00000000000000# FEAT version number set fmri(version) 5.98 # Are we in MELODIC? 
set fmri(inmelodic) 0 # Analysis level # 1 : First-level analysis # 2 : Higher-level analysis set fmri(level) 2 # Which stages to run # 0 : No first-level analysis (registration and/or group stats only) # 7 : Full first-level analysis # 1 : Pre-Stats # 3 : Pre-Stats + Stats # 2 : Stats # 6 : Stats + Post-stats # 4 : Post-stats set fmri(analysis) 0 # Use relative filenames set fmri(relative_yn) 0 # Balloon help set fmri(help_yn) 1 # Run Featwatcher set fmri(featwatcher_yn) 1 # Cleanup first-level standard-space images set fmri(sscleanup_yn) 0 # Output directory set fmri(outputdir) "" # TR(s) set fmri(tr) 3 # Total volumes set fmri(npts) 2 # Delete volumes set fmri(ndelete) 0 # Perfusion tag/control order set fmri(tagfirst) 1 # Number of first-level analyses set fmri(multiple) ${num_runs} # Higher-level input type # 1 : Inputs are lower-level FEAT directories # 2 : Inputs are cope images from FEAT directories set fmri(inputtype) 1 # Carry out pre-stats processing? set fmri(filtering_yn) 0 # Brain/background threshold, % set fmri(brain_thresh) 10 # Critical z for design efficiency calculation set fmri(critical_z) 5.3 # Noise level set fmri(noise) 0.66 # Noise AR(1) set fmri(noisear) 0.34 # Post-stats-only directory copying # 0 : Overwrite original post-stats results # 1 : Copy original FEAT directory for new Contrasts, Thresholding, Rendering set fmri(newdir_yn) 0 # Motion correction # 0 : None # 1 : MCFLIRT set fmri(mc) 1 # Spin-history (currently obsolete) set fmri(sh_yn) 0 # B0 fieldmap unwarping? set fmri(regunwarp_yn) 0 # EPI dwell time (ms) set fmri(dwell) 0.7 # EPI TE (ms) set fmri(te) 35 # % Signal loss threshold set fmri(signallossthresh) 10 # Unwarp direction set fmri(unwarp_dir) y- # Slice timing correction # 0 : None # 1 : Regular up (0, 1, 2, 3, ...) # 2 : Regular down # 3 : Use slice order file # 4 : Use slice timings file # 5 : Interleaved (0, 2, 4 ... 1, 3, 5 ... 
) set fmri(st) 0 # Slice timings file set fmri(st_file) "" # BET brain extraction set fmri(bet_yn) 1 # Spatial smoothing FWHM (mm) set fmri(smooth) 5 # Intensity normalization set fmri(norm_yn) 0 # Perfusion subtraction set fmri(perfsub_yn) 0 # Highpass temporal filtering set fmri(temphp_yn) 1 # Lowpass temporal filtering set fmri(templp_yn) 0 # MELODIC ICA data exploration set fmri(melodic_yn) 0 # Carry out main stats? set fmri(stats_yn) 1 # Carry out prewhitening? set fmri(prewhiten_yn) 1 # Add motion parameters to model # 0 : No # 1 : Yes set fmri(motionevs) 0 # Robust outlier detection in FLAME? set fmri(robust_yn) 0 # Higher-level modelling # 3 : Fixed effects # 0 : Mixed Effects: Simple OLS # 2 : Mixed Effects: FLAME 1 # 1 : Mixed Effects: FLAME 1+2 set fmri(mixed_yn) 3 # Number of EVs set fmri(evs_orig) 0 set fmri(evs_real) 0 set fmri(evs_vox) 0 # Number of contrasts set fmri(ncon_orig) 0 set fmri(ncon_real) 0 # Number of F-tests set fmri(nftests_orig) 0 set fmri(nftests_real) 0 # Add constant column to design matrix? (obsolete) set fmri(constcol) 0 # Carry out post-stats steps? set fmri(poststats_yn) 1 # Pre-threshold masking? set fmri(threshmask) "" # Thresholding # 0 : None # 1 : Uncorrected # 2 : Voxel # 3 : Cluster set fmri(thresh) 3 # P threshold set fmri(prob_thresh) 0.05 # Z threshold set fmri(z_thresh) 2.3 # Z min/max for colour rendering # 0 : Use actual Z min/max # 1 : Use preset Z min/max set fmri(zdisplay) 0 # Z min in colour rendering set fmri(zmin) 2 # Z max in colour rendering set fmri(zmax) 8 # Colour rendering type # 0 : Solid blobs # 1 : Transparent blobs set fmri(rendertype) 1 # Background image for higher-level stats overlays # 1 : Mean highres # 2 : First highres # 3 : Mean functional # 4 : First functional # 5 : Standard space template set fmri(bgimage) 1 # Create time series plots set fmri(tsplot_yn) 1 # Registration? 
set fmri(reg_yn) 0 # Registration to initial structural set fmri(reginitial_highres_yn) 0 # Search space for registration to initial structural # 0 : No search # 90 : Normal search # 180 : Full search set fmri(reginitial_highres_search) 90 # Degrees of Freedom for registration to initial structural set fmri(reginitial_highres_dof) 3 # Registration to main structural set fmri(reghighres_yn) 0 # Search space for registration to main structural # 0 : No search # 90 : Normal search # 180 : Full search set fmri(reghighres_search) 90 # Degrees of Freedom for registration to main structural set fmri(reghighres_dof) 6 # Registration to standard image? set fmri(regstandard_yn) 1 # Standard image set fmri(regstandard) "$regimage" # Search space for registration to standard space # 0 : No search # 90 : Normal search # 180 : Full search set fmri(regstandard_search) 90 # Degrees of Freedom for registration to standard space set fmri(regstandard_dof) $regdof # Do nonlinear registration from structural to standard space? set fmri(regstandard_nonlinear_yn) 0 # Control nonlinear warp field resolution set fmri(regstandard_nonlinear_warpres) 10 # High pass filter cutoff set fmri(paradigm_hp) 100 # Number of lower-level copes feeding into higher-level analysis set fmri(ncopeinputs) ${num_runs} nipype-1.7.0/nipype/interfaces/fsl/possum.py000066400000000000000000000100711413403311400211270ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ The possum module provides classes for interfacing with `POSSUM `_ command line tools. Please, check out the link for pertinent citations using POSSUM. .. Note:: This was written to work with FSL version 5.0.6. 
""" from .base import FSLCommand, FSLCommandInputSpec from ..base import TraitedSpec, File, traits class B0CalcInputSpec(FSLCommandInputSpec): in_file = File( exists=True, mandatory=True, argstr="-i %s", position=0, desc="filename of input image (usually a tissue/air segmentation)", ) out_file = File( argstr="-o %s", position=1, name_source=["in_file"], name_template="%s_b0field", output_name="out_file", desc="filename of B0 output volume", ) x_grad = traits.Float( 0.0, usedefault=True, argstr="--gx=%0.4f", desc="Value for zeroth-order x-gradient field (per mm)", ) y_grad = traits.Float( 0.0, usedefault=True, argstr="--gy=%0.4f", desc="Value for zeroth-order y-gradient field (per mm)", ) z_grad = traits.Float( 0.0, usedefault=True, argstr="--gz=%0.4f", desc="Value for zeroth-order z-gradient field (per mm)", ) x_b0 = traits.Float( 0.0, usedefault=True, argstr="--b0x=%0.2f", xor=["xyz_b0"], desc="Value for zeroth-order b0 field (x-component), in Tesla", ) y_b0 = traits.Float( 0.0, usedefault=True, argstr="--b0y=%0.2f", xor=["xyz_b0"], desc="Value for zeroth-order b0 field (y-component), in Tesla", ) z_b0 = traits.Float( 1.0, usedefault=True, argstr="--b0=%0.2f", xor=["xyz_b0"], desc="Value for zeroth-order b0 field (z-component), in Tesla", ) xyz_b0 = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="--b0x=%0.2f --b0y=%0.2f --b0=%0.2f", xor=["x_b0", "y_b0", "z_b0"], desc="Zeroth-order B0 field in Tesla", ) delta = traits.Float( -9.45e-6, usedefault=True, argstr="-d %e", desc="Delta value (chi_tissue - chi_air)", ) chi_air = traits.Float( 4.0e-7, usedefault=True, argstr="--chi0=%e", desc="susceptibility of air" ) compute_xyz = traits.Bool( False, usedefault=True, argstr="--xyz", desc="calculate and save all 3 field components (i.e. 
x,y,z)", ) extendboundary = traits.Float( 1.0, usedefault=True, argstr="--extendboundary=%0.2f", desc="Relative proportion to extend voxels at boundary", ) directconv = traits.Bool( False, usedefault=True, argstr="--directconv", desc="use direct (image space) convolution, not FFT", ) class B0CalcOutputSpec(TraitedSpec): out_file = File(exists=True, desc="filename of B0 output volume") class B0Calc(FSLCommand): """ B0 inhomogeneities occur at interfaces of materials with different magnetic susceptibilities, such as tissue-air interfaces. These differences lead to distortion in the local magnetic field, as Maxwell’s equations need to be satisfied. An example of B0 inhomogneity is the first volume of the 4D volume ```$FSLDIR/data/possum/b0_ppm.nii.gz```. Examples -------- >>> from nipype.interfaces.fsl import B0Calc >>> b0calc = B0Calc() >>> b0calc.inputs.in_file = 'tissue+air_map.nii' >>> b0calc.inputs.z_b0 = 3.0 >>> b0calc.inputs.output_type = "NIFTI_GZ" >>> b0calc.cmdline 'b0calc -i tissue+air_map.nii -o tissue+air_map_b0field.nii.gz --chi0=4.000000e-07 \ -d -9.450000e-06 --extendboundary=1.00 --b0x=0.00 --gx=0.0000 --b0y=0.00 --gy=0.0000 \ --b0=3.00 --gz=0.0000' """ _cmd = "b0calc" input_spec = B0CalcInputSpec output_spec = B0CalcOutputSpec nipype-1.7.0/nipype/interfaces/fsl/preprocess.py000066400000000000000000002267601413403311400220040ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL `_ command line tools. This was written to work with FSL version 4.1.4. """ import os import os.path as op from warnings import warn import numpy as np from nibabel import load from ... 
import LooseVersion from ...utils.filemanip import split_filename from ..base import ( TraitedSpec, File, InputMultiPath, OutputMultiPath, Undefined, traits, isdefined, ) from .base import FSLCommand, FSLCommandInputSpec, Info class BETInputSpec(FSLCommandInputSpec): # We use position args here as list indices - so a negative number # will put something on the end in_file = File( exists=True, desc="input file to skull strip", argstr="%s", position=0, mandatory=True, copyfile=False, ) out_file = File( desc="name of output skull stripped image", argstr="%s", position=1, genfile=True, hash_files=False, ) outline = traits.Bool(desc="create surface outline image", argstr="-o") mask = traits.Bool(desc="create binary mask image", argstr="-m") skull = traits.Bool(desc="create skull image", argstr="-s") no_output = traits.Bool(argstr="-n", desc="Don't generate segmented output") frac = traits.Float(desc="fractional intensity threshold", argstr="-f %.2f") vertical_gradient = traits.Float( argstr="-g %.2f", desc="vertical gradient in fractional intensity threshold (-1, 1)", ) radius = traits.Int(argstr="-r %d", units="mm", desc="head radius") center = traits.List( traits.Int, desc="center of gravity in voxels", argstr="-c %s", minlen=0, maxlen=3, units="voxels", ) threshold = traits.Bool( argstr="-t", desc="apply thresholding to segmented brain image and mask" ) mesh = traits.Bool(argstr="-e", desc="generate a vtk mesh brain surface") # the remaining 'options' are more like modes (mutually exclusive) that # FSL actually implements in a shell script wrapper around the bet binary. 
# for some combinations of them in specific order a call would not fail, # but in general using more than one of the following is clearly not # supported _xor_inputs = ( "functional", "reduce_bias", "robust", "padding", "remove_eyes", "surfaces", "t2_guided", ) robust = traits.Bool( desc="robust brain centre estimation (iterates BET several times)", argstr="-R", xor=_xor_inputs, ) padding = traits.Bool( desc=( "improve BET if FOV is very small in Z (by temporarily padding " "end slices)" ), argstr="-Z", xor=_xor_inputs, ) remove_eyes = traits.Bool( desc="eye & optic nerve cleanup (can be useful in SIENA)", argstr="-S", xor=_xor_inputs, ) surfaces = traits.Bool( desc=( "run bet2 and then betsurf to get additional skull and scalp " "surfaces (includes registrations)" ), argstr="-A", xor=_xor_inputs, ) t2_guided = File( desc="as with creating surfaces, when also feeding in " "non-brain-extracted T2 (includes registrations)", argstr="-A2 %s", xor=_xor_inputs, ) functional = traits.Bool(argstr="-F", xor=_xor_inputs, desc="apply to 4D fMRI data") reduce_bias = traits.Bool( argstr="-B", xor=_xor_inputs, desc="bias field and neck cleanup" ) class BETOutputSpec(TraitedSpec): out_file = File(desc="path/name of skullstripped file (if generated)") mask_file = File(desc="path/name of binary brain mask (if generated)") outline_file = File(desc="path/name of outline file (if generated)") meshfile = File(desc="path/name of vtk mesh file (if generated)") inskull_mask_file = File(desc="path/name of inskull mask (if generated)") inskull_mesh_file = File(desc="path/name of inskull mesh outline (if generated)") outskull_mask_file = File(desc="path/name of outskull mask (if generated)") outskull_mesh_file = File(desc="path/name of outskull mesh outline (if generated)") outskin_mask_file = File(desc="path/name of outskin mask (if generated)") outskin_mesh_file = File(desc="path/name of outskin mesh outline (if generated)") skull_mask_file = File(desc="path/name of skull mask (if 
generated)") skull_file = File(desc="path/name of skull file (if generated)") class BET(FSLCommand): """FSL BET wrapper for skull stripping For complete details, see the `BET Documentation. `_ Examples -------- >>> from nipype.interfaces import fsl >>> btr = fsl.BET() >>> btr.inputs.in_file = 'structural.nii' >>> btr.inputs.frac = 0.7 >>> btr.inputs.out_file = 'brain_anat.nii' >>> btr.cmdline 'bet structural.nii brain_anat.nii -f 0.70' >>> res = btr.run() # doctest: +SKIP """ _cmd = "bet" input_spec = BETInputSpec output_spec = BETOutputSpec def _run_interface(self, runtime): # The returncode is meaningless in BET. So check the output # in stderr and if it's set, then update the returncode # accordingly. runtime = super(BET, self)._run_interface(runtime) if runtime.stderr: self.raise_exception(runtime) return runtime def _format_arg(self, name, spec, value): formatted = super(BET, self)._format_arg(name, spec, value) if name == "in_file": # Convert to relative path to prevent BET failure # with long paths. return op.relpath(formatted, start=os.getcwd()) return formatted def _gen_outfilename(self): out_file = self.inputs.out_file # Generate default output filename if non specified. if not isdefined(out_file) and isdefined(self.inputs.in_file): out_file = self._gen_fname(self.inputs.in_file, suffix="_brain") # Convert to relative path to prevent BET failure # with long paths. 
return op.relpath(out_file, start=os.getcwd()) return out_file def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self._gen_outfilename()) basename = os.path.basename(outputs["out_file"]) cwd = os.path.dirname(outputs["out_file"]) kwargs = {"basename": basename, "cwd": cwd} if (isdefined(self.inputs.mesh) and self.inputs.mesh) or ( isdefined(self.inputs.surfaces) and self.inputs.surfaces ): outputs["meshfile"] = self._gen_fname( suffix="_mesh.vtk", change_ext=False, **kwargs ) if (isdefined(self.inputs.mask) and self.inputs.mask) or ( isdefined(self.inputs.reduce_bias) and self.inputs.reduce_bias ): outputs["mask_file"] = self._gen_fname(suffix="_mask", **kwargs) if isdefined(self.inputs.outline) and self.inputs.outline: outputs["outline_file"] = self._gen_fname(suffix="_overlay", **kwargs) if isdefined(self.inputs.surfaces) and self.inputs.surfaces: outputs["inskull_mask_file"] = self._gen_fname( suffix="_inskull_mask", **kwargs ) outputs["inskull_mesh_file"] = self._gen_fname( suffix="_inskull_mesh", **kwargs ) outputs["outskull_mask_file"] = self._gen_fname( suffix="_outskull_mask", **kwargs ) outputs["outskull_mesh_file"] = self._gen_fname( suffix="_outskull_mesh", **kwargs ) outputs["outskin_mask_file"] = self._gen_fname( suffix="_outskin_mask", **kwargs ) outputs["outskin_mesh_file"] = self._gen_fname( suffix="_outskin_mesh", **kwargs ) outputs["skull_mask_file"] = self._gen_fname(suffix="_skull_mask", **kwargs) if isdefined(self.inputs.skull) and self.inputs.skull: outputs["skull_file"] = self._gen_fname(suffix="_skull", **kwargs) if isdefined(self.inputs.no_output) and self.inputs.no_output: outputs["out_file"] = Undefined return outputs def _gen_filename(self, name): if name == "out_file": return self._gen_outfilename() return None class FASTInputSpec(FSLCommandInputSpec): """Defines inputs (trait classes) for FAST""" in_files = InputMultiPath( File(exists=True), copyfile=False, desc="image, or multi-channel 
set of images, " "to be segmented", argstr="%s", position=-1, mandatory=True, ) out_basename = File(desc="base name of output files", argstr="-o %s") # ^^ uses in_file name as basename if none given number_classes = traits.Range( low=1, high=10, argstr="-n %d", desc="number of tissue-type classes" ) output_biasfield = traits.Bool(desc="output estimated bias field", argstr="-b") output_biascorrected = traits.Bool( desc="output restored image (bias-corrected image)", argstr="-B" ) img_type = traits.Enum( (1, 2, 3), desc="int specifying type of image: (1 = T1, 2 = T2, 3 = PD)", argstr="-t %d", ) bias_iters = traits.Range( low=1, high=10, argstr="-I %d", desc="number of main-loop iterations during " "bias-field removal", ) bias_lowpass = traits.Range( low=4, high=40, desc="bias field smoothing extent (FWHM) " "in mm", argstr="-l %d", units="mm", ) init_seg_smooth = traits.Range( low=0.0001, high=0.1, desc="initial segmentation spatial " "smoothness (during bias field " "estimation)", argstr="-f %.3f", ) segments = traits.Bool( desc="outputs a separate binary image for each " "tissue type", argstr="-g" ) init_transform = File( exists=True, desc=" initialise" " using priors", argstr="-a %s", ) other_priors = InputMultiPath( File(exist=True), desc="alternative prior images", argstr="-A %s", minlen=3, maxlen=3, ) no_pve = traits.Bool( desc="turn off PVE (partial volume estimation)", argstr="--nopve" ) no_bias = traits.Bool(desc="do not remove bias field", argstr="-N") use_priors = traits.Bool(desc="use priors throughout", argstr="-P") # ^^ Must also set -a!, mutually inclusive?? No, conditional mandatory... need to figure out how to handle with traits. 
segment_iters = traits.Range( low=1, high=50, desc="number of segmentation-initialisation" " iterations", argstr="-W %d", ) mixel_smooth = traits.Range( low=0.0, high=1.0, desc="spatial smoothness for mixeltype", argstr="-R %.2f" ) iters_afterbias = traits.Range( low=1, high=20, desc="number of main-loop iterations " "after bias-field removal", argstr="-O %d", ) hyper = traits.Range( low=0.0, high=1.0, desc="segmentation spatial smoothness", argstr="-H %.2f" ) verbose = traits.Bool(desc="switch on diagnostic messages", argstr="-v") manual_seg = File( exists=True, desc="Filename containing intensities", argstr="-s %s" ) probability_maps = traits.Bool( desc="outputs individual probability maps", argstr="-p" ) class FASTOutputSpec(TraitedSpec): """Specify possible outputs from FAST""" tissue_class_map = File( exists=True, desc="path/name of binary segmented volume file" " one val for each class _seg", ) tissue_class_files = OutputMultiPath( File( desc=( "path/name of binary segmented volumes one file for each class " "_seg_x" ) ) ) restored_image = OutputMultiPath( File( desc=( "restored images (one for each input image) named according to " "the input images _restore" ) ) ) mixeltype = File(desc="path/name of mixeltype volume file _mixeltype") partial_volume_map = File(desc="path/name of partial volume file _pveseg") partial_volume_files = OutputMultiPath( File(desc="path/name of partial volumes files one for each class, _pve_x") ) bias_field = OutputMultiPath(File(desc="Estimated bias field _bias")) probability_maps = OutputMultiPath( File(desc="filenames, one for each class, for each input, prob_x") ) class FAST(FSLCommand): """FSL FAST wrapper for segmentation and bias correction For complete details, see the `FAST Documentation. 
`_ Examples -------- >>> from nipype.interfaces import fsl >>> fastr = fsl.FAST() >>> fastr.inputs.in_files = 'structural.nii' >>> fastr.inputs.out_basename = 'fast_' >>> fastr.cmdline 'fast -o fast_ -S 1 structural.nii' >>> out = fastr.run() # doctest: +SKIP """ _cmd = "fast" input_spec = FASTInputSpec output_spec = FASTOutputSpec def _format_arg(self, name, spec, value): # first do what should be done in general formatted = super(FAST, self)._format_arg(name, spec, value) if name == "in_files": # FAST needs the -S parameter value to correspond to the number # of input images, otherwise it will ignore all but the first formatted = "-S %d %s" % (len(value), formatted) return formatted def _list_outputs(self): outputs = self.output_spec().get() if not isdefined(self.inputs.number_classes): nclasses = 3 else: nclasses = self.inputs.number_classes # when using multichannel, results basename is based on last # input filename _gen_fname_opts = {} if isdefined(self.inputs.out_basename): _gen_fname_opts["basename"] = self.inputs.out_basename _gen_fname_opts["cwd"] = os.getcwd() else: _gen_fname_opts["basename"] = self.inputs.in_files[-1] _gen_fname_opts["cwd"], _, _ = split_filename(_gen_fname_opts["basename"]) outputs["tissue_class_map"] = self._gen_fname(suffix="_seg", **_gen_fname_opts) if self.inputs.segments: outputs["tissue_class_files"] = [] for i in range(nclasses): outputs["tissue_class_files"].append( self._gen_fname(suffix="_seg_%d" % i, **_gen_fname_opts) ) if isdefined(self.inputs.output_biascorrected): outputs["restored_image"] = [] if len(self.inputs.in_files) > 1: # for multi-image segmentation there is one corrected image # per input for val, f in enumerate(self.inputs.in_files): # image numbering is 1-based outputs["restored_image"].append( self._gen_fname( suffix="_restore_%d" % (val + 1), **_gen_fname_opts ) ) else: # single image segmentation has unnumbered output image outputs["restored_image"].append( self._gen_fname(suffix="_restore", 
**_gen_fname_opts) ) outputs["mixeltype"] = self._gen_fname(suffix="_mixeltype", **_gen_fname_opts) if not self.inputs.no_pve: outputs["partial_volume_map"] = self._gen_fname( suffix="_pveseg", **_gen_fname_opts ) outputs["partial_volume_files"] = [] for i in range(nclasses): outputs["partial_volume_files"].append( self._gen_fname(suffix="_pve_%d" % i, **_gen_fname_opts) ) if self.inputs.output_biasfield: outputs["bias_field"] = [] if len(self.inputs.in_files) > 1: # for multi-image segmentation there is one bias field image # per input for val, f in enumerate(self.inputs.in_files): # image numbering is 1-based outputs["bias_field"].append( self._gen_fname( suffix="_bias_%d" % (val + 1), **_gen_fname_opts ) ) else: # single image segmentation has unnumbered output image outputs["bias_field"].append( self._gen_fname(suffix="_bias", **_gen_fname_opts) ) if self.inputs.probability_maps: outputs["probability_maps"] = [] for i in range(nclasses): outputs["probability_maps"].append( self._gen_fname(suffix="_prob_%d" % i, **_gen_fname_opts) ) return outputs class FLIRTInputSpec(FSLCommandInputSpec): in_file = File( exists=True, argstr="-in %s", mandatory=True, position=0, desc="input file" ) reference = File( exists=True, argstr="-ref %s", mandatory=True, position=1, desc="reference file" ) out_file = File( argstr="-out %s", desc="registered output file", name_source=["in_file"], name_template="%s_flirt", position=2, hash_files=False, ) out_matrix_file = File( argstr="-omat %s", name_source=["in_file"], keep_extension=True, name_template="%s_flirt.mat", desc="output affine matrix in 4x4 asciii format", position=3, hash_files=False, ) out_log = File( name_source=["in_file"], keep_extension=True, requires=["save_log"], name_template="%s_flirt.log", desc="output log", ) in_matrix_file = File(argstr="-init %s", desc="input 4x4 affine matrix") apply_xfm = traits.Bool( argstr="-applyxfm", desc=( "apply transformation supplied by in_matrix_file or uses_qform to" " use the affine 
matrix stored in the reference header" ), ) apply_isoxfm = traits.Float( argstr="-applyisoxfm %f", xor=["apply_xfm"], desc="as applyxfm but forces isotropic resampling", ) datatype = traits.Enum( "char", "short", "int", "float", "double", argstr="-datatype %s", desc="force output data type", ) cost = traits.Enum( "mutualinfo", "corratio", "normcorr", "normmi", "leastsq", "labeldiff", "bbr", argstr="-cost %s", desc="cost function", ) # XXX What is the difference between 'cost' and 'searchcost'? Are # these both necessary or do they map to the same variable. cost_func = traits.Enum( "mutualinfo", "corratio", "normcorr", "normmi", "leastsq", "labeldiff", "bbr", argstr="-searchcost %s", desc="cost function", ) uses_qform = traits.Bool( argstr="-usesqform", desc="initialize using sform or qform" ) display_init = traits.Bool(argstr="-displayinit", desc="display initial matrix") angle_rep = traits.Enum( "quaternion", "euler", argstr="-anglerep %s", desc="representation of rotation angles", ) interp = traits.Enum( "trilinear", "nearestneighbour", "sinc", "spline", argstr="-interp %s", desc="final interpolation method used in reslicing", ) sinc_width = traits.Int( argstr="-sincwidth %d", units="voxels", desc="full-width in voxels" ) sinc_window = traits.Enum( "rectangular", "hanning", "blackman", argstr="-sincwindow %s", desc="sinc window", ) # XXX better doc bins = traits.Int(argstr="-bins %d", desc="number of histogram bins") dof = traits.Int(argstr="-dof %d", desc="number of transform degrees of freedom") no_resample = traits.Bool(argstr="-noresample", desc="do not change input sampling") force_scaling = traits.Bool( argstr="-forcescaling", desc="force rescaling even for low-res images" ) min_sampling = traits.Float( argstr="-minsampling %f", units="mm", desc="set minimum voxel dimension for sampling", ) padding_size = traits.Int( argstr="-paddingsize %d", units="voxels", desc="for applyxfm: interpolates outside image " "by size", ) searchr_x = traits.List( traits.Int, 
minlen=2, maxlen=2, units="degrees", argstr="-searchrx %s", desc="search angles along x-axis, in degrees", ) searchr_y = traits.List( traits.Int, minlen=2, maxlen=2, units="degrees", argstr="-searchry %s", desc="search angles along y-axis, in degrees", ) searchr_z = traits.List( traits.Int, minlen=2, maxlen=2, units="degrees", argstr="-searchrz %s", desc="search angles along z-axis, in degrees", ) no_search = traits.Bool( argstr="-nosearch", desc="set all angular searches to ranges 0 to 0" ) coarse_search = traits.Int( argstr="-coarsesearch %d", units="degrees", desc="coarse search delta angle" ) fine_search = traits.Int( argstr="-finesearch %d", units="degrees", desc="fine search delta angle" ) schedule = File( exists=True, argstr="-schedule %s", desc="replaces default schedule" ) ref_weight = File( exists=True, argstr="-refweight %s", desc="File for reference weighting volume" ) in_weight = File( exists=True, argstr="-inweight %s", desc="File for input weighting volume" ) no_clamp = traits.Bool(argstr="-noclamp", desc="do not use intensity clamping") no_resample_blur = traits.Bool( argstr="-noresampblur", desc="do not use blurring on downsampling" ) rigid2D = traits.Bool(argstr="-2D", desc="use 2D rigid body mode - ignores dof") save_log = traits.Bool(desc="save to log file") verbose = traits.Int(argstr="-verbose %d", desc="verbose mode, 0 is least") bgvalue = traits.Float( 0, argstr="-setbackground %f", desc=("use specified background value for points " "outside FOV"), ) # BBR options wm_seg = File( argstr="-wmseg %s", min_ver="5.0.0", desc="white matter segmentation volume needed by BBR cost function", ) wmcoords = File( argstr="-wmcoords %s", min_ver="5.0.0", desc="white matter boundary coordinates for BBR cost function", ) wmnorms = File( argstr="-wmnorms %s", min_ver="5.0.0", desc="white matter boundary normals for BBR cost function", ) fieldmap = File( argstr="-fieldmap %s", min_ver="5.0.0", desc=( "fieldmap image in rads/s - must be already registered to 
the " "reference image" ), ) fieldmapmask = File( argstr="-fieldmapmask %s", min_ver="5.0.0", desc="mask for fieldmap image" ) pedir = traits.Int( argstr="-pedir %d", min_ver="5.0.0", desc="phase encode direction of EPI - 1/2/3=x/y/z & -1/-2/-3=-x/-y/-z", ) echospacing = traits.Float( argstr="-echospacing %f", min_ver="5.0.0", desc="value of EPI echo spacing - units of seconds", ) bbrtype = traits.Enum( "signed", "global_abs", "local_abs", argstr="-bbrtype %s", min_ver="5.0.0", desc=("type of bbr cost function: signed [default], global_abs, " "local_abs"), ) bbrslope = traits.Float( argstr="-bbrslope %f", min_ver="5.0.0", desc="value of bbr slope" ) class FLIRTOutputSpec(TraitedSpec): out_file = File(exists=True, desc="path/name of registered file (if generated)") out_matrix_file = File( exists=True, desc="path/name of calculated affine transform " "(if generated)" ) out_log = File(desc="path/name of output log (if generated)") class FLIRT(FSLCommand): """FSL FLIRT wrapper for coregistration For complete details, see the `FLIRT Documentation. 
`_ To print out the command line help, use: fsl.FLIRT().inputs_help() Examples -------- >>> from nipype.interfaces import fsl >>> from nipype.testing import example_data >>> flt = fsl.FLIRT(bins=640, cost_func='mutualinfo') >>> flt.inputs.in_file = 'structural.nii' >>> flt.inputs.reference = 'mni.nii' >>> flt.inputs.output_type = "NIFTI_GZ" >>> flt.cmdline # doctest: +ELLIPSIS 'flirt -in structural.nii -ref mni.nii -out structural_flirt.nii.gz -omat structural_flirt.mat -bins 640 -searchcost mutualinfo' >>> res = flt.run() #doctest: +SKIP """ _cmd = "flirt" input_spec = FLIRTInputSpec output_spec = FLIRTOutputSpec _log_written = False def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = super(FLIRT, self).aggregate_outputs( runtime=runtime, needed_outputs=needed_outputs ) if self.inputs.save_log and not self._log_written: with open(outputs.out_log, "a") as text_file: text_file.write(runtime.stdout + "\n") self._log_written = True return outputs def _parse_inputs(self, skip=None): if skip is None: skip = [] if self.inputs.save_log and not self.inputs.verbose: self.inputs.verbose = 1 if self.inputs.apply_xfm and not ( self.inputs.in_matrix_file or self.inputs.uses_qform ): raise RuntimeError( "Argument apply_xfm requires in_matrix_file or " "uses_qform arguments to run" ) skip.append("save_log") return super(FLIRT, self)._parse_inputs(skip=skip) class ApplyXFMInputSpec(FLIRTInputSpec): apply_xfm = traits.Bool( True, argstr="-applyxfm", desc=( "apply transformation supplied by in_matrix_file or uses_qform to" " use the affine matrix stored in the reference header" ), usedefault=True, ) class ApplyXFM(FLIRT): """Currently just a light wrapper around FLIRT, with no modifications ApplyXFM is used to apply an existing tranform to an image Examples -------- >>> import nipype.interfaces.fsl as fsl >>> from nipype.testing import example_data >>> applyxfm = fsl.preprocess.ApplyXFM() >>> applyxfm.inputs.in_file = example_data('structural.nii') >>> 
applyxfm.inputs.in_matrix_file = example_data('trans.mat') >>> applyxfm.inputs.out_file = 'newfile.nii' >>> applyxfm.inputs.reference = example_data('mni.nii') >>> applyxfm.inputs.apply_xfm = True >>> result = applyxfm.run() # doctest: +SKIP """ input_spec = ApplyXFMInputSpec class MCFLIRTInputSpec(FSLCommandInputSpec): in_file = File( exists=True, position=0, argstr="-in %s", mandatory=True, desc="timeseries to motion-correct", ) out_file = File( argstr="-out %s", genfile=True, desc="file to write", hash_files=False ) cost = traits.Enum( "mutualinfo", "woods", "corratio", "normcorr", "normmi", "leastsquares", argstr="-cost %s", desc="cost function to optimize", ) bins = traits.Int(argstr="-bins %d", desc="number of histogram bins") dof = traits.Int(argstr="-dof %d", desc="degrees of freedom for the transformation") ref_vol = traits.Int(argstr="-refvol %d", desc="volume to align frames to") scaling = traits.Float(argstr="-scaling %.2f", desc="scaling factor to use") smooth = traits.Float( argstr="-smooth %.2f", desc="smoothing factor for the cost function" ) rotation = traits.Int( argstr="-rotation %d", desc="scaling factor for rotation tolerances" ) stages = traits.Int( argstr="-stages %d", desc="stages (if 4, perform final search with sinc interpolation", ) init = File(exists=True, argstr="-init %s", desc="inital transformation matrix") interpolation = traits.Enum( "spline", "nn", "sinc", argstr="-%s_final", desc="interpolation method for transformation", ) use_gradient = traits.Bool(argstr="-gdt", desc="run search on gradient images") use_contour = traits.Bool(argstr="-edge", desc="run search on contour images") mean_vol = traits.Bool(argstr="-meanvol", desc="register to mean volume") stats_imgs = traits.Bool( argstr="-stats", desc="produce variance and std. dev. 
images" ) save_mats = traits.Bool(argstr="-mats", desc="save transformation matrices") save_plots = traits.Bool(argstr="-plots", desc="save transformation parameters") save_rms = traits.Bool( argstr="-rmsabs -rmsrel", desc="save rms displacement parameters" ) ref_file = File( exists=True, argstr="-reffile %s", desc="target image for motion correction" ) class MCFLIRTOutputSpec(TraitedSpec): out_file = File(exists=True, desc="motion-corrected timeseries") variance_img = File(exists=True, desc="variance image") std_img = File(exists=True, desc="standard deviation image") mean_img = File(exists=True, desc="mean timeseries image (if mean_vol=True)") par_file = File(exists=True, desc="text-file with motion parameters") mat_file = OutputMultiPath(File(exists=True), desc="transformation matrices") rms_files = OutputMultiPath( File(exists=True), desc="absolute and relative displacement parameters" ) class MCFLIRT(FSLCommand): """FSL MCFLIRT wrapper for within-modality motion correction For complete details, see the `MCFLIRT Documentation. 
`_ Examples -------- >>> from nipype.interfaces import fsl >>> mcflt = fsl.MCFLIRT() >>> mcflt.inputs.in_file = 'functional.nii' >>> mcflt.inputs.cost = 'mutualinfo' >>> mcflt.inputs.out_file = 'moco.nii' >>> mcflt.cmdline 'mcflirt -in functional.nii -cost mutualinfo -out moco.nii' >>> res = mcflt.run() # doctest: +SKIP """ _cmd = "mcflirt" input_spec = MCFLIRTInputSpec output_spec = MCFLIRTOutputSpec def _format_arg(self, name, spec, value): if name == "interpolation": if value == "trilinear": return "" else: return spec.argstr % value return super(MCFLIRT, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = self._gen_outfilename() output_dir = os.path.dirname(outputs["out_file"]) if isdefined(self.inputs.stats_imgs) and self.inputs.stats_imgs: if LooseVersion(Info.version()) < LooseVersion("6.0.0"): # FSL <6.0 outputs have .nii.gz_variance.nii.gz as extension outputs["variance_img"] = self._gen_fname( outputs["out_file"] + "_variance.ext", cwd=output_dir ) outputs["std_img"] = self._gen_fname( outputs["out_file"] + "_sigma.ext", cwd=output_dir ) else: outputs["variance_img"] = self._gen_fname( outputs["out_file"], suffix="_variance", cwd=output_dir ) outputs["std_img"] = self._gen_fname( outputs["out_file"], suffix="_sigma", cwd=output_dir ) # The mean image created if -stats option is specified ('meanvol') # is missing the top and bottom slices. Therefore we only expose the # mean image created by -meanvol option ('mean_reg') which isn't # corrupted. # Note that the same problem holds for the std and variance image. 
if isdefined(self.inputs.mean_vol) and self.inputs.mean_vol: if LooseVersion(Info.version()) < LooseVersion("6.0.0"): # FSL <6.0 outputs have .nii.gz_mean_img.nii.gz as extension outputs["mean_img"] = self._gen_fname( outputs["out_file"] + "_mean_reg.ext", cwd=output_dir ) else: outputs["mean_img"] = self._gen_fname( outputs["out_file"], suffix="_mean_reg", cwd=output_dir ) if isdefined(self.inputs.save_mats) and self.inputs.save_mats: _, filename = os.path.split(outputs["out_file"]) matpathname = os.path.join(output_dir, filename + ".mat") _, _, _, timepoints = load(self.inputs.in_file).shape outputs["mat_file"] = [] for t in range(timepoints): outputs["mat_file"].append(os.path.join(matpathname, "MAT_%04d" % t)) if isdefined(self.inputs.save_plots) and self.inputs.save_plots: # Note - if e.g. out_file has .nii.gz, you get .nii.gz.par, # which is what mcflirt does! outputs["par_file"] = outputs["out_file"] + ".par" if isdefined(self.inputs.save_rms) and self.inputs.save_rms: outfile = outputs["out_file"] outputs["rms_files"] = [outfile + "_abs.rms", outfile + "_rel.rms"] return outputs def _gen_filename(self, name): if name == "out_file": return self._gen_outfilename() return None def _gen_outfilename(self): out_file = self.inputs.out_file if isdefined(out_file): out_file = os.path.realpath(out_file) if not isdefined(out_file) and isdefined(self.inputs.in_file): out_file = self._gen_fname(self.inputs.in_file, suffix="_mcf") return os.path.abspath(out_file) class FNIRTInputSpec(FSLCommandInputSpec): ref_file = File( exists=True, argstr="--ref=%s", mandatory=True, desc="name of reference image" ) in_file = File( exists=True, argstr="--in=%s", mandatory=True, desc="name of input image" ) affine_file = File( exists=True, argstr="--aff=%s", desc="name of file containing affine transform" ) inwarp_file = File( exists=True, argstr="--inwarp=%s", desc="name of file containing initial non-linear warps", ) in_intensitymap_file = traits.List( File(exists=True), 
argstr="--intin=%s", copyfile=False, minlen=1, maxlen=2, desc=( "name of file/files containing " "initial intensity mapping " "usually generated by previous " "fnirt run" ), ) fieldcoeff_file = traits.Either( traits.Bool, File, argstr="--cout=%s", desc="name of output file with field coefficients or true", ) warped_file = File( argstr="--iout=%s", desc="name of output image", genfile=True, hash_files=False ) field_file = traits.Either( traits.Bool, File, argstr="--fout=%s", desc="name of output file with field or true", hash_files=False, ) jacobian_file = traits.Either( traits.Bool, File, argstr="--jout=%s", desc=( "name of file for writing out the " "Jacobian of the field (for " "diagnostic or VBM purposes)" ), hash_files=False, ) modulatedref_file = traits.Either( traits.Bool, File, argstr="--refout=%s", desc=( "name of file for writing out " "intensity modulated --ref (for " "diagnostic purposes)" ), hash_files=False, ) out_intensitymap_file = traits.Either( traits.Bool, File, argstr="--intout=%s", desc=( "name of files for writing " "information pertaining to " "intensity mapping" ), hash_files=False, ) log_file = File( argstr="--logout=%s", desc="Name of log-file", genfile=True, hash_files=False ) config_file = traits.Either( traits.Enum("T1_2_MNI152_2mm", "FA_2_FMRIB58_1mm"), File(exists=True), argstr="--config=%s", desc="Name of config file specifying command line arguments", ) refmask_file = File( exists=True, argstr="--refmask=%s", desc="name of file with mask in reference space", ) inmask_file = File( exists=True, argstr="--inmask=%s", desc="name of file with mask in input image space", ) skip_refmask = traits.Bool( argstr="--applyrefmask=0", xor=["apply_refmask"], desc="Skip specified refmask if set, default false", ) skip_inmask = traits.Bool( argstr="--applyinmask=0", xor=["apply_inmask"], desc="skip specified inmask if set, default false", ) apply_refmask = traits.List( traits.Enum(0, 1), argstr="--applyrefmask=%s", xor=["skip_refmask"], desc=("list 
of iterations to use reference mask on (1 to use, 0 to " "skip)"), sep=",", ) apply_inmask = traits.List( traits.Enum(0, 1), argstr="--applyinmask=%s", xor=["skip_inmask"], desc="list of iterations to use input mask on (1 to use, 0 to skip)", sep=",", ) skip_implicit_ref_masking = traits.Bool( argstr="--imprefm=0", desc=("skip implicit masking based on value in --ref image. " "Default = 0"), ) skip_implicit_in_masking = traits.Bool( argstr="--impinm=0", desc=("skip implicit masking based on value in --in image. " "Default = 0"), ) refmask_val = traits.Float( argstr="--imprefval=%f", desc="Value to mask out in --ref image. Default =0.0" ) inmask_val = traits.Float( argstr="--impinval=%f", desc="Value to mask out in --in image. Default =0.0" ) max_nonlin_iter = traits.List( traits.Int, argstr="--miter=%s", desc="Max # of non-linear iterations list, default [5, 5, 5, 5]", sep=",", ) subsampling_scheme = traits.List( traits.Int, argstr="--subsamp=%s", desc="sub-sampling scheme, list, default [4, 2, 1, 1]", sep=",", ) warp_resolution = traits.Tuple( traits.Int, traits.Int, traits.Int, argstr="--warpres=%d,%d,%d", desc=( "(approximate) resolution (in mm) of warp basis in x-, y- and " "z-direction, default 10, 10, 10" ), ) spline_order = traits.Int( argstr="--splineorder=%d", desc="Order of spline, 2->Qadratic spline, 3->Cubic spline. 
Default=3", ) in_fwhm = traits.List( traits.Int, argstr="--infwhm=%s", desc=( "FWHM (in mm) of gaussian smoothing kernel for input volume, " "default [6, 4, 2, 2]" ), sep=",", ) ref_fwhm = traits.List( traits.Int, argstr="--reffwhm=%s", desc=( "FWHM (in mm) of gaussian smoothing kernel for ref volume, " "default [4, 2, 0, 0]" ), sep=",", ) regularization_model = traits.Enum( "membrane_energy", "bending_energy", argstr="--regmod=%s", desc=( "Model for regularisation of warp-field [membrane_energy " "bending_energy], default bending_energy" ), ) regularization_lambda = traits.List( traits.Float, argstr="--lambda=%s", desc=( "Weight of regularisation, default depending on --ssqlambda and " "--regmod switches. See user documetation." ), sep=",", ) skip_lambda_ssq = traits.Bool( argstr="--ssqlambda=0", desc="If true, lambda is not weighted by current ssq, default false", ) jacobian_range = traits.Tuple( traits.Float, traits.Float, argstr="--jacrange=%f,%f", desc="Allowed range of Jacobian determinants, default 0.01, 100.0", ) derive_from_ref = traits.Bool( argstr="--refderiv", desc=("If true, ref image is used to calculate derivatives. 
" "Default false"), ) intensity_mapping_model = traits.Enum( "none", "global_linear", "global_non_linear", "local_linear", "global_non_linear_with_bias", "local_non_linear", argstr="--intmod=%s", desc="Model for intensity-mapping", ) intensity_mapping_order = traits.Int( argstr="--intorder=%d", desc="Order of poynomial for mapping intensities, default 5", ) biasfield_resolution = traits.Tuple( traits.Int, traits.Int, traits.Int, argstr="--biasres=%d,%d,%d", desc=( "Resolution (in mm) of bias-field modelling local intensities, " "default 50, 50, 50" ), ) bias_regularization_lambda = traits.Float( argstr="--biaslambda=%f", desc="Weight of regularisation for bias-field, default 10000", ) skip_intensity_mapping = traits.Bool( argstr="--estint=0", xor=["apply_intensity_mapping"], desc="Skip estimate intensity-mapping default false", ) apply_intensity_mapping = traits.List( traits.Enum(0, 1), argstr="--estint=%s", xor=["skip_intensity_mapping"], desc=( "List of subsampling levels to apply intensity mapping for " "(0 to skip, 1 to apply)" ), sep=",", ) hessian_precision = traits.Enum( "double", "float", argstr="--numprec=%s", desc=("Precision for representing Hessian, double or float. " "Default double"), ) class FNIRTOutputSpec(TraitedSpec): fieldcoeff_file = File(exists=True, desc="file with field coefficients") warped_file = File(exists=True, desc="warped image") field_file = File(desc="file with warp field") jacobian_file = File(desc="file containing Jacobian of the field") modulatedref_file = File(desc="file containing intensity modulated --ref") out_intensitymap_file = traits.List( File, minlen=2, maxlen=2, desc="files containing info pertaining to intensity mapping", ) log_file = File(desc="Name of log-file") class FNIRT(FSLCommand): """FSL FNIRT wrapper for non-linear registration For complete details, see the `FNIRT Documentation. 
`_ Examples -------- >>> from nipype.interfaces import fsl >>> from nipype.testing import example_data >>> fnt = fsl.FNIRT(affine_file=example_data('trans.mat')) >>> res = fnt.run(ref_file=example_data('mni.nii', in_file=example_data('structural.nii')) #doctest: +SKIP T1 -> Mni153 >>> from nipype.interfaces import fsl >>> fnirt_mprage = fsl.FNIRT() >>> fnirt_mprage.inputs.in_fwhm = [8, 4, 2, 2] >>> fnirt_mprage.inputs.subsampling_scheme = [4, 2, 1, 1] Specify the resolution of the warps >>> fnirt_mprage.inputs.warp_resolution = (6, 6, 6) >>> res = fnirt_mprage.run(in_file='structural.nii', ref_file='mni.nii', warped_file='warped.nii', fieldcoeff_file='fieldcoeff.nii')#doctest: +SKIP We can check the command line and confirm that it's what we expect. >>> fnirt_mprage.cmdline #doctest: +SKIP 'fnirt --cout=fieldcoeff.nii --in=structural.nii --infwhm=8,4,2,2 --ref=mni.nii --subsamp=4,2,1,1 --warpres=6,6,6 --iout=warped.nii' """ _cmd = "fnirt" input_spec = FNIRTInputSpec output_spec = FNIRTOutputSpec filemap = { "warped_file": "warped", "field_file": "field", "jacobian_file": "field_jacobian", "modulatedref_file": "modulated", "out_intensitymap_file": "intmap", "log_file": "log.txt", "fieldcoeff_file": "fieldwarp", } def _list_outputs(self): outputs = self.output_spec().get() for key, suffix in list(self.filemap.items()): inval = getattr(self.inputs, key) change_ext = True if key in ["warped_file", "log_file"]: if suffix.endswith(".txt"): change_ext = False if isdefined(inval): outputs[key] = os.path.abspath(inval) else: outputs[key] = self._gen_fname( self.inputs.in_file, suffix="_" + suffix, change_ext=change_ext ) elif isdefined(inval): if isinstance(inval, bool): if inval: outputs[key] = self._gen_fname( self.inputs.in_file, suffix="_" + suffix, change_ext=change_ext, ) else: outputs[key] = os.path.abspath(inval) if key == "out_intensitymap_file" and isdefined(outputs[key]): basename = FNIRT.intensitymap_file_basename(outputs[key]) outputs[key] = [outputs[key], 
"%s.txt" % basename] return outputs def _format_arg(self, name, spec, value): if name in ("in_intensitymap_file", "out_intensitymap_file"): if name == "out_intensitymap_file": value = self._list_outputs()[name] value = [FNIRT.intensitymap_file_basename(v) for v in value] assert len(set(value)) == 1, "Found different basenames for {}: {}".format( name, value ) return spec.argstr % value[0] if name in list(self.filemap.keys()): return spec.argstr % self._list_outputs()[name] return super(FNIRT, self)._format_arg(name, spec, value) def _gen_filename(self, name): if name in ["warped_file", "log_file"]: return self._list_outputs()[name] return None def write_config(self, configfile): """Writes out currently set options to specified config file XX TODO : need to figure out how the config file is written Parameters ---------- configfile : /path/to/configfile """ try: fid = open(configfile, "w+") except IOError: print("unable to create config_file %s" % (configfile)) for item in list(self.inputs.get().items()): fid.write("%s\n" % (item)) fid.close() @classmethod def intensitymap_file_basename(cls, f): """Removes valid intensitymap extensions from `f`, returning a basename that can refer to both intensitymap files. 
""" for ext in list(Info.ftypes.values()) + [".txt"]: if f.endswith(ext): return f[: -len(ext)] # TODO consider warning for this case return f class ApplyWarpInputSpec(FSLCommandInputSpec): in_file = File( exists=True, argstr="--in=%s", mandatory=True, position=0, desc="image to be warped", ) out_file = File( argstr="--out=%s", genfile=True, position=2, desc="output filename", hash_files=False, ) ref_file = File( exists=True, argstr="--ref=%s", mandatory=True, position=1, desc="reference image", ) field_file = File( exists=True, argstr="--warp=%s", desc="file containing warp field" ) abswarp = traits.Bool( argstr="--abs", xor=["relwarp"], desc="treat warp field as absolute: x' = w(x)" ) relwarp = traits.Bool( argstr="--rel", xor=["abswarp"], position=-1, desc="treat warp field as relative: x' = x + w(x)", ) datatype = traits.Enum( "char", "short", "int", "float", "double", argstr="--datatype=%s", desc="Force output data type [char short int float double].", ) supersample = traits.Bool( argstr="--super", desc="intermediary supersampling of output, default is off" ) superlevel = traits.Either( traits.Enum("a"), traits.Int, argstr="--superlevel=%s", desc=( "level of intermediary supersampling, a for 'automatic' or " "integer level. 
Default = 2" ), ) premat = File( exists=True, argstr="--premat=%s", desc="filename for pre-transform (affine matrix)", ) postmat = File( exists=True, argstr="--postmat=%s", desc="filename for post-transform (affine matrix)", ) mask_file = File( exists=True, argstr="--mask=%s", desc="filename for mask image (in reference space)", ) interp = traits.Enum( "nn", "trilinear", "sinc", "spline", argstr="--interp=%s", position=-2, desc="interpolation method", ) class ApplyWarpOutputSpec(TraitedSpec): out_file = File(exists=True, desc="Warped output file") class ApplyWarp(FSLCommand): """FSL's applywarp wrapper to apply the results of a FNIRT registration Examples -------- >>> from nipype.interfaces import fsl >>> from nipype.testing import example_data >>> aw = fsl.ApplyWarp() >>> aw.inputs.in_file = example_data('structural.nii') >>> aw.inputs.ref_file = example_data('mni.nii') >>> aw.inputs.field_file = 'my_coefficients_filed.nii' #doctest: +SKIP >>> res = aw.run() #doctest: +SKIP """ _cmd = "applywarp" input_spec = ApplyWarpInputSpec output_spec = ApplyWarpOutputSpec def _format_arg(self, name, spec, value): if name == "superlevel": return spec.argstr % str(value) return super(ApplyWarp, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() if not isdefined(self.inputs.out_file): outputs["out_file"] = self._gen_fname(self.inputs.in_file, suffix="_warp") else: outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()[name] return None class SliceTimerInputSpec(FSLCommandInputSpec): in_file = File( exists=True, argstr="--in=%s", mandatory=True, position=0, desc="filename of input timeseries", ) out_file = File( argstr="--out=%s", genfile=True, desc="filename of output timeseries", hash_files=False, ) index_dir = traits.Bool(argstr="--down", desc="slice indexing from top to bottom") time_repetition = traits.Float( argstr="--repeat=%f", 
desc="Specify TR of data - default is 3s" ) slice_direction = traits.Enum( 1, 2, 3, argstr="--direction=%d", desc="direction of slice acquisition (x=1, y=2, z=3) - default is z", ) interleaved = traits.Bool(argstr="--odd", desc="use interleaved acquisition") custom_timings = File( exists=True, argstr="--tcustom=%s", desc=( "slice timings, in fractions of TR, range 0:1 (default is 0.5 = " "no shift)" ), ) global_shift = traits.Float( argstr="--tglobal", desc="shift in fraction of TR, range 0:1 (default is 0.5 = no shift)", ) custom_order = File( exists=True, argstr="--ocustom=%s", desc=( "filename of single-column custom interleave order file (first " "slice is referred to as 1 not 0)" ), ) class SliceTimerOutputSpec(TraitedSpec): slice_time_corrected_file = File(exists=True, desc="slice time corrected file") class SliceTimer(FSLCommand): """FSL slicetimer wrapper to perform slice timing correction Examples -------- >>> from nipype.interfaces import fsl >>> from nipype.testing import example_data >>> st = fsl.SliceTimer() >>> st.inputs.in_file = example_data('functional.nii') >>> st.inputs.interleaved = True >>> result = st.run() #doctest: +SKIP """ _cmd = "slicetimer" input_spec = SliceTimerInputSpec output_spec = SliceTimerOutputSpec def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.out_file if not isdefined(out_file): out_file = self._gen_fname(self.inputs.in_file, suffix="_st") outputs["slice_time_corrected_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()["slice_time_corrected_file"] return None class SUSANInputSpec(FSLCommandInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=1, desc="filename of input timeseries", ) brightness_threshold = traits.Float( argstr="%.10f", position=2, mandatory=True, desc=( "brightness threshold and should be greater than noise level " "and less than contrast of edges to be preserved." 
), ) fwhm = traits.Float( argstr="%.10f", position=3, mandatory=True, desc="fwhm of smoothing, in mm, gets converted using sqrt(8*log(2))", ) dimension = traits.Enum( 3, 2, argstr="%d", position=4, usedefault=True, desc="within-plane (2) or fully 3D (3)", ) use_median = traits.Enum( 1, 0, argstr="%d", position=5, usedefault=True, desc=( "whether to use a local median filter in the cases where " "single-point noise is detected" ), ) usans = traits.List( traits.Tuple(File(exists=True), traits.Float), maxlen=2, argstr="", position=6, usedefault=True, desc="determines whether the smoothing area (USAN) is to be " "found from secondary images (0, 1 or 2). A negative " "value for any brightness threshold will auto-set the " "threshold at 10% of the robust range", ) out_file = File( argstr="%s", position=-1, genfile=True, desc="output file name", hash_files=False, ) class SUSANOutputSpec(TraitedSpec): smoothed_file = File(exists=True, desc="smoothed output file") class SUSAN(FSLCommand): """FSL SUSAN wrapper to perform smoothing For complete details, see the `SUSAN Documentation. 
`_ Examples -------- >>> from nipype.interfaces import fsl >>> from nipype.testing import example_data >>> anatfile # doctest: +SKIP anatomical.nii # doctest: +SKIP >>> sus = fsl.SUSAN() >>> sus.inputs.in_file = example_data('structural.nii') >>> sus.inputs.brightness_threshold = 2000.0 >>> sus.inputs.fwhm = 8.0 >>> result = sus.run() # doctest: +SKIP """ _cmd = "susan" input_spec = SUSANInputSpec output_spec = SUSANOutputSpec def _format_arg(self, name, spec, value): if name == "fwhm": return spec.argstr % (float(value) / np.sqrt(8 * np.log(2))) if name == "usans": if not value: return "0" arglist = [str(len(value))] for filename, thresh in value: arglist.extend([filename, "%.10f" % thresh]) return " ".join(arglist) return super(SUSAN, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.out_file if not isdefined(out_file): out_file = self._gen_fname(self.inputs.in_file, suffix="_smooth") outputs["smoothed_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()["smoothed_file"] return None class FUGUEInputSpec(FSLCommandInputSpec): in_file = File(exists=True, argstr="--in=%s", desc="filename of input volume") shift_in_file = File( exists=True, argstr="--loadshift=%s", desc="filename for reading pixel shift volume", ) phasemap_in_file = File( exists=True, argstr="--phasemap=%s", desc="filename for input phase image" ) fmap_in_file = File( exists=True, argstr="--loadfmap=%s", desc="filename for loading fieldmap (rad/s)", ) unwarped_file = File( argstr="--unwarp=%s", desc="apply unwarping and save as filename", xor=["warped_file"], requires=["in_file"], ) warped_file = File( argstr="--warp=%s", desc="apply forward warping and save as filename", xor=["unwarped_file"], requires=["in_file"], ) forward_warping = traits.Bool( False, usedefault=True, desc="apply forward warping instead of unwarping" ) dwell_to_asym_ratio = 
traits.Float( argstr="--dwelltoasym=%.10f", desc="set the dwell to asym time ratio" ) dwell_time = traits.Float( argstr="--dwell=%.10f", desc=( "set the EPI dwell time per phase-encode line - same as echo " "spacing - (sec)" ), ) asym_se_time = traits.Float( argstr="--asym=%.10f", desc="set the fieldmap asymmetric spin echo time (sec)" ) median_2dfilter = traits.Bool(argstr="--median", desc="apply 2D median filtering") despike_2dfilter = traits.Bool( argstr="--despike", desc="apply a 2D de-spiking filter" ) no_gap_fill = traits.Bool( argstr="--nofill", desc="do not apply gap-filling measure to the fieldmap" ) no_extend = traits.Bool( argstr="--noextend", desc="do not apply rigid-body extrapolation to the fieldmap", ) smooth2d = traits.Float( argstr="--smooth2=%.2f", desc="apply 2D Gaussian smoothing of sigma N (in mm)" ) smooth3d = traits.Float( argstr="--smooth3=%.2f", desc="apply 3D Gaussian smoothing of sigma N (in mm)" ) poly_order = traits.Int( argstr="--poly=%d", desc="apply polynomial fitting of order N" ) fourier_order = traits.Int( argstr="--fourier=%d", desc="apply Fourier (sinusoidal) fitting of order N" ) pava = traits.Bool(argstr="--pava", desc="apply monotonic enforcement via PAVA") despike_threshold = traits.Float( argstr="--despikethreshold=%s", desc="specify the threshold for de-spiking (default=3.0)", ) unwarp_direction = traits.Enum( "x", "y", "z", "x-", "y-", "z-", argstr="--unwarpdir=%s", desc="specifies direction of warping (default y)", ) phase_conjugate = traits.Bool( argstr="--phaseconj", desc="apply phase conjugate method of unwarping" ) icorr = traits.Bool( argstr="--icorr", requires=["shift_in_file"], desc=("apply intensity correction to unwarping (pixel shift method " "only)"), ) icorr_only = traits.Bool( argstr="--icorronly", requires=["unwarped_file"], desc="apply intensity correction only", ) mask_file = File( exists=True, argstr="--mask=%s", desc="filename for loading valid mask" ) nokspace = traits.Bool( False, argstr="--nokspace", 
desc="do not use k-space forward warping" ) # Special outputs: shift (voxel shift map, vsm) save_shift = traits.Bool( False, xor=["save_unmasked_shift"], desc="write pixel shift volume" ) shift_out_file = File( argstr="--saveshift=%s", desc="filename for saving pixel shift volume" ) save_unmasked_shift = traits.Bool( argstr="--unmaskshift", xor=["save_shift"], desc="saves the unmasked shiftmap when using --saveshift", ) # Special outputs: fieldmap (fmap) save_fmap = traits.Bool( False, xor=["save_unmasked_fmap"], desc="write field map volume" ) fmap_out_file = File( argstr="--savefmap=%s", desc="filename for saving fieldmap (rad/s)" ) save_unmasked_fmap = traits.Bool( False, argstr="--unmaskfmap", xor=["save_fmap"], desc="saves the unmasked fieldmap when using --savefmap", ) class FUGUEOutputSpec(TraitedSpec): unwarped_file = File(desc="unwarped file") warped_file = File(desc="forward warped file") shift_out_file = File(desc="voxel shift map file") fmap_out_file = File(desc="fieldmap file") class FUGUE(FSLCommand): """FSL FUGUE set of tools for EPI distortion correction `FUGUE `_ is, most generally, a set of tools for EPI distortion correction. Distortions may be corrected for 1. improving registration with non-distorted images (e.g. structurals), or 2. dealing with motion-dependent changes. FUGUE is designed to deal only with the first case - improving registration. 
Examples -------- Unwarping an input image (shift map is known): >>> from nipype.interfaces.fsl.preprocess import FUGUE >>> fugue = FUGUE() >>> fugue.inputs.in_file = 'epi.nii' >>> fugue.inputs.mask_file = 'epi_mask.nii' >>> fugue.inputs.shift_in_file = 'vsm.nii' # Previously computed with fugue as well >>> fugue.inputs.unwarp_direction = 'y' >>> fugue.inputs.output_type = "NIFTI_GZ" >>> fugue.cmdline # doctest: +ELLIPSIS 'fugue --in=epi.nii --mask=epi_mask.nii --loadshift=vsm.nii --unwarpdir=y --unwarp=epi_unwarped.nii.gz' >>> fugue.run() #doctest: +SKIP Warping an input image (shift map is known): >>> from nipype.interfaces.fsl.preprocess import FUGUE >>> fugue = FUGUE() >>> fugue.inputs.in_file = 'epi.nii' >>> fugue.inputs.forward_warping = True >>> fugue.inputs.mask_file = 'epi_mask.nii' >>> fugue.inputs.shift_in_file = 'vsm.nii' # Previously computed with fugue as well >>> fugue.inputs.unwarp_direction = 'y' >>> fugue.inputs.output_type = "NIFTI_GZ" >>> fugue.cmdline # doctest: +ELLIPSIS 'fugue --in=epi.nii --mask=epi_mask.nii --loadshift=vsm.nii --unwarpdir=y --warp=epi_warped.nii.gz' >>> fugue.run() #doctest: +SKIP Computing the vsm (unwrapped phase map is known): >>> from nipype.interfaces.fsl.preprocess import FUGUE >>> fugue = FUGUE() >>> fugue.inputs.phasemap_in_file = 'epi_phasediff.nii' >>> fugue.inputs.mask_file = 'epi_mask.nii' >>> fugue.inputs.dwell_to_asym_ratio = (0.77e-3 * 3) / 2.46e-3 >>> fugue.inputs.unwarp_direction = 'y' >>> fugue.inputs.save_shift = True >>> fugue.inputs.output_type = "NIFTI_GZ" >>> fugue.cmdline # doctest: +ELLIPSIS 'fugue --dwelltoasym=0.9390243902 --mask=epi_mask.nii --phasemap=epi_phasediff.nii --saveshift=epi_phasediff_vsm.nii.gz --unwarpdir=y' >>> fugue.run() #doctest: +SKIP """ _cmd = "fugue" input_spec = FUGUEInputSpec output_spec = FUGUEOutputSpec def _parse_inputs(self, skip=None): if skip is None: skip = [] input_phase = isdefined(self.inputs.phasemap_in_file) input_vsm = isdefined(self.inputs.shift_in_file) 
input_fmap = isdefined(self.inputs.fmap_in_file) if not input_phase and not input_vsm and not input_fmap: raise RuntimeError( ( "Either phasemap_in_file, shift_in_file or fmap_in_file must " "be set." ) ) if not isdefined(self.inputs.in_file): skip += ["unwarped_file", "warped_file"] else: if self.inputs.forward_warping: skip += ["unwarped_file"] trait_spec = self.inputs.trait("warped_file") trait_spec.name_template = "%s_warped" trait_spec.name_source = "in_file" trait_spec.output_name = "warped_file" else: skip += ["warped_file"] trait_spec = self.inputs.trait("unwarped_file") trait_spec.name_template = "%s_unwarped" trait_spec.name_source = "in_file" trait_spec.output_name = "unwarped_file" # Handle shift output if not isdefined(self.inputs.shift_out_file): vsm_save_masked = ( isdefined(self.inputs.save_shift) and self.inputs.save_shift ) vsm_save_unmasked = ( isdefined(self.inputs.save_unmasked_shift) and self.inputs.save_unmasked_shift ) if vsm_save_masked or vsm_save_unmasked: trait_spec = self.inputs.trait("shift_out_file") trait_spec.output_name = "shift_out_file" if input_fmap: trait_spec.name_source = "fmap_in_file" elif input_phase: trait_spec.name_source = "phasemap_in_file" elif input_vsm: trait_spec.name_source = "shift_in_file" else: raise RuntimeError( ( "Either phasemap_in_file, shift_in_file or " "fmap_in_file must be set." 
) ) if vsm_save_unmasked: trait_spec.name_template = "%s_vsm_unmasked" else: trait_spec.name_template = "%s_vsm" else: skip += ["save_shift", "save_unmasked_shift", "shift_out_file"] # Handle fieldmap output if not isdefined(self.inputs.fmap_out_file): fmap_save_masked = ( isdefined(self.inputs.save_fmap) and self.inputs.save_fmap ) fmap_save_unmasked = ( isdefined(self.inputs.save_unmasked_fmap) and self.inputs.save_unmasked_fmap ) if fmap_save_masked or fmap_save_unmasked: trait_spec = self.inputs.trait("fmap_out_file") trait_spec.output_name = "fmap_out_file" if input_vsm: trait_spec.name_source = "shift_in_file" elif input_phase: trait_spec.name_source = "phasemap_in_file" elif input_fmap: trait_spec.name_source = "fmap_in_file" else: raise RuntimeError( ( "Either phasemap_in_file, shift_in_file or " "fmap_in_file must be set." ) ) if fmap_save_unmasked: trait_spec.name_template = "%s_fieldmap_unmasked" else: trait_spec.name_template = "%s_fieldmap" else: skip += ["save_fmap", "save_unmasked_fmap", "fmap_out_file"] return super(FUGUE, self)._parse_inputs(skip=skip) class PRELUDEInputSpec(FSLCommandInputSpec): complex_phase_file = File( exists=True, argstr="--complex=%s", mandatory=True, xor=["magnitude_file", "phase_file"], desc="complex phase input volume", ) magnitude_file = File( exists=True, argstr="--abs=%s", mandatory=True, xor=["complex_phase_file"], desc="file containing magnitude image", ) phase_file = File( exists=True, argstr="--phase=%s", mandatory=True, xor=["complex_phase_file"], desc="raw phase file", ) unwrapped_phase_file = File( genfile=True, argstr="--unwrap=%s", desc="file containing unwrapepd phase", hash_files=False, ) num_partitions = traits.Int( argstr="--numphasesplit=%d", desc="number of phase partitions to use" ) labelprocess2d = traits.Bool( argstr="--labelslices", desc="does label processing in 2D (slice at a time)" ) process2d = traits.Bool( argstr="--slices", xor=["labelprocess2d"], desc="does all processing in 2D (slice at a 
time)", ) process3d = traits.Bool( argstr="--force3D", xor=["labelprocess2d", "process2d"], desc="forces all processing to be full 3D", ) threshold = traits.Float( argstr="--thresh=%.10f", desc="intensity threshold for masking" ) mask_file = File( exists=True, argstr="--mask=%s", desc="filename of mask input volume" ) start = traits.Int( argstr="--start=%d", desc="first image number to process (default 0)" ) end = traits.Int( argstr="--end=%d", desc="final image number to process (default Inf)" ) savemask_file = File( argstr="--savemask=%s", desc="saving the mask volume", hash_files=False ) rawphase_file = File( argstr="--rawphase=%s", desc="saving the raw phase output", hash_files=False ) label_file = File( argstr="--labels=%s", desc="saving the area labels output", hash_files=False ) removeramps = traits.Bool( argstr="--removeramps", desc="remove phase ramps during unwrapping" ) class PRELUDEOutputSpec(TraitedSpec): unwrapped_phase_file = File(exists=True, desc="unwrapped phase file") class PRELUDE(FSLCommand): """FSL prelude wrapper for phase unwrapping Examples -------- Please insert examples for use of this command """ input_spec = PRELUDEInputSpec output_spec = PRELUDEOutputSpec _cmd = "prelude" def __init__(self, **kwargs): super(PRELUDE, self).__init__(**kwargs) warn("This has not been fully tested. 
Please report any failures.") def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.unwrapped_phase_file if not isdefined(out_file): if isdefined(self.inputs.phase_file): out_file = self._gen_fname(self.inputs.phase_file, suffix="_unwrapped") elif isdefined(self.inputs.complex_phase_file): out_file = self._gen_fname( self.inputs.complex_phase_file, suffix="_phase_unwrapped" ) outputs["unwrapped_phase_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): if name == "unwrapped_phase_file": return self._list_outputs()["unwrapped_phase_file"] return None class FIRSTInputSpec(FSLCommandInputSpec): in_file = File( exists=True, mandatory=True, position=-2, copyfile=False, argstr="-i %s", desc="input data file", ) out_file = File( "segmented", usedefault=True, mandatory=True, position=-1, argstr="-o %s", desc="output data file", hash_files=False, ) verbose = traits.Bool(argstr="-v", position=1, desc="Use verbose logging.") brain_extracted = traits.Bool( argstr="-b", position=2, desc="Input structural image is already brain-extracted", ) no_cleanup = traits.Bool( argstr="-d", position=3, desc="Input structural image is already brain-extracted", ) method = traits.Enum( "auto", "fast", "none", xor=["method_as_numerical_threshold"], argstr="-m %s", position=4, usedefault=True, desc=( "Method must be one of auto, fast, none, or it can be entered " "using the 'method_as_numerical_threshold' input" ), ) method_as_numerical_threshold = traits.Float( argstr="-m %.4f", position=4, desc=( "Specify a numerical threshold value or use the 'method' input " "to choose auto, fast, or none" ), ) list_of_specific_structures = traits.List( traits.Str, argstr="-s %s", sep=",", position=5, minlen=1, desc="Runs only on the specified structures (e.g. 
L_Hipp, R_Hipp" "L_Accu, R_Accu, L_Amyg, R_Amyg" "L_Caud, R_Caud, L_Pall, R_Pall" "L_Puta, R_Puta, L_Thal, R_Thal, BrStem", ) affine_file = File( exists=True, position=6, argstr="-a %s", desc=( "Affine matrix to use (e.g. img2std.mat) (does not " "re-run registration)" ), ) class FIRSTOutputSpec(TraitedSpec): vtk_surfaces = OutputMultiPath( File(exists=True), desc="VTK format meshes for each subcortical region" ) bvars = OutputMultiPath(File(exists=True), desc="bvars for each subcortical region") original_segmentations = File( exists=True, desc=( "3D image file containing the segmented regions " "as integer values. Uses CMA labelling" ), ) segmentation_file = File( exists=True, desc=("4D image file containing a single volume per " "segmented region"), ) class FIRST(FSLCommand): """FSL run_first_all wrapper for segmentation of subcortical volumes http://www.fmrib.ox.ac.uk/fsl/first/index.html Examples -------- >>> from nipype.interfaces import fsl >>> first = fsl.FIRST() >>> first.inputs.in_file = 'structural.nii' >>> first.inputs.out_file = 'segmented.nii' >>> res = first.run() #doctest: +SKIP """ _cmd = "run_first_all" input_spec = FIRSTInputSpec output_spec = FIRSTOutputSpec def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.list_of_specific_structures): structures = self.inputs.list_of_specific_structures else: structures = [ "L_Hipp", "R_Hipp", "L_Accu", "R_Accu", "L_Amyg", "R_Amyg", "L_Caud", "R_Caud", "L_Pall", "R_Pall", "L_Puta", "R_Puta", "L_Thal", "R_Thal", "BrStem", ] outputs["original_segmentations"] = self._gen_fname("original_segmentations") outputs["segmentation_file"] = self._gen_fname("segmentation_file") outputs["vtk_surfaces"] = self._gen_mesh_names("vtk_surfaces", structures) outputs["bvars"] = self._gen_mesh_names("bvars", structures) return outputs def _gen_fname(self, basename): path, outname, ext = split_filename(self.inputs.out_file) method = "none" if isdefined(self.inputs.method) and self.inputs.method != 
"none": method = "fast" if self.inputs.list_of_specific_structures and self.inputs.method == "auto": method = "none" if isdefined(self.inputs.method_as_numerical_threshold): thres = "%.4f" % self.inputs.method_as_numerical_threshold method = thres.replace(".", "") if basename == "original_segmentations": return op.abspath("%s_all_%s_origsegs.nii.gz" % (outname, method)) if basename == "segmentation_file": return op.abspath("%s_all_%s_firstseg.nii.gz" % (outname, method)) return None def _gen_mesh_names(self, name, structures): path, prefix, ext = split_filename(self.inputs.out_file) if name == "vtk_surfaces": vtks = list() for struct in structures: vtk = prefix + "-" + struct + "_first.vtk" vtks.append(op.abspath(vtk)) return vtks if name == "bvars": bvars = list() for struct in structures: bvar = prefix + "-" + struct + "_first.bvars" bvars.append(op.abspath(bvar)) return bvars return None nipype-1.7.0/nipype/interfaces/fsl/tests/000077500000000000000000000000001413403311400203725ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/fsl/tests/__init__.py000066400000000000000000000000301413403311400224740ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/fsl/tests/test_FILMGLS.py000066400000000000000000000115061413403311400231030ustar00rootroot00000000000000# -*- coding: utf-8 -*- from nipype.interfaces.fsl.model import FILMGLS, FILMGLSInputSpec def test_filmgls(): input_map = dict( args=dict(argstr="%s"), autocorr_estimate_only=dict( xor=[ "autocorr_estimate_only", "fit_armodel", "tukey_window", "multitaper_product", "use_pava", "autocorr_noestimate", ], argstr="-ac", ), autocorr_noestimate=dict( xor=[ "autocorr_estimate_only", "fit_armodel", "tukey_window", "multitaper_product", "use_pava", "autocorr_noestimate", ], argstr="-noest", ), brightness_threshold=dict(argstr="-epith %d"), design_file=dict(argstr="%s"), environ=dict(usedefault=True), fit_armodel=dict( xor=[ "autocorr_estimate_only", "fit_armodel", "tukey_window", 
"multitaper_product", "use_pava", "autocorr_noestimate", ], argstr="-ar", ), full_data=dict(argstr="-v"), in_file=dict(mandatory=True, argstr="%s"), mask_size=dict(argstr="-ms %d"), multitaper_product=dict( xor=[ "autocorr_estimate_only", "fit_armodel", "tukey_window", "multitaper_product", "use_pava", "autocorr_noestimate", ], argstr="-mt %d", ), output_pwdata=dict(argstr="-output_pwdata"), output_type=dict(), results_dir=dict(usedefault=True, argstr="-rn %s"), smooth_autocorr=dict(argstr="-sa"), threshold=dict(argstr="%f"), tukey_window=dict( xor=[ "autocorr_estimate_only", "fit_armodel", "tukey_window", "multitaper_product", "use_pava", "autocorr_noestimate", ], argstr="-tukey %d", ), use_pava=dict(argstr="-pava"), ) input_map2 = dict( args=dict(argstr="%s"), autocorr_estimate_only=dict( xor=[ "autocorr_estimate_only", "fit_armodel", "tukey_window", "multitaper_product", "use_pava", "autocorr_noestimate", ], argstr="--ac", ), autocorr_noestimate=dict( xor=[ "autocorr_estimate_only", "fit_armodel", "tukey_window", "multitaper_product", "use_pava", "autocorr_noestimate", ], argstr="--noest", ), brightness_threshold=dict(argstr="--epith=%d"), design_file=dict(argstr="--pd=%s"), environ=dict(usedefault=True), fit_armodel=dict( xor=[ "autocorr_estimate_only", "fit_armodel", "tukey_window", "multitaper_product", "use_pava", "autocorr_noestimate", ], argstr="--ar", ), full_data=dict(argstr="-v"), in_file=dict(mandatory=True, argstr="--in=%s"), mask_size=dict(argstr="--ms=%d"), multitaper_product=dict( xor=[ "autocorr_estimate_only", "fit_armodel", "tukey_window", "multitaper_product", "use_pava", "autocorr_noestimate", ], argstr="--mt=%d", ), output_pwdata=dict(argstr="--outputPWdata"), output_type=dict(), results_dir=dict(argstr="--rn=%s", usedefault=True), smooth_autocorr=dict(argstr="--sa"), threshold=dict(usedefault=True, argstr="--thr=%f"), tukey_window=dict( xor=[ "autocorr_estimate_only", "fit_armodel", "tukey_window", "multitaper_product", "use_pava", 
"autocorr_noestimate", ], argstr="--tukey=%d", ), use_pava=dict(argstr="--pava"), ) instance = FILMGLS() if isinstance(instance.inputs, FILMGLSInputSpec): for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(instance.inputs.traits()[key], metakey) == value else: for key, metadata in list(input_map2.items()): for metakey, value in list(metadata.items()): assert getattr(instance.inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_Level1Design_functions.py000066400000000000000000000021641413403311400263600ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os from ...base import Undefined from ..model import Level1Design def test_level1design(tmpdir): old = tmpdir.chdir() l = Level1Design() runinfo = dict( cond=[{"name": "test_condition", "onset": [0, 10], "duration": [10, 10]}], regress=[], ) runidx = 0 contrasts = Undefined do_tempfilter = False orthogonalization = {} basic_ev_parameters = {"temporalderiv": False} convolution_variants = [ ("custom", 7, {"temporalderiv": False, "bfcustompath": "/some/path"}), ("hrf", 3, basic_ev_parameters), ("dgamma", 3, basic_ev_parameters), ("gamma", 2, basic_ev_parameters), ("none", 0, basic_ev_parameters), ] for key, val, ev_parameters in convolution_variants: output_num, output_txt = Level1Design._create_ev_files( l, os.getcwd(), runinfo, runidx, ev_parameters, orthogonalization, contrasts, do_tempfilter, key, ) assert "set fmri(convolve1) {0}".format(val) in output_txt nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_AR1Image.py000066400000000000000000000030231413403311400243570ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import AR1Image def test_AR1Image_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="-%sar1", position=4, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, 
position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), nan2zeros=dict( argstr="-nan", position=3, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), ) inputs = AR1Image.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AR1Image_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = AR1Image.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_AccuracyTester.py000066400000000000000000000024061413403311400257560ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..fix import AccuracyTester def test_AccuracyTester_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), mel_icas=dict( argstr="%s", copyfile=False, mandatory=True, position=3, ), output_directory=dict( argstr="%s", mandatory=True, position=2, ), trained_wts_file=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), ) inputs = AccuracyTester.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AccuracyTester_outputs(): output_map = dict( output_directory=dict( argstr="%s", position=1, ), ) outputs = AccuracyTester.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_ApplyMask.py000066400000000000000000000030651413403311400247400ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT 
from ..maths import ApplyMask def test_ApplyMask_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), mask_file=dict( argstr="-mas %s", extensions=None, mandatory=True, position=4, ), nan2zeros=dict( argstr="-nan", position=3, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), ) inputs = ApplyMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ApplyMask_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ApplyMask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_ApplyTOPUP.py000066400000000000000000000035451413403311400247570ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..epi import ApplyTOPUP def test_ApplyTOPUP_inputs(): input_map = dict( args=dict( argstr="%s", ), datatype=dict( argstr="-d=%s", ), encoding_file=dict( argstr="--datain=%s", extensions=None, mandatory=True, ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( argstr="--imain=%s", mandatory=True, sep=",", ), in_index=dict( argstr="--inindex=%s", sep=",", ), in_topup_fieldcoef=dict( argstr="--topup=%s", copyfile=False, extensions=None, requires=["in_topup_movpar"], ), in_topup_movpar=dict( copyfile=False, extensions=None, requires=["in_topup_fieldcoef"], ), interp=dict( argstr="--interp=%s", ), method=dict( argstr="--method=%s", ), out_corrected=dict( argstr="--out=%s", extensions=None, name_source=["in_files"], name_template="%s_corrected", ), 
output_type=dict(), ) inputs = ApplyTOPUP.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ApplyTOPUP_outputs(): output_map = dict( out_corrected=dict( extensions=None, ), ) outputs = ApplyTOPUP.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_ApplyWarp.py000066400000000000000000000042521413403311400247550ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import ApplyWarp def test_ApplyWarp_inputs(): input_map = dict( abswarp=dict( argstr="--abs", xor=["relwarp"], ), args=dict( argstr="%s", ), datatype=dict( argstr="--datatype=%s", ), environ=dict( nohash=True, usedefault=True, ), field_file=dict( argstr="--warp=%s", extensions=None, ), in_file=dict( argstr="--in=%s", extensions=None, mandatory=True, position=0, ), interp=dict( argstr="--interp=%s", position=-2, ), mask_file=dict( argstr="--mask=%s", extensions=None, ), out_file=dict( argstr="--out=%s", extensions=None, genfile=True, hash_files=False, position=2, ), output_type=dict(), postmat=dict( argstr="--postmat=%s", extensions=None, ), premat=dict( argstr="--premat=%s", extensions=None, ), ref_file=dict( argstr="--ref=%s", extensions=None, mandatory=True, position=1, ), relwarp=dict( argstr="--rel", position=-1, xor=["abswarp"], ), superlevel=dict( argstr="--superlevel=%s", ), supersample=dict( argstr="--super", ), ) inputs = ApplyWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ApplyWarp_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ApplyWarp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_ApplyXFM.py000066400000000000000000000130431413403311400244740ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import ApplyXFM def test_ApplyXFM_inputs(): input_map = dict( angle_rep=dict( argstr="-anglerep %s", ), apply_isoxfm=dict( argstr="-applyisoxfm %f", xor=["apply_xfm"], ), apply_xfm=dict( argstr="-applyxfm", usedefault=True, ), args=dict( argstr="%s", ), bbrslope=dict( argstr="-bbrslope %f", min_ver="5.0.0", ), bbrtype=dict( argstr="-bbrtype %s", min_ver="5.0.0", ), bgvalue=dict( argstr="-setbackground %f", ), bins=dict( argstr="-bins %d", ), coarse_search=dict( argstr="-coarsesearch %d", units="degrees", ), cost=dict( argstr="-cost %s", ), cost_func=dict( argstr="-searchcost %s", ), datatype=dict( argstr="-datatype %s", ), display_init=dict( argstr="-displayinit", ), dof=dict( argstr="-dof %d", ), echospacing=dict( argstr="-echospacing %f", min_ver="5.0.0", ), environ=dict( nohash=True, usedefault=True, ), fieldmap=dict( argstr="-fieldmap %s", extensions=None, min_ver="5.0.0", ), fieldmapmask=dict( argstr="-fieldmapmask %s", extensions=None, min_ver="5.0.0", ), fine_search=dict( argstr="-finesearch %d", units="degrees", ), force_scaling=dict( argstr="-forcescaling", ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, position=0, ), in_matrix_file=dict( argstr="-init %s", extensions=None, ), in_weight=dict( argstr="-inweight %s", extensions=None, ), interp=dict( argstr="-interp %s", ), min_sampling=dict( argstr="-minsampling %f", units="mm", ), no_clamp=dict( argstr="-noclamp", ), no_resample=dict( argstr="-noresample", ), no_resample_blur=dict( argstr="-noresampblur", ), no_search=dict( argstr="-nosearch", ), out_file=dict( argstr="-out %s", extensions=None, hash_files=False, name_source=["in_file"], name_template="%s_flirt", position=2, ), out_log=dict( 
extensions=None, keep_extension=True, name_source=["in_file"], name_template="%s_flirt.log", requires=["save_log"], ), out_matrix_file=dict( argstr="-omat %s", extensions=None, hash_files=False, keep_extension=True, name_source=["in_file"], name_template="%s_flirt.mat", position=3, ), output_type=dict(), padding_size=dict( argstr="-paddingsize %d", units="voxels", ), pedir=dict( argstr="-pedir %d", min_ver="5.0.0", ), ref_weight=dict( argstr="-refweight %s", extensions=None, ), reference=dict( argstr="-ref %s", extensions=None, mandatory=True, position=1, ), rigid2D=dict( argstr="-2D", ), save_log=dict(), schedule=dict( argstr="-schedule %s", extensions=None, ), searchr_x=dict( argstr="-searchrx %s", units="degrees", ), searchr_y=dict( argstr="-searchry %s", units="degrees", ), searchr_z=dict( argstr="-searchrz %s", units="degrees", ), sinc_width=dict( argstr="-sincwidth %d", units="voxels", ), sinc_window=dict( argstr="-sincwindow %s", ), uses_qform=dict( argstr="-usesqform", ), verbose=dict( argstr="-verbose %d", ), wm_seg=dict( argstr="-wmseg %s", extensions=None, min_ver="5.0.0", ), wmcoords=dict( argstr="-wmcoords %s", extensions=None, min_ver="5.0.0", ), wmnorms=dict( argstr="-wmnorms %s", extensions=None, min_ver="5.0.0", ), ) inputs = ApplyXFM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ApplyXFM_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_log=dict( extensions=None, ), out_matrix_file=dict( extensions=None, ), ) outputs = ApplyXFM.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_AvScale.py000066400000000000000000000025501413403311400243530ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import AvScale 
def test_AvScale_inputs(): input_map = dict( all_param=dict( argstr="--allparams", ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), mat_file=dict( argstr="%s", extensions=None, position=-2, ), ref_file=dict( argstr="%s", extensions=None, position=-1, ), ) inputs = AvScale.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AvScale_outputs(): output_map = dict( average_scaling=dict(), backward_half_transform=dict(), determinant=dict(), forward_half_transform=dict(), left_right_orientation_preserved=dict(), rot_angles=dict(), rotation_translation_matrix=dict(), scales=dict(), skews=dict(), translations=dict(), ) outputs = AvScale.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_B0Calc.py000066400000000000000000000046561413403311400240720ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..possum import B0Calc def test_B0Calc_inputs(): input_map = dict( args=dict( argstr="%s", ), chi_air=dict( argstr="--chi0=%e", usedefault=True, ), compute_xyz=dict( argstr="--xyz", usedefault=True, ), delta=dict( argstr="-d %e", usedefault=True, ), directconv=dict( argstr="--directconv", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), extendboundary=dict( argstr="--extendboundary=%0.2f", usedefault=True, ), in_file=dict( argstr="-i %s", extensions=None, mandatory=True, position=0, ), out_file=dict( argstr="-o %s", extensions=None, name_source=["in_file"], name_template="%s_b0field", output_name="out_file", position=1, ), output_type=dict(), x_b0=dict( argstr="--b0x=%0.2f", usedefault=True, xor=["xyz_b0"], ), x_grad=dict( argstr="--gx=%0.4f", usedefault=True, ), xyz_b0=dict( argstr="--b0x=%0.2f --b0y=%0.2f --b0=%0.2f", 
xor=["x_b0", "y_b0", "z_b0"], ), y_b0=dict( argstr="--b0y=%0.2f", usedefault=True, xor=["xyz_b0"], ), y_grad=dict( argstr="--gy=%0.4f", usedefault=True, ), z_b0=dict( argstr="--b0=%0.2f", usedefault=True, xor=["xyz_b0"], ), z_grad=dict( argstr="--gz=%0.4f", usedefault=True, ), ) inputs = B0Calc.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_B0Calc_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = B0Calc.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_BEDPOSTX5.py000066400000000000000000000070651413403311400243600ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import BEDPOSTX5 def test_BEDPOSTX5_inputs(): input_map = dict( all_ard=dict( argstr="--allard", xor=("no_ard", "all_ard"), ), args=dict( argstr="%s", ), burn_in=dict( argstr="-b %d", usedefault=True, ), burn_in_no_ard=dict( argstr="--burnin_noard=%d", usedefault=True, ), bvals=dict( extensions=None, mandatory=True, ), bvecs=dict( extensions=None, mandatory=True, ), cnlinear=dict( argstr="--cnonlinear", xor=("no_spat", "non_linear", "cnlinear"), ), dwi=dict( extensions=None, mandatory=True, ), environ=dict( nohash=True, usedefault=True, ), f0_ard=dict( argstr="--f0 --ardf0", xor=["f0_noard", "f0_ard", "all_ard"], ), f0_noard=dict( argstr="--f0", xor=["f0_noard", "f0_ard"], ), force_dir=dict( argstr="--forcedir", usedefault=True, ), fudge=dict( argstr="-w %d", ), grad_dev=dict( extensions=None, ), gradnonlin=dict( argstr="-g", ), logdir=dict( argstr="--logdir=%s", ), mask=dict( extensions=None, mandatory=True, ), model=dict( argstr="-model %d", ), n_fibres=dict( argstr="-n %d", mandatory=True, usedefault=True, ), n_jumps=dict( argstr="-j %d", 
usedefault=True, ), no_ard=dict( argstr="--noard", xor=("no_ard", "all_ard"), ), no_spat=dict( argstr="--nospat", xor=("no_spat", "non_linear", "cnlinear"), ), non_linear=dict( argstr="--nonlinear", xor=("no_spat", "non_linear", "cnlinear"), ), out_dir=dict( argstr="%s", mandatory=True, position=1, usedefault=True, ), output_type=dict(), rician=dict( argstr="--rician", ), sample_every=dict( argstr="-s %d", usedefault=True, ), seed=dict( argstr="--seed=%d", ), update_proposal_every=dict( argstr="--updateproposalevery=%d", usedefault=True, ), use_gpu=dict(), ) inputs = BEDPOSTX5.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BEDPOSTX5_outputs(): output_map = dict( dyads=dict(), dyads_dispersion=dict(), mean_S0samples=dict( extensions=None, ), mean_dsamples=dict( extensions=None, ), mean_fsamples=dict(), mean_phsamples=dict(), mean_thsamples=dict(), merged_fsamples=dict(), merged_phsamples=dict(), merged_thsamples=dict(), ) outputs = BEDPOSTX5.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_BET.py000066400000000000000000000111041413403311400234420ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import BET def test_BET_inputs(): input_map = dict( args=dict( argstr="%s", ), center=dict( argstr="-c %s", units="voxels", ), environ=dict( nohash=True, usedefault=True, ), frac=dict( argstr="-f %.2f", ), functional=dict( argstr="-F", xor=( "functional", "reduce_bias", "robust", "padding", "remove_eyes", "surfaces", "t2_guided", ), ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=0, ), mask=dict( argstr="-m", ), mesh=dict( argstr="-e", ), no_output=dict( argstr="-n", ), out_file=dict( argstr="%s", 
extensions=None, genfile=True, hash_files=False, position=1, ), outline=dict( argstr="-o", ), output_type=dict(), padding=dict( argstr="-Z", xor=( "functional", "reduce_bias", "robust", "padding", "remove_eyes", "surfaces", "t2_guided", ), ), radius=dict( argstr="-r %d", units="mm", ), reduce_bias=dict( argstr="-B", xor=( "functional", "reduce_bias", "robust", "padding", "remove_eyes", "surfaces", "t2_guided", ), ), remove_eyes=dict( argstr="-S", xor=( "functional", "reduce_bias", "robust", "padding", "remove_eyes", "surfaces", "t2_guided", ), ), robust=dict( argstr="-R", xor=( "functional", "reduce_bias", "robust", "padding", "remove_eyes", "surfaces", "t2_guided", ), ), skull=dict( argstr="-s", ), surfaces=dict( argstr="-A", xor=( "functional", "reduce_bias", "robust", "padding", "remove_eyes", "surfaces", "t2_guided", ), ), t2_guided=dict( argstr="-A2 %s", extensions=None, xor=( "functional", "reduce_bias", "robust", "padding", "remove_eyes", "surfaces", "t2_guided", ), ), threshold=dict( argstr="-t", ), vertical_gradient=dict( argstr="-g %.2f", ), ) inputs = BET.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BET_outputs(): output_map = dict( inskull_mask_file=dict( extensions=None, ), inskull_mesh_file=dict( extensions=None, ), mask_file=dict( extensions=None, ), meshfile=dict( extensions=None, ), out_file=dict( extensions=None, ), outline_file=dict( extensions=None, ), outskin_mask_file=dict( extensions=None, ), outskin_mesh_file=dict( extensions=None, ), outskull_mask_file=dict( extensions=None, ), outskull_mesh_file=dict( extensions=None, ), skull_file=dict( extensions=None, ), skull_mask_file=dict( extensions=None, ), ) outputs = BET.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_BinaryMaths.py000066400000000000000000000035511413403311400252600ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import BinaryMaths def test_BinaryMaths_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), nan2zeros=dict( argstr="-nan", position=3, ), operand_file=dict( argstr="%s", extensions=None, mandatory=True, position=5, xor=["operand_value"], ), operand_value=dict( argstr="%.8f", mandatory=True, position=5, xor=["operand_file"], ), operation=dict( argstr="-%s", mandatory=True, position=4, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), ) inputs = BinaryMaths.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BinaryMaths_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = BinaryMaths.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_ChangeDataType.py000066400000000000000000000027301413403311400256560ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import ChangeDataType def test_ChangeDataType_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), nan2zeros=dict( argstr="-nan", position=3, ), out_file=dict( argstr="%s", extensions=None, 
genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", mandatory=True, position=-1, ), output_type=dict(), ) inputs = ChangeDataType.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ChangeDataType_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ChangeDataType.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_Classifier.py000066400000000000000000000024411413403311400251200ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..fix import Classifier def test_Classifier_inputs(): input_map = dict( args=dict( argstr="%s", ), artifacts_list_file=dict( extensions=None, ), environ=dict( nohash=True, usedefault=True, ), mel_ica=dict( argstr="%s", copyfile=False, position=1, ), thresh=dict( argstr="%d", mandatory=True, position=-1, ), trained_wts_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=2, ), ) inputs = Classifier.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Classifier_outputs(): output_map = dict( artifacts_list_file=dict( extensions=None, ), ) outputs = Classifier.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_Cleaner.py000066400000000000000000000031471413403311400244110ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..fix import Cleaner def test_Cleaner_inputs(): input_map = dict( aggressive=dict( argstr="-A", position=3, ), 
args=dict( argstr="%s", ), artifacts_list_file=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), cleanup_motion=dict( argstr="-m", position=2, ), confound_file=dict( argstr="-x %s", extensions=None, position=4, ), confound_file_1=dict( argstr="-x %s", extensions=None, position=5, ), confound_file_2=dict( argstr="-x %s", extensions=None, position=6, ), environ=dict( nohash=True, usedefault=True, ), highpass=dict( argstr="-m -h %f", position=2, usedefault=True, ), ) inputs = Cleaner.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Cleaner_outputs(): output_map = dict( cleaned_functional_file=dict( extensions=None, ), ) outputs = Cleaner.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_Cluster.py000066400000000000000000000072151413403311400244610ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import Cluster def test_Cluster_inputs(): input_map = dict( args=dict( argstr="%s", ), connectivity=dict( argstr="--connectivity=%d", ), cope_file=dict( argstr="--cope=%s", extensions=None, ), dlh=dict( argstr="--dlh=%.10f", ), environ=dict( nohash=True, usedefault=True, ), find_min=dict( argstr="--min", usedefault=True, ), fractional=dict( argstr="--fractional", usedefault=True, ), in_file=dict( argstr="--in=%s", extensions=None, mandatory=True, ), minclustersize=dict( argstr="--minclustersize", usedefault=True, ), no_table=dict( argstr="--no_table", usedefault=True, ), num_maxima=dict( argstr="--num=%d", ), out_index_file=dict( argstr="--oindex=%s", hash_files=False, ), out_localmax_txt_file=dict( argstr="--olmax=%s", hash_files=False, ), out_localmax_vol_file=dict( argstr="--olmaxim=%s", hash_files=False, ), out_max_file=dict( 
argstr="--omax=%s", hash_files=False, ), out_mean_file=dict( argstr="--omean=%s", hash_files=False, ), out_pval_file=dict( argstr="--opvals=%s", hash_files=False, ), out_size_file=dict( argstr="--osize=%s", hash_files=False, ), out_threshold_file=dict( argstr="--othresh=%s", hash_files=False, ), output_type=dict(), peak_distance=dict( argstr="--peakdist=%.10f", ), pthreshold=dict( argstr="--pthresh=%.10f", requires=["dlh", "volume"], ), std_space_file=dict( argstr="--stdvol=%s", extensions=None, ), threshold=dict( argstr="--thresh=%.10f", mandatory=True, ), use_mm=dict( argstr="--mm", usedefault=True, ), volume=dict( argstr="--volume=%d", ), warpfield_file=dict( argstr="--warpvol=%s", extensions=None, ), xfm_file=dict( argstr="--xfm=%s", extensions=None, ), ) inputs = Cluster.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Cluster_outputs(): output_map = dict( index_file=dict( extensions=None, ), localmax_txt_file=dict( extensions=None, ), localmax_vol_file=dict( extensions=None, ), max_file=dict( extensions=None, ), mean_file=dict( extensions=None, ), pval_file=dict( extensions=None, ), size_file=dict( extensions=None, ), threshold_file=dict( extensions=None, ), ) outputs = Cluster.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_Complex.py000066400000000000000000000142431413403311400244460ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Complex def test_Complex_inputs(): input_map = dict( args=dict( argstr="%s", ), complex_cartesian=dict( argstr="-complex", position=1, xor=[ "real_polar", "real_cartesian", "complex_cartesian", "complex_polar", "complex_split", "complex_merge", ], ), complex_in_file=dict( argstr="%s", 
extensions=None, position=2, ), complex_in_file2=dict( argstr="%s", extensions=None, position=3, ), complex_merge=dict( argstr="-complexmerge", position=1, xor=[ "real_polar", "real_cartesian", "complex_cartesian", "complex_polar", "complex_split", "complex_merge", "start_vol", "end_vol", ], ), complex_out_file=dict( argstr="%s", extensions=None, genfile=True, position=-3, xor=[ "complex_out_file", "magnitude_out_file", "phase_out_file", "real_out_file", "imaginary_out_file", "real_polar", "real_cartesian", ], ), complex_polar=dict( argstr="-complexpolar", position=1, xor=[ "real_polar", "real_cartesian", "complex_cartesian", "complex_polar", "complex_split", "complex_merge", ], ), complex_split=dict( argstr="-complexsplit", position=1, xor=[ "real_polar", "real_cartesian", "complex_cartesian", "complex_polar", "complex_split", "complex_merge", ], ), end_vol=dict( argstr="%d", position=-1, ), environ=dict( nohash=True, usedefault=True, ), imaginary_in_file=dict( argstr="%s", extensions=None, position=3, ), imaginary_out_file=dict( argstr="%s", extensions=None, genfile=True, position=-3, xor=[ "complex_out_file", "magnitude_out_file", "phase_out_file", "real_polar", "complex_cartesian", "complex_polar", "complex_split", "complex_merge", ], ), magnitude_in_file=dict( argstr="%s", extensions=None, position=2, ), magnitude_out_file=dict( argstr="%s", extensions=None, genfile=True, position=-4, xor=[ "complex_out_file", "real_out_file", "imaginary_out_file", "real_cartesian", "complex_cartesian", "complex_polar", "complex_split", "complex_merge", ], ), output_type=dict(), phase_in_file=dict( argstr="%s", extensions=None, position=3, ), phase_out_file=dict( argstr="%s", extensions=None, genfile=True, position=-3, xor=[ "complex_out_file", "real_out_file", "imaginary_out_file", "real_cartesian", "complex_cartesian", "complex_polar", "complex_split", "complex_merge", ], ), real_cartesian=dict( argstr="-realcartesian", position=1, xor=[ "real_polar", "real_cartesian", 
"complex_cartesian", "complex_polar", "complex_split", "complex_merge", ], ), real_in_file=dict( argstr="%s", extensions=None, position=2, ), real_out_file=dict( argstr="%s", extensions=None, genfile=True, position=-4, xor=[ "complex_out_file", "magnitude_out_file", "phase_out_file", "real_polar", "complex_cartesian", "complex_polar", "complex_split", "complex_merge", ], ), real_polar=dict( argstr="-realpolar", position=1, xor=[ "real_polar", "real_cartesian", "complex_cartesian", "complex_polar", "complex_split", "complex_merge", ], ), start_vol=dict( argstr="%d", position=-2, ), ) inputs = Complex.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Complex_outputs(): output_map = dict( complex_out_file=dict( extensions=None, ), imaginary_out_file=dict( extensions=None, ), magnitude_out_file=dict( extensions=None, ), phase_out_file=dict( extensions=None, ), real_out_file=dict( extensions=None, ), ) outputs = Complex.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_ContrastMgr.py000066400000000000000000000035371413403311400253060ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import ContrastMgr def test_ContrastMgr_inputs(): input_map = dict( args=dict( argstr="%s", ), contrast_num=dict( argstr="-cope", ), corrections=dict( copyfile=False, extensions=None, mandatory=True, ), dof_file=dict( argstr="", copyfile=False, extensions=None, mandatory=True, ), environ=dict( nohash=True, usedefault=True, ), fcon_file=dict( argstr="-f %s", extensions=None, ), output_type=dict(), param_estimates=dict( argstr="", copyfile=False, mandatory=True, ), sigmasquareds=dict( argstr="", copyfile=False, extensions=None, mandatory=True, position=-2, ), suffix=dict( 
argstr="-suffix %s", ), tcon_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, ), ) inputs = ContrastMgr.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ContrastMgr_outputs(): output_map = dict( copes=dict(), fstats=dict(), neffs=dict(), tstats=dict(), varcopes=dict(), zfstats=dict(), zstats=dict(), ) outputs = ContrastMgr.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_ConvertWarp.py000066400000000000000000000047551413403311400253200ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ConvertWarp def test_ConvertWarp_inputs(): input_map = dict( abswarp=dict( argstr="--abs", xor=["relwarp"], ), args=dict( argstr="%s", ), cons_jacobian=dict( argstr="--constrainj", ), environ=dict( nohash=True, usedefault=True, ), jacobian_max=dict( argstr="--jmax=%f", ), jacobian_min=dict( argstr="--jmin=%f", ), midmat=dict( argstr="--midmat=%s", extensions=None, ), out_abswarp=dict( argstr="--absout", xor=["out_relwarp"], ), out_file=dict( argstr="--out=%s", extensions=None, name_source=["reference"], name_template="%s_concatwarp", output_name="out_file", position=-1, ), out_relwarp=dict( argstr="--relout", xor=["out_abswarp"], ), output_type=dict(), postmat=dict( argstr="--postmat=%s", extensions=None, ), premat=dict( argstr="--premat=%s", extensions=None, ), reference=dict( argstr="--ref=%s", extensions=None, mandatory=True, position=1, ), relwarp=dict( argstr="--rel", xor=["abswarp"], ), shift_direction=dict( argstr="--shiftdir=%s", requires=["shift_in_file"], ), shift_in_file=dict( argstr="--shiftmap=%s", extensions=None, ), warp1=dict( argstr="--warp1=%s", extensions=None, ), warp2=dict( argstr="--warp2=%s", extensions=None, 
), ) inputs = ConvertWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ConvertWarp_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ConvertWarp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_ConvertXFM.py000066400000000000000000000034531413403311400250330ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ConvertXFM def test_ConvertXFM_inputs(): input_map = dict( args=dict( argstr="%s", ), concat_xfm=dict( argstr="-concat", position=-3, requires=["in_file2"], xor=["invert_xfm", "concat_xfm", "fix_scale_skew"], ), environ=dict( nohash=True, usedefault=True, ), fix_scale_skew=dict( argstr="-fixscaleskew", position=-3, requires=["in_file2"], xor=["invert_xfm", "concat_xfm", "fix_scale_skew"], ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, ), in_file2=dict( argstr="%s", extensions=None, position=-2, ), invert_xfm=dict( argstr="-inverse", position=-3, xor=["invert_xfm", "concat_xfm", "fix_scale_skew"], ), out_file=dict( argstr="-omat %s", extensions=None, genfile=True, hash_files=False, position=1, ), output_type=dict(), ) inputs = ConvertXFM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ConvertXFM_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ConvertXFM.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_CopyGeom.py000066400000000000000000000025111413403311400245540ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import CopyGeom def test_CopyGeom_inputs(): input_map = dict( args=dict( argstr="%s", ), dest_file=dict( argstr="%s", copyfile=True, extensions=None, mandatory=True, name_source="dest_file", name_template="%s", output_name="out_file", position=1, ), environ=dict( nohash=True, usedefault=True, ), ignore_dims=dict( argstr="-d", position="-1", ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=0, ), output_type=dict(), ) inputs = CopyGeom.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CopyGeom_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = CopyGeom.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_DTIFit.py000066400000000000000000000055051413403311400241230ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import DTIFit def test_DTIFit_inputs(): input_map = dict( args=dict( argstr="%s", ), base_name=dict( argstr="-o %s", position=1, usedefault=True, ), bvals=dict( argstr="-b %s", extensions=None, mandatory=True, position=4, ), bvecs=dict( argstr="-r %s", extensions=None, mandatory=True, position=3, ), cni=dict( argstr="--cni=%s", extensions=None, ), dwi=dict( argstr="-k %s", extensions=None, mandatory=True, position=0, ), environ=dict( nohash=True, usedefault=True, ), gradnonlin=dict( argstr="--gradnonlin=%s", extensions=None, ), little_bit=dict( argstr="--littlebit", ), mask=dict( argstr="-m %s", extensions=None, mandatory=True, position=2, ), max_x=dict( argstr="-X %d", ), max_y=dict( 
argstr="-Y %d", ), max_z=dict( argstr="-Z %d", ), min_x=dict( argstr="-x %d", ), min_y=dict( argstr="-y %d", ), min_z=dict( argstr="-z %d", ), output_type=dict(), save_tensor=dict( argstr="--save_tensor", ), sse=dict( argstr="--sse", ), ) inputs = DTIFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DTIFit_outputs(): output_map = dict( FA=dict( extensions=None, ), L1=dict( extensions=None, ), L2=dict( extensions=None, ), L3=dict( extensions=None, ), MD=dict( extensions=None, ), MO=dict( extensions=None, ), S0=dict( extensions=None, ), V1=dict( extensions=None, ), V2=dict( extensions=None, ), V3=dict( extensions=None, ), sse=dict( extensions=None, ), tensor=dict( extensions=None, ), ) outputs = DTIFit.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_DilateImage.py000066400000000000000000000036151413403311400252050ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import DilateImage def test_DilateImage_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), kernel_file=dict( argstr="%s", extensions=None, position=5, xor=["kernel_size"], ), kernel_shape=dict( argstr="-kernel %s", position=4, ), kernel_size=dict( argstr="%.4f", position=5, xor=["kernel_file"], ), nan2zeros=dict( argstr="-nan", position=3, ), operation=dict( argstr="-dil%s", mandatory=True, position=6, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), ) inputs = 
DilateImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DilateImage_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = DilateImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_DistanceMap.py000066400000000000000000000027131413403311400252260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import DistanceMap def test_DistanceMap_inputs(): input_map = dict( args=dict( argstr="%s", ), distance_map=dict( argstr="--out=%s", extensions=None, genfile=True, hash_files=False, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="--in=%s", extensions=None, mandatory=True, ), invert_input=dict( argstr="--invert", ), local_max_file=dict( argstr="--localmax=%s", hash_files=False, ), mask_file=dict( argstr="--mask=%s", extensions=None, ), output_type=dict(), ) inputs = DistanceMap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DistanceMap_outputs(): output_map = dict( distance_map=dict( extensions=None, ), local_max_file=dict( extensions=None, ), ) outputs = DistanceMap.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_DualRegression.py000066400000000000000000000034151413403311400257640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import DualRegression def test_DualRegression_inputs(): input_map = dict( args=dict( argstr="%s", ), con_file=dict( argstr="%s", extensions=None, 
position=4, ), des_norm=dict( argstr="%i", position=2, usedefault=True, ), design_file=dict( argstr="%s", extensions=None, position=3, ), environ=dict( nohash=True, usedefault=True, ), group_IC_maps_4D=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), in_files=dict( argstr="%s", mandatory=True, position=-1, sep=" ", ), n_perm=dict( argstr="%i", mandatory=True, position=5, ), one_sample_group_mean=dict( argstr="-1", position=3, ), out_dir=dict( argstr="%s", genfile=True, position=6, usedefault=True, ), output_type=dict(), ) inputs = DualRegression.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DualRegression_outputs(): output_map = dict( out_dir=dict(), ) outputs = DualRegression.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_EPIDeWarp.py000066400000000000000000000043751413403311400245640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..epi import EPIDeWarp def test_EPIDeWarp_inputs(): input_map = dict( args=dict( argstr="%s", ), cleanup=dict( argstr="--cleanup", ), dph_file=dict( argstr="--dph %s", extensions=None, mandatory=True, ), environ=dict( nohash=True, usedefault=True, ), epi_file=dict( argstr="--epi %s", extensions=None, ), epidw=dict( argstr="--epidw %s", genfile=False, ), esp=dict( argstr="--esp %s", usedefault=True, ), exf_file=dict( argstr="--exf %s", extensions=None, ), exfdw=dict( argstr="--exfdw %s", genfile=True, ), mag_file=dict( argstr="--mag %s", extensions=None, mandatory=True, position=0, ), nocleanup=dict( argstr="--nocleanup", usedefault=True, ), output_type=dict(), sigma=dict( argstr="--sigma %s", usedefault=True, ), tediff=dict( argstr="--tediff %s", usedefault=True, ), tmpdir=dict( argstr="--tmpdir 
%s", genfile=True, ), vsm=dict( argstr="--vsm %s", genfile=True, ), ) inputs = EPIDeWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_EPIDeWarp_outputs(): output_map = dict( exf_mask=dict( extensions=None, ), exfdw=dict( extensions=None, ), unwarped_file=dict( extensions=None, ), vsm_file=dict( extensions=None, ), ) outputs = EPIDeWarp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_Eddy.py000066400000000000000000000157651413403311400237360ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..epi import Eddy def test_Eddy_inputs(): input_map = dict( args=dict( argstr="%s", ), cnr_maps=dict( argstr="--cnr_maps", min_ver="5.0.10", ), dont_peas=dict( argstr="--dont_peas", ), dont_sep_offs_move=dict( argstr="--dont_sep_offs_move", ), environ=dict( nohash=True, usedefault=True, ), estimate_move_by_susceptibility=dict( argstr="--estimate_move_by_susceptibility", min_ver="6.0.1", ), fep=dict( argstr="--fep", ), field=dict( argstr="--field=%s", extensions=None, ), field_mat=dict( argstr="--field_mat=%s", extensions=None, ), flm=dict( argstr="--flm=%s", usedefault=True, ), fudge_factor=dict( argstr="--ff=%s", usedefault=True, ), fwhm=dict( argstr="--fwhm=%s", ), in_acqp=dict( argstr="--acqp=%s", extensions=None, mandatory=True, ), in_bval=dict( argstr="--bvals=%s", extensions=None, mandatory=True, ), in_bvec=dict( argstr="--bvecs=%s", extensions=None, mandatory=True, ), in_file=dict( argstr="--imain=%s", extensions=None, mandatory=True, ), in_index=dict( argstr="--index=%s", extensions=None, mandatory=True, ), in_mask=dict( argstr="--mask=%s", extensions=None, mandatory=True, ), in_topup_fieldcoef=dict( argstr="--topup=%s", extensions=None, 
requires=["in_topup_movpar"], ), in_topup_movpar=dict( extensions=None, requires=["in_topup_fieldcoef"], ), initrand=dict( argstr="--initrand", min_ver="5.0.10", ), interp=dict( argstr="--interp=%s", usedefault=True, ), is_shelled=dict( argstr="--data_is_shelled", ), json=dict( argstr="--json=%s", min_ver="6.0.1", requires=["mporder"], xor=["slice_order"], ), mbs_ksp=dict( argstr="--mbs_ksp=%smm", min_ver="6.0.1", requires=["estimate_move_by_susceptibility"], ), mbs_lambda=dict( argstr="--mbs_lambda=%s", min_ver="6.0.1", requires=["estimate_move_by_susceptibility"], ), mbs_niter=dict( argstr="--mbs_niter=%s", min_ver="6.0.1", requires=["estimate_move_by_susceptibility"], ), method=dict( argstr="--resamp=%s", usedefault=True, ), mporder=dict( argstr="--mporder=%s", min_ver="5.0.11", requires=["use_cuda"], ), multiband_factor=dict( argstr="--mb=%s", min_ver="5.0.10", ), multiband_offset=dict( argstr="--mb_offs=%d", min_ver="5.0.10", requires=["multiband_factor"], ), niter=dict( argstr="--niter=%s", usedefault=True, ), num_threads=dict( nohash=True, usedefault=True, ), nvoxhp=dict( argstr="--nvoxhp=%s", usedefault=True, ), out_base=dict( argstr="--out=%s", usedefault=True, ), outlier_nstd=dict( argstr="--ol_nstd", min_ver="5.0.10", requires=["repol"], ), outlier_nvox=dict( argstr="--ol_nvox", min_ver="5.0.10", requires=["repol"], ), outlier_pos=dict( argstr="--ol_pos", min_ver="5.0.10", requires=["repol"], ), outlier_sqr=dict( argstr="--ol_sqr", min_ver="5.0.10", requires=["repol"], ), outlier_type=dict( argstr="--ol_type", min_ver="5.0.10", requires=["repol"], ), output_type=dict(), repol=dict( argstr="--repol", ), residuals=dict( argstr="--residuals", min_ver="5.0.10", ), session=dict( argstr="--session=%s", extensions=None, ), slice2vol_interp=dict( argstr="--s2v_interp=%s", min_ver="5.0.11", requires=["mporder"], ), slice2vol_lambda=dict( argstr="--s2v_lambda=%d", min_ver="5.0.11", requires=["mporder"], ), slice2vol_niter=dict( argstr="--s2v_niter=%d", 
min_ver="5.0.11", requires=["mporder"], ), slice_order=dict( argstr="--slspec=%s", min_ver="5.0.11", requires=["mporder"], xor=["json"], ), slm=dict( argstr="--slm=%s", usedefault=True, ), use_cuda=dict(), ) inputs = Eddy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Eddy_outputs(): output_map = dict( out_cnr_maps=dict( extensions=None, ), out_corrected=dict( extensions=None, ), out_movement_over_time=dict( extensions=None, ), out_movement_rms=dict( extensions=None, ), out_outlier_free=dict( extensions=None, ), out_outlier_map=dict( extensions=None, ), out_outlier_n_sqr_stdev_map=dict( extensions=None, ), out_outlier_n_stdev_map=dict( extensions=None, ), out_outlier_report=dict( extensions=None, ), out_parameter=dict( extensions=None, ), out_residuals=dict( extensions=None, ), out_restricted_movement_rms=dict( extensions=None, ), out_rotated_bvecs=dict( extensions=None, ), out_shell_alignment_parameters=dict( extensions=None, ), out_shell_pe_translation_parameters=dict( extensions=None, ), ) outputs = Eddy.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_EddyCorrect.py000066400000000000000000000025401413403311400252430ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..epi import EddyCorrect def test_EddyCorrect_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=0, ), out_file=dict( argstr="%s", extensions=None, name_source=["in_file"], name_template="%s_edc", output_name="eddy_corrected", position=1, ), output_type=dict(), ref_num=dict( argstr="%d", mandatory=True, position=2, usedefault=True, ), ) inputs = 
EddyCorrect.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_EddyCorrect_outputs(): output_map = dict( eddy_corrected=dict( extensions=None, ), ) outputs = EddyCorrect.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_EddyQuad.py000066400000000000000000000044011413403311400245320ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..epi import EddyQuad def test_EddyQuad_inputs(): input_map = dict( args=dict( argstr="%s", ), base_name=dict( argstr="%s", position=0, usedefault=True, ), bval_file=dict( argstr="--bvals %s", extensions=None, mandatory=True, ), bvec_file=dict( argstr="--bvecs %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), field=dict( argstr="--field %s", extensions=None, ), idx_file=dict( argstr="--eddyIdx %s", extensions=None, mandatory=True, ), mask_file=dict( argstr="--mask %s", extensions=None, mandatory=True, ), output_dir=dict( argstr="--output-dir %s", name_source=["base_name"], name_template="%s.qc", ), output_type=dict(), param_file=dict( argstr="--eddyParams %s", extensions=None, mandatory=True, ), slice_spec=dict( argstr="--slspec %s", extensions=None, ), verbose=dict( argstr="--verbose", ), ) inputs = EddyQuad.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_EddyQuad_outputs(): output_map = dict( avg_b0_pe_png=dict(), avg_b_png=dict(), clean_volumes=dict( extensions=None, ), cnr_png=dict(), qc_json=dict( extensions=None, ), qc_pdf=dict( extensions=None, ), residuals=dict( extensions=None, ), vdm_png=dict( extensions=None, ), ) outputs = EddyQuad.output_spec() for key, metadata in 
list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_EpiReg.py000066400000000000000000000057111413403311400242120ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..epi import EpiReg def test_EpiReg_inputs(): input_map = dict( args=dict( argstr="%s", ), echospacing=dict( argstr="--echospacing=%f", ), environ=dict( nohash=True, usedefault=True, ), epi=dict( argstr="--epi=%s", extensions=None, mandatory=True, position=-4, ), fmap=dict( argstr="--fmap=%s", extensions=None, ), fmapmag=dict( argstr="--fmapmag=%s", extensions=None, ), fmapmagbrain=dict( argstr="--fmapmagbrain=%s", extensions=None, ), no_clean=dict( argstr="--noclean", usedefault=True, ), no_fmapreg=dict( argstr="--nofmapreg", ), out_base=dict( argstr="--out=%s", position=-1, usedefault=True, ), output_type=dict(), pedir=dict( argstr="--pedir=%s", ), t1_brain=dict( argstr="--t1brain=%s", extensions=None, mandatory=True, position=-2, ), t1_head=dict( argstr="--t1=%s", extensions=None, mandatory=True, position=-3, ), weight_image=dict( argstr="--weight=%s", extensions=None, ), wmseg=dict( argstr="--wmseg=%s", extensions=None, ), ) inputs = EpiReg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_EpiReg_outputs(): output_map = dict( epi2str_inv=dict( extensions=None, ), epi2str_mat=dict( extensions=None, ), fmap2epi_mat=dict( extensions=None, ), fmap2str_mat=dict( extensions=None, ), fmap_epi=dict( extensions=None, ), fmap_str=dict( extensions=None, ), fmapmag_str=dict( extensions=None, ), fullwarp=dict( extensions=None, ), out_1vol=dict( extensions=None, ), out_file=dict( extensions=None, ), seg=dict( extensions=None, ), shiftmap=dict( extensions=None, ), wmedge=dict( extensions=None, ), wmseg=dict( extensions=None, ), ) outputs 
= EpiReg.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_ErodeImage.py000066400000000000000000000036121413403311400250360ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import ErodeImage def test_ErodeImage_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), kernel_file=dict( argstr="%s", extensions=None, position=5, xor=["kernel_size"], ), kernel_shape=dict( argstr="-kernel %s", position=4, ), kernel_size=dict( argstr="%.4f", position=5, xor=["kernel_file"], ), minimum_filter=dict( argstr="%s", position=6, usedefault=True, ), nan2zeros=dict( argstr="-nan", position=3, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), ) inputs = ErodeImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ErodeImage_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ErodeImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_ExtractROI.py000066400000000000000000000041261413403311400250220ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ExtractROI def test_ExtractROI_inputs(): input_map = dict( args=dict( argstr="%s", ), crop_list=dict( argstr="%s", position=2, xor=[ "x_min", "x_size", "y_min", "y_size", "z_min", 
"z_size", "t_min", "t_size", ], ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=0, ), output_type=dict(), roi_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=1, ), t_min=dict( argstr="%d", position=8, ), t_size=dict( argstr="%d", position=9, ), x_min=dict( argstr="%d", position=2, ), x_size=dict( argstr="%d", position=3, ), y_min=dict( argstr="%d", position=4, ), y_size=dict( argstr="%d", position=5, ), z_min=dict( argstr="%d", position=6, ), z_size=dict( argstr="%d", position=7, ), ) inputs = ExtractROI.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ExtractROI_outputs(): output_map = dict( roi_file=dict( extensions=None, ), ) outputs = ExtractROI.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_FAST.py000066400000000000000000000053071413403311400235750ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import FAST def test_FAST_inputs(): input_map = dict( args=dict( argstr="%s", ), bias_iters=dict( argstr="-I %d", ), bias_lowpass=dict( argstr="-l %d", units="mm", ), environ=dict( nohash=True, usedefault=True, ), hyper=dict( argstr="-H %.2f", ), img_type=dict( argstr="-t %d", ), in_files=dict( argstr="%s", copyfile=False, mandatory=True, position=-1, ), init_seg_smooth=dict( argstr="-f %.3f", ), init_transform=dict( argstr="-a %s", extensions=None, ), iters_afterbias=dict( argstr="-O %d", ), manual_seg=dict( argstr="-s %s", extensions=None, ), mixel_smooth=dict( argstr="-R %.2f", ), no_bias=dict( argstr="-N", ), no_pve=dict( argstr="--nopve", ), number_classes=dict( argstr="-n %d", ), other_priors=dict( argstr="-A %s", ), 
out_basename=dict( argstr="-o %s", extensions=None, ), output_biascorrected=dict( argstr="-B", ), output_biasfield=dict( argstr="-b", ), output_type=dict(), probability_maps=dict( argstr="-p", ), segment_iters=dict( argstr="-W %d", ), segments=dict( argstr="-g", ), use_priors=dict( argstr="-P", ), verbose=dict( argstr="-v", ), ) inputs = FAST.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FAST_outputs(): output_map = dict( bias_field=dict(), mixeltype=dict( extensions=None, ), partial_volume_files=dict(), partial_volume_map=dict( extensions=None, ), probability_maps=dict(), restored_image=dict(), tissue_class_files=dict(), tissue_class_map=dict( extensions=None, ), ) outputs = FAST.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_FEAT.py000066400000000000000000000016461413403311400235610ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import FEAT def test_FEAT_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fsf_file=dict( argstr="%s", extensions=None, mandatory=True, position=0, ), output_type=dict(), ) inputs = FEAT.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FEAT_outputs(): output_map = dict( feat_dir=dict(), ) outputs = FEAT.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_FEATModel.py000066400000000000000000000026221413403311400245350ustar00rootroot00000000000000# AUTO-GENERATED by 
tools/checkspecs.py - DO NOT EDIT from ..model import FEATModel def test_FEATModel_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), ev_files=dict( argstr="%s", copyfile=False, mandatory=True, position=1, ), fsf_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=0, ), output_type=dict(), ) inputs = FEATModel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FEATModel_outputs(): output_map = dict( con_file=dict( extensions=None, ), design_cov=dict( extensions=None, ), design_file=dict( extensions=None, ), design_image=dict( extensions=None, ), fcon_file=dict( extensions=None, ), ) outputs = FEATModel.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_FEATRegister.py000066400000000000000000000016271413403311400252650ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import FEATRegister def test_FEATRegister_inputs(): input_map = dict( feat_dirs=dict( mandatory=True, ), reg_dof=dict( usedefault=True, ), reg_image=dict( extensions=None, mandatory=True, ), ) inputs = FEATRegister.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FEATRegister_outputs(): output_map = dict( fsf_file=dict( extensions=None, ), ) outputs = FEATRegister.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_FIRST.py000066400000000000000000000041171413403311400237250ustar00rootroot00000000000000# AUTO-GENERATED by 
tools/checkspecs.py - DO NOT EDIT from ..preprocess import FIRST def test_FIRST_inputs(): input_map = dict( affine_file=dict( argstr="-a %s", extensions=None, position=6, ), args=dict( argstr="%s", ), brain_extracted=dict( argstr="-b", position=2, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-i %s", copyfile=False, extensions=None, mandatory=True, position=-2, ), list_of_specific_structures=dict( argstr="-s %s", position=5, sep=",", ), method=dict( argstr="-m %s", position=4, usedefault=True, xor=["method_as_numerical_threshold"], ), method_as_numerical_threshold=dict( argstr="-m %.4f", position=4, ), no_cleanup=dict( argstr="-d", position=3, ), out_file=dict( argstr="-o %s", extensions=None, hash_files=False, mandatory=True, position=-1, usedefault=True, ), output_type=dict(), verbose=dict( argstr="-v", position=1, ), ) inputs = FIRST.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FIRST_outputs(): output_map = dict( bvars=dict(), original_segmentations=dict( extensions=None, ), segmentation_file=dict( extensions=None, ), vtk_surfaces=dict(), ) outputs = FIRST.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_FLAMEO.py000066400000000000000000000053671413403311400240110ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import FLAMEO def test_FLAMEO_inputs(): input_map = dict( args=dict( argstr="%s", ), burnin=dict( argstr="--burnin=%d", ), cope_file=dict( argstr="--copefile=%s", extensions=None, mandatory=True, ), cov_split_file=dict( argstr="--covsplitfile=%s", extensions=None, mandatory=True, ), design_file=dict( argstr="--designfile=%s", extensions=None, mandatory=True, ), dof_var_cope_file=dict( 
argstr="--dofvarcopefile=%s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), f_con_file=dict( argstr="--fcontrastsfile=%s", extensions=None, ), fix_mean=dict( argstr="--fixmean", ), infer_outliers=dict( argstr="--inferoutliers", ), log_dir=dict( argstr="--ld=%s", usedefault=True, ), mask_file=dict( argstr="--maskfile=%s", extensions=None, mandatory=True, ), n_jumps=dict( argstr="--njumps=%d", ), no_pe_outputs=dict( argstr="--nopeoutput", ), outlier_iter=dict( argstr="--ioni=%d", ), output_type=dict(), run_mode=dict( argstr="--runmode=%s", mandatory=True, ), sample_every=dict( argstr="--sampleevery=%d", ), sigma_dofs=dict( argstr="--sigma_dofs=%d", ), t_con_file=dict( argstr="--tcontrastsfile=%s", extensions=None, mandatory=True, ), var_cope_file=dict( argstr="--varcopefile=%s", extensions=None, ), ) inputs = FLAMEO.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FLAMEO_outputs(): output_map = dict( copes=dict(), fstats=dict(), mrefvars=dict(), pes=dict(), res4d=dict(), stats_dir=dict(), tdof=dict(), tstats=dict(), var_copes=dict(), weights=dict(), zfstats=dict(), zstats=dict(), ) outputs = FLAMEO.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_FLIRT.py000066400000000000000000000127671413403311400237300ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import FLIRT def test_FLIRT_inputs(): input_map = dict( angle_rep=dict( argstr="-anglerep %s", ), apply_isoxfm=dict( argstr="-applyisoxfm %f", xor=["apply_xfm"], ), apply_xfm=dict( argstr="-applyxfm", ), args=dict( argstr="%s", ), bbrslope=dict( argstr="-bbrslope %f", min_ver="5.0.0", ), bbrtype=dict( argstr="-bbrtype %s", min_ver="5.0.0", ), bgvalue=dict( 
argstr="-setbackground %f", ), bins=dict( argstr="-bins %d", ), coarse_search=dict( argstr="-coarsesearch %d", units="degrees", ), cost=dict( argstr="-cost %s", ), cost_func=dict( argstr="-searchcost %s", ), datatype=dict( argstr="-datatype %s", ), display_init=dict( argstr="-displayinit", ), dof=dict( argstr="-dof %d", ), echospacing=dict( argstr="-echospacing %f", min_ver="5.0.0", ), environ=dict( nohash=True, usedefault=True, ), fieldmap=dict( argstr="-fieldmap %s", extensions=None, min_ver="5.0.0", ), fieldmapmask=dict( argstr="-fieldmapmask %s", extensions=None, min_ver="5.0.0", ), fine_search=dict( argstr="-finesearch %d", units="degrees", ), force_scaling=dict( argstr="-forcescaling", ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, position=0, ), in_matrix_file=dict( argstr="-init %s", extensions=None, ), in_weight=dict( argstr="-inweight %s", extensions=None, ), interp=dict( argstr="-interp %s", ), min_sampling=dict( argstr="-minsampling %f", units="mm", ), no_clamp=dict( argstr="-noclamp", ), no_resample=dict( argstr="-noresample", ), no_resample_blur=dict( argstr="-noresampblur", ), no_search=dict( argstr="-nosearch", ), out_file=dict( argstr="-out %s", extensions=None, hash_files=False, name_source=["in_file"], name_template="%s_flirt", position=2, ), out_log=dict( extensions=None, keep_extension=True, name_source=["in_file"], name_template="%s_flirt.log", requires=["save_log"], ), out_matrix_file=dict( argstr="-omat %s", extensions=None, hash_files=False, keep_extension=True, name_source=["in_file"], name_template="%s_flirt.mat", position=3, ), output_type=dict(), padding_size=dict( argstr="-paddingsize %d", units="voxels", ), pedir=dict( argstr="-pedir %d", min_ver="5.0.0", ), ref_weight=dict( argstr="-refweight %s", extensions=None, ), reference=dict( argstr="-ref %s", extensions=None, mandatory=True, position=1, ), rigid2D=dict( argstr="-2D", ), save_log=dict(), schedule=dict( argstr="-schedule %s", extensions=None, ), 
searchr_x=dict( argstr="-searchrx %s", units="degrees", ), searchr_y=dict( argstr="-searchry %s", units="degrees", ), searchr_z=dict( argstr="-searchrz %s", units="degrees", ), sinc_width=dict( argstr="-sincwidth %d", units="voxels", ), sinc_window=dict( argstr="-sincwindow %s", ), uses_qform=dict( argstr="-usesqform", ), verbose=dict( argstr="-verbose %d", ), wm_seg=dict( argstr="-wmseg %s", extensions=None, min_ver="5.0.0", ), wmcoords=dict( argstr="-wmcoords %s", extensions=None, min_ver="5.0.0", ), wmnorms=dict( argstr="-wmnorms %s", extensions=None, min_ver="5.0.0", ), ) inputs = FLIRT.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FLIRT_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_log=dict( extensions=None, ), out_matrix_file=dict( extensions=None, ), ) outputs = FLIRT.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_FNIRT.py000066400000000000000000000122361413403311400237210ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import FNIRT def test_FNIRT_inputs(): input_map = dict( affine_file=dict( argstr="--aff=%s", extensions=None, ), apply_inmask=dict( argstr="--applyinmask=%s", sep=",", xor=["skip_inmask"], ), apply_intensity_mapping=dict( argstr="--estint=%s", sep=",", xor=["skip_intensity_mapping"], ), apply_refmask=dict( argstr="--applyrefmask=%s", sep=",", xor=["skip_refmask"], ), args=dict( argstr="%s", ), bias_regularization_lambda=dict( argstr="--biaslambda=%f", ), biasfield_resolution=dict( argstr="--biasres=%d,%d,%d", ), config_file=dict( argstr="--config=%s", ), derive_from_ref=dict( argstr="--refderiv", ), environ=dict( nohash=True, usedefault=True, ), field_file=dict( argstr="--fout=%s", 
hash_files=False, ), fieldcoeff_file=dict( argstr="--cout=%s", ), hessian_precision=dict( argstr="--numprec=%s", ), in_file=dict( argstr="--in=%s", extensions=None, mandatory=True, ), in_fwhm=dict( argstr="--infwhm=%s", sep=",", ), in_intensitymap_file=dict( argstr="--intin=%s", copyfile=False, ), inmask_file=dict( argstr="--inmask=%s", extensions=None, ), inmask_val=dict( argstr="--impinval=%f", ), intensity_mapping_model=dict( argstr="--intmod=%s", ), intensity_mapping_order=dict( argstr="--intorder=%d", ), inwarp_file=dict( argstr="--inwarp=%s", extensions=None, ), jacobian_file=dict( argstr="--jout=%s", hash_files=False, ), jacobian_range=dict( argstr="--jacrange=%f,%f", ), log_file=dict( argstr="--logout=%s", extensions=None, genfile=True, hash_files=False, ), max_nonlin_iter=dict( argstr="--miter=%s", sep=",", ), modulatedref_file=dict( argstr="--refout=%s", hash_files=False, ), out_intensitymap_file=dict( argstr="--intout=%s", hash_files=False, ), output_type=dict(), ref_file=dict( argstr="--ref=%s", extensions=None, mandatory=True, ), ref_fwhm=dict( argstr="--reffwhm=%s", sep=",", ), refmask_file=dict( argstr="--refmask=%s", extensions=None, ), refmask_val=dict( argstr="--imprefval=%f", ), regularization_lambda=dict( argstr="--lambda=%s", sep=",", ), regularization_model=dict( argstr="--regmod=%s", ), skip_implicit_in_masking=dict( argstr="--impinm=0", ), skip_implicit_ref_masking=dict( argstr="--imprefm=0", ), skip_inmask=dict( argstr="--applyinmask=0", xor=["apply_inmask"], ), skip_intensity_mapping=dict( argstr="--estint=0", xor=["apply_intensity_mapping"], ), skip_lambda_ssq=dict( argstr="--ssqlambda=0", ), skip_refmask=dict( argstr="--applyrefmask=0", xor=["apply_refmask"], ), spline_order=dict( argstr="--splineorder=%d", ), subsampling_scheme=dict( argstr="--subsamp=%s", sep=",", ), warp_resolution=dict( argstr="--warpres=%d,%d,%d", ), warped_file=dict( argstr="--iout=%s", extensions=None, genfile=True, hash_files=False, ), ) inputs = 
FNIRT.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FNIRT_outputs(): output_map = dict( field_file=dict( extensions=None, ), fieldcoeff_file=dict( extensions=None, ), jacobian_file=dict( extensions=None, ), log_file=dict( extensions=None, ), modulatedref_file=dict( extensions=None, ), out_intensitymap_file=dict(), warped_file=dict( extensions=None, ), ) outputs = FNIRT.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_FSLCommand.py000066400000000000000000000010131413403311400247510ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import FSLCommand def test_FSLCommand_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), output_type=dict(), ) inputs = FSLCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_FSLXCommand.py000066400000000000000000000066651413403311400251230ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import FSLXCommand def test_FSLXCommand_inputs(): input_map = dict( all_ard=dict( argstr="--allard", xor=("no_ard", "all_ard"), ), args=dict( argstr="%s", ), burn_in=dict( argstr="--burnin=%d", usedefault=True, ), burn_in_no_ard=dict( argstr="--burnin_noard=%d", usedefault=True, ), bvals=dict( argstr="--bvals=%s", extensions=None, mandatory=True, ), bvecs=dict( argstr="--bvecs=%s", extensions=None, mandatory=True, ), cnlinear=dict( argstr="--cnonlinear", xor=("no_spat", "non_linear", "cnlinear"), ), dwi=dict( argstr="--data=%s", extensions=None, mandatory=True, ), 
environ=dict( nohash=True, usedefault=True, ), f0_ard=dict( argstr="--f0 --ardf0", xor=["f0_noard", "f0_ard", "all_ard"], ), f0_noard=dict( argstr="--f0", xor=["f0_noard", "f0_ard"], ), force_dir=dict( argstr="--forcedir", usedefault=True, ), fudge=dict( argstr="--fudge=%d", ), logdir=dict( argstr="--logdir=%s", usedefault=True, ), mask=dict( argstr="--mask=%s", extensions=None, mandatory=True, ), model=dict( argstr="--model=%d", ), n_fibres=dict( argstr="--nfibres=%d", mandatory=True, usedefault=True, ), n_jumps=dict( argstr="--njumps=%d", usedefault=True, ), no_ard=dict( argstr="--noard", xor=("no_ard", "all_ard"), ), no_spat=dict( argstr="--nospat", xor=("no_spat", "non_linear", "cnlinear"), ), non_linear=dict( argstr="--nonlinear", xor=("no_spat", "non_linear", "cnlinear"), ), output_type=dict(), rician=dict( argstr="--rician", ), sample_every=dict( argstr="--sampleevery=%d", usedefault=True, ), seed=dict( argstr="--seed=%d", ), update_proposal_every=dict( argstr="--updateproposalevery=%d", usedefault=True, ), ) inputs = FSLXCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FSLXCommand_outputs(): output_map = dict( dyads=dict(), fsamples=dict(), mean_S0samples=dict( extensions=None, ), mean_dsamples=dict( extensions=None, ), mean_fsamples=dict(), mean_tausamples=dict( extensions=None, ), phsamples=dict(), thsamples=dict(), ) outputs = FSLXCommand.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_FUGUE.py000066400000000000000000000075201413403311400237120ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import FUGUE def test_FUGUE_inputs(): input_map = dict( args=dict( argstr="%s", ), asym_se_time=dict( argstr="--asym=%.10f", 
), despike_2dfilter=dict( argstr="--despike", ), despike_threshold=dict( argstr="--despikethreshold=%s", ), dwell_time=dict( argstr="--dwell=%.10f", ), dwell_to_asym_ratio=dict( argstr="--dwelltoasym=%.10f", ), environ=dict( nohash=True, usedefault=True, ), fmap_in_file=dict( argstr="--loadfmap=%s", extensions=None, ), fmap_out_file=dict( argstr="--savefmap=%s", extensions=None, ), forward_warping=dict( usedefault=True, ), fourier_order=dict( argstr="--fourier=%d", ), icorr=dict( argstr="--icorr", requires=["shift_in_file"], ), icorr_only=dict( argstr="--icorronly", requires=["unwarped_file"], ), in_file=dict( argstr="--in=%s", extensions=None, ), mask_file=dict( argstr="--mask=%s", extensions=None, ), median_2dfilter=dict( argstr="--median", ), no_extend=dict( argstr="--noextend", ), no_gap_fill=dict( argstr="--nofill", ), nokspace=dict( argstr="--nokspace", ), output_type=dict(), pava=dict( argstr="--pava", ), phase_conjugate=dict( argstr="--phaseconj", ), phasemap_in_file=dict( argstr="--phasemap=%s", extensions=None, ), poly_order=dict( argstr="--poly=%d", ), save_fmap=dict( xor=["save_unmasked_fmap"], ), save_shift=dict( xor=["save_unmasked_shift"], ), save_unmasked_fmap=dict( argstr="--unmaskfmap", xor=["save_fmap"], ), save_unmasked_shift=dict( argstr="--unmaskshift", xor=["save_shift"], ), shift_in_file=dict( argstr="--loadshift=%s", extensions=None, ), shift_out_file=dict( argstr="--saveshift=%s", extensions=None, ), smooth2d=dict( argstr="--smooth2=%.2f", ), smooth3d=dict( argstr="--smooth3=%.2f", ), unwarp_direction=dict( argstr="--unwarpdir=%s", ), unwarped_file=dict( argstr="--unwarp=%s", extensions=None, requires=["in_file"], xor=["warped_file"], ), warped_file=dict( argstr="--warp=%s", extensions=None, requires=["in_file"], xor=["unwarped_file"], ), ) inputs = FUGUE.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FUGUE_outputs(): 
output_map = dict( fmap_out_file=dict( extensions=None, ), shift_out_file=dict( extensions=None, ), unwarped_file=dict( extensions=None, ), warped_file=dict( extensions=None, ), ) outputs = FUGUE.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_FeatureExtractor.py000066400000000000000000000017751413403311400263340ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..fix import FeatureExtractor def test_FeatureExtractor_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), mel_ica=dict( argstr="%s", copyfile=False, position=-1, ), ) inputs = FeatureExtractor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FeatureExtractor_outputs(): output_map = dict( mel_ica=dict( argstr="%s", copyfile=False, position=-1, ), ) outputs = FeatureExtractor.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_FilterRegressor.py000066400000000000000000000035251413403311400261610ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import FilterRegressor def test_FilterRegressor_inputs(): input_map = dict( args=dict( argstr="%s", ), design_file=dict( argstr="-d %s", extensions=None, mandatory=True, position=3, ), environ=dict( nohash=True, usedefault=True, ), filter_all=dict( argstr="-f '%s'", mandatory=True, position=4, xor=["filter_columns"], ), filter_columns=dict( argstr="-f '%s'", mandatory=True, position=4, xor=["filter_all"], ), in_file=dict( argstr="-i %s", extensions=None, mandatory=True, position=1, ), mask=dict( 
argstr="-m %s", extensions=None, ), out_file=dict( argstr="-o %s", extensions=None, genfile=True, hash_files=False, position=2, ), out_vnscales=dict( argstr="--out_vnscales", ), output_type=dict(), var_norm=dict( argstr="--vn", ), ) inputs = FilterRegressor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FilterRegressor_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = FilterRegressor.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_FindTheBiggest.py000066400000000000000000000022401413403311400256570ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import FindTheBiggest def test_FindTheBiggest_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( argstr="%s", mandatory=True, position=0, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=2, ), output_type=dict(), ) inputs = FindTheBiggest.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FindTheBiggest_outputs(): output_map = dict( out_file=dict( argstr="%s", extensions=None, ), ) outputs = FindTheBiggest.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_GLM.py000066400000000000000000000062151413403311400234560ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import GLM def test_GLM_inputs(): input_map = dict( args=dict( argstr="%s", ), 
contrasts=dict( argstr="-c %s", extensions=None, ), dat_norm=dict( argstr="--dat_norm", ), demean=dict( argstr="--demean", ), des_norm=dict( argstr="--des_norm", ), design=dict( argstr="-d %s", extensions=None, mandatory=True, position=2, ), dof=dict( argstr="--dof=%d", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-i %s", extensions=None, mandatory=True, position=1, ), mask=dict( argstr="-m %s", extensions=None, ), out_cope=dict( argstr="--out_cope=%s", extensions=None, ), out_data_name=dict( argstr="--out_data=%s", extensions=None, ), out_f_name=dict( argstr="--out_f=%s", extensions=None, ), out_file=dict( argstr="-o %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_glm", position=3, ), out_p_name=dict( argstr="--out_p=%s", extensions=None, ), out_pf_name=dict( argstr="--out_pf=%s", extensions=None, ), out_res_name=dict( argstr="--out_res=%s", extensions=None, ), out_sigsq_name=dict( argstr="--out_sigsq=%s", extensions=None, ), out_t_name=dict( argstr="--out_t=%s", extensions=None, ), out_varcb_name=dict( argstr="--out_varcb=%s", extensions=None, ), out_vnscales_name=dict( argstr="--out_vnscales=%s", extensions=None, ), out_z_name=dict( argstr="--out_z=%s", extensions=None, ), output_type=dict(), var_norm=dict( argstr="--vn", ), ) inputs = GLM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GLM_outputs(): output_map = dict( out_cope=dict(), out_data=dict(), out_f=dict(), out_file=dict( extensions=None, ), out_p=dict(), out_pf=dict(), out_res=dict(), out_sigsq=dict(), out_t=dict(), out_varcb=dict(), out_vnscales=dict(), out_z=dict(), ) outputs = GLM.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_ICA_AROMA.py000066400000000000000000000042611413403311400243510ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..aroma import ICA_AROMA def test_ICA_AROMA_inputs(): input_map = dict( TR=dict( argstr="-tr %.3f", ), args=dict( argstr="%s", ), denoise_type=dict( argstr="-den %s", mandatory=True, usedefault=True, ), dim=dict( argstr="-dim %d", ), environ=dict( nohash=True, usedefault=True, ), feat_dir=dict( argstr="-feat %s", mandatory=True, xor=["in_file", "mat_file", "fnirt_warp_file", "motion_parameters"], ), fnirt_warp_file=dict( argstr="-warp %s", extensions=None, xor=["feat_dir"], ), in_file=dict( argstr="-i %s", extensions=None, mandatory=True, xor=["feat_dir"], ), mask=dict( argstr="-m %s", extensions=None, xor=["feat_dir"], ), mat_file=dict( argstr="-affmat %s", extensions=None, xor=["feat_dir"], ), melodic_dir=dict( argstr="-meldir %s", ), motion_parameters=dict( argstr="-mc %s", extensions=None, mandatory=True, xor=["feat_dir"], ), out_dir=dict( argstr="-o %s", mandatory=True, usedefault=True, ), ) inputs = ICA_AROMA.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ICA_AROMA_outputs(): output_map = dict( aggr_denoised_file=dict( extensions=None, ), nonaggr_denoised_file=dict( extensions=None, ), out_dir=dict(), ) outputs = ICA_AROMA.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_ImageMaths.py000066400000000000000000000030511413403311400250510ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ImageMaths def test_ImageMaths_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( 
argstr="%s", extensions=None, mandatory=True, position=1, ), in_file2=dict( argstr="%s", extensions=None, position=3, ), mask_file=dict( argstr="-mas %s", extensions=None, ), op_string=dict( argstr="%s", position=2, ), out_data_type=dict( argstr="-odt %s", position=-1, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_type=dict(), suffix=dict(), ) inputs = ImageMaths.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ImageMaths_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ImageMaths.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_ImageMeants.py000066400000000000000000000032671413403311400252350ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ImageMeants def test_ImageMeants_inputs(): input_map = dict( args=dict( argstr="%s", ), eig=dict( argstr="--eig", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-i %s", extensions=None, mandatory=True, position=0, ), mask=dict( argstr="-m %s", extensions=None, ), nobin=dict( argstr="--no_bin", ), order=dict( argstr="--order=%d", usedefault=True, ), out_file=dict( argstr="-o %s", extensions=None, genfile=True, hash_files=False, ), output_type=dict(), show_all=dict( argstr="--showall", ), spatial_coord=dict( argstr="-c %s", ), transpose=dict( argstr="--transpose", ), use_mm=dict( argstr="--usemm", ), ) inputs = ImageMeants.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ImageMeants_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = 
ImageMeants.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_ImageStats.py000066400000000000000000000025271413403311400251020ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ImageStats def test_ImageStats_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=3, ), index_mask_file=dict( argstr="-K %s", extensions=None, position=2, ), mask_file=dict( argstr="", extensions=None, ), op_string=dict( argstr="%s", mandatory=True, position=4, ), output_type=dict(), split_4d=dict( argstr="-t", position=1, ), ) inputs = ImageStats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ImageStats_outputs(): output_map = dict( out_stat=dict(), ) outputs = ImageStats.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_InvWarp.py000066400000000000000000000034501413403311400244230ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import InvWarp def test_InvWarp_inputs(): input_map = dict( absolute=dict( argstr="--abs", xor=["relative"], ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inverse_warp=dict( argstr="--out=%s", extensions=None, hash_files=False, name_source=["warp"], name_template="%s_inverse", ), jacobian_max=dict( argstr="--jmax=%f", ), jacobian_min=dict( argstr="--jmin=%f", ), niter=dict( argstr="--niter=%d", ), noconstraint=dict( argstr="--noconstraint", ), output_type=dict(), reference=dict( 
argstr="--ref=%s", extensions=None, mandatory=True, ), regularise=dict( argstr="--regularise=%f", ), relative=dict( argstr="--rel", xor=["absolute"], ), warp=dict( argstr="--warp=%s", extensions=None, mandatory=True, ), ) inputs = InvWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_InvWarp_outputs(): output_map = dict( inverse_warp=dict( extensions=None, ), ) outputs = InvWarp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_IsotropicSmooth.py000066400000000000000000000033271413403311400262050ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import IsotropicSmooth def test_IsotropicSmooth_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fwhm=dict( argstr="-s %.5f", mandatory=True, position=4, xor=["sigma"], ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), nan2zeros=dict( argstr="-nan", position=3, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), sigma=dict( argstr="-s %.5f", mandatory=True, position=4, xor=["fwhm"], ), ) inputs = IsotropicSmooth.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_IsotropicSmooth_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = IsotropicSmooth.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_L2Model.py000066400000000000000000000015501413403311400242720ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import L2Model def test_L2Model_inputs(): input_map = dict( num_copes=dict( mandatory=True, ), ) inputs = L2Model.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_L2Model_outputs(): output_map = dict( design_con=dict( extensions=None, ), design_grp=dict( extensions=None, ), design_mat=dict( extensions=None, ), ) outputs = L2Model.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_Level1Design.py000066400000000000000000000020501413403311400253120ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import Level1Design def test_Level1Design_inputs(): input_map = dict( bases=dict( mandatory=True, ), contrasts=dict(), interscan_interval=dict( mandatory=True, ), model_serial_correlations=dict( mandatory=True, ), orthogonalization=dict( usedefault=True, ), session_info=dict( mandatory=True, ), ) inputs = Level1Design.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Level1Design_outputs(): output_map = dict( ev_files=dict(), fsf_files=dict(), ) outputs = Level1Design.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_MCFLIRT.py000066400000000000000000000051751413403311400241430ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from 
..preprocess import MCFLIRT def test_MCFLIRT_inputs(): input_map = dict( args=dict( argstr="%s", ), bins=dict( argstr="-bins %d", ), cost=dict( argstr="-cost %s", ), dof=dict( argstr="-dof %d", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, position=0, ), init=dict( argstr="-init %s", extensions=None, ), interpolation=dict( argstr="-%s_final", ), mean_vol=dict( argstr="-meanvol", ), out_file=dict( argstr="-out %s", extensions=None, genfile=True, hash_files=False, ), output_type=dict(), ref_file=dict( argstr="-reffile %s", extensions=None, ), ref_vol=dict( argstr="-refvol %d", ), rotation=dict( argstr="-rotation %d", ), save_mats=dict( argstr="-mats", ), save_plots=dict( argstr="-plots", ), save_rms=dict( argstr="-rmsabs -rmsrel", ), scaling=dict( argstr="-scaling %.2f", ), smooth=dict( argstr="-smooth %.2f", ), stages=dict( argstr="-stages %d", ), stats_imgs=dict( argstr="-stats", ), use_contour=dict( argstr="-edge", ), use_gradient=dict( argstr="-gdt", ), ) inputs = MCFLIRT.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MCFLIRT_outputs(): output_map = dict( mat_file=dict(), mean_img=dict( extensions=None, ), out_file=dict( extensions=None, ), par_file=dict( extensions=None, ), rms_files=dict(), std_img=dict( extensions=None, ), variance_img=dict( extensions=None, ), ) outputs = MCFLIRT.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_MELODIC.py000066400000000000000000000103541413403311400241120ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import MELODIC def test_MELODIC_inputs(): input_map = dict( ICs=dict( argstr="--ICs=%s", extensions=None, ), approach=dict( 
argstr="-a %s", ), args=dict( argstr="%s", ), bg_image=dict( argstr="--bgimage=%s", extensions=None, ), bg_threshold=dict( argstr="--bgthreshold=%f", ), cov_weight=dict( argstr="--covarweight=%f", ), dim=dict( argstr="-d %d", ), dim_est=dict( argstr="--dimest=%s", ), environ=dict( nohash=True, usedefault=True, ), epsilon=dict( argstr="--eps=%f", ), epsilonS=dict( argstr="--epsS=%f", ), in_files=dict( argstr="-i %s", mandatory=True, position=0, sep=",", ), log_power=dict( argstr="--logPower", ), mask=dict( argstr="-m %s", extensions=None, ), max_restart=dict( argstr="--maxrestart=%d", ), maxit=dict( argstr="--maxit=%d", ), migp=dict( argstr="--migp", ), migpN=dict( argstr="--migpN %d", ), migp_factor=dict( argstr="--migp_factor %d", ), migp_shuffle=dict( argstr="--migp_shuffle", ), mix=dict( argstr="--mix=%s", extensions=None, ), mm_thresh=dict( argstr="--mmthresh=%f", ), no_bet=dict( argstr="--nobet", ), no_mask=dict( argstr="--nomask", ), no_mm=dict( argstr="--no_mm", ), non_linearity=dict( argstr="--nl=%s", ), num_ICs=dict( argstr="-n %d", ), out_all=dict( argstr="--Oall", ), out_dir=dict( argstr="-o %s", genfile=True, ), out_mean=dict( argstr="--Omean", ), out_orig=dict( argstr="--Oorig", ), out_pca=dict( argstr="--Opca", ), out_stats=dict( argstr="--Ostats", ), out_unmix=dict( argstr="--Ounmix", ), out_white=dict( argstr="--Owhite", ), output_type=dict(), pbsc=dict( argstr="--pbsc", ), rem_cmp=dict( argstr="-f %d", ), remove_deriv=dict( argstr="--remove_deriv", ), report=dict( argstr="--report", ), report_maps=dict( argstr="--report_maps=%s", ), s_con=dict( argstr="--Scon=%s", extensions=None, ), s_des=dict( argstr="--Sdes=%s", extensions=None, ), sep_vn=dict( argstr="--sep_vn", ), sep_whiten=dict( argstr="--sep_whiten", ), smode=dict( argstr="--smode=%s", extensions=None, ), t_con=dict( argstr="--Tcon=%s", extensions=None, ), t_des=dict( argstr="--Tdes=%s", extensions=None, ), tr_sec=dict( argstr="--tr=%f", ), update_mask=dict( argstr="--update_mask", ), 
var_norm=dict( argstr="--vn", ), ) inputs = MELODIC.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MELODIC_outputs(): output_map = dict( out_dir=dict(), report_dir=dict(), ) outputs = MELODIC.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_MakeDyadicVectors.py000066400000000000000000000030711413403311400263750ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import MakeDyadicVectors def test_MakeDyadicVectors_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), mask=dict( argstr="%s", extensions=None, position=2, ), output=dict( argstr="%s", extensions=None, hash_files=False, position=3, usedefault=True, ), output_type=dict(), perc=dict( argstr="%f", position=4, ), phi_vol=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), theta_vol=dict( argstr="%s", extensions=None, mandatory=True, position=0, ), ) inputs = MakeDyadicVectors.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MakeDyadicVectors_outputs(): output_map = dict( dispersion=dict( extensions=None, ), dyads=dict( extensions=None, ), ) outputs = MakeDyadicVectors.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_MathsCommand.py000066400000000000000000000026621413403311400254140ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import MathsCommand def test_MathsCommand_inputs(): input_map = dict( 
args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), nan2zeros=dict( argstr="-nan", position=3, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), ) inputs = MathsCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MathsCommand_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MathsCommand.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_MaxImage.py000066400000000000000000000030231413403311400245210ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import MaxImage def test_MaxImage_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="-%smax", position=4, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), nan2zeros=dict( argstr="-nan", position=3, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), ) inputs = MaxImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MaxImage_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MaxImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value 
in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_MaxnImage.py000066400000000000000000000030311413403311400246760ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import MaxnImage def test_MaxnImage_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="-%smaxn", position=4, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), nan2zeros=dict( argstr="-nan", position=3, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), ) inputs = MaxnImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MaxnImage_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MaxnImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_MeanImage.py000066400000000000000000000030311413403311400246530ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import MeanImage def test_MeanImage_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="-%smean", position=4, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), nan2zeros=dict( argstr="-nan", position=3, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( 
argstr="-odt %s", position=-1, ), output_type=dict(), ) inputs = MeanImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MeanImage_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MeanImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_MedianImage.py000066400000000000000000000030451413403311400251750ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import MedianImage def test_MedianImage_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="-%smedian", position=4, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), nan2zeros=dict( argstr="-nan", position=3, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), ) inputs = MedianImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MedianImage_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MedianImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_Merge.py000066400000000000000000000025241413403311400240750ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Merge def test_Merge_inputs(): input_map = dict( 
args=dict( argstr="%s", ), dimension=dict( argstr="-%s", mandatory=True, position=0, ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( argstr="%s", mandatory=True, position=2, ), merged_file=dict( argstr="%s", extensions=None, hash_files=False, name_source="in_files", name_template="%s_merged", position=1, ), output_type=dict(), tr=dict( argstr="%.2f", position=-1, ), ) inputs = Merge.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Merge_outputs(): output_map = dict( merged_file=dict( extensions=None, ), ) outputs = Merge.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_MinImage.py000066400000000000000000000030231413403311400245170ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import MinImage def test_MinImage_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="-%smin", position=4, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), nan2zeros=dict( argstr="-nan", position=3, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), ) inputs = MinImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MinImage_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MinImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_MotionOutliers.py000066400000000000000000000042461413403311400260350ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import MotionOutliers def test_MotionOutliers_inputs(): input_map = dict( args=dict( argstr="%s", ), dummy=dict( argstr="--dummy=%d", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-i %s", extensions=None, mandatory=True, ), mask=dict( argstr="-m %s", extensions=None, ), metric=dict( argstr="--%s", ), no_motion_correction=dict( argstr="--nomoco", ), out_file=dict( argstr="-o %s", extensions=None, hash_files=False, keep_extension=True, name_source="in_file", name_template="%s_outliers.txt", ), out_metric_plot=dict( argstr="-p %s", extensions=None, hash_files=False, keep_extension=True, name_source="in_file", name_template="%s_metrics.png", ), out_metric_values=dict( argstr="-s %s", extensions=None, hash_files=False, keep_extension=True, name_source="in_file", name_template="%s_metrics.txt", ), output_type=dict(), threshold=dict( argstr="--thresh=%g", ), ) inputs = MotionOutliers.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MotionOutliers_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_metric_plot=dict( extensions=None, ), out_metric_values=dict( extensions=None, ), ) outputs = MotionOutliers.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_MultiImageMaths.py000066400000000000000000000031641413403311400260710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import MultiImageMaths def test_MultiImageMaths_inputs(): input_map = dict( 
args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), nan2zeros=dict( argstr="-nan", position=3, ), op_string=dict( argstr="%s", mandatory=True, position=4, ), operand_files=dict( mandatory=True, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), ) inputs = MultiImageMaths.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MultiImageMaths_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MultiImageMaths.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_MultipleRegressDesign.py000066400000000000000000000021061413403311400273120ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import MultipleRegressDesign def test_MultipleRegressDesign_inputs(): input_map = dict( contrasts=dict( mandatory=True, ), groups=dict(), regressors=dict( mandatory=True, ), ) inputs = MultipleRegressDesign.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MultipleRegressDesign_outputs(): output_map = dict( design_con=dict( extensions=None, ), design_fts=dict( extensions=None, ), design_grp=dict( extensions=None, ), design_mat=dict( extensions=None, ), ) outputs = MultipleRegressDesign.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_Overlay.py000066400000000000000000000051621413403311400244600ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Overlay def test_Overlay_inputs(): input_map = dict( args=dict( argstr="%s", ), auto_thresh_bg=dict( argstr="-a", mandatory=True, position=5, xor=("auto_thresh_bg", "full_bg_range", "bg_thresh"), ), background_image=dict( argstr="%s", extensions=None, mandatory=True, position=4, ), bg_thresh=dict( argstr="%.3f %.3f", mandatory=True, position=5, xor=("auto_thresh_bg", "full_bg_range", "bg_thresh"), ), environ=dict( nohash=True, usedefault=True, ), full_bg_range=dict( argstr="-A", mandatory=True, position=5, xor=("auto_thresh_bg", "full_bg_range", "bg_thresh"), ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-1, ), out_type=dict( argstr="%s", position=2, usedefault=True, ), output_type=dict(), show_negative_stats=dict( argstr="%s", position=8, xor=["stat_image2"], ), stat_image=dict( argstr="%s", extensions=None, mandatory=True, position=6, ), stat_image2=dict( argstr="%s", extensions=None, position=9, xor=["show_negative_stats"], ), stat_thresh=dict( argstr="%.2f %.2f", mandatory=True, position=7, ), stat_thresh2=dict( argstr="%.2f %.2f", position=10, ), transparency=dict( argstr="%s", position=1, usedefault=True, ), use_checkerboard=dict( argstr="-c", position=3, ), ) inputs = Overlay.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Overlay_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Overlay.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_PRELUDE.py000066400000000000000000000051761413403311400241440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import PRELUDE def test_PRELUDE_inputs(): input_map = dict( args=dict( argstr="%s", ), complex_phase_file=dict( argstr="--complex=%s", extensions=None, mandatory=True, xor=["magnitude_file", "phase_file"], ), end=dict( argstr="--end=%d", ), environ=dict( nohash=True, usedefault=True, ), label_file=dict( argstr="--labels=%s", extensions=None, hash_files=False, ), labelprocess2d=dict( argstr="--labelslices", ), magnitude_file=dict( argstr="--abs=%s", extensions=None, mandatory=True, xor=["complex_phase_file"], ), mask_file=dict( argstr="--mask=%s", extensions=None, ), num_partitions=dict( argstr="--numphasesplit=%d", ), output_type=dict(), phase_file=dict( argstr="--phase=%s", extensions=None, mandatory=True, xor=["complex_phase_file"], ), process2d=dict( argstr="--slices", xor=["labelprocess2d"], ), process3d=dict( argstr="--force3D", xor=["labelprocess2d", "process2d"], ), rawphase_file=dict( argstr="--rawphase=%s", extensions=None, hash_files=False, ), removeramps=dict( argstr="--removeramps", ), savemask_file=dict( argstr="--savemask=%s", extensions=None, hash_files=False, ), start=dict( argstr="--start=%d", ), threshold=dict( argstr="--thresh=%.10f", ), unwrapped_phase_file=dict( argstr="--unwrap=%s", extensions=None, genfile=True, hash_files=False, ), ) inputs = PRELUDE.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_PRELUDE_outputs(): output_map = dict( unwrapped_phase_file=dict( extensions=None, ), ) outputs = PRELUDE.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_PercentileImage.py000066400000000000000000000032061413403311400260710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import PercentileImage def test_PercentileImage_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="-%sperc", position=4, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), nan2zeros=dict( argstr="-nan", position=3, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), perc=dict( argstr="%f", position=5, ), ) inputs = PercentileImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_PercentileImage_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = PercentileImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_PlotMotionParams.py000066400000000000000000000025201413403311400263020ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import PlotMotionParams def test_PlotMotionParams_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", mandatory=True, position=1, ), in_source=dict( mandatory=True, ), out_file=dict( argstr="-o %s", extensions=None, genfile=True, hash_files=False, ), output_type=dict(), plot_size=dict( argstr="%s", ), plot_type=dict( argstr="%s", mandatory=True, ), ) inputs = PlotMotionParams.input_spec() for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_PlotMotionParams_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = PlotMotionParams.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_PlotTimeSeries.py000066400000000000000000000043001413403311400257400ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import PlotTimeSeries def test_PlotTimeSeries_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", mandatory=True, position=1, ), labels=dict( argstr="%s", ), legend_file=dict( argstr="--legend=%s", extensions=None, ), out_file=dict( argstr="-o %s", extensions=None, genfile=True, hash_files=False, ), output_type=dict(), plot_finish=dict( argstr="--finish=%d", xor=("plot_range",), ), plot_range=dict( argstr="%s", xor=("plot_start", "plot_finish"), ), plot_size=dict( argstr="%s", ), plot_start=dict( argstr="--start=%d", xor=("plot_range",), ), sci_notation=dict( argstr="--sci", ), title=dict( argstr="%s", ), x_precision=dict( argstr="--precision=%d", ), x_units=dict( argstr="-u %d", usedefault=True, ), y_max=dict( argstr="--ymax=%.2f", xor=("y_range",), ), y_min=dict( argstr="--ymin=%.2f", xor=("y_range",), ), y_range=dict( argstr="%s", xor=("y_min", "y_max"), ), ) inputs = PlotTimeSeries.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_PlotTimeSeries_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = PlotTimeSeries.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_PowerSpectrum.py000066400000000000000000000022401413403311400256500ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import PowerSpectrum def test_PowerSpectrum_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=0, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=1, ), output_type=dict(), ) inputs = PowerSpectrum.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_PowerSpectrum_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = PowerSpectrum.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_PrepareFieldmap.py000066400000000000000000000031741413403311400261000ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..epi import PrepareFieldmap def test_PrepareFieldmap_inputs(): input_map = dict( args=dict( argstr="%s", ), delta_TE=dict( argstr="%f", mandatory=True, position=-2, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), in_magnitude=dict( argstr="%s", extensions=None, mandatory=True, position=3, ), in_phase=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), nocheck=dict( argstr="--nocheck", position=-1, usedefault=True, ), out_fieldmap=dict( argstr="%s", extensions=None, position=4, ), output_type=dict(), scanner=dict( argstr="%s", position=1, usedefault=True, ), ) inputs = PrepareFieldmap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): 
assert getattr(inputs.traits()[key], metakey) == value def test_PrepareFieldmap_outputs(): output_map = dict( out_fieldmap=dict( extensions=None, ), ) outputs = PrepareFieldmap.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_ProbTrackX.py000066400000000000000000000075071413403311400250630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import ProbTrackX def test_ProbTrackX_inputs(): input_map = dict( args=dict( argstr="%s", ), avoid_mp=dict( argstr="--avoid=%s", extensions=None, ), c_thresh=dict( argstr="--cthr=%.3f", ), correct_path_distribution=dict( argstr="--pd", ), dist_thresh=dict( argstr="--distthresh=%.3f", ), environ=dict( nohash=True, usedefault=True, ), fibst=dict( argstr="--fibst=%d", ), force_dir=dict( argstr="--forcedir", usedefault=True, ), fsamples=dict( mandatory=True, ), inv_xfm=dict( argstr="--invxfm=%s", extensions=None, ), loop_check=dict( argstr="--loopcheck", ), mask=dict( argstr="-m %s", extensions=None, mandatory=True, ), mask2=dict( argstr="--mask2=%s", extensions=None, ), mesh=dict( argstr="--mesh=%s", extensions=None, ), mod_euler=dict( argstr="--modeuler", ), mode=dict( argstr="--mode=%s", genfile=True, ), n_samples=dict( argstr="--nsamples=%d", usedefault=True, ), n_steps=dict( argstr="--nsteps=%d", ), network=dict( argstr="--network", ), opd=dict( argstr="--opd", usedefault=True, ), os2t=dict( argstr="--os2t", ), out_dir=dict( argstr="--dir=%s", genfile=True, ), output_type=dict(), phsamples=dict( mandatory=True, ), rand_fib=dict( argstr="--randfib=%d", ), random_seed=dict( argstr="--rseed", ), s2tastext=dict( argstr="--s2tastext", ), sample_random_points=dict( argstr="--sampvox", ), samples_base_name=dict( argstr="--samples=%s", usedefault=True, ), seed=dict( argstr="--seed=%s", mandatory=True, ), seed_ref=dict( 
argstr="--seedref=%s", extensions=None, ), step_length=dict( argstr="--steplength=%.3f", ), stop_mask=dict( argstr="--stop=%s", extensions=None, ), target_masks=dict( argstr="--targetmasks=%s", ), thsamples=dict( mandatory=True, ), use_anisotropy=dict( argstr="--usef", ), verbose=dict( argstr="--verbose=%d", ), waypoints=dict( argstr="--waypoints=%s", extensions=None, ), xfm=dict( argstr="--xfm=%s", extensions=None, ), ) inputs = ProbTrackX.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ProbTrackX_outputs(): output_map = dict( fdt_paths=dict(), log=dict( extensions=None, ), particle_files=dict(), targets=dict(), way_total=dict( extensions=None, ), ) outputs = ProbTrackX.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_ProbTrackX2.py000066400000000000000000000124551413403311400251430ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import ProbTrackX2 def test_ProbTrackX2_inputs(): input_map = dict( args=dict( argstr="%s", ), avoid_mp=dict( argstr="--avoid=%s", extensions=None, ), c_thresh=dict( argstr="--cthr=%.3f", ), colmask4=dict( argstr="--colmask4=%s", extensions=None, ), correct_path_distribution=dict( argstr="--pd", ), dist_thresh=dict( argstr="--distthresh=%.3f", ), distthresh1=dict( argstr="--distthresh1=%.3f", ), distthresh3=dict( argstr="--distthresh3=%.3f", ), environ=dict( nohash=True, usedefault=True, ), fibst=dict( argstr="--fibst=%d", ), fopd=dict( argstr="--fopd=%s", extensions=None, ), force_dir=dict( argstr="--forcedir", usedefault=True, ), fsamples=dict( mandatory=True, ), inv_xfm=dict( argstr="--invxfm=%s", extensions=None, ), loop_check=dict( argstr="--loopcheck", ), lrtarget3=dict( argstr="--lrtarget3=%s", 
extensions=None, ), mask=dict( argstr="-m %s", extensions=None, mandatory=True, ), meshspace=dict( argstr="--meshspace=%s", ), mod_euler=dict( argstr="--modeuler", ), n_samples=dict( argstr="--nsamples=%d", usedefault=True, ), n_steps=dict( argstr="--nsteps=%d", ), network=dict( argstr="--network", ), omatrix1=dict( argstr="--omatrix1", ), omatrix2=dict( argstr="--omatrix2", requires=["target2"], ), omatrix3=dict( argstr="--omatrix3", requires=["target3", "lrtarget3"], ), omatrix4=dict( argstr="--omatrix4", ), onewaycondition=dict( argstr="--onewaycondition", ), opd=dict( argstr="--opd", usedefault=True, ), os2t=dict( argstr="--os2t", ), out_dir=dict( argstr="--dir=%s", genfile=True, ), output_type=dict(), phsamples=dict( mandatory=True, ), rand_fib=dict( argstr="--randfib=%d", ), random_seed=dict( argstr="--rseed", ), s2tastext=dict( argstr="--s2tastext", ), sample_random_points=dict( argstr="--sampvox", ), samples_base_name=dict( argstr="--samples=%s", usedefault=True, ), seed=dict( argstr="--seed=%s", mandatory=True, ), seed_ref=dict( argstr="--seedref=%s", extensions=None, ), simple=dict( argstr="--simple", ), step_length=dict( argstr="--steplength=%.3f", ), stop_mask=dict( argstr="--stop=%s", extensions=None, ), target2=dict( argstr="--target2=%s", extensions=None, ), target3=dict( argstr="--target3=%s", extensions=None, ), target4=dict( argstr="--target4=%s", extensions=None, ), target_masks=dict( argstr="--targetmasks=%s", ), thsamples=dict( mandatory=True, ), use_anisotropy=dict( argstr="--usef", ), verbose=dict( argstr="--verbose=%d", ), waycond=dict( argstr="--waycond=%s", ), wayorder=dict( argstr="--wayorder", ), waypoints=dict( argstr="--waypoints=%s", extensions=None, ), xfm=dict( argstr="--xfm=%s", extensions=None, ), ) inputs = ProbTrackX2.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ProbTrackX2_outputs(): output_map = dict( 
fdt_paths=dict(), log=dict( extensions=None, ), lookup_tractspace=dict( extensions=None, ), matrix1_dot=dict( extensions=None, ), matrix2_dot=dict( extensions=None, ), matrix3_dot=dict( extensions=None, ), network_matrix=dict( extensions=None, ), particle_files=dict(), targets=dict(), way_total=dict( extensions=None, ), ) outputs = ProbTrackX2.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_ProjThresh.py000066400000000000000000000020261413403311400251230ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import ProjThresh def test_ProjThresh_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( argstr="%s", mandatory=True, position=0, ), output_type=dict(), threshold=dict( argstr="%d", mandatory=True, position=1, ), ) inputs = ProjThresh.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ProjThresh_outputs(): output_map = dict( out_files=dict(), ) outputs = ProjThresh.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_Randomise.py000066400000000000000000000057261413403311400247660ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import Randomise def test_Randomise_inputs(): input_map = dict( args=dict( argstr="%s", ), base_name=dict( argstr='-o "%s"', position=1, usedefault=True, ), c_thresh=dict( argstr="-c %.1f", ), cm_thresh=dict( argstr="-C %.1f", ), demean=dict( argstr="-D", ), design_mat=dict( argstr="-d %s", extensions=None, position=2, ), environ=dict( nohash=True, 
usedefault=True, ), f_c_thresh=dict( argstr="-F %.2f", ), f_cm_thresh=dict( argstr="-S %.2f", ), f_only=dict( argstr="--fonly", ), fcon=dict( argstr="-f %s", extensions=None, ), in_file=dict( argstr="-i %s", extensions=None, mandatory=True, position=0, ), mask=dict( argstr="-m %s", extensions=None, ), num_perm=dict( argstr="-n %d", ), one_sample_group_mean=dict( argstr="-1", ), output_type=dict(), p_vec_n_dist_files=dict( argstr="-P", ), raw_stats_imgs=dict( argstr="-R", ), seed=dict( argstr="--seed=%d", ), show_info_parallel_mode=dict( argstr="-Q", ), show_total_perms=dict( argstr="-q", ), tcon=dict( argstr="-t %s", extensions=None, position=3, ), tfce=dict( argstr="-T", ), tfce2D=dict( argstr="--T2", ), tfce_C=dict( argstr="--tfce_C=%.2f", ), tfce_E=dict( argstr="--tfce_E=%.2f", ), tfce_H=dict( argstr="--tfce_H=%.2f", ), var_smooth=dict( argstr="-v %d", ), vox_p_values=dict( argstr="-x", ), x_block_labels=dict( argstr="-e %s", extensions=None, ), ) inputs = Randomise.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Randomise_outputs(): output_map = dict( f_corrected_p_files=dict(), f_p_files=dict(), fstat_files=dict(), t_corrected_p_files=dict(), t_p_files=dict(), tstat_files=dict(), ) outputs = Randomise.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_Reorient2Std.py000066400000000000000000000021531413403311400253600ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Reorient2Std def test_Reorient2Std_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, ), out_file=dict( argstr="%s", extensions=None, genfile=True, 
hash_files=False, ), output_type=dict(), ) inputs = Reorient2Std.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Reorient2Std_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Reorient2Std.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_RobustFOV.py000066400000000000000000000027641413403311400246750ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import RobustFOV def test_RobustFOV_inputs(): input_map = dict( args=dict( argstr="%s", ), brainsize=dict( argstr="-b %d", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-i %s", extensions=None, mandatory=True, position=0, ), out_roi=dict( argstr="-r %s", extensions=None, hash_files=False, name_source=["in_file"], name_template="%s_ROI", ), out_transform=dict( argstr="-m %s", extensions=None, hash_files=False, name_source=["in_file"], name_template="%s_to_ROI", ), output_type=dict(), ) inputs = RobustFOV.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RobustFOV_outputs(): output_map = dict( out_roi=dict( extensions=None, ), out_transform=dict( extensions=None, ), ) outputs = RobustFOV.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_SMM.py000066400000000000000000000026351413403311400234750ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import SMM def test_SMM_inputs(): input_map = dict( args=dict( argstr="%s", ), 
environ=dict( nohash=True, usedefault=True, ), mask=dict( argstr='--mask="%s"', copyfile=False, extensions=None, mandatory=True, position=1, ), no_deactivation_class=dict( argstr="--zfstatmode", position=2, ), output_type=dict(), spatial_data_file=dict( argstr='--sdf="%s"', copyfile=False, extensions=None, mandatory=True, position=0, ), ) inputs = SMM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SMM_outputs(): output_map = dict( activation_p_map=dict( extensions=None, ), deactivation_p_map=dict( extensions=None, ), null_p_map=dict( extensions=None, ), ) outputs = SMM.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_SUSAN.py000066400000000000000000000032751413403311400237330ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import SUSAN def test_SUSAN_inputs(): input_map = dict( args=dict( argstr="%s", ), brightness_threshold=dict( argstr="%.10f", mandatory=True, position=2, ), dimension=dict( argstr="%d", position=4, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), fwhm=dict( argstr="%.10f", mandatory=True, position=3, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-1, ), output_type=dict(), usans=dict( argstr="", position=6, usedefault=True, ), use_median=dict( argstr="%d", position=5, usedefault=True, ), ) inputs = SUSAN.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SUSAN_outputs(): output_map = dict( smoothed_file=dict( extensions=None, ), ) outputs = SUSAN.output_spec() for 
key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_SigLoss.py000066400000000000000000000024341413403311400244210ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import SigLoss def test_SigLoss_inputs(): input_map = dict( args=dict( argstr="%s", ), echo_time=dict( argstr="--te=%f", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-i %s", extensions=None, mandatory=True, ), mask_file=dict( argstr="-m %s", extensions=None, ), out_file=dict( argstr="-s %s", extensions=None, genfile=True, ), output_type=dict(), slice_direction=dict( argstr="-d %s", ), ) inputs = SigLoss.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SigLoss_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = SigLoss.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_Slice.py000066400000000000000000000020371413403311400240740ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Slice def test_Slice_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", copyfile=False, extensions=None, mandatory=True, position=0, ), out_base_name=dict( argstr="%s", position=1, ), output_type=dict(), ) inputs = Slice.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Slice_outputs(): output_map = dict( out_files=dict(), ) outputs = Slice.output_spec() for key, metadata in 
list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_SliceTimer.py000066400000000000000000000033101413403311400250700ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import SliceTimer def test_SliceTimer_inputs(): input_map = dict( args=dict( argstr="%s", ), custom_order=dict( argstr="--ocustom=%s", extensions=None, ), custom_timings=dict( argstr="--tcustom=%s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), global_shift=dict( argstr="--tglobal", ), in_file=dict( argstr="--in=%s", extensions=None, mandatory=True, position=0, ), index_dir=dict( argstr="--down", ), interleaved=dict( argstr="--odd", ), out_file=dict( argstr="--out=%s", extensions=None, genfile=True, hash_files=False, ), output_type=dict(), slice_direction=dict( argstr="--direction=%d", ), time_repetition=dict( argstr="--repeat=%f", ), ) inputs = SliceTimer.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SliceTimer_outputs(): output_map = dict( slice_time_corrected_file=dict( extensions=None, ), ) outputs = SliceTimer.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_Slicer.py000066400000000000000000000057421413403311400242640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Slicer def test_Slicer_inputs(): input_map = dict( all_axial=dict( argstr="-A", position=10, requires=["image_width"], xor=("single_slice", "middle_slices", "all_axial", "sample_axial"), ), args=dict( argstr="%s", ), colour_map=dict( argstr="-l %s", extensions=None, position=4, ), dither_edges=dict( 
argstr="-t", position=7, ), environ=dict( nohash=True, usedefault=True, ), image_edges=dict( argstr="%s", extensions=None, position=2, ), image_width=dict( argstr="%d", position=-2, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), intensity_range=dict( argstr="-i %.3f %.3f", position=5, ), label_slices=dict( argstr="-L", position=3, usedefault=True, ), middle_slices=dict( argstr="-a", position=10, xor=("single_slice", "middle_slices", "all_axial", "sample_axial"), ), nearest_neighbour=dict( argstr="-n", position=8, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-1, ), output_type=dict(), sample_axial=dict( argstr="-S %d", position=10, requires=["image_width"], xor=("single_slice", "middle_slices", "all_axial", "sample_axial"), ), scaling=dict( argstr="-s %f", position=0, ), show_orientation=dict( argstr="%s", position=9, usedefault=True, ), single_slice=dict( argstr="-%s", position=10, requires=["slice_number"], xor=("single_slice", "middle_slices", "all_axial", "sample_axial"), ), slice_number=dict( argstr="-%d", position=11, ), threshold_edges=dict( argstr="-e %.3f", position=6, ), ) inputs = Slicer.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Slicer_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Slicer.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_Smooth.py000066400000000000000000000027651413403311400243160ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Smooth def test_Smooth_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fwhm=dict( argstr="-kernel gauss %.03f -fmean", 
mandatory=True, position=1, xor=["sigma"], ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=0, ), output_type=dict(), sigma=dict( argstr="-kernel gauss %.03f -fmean", mandatory=True, position=1, xor=["fwhm"], ), smoothed_file=dict( argstr="%s", extensions=None, hash_files=False, name_source=["in_file"], name_template="%s_smooth", position=2, ), ) inputs = Smooth.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Smooth_outputs(): output_map = dict( smoothed_file=dict( extensions=None, ), ) outputs = Smooth.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_SmoothEstimate.py000066400000000000000000000025511413403311400260030ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import SmoothEstimate def test_SmoothEstimate_inputs(): input_map = dict( args=dict( argstr="%s", ), dof=dict( argstr="--dof=%d", mandatory=True, xor=["zstat_file"], ), environ=dict( nohash=True, usedefault=True, ), mask_file=dict( argstr="--mask=%s", extensions=None, mandatory=True, ), output_type=dict(), residual_fit_file=dict( argstr="--res=%s", extensions=None, requires=["dof"], ), zstat_file=dict( argstr="--zstat=%s", extensions=None, xor=["dof"], ), ) inputs = SmoothEstimate.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SmoothEstimate_outputs(): output_map = dict( dlh=dict(), resels=dict(), volume=dict(), ) outputs = SmoothEstimate.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_SpatialFilter.py000066400000000000000000000036251413403311400256040ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import SpatialFilter def test_SpatialFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), kernel_file=dict( argstr="%s", extensions=None, position=5, xor=["kernel_size"], ), kernel_shape=dict( argstr="-kernel %s", position=4, ), kernel_size=dict( argstr="%.4f", position=5, xor=["kernel_file"], ), nan2zeros=dict( argstr="-nan", position=3, ), operation=dict( argstr="-f%s", mandatory=True, position=6, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), ) inputs = SpatialFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SpatialFilter_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = SpatialFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_Split.py000066400000000000000000000021641413403311400241310ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Split def test_Split_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="-%s", mandatory=True, position=2, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=0, ), out_base_name=dict( argstr="%s", position=1, ), output_type=dict(), ) inputs = 
Split.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Split_outputs(): output_map = dict( out_files=dict(), ) outputs = Split.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_StdImage.py000066400000000000000000000030231413403311400245260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import StdImage def test_StdImage_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( argstr="-%sstd", position=4, usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), nan2zeros=dict( argstr="-nan", position=3, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), ) inputs = StdImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_StdImage_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = StdImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_SwapDimensions.py000066400000000000000000000023541413403311400260020ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import SwapDimensions def test_SwapDimensions_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( 
argstr="%s", extensions=None, mandatory=True, position="1", ), new_dims=dict( argstr="%s %s %s", mandatory=True, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, ), output_type=dict(), ) inputs = SwapDimensions.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SwapDimensions_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = SwapDimensions.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_TOPUP.py000066400000000000000000000100771413403311400237470ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..epi import TOPUP def test_TOPUP_inputs(): input_map = dict( args=dict( argstr="%s", ), config=dict( argstr="--config=%s", usedefault=True, ), encoding_direction=dict( argstr="--datain=%s", mandatory=True, requires=["readout_times"], xor=["encoding_file"], ), encoding_file=dict( argstr="--datain=%s", extensions=None, mandatory=True, xor=["encoding_direction"], ), environ=dict( nohash=True, usedefault=True, ), estmov=dict( argstr="--estmov=%d", ), fwhm=dict( argstr="--fwhm=%f", ), in_file=dict( argstr="--imain=%s", extensions=None, mandatory=True, ), interp=dict( argstr="--interp=%s", ), max_iter=dict( argstr="--miter=%d", ), minmet=dict( argstr="--minmet=%d", ), numprec=dict( argstr="--numprec=%s", ), out_base=dict( argstr="--out=%s", extensions=None, hash_files=False, name_source=["in_file"], name_template="%s_base", ), out_corrected=dict( argstr="--iout=%s", extensions=None, hash_files=False, name_source=["in_file"], name_template="%s_corrected", ), out_field=dict( argstr="--fout=%s", extensions=None, hash_files=False, name_source=["in_file"], name_template="%s_field", ), out_jac_prefix=dict( 
argstr="--jacout=%s", hash_files=False, usedefault=True, ), out_logfile=dict( argstr="--logout=%s", extensions=None, hash_files=False, keep_extension=True, name_source=["in_file"], name_template="%s_topup.log", ), out_mat_prefix=dict( argstr="--rbmout=%s", hash_files=False, usedefault=True, ), out_warp_prefix=dict( argstr="--dfout=%s", hash_files=False, usedefault=True, ), output_type=dict(), readout_times=dict( mandatory=True, requires=["encoding_direction"], xor=["encoding_file"], ), reg_lambda=dict( argstr="--lambda=%0.f", ), regmod=dict( argstr="--regmod=%s", ), regrid=dict( argstr="--regrid=%d", ), scale=dict( argstr="--scale=%d", ), splineorder=dict( argstr="--splineorder=%d", ), ssqlambda=dict( argstr="--ssqlambda=%d", ), subsamp=dict( argstr="--subsamp=%d", ), warp_res=dict( argstr="--warpres=%f", ), ) inputs = TOPUP.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TOPUP_outputs(): output_map = dict( out_corrected=dict( extensions=None, ), out_enc_file=dict( extensions=None, ), out_field=dict( extensions=None, ), out_fieldcoef=dict( extensions=None, ), out_jacs=dict(), out_logfile=dict( extensions=None, ), out_mats=dict(), out_movpar=dict( extensions=None, ), out_warps=dict(), ) outputs = TOPUP.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_TemporalFilter.py000066400000000000000000000032611413403311400257660ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import TemporalFilter def test_TemporalFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), highpass_sigma=dict( argstr="-bptf %.6f", position=4, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, 
mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), lowpass_sigma=dict( argstr="%.6f", position=5, usedefault=True, ), nan2zeros=dict( argstr="-nan", position=3, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), ) inputs = TemporalFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TemporalFilter_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TemporalFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_Threshold.py000066400000000000000000000033101413403311400247640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import Threshold def test_Threshold_inputs(): input_map = dict( args=dict( argstr="%s", ), direction=dict( usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), nan2zeros=dict( argstr="-nan", position=3, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), thresh=dict( argstr="%s", mandatory=True, position=4, ), use_nonzero_voxels=dict( requires=["use_robust_range"], ), use_robust_range=dict(), ) inputs = Threshold.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Threshold_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = 
Threshold.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_TractSkeleton.py000066400000000000000000000035741413403311400256260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import TractSkeleton def test_TractSkeleton_inputs(): input_map = dict( alt_data_file=dict( argstr="-a %s", extensions=None, ), alt_skeleton=dict( argstr="-s %s", extensions=None, ), args=dict( argstr="%s", ), data_file=dict( extensions=None, ), distance_map=dict( extensions=None, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-i %s", extensions=None, mandatory=True, ), output_type=dict(), project_data=dict( argstr="-p %.3f %s %s %s %s", requires=["threshold", "distance_map", "data_file"], ), projected_data=dict( extensions=None, ), search_mask_file=dict( extensions=None, xor=["use_cingulum_mask"], ), skeleton_file=dict( argstr="-o %s", ), threshold=dict(), use_cingulum_mask=dict( usedefault=True, xor=["search_mask_file"], ), ) inputs = TractSkeleton.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TractSkeleton_outputs(): output_map = dict( projected_data=dict( extensions=None, ), skeleton_file=dict( extensions=None, ), ) outputs = TractSkeleton.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_Training.py000066400000000000000000000021331413403311400246050ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..fix import Training def test_Training_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), 
loo=dict( argstr="-l", position=2, ), mel_icas=dict( argstr="%s", copyfile=False, position=-1, ), trained_wts_filestem=dict( argstr="%s", position=1, ), ) inputs = Training.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Training_outputs(): output_map = dict( trained_wts_file=dict( extensions=None, ), ) outputs = Training.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_TrainingSetCreator.py000066400000000000000000000016021413403311400266010ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..fix import TrainingSetCreator def test_TrainingSetCreator_inputs(): input_map = dict( mel_icas_in=dict( argstr="%s", copyfile=False, position=-1, ), ) inputs = TrainingSetCreator.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TrainingSetCreator_outputs(): output_map = dict( mel_icas_out=dict( argstr="%s", copyfile=False, position=-1, ), ) outputs = TrainingSetCreator.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_UnaryMaths.py000066400000000000000000000030311413403311400251230ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import UnaryMaths def test_UnaryMaths_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), internal_datatype=dict( argstr="-dt %s", position=1, ), nan2zeros=dict( argstr="-nan", 
position=3, ), operation=dict( argstr="-%s", mandatory=True, position=4, ), out_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, position=-2, ), output_datatype=dict( argstr="-odt %s", position=-1, ), output_type=dict(), ) inputs = UnaryMaths.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_UnaryMaths_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = UnaryMaths.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_VecReg.py000066400000000000000000000035171413403311400242140ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import VecReg def test_VecReg_inputs(): input_map = dict( affine_mat=dict( argstr="-t %s", extensions=None, ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-i %s", extensions=None, mandatory=True, ), interpolation=dict( argstr="--interp=%s", ), mask=dict( argstr="-m %s", extensions=None, ), out_file=dict( argstr="-o %s", extensions=None, genfile=True, hash_files=False, ), output_type=dict(), ref_mask=dict( argstr="--refmask=%s", extensions=None, ), ref_vol=dict( argstr="-r %s", extensions=None, mandatory=True, ), rotation_mat=dict( argstr="--rotmat=%s", extensions=None, ), rotation_warp=dict( argstr="--rotwarp=%s", extensions=None, ), warp_field=dict( argstr="-w %s", extensions=None, ), ) inputs = VecReg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_VecReg_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = VecReg.output_spec() for key, metadata in list(output_map.items()): for 
metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_WarpPoints.py000066400000000000000000000034431413403311400251450ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import WarpPoints def test_WarpPoints_inputs(): input_map = dict( args=dict( argstr="%s", ), coord_mm=dict( argstr="-mm", xor=["coord_vox"], ), coord_vox=dict( argstr="-vox", xor=["coord_mm"], ), dest_file=dict( argstr="-dest %s", extensions=None, mandatory=True, ), environ=dict( nohash=True, usedefault=True, ), in_coords=dict( argstr="%s", extensions=None, mandatory=True, position=-1, ), out_file=dict( extensions=None, name_source="in_coords", name_template="%s_warped", output_name="out_file", ), src_file=dict( argstr="-src %s", extensions=None, mandatory=True, ), warp_file=dict( argstr="-warp %s", extensions=None, xor=["xfm_file"], ), xfm_file=dict( argstr="-xfm %s", extensions=None, xor=["warp_file"], ), ) inputs = WarpPoints.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_WarpPoints_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = WarpPoints.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_WarpPointsFromStd.py000066400000000000000000000032251413403311400264420ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import WarpPointsFromStd def test_WarpPointsFromStd_inputs(): input_map = dict( args=dict( argstr="%s", ), coord_mm=dict( argstr="-mm", xor=["coord_vox"], ), coord_vox=dict( argstr="-vox", xor=["coord_mm"], ), environ=dict( nohash=True, usedefault=True, ), img_file=dict( argstr="-img %s", 
extensions=None, mandatory=True, ), in_coords=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), std_file=dict( argstr="-std %s", extensions=None, mandatory=True, ), warp_file=dict( argstr="-warp %s", extensions=None, xor=["xfm_file"], ), xfm_file=dict( argstr="-xfm %s", extensions=None, xor=["warp_file"], ), ) inputs = WarpPointsFromStd.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_WarpPointsFromStd_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = WarpPointsFromStd.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_WarpPointsToStd.py000066400000000000000000000036351413403311400261260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import WarpPointsToStd def test_WarpPointsToStd_inputs(): input_map = dict( args=dict( argstr="%s", ), coord_mm=dict( argstr="-mm", xor=["coord_vox"], ), coord_vox=dict( argstr="-vox", xor=["coord_mm"], ), environ=dict( nohash=True, usedefault=True, ), img_file=dict( argstr="-img %s", extensions=None, mandatory=True, ), in_coords=dict( argstr="%s", extensions=None, mandatory=True, position=-1, ), out_file=dict( extensions=None, name_source="in_coords", name_template="%s_warped", output_name="out_file", ), premat_file=dict( argstr="-premat %s", extensions=None, ), std_file=dict( argstr="-std %s", extensions=None, mandatory=True, ), warp_file=dict( argstr="-warp %s", extensions=None, xor=["xfm_file"], ), xfm_file=dict( argstr="-xfm %s", extensions=None, xor=["warp_file"], ), ) inputs = WarpPointsToStd.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def 
test_WarpPointsToStd_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = WarpPointsToStd.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_WarpUtils.py000066400000000000000000000035121413403311400247660ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import WarpUtils def test_WarpUtils_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="--in=%s", extensions=None, mandatory=True, ), knot_space=dict( argstr="--knotspace=%d,%d,%d", ), out_file=dict( argstr="--out=%s", extensions=None, name_source=["in_file"], output_name="out_file", position=-1, ), out_format=dict( argstr="--outformat=%s", ), out_jacobian=dict( argstr="--jac=%s", extensions=None, ), output_type=dict(), reference=dict( argstr="--ref=%s", extensions=None, mandatory=True, ), warp_resolution=dict( argstr="--warpres=%0.4f,%0.4f,%0.4f", ), with_affine=dict( argstr="--withaff", ), write_jacobian=dict( mandatory=True, usedefault=True, ), ) inputs = WarpUtils.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_WarpUtils_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_jacobian=dict( extensions=None, ), ) outputs = WarpUtils.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_auto_XFibres5.py000066400000000000000000000070151413403311400244650ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dti import XFibres5 def test_XFibres5_inputs(): input_map = dict( all_ard=dict( 
argstr="--allard", xor=("no_ard", "all_ard"), ), args=dict( argstr="%s", ), burn_in=dict( argstr="--burnin=%d", usedefault=True, ), burn_in_no_ard=dict( argstr="--burnin_noard=%d", usedefault=True, ), bvals=dict( argstr="--bvals=%s", extensions=None, mandatory=True, ), bvecs=dict( argstr="--bvecs=%s", extensions=None, mandatory=True, ), cnlinear=dict( argstr="--cnonlinear", xor=("no_spat", "non_linear", "cnlinear"), ), dwi=dict( argstr="--data=%s", extensions=None, mandatory=True, ), environ=dict( nohash=True, usedefault=True, ), f0_ard=dict( argstr="--f0 --ardf0", xor=["f0_noard", "f0_ard", "all_ard"], ), f0_noard=dict( argstr="--f0", xor=["f0_noard", "f0_ard"], ), force_dir=dict( argstr="--forcedir", usedefault=True, ), fudge=dict( argstr="--fudge=%d", ), gradnonlin=dict( argstr="--gradnonlin=%s", extensions=None, ), logdir=dict( argstr="--logdir=%s", usedefault=True, ), mask=dict( argstr="--mask=%s", extensions=None, mandatory=True, ), model=dict( argstr="--model=%d", ), n_fibres=dict( argstr="--nfibres=%d", mandatory=True, usedefault=True, ), n_jumps=dict( argstr="--njumps=%d", usedefault=True, ), no_ard=dict( argstr="--noard", xor=("no_ard", "all_ard"), ), no_spat=dict( argstr="--nospat", xor=("no_spat", "non_linear", "cnlinear"), ), non_linear=dict( argstr="--nonlinear", xor=("no_spat", "non_linear", "cnlinear"), ), output_type=dict(), rician=dict( argstr="--rician", ), sample_every=dict( argstr="--sampleevery=%d", usedefault=True, ), seed=dict( argstr="--seed=%d", ), update_proposal_every=dict( argstr="--updateproposalevery=%d", usedefault=True, ), ) inputs = XFibres5.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_XFibres5_outputs(): output_map = dict( dyads=dict(), fsamples=dict(), mean_S0samples=dict( extensions=None, ), mean_dsamples=dict( extensions=None, ), mean_fsamples=dict(), mean_tausamples=dict( extensions=None, ), 
phsamples=dict(), thsamples=dict(), ) outputs = XFibres5.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/fsl/tests/test_base.py000066400000000000000000000056101413403311400227170ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import nipype.interfaces.fsl as fsl from nipype.interfaces.base import InterfaceResult from nipype.interfaces.fsl import check_fsl, no_fsl import pytest @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_fslversion(): ver = fsl.Info.version() assert ver.split(".", 1)[0].isdigit() @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_fsloutputtype(): types = list(fsl.Info.ftypes.keys()) orig_out_type = fsl.Info.output_type() assert orig_out_type in types def test_outputtype_to_ext(): for ftype, ext in fsl.Info.ftypes.items(): res = fsl.Info.output_type_to_ext(ftype) assert res == ext with pytest.raises(KeyError): fsl.Info.output_type_to_ext("JUNK") @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_FSLCommand(): # Most methods in FSLCommand are tested in the subclasses. Only # testing the one item that is not. 
cmd = fsl.FSLCommand(command="ls") res = cmd.run() assert type(res) == InterfaceResult @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_FSLCommand2(): # Check default output type and environ cmd = fsl.FSLCommand(command="junk") assert cmd._output_type == fsl.Info.output_type() assert cmd.inputs.environ["FSLOUTPUTTYPE"] == cmd._output_type assert cmd._output_type in fsl.Info.ftypes cmd = fsl.FSLCommand cmdinst = fsl.FSLCommand(command="junk") for out_type in fsl.Info.ftypes: cmd.set_default_output_type(out_type) assert cmd._output_type == out_type if out_type != fsl.Info.output_type(): # Setting class outputtype should not effect existing instances assert cmdinst.inputs.output_type != out_type @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @pytest.mark.parametrize( "args, desired_name", [ ({}, {"file": "foo.nii.gz"}), # just the filename # filename with suffix ({"suffix": "_brain"}, {"file": "foo_brain.nii.gz"}), ( {"suffix": "_brain", "cwd": "/data"}, # filename with suffix and working directory {"dir": "/data", "file": "foo_brain.nii.gz"}, ), # filename with suffix and no file extension change ({"suffix": "_brain.mat", "change_ext": False}, {"file": "foo_brain.mat"}), ], ) def test_gen_fname(args, desired_name): # Test _gen_fname method of FSLCommand cmd = fsl.FSLCommand(command="junk", output_type="NIFTI_GZ") pth = os.getcwd() fname = cmd._gen_fname("foo.nii.gz", **args) if "dir" in desired_name.keys(): desired = os.path.join(desired_name["dir"], desired_name["file"]) else: desired = os.path.join(pth, desired_name["file"]) assert fname == desired nipype-1.7.0/nipype/interfaces/fsl/tests/test_dti.py000066400000000000000000000340251413403311400225670ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import nipype.interfaces.fsl.dti as fsl from nipype.interfaces.fsl import Info, no_fsl from 
nipype.interfaces.base import Undefined import pytest from nipype.testing.fixtures import create_files_in_directory # test dtifit @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_dtifit2(create_files_in_directory): filelist, outdir = create_files_in_directory dti = fsl.DTIFit() # make sure command gets called assert dti.cmd == "dtifit" # test raising error with mandatory args absent with pytest.raises(ValueError): dti.run() # .inputs based parameters setting dti.inputs.dwi = filelist[0] dti.inputs.base_name = "foo.dti.nii" dti.inputs.mask = filelist[1] dti.inputs.bvecs = filelist[0] dti.inputs.bvals = filelist[1] dti.inputs.min_z = 10 dti.inputs.max_z = 50 assert ( dti.cmdline == "dtifit -k %s -o foo.dti.nii -m %s -r %s -b %s -Z 50 -z 10" % (filelist[0], filelist[1], filelist[0], filelist[1]) ) @pytest.mark.xfail(reason="These tests are skipped until we clean up some of this code") def test_randomise2(): rand = fsl.Randomise() # make sure command gets called assert rand.cmd == "randomise" # test raising error with mandatory args absent with pytest.raises(ValueError): rand.run() # .inputs based parameters setting rand.inputs.input_4D = "infile.nii" rand.inputs.output_rootname = "outfile" rand.inputs.design_matrix = "design.mat" rand.inputs.t_contrast = "infile.con" actualCmdline = sorted(rand.cmdline.split()) cmd = "randomise -i infile.nii -o outfile -d design.mat -t infile.con" desiredCmdline = sorted(cmd.split()) assert actualCmdline == desiredCmdline # .run based parameter setting rand2 = fsl.Randomise( input_4D="infile2", output_rootname="outfile2", f_contrast="infile.f", one_sample_gmean=True, int_seed=4, ) actualCmdline = sorted(rand2.cmdline.split()) cmd = "randomise -i infile2 -o outfile2 -1 -f infile.f --seed=4" desiredCmdline = sorted(cmd.split()) assert actualCmdline == desiredCmdline rand3 = fsl.Randomise() results = rand3.run(input_4D="infile3", output_rootname="outfile3") assert results.runtime.cmdline == "randomise -i infile3 -o 
outfile3" # test arguments for opt_map opt_map = { "demean_data": ("-D", True), "one_sample_gmean": ("-1", True), "mask_image": ("-m inp_mask", "inp_mask"), "design_matrix": ("-d design.mat", "design.mat"), "t_contrast": ("-t input.con", "input.con"), "f_contrast": ("-f input.fts", "input.fts"), "xchange_block_labels": ("-e design.grp", "design.grp"), "print_unique_perm": ("-q", True), "print_info_parallelMode": ("-Q", True), "num_permutations": ("-n 10", 10), "vox_pvalus": ("-x", True), "fstats_only": ("--fonly", True), "thresh_free_cluster": ("-T", True), "thresh_free_cluster_2Dopt": ("--T2", True), "cluster_thresholding": ("-c 0.20", 0.20), "cluster_mass_thresholding": ("-C 0.40", 0.40), "fcluster_thresholding": ("-F 0.10", 0.10), "fcluster_mass_thresholding": ("-S 0.30", 0.30), "variance_smoothing": ("-v 0.20", 0.20), "diagnostics_off": ("--quiet", True), "output_raw": ("-R", True), "output_perm_vect": ("-P", True), "int_seed": ("--seed=20", 20), "TFCE_height_param": ("--tfce_H=0.11", 0.11), "TFCE_extent_param": ("--tfce_E=0.50", 0.50), "TFCE_connectivity": ("--tfce_C=0.30", 0.30), "list_num_voxel_EVs_pos": ("--vxl=1,2,3,4", "1,2,3,4"), "list_img_voxel_EVs": ("--vxf=6,7,8,9,3", "6,7,8,9,3"), } for name, settings in list(opt_map.items()): rand4 = fsl.Randomise( input_4D="infile", output_rootname="root", **{name: settings[1]} ) assert rand4.cmdline == rand4.cmd + " -i infile -o root " + settings[0] @pytest.mark.xfail(reason="These tests are skipped until we clean up some of this code") def test_Randomise_parallel(): rand = fsl.Randomise_parallel() # make sure command gets called assert rand.cmd == "randomise_parallel" # test raising error with mandatory args absent with pytest.raises(ValueError): rand.run() # .inputs based parameters setting rand.inputs.input_4D = "infile.nii" rand.inputs.output_rootname = "outfile" rand.inputs.design_matrix = "design.mat" rand.inputs.t_contrast = "infile.con" actualCmdline = sorted(rand.cmdline.split()) cmd = "randomise_parallel 
-i infile.nii -o outfile -d design.mat -t " "infile.con" desiredCmdline = sorted(cmd.split()) assert actualCmdline == desiredCmdline # .run based parameter setting rand2 = fsl.Randomise_parallel( input_4D="infile2", output_rootname="outfile2", f_contrast="infile.f", one_sample_gmean=True, int_seed=4, ) actualCmdline = sorted(rand2.cmdline.split()) cmd = "randomise_parallel -i infile2 -o outfile2 -1 -f infile.f --seed=4" desiredCmdline = sorted(cmd.split()) assert actualCmdline == desiredCmdline rand3 = fsl.Randomise_parallel() results = rand3.run(input_4D="infile3", output_rootname="outfile3") assert results.runtime.cmdline == "randomise_parallel -i infile3 -o outfile3" # test arguments for opt_map opt_map = { "demean_data": ("-D", True), "one_sample_gmean": ("-1", True), "mask_image": ("-m inp_mask", "inp_mask"), "design_matrix": ("-d design.mat", "design.mat"), "t_contrast": ("-t input.con", "input.con"), "f_contrast": ("-f input.fts", "input.fts"), "xchange_block_labels": ("-e design.grp", "design.grp"), "print_unique_perm": ("-q", True), "print_info_parallelMode": ("-Q", True), "num_permutations": ("-n 10", 10), "vox_pvalus": ("-x", True), "fstats_only": ("--fonly", True), "thresh_free_cluster": ("-T", True), "thresh_free_cluster_2Dopt": ("--T2", True), "cluster_thresholding": ("-c 0.20", 0.20), "cluster_mass_thresholding": ("-C 0.40", 0.40), "fcluster_thresholding": ("-F 0.10", 0.10), "fcluster_mass_thresholding": ("-S 0.30", 0.30), "variance_smoothing": ("-v 0.20", 0.20), "diagnostics_off": ("--quiet", True), "output_raw": ("-R", True), "output_perm_vect": ("-P", True), "int_seed": ("--seed=20", 20), "TFCE_height_param": ("--tfce_H=0.11", 0.11), "TFCE_extent_param": ("--tfce_E=0.50", 0.50), "TFCE_connectivity": ("--tfce_C=0.30", 0.30), "list_num_voxel_EVs_pos": ("--vxl=" + repr([1, 2, 3, 4]), repr([1, 2, 3, 4])), "list_img_voxel_EVs": ("--vxf=" + repr([6, 7, 8, 9, 3]), repr([6, 7, 8, 9, 3])), } for name, settings in list(opt_map.items()): rand4 = 
fsl.Randomise_parallel( input_4D="infile", output_rootname="root", **{name: settings[1]} ) assert rand4.cmdline == rand4.cmd + " -i infile -o root " + settings[0] # test proj_thresh @pytest.mark.xfail(reason="These tests are skipped until we clean up some of this code") def test_Proj_thresh(): proj = fsl.ProjThresh() # make sure command gets called assert proj.cmd == "proj_thresh" # test raising error with mandatory args absent with pytest.raises(ValueError): proj.run() # .inputs based parameters setting proj.inputs.volumes = ["vol1", "vol2", "vol3"] proj.inputs.threshold = 3 assert proj.cmdline == "proj_thresh vol1 vol2 vol3 3" proj2 = fsl.ProjThresh(threshold=10, volumes=["vola", "volb"]) assert proj2.cmdline == "proj_thresh vola volb 10" # .run based parameters setting proj3 = fsl.ProjThresh() results = proj3.run(volumes=["inp1", "inp3", "inp2"], threshold=2) assert results.runtime.cmdline == "proj_thresh inp1 inp3 inp2 2" assert results.runtime.returncode != 0 assert isinstance(results.interface.inputs.volumes, list) assert results.interface.inputs.threshold == 2 # test arguments for opt_map # Proj_thresh doesn't have an opt_map{} # test vec_reg @pytest.mark.xfail(reason="These tests are skipped until we clean up some of this code") def test_Vec_reg(): vrg = fsl.VecReg() # make sure command gets called assert vrg.cmd == "vecreg" # test raising error with mandatory args absent with pytest.raises(ValueError): vrg.run() # .inputs based parameters setting vrg.inputs.infile = "infile" vrg.inputs.outfile = "outfile" vrg.inputs.refVolName = "MNI152" vrg.inputs.affineTmat = "tmat.mat" assert vrg.cmdline == "vecreg -i infile -o outfile -r MNI152 -t tmat.mat" # .run based parameter setting vrg2 = fsl.VecReg( infile="infile2", outfile="outfile2", refVolName="MNI152", affineTmat="tmat2.mat", brainMask="nodif_brain_mask", ) actualCmdline = sorted(vrg2.cmdline.split()) cmd = "vecreg -i infile2 -o outfile2 -r MNI152 -t tmat2.mat -m nodif_brain_mask" desiredCmdline = 
sorted(cmd.split()) assert actualCmdline == desiredCmdline vrg3 = fsl.VecReg() results = vrg3.run( infile="infile3", outfile="outfile3", refVolName="MNI152", affineTmat="tmat3.mat", ) assert ( results.runtime.cmdline == "vecreg -i infile3 -o outfile3 -r MNI152 -t tmat3.mat" ) assert results.runtime.returncode != 0 assert results.interface.inputs.infile == "infile3" assert results.interface.inputs.outfile == "outfile3" assert results.interface.inputs.refVolName == "MNI152" assert results.interface.inputs.affineTmat == "tmat3.mat" # test arguments for opt_map opt_map = { "verbose": ("-v", True), "helpDoc": ("-h", True), "tensor": ("--tensor", True), "affineTmat": ("-t Tmat", "Tmat"), "warpFile": ("-w wrpFile", "wrpFile"), "interpolation": ("--interp=sinc", "sinc"), "brainMask": ("-m mask", "mask"), } for name, settings in list(opt_map.items()): vrg4 = fsl.VecReg( infile="infile", outfile="outfile", refVolName="MNI152", **{name: settings[1]} ) assert ( vrg4.cmdline == vrg4.cmd + " -i infile -o outfile -r MNI152 " + settings[0] ) # test find_the_biggest @pytest.mark.xfail(reason="These tests are skipped until we clean up some of this code") def test_Find_the_biggest(): fbg = fsl.FindTheBiggest() # make sure command gets called assert fbg.cmd == "find_the_biggest" # test raising error with mandatory args absent with pytest.raises(ValueError): fbg.run() # .inputs based parameters setting fbg.inputs.infiles = "seed*" fbg.inputs.outfile = "fbgfile" assert fbg.cmdline == "find_the_biggest seed* fbgfile" fbg2 = fsl.FindTheBiggest(infiles="seed2*", outfile="fbgfile2") assert fbg2.cmdline == "find_the_biggest seed2* fbgfile2" # .run based parameters setting fbg3 = fsl.FindTheBiggest() results = fbg3.run(infiles="seed3", outfile="out3") assert results.runtime.cmdline == "find_the_biggest seed3 out3" # test arguments for opt_map # Find_the_biggest doesn't have an opt_map{} @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def 
test_tbss_skeleton(create_files_in_directory): skeletor = fsl.TractSkeleton() files, newdir = create_files_in_directory # Test the underlying command assert skeletor.cmd == "tbss_skeleton" # It shouldn't run yet with pytest.raises(ValueError): skeletor.run() # Test the most basic way to use it skeletor.inputs.in_file = files[0] # First by implicit argument skeletor.inputs.skeleton_file = True assert skeletor.cmdline == "tbss_skeleton -i a.nii -o %s" % os.path.join( newdir, "a_skeleton.nii" ) # Now with a specific name skeletor.inputs.skeleton_file = "old_boney.nii" assert skeletor.cmdline == "tbss_skeleton -i a.nii -o old_boney.nii" # Now test the more complicated usage bones = fsl.TractSkeleton(in_file="a.nii", project_data=True) # This should error with pytest.raises(ValueError): bones.run() # But we can set what we need bones.inputs.threshold = 0.2 bones.inputs.distance_map = "b.nii" bones.inputs.data_file = "b.nii" # Even though that's silly # Now we get a command line assert bones.cmdline == "tbss_skeleton -i a.nii -p 0.200 b.nii %s b.nii %s" % ( Info.standard_image("LowerCingulum_1mm.nii.gz"), os.path.join(newdir, "b_skeletonised.nii"), ) # Can we specify a mask? bones.inputs.use_cingulum_mask = Undefined bones.inputs.search_mask_file = "a.nii" assert ( bones.cmdline == "tbss_skeleton -i a.nii -p 0.200 b.nii a.nii b.nii %s" % os.path.join(newdir, "b_skeletonised.nii") ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_distancemap(create_files_in_directory): mapper = fsl.DistanceMap() files, newdir = create_files_in_directory # Test the underlying command assert mapper.cmd == "distancemap" # It shouldn't run yet with pytest.raises(ValueError): mapper.run() # But if we do this... 
mapper.inputs.in_file = "a.nii" # It should assert mapper.cmdline == "distancemap --out=%s --in=a.nii" % os.path.join( newdir, "a_dstmap.nii" ) # And we should be able to write out a maxima map mapper.inputs.local_max_file = True assert mapper.cmdline == "distancemap --out=%s --in=a.nii --localmax=%s" % ( os.path.join(newdir, "a_dstmap.nii"), os.path.join(newdir, "a_lclmax.nii"), ) # And call it whatever we want mapper.inputs.local_max_file = "max.nii" assert ( mapper.cmdline == "distancemap --out=%s --in=a.nii --localmax=max.nii" % os.path.join(newdir, "a_dstmap.nii") ) nipype-1.7.0/nipype/interfaces/fsl/tests/test_epi.py000066400000000000000000000022641413403311400225640ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import pytest from nipype.testing.fixtures import create_files_in_directory import nipype.interfaces.fsl.epi as fsl from nipype.interfaces.fsl import no_fsl # test eddy_correct @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_eddy_correct2(create_files_in_directory): filelist, outdir = create_files_in_directory eddy = fsl.EddyCorrect() # make sure command gets called assert eddy.cmd == "eddy_correct" # test raising error with mandatory args absent with pytest.raises(ValueError): eddy.run() # .inputs based parameters setting eddy.inputs.in_file = filelist[0] eddy.inputs.out_file = "foo_eddc.nii" eddy.inputs.ref_num = 100 assert eddy.cmdline == "eddy_correct %s foo_eddc.nii 100" % filelist[0] # .run based parameter setting eddy2 = fsl.EddyCorrect(in_file=filelist[0], out_file="foo_ec.nii", ref_num=20) assert eddy2.cmdline == "eddy_correct %s foo_ec.nii 20" % filelist[0] # test arguments for opt_map # eddy_correct class doesn't have opt_map{} nipype-1.7.0/nipype/interfaces/fsl/tests/test_maths.py000066400000000000000000000406311413403311400231230ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: 
-*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import numpy as np from nipype.interfaces.base import Undefined import nipype.interfaces.fsl.maths as fsl from nipype.interfaces.fsl import no_fsl import pytest from nipype.testing.fixtures import create_files_in_directory_plus_output_type @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_maths_base(create_files_in_directory_plus_output_type): files, testdir, out_ext = create_files_in_directory_plus_output_type # Get some fslmaths maths = fsl.MathsCommand() # Test that we got what we wanted assert maths.cmd == "fslmaths" # Test that it needs a mandatory argument with pytest.raises(ValueError): maths.run() # Set an in file maths.inputs.in_file = "a.nii" out_file = "a_maths{}".format(out_ext) # Now test the most basic command line assert maths.cmdline == "fslmaths a.nii {}".format(os.path.join(testdir, out_file)) # Now test that we can set the various data types dtypes = ["float", "char", "int", "short", "double", "input"] int_cmdline = "fslmaths -dt {} a.nii " + os.path.join(testdir, out_file) out_cmdline = "fslmaths a.nii " + os.path.join(testdir, out_file) + " -odt {}" duo_cmdline = ( "fslmaths -dt {} a.nii " + os.path.join(testdir, out_file) + " -odt {}" ) for dtype in dtypes: foo = fsl.MathsCommand(in_file="a.nii", internal_datatype=dtype) assert foo.cmdline == int_cmdline.format(dtype) bar = fsl.MathsCommand(in_file="a.nii", output_datatype=dtype) assert bar.cmdline == out_cmdline.format(dtype) foobar = fsl.MathsCommand( in_file="a.nii", internal_datatype=dtype, output_datatype=dtype ) assert foobar.cmdline == duo_cmdline.format(dtype, dtype) # Test that we can ask for an outfile name maths.inputs.out_file = "b.nii" assert maths.cmdline == "fslmaths a.nii b.nii" @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_changedt(create_files_in_directory_plus_output_type): files, testdir, out_ext = 
create_files_in_directory_plus_output_type # Get some fslmaths cdt = fsl.ChangeDataType() # Test that we got what we wanted assert cdt.cmd == "fslmaths" # Test that it needs a mandatory argument with pytest.raises(ValueError): cdt.run() # Set an in file and out file cdt.inputs.in_file = "a.nii" cdt.inputs.out_file = "b.nii" # But it still shouldn't work with pytest.raises(ValueError): cdt.run() # Now test that we can set the various data types dtypes = ["float", "char", "int", "short", "double", "input"] cmdline = "fslmaths a.nii b.nii -odt {}" for dtype in dtypes: foo = fsl.MathsCommand(in_file="a.nii", out_file="b.nii", output_datatype=dtype) assert foo.cmdline == cmdline.format(dtype) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_threshold(create_files_in_directory_plus_output_type): files, testdir, out_ext = create_files_in_directory_plus_output_type # Get the command thresh = fsl.Threshold(in_file="a.nii", out_file="b.nii") # Test the underlying command assert thresh.cmd == "fslmaths" # Test mandtory args with pytest.raises(ValueError): thresh.run() # Test the various opstrings cmdline = "fslmaths a.nii {} b.nii" for val in [0, 0.0, -1, -1.5, -0.5, 0.5, 3, 400, 400.5]: thresh.inputs.thresh = val assert thresh.cmdline == cmdline.format("-thr {:.10f}".format(val)) val = "{:.10f}".format(42) thresh = fsl.Threshold( in_file="a.nii", out_file="b.nii", thresh=42, use_robust_range=True ) assert thresh.cmdline == cmdline.format("-thrp " + val) thresh.inputs.use_nonzero_voxels = True assert thresh.cmdline == cmdline.format("-thrP " + val) thresh = fsl.Threshold( in_file="a.nii", out_file="b.nii", thresh=42, direction="above" ) assert thresh.cmdline == cmdline.format("-uthr " + val) thresh.inputs.use_robust_range = True assert thresh.cmdline == cmdline.format("-uthrp " + val) thresh.inputs.use_nonzero_voxels = True assert thresh.cmdline == cmdline.format("-uthrP " + val) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def 
test_meanimage(create_files_in_directory_plus_output_type): files, testdir, out_ext = create_files_in_directory_plus_output_type # Get the command meaner = fsl.MeanImage(in_file="a.nii", out_file="b.nii") # Test the underlying command assert meaner.cmd == "fslmaths" # Test the defualt opstring assert meaner.cmdline == "fslmaths a.nii -Tmean b.nii" # Test the other dimensions cmdline = "fslmaths a.nii -{}mean b.nii" for dim in ["X", "Y", "Z", "T"]: meaner.inputs.dimension = dim assert meaner.cmdline == cmdline.format(dim) # Test the auto naming meaner = fsl.MeanImage(in_file="a.nii") assert meaner.cmdline == "fslmaths a.nii -Tmean {}".format( os.path.join(testdir, "a_mean{}".format(out_ext)) ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_stdimage(create_files_in_directory_plus_output_type): files, testdir, out_ext = create_files_in_directory_plus_output_type # Get the command stder = fsl.StdImage(in_file="a.nii", out_file="b.nii") # Test the underlying command assert stder.cmd == "fslmaths" # Test the defualt opstring assert stder.cmdline == "fslmaths a.nii -Tstd b.nii" # Test the other dimensions cmdline = "fslmaths a.nii -{}std b.nii" for dim in ["X", "Y", "Z", "T"]: stder.inputs.dimension = dim assert stder.cmdline == cmdline.format(dim) # Test the auto naming stder = fsl.StdImage(in_file="a.nii", output_type="NIFTI") assert stder.cmdline == "fslmaths a.nii -Tstd {}".format( os.path.join(testdir, "a_std.nii") ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_maximage(create_files_in_directory_plus_output_type): files, testdir, out_ext = create_files_in_directory_plus_output_type # Get the command maxer = fsl.MaxImage(in_file="a.nii", out_file="b.nii") # Test the underlying command assert maxer.cmd == "fslmaths" # Test the defualt opstring assert maxer.cmdline == "fslmaths a.nii -Tmax b.nii" # Test the other dimensions cmdline = "fslmaths a.nii -{}max b.nii" for dim in ["X", "Y", "Z", "T"]: maxer.inputs.dimension = 
dim assert maxer.cmdline == cmdline.format(dim) # Test the auto naming maxer = fsl.MaxImage(in_file="a.nii") assert maxer.cmdline == "fslmaths a.nii -Tmax {}".format( os.path.join(testdir, "a_max{}".format(out_ext)) ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_smooth(create_files_in_directory_plus_output_type): files, testdir, out_ext = create_files_in_directory_plus_output_type # Get the command smoother = fsl.IsotropicSmooth(in_file="a.nii", out_file="b.nii") # Test the underlying command assert smoother.cmd == "fslmaths" # Test that smoothing kernel is mandatory with pytest.raises(ValueError): smoother.run() # Test smoothing kernels cmdline = "fslmaths a.nii -s {:.5f} b.nii" for val in [0, 1.0, 1, 25, 0.5, 8 / 3.0]: smoother = fsl.IsotropicSmooth(in_file="a.nii", out_file="b.nii", sigma=val) assert smoother.cmdline == cmdline.format(val) smoother = fsl.IsotropicSmooth(in_file="a.nii", out_file="b.nii", fwhm=val) val = float(val) / np.sqrt(8 * np.log(2)) assert smoother.cmdline == cmdline.format(val) # Test automatic naming smoother = fsl.IsotropicSmooth(in_file="a.nii", sigma=5) assert smoother.cmdline == "fslmaths a.nii -s {:.5f} {}".format( 5, os.path.join(testdir, "a_smooth{}".format(out_ext)) ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_mask(create_files_in_directory_plus_output_type): files, testdir, out_ext = create_files_in_directory_plus_output_type # Get the command masker = fsl.ApplyMask(in_file="a.nii", out_file="c.nii") # Test the underlying command assert masker.cmd == "fslmaths" # Test that the mask image is mandatory with pytest.raises(ValueError): masker.run() # Test setting the mask image masker.inputs.mask_file = "b.nii" assert masker.cmdline == "fslmaths a.nii -mas b.nii c.nii" # Test auto name generation masker = fsl.ApplyMask(in_file="a.nii", mask_file="b.nii") assert masker.cmdline == "fslmaths a.nii -mas b.nii " + os.path.join( testdir, "a_masked{}".format(out_ext) ) 
@pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_dilation(create_files_in_directory_plus_output_type): files, testdir, out_ext = create_files_in_directory_plus_output_type # Get the command diller = fsl.DilateImage(in_file="a.nii", out_file="b.nii") # Test the underlying command assert diller.cmd == "fslmaths" # Test that the dilation operation is mandatory with pytest.raises(ValueError): diller.run() # Test the different dilation operations for op in ["mean", "modal", "max"]: cv = dict(mean="M", modal="D", max="F") diller.inputs.operation = op assert diller.cmdline == "fslmaths a.nii -dil{} b.nii".format(cv[op]) # Now test the different kernel options for k in ["3D", "2D", "box", "boxv", "gauss", "sphere"]: for size in [1, 1.5, 5]: diller.inputs.kernel_shape = k diller.inputs.kernel_size = size assert ( diller.cmdline == "fslmaths a.nii -kernel {} {:.4f} -dilF b.nii".format(k, size) ) # Test that we can use a file kernel f = open("kernel.txt", "w").close() del f # Shut pyflakes up diller.inputs.kernel_shape = "file" diller.inputs.kernel_size = Undefined diller.inputs.kernel_file = "kernel.txt" assert diller.cmdline == "fslmaths a.nii -kernel file kernel.txt -dilF b.nii" # Test that we don't need to request an out name dil = fsl.DilateImage(in_file="a.nii", operation="max") assert dil.cmdline == "fslmaths a.nii -dilF {}".format( os.path.join(testdir, "a_dil{}".format(out_ext)) ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_erosion(create_files_in_directory_plus_output_type): files, testdir, out_ext = create_files_in_directory_plus_output_type # Get the command erode = fsl.ErodeImage(in_file="a.nii", out_file="b.nii") # Test the underlying command assert erode.cmd == "fslmaths" # Test the basic command line assert erode.cmdline == "fslmaths a.nii -ero b.nii" # Test that something else happens when you minimum filter erode.inputs.minimum_filter = True assert erode.cmdline == "fslmaths a.nii -eroF b.nii" # Test that we 
don't need to request an out name erode = fsl.ErodeImage(in_file="a.nii") assert erode.cmdline == "fslmaths a.nii -ero {}".format( os.path.join(testdir, "a_ero{}".format(out_ext)) ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_spatial_filter(create_files_in_directory_plus_output_type): files, testdir, out_ext = create_files_in_directory_plus_output_type # Get the command filter = fsl.SpatialFilter(in_file="a.nii", out_file="b.nii") # Test the underlying command assert filter.cmd == "fslmaths" # Test that it fails without an operation with pytest.raises(ValueError): filter.run() # Test the different operations for op in ["mean", "meanu", "median"]: filter.inputs.operation = op assert filter.cmdline == "fslmaths a.nii -f{} b.nii".format(op) # Test that we don't need to ask for an out name filter = fsl.SpatialFilter(in_file="a.nii", operation="mean") assert filter.cmdline == "fslmaths a.nii -fmean {}".format( os.path.join(testdir, "a_filt{}".format(out_ext)) ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_unarymaths(create_files_in_directory_plus_output_type): files, testdir, out_ext = create_files_in_directory_plus_output_type # Get the command maths = fsl.UnaryMaths(in_file="a.nii", out_file="b.nii") # Test the underlying command assert maths.cmd == "fslmaths" # Test that it fails without an operation with pytest.raises(ValueError): maths.run() # Test the different operations ops = ["exp", "log", "sin", "cos", "sqr", "sqrt", "recip", "abs", "bin", "index"] for op in ops: maths.inputs.operation = op assert maths.cmdline == "fslmaths a.nii -{} b.nii".format(op) # Test that we don't need to ask for an out file for op in ops: maths = fsl.UnaryMaths(in_file="a.nii", operation=op) assert maths.cmdline == "fslmaths a.nii -{} {}".format( op, os.path.join(testdir, "a_{}{}".format(op, out_ext)) ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_binarymaths(create_files_in_directory_plus_output_type): files, 
testdir, out_ext = create_files_in_directory_plus_output_type # Get the command maths = fsl.BinaryMaths(in_file="a.nii", out_file="c.nii") # Test the underlying command assert maths.cmd == "fslmaths" # Test that it fails without an operation an with pytest.raises(ValueError): maths.run() # Test the different operations ops = ["add", "sub", "mul", "div", "rem", "min", "max"] operands = ["b.nii", -2, -0.5, 0, 0.123456, np.pi, 500] for op in ops: for ent in operands: maths = fsl.BinaryMaths(in_file="a.nii", out_file="c.nii", operation=op) if ent == "b.nii": maths.inputs.operand_file = ent assert maths.cmdline == "fslmaths a.nii -{} b.nii c.nii".format(op) else: maths.inputs.operand_value = ent assert maths.cmdline == "fslmaths a.nii -{} {:.8f} c.nii".format( op, ent ) # Test that we don't need to ask for an out file for op in ops: maths = fsl.BinaryMaths(in_file="a.nii", operation=op, operand_file="b.nii") assert maths.cmdline == "fslmaths a.nii -{} b.nii {}".format( op, os.path.join(testdir, "a_maths{}".format(out_ext)) ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_multimaths(create_files_in_directory_plus_output_type): files, testdir, out_ext = create_files_in_directory_plus_output_type # Get the command maths = fsl.MultiImageMaths(in_file="a.nii", out_file="c.nii") # Test the underlying command assert maths.cmd == "fslmaths" # Test that it fails without an operation an with pytest.raises(ValueError): maths.run() # Test a few operations maths.inputs.operand_files = ["a.nii", "b.nii"] opstrings = ["-add %s -div %s", "-max 1 -sub %s -min %s", "-mas %s -add %s"] for ostr in opstrings: maths.inputs.op_string = ostr assert maths.cmdline == "fslmaths a.nii %s c.nii" % ostr % ("a.nii", "b.nii") # Test that we don't need to ask for an out file maths = fsl.MultiImageMaths( in_file="a.nii", op_string="-add %s -mul 5", operand_files=["b.nii"] ) assert maths.cmdline == "fslmaths a.nii -add b.nii -mul 5 %s" % os.path.join( testdir, "a_maths%s" % out_ext 
) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_tempfilt(create_files_in_directory_plus_output_type): files, testdir, out_ext = create_files_in_directory_plus_output_type # Get the command filt = fsl.TemporalFilter(in_file="a.nii", out_file="b.nii") # Test the underlying command assert filt.cmd == "fslmaths" # Test that both filters are initialized off assert filt.cmdline == "fslmaths a.nii -bptf -1.000000 -1.000000 b.nii" # Test some filters windows = [(-1, -1), (0.1, 0.1), (-1, 20), (20, -1), (128, 248)] for win in windows: filt.inputs.highpass_sigma = win[0] filt.inputs.lowpass_sigma = win[1] assert filt.cmdline == "fslmaths a.nii -bptf {:.6f} {:.6f} b.nii".format( win[0], win[1] ) # Test that we don't need to ask for an out file filt = fsl.TemporalFilter(in_file="a.nii", highpass_sigma=64) assert filt.cmdline == "fslmaths a.nii -bptf 64.000000 -1.000000 {}".format( os.path.join(testdir, "a_filt{}".format(out_ext)) ) nipype-1.7.0/nipype/interfaces/fsl/tests/test_model.py000066400000000000000000000040201413403311400230770ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import pytest import nipype.interfaces.fsl.model as fsl from nipype.interfaces.fsl import no_fsl from pathlib import Path from ....pipeline import engine as pe @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_MultipleRegressDesign(tmpdir): designer = pe.Node( fsl.MultipleRegressDesign(), name="designer", base_dir=str(tmpdir) ) designer.inputs.regressors = dict( voice_stenght=[1, 1, 1], age=[0.2, 0.4, 0.5], BMI=[1, -1, 2] ) con1 = ["voice_and_age", "T", ["age", "voice_stenght"], [0.5, 0.5]] con2 = ["just_BMI", "T", ["BMI"], [1]] designer.inputs.contrasts = [ con1, con2, ["con3", "F", [con1, con2]], ["con4", "F", [con2]], ] res = designer.run() outputs = res.outputs.get_traitsfree() for ftype in ["mat", "con", "fts", "grp"]: 
assert Path(outputs["design_" + ftype]).exists() expected_content = {} expected_content[ "design_mat" ] = """/NumWaves 3 /NumPoints 3 /PPheights 3.000000e+00 5.000000e-01 1.000000e+00 /Matrix 1.000000e+00 2.000000e-01 1.000000e+00 -1.000000e+00 4.000000e-01 1.000000e+00 2.000000e+00 5.000000e-01 1.000000e+00 """ expected_content[ "design_con" ] = """/ContrastName1 voice_and_age /ContrastName2 just_BMI /NumWaves 3 /NumContrasts 2 /PPheights 1.000000e+00 1.000000e+00 /RequiredEffect 100.000 100.000 /Matrix 0.000000e+00 5.000000e-01 5.000000e-01 1.000000e+00 0.000000e+00 0.000000e+00 """ expected_content[ "design_fts" ] = """/NumWaves 2 /NumContrasts 2 /Matrix 1 1 0 1 """ expected_content[ "design_grp" ] = """/NumWaves 1 /NumPoints 3 /Matrix 1 1 1 """ for ftype in ["mat", "con", "fts", "grp"]: outfile = "design_" + ftype assert Path(outputs[outfile]).read_text() == expected_content[outfile] nipype-1.7.0/nipype/interfaces/fsl/tests/test_preprocess.py000066400000000000000000000540251413403311400241760ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os from copy import deepcopy import pytest import pdb from nipype.utils.filemanip import split_filename, ensure_list from .. 
import preprocess as fsl from nipype.interfaces.fsl import Info from nipype.interfaces.base import File, TraitError, Undefined, isdefined from nipype.interfaces.fsl import no_fsl def fsl_name(obj, fname): """Create valid fsl name, including file extension for output type.""" ext = Info.output_type_to_ext(obj.inputs.output_type) return fname + ext @pytest.fixture() def setup_infile(tmpdir): ext = Info.output_type_to_ext(Info.output_type()) tmp_infile = tmpdir.join("foo" + ext) tmp_infile.open("w") return (tmp_infile.strpath, tmpdir.strpath) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_bet(setup_infile): tmp_infile, tp_dir = setup_infile # BET converts the in_file path to be relative to prevent # failure with long paths. tmp_infile = os.path.relpath(tmp_infile, start=os.getcwd()) better = fsl.BET() assert better.cmd == "bet" # Test raising error with mandatory args absent with pytest.raises(ValueError): better.run() # Test generated outfile name better.inputs.in_file = tmp_infile outfile = fsl_name(better, "foo_brain") realcmd = "bet %s %s" % (tmp_infile, outfile) assert better.cmdline == realcmd # Test specified outfile name outfile = fsl_name(better, "/newdata/bar") better.inputs.out_file = outfile realcmd = "bet %s %s" % (tmp_infile, outfile) assert better.cmdline == realcmd # infile foo.nii doesn't exist def func(): better.run(in_file="foo2.nii", out_file="bar.nii") with pytest.raises(TraitError): func() # Our options and some test values for them # Should parallel the opt_map structure in the class for clarity opt_map = { "outline": ("-o", True), "mask": ("-m", True), "skull": ("-s", True), "no_output": ("-n", True), "frac": ("-f 0.40", 0.4), "vertical_gradient": ("-g 0.75", 0.75), "radius": ("-r 20", 20), "center": ("-c 54 75 80", [54, 75, 80]), "threshold": ("-t", True), "mesh": ("-e", True), "surfaces": ("-A", True) # 'verbose': ('-v', True), # 'flags': ('--i-made-this-up', '--i-made-this-up'), } # Currently we don't test -R, -S, -B, 
-Z, -F, -A or -A2 # test each of our arguments better = fsl.BET() outfile = fsl_name(better, "foo_brain") for name, settings in list(opt_map.items()): better = fsl.BET(**{name: settings[1]}) # Add mandatory input better.inputs.in_file = tmp_infile realcmd = " ".join([better.cmd, tmp_infile, outfile, settings[0]]) assert better.cmdline == realcmd # test fast @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_fast(setup_infile): tmp_infile, tp_dir = setup_infile faster = fsl.FAST() faster.inputs.verbose = True fasted = fsl.FAST(in_files=tmp_infile, verbose=True) fasted2 = fsl.FAST(in_files=[tmp_infile, tmp_infile], verbose=True) assert faster.cmd == "fast" assert faster.inputs.verbose assert faster.inputs.manual_seg == Undefined assert faster.inputs != fasted.inputs assert fasted.cmdline == "fast -v -S 1 %s" % (tmp_infile) assert fasted2.cmdline == "fast -v -S 2 %s %s" % (tmp_infile, tmp_infile) faster = fsl.FAST() faster.inputs.in_files = tmp_infile assert faster.cmdline == "fast -S 1 %s" % (tmp_infile) faster.inputs.in_files = [tmp_infile, tmp_infile] assert faster.cmdline == "fast -S 2 %s %s" % (tmp_infile, tmp_infile) # Our options and some test values for them # Should parallel the opt_map structure in the class for clarity opt_map = { "number_classes": ("-n 4", 4), "bias_iters": ("-I 5", 5), "bias_lowpass": ("-l 15", 15), "img_type": ("-t 2", 2), "init_seg_smooth": ("-f 0.035", 0.035), "segments": ("-g", True), "init_transform": ("-a %s" % (tmp_infile), "%s" % (tmp_infile)), "other_priors": ( "-A %s %s %s" % (tmp_infile, tmp_infile, tmp_infile), (["%s" % (tmp_infile), "%s" % (tmp_infile), "%s" % (tmp_infile)]), ), "no_pve": ("--nopve", True), "output_biasfield": ("-b", True), "output_biascorrected": ("-B", True), "no_bias": ("-N", True), "out_basename": ("-o fasted", "fasted"), "use_priors": ("-P", True), "segment_iters": ("-W 14", 14), "mixel_smooth": ("-R 0.25", 0.25), "iters_afterbias": ("-O 3", 3), "hyper": ("-H 0.15", 0.15), "verbose": 
("-v", True), "manual_seg": ("-s %s" % (tmp_infile), "%s" % (tmp_infile)), "probability_maps": ("-p", True), } # test each of our arguments for name, settings in list(opt_map.items()): faster = fsl.FAST(in_files=tmp_infile, **{name: settings[1]}) assert faster.cmdline == " ".join( [faster.cmd, settings[0], "-S 1 %s" % tmp_infile] ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_fast_list_outputs(setup_infile, tmpdir): """By default (no -o), FSL's fast command outputs files into the same directory as the input files. If the flag -o is set, it outputs files into the cwd""" def _run_and_test(opts, output_base): outputs = fsl.FAST(**opts)._list_outputs() for output in outputs.values(): if output: for filename in ensure_list(output): assert os.path.realpath(filename).startswith( os.path.realpath(output_base) ) # set up tmp_infile, indir = setup_infile cwd = tmpdir.mkdir("new") cwd.chdir() assert indir != cwd.strpath out_basename = "a_basename" # run and test opts = {"in_files": tmp_infile} input_path, input_filename, input_ext = split_filename(tmp_infile) _run_and_test(opts, os.path.join(input_path, input_filename)) opts["out_basename"] = out_basename _run_and_test(opts, os.path.join(cwd.strpath, out_basename)) @pytest.fixture() def setup_flirt(tmpdir): ext = Info.output_type_to_ext(Info.output_type()) infile = tmpdir.join("infile" + ext) infile.open("w") reffile = tmpdir.join("reffile" + ext) reffile.open("w") return (tmpdir, infile.strpath, reffile.strpath) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_flirt(setup_flirt): # setup tmpdir, infile, reffile = setup_flirt flirter = fsl.FLIRT() assert flirter.cmd == "flirt" flirter.inputs.bins = 256 flirter.inputs.cost = "mutualinfo" flirted = fsl.FLIRT( in_file=infile, reference=reffile, out_file="outfile", out_matrix_file="outmat.mat", bins=256, cost="mutualinfo", ) flirt_est = fsl.FLIRT( in_file=infile, reference=reffile, out_matrix_file="outmat.mat", bins=256, 
cost="mutualinfo", ) assert flirter.inputs != flirted.inputs assert flirted.inputs != flirt_est.inputs assert flirter.inputs.bins == flirted.inputs.bins assert flirter.inputs.cost == flirt_est.inputs.cost realcmd = ( "flirt -in %s -ref %s -out outfile -omat outmat.mat " "-bins 256 -cost mutualinfo" % (infile, reffile) ) assert flirted.cmdline == realcmd flirter = fsl.FLIRT() # infile not specified with pytest.raises(ValueError): flirter.cmdline flirter.inputs.in_file = infile # reference not specified with pytest.raises(ValueError): flirter.cmdline flirter.inputs.reference = reffile # Generate outfile and outmatrix pth, fname, ext = split_filename(infile) outfile = fsl_name(flirter, "%s_flirt" % fname) outmat = "%s_flirt.mat" % fname realcmd = "flirt -in %s -ref %s -out %s -omat %s" % ( infile, reffile, outfile, outmat, ) assert flirter.cmdline == realcmd # test apply_xfm option axfm = deepcopy(flirter) axfm.inputs.apply_xfm = True # in_matrix_file or uses_qform must be defined with pytest.raises(RuntimeError): axfm.cmdline axfm2 = deepcopy(axfm) # test uses_qform axfm.inputs.uses_qform = True assert axfm.cmdline == (realcmd + " -applyxfm -usesqform") # test in_matrix_file axfm2.inputs.in_matrix_file = reffile assert axfm2.cmdline == (realcmd + " -applyxfm -init %s" % reffile) tmpfile = tmpdir.join("file4test.nii") tmpfile.open("w") # Loop over all inputs, set a reasonable value and make sure the # cmdline is updated correctly. 
for key, trait_spec in sorted(fsl.FLIRT.input_spec().traits().items()): # Skip mandatory inputs and the trait methods if key in ( "trait_added", "trait_modified", "in_file", "reference", "environ", "output_type", "out_file", "out_matrix_file", "in_matrix_file", "apply_xfm", "resource_monitor", "out_log", "save_log", ): continue param = None value = None if key == "args": param = "-v" value = "-v" elif isinstance(trait_spec.trait_type, File): value = tmpfile.strpath param = trait_spec.argstr % value elif trait_spec.default is False: param = trait_spec.argstr value = True elif key in ("searchr_x", "searchr_y", "searchr_z"): value = [-45, 45] param = trait_spec.argstr % " ".join(str(elt) for elt in value) else: value = trait_spec.default param = trait_spec.argstr % value cmdline = "flirt -in %s -ref %s" % (infile, reffile) # Handle autogeneration of outfile pth, fname, ext = split_filename(infile) outfile = fsl_name(fsl.FLIRT(), "%s_flirt" % fname) outfile = " ".join(["-out", outfile]) # Handle autogeneration of outmatrix outmatrix = "%s_flirt.mat" % fname outmatrix = " ".join(["-omat", outmatrix]) # Build command line cmdline = " ".join([cmdline, outfile, outmatrix, param]) flirter = fsl.FLIRT(in_file=infile, reference=reffile) setattr(flirter.inputs, key, value) assert flirter.cmdline == cmdline # Test OutputSpec flirter = fsl.FLIRT(in_file=infile, reference=reffile) pth, fname, ext = split_filename(infile) flirter.inputs.out_file = "".join(["foo", ext]) flirter.inputs.out_matrix_file = "".join(["bar", ext]) outs = flirter._list_outputs() assert outs["out_file"] == os.path.join(os.getcwd(), flirter.inputs.out_file) assert outs["out_matrix_file"] == os.path.join( os.getcwd(), flirter.inputs.out_matrix_file ) assert not isdefined(flirter.inputs.out_log) # Mcflirt @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_mcflirt(setup_flirt): tmpdir, infile, reffile = setup_flirt frt = fsl.MCFLIRT() assert frt.cmd == "mcflirt" # Test generated outfile name 
frt.inputs.in_file = infile _, nme = os.path.split(infile) outfile = os.path.join(os.getcwd(), nme) outfile = frt._gen_fname(outfile, suffix="_mcf") realcmd = "mcflirt -in " + infile + " -out " + outfile assert frt.cmdline == realcmd # Test specified outfile name outfile2 = "/newdata/bar.nii" frt.inputs.out_file = outfile2 realcmd = "mcflirt -in " + infile + " -out " + outfile2 assert frt.cmdline == realcmd @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_mcflirt_opt(setup_flirt): tmpdir, infile, reffile = setup_flirt _, nme = os.path.split(infile) opt_map = { "cost": ("-cost mutualinfo", "mutualinfo"), "bins": ("-bins 256", 256), "dof": ("-dof 6", 6), "ref_vol": ("-refvol 2", 2), "scaling": ("-scaling 6.00", 6.00), "smooth": ("-smooth 1.00", 1.00), "rotation": ("-rotation 2", 2), "stages": ("-stages 3", 3), "init": ("-init %s" % (infile), infile), "use_gradient": ("-gdt", True), "use_contour": ("-edge", True), "mean_vol": ("-meanvol", True), "stats_imgs": ("-stats", True), "save_mats": ("-mats", True), "save_plots": ("-plots", True), } for name, settings in list(opt_map.items()): fnt = fsl.MCFLIRT(in_file=infile, **{name: settings[1]}) outfile = os.path.join(os.getcwd(), nme) outfile = fnt._gen_fname(outfile, suffix="_mcf") instr = "-in %s" % (infile) outstr = "-out %s" % (outfile) if name in ("init", "cost", "dof", "mean_vol", "bins"): assert fnt.cmdline == " ".join([fnt.cmd, instr, settings[0], outstr]) else: assert fnt.cmdline == " ".join([fnt.cmd, instr, outstr, settings[0]]) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_mcflirt_noinput(): # Test error is raised when missing required args fnt = fsl.MCFLIRT() with pytest.raises(ValueError) as excinfo: fnt.run() assert str(excinfo.value).startswith("MCFLIRT requires a value for input 'in_file'") # test fnirt @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_fnirt(setup_flirt): tmpdir, infile, reffile = setup_flirt tmpdir.chdir() fnirt = fsl.FNIRT() 
assert fnirt.cmd == "fnirt" # Test list parameters params = [ ("subsampling_scheme", "--subsamp", [4, 2, 2, 1], "4,2,2,1"), ("max_nonlin_iter", "--miter", [4, 4, 4, 2], "4,4,4,2"), ("ref_fwhm", "--reffwhm", [4, 2, 2, 0], "4,2,2,0"), ("in_fwhm", "--infwhm", [4, 2, 2, 0], "4,2,2,0"), ("apply_refmask", "--applyrefmask", [0, 0, 1, 1], "0,0,1,1"), ("apply_inmask", "--applyinmask", [0, 0, 0, 1], "0,0,0,1"), ("regularization_lambda", "--lambda", [0.5, 0.75], "0.5,0.75"), ( "intensity_mapping_model", "--intmod", "global_non_linear", "global_non_linear", ), ] for item, flag, val, strval in params: fnirt = fsl.FNIRT(in_file=infile, ref_file=reffile, **{item: val}) log = fnirt._gen_fname(infile, suffix="_log.txt", change_ext=False) iout = fnirt._gen_fname(infile, suffix="_warped") if item in ("max_nonlin_iter"): cmd = ( "fnirt --in=%s " "--logout=%s" " %s=%s --ref=%s" " --iout=%s" % (infile, log, flag, strval, reffile, iout) ) elif item in ("in_fwhm", "intensity_mapping_model"): cmd = "fnirt --in=%s %s=%s --logout=%s " "--ref=%s --iout=%s" % ( infile, flag, strval, log, reffile, iout, ) elif item.startswith("apply"): cmd = ( "fnirt %s=%s " "--in=%s " "--logout=%s " "--ref=%s --iout=%s" % (flag, strval, infile, log, reffile, iout) ) else: cmd = ( "fnirt " "--in=%s --logout=%s " "--ref=%s %s=%s --iout=%s" % (infile, log, reffile, flag, strval, iout) ) assert fnirt.cmdline == cmd # Test ValueError is raised when missing mandatory args fnirt = fsl.FNIRT() with pytest.raises(ValueError): fnirt.run() fnirt.inputs.in_file = infile fnirt.inputs.ref_file = reffile intmap_basename = "%s_intmap" % fsl.FNIRT.intensitymap_file_basename(infile) intmap_image = fsl_name(fnirt, intmap_basename) intmap_txt = "%s.txt" % intmap_basename # doing this to create the file to pass tests for file existence with open(intmap_image, "w"): pass with open(intmap_txt, "w"): pass # test files opt_map = [ ("affine_file", "--aff=%s" % infile, infile), ("inwarp_file", "--inwarp=%s" % infile, infile), 
("in_intensitymap_file", "--intin=%s" % intmap_basename, [intmap_image]), ( "in_intensitymap_file", "--intin=%s" % intmap_basename, [intmap_image, intmap_txt], ), ("config_file", "--config=%s" % infile, infile), ("refmask_file", "--refmask=%s" % infile, infile), ("inmask_file", "--inmask=%s" % infile, infile), ("field_file", "--fout=%s" % infile, infile), ("jacobian_file", "--jout=%s" % infile, infile), ("modulatedref_file", "--refout=%s" % infile, infile), ("out_intensitymap_file", "--intout=%s" % intmap_basename, True), ("out_intensitymap_file", "--intout=%s" % intmap_basename, intmap_image), ("fieldcoeff_file", "--cout=%s" % infile, infile), ("log_file", "--logout=%s" % infile, infile), ] for (name, settings, arg) in opt_map: fnirt = fsl.FNIRT(in_file=infile, ref_file=reffile, **{name: arg}) if name in ("config_file", "affine_file", "field_file", "fieldcoeff_file"): cmd = ( "fnirt %s --in=%s " "--logout=%s " "--ref=%s --iout=%s" % (settings, infile, log, reffile, iout) ) elif name in ("refmask_file"): cmd = ( "fnirt --in=%s " "--logout=%s --ref=%s " "%s " "--iout=%s" % (infile, log, reffile, settings, iout) ) elif name in ( "in_intensitymap_file", "inwarp_file", "inmask_file", "jacobian_file", ): cmd = ( "fnirt --in=%s " "%s " "--logout=%s --ref=%s " "--iout=%s" % (infile, settings, log, reffile, iout) ) elif name in ("log_file"): cmd = ( "fnirt --in=%s " "%s --ref=%s " "--iout=%s" % (infile, settings, reffile, iout) ) else: cmd = ( "fnirt --in=%s " "--logout=%s %s " "--ref=%s --iout=%s" % (infile, log, settings, reffile, iout) ) assert fnirt.cmdline == cmd if name == "out_intensitymap_file": assert fnirt._list_outputs()["out_intensitymap_file"] == [ intmap_image, intmap_txt, ] @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_applywarp(setup_flirt): tmpdir, infile, reffile = setup_flirt opt_map = { "out_file": ("--out=bar.nii", "bar.nii"), "premat": ("--premat=%s" % (reffile), reffile), "postmat": ("--postmat=%s" % (reffile), reffile), } # 
in_file, ref_file, field_file mandatory for name, settings in list(opt_map.items()): awarp = fsl.ApplyWarp( in_file=infile, ref_file=reffile, field_file=reffile, **{name: settings[1]} ) if name == "out_file": realcmd = ( "applywarp --in=%s " "--ref=%s --out=%s " "--warp=%s" % (infile, reffile, settings[1], reffile) ) else: outfile = awarp._gen_fname(infile, suffix="_warp") realcmd = ( "applywarp --in=%s " "--ref=%s --out=%s " "--warp=%s %s" % (infile, reffile, outfile, reffile, settings[0]) ) assert awarp.cmdline == realcmd @pytest.fixture() def setup_fugue(tmpdir): import nibabel as nb import numpy as np import os.path as op d = np.ones((80, 80, 80)) infile = tmpdir.join("dumbfile.nii.gz").strpath nb.Nifti1Image(d, None, None).to_filename(infile) return (tmpdir, infile) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") @pytest.mark.parametrize( "attr, out_file", [ ( { "save_unmasked_fmap": True, "fmap_in_file": "infile", "mask_file": "infile", "output_type": "NIFTI_GZ", }, "fmap_out_file", ), ( { "save_unmasked_shift": True, "fmap_in_file": "infile", "dwell_time": 1.0e-3, "mask_file": "infile", "output_type": "NIFTI_GZ", }, "shift_out_file", ), ( { "in_file": "infile", "mask_file": "infile", "shift_in_file": "infile", "output_type": "NIFTI_GZ", }, "unwarped_file", ), ], ) def test_fugue(setup_fugue, attr, out_file): import os.path as op tmpdir, infile = setup_fugue fugue = fsl.FUGUE() for key, value in attr.items(): if value == "infile": setattr(fugue.inputs, key, infile) else: setattr(fugue.inputs, key, value) res = fugue.run() assert isdefined(getattr(res.outputs, out_file)) trait_spec = fugue.inputs.trait(out_file) out_name = trait_spec.name_template % "dumbfile" out_name += ".nii.gz" assert op.basename(getattr(res.outputs, out_file)) == out_name @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_first_genfname(): first = fsl.FIRST() first.inputs.out_file = "segment.nii" first.inputs.output_type = "NIFTI_GZ" value = 
first._gen_fname(basename="original_segmentations") expected_value = os.path.abspath("segment_all_fast_origsegs.nii.gz") assert value == expected_value first.inputs.method = "none" value = first._gen_fname(basename="original_segmentations") expected_value = os.path.abspath("segment_all_none_origsegs.nii.gz") assert value == expected_value first.inputs.method = "auto" first.inputs.list_of_specific_structures = ["L_Hipp", "R_Hipp"] value = first._gen_fname(basename="original_segmentations") expected_value = os.path.abspath("segment_all_none_origsegs.nii.gz") assert value == expected_value nipype-1.7.0/nipype/interfaces/fsl/tests/test_utils.py000066400000000000000000000244341413403311400231520ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import numpy as np import nibabel as nb import pytest import nipype.interfaces.fsl.utils as fsl from nipype.interfaces.fsl import no_fsl, Info from nipype.testing.fixtures import create_files_in_directory_plus_output_type @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_fslroi(create_files_in_directory_plus_output_type): filelist, outdir, _ = create_files_in_directory_plus_output_type roi = fsl.ExtractROI() # make sure command gets called assert roi.cmd == "fslroi" # test raising error with mandatory args absent with pytest.raises(ValueError): roi.run() # .inputs based parameters setting roi.inputs.in_file = filelist[0] roi.inputs.roi_file = "foo_roi.nii" roi.inputs.t_min = 10 roi.inputs.t_size = 20 assert roi.cmdline == "fslroi %s foo_roi.nii 10 20" % filelist[0] # .run based parameter setting roi2 = fsl.ExtractROI( in_file=filelist[0], roi_file="foo2_roi.nii", t_min=20, t_size=40, x_min=3, x_size=30, y_min=40, y_size=10, z_min=5, z_size=20, ) assert roi2.cmdline == "fslroi %s foo2_roi.nii 3 30 40 10 5 20 20 40" % filelist[0] # test arguments for opt_map # Fslroi class doesn't have a 
filled opt_map{} @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_fslmerge(create_files_in_directory_plus_output_type): filelist, outdir, _ = create_files_in_directory_plus_output_type merger = fsl.Merge() # make sure command gets called assert merger.cmd == "fslmerge" # test raising error with mandatory args absent with pytest.raises(ValueError): merger.run() # .inputs based parameters setting merger.inputs.in_files = filelist merger.inputs.merged_file = "foo_merged.nii" merger.inputs.dimension = "t" merger.inputs.output_type = "NIFTI" assert merger.cmdline == "fslmerge -t foo_merged.nii %s" % " ".join(filelist) # verify that providing a tr value updates the dimension to tr merger.inputs.tr = 2.25 assert merger.cmdline == "fslmerge -tr foo_merged.nii %s %.2f" % ( " ".join(filelist), 2.25, ) # .run based parameter setting merger2 = fsl.Merge( in_files=filelist, merged_file="foo_merged.nii", dimension="t", output_type="NIFTI", tr=2.25, ) assert merger2.cmdline == "fslmerge -tr foo_merged.nii %s %.2f" % ( " ".join(filelist), 2.25, ) # test arguments for opt_map # Fslmerge class doesn't have a filled opt_map{} # test fslmath @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_fslmaths(create_files_in_directory_plus_output_type): filelist, outdir, _ = create_files_in_directory_plus_output_type math = fsl.ImageMaths() # make sure command gets called assert math.cmd == "fslmaths" # test raising error with mandatory args absent with pytest.raises(ValueError): math.run() # .inputs based parameters setting math.inputs.in_file = filelist[0] math.inputs.op_string = "-add 2.5 -mul input_volume2" math.inputs.out_file = "foo_math.nii" assert ( math.cmdline == "fslmaths %s -add 2.5 -mul input_volume2 foo_math.nii" % filelist[0] ) # .run based parameter setting math2 = fsl.ImageMaths( in_file=filelist[0], op_string="-add 2.5", out_file="foo2_math.nii" ) assert math2.cmdline == "fslmaths %s -add 2.5 foo2_math.nii" % filelist[0] # test 
arguments for opt_map # Fslmath class doesn't have opt_map{} # test overlay @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_overlay(create_files_in_directory_plus_output_type): filelist, outdir, _ = create_files_in_directory_plus_output_type overlay = fsl.Overlay() # make sure command gets called assert overlay.cmd == "overlay" # test raising error with mandatory args absent with pytest.raises(ValueError): overlay.run() # .inputs based parameters setting overlay.inputs.stat_image = filelist[0] overlay.inputs.stat_thresh = (2.5, 10) overlay.inputs.background_image = filelist[1] overlay.inputs.auto_thresh_bg = True overlay.inputs.show_negative_stats = True overlay.inputs.out_file = "foo_overlay.nii" assert ( overlay.cmdline == "overlay 1 0 %s -a %s 2.50 10.00 %s -2.50 -10.00 foo_overlay.nii" % (filelist[1], filelist[0], filelist[0]) ) # .run based parameter setting overlay2 = fsl.Overlay( stat_image=filelist[0], stat_thresh=(2.5, 10), background_image=filelist[1], auto_thresh_bg=True, out_file="foo2_overlay.nii", ) assert overlay2.cmdline == "overlay 1 0 %s -a %s 2.50 10.00 foo2_overlay.nii" % ( filelist[1], filelist[0], ) # test slicer @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_slicer(create_files_in_directory_plus_output_type): filelist, outdir, _ = create_files_in_directory_plus_output_type slicer = fsl.Slicer() # make sure command gets called assert slicer.cmd == "slicer" # test raising error with mandatory args absent with pytest.raises(ValueError): slicer.run() # .inputs based parameters setting slicer.inputs.in_file = filelist[0] slicer.inputs.image_edges = filelist[1] slicer.inputs.intensity_range = (10.0, 20.0) slicer.inputs.all_axial = True slicer.inputs.image_width = 750 slicer.inputs.out_file = "foo_bar.png" assert slicer.cmdline == "slicer %s %s -L -i 10.000 20.000 -A 750 foo_bar.png" % ( filelist[0], filelist[1], ) # .run based parameter setting slicer2 = fsl.Slicer( in_file=filelist[0], 
middle_slices=True, label_slices=False, out_file="foo_bar2.png", ) assert slicer2.cmdline == "slicer %s -a foo_bar2.png" % (filelist[0]) def create_parfiles(): np.savetxt("a.par", np.random.rand(6, 3)) np.savetxt("b.par", np.random.rand(6, 3)) return ["a.par", "b.par"] # test fsl_tsplot @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_plottimeseries(create_files_in_directory_plus_output_type): filelist, outdir, _ = create_files_in_directory_plus_output_type parfiles = create_parfiles() plotter = fsl.PlotTimeSeries() # make sure command gets called assert plotter.cmd == "fsl_tsplot" # test raising error with mandatory args absent with pytest.raises(ValueError): plotter.run() # .inputs based parameters setting plotter.inputs.in_file = parfiles[0] plotter.inputs.labels = ["x", "y", "z"] plotter.inputs.y_range = (0, 1) plotter.inputs.title = "test plot" plotter.inputs.out_file = "foo.png" assert plotter.cmdline == ( "fsl_tsplot -i %s -a x,y,z -o foo.png -t 'test plot' -u 1 --ymin=0 --ymax=1" % parfiles[0] ) # .run based parameter setting plotter2 = fsl.PlotTimeSeries( in_file=parfiles, title="test2 plot", plot_range=(2, 5), out_file="bar.png" ) assert ( plotter2.cmdline == "fsl_tsplot -i %s,%s -o bar.png --start=2 --finish=5 -t 'test2 plot' -u 1" % tuple(parfiles) ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_plotmotionparams(create_files_in_directory_plus_output_type): filelist, outdir, _ = create_files_in_directory_plus_output_type parfiles = create_parfiles() plotter = fsl.PlotMotionParams() # make sure command gets called assert plotter.cmd == "fsl_tsplot" # test raising error with mandatory args absent with pytest.raises(ValueError): plotter.run() # .inputs based parameters setting plotter.inputs.in_file = parfiles[0] plotter.inputs.in_source = "fsl" plotter.inputs.plot_type = "rotations" plotter.inputs.out_file = "foo.png" assert plotter.cmdline == ( "fsl_tsplot -i %s -o foo.png -t 'MCFLIRT estimated rotations 
(radians)' " "--start=1 --finish=3 -a x,y,z" % parfiles[0] ) # .run based parameter setting plotter2 = fsl.PlotMotionParams( in_file=parfiles[1], in_source="spm", plot_type="translations", out_file="bar.png", ) assert plotter2.cmdline == ( "fsl_tsplot -i %s -o bar.png -t 'Realign estimated translations (mm)' " "--start=1 --finish=3 -a x,y,z" % parfiles[1] ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_convertxfm(create_files_in_directory_plus_output_type): filelist, outdir, _ = create_files_in_directory_plus_output_type cvt = fsl.ConvertXFM() # make sure command gets called assert cvt.cmd == "convert_xfm" # test raising error with mandatory args absent with pytest.raises(ValueError): cvt.run() # .inputs based parameters setting cvt.inputs.in_file = filelist[0] cvt.inputs.invert_xfm = True cvt.inputs.out_file = "foo.mat" assert cvt.cmdline == "convert_xfm -omat foo.mat -inverse %s" % filelist[0] # constructor based parameter setting cvt2 = fsl.ConvertXFM( in_file=filelist[0], in_file2=filelist[1], concat_xfm=True, out_file="bar.mat" ) assert cvt2.cmdline == "convert_xfm -omat bar.mat -concat %s %s" % ( filelist[1], filelist[0], ) @pytest.mark.skipif(no_fsl(), reason="fsl is not installed") def test_swapdims(create_files_in_directory_plus_output_type): files, testdir, out_ext = create_files_in_directory_plus_output_type swap = fsl.SwapDimensions() # Test the underlying command assert swap.cmd == "fslswapdim" # Test mandatory args args = [dict(in_file=files[0]), dict(new_dims=("x", "y", "z"))] for arg in args: wontrun = fsl.SwapDimensions(**arg) with pytest.raises(ValueError): wontrun.run() # Now test a basic command line swap.inputs.in_file = files[0] swap.inputs.new_dims = ("x", "y", "z") assert swap.cmdline == "fslswapdim a.nii x y z %s" % os.path.realpath( os.path.join(testdir, "a_newdims%s" % out_ext) ) # Test that we can set an output name swap.inputs.out_file = "b.nii" assert swap.cmdline == "fslswapdim a.nii x y z b.nii" 
nipype-1.7.0/nipype/interfaces/fsl/utils.py000066400000000000000000002542501413403311400207520ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL `_ command line tools. This was written to work with FSL version 4.1.4. Examples -------- See the docstrings of the individual classes for examples. """ import os import os.path as op import re from glob import glob import tempfile import numpy as np from ...utils.filemanip import load_json, save_json, split_filename, fname_presuffix from ..base import ( traits, TraitedSpec, OutputMultiPath, File, CommandLine, CommandLineInputSpec, isdefined, ) from .base import FSLCommand, FSLCommandInputSpec, Info class CopyGeomInputSpec(FSLCommandInputSpec): in_file = File( exists=True, mandatory=True, argstr="%s", position=0, desc="source image" ) dest_file = File( exists=True, mandatory=True, argstr="%s", position=1, desc="destination image", copyfile=True, output_name="out_file", name_source="dest_file", name_template="%s", ) ignore_dims = traits.Bool( desc="Do not copy image dimensions", argstr="-d", position="-1" ) class CopyGeomOutputSpec(TraitedSpec): out_file = File(exists=True, desc="image with new geometry header") class CopyGeom(FSLCommand): """Use fslcpgeom to copy the header geometry information to another image. Copy certain parts of the header information (image dimensions, voxel dimensions, voxel dimensions units string, image orientation/origin or qform/sform info) from one image to another. Note that only copies from Analyze to Analyze or Nifti to Nifti will work properly. Copying from different files will result in loss of information or potentially incorrect settings. 
""" _cmd = "fslcpgeom" input_spec = CopyGeomInputSpec output_spec = CopyGeomOutputSpec class RobustFOVInputSpec(FSLCommandInputSpec): in_file = File( exists=True, desc="input filename", argstr="-i %s", position=0, mandatory=True ) out_roi = File( desc="ROI volume output name", argstr="-r %s", name_source=["in_file"], hash_files=False, name_template="%s_ROI", ) brainsize = traits.Int( desc=("size of brain in z-dimension (default " "170mm/150mm)"), argstr="-b %d" ) out_transform = File( desc=("Transformation matrix in_file to out_roi " "output name"), argstr="-m %s", name_source=["in_file"], hash_files=False, name_template="%s_to_ROI", ) class RobustFOVOutputSpec(TraitedSpec): out_roi = File(exists=True, desc="ROI volume output name") out_transform = File( exists=True, desc=("Transformation matrix in_file to out_roi " "output name") ) class RobustFOV(FSLCommand): """Automatically crops an image removing lower head and neck. Interface is stable 5.0.0 to 5.0.9, but default brainsize changed from 150mm to 170mm. 
""" _cmd = "robustfov" input_spec = RobustFOVInputSpec output_spec = RobustFOVOutputSpec class ImageMeantsInputSpec(FSLCommandInputSpec): in_file = File( exists=True, desc="input file for computing the average timeseries", argstr="-i %s", position=0, mandatory=True, ) out_file = File( desc="name of output text matrix", argstr="-o %s", genfile=True, hash_files=False, ) mask = File(exists=True, desc="input 3D mask", argstr="-m %s") spatial_coord = traits.List( traits.Int, desc=(" requested spatial coordinate " "(instead of mask)"), argstr="-c %s", ) use_mm = traits.Bool( desc=("use mm instead of voxel coordinates (for -c " "option)"), argstr="--usemm", ) show_all = traits.Bool( desc=("show all voxel time series (within mask) " "instead of averaging"), argstr="--showall", ) eig = traits.Bool( desc=("calculate Eigenvariate(s) instead of mean (output will have 0 " "mean)"), argstr="--eig", ) order = traits.Int( 1, desc="select number of Eigenvariates", argstr="--order=%d", usedefault=True ) nobin = traits.Bool( desc=("do not binarise the mask for calculation of " "Eigenvariates"), argstr="--no_bin", ) transpose = traits.Bool( desc=("output results in transpose format (one row per voxel/mean)"), argstr="--transpose", ) class ImageMeantsOutputSpec(TraitedSpec): out_file = File(exists=True, desc="path/name of output text matrix") class ImageMeants(FSLCommand): """Use fslmeants for printing the average timeseries (intensities) to the screen (or saves to a file). 
The average is taken over all voxels in the mask (or all voxels in the image if no mask is specified) """ _cmd = "fslmeants" input_spec = ImageMeantsInputSpec output_spec = ImageMeantsOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = self.inputs.out_file if not isdefined(outputs["out_file"]): outputs["out_file"] = self._gen_fname( self.inputs.in_file, suffix="_ts", ext=".txt", change_ext=True ) outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()[name] return None class SmoothInputSpec(FSLCommandInputSpec): in_file = File(exists=True, argstr="%s", position=0, mandatory=True) sigma = traits.Float( argstr="-kernel gauss %.03f -fmean", position=1, xor=["fwhm"], mandatory=True, desc="gaussian kernel sigma in mm (not voxels)", ) fwhm = traits.Float( argstr="-kernel gauss %.03f -fmean", position=1, xor=["sigma"], mandatory=True, desc=("gaussian kernel fwhm, will be converted to sigma in mm " "(not voxels)"), ) smoothed_file = File( argstr="%s", position=2, name_source=["in_file"], name_template="%s_smooth", hash_files=False, ) class SmoothOutputSpec(TraitedSpec): smoothed_file = File(exists=True) class Smooth(FSLCommand): """ Use fslmaths to smooth the image Examples -------- Setting the kernel width using sigma: >>> sm = Smooth() >>> sm.inputs.output_type = 'NIFTI_GZ' >>> sm.inputs.in_file = 'functional2.nii' >>> sm.inputs.sigma = 8.0 >>> sm.cmdline # doctest: +ELLIPSIS 'fslmaths functional2.nii -kernel gauss 8.000 -fmean functional2_smooth.nii.gz' Setting the kernel width using fwhm: >>> sm = Smooth() >>> sm.inputs.output_type = 'NIFTI_GZ' >>> sm.inputs.in_file = 'functional2.nii' >>> sm.inputs.fwhm = 8.0 >>> sm.cmdline # doctest: +ELLIPSIS 'fslmaths functional2.nii -kernel gauss 3.397 -fmean functional2_smooth.nii.gz' One of sigma or fwhm must be set: >>> from nipype.interfaces.fsl import Smooth >>> sm = Smooth() >>> 
sm.inputs.output_type = 'NIFTI_GZ' >>> sm.inputs.in_file = 'functional2.nii' >>> sm.cmdline #doctest: +ELLIPSIS Traceback (most recent call last): ... ValueError: Smooth requires a value for one of the inputs ... """ input_spec = SmoothInputSpec output_spec = SmoothOutputSpec _cmd = "fslmaths" def _format_arg(self, name, trait_spec, value): if name == "fwhm": sigma = float(value) / np.sqrt(8 * np.log(2)) return super(Smooth, self)._format_arg(name, trait_spec, sigma) return super(Smooth, self)._format_arg(name, trait_spec, value) class SliceInputSpec(FSLCommandInputSpec): in_file = File( exists=True, argstr="%s", position=0, mandatory=True, desc="input filename", copyfile=False, ) out_base_name = traits.Str(argstr="%s", position=1, desc="outputs prefix") class SliceOutputSpec(TraitedSpec): out_files = OutputMultiPath(File(exists=True)) class Slice(FSLCommand): """Use fslslice to split a 3D file into lots of 2D files (along z-axis). Examples -------- >>> from nipype.interfaces.fsl import Slice >>> slice = Slice() >>> slice.inputs.in_file = 'functional.nii' >>> slice.inputs.out_base_name = 'sl' >>> slice.cmdline 'fslslice functional.nii sl' """ _cmd = "fslslice" input_spec = SliceInputSpec output_spec = SliceOutputSpec def _list_outputs(self): """Create a Bunch which contains all possible files generated by running the interface. Some files are always generated, others depending on which ``inputs`` options are set. Returns ------- outputs : Bunch object Bunch object containing all possible files generated by interface object. 
If None, file was not generated Else, contains path, filename of generated outputfile """ outputs = self._outputs().get() ext = Info.output_type_to_ext(self.inputs.output_type) suffix = "_slice_*" + ext if isdefined(self.inputs.out_base_name): fname_template = os.path.abspath(self.inputs.out_base_name + suffix) else: fname_template = fname_presuffix( self.inputs.in_file, suffix=suffix, use_ext=False ) outputs["out_files"] = sorted(glob(fname_template)) return outputs class MergeInputSpec(FSLCommandInputSpec): in_files = traits.List(File(exists=True), argstr="%s", position=2, mandatory=True) dimension = traits.Enum( "t", "x", "y", "z", "a", argstr="-%s", position=0, desc=( "dimension along which to merge, optionally " "set tr input when dimension is t" ), mandatory=True, ) tr = traits.Float( position=-1, argstr="%.2f", desc=( "use to specify TR in seconds (default is 1.00 " "sec), overrides dimension and sets it to tr" ), ) merged_file = File( argstr="%s", position=1, name_source="in_files", name_template="%s_merged", hash_files=False, ) class MergeOutputSpec(TraitedSpec): merged_file = File(exists=True) class Merge(FSLCommand): """Use fslmerge to concatenate images Images can be concatenated across time, x, y, or z dimensions. Across the time (t) dimension the TR is set by default to 1 sec. Note: to set the TR to a different value, specify 't' for dimension and specify the TR value in seconds for the tr input. The dimension will be automatically updated to 'tr'. 
Examples -------- >>> from nipype.interfaces.fsl import Merge >>> merger = Merge() >>> merger.inputs.in_files = ['functional2.nii', 'functional3.nii'] >>> merger.inputs.dimension = 't' >>> merger.inputs.output_type = 'NIFTI_GZ' >>> merger.cmdline 'fslmerge -t functional2_merged.nii.gz functional2.nii functional3.nii' >>> merger.inputs.tr = 2.25 >>> merger.cmdline 'fslmerge -tr functional2_merged.nii.gz functional2.nii functional3.nii 2.25' """ _cmd = "fslmerge" input_spec = MergeInputSpec output_spec = MergeOutputSpec def _format_arg(self, name, spec, value): if name == "tr": if self.inputs.dimension != "t": raise ValueError("When TR is specified, dimension must be t") return spec.argstr % value if name == "dimension": if isdefined(self.inputs.tr): return "-tr" return spec.argstr % value return super(Merge, self)._format_arg(name, spec, value) class ExtractROIInputSpec(FSLCommandInputSpec): in_file = File( exists=True, argstr="%s", position=0, desc="input file", mandatory=True ) roi_file = File( argstr="%s", position=1, desc="output file", genfile=True, hash_files=False ) x_min = traits.Int(argstr="%d", position=2) x_size = traits.Int(argstr="%d", position=3) y_min = traits.Int(argstr="%d", position=4) y_size = traits.Int(argstr="%d", position=5) z_min = traits.Int(argstr="%d", position=6) z_size = traits.Int(argstr="%d", position=7) t_min = traits.Int(argstr="%d", position=8) t_size = traits.Int(argstr="%d", position=9) _crop_xor = [ "x_min", "x_size", "y_min", "y_size", "z_min", "z_size", "t_min", "t_size", ] crop_list = traits.List( traits.Tuple(traits.Int, traits.Int), argstr="%s", position=2, xor=_crop_xor, desc="list of two tuples specifying crop options", ) class ExtractROIOutputSpec(TraitedSpec): roi_file = File(exists=True) class ExtractROI(FSLCommand): """Uses FSL Fslroi command to extract region of interest (ROI) from an image. 
You can a) take a 3D ROI from a 3D data set (or if it is 4D, the same ROI is taken from each time point and a new 4D data set is created), b) extract just some time points from a 4D data set, or c) control time and space limits to the ROI. Note that the arguments are minimum index and size (not maximum index). So to extract voxels 10 to 12 inclusive you would specify 10 and 3 (not 10 and 12). Examples -------- >>> from nipype.interfaces.fsl import ExtractROI >>> from nipype.testing import anatfile >>> fslroi = ExtractROI(in_file=anatfile, roi_file='bar.nii', t_min=0, ... t_size=1) >>> fslroi.cmdline == 'fslroi %s bar.nii 0 1' % anatfile True """ _cmd = "fslroi" input_spec = ExtractROIInputSpec output_spec = ExtractROIOutputSpec def _format_arg(self, name, spec, value): if name == "crop_list": return " ".join(map(str, sum(list(map(list, value)), []))) return super(ExtractROI, self)._format_arg(name, spec, value) def _list_outputs(self): """Create a Bunch which contains all possible files generated by running the interface. Some files are always generated, others depending on which ``inputs`` options are set. Returns ------- outputs : Bunch object Bunch object containing all possible files generated by interface object. 
If None, file was not generated Else, contains path, filename of generated outputfile """ outputs = self._outputs().get() outputs["roi_file"] = self.inputs.roi_file if not isdefined(outputs["roi_file"]): outputs["roi_file"] = self._gen_fname(self.inputs.in_file, suffix="_roi") outputs["roi_file"] = os.path.abspath(outputs["roi_file"]) return outputs def _gen_filename(self, name): if name == "roi_file": return self._list_outputs()[name] return None class SplitInputSpec(FSLCommandInputSpec): in_file = File( exists=True, argstr="%s", position=0, mandatory=True, desc="input filename" ) out_base_name = traits.Str(argstr="%s", position=1, desc="outputs prefix") dimension = traits.Enum( "t", "x", "y", "z", argstr="-%s", position=2, mandatory=True, desc="dimension along which the file will be split", ) class SplitOutputSpec(TraitedSpec): out_files = OutputMultiPath(File(exists=True)) class Split(FSLCommand): """Uses FSL Fslsplit command to separate a volume into images in time, x, y or z dimension. """ _cmd = "fslsplit" input_spec = SplitInputSpec output_spec = SplitOutputSpec def _list_outputs(self): """Create a Bunch which contains all possible files generated by running the interface. Some files are always generated, others depending on which ``inputs`` options are set. Returns ------- outputs : Bunch object Bunch object containing all possible files generated by interface object. 
If None, file was not generated Else, contains path, filename of generated outputfile """ outputs = self._outputs().get() ext = Info.output_type_to_ext(self.inputs.output_type) outbase = "vol[0-9]*" if isdefined(self.inputs.out_base_name): outbase = "%s[0-9]*" % self.inputs.out_base_name outputs["out_files"] = sorted(glob(os.path.join(os.getcwd(), outbase + ext))) return outputs class ImageMathsInputSpec(FSLCommandInputSpec): in_file = File(exists=True, argstr="%s", mandatory=True, position=1) in_file2 = File(exists=True, argstr="%s", position=3) mask_file = File( exists=True, argstr="-mas %s", desc="use (following image>0) to mask current image", ) out_file = File(argstr="%s", position=-2, genfile=True, hash_files=False) op_string = traits.Str( argstr="%s", position=2, desc="string defining the operation, i. e. -add" ) suffix = traits.Str(desc="out_file suffix") out_data_type = traits.Enum( "char", "short", "int", "float", "double", "input", argstr="-odt %s", position=-1, desc=("output datatype, one of (char, short, " "int, float, double, input)"), ) class ImageMathsOutputSpec(TraitedSpec): out_file = File(exists=True) class ImageMaths(FSLCommand): """Use FSL fslmaths command to allow mathematical manipulation of images `FSL info `_ Examples -------- >>> from nipype.interfaces import fsl >>> from nipype.testing import anatfile >>> maths = fsl.ImageMaths(in_file=anatfile, op_string= '-add 5', ... 
out_file='foo_maths.nii') >>> maths.cmdline == 'fslmaths %s -add 5 foo_maths.nii' % anatfile True """ input_spec = ImageMathsInputSpec output_spec = ImageMathsOutputSpec _cmd = "fslmaths" def _gen_filename(self, name): if name == "out_file": return self._list_outputs()[name] return None def _parse_inputs(self, skip=None): return super(ImageMaths, self)._parse_inputs(skip=["suffix"]) def _list_outputs(self): suffix = "_maths" # ohinds: build suffix if isdefined(self.inputs.suffix): suffix = self.inputs.suffix outputs = self._outputs().get() outputs["out_file"] = self.inputs.out_file if not isdefined(outputs["out_file"]): outputs["out_file"] = self._gen_fname(self.inputs.in_file, suffix=suffix) outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs class FilterRegressorInputSpec(FSLCommandInputSpec): in_file = File( exists=True, argstr="-i %s", desc="input file name (4D image)", mandatory=True, position=1, ) out_file = File( argstr="-o %s", desc="output file name for the filtered data", genfile=True, position=2, hash_files=False, ) design_file = File( exists=True, argstr="-d %s", position=3, mandatory=True, desc=( "name of the matrix with time courses (e.g. 
GLM " "design or MELODIC mixing matrix)" ), ) filter_columns = traits.List( traits.Int, argstr="-f '%s'", xor=["filter_all"], mandatory=True, position=4, desc=("(1-based) column indices to filter out of the data"), ) filter_all = traits.Bool( mandatory=True, argstr="-f '%s'", xor=["filter_columns"], position=4, desc=("use all columns in the design file in " "denoising"), ) mask = File(exists=True, argstr="-m %s", desc="mask image file name") var_norm = traits.Bool(argstr="--vn", desc="perform variance-normalization on data") out_vnscales = traits.Bool( argstr="--out_vnscales", desc=("output scaling factors for variance " "normalization"), ) class FilterRegressorOutputSpec(TraitedSpec): out_file = File(exists=True, desc="output file name for the filtered data") class FilterRegressor(FSLCommand): """Data de-noising by regressing out part of a design matrix Uses simple OLS regression on 4D images """ input_spec = FilterRegressorInputSpec output_spec = FilterRegressorOutputSpec _cmd = "fsl_regfilt" def _format_arg(self, name, trait_spec, value): if name == "filter_columns": return trait_spec.argstr % ",".join(map(str, value)) elif name == "filter_all": design = np.loadtxt(self.inputs.design_file) try: n_cols = design.shape[1] except IndexError: n_cols = 1 return trait_spec.argstr % ",".join(map(str, list(range(1, n_cols + 1)))) return super(FilterRegressor, self)._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = self.inputs.out_file if not isdefined(outputs["out_file"]): outputs["out_file"] = self._gen_fname( self.inputs.in_file, suffix="_regfilt" ) outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()[name] return None class ImageStatsInputSpec(FSLCommandInputSpec): split_4d = traits.Bool( argstr="-t", position=1, desc=("give a separate output line for each 3D " "volume of a 4D timeseries"), ) 
in_file = File( exists=True, argstr="%s", mandatory=True, position=3, desc="input file to generate stats of", ) op_string = traits.Str( argstr="%s", mandatory=True, position=4, desc=( "string defining the operation, options are " "applied in order, e.g. -M -l 10 -M will " "report the non-zero mean, apply a threshold " "and then report the new nonzero mean" ), ) mask_file = File(exists=True, argstr="", desc="mask file used for option -k %s") index_mask_file = File( exists=True, argstr="-K %s", position=2, desc="generate seperate n submasks from indexMask, " "for indexvalues 1..n where n is the maximum index " "value in indexMask, and generate statistics for each submask", ) class ImageStatsOutputSpec(TraitedSpec): out_stat = traits.Any(desc="stats output") class ImageStats(FSLCommand): """Use FSL fslstats command to calculate stats from images `FSL info `_ Examples -------- >>> from nipype.interfaces.fsl import ImageStats >>> from nipype.testing import funcfile >>> stats = ImageStats(in_file=funcfile, op_string= '-M') >>> stats.cmdline == 'fslstats %s -M'%funcfile True """ input_spec = ImageStatsInputSpec output_spec = ImageStatsOutputSpec _cmd = "fslstats" def _format_arg(self, name, trait_spec, value): if name == "mask_file": return "" if name == "op_string": if "-k %s" in self.inputs.op_string: if isdefined(self.inputs.mask_file): return self.inputs.op_string % self.inputs.mask_file else: raise ValueError("-k %s option in op_string requires mask_file") return super(ImageStats, self)._format_arg(name, trait_spec, value) def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() # local caching for backward compatibility outfile = os.path.join(os.getcwd(), "stat_result.json") if runtime is None: try: out_stat = load_json(outfile)["stat"] except IOError: return self.run().outputs else: out_stat = [] for line in runtime.stdout.split("\n"): if line: values = line.split() if len(values) > 1: out_stat.append([float(val) for val in values]) 
else: out_stat.extend([float(val) for val in values]) if len(out_stat) == 1: out_stat = out_stat[0] save_json(outfile, dict(stat=out_stat)) outputs.out_stat = out_stat return outputs class AvScaleInputSpec(CommandLineInputSpec): all_param = traits.Bool(False, argstr="--allparams") mat_file = File(exists=True, argstr="%s", desc="mat file to read", position=-2) ref_file = File( exists=True, argstr="%s", position=-1, desc="reference file to get center of rotation", ) class AvScaleOutputSpec(TraitedSpec): rotation_translation_matrix = traits.List( traits.List(traits.Float), desc="Rotation and Translation Matrix" ) scales = traits.List(traits.Float, desc="Scales (x,y,z)") skews = traits.List(traits.Float, desc="Skews") average_scaling = traits.Float(desc="Average Scaling") determinant = traits.Float(desc="Determinant") forward_half_transform = traits.List( traits.List(traits.Float), desc="Forward Half Transform" ) backward_half_transform = traits.List( traits.List(traits.Float), desc="Backwards Half Transform" ) left_right_orientation_preserved = traits.Bool( desc="True if LR orientation preserved" ) rot_angles = traits.List(traits.Float, desc="rotation angles") translations = traits.List(traits.Float, desc="translations") class AvScale(CommandLine): """Use FSL avscale command to extract info from mat file output of FLIRT Examples -------- >>> avscale = AvScale() >>> avscale.inputs.mat_file = 'flirt.mat' >>> res = avscale.run() # doctest: +SKIP """ input_spec = AvScaleInputSpec output_spec = AvScaleOutputSpec _cmd = "avscale" def _run_interface(self, runtime): runtime = super(AvScale, self)._run_interface(runtime) expr = re.compile( r"Rotation & Translation Matrix:\n(?P[0-9\. \n-]+)[\s\n]*" r"(Rotation Angles \(x,y,z\) \[rads\] = (?P[0-9\. -]+))?[\s\n]*" r"(Translations \(x,y,z\) \[mm\] = (?P[0-9\. -]+))?[\s\n]*" r"Scales \(x,y,z\) = (?P[0-9\. -]+)[\s\n]*" r"Skews \(xy,xz,yz\) = (?P[0-9\. 
-]+)[\s\n]*" r"Average scaling = (?P[0-9\.-]+)[\s\n]*" r"Determinant = (?P[0-9\.-]+)[\s\n]*" r"Left-Right orientation: (?P[A-Za-z]+)[\s\n]*" r"Forward half transform =[\s]*\n" r"(?P[0-9\. \n-]+)[\s\n]*" r"Backward half transform =[\s]*\n" r"(?P[0-9\. \n-]+)[\s\n]*" ) out = expr.search(runtime.stdout).groupdict() outputs = {} outputs["rotation_translation_matrix"] = [ [float(v) for v in r.strip().split(" ")] for r in out["rot_tran_mat"].strip().split("\n") ] outputs["scales"] = [float(s) for s in out["scales"].strip().split(" ")] outputs["skews"] = [float(s) for s in out["skews"].strip().split(" ")] outputs["average_scaling"] = float(out["avg_scaling"].strip()) outputs["determinant"] = float(out["determinant"].strip()) outputs["left_right_orientation_preserved"] = ( out["lr_orientation"].strip() == "preserved" ) outputs["forward_half_transform"] = [ [float(v) for v in r.strip().split(" ")] for r in out["fwd_half_xfm"].strip().split("\n") ] outputs["backward_half_transform"] = [ [float(v) for v in r.strip().split(" ")] for r in out["bwd_half_xfm"].strip().split("\n") ] if self.inputs.all_param: outputs["rot_angles"] = [ float(r) for r in out["rot_angles"].strip().split(" ") ] outputs["translations"] = [ float(r) for r in out["translations"].strip().split(" ") ] setattr(self, "_results", outputs) return runtime def _list_outputs(self): return self._results class OverlayInputSpec(FSLCommandInputSpec): transparency = traits.Bool( desc="make overlay colors semi-transparent", position=1, argstr="%s", usedefault=True, default_value=True, ) out_type = traits.Enum( "float", "int", position=2, usedefault=True, argstr="%s", desc="write output with float or int", ) use_checkerboard = traits.Bool( desc="use checkerboard mask for overlay", argstr="-c", position=3 ) background_image = File( exists=True, position=4, mandatory=True, argstr="%s", desc="image to use as background", ) _xor_inputs = ("auto_thresh_bg", "full_bg_range", "bg_thresh") auto_thresh_bg = traits.Bool( 
desc=("automatically threshold the background image"), argstr="-a", position=5, xor=_xor_inputs, mandatory=True, ) full_bg_range = traits.Bool( desc="use full range of background image", argstr="-A", position=5, xor=_xor_inputs, mandatory=True, ) bg_thresh = traits.Tuple( traits.Float, traits.Float, argstr="%.3f %.3f", position=5, desc="min and max values for background intensity", xor=_xor_inputs, mandatory=True, ) stat_image = File( exists=True, position=6, mandatory=True, argstr="%s", desc="statistical image to overlay in color", ) stat_thresh = traits.Tuple( traits.Float, traits.Float, position=7, mandatory=True, argstr="%.2f %.2f", desc=("min and max values for the statistical " "overlay"), ) show_negative_stats = traits.Bool( desc=("display negative statistics in " "overlay"), xor=["stat_image2"], argstr="%s", position=8, ) stat_image2 = File( exists=True, position=9, xor=["show_negative_stats"], argstr="%s", desc="second statistical image to overlay in color", ) stat_thresh2 = traits.Tuple( traits.Float, traits.Float, position=10, desc=("min and max values for second " "statistical overlay"), argstr="%.2f %.2f", ) out_file = File( desc="combined image volume", position=-1, argstr="%s", genfile=True, hash_files=False, ) class OverlayOutputSpec(TraitedSpec): out_file = File(exists=True, desc="combined image volume") class Overlay(FSLCommand): """Use FSL's overlay command to combine background and statistical images into one volume Examples -------- >>> from nipype.interfaces import fsl >>> combine = fsl.Overlay() >>> combine.inputs.background_image = 'mean_func.nii.gz' >>> combine.inputs.auto_thresh_bg = True >>> combine.inputs.stat_image = 'zstat1.nii.gz' >>> combine.inputs.stat_thresh = (3.5, 10) >>> combine.inputs.show_negative_stats = True >>> res = combine.run() #doctest: +SKIP """ _cmd = "overlay" input_spec = OverlayInputSpec output_spec = OverlayOutputSpec def _format_arg(self, name, spec, value): if name == "transparency": if value: return "1" else: 
return "0" if name == "out_type": if value == "float": return "0" else: return "1" if name == "show_negative_stats": return "%s %.2f %.2f" % ( self.inputs.stat_image, self.inputs.stat_thresh[0] * -1, self.inputs.stat_thresh[1] * -1, ) return super(Overlay, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.out_file if not isdefined(out_file): if isdefined(self.inputs.stat_image2) and ( not isdefined(self.inputs.show_negative_stats) or not self.inputs.show_negative_stats ): stem = "%s_and_%s" % ( split_filename(self.inputs.stat_image)[1], split_filename(self.inputs.stat_image2)[1], ) else: stem = split_filename(self.inputs.stat_image)[1] out_file = self._gen_fname(stem, suffix="_overlay") outputs["out_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()["out_file"] return None class SlicerInputSpec(FSLCommandInputSpec): in_file = File( exists=True, position=1, argstr="%s", mandatory=True, desc="input volume" ) image_edges = File( exists=True, position=2, argstr="%s", desc=( "volume to display edge overlay for (useful for " "checking registration" ), ) label_slices = traits.Bool( position=3, argstr="-L", desc="display slice number", usedefault=True, default_value=True, ) colour_map = File( exists=True, position=4, argstr="-l %s", desc=("use different colour map from that stored in " "nifti header"), ) intensity_range = traits.Tuple( traits.Float, traits.Float, position=5, argstr="-i %.3f %.3f", desc="min and max intensities to display", ) threshold_edges = traits.Float( position=6, argstr="-e %.3f", desc="use threshold for edges" ) dither_edges = traits.Bool( position=7, argstr="-t", desc=("produce semi-transparent (dithered) " "edges") ) nearest_neighbour = traits.Bool( position=8, argstr="-n", desc=("use nearest neighbor interpolation " "for output"), ) show_orientation = traits.Bool( position=9, argstr="%s", 
usedefault=True, default_value=True, desc="label left-right orientation", ) _xor_options = ("single_slice", "middle_slices", "all_axial", "sample_axial") single_slice = traits.Enum( "x", "y", "z", position=10, argstr="-%s", xor=_xor_options, requires=["slice_number"], desc=("output picture of single slice in the x, y, or z plane"), ) slice_number = traits.Int( position=11, argstr="-%d", desc="slice number to save in picture" ) middle_slices = traits.Bool( position=10, argstr="-a", xor=_xor_options, desc=("output picture of mid-sagittal, axial, " "and coronal slices"), ) all_axial = traits.Bool( position=10, argstr="-A", xor=_xor_options, requires=["image_width"], desc="output all axial slices into one picture", ) sample_axial = traits.Int( position=10, argstr="-S %d", xor=_xor_options, requires=["image_width"], desc=("output every n axial slices into one " "picture"), ) image_width = traits.Int(position=-2, argstr="%d", desc="max picture width") out_file = File( position=-1, genfile=True, argstr="%s", desc="picture to write", hash_files=False, ) scaling = traits.Float(position=0, argstr="-s %f", desc="image scale") class SlicerOutputSpec(TraitedSpec): out_file = File(exists=True, desc="picture to write") class Slicer(FSLCommand): """Use FSL's slicer command to output a png image from a volume. 
Examples -------- >>> from nipype.interfaces import fsl >>> from nipype.testing import example_data >>> slice = fsl.Slicer() >>> slice.inputs.in_file = example_data('functional.nii') >>> slice.inputs.all_axial = True >>> slice.inputs.image_width = 750 >>> res = slice.run() #doctest: +SKIP """ _cmd = "slicer" input_spec = SlicerInputSpec output_spec = SlicerOutputSpec def _format_arg(self, name, spec, value): if name == "show_orientation": if value: return "" else: return "-u" elif name == "label_slices": if value: return "-L" else: return "" return super(Slicer, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.out_file if not isdefined(out_file): out_file = self._gen_fname(self.inputs.in_file, ext=".png") outputs["out_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()["out_file"] return None class PlotTimeSeriesInputSpec(FSLCommandInputSpec): in_file = traits.Either( File(exists=True), traits.List(File(exists=True)), mandatory=True, argstr="%s", position=1, desc=("file or list of files with columns of " "timecourse information"), ) plot_start = traits.Int( argstr="--start=%d", xor=("plot_range",), desc="first column from in-file to plot", ) plot_finish = traits.Int( argstr="--finish=%d", xor=("plot_range",), desc="final column from in-file to plot", ) plot_range = traits.Tuple( traits.Int, traits.Int, argstr="%s", xor=("plot_start", "plot_finish"), desc=("first and last columns from the in-file " "to plot"), ) title = traits.Str(argstr="%s", desc="plot title") legend_file = File(exists=True, argstr="--legend=%s", desc="legend file") labels = traits.Either( traits.Str, traits.List(traits.Str), argstr="%s", desc="label or list of labels" ) y_min = traits.Float(argstr="--ymin=%.2f", desc="minumum y value", xor=("y_range",)) y_max = traits.Float(argstr="--ymax=%.2f", desc="maximum y value", xor=("y_range",)) y_range = 
traits.Tuple( traits.Float, traits.Float, argstr="%s", xor=("y_min", "y_max"), desc="min and max y axis values", ) x_units = traits.Int( argstr="-u %d", usedefault=True, default_value=1, desc=("scaling units for x-axis (between 1 and length of in file)"), ) plot_size = traits.Tuple( traits.Int, traits.Int, argstr="%s", desc="plot image height and width" ) x_precision = traits.Int(argstr="--precision=%d", desc="precision of x-axis labels") sci_notation = traits.Bool(argstr="--sci", desc="switch on scientific notation") out_file = File( argstr="-o %s", genfile=True, desc="image to write", hash_files=False ) class PlotTimeSeriesOutputSpec(TraitedSpec): out_file = File(exists=True, desc="image to write") class PlotTimeSeries(FSLCommand): """Use fsl_tsplot to create images of time course plots. Examples -------- >>> import nipype.interfaces.fsl as fsl >>> plotter = fsl.PlotTimeSeries() >>> plotter.inputs.in_file = 'functional.par' >>> plotter.inputs.title = 'Functional timeseries' >>> plotter.inputs.labels = ['run1', 'run2'] >>> plotter.run() #doctest: +SKIP """ _cmd = "fsl_tsplot" input_spec = PlotTimeSeriesInputSpec output_spec = PlotTimeSeriesOutputSpec def _format_arg(self, name, spec, value): if name == "in_file": if isinstance(value, list): args = ",".join(value) return "-i %s" % args else: return "-i %s" % value elif name == "labels": if isinstance(value, list): args = ",".join(value) return "-a %s" % args else: return "-a %s" % value elif name == "title": return "-t '%s'" % value elif name == "plot_range": return "--start=%d --finish=%d" % value elif name == "y_range": return "--ymin=%d --ymax=%d" % value elif name == "plot_size": return "-h %d -w %d" % value return super(PlotTimeSeries, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.out_file if not isdefined(out_file): if isinstance(self.inputs.in_file, list): infile = self.inputs.in_file[0] else: infile = self.inputs.in_file out_file = 
self._gen_fname(infile, ext=".png") outputs["out_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()["out_file"] return None class PlotMotionParamsInputSpec(FSLCommandInputSpec): in_file = traits.Either( File(exists=True), traits.List(File(exists=True)), mandatory=True, argstr="%s", position=1, desc="file with motion parameters", ) in_source = traits.Enum( "spm", "fsl", mandatory=True, desc=("which program generated the motion " "parameter file - fsl, spm"), ) plot_type = traits.Enum( "rotations", "translations", "displacement", argstr="%s", mandatory=True, desc=("which motion type to plot - rotations, " "translations, displacement"), ) plot_size = traits.Tuple( traits.Int, traits.Int, argstr="%s", desc="plot image height and width" ) out_file = File( argstr="-o %s", genfile=True, desc="image to write", hash_files=False ) class PlotMotionParamsOutputSpec(TraitedSpec): out_file = File(exists=True, desc="image to write") class PlotMotionParams(FSLCommand): """Use fsl_tsplot to plot the estimated motion parameters from a realignment program. Examples -------- >>> import nipype.interfaces.fsl as fsl >>> plotter = fsl.PlotMotionParams() >>> plotter.inputs.in_file = 'functional.par' >>> plotter.inputs.in_source = 'fsl' >>> plotter.inputs.plot_type = 'rotations' >>> res = plotter.run() #doctest: +SKIP Notes ----- The 'in_source' attribute determines the order of columns that are expected in the source file. FSL prints motion parameters in the order rotations, translations, while SPM prints them in the opposite order. This interface should be able to plot timecourses of motion parameters generated from other sources as long as they fall under one of these two patterns. For more flexibilty, see the :class:`fsl.PlotTimeSeries` interface. 
""" _cmd = "fsl_tsplot" input_spec = PlotMotionParamsInputSpec output_spec = PlotMotionParamsOutputSpec def _format_arg(self, name, spec, value): if name == "plot_type": source = self.inputs.in_source if self.inputs.plot_type == "displacement": title = "-t 'MCFLIRT estimated mean displacement (mm)'" labels = "-a abs,rel" return "%s %s" % (title, labels) # Get the right starting and ending position depending on source # package sfdict = dict( fsl_rot=(1, 3), fsl_tra=(4, 6), spm_rot=(4, 6), spm_tra=(1, 3) ) # Format the title properly sfstr = "--start=%d --finish=%d" % sfdict["%s_%s" % (source, value[:3])] titledict = dict(fsl="MCFLIRT", spm="Realign") unitdict = dict(rot="radians", tra="mm") title = "'%s estimated %s (%s)'" % ( titledict[source], value, unitdict[value[:3]], ) return "-t %s %s -a x,y,z" % (title, sfstr) elif name == "plot_size": return "-h %d -w %d" % value elif name == "in_file": if isinstance(value, list): args = ",".join(value) return "-i %s" % args else: return "-i %s" % value return super(PlotMotionParams, self)._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() out_file = self.inputs.out_file if not isdefined(out_file): if isinstance(self.inputs.in_file, list): infile = self.inputs.in_file[0] else: infile = self.inputs.in_file plttype = dict(rot="rot", tra="trans", dis="disp")[ self.inputs.plot_type[:3] ] out_file = fname_presuffix( infile, suffix="_%s.png" % plttype, use_ext=False ) outputs["out_file"] = os.path.abspath(out_file) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()["out_file"] return None class ConvertXFMInputSpec(FSLCommandInputSpec): in_file = File( exists=True, mandatory=True, argstr="%s", position=-1, desc="input transformation matrix", ) in_file2 = File( exists=True, argstr="%s", position=-2, desc="second input matrix (for use with fix_scale_skew or concat_xfm)", ) _options = ["invert_xfm", "concat_xfm", "fix_scale_skew"] invert_xfm = 
traits.Bool( argstr="-inverse", position=-3, xor=_options, desc="invert input transformation" ) concat_xfm = traits.Bool( argstr="-concat", position=-3, xor=_options, requires=["in_file2"], desc=("write joint transformation of two input " "matrices"), ) fix_scale_skew = traits.Bool( argstr="-fixscaleskew", position=-3, xor=_options, requires=["in_file2"], desc=("use secondary matrix to fix scale and " "skew"), ) out_file = File( genfile=True, argstr="-omat %s", position=1, desc="final transformation matrix", hash_files=False, ) class ConvertXFMOutputSpec(TraitedSpec): out_file = File(exists=True, desc="output transformation matrix") class ConvertXFM(FSLCommand): """Use the FSL utility convert_xfm to modify FLIRT transformation matrices. Examples -------- >>> import nipype.interfaces.fsl as fsl >>> invt = fsl.ConvertXFM() >>> invt.inputs.in_file = "flirt.mat" >>> invt.inputs.invert_xfm = True >>> invt.inputs.out_file = 'flirt_inv.mat' >>> invt.cmdline 'convert_xfm -omat flirt_inv.mat -inverse flirt.mat' """ _cmd = "convert_xfm" input_spec = ConvertXFMInputSpec output_spec = ConvertXFMOutputSpec def _list_outputs(self): outputs = self._outputs().get() outfile = self.inputs.out_file if not isdefined(outfile): _, infile1, _ = split_filename(self.inputs.in_file) if self.inputs.invert_xfm: outfile = fname_presuffix( infile1, suffix="_inv.mat", newpath=os.getcwd(), use_ext=False ) else: if self.inputs.concat_xfm: _, infile2, _ = split_filename(self.inputs.in_file2) outfile = fname_presuffix( "%s_%s" % (infile1, infile2), suffix=".mat", newpath=os.getcwd(), use_ext=False, ) else: outfile = fname_presuffix( infile1, suffix="_fix.mat", newpath=os.getcwd(), use_ext=False ) outputs["out_file"] = os.path.abspath(outfile) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()["out_file"] return None class SwapDimensionsInputSpec(FSLCommandInputSpec): in_file = File( exists=True, mandatory=True, argstr="%s", position="1", desc="input 
image" ) _dims = ["x", "-x", "y", "-y", "z", "-z", "RL", "LR", "AP", "PA", "IS", "SI"] new_dims = traits.Tuple( traits.Enum(_dims), traits.Enum(_dims), traits.Enum(_dims), argstr="%s %s %s", mandatory=True, desc="3-tuple of new dimension order", ) out_file = File(genfile=True, argstr="%s", desc="image to write", hash_files=False) class SwapDimensionsOutputSpec(TraitedSpec): out_file = File(exists=True, desc="image with new dimensions") class SwapDimensions(FSLCommand): """Use fslswapdim to alter the orientation of an image. This interface accepts a three-tuple corresponding to the new orientation. You may either provide dimension ids in the form of (-)x, (-)y, or (-z), or nifti-syle dimension codes (RL, LR, AP, PA, IS, SI). """ _cmd = "fslswapdim" input_spec = SwapDimensionsInputSpec output_spec = SwapDimensionsOutputSpec def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = self.inputs.out_file if not isdefined(self.inputs.out_file): outputs["out_file"] = self._gen_fname( self.inputs.in_file, suffix="_newdims" ) outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()["out_file"] return None class PowerSpectrumInputSpec(FSLCommandInputSpec): # We use position args here as list indices - so a negative number # will put something on the end in_file = File( exists=True, desc="input 4D file to estimate the power spectrum", argstr="%s", position=0, mandatory=True, ) out_file = File( desc="name of output 4D file for power spectrum", argstr="%s", position=1, genfile=True, hash_files=False, ) class PowerSpectrumOutputSpec(TraitedSpec): out_file = File(exists=True, desc="path/name of the output 4D power spectrum file") class PowerSpectrum(FSLCommand): """Use FSL PowerSpectrum command for power spectrum estimation. 
Examples -------- >>> from nipype.interfaces import fsl >>> pspec = fsl.PowerSpectrum() >>> pspec.inputs.in_file = 'functional.nii' >>> res = pspec.run() # doctest: +SKIP """ _cmd = "fslpspec" input_spec = PowerSpectrumInputSpec output_spec = PowerSpectrumOutputSpec def _gen_outfilename(self): out_file = self.inputs.out_file if not isdefined(out_file) and isdefined(self.inputs.in_file): out_file = self._gen_fname(self.inputs.in_file, suffix="_ps") return out_file def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = os.path.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): if name == "out_file": return self._gen_outfilename() return None class SigLossInputSpec(FSLCommandInputSpec): in_file = File(mandatory=True, exists=True, argstr="-i %s", desc="b0 fieldmap file") out_file = File( argstr="-s %s", desc="output signal loss estimate file", genfile=True ) mask_file = File(exists=True, argstr="-m %s", desc="brain mask file") echo_time = traits.Float(argstr="--te=%f", desc="echo time in seconds") slice_direction = traits.Enum( "x", "y", "z", argstr="-d %s", desc="slicing direction" ) class SigLossOuputSpec(TraitedSpec): out_file = File(exists=True, desc="signal loss estimate file") class SigLoss(FSLCommand): """Estimates signal loss from a field map (in rad/s) Examples -------- >>> sigloss = SigLoss() >>> sigloss.inputs.in_file = "phase.nii" >>> sigloss.inputs.echo_time = 0.03 >>> res = sigloss.run() # doctest: +SKIP """ input_spec = SigLossInputSpec output_spec = SigLossOuputSpec _cmd = "sigloss" def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = self.inputs.out_file if not isdefined(outputs["out_file"]) and isdefined(self.inputs.in_file): outputs["out_file"] = self._gen_fname( self.inputs.in_file, suffix="_sigloss" ) return outputs def _gen_filename(self, name): if name == "out_file": return self._list_outputs()["out_file"] return None class 
Reorient2StdInputSpec(FSLCommandInputSpec): in_file = File(exists=True, mandatory=True, argstr="%s") out_file = File(genfile=True, hash_files=False, argstr="%s") class Reorient2StdOutputSpec(TraitedSpec): out_file = File(exists=True) class Reorient2Std(FSLCommand): """fslreorient2std is a tool for reorienting the image to match the approximate orientation of the standard template images (MNI152). Examples -------- >>> reorient = Reorient2Std() >>> reorient.inputs.in_file = "functional.nii" >>> res = reorient.run() # doctest: +SKIP """ _cmd = "fslreorient2std" input_spec = Reorient2StdInputSpec output_spec = Reorient2StdOutputSpec def _gen_filename(self, name): if name == "out_file": return self._gen_fname(self.inputs.in_file, suffix="_reoriented") return None def _list_outputs(self): outputs = self.output_spec().get() if not isdefined(self.inputs.out_file): outputs["out_file"] = self._gen_filename("out_file") else: outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class InvWarpInputSpec(FSLCommandInputSpec): warp = File( exists=True, argstr="--warp=%s", mandatory=True, desc=( "Name of file containing warp-coefficients/fields. This " "would typically be the output from the --cout switch of" " fnirt (but can also use fields, like the output from " "--fout)." ), ) reference = File( exists=True, argstr="--ref=%s", mandatory=True, desc=( "Name of a file in target space. Note that the " "target space is now different from the target " "space that was used to create the --warp file. It " "would typically be the file that was specified " "with the --in argument when running fnirt." ), ) inverse_warp = File( argstr="--out=%s", name_source=["warp"], hash_files=False, name_template="%s_inverse", desc=( "Name of output file, containing warps that are " 'the "reverse" of those in --warp. This will be ' "a field-file (rather than a file of spline " "coefficients), and it will have any affine " "component included as part of the " "displacements." 
), ) absolute = traits.Bool( argstr="--abs", xor=["relative"], desc=( "If set it indicates that the warps in --warp" " should be interpreted as absolute, provided" " that it is not created by fnirt (which " "always uses relative warps). If set it also " "indicates that the output --out should be " "absolute." ), ) relative = traits.Bool( argstr="--rel", xor=["absolute"], desc=( "If set it indicates that the warps in --warp" " should be interpreted as relative. I.e. the" " values in --warp are displacements from the" " coordinates in the --ref space. If set it " "also indicates that the output --out should " "be relative." ), ) niter = traits.Int( argstr="--niter=%d", desc=( "Determines how many iterations of the " "gradient-descent search that should be run." ), ) regularise = traits.Float( argstr="--regularise=%f", desc="Regularization strength (deafult=1.0)." ) noconstraint = traits.Bool( argstr="--noconstraint", desc="Do not apply Jacobian constraint" ) jacobian_min = traits.Float( argstr="--jmin=%f", desc=("Minimum acceptable Jacobian value for " "constraint (default 0.01)"), ) jacobian_max = traits.Float( argstr="--jmax=%f", desc=("Maximum acceptable Jacobian value for " "constraint (default 100.0)"), ) class InvWarpOutputSpec(TraitedSpec): inverse_warp = File( exists=True, desc=( "Name of output file, containing warps that are " 'the "reverse" of those in --warp.' 
), ) class InvWarp(FSLCommand): """ Use FSL Invwarp to invert a FNIRT warp Examples -------- >>> from nipype.interfaces.fsl import InvWarp >>> invwarp = InvWarp() >>> invwarp.inputs.warp = "struct2mni.nii" >>> invwarp.inputs.reference = "anatomical.nii" >>> invwarp.inputs.output_type = "NIFTI_GZ" >>> invwarp.cmdline 'invwarp --out=struct2mni_inverse.nii.gz --ref=anatomical.nii --warp=struct2mni.nii' >>> res = invwarp.run() # doctest: +SKIP """ input_spec = InvWarpInputSpec output_spec = InvWarpOutputSpec _cmd = "invwarp" class ComplexInputSpec(FSLCommandInputSpec): complex_in_file = File(exists=True, argstr="%s", position=2) complex_in_file2 = File(exists=True, argstr="%s", position=3) real_in_file = File(exists=True, argstr="%s", position=2) imaginary_in_file = File(exists=True, argstr="%s", position=3) magnitude_in_file = File(exists=True, argstr="%s", position=2) phase_in_file = File(exists=True, argstr="%s", position=3) _ofs = [ "complex_out_file", "magnitude_out_file", "phase_out_file", "real_out_file", "imaginary_out_file", ] _conversion = [ "real_polar", "real_cartesian", "complex_cartesian", "complex_polar", "complex_split", "complex_merge", ] complex_out_file = File( genfile=True, argstr="%s", position=-3, xor=_ofs + _conversion[:2] ) magnitude_out_file = File( genfile=True, argstr="%s", position=-4, xor=_ofs[:1] + _ofs[3:] + _conversion[1:], ) phase_out_file = File( genfile=True, argstr="%s", position=-3, xor=_ofs[:1] + _ofs[3:] + _conversion[1:], ) real_out_file = File( genfile=True, argstr="%s", position=-4, xor=_ofs[:3] + _conversion[:1] + _conversion[2:], ) imaginary_out_file = File( genfile=True, argstr="%s", position=-3, xor=_ofs[:3] + _conversion[:1] + _conversion[2:], ) start_vol = traits.Int(position=-2, argstr="%d") end_vol = traits.Int(position=-1, argstr="%d") real_polar = traits.Bool(argstr="-realpolar", xor=_conversion, position=1) # requires=['complex_in_file','magnitude_out_file','phase_out_file']) real_cartesian = 
traits.Bool(argstr="-realcartesian", xor=_conversion, position=1) # requires=['complex_in_file','real_out_file','imaginary_out_file']) complex_cartesian = traits.Bool(argstr="-complex", xor=_conversion, position=1) # requires=['real_in_file','imaginary_in_file','complex_out_file']) complex_polar = traits.Bool(argstr="-complexpolar", xor=_conversion, position=1) # requires=['magnitude_in_file','phase_in_file', # 'magnitude_out_file','phase_out_file']) complex_split = traits.Bool(argstr="-complexsplit", xor=_conversion, position=1) # requires=['complex_in_file','complex_out_file']) complex_merge = traits.Bool( argstr="-complexmerge", xor=_conversion + ["start_vol", "end_vol"], position=1 ) # requires=['complex_in_file','complex_in_file2','complex_out_file']) class ComplexOuputSpec(TraitedSpec): magnitude_out_file = File() phase_out_file = File() real_out_file = File() imaginary_out_file = File() complex_out_file = File() class Complex(FSLCommand): """fslcomplex is a tool for converting complex data Examples -------- >>> cplx = Complex() >>> cplx.inputs.complex_in_file = "complex.nii" >>> cplx.real_polar = True >>> res = cplx.run() # doctest: +SKIP """ _cmd = "fslcomplex" input_spec = ComplexInputSpec output_spec = ComplexOuputSpec def _parse_inputs(self, skip=None): if skip is None: skip = [] if self.inputs.real_cartesian: skip += self.inputs._ofs[:3] elif self.inputs.real_polar: skip += self.inputs._ofs[:1] + self.inputs._ofs[3:] else: skip += self.inputs._ofs[1:] return super(Complex, self)._parse_inputs(skip) def _gen_filename(self, name): if name == "complex_out_file": if self.inputs.complex_cartesian: in_file = self.inputs.real_in_file elif self.inputs.complex_polar: in_file = self.inputs.magnitude_in_file elif self.inputs.complex_split or self.inputs.complex_merge: in_file = self.inputs.complex_in_file else: return None return self._gen_fname(in_file, suffix="_cplx") elif name == "magnitude_out_file": return self._gen_fname(self.inputs.complex_in_file, 
suffix="_mag") elif name == "phase_out_file": return self._gen_fname(self.inputs.complex_in_file, suffix="_phase") elif name == "real_out_file": return self._gen_fname(self.inputs.complex_in_file, suffix="_real") elif name == "imaginary_out_file": return self._gen_fname(self.inputs.complex_in_file, suffix="_imag") return None def _get_output(self, name): output = getattr(self.inputs, name) if not isdefined(output): output = self._gen_filename(name) return os.path.abspath(output) def _list_outputs(self): outputs = self.output_spec().get() if ( self.inputs.complex_cartesian or self.inputs.complex_polar or self.inputs.complex_split or self.inputs.complex_merge ): outputs["complex_out_file"] = self._get_output("complex_out_file") elif self.inputs.real_cartesian: outputs["real_out_file"] = self._get_output("real_out_file") outputs["imaginary_out_file"] = self._get_output("imaginary_out_file") elif self.inputs.real_polar: outputs["magnitude_out_file"] = self._get_output("magnitude_out_file") outputs["phase_out_file"] = self._get_output("phase_out_file") return outputs class WarpUtilsInputSpec(FSLCommandInputSpec): in_file = File( exists=True, argstr="--in=%s", mandatory=True, desc=( "Name of file containing warp-coefficients/fields. This " "would typically be the output from the --cout switch of " "fnirt (but can also use fields, like the output from " "--fout)." ), ) reference = File( exists=True, argstr="--ref=%s", mandatory=True, desc=( "Name of a file in target space. Note that the " "target space is now different from the target " "space that was used to create the --warp file. It " "would typically be the file that was specified " "with the --in argument when running fnirt." ), ) out_format = traits.Enum( "spline", "field", argstr="--outformat=%s", desc=( "Specifies the output format. If set to field (default) " "the output will be a (4D) field-file. If set to spline " "the format will be a (4D) file of spline coefficients." 
), ) warp_resolution = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="--warpres=%0.4f,%0.4f,%0.4f", desc=( "Specifies the resolution/knot-spacing of the splines pertaining" " to the coefficients in the --out file. This parameter is only " "relevant if --outformat is set to spline. It should be noted " "that if the --in file has a higher resolution, the resulting " "coefficients will pertain to the closest (in a least-squares" " sense) file in the space of fields with the --warpres" " resolution. It should also be noted that the resolution " "will always be an integer multiple of the voxel " "size." ), ) knot_space = traits.Tuple( traits.Int, traits.Int, traits.Int, argstr="--knotspace=%d,%d,%d", desc=( "Alternative (to --warpres) specification of the resolution of " "the output spline-field." ), ) out_file = File( argstr="--out=%s", position=-1, name_source=["in_file"], output_name="out_file", desc=( "Name of output file. The format of the output depends on what " "other parameters are set. The default format is a (4D) " "field-file. If the --outformat is set to spline the format " "will be a (4D) file of spline coefficients." ), ) write_jacobian = traits.Bool( False, mandatory=True, usedefault=True, desc="Switch on --jac flag with automatically generated filename", ) out_jacobian = File( argstr="--jac=%s", desc=( "Specifies that a (3D) file of Jacobian determinants " "corresponding to --in should be produced and written to " "filename." ), ) with_affine = traits.Bool( False, argstr="--withaff", desc=( "Specifies that the affine transform (i.e. that which was " "specified for the --aff parameter in fnirt) should be " "included as displacements in the --out file. That can be " "useful for interfacing with software that cannot decode " "FSL/fnirt coefficient-files (where the affine transform is " "stored separately from the displacements)." 
), ) class WarpUtilsOutputSpec(TraitedSpec): out_file = File( desc=("Name of output file, containing the warp as field or " "coefficients.") ) out_jacobian = File( desc=( "Name of output file, containing the map of the determinant of " "the Jacobian" ) ) class WarpUtils(FSLCommand): """Use FSL `fnirtfileutils `_ to convert field->coefficients, coefficients->field, coefficients->other_coefficients etc Examples -------- >>> from nipype.interfaces.fsl import WarpUtils >>> warputils = WarpUtils() >>> warputils.inputs.in_file = "warpfield.nii" >>> warputils.inputs.reference = "T1.nii" >>> warputils.inputs.out_format = 'spline' >>> warputils.inputs.warp_resolution = (10,10,10) >>> warputils.inputs.output_type = "NIFTI_GZ" >>> warputils.cmdline # doctest: +ELLIPSIS 'fnirtfileutils --in=warpfield.nii --outformat=spline --ref=T1.nii --warpres=10.0000,10.0000,10.0000 --out=warpfield_coeffs.nii.gz' >>> res = invwarp.run() # doctest: +SKIP """ input_spec = WarpUtilsInputSpec output_spec = WarpUtilsOutputSpec _cmd = "fnirtfileutils" def _parse_inputs(self, skip=None): if skip is None: skip = [] suffix = "field" if isdefined(self.inputs.out_format) and self.inputs.out_format == "spline": suffix = "coeffs" trait_spec = self.inputs.trait("out_file") trait_spec.name_template = "%s_" + suffix if self.inputs.write_jacobian: if not isdefined(self.inputs.out_jacobian): jac_spec = self.inputs.trait("out_jacobian") jac_spec.name_source = ["in_file"] jac_spec.name_template = "%s_jac" jac_spec.output_name = "out_jacobian" else: skip += ["out_jacobian"] skip += ["write_jacobian"] return super(WarpUtils, self)._parse_inputs(skip=skip) class ConvertWarpInputSpec(FSLCommandInputSpec): reference = File( exists=True, argstr="--ref=%s", mandatory=True, position=1, desc="Name of a file in target space of the full transform.", ) out_file = File( argstr="--out=%s", position=-1, name_source=["reference"], name_template="%s_concatwarp", output_name="out_file", desc=( "Name of output file, containing 
warps that are the combination " "of all those given as arguments. The format of this will be a " "field-file (rather than spline coefficients) with any affine " "components included." ), ) premat = File( exists=True, argstr="--premat=%s", desc="filename for pre-transform (affine matrix)", ) warp1 = File( exists=True, argstr="--warp1=%s", desc="Name of file containing initial " "warp-fields/coefficients (follows premat). This could " "e.g. be a fnirt-transform from a subjects structural " "scan to an average of a group of subjects.", ) midmat = File( exists=True, argstr="--midmat=%s", desc="Name of file containing mid-warp-affine transform", ) warp2 = File( exists=True, argstr="--warp2=%s", desc="Name of file containing secondary warp-fields/coefficients " "(after warp1/midmat but before postmat). This could e.g. be a " "fnirt-transform from the average of a group of subjects to some " "standard space (e.g. MNI152).", ) postmat = File( exists=True, argstr="--postmat=%s", desc="Name of file containing an affine transform (applied last). It " "could e.g. be an affine transform that maps the MNI152-space " "into a better approximation to the Talairach-space (if indeed " "there is one).", ) shift_in_file = File( exists=True, argstr="--shiftmap=%s", desc='Name of file containing a "shiftmap", a non-linear transform ' "with displacements only in one direction (applied first, before " "premat). This would typically be a fieldmap that has been " "pre-processed using fugue that maps a subjects functional (EPI) " "data onto an undistorted space (i.e. a space that corresponds " "to his/her true anatomy).", ) shift_direction = traits.Enum( "y-", "y", "x", "x-", "z", "z-", argstr="--shiftdir=%s", requires=["shift_in_file"], desc="Indicates the direction that the distortions from " "--shiftmap goes. 
It depends on the direction and " "polarity of the phase-encoding in the EPI sequence.", ) cons_jacobian = traits.Bool( False, argstr="--constrainj", desc="Constrain the Jacobian of the warpfield to lie within specified " "min/max limits.", ) jacobian_min = traits.Float( argstr="--jmin=%f", desc="Minimum acceptable Jacobian value for " "constraint (default 0.01)", ) jacobian_max = traits.Float( argstr="--jmax=%f", desc="Maximum acceptable Jacobian value for " "constraint (default 100.0)", ) abswarp = traits.Bool( argstr="--abs", xor=["relwarp"], desc="If set it indicates that the warps in --warp1 and --warp2 should" " be interpreted as absolute. I.e. the values in --warp1/2 are " "the coordinates in the next space, rather than displacements. " "This flag is ignored if --warp1/2 was created by fnirt, which " "always creates relative displacements.", ) relwarp = traits.Bool( argstr="--rel", xor=["abswarp"], desc="If set it indicates that the warps in --warp1/2 should be " "interpreted as relative. I.e. the values in --warp1/2 are " "displacements from the coordinates in the next space.", ) out_abswarp = traits.Bool( argstr="--absout", xor=["out_relwarp"], desc="If set it indicates that the warps in --out should be absolute, " "i.e. the values in --out are displacements from the coordinates " "in --ref.", ) out_relwarp = traits.Bool( argstr="--relout", xor=["out_abswarp"], desc="If set it indicates that the warps in --out should be relative, " "i.e. the values in --out are displacements from the coordinates " "in --ref.", ) class ConvertWarpOutputSpec(TraitedSpec): out_file = File( exists=True, desc="Name of output file, containing the warp as field or " "coefficients.", ) class ConvertWarp(FSLCommand): """Use FSL `convertwarp `_ for combining multiple transforms into one. 
Examples -------- >>> from nipype.interfaces.fsl import ConvertWarp >>> warputils = ConvertWarp() >>> warputils.inputs.warp1 = "warpfield.nii" >>> warputils.inputs.reference = "T1.nii" >>> warputils.inputs.relwarp = True >>> warputils.inputs.output_type = "NIFTI_GZ" >>> warputils.cmdline # doctest: +ELLIPSIS 'convertwarp --ref=T1.nii --rel --warp1=warpfield.nii --out=T1_concatwarp.nii.gz' >>> res = warputils.run() # doctest: +SKIP """ input_spec = ConvertWarpInputSpec output_spec = ConvertWarpOutputSpec _cmd = "convertwarp" class WarpPointsBaseInputSpec(CommandLineInputSpec): in_coords = File( exists=True, position=-1, argstr="%s", mandatory=True, desc="filename of file containing coordinates", ) xfm_file = File( exists=True, argstr="-xfm %s", xor=["warp_file"], desc="filename of affine transform (e.g. source2dest.mat)", ) warp_file = File( exists=True, argstr="-warp %s", xor=["xfm_file"], desc="filename of warpfield (e.g. " "intermediate2dest_warp.nii.gz)", ) coord_vox = traits.Bool( True, argstr="-vox", xor=["coord_mm"], desc="all coordinates in voxels - default", ) coord_mm = traits.Bool( False, argstr="-mm", xor=["coord_vox"], desc="all coordinates in mm" ) out_file = File( name_source="in_coords", name_template="%s_warped", output_name="out_file", desc="output file name", ) class WarpPointsInputSpec(WarpPointsBaseInputSpec): src_file = File( exists=True, argstr="-src %s", mandatory=True, desc="filename of source image" ) dest_file = File( exists=True, argstr="-dest %s", mandatory=True, desc="filename of destination image", ) class WarpPointsOutputSpec(TraitedSpec): out_file = File( exists=True, desc="Name of output file, containing the warp as field or " "coefficients.", ) class WarpPoints(CommandLine): """Use FSL `img2imgcoord `_ to transform point sets. Accepts plain text files and vtk files. .. 
Note:: transformation of TrackVis trk files is not yet implemented Examples -------- >>> from nipype.interfaces.fsl import WarpPoints >>> warppoints = WarpPoints() >>> warppoints.inputs.in_coords = 'surf.txt' >>> warppoints.inputs.src_file = 'epi.nii' >>> warppoints.inputs.dest_file = 'T1.nii' >>> warppoints.inputs.warp_file = 'warpfield.nii' >>> warppoints.inputs.coord_mm = True >>> warppoints.cmdline # doctest: +ELLIPSIS 'img2imgcoord -mm -dest T1.nii -src epi.nii -warp warpfield.nii surf.txt' >>> res = warppoints.run() # doctest: +SKIP """ input_spec = WarpPointsInputSpec output_spec = WarpPointsOutputSpec _cmd = "img2imgcoord" _terminal_output = "stream" def __init__(self, command=None, **inputs): self._tmpfile = None self._in_file = None self._outformat = None super(WarpPoints, self).__init__(command=command, **inputs) def _format_arg(self, name, trait_spec, value): if name == "out_file": return "" return super(WarpPoints, self)._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): fname, ext = op.splitext(self.inputs.in_coords) setattr(self, "_in_file", fname) setattr(self, "_outformat", ext[1:]) first_args = super(WarpPoints, self)._parse_inputs( skip=["in_coords", "out_file"] ) second_args = fname + ".txt" if ext in [".vtk", ".trk"]: if self._tmpfile is None: self._tmpfile = tempfile.NamedTemporaryFile( suffix=".txt", dir=os.getcwd(), delete=False ).name second_args = self._tmpfile return first_args + [second_args] def _vtk_to_coords(self, in_file, out_file=None): from ..vtkbase import tvtk from ...interfaces import vtkbase as VTKInfo if VTKInfo.no_tvtk(): raise ImportError("TVTK is required and tvtk package was not found") reader = tvtk.PolyDataReader(file_name=in_file + ".vtk") reader.update() mesh = VTKInfo.vtk_output(reader) points = mesh.points if out_file is None: out_file, _ = op.splitext(in_file) + ".txt" np.savetxt(out_file, points) return out_file def _coords_to_vtk(self, points, out_file): from ..vtkbase import tvtk from 
...interfaces import vtkbase as VTKInfo if VTKInfo.no_tvtk(): raise ImportError("TVTK is required and tvtk package was not found") reader = tvtk.PolyDataReader(file_name=self.inputs.in_file) reader.update() mesh = VTKInfo.vtk_output(reader) mesh.points = points writer = tvtk.PolyDataWriter(file_name=out_file) VTKInfo.configure_input_data(writer, mesh) writer.write() def _trk_to_coords(self, in_file, out_file=None): from nibabel.trackvis import TrackvisFile trkfile = TrackvisFile.from_file(in_file) streamlines = trkfile.streamlines if out_file is None: out_file, _ = op.splitext(in_file) np.savetxt(streamlines, out_file + ".txt") return out_file + ".txt" def _coords_to_trk(self, points, out_file): raise NotImplementedError("trk files are not yet supported") def _overload_extension(self, value, name): if name == "out_file": return "%s.%s" % (value, getattr(self, "_outformat")) def _run_interface(self, runtime): fname = getattr(self, "_in_file") outformat = getattr(self, "_outformat") tmpfile = None if outformat == "vtk": tmpfile = self._tmpfile self._vtk_to_coords(fname, out_file=tmpfile) elif outformat == "trk": tmpfile = self._tmpfile self._trk_to_coords(fname, out_file=tmpfile) runtime = super(WarpPoints, self)._run_interface(runtime) newpoints = np.fromstring("\n".join(runtime.stdout.split("\n")[1:]), sep=" ") if tmpfile is not None: try: os.remove(tmpfile.name) except: pass out_file = self._filename_from_source("out_file") if outformat == "vtk": self._coords_to_vtk(newpoints, out_file) elif outformat == "trk": self._coords_to_trk(newpoints, out_file) else: np.savetxt(out_file, newpoints.reshape(-1, 3)) return runtime class WarpPointsToStdInputSpec(WarpPointsBaseInputSpec): img_file = File( exists=True, argstr="-img %s", mandatory=True, desc=("filename of input image") ) std_file = File( exists=True, argstr="-std %s", mandatory=True, desc=("filename of destination image"), ) premat_file = File( exists=True, argstr="-premat %s", desc=( "filename of pre-warp affine 
transform " "(e.g. example_func2highres.mat)" ), ) class WarpPointsToStd(WarpPoints): """ Use FSL `img2stdcoord `_ to transform point sets to standard space coordinates. Accepts plain text files and vtk files. .. Note:: transformation of TrackVis trk files is not yet implemented Examples -------- >>> from nipype.interfaces.fsl import WarpPointsToStd >>> warppoints = WarpPointsToStd() >>> warppoints.inputs.in_coords = 'surf.txt' >>> warppoints.inputs.img_file = 'T1.nii' >>> warppoints.inputs.std_file = 'mni.nii' >>> warppoints.inputs.warp_file = 'warpfield.nii' >>> warppoints.inputs.coord_mm = True >>> warppoints.cmdline # doctest: +ELLIPSIS 'img2stdcoord -mm -img T1.nii -std mni.nii -warp warpfield.nii surf.txt' >>> res = warppoints.run() # doctest: +SKIP """ input_spec = WarpPointsToStdInputSpec output_spec = WarpPointsOutputSpec _cmd = "img2stdcoord" _terminal_output = "file_split" class WarpPointsFromStdInputSpec(CommandLineInputSpec): img_file = File( exists=True, argstr="-img %s", mandatory=True, desc="filename of a destination image", ) std_file = File( exists=True, argstr="-std %s", mandatory=True, desc="filename of the image in standard space", ) in_coords = File( exists=True, position=-2, argstr="%s", mandatory=True, desc="filename of file containing coordinates", ) xfm_file = File( exists=True, argstr="-xfm %s", xor=["warp_file"], desc="filename of affine transform (e.g. source2dest.mat)", ) warp_file = File( exists=True, argstr="-warp %s", xor=["xfm_file"], desc="filename of warpfield (e.g. " "intermediate2dest_warp.nii.gz)", ) coord_vox = traits.Bool( True, argstr="-vox", xor=["coord_mm"], desc="all coordinates in voxels - default", ) coord_mm = traits.Bool( False, argstr="-mm", xor=["coord_vox"], desc="all coordinates in mm" ) class WarpPointsFromStd(CommandLine): """ Use FSL `std2imgcoord `_ to transform point sets to standard space coordinates. Accepts plain text coordinates files. 
Examples -------- >>> from nipype.interfaces.fsl import WarpPointsFromStd >>> warppoints = WarpPointsFromStd() >>> warppoints.inputs.in_coords = 'surf.txt' >>> warppoints.inputs.img_file = 'T1.nii' >>> warppoints.inputs.std_file = 'mni.nii' >>> warppoints.inputs.warp_file = 'warpfield.nii' >>> warppoints.inputs.coord_mm = True >>> warppoints.cmdline # doctest: +ELLIPSIS 'std2imgcoord -mm -img T1.nii -std mni.nii -warp warpfield.nii surf.txt' >>> res = warppoints.run() # doctest: +SKIP """ input_spec = WarpPointsFromStdInputSpec output_spec = WarpPointsOutputSpec _cmd = "std2imgcoord" def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath("stdout.nipype") return outputs class MotionOutliersInputSpec(FSLCommandInputSpec): in_file = File( exists=True, mandatory=True, desc="unfiltered 4D image", argstr="-i %s" ) out_file = File( argstr="-o %s", name_source="in_file", name_template="%s_outliers.txt", keep_extension=True, desc="output outlier file name", hash_files=False, ) mask = File(exists=True, argstr="-m %s", desc="mask image for calculating metric") metric = traits.Enum( "refrms", ["refrms", "dvars", "refmse", "fd", "fdrms"], argstr="--%s", desc="metrics: refrms - RMS intensity difference to reference volume " "as metric [default metric], refmse - Mean Square Error version " "of refrms (used in original version of fsl_motion_outliers), " "dvars - DVARS, fd - frame displacement, fdrms - FD with RMS " "matrix calculation", ) threshold = traits.Float( argstr="--thresh=%g", desc=( "specify absolute threshold value " "(otherwise use box-plot cutoff = P75 + " "1.5*IQR)" ), ) no_motion_correction = traits.Bool( argstr="--nomoco", desc="do not run motion correction (assumed already done)" ) dummy = traits.Int( argstr="--dummy=%d", desc="number of dummy scans to delete (before running anything and " "creating EVs)", ) out_metric_values = File( argstr="-s %s", name_source="in_file", name_template="%s_metrics.txt", keep_extension=True, 
desc="output metric values (DVARS etc.) file name", hash_files=False, ) out_metric_plot = File( argstr="-p %s", name_source="in_file", name_template="%s_metrics.png", hash_files=False, keep_extension=True, desc="output metric values plot (DVARS etc.) file name", ) class MotionOutliersOutputSpec(TraitedSpec): out_file = File(exists=True) out_metric_values = File(exists=True) out_metric_plot = File(exists=True) class MotionOutliers(FSLCommand): """ Use FSL fsl_motion_outliers`http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FSLMotionOutliers`_ to find outliers in timeseries (4d) data. Examples -------- >>> from nipype.interfaces.fsl import MotionOutliers >>> mo = MotionOutliers() >>> mo.inputs.in_file = "epi.nii" >>> mo.cmdline # doctest: +ELLIPSIS 'fsl_motion_outliers -i epi.nii -o epi_outliers.txt -p epi_metrics.png -s epi_metrics.txt' >>> res = mo.run() # doctest: +SKIP """ input_spec = MotionOutliersInputSpec output_spec = MotionOutliersOutputSpec _cmd = "fsl_motion_outliers" nipype-1.7.0/nipype/interfaces/image.py000066400000000000000000000177321413403311400201120ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from ..utils.filemanip import fname_presuffix from .base import SimpleInterface, TraitedSpec, BaseInterfaceInputSpec, traits, File from .. 
import LooseVersion class RescaleInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc="Skull-stripped image to rescale") ref_file = File(exists=True, mandatory=True, desc="Skull-stripped reference image") invert = traits.Bool(desc="Invert contrast of rescaled image") percentile = traits.Range( low=0.0, high=50.0, value=0.0, usedefault=True, desc="Percentile to use for reference to allow " "for outliers - 1 indicates the 1st and " "99th percentiles in the input file will " "be mapped to the 99th and 1st percentiles " "in the reference; 0 indicates minima and " "maxima will be mapped", ) class RescaleOutputSpec(TraitedSpec): out_file = File(exists=True, desc="Rescaled image") class Rescale(SimpleInterface): """Rescale an image Rescales the non-zero portion of ``in_file`` to match the bounds of the non-zero portion of ``ref_file``. Reference values in the input and reference images are defined by the ``percentile`` parameter, and the reference values in each image are identified and the remaining values are scaled accordingly. In the case of ``percentile == 0``, the reference values are the maxima and minima of each image. If the ``invert`` parameter is set, the input file is inverted prior to rescaling. Examples -------- To use a high-resolution T1w image as a registration target for a T2\\* image, it may be useful to invert the T1w image and rescale to the T2\\* range. Using the 1st and 99th percentiles may reduce the impact of outlier voxels. >>> from nipype.interfaces.image import Rescale >>> invert_t1w = Rescale(invert=True) >>> invert_t1w.inputs.in_file = 'structural.nii' >>> invert_t1w.inputs.ref_file = 'functional.nii' >>> invert_t1w.inputs.percentile = 1. 
>>> res = invert_t1w.run() # doctest: +SKIP """ input_spec = RescaleInputSpec output_spec = RescaleOutputSpec def _run_interface(self, runtime): import numpy as np import nibabel as nb img = nb.load(self.inputs.in_file) data = img.get_fdata() ref_data = nb.load(self.inputs.ref_file).get_fdata() in_mask = data > 0 ref_mask = ref_data > 0 q = [self.inputs.percentile, 100.0 - self.inputs.percentile] in_low, in_high = np.percentile(data[in_mask], q) ref_low, ref_high = np.percentile(ref_data[ref_mask], q) scale_factor = (ref_high - ref_low) / (in_high - in_low) signal = in_high - data if self.inputs.invert else data - in_low out_data = in_mask * (signal * scale_factor + ref_low) suffix = "_inv" if self.inputs.invert else "_rescaled" out_file = fname_presuffix( self.inputs.in_file, suffix=suffix, newpath=runtime.cwd ) img.__class__(out_data, img.affine, img.header).to_filename(out_file) self._results["out_file"] = out_file return runtime _axes = ("RL", "AP", "SI") _orientations = tuple( "".join((x[i], y[j], z[k])) for x in _axes for y in _axes for z in _axes if x != y != z != x for i in (0, 1) for j in (0, 1) for k in (0, 1) ) class ReorientInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc="Input image") orientation = traits.Enum( _orientations, usedefault=True, desc="Target axis orientation" ) class ReorientOutputSpec(TraitedSpec): out_file = File(exists=True, desc="Reoriented image") transform = File( exists=True, desc="Affine transform from input orientation to output" ) class Reorient(SimpleInterface): """Conform an image to a given orientation Flips and reorder the image data array so that the axes match the directions indicated in ``orientation``. The default ``RAS`` orientation corresponds to the first axis being ordered from left to right, the second axis from posterior to anterior, and the third axis from inferior to superior. For oblique images, the original orientation is considered to be the closest plumb orientation. 
No resampling is performed, and thus the output image is not de-obliqued or registered to any other image or template. The effective transform is calculated from the original affine matrix to the reoriented affine matrix. Examples -------- If an image is not reoriented, the original file is not modified .. testsetup:: >>> def print_affine(matrix): ... print(str(matrix).replace(']', ' ').replace('[', ' ')) >>> import numpy as np >>> from nipype.interfaces.image import Reorient >>> reorient = Reorient(orientation='LPS') >>> reorient.inputs.in_file = 'segmentation0.nii.gz' >>> res = reorient.run() >>> res.outputs.out_file 'segmentation0.nii.gz' >>> print_affine(np.loadtxt(res.outputs.transform)) 1. 0. 0. 0. 0. 1. 0. 0. 0. 0. 1. 0. 0. 0. 0. 1. >>> reorient.inputs.orientation = 'RAS' >>> res = reorient.run() >>> res.outputs.out_file # doctest: +ELLIPSIS '.../segmentation0_ras.nii.gz' >>> print_affine(np.loadtxt(res.outputs.transform)) -1. 0. 0. 60. 0. -1. 0. 72. 0. 0. 1. 0. 0. 0. 0. 1. .. testcleanup:: >>> import os >>> os.unlink(res.outputs.out_file) >>> os.unlink(res.outputs.transform) """ input_spec = ReorientInputSpec output_spec = ReorientOutputSpec def _run_interface(self, runtime): import numpy as np import nibabel as nb from nibabel.orientations import axcodes2ornt, ornt_transform, inv_ornt_aff fname = self.inputs.in_file orig_img = nb.load(fname) # Find transform from current (approximate) orientation to # target, in nibabel orientation matrix and affine forms orig_ornt = nb.io_orientation(orig_img.affine) targ_ornt = axcodes2ornt(self.inputs.orientation) transform = ornt_transform(orig_ornt, targ_ornt) affine_xfm = inv_ornt_aff(transform, orig_img.shape) # Check can be eliminated when minimum nibabel version >= 2.4 if LooseVersion(nb.__version__) >= LooseVersion("2.4.0"): reoriented = orig_img.as_reoriented(transform) else: reoriented = _as_reoriented_backport(orig_img, transform) # Image may be reoriented if reoriented is not orig_img: suffix = "_" + 
self.inputs.orientation.lower() out_name = fname_presuffix(fname, suffix=suffix, newpath=runtime.cwd) reoriented.to_filename(out_name) else: out_name = fname mat_name = fname_presuffix( fname, suffix=".mat", newpath=runtime.cwd, use_ext=False ) np.savetxt(mat_name, affine_xfm, fmt="%.08f") self._results["out_file"] = out_name self._results["transform"] = mat_name return runtime def _as_reoriented_backport(img, ornt): """Backport of img.as_reoriented as of nibabel 2.4.0""" import numpy as np import nibabel as nb from nibabel.orientations import inv_ornt_aff if np.array_equal(ornt, [[0, 1], [1, 1], [2, 1]]): return img t_arr = nb.apply_orientation(img.dataobj, ornt) new_aff = img.affine.dot(inv_ornt_aff(ornt, img.shape)) reoriented = img.__class__(t_arr, new_aff, img.header) if isinstance(reoriented, nb.Nifti1Pair): # Also apply the transform to the dim_info fields new_dim = [ None if orig_dim is None else int(ornt[orig_dim, 0]) for orig_dim in img.header.get_dim_info() ] reoriented.header.set_dim_info(*new_dim) return reoriented nipype-1.7.0/nipype/interfaces/io.py000066400000000000000000003232221413403311400174310ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Set of interfaces that allow interaction with data. Currently available interfaces are: DataSource: Generic nifti to named Nifti interface DataSink: Generic named output from interfaces to data store XNATSource: preliminary interface to XNAT To come : XNATSink """ import glob import fnmatch import string import json import os import os.path as op import shutil import subprocess import re import copy import tempfile from os.path import join, dirname from warnings import warn from .. 
import config, logging from ..utils.filemanip import ( copyfile, simplify_list, ensure_list, get_related_files, split_filename, ) from ..utils.misc import human_order_sorted, str2bool from .base import ( TraitedSpec, traits, Str, File, Directory, BaseInterface, InputMultiPath, isdefined, OutputMultiPath, DynamicTraitedSpec, Undefined, BaseInterfaceInputSpec, LibraryBaseInterface, SimpleInterface, ) iflogger = logging.getLogger("nipype.interface") def copytree(src, dst, use_hardlink=False): """Recursively copy a directory tree using nipype.utils.filemanip.copyfile() This is not a thread-safe routine. However, in the case of creating new directories, it checks to see if a particular directory has already been created by another process. """ names = os.listdir(src) try: os.makedirs(dst) except OSError as why: if "File exists" in why.strerror: pass else: raise why errors = [] for name in names: srcname = os.path.join(src, name) dstname = os.path.join(dst, name) try: if os.path.isdir(srcname): copytree(srcname, dstname, use_hardlink) else: copyfile( srcname, dstname, True, hashmethod="content", use_hardlink=use_hardlink, ) except (IOError, os.error) as why: errors.append((srcname, dstname, str(why))) # catch the Error from the recursive copytree so that we can # continue with other files except Exception as err: errors.extend(err.args[0]) if errors: raise Exception(errors) def add_traits(base, names, trait_type=None): """Add traits to a traited class. 
All traits are set to Undefined by default """ if trait_type is None: trait_type = traits.Any undefined_traits = {} for key in names: base.add_trait(key, trait_type) undefined_traits[key] = Undefined base.trait_set(trait_change_notify=False, **undefined_traits) # access each trait for key in names: _ = getattr(base, key) return base def _get_head_bucket(s3_resource, bucket_name): """Try to get the header info of a bucket, in order to check if it exists and its permissions """ import botocore # Try fetch the bucket with the name argument try: s3_resource.meta.client.head_bucket(Bucket=bucket_name) except botocore.exceptions.ClientError as exc: error_code = int(exc.response["Error"]["Code"]) if error_code == 403: err_msg = "Access to bucket: %s is denied; check credentials" % bucket_name raise Exception(err_msg) elif error_code == 404: err_msg = ( "Bucket: %s does not exist; check spelling and try " "again" % bucket_name ) raise Exception(err_msg) else: err_msg = "Unable to connect to bucket: %s. Error message:\n%s" % ( bucket_name, exc, ) except Exception as exc: err_msg = "Unable to connect to bucket: %s. 
Error message:\n%s" % ( bucket_name, exc, ) raise Exception(err_msg) class IOBase(BaseInterface): def _run_interface(self, runtime): return runtime def _list_outputs(self): raise NotImplementedError def _outputs(self): return self._add_output_traits(super(IOBase, self)._outputs()) def _add_output_traits(self, base): return base # Class to track percentage of S3 file upload class ProgressPercentage(object): """ Callable class instsance (via __call__ method) that displays upload percentage of a file to S3 """ def __init__(self, filename): """ """ # Import packages import threading # Initialize data attributes self._filename = filename self._size = float(os.path.getsize(filename)) self._seen_so_far = 0 self._lock = threading.Lock() def __call__(self, bytes_amount): """ """ # Import packages import sys # With the lock on, print upload status with self._lock: self._seen_so_far += bytes_amount if self._size != 0: percentage = (self._seen_so_far // self._size) * 100 else: percentage = 0 progress_str = "%d / %d (%.2f%%)\r" % ( self._seen_so_far, self._size, percentage, ) # Write to stdout sys.stdout.write(progress_str) sys.stdout.flush() # DataSink inputs class DataSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): """ """ # Init inputspec data attributes base_directory = Str(desc="Path to the base directory for storing data.") container = Str(desc="Folder within base directory in which to store output") parameterization = traits.Bool( True, usedefault=True, desc="store output in parametrized structure" ) strip_dir = Str(desc="path to strip out of filename") substitutions = InputMultiPath( traits.Tuple(Str, Str), desc=( "List of 2-tuples reflecting string " "to substitute and string to replace " "it with" ), ) regexp_substitutions = InputMultiPath( traits.Tuple(Str, Str), desc=( "List of 2-tuples reflecting a pair of a " "Python regexp pattern and a replacement " "string. 
Invoked after string `substitutions`" ), ) _outputs = traits.Dict(Str, value={}, usedefault=True) remove_dest_dir = traits.Bool( False, usedefault=True, desc="remove dest directory when copying dirs" ) # AWS S3 data attributes creds_path = Str( desc="Filepath to AWS credentials file for S3 bucket " "access; if not specified, the credentials will " "be taken from the AWS_ACCESS_KEY_ID and " "AWS_SECRET_ACCESS_KEY environment variables" ) encrypt_bucket_keys = traits.Bool( desc="Flag indicating whether to use S3 " "server-side AES-256 encryption" ) # Set this if user wishes to override the bucket with their own bucket = traits.Any(desc="Boto3 S3 bucket for manual override of bucket") # Set this if user wishes to have local copy of files as well local_copy = Str(desc="Copy files locally as well as to S3 bucket") # Set call-able inputs attributes def __setattr__(self, key, value): if key not in self.copyable_trait_names(): if not isdefined(value): super(DataSinkInputSpec, self).__setattr__(key, value) self._outputs[key] = value else: if key in self._outputs: self._outputs[key] = value super(DataSinkInputSpec, self).__setattr__(key, value) # DataSink outputs class DataSinkOutputSpec(TraitedSpec): # Init out file out_file = traits.Any(desc="datasink output") # Custom DataSink class class DataSink(IOBase): """ Generic datasink module to store structured outputs. Primarily for use within a workflow. This interface allows arbitrary creation of input attributes. The names of these attributes define the directory structure to create for storage of the files or directories. The attributes take the following form:: string[[.[@]]string[[.[@]]string]] ... where parts between ``[]`` are optional. An attribute such as contrasts.@con will create a 'contrasts' directory to store the results linked to the attribute. If the ``@`` is left out, such as in 'contrasts.con', a subdirectory 'con' will be created under 'contrasts'. 
The general form of the output is:: 'base_directory/container/parameterization/destloc/filename' ``destloc = string[[.[@]]string[[.[@]]string]]`` and ``filename`` come from the input to the connect statement. .. warning:: This is not a thread-safe node because it can write to a common shared location. It will not complain when it overwrites a file. .. note:: If both substitutions and regexp_substitutions are used, then substitutions are applied first followed by regexp_substitutions. This interface **cannot** be used in a MapNode as the inputs are defined only when the connect statement is executed. Examples -------- >>> ds = DataSink() >>> ds.inputs.base_directory = 'results_dir' >>> ds.inputs.container = 'subject' >>> ds.inputs.structural = 'structural.nii' >>> setattr(ds.inputs, 'contrasts.@con', ['cont1.nii', 'cont2.nii']) >>> setattr(ds.inputs, 'contrasts.alt', ['cont1a.nii', 'cont2a.nii']) >>> ds.run() # doctest: +SKIP To use DataSink in a MapNode, its inputs have to be defined at the time the interface is created. 
>>> ds = DataSink(infields=['contasts.@con']) >>> ds.inputs.base_directory = 'results_dir' >>> ds.inputs.container = 'subject' >>> ds.inputs.structural = 'structural.nii' >>> setattr(ds.inputs, 'contrasts.@con', ['cont1.nii', 'cont2.nii']) >>> setattr(ds.inputs, 'contrasts.alt', ['cont1a.nii', 'cont2a.nii']) >>> ds.run() # doctest: +SKIP """ # Give obj .inputs and .outputs input_spec = DataSinkInputSpec output_spec = DataSinkOutputSpec # Initialization method to set up datasink def __init__(self, infields=None, force_run=True, **kwargs): """ Parameters ---------- infields : list of str Indicates the input fields to be dynamically created """ super(DataSink, self).__init__(**kwargs) undefined_traits = {} # used for mandatory inputs check self._infields = infields if infields: for key in infields: self.inputs.add_trait(key, traits.Any) self.inputs._outputs[key] = Undefined undefined_traits[key] = Undefined self.inputs.trait_set(trait_change_notify=False, **undefined_traits) if force_run: self._always_run = True # Get destination paths def _get_dst(self, src): # If path is directory with trailing os.path.sep, # then remove that for a more robust behavior src = src.rstrip(os.path.sep) path, fname = os.path.split(src) if self.inputs.parameterization: dst = path if isdefined(self.inputs.strip_dir): dst = dst.replace(self.inputs.strip_dir, "") folders = [ folder for folder in dst.split(os.path.sep) if folder.startswith("_") ] dst = os.path.sep.join(folders) if fname: dst = os.path.join(dst, fname) else: if fname: dst = fname else: dst = path.split(os.path.sep)[-1] if dst[0] == os.path.sep: dst = dst[1:] return dst # Substitute paths in substitutions dictionary parameter def _substitute(self, pathstr): pathstr_ = pathstr if isdefined(self.inputs.substitutions): for key, val in self.inputs.substitutions: oldpathstr = pathstr pathstr = pathstr.replace(key, val) if pathstr != oldpathstr: iflogger.debug( "sub.str: %s -> %s using %r -> %r", oldpathstr, pathstr, key, val, ) if 
isdefined(self.inputs.regexp_substitutions): for key, val in self.inputs.regexp_substitutions: oldpathstr = pathstr pathstr, _ = re.subn(key, val, pathstr) if pathstr != oldpathstr: iflogger.debug( "sub.regexp: %s -> %s using %r -> %r", oldpathstr, pathstr, key, val, ) if pathstr_ != pathstr: iflogger.info("sub: %s -> %s", pathstr_, pathstr) return pathstr # Check for s3 in base directory def _check_s3_base_dir(self): """ Method to see if the datasink's base directory specifies an S3 bucket path; if it does, it parses the path for the bucket name in the form 's3://bucket_name/...' and returns it Parameters ---------- Returns ------- s3_flag : boolean flag indicating whether the base_directory contained an S3 bucket path bucket_name : string name of the S3 bucket to connect to; if the base directory is not a valid S3 path, defaults to '' """ s3_str = "s3://" bucket_name = "" base_directory = self.inputs.base_directory if not isdefined(base_directory): s3_flag = False return s3_flag, bucket_name s3_flag = base_directory.lower().startswith(s3_str) if s3_flag: bucket_name = base_directory[len(s3_str) :].partition("/")[0] return s3_flag, bucket_name # Function to return AWS secure environment variables def _return_aws_keys(self): """ Method to return AWS access key id and secret access key using credentials found in a local file. 
Parameters ---------- self : nipype.interfaces.io.DataSink self for instance method Returns ------- aws_access_key_id : string string of the AWS access key ID aws_secret_access_key : string string of the AWS secret access key """ # Import packages import os # Init variables creds_path = self.inputs.creds_path # Check if creds exist if creds_path and os.path.exists(creds_path): with open(creds_path, "r") as creds_in: # Grab csv rows row1 = creds_in.readline() row2 = creds_in.readline() # Are they root or user keys if "User Name" in row1: # And split out for keys aws_access_key_id = row2.split(",")[1] aws_secret_access_key = row2.split(",")[2] elif "AWSAccessKeyId" in row1: # And split out for keys aws_access_key_id = row1.split("=")[1] aws_secret_access_key = row2.split("=")[1] else: err_msg = "Credentials file not recognized, check file is correct" raise Exception(err_msg) # Strip any carriage return/line feeds aws_access_key_id = aws_access_key_id.replace("\r", "").replace("\n", "") aws_secret_access_key = aws_secret_access_key.replace("\r", "").replace( "\n", "" ) else: aws_access_key_id = os.getenv("AWS_ACCESS_KEY_ID") aws_secret_access_key = os.getenv("AWS_SECRET_ACCESS_KEY") # Return keys return aws_access_key_id, aws_secret_access_key # Fetch bucket object def _fetch_bucket(self, bucket_name): """ Method to return a bucket object which can be used to interact with an AWS S3 bucket using credentials found in a local file. Parameters ---------- self : nipype.interfaces.io.DataSink self for instance method bucket_name : string string corresponding to the name of the bucket on S3 Returns ------- bucket : boto3.resources.factory.s3.Bucket boto3 s3 Bucket object which is used to interact with files in an S3 bucket on AWS """ # Import packages try: import boto3 import botocore except ImportError as exc: err_msg = "Boto3 package is not installed - install boto3 and " "try again." 
raise Exception(err_msg) # Init variables creds_path = self.inputs.creds_path # Get AWS credentials try: aws_access_key_id, aws_secret_access_key = self._return_aws_keys() except Exception as exc: err_msg = ( "There was a problem extracting the AWS credentials " "from the credentials file provided: %s. Error:\n%s" % (creds_path, exc) ) raise Exception(err_msg) # Try and get AWS credentials if a creds_path is specified if aws_access_key_id and aws_secret_access_key: # Init connection iflogger.info( "Connecting to S3 bucket: %s with credentials...", bucket_name ) # Use individual session for each instance of DataSink # Better when datasinks are being used in multi-threading, see: # http://boto3.readthedocs.org/en/latest/guide/resources.html#multithreading session = boto3.session.Session( aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, ) else: iflogger.info("Connecting to S3 bucket: %s with IAM role...", bucket_name) # Lean on AWS environment / IAM role authentication and authorization session = boto3.session.Session() s3_resource = session.resource("s3", use_ssl=True) # And try fetch the bucket with the name argument try: _get_head_bucket(s3_resource, bucket_name) except Exception as exc: # Try to connect anonymously s3_resource.meta.client.meta.events.register( "choose-signer.s3.*", botocore.handlers.disable_signing ) iflogger.info("Connecting to AWS: %s anonymously...", bucket_name) _get_head_bucket(s3_resource, bucket_name) # Explicitly declare a secure SSL connection for bucket object bucket = s3_resource.Bucket(bucket_name) # Return the bucket return bucket # Send up to S3 method def _upload_to_s3(self, bucket, src, dst): """ Method to upload outputs to S3 bucket instead of on local disk """ # Import packages import hashlib import os from botocore.exceptions import ClientError s3_str = "s3://" s3_prefix = s3_str + bucket.name # Explicitly lower-case the "s3" if dst.lower().startswith(s3_str): dst = s3_str + dst[len(s3_str) :] # 
If src is a directory, collect files (this assumes dst is a dir too) if os.path.isdir(src): src_files = [] for root, dirs, files in os.walk(src): src_files.extend([os.path.join(root, fil) for fil in files]) # Make the dst files have the dst folder as base dir dst_files = [os.path.join(dst, src_f.split(src)[1]) for src_f in src_files] else: src_files = [src] dst_files = [dst] # Iterate over src and copy to dst for src_idx, src_f in enumerate(src_files): # Get destination filename/keyname dst_f = dst_files[src_idx] dst_k = dst_f.replace(s3_prefix, "").lstrip("/") # See if same file is already up there try: dst_obj = bucket.Object(key=dst_k) dst_md5 = dst_obj.e_tag.strip('"') # See if same file is already there src_read = open(src_f, "rb").read() src_md5 = hashlib.md5(src_read).hexdigest() # Move to next loop iteration if dst_md5 == src_md5: iflogger.info("File %s already exists on S3, skipping...", dst_f) continue else: iflogger.info("Overwriting previous S3 file...") except ClientError: iflogger.info("New file to S3") # Copy file up to S3 (either encrypted or not) iflogger.info( "Uploading %s to S3 bucket, %s, as %s...", src_f, bucket.name, dst_f ) if self.inputs.encrypt_bucket_keys: extra_args = {"ServerSideEncryption": "AES256"} else: extra_args = {} bucket.upload_file( src_f, dst_k, ExtraArgs=extra_args, Callback=ProgressPercentage(src_f) ) # List outputs, main run routine def _list_outputs(self): """Execute this module.""" # Init variables outputs = self.output_spec().get() out_files = [] # Use hardlink use_hardlink = str2bool(config.get("execution", "try_hard_link_datasink")) # Set local output directory if specified if isdefined(self.inputs.local_copy): outdir = self.inputs.local_copy else: outdir = self.inputs.base_directory # If base directory isn't given, assume current directory if not isdefined(outdir): outdir = "." 
# Check if base directory reflects S3 bucket upload s3_flag, bucket_name = self._check_s3_base_dir() if s3_flag: s3dir = self.inputs.base_directory # If user overrides bucket object, use that if self.inputs.bucket: bucket = self.inputs.bucket # Otherwise fetch bucket object using name else: try: bucket = self._fetch_bucket(bucket_name) # If encountering an exception during bucket access, set output # base directory to a local folder except Exception as exc: s3dir = "" if not isdefined(self.inputs.local_copy): local_out_exception = os.path.join( os.path.expanduser("~"), "s3_datasink_" + bucket_name ) outdir = local_out_exception # Log local copying directory iflogger.info( "Access to S3 failed! Storing outputs locally at: " "%s\nError: %s", outdir, exc, ) else: s3dir = "" # If container input is given, append that to outdir if isdefined(self.inputs.container): outdir = os.path.join(outdir, self.inputs.container) s3dir = os.path.join(s3dir, self.inputs.container) # If sinking to local folder if outdir != s3dir: outdir = os.path.abspath(outdir) # Create the directory if it doesn't exist if not os.path.exists(outdir): try: os.makedirs(outdir) except OSError as inst: if "File exists" in inst.strerror: pass else: raise (inst) # Iterate through outputs attributes {key : path(s)} for key, files in list(self.inputs._outputs.items()): if not isdefined(files): continue iflogger.debug("key: %s files: %s", key, str(files)) files = ensure_list(files) tempoutdir = outdir if s3_flag: s3tempoutdir = s3dir for d in key.split("."): if d[0] == "@": continue tempoutdir = os.path.join(tempoutdir, d) if s3_flag: s3tempoutdir = os.path.join(s3tempoutdir, d) # flattening list if isinstance(files, list): if isinstance(files[0], list): files = [item for sublist in files for item in sublist] # Iterate through passed-in source files for src in ensure_list(files): # Format src and dst files src = os.path.abspath(src) if not os.path.isfile(src): src = os.path.join(src, "") dst = 
self._get_dst(src) if s3_flag: s3dst = os.path.join(s3tempoutdir, dst) s3dst = self._substitute(s3dst) dst = os.path.join(tempoutdir, dst) dst = self._substitute(dst) path, _ = os.path.split(dst) # If we're uploading to S3 if s3_flag: self._upload_to_s3(bucket, src, s3dst) out_files.append(s3dst) # Otherwise, copy locally src -> dst if not s3_flag or isdefined(self.inputs.local_copy): # Create output directory if it doesnt exist if not os.path.exists(path): try: os.makedirs(path) except OSError as inst: if "File exists" in inst.strerror: pass else: raise (inst) # If src is a file, copy it to dst if os.path.isfile(src): iflogger.debug("copyfile: %s %s", src, dst) copyfile( src, dst, copy=True, hashmethod="content", use_hardlink=use_hardlink, ) out_files.append(dst) # If src is a directory, copy entire contents to dst dir elif os.path.isdir(src): if os.path.exists(dst) and self.inputs.remove_dest_dir: iflogger.debug("removing: %s", dst) shutil.rmtree(dst) iflogger.debug("copydir: %s %s", src, dst) copytree(src, dst) out_files.append(dst) # Return outputs dictionary outputs["out_file"] = out_files return outputs class S3DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): anon = traits.Bool( False, usedefault=True, desc="Use anonymous connection to s3. If this is set to True, boto may print" " a urlopen error, but this does not prevent data from being downloaded.", ) region = Str("us-east-1", usedefault=True, desc="Region of s3 bucket") bucket = Str(mandatory=True, desc="Amazon S3 bucket where your data is stored") bucket_path = Str( "", usedefault=True, desc="Location within your bucket for subject data." ) local_directory = Directory( exists=True, desc="Path to the local directory for subject data to be downloaded " "and accessed. 
Should be on HDFS for Spark jobs.", ) raise_on_empty = traits.Bool( True, usedefault=True, desc="Generate exception if list is empty for a given field", ) sort_filelist = traits.Bool( mandatory=True, desc="Sort the filelist that matches the template" ) template = Str( mandatory=True, desc="Layout used to get files. Relative to bucket_path if defined." "Uses regex rather than glob style formatting.", ) template_args = traits.Dict( key_trait=Str, value_trait=traits.List(traits.List), desc="Information to plug into template", ) class S3DataGrabber(LibraryBaseInterface, IOBase): """ Pull data from an Amazon S3 Bucket. Generic datagrabber module that wraps around glob in an intelligent way for neuroimaging tasks to grab files from Amazon S3 Works exactly like DataGrabber, except, you must specify an S3 "bucket" and "bucket_path" to search for your data and a "local_directory" to store the data. "local_directory" should be a location on HDFS for Spark jobs. Additionally, "template" uses regex style formatting, rather than the glob-style found in the original DataGrabber. Examples -------- >>> s3grab = S3DataGrabber(infields=['subj_id'], outfields=["func", "anat"]) >>> s3grab.inputs.bucket = 'openneuro' >>> s3grab.inputs.sort_filelist = True >>> s3grab.inputs.template = '*' >>> s3grab.inputs.anon = True >>> s3grab.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/' >>> s3grab.inputs.local_directory = '/tmp' >>> s3grab.inputs.field_template = {'anat': '%s/anat/%s_T1w.nii.gz', ... 'func': '%s/func/%s_task-simon_run-1_bold.nii.gz'} >>> s3grab.inputs.template_args = {'anat': [['subj_id', 'subj_id']], ... 
'func': [['subj_id', 'subj_id']]} >>> s3grab.inputs.subj_id = 'sub-01' >>> s3grab.run() # doctest: +SKIP """ input_spec = S3DataGrabberInputSpec output_spec = DynamicTraitedSpec _always_run = True _pkg = "boto" def __init__(self, infields=None, outfields=None, **kwargs): """ Parameters ---------- infields : list of str Indicates the input fields to be dynamically created outfields: list of str Indicates output fields to be dynamically created See class examples for usage """ if not outfields: outfields = ["outfiles"] super(S3DataGrabber, self).__init__(**kwargs) undefined_traits = {} # used for mandatory inputs check self._infields = infields self._outfields = outfields if infields: for key in infields: self.inputs.add_trait(key, traits.Any) undefined_traits[key] = Undefined # add ability to insert field specific templates self.inputs.add_trait( "field_template", traits.Dict( traits.Enum(outfields), desc="arguments that fit into template" ), ) undefined_traits["field_template"] = Undefined if not isdefined(self.inputs.template_args): self.inputs.template_args = {} for key in outfields: if key not in self.inputs.template_args: if infields: self.inputs.template_args[key] = [infields] else: self.inputs.template_args[key] = [] self.inputs.trait_set(trait_change_notify=False, **undefined_traits) def _add_output_traits(self, base): """ S3 specific: Downloads relevant files to a local folder specified Using traits.Any instead out OutputMultiPath till add_trait bug is fixed. 
""" return add_traits(base, list(self.inputs.template_args.keys())) def _list_outputs(self): # infields are mandatory, however I could not figure out how to set 'mandatory' flag dynamically # hence manual check import boto if self._infields: for key in self._infields: value = getattr(self.inputs, key) if not isdefined(value): msg = ( "%s requires a value for input '%s' because it was listed in 'infields'" % (self.__class__.__name__, key) ) raise ValueError(msg) outputs = {} # get list of all files in s3 bucket conn = boto.connect_s3(anon=self.inputs.anon) bkt = conn.get_bucket(self.inputs.bucket) bkt_files = list(k.key for k in bkt.list(prefix=self.inputs.bucket_path)) # keys are outfields, args are template args for the outfield for key, args in list(self.inputs.template_args.items()): outputs[key] = [] template = self.inputs.template if ( hasattr(self.inputs, "field_template") and isdefined(self.inputs.field_template) and key in self.inputs.field_template ): template = self.inputs.field_template[ key ] # template override for multiple outfields if isdefined(self.inputs.bucket_path): template = os.path.join(self.inputs.bucket_path, template) if not args: filelist = [] for fname in bkt_files: if re.match(template, fname): filelist.append(fname) if len(filelist) == 0: msg = "Output key: %s Template: %s returned no files" % ( key, template, ) if self.inputs.raise_on_empty: raise IOError(msg) else: warn(msg) else: if self.inputs.sort_filelist: filelist = human_order_sorted(filelist) outputs[key] = simplify_list(filelist) for argnum, arglist in enumerate(args): maxlen = 1 for arg in arglist: if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): if (maxlen > 1) and (len(arg) != maxlen): raise ValueError( "incompatible number of arguments for %s" % key ) if len(arg) > maxlen: maxlen = len(arg) outfiles = [] for i in range(maxlen): argtuple = [] for arg in arglist: if isinstance(arg, (str, bytes)) and 
hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): argtuple.append(arg[i]) else: argtuple.append(arg) filledtemplate = template if argtuple: try: filledtemplate = template % tuple(argtuple) except TypeError as e: raise TypeError( f"{e}: Template {template} failed to convert " f"with args {tuple(argtuple)}" ) outfiles = [] for fname in bkt_files: if re.match(filledtemplate, fname): outfiles.append(fname) if len(outfiles) == 0: msg = "Output key: %s Template: %s returned no files" % ( key, filledtemplate, ) if self.inputs.raise_on_empty: raise IOError(msg) else: warn(msg) outputs[key].append(None) else: if self.inputs.sort_filelist: outfiles = human_order_sorted(outfiles) outputs[key].append(simplify_list(outfiles)) if any([val is None for val in outputs[key]]): outputs[key] = [] if len(outputs[key]) == 0: outputs[key] = None elif len(outputs[key]) == 1: outputs[key] = outputs[key][0] # Outputs are currently stored as locations on S3. # We must convert to the local location specified # and download the files. for key, val in outputs.items(): # This will basically be either list-like or string-like: # if it's an instance of a list, we'll iterate through it. # If it isn't, it's string-like (string, unicode), we # convert that value directly. if isinstance(val, (list, tuple, set)): for i, path in enumerate(val): outputs[key][i] = self.s3tolocal(path, bkt) else: outputs[key] = self.s3tolocal(val, bkt) return outputs # Takes an s3 address and downloads the file to a local # directory, returning the local path. 
def s3tolocal(self, s3path, bkt): import boto # path formatting local_directory = str(self.inputs.local_directory) bucket_path = str(self.inputs.bucket_path) template = str(self.inputs.template) if not os.path.basename(local_directory) == "": local_directory += "/" if not os.path.basename(bucket_path) == "": bucket_path += "/" if template[0] == "/": template = template[1:] localpath = s3path.replace(bucket_path, local_directory) localdir = os.path.split(localpath)[0] if not os.path.exists(localdir): os.makedirs(localdir) k = boto.s3.key.Key(bkt) k.key = s3path k.get_contents_to_filename(localpath) return localpath class DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): base_directory = Directory( exists=True, desc="Path to the base directory consisting of subject data." ) raise_on_empty = traits.Bool( True, usedefault=True, desc="Generate exception if list is empty for a given field", ) drop_blank_outputs = traits.Bool( False, usedefault=True, desc="Remove ``None`` entries from output lists" ) sort_filelist = traits.Bool( mandatory=True, desc="Sort the filelist that matches the template" ) template = Str( mandatory=True, desc="Layout used to get files. relative to base directory if defined", ) template_args = traits.Dict( key_trait=Str, value_trait=traits.List(traits.List), desc="Information to plug into template", ) class DataGrabber(IOBase): """ Find files on a filesystem. Generic datagrabber module that wraps around glob in an intelligent way for neuroimaging tasks to grab files .. 
important:: Doesn't support directories currently Examples -------- >>> from nipype.interfaces.io import DataGrabber Pick all files from current directory >>> dg = DataGrabber() >>> dg.inputs.template = '*' Pick file foo/foo.nii from current directory >>> dg.inputs.template = '%s/%s.dcm' >>> dg.inputs.template_args['outfiles']=[['dicomdir','123456-1-1.dcm']] Same thing but with dynamically created fields >>> dg = DataGrabber(infields=['arg1','arg2']) >>> dg.inputs.template = '%s/%s.nii' >>> dg.inputs.arg1 = 'foo' >>> dg.inputs.arg2 = 'foo' however this latter form can be used with iterables and iterfield in a pipeline. Dynamically created, user-defined input and output fields >>> dg = DataGrabber(infields=['sid'], outfields=['func','struct','ref']) >>> dg.inputs.base_directory = '.' >>> dg.inputs.template = '%s/%s.nii' >>> dg.inputs.template_args['func'] = [['sid',['f3','f5']]] >>> dg.inputs.template_args['struct'] = [['sid',['struct']]] >>> dg.inputs.template_args['ref'] = [['sid','ref']] >>> dg.inputs.sid = 's1' Change the template only for output field struct. 
The rest use the general template >>> dg.inputs.field_template = dict(struct='%s/struct.nii') >>> dg.inputs.template_args['struct'] = [['sid']] """ input_spec = DataGrabberInputSpec output_spec = DynamicTraitedSpec _always_run = True def __init__(self, infields=None, outfields=None, **kwargs): """ Parameters ---------- infields : list of str Indicates the input fields to be dynamically created outfields: list of str Indicates output fields to be dynamically created See class examples for usage """ if not outfields: outfields = ["outfiles"] super(DataGrabber, self).__init__(**kwargs) undefined_traits = {} # used for mandatory inputs check self._infields = infields self._outfields = outfields if infields: for key in infields: self.inputs.add_trait(key, traits.Any) undefined_traits[key] = Undefined # add ability to insert field specific templates self.inputs.add_trait( "field_template", traits.Dict( traits.Enum(outfields), desc="arguments that fit into template" ), ) undefined_traits["field_template"] = Undefined if not isdefined(self.inputs.template_args): self.inputs.template_args = {} for key in outfields: if key not in self.inputs.template_args: if infields: self.inputs.template_args[key] = [infields] else: self.inputs.template_args[key] = [] self.inputs.trait_set(trait_change_notify=False, **undefined_traits) def _add_output_traits(self, base): """ Using traits.Any instead out OutputMultiPath till add_trait bug is fixed. 
""" return add_traits(base, list(self.inputs.template_args.keys())) def _list_outputs(self): # infields are mandatory, however I could not figure out how to set 'mandatory' flag dynamically # hence manual check if self._infields: for key in self._infields: value = getattr(self.inputs, key) if not isdefined(value): msg = ( "%s requires a value for input '%s' because it was listed in 'infields'" % (self.__class__.__name__, key) ) raise ValueError(msg) outputs = {} for key, args in list(self.inputs.template_args.items()): outputs[key] = [] template = self.inputs.template if ( hasattr(self.inputs, "field_template") and isdefined(self.inputs.field_template) and key in self.inputs.field_template ): template = self.inputs.field_template[key] if isdefined(self.inputs.base_directory): template = os.path.join( os.path.abspath(self.inputs.base_directory), template ) else: template = os.path.abspath(template) if not args: filelist = glob.glob(template) if len(filelist) == 0: msg = "Output key: %s Template: %s returned no files" % ( key, template, ) if self.inputs.raise_on_empty: raise IOError(msg) else: warn(msg) else: if self.inputs.sort_filelist: filelist = human_order_sorted(filelist) outputs[key] = simplify_list(filelist) for argnum, arglist in enumerate(args): maxlen = 1 for arg in arglist: if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): if (maxlen > 1) and (len(arg) != maxlen): raise ValueError( "incompatible number of arguments for %s" % key ) if len(arg) > maxlen: maxlen = len(arg) outfiles = [] for i in range(maxlen): argtuple = [] for arg in arglist: if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): argtuple.append(arg[i]) else: argtuple.append(arg) filledtemplate = template if argtuple: try: filledtemplate = template % tuple(argtuple) except TypeError as e: raise TypeError( f"{e}: Template {template} failed to convert " 
f"with args {tuple(argtuple)}" ) outfiles = glob.glob(filledtemplate) if len(outfiles) == 0: msg = "Output key: %s Template: %s returned no files" % ( key, filledtemplate, ) if self.inputs.raise_on_empty: raise IOError(msg) else: warn(msg) outputs[key].append(None) else: if self.inputs.sort_filelist: outfiles = human_order_sorted(outfiles) outputs[key].append(simplify_list(outfiles)) if self.inputs.drop_blank_outputs: outputs[key] = [x for x in outputs[key] if x is not None] else: if any([val is None for val in outputs[key]]): outputs[key] = [] if len(outputs[key]) == 0: outputs[key] = None elif len(outputs[key]) == 1: outputs[key] = outputs[key][0] return outputs class SelectFilesInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): base_directory = Directory(exists=True, desc="Root path common to templates.") sort_filelist = traits.Bool( True, usedefault=True, desc="When matching mutliple files, return them" " in sorted order.", ) raise_on_empty = traits.Bool( True, usedefault=True, desc="Raise an exception if a template pattern " "matches no files.", ) force_lists = traits.Either( traits.Bool(), traits.List(Str()), default=False, usedefault=True, desc=( "Whether to return outputs as a list even" " when only one file matches the template. " "Either a boolean that applies to all output " "fields or a list of output field names to " "coerce to a list" ), ) class SelectFiles(IOBase): """ Flexibly collect data from disk to feed into workflows. This interface uses Python's {}-based string formatting syntax to plug values (possibly known only at workflow execution time) into string templates and collect files from persistant storage. These templates can also be combined with glob wildcards (``*``, ``?``) and character ranges (``[...]``). The field names in the formatting template (i.e. the terms in braces) will become inputs fields on the interface, and the keys in the templates dictionary will form the output fields. 
Examples -------- >>> import pprint >>> from nipype import SelectFiles, Node >>> templates={"T1": "{subject_id}/struct/T1.nii", ... "epi": "{subject_id}/func/f[0,1].nii"} >>> dg = Node(SelectFiles(templates), "selectfiles") >>> dg.inputs.subject_id = "subj1" >>> pprint.pprint(dg.outputs.get()) # doctest: {'T1': , 'epi': } Note that SelectFiles does not support lists as inputs for the dynamic fields. Attempts to do so may lead to unexpected results because brackets also express glob character ranges. For example, >>> templates["epi"] = "{subject_id}/func/f{run}.nii" >>> dg = Node(SelectFiles(templates), "selectfiles") >>> dg.inputs.subject_id = "subj1" >>> dg.inputs.run = [10, 11] would match f0.nii or f1.nii, not f10.nii or f11.nii. """ input_spec = SelectFilesInputSpec output_spec = DynamicTraitedSpec _always_run = True def __init__(self, templates, **kwargs): """Create an instance with specific input fields. Parameters ---------- templates : dictionary Mapping from string keys to string template values. The keys become output fields on the interface. The templates should use {}-formatting syntax, where the names in curly braces become inputs fields on the interface. Format strings can also use glob wildcards to match multiple files. At runtime, the values of the interface inputs will be plugged into these templates, and the resulting strings will be used to select files. 
""" super(SelectFiles, self).__init__(**kwargs) # Infer the infields and outfields from the template infields = [] for name, template in list(templates.items()): for _, field_name, _, _ in string.Formatter().parse(template): if field_name is not None: field_name = re.match("\w+", field_name).group() if field_name not in infields: infields.append(field_name) self._infields = infields self._outfields = list(templates) self._templates = templates # Add the dynamic input fields undefined_traits = {} for field in infields: self.inputs.add_trait(field, traits.Any) undefined_traits[field] = Undefined self.inputs.trait_set(trait_change_notify=False, **undefined_traits) def _add_output_traits(self, base): """Add the dynamic output fields""" return add_traits(base, list(self._templates.keys())) def _list_outputs(self): """Find the files and expose them as interface outputs.""" outputs = {} info = dict( [ (k, v) for k, v in list(self.inputs.__dict__.items()) if k in self._infields ] ) force_lists = self.inputs.force_lists if isinstance(force_lists, bool): force_lists = self._outfields if force_lists else [] bad_fields = set(force_lists) - set(self._outfields) if bad_fields: bad_fields = ", ".join(list(bad_fields)) plural = "s" if len(bad_fields) > 1 else "" verb = "were" if len(bad_fields) > 1 else "was" msg = ( "The field%s '%s' %s set in 'force_lists' and not in " "'templates'." 
) % (plural, bad_fields, verb) raise ValueError(msg) for field, template in list(self._templates.items()): find_dirs = template[-1] == os.sep # Build the full template path if isdefined(self.inputs.base_directory): template = op.abspath(op.join(self.inputs.base_directory, template)) else: template = op.abspath(template) # re-add separator if searching exclusively for directories if find_dirs: template += os.sep # Fill in the template and glob for files filled_template = template.format(**info) filelist = glob.glob(filled_template) # Handle the case where nothing matched if not filelist: msg = "No files were found matching %s template: %s" % ( field, filled_template, ) if self.inputs.raise_on_empty: raise IOError(msg) else: warn(msg) # Possibly sort the list if self.inputs.sort_filelist: filelist = human_order_sorted(filelist) # Handle whether this must be a list or not if field not in force_lists: filelist = simplify_list(filelist) outputs[field] = filelist return outputs class DataFinderInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): root_paths = traits.Either(traits.List(), Str(), mandatory=True) match_regex = Str( "(.+)", usedefault=True, desc=("Regular expression for matching paths.") ) ignore_regexes = traits.List( desc=( "List of regular expressions, " "if any match the path it will be " "ignored." ) ) max_depth = traits.Int(desc="The maximum depth to search beneath " "the root_paths") min_depth = traits.Int(desc="The minimum depth to search beneath " "the root paths") unpack_single = traits.Bool( False, usedefault=True, desc="Unpack single results from list" ) class DataFinder(IOBase): r"""Search for paths that match a given regular expression. Allows a less proscriptive approach to gathering input files compared to DataGrabber. Will recursively search any subdirectories by default. This can be limited with the min/max depth options. Matched paths are available in the output 'out_paths'. 
Any named groups of captured text from the regular expression are also available as ouputs of the same name. Examples -------- >>> from nipype.interfaces.io import DataFinder >>> df = DataFinder() >>> df.inputs.root_paths = '.' >>> df.inputs.match_regex = r'.+/(?P.+(qT1|ep2d_fid_T1).+)/(?P.+)\.nii.gz' >>> result = df.run() # doctest: +SKIP >>> result.outputs.out_paths # doctest: +SKIP ['./027-ep2d_fid_T1_Gd4/acquisition.nii.gz', './018-ep2d_fid_T1_Gd2/acquisition.nii.gz', './016-ep2d_fid_T1_Gd1/acquisition.nii.gz', './013-ep2d_fid_T1_pre/acquisition.nii.gz'] >>> result.outputs.series_dir # doctest: +SKIP ['027-ep2d_fid_T1_Gd4', '018-ep2d_fid_T1_Gd2', '016-ep2d_fid_T1_Gd1', '013-ep2d_fid_T1_pre'] >>> result.outputs.basename # doctest: +SKIP ['acquisition', 'acquisition' 'acquisition', 'acquisition'] """ input_spec = DataFinderInputSpec output_spec = DynamicTraitedSpec _always_run = True def _match_path(self, target_path): # Check if we should ignore the path for ignore_re in self.ignore_regexes: if ignore_re.search(target_path): return # Check if we can match the path match = self.match_regex.search(target_path) if match is not None: match_dict = match.groupdict() if self.result is None: self.result = {"out_paths": []} for key in list(match_dict.keys()): self.result[key] = [] self.result["out_paths"].append(target_path) for key, val in list(match_dict.items()): self.result[key].append(val) def _run_interface(self, runtime): # Prepare some of the inputs if isinstance(self.inputs.root_paths, (str, bytes)): self.inputs.root_paths = [self.inputs.root_paths] self.match_regex = re.compile(self.inputs.match_regex) if self.inputs.max_depth is Undefined: max_depth = None else: max_depth = self.inputs.max_depth if self.inputs.min_depth is Undefined: min_depth = 0 else: min_depth = self.inputs.min_depth if self.inputs.ignore_regexes is Undefined: self.ignore_regexes = [] else: self.ignore_regexes = [ re.compile(regex) for regex in self.inputs.ignore_regexes ] self.result = 
None for root_path in self.inputs.root_paths: # Handle tilda/env variables and remove extra seperators root_path = os.path.normpath( os.path.expandvars(os.path.expanduser(root_path)) ) # Check if the root_path is a file if os.path.isfile(root_path): if min_depth == 0: self._match_path(root_path) continue # Walk through directory structure checking paths for curr_dir, sub_dirs, files in os.walk(root_path): # Determine the current depth from the root_path curr_depth = curr_dir.count(os.sep) - root_path.count(os.sep) # If the max path depth has been reached, clear sub_dirs # and files if max_depth is not None and curr_depth >= max_depth: sub_dirs[:] = [] files = [] # Test the path for the curr_dir and all files if curr_depth >= min_depth: self._match_path(curr_dir) if curr_depth >= (min_depth - 1): for infile in files: full_path = os.path.join(curr_dir, infile) self._match_path(full_path) if self.inputs.unpack_single and len(self.result["out_paths"]) == 1: for key, vals in list(self.result.items()): self.result[key] = vals[0] else: # sort all keys acording to out_paths for key in list(self.result.keys()): if key == "out_paths": continue sort_tuples = human_order_sorted( list(zip(self.result["out_paths"], self.result[key])) ) self.result[key] = [x for (_, x) in sort_tuples] self.result["out_paths"] = human_order_sorted(self.result["out_paths"]) if not self.result: raise RuntimeError("Regular expression did not match any files!") return runtime def _list_outputs(self): outputs = self._outputs().get() outputs.update(self.result) return outputs class FSSourceInputSpec(BaseInterfaceInputSpec): subjects_dir = Directory( exists=True, mandatory=True, desc="Freesurfer subjects directory." 
) subject_id = Str(mandatory=True, desc="Subject name for whom to retrieve data") hemi = traits.Enum( "both", "lh", "rh", usedefault=True, desc="Selects hemisphere specific outputs" ) class FSSourceOutputSpec(TraitedSpec): T1 = File(exists=True, desc="Intensity normalized whole-head volume", loc="mri") aseg = File( exists=True, loc="mri", desc="Volumetric map of regions from automatic segmentation", ) brain = File(exists=True, desc="Intensity normalized brain-only volume", loc="mri") brainmask = File(exists=True, desc="Skull-stripped (brain-only) volume", loc="mri") filled = File(exists=True, desc="Subcortical mass volume", loc="mri") norm = File(exists=True, desc="Normalized skull-stripped volume", loc="mri") nu = File(exists=True, desc="Non-uniformity corrected whole-head volume", loc="mri") orig = File(exists=True, desc="Base image conformed to Freesurfer space", loc="mri") rawavg = File( exists=True, desc="Volume formed by averaging input images", loc="mri" ) ribbon = OutputMultiPath( File(exists=True), desc="Volumetric maps of cortical ribbons", loc="mri", altkey="*ribbon", ) wm = File(exists=True, desc="Segmented white-matter volume", loc="mri") wmparc = File( exists=True, loc="mri", desc="Aparc parcellation projected into subcortical white matter", ) curv = OutputMultiPath( File(exists=True), desc="Maps of surface curvature", loc="surf" ) avg_curv = OutputMultiPath( File(exists=True), desc="Average atlas curvature, sampled to subject", loc="surf", ) inflated = OutputMultiPath( File(exists=True), desc="Inflated surface meshes", loc="surf" ) pial = OutputMultiPath( File(exists=True), desc="Gray matter/pia mater surface meshes", loc="surf" ) area_pial = OutputMultiPath( File(exists=True), desc="Mean area of triangles each vertex on the pial surface is " "associated with", loc="surf", altkey="area.pial", ) curv_pial = OutputMultiPath( File(exists=True), desc="Curvature of pial surface", loc="surf", altkey="curv.pial", ) smoothwm = OutputMultiPath( 
File(exists=True), loc="surf", desc="Smoothed original surface meshes" ) sphere = OutputMultiPath( File(exists=True), desc="Spherical surface meshes", loc="surf" ) sulc = OutputMultiPath( File(exists=True), desc="Surface maps of sulcal depth", loc="surf" ) thickness = OutputMultiPath( File(exists=True), loc="surf", desc="Surface maps of cortical thickness" ) volume = OutputMultiPath( File(exists=True), desc="Surface maps of cortical volume", loc="surf" ) white = OutputMultiPath( File(exists=True), desc="White/gray matter surface meshes", loc="surf" ) jacobian_white = OutputMultiPath( File(exists=True), desc="Distortion required to register to spherical atlas", loc="surf", ) graymid = OutputMultiPath( File(exists=True), desc="Graymid/midthickness surface meshes", loc="surf", altkey=["graymid", "midthickness"], ) label = OutputMultiPath( File(exists=True), desc="Volume and surface label files", loc="label", altkey="*label", ) annot = OutputMultiPath( File(exists=True), desc="Surface annotation files", loc="label", altkey="*annot" ) aparc_aseg = OutputMultiPath( File(exists=True), loc="mri", altkey="aparc*aseg", desc="Aparc parcellation projected into aseg volume", ) sphere_reg = OutputMultiPath( File(exists=True), loc="surf", altkey="sphere.reg", desc="Spherical registration file", ) aseg_stats = OutputMultiPath( File(exists=True), loc="stats", altkey="aseg", desc="Automated segmentation statistics file", ) wmparc_stats = OutputMultiPath( File(exists=True), loc="stats", altkey="wmparc", desc="White matter parcellation statistics file", ) aparc_stats = OutputMultiPath( File(exists=True), loc="stats", altkey="aparc", desc="Aparc parcellation statistics files", ) BA_stats = OutputMultiPath( File(exists=True), loc="stats", altkey="BA", desc="Brodmann Area statistics files", ) aparc_a2009s_stats = OutputMultiPath( File(exists=True), loc="stats", altkey="aparc.a2009s", desc="Aparc a2009s parcellation statistics files", ) curv_stats = OutputMultiPath( File(exists=True), 
loc="stats", altkey="curv", desc="Curvature statistics files" ) entorhinal_exvivo_stats = OutputMultiPath( File(exists=True), loc="stats", altkey="entorhinal_exvivo", desc="Entorhinal exvivo statistics files", ) class FreeSurferSource(IOBase): """Generates freesurfer subject info from their directories. Examples -------- >>> from nipype.interfaces.io import FreeSurferSource >>> fs = FreeSurferSource() >>> #fs.inputs.subjects_dir = '.' >>> fs.inputs.subject_id = 'PWS04' >>> res = fs.run() # doctest: +SKIP >>> fs.inputs.hemi = 'lh' >>> res = fs.run() # doctest: +SKIP """ input_spec = FSSourceInputSpec output_spec = FSSourceOutputSpec _always_run = True _additional_metadata = ["loc", "altkey"] def _get_files(self, path, key, dirval, altkey=None): globsuffix = "" if dirval == "mri": globsuffix = ".mgz" elif dirval == "stats": globsuffix = ".stats" globprefix = "" if dirval in ("surf", "label", "stats"): if self.inputs.hemi != "both": globprefix = self.inputs.hemi + "." else: globprefix = "?h." if key in ("aseg_stats", "wmparc_stats"): globprefix = "" elif key == "ribbon": if self.inputs.hemi != "both": globprefix = self.inputs.hemi + "." else: globprefix = "*" keys = ensure_list(altkey) if altkey else [key] globfmt = os.path.join(path, dirval, "".join((globprefix, "{}", globsuffix))) return [ os.path.abspath(f) for key in keys for f in glob.glob(globfmt.format(key)) ] def _list_outputs(self): subjects_dir = self.inputs.subjects_dir subject_path = os.path.join(subjects_dir, self.inputs.subject_id) output_traits = self._outputs() outputs = output_traits.get() for k in list(outputs.keys()): val = self._get_files( subject_path, k, output_traits.traits()[k].loc, output_traits.traits()[k].altkey, ) if val: outputs[k] = simplify_list(val) return outputs class XNATSourceInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): query_template = Str( mandatory=True, desc=("Layout used to get files. 
Relative to base " "directory if defined"), ) query_template_args = traits.Dict( Str, traits.List(traits.List), value=dict(outfiles=[]), usedefault=True, desc="Information to plug into template", ) server = Str(mandatory=True, requires=["user", "pwd"], xor=["config"]) user = Str() pwd = traits.Password() config = File(mandatory=True, xor=["server"]) cache_dir = Directory(desc="Cache directory") class XNATSource(LibraryBaseInterface, IOBase): """ Pull data from an XNAT server. Generic XNATSource module that wraps around the pyxnat module in an intelligent way for neuroimaging tasks to grab files and data from an XNAT server. Examples -------- Pick all files from current directory >>> dg = XNATSource() >>> dg.inputs.template = '*' >>> dg = XNATSource(infields=['project','subject','experiment','assessor','inout']) >>> dg.inputs.query_template = '/projects/%s/subjects/%s/experiments/%s' \ '/assessors/%s/%s_resources/files' >>> dg.inputs.project = 'IMAGEN' >>> dg.inputs.subject = 'IMAGEN_000000001274' >>> dg.inputs.experiment = '*SessionA*' >>> dg.inputs.assessor = '*ADNI_MPRAGE_nii' >>> dg.inputs.inout = 'out' >>> dg = XNATSource(infields=['sid'],outfields=['struct','func']) >>> dg.inputs.query_template = '/projects/IMAGEN/subjects/%s/experiments/*SessionA*' \ '/assessors/*%s_nii/out_resources/files' >>> dg.inputs.query_template_args['struct'] = [['sid','ADNI_MPRAGE']] >>> dg.inputs.query_template_args['func'] = [['sid','EPI_faces']] >>> dg.inputs.sid = 'IMAGEN_000000001274' """ input_spec = XNATSourceInputSpec output_spec = DynamicTraitedSpec _pkg = "pyxnat" def __init__(self, infields=None, outfields=None, **kwargs): """ Parameters ---------- infields : list of str Indicates the input fields to be dynamically created outfields: list of str Indicates output fields to be dynamically created See class examples for usage """ super(XNATSource, self).__init__(**kwargs) undefined_traits = {} # used for mandatory inputs check self._infields = infields if infields: for key in 
infields: self.inputs.add_trait(key, traits.Any) undefined_traits[key] = Undefined self.inputs.query_template_args["outfiles"] = [infields] if outfields: # add ability to insert field specific templates self.inputs.add_trait( "field_template", traits.Dict( traits.Enum(outfields), desc="arguments that fit into query_template", ), ) undefined_traits["field_template"] = Undefined # self.inputs.remove_trait('query_template_args') outdict = {} for key in outfields: outdict[key] = [] self.inputs.query_template_args = outdict self.inputs.trait_set(trait_change_notify=False, **undefined_traits) def _add_output_traits(self, base): """ Using traits.Any instead out OutputMultiPath till add_trait bug is fixed. """ return add_traits(base, list(self.inputs.query_template_args.keys())) def _list_outputs(self): # infields are mandatory, however I could not figure out # how to set 'mandatory' flag dynamically, hence manual check import pyxnat cache_dir = self.inputs.cache_dir or tempfile.gettempdir() if self.inputs.config: xnat = pyxnat.Interface(config=self.inputs.config) else: xnat = pyxnat.Interface( self.inputs.server, self.inputs.user, self.inputs.pwd, cache_dir ) if self._infields: for key in self._infields: value = getattr(self.inputs, key) if not isdefined(value): msg = ( "%s requires a value for input '%s' " "because it was listed in 'infields'" % (self.__class__.__name__, key) ) raise ValueError(msg) outputs = {} for key, args in list(self.inputs.query_template_args.items()): outputs[key] = [] template = self.inputs.query_template if ( hasattr(self.inputs, "field_template") and isdefined(self.inputs.field_template) and key in self.inputs.field_template ): template = self.inputs.field_template[key] if not args: file_objects = xnat.select(template).get("obj") if file_objects == []: raise IOError("Template %s returned no files" % template) outputs[key] = simplify_list( [ str(file_object.get()) for file_object in file_objects if file_object.exists() ] ) for argnum, arglist in 
enumerate(args): maxlen = 1 for arg in arglist: if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): if (maxlen > 1) and (len(arg) != maxlen): raise ValueError( "incompatible number " "of arguments for %s" % key ) if len(arg) > maxlen: maxlen = len(arg) outfiles = [] for i in range(maxlen): argtuple = [] for arg in arglist: if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): argtuple.append(arg[i]) else: argtuple.append(arg) if argtuple: target = template % tuple(argtuple) file_objects = xnat.select(target).get("obj") if file_objects == []: raise IOError("Template %s " "returned no files" % target) outfiles = simplify_list( [ str(file_object.get()) for file_object in file_objects if file_object.exists() ] ) else: file_objects = xnat.select(template).get("obj") if file_objects == []: raise IOError("Template %s " "returned no files" % template) outfiles = simplify_list( [ str(file_object.get()) for file_object in file_objects if file_object.exists() ] ) outputs[key].insert(i, outfiles) if len(outputs[key]) == 0: outputs[key] = None elif len(outputs[key]) == 1: outputs[key] = outputs[key][0] return outputs class XNATSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): _outputs = traits.Dict(Str, value={}, usedefault=True) server = Str(mandatory=True, requires=["user", "pwd"], xor=["config"]) user = Str() pwd = traits.Password() config = File(mandatory=True, xor=["server"]) cache_dir = Directory(desc="") project_id = Str(desc="Project in which to store the outputs", mandatory=True) subject_id = Str(desc="Set to subject id", mandatory=True) experiment_id = Str(desc="Set to workflow name", mandatory=True) assessor_id = Str( desc=( "Option to customize ouputs representation in XNAT - " "assessor level will be used with specified id" ), xor=["reconstruction_id"], ) reconstruction_id = Str( desc=( "Option to customize ouputs 
representation in XNAT - " "reconstruction level will be used with specified id" ), xor=["assessor_id"], ) share = traits.Bool( False, desc=( "Option to share the subjects from the original project" "instead of creating new ones when possible - the created " "experiments are then shared back to the original project" ), usedefault=True, ) def __setattr__(self, key, value): if key not in self.copyable_trait_names(): self._outputs[key] = value else: super(XNATSinkInputSpec, self).__setattr__(key, value) class XNATSink(LibraryBaseInterface, IOBase): """Generic datasink module that takes a directory containing a list of nifti files and provides a set of structured output fields. """ input_spec = XNATSinkInputSpec _pkg = "pyxnat" def _list_outputs(self): """Execute this module.""" import pyxnat # setup XNAT connection cache_dir = self.inputs.cache_dir or tempfile.gettempdir() if self.inputs.config: xnat = pyxnat.Interface(config=self.inputs.config) else: xnat = pyxnat.Interface( self.inputs.server, self.inputs.user, self.inputs.pwd, cache_dir ) # if possible share the subject from the original project if self.inputs.share: subject_id = self.inputs.subject_id result = xnat.select( "xnat:subjectData", ["xnat:subjectData/PROJECT", "xnat:subjectData/SUBJECT_ID"], ).where("xnat:subjectData/SUBJECT_ID = %s AND" % subject_id) # subject containing raw data exists on the server if result.data and isinstance(result.data[0], dict): result = result.data[0] shared = xnat.select( "/project/%s/subject/%s" % (self.inputs.project_id, self.inputs.subject_id) ) if not shared.exists(): # subject not in share project share_project = xnat.select("/project/%s" % self.inputs.project_id) if not share_project.exists(): # check project exists share_project.insert() subject = xnat.select( "/project/%(project)s" "/subject/%(subject_id)s" % result ) subject.share(str(self.inputs.project_id)) # setup XNAT resource uri_template_args = dict( project_id=quote_id(self.inputs.project_id), 
subject_id=self.inputs.subject_id, experiment_id=quote_id(self.inputs.experiment_id), ) if self.inputs.share: uri_template_args["original_project"] = result["project"] if self.inputs.assessor_id: uri_template_args["assessor_id"] = quote_id(self.inputs.assessor_id) elif self.inputs.reconstruction_id: uri_template_args["reconstruction_id"] = quote_id( self.inputs.reconstruction_id ) # gather outputs and upload them for key, files in list(self.inputs._outputs.items()): for name in ensure_list(files): if isinstance(name, list): for i, file_name in enumerate(name): push_file( self, xnat, file_name, "%s_" % i + key, uri_template_args ) else: push_file(self, xnat, name, key, uri_template_args) def quote_id(string): return str(string).replace("_", "---") def unquote_id(string): return str(string).replace("---", "_") def push_file(self, xnat, file_name, out_key, uri_template_args): # grab info from output file names val_list = [ unquote_id(val) for part in os.path.split(file_name)[0].split(os.sep) for val in part.split("_")[1:] if part.startswith("_") and len(part.split("_")) % 2 ] keymap = dict(list(zip(val_list[1::2], val_list[2::2]))) _label = [] for key, val in sorted(keymap.items()): if str(self.inputs.subject_id) not in val: _label.extend([key, val]) # select and define container level uri_template_args["container_type"] = None for container in ["assessor_id", "reconstruction_id"]: if getattr(self.inputs, container): uri_template_args["container_type"] = container.split("_id")[0] uri_template_args["container_id"] = uri_template_args[container] if uri_template_args["container_type"] is None: uri_template_args["container_type"] = "reconstruction" uri_template_args["container_id"] = unquote_id( uri_template_args["experiment_id"] ) if _label: uri_template_args["container_id"] += "_results_%s" % "_".join(_label) else: uri_template_args["container_id"] += "_results" # define resource level uri_template_args["resource_label"] = "%s_%s" % ( uri_template_args["container_id"], 
out_key.split(".")[0], ) # define file level uri_template_args["file_name"] = os.path.split( os.path.abspath(unquote_id(file_name)) )[1] uri_template = ( "/project/%(project_id)s/subject/%(subject_id)s" "/experiment/%(experiment_id)s/%(container_type)s/%(container_id)s" "/out/resource/%(resource_label)s/file/%(file_name)s" ) # unquote values before uploading for key in list(uri_template_args.keys()): uri_template_args[key] = unquote_id(uri_template_args[key]) # upload file remote_file = xnat.select(uri_template % uri_template_args) remote_file.insert(file_name, experiments="xnat:imageSessionData", use_label=True) # shares the experiment back to the original project if relevant if "original_project" in uri_template_args: experiment_template = ( "/project/%(original_project)s" "/subject/%(subject_id)s/experiment/%(experiment_id)s" ) xnat.select(experiment_template % uri_template_args).share( uri_template_args["original_project"] ) def capture_provenance(): pass def push_provenance(): pass class SQLiteSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): database_file = File(exists=True, mandatory=True) table_name = Str(mandatory=True) class SQLiteSink(LibraryBaseInterface, IOBase): """ Very simple frontend for storing values into SQLite database. .. warning:: This is not a thread-safe node because it can write to a common shared location. It will not complain when it overwrites a file. 
Examples -------- >>> sql = SQLiteSink(input_names=['subject_id', 'some_measurement']) >>> sql.inputs.database_file = 'my_database.db' >>> sql.inputs.table_name = 'experiment_results' >>> sql.inputs.subject_id = 's1' >>> sql.inputs.some_measurement = 11.4 >>> sql.run() # doctest: +SKIP """ input_spec = SQLiteSinkInputSpec _pkg = "sqlite3" def __init__(self, input_names, **inputs): super(SQLiteSink, self).__init__(**inputs) self._input_names = ensure_list(input_names) add_traits(self.inputs, [name for name in self._input_names]) def _list_outputs(self): """Execute this module.""" import sqlite3 conn = sqlite3.connect(self.inputs.database_file, check_same_thread=False) c = conn.cursor() c.execute( "INSERT OR REPLACE INTO %s (" % self.inputs.table_name + ",".join(self._input_names) + ") VALUES (" + ",".join(["?"] * len(self._input_names)) + ")", [getattr(self.inputs, name) for name in self._input_names], ) conn.commit() c.close() return None class MySQLSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): host = Str( "localhost", mandatory=True, requires=["username", "password"], xor=["config"], usedefault=True, ) config = File( mandatory=True, xor=["host"], desc="MySQL Options File (same format as my.cnf)" ) database_name = Str(mandatory=True, desc="Otherwise known as the schema name") table_name = Str(mandatory=True) username = Str() password = Str() class MySQLSink(IOBase): """ Very simple frontend for storing values into MySQL database. 
Examples -------- >>> sql = MySQLSink(input_names=['subject_id', 'some_measurement']) >>> sql.inputs.database_name = 'my_database' >>> sql.inputs.table_name = 'experiment_results' >>> sql.inputs.username = 'root' >>> sql.inputs.password = 'secret' >>> sql.inputs.subject_id = 's1' >>> sql.inputs.some_measurement = 11.4 >>> sql.run() # doctest: +SKIP """ input_spec = MySQLSinkInputSpec def __init__(self, input_names, **inputs): super(MySQLSink, self).__init__(**inputs) self._input_names = ensure_list(input_names) add_traits(self.inputs, [name for name in self._input_names]) def _list_outputs(self): """Execute this module.""" import MySQLdb if isdefined(self.inputs.config): conn = MySQLdb.connect( db=self.inputs.database_name, read_default_file=self.inputs.config ) else: conn = MySQLdb.connect( host=self.inputs.host, user=self.inputs.username, passwd=self.inputs.password, db=self.inputs.database_name, ) c = conn.cursor() c.execute( "REPLACE INTO %s (" % self.inputs.table_name + ",".join(self._input_names) + ") VALUES (" + ",".join(["%s"] * len(self._input_names)) + ")", [getattr(self.inputs, name) for name in self._input_names], ) conn.commit() c.close() return None class SSHDataGrabberInputSpec(DataGrabberInputSpec): hostname = Str(mandatory=True, desc="Server hostname.") username = Str(desc="Server username.") password = traits.Password(desc="Server password.") download_files = traits.Bool( True, usedefault=True, desc="If false it will return the file names without downloading them", ) base_directory = Str( mandatory=True, desc="Path to the base directory consisting of subject data." 
) template_expression = traits.Enum( ["fnmatch", "regexp"], usedefault=True, desc="Use either fnmatch or regexp to express templates", ) ssh_log_to_file = Str( "", usedefault=True, desc="If set SSH commands will be logged to the given file" ) class SSHDataGrabber(LibraryBaseInterface, DataGrabber): """ Extension of DataGrabber module that downloads the file list and optionally the files from a SSH server. The SSH operation must not need user and password so an SSH agent must be active in where this module is being run. .. attention:: Doesn't support directories currently Examples -------- >>> from nipype.interfaces.io import SSHDataGrabber >>> dg = SSHDataGrabber() >>> dg.inputs.hostname = 'test.rebex.net' >>> dg.inputs.user = 'demo' >>> dg.inputs.password = 'password' >>> dg.inputs.base_directory = 'pub/example' Pick all files from the base directory >>> dg.inputs.template = '*' Pick all files starting with "s" and a number from current directory >>> dg.inputs.template_expression = 'regexp' >>> dg.inputs.template = 'pop[0-9].*' Same thing but with dynamically created fields >>> dg = SSHDataGrabber(infields=['arg1','arg2']) >>> dg.inputs.hostname = 'test.rebex.net' >>> dg.inputs.user = 'demo' >>> dg.inputs.password = 'password' >>> dg.inputs.base_directory = 'pub' >>> dg.inputs.template = '%s/%s.txt' >>> dg.inputs.arg1 = 'example' >>> dg.inputs.arg2 = 'foo' however this latter form can be used with iterables and iterfield in a pipeline. Dynamically created, user-defined input and output fields >>> dg = SSHDataGrabber(infields=['sid'], outfields=['func','struct','ref']) >>> dg.inputs.hostname = 'myhost.com' >>> dg.inputs.base_directory = '/main_folder/my_remote_dir' >>> dg.inputs.template_args['func'] = [['sid',['f3','f5']]] >>> dg.inputs.template_args['struct'] = [['sid',['struct']]] >>> dg.inputs.template_args['ref'] = [['sid','ref']] >>> dg.inputs.sid = 's1' Change the template only for output field struct. 
The rest use the general template >>> dg.inputs.field_template = dict(struct='%s/struct.nii') >>> dg.inputs.template_args['struct'] = [['sid']] """ input_spec = SSHDataGrabberInputSpec output_spec = DynamicTraitedSpec _always_run = False _pkg = "paramiko" def __init__(self, infields=None, outfields=None, **kwargs): """ Parameters ---------- infields : list of str Indicates the input fields to be dynamically created outfields: list of str Indicates output fields to be dynamically created See class examples for usage """ if not outfields: outfields = ["outfiles"] kwargs = kwargs.copy() kwargs["infields"] = infields kwargs["outfields"] = outfields super(SSHDataGrabber, self).__init__(**kwargs) if None in (self.inputs.username, self.inputs.password): raise ValueError( "either both username and password " "are provided or none of them" ) if ( self.inputs.template_expression == "regexp" and self.inputs.template[-1] != "$" ): self.inputs.template += "$" def _get_files_over_ssh(self, template): """Get the files matching template over an SSH connection.""" # Connect over SSH client = self._get_ssh_client() sftp = client.open_sftp() sftp.chdir(self.inputs.base_directory) # Get all files in the dir, and filter for desired files template_dir = os.path.dirname(template) template_base = os.path.basename(template) every_file_in_dir = sftp.listdir(template_dir) if self.inputs.template_expression == "fnmatch": outfiles = fnmatch.filter(every_file_in_dir, template_base) elif self.inputs.template_expression == "regexp": regexp = re.compile(template_base) outfiles = list(filter(regexp.match, every_file_in_dir)) else: raise ValueError("template_expression value invalid") if len(outfiles) == 0: # no files msg = "Output template: %s returned no files" % template if self.inputs.raise_on_empty: raise IOError(msg) else: warn(msg) # return value outfiles = None else: # found files, sort and save to outputs if self.inputs.sort_filelist: outfiles = human_order_sorted(outfiles) # actually 
download the files, if desired if self.inputs.download_files: files_to_download = copy.copy(outfiles) # make sure new list! # check to see if there are any related files to download for file_to_download in files_to_download: related_to_current = get_related_files( file_to_download, include_this_file=False ) existing_related_not_downloading = [ f for f in related_to_current if f in every_file_in_dir and f not in files_to_download ] files_to_download.extend(existing_related_not_downloading) for f in files_to_download: try: sftp.get(os.path.join(template_dir, f), f) except IOError: iflogger.info("remote file %s not found" % f) # return value outfiles = simplify_list(outfiles) return outfiles def _list_outputs(self): import paramiko if len(self.inputs.ssh_log_to_file) > 0: paramiko.util.log_to_file(self.inputs.ssh_log_to_file) # infields are mandatory, however I could not figure out how to set 'mandatory' flag dynamically # hence manual check if self._infields: for key in self._infields: value = getattr(self.inputs, key) if not isdefined(value): msg = ( "%s requires a value for input '%s' because it was listed in 'infields'" % (self.__class__.__name__, key) ) raise ValueError(msg) outputs = {} for key, args in list(self.inputs.template_args.items()): outputs[key] = [] template = self.inputs.template if ( hasattr(self.inputs, "field_template") and isdefined(self.inputs.field_template) and key in self.inputs.field_template ): template = self.inputs.field_template[key] if not args: outputs[key] = self._get_files_over_ssh(template) for argnum, arglist in enumerate(args): maxlen = 1 for arg in arglist: if isinstance(arg, (str, bytes)) and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): if (maxlen > 1) and (len(arg) != maxlen): raise ValueError( "incompatible number of arguments for %s" % key ) if len(arg) > maxlen: maxlen = len(arg) outfiles = [] for i in range(maxlen): argtuple = [] for arg in arglist: if isinstance(arg, (str, bytes)) 
and hasattr(self.inputs, arg): arg = getattr(self.inputs, arg) if isinstance(arg, list): argtuple.append(arg[i]) else: argtuple.append(arg) filledtemplate = template if argtuple: try: filledtemplate = template % tuple(argtuple) except TypeError as e: raise TypeError( f"{e}: Template {template} failed to convert " f"with args {tuple(argtuple)}" ) outputs[key].append(self._get_files_over_ssh(filledtemplate)) # disclude where there was any invalid matches if any([val is None for val in outputs[key]]): outputs[key] = [] # no outputs is None, not empty list if len(outputs[key]) == 0: outputs[key] = None # one output is the item, not a list elif len(outputs[key]) == 1: outputs[key] = outputs[key][0] for k, v in list(outputs.items()): outputs[k] = os.path.join(os.getcwd(), v) return outputs def _get_ssh_client(self): import paramiko config = paramiko.SSHConfig() config.parse(open(os.path.expanduser("~/.ssh/config"))) host = config.lookup(self.inputs.hostname) if "proxycommand" in host: proxy = paramiko.ProxyCommand( subprocess.check_output( [os.environ["SHELL"], "-c", "echo %s" % host["proxycommand"]] ).strip() ) else: proxy = None client = paramiko.SSHClient() client.load_system_host_keys() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) client.connect(host["hostname"], username=host["user"], sock=proxy) return client class JSONFileGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): in_file = File(exists=True, desc="JSON source file") defaults = traits.Dict( desc=( "JSON dictionary that sets default output" "values, overridden by values found in in_file" ) ) class JSONFileGrabber(IOBase): """ Datagrabber interface that loads a json file and generates an output for every first-level object Example ------- >>> import pprint >>> from nipype.interfaces.io import JSONFileGrabber >>> jsonSource = JSONFileGrabber() >>> jsonSource.inputs.defaults = {'param1': 'overrideMe', 'param3': 1.0} >>> res = jsonSource.run() >>> pprint.pprint(res.outputs.get()) 
{'param1': 'overrideMe', 'param3': 1.0} >>> jsonSource.inputs.in_file = os.path.join(datadir, 'jsongrabber.txt') >>> res = jsonSource.run() >>> pprint.pprint(res.outputs.get()) # doctest:, +ELLIPSIS {'param1': 'exampleStr', 'param2': 4, 'param3': 1.0} """ input_spec = JSONFileGrabberInputSpec output_spec = DynamicTraitedSpec _always_run = True def _list_outputs(self): import simplejson outputs = {} if isdefined(self.inputs.in_file): with open(self.inputs.in_file, "r") as f: data = simplejson.load(f) if not isinstance(data, dict): raise RuntimeError("JSON input has no dictionary structure") for key, value in list(data.items()): outputs[key] = value if isdefined(self.inputs.defaults): defaults = self.inputs.defaults for key, value in list(defaults.items()): if key not in list(outputs.keys()): outputs[key] = value return outputs class JSONFileSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): out_file = File(desc="JSON sink file") in_dict = traits.Dict(value={}, usedefault=True, desc="input JSON dictionary") _outputs = traits.Dict(value={}, usedefault=True) def __setattr__(self, key, value): if key not in self.copyable_trait_names(): if not isdefined(value): super(JSONFileSinkInputSpec, self).__setattr__(key, value) self._outputs[key] = value else: if key in self._outputs: self._outputs[key] = value super(JSONFileSinkInputSpec, self).__setattr__(key, value) class JSONFileSinkOutputSpec(TraitedSpec): out_file = File(desc="JSON sink file") class JSONFileSink(IOBase): """ Very simple frontend for storing values into a JSON file. Entries already existing in in_dict will be overridden by matching entries dynamically added as inputs. .. warning:: This is not a thread-safe node because it can write to a common shared location. It will not complain when it overwrites a file. Examples -------- >>> jsonsink = JSONFileSink(input_names=['subject_id', ... 
'some_measurement']) >>> jsonsink.inputs.subject_id = 's1' >>> jsonsink.inputs.some_measurement = 11.4 >>> jsonsink.run() # doctest: +SKIP Using a dictionary as input: >>> dictsink = JSONFileSink() >>> dictsink.inputs.in_dict = {'subject_id': 's1', ... 'some_measurement': 11.4} >>> dictsink.run() # doctest: +SKIP """ input_spec = JSONFileSinkInputSpec output_spec = JSONFileSinkOutputSpec def __init__(self, infields=[], force_run=True, **inputs): super(JSONFileSink, self).__init__(**inputs) self._input_names = infields undefined_traits = {} for key in infields: self.inputs.add_trait(key, traits.Any) self.inputs._outputs[key] = Undefined undefined_traits[key] = Undefined self.inputs.trait_set(trait_change_notify=False, **undefined_traits) if force_run: self._always_run = True def _process_name(self, name, val): if "." in name: newkeys = name.split(".") name = newkeys.pop(0) nested_dict = {newkeys.pop(): val} for nk in reversed(newkeys): nested_dict = {nk: nested_dict} val = nested_dict return name, val def _list_outputs(self): import simplejson import os.path as op if not isdefined(self.inputs.out_file): out_file = op.abspath("datasink.json") else: out_file = op.abspath(self.inputs.out_file) out_dict = self.inputs.in_dict # Overwrite in_dict entries automatically for key, val in list(self.inputs._outputs.items()): if not isdefined(val) or key == "trait_added": continue key, val = self._process_name(key, val) out_dict[key] = val with open(out_file, "w") as f: f.write(str(simplejson.dumps(out_dict, ensure_ascii=False))) outputs = self.output_spec().get() outputs["out_file"] = out_file return outputs class BIDSDataGrabberInputSpec(DynamicTraitedSpec): base_dir = Directory(exists=True, desc="Path to BIDS Directory.", mandatory=True) output_query = traits.Dict( key_trait=Str, value_trait=traits.Dict, desc="Queries for outfield outputs" ) raise_on_empty = traits.Bool( True, usedefault=True, desc="Generate exception if list is empty for a given field", ) index_derivatives = 
traits.Bool( False, mandatory=True, usedefault=True, desc="Index derivatives/ sub-directory" ) extra_derivatives = traits.List( Directory(exists=True), desc="Additional derivative directories to index" ) class BIDSDataGrabber(LibraryBaseInterface, IOBase): """BIDS datagrabber module that wraps around pybids to allow arbitrary querying of BIDS datasets. Examples -------- .. setup:: >>> try: ... import bids ... except ImportError: ... pytest.skip() By default, the BIDSDataGrabber fetches anatomical and functional images from a project, and makes BIDS entities (e.g. subject) available for filtering outputs. >>> bg = BIDSDataGrabber() >>> bg.inputs.base_dir = 'ds005/' >>> bg.inputs.subject = '01' >>> results = bg.run() # doctest: +SKIP Dynamically created, user-defined output fields can also be defined to return different types of outputs from the same project. All outputs are filtered on common entities, which can be explicitly defined as infields. >>> bg = BIDSDataGrabber(infields = ['subject']) >>> bg.inputs.base_dir = 'ds005/' >>> bg.inputs.subject = '01' >>> bg.inputs.output_query['dwi'] = dict(datatype='dwi') >>> results = bg.run() # doctest: +SKIP """ input_spec = BIDSDataGrabberInputSpec output_spec = DynamicTraitedSpec _always_run = True _pkg = "bids" def __init__(self, infields=None, **kwargs): """ Parameters ---------- infields : list of str Indicates the input fields to be dynamically created """ super(BIDSDataGrabber, self).__init__(**kwargs) if not isdefined(self.inputs.output_query): self.inputs.output_query = { "bold": { "datatype": "func", "suffix": "bold", "extension": ["nii", ".nii.gz"], }, "T1w": { "datatype": "anat", "suffix": "T1w", "extension": ["nii", ".nii.gz"], }, } # If infields is empty, use all BIDS entities if infields is None: from bids import layout as bidslayout bids_config = join(dirname(bidslayout.__file__), "config", "bids.json") bids_config = json.load(open(bids_config, "r")) infields = [i["name"] for i in bids_config["entities"]] 
self._infields = infields or [] # used for mandatory inputs check undefined_traits = {} for key in self._infields: self.inputs.add_trait(key, traits.Any) undefined_traits[key] = kwargs[key] if key in kwargs else Undefined self.inputs.trait_set(trait_change_notify=False, **undefined_traits) def _list_outputs(self): from bids import BIDSLayout layout = BIDSLayout( self.inputs.base_dir, derivatives=self.inputs.index_derivatives ) if isdefined(self.inputs.extra_derivatives): layout.add_derivatives(self.inputs.extra_derivatives) # If infield is not given nm input value, silently ignore filters = {} for key in self._infields: value = getattr(self.inputs, key) if isdefined(value): filters[key] = value outputs = {} for key, query in self.inputs.output_query.items(): args = query.copy() args.update(filters) filelist = layout.get(return_type="file", **args) if len(filelist) == 0: msg = "Output key: %s returned no files" % key if self.inputs.raise_on_empty: raise IOError(msg) else: iflogger.warning(msg) filelist = Undefined outputs[key] = filelist return outputs def _add_output_traits(self, base): return add_traits(base, list(self.inputs.output_query.keys())) class ExportFileInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc="Input file name") out_file = File(mandatory=True, desc="Output file name") check_extension = traits.Bool( True, usedefault=True, desc="Ensure that the input and output file extensions match", ) clobber = traits.Bool(desc="Permit overwriting existing files") class ExportFileOutputSpec(TraitedSpec): out_file = File(exists=True, desc="Output file name") class ExportFile(SimpleInterface): """Export a file to an absolute path. This interface copies an input file to a named output file. This is useful to save individual files to a specific location, instead of more flexible interfaces like DataSink. 
Examples -------- >>> from nipype.interfaces.io import ExportFile >>> import os.path as op >>> ef = ExportFile() >>> ef.inputs.in_file = "T1.nii.gz" >>> os.mkdir("output_folder") >>> ef.inputs.out_file = op.abspath("output_folder/sub1_out.nii.gz") >>> res = ef.run() >>> os.path.exists(res.outputs.out_file) True """ input_spec = ExportFileInputSpec output_spec = ExportFileOutputSpec def _run_interface(self, runtime): if not self.inputs.clobber and op.exists(self.inputs.out_file): raise FileExistsError(self.inputs.out_file) if not op.isabs(self.inputs.out_file): raise ValueError("Out_file must be an absolute path.") if ( self.inputs.check_extension and split_filename(self.inputs.in_file)[2] != split_filename(self.inputs.out_file)[2] ): raise RuntimeError( "%s and %s have different extensions" % (self.inputs.in_file, self.inputs.out_file) ) shutil.copy(str(self.inputs.in_file), str(self.inputs.out_file)) self._results["out_file"] = self.inputs.out_file return runtime nipype-1.7.0/nipype/interfaces/matlab.py000066400000000000000000000167451413403311400202730ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Interfaces to run MATLAB scripts.""" import os from .. 
import config from .base import ( CommandLineInputSpec, InputMultiPath, isdefined, CommandLine, traits, File, Directory, ) def get_matlab_command(): """Determine whether Matlab is installed and can be executed.""" if "NIPYPE_NO_MATLAB" not in os.environ: from nipype.utils.filemanip import which return which(os.getenv("MATLABCMD", "matlab")) no_matlab = get_matlab_command() is None class MatlabInputSpec(CommandLineInputSpec): """Basic expected inputs to Matlab interface""" script = traits.Str( argstr='-r "%s;exit"', desc="m-code to run", mandatory=True, position=-1 ) uses_mcr = traits.Bool( desc="use MCR interface", xor=["nodesktop", "nosplash", "single_comp_thread"], nohash=True, ) nodesktop = traits.Bool( True, argstr="-nodesktop", usedefault=True, desc="Switch off desktop mode on unix platforms", nohash=True, ) nosplash = traits.Bool( True, argstr="-nosplash", usedefault=True, desc="Switch of splash screen", nohash=True, ) logfile = File(argstr="-logfile %s", desc="Save matlab output to log") single_comp_thread = traits.Bool( argstr="-singleCompThread", desc="force single threaded operation", nohash=True ) # non-commandline options mfile = traits.Bool(True, desc="Run m-code using m-file", usedefault=True) script_file = File( "pyscript.m", usedefault=True, desc="Name of file to write m-code to" ) paths = InputMultiPath(Directory(), desc="Paths to add to matlabpath") prescript = traits.List( ["ver,", "try,"], usedefault=True, desc="prescript to be added before code" ) postscript = traits.List( [ "\n,catch ME,", "fprintf(2,'MATLAB code threw an exception:\\n');", "fprintf(2,'%s\\n',ME.message);", "if length(ME.stack) ~= 0, fprintf(2,'File:%s\\nName:%s\\nLine:%d\\n',ME.stack.file,ME.stack.name,ME.stack.line);, end;", "end;", ], desc="script added after code", usedefault=True, ) class MatlabCommand(CommandLine): """Interface that runs matlab code >>> import nipype.interfaces.matlab as matlab >>> mlab = matlab.MatlabCommand(mfile=False) # don't write script file >>> 
mlab.inputs.script = "which('who')" >>> out = mlab.run() # doctest: +SKIP """ _cmd = "matlab" _default_matlab_cmd = None _default_mfile = None _default_paths = None input_spec = MatlabInputSpec def __init__(self, matlab_cmd=None, **inputs): """initializes interface to matlab (default 'matlab -nodesktop -nosplash') """ super(MatlabCommand, self).__init__(**inputs) if matlab_cmd and isdefined(matlab_cmd): self._cmd = matlab_cmd elif self._default_matlab_cmd: self._cmd = self._default_matlab_cmd if self._default_mfile and not isdefined(self.inputs.mfile): self.inputs.mfile = self._default_mfile if self._default_paths and not isdefined(self.inputs.paths): self.inputs.paths = self._default_paths if not isdefined(self.inputs.single_comp_thread) and not isdefined( self.inputs.uses_mcr ): if config.getboolean("execution", "single_thread_matlab"): self.inputs.single_comp_thread = True # For matlab commands force all output to be returned since matlab # does not have a clean way of notifying an error self.terminal_output = "allatonce" @classmethod def set_default_matlab_cmd(cls, matlab_cmd): """Set the default MATLAB command line for MATLAB classes. This method is used to set values for all MATLAB subclasses. However, setting this will not update the output type for any existing instances. For these, assign the .inputs.matlab_cmd. """ cls._default_matlab_cmd = matlab_cmd @classmethod def set_default_mfile(cls, mfile): """Set the default MATLAB script file format for MATLAB classes. This method is used to set values for all MATLAB subclasses. However, setting this will not update the output type for any existing instances. For these, assign the .inputs.mfile. """ cls._default_mfile = mfile @classmethod def set_default_paths(cls, paths): """Set the default MATLAB paths for MATLAB classes. This method is used to set values for all MATLAB subclasses. However, setting this will not update the output type for any existing instances. For these, assign the .inputs.paths. 
""" cls._default_paths = paths def _run_interface(self, runtime): self.terminal_output = "allatonce" runtime = super(MatlabCommand, self)._run_interface(runtime) try: # Matlab can leave the terminal in a barbbled state os.system("stty sane") except: # We might be on a system where stty doesn't exist pass if "MATLAB code threw an exception" in runtime.stderr: self.raise_exception(runtime) return runtime def _format_arg(self, name, trait_spec, value): if name in ["script"]: argstr = trait_spec.argstr if self.inputs.uses_mcr: argstr = "%s" return self._gen_matlab_command(argstr, value) return super(MatlabCommand, self)._format_arg(name, trait_spec, value) def _gen_matlab_command(self, argstr, script_lines): """Generates commands and, if mfile specified, writes it to disk.""" cwd = os.getcwd() mfile = self.inputs.mfile or self.inputs.uses_mcr paths = [] if isdefined(self.inputs.paths): paths = self.inputs.paths # prescript prescript = self.inputs.prescript postscript = self.inputs.postscript # prescript takes different default value depending on the mfile argument if mfile: prescript.insert( 0, "fprintf(1,'Executing %s at %s:\\n',mfilename(),datestr(now));" ) else: prescript.insert(0, "fprintf(1,'Executing code at %s:\\n',datestr(now));") for path in paths: prescript.append("addpath('%s');\n" % path) if not mfile: # clean up the code of comments and replace newlines with commas script_lines = ",".join( [ line for line in script_lines.split("\n") if not line.strip().startswith("%") ] ) script_lines = "\n".join(prescript) + script_lines + "\n".join(postscript) if mfile: with open(os.path.join(cwd, self.inputs.script_file), "wt") as mfile: mfile.write(script_lines) if self.inputs.uses_mcr: script = "%s" % (os.path.join(cwd, self.inputs.script_file)) else: script = "addpath('%s');%s" % ( cwd, self.inputs.script_file.split(".")[0], ) else: script = "".join(script_lines.split("\n")) return argstr % script 
nipype-1.7.0/nipype/interfaces/meshfix.py000066400000000000000000000205671413403311400204730ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """MeshFix corrects topological errors in polygonal meshes.""" import os.path as op from ..utils.filemanip import split_filename from .base import ( CommandLine, CommandLineInputSpec, traits, TraitedSpec, isdefined, File, ) class MeshFixInputSpec(CommandLineInputSpec): number_of_biggest_shells = traits.Int( argstr="--shells %d", desc="Only the N biggest shells are kept" ) epsilon_angle = traits.Range( argstr="-a %f", low=0.0, high=2.0, desc="Epsilon angle in degrees (must be between 0 and 2)", ) join_overlapping_largest_components = traits.Bool( argstr="-j", xor=["join_closest_components"], desc="Join 2 biggest components if they overlap, remove the rest.", ) join_closest_components = traits.Bool( argstr="-jc", xor=["join_closest_components"], desc="Join the closest pair of components.", ) quiet_mode = traits.Bool( argstr="-q", desc="Quiet mode, don't write much to stdout." 
) dont_clean = traits.Bool(argstr="--no-clean", desc="Don't Clean") save_as_stl = traits.Bool( xor=["save_as_vrml", "save_as_freesurfer_mesh"], argstr="--stl", desc="Result is saved in stereolithographic format (.stl)", ) save_as_vrml = traits.Bool( argstr="--wrl", xor=["save_as_stl", "save_as_freesurfer_mesh"], desc="Result is saved in VRML1.0 format (.wrl)", ) save_as_freesurfer_mesh = traits.Bool( argstr="--fsmesh", xor=["save_as_vrml", "save_as_stl"], desc="Result is saved in freesurfer mesh format", ) remove_handles = traits.Bool(argstr="--remove-handles", desc="Remove handles") uniform_remeshing_steps = traits.Int( argstr="-u %d", requires=["uniform_remeshing_vertices"], desc="Number of steps for uniform remeshing of the whole mesh", ) uniform_remeshing_vertices = traits.Int( argstr="--vertices %d", requires=["uniform_remeshing_steps"], desc="Constrains the number of vertices." "Must be used with uniform_remeshing_steps", ) laplacian_smoothing_steps = traits.Int( argstr="--smooth %d", desc="The number of laplacian smoothing steps to apply" ) x_shift = traits.Int( argstr="--smooth %d", desc="Shifts the coordinates of the vertices when saving. Output must be in FreeSurfer format", ) # Cutting, decoupling, dilation cut_outer = traits.Int( argstr="--cut-outer %d", desc="Remove triangles of 1st that are outside of the 2nd shell.", ) cut_inner = traits.Int( argstr="--cut-inner %d", desc="Remove triangles of 1st that are inside of the 2nd shell. Dilate 2nd by N; Fill holes and keep only 1st afterwards.", ) decouple_inin = traits.Int( argstr="--decouple-inin %d", desc="Treat 1st file as inner, 2nd file as outer component." "Resolve overlaps by moving inners triangles inwards. Constrain the min distance between the components > d.", ) decouple_outin = traits.Int( argstr="--decouple-outin %d", desc="Treat 1st file as outer, 2nd file as inner component." "Resolve overlaps by moving outers triangles inwards. 
Constrain the min distance between the components > d.", ) decouple_outout = traits.Int( argstr="--decouple-outout %d", desc="Treat 1st file as outer, 2nd file as inner component." "Resolve overlaps by moving outers triangles outwards. Constrain the min distance between the components > d.", ) finetuning_inwards = traits.Bool( argstr="--fineTuneIn ", requires=["finetuning_distance", "finetuning_substeps"], position=-3, desc="Used to fine-tune the minimal distance between surfaces.", ) finetuning_outwards = traits.Bool( argstr="--fineTuneOut ", requires=["finetuning_distance", "finetuning_substeps"], position=-3, xor=["finetuning_inwards"], desc="Similar to finetuning_inwards, but ensures minimal distance in the other direction", ) finetuning_distance = traits.Float( argstr="%f", requires=["finetuning_substeps"], position=-2, desc="Used to fine-tune the minimal distance between surfaces." "A minimal distance d is ensured, and reached in n substeps. When using the surfaces for subsequent volume meshing by gmsh, this step prevent too flat tetrahedra2)", ) finetuning_substeps = traits.Int( argstr="%d", requires=["finetuning_distance"], position=-1, desc="Used to fine-tune the minimal distance between surfaces." "A minimal distance d is ensured, and reached in n substeps. When using the surfaces for subsequent volume meshing by gmsh, this step prevent too flat tetrahedra2)", ) dilation = traits.Int( argstr="--dilate %d", desc="Dilate the surface by d. d < 0 means shrinking." ) set_intersections_to_one = traits.Bool( argstr="--intersect", desc="If the mesh contains intersections, return value = 1." 
"If saved in gmsh format, intersections will be highlighted.", ) in_file1 = File(exists=True, argstr="%s", position=1, mandatory=True) in_file2 = File(exists=True, argstr="%s", position=2) output_type = traits.Enum( "off", ["stl", "msh", "wrl", "vrml", "fs", "off"], usedefault=True, desc="The output type to save the file as.", ) out_filename = File( genfile=True, argstr="-o %s", desc="The output filename for the fixed mesh file" ) class MeshFixOutputSpec(TraitedSpec): mesh_file = File(exists=True, desc="The output mesh file") class MeshFix(CommandLine): """ MeshFix v1.2-alpha - by Marco Attene, Mirko Windhoff, Axel Thielscher. .. seealso:: http://jmeshlib.sourceforge.net Sourceforge page http://simnibs.de/installation/meshfixandgetfem Ubuntu installation instructions If MeshFix is used for research purposes, please cite the following paper: M. Attene - A lightweight approach to repairing digitized polygon meshes. The Visual Computer, 2010. (c) Springer. Accepted input formats are OFF, PLY and STL. Other formats (like .msh for gmsh) are supported only partially. Example ------- >>> import nipype.interfaces.meshfix as mf >>> fix = mf.MeshFix() >>> fix.inputs.in_file1 = 'lh-pial.stl' >>> fix.inputs.in_file2 = 'rh-pial.stl' >>> fix.run() # doctest: +SKIP >>> fix.cmdline 'meshfix lh-pial.stl rh-pial.stl -o lh-pial_fixed.off' """ _cmd = "meshfix" input_spec = MeshFixInputSpec output_spec = MeshFixOutputSpec def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_filename): path, name, ext = split_filename(self.inputs.out_filename) ext = ext.replace(".", "") out_types = ["stl", "msh", "wrl", "vrml", "fs", "off"] # Make sure that the output filename uses one of the possible file types if any(ext == out_type.lower() for out_type in out_types): outputs["mesh_file"] = op.abspath(self.inputs.out_filename) else: outputs["mesh_file"] = op.abspath(name + "." 
+ self.inputs.output_type) else: outputs["mesh_file"] = op.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file1) if self.inputs.save_as_freesurfer_mesh or self.inputs.output_type == "fs": self.inputs.output_type = "fs" self.inputs.save_as_freesurfer_mesh = True if self.inputs.save_as_stl or self.inputs.output_type == "stl": self.inputs.output_type = "stl" self.inputs.save_as_stl = True if self.inputs.save_as_vrml or self.inputs.output_type == "vrml": self.inputs.output_type = "vrml" self.inputs.save_as_vrml = True return name + "_fixed." + self.inputs.output_type nipype-1.7.0/nipype/interfaces/minc/000077500000000000000000000000001413403311400173725ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/minc/__init__.py000066400000000000000000000016121413403311400215030ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The MINC (McConnell Brain Imaging Centre, Montreal Neurological Institute) toolkit. The minc module provides classes for interfacing with the `MINC `_ command line tools. This module was written to work with MINC version 2.2.00. 
Author: Carlo Hamalainen http://carlo-hamalainen.net """ from .base import Info from .minc import ( Average, BBox, Beast, BestLinReg, BigAverage, Blob, Blur, Calc, Convert, Copy, Dump, Extract, Gennlxfm, Math, NlpFit, Norm, Pik, Resample, Reshape, ToEcat, ToRaw, Volcentre, Voliso, Volpad, VolSymm, XfmAvg, XfmConcat, XfmInvert, ) nipype-1.7.0/nipype/interfaces/minc/base.py000066400000000000000000000071301413403311400206570ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The minc module provides classes for interfacing with the `MINC `_ command line tools. This module was written to work with MINC version 2.2.00. Author: Carlo Hamalainen http://carlo-hamalainen.net """ import os import os.path import warnings from ..base import CommandLine warnings.filterwarnings("always", category=UserWarning) def check_minc(): """Returns True if and only if MINC is installed.'""" return Info.version() is not None def no_minc(): """Returns True if and only if MINC is *not* installed.""" return not check_minc() class Info(object): """Handle MINC version information. 
version refers to the version of MINC on the system """ @staticmethod def version(): """Check for minc version on the system Parameters ---------- None Returns ------- version : dict Version number as dict or None if MINC not found """ try: clout = CommandLine( command="mincinfo", args="-version", terminal_output="allatonce" ).run() except IOError: return None out = clout.runtime.stdout def read_program_version(s): if "program" in s: return s.split(":")[1].strip() return None def read_libminc_version(s): if "libminc" in s: return s.split(":")[1].strip() return None def read_netcdf_version(s): if "netcdf" in s: return " ".join(s.split(":")[1:]).strip() return None def read_hdf5_version(s): if "HDF5" in s: return s.split(":")[1].strip() return None versions = {"minc": None, "libminc": None, "netcdf": None, "hdf5": None} for l in out.split("\n"): for (name, f) in [ ("minc", read_program_version), ("libminc", read_libminc_version), ("netcdf", read_netcdf_version), ("hdf5", read_hdf5_version), ]: if f(l) is not None: versions[name] = f(l) return versions def aggregate_filename(files, new_suffix): """ Try to work out a sensible name given a set of files that have been combined in some way (e.g. averaged). If we can't work out a sensible prefix, we use the first filename in the list. Examples -------- >>> from nipype.interfaces.minc.base import aggregate_filename >>> f = aggregate_filename(['/tmp/foo1.mnc', '/tmp/foo2.mnc', '/tmp/foo3.mnc'], 'averaged') >>> os.path.split(f)[1] # This has a full path, so just check the filename. 'foo_averaged.mnc' >>> f = aggregate_filename(['/tmp/foo1.mnc', '/tmp/blah1.mnc'], 'averaged') >>> os.path.split(f)[1] # This has a full path, so just check the filename. 
'foo1_averaged.mnc' """ path = os.path.split(files[0])[0] names = [os.path.splitext(os.path.split(x)[1])[0] for x in files] common_prefix = os.path.commonprefix(names) path = os.getcwd() if common_prefix == "": return os.path.abspath( os.path.join( path, os.path.splitext(files[0])[0] + "_" + new_suffix + ".mnc" ) ) else: return os.path.abspath( os.path.join(path, common_prefix + "_" + new_suffix + ".mnc") ) nipype-1.7.0/nipype/interfaces/minc/minc.py000066400000000000000000003311571413403311400207040ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The minc module provides classes for interfacing with the `MINC `_ command line tools. This module was written to work with MINC version 2.2.00. Author: `Carlo Hamalainen `__ """ import glob import os import os.path import re import warnings from ..base import ( TraitedSpec, CommandLineInputSpec, CommandLine, StdOutCommandLineInputSpec, StdOutCommandLine, File, Directory, InputMultiPath, OutputMultiPath, traits, isdefined, ) from .base import aggregate_filename warnings.filterwarnings("always", category=UserWarning) class ExtractInputSpec(StdOutCommandLineInputSpec): input_file = File( desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 ) output_file = File( desc="output file", position=-1, name_source=["input_file"], hash_files=False, name_template="%s.raw", keep_extension=False, ) _xor_write = ( "write_ascii", "write_ascii", "write_byte", "write_short", "write_int", "write_long", "write_float", "write_double", "write_signed", "write_unsigned", ) write_ascii = traits.Bool( desc="Write out data as ascii strings (default).", argstr="-ascii", xor=_xor_write, ) write_byte = traits.Bool( desc="Write out data as bytes.", argstr="-byte", xor=_xor_write ) write_short = traits.Bool( desc="Write out data as short integers.", argstr="-short", xor=_xor_write ) write_int = traits.Bool( 
desc="Write out data as 32-bit integers.", argstr="-int", xor=_xor_write ) write_long = traits.Bool( desc="Superseded by write_int.", argstr="-long", xor=_xor_write ) write_float = traits.Bool( desc="Write out data as single precision floating-point values.", argstr="-float", xor=_xor_write, ) write_double = traits.Bool( desc="Write out data as double precision floating-point values.", argstr="-double", xor=_xor_write, ) _xor_signed = ("write_signed", "write_unsigned") write_signed = traits.Bool( desc="Write out signed data.", argstr="-signed", xor=_xor_signed ) write_unsigned = traits.Bool( desc="Write out unsigned data.", argstr="-unsigned", xor=_xor_signed ) write_range = traits.Tuple( traits.Float, traits.Float, argstr="-range %s %s", desc="Specify the range of output values\nDefault value: 1.79769e+308 1.79769e+308.", ) _xor_normalize = ("normalize", "nonormalize") normalize = traits.Bool( desc="Normalize integer pixel values to file max and min.", argstr="-normalize", xor=_xor_normalize, ) nonormalize = traits.Bool( desc="Turn off pixel normalization.", argstr="-nonormalize", xor=_xor_normalize ) image_range = traits.Tuple( traits.Float, traits.Float, desc="Specify the range of real image values for normalization.", argstr="-image_range %s %s", ) image_minimum = traits.Float( desc=( "Specify the minimum real image value for normalization." "Default value: 1.79769e+308." ), argstr="-image_minimum %s", ) image_maximum = traits.Float( desc=( "Specify the maximum real image value for normalization." "Default value: 1.79769e+308." ), argstr="-image_maximum %s", ) start = InputMultiPath( traits.Int, desc="Specifies corner of hyperslab (C conventions for indices).", sep=",", argstr="-start %s", ) count = InputMultiPath( traits.Int, desc="Specifies edge lengths of hyperslab to read.", sep=",", argstr="-count %s", ) # FIXME Can we make sure that len(start) == len(count)? 
_xor_flip = ( "flip_positive_direction", "flip_negative_direction", "flip_any_direction", ) flip_positive_direction = traits.Bool( desc="Flip images to always have positive direction.", argstr="-positive_direction", xor=_xor_flip, ) flip_negative_direction = traits.Bool( desc="Flip images to always have negative direction.", argstr="-negative_direction", xor=_xor_flip, ) flip_any_direction = traits.Bool( desc="Do not flip images (Default).", argstr="-any_direction", xor=_xor_flip ) _xor_x_flip = ("flip_x_positive", "flip_x_negative", "flip_x_any") flip_x_positive = traits.Bool( desc="Flip images to give positive xspace:step value (left-to-right).", argstr="+xdirection", xor=_xor_x_flip, ) flip_x_negative = traits.Bool( desc="Flip images to give negative xspace:step value (right-to-left).", argstr="-xdirection", xor=_xor_x_flip, ) flip_x_any = traits.Bool( desc="Don't flip images along x-axis (default).", argstr="-xanydirection", xor=_xor_x_flip, ) _xor_y_flip = ("flip_y_positive", "flip_y_negative", "flip_y_any") flip_y_positive = traits.Bool( desc="Flip images to give positive yspace:step value (post-to-ant).", argstr="+ydirection", xor=_xor_y_flip, ) flip_y_negative = traits.Bool( desc="Flip images to give negative yspace:step value (ant-to-post).", argstr="-ydirection", xor=_xor_y_flip, ) flip_y_any = traits.Bool( desc="Don't flip images along y-axis (default).", argstr="-yanydirection", xor=_xor_y_flip, ) _xor_z_flip = ("flip_z_positive", "flip_z_negative", "flip_z_any") flip_z_positive = traits.Bool( desc="Flip images to give positive zspace:step value (inf-to-sup).", argstr="+zdirection", xor=_xor_z_flip, ) flip_z_negative = traits.Bool( desc="Flip images to give negative zspace:step value (sup-to-inf).", argstr="-zdirection", xor=_xor_z_flip, ) flip_z_any = traits.Bool( desc="Don't flip images along z-axis (default).", argstr="-zanydirection", xor=_xor_z_flip, ) class ExtractOutputSpec(TraitedSpec): output_file = File(desc="output file in raw/text format", 
exists=True) class Extract(StdOutCommandLine): """Dump a hyperslab of MINC file data. Examples -------- >>> from nipype.interfaces.minc import Extract >>> from nipype.interfaces.minc.testdata import minc2Dfile >>> extract = Extract(input_file=minc2Dfile) >>> extract.run() # doctest: +SKIP >>> extract = Extract(input_file=minc2Dfile, start=[3, 10, 5], count=[4, 4, 4]) # extract a 4x4x4 slab at offset [3, 10, 5] >>> extract.run() # doctest: +SKIP """ input_spec = ExtractInputSpec output_spec = ExtractOutputSpec _cmd = "mincextract" class ToRawInputSpec(StdOutCommandLineInputSpec): input_file = File( desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 ) output_file = File( desc="output file", position=-1, name_source=["input_file"], hash_files=False, name_template="%s.raw", keep_extension=False, ) _xor_write = ( "write_byte", "write_short", "write_int", "write_long", "write_float", "write_double", ) write_byte = traits.Bool( desc="Write out data as bytes.", argstr="-byte", xor=_xor_write ) write_short = traits.Bool( desc="Write out data as short integers.", argstr="-short", xor=_xor_write ) write_int = traits.Bool( desc="Write out data as 32-bit integers.", argstr="-int", xor=_xor_write ) write_long = traits.Bool( desc="Superseded by write_int.", argstr="-long", xor=_xor_write ) write_float = traits.Bool( desc="Write out data as single precision floating-point values.", argstr="-float", xor=_xor_write, ) write_double = traits.Bool( desc="Write out data as double precision floating-point values.", argstr="-double", xor=_xor_write, ) _xor_signed = ("write_signed", "write_unsigned") write_signed = traits.Bool( desc="Write out signed data.", argstr="-signed", xor=_xor_signed ) write_unsigned = traits.Bool( desc="Write out unsigned data.", argstr="-unsigned", xor=_xor_signed ) write_range = traits.Tuple( traits.Float, traits.Float, argstr="-range %s %s", desc=( "Specify the range of output values." "Default value: 1.79769e+308 1.79769e+308." 
), ) _xor_normalize = ("normalize", "nonormalize") normalize = traits.Bool( desc="Normalize integer pixel values to file max and min.", argstr="-normalize", xor=_xor_normalize, ) nonormalize = traits.Bool( desc="Turn off pixel normalization.", argstr="-nonormalize", xor=_xor_normalize ) class ToRawOutputSpec(TraitedSpec): output_file = File(desc="output file in raw format", exists=True) class ToRaw(StdOutCommandLine): """Dump a chunk of MINC file data. This program is largely superceded by mincextract (see Extract). Examples -------- >>> from nipype.interfaces.minc import ToRaw >>> from nipype.interfaces.minc.testdata import minc2Dfile >>> toraw = ToRaw(input_file=minc2Dfile) >>> toraw.run() # doctest: +SKIP >>> toraw = ToRaw(input_file=minc2Dfile, write_range=(0, 100)) >>> toraw.run() # doctest: +SKIP """ input_spec = ToRawInputSpec output_spec = ToRawOutputSpec _cmd = "minctoraw" class ConvertInputSpec(CommandLineInputSpec): input_file = File( desc="input file for converting", exists=True, mandatory=True, argstr="%s", position=-2, ) output_file = File( desc="output file", genfile=True, argstr="%s", position=-1, name_source=["input_file"], hash_files=False, name_template="%s_convert_output.mnc", ) clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2") template = traits.Bool( desc=( "Create a template file. The dimensions, variables, and" "attributes of the input file are preserved but all data it set to zero." 
), argstr="-template", ) compression = traits.Enum( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, argstr="-compress %s", desc="Set the compression level, from 0 (disabled) to 9 (maximum).", ) chunk = traits.Range( low=0, desc="Set the target block size for chunking (0 default, >1 block size).", argstr="-chunk %d", ) class ConvertOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) class Convert(CommandLine): """convert between MINC 1 to MINC 2 format. Examples -------- >>> from nipype.interfaces.minc import Convert >>> from nipype.interfaces.minc.testdata import minc2Dfile >>> c = Convert(input_file=minc2Dfile, output_file='/tmp/out.mnc', two=True) # Convert to MINC2 format. >>> c.run() # doctest: +SKIP """ input_spec = ConvertInputSpec output_spec = ConvertOutputSpec _cmd = "mincconvert" class CopyInputSpec(CommandLineInputSpec): input_file = File( desc="input file to copy", exists=True, mandatory=True, argstr="%s", position=-2 ) output_file = File( desc="output file", genfile=True, argstr="%s", position=-1, name_source=["input_file"], hash_files=False, name_template="%s_copy.mnc", ) _xor_pixel = ("pixel_values", "real_values") pixel_values = traits.Bool( desc="Copy pixel values as is.", argstr="-pixel_values", xor=_xor_pixel ) real_values = traits.Bool( desc="Copy real pixel intensities (default).", argstr="-real_values", xor=_xor_pixel, ) class CopyOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) class Copy(CommandLine): """ Copy image values from one MINC file to another. Both the input and output files must exist, and the images in both files must have an equal number dimensions and equal dimension lengths. NOTE: This program is intended primarily for use with scripts such as mincedit. It does not follow the typical design rules of most MINC command-line tools and therefore should be used only with caution. 
""" input_spec = CopyInputSpec output_spec = CopyOutputSpec _cmd = "minccopy" class ToEcatInputSpec(CommandLineInputSpec): input_file = File( desc="input file to convert", exists=True, mandatory=True, argstr="%s", position=-2, ) output_file = File( desc="output file", genfile=True, argstr="%s", position=-1, name_source=["input_file"], hash_files=False, name_template="%s_to_ecat.v", keep_extension=False, ) ignore_patient_variable = traits.Bool( desc="Ignore informations from the minc patient variable.", argstr="-ignore_patient_variable", ) ignore_study_variable = traits.Bool( desc="Ignore informations from the minc study variable.", argstr="-ignore_study_variable", ) ignore_acquisition_variable = traits.Bool( desc="Ignore informations from the minc acquisition variable.", argstr="-ignore_acquisition_variable", ) ignore_ecat_acquisition_variable = traits.Bool( desc="Ignore informations from the minc ecat_acquisition variable.", argstr="-ignore_ecat_acquisition_variable", ) ignore_ecat_main = traits.Bool( desc="Ignore informations from the minc ecat-main variable.", argstr="-ignore_ecat_main", ) ignore_ecat_subheader_variable = traits.Bool( desc="Ignore informations from the minc ecat-subhdr variable.", argstr="-ignore_ecat_subheader_variable", ) no_decay_corr_fctr = traits.Bool( desc="Do not compute the decay correction factors", argstr="-no_decay_corr_fctr" ) voxels_as_integers = traits.Bool( desc=( "Voxel values are treated as integers, scale and" "calibration factors are set to unity" ), argstr="-label", ) class ToEcatOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) class ToEcat(CommandLine): """Convert a 2D image, a 3D volumes or a 4D dynamic volumes written in MINC file format to a 2D, 3D or 4D Ecat7 file. 
Examples -------- >>> from nipype.interfaces.minc import ToEcat >>> from nipype.interfaces.minc.testdata import minc2Dfile >>> c = ToEcat(input_file=minc2Dfile) >>> c.run() # doctest: +SKIP >>> c = ToEcat(input_file=minc2Dfile, voxels_as_integers=True) >>> c.run() # doctest: +SKIP """ input_spec = ToEcatInputSpec output_spec = ToEcatOutputSpec _cmd = "minctoecat" class DumpInputSpec(StdOutCommandLineInputSpec): input_file = File( desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 ) output_file = File( desc="output file", position=-1, name_source=["input_file"], hash_files=False, name_template="%s_dump.txt", keep_extension=False, ) _xor_coords_or_header = ("coordinate_data", "header_data") coordinate_data = traits.Bool( desc="Coordinate variable data and header information.", argstr="-c", xor=_xor_coords_or_header, ) header_data = traits.Bool( desc="Header information only, no data.", argstr="-h", xor=_xor_coords_or_header ) _xor_annotations = ("annotations_brief", "annotations_full") annotations_brief = traits.Enum( "c", "f", argstr="-b %s", desc="Brief annotations for C or Fortran indices in data.", xor=_xor_annotations, ) annotations_full = traits.Enum( "c", "f", argstr="-f %s", desc="Full annotations for C or Fortran indices in data.", xor=_xor_annotations, ) variables = InputMultiPath( traits.Str, desc="Output data for specified variables only.", sep=",", argstr="-v %s", ) line_length = traits.Range( low=0, desc="Line length maximum in data section (default 80).", argstr="-l %d" ) netcdf_name = traits.Str( desc="Name for netCDF (default derived from file name).", argstr="-n %s" ) precision = traits.Either( traits.Int(), traits.Tuple(traits.Int, traits.Int), desc="Display floating-point values with less precision", argstr="%s", ) # See _format_arg in Dump for actual formatting. class DumpOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) class Dump(StdOutCommandLine): """Dump a MINC file. 
Typically used in conjunction with mincgen (see Gen). Examples -------- >>> from nipype.interfaces.minc import Dump >>> from nipype.interfaces.minc.testdata import minc2Dfile >>> dump = Dump(input_file=minc2Dfile) >>> dump.run() # doctest: +SKIP >>> dump = Dump(input_file=minc2Dfile, output_file='/tmp/out.txt', precision=(3, 4)) >>> dump.run() # doctest: +SKIP """ input_spec = DumpInputSpec output_spec = DumpOutputSpec _cmd = "mincdump" def _format_arg(self, name, spec, value): if name == "precision": if isinstance(value, int): return "-p %d" % value elif ( isinstance(value, tuple) and isinstance(value[0], int) and isinstance(value[1], int) ): return "-p %d,%d" % (value[0], value[1]) else: raise ValueError("Invalid precision argument: " + str(value)) return super(Dump, self)._format_arg(name, spec, value) class AverageInputSpec(CommandLineInputSpec): _xor_input_files = ("input_files", "filelist") input_files = InputMultiPath( File(exists=True), desc="input file(s)", mandatory=True, sep=" ", argstr="%s", position=-2, xor=_xor_input_files, ) filelist = File( desc="Specify the name of a file containing input file names.", argstr="-filelist %s", exists=True, mandatory=True, xor=_xor_input_files, ) output_file = File( desc="output file", genfile=True, argstr="%s", position=-1, name_source=["input_files"], hash_files=False, name_template="%s_averaged.mnc", ) two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2") clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) _xor_verbose = ("verbose", "quiet") verbose = traits.Bool( desc="Print out log messages (default).", argstr="-verbose", xor=_xor_verbose ) quiet = traits.Bool( desc="Do not print out log messages.", argstr="-quiet", xor=_xor_verbose ) debug = traits.Bool(desc="Print out debugging messages.", argstr="-debug") _xor_check_dimensions = ("check_dimensions", "no_check_dimensions") check_dimensions = traits.Bool( desc="Check that dimension info 
matches across files (default).", argstr="-check_dimensions", xor=_xor_check_dimensions, ) no_check_dimensions = traits.Bool( desc="Do not check dimension info.", argstr="-nocheck_dimensions", xor=_xor_check_dimensions, ) _xor_format = ( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ) format_filetype = traits.Bool( desc="Use data type of first file (default).", argstr="-filetype", xor=_xor_format, ) format_byte = traits.Bool( desc="Write out byte data.", argstr="-byte", xor=_xor_format ) format_short = traits.Bool( desc="Write out short integer data.", argstr="-short", xor=_xor_format ) format_int = traits.Bool( desc="Write out 32-bit integer data.", argstr="-int", xor=_xor_format ) format_long = traits.Bool( desc="Superseded by -int.", argstr="-long", xor=_xor_format ) format_float = traits.Bool( desc="Write out single-precision floating-point data.", argstr="-float", xor=_xor_format, ) format_double = traits.Bool( desc="Write out double-precision floating-point data.", argstr="-double", xor=_xor_format, ) format_signed = traits.Bool( desc="Write signed integer data.", argstr="-signed", xor=_xor_format ) format_unsigned = traits.Bool( desc="Write unsigned integer data (default).", argstr="-unsigned", xor=_xor_format, ) max_buffer_size_in_kb = traits.Range( low=0, desc="Specify the maximum size of the internal buffers (in kbytes).", value=4096, usedefault=True, argstr="-max_buffer_size_in_kb %d", ) _xor_normalize = ("normalize", "nonormalize") normalize = traits.Bool( desc="Normalize data sets for mean intensity.", argstr="-normalize", xor=_xor_normalize, ) nonormalize = traits.Bool( desc="Do not normalize data sets (default).", argstr="-nonormalize", xor=_xor_normalize, ) voxel_range = traits.Tuple( traits.Int, traits.Int, argstr="-range %d %d", desc="Valid range for output data.", ) sdfile = File(desc="Specify an output sd file (default=none).", argstr="-sdfile 
%s") _xor_copy_header = ("copy_header", "no_copy_header") copy_header = traits.Bool( desc="Copy all of the header from the first file (default for one file).", argstr="-copy_header", xor=_xor_copy_header, ) no_copy_header = traits.Bool( desc="Do not copy all of the header from the first file (default for many files)).", argstr="-nocopy_header", xor=_xor_copy_header, ) avgdim = traits.Str( desc="Specify a dimension along which we wish to average.", argstr="-avgdim %s" ) binarize = traits.Bool( desc="Binarize the volume by looking for values in a given range.", argstr="-binarize", ) binrange = traits.Tuple( traits.Float, traits.Float, argstr="-binrange %s %s", desc="Specify a range for binarization. Default value: 1.79769e+308 -1.79769e+308.", ) binvalue = traits.Float( desc=( "Specify a target value (+/- 0.5) for" "binarization. Default value: -1.79769e+308" ), argstr="-binvalue %s", ) weights = InputMultiPath( traits.Str, desc='Specify weights for averaging (",,...").', sep=",", argstr="-weights %s", ) width_weighted = traits.Bool( desc="Weight by dimension widths when -avgdim is used.", argstr="-width_weighted", requires=("avgdim",), ) class AverageOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) class Average(CommandLine): """Average a number of MINC files. 
Examples -------- >>> from nipype.interfaces.minc import Average >>> from nipype.interfaces.minc.testdata import nonempty_minc_data >>> files = [nonempty_minc_data(i) for i in range(3)] >>> average = Average(input_files=files, output_file='/tmp/tmp.mnc') >>> average.run() # doctest: +SKIP """ input_spec = AverageInputSpec output_spec = AverageOutputSpec _cmd = "mincaverage" class BlobInputSpec(CommandLineInputSpec): input_file = File( desc="input file to blob", exists=True, mandatory=True, argstr="%s", position=-2 ) output_file = File( desc="output file", genfile=True, argstr="%s", position=-1, name_source=["input_file"], hash_files=False, name_template="%s_blob.mnc", ) trace = traits.Bool( desc="compute the trace (approximate growth and shrinkage) -- FAST", argstr="-trace", ) determinant = traits.Bool( desc="compute the determinant (exact growth and shrinkage) -- SLOW", argstr="-determinant", ) translation = traits.Bool( desc="compute translation (structure displacement)", argstr="-translation" ) magnitude = traits.Bool( desc="compute the magnitude of the displacement vector", argstr="-magnitude" ) class BlobOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) class Blob(CommandLine): """Calculate blobs from minc deformation grids. 
Examples -------- >>> from nipype.interfaces.minc import Blob >>> from nipype.interfaces.minc.testdata import minc2Dfile >>> blob = Blob(input_file=minc2Dfile, output_file='/tmp/tmp.mnc', trace=True) >>> blob.run() # doctest: +SKIP """ input_spec = BlobInputSpec output_spec = BlobOutputSpec _cmd = "mincblob" class CalcInputSpec(CommandLineInputSpec): _xor_input_files = ("input_files", "filelist") input_files = InputMultiPath( File(exists=True), desc="input file(s) for calculation", mandatory=True, sep=" ", argstr="%s", position=-2, ) output_file = File( desc="output file", genfile=True, argstr="%s", position=-1, name_source=["input_files"], hash_files=False, name_template="%s_calc.mnc", ) two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2") clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) _xor_verbose = ("verbose", "quiet") verbose = traits.Bool( desc="Print out log messages (default).", argstr="-verbose", xor=_xor_verbose ) quiet = traits.Bool( desc="Do not print out log messages.", argstr="-quiet", xor=_xor_verbose ) debug = traits.Bool(desc="Print out debugging messages.", argstr="-debug") filelist = File( desc="Specify the name of a file containing input file names.", argstr="-filelist %s", mandatory=True, xor=_xor_input_files, ) _xor_copy_header = ("copy_header", "no_copy_header") copy_header = traits.Bool( desc="Copy all of the header from the first file.", argstr="-copy_header", xor=_xor_copy_header, ) no_copy_header = traits.Bool( desc="Do not copy all of the header from the first file.", argstr="-nocopy_header", xor=_xor_copy_header, ) _xor_format = ( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ) format_filetype = traits.Bool( desc="Use data type of first file (default).", argstr="-filetype", xor=_xor_format, ) format_byte = traits.Bool( desc="Write out byte data.", 
argstr="-byte", xor=_xor_format ) format_short = traits.Bool( desc="Write out short integer data.", argstr="-short", xor=_xor_format ) format_int = traits.Bool( desc="Write out 32-bit integer data.", argstr="-int", xor=_xor_format ) format_long = traits.Bool( desc="Superseded by -int.", argstr="-long", xor=_xor_format ) format_float = traits.Bool( desc="Write out single-precision floating-point data.", argstr="-float", xor=_xor_format, ) format_double = traits.Bool( desc="Write out double-precision floating-point data.", argstr="-double", xor=_xor_format, ) format_signed = traits.Bool( desc="Write signed integer data.", argstr="-signed", xor=_xor_format ) format_unsigned = traits.Bool( desc="Write unsigned integer data (default).", argstr="-unsigned", xor=_xor_format, ) voxel_range = traits.Tuple( traits.Int, traits.Int, argstr="-range %d %d", desc="Valid range for output data.", ) max_buffer_size_in_kb = traits.Range( low=0, desc="Specify the maximum size of the internal buffers (in kbytes).", argstr="-max_buffer_size_in_kb %d", ) _xor_check_dimensions = ("check_dimensions", "no_check_dimensions") check_dimensions = traits.Bool( desc="Check that files have matching dimensions (default).", argstr="-check_dimensions", xor=_xor_check_dimensions, ) no_check_dimensions = traits.Bool( desc="Do not check that files have matching dimensions.", argstr="-nocheck_dimensions", xor=_xor_check_dimensions, ) # FIXME Is it sensible to use ignore_nan and propagate_nan at the same # time? Document this. ignore_nan = traits.Bool( desc="Ignore invalid data (NaN) for accumulations.", argstr="-ignore_nan" ) propagate_nan = traits.Bool( desc="Invalid data in any file at a voxel produces a NaN (default).", argstr="-propagate_nan", ) # FIXME Double-check that these are mutually exclusive? 
_xor_nan_zero_illegal = ("output_nan", "output_zero", "output_illegal_value") output_nan = traits.Bool( desc="Output NaN when an illegal operation is done (default).", argstr="-nan", xor=_xor_nan_zero_illegal, ) output_zero = traits.Bool( desc="Output zero when an illegal operation is done.", argstr="-zero", xor=_xor_nan_zero_illegal, ) output_illegal = traits.Bool( desc="Value to write out when an illegal operation is done. Default value: 1.79769e+308", argstr="-illegal_value", xor=_xor_nan_zero_illegal, ) _xor_expression = ("expression", "expfile") expression = traits.Str( desc="Expression to use in calculations.", argstr="-expression '%s'", xor=_xor_expression, mandatory=True, ) expfile = File( desc="Name of file containing expression.", argstr="-expfile %s", xor=_xor_expression, mandatory=True, ) # FIXME test this one, the argstr will probably need tweaking, see # _format_arg. outfiles = traits.List( traits.Tuple( traits.Str, File, argstr="-outfile %s %s", desc=( "List of (symbol, file) tuples indicating that output should be written" "to the specified file, taking values from the symbol which should be" "created in the expression (see the EXAMPLES section). If this option" "is given, then all non-option arguments are taken as input files." "This option can be used multiple times for multiple output files." ), ) ) eval_width = traits.Int( desc="Number of voxels to evaluate simultaneously.", argstr="-eval_width %s" ) class CalcOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) class Calc(CommandLine): """Compute an expression using MINC files as input. 
Examples -------- >>> from nipype.interfaces.minc import Calc >>> from nipype.interfaces.minc.testdata import nonempty_minc_data >>> file0 = nonempty_minc_data(0) >>> file1 = nonempty_minc_data(1) >>> calc = Calc(input_files=[file0, file1], output_file='/tmp/calc.mnc', expression='A[0] + A[1]') # add files together >>> calc.run() # doctest: +SKIP """ input_spec = CalcInputSpec output_spec = CalcOutputSpec _cmd = "minccalc" # FIXME mincbbox produces output like # # -5.000000 -5.000000 -5.000000 4.800000 2.800000 8.800000 # # so perhaps this would be better returned as a pair of Python # lists instead of sending to an output file? class BBoxInputSpec(StdOutCommandLineInputSpec): input_file = File( desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 ) output_file = File( desc="output file containing bounding box corners", position=-1, name_source=["input_file"], hash_files=False, name_template="%s_bbox.txt", keep_extension=False, ) threshold = traits.Int( 0, desc="VIO_Real value threshold for bounding box. Default value: 0.", argstr="-threshold", ) _xor_one_two = ("one_line", "two_lines") one_line = traits.Bool( desc="Output on one line (default): start_x y z width_x y z", argstr="-one_line", xor=_xor_one_two, ) two_lines = traits.Bool( desc="""Write output with two rows (start and width).""", argstr="-two_lines", xor=_xor_one_two, ) format_mincresample = traits.Bool( desc="Output format for mincresample: (-step x y z -start x y z -nelements x y z", argstr="-mincresample", ) format_mincreshape = traits.Bool( desc="Output format for mincreshape: (-start x,y,z -count dx,dy,dz", argstr="-mincreshape", ) format_minccrop = traits.Bool( desc="Output format for minccrop: (-xlim x1 x2 -ylim y1 y2 -zlim z1 z2", argstr="-minccrop", ) # FIXME Not implemented, will clash with our parsing of the output? # Command-specific options: # Options for logging progress. Default = -verbose. 
# -verbose: Write messages indicating progress # -quiet: Do not write log messages # -debug: Print out debug info. class BBoxOutputSpec(TraitedSpec): output_file = File(desc="output file containing bounding box corners", exists=True) class BBox(StdOutCommandLine): """Determine a bounding box of image. Examples -------- >>> from nipype.interfaces.minc import BBox >>> from nipype.interfaces.minc.testdata import nonempty_minc_data >>> file0 = nonempty_minc_data(0) >>> bbox = BBox(input_file=file0) >>> bbox.run() # doctest: +SKIP """ input_spec = BBoxInputSpec output_spec = BBoxOutputSpec _cmd = "mincbbox" class BeastInputSpec(CommandLineInputSpec): """ TODO: Command-specific options: -verbose: Enable verbose output. -positive: Specify mask of positive segmentation (inside mask) instead of the default mask. -output_selection: Specify file to output selected files. -count: Specify file to output the patch count. -mask: Specify a segmentation mask instead of the the default mask. -no_mask: Do not apply a segmentation mask. Perform the segmentation over the entire image. -no_positive: Do not apply a positive mask. Generic options for all commands: -help: Print summary of command-line options and abort -version: Print version number of program and exit Copyright (C) 2011 Simon Fristed Eskildsen, Vladimir Fonov, Pierrick Coupe, Jose V. Manjon This program comes with ABSOLUTELY NO WARRANTY; for details type 'cat COPYING'. This is free software, and you are welcome to redistribute it under certain conditions; type 'cat COPYING' for details. Usage: mincbeast [options] mincbeast -help Get this example to work? https://github.com/BIC-MNI/BEaST/blob/master/README.library 2.3 Source the minc-toolkit (if installed): $ source /opt/minc/minc-toolkit-config.sh 2.4 Generate library by running: $ beast_prepareADNIlib -flip Example: $ sudo beast_prepareADNIlib -flip Downloads/ADNI /opt/minc/share/beast-library-1.1 3. 
Test the setup 3.1 Normalize your data $ beast_normalize -modeldir /opt/minc/share/icbm152_model_09c input.mnc normal.mnc normal.xfm 3.2 Run BEaST $ mincbeast /opt/minc/share/beast-library-1.1 normal.mnc brainmask.mnc -conf /opt/minc/share/beast-library-1.1/default.2mm.conf -same_res """ probability_map = traits.Bool( desc="Output the probability map instead of crisp mask.", argstr="-probability" ) flip_images = traits.Bool( desc="Flip images around the mid-sagittal plane to increase patch count.", argstr="-flip", ) load_moments = traits.Bool( desc=( "Do not calculate moments instead use precalculated" "library moments. (for optimization purposes)" ), argstr="-load_moments", ) fill_holes = traits.Bool(desc="Fill holes in the binary output.", argstr="-fill") median_filter = traits.Bool( desc="Apply a median filter on the probability map.", argstr="-median" ) nlm_filter = traits.Bool( desc="Apply an NLM filter on the probability map (experimental).", argstr="-nlm_filter", ) clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) configuration_file = File( desc="Specify configuration file.", argstr="-configuration %s" ) voxel_size = traits.Int( 4, usedefault=True, desc=( "Specify voxel size for calculations (4, 2, or 1)." "Default value: 4. Assumes no multiscale. Use configuration" "file for multiscale." ), argstr="-voxel_size %s", ) abspath = traits.Bool( desc="File paths in the library are absolute (default is relative to library root).", argstr="-abspath", usedefault=True, default_value=True, ) patch_size = traits.Int( 1, usedefault=True, desc="Specify patch size for single scale approach. Default value: 1.", argstr="-patch_size %s", ) search_area = traits.Int( 2, usedefault=True, desc="Specify size of search area for single scale approach. Default value: 2.", argstr="-search_area %s", ) confidence_level_alpha = traits.Float( 0.5, usedefault=True, desc="Specify confidence level Alpha. 
Default value: 0.5", argstr="-alpha %s", ) smoothness_factor_beta = traits.Float( 0.5, usedefault=True, desc="Specify smoothness factor Beta. Default value: 0.25", argstr="-beta %s", ) threshold_patch_selection = traits.Float( 0.95, usedefault=True, desc="Specify threshold for patch selection. Default value: 0.95", argstr="-threshold %s", ) number_selected_images = traits.Int( 20, usedefault=True, desc="Specify number of selected images. Default value: 20", argstr="-selection_num %s", ) same_resolution = traits.Bool( desc="Output final mask with the same resolution as input file.", argstr="-same_resolution", ) library_dir = Directory( desc="library directory", position=-3, argstr="%s", mandatory=True ) input_file = File(desc="input file", position=-2, argstr="%s", mandatory=True) output_file = File( desc="output file", position=-1, argstr="%s", name_source=["input_file"], hash_files=False, name_template="%s_beast_mask.mnc", ) class BeastOutputSpec(TraitedSpec): output_file = File(desc="output mask file", exists=True) class Beast(CommandLine): """Extract brain image using BEaST (Brain Extraction using non-local Segmentation Technique). 
Examples -------- >>> from nipype.interfaces.minc import Beast >>> from nipype.interfaces.minc.testdata import nonempty_minc_data >>> file0 = nonempty_minc_data(0) >>> beast = Beast(input_file=file0) >>> beast .run() # doctest: +SKIP """ input_spec = BeastInputSpec output_spec = BeastOutputSpec _cmd = "mincbeast" class PikInputSpec(CommandLineInputSpec): input_file = File( desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 ) _xor_image_type = ("jpg", "png") jpg = traits.Bool(desc="Output a jpg file.", xor=_xor_image_type) png = traits.Bool(desc="Output a png file (default).", xor=_xor_image_type) output_file = File( desc="output file", argstr="%s", genfile=True, position=-1, name_source=["input_file"], hash_files=False, name_template="%s.png", keep_extension=False, ) clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) # FIXME not implemented: --verbose # --fake # --lookup ==> arguments to pass to minclookup scale = traits.Int( 2, usedefault=True, desc=( "Scaling factor for resulting image. By default images are" "output at twice their original resolution." ), argstr="--scale %s", ) width = traits.Int( desc="Autoscale the resulting image to have a fixed image width (in pixels).", argstr="--width %s", ) depth = traits.Enum( 8, 16, desc="Bitdepth for resulting image 8 or 16 (MSB machines only!)", argstr="--depth %s", ) _xor_title = ("title_string", "title_with_filename") title = traits.Either( traits.Bool(desc="Use input filename as title in resulting image."), traits.Str(desc="Add a title to the resulting image."), argstr="%s", ) # see _format_arg for actual arg string title_size = traits.Int( desc="Font point size for the title.", argstr="--title_size %s", requires=["title"], ) annotated_bar = traits.Bool( desc="create an annotated bar to match the image (use height of the output image)", argstr="--anot_bar", ) # FIXME tuple of floats? Not voxel values? Man page doesn't specify. 
minc_range = traits.Tuple( traits.Float, traits.Float, desc="Valid range of values for MINC file.", argstr="--range %s %s", ) _xor_image_range = ("image_range", "auto_range") image_range = traits.Tuple( traits.Float, traits.Float, desc="Range of image values to use for pixel intensity.", argstr="--image_range %s %s", xor=_xor_image_range, ) auto_range = traits.Bool( desc="Automatically determine image range using a 5 and 95% PcT. (histogram)", argstr="--auto_range", xor=_xor_image_range, ) start = traits.Int( desc="Slice number to get. (note this is in voxel co-ordinates).", argstr="--slice %s", ) # FIXME Int is correct? _xor_slice = ("slice_z", "slice_y", "slice_x") slice_z = traits.Bool( desc="Get an axial/transverse (z) slice.", argstr="-z", xor=_xor_slice ) slice_y = traits.Bool(desc="Get a coronal (y) slice.", argstr="-y", xor=_xor_slice) slice_x = traits.Bool( desc="Get a sagittal (x) slice.", argstr="-x", xor=_xor_slice ) # FIXME typo in man page? sagital? triplanar = traits.Bool( desc="Create a triplanar view of the input file.", argstr="--triplanar" ) tile_size = traits.Int( desc="Pixel size for each image in a triplanar.", argstr="--tilesize %s" ) _xor_sagittal_offset = ("sagittal_offset", "sagittal_offset_perc") sagittal_offset = traits.Int( desc="Offset the sagittal slice from the centre.", argstr="--sagittal_offset %s" ) sagittal_offset_perc = traits.Range( low=0, high=100, desc="Offset the sagittal slice by a percentage from the centre.", argstr="--sagittal_offset_perc %d", ) _xor_vertical_horizontal = ("vertical_triplanar_view", "horizontal_triplanar_view") vertical_triplanar_view = traits.Bool( desc="Create a vertical triplanar view (Default).", argstr="--vertical", xor=_xor_vertical_horizontal, ) horizontal_triplanar_view = traits.Bool( desc="Create a horizontal triplanar view.", argstr="--horizontal", xor=_xor_vertical_horizontal, ) lookup = traits.Str(desc="Arguments to pass to minclookup", argstr="--lookup %s") class PikOutputSpec(TraitedSpec): 
output_file = File(desc="output image", exists=True) class Pik(CommandLine): """Generate images from minc files. Mincpik uses Imagemagick to generate images from Minc files. Examples -------- >>> from nipype.interfaces.minc import Pik >>> from nipype.interfaces.minc.testdata import nonempty_minc_data >>> file0 = nonempty_minc_data(0) >>> pik = Pik(input_file=file0, title='foo') >>> pik .run() # doctest: +SKIP """ input_spec = PikInputSpec output_spec = PikOutputSpec _cmd = "mincpik" def _format_arg(self, name, spec, value): if name == "title": if isinstance(value, bool) and value: return "--title" elif isinstance(value, str): return "--title --title_text %s" % (value,) else: raise ValueError('Unknown value for "title" argument: ' + str(value)) return super(Pik, self)._format_arg(name, spec, value) class BlurInputSpec(CommandLineInputSpec): input_file = File( desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 ) output_file_base = File(desc="output file base", argstr="%s", position=-1) clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) _xor_kernel = ("gaussian", "rect") gaussian = traits.Bool( desc="Use a gaussian smoothing kernel (default).", argstr="-gaussian", xor=_xor_kernel, ) rect = traits.Bool( desc="Use a rect (box) smoothing kernel.", argstr="-rect", xor=_xor_kernel ) gradient = traits.Bool( desc="Create the gradient magnitude volume as well.", argstr="-gradient" ) partial = traits.Bool( desc="Create the partial derivative and gradient magnitude volumes as well.", argstr="-partial", ) no_apodize = traits.Bool( desc="Do not apodize the data before blurring.", argstr="-no_apodize" ) _xor_main_options = ("fwhm", "fwhm3d", "standard_dev") fwhm = traits.Float( 0, desc="Full-width-half-maximum of gaussian kernel. Default value: 0.", argstr="-fwhm %s", xor=_xor_main_options, mandatory=True, ) standard_dev = traits.Float( 0, desc="Standard deviation of gaussian kernel. 
Default value: 0.", argstr="-standarddev %s", xor=_xor_main_options, mandatory=True, ) fwhm3d = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="-3dfwhm %s %s %s", desc=( "Full-width-half-maximum of gaussian kernel." "Default value: -1.79769e+308 -1.79769e+308 -1.79769e+308." ), xor=_xor_main_options, mandatory=True, ) dimensions = traits.Enum( 3, 1, 2, desc="Number of dimensions to blur (either 1,2 or 3). Default value: 3.", argstr="-dimensions %s", ) class BlurOutputSpec(TraitedSpec): output_file = File(desc="Blurred output file.", exists=True) gradient_dxyz = File(desc="Gradient dxyz.") partial_dx = File(desc="Partial gradient dx.") partial_dy = File(desc="Partial gradient dy.") partial_dz = File(desc="Partial gradient dz.") partial_dxyz = File(desc="Partial gradient dxyz.") class Blur(StdOutCommandLine): """ Convolve an input volume with a Gaussian blurring kernel of user-defined width. Optionally, the first partial derivatives and the gradient magnitude volume can be calculated. Examples -------- >>> from nipype.interfaces.minc import Blur >>> from nipype.interfaces.minc.testdata import minc3Dfile (1) Blur an input volume with a 6mm fwhm isotropic Gaussian blurring kernel: >>> blur = Blur(input_file=minc3Dfile, fwhm=6, output_file_base='/tmp/out_6') >>> blur.run() # doctest: +SKIP mincblur will create /tmp/out_6_blur.mnc. (2) Calculate the blurred and gradient magnitude data: >>> blur = Blur(input_file=minc3Dfile, fwhm=6, gradient=True, output_file_base='/tmp/out_6') >>> blur.run() # doctest: +SKIP will create /tmp/out_6_blur.mnc and /tmp/out_6_dxyz.mnc. (3) Calculate the blurred data, the partial derivative volumes and the gradient magnitude for the same data: >>> blur = Blur(input_file=minc3Dfile, fwhm=6, partial=True, output_file_base='/tmp/out_6') >>> blur.run() # doctest: +SKIP will create /tmp/out_6_blur.mnc, /tmp/out_6_dx.mnc, /tmp/out_6_dy.mnc, /tmp/out_6_dz.mnc and /tmp/out_6_dxyz.mnc. 
""" input_spec = BlurInputSpec output_spec = BlurOutputSpec _cmd = "mincblur" def _gen_output_base(self): output_file_base = self.inputs.output_file_base if isdefined(output_file_base): return output_file_base else: base_file_name = os.path.split(self.inputs.input_file)[1] # e.g. 'foo.mnc' base_file_name_no_ext = os.path.splitext(base_file_name)[0] # e.g. 'foo' output_base = os.path.join( os.getcwd(), base_file_name_no_ext + "_bluroutput" ) # e.g. '/tmp/blah/foo_bluroutput' # return os.path.splitext(self.inputs.input_file)[0] + # '_bluroutput' return output_base def _list_outputs(self): outputs = self.output_spec().get() output_file_base = self._gen_output_base() outputs["output_file"] = output_file_base + "_blur.mnc" if isdefined(self.inputs.gradient): outputs["gradient_dxyz"] = output_file_base + "_dxyz.mnc" if isdefined(self.inputs.partial): outputs["partial_dx"] = output_file_base + "_dx.mnc" outputs["partial_dy"] = output_file_base + "_dy.mnc" outputs["partial_dz"] = output_file_base + "_dz.mnc" outputs["partial_dxyz"] = output_file_base + "_dxyz.mnc" return outputs @property def cmdline(self): output_file_base = self.inputs.output_file_base orig_cmdline = super(Blur, self).cmdline if isdefined(output_file_base): return orig_cmdline else: # FIXME this seems like a bit of a hack. Can we force output_file # to show up in cmdline by default, even if it isn't specified in # the instantiation of Pik? 
return "%s %s" % (orig_cmdline, self._gen_output_base()) class MathInputSpec(CommandLineInputSpec): _xor_input_files = ("input_files", "filelist") input_files = InputMultiPath( File(exists=True), desc="input file(s) for calculation", mandatory=True, sep=" ", argstr="%s", position=-2, xor=_xor_input_files, ) output_file = File( desc="output file", argstr="%s", genfile=True, position=-1, name_source=["input_files"], hash_files=False, name_template="%s_mincmath.mnc", ) filelist = File( desc="Specify the name of a file containing input file names.", argstr="-filelist %s", exists=True, mandatory=True, xor=_xor_input_files, ) clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2") _xor_copy_header = ("copy_header", "no_copy_header") copy_header = traits.Bool( desc="Copy all of the header from the first file (default for one file).", argstr="-copy_header", xor=_xor_copy_header, ) no_copy_header = traits.Bool( desc="Do not copy all of the header from the first file (default for many files)).", argstr="-nocopy_header", xor=_xor_copy_header, ) _xor_format = ( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ) format_filetype = traits.Bool( desc="Use data type of first file (default).", argstr="-filetype", xor=_xor_format, ) format_byte = traits.Bool( desc="Write out byte data.", argstr="-byte", xor=_xor_format ) format_short = traits.Bool( desc="Write out short integer data.", argstr="-short", xor=_xor_format ) format_int = traits.Bool( desc="Write out 32-bit integer data.", argstr="-int", xor=_xor_format ) format_long = traits.Bool( desc="Superseded by -int.", argstr="-long", xor=_xor_format ) format_float = traits.Bool( desc="Write out single-precision floating-point data.", argstr="-float", xor=_xor_format, ) format_double = traits.Bool( desc="Write 
out double-precision floating-point data.", argstr="-double", xor=_xor_format, ) format_signed = traits.Bool( desc="Write signed integer data.", argstr="-signed", xor=_xor_format ) format_unsigned = traits.Bool( desc="Write unsigned integer data (default).", argstr="-unsigned", xor=_xor_format, ) voxel_range = traits.Tuple( traits.Int, traits.Int, argstr="-range %d %d", desc="Valid range for output data.", ) max_buffer_size_in_kb = traits.Range( low=0, desc="Specify the maximum size of the internal buffers (in kbytes).", value=4096, usedefault=True, argstr="-max_buffer_size_in_kb %d", ) _xor_check_dimensions = ("check_dimensions", "no_check_dimensions") check_dimensions = traits.Bool( desc="Check that dimension info matches across files (default).", argstr="-check_dimensions", xor=_xor_check_dimensions, ) no_check_dimensions = traits.Bool( desc="Do not check dimension info.", argstr="-nocheck_dimensions", xor=_xor_check_dimensions, ) dimension = traits.Str( desc="Specify a dimension along which we wish to perform a calculation.", argstr="-dimension %s", ) # FIXME Is it sensible to use ignore_nan and propagate_nan at the same # time? Document this. ignore_nan = traits.Bool( desc="Ignore invalid data (NaN) for accumulations.", argstr="-ignore_nan" ) propagate_nan = traits.Bool( desc="Invalid data in any file at a voxel produces a NaN (default).", argstr="-propagate_nan", ) # FIXME Double-check that these are mutually exclusive? _xor_nan_zero_illegal = ("output_nan", "output_zero", "output_illegal_value") output_nan = traits.Bool( desc="Output NaN when an illegal operation is done (default).", argstr="-nan", xor=_xor_nan_zero_illegal, ) output_zero = traits.Bool( desc="Output zero when an illegal operation is done.", argstr="-zero", xor=_xor_nan_zero_illegal, ) output_illegal = traits.Bool( desc=( "Value to write out when an illegal operation" "is done. 
Default value: 1.79769e+308" ), argstr="-illegal_value", xor=_xor_nan_zero_illegal, ) # FIXME A whole bunch of the parameters will be mutually exclusive, e.g. surely can't do sqrt and abs at the same time? # Or does mincmath do one and then the next? ########################################################################## # Traits that expect a bool (compare two volumes) or constant (manipulate one volume) # ########################################################################## bool_or_const_traits = [ "test_gt", "test_lt", "test_eq", "test_ne", "test_ge", "test_le", "calc_add", "calc_sub", "calc_mul", "calc_div", ] test_gt = traits.Either( traits.Bool(), traits.Float(), desc="Test for vol1 > vol2 or vol1 > constant.", argstr="-gt", ) test_lt = traits.Either( traits.Bool(), traits.Float(), desc="Test for vol1 < vol2 or vol1 < constant.", argstr="-lt", ) test_eq = traits.Either( traits.Bool(), traits.Float(), desc="Test for integer vol1 == vol2 or vol1 == constant.", argstr="-eq", ) test_ne = traits.Either( traits.Bool(), traits.Float(), desc="Test for integer vol1 != vol2 or vol1 != const.", argstr="-ne", ) test_ge = traits.Either( traits.Bool(), traits.Float(), desc="Test for vol1 >= vol2 or vol1 >= const.", argstr="-ge", ) test_le = traits.Either( traits.Bool(), traits.Float(), desc="Test for vol1 <= vol2 or vol1 <= const.", argstr="-le", ) calc_add = traits.Either( traits.Bool(), traits.Float(), desc="Add N volumes or volume + constant.", argstr="-add", ) calc_sub = traits.Either( traits.Bool(), traits.Float(), desc="Subtract 2 volumes or volume - constant.", argstr="-sub", ) calc_mul = traits.Either( traits.Bool(), traits.Float(), desc="Multiply N volumes or volume * constant.", argstr="-mult", ) calc_div = traits.Either( traits.Bool(), traits.Float(), desc="Divide 2 volumes or volume / constant.", argstr="-div", ) ###################################### # Traits that expect a single volume # ###################################### single_volume_traits = [ 
"invert", "calc_not", "sqrt", "square", "abs", "exp", "log", "scale", "clamp", "segment", "nsegment", "isnan", "isnan", ] # FIXME enforce this in _parse_inputs and check for other members invert = traits.Either( traits.Float(), desc="Calculate 1/c.", argstr="-invert -const %s" ) calc_not = traits.Bool(desc="Calculate !vol1.", argstr="-not") sqrt = traits.Bool(desc="Take square root of a volume.", argstr="-sqrt") square = traits.Bool(desc="Take square of a volume.", argstr="-square") abs = traits.Bool(desc="Take absolute value of a volume.", argstr="-abs") exp = traits.Tuple( traits.Float, traits.Float, argstr="-exp -const2 %s %s", desc="Calculate c2*exp(c1*x). Both constants must be specified.", ) log = traits.Tuple( traits.Float, traits.Float, argstr="-log -const2 %s %s", desc="Calculate log(x/c2)/c1. The constants c1 and c2 default to 1.", ) scale = traits.Tuple( traits.Float, traits.Float, argstr="-scale -const2 %s %s", desc="Scale a volume: volume * c1 + c2.", ) clamp = traits.Tuple( traits.Float, traits.Float, argstr="-clamp -const2 %s %s", desc="Clamp a volume to lie between two values.", ) segment = traits.Tuple( traits.Float, traits.Float, argstr="-segment -const2 %s %s", desc="Segment a volume using range of -const2: within range = 1, outside range = 0.", ) nsegment = traits.Tuple( traits.Float, traits.Float, argstr="-nsegment -const2 %s %s", desc="Opposite of -segment: within range = 0, outside range = 1.", ) isnan = traits.Bool(desc="Test for NaN values in vol1.", argstr="-isnan") nisnan = traits.Bool(desc="Negation of -isnan.", argstr="-nisnan") ############################################ # Traits that expect precisely two volumes # ############################################ two_volume_traits = ["percentdiff"] percentdiff = traits.Float( desc="Percent difference between 2 volumes, thresholded (const def=0.0).", argstr="-percentdiff", ) ##################################### # Traits that expect N >= 1 volumes # ##################################### 
n_volume_traits = ["count_valid", "maximum", "minimum", "calc_add", "calc_or"] count_valid = traits.Bool( desc="Count the number of valid values in N volumes.", argstr="-count_valid" ) maximum = traits.Bool(desc="Find maximum of N volumes.", argstr="-maximum") minimum = traits.Bool(desc="Find minimum of N volumes.", argstr="-minimum") calc_and = traits.Bool(desc="Calculate vol1 && vol2 (&& ...).", argstr="-and") calc_or = traits.Bool(desc="Calculate vol1 || vol2 (|| ...).", argstr="-or") class MathOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) class Math(StdOutCommandLine): """ Various mathematical operations supplied by mincmath. Examples -------- >>> from nipype.interfaces.minc import Math >>> from nipype.interfaces.minc.testdata import minc2Dfile Scale: volume*3.0 + 2: >>> scale = Math(input_files=[minc2Dfile], scale=(3.0, 2)) >>> scale.run() # doctest: +SKIP Test if >= 1.5: >>> gt = Math(input_files=[minc2Dfile], test_gt=1.5) >>> gt.run() # doctest: +SKIP """ input_spec = MathInputSpec output_spec = MathOutputSpec _cmd = "mincmath" def _format_arg(self, name, spec, value): assert value is not None if name in self.input_spec.bool_or_const_traits: # t is unused, what was I trying to do with it? 
# t = self.inputs.__getattribute__(name) if isinstance(value, bool) and value: return spec.argstr elif isinstance(value, bool) and not value: raise ValueError("Does not make sense to specify %s=False" % (name,)) elif isinstance(value, float): return "%s -const %s" % (spec.argstr, value) else: raise ValueError("Invalid %s argument: %s" % (name, value)) return super(Math, self)._format_arg(name, spec, value) def _parse_inputs(self): """A number of the command line options expect precisely one or two files.""" nr_input_files = len(self.inputs.input_files) for n in self.input_spec.bool_or_const_traits: t = self.inputs.__getattribute__(n) if isdefined(t): if isinstance(t, bool): if nr_input_files != 2: raise ValueError( "Due to the %s option we expected 2 files but input_files is of length %d" % (n, nr_input_files) ) elif isinstance(t, float): if nr_input_files != 1: raise ValueError( "Due to the %s option we expected 1 file but input_files is of length %d" % (n, nr_input_files) ) else: raise ValueError( "Argument should be a bool or const, but got: %s" % t ) for n in self.input_spec.single_volume_traits: t = self.inputs.__getattribute__(n) if isdefined(t): if nr_input_files != 1: raise ValueError( "Due to the %s option we expected 1 file but input_files is of length %d" % (n, nr_input_files) ) for n in self.input_spec.two_volume_traits: t = self.inputs.__getattribute__(n) if isdefined(t): if nr_input_files != 2: raise ValueError( "Due to the %s option we expected 2 files but input_files is of length %d" % (n, nr_input_files) ) for n in self.input_spec.n_volume_traits: t = self.inputs.__getattribute__(n) if isdefined(t): if not nr_input_files >= 1: raise ValueError( "Due to the %s option we expected at least one file but input_files is of length %d" % (n, nr_input_files) ) return super(Math, self)._parse_inputs() class ResampleInputSpec(CommandLineInputSpec): """ not implemented: -size: synonym for -nelements) -xsize: synonym for -xnelements -ysize: synonym for 
-ynelements -zsize: synonym for -ynelements """ input_file = File( desc="input file for resampling", exists=True, mandatory=True, argstr="%s", position=-2, ) output_file = File( desc="output file", genfile=True, argstr="%s", position=-1, name_source=["input_file"], hash_files=False, name_template="%s_resample.mnc", ) # This is a dummy input. input_grid_files = InputMultiPath(File, desc="input grid file(s)") two = traits.Bool(desc="Create a MINC 2 output file.", argstr="-2") clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) _xor_interpolation = ( "trilinear_interpolation", "tricubic_interpolation", "nearest_neighbour_interpolation", "sinc_interpolation", ) trilinear_interpolation = traits.Bool( desc="Do trilinear interpolation.", argstr="-trilinear", xor=_xor_interpolation ) tricubic_interpolation = traits.Bool( desc="Do tricubic interpolation.", argstr="-tricubic", xor=_xor_interpolation ) nearest_neighbour_interpolation = traits.Bool( desc="Do nearest neighbour interpolation.", argstr="-nearest_neighbour", xor=_xor_interpolation, ) sinc_interpolation = traits.Bool( desc="Do windowed sinc interpolation.", argstr="-sinc", xor=_xor_interpolation ) half_width_sinc_window = traits.Enum( 5, 1, 2, 3, 4, 6, 7, 8, 9, 10, desc="Set half-width of sinc window (1-10). Default value: 5.", argstr="-width %s", requires=["sinc_interpolation"], ) _xor_sinc_window_type = ("sinc_window_hanning", "sinc_window_hamming") sinc_window_hanning = traits.Bool( desc="Set sinc window type to Hanning.", argstr="-hanning", xor=_xor_sinc_window_type, requires=["sinc_interpolation"], ) sinc_window_hamming = traits.Bool( desc="Set sinc window type to Hamming.", argstr="-hamming", xor=_xor_sinc_window_type, requires=["sinc_interpolation"], ) transformation = File( desc="File giving world transformation. 
(Default = identity).", exists=True, argstr="-transformation %s", ) invert_transformation = traits.Bool( desc="Invert the transformation before using it.", argstr="-invert_transformation", ) _xor_input_sampling = ("vio_transform", "no_input_sampling") vio_transform = traits.Bool( desc="VIO_Transform the input sampling with the transform (default).", argstr="-tfm_input_sampling", xor=_xor_input_sampling, ) no_input_sampling = traits.Bool( desc="Use the input sampling without transforming (old behaviour).", argstr="-use_input_sampling", xor=_xor_input_sampling, ) like = File( desc="Specifies a model file for the resampling.", argstr="-like %s", exists=True, ) _xor_format = ( "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ) format_byte = traits.Bool( desc="Write out byte data.", argstr="-byte", xor=_xor_format ) format_short = traits.Bool( desc="Write out short integer data.", argstr="-short", xor=_xor_format ) format_int = traits.Bool( desc="Write out 32-bit integer data.", argstr="-int", xor=_xor_format ) format_long = traits.Bool( desc="Superseded by -int.", argstr="-long", xor=_xor_format ) format_float = traits.Bool( desc="Write out single-precision floating-point data.", argstr="-float", xor=_xor_format, ) format_double = traits.Bool( desc="Write out double-precision floating-point data.", argstr="-double", xor=_xor_format, ) format_signed = traits.Bool( desc="Write signed integer data.", argstr="-signed", xor=_xor_format ) format_unsigned = traits.Bool( desc="Write unsigned integer data (default).", argstr="-unsigned", xor=_xor_format, ) output_range = traits.Tuple( traits.Float, traits.Float, argstr="-range %s %s", desc="Valid range for output data. 
Default value: -1.79769e+308 -1.79769e+308.", ) _xor_slices = ("transverse", "sagittal", "coronal") transverse_slices = traits.Bool( desc="Write out transverse slices.", argstr="-transverse", xor=_xor_slices ) sagittal_slices = traits.Bool( desc="Write out sagittal slices", argstr="-sagittal", xor=_xor_slices ) coronal_slices = traits.Bool( desc="Write out coronal slices", argstr="-coronal", xor=_xor_slices ) _xor_fill = ("nofill", "fill") no_fill = traits.Bool( desc="Use value zero for points outside of input volume.", argstr="-nofill", xor=_xor_fill, ) fill = traits.Bool( desc="Use a fill value for points outside of input volume.", argstr="-fill", xor=_xor_fill, ) fill_value = traits.Float( desc=( "Specify a fill value for points outside of input volume." "Default value: 1.79769e+308." ), argstr="-fillvalue %s", requires=["fill"], ) _xor_scale = ("keep_real_range", "nokeep_real_range") keep_real_range = traits.Bool( desc="Keep the real scale of the input volume.", argstr="-keep_real_range", xor=_xor_scale, ) nokeep_real_range = traits.Bool( desc="Do not keep the real scale of the data (default).", argstr="-nokeep_real_range", xor=_xor_scale, ) _xor_spacetype = ("spacetype", "talairach") spacetype = traits.Str( desc="Set the spacetype attribute to a specified string.", argstr="-spacetype %s", ) talairach = traits.Bool(desc="Output is in Talairach space.", argstr="-talairach") origin = traits.Tuple( traits.Float, traits.Float, traits.Float, desc=( "Origin of first pixel in 3D space." "Default value: 1.79769e+308 1.79769e+308 1.79769e+308." ), argstr="-origin %s %s %s", ) standard_sampling = traits.Bool( desc="Set the sampling to standard values (step, start and dircos).", argstr="-standard_sampling", ) # FIXME Bool? units = traits.Str( desc="Specify the units of the output sampling.", argstr="-units %s" ) # FIXME String? # Elements along each dimension. # FIXME Ints? Ranges? # FIXME Check that this xor behaves correctly. 
_xor_nelements = ("nelements", "nelements_x_y_or_z") # nr elements along each dimension nelements = traits.Tuple( traits.Int, traits.Int, traits.Int, desc="Number of elements along each dimension (X, Y, Z).", argstr="-nelements %s %s %s", xor=_xor_nelements, ) # FIXME Is mincresample happy if we only specify one of these, or do we # need the requires=...? xnelements = traits.Int( desc="Number of elements along the X dimension.", argstr="-xnelements %s", requires=("ynelements", "znelements"), xor=_xor_nelements, ) ynelements = traits.Int( desc="Number of elements along the Y dimension.", argstr="-ynelements %s", requires=("xnelements", "znelements"), xor=_xor_nelements, ) znelements = traits.Int( desc="Number of elements along the Z dimension.", argstr="-znelements %s", requires=("xnelements", "ynelements"), xor=_xor_nelements, ) # step size along each dimension _xor_step = ("step", "step_x_y_or_z") step = traits.Tuple( traits.Int, traits.Int, traits.Int, desc="Step size along each dimension (X, Y, Z). Default value: (0, 0, 0).", argstr="-step %s %s %s", xor=_xor_nelements, ) # FIXME Use the requires=...? xstep = traits.Int( desc="Step size along the X dimension. Default value: 0.", argstr="-xstep %s", requires=("ystep", "zstep"), xor=_xor_step, ) ystep = traits.Int( desc="Step size along the Y dimension. Default value: 0.", argstr="-ystep %s", requires=("xstep", "zstep"), xor=_xor_step, ) zstep = traits.Int( desc="Step size along the Z dimension. Default value: 0.", argstr="-zstep %s", requires=("xstep", "ystep"), xor=_xor_step, ) # start point along each dimension _xor_start = ("start", "start_x_y_or_z") start = traits.Tuple( traits.Float, traits.Float, traits.Float, desc=( "Start point along each dimension (X, Y, Z)." "Default value: 1.79769e+308 1.79769e+308 1.79769e+308." ), argstr="-start %s %s %s", xor=_xor_nelements, ) # FIXME Use the requires=...? xstart = traits.Float( desc="Start point along the X dimension. 
Default value: 1.79769e+308.", argstr="-xstart %s", requires=("ystart", "zstart"), xor=_xor_start, ) ystart = traits.Float( desc="Start point along the Y dimension. Default value: 1.79769e+308.", argstr="-ystart %s", requires=("xstart", "zstart"), xor=_xor_start, ) zstart = traits.Float( desc="Start point along the Z dimension. Default value: 1.79769e+308.", argstr="-zstart %s", requires=("xstart", "ystart"), xor=_xor_start, ) # dircos along each dimension _xor_dircos = ("dircos", "dircos_x_y_or_z") dircos = traits.Tuple( traits.Float, traits.Float, traits.Float, desc=( "Direction cosines along each dimension (X, Y, Z). Default value:" "1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308 ..." " 1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308 1.79769e+308." ), argstr="-dircos %s %s %s", xor=_xor_nelements, ) # FIXME Use the requires=...? xdircos = traits.Float( desc=( "Direction cosines along the X dimension." "Default value: 1.79769e+308 1.79769e+308 1.79769e+308." ), argstr="-xdircos %s", requires=("ydircos", "zdircos"), xor=_xor_dircos, ) ydircos = traits.Float( desc=( "Direction cosines along the Y dimension." "Default value: 1.79769e+308 1.79769e+308 1.79769e+308." ), argstr="-ydircos %s", requires=("xdircos", "zdircos"), xor=_xor_dircos, ) zdircos = traits.Float( desc=( "Direction cosines along the Z dimension." "Default value: 1.79769e+308 1.79769e+308 1.79769e+308." ), argstr="-zdircos %s", requires=("xdircos", "ydircos"), xor=_xor_dircos, ) class ResampleOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) class Resample(StdOutCommandLine): """ Resample a minc file.' Examples -------- >>> from nipype.interfaces.minc import Resample >>> from nipype.interfaces.minc.testdata import minc2Dfile >>> r = Resample(input_file=minc2Dfile, output_file='/tmp/out.mnc') # Resample the file. 
>>> r.run() # doctest: +SKIP """ input_spec = ResampleInputSpec output_spec = ResampleOutputSpec _cmd = "mincresample" class NormInputSpec(CommandLineInputSpec): """ Not implemented: -version print version and exit -verbose be verbose -noverbose opposite of -verbose [default] -quiet be quiet -noquiet opposite of -quiet [default] -fake do a dry run, (echo cmds only) -nofake opposite of -fake [default] """ input_file = File( desc="input file to normalise", exists=True, mandatory=True, argstr="%s", position=-2, ) output_file = File( desc="output file", genfile=True, argstr="%s", position=-1, name_source=["input_file"], hash_files=False, name_template="%s_norm.mnc", ) output_threshold_mask = File( desc="File in which to store the threshold mask.", argstr="-threshold_mask %s", name_source=["input_file"], hash_files=False, name_template="%s_norm_threshold_mask.mnc", ) clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) # Normalisation Options mask = File( desc="Calculate the image normalisation within a mask.", argstr="-mask %s", exists=True, ) clamp = traits.Bool( desc="Force the ouput range between limits [default].", argstr="-clamp", usedefault=True, default_value=True, ) cutoff = traits.Range( low=0.0, high=100.0, desc="Cutoff value to use to calculate thresholds by a histogram PcT in %. [default: 0.01]", argstr="-cutoff %s", ) lower = traits.Float(desc="Lower real value to use.", argstr="-lower %s") upper = traits.Float(desc="Upper real value to use.", argstr="-upper %s") out_floor = traits.Float( desc="Output files maximum [default: 0]", argstr="-out_floor %s" ) # FIXME is this a float? out_ceil = traits.Float( desc="Output files minimum [default: 100]", argstr="-out_ceil %s" ) # FIXME is this a float? 
# Threshold Options threshold = traits.Bool( desc="Threshold the image (set values below threshold_perc to -out_floor).", argstr="-threshold", ) threshold_perc = traits.Range( low=0.0, high=100.0, desc="Threshold percentage (0.1 == lower 10% of intensity range) [default: 0.1].", argstr="-threshold_perc %s", ) threshold_bmt = traits.Bool( desc="Use the resulting image BiModalT as the threshold.", argstr="-threshold_bmt", ) threshold_blur = traits.Float( desc="Blur FWHM for intensity edges then thresholding [default: 2].", argstr="-threshold_blur %s", ) class NormOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) output_threshold_mask = File(desc="threshold mask file") class Norm(CommandLine): """Normalise a file between a max and minimum (possibly) using two histogram pct's. Examples -------- >>> from nipype.interfaces.minc import Norm >>> from nipype.interfaces.minc.testdata import minc2Dfile >>> n = Norm(input_file=minc2Dfile, output_file='/tmp/out.mnc') # Normalise the file. 
>>> n.run() # doctest: +SKIP """ input_spec = NormInputSpec output_spec = NormOutputSpec _cmd = "mincnorm" """ | volcentre will centre a MINC image's sampling about a point (0,0,0 typically) | | NB: It will modify the file in-place unless an outfile is given | | Problems or comments should be sent to: a.janke@gmail.com Summary of options: -version print version and exit -verbose be verbose -noverbose opposite of -verbose [default] -clobber clobber existing check files -noclobber opposite of -clobber [default] -fake do a dry run, (echo cmds only) -nofake opposite of -fake [default] -com Use the CoM of the volume for the new centre (via mincstats) -nocom opposite of -com [default] -centre Centre to use (x,y,z) [default: 0 0 0] -zero_dircos Set the direction cosines to identity [default] -nozero_dirco opposite of -zero_dircos Usage: volcentre [options] [] volcentre -help to list options """ class VolcentreInputSpec(CommandLineInputSpec): """ Not implemented: -fake do a dry run, (echo cmds only) -nofake opposite of -fake [default] """ input_file = File( desc="input file to centre", exists=True, mandatory=True, argstr="%s", position=-2, ) output_file = File( desc="output file", genfile=True, argstr="%s", position=-1, name_source=["input_file"], hash_files=False, name_template="%s_volcentre.mnc", ) verbose = traits.Bool( desc="Print out log messages. Default: False.", argstr="-verbose" ) clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) com = traits.Bool( desc="Use the CoM of the volume for the new centre (via mincstats). 
Default: False", argstr="-com", ) centre = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="-centre %s %s %s", desc="Centre to use (x,y,z) [default: 0 0 0].", ) zero_dircos = traits.Bool( desc="Set the direction cosines to identity [default].", argstr="-zero_dircos" ) class VolcentreOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) class Volcentre(CommandLine): """Centre a MINC image's sampling about a point, typically (0,0,0). Example -------- >>> from nipype.interfaces.minc import Volcentre >>> from nipype.interfaces.minc.testdata import minc2Dfile >>> vc = Volcentre(input_file=minc2Dfile) >>> vc.run() # doctest: +SKIP """ input_spec = VolcentreInputSpec output_spec = VolcentreOutputSpec _cmd = "volcentre" class VolpadInputSpec(CommandLineInputSpec): """ Not implemented: -fake do a dry run, (echo cmds only) -nofake opposite of -fake [default] | volpad pads a MINC volume | | Problems or comments should be sent to: a.janke@gmail.com Summary of options: -- General Options ------------------------------------------------------------- -verbose be verbose -noverbose opposite of -verbose [default] -clobber clobber existing files -noclobber opposite of -clobber [default] -fake do a dry run, (echo cmds only) -nofake opposite of -fake [default] """ input_file = File( desc="input file to centre", exists=True, mandatory=True, argstr="%s", position=-2, ) output_file = File( desc="output file", genfile=True, argstr="%s", position=-1, name_source=["input_file"], hash_files=False, name_template="%s_volpad.mnc", ) verbose = traits.Bool( desc="Print out log messages. Default: False.", argstr="-verbose" ) clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) auto = traits.Bool( desc="Automatically determine padding distances (uses -distance as max). 
Default: False.", argstr="-auto", ) auto_freq = traits.Float( desc="Frequency of voxels over bimodalt threshold to stop at [default: 500].", argstr="-auto_freq %s", ) distance = traits.Int( desc="Padding distance (in voxels) [default: 4].", argstr="-distance %s" ) smooth = traits.Bool( desc="Smooth (blur) edges before padding. Default: False.", argstr="-smooth" ) smooth_distance = traits.Int( desc="Smoothing distance (in voxels) [default: 4].", argstr="-smooth_distance %s", ) class VolpadOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) class Volpad(CommandLine): """Centre a MINC image's sampling about a point, typically (0,0,0). Examples -------- >>> from nipype.interfaces.minc import Volpad >>> from nipype.interfaces.minc.testdata import minc2Dfile >>> vp = Volpad(input_file=minc2Dfile, smooth=True, smooth_distance=4) >>> vp.run() # doctest: +SKIP """ input_spec = VolpadInputSpec output_spec = VolpadOutputSpec _cmd = "volpad" class VolisoInputSpec(CommandLineInputSpec): input_file = File( desc="input file to convert to isotropic sampling", exists=True, mandatory=True, argstr="%s", position=-2, ) output_file = File( desc="output file", genfile=True, argstr="%s", position=-1, name_source=["input_file"], hash_files=False, name_template="%s_voliso.mnc", ) verbose = traits.Bool( desc="Print out log messages. 
Default: False.", argstr="--verbose" ) clobber = traits.Bool( desc="Overwrite existing file.", argstr="--clobber", usedefault=True, default_value=True, ) maxstep = traits.Float( desc="The target maximum step desired in the output volume.", argstr="--maxstep %s", ) minstep = traits.Float( desc="The target minimum step desired in the output volume.", argstr="--minstep %s", ) avgstep = traits.Bool( desc="Calculate the maximum step from the average steps of the input volume.", argstr="--avgstep", ) class VolisoOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) class Voliso(CommandLine): """Changes the steps and starts in order that the output volume has isotropic sampling. Examples -------- >>> from nipype.interfaces.minc import Voliso >>> from nipype.interfaces.minc.testdata import minc2Dfile >>> viso = Voliso(input_file=minc2Dfile, minstep=0.1, avgstep=True) >>> viso.run() # doctest: +SKIP """ input_spec = VolisoInputSpec output_spec = VolisoOutputSpec _cmd = "voliso" class GennlxfmInputSpec(CommandLineInputSpec): output_file = File( desc="output file", genfile=True, argstr="%s", position=-1, name_source=["like"], hash_files=False, name_template="%s_gennlxfm.xfm", ) verbose = traits.Bool( desc="Print out log messages. Default: False.", argstr="-verbose" ) clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) ident = traits.Bool( desc="Generate an identity xfm. Default: False.", argstr="-ident" ) step = traits.Int(desc="Output ident xfm step [default: 1].", argstr="-step %s") like = File(desc="Generate a nlxfm like this file.", exists=True, argstr="-like %s") class GennlxfmOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) output_grid = File(desc="output grid", exists=True) class Gennlxfm(CommandLine): """Generate nonlinear xfms. Currently only identity xfms are supported! 
This tool is part of minc-widgets: https://github.com/BIC-MNI/minc-widgets/blob/master/gennlxfm/gennlxfm Examples -------- >>> from nipype.interfaces.minc import Gennlxfm >>> from nipype.interfaces.minc.testdata import minc2Dfile >>> gennlxfm = Gennlxfm(step=1, like=minc2Dfile) >>> gennlxfm.run() # doctest: +SKIP """ input_spec = GennlxfmInputSpec output_spec = GennlxfmOutputSpec _cmd = "gennlxfm" def _list_outputs(self): outputs = super(Gennlxfm, self)._list_outputs() outputs["output_grid"] = re.sub( ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_file"] ) return outputs class XfmConcatInputSpec(CommandLineInputSpec): input_files = InputMultiPath( File(exists=True), desc="input file(s)", mandatory=True, sep=" ", argstr="%s", position=-2, ) # This is a dummy input. input_grid_files = InputMultiPath(File, desc="input grid file(s)") output_file = File( desc="output file", genfile=True, argstr="%s", position=-1, name_source=["input_files"], hash_files=False, name_template="%s_xfmconcat.xfm", ) verbose = traits.Bool( desc="Print out log messages. Default: False.", argstr="-verbose" ) clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) class XfmConcatOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) output_grids = OutputMultiPath(File(exists=True), desc="output grids") class XfmConcat(CommandLine): """Concatenate transforms together. The output transformation is equivalent to applying input1.xfm, then input2.xfm, ..., in that order. 
Examples -------- >>> from nipype.interfaces.minc import XfmConcat >>> from nipype.interfaces.minc.testdata import minc2Dfile >>> conc = XfmConcat(input_files=['input1.xfm', 'input1.xfm']) >>> conc.run() # doctest: +SKIP """ input_spec = XfmConcatInputSpec output_spec = XfmConcatOutputSpec _cmd = "xfmconcat" def _list_outputs(self): outputs = super(XfmConcat, self)._list_outputs() if os.path.exists(outputs["output_file"]): if "grid" in open(outputs["output_file"], "r").read(): outputs["output_grids"] = glob.glob( re.sub(".(nlxfm|xfm)$", "_grid_*.mnc", outputs["output_file"]) ) return outputs class BestLinRegInputSpec(CommandLineInputSpec): source = File( desc="source Minc file", exists=True, mandatory=True, argstr="%s", position=-4 ) target = File( desc="target Minc file", exists=True, mandatory=True, argstr="%s", position=-3 ) output_xfm = File( desc="output xfm file", genfile=True, argstr="%s", position=-2, name_source=["source"], hash_files=False, name_template="%s_bestlinreg.xfm", keep_extension=False, ) output_mnc = File( desc="output mnc file", genfile=True, argstr="%s", position=-1, name_source=["source"], hash_files=False, name_template="%s_bestlinreg.mnc", keep_extension=False, ) verbose = traits.Bool( desc="Print out log messages. Default: False.", argstr="-verbose" ) clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) # FIXME Very bare implementation, none of these are done yet: """ -init_xfm initial transformation (default identity) -source_mask source mask to use during fitting -target_mask target mask to use during fitting -lsq9 use 9-parameter transformation (default) -lsq12 use 12-parameter transformation (default -lsq9) -lsq6 use 6-parameter transformation """ class BestLinRegOutputSpec(TraitedSpec): output_xfm = File(desc="output xfm file", exists=True) output_mnc = File(desc="output mnc file", exists=True) class BestLinReg(CommandLine): """Hierachial linear fitting between two files. 
The bestlinreg script is part of the EZminc package: https://github.com/BIC-MNI/EZminc/blob/master/scripts/bestlinreg.pl Examples -------- >>> from nipype.interfaces.minc import BestLinReg >>> from nipype.interfaces.minc.testdata import nonempty_minc_data >>> input_file = nonempty_minc_data(0) >>> target_file = nonempty_minc_data(1) >>> linreg = BestLinReg(source=input_file, target=target_file) >>> linreg.run() # doctest: +SKIP """ input_spec = BestLinRegInputSpec output_spec = BestLinRegOutputSpec _cmd = "bestlinreg" class NlpFitInputSpec(CommandLineInputSpec): source = File( desc="source Minc file", exists=True, mandatory=True, argstr="%s", position=-3 ) target = File( desc="target Minc file", exists=True, mandatory=True, argstr="%s", position=-2 ) output_xfm = File(desc="output xfm file", genfile=True, argstr="%s", position=-1) # This is a dummy input. input_grid_files = InputMultiPath(File, desc="input grid file(s)") config_file = File( desc="File containing the fitting configuration use.", argstr="-config_file %s", mandatory=True, exists=True, ) init_xfm = File( desc="Initial transformation (default identity).", argstr="-init_xfm %s", mandatory=True, exists=True, ) source_mask = File( desc="Source mask to use during fitting.", argstr="-source_mask %s", mandatory=True, exists=True, ) verbose = traits.Bool( desc="Print out log messages. Default: False.", argstr="-verbose" ) clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) class NlpFitOutputSpec(TraitedSpec): output_xfm = File(desc="output xfm file", exists=True) output_grid = File(desc="output grid file", exists=True) class NlpFit(CommandLine): """Hierarchial non-linear fitting with bluring. 
This tool is part of the minc-widgets package: https://github.com/BIC-MNI/minc-widgets/blob/master/nlpfit/nlpfit Examples -------- >>> from nipype.interfaces.minc import NlpFit >>> from nipype.interfaces.minc.testdata import nonempty_minc_data, nlp_config >>> from nipype.testing import example_data >>> source = nonempty_minc_data(0) >>> target = nonempty_minc_data(1) >>> source_mask = nonempty_minc_data(2) >>> config = nlp_config >>> initial = example_data('minc_initial.xfm') >>> nlpfit = NlpFit(config_file=config, init_xfm=initial, source_mask=source_mask, source=source, target=target) >>> nlpfit.run() # doctest: +SKIP """ input_spec = NlpFitInputSpec output_spec = NlpFitOutputSpec _cmd = "nlpfit" def _gen_filename(self, name): if name == "output_xfm": output_xfm = self.inputs.output_xfm if isdefined(output_xfm): return os.path.abspath(output_xfm) else: return ( aggregate_filename( [self.inputs.source, self.inputs.target], "nlpfit_xfm_output" ) + ".xfm" ) else: raise NotImplemented def _list_outputs(self): outputs = self.output_spec().get() outputs["output_xfm"] = os.path.abspath(self._gen_filename("output_xfm")) assert os.path.exists(outputs["output_xfm"]) if "grid" in open(outputs["output_xfm"], "r").read(): outputs["output_grid"] = re.sub( ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_xfm"] ) return outputs class XfmAvgInputSpec(CommandLineInputSpec): input_files = InputMultiPath( File(exists=True), desc="input file(s)", mandatory=True, sep=" ", argstr="%s", position=-2, ) # This is a dummy input. input_grid_files = InputMultiPath(File, desc="input grid file(s)") output_file = File(desc="output file", genfile=True, argstr="%s", position=-1) verbose = traits.Bool( desc="Print out log messages. 
Default: False.", argstr="-verbose" ) clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) # FIXME xor these: avg_linear = traits.Bool( desc="average the linear part [default].", argstr="-avg_linear" ) avg_nonlinear = traits.Bool( desc="average the non-linear part [default].", argstr="-avg_nonlinear" ) ignore_linear = traits.Bool( desc="opposite of -avg_linear.", argstr="-ignore_linear" ) ignore_nonlinear = traits.Bool( desc="opposite of -avg_nonlinear.", argstr="-ignore_nonline" ) class XfmAvgOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) output_grid = File(desc="output grid file", exists=True) class XfmAvg(CommandLine): """Average a number of xfm transforms using matrix logs and exponents. The program xfmavg calls Octave for numerical work. This tool is part of the minc-widgets package: https://github.com/BIC-MNI/minc-widgets/tree/master/xfmavg Examples -------- >>> from nipype.interfaces.minc import XfmAvg >>> from nipype.interfaces.minc.testdata import nonempty_minc_data, nlp_config >>> from nipype.testing import example_data >>> xfm1 = example_data('minc_initial.xfm') >>> xfm2 = example_data('minc_initial.xfm') # cheating for doctest >>> xfmavg = XfmAvg(input_files=[xfm1, xfm2]) >>> xfmavg.run() # doctest: +SKIP """ input_spec = XfmAvgInputSpec output_spec = XfmAvgOutputSpec _cmd = "xfmavg" def _gen_filename(self, name): if name == "output_file": output_file = self.inputs.output_file if isdefined(output_file): return os.path.abspath(output_file) else: return ( aggregate_filename(self.inputs.input_files, "xfmavg_output") + ".xfm" ) else: raise NotImplemented def _gen_outfilename(self): return self._gen_filename("output_file") def _list_outputs(self): outputs = self.output_spec().get() outputs["output_file"] = os.path.abspath(self._gen_outfilename()) assert os.path.exists(outputs["output_file"]) if "grid" in open(outputs["output_file"], "r").read(): outputs["output_grid"] 
= re.sub( ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_file"] ) return outputs class XfmInvertInputSpec(CommandLineInputSpec): input_file = File( desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 ) output_file = File(desc="output file", genfile=True, argstr="%s", position=-1) verbose = traits.Bool( desc="Print out log messages. Default: False.", argstr="-verbose" ) clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) class XfmInvertOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) output_grid = File(desc="output grid file", exists=True) class XfmInvert(CommandLine): """Invert an xfm transform file. Examples -------- >>> from nipype.interfaces.minc import XfmAvg >>> from nipype.testing import example_data >>> xfm = example_data('minc_initial.xfm') >>> invert = XfmInvert(input_file=xfm) >>> invert.run() # doctest: +SKIP """ input_spec = XfmInvertInputSpec output_spec = XfmInvertOutputSpec _cmd = "xfminvert" def _gen_filename(self, name): if name == "output_file": output_file = self.inputs.output_file if isdefined(output_file): return os.path.abspath(output_file) else: return ( aggregate_filename([self.inputs.input_file], "xfminvert_output") + ".xfm" ) else: raise NotImplemented def _gen_outfilename(self): return self._gen_filename("output_file") def _list_outputs(self): outputs = self.output_spec().get() outputs["output_file"] = os.path.abspath(self._gen_outfilename()) assert os.path.exists(outputs["output_file"]) if "grid" in open(outputs["output_file"], "r").read(): outputs["output_grid"] = re.sub( ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_file"] ) return outputs class BigAverageInputSpec(CommandLineInputSpec): input_files = InputMultiPath( File(exists=True), desc="input file(s)", mandatory=True, sep=" ", argstr="%s", position=-2, ) output_file = File( desc="output file", genfile=True, argstr="%s", position=-1, name_source=["input_files"], 
hash_files=False, name_template="%s_bigaverage.mnc", ) verbose = traits.Bool( desc="Print out log messages. Default: False.", argstr="--verbose" ) clobber = traits.Bool( desc="Overwrite existing file.", argstr="--clobber", usedefault=True, default_value=True, ) # FIXME Redumentary implementation, various parameters not implemented. # TODO! output_float = traits.Bool( desc="Output files with float precision.", argstr="--float" ) robust = traits.Bool( desc=( "Perform robust averaging, features that are outside 1 standard" "deviation from the mean are downweighted. Works well for noisy" "data with artifacts. see the --tmpdir option if you have a" "large number of input files." ), argstr="-robust", ) # Should Nipype deal with where the temp directory is? tmpdir = Directory(desc="temporary files directory", argstr="-tmpdir %s") sd_file = File( desc="Place standard deviation image in specified file.", argstr="--sdfile %s", name_source=["input_files"], hash_files=False, name_template="%s_bigaverage_stdev.mnc", ) class BigAverageOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) sd_file = File(desc="standard deviation image", exists=True) class BigAverage(CommandLine): """Average 1000's of MINC files in linear time. mincbigaverage is designed to discretise the problem of averaging either a large number of input files or averaging a smaller number of large files. (>1GB each). There is also some code included to perform "robust" averaging in which only the most common features are kept via down-weighting outliers beyond a standard deviation. One advantage of mincbigaverage is that it avoids issues around the number of possible open files in HDF/netCDF. In short if you have more than 100 files open at once while averaging things will slow down significantly. mincbigaverage does this via a iterative approach to averaging files and is a direct drop in replacement for mincaverage. 
That said not all the arguments of mincaverage are supported in mincbigaverage but they should be. This tool is part of the minc-widgets package: https://github.com/BIC-MNI/minc-widgets/blob/master/mincbigaverage/mincbigaverage Examples -------- >>> from nipype.interfaces.minc import BigAverage >>> from nipype.interfaces.minc.testdata import nonempty_minc_data >>> files = [nonempty_minc_data(i) for i in range(3)] >>> average = BigAverage(input_files=files, output_float=True, robust=True) >>> average.run() # doctest: +SKIP """ input_spec = BigAverageInputSpec output_spec = BigAverageOutputSpec _cmd = "mincbigaverage" class ReshapeInputSpec(CommandLineInputSpec): input_file = File( desc="input file", exists=True, mandatory=True, argstr="%s", position=-2 ) output_file = File( desc="output file", genfile=True, argstr="%s", position=-1, name_source=["input_file"], hash_files=False, name_template="%s_reshape.mnc", ) verbose = traits.Bool( desc="Print out log messages. Default: False.", argstr="-verbose" ) clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) # FIXME MANY options not implemented! write_short = traits.Bool(desc="Convert to short integer data.", argstr="-short") class ReshapeOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) class Reshape(CommandLine): """Cut a hyperslab out of a minc file, with dimension reordering. This is also useful for rewriting with a different format, for example converting to short (see example below). 
Examples -------- >>> from nipype.interfaces.minc import Reshape >>> from nipype.interfaces.minc.testdata import nonempty_minc_data >>> input_file = nonempty_minc_data(0) >>> reshape_to_short = Reshape(input_file=input_file, write_short=True) >>> reshape_to_short.run() # doctest: +SKIP """ input_spec = ReshapeInputSpec output_spec = ReshapeOutputSpec _cmd = "mincreshape" class VolSymmInputSpec(CommandLineInputSpec): input_file = File( desc="input file", exists=True, mandatory=True, argstr="%s", position=-3 ) trans_file = File( desc="output xfm trans file", genfile=True, argstr="%s", position=-2, name_source=["input_file"], hash_files=False, name_template="%s_vol_symm.xfm", keep_extension=False, ) output_file = File( desc="output file", genfile=True, argstr="%s", position=-1, name_source=["input_file"], hash_files=False, name_template="%s_vol_symm.mnc", ) # This is a dummy input. input_grid_files = InputMultiPath(File, desc="input grid file(s)") verbose = traits.Bool( desc="Print out log messages. Default: False.", argstr="-verbose" ) clobber = traits.Bool( desc="Overwrite existing file.", argstr="-clobber", usedefault=True, default_value=True, ) # FIXME MANY options not implemented! fit_linear = traits.Bool(desc="Fit using a linear xfm.", argstr="-linear") fit_nonlinear = traits.Bool(desc="Fit using a non-linear xfm.", argstr="-nonlinear") # FIXME This changes the input/output behaviour of trans_file! Split into # two separate interfaces? 
nofit = traits.Bool( desc="Use the input transformation instead of generating one.", argstr="-nofit" ) config_file = File( desc="File containing the fitting configuration (nlpfit -help for info).", argstr="-config_file %s", exists=True, ) x = traits.Bool(desc="Flip volume in x-plane (default).", argstr="-x") y = traits.Bool(desc="Flip volume in y-plane.", argstr="-y") z = traits.Bool(desc="Flip volume in z-plane.", argstr="-z") class VolSymmOutputSpec(TraitedSpec): output_file = File(desc="output file", exists=True) trans_file = File(desc="xfm trans file", exists=True) output_grid = File( desc="output grid file", exists=True ) # FIXME Is exists=True correct? class VolSymm(CommandLine): """Make a volume symmetric about an axis either linearly and/or nonlinearly. This is done by registering a volume to a flipped image of itself. This tool is part of the minc-widgets package: https://github.com/BIC-MNI/minc-widgets/blob/master/volsymm/volsymm Examples -------- >>> from nipype.interfaces.minc import VolSymm >>> from nipype.interfaces.minc.testdata import nonempty_minc_data >>> input_file = nonempty_minc_data(0) >>> volsymm = VolSymm(input_file=input_file) >>> volsymm.run() # doctest: +SKIP """ input_spec = VolSymmInputSpec output_spec = VolSymmOutputSpec _cmd = "volsymm" def _list_outputs(self): outputs = super(VolSymm, self)._list_outputs() # Have to manually check for the grid files. 
if os.path.exists(outputs["trans_file"]): if "grid" in open(outputs["trans_file"], "r").read(): outputs["output_grid"] = re.sub( ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["trans_file"] ) return outputs nipype-1.7.0/nipype/interfaces/minc/testdata.py000066400000000000000000000004751413403311400215630ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os from ...testing import example_data minc2Dfile = example_data("minc_test_2D_00.mnc") minc3Dfile = example_data("minc_test_3D_00.mnc") nlp_config = example_data("minc_nlp.conf") def nonempty_minc_data(i, shape="2D"): return example_data("minc_test_%s_%.2d.mnc" % (shape, i)) nipype-1.7.0/nipype/interfaces/minc/tests/000077500000000000000000000000001413403311400205345ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/minc/tests/__init__.py000066400000000000000000000000301413403311400226360ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Average.py000066400000000000000000000153761413403311400245630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import Average def test_Average_inputs(): input_map = dict( args=dict( argstr="%s", ), avgdim=dict( argstr="-avgdim %s", ), binarize=dict( argstr="-binarize", ), binrange=dict( argstr="-binrange %s %s", ), binvalue=dict( argstr="-binvalue %s", ), check_dimensions=dict( argstr="-check_dimensions", xor=("check_dimensions", "no_check_dimensions"), ), clobber=dict( argstr="-clobber", usedefault=True, ), copy_header=dict( argstr="-copy_header", xor=("copy_header", "no_copy_header"), ), debug=dict( argstr="-debug", ), environ=dict( nohash=True, usedefault=True, ), filelist=dict( argstr="-filelist %s", extensions=None, mandatory=True, xor=("input_files", "filelist"), ), format_byte=dict( argstr="-byte", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), 
format_double=dict( argstr="-double", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_filetype=dict( argstr="-filetype", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_float=dict( argstr="-float", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_int=dict( argstr="-int", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_long=dict( argstr="-long", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_short=dict( argstr="-short", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_signed=dict( argstr="-signed", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_unsigned=dict( argstr="-unsigned", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), input_files=dict( argstr="%s", mandatory=True, position=-2, sep=" ", xor=("input_files", "filelist"), ), max_buffer_size_in_kb=dict( argstr="-max_buffer_size_in_kb %d", usedefault=True, ), no_check_dimensions=dict( argstr="-nocheck_dimensions", xor=("check_dimensions", "no_check_dimensions"), ), no_copy_header=dict( argstr="-nocopy_header", xor=("copy_header", "no_copy_header"), ), nonormalize=dict( 
argstr="-nonormalize", xor=("normalize", "nonormalize"), ), normalize=dict( argstr="-normalize", xor=("normalize", "nonormalize"), ), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, name_source=["input_files"], name_template="%s_averaged.mnc", position=-1, ), quiet=dict( argstr="-quiet", xor=("verbose", "quiet"), ), sdfile=dict( argstr="-sdfile %s", extensions=None, ), two=dict( argstr="-2", ), verbose=dict( argstr="-verbose", xor=("verbose", "quiet"), ), voxel_range=dict( argstr="-range %d %d", ), weights=dict( argstr="-weights %s", sep=",", ), width_weighted=dict( argstr="-width_weighted", requires=("avgdim",), ), ) inputs = Average.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Average_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = Average.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_BBox.py000066400000000000000000000034671413403311400240410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import BBox def test_BBox_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), format_minccrop=dict( argstr="-minccrop", ), format_mincresample=dict( argstr="-mincresample", ), format_mincreshape=dict( argstr="-mincreshape", ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), one_line=dict( argstr="-one_line", xor=("one_line", "two_lines"), ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), output_file=dict( extensions=None, hash_files=False, keep_extension=False, name_source=["input_file"], name_template="%s_bbox.txt", position=-1, ), threshold=dict( argstr="-threshold", ), 
two_lines=dict( argstr="-two_lines", xor=("one_line", "two_lines"), ), ) inputs = BBox.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BBox_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = BBox.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Beast.py000066400000000000000000000054511413403311400242400ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import Beast def test_Beast_inputs(): input_map = dict( abspath=dict( argstr="-abspath", usedefault=True, ), args=dict( argstr="%s", ), clobber=dict( argstr="-clobber", usedefault=True, ), confidence_level_alpha=dict( argstr="-alpha %s", usedefault=True, ), configuration_file=dict( argstr="-configuration %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), fill_holes=dict( argstr="-fill", ), flip_images=dict( argstr="-flip", ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), library_dir=dict( argstr="%s", mandatory=True, position=-3, ), load_moments=dict( argstr="-load_moments", ), median_filter=dict( argstr="-median", ), nlm_filter=dict( argstr="-nlm_filter", ), number_selected_images=dict( argstr="-selection_num %s", usedefault=True, ), output_file=dict( argstr="%s", extensions=None, hash_files=False, name_source=["input_file"], name_template="%s_beast_mask.mnc", position=-1, ), patch_size=dict( argstr="-patch_size %s", usedefault=True, ), probability_map=dict( argstr="-probability", ), same_resolution=dict( argstr="-same_resolution", ), search_area=dict( argstr="-search_area %s", usedefault=True, ), smoothness_factor_beta=dict( argstr="-beta %s", usedefault=True, ), threshold_patch_selection=dict( 
argstr="-threshold %s", usedefault=True, ), voxel_size=dict( argstr="-voxel_size %s", usedefault=True, ), ) inputs = Beast.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Beast_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = Beast.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_BestLinReg.py000066400000000000000000000035671413403311400252060ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import BestLinReg def test_BestLinReg_inputs(): input_map = dict( args=dict( argstr="%s", ), clobber=dict( argstr="-clobber", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), output_mnc=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, keep_extension=False, name_source=["source"], name_template="%s_bestlinreg.mnc", position=-1, ), output_xfm=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, keep_extension=False, name_source=["source"], name_template="%s_bestlinreg.xfm", position=-2, ), source=dict( argstr="%s", extensions=None, mandatory=True, position=-4, ), target=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), verbose=dict( argstr="-verbose", ), ) inputs = BestLinReg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BestLinReg_outputs(): output_map = dict( output_mnc=dict( extensions=None, ), output_xfm=dict( extensions=None, ), ) outputs = BestLinReg.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_BigAverage.py000066400000000000000000000035141413403311400251740ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import BigAverage def test_BigAverage_inputs(): input_map = dict( args=dict( argstr="%s", ), clobber=dict( argstr="--clobber", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), input_files=dict( argstr="%s", mandatory=True, position=-2, sep=" ", ), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, name_source=["input_files"], name_template="%s_bigaverage.mnc", position=-1, ), output_float=dict( argstr="--float", ), robust=dict( argstr="-robust", ), sd_file=dict( argstr="--sdfile %s", extensions=None, hash_files=False, name_source=["input_files"], name_template="%s_bigaverage_stdev.mnc", ), tmpdir=dict( argstr="-tmpdir %s", ), verbose=dict( argstr="--verbose", ), ) inputs = BigAverage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BigAverage_outputs(): output_map = dict( output_file=dict( extensions=None, ), sd_file=dict( extensions=None, ), ) outputs = BigAverage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Blob.py000066400000000000000000000027021413403311400240540ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import Blob def test_Blob_inputs(): input_map = dict( args=dict( argstr="%s", ), determinant=dict( argstr="-determinant", ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), magnitude=dict( argstr="-magnitude", ), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, 
name_source=["input_file"], name_template="%s_blob.mnc", position=-1, ), trace=dict( argstr="-trace", ), translation=dict( argstr="-translation", ), ) inputs = Blob.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Blob_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = Blob.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Blur.py000066400000000000000000000045561413403311400241130ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import Blur def test_Blur_inputs(): input_map = dict( args=dict( argstr="%s", ), clobber=dict( argstr="-clobber", usedefault=True, ), dimensions=dict( argstr="-dimensions %s", ), environ=dict( nohash=True, usedefault=True, ), fwhm=dict( argstr="-fwhm %s", mandatory=True, xor=("fwhm", "fwhm3d", "standard_dev"), ), fwhm3d=dict( argstr="-3dfwhm %s %s %s", mandatory=True, xor=("fwhm", "fwhm3d", "standard_dev"), ), gaussian=dict( argstr="-gaussian", xor=("gaussian", "rect"), ), gradient=dict( argstr="-gradient", ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), no_apodize=dict( argstr="-no_apodize", ), output_file_base=dict( argstr="%s", extensions=None, position=-1, ), partial=dict( argstr="-partial", ), rect=dict( argstr="-rect", xor=("gaussian", "rect"), ), standard_dev=dict( argstr="-standarddev %s", mandatory=True, xor=("fwhm", "fwhm3d", "standard_dev"), ), ) inputs = Blur.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Blur_outputs(): output_map = dict( gradient_dxyz=dict( extensions=None, ), output_file=dict( extensions=None, ), 
partial_dx=dict( extensions=None, ), partial_dxyz=dict( extensions=None, ), partial_dy=dict( extensions=None, ), partial_dz=dict( extensions=None, ), ) outputs = Blur.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Calc.py000066400000000000000000000155221413403311400240440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import Calc def test_Calc_inputs(): input_map = dict( args=dict( argstr="%s", ), check_dimensions=dict( argstr="-check_dimensions", xor=("check_dimensions", "no_check_dimensions"), ), clobber=dict( argstr="-clobber", usedefault=True, ), copy_header=dict( argstr="-copy_header", xor=("copy_header", "no_copy_header"), ), debug=dict( argstr="-debug", ), environ=dict( nohash=True, usedefault=True, ), eval_width=dict( argstr="-eval_width %s", ), expfile=dict( argstr="-expfile %s", extensions=None, mandatory=True, xor=("expression", "expfile"), ), expression=dict( argstr="-expression '%s'", mandatory=True, xor=("expression", "expfile"), ), filelist=dict( argstr="-filelist %s", extensions=None, mandatory=True, xor=("input_files", "filelist"), ), format_byte=dict( argstr="-byte", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_double=dict( argstr="-double", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_filetype=dict( argstr="-filetype", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_float=dict( argstr="-float", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", 
"format_float", "format_double", "format_signed", "format_unsigned", ), ), format_int=dict( argstr="-int", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_long=dict( argstr="-long", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_short=dict( argstr="-short", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_signed=dict( argstr="-signed", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_unsigned=dict( argstr="-unsigned", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), ignore_nan=dict( argstr="-ignore_nan", ), input_files=dict( argstr="%s", mandatory=True, position=-2, sep=" ", ), max_buffer_size_in_kb=dict( argstr="-max_buffer_size_in_kb %d", ), no_check_dimensions=dict( argstr="-nocheck_dimensions", xor=("check_dimensions", "no_check_dimensions"), ), no_copy_header=dict( argstr="-nocopy_header", xor=("copy_header", "no_copy_header"), ), outfiles=dict(), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, name_source=["input_files"], name_template="%s_calc.mnc", position=-1, ), output_illegal=dict( argstr="-illegal_value", xor=("output_nan", "output_zero", "output_illegal_value"), ), output_nan=dict( argstr="-nan", xor=("output_nan", "output_zero", "output_illegal_value"), ), output_zero=dict( argstr="-zero", xor=("output_nan", "output_zero", "output_illegal_value"), ), propagate_nan=dict( argstr="-propagate_nan", ), quiet=dict( argstr="-quiet", xor=("verbose", 
"quiet"), ), two=dict( argstr="-2", ), verbose=dict( argstr="-verbose", xor=("verbose", "quiet"), ), voxel_range=dict( argstr="-range %d %d", ), ) inputs = Calc.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Calc_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = Calc.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Convert.py000066400000000000000000000030471413403311400246210ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import Convert def test_Convert_inputs(): input_map = dict( args=dict( argstr="%s", ), chunk=dict( argstr="-chunk %d", ), clobber=dict( argstr="-clobber", usedefault=True, ), compression=dict( argstr="-compress %s", ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, name_source=["input_file"], name_template="%s_convert_output.mnc", position=-1, ), template=dict( argstr="-template", ), two=dict( argstr="-2", ), ) inputs = Convert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Convert_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = Convert.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Copy.py000066400000000000000000000026461413403311400241170ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT 
EDIT from ..minc import Copy def test_Copy_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, name_source=["input_file"], name_template="%s_copy.mnc", position=-1, ), pixel_values=dict( argstr="-pixel_values", xor=("pixel_values", "real_values"), ), real_values=dict( argstr="-real_values", xor=("pixel_values", "real_values"), ), ) inputs = Copy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Copy_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = Copy.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Dump.py000066400000000000000000000040531413403311400241040ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import Dump def test_Dump_inputs(): input_map = dict( annotations_brief=dict( argstr="-b %s", xor=("annotations_brief", "annotations_full"), ), annotations_full=dict( argstr="-f %s", xor=("annotations_brief", "annotations_full"), ), args=dict( argstr="%s", ), coordinate_data=dict( argstr="-c", xor=("coordinate_data", "header_data"), ), environ=dict( nohash=True, usedefault=True, ), header_data=dict( argstr="-h", xor=("coordinate_data", "header_data"), ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), line_length=dict( argstr="-l %d", ), netcdf_name=dict( argstr="-n %s", ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), output_file=dict( extensions=None, hash_files=False, keep_extension=False, name_source=["input_file"], 
name_template="%s_dump.txt", position=-1, ), precision=dict( argstr="%s", ), variables=dict( argstr="-v %s", sep=",", ), ) inputs = Dump.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Dump_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = Dump.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Extract.py000066400000000000000000000160211413403311400246070ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import Extract def test_Extract_inputs(): input_map = dict( args=dict( argstr="%s", ), count=dict( argstr="-count %s", sep=",", ), environ=dict( nohash=True, usedefault=True, ), flip_any_direction=dict( argstr="-any_direction", xor=( "flip_positive_direction", "flip_negative_direction", "flip_any_direction", ), ), flip_negative_direction=dict( argstr="-negative_direction", xor=( "flip_positive_direction", "flip_negative_direction", "flip_any_direction", ), ), flip_positive_direction=dict( argstr="-positive_direction", xor=( "flip_positive_direction", "flip_negative_direction", "flip_any_direction", ), ), flip_x_any=dict( argstr="-xanydirection", xor=("flip_x_positive", "flip_x_negative", "flip_x_any"), ), flip_x_negative=dict( argstr="-xdirection", xor=("flip_x_positive", "flip_x_negative", "flip_x_any"), ), flip_x_positive=dict( argstr="+xdirection", xor=("flip_x_positive", "flip_x_negative", "flip_x_any"), ), flip_y_any=dict( argstr="-yanydirection", xor=("flip_y_positive", "flip_y_negative", "flip_y_any"), ), flip_y_negative=dict( argstr="-ydirection", xor=("flip_y_positive", "flip_y_negative", "flip_y_any"), ), flip_y_positive=dict( argstr="+ydirection", xor=("flip_y_positive", "flip_y_negative", 
"flip_y_any"), ), flip_z_any=dict( argstr="-zanydirection", xor=("flip_z_positive", "flip_z_negative", "flip_z_any"), ), flip_z_negative=dict( argstr="-zdirection", xor=("flip_z_positive", "flip_z_negative", "flip_z_any"), ), flip_z_positive=dict( argstr="+zdirection", xor=("flip_z_positive", "flip_z_negative", "flip_z_any"), ), image_maximum=dict( argstr="-image_maximum %s", ), image_minimum=dict( argstr="-image_minimum %s", ), image_range=dict( argstr="-image_range %s %s", ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), nonormalize=dict( argstr="-nonormalize", xor=("normalize", "nonormalize"), ), normalize=dict( argstr="-normalize", xor=("normalize", "nonormalize"), ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), output_file=dict( extensions=None, hash_files=False, keep_extension=False, name_source=["input_file"], name_template="%s.raw", position=-1, ), start=dict( argstr="-start %s", sep=",", ), write_ascii=dict( argstr="-ascii", xor=( "write_ascii", "write_ascii", "write_byte", "write_short", "write_int", "write_long", "write_float", "write_double", "write_signed", "write_unsigned", ), ), write_byte=dict( argstr="-byte", xor=( "write_ascii", "write_ascii", "write_byte", "write_short", "write_int", "write_long", "write_float", "write_double", "write_signed", "write_unsigned", ), ), write_double=dict( argstr="-double", xor=( "write_ascii", "write_ascii", "write_byte", "write_short", "write_int", "write_long", "write_float", "write_double", "write_signed", "write_unsigned", ), ), write_float=dict( argstr="-float", xor=( "write_ascii", "write_ascii", "write_byte", "write_short", "write_int", "write_long", "write_float", "write_double", "write_signed", "write_unsigned", ), ), write_int=dict( argstr="-int", xor=( "write_ascii", "write_ascii", "write_byte", "write_short", "write_int", "write_long", "write_float", "write_double", "write_signed", "write_unsigned", ), ), write_long=dict( argstr="-long", 
xor=( "write_ascii", "write_ascii", "write_byte", "write_short", "write_int", "write_long", "write_float", "write_double", "write_signed", "write_unsigned", ), ), write_range=dict( argstr="-range %s %s", ), write_short=dict( argstr="-short", xor=( "write_ascii", "write_ascii", "write_byte", "write_short", "write_int", "write_long", "write_float", "write_double", "write_signed", "write_unsigned", ), ), write_signed=dict( argstr="-signed", xor=("write_signed", "write_unsigned"), ), write_unsigned=dict( argstr="-unsigned", xor=("write_signed", "write_unsigned"), ), ) inputs = Extract.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Extract_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = Extract.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Gennlxfm.py000066400000000000000000000027471413403311400247650ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import Gennlxfm def test_Gennlxfm_inputs(): input_map = dict( args=dict( argstr="%s", ), clobber=dict( argstr="-clobber", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), ident=dict( argstr="-ident", ), like=dict( argstr="-like %s", extensions=None, ), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, name_source=["like"], name_template="%s_gennlxfm.xfm", position=-1, ), step=dict( argstr="-step %s", ), verbose=dict( argstr="-verbose", ), ) inputs = Gennlxfm.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Gennlxfm_outputs(): output_map = dict( output_file=dict( extensions=None, ), output_grid=dict( 
extensions=None, ), ) outputs = Gennlxfm.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Math.py000066400000000000000000000202241413403311400240660ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import Math def test_Math_inputs(): input_map = dict( abs=dict( argstr="-abs", ), args=dict( argstr="%s", ), calc_add=dict( argstr="-add", ), calc_and=dict( argstr="-and", ), calc_div=dict( argstr="-div", ), calc_mul=dict( argstr="-mult", ), calc_not=dict( argstr="-not", ), calc_or=dict( argstr="-or", ), calc_sub=dict( argstr="-sub", ), check_dimensions=dict( argstr="-check_dimensions", xor=("check_dimensions", "no_check_dimensions"), ), clamp=dict( argstr="-clamp -const2 %s %s", ), clobber=dict( argstr="-clobber", usedefault=True, ), copy_header=dict( argstr="-copy_header", xor=("copy_header", "no_copy_header"), ), count_valid=dict( argstr="-count_valid", ), dimension=dict( argstr="-dimension %s", ), environ=dict( nohash=True, usedefault=True, ), exp=dict( argstr="-exp -const2 %s %s", ), filelist=dict( argstr="-filelist %s", extensions=None, mandatory=True, xor=("input_files", "filelist"), ), format_byte=dict( argstr="-byte", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_double=dict( argstr="-double", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_filetype=dict( argstr="-filetype", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_float=dict( argstr="-float", xor=( "format_filetype", "format_byte", 
"format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_int=dict( argstr="-int", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_long=dict( argstr="-long", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_short=dict( argstr="-short", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_signed=dict( argstr="-signed", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_unsigned=dict( argstr="-unsigned", xor=( "format_filetype", "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), ignore_nan=dict( argstr="-ignore_nan", ), input_files=dict( argstr="%s", mandatory=True, position=-2, sep=" ", xor=("input_files", "filelist"), ), invert=dict( argstr="-invert -const %s", ), isnan=dict( argstr="-isnan", ), log=dict( argstr="-log -const2 %s %s", ), max_buffer_size_in_kb=dict( argstr="-max_buffer_size_in_kb %d", usedefault=True, ), maximum=dict( argstr="-maximum", ), minimum=dict( argstr="-minimum", ), nisnan=dict( argstr="-nisnan", ), no_check_dimensions=dict( argstr="-nocheck_dimensions", xor=("check_dimensions", "no_check_dimensions"), ), no_copy_header=dict( argstr="-nocopy_header", xor=("copy_header", "no_copy_header"), ), nsegment=dict( argstr="-nsegment -const2 %s %s", ), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, name_source=["input_files"], name_template="%s_mincmath.mnc", position=-1, ), output_illegal=dict( 
argstr="-illegal_value", xor=("output_nan", "output_zero", "output_illegal_value"), ), output_nan=dict( argstr="-nan", xor=("output_nan", "output_zero", "output_illegal_value"), ), output_zero=dict( argstr="-zero", xor=("output_nan", "output_zero", "output_illegal_value"), ), percentdiff=dict( argstr="-percentdiff", ), propagate_nan=dict( argstr="-propagate_nan", ), scale=dict( argstr="-scale -const2 %s %s", ), segment=dict( argstr="-segment -const2 %s %s", ), sqrt=dict( argstr="-sqrt", ), square=dict( argstr="-square", ), test_eq=dict( argstr="-eq", ), test_ge=dict( argstr="-ge", ), test_gt=dict( argstr="-gt", ), test_le=dict( argstr="-le", ), test_lt=dict( argstr="-lt", ), test_ne=dict( argstr="-ne", ), two=dict( argstr="-2", ), voxel_range=dict( argstr="-range %d %d", ), ) inputs = Math.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Math_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = Math.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_NlpFit.py000066400000000000000000000035301413403311400243720ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import NlpFit def test_NlpFit_inputs(): input_map = dict( args=dict( argstr="%s", ), clobber=dict( argstr="-clobber", usedefault=True, ), config_file=dict( argstr="-config_file %s", extensions=None, mandatory=True, ), environ=dict( nohash=True, usedefault=True, ), init_xfm=dict( argstr="-init_xfm %s", extensions=None, mandatory=True, ), input_grid_files=dict(), output_xfm=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), source=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), source_mask=dict( argstr="-source_mask %s", 
extensions=None, mandatory=True, ), target=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), verbose=dict( argstr="-verbose", ), ) inputs = NlpFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_NlpFit_outputs(): output_map = dict( output_grid=dict( extensions=None, ), output_xfm=dict( extensions=None, ), ) outputs = NlpFit.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Norm.py000066400000000000000000000045761413403311400241240ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import Norm def test_Norm_inputs(): input_map = dict( args=dict( argstr="%s", ), clamp=dict( argstr="-clamp", usedefault=True, ), clobber=dict( argstr="-clobber", usedefault=True, ), cutoff=dict( argstr="-cutoff %s", ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), lower=dict( argstr="-lower %s", ), mask=dict( argstr="-mask %s", extensions=None, ), out_ceil=dict( argstr="-out_ceil %s", ), out_floor=dict( argstr="-out_floor %s", ), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, name_source=["input_file"], name_template="%s_norm.mnc", position=-1, ), output_threshold_mask=dict( argstr="-threshold_mask %s", extensions=None, hash_files=False, name_source=["input_file"], name_template="%s_norm_threshold_mask.mnc", ), threshold=dict( argstr="-threshold", ), threshold_blur=dict( argstr="-threshold_blur %s", ), threshold_bmt=dict( argstr="-threshold_bmt", ), threshold_perc=dict( argstr="-threshold_perc %s", ), upper=dict( argstr="-upper %s", ), ) inputs = Norm.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Norm_outputs(): output_map = dict( output_file=dict( extensions=None, ), output_threshold_mask=dict( extensions=None, ), ) outputs = Norm.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Pik.py000066400000000000000000000063471413403311400237320ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import Pik def test_Pik_inputs(): input_map = dict( annotated_bar=dict( argstr="--anot_bar", ), args=dict( argstr="%s", ), auto_range=dict( argstr="--auto_range", xor=("image_range", "auto_range"), ), clobber=dict( argstr="-clobber", usedefault=True, ), depth=dict( argstr="--depth %s", ), environ=dict( nohash=True, usedefault=True, ), horizontal_triplanar_view=dict( argstr="--horizontal", xor=("vertical_triplanar_view", "horizontal_triplanar_view"), ), image_range=dict( argstr="--image_range %s %s", xor=("image_range", "auto_range"), ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), jpg=dict( xor=("jpg", "png"), ), lookup=dict( argstr="--lookup %s", ), minc_range=dict( argstr="--range %s %s", ), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, keep_extension=False, name_source=["input_file"], name_template="%s.png", position=-1, ), png=dict( xor=("jpg", "png"), ), sagittal_offset=dict( argstr="--sagittal_offset %s", ), sagittal_offset_perc=dict( argstr="--sagittal_offset_perc %d", ), scale=dict( argstr="--scale %s", usedefault=True, ), slice_x=dict( argstr="-x", xor=("slice_z", "slice_y", "slice_x"), ), slice_y=dict( argstr="-y", xor=("slice_z", "slice_y", "slice_x"), ), slice_z=dict( argstr="-z", xor=("slice_z", "slice_y", "slice_x"), ), start=dict( argstr="--slice %s", ), tile_size=dict( argstr="--tilesize %s", ), 
title=dict( argstr="%s", ), title_size=dict( argstr="--title_size %s", requires=["title"], ), triplanar=dict( argstr="--triplanar", ), vertical_triplanar_view=dict( argstr="--vertical", xor=("vertical_triplanar_view", "horizontal_triplanar_view"), ), width=dict( argstr="--width %s", ), ) inputs = Pik.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Pik_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = Pik.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Resample.py000066400000000000000000000240551413403311400247530ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import Resample def test_Resample_inputs(): input_map = dict( args=dict( argstr="%s", ), clobber=dict( argstr="-clobber", usedefault=True, ), coronal_slices=dict( argstr="-coronal", xor=("transverse", "sagittal", "coronal"), ), dircos=dict( argstr="-dircos %s %s %s", xor=("nelements", "nelements_x_y_or_z"), ), environ=dict( nohash=True, usedefault=True, ), fill=dict( argstr="-fill", xor=("nofill", "fill"), ), fill_value=dict( argstr="-fillvalue %s", requires=["fill"], ), format_byte=dict( argstr="-byte", xor=( "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_double=dict( argstr="-double", xor=( "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_float=dict( argstr="-float", xor=( "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_int=dict( argstr="-int", xor=( "format_byte", 
"format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_long=dict( argstr="-long", xor=( "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_short=dict( argstr="-short", xor=( "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_signed=dict( argstr="-signed", xor=( "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), format_unsigned=dict( argstr="-unsigned", xor=( "format_byte", "format_short", "format_int", "format_long", "format_float", "format_double", "format_signed", "format_unsigned", ), ), half_width_sinc_window=dict( argstr="-width %s", requires=["sinc_interpolation"], ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), input_grid_files=dict(), invert_transformation=dict( argstr="-invert_transformation", ), keep_real_range=dict( argstr="-keep_real_range", xor=("keep_real_range", "nokeep_real_range"), ), like=dict( argstr="-like %s", extensions=None, ), nearest_neighbour_interpolation=dict( argstr="-nearest_neighbour", xor=( "trilinear_interpolation", "tricubic_interpolation", "nearest_neighbour_interpolation", "sinc_interpolation", ), ), nelements=dict( argstr="-nelements %s %s %s", xor=("nelements", "nelements_x_y_or_z"), ), no_fill=dict( argstr="-nofill", xor=("nofill", "fill"), ), no_input_sampling=dict( argstr="-use_input_sampling", xor=("vio_transform", "no_input_sampling"), ), nokeep_real_range=dict( argstr="-nokeep_real_range", xor=("keep_real_range", "nokeep_real_range"), ), origin=dict( argstr="-origin %s %s %s", ), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, name_source=["input_file"], name_template="%s_resample.mnc", position=-1, ), output_range=dict( 
argstr="-range %s %s", ), sagittal_slices=dict( argstr="-sagittal", xor=("transverse", "sagittal", "coronal"), ), sinc_interpolation=dict( argstr="-sinc", xor=( "trilinear_interpolation", "tricubic_interpolation", "nearest_neighbour_interpolation", "sinc_interpolation", ), ), sinc_window_hamming=dict( argstr="-hamming", requires=["sinc_interpolation"], xor=("sinc_window_hanning", "sinc_window_hamming"), ), sinc_window_hanning=dict( argstr="-hanning", requires=["sinc_interpolation"], xor=("sinc_window_hanning", "sinc_window_hamming"), ), spacetype=dict( argstr="-spacetype %s", ), standard_sampling=dict( argstr="-standard_sampling", ), start=dict( argstr="-start %s %s %s", xor=("nelements", "nelements_x_y_or_z"), ), step=dict( argstr="-step %s %s %s", xor=("nelements", "nelements_x_y_or_z"), ), talairach=dict( argstr="-talairach", ), transformation=dict( argstr="-transformation %s", extensions=None, ), transverse_slices=dict( argstr="-transverse", xor=("transverse", "sagittal", "coronal"), ), tricubic_interpolation=dict( argstr="-tricubic", xor=( "trilinear_interpolation", "tricubic_interpolation", "nearest_neighbour_interpolation", "sinc_interpolation", ), ), trilinear_interpolation=dict( argstr="-trilinear", xor=( "trilinear_interpolation", "tricubic_interpolation", "nearest_neighbour_interpolation", "sinc_interpolation", ), ), two=dict( argstr="-2", ), units=dict( argstr="-units %s", ), vio_transform=dict( argstr="-tfm_input_sampling", xor=("vio_transform", "no_input_sampling"), ), xdircos=dict( argstr="-xdircos %s", requires=("ydircos", "zdircos"), xor=("dircos", "dircos_x_y_or_z"), ), xnelements=dict( argstr="-xnelements %s", requires=("ynelements", "znelements"), xor=("nelements", "nelements_x_y_or_z"), ), xstart=dict( argstr="-xstart %s", requires=("ystart", "zstart"), xor=("start", "start_x_y_or_z"), ), xstep=dict( argstr="-xstep %s", requires=("ystep", "zstep"), xor=("step", "step_x_y_or_z"), ), ydircos=dict( argstr="-ydircos %s", requires=("xdircos", 
"zdircos"), xor=("dircos", "dircos_x_y_or_z"), ), ynelements=dict( argstr="-ynelements %s", requires=("xnelements", "znelements"), xor=("nelements", "nelements_x_y_or_z"), ), ystart=dict( argstr="-ystart %s", requires=("xstart", "zstart"), xor=("start", "start_x_y_or_z"), ), ystep=dict( argstr="-ystep %s", requires=("xstep", "zstep"), xor=("step", "step_x_y_or_z"), ), zdircos=dict( argstr="-zdircos %s", requires=("xdircos", "ydircos"), xor=("dircos", "dircos_x_y_or_z"), ), znelements=dict( argstr="-znelements %s", requires=("xnelements", "ynelements"), xor=("nelements", "nelements_x_y_or_z"), ), zstart=dict( argstr="-zstart %s", requires=("xstart", "ystart"), xor=("start", "start_x_y_or_z"), ), zstep=dict( argstr="-zstep %s", requires=("xstep", "ystep"), xor=("step", "step_x_y_or_z"), ), ) inputs = Resample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Resample_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = Resample.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Reshape.py000066400000000000000000000026431413403311400245710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import Reshape def test_Reshape_inputs(): input_map = dict( args=dict( argstr="%s", ), clobber=dict( argstr="-clobber", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, name_source=["input_file"], name_template="%s_reshape.mnc", position=-1, ), verbose=dict( argstr="-verbose", ), write_short=dict( argstr="-short", ), ) inputs = Reshape.input_spec() for key, 
metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Reshape_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = Reshape.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_ToEcat.py000066400000000000000000000037241413403311400243620ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import ToEcat def test_ToEcat_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), ignore_acquisition_variable=dict( argstr="-ignore_acquisition_variable", ), ignore_ecat_acquisition_variable=dict( argstr="-ignore_ecat_acquisition_variable", ), ignore_ecat_main=dict( argstr="-ignore_ecat_main", ), ignore_ecat_subheader_variable=dict( argstr="-ignore_ecat_subheader_variable", ), ignore_patient_variable=dict( argstr="-ignore_patient_variable", ), ignore_study_variable=dict( argstr="-ignore_study_variable", ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), no_decay_corr_fctr=dict( argstr="-no_decay_corr_fctr", ), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, keep_extension=False, name_source=["input_file"], name_template="%s_to_ecat.v", position=-1, ), voxels_as_integers=dict( argstr="-label", ), ) inputs = ToEcat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ToEcat_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = ToEcat.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_ToRaw.py000066400000000000000000000067501413403311400242410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import ToRaw def test_ToRaw_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), nonormalize=dict( argstr="-nonormalize", xor=("normalize", "nonormalize"), ), normalize=dict( argstr="-normalize", xor=("normalize", "nonormalize"), ), out_file=dict( argstr="> %s", extensions=None, genfile=True, position=-1, ), output_file=dict( extensions=None, hash_files=False, keep_extension=False, name_source=["input_file"], name_template="%s.raw", position=-1, ), write_byte=dict( argstr="-byte", xor=( "write_byte", "write_short", "write_int", "write_long", "write_float", "write_double", ), ), write_double=dict( argstr="-double", xor=( "write_byte", "write_short", "write_int", "write_long", "write_float", "write_double", ), ), write_float=dict( argstr="-float", xor=( "write_byte", "write_short", "write_int", "write_long", "write_float", "write_double", ), ), write_int=dict( argstr="-int", xor=( "write_byte", "write_short", "write_int", "write_long", "write_float", "write_double", ), ), write_long=dict( argstr="-long", xor=( "write_byte", "write_short", "write_int", "write_long", "write_float", "write_double", ), ), write_range=dict( argstr="-range %s %s", ), write_short=dict( argstr="-short", xor=( "write_byte", "write_short", "write_int", "write_long", "write_float", "write_double", ), ), write_signed=dict( argstr="-signed", xor=("write_signed", "write_unsigned"), ), write_unsigned=dict( argstr="-unsigned", xor=("write_signed", "write_unsigned"), ), ) inputs = ToRaw.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ToRaw_outputs(): 
output_map = dict( output_file=dict( extensions=None, ), ) outputs = ToRaw.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_VolSymm.py000066400000000000000000000043621413403311400246100ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import VolSymm def test_VolSymm_inputs(): input_map = dict( args=dict( argstr="%s", ), clobber=dict( argstr="-clobber", usedefault=True, ), config_file=dict( argstr="-config_file %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), fit_linear=dict( argstr="-linear", ), fit_nonlinear=dict( argstr="-nonlinear", ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), input_grid_files=dict(), nofit=dict( argstr="-nofit", ), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, name_source=["input_file"], name_template="%s_vol_symm.mnc", position=-1, ), trans_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, keep_extension=False, name_source=["input_file"], name_template="%s_vol_symm.xfm", position=-2, ), verbose=dict( argstr="-verbose", ), x=dict( argstr="-x", ), y=dict( argstr="-y", ), z=dict( argstr="-z", ), ) inputs = VolSymm.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_VolSymm_outputs(): output_map = dict( output_file=dict( extensions=None, ), output_grid=dict( extensions=None, ), trans_file=dict( extensions=None, ), ) outputs = VolSymm.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Volcentre.py000066400000000000000000000030641413403311400251410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import Volcentre def test_Volcentre_inputs(): input_map = dict( args=dict( argstr="%s", ), centre=dict( argstr="-centre %s %s %s", ), clobber=dict( argstr="-clobber", usedefault=True, ), com=dict( argstr="-com", ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, name_source=["input_file"], name_template="%s_volcentre.mnc", position=-1, ), verbose=dict( argstr="-verbose", ), zero_dircos=dict( argstr="-zero_dircos", ), ) inputs = Volcentre.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Volcentre_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = Volcentre.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Voliso.py000066400000000000000000000030461413403311400244530ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import Voliso def test_Voliso_inputs(): input_map = dict( args=dict( argstr="%s", ), avgstep=dict( argstr="--avgstep", ), clobber=dict( argstr="--clobber", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), maxstep=dict( argstr="--maxstep %s", ), minstep=dict( argstr="--minstep %s", ), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, name_source=["input_file"], name_template="%s_voliso.mnc", position=-1, ), verbose=dict( 
argstr="--verbose", ), ) inputs = Voliso.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Voliso_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = Voliso.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_Volpad.py000066400000000000000000000032621413403311400244250ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import Volpad def test_Volpad_inputs(): input_map = dict( args=dict( argstr="%s", ), auto=dict( argstr="-auto", ), auto_freq=dict( argstr="-auto_freq %s", ), clobber=dict( argstr="-clobber", usedefault=True, ), distance=dict( argstr="-distance %s", ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, name_source=["input_file"], name_template="%s_volpad.mnc", position=-1, ), smooth=dict( argstr="-smooth", ), smooth_distance=dict( argstr="-smooth_distance %s", ), verbose=dict( argstr="-verbose", ), ) inputs = Volpad.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Volpad_outputs(): output_map = dict( output_file=dict( extensions=None, ), ) outputs = Volpad.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_XfmAvg.py000066400000000000000000000031641413403311400243710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import XfmAvg 
def test_XfmAvg_inputs(): input_map = dict( args=dict( argstr="%s", ), avg_linear=dict( argstr="-avg_linear", ), avg_nonlinear=dict( argstr="-avg_nonlinear", ), clobber=dict( argstr="-clobber", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), ignore_linear=dict( argstr="-ignore_linear", ), ignore_nonlinear=dict( argstr="-ignore_nonline", ), input_files=dict( argstr="%s", mandatory=True, position=-2, sep=" ", ), input_grid_files=dict(), output_file=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), verbose=dict( argstr="-verbose", ), ) inputs = XfmAvg.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_XfmAvg_outputs(): output_map = dict( output_file=dict( extensions=None, ), output_grid=dict( extensions=None, ), ) outputs = XfmAvg.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_XfmConcat.py000066400000000000000000000026451413403311400250660ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import XfmConcat def test_XfmConcat_inputs(): input_map = dict( args=dict( argstr="%s", ), clobber=dict( argstr="-clobber", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), input_files=dict( argstr="%s", mandatory=True, position=-2, sep=" ", ), input_grid_files=dict(), output_file=dict( argstr="%s", extensions=None, genfile=True, hash_files=False, name_source=["input_files"], name_template="%s_xfmconcat.xfm", position=-1, ), verbose=dict( argstr="-verbose", ), ) inputs = XfmConcat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_XfmConcat_outputs(): output_map = dict( output_file=dict( 
extensions=None, ), output_grids=dict(), ) outputs = XfmConcat.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/minc/tests/test_auto_XfmInvert.py000066400000000000000000000024731413403311400251250ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..minc import XfmInvert def test_XfmInvert_inputs(): input_map = dict( args=dict( argstr="%s", ), clobber=dict( argstr="-clobber", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), input_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), output_file=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), verbose=dict( argstr="-verbose", ), ) inputs = XfmInvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_XfmInvert_outputs(): output_map = dict( output_file=dict( extensions=None, ), output_grid=dict( extensions=None, ), ) outputs = XfmInvert.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mipav/000077500000000000000000000000001413403311400175605ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/mipav/__init__.py000066400000000000000000000013151413403311400216710ustar00rootroot00000000000000# -*- coding: utf-8 -*- """MIPAV enables quantitative analysis and visualization of multimodal medical images.""" from .developer import ( JistLaminarVolumetricLayering, JistBrainMgdmSegmentation, JistLaminarProfileGeometry, JistLaminarProfileCalculator, MedicAlgorithmN3, JistLaminarROIAveraging, MedicAlgorithmLesionToads, JistBrainMp2rageSkullStripping, JistCortexSurfaceMeshInflation, RandomVol, MedicAlgorithmImageCalculator, 
JistBrainMp2rageDuraEstimation, JistLaminarProfileSampling, MedicAlgorithmMipavReorient, MedicAlgorithmSPECTRE2010, JistBrainPartialVolumeFilter, JistIntensityMp2rageMasking, MedicAlgorithmThresholdToBinaryMask, ) nipype-1.7.0/nipype/interfaces/mipav/developer.py000066400000000000000000001512101413403311400221170ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ..base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class JistLaminarVolumetricLayeringInputSpec(CommandLineInputSpec): inInner = File( desc="Inner Distance Image (GM/WM boundary)", exists=True, argstr="--inInner %s" ) inOuter = File( desc="Outer Distance Image (CSF/GM boundary)", exists=True, argstr="--inOuter %s", ) inNumber = traits.Int(desc="Number of layers", argstr="--inNumber %d") inMax = traits.Int( desc="Max iterations for narrow band evolution", argstr="--inMax %d" ) inMin = traits.Float( desc="Min change ratio for narrow band evolution", argstr="--inMin %f" ) inLayering = traits.Enum( "distance-preserving", "volume-preserving", desc="Layering method", argstr="--inLayering %s", ) inLayering2 = traits.Enum( "outward", "inward", desc="Layering direction", argstr="--inLayering2 %s" ) incurvature = traits.Int( desc="curvature approximation scale (voxels)", argstr="--incurvature %d" ) inratio = traits.Float( desc="ratio smoothing kernel size (voxels)", argstr="--inratio %f" ) inpresmooth = traits.Enum( "true", "false", desc="pre-smooth cortical surfaces", argstr="--inpresmooth %s" ) inTopology = traits.Enum( "26/6", "6/26", "18/6", "6/18", "6/6", "wcs", "wco", "no", desc="Topology", argstr="--inTopology %s", ) xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outContinuous = traits.Either( traits.Bool, File(), 
hash_files=False, desc="Continuous depth measurement", argstr="--outContinuous %s", ) outDiscrete = traits.Either( traits.Bool, File(), hash_files=False, desc="Discrete sampled layers", argstr="--outDiscrete %s", ) outLayer = traits.Either( traits.Bool, File(), hash_files=False, desc="Layer boundary surfaces", argstr="--outLayer %s", ) null = traits.Str(desc="Execution Time", argstr="--null %s") xDefaultMem = traits.Int( desc="Set default maximum heap size", argstr="-xDefaultMem %d" ) xMaxProcess = traits.Int( 1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", usedefault=True, ) class JistLaminarVolumetricLayeringOutputSpec(TraitedSpec): outContinuous = File(desc="Continuous depth measurement", exists=True) outDiscrete = File(desc="Discrete sampled layers", exists=True) outLayer = File(desc="Layer boundary surfaces", exists=True) class JistLaminarVolumetricLayering(SEMLikeCommandLine): """Volumetric Layering. Builds a continuous layering of the cortex following distance-preserving or volume-preserving models of cortical folding. References ---------- Waehnert MD, Dinse J, Weiss M, Streicher MN, Waehnert P, Geyer S, Turner R, Bazin PL, Anatomically motivated modeling of cortical laminae, Neuroimage, 2013. 
""" input_spec = JistLaminarVolumetricLayeringInputSpec output_spec = JistLaminarVolumetricLayeringOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarVolumetricLayering " _outputs_filenames = { "outContinuous": "outContinuous.nii", "outLayer": "outLayer.nii", "outDiscrete": "outDiscrete.nii", } _redirect_x = True class JistBrainMgdmSegmentationInputSpec(CommandLineInputSpec): inMP2RAGE = File(desc="MP2RAGE T1 Map Image", exists=True, argstr="--inMP2RAGE %s") inMP2RAGE2 = File( desc="MP2RAGE T1-weighted Image", exists=True, argstr="--inMP2RAGE2 %s" ) inPV = File(desc="PV / Dura Image", exists=True, argstr="--inPV %s") inMPRAGE = File( desc="MPRAGE T1-weighted Image", exists=True, argstr="--inMPRAGE %s" ) inFLAIR = File(desc="FLAIR Image", exists=True, argstr="--inFLAIR %s") inAtlas = File(desc="Atlas file", exists=True, argstr="--inAtlas %s") inData = traits.Float(desc="Data weight", argstr="--inData %f") inCurvature = traits.Float(desc="Curvature weight", argstr="--inCurvature %f") inPosterior = traits.Float(desc="Posterior scale (mm)", argstr="--inPosterior %f") inMax = traits.Int(desc="Max iterations", argstr="--inMax %d") inMin = traits.Float(desc="Min change", argstr="--inMin %f") inSteps = traits.Int(desc="Steps", argstr="--inSteps %d") inTopology = traits.Enum( "26/6", "6/26", "18/6", "6/18", "6/6", "wcs", "wco", "no", desc="Topology", argstr="--inTopology %s", ) inCompute = traits.Enum( "true", "false", desc="Compute posteriors", argstr="--inCompute %s" ) inAdjust = traits.Enum( "true", "false", desc="Adjust intensity priors", argstr="--inAdjust %s" ) inOutput = traits.Enum( "segmentation", "memberships", desc="Output images", argstr="--inOutput %s" ) xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s") outSegmented = traits.Either( traits.Bool, File(), hash_files=False, desc="Segmented Brain Image", argstr="--outSegmented %s", ) outLevelset = traits.Either( traits.Bool, File(), 
hash_files=False,
        desc="Levelset Boundary Image",
        argstr="--outLevelset %s",
    )
    outPosterior2 = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Posterior Maximum Memberships (4D)",
        argstr="--outPosterior2 %s",
    )
    outPosterior3 = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Posterior Maximum Labels (4D)",
        argstr="--outPosterior3 %s",
    )
    # Generic JIST execution options shared by all wrappers in this module.
    null = traits.Str(desc="Execution Time", argstr="--null %s")
    xDefaultMem = traits.Int(
        desc="Set default maximum heap size", argstr="-xDefaultMem %d"
    )
    xMaxProcess = traits.Int(
        1,
        desc="Set default maximum number of processes.",
        argstr="-xMaxProcess %d",
        usedefault=True,
    )


class JistBrainMgdmSegmentationOutputSpec(TraitedSpec):
    # Files produced when the corresponding out* inputs are enabled.
    outSegmented = File(desc="Segmented Brain Image", exists=True)
    outLevelset = File(desc="Levelset Boundary Image", exists=True)
    outPosterior2 = File(desc="Posterior Maximum Memberships (4D)", exists=True)
    outPosterior3 = File(desc="Posterior Maximum Labels (4D)", exists=True)


class JistBrainMgdmSegmentation(SEMLikeCommandLine):
    """MGDM Whole Brain Segmentation.

    Estimate brain structures from an atlas for a MRI dataset (multiple input
    combinations are possible).

    """

    input_spec = JistBrainMgdmSegmentationInputSpec
    output_spec = JistBrainMgdmSegmentationOutputSpec
    _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.brain.JistBrainMgdmSegmentation "
    # Default filenames used when an out* input is set to True rather than a path.
    _outputs_filenames = {
        "outSegmented": "outSegmented.nii",
        "outPosterior2": "outPosterior2.nii",
        "outPosterior3": "outPosterior3.nii",
        "outLevelset": "outLevelset.nii",
    }
    # Run under a virtual X server; the JIST CLI needs a display.
    _redirect_x = True


class JistLaminarProfileGeometryInputSpec(CommandLineInputSpec):
    # Trait names map one-to-one onto the plugin's CLI flags via ``argstr``.
    inProfile = File(desc="Profile Surface Image", exists=True, argstr="--inProfile %s")
    incomputed = traits.Enum(
        "thickness",
        "curvedness",
        "shape_index",
        "mean_curvature",
        "gauss_curvature",
        "profile_length",
        "profile_curvature",
        "profile_torsion",
        desc="computed measure",
        argstr="--incomputed %s",
    )
    inregularization = traits.Enum(
        "none", "Gaussian", desc="regularization", argstr="--inregularization %s"
    )
    insmoothing = traits.Float(desc="smoothing parameter", argstr="--insmoothing %f")
    inoutside = traits.Float(desc="outside extension (mm)", argstr="--inoutside %f")
    xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s")
    outResult = traits.Either(
        traits.Bool, File(), hash_files=False, desc="Result", argstr="--outResult %s"
    )
    null = traits.Str(desc="Execution Time", argstr="--null %s")
    xDefaultMem = traits.Int(
        desc="Set default maximum heap size", argstr="-xDefaultMem %d"
    )
    xMaxProcess = traits.Int(
        1,
        desc="Set default maximum number of processes.",
        argstr="-xMaxProcess %d",
        usedefault=True,
    )


class JistLaminarProfileGeometryOutputSpec(TraitedSpec):
    outResult = File(desc="Result", exists=True)


class JistLaminarProfileGeometry(SEMLikeCommandLine):
    """Compute various geometric quantities for a cortical layers."""

    input_spec = JistLaminarProfileGeometryInputSpec
    output_spec = JistLaminarProfileGeometryOutputSpec
    _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarProfileGeometry "
    _outputs_filenames = {"outResult": "outResult.nii"}
    _redirect_x = True


class JistLaminarProfileCalculatorInputSpec(CommandLineInputSpec):
    # Trait names map one-to-one onto the plugin's CLI flags via ``argstr``.
    inIntensity = File(
        desc="Intensity Profile Image", exists=True, argstr="--inIntensity %s"
    )
    inMask = File(desc="Mask Image (opt, 3D or 4D)", exists=True, argstr="--inMask %s")
    incomputed = traits.Enum(
        "mean",
        "stdev",
        "skewness",
        "kurtosis",
        desc="computed statistic",
        argstr="--incomputed %s",
    )
    xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s")
    outResult = traits.Either(
        traits.Bool, File(), hash_files=False, desc="Result", argstr="--outResult %s"
    )
    null = traits.Str(desc="Execution Time", argstr="--null %s")
    xDefaultMem = traits.Int(
        desc="Set default maximum heap size", argstr="-xDefaultMem %d"
    )
    xMaxProcess = traits.Int(
        1,
        desc="Set default maximum number of processes.",
        argstr="-xMaxProcess %d",
        usedefault=True,
    )


class JistLaminarProfileCalculatorOutputSpec(TraitedSpec):
    outResult = File(desc="Result", exists=True)


class JistLaminarProfileCalculator(SEMLikeCommandLine):
    """Compute various moments for intensities mapped along a cortical profile."""

    input_spec = JistLaminarProfileCalculatorInputSpec
    output_spec = JistLaminarProfileCalculatorOutputSpec
    _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarProfileCalculator "
    _outputs_filenames = {"outResult": "outResult.nii"}
    _redirect_x = True


class MedicAlgorithmN3InputSpec(CommandLineInputSpec):
    # Trait names map one-to-one onto the plugin's CLI flags via ``argstr``.
    inInput = File(desc="Input Volume", exists=True, argstr="--inInput %s")
    inSignal = traits.Float(
        desc="Default = min + 1, Values at less than threshold are treated as part of the background",
        argstr="--inSignal %f",
    )
    inMaximum = traits.Int(desc="Maximum number of Iterations", argstr="--inMaximum %d")
    inEnd = traits.Float(
        desc="Usually 0.01-0.00001, The measure used to terminate the iterations is the coefficient of variation of change in field estimates between successive iterations.",
        argstr="--inEnd %f",
    )
    inField = traits.Float(
        desc="Characteristic distance over which the field varies. The distance between adjacent knots in bspline fitting with at least 4 knots going in every dimension. The default in the dialog is one third the distance (resolution * extents) of the smallest dimension.",
        argstr="--inField %f",
    )
    inSubsample = traits.Float(
        desc="Usually between 1-32, The factor by which the data is subsampled to a lower resolution in estimating the slowly varying non-uniformity field. Reduce sampling in the finest sampling direction by the shrink factor.",
        argstr="--inSubsample %f",
    )
    inKernel = traits.Float(
        desc="Usually between 0.05-0.50, Width of deconvolution kernel used to sharpen the histogram. Larger values give faster convergence while smaller values give greater accuracy.",
        argstr="--inKernel %f",
    )
    # NOTE: "Weiner" is the upstream plugin's spelling of the flag; keep as-is.
    inWeiner = traits.Float(desc="Usually between 0.0-1.0", argstr="--inWeiner %f")
    inAutomatic = traits.Enum(
        "true",
        "false",
        desc="If true determines the threshold by histogram analysis. If true a VOI cannot be used and the input threshold is ignored.",
        argstr="--inAutomatic %s",
    )
    xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s")
    outInhomogeneity = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Inhomogeneity Corrected Volume",
        argstr="--outInhomogeneity %s",
    )
    outInhomogeneity2 = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Inhomogeneity Field",
        argstr="--outInhomogeneity2 %s",
    )
    null = traits.Str(desc="Execution Time", argstr="--null %s")
    xDefaultMem = traits.Int(
        desc="Set default maximum heap size", argstr="-xDefaultMem %d"
    )
    xMaxProcess = traits.Int(
        1,
        desc="Set default maximum number of processes.",
        argstr="-xMaxProcess %d",
        usedefault=True,
    )


class MedicAlgorithmN3OutputSpec(TraitedSpec):
    outInhomogeneity = File(desc="Inhomogeneity Corrected Volume", exists=True)
    outInhomogeneity2 = File(desc="Inhomogeneity Field", exists=True)


class MedicAlgorithmN3(SEMLikeCommandLine):
    """Non-parametric Intensity Non-uniformity Correction, N3, originally by J.G. Sled."""

    input_spec = MedicAlgorithmN3InputSpec
    output_spec = MedicAlgorithmN3OutputSpec
    _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmN3 "
    _outputs_filenames = {
        "outInhomogeneity2": "outInhomogeneity2.nii",
        "outInhomogeneity": "outInhomogeneity.nii",
    }
    _redirect_x = True


class JistLaminarROIAveragingInputSpec(CommandLineInputSpec):
    # Trait names map one-to-one onto the plugin's CLI flags via ``argstr``.
    inIntensity = File(
        desc="Intensity Profile Image", exists=True, argstr="--inIntensity %s"
    )
    inROI = File(desc="ROI Mask", exists=True, argstr="--inROI %s")
    inROI2 = traits.Str(desc="ROI Name", argstr="--inROI2 %s")
    inMask = File(desc="Mask Image (opt, 3D or 4D)", exists=True, argstr="--inMask %s")
    xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s")
    outROI3 = traits.Either(
        traits.Bool, File(), hash_files=False, desc="ROI Average", argstr="--outROI3 %s"
    )
    null = traits.Str(desc="Execution Time", argstr="--null %s")
    xDefaultMem = traits.Int(
        desc="Set default maximum heap size", argstr="-xDefaultMem %d"
    )
    xMaxProcess = traits.Int(
        1,
        desc="Set default maximum number of processes.",
        argstr="-xMaxProcess %d",
        usedefault=True,
    )


class JistLaminarROIAveragingOutputSpec(TraitedSpec):
    outROI3 = File(desc="ROI Average", exists=True)


class JistLaminarROIAveraging(SEMLikeCommandLine):
    """Compute an average profile over a given ROI."""

    input_spec = JistLaminarROIAveragingInputSpec
    output_spec = JistLaminarROIAveragingOutputSpec
    _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarROIAveraging "
    # NOTE(review): no .nii extension here, unlike sibling interfaces — matches
    # the generated upstream mapping; confirm before "fixing".
    _outputs_filenames = {"outROI3": "outROI3"}
    _redirect_x = True


class MedicAlgorithmLesionToadsInputSpec(CommandLineInputSpec):
    # Trait names map one-to-one onto the plugin's CLI flags via ``argstr``.
    inT1_MPRAGE = File(desc="T1_MPRAGE Image", exists=True, argstr="--inT1_MPRAGE %s")
    inT1_SPGR = File(desc="T1_SPGR Image", exists=True, argstr="--inT1_SPGR %s")
    inFLAIR = File(desc="FLAIR Image", exists=True, argstr="--inFLAIR %s")
    inAtlas = traits.Enum(
        "With Lesion", "No Lesion", desc="Atlas to Use", argstr="--inAtlas %s"
    )
    inOutput = traits.Enum(
        "hard segmentation",
        "hard segmentation+memberships",
        "cruise inputs",
        "dura removal inputs",
        desc="Output images",
        argstr="--inOutput %s",
    )
    inOutput2 = traits.Enum(
        "true",
        "false",
        desc="Output the hard classification using maximum membership (not neceesarily topologically correct)",
        argstr="--inOutput2 %s",
    )
    inCorrect = traits.Enum(
        "true", "false", desc="Correct MR field inhomogeneity.", argstr="--inCorrect %s"
    )
    inOutput3 = traits.Enum(
        "true",
        "false",
        desc="Output the estimated inhomogeneity field",
        argstr="--inOutput3 %s",
    )
    inAtlas2 = File(
        desc="Atlas File - With Lesions", exists=True, argstr="--inAtlas2 %s"
    )
    inAtlas3 = File(
        desc="Atlas File - No Lesion - T1 and FLAIR",
        exists=True,
        argstr="--inAtlas3 %s",
    )
    inAtlas4 = File(
        desc="Atlas File - No Lesion - T1 Only", exists=True, argstr="--inAtlas4 %s"
    )
    inMaximum = traits.Int(
        desc="Maximum distance from the interventricular WM boundary to downweight the lesion membership to avoid false postives",
        argstr="--inMaximum %d",
    )
    inMaximum2 = traits.Int(desc="Maximum Ventircle Distance", argstr="--inMaximum2 %d")
    inMaximum3 = traits.Int(
        desc="Maximum InterVentricular Distance", argstr="--inMaximum3 %d"
    )
    inInclude = traits.Enum(
        "true",
        "false",
        desc="Include lesion in WM class in hard classification",
        argstr="--inInclude %s",
    )
    inAtlas5 = traits.Float(
        desc="Controls the effect of the statistical atlas on the segmentation",
        argstr="--inAtlas5 %f",
    )
    inSmooting = traits.Float(
        desc="Controls the effect of neighberhood voxels on the membership",
        argstr="--inSmooting %f",
    )
    inMaximum4 = traits.Float(
        desc="Maximum amount of relative change in the energy function considered as the convergence criteria",
        argstr="--inMaximum4 %f",
    )
    inMaximum5 = traits.Int(desc="Maximum iterations", argstr="--inMaximum5 %d")
    inAtlas6 = traits.Enum(
        "rigid", "multi_fully_affine", desc="Atlas alignment", argstr="--inAtlas6 %s"
    )
    inConnectivity = traits.Enum(
        "(26,6)",
        "(6,26)",
        "(6,18)",
        "(18,6)",
desc="Connectivity (foreground,background)",
        argstr="--inConnectivity %s",
    )
    xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s")
    outHard = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Hard segmentation",
        argstr="--outHard %s",
    )
    outHard2 = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Hard segmentationfrom memberships",
        argstr="--outHard2 %s",
    )
    outInhomogeneity = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Inhomogeneity Field",
        argstr="--outInhomogeneity %s",
    )
    outMembership = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Membership Functions",
        argstr="--outMembership %s",
    )
    outLesion = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Lesion Segmentation",
        argstr="--outLesion %s",
    )
    outSulcal = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Sulcal CSF Membership",
        argstr="--outSulcal %s",
    )
    outCortical = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Cortical GM Membership",
        argstr="--outCortical %s",
    )
    outFilled = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Filled WM Membership",
        argstr="--outFilled %s",
    )
    outWM = traits.Either(
        traits.Bool, File(), hash_files=False, desc="WM Mask", argstr="--outWM %s"
    )
    null = traits.Str(desc="Execution Time", argstr="--null %s")
    xDefaultMem = traits.Int(
        desc="Set default maximum heap size", argstr="-xDefaultMem %d"
    )
    xMaxProcess = traits.Int(
        1,
        desc="Set default maximum number of processes.",
        argstr="-xMaxProcess %d",
        usedefault=True,
    )


class MedicAlgorithmLesionToadsOutputSpec(TraitedSpec):
    outHard = File(desc="Hard segmentation", exists=True)
    outHard2 = File(desc="Hard segmentationfrom memberships", exists=True)
    outInhomogeneity = File(desc="Inhomogeneity Field", exists=True)
    outMembership = File(desc="Membership Functions", exists=True)
    outLesion = File(desc="Lesion Segmentation", exists=True)
    outSulcal = File(desc="Sulcal CSF Membership", exists=True)
    outCortical = File(desc="Cortical GM Membership", exists=True)
    outFilled = File(desc="Filled WM Membership", exists=True)
    outWM = File(desc="WM Mask", exists=True)


class MedicAlgorithmLesionToads(SEMLikeCommandLine):
    """Algorithm for simultaneous brain structures and MS lesion segmentation of MS Brains.

    The brain segmentation is topologically consistent and the algorithm can use
    multiple MR sequences as input data.

    References
    ----------
    N. Shiee, P.-L. Bazin, A.Z. Ozturk, P.A. Calabresi, D.S. Reich, D.L. Pham,
    "A Topology-Preserving Approach to the Segmentation of Brain Images with
    Multiple Sclerosis", NeuroImage, vol. 49, no. 2, pp. 1524-1535, 2010.

    """

    input_spec = MedicAlgorithmLesionToadsInputSpec
    output_spec = MedicAlgorithmLesionToadsOutputSpec
    _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmLesionToads "
    _outputs_filenames = {
        "outWM": "outWM.nii",
        "outHard": "outHard.nii",
        "outFilled": "outFilled.nii",
        "outMembership": "outMembership.nii",
        "outInhomogeneity": "outInhomogeneity.nii",
        "outCortical": "outCortical.nii",
        "outHard2": "outHard2.nii",
        "outLesion": "outLesion.nii",
        "outSulcal": "outSulcal.nii",
    }
    _redirect_x = True


class JistBrainMp2rageSkullStrippingInputSpec(CommandLineInputSpec):
    # Trait names map one-to-one onto the plugin's CLI flags via ``argstr``.
    inSecond = File(
        desc="Second inversion (Inv2) Image", exists=True, argstr="--inSecond %s"
    )
    inT1 = File(desc="T1 Map (T1_Images) Image (opt)", exists=True, argstr="--inT1 %s")
    inT1weighted = File(
        desc="T1-weighted (UNI) Image (opt)", exists=True, argstr="--inT1weighted %s"
    )
    inFilter = File(desc="Filter Image (opt)", exists=True, argstr="--inFilter %s")
    inSkip = traits.Enum("true", "false", desc="Skip zero values", argstr="--inSkip %s")
    xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s")
    outBrain = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Brain Mask Image",
        argstr="--outBrain %s",
    )
    outMasked = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Masked T1 Map Image",
        argstr="--outMasked %s",
    )
    outMasked2 = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Masked T1-weighted Image",
        argstr="--outMasked2 %s",
    )
    outMasked3 = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Masked Filter Image",
        argstr="--outMasked3 %s",
    )
    null = traits.Str(desc="Execution Time", argstr="--null %s")
    xDefaultMem = traits.Int(
        desc="Set default maximum heap size", argstr="-xDefaultMem %d"
    )
    xMaxProcess = traits.Int(
        1,
        desc="Set default maximum number of processes.",
        argstr="-xMaxProcess %d",
        usedefault=True,
    )


class JistBrainMp2rageSkullStrippingOutputSpec(TraitedSpec):
    outBrain = File(desc="Brain Mask Image", exists=True)
    outMasked = File(desc="Masked T1 Map Image", exists=True)
    outMasked2 = File(desc="Masked T1-weighted Image", exists=True)
    outMasked3 = File(desc="Masked Filter Image", exists=True)


class JistBrainMp2rageSkullStripping(SEMLikeCommandLine):
    """Estimate a brain mask for a MP2RAGE dataset.

    At least a T1-weighted or a T1 map image is required.

    """

    input_spec = JistBrainMp2rageSkullStrippingInputSpec
    output_spec = JistBrainMp2rageSkullStrippingOutputSpec
    _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.brain.JistBrainMp2rageSkullStripping "
    _outputs_filenames = {
        "outBrain": "outBrain.nii",
        "outMasked3": "outMasked3.nii",
        "outMasked2": "outMasked2.nii",
        "outMasked": "outMasked.nii",
    }
    _redirect_x = True


class JistCortexSurfaceMeshInflationInputSpec(CommandLineInputSpec):
    # Trait names map one-to-one onto the plugin's CLI flags via ``argstr``.
    inLevelset = File(desc="Levelset Image", exists=True, argstr="--inLevelset %s")
    inSOR = traits.Float(desc="SOR Parameter", argstr="--inSOR %f")
    inMean = traits.Float(desc="Mean Curvature Threshold", argstr="--inMean %f")
    inStep = traits.Int(desc="Step Size", argstr="--inStep %d")
    inMax = traits.Int(desc="Max Iterations", argstr="--inMax %d")
    inLorentzian = traits.Enum(
        "true", "false", desc="Lorentzian Norm", argstr="--inLorentzian %s"
    )
    inTopology = traits.Enum(
        "26/6",
        "6/26",
        "18/6",
        "6/18",
        "6/6",
        "wcs",
        "wco",
        "no",
        desc="Topology",
        argstr="--inTopology %s",
    )
    xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s")
    outOriginal = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Original Surface",
        argstr="--outOriginal %s",
    )
    outInflated = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Inflated Surface",
        argstr="--outInflated %s",
    )
    null = traits.Str(desc="Execution Time", argstr="--null %s")
    xDefaultMem = traits.Int(
        desc="Set default maximum heap size", argstr="-xDefaultMem %d"
    )
    xMaxProcess = traits.Int(
        1,
        desc="Set default maximum number of processes.",
        argstr="-xMaxProcess %d",
        usedefault=True,
    )


class JistCortexSurfaceMeshInflationOutputSpec(TraitedSpec):
    outOriginal = File(desc="Original Surface", exists=True)
    outInflated = File(desc="Inflated Surface", exists=True)


class JistCortexSurfaceMeshInflation(SEMLikeCommandLine):
    """Inflates a cortical surface mesh.

    References
    ----------
    D. Tosun, M. E. Rettmann, X. Han, X. Tao, C. Xu, S. M. Resnick, D. Pham,
    and J. L. Prince, Cortical Surface Segmentation and Mapping, NeuroImage,
    vol. 23, pp. S108--S118, 2004.

    """

    input_spec = JistCortexSurfaceMeshInflationInputSpec
    output_spec = JistCortexSurfaceMeshInflationOutputSpec
    _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.cortex.JistCortexSurfaceMeshInflation "
    # Surface outputs carry no .nii extension (mesh files, not volumes).
    _outputs_filenames = {"outOriginal": "outOriginal", "outInflated": "outInflated"}
    _redirect_x = True


class RandomVolInputSpec(CommandLineInputSpec):
    # Trait names map one-to-one onto the plugin's CLI flags via ``argstr``.
    inSize = traits.Int(desc="Size of Volume in X direction", argstr="--inSize %d")
    inSize2 = traits.Int(desc="Size of Volume in Y direction", argstr="--inSize2 %d")
    inSize3 = traits.Int(desc="Size of Volume in Z direction", argstr="--inSize3 %d")
    inSize4 = traits.Int(desc="Size of Volume in t direction", argstr="--inSize4 %d")
    inStandard = traits.Int(
        desc="Standard Deviation for Normal Distribution", argstr="--inStandard %d"
    )
    inLambda = traits.Float(
        desc="Lambda Value for Exponential Distribution", argstr="--inLambda %f"
    )
    inMaximum = traits.Int(desc="Maximum Value", argstr="--inMaximum %d")
    inMinimum = traits.Int(desc="Minimum Value", argstr="--inMinimum %d")
    inField = traits.Enum(
        "Uniform", "Normal", "Exponential", desc="Field", argstr="--inField %s"
    )
    xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s")
    outRand1 = traits.Either(
        traits.Bool, File(), hash_files=False, desc="Rand1", argstr="--outRand1 %s"
    )
    null = traits.Str(desc="Execution Time", argstr="--null %s")
    xDefaultMem = traits.Int(
        desc="Set default maximum heap size", argstr="-xDefaultMem %d"
    )
    xMaxProcess = traits.Int(
        1,
        desc="Set default maximum number of processes.",
        argstr="-xMaxProcess %d",
        usedefault=True,
    )


class RandomVolOutputSpec(TraitedSpec):
    outRand1 = File(desc="Rand1", exists=True)


class RandomVol(SEMLikeCommandLine):
    """Generate a volume of random scalars."""

    input_spec = RandomVolInputSpec
    output_spec = RandomVolOutputSpec
    _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.bme.smile.demo.RandomVol "
    _outputs_filenames = {"outRand1": "outRand1.nii"}
    _redirect_x = True


class MedicAlgorithmImageCalculatorInputSpec(CommandLineInputSpec):
    # Trait names map one-to-one onto the plugin's CLI flags via ``argstr``.
    inVolume = File(desc="Volume 1", exists=True, argstr="--inVolume %s")
    inVolume2 = File(desc="Volume 2", exists=True, argstr="--inVolume2 %s")
    inOperation = traits.Enum(
        "Add",
        "Subtract",
        "Multiply",
        "Divide",
        "Min",
        "Max",
        desc="Operation",
        argstr="--inOperation %s",
    )
    xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s")
    outResult = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Result Volume",
        argstr="--outResult %s",
    )
    null = traits.Str(desc="Execution Time", argstr="--null %s")
    xDefaultMem = traits.Int(
        desc="Set default maximum heap size", argstr="-xDefaultMem %d"
    )
    xMaxProcess = traits.Int(
        1,
        desc="Set default maximum number of processes.",
        argstr="-xMaxProcess %d",
        usedefault=True,
    )


class MedicAlgorithmImageCalculatorOutputSpec(TraitedSpec):
    outResult = File(desc="Result Volume", exists=True)


class MedicAlgorithmImageCalculator(SEMLikeCommandLine):
    """Perform simple image calculator operations on two images.

    The operations include 'Add', 'Subtract', 'Multiply', and 'Divide'

    """

    input_spec = MedicAlgorithmImageCalculatorInputSpec
    output_spec = MedicAlgorithmImageCalculatorOutputSpec
    _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.utilities.math.MedicAlgorithmImageCalculator "
    _outputs_filenames = {"outResult": "outResult.nii"}
    _redirect_x = True


class JistBrainMp2rageDuraEstimationInputSpec(CommandLineInputSpec):
    # Trait names map one-to-one onto the plugin's CLI flags via ``argstr``.
    inSecond = File(
        desc="Second inversion (Inv2) Image", exists=True, argstr="--inSecond %s"
    )
    inSkull = File(desc="Skull Stripping Mask", exists=True, argstr="--inSkull %s")
    inDistance = traits.Float(
        desc="Distance to background (mm)", argstr="--inDistance %f"
    )
    inoutput = traits.Enum(
        "dura_region",
        "boundary",
        "dura_prior",
        "bg_prior",
        "intens_prior",
        desc="Outputs an estimate of the dura / CSF boundary or an estimate of the entire dura region.",
        argstr="--inoutput %s",
    )
    xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s")
    outDura = traits.Either(
        traits.Bool, File(), hash_files=False, desc="Dura Image", argstr="--outDura %s"
    )
    null = traits.Str(desc="Execution Time", argstr="--null %s")
    xDefaultMem = traits.Int(
        desc="Set default maximum heap size", argstr="-xDefaultMem %d"
    )
    xMaxProcess = traits.Int(
        1,
        desc="Set default maximum number of processes.",
        argstr="-xMaxProcess %d",
        usedefault=True,
    )


class JistBrainMp2rageDuraEstimationOutputSpec(TraitedSpec):
    outDura = File(desc="Dura Image", exists=True)


class JistBrainMp2rageDuraEstimation(SEMLikeCommandLine):
    """Filters a MP2RAGE brain image to obtain a probability map of dura matter."""

    input_spec = JistBrainMp2rageDuraEstimationInputSpec
    output_spec = JistBrainMp2rageDuraEstimationOutputSpec
    _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation "
    _outputs_filenames = {"outDura": "outDura.nii"}
    _redirect_x = True


class JistLaminarProfileSamplingInputSpec(CommandLineInputSpec):
    # Trait names map one-to-one onto the plugin's CLI flags via ``argstr``.
    inProfile = File(desc="Profile Surface Image", exists=True, argstr="--inProfile %s")
    inIntensity = File(desc="Intensity Image", exists=True, argstr="--inIntensity %s")
    inCortex = File(desc="Cortex Mask (opt)", exists=True, argstr="--inCortex %s")
    xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s")
    outProfilemapped = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Profile-mapped Intensity Image",
        argstr="--outProfilemapped %s",
    )
    outProfile2 = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Profile 4D Mask",
        argstr="--outProfile2 %s",
    )
    null = traits.Str(desc="Execution Time", argstr="--null %s")
    xDefaultMem = traits.Int(
        desc="Set default maximum heap size", argstr="-xDefaultMem %d"
    )
    xMaxProcess = traits.Int(
        1,
        desc="Set default maximum number of processes.",
        argstr="-xMaxProcess %d",
        usedefault=True,
    )


class JistLaminarProfileSamplingOutputSpec(TraitedSpec):
    outProfilemapped = File(desc="Profile-mapped Intensity Image", exists=True)
    outProfile2 = File(desc="Profile 4D Mask", exists=True)


class JistLaminarProfileSampling(SEMLikeCommandLine):
    """Sample some intensity image along a cortical profile across layer surfaces."""

    input_spec = JistLaminarProfileSamplingInputSpec
    output_spec = JistLaminarProfileSamplingOutputSpec
    _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.laminar.JistLaminarProfileSampling "
    _outputs_filenames = {
        "outProfile2": "outProfile2.nii",
        "outProfilemapped": "outProfilemapped.nii",
    }
    _redirect_x = True


class MedicAlgorithmMipavReorientInputSpec(CommandLineInputSpec):
    # Trait names map one-to-one onto the plugin's CLI flags via ``argstr``.
    inSource = InputMultiPath(File, desc="Source", sep=";", argstr="--inSource %s")
    inTemplate = File(desc="Template", exists=True, argstr="--inTemplate %s")
    inNew = traits.Enum(
        "Dicom axial",
        "Dicom coronal",
        "Dicom sagittal",
        "User defined",
        desc="New image orientation",
        argstr="--inNew %s",
    )
    inUser = traits.Enum(
        "Unknown",
        "Patient Right to Left",
        "Patient Left to Right",
        "Patient Posterior to Anterior",
        "Patient Anterior to Posterior",
        "Patient Inferior to Superior",
        "Patient Superior to Inferior",
        desc="User defined X-axis orientation (image left to right)",
        argstr="--inUser %s",
    )
    inUser2 = traits.Enum(
        "Unknown",
        "Patient Right to Left",
        "Patient Left to Right",
        "Patient Posterior to Anterior",
        "Patient Anterior to Posterior",
        "Patient Inferior to Superior",
        "Patient Superior to Inferior",
        desc="User defined Y-axis orientation (image top to bottom)",
        argstr="--inUser2 %s",
    )
    inUser3 = traits.Enum(
        "Unknown",
        "Patient Right to Left",
        "Patient Left to Right",
        "Patient Posterior to Anterior",
        "Patient Anterior to Posterior",
        "Patient Inferior to Superior",
        "Patient Superior to Inferior",
        desc="User defined Z-axis orientation (into the screen)",
        argstr="--inUser3 %s",
    )
    inUser4 = traits.Enum(
        "Axial",
        "Coronal",
        "Sagittal",
        "Unknown",
        desc="User defined Image Orientation",
        argstr="--inUser4 %s",
    )
    inInterpolation = traits.Enum(
        "Nearest Neighbor",
        "Trilinear",
        "Bspline 3rd order",
        "Bspline 4th order",
        "Cubic Lagrangian",
        "Quintic Lagrangian",
        "Heptic Lagrangian",
        "Windowed Sinc",
        desc="Interpolation",
        argstr="--inInterpolation %s",
    )
    inResolution = traits.Enum(
        "Unchanged",
        "Finest cubic",
        "Coarsest cubic",
        "Same as template",
        desc="Resolution",
        argstr="--inResolution %s",
    )
    xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s")
    # NOTE(review): declared as an input-side multi-path in the generated spec;
    # the output spec below is empty — matches the generator's output.
    outReoriented = InputMultiPath(
        File, desc="Reoriented Volume", sep=";", argstr="--outReoriented %s"
    )
    null = traits.Str(desc="Execution Time", argstr="--null %s")
    xDefaultMem = traits.Int(
        desc="Set default maximum heap size", argstr="-xDefaultMem %d"
    )
    xMaxProcess = traits.Int(
        1,
        desc="Set default maximum number of processes.",
        argstr="-xMaxProcess %d",
        usedefault=True,
    )


class MedicAlgorithmMipavReorientOutputSpec(TraitedSpec):
    pass


class MedicAlgorithmMipavReorient(SEMLikeCommandLine):
    """Reorient a volume to a particular anatomical orientation."""

    input_spec = MedicAlgorithmMipavReorientInputSpec
    output_spec = MedicAlgorithmMipavReorientOutputSpec
    _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.utilities.volume.MedicAlgorithmMipavReorient "
    _outputs_filenames = {}
    _redirect_x = True


class MedicAlgorithmSPECTRE2010InputSpec(CommandLineInputSpec):
    # Trait names map one-to-one onto the plugin's CLI flags via ``argstr``.
    inInput = File(
        desc="Input volume to be skullstripped.", exists=True, argstr="--inInput %s"
    )
    inAtlas = File(
        desc="SPECTRE atlas description file. A text file enumerating atlas files and landmarks.",
        exists=True,
        argstr="--inAtlas %s",
    )
    inInitial = traits.Int(
        desc="Erosion of the inital mask, which is based on the probability mask and the classification., The initial mask is ouput as the d0 volume at the conclusion of SPECTRE.",
        argstr="--inInitial %d",
    )
    inImage = traits.Enum(
        "T1_SPGR",
        "T1_ALT",
        "T1_MPRAGE",
        "T2",
        "FLAIR",
        desc="Set the image modality. MP-RAGE is recommended for most T1 sequence images.",
        argstr="--inImage %s",
    )
    inOutput = traits.Enum(
        "true",
        "false",
        desc="Determines if the output results are transformed back into the space of the original input image.",
        argstr="--inOutput %s",
    )
    inFind = traits.Enum(
        "true", "false", desc="Find Midsaggital Plane", argstr="--inFind %s"
    )
    inRun = traits.Enum(
        "true", "false", desc="Run Smooth Brain Mask", argstr="--inRun %s"
    )
    inResample = traits.Enum(
        "true",
        "false",
        desc="Determines if the data is resampled to be isotropic during the processing.",
        argstr="--inResample %s",
    )
    inInitial2 = traits.Float(
        desc="Initial probability threshold", argstr="--inInitial2 %f"
    )
    inMinimum = traits.Float(
        desc="Minimum probability threshold", argstr="--inMinimum %f"
    )
    inMMC = traits.Int(
        desc="The size of the dilation step within the Modified Morphological Closing.",
        argstr="--inMMC %d",
    )
    inMMC2 = traits.Int(
        desc="The size of the erosion step within the Modified Morphological Closing.",
        argstr="--inMMC2 %d",
    )
    inInhomogeneity = traits.Enum(
        "true",
        "false",
        desc="Set to false by default, this parameter will make FANTASM try to do inhomogeneity correction during it's iterative cycle.",
        argstr="--inInhomogeneity %s",
    )
    inSmoothing = traits.Float(argstr="--inSmoothing %f")
    inBackground = traits.Float(argstr="--inBackground %f")
    inOutput2 = traits.Enum(
        "true", "false", desc="Output Plane?", argstr="--inOutput2 %s"
    )
    inOutput3 = traits.Enum(
        "true", "false", desc="Output Split-Halves?", argstr="--inOutput3 %s"
    )
    inOutput4 = traits.Enum(
        "true", "false", desc="Output Segmentation on Plane?", argstr="--inOutput4 %s"
    )
    inDegrees = traits.Enum(
        "Rigid - 6",
        "Global rescale - 7",
        "Specific rescale - 9",
        "Affine - 12",
        desc="Degrees of freedom",
        argstr="--inDegrees %s",
    )
    inCost = traits.Enum(
        "Correlation ratio",
        "Least squares",
        "Normalized cross correlation",
        "Normalized mutual information",
        desc="Cost function",
        argstr="--inCost %s",
    )
    inRegistration = traits.Enum(
        "Trilinear",
        "Bspline 3rd order",
        "Bspline 4th order",
        "Cubic Lagrangian",
        "Quintic Lagrangian",
        "Heptic Lagrangian",
        "Windowed sinc",
        desc="Registration interpolation",
        argstr="--inRegistration %s",
    )
    inOutput5 = traits.Enum(
        "Trilinear",
        "Bspline 3rd order",
        "Bspline 4th order",
        "Cubic Lagrangian",
        "Quintic Lagrangian",
        "Heptic Lagrangian",
        "Windowed sinc",
        "Nearest Neighbor",
        desc="Output interpolation",
        argstr="--inOutput5 %s",
    )
    inApply = traits.Enum(
        "All", "X", "Y", "Z", desc="Apply rotation", argstr="--inApply %s"
    )
    inMinimum2 = traits.Float(desc="Minimum angle", argstr="--inMinimum2 %f")
    inMaximum = traits.Float(desc="Maximum angle", argstr="--inMaximum %f")
    inCoarse = traits.Float(desc="Coarse angle increment", argstr="--inCoarse %f")
    inFine = traits.Float(desc="Fine angle increment", argstr="--inFine %f")
    inMultiple = traits.Int(
        desc="Multiple of tolerance to bracket the minimum", argstr="--inMultiple %d"
    )
    inNumber = traits.Int(desc="Number of iterations", argstr="--inNumber %d")
    inNumber2 = traits.Int(
        desc="Number of minima from Level 8 to test at Level 4", argstr="--inNumber2 %d"
    )
    inUse = traits.Enum(
        "true",
        "false",
        desc="Use the max of the min resolutions of the two datasets when resampling",
        argstr="--inUse %s",
    )
    inSubsample = traits.Enum(
        "true", "false", desc="Subsample image for speed", argstr="--inSubsample %s"
    )
    inSkip = traits.Enum(
        "true",
        "false",
        desc="Skip multilevel search (Assume images are close to alignment)",
        argstr="--inSkip %s",
    )
    inMultithreading = traits.Enum(
        "true",
        "false",
        desc="Set to false by default, this parameter controls the multithreaded behavior of the linear registration.",
        argstr="--inMultithreading %s",
    )
    xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s")
    outOriginal = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="If Output in Original Space Flag is true then outputs the original input volume. Otherwise outputs the axialy reoriented input volume.",
        argstr="--outOriginal %s",
    )
    outStripped = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Skullstripped result of the input volume with just the brain.",
        argstr="--outStripped %s",
    )
    outMask = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Binary Mask of the skullstripped result with just the brain",
        argstr="--outMask %s",
    )
    outPrior = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Probability prior from the atlas registrations",
        argstr="--outPrior %s",
    )
    outFANTASM = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Tissue classification of of the whole input volume.",
        argstr="--outFANTASM %s",
    )
    outd0 = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Initial Brainmask",
        argstr="--outd0 %s",
    )
    outMidsagittal = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Plane dividing the brain hemispheres",
        argstr="--outMidsagittal %s",
    )
    outSplitHalves = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Skullstripped mask of the brain with the hemispheres divided.",
        argstr="--outSplitHalves %s",
    )
    outSegmentation = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="2D image showing the tissue classification on the midsagittal plane",
argstr="--outSegmentation %s",
    )
    null = traits.Str(desc="Execution Time", argstr="--null %s")
    xDefaultMem = traits.Int(
        desc="Set default maximum heap size", argstr="-xDefaultMem %d"
    )
    xMaxProcess = traits.Int(
        1,
        desc="Set default maximum number of processes.",
        argstr="-xMaxProcess %d",
        usedefault=True,
    )


class MedicAlgorithmSPECTRE2010OutputSpec(TraitedSpec):
    outOriginal = File(
        desc="If Output in Original Space Flag is true then outputs the original input volume. Otherwise outputs the axialy reoriented input volume.",
        exists=True,
    )
    outStripped = File(
        desc="Skullstripped result of the input volume with just the brain.",
        exists=True,
    )
    outMask = File(
        desc="Binary Mask of the skullstripped result with just the brain", exists=True
    )
    outPrior = File(desc="Probability prior from the atlas registrations", exists=True)
    outFANTASM = File(
        desc="Tissue classification of of the whole input volume.", exists=True
    )
    outd0 = File(desc="Initial Brainmask", exists=True)
    outMidsagittal = File(desc="Plane dividing the brain hemispheres", exists=True)
    outSplitHalves = File(
        desc="Skullstripped mask of the brain with the hemispheres divided.",
        exists=True,
    )
    outSegmentation = File(
        desc="2D image showing the tissue classification on the midsagittal plane",
        exists=True,
    )


class MedicAlgorithmSPECTRE2010(SEMLikeCommandLine):
    """SPECTRE 2010: Simple Paradigm for Extra-Cranial Tissue REmoval [1]_, [2]_.

    References
    ----------
    .. [1] A. Carass, M.B. Wheeler, J. Cuzzocreo, P.-L. Bazin, S.S. Bassett,
       and J.L. Prince, 'A Joint Registration and Segmentation Approach to
       Skull Stripping', Fourth IEEE International Symposium on Biomedical
       Imaging (ISBI 2007), Arlington, VA, April 12-15, 2007.
    .. [2] A. Carass, J. Cuzzocreo, M.B. Wheeler, P.-L. Bazin, S.M. Resnick,
       and J.L. Prince, 'Simple paradigm for extra-cerebral tissue removal:
       Algorithm and analysis', NeuroImage 56(4):1982-1992, 2011.

    """

    input_spec = MedicAlgorithmSPECTRE2010InputSpec
    output_spec = MedicAlgorithmSPECTRE2010OutputSpec
    _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.segmentation.skull_strip.MedicAlgorithmSPECTRE2010 "
    # Default filenames used when an out* input is set to True rather than a path.
    _outputs_filenames = {
        "outd0": "outd0.nii",
        "outOriginal": "outOriginal.nii",
        "outMask": "outMask.nii",
        "outSplitHalves": "outSplitHalves.nii",
        "outMidsagittal": "outMidsagittal.nii",
        "outPrior": "outPrior.nii",
        "outFANTASM": "outFANTASM.nii",
        "outSegmentation": "outSegmentation.nii",
        "outStripped": "outStripped.nii",
    }
    _redirect_x = True


class JistBrainPartialVolumeFilterInputSpec(CommandLineInputSpec):
    # Trait names map one-to-one onto the plugin's CLI flags via ``argstr``.
    inInput = File(desc="Input Image", exists=True, argstr="--inInput %s")
    inPV = traits.Enum(
        "bright",
        "dark",
        "both",
        desc="Outputs the raw intensity values or a probability score for the partial volume regions.",
        argstr="--inPV %s",
    )
    inoutput = traits.Enum(
        "probability", "intensity", desc="output", argstr="--inoutput %s"
    )
    xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s")
    outPartial = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Partial Volume Image",
        argstr="--outPartial %s",
    )
    null = traits.Str(desc="Execution Time", argstr="--null %s")
    xDefaultMem = traits.Int(
        desc="Set default maximum heap size", argstr="-xDefaultMem %d"
    )
    xMaxProcess = traits.Int(
        1,
        desc="Set default maximum number of processes.",
        argstr="-xMaxProcess %d",
        usedefault=True,
    )


class JistBrainPartialVolumeFilterOutputSpec(TraitedSpec):
    outPartial = File(desc="Partial Volume Image", exists=True)


class JistBrainPartialVolumeFilter(SEMLikeCommandLine):
    """Partial Volume Filter.

    Filters an image for regions of partial voluming assuming a ridge-like
    model of intensity.

    """

    input_spec = JistBrainPartialVolumeFilterInputSpec
    output_spec = JistBrainPartialVolumeFilterOutputSpec
    _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter "
    _outputs_filenames = {"outPartial": "outPartial.nii"}
    _redirect_x = True


class JistIntensityMp2rageMaskingInputSpec(CommandLineInputSpec):
    # Trait names map onto the plugin's CLI flags via ``argstr``; note the
    # out* trait names differ from their flag names (e.g. --outSignal_Proba).
    inSecond = File(
        desc="Second inversion (Inv2) Image", exists=True, argstr="--inSecond %s"
    )
    inQuantitative = File(
        desc="Quantitative T1 Map (T1_Images) Image",
        exists=True,
        argstr="--inQuantitative %s",
    )
    inT1weighted = File(
        desc="T1-weighted (UNI) Image", exists=True, argstr="--inT1weighted %s"
    )
    inBackground = traits.Enum(
        "exponential",
        "half-normal",
        desc="Model distribution for background noise (default is half-normal, exponential is more stringent).",
        argstr="--inBackground %s",
    )
    inSkip = traits.Enum("true", "false", desc="Skip zero values", argstr="--inSkip %s")
    inMasking = traits.Enum(
        "binary",
        "proba",
        desc="Whether to use a binary threshold or a weighted average based on the probability.",
        argstr="--inMasking %s",
    )
    xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s")
    outSignal = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Signal Proba Image",
        argstr="--outSignal_Proba %s",
    )
    outSignal2 = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Signal Mask Image",
        argstr="--outSignal_Mask %s",
    )
    outMasked = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Masked T1 Map Image",
        argstr="--outMasked_T1_Map %s",
    )
    outMasked2 = traits.Either(
        traits.Bool,
        File(),
        hash_files=False,
        desc="Masked Iso Image",
        argstr="--outMasked_T1weighted %s",
    )
    null = traits.Str(desc="Execution Time", argstr="--null %s")
    xDefaultMem = traits.Int(
        desc="Set default maximum heap size", argstr="-xDefaultMem %d"
    )
    xMaxProcess = traits.Int(
        1,
        desc="Set default maximum number of processes.",
        argstr="-xMaxProcess %d",
        usedefault=True,
    )


class JistIntensityMp2rageMaskingOutputSpec(TraitedSpec):
    outSignal = File(desc="Signal Proba Image", exists=True)
    outSignal2 = File(desc="Signal Mask Image", exists=True)
    outMasked = File(desc="Masked T1 Map Image", exists=True)
    outMasked2 = File(desc="Masked Iso Image", exists=True)


class JistIntensityMp2rageMasking(SEMLikeCommandLine):
    """Estimate a background signal mask for a MP2RAGE dataset."""

    input_spec = JistIntensityMp2rageMaskingInputSpec
    output_spec = JistIntensityMp2rageMaskingOutputSpec
    _cmd = "java edu.jhu.ece.iacl.jist.cli.run de.mpg.cbs.jist.intensity.JistIntensityMp2rageMasking "
    _outputs_filenames = {
        "outSignal2": "outSignal2.nii",
        "outSignal": "outSignal.nii",
        "outMasked2": "outMasked2.nii",
        "outMasked": "outMasked.nii",
    }
    _redirect_x = True


class MedicAlgorithmThresholdToBinaryMaskInputSpec(CommandLineInputSpec):
    # Trait names map one-to-one onto the plugin's CLI flags via ``argstr``.
    inLabel = InputMultiPath(File, desc="Input volumes", sep=";", argstr="--inLabel %s")
    inMinimum = traits.Float(desc="Minimum threshold value.", argstr="--inMinimum %f")
    inMaximum = traits.Float(desc="Maximum threshold value.", argstr="--inMaximum %f")
    inUse = traits.Enum(
        "true",
        "false",
        desc="Use the images max intensity as the max value of the range.",
        argstr="--inUse %s",
    )
    xPrefExt = traits.Enum("nrrd", desc="Output File Type", argstr="--xPrefExt %s")
    outBinary = InputMultiPath(
        File, desc="Binary Mask", sep=";", argstr="--outBinary %s"
    )
    null = traits.Str(desc="Execution Time", argstr="--null %s")
    xDefaultMem = traits.Int(
        desc="Set default maximum heap size", argstr="-xDefaultMem %d"
    )
    xMaxProcess = traits.Int(
        1,
        desc="Set default maximum number of processes.",
        argstr="-xMaxProcess %d",
        usedefault=True,
    )


class MedicAlgorithmThresholdToBinaryMaskOutputSpec(TraitedSpec):
    pass


class MedicAlgorithmThresholdToBinaryMask(SEMLikeCommandLine):
    """Threshold to Binary Mask.

    Given a volume and an intensity range create a binary mask for values
    within that range.
""" input_spec = MedicAlgorithmThresholdToBinaryMaskInputSpec output_spec = MedicAlgorithmThresholdToBinaryMaskOutputSpec _cmd = "java edu.jhu.ece.iacl.jist.cli.run edu.jhu.ece.iacl.plugins.utilities.volume.MedicAlgorithmThresholdToBinaryMask " _outputs_filenames = {} _redirect_x = True nipype-1.7.0/nipype/interfaces/mipav/generate_classes.py000066400000000000000000000047451413403311400234530ustar00rootroot00000000000000# -*- coding: utf-8 -*- if __name__ == "__main__": from nipype.interfaces.slicer.generate_classes import generate_all_classes # NOTE: For now either the launcher needs to be found on the default path, or # every tool in the modules list must be found on the default path # AND calling the module with --xml must be supported and compliant. modules_list = [ "edu.jhu.bme.smile.demo.RandomVol", "de.mpg.cbs.jist.laminar.JistLaminarProfileCalculator", "de.mpg.cbs.jist.laminar.JistLaminarProfileSampling", "de.mpg.cbs.jist.laminar.JistLaminarROIAveraging", "de.mpg.cbs.jist.laminar.JistLaminarVolumetricLayering", "de.mpg.cbs.jist.laminar.JistLaminarProfileGeometry", "de.mpg.cbs.jist.brain.JistBrainMgdmSegmentation", "de.mpg.cbs.jist.brain.JistBrainMp2rageSkullStripping", "de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter", "de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation", ] modules_from_chris = [ "edu.jhu.ece.iacl.plugins.segmentation.skull_strip.MedicAlgorithmSPECTRE2010", "edu.jhu.ece.iacl.plugins.utilities.volume.MedicAlgorithmMipavReorient", "edu.jhu.ece.iacl.plugins.utilities.math.MedicAlgorithmImageCalculator", "de.mpg.cbs.jist.brain.JistBrainMp2rageDuraEstimation", "de.mpg.cbs.jist.brain.JistBrainPartialVolumeFilter", "edu.jhu.ece.iacl.plugins.utilities.volume.MedicAlgorithmThresholdToBinaryMask", # 'de.mpg.cbs.jist.cortex.JistCortexFullCRUISE', # waiting for http://www.nitrc.org/tracker/index.php?func=detail&aid=7236&group_id=228&atid=942 to be fixed "de.mpg.cbs.jist.cortex.JistCortexSurfaceMeshInflation", ] modules_from_julia = [ 
"de.mpg.cbs.jist.intensity.JistIntensityMp2rageMasking", "edu.jhu.ece.iacl.plugins.segmentation.skull_strip.MedicAlgorithmSPECTRE2010", ] modules_from_leonie = [ "edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmLesionToads" ] modules_from_yasinyazici = [ "edu.jhu.ece.iacl.plugins.classification.MedicAlgorithmN3" ] modules_list = list( set(modules_list) .union(modules_from_chris) .union(modules_from_leonie) .union(modules_from_julia) .union(modules_from_yasinyazici) .union(modules_list) ) generate_all_classes( modules_list=modules_list, launcher=["java edu.jhu.ece.iacl.jist.cli.run"], redirect_x=True, mipav_hacks=True, ) nipype-1.7.0/nipype/interfaces/mipav/tests/000077500000000000000000000000001413403311400207225ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/mipav/tests/__init__.py000066400000000000000000000000301413403311400230240ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_JistBrainMgdmSegmentation.py000066400000000000000000000061201413403311400304320ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..developer import JistBrainMgdmSegmentation def test_JistBrainMgdmSegmentation_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inAdjust=dict( argstr="--inAdjust %s", ), inAtlas=dict( argstr="--inAtlas %s", extensions=None, ), inCompute=dict( argstr="--inCompute %s", ), inCurvature=dict( argstr="--inCurvature %f", ), inData=dict( argstr="--inData %f", ), inFLAIR=dict( argstr="--inFLAIR %s", extensions=None, ), inMP2RAGE=dict( argstr="--inMP2RAGE %s", extensions=None, ), inMP2RAGE2=dict( argstr="--inMP2RAGE2 %s", extensions=None, ), inMPRAGE=dict( argstr="--inMPRAGE %s", extensions=None, ), inMax=dict( argstr="--inMax %d", ), inMin=dict( argstr="--inMin %f", ), inOutput=dict( argstr="--inOutput %s", ), inPV=dict( argstr="--inPV %s", extensions=None, ), inPosterior=dict( argstr="--inPosterior %f", ), 
inSteps=dict( argstr="--inSteps %d", ), inTopology=dict( argstr="--inTopology %s", ), null=dict( argstr="--null %s", ), outLevelset=dict( argstr="--outLevelset %s", hash_files=False, ), outPosterior2=dict( argstr="--outPosterior2 %s", hash_files=False, ), outPosterior3=dict( argstr="--outPosterior3 %s", hash_files=False, ), outSegmented=dict( argstr="--outSegmented %s", hash_files=False, ), xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", usedefault=True, ), xPrefExt=dict( argstr="--xPrefExt %s", ), ) inputs = JistBrainMgdmSegmentation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_JistBrainMgdmSegmentation_outputs(): output_map = dict( outLevelset=dict( extensions=None, ), outPosterior2=dict( extensions=None, ), outPosterior3=dict( extensions=None, ), outSegmented=dict( extensions=None, ), ) outputs = JistBrainMgdmSegmentation.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageDuraEstimation.py000066400000000000000000000032261413403311400313420ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..developer import JistBrainMp2rageDuraEstimation def test_JistBrainMp2rageDuraEstimation_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inDistance=dict( argstr="--inDistance %f", ), inSecond=dict( argstr="--inSecond %s", extensions=None, ), inSkull=dict( argstr="--inSkull %s", extensions=None, ), inoutput=dict( argstr="--inoutput %s", ), null=dict( argstr="--null %s", ), outDura=dict( argstr="--outDura %s", hash_files=False, ), xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", usedefault=True, ), 
xPrefExt=dict( argstr="--xPrefExt %s", ), ) inputs = JistBrainMp2rageDuraEstimation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_JistBrainMp2rageDuraEstimation_outputs(): output_map = dict( outDura=dict( extensions=None, ), ) outputs = JistBrainMp2rageDuraEstimation.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_JistBrainMp2rageSkullStripping.py000066400000000000000000000044131413403311400314030ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..developer import JistBrainMp2rageSkullStripping def test_JistBrainMp2rageSkullStripping_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inFilter=dict( argstr="--inFilter %s", extensions=None, ), inSecond=dict( argstr="--inSecond %s", extensions=None, ), inSkip=dict( argstr="--inSkip %s", ), inT1=dict( argstr="--inT1 %s", extensions=None, ), inT1weighted=dict( argstr="--inT1weighted %s", extensions=None, ), null=dict( argstr="--null %s", ), outBrain=dict( argstr="--outBrain %s", hash_files=False, ), outMasked=dict( argstr="--outMasked %s", hash_files=False, ), outMasked2=dict( argstr="--outMasked2 %s", hash_files=False, ), outMasked3=dict( argstr="--outMasked3 %s", hash_files=False, ), xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", usedefault=True, ), xPrefExt=dict( argstr="--xPrefExt %s", ), ) inputs = JistBrainMp2rageSkullStripping.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_JistBrainMp2rageSkullStripping_outputs(): output_map = dict( outBrain=dict( extensions=None, ), 
outMasked=dict( extensions=None, ), outMasked2=dict( extensions=None, ), outMasked3=dict( extensions=None, ), ) outputs = JistBrainMp2rageSkullStripping.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_JistBrainPartialVolumeFilter.py000066400000000000000000000030461413403311400311260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..developer import JistBrainPartialVolumeFilter def test_JistBrainPartialVolumeFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inInput=dict( argstr="--inInput %s", extensions=None, ), inPV=dict( argstr="--inPV %s", ), inoutput=dict( argstr="--inoutput %s", ), null=dict( argstr="--null %s", ), outPartial=dict( argstr="--outPartial %s", hash_files=False, ), xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", usedefault=True, ), xPrefExt=dict( argstr="--xPrefExt %s", ), ) inputs = JistBrainPartialVolumeFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_JistBrainPartialVolumeFilter_outputs(): output_map = dict( outPartial=dict( extensions=None, ), ) outputs = JistBrainPartialVolumeFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_JistCortexSurfaceMeshInflation.py000066400000000000000000000037751413403311400314670ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..developer import JistCortexSurfaceMeshInflation def test_JistCortexSurfaceMeshInflation_inputs(): input_map = dict( args=dict( argstr="%s", ), 
environ=dict( nohash=True, usedefault=True, ), inLevelset=dict( argstr="--inLevelset %s", extensions=None, ), inLorentzian=dict( argstr="--inLorentzian %s", ), inMax=dict( argstr="--inMax %d", ), inMean=dict( argstr="--inMean %f", ), inSOR=dict( argstr="--inSOR %f", ), inStep=dict( argstr="--inStep %d", ), inTopology=dict( argstr="--inTopology %s", ), null=dict( argstr="--null %s", ), outInflated=dict( argstr="--outInflated %s", hash_files=False, ), outOriginal=dict( argstr="--outOriginal %s", hash_files=False, ), xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", usedefault=True, ), xPrefExt=dict( argstr="--xPrefExt %s", ), ) inputs = JistCortexSurfaceMeshInflation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_JistCortexSurfaceMeshInflation_outputs(): output_map = dict( outInflated=dict( extensions=None, ), outOriginal=dict( extensions=None, ), ) outputs = JistCortexSurfaceMeshInflation.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_JistIntensityMp2rageMasking.py000066400000000000000000000045411413403311400307370ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..developer import JistIntensityMp2rageMasking def test_JistIntensityMp2rageMasking_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inBackground=dict( argstr="--inBackground %s", ), inMasking=dict( argstr="--inMasking %s", ), inQuantitative=dict( argstr="--inQuantitative %s", extensions=None, ), inSecond=dict( argstr="--inSecond %s", extensions=None, ), inSkip=dict( argstr="--inSkip %s", ), inT1weighted=dict( argstr="--inT1weighted %s", extensions=None, ), null=dict( argstr="--null %s", ), 
outMasked=dict( argstr="--outMasked_T1_Map %s", hash_files=False, ), outMasked2=dict( argstr="--outMasked_T1weighted %s", hash_files=False, ), outSignal=dict( argstr="--outSignal_Proba %s", hash_files=False, ), outSignal2=dict( argstr="--outSignal_Mask %s", hash_files=False, ), xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", usedefault=True, ), xPrefExt=dict( argstr="--xPrefExt %s", ), ) inputs = JistIntensityMp2rageMasking.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_JistIntensityMp2rageMasking_outputs(): output_map = dict( outMasked=dict( extensions=None, ), outMasked2=dict( extensions=None, ), outSignal=dict( extensions=None, ), outSignal2=dict( extensions=None, ), ) outputs = JistIntensityMp2rageMasking.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileCalculator.py000066400000000000000000000031201413403311400311270ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..developer import JistLaminarProfileCalculator def test_JistLaminarProfileCalculator_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inIntensity=dict( argstr="--inIntensity %s", extensions=None, ), inMask=dict( argstr="--inMask %s", extensions=None, ), incomputed=dict( argstr="--incomputed %s", ), null=dict( argstr="--null %s", ), outResult=dict( argstr="--outResult %s", hash_files=False, ), xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", usedefault=True, ), xPrefExt=dict( argstr="--xPrefExt %s", ), ) inputs = JistLaminarProfileCalculator.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_JistLaminarProfileCalculator_outputs(): output_map = dict( outResult=dict( extensions=None, ), ) outputs = JistLaminarProfileCalculator.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileGeometry.py000066400000000000000000000033151413403311400306370ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..developer import JistLaminarProfileGeometry def test_JistLaminarProfileGeometry_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inProfile=dict( argstr="--inProfile %s", extensions=None, ), incomputed=dict( argstr="--incomputed %s", ), inoutside=dict( argstr="--inoutside %f", ), inregularization=dict( argstr="--inregularization %s", ), insmoothing=dict( argstr="--insmoothing %f", ), null=dict( argstr="--null %s", ), outResult=dict( argstr="--outResult %s", hash_files=False, ), xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", usedefault=True, ), xPrefExt=dict( argstr="--xPrefExt %s", ), ) inputs = JistLaminarProfileGeometry.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_JistLaminarProfileGeometry_outputs(): output_map = dict( outResult=dict( extensions=None, ), ) outputs = JistLaminarProfileGeometry.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_JistLaminarProfileSampling.py000066400000000000000000000034461413403311400306230ustar00rootroot00000000000000# AUTO-GENERATED by 
tools/checkspecs.py - DO NOT EDIT from ..developer import JistLaminarProfileSampling def test_JistLaminarProfileSampling_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inCortex=dict( argstr="--inCortex %s", extensions=None, ), inIntensity=dict( argstr="--inIntensity %s", extensions=None, ), inProfile=dict( argstr="--inProfile %s", extensions=None, ), null=dict( argstr="--null %s", ), outProfile2=dict( argstr="--outProfile2 %s", hash_files=False, ), outProfilemapped=dict( argstr="--outProfilemapped %s", hash_files=False, ), xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", usedefault=True, ), xPrefExt=dict( argstr="--xPrefExt %s", ), ) inputs = JistLaminarProfileSampling.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_JistLaminarProfileSampling_outputs(): output_map = dict( outProfile2=dict( extensions=None, ), outProfilemapped=dict( extensions=None, ), ) outputs = JistLaminarProfileSampling.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_JistLaminarROIAveraging.py000066400000000000000000000032061413403311400277770ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..developer import JistLaminarROIAveraging def test_JistLaminarROIAveraging_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inIntensity=dict( argstr="--inIntensity %s", extensions=None, ), inMask=dict( argstr="--inMask %s", extensions=None, ), inROI=dict( argstr="--inROI %s", extensions=None, ), inROI2=dict( argstr="--inROI2 %s", ), null=dict( argstr="--null %s", ), outROI3=dict( argstr="--outROI3 %s", hash_files=False, ), 
xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", usedefault=True, ), xPrefExt=dict( argstr="--xPrefExt %s", ), ) inputs = JistLaminarROIAveraging.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_JistLaminarROIAveraging_outputs(): output_map = dict( outROI3=dict( extensions=None, ), ) outputs = JistLaminarROIAveraging.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_JistLaminarVolumetricLayering.py000066400000000000000000000047421413403311400313540ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..developer import JistLaminarVolumetricLayering def test_JistLaminarVolumetricLayering_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inInner=dict( argstr="--inInner %s", extensions=None, ), inLayering=dict( argstr="--inLayering %s", ), inLayering2=dict( argstr="--inLayering2 %s", ), inMax=dict( argstr="--inMax %d", ), inMin=dict( argstr="--inMin %f", ), inNumber=dict( argstr="--inNumber %d", ), inOuter=dict( argstr="--inOuter %s", extensions=None, ), inTopology=dict( argstr="--inTopology %s", ), incurvature=dict( argstr="--incurvature %d", ), inpresmooth=dict( argstr="--inpresmooth %s", ), inratio=dict( argstr="--inratio %f", ), null=dict( argstr="--null %s", ), outContinuous=dict( argstr="--outContinuous %s", hash_files=False, ), outDiscrete=dict( argstr="--outDiscrete %s", hash_files=False, ), outLayer=dict( argstr="--outLayer %s", hash_files=False, ), xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", usedefault=True, ), xPrefExt=dict( argstr="--xPrefExt %s", ), ) inputs = JistLaminarVolumetricLayering.input_spec() 
for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_JistLaminarVolumetricLayering_outputs(): output_map = dict( outContinuous=dict( extensions=None, ), outDiscrete=dict( extensions=None, ), outLayer=dict( extensions=None, ), ) outputs = JistLaminarVolumetricLayering.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmImageCalculator.py000066400000000000000000000031271413403311400312330ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..developer import MedicAlgorithmImageCalculator def test_MedicAlgorithmImageCalculator_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inOperation=dict( argstr="--inOperation %s", ), inVolume=dict( argstr="--inVolume %s", extensions=None, ), inVolume2=dict( argstr="--inVolume2 %s", extensions=None, ), null=dict( argstr="--null %s", ), outResult=dict( argstr="--outResult %s", hash_files=False, ), xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", usedefault=True, ), xPrefExt=dict( argstr="--xPrefExt %s", ), ) inputs = MedicAlgorithmImageCalculator.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MedicAlgorithmImageCalculator_outputs(): output_map = dict( outResult=dict( extensions=None, ), ) outputs = MedicAlgorithmImageCalculator.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmLesionToads.py000066400000000000000000000104131413403311400304170ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..developer import MedicAlgorithmLesionToads def test_MedicAlgorithmLesionToads_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inAtlas=dict( argstr="--inAtlas %s", ), inAtlas2=dict( argstr="--inAtlas2 %s", extensions=None, ), inAtlas3=dict( argstr="--inAtlas3 %s", extensions=None, ), inAtlas4=dict( argstr="--inAtlas4 %s", extensions=None, ), inAtlas5=dict( argstr="--inAtlas5 %f", ), inAtlas6=dict( argstr="--inAtlas6 %s", ), inConnectivity=dict( argstr="--inConnectivity %s", ), inCorrect=dict( argstr="--inCorrect %s", ), inFLAIR=dict( argstr="--inFLAIR %s", extensions=None, ), inInclude=dict( argstr="--inInclude %s", ), inMaximum=dict( argstr="--inMaximum %d", ), inMaximum2=dict( argstr="--inMaximum2 %d", ), inMaximum3=dict( argstr="--inMaximum3 %d", ), inMaximum4=dict( argstr="--inMaximum4 %f", ), inMaximum5=dict( argstr="--inMaximum5 %d", ), inOutput=dict( argstr="--inOutput %s", ), inOutput2=dict( argstr="--inOutput2 %s", ), inOutput3=dict( argstr="--inOutput3 %s", ), inSmooting=dict( argstr="--inSmooting %f", ), inT1_MPRAGE=dict( argstr="--inT1_MPRAGE %s", extensions=None, ), inT1_SPGR=dict( argstr="--inT1_SPGR %s", extensions=None, ), null=dict( argstr="--null %s", ), outCortical=dict( argstr="--outCortical %s", hash_files=False, ), outFilled=dict( argstr="--outFilled %s", hash_files=False, ), outHard=dict( argstr="--outHard %s", hash_files=False, ), outHard2=dict( argstr="--outHard2 %s", hash_files=False, ), outInhomogeneity=dict( argstr="--outInhomogeneity %s", hash_files=False, ), outLesion=dict( argstr="--outLesion %s", hash_files=False, ), outMembership=dict( argstr="--outMembership %s", hash_files=False, ), outSulcal=dict( argstr="--outSulcal %s", hash_files=False, ), outWM=dict( 
argstr="--outWM %s", hash_files=False, ), xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", usedefault=True, ), xPrefExt=dict( argstr="--xPrefExt %s", ), ) inputs = MedicAlgorithmLesionToads.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MedicAlgorithmLesionToads_outputs(): output_map = dict( outCortical=dict( extensions=None, ), outFilled=dict( extensions=None, ), outHard=dict( extensions=None, ), outHard2=dict( extensions=None, ), outInhomogeneity=dict( extensions=None, ), outLesion=dict( extensions=None, ), outMembership=dict( extensions=None, ), outSulcal=dict( extensions=None, ), outWM=dict( extensions=None, ), ) outputs = MedicAlgorithmLesionToads.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmMipavReorient.py000066400000000000000000000036451413403311400307700ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..developer import MedicAlgorithmMipavReorient def test_MedicAlgorithmMipavReorient_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inInterpolation=dict( argstr="--inInterpolation %s", ), inNew=dict( argstr="--inNew %s", ), inResolution=dict( argstr="--inResolution %s", ), inSource=dict( argstr="--inSource %s", sep=";", ), inTemplate=dict( argstr="--inTemplate %s", extensions=None, ), inUser=dict( argstr="--inUser %s", ), inUser2=dict( argstr="--inUser2 %s", ), inUser3=dict( argstr="--inUser3 %s", ), inUser4=dict( argstr="--inUser4 %s", ), null=dict( argstr="--null %s", ), outReoriented=dict( argstr="--outReoriented %s", sep=";", ), xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", 
usedefault=True, ), xPrefExt=dict( argstr="--xPrefExt %s", ), ) inputs = MedicAlgorithmMipavReorient.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MedicAlgorithmMipavReorient_outputs(): output_map = dict() outputs = MedicAlgorithmMipavReorient.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmN3.py000066400000000000000000000041541413403311400264600ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..developer import MedicAlgorithmN3 def test_MedicAlgorithmN3_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inAutomatic=dict( argstr="--inAutomatic %s", ), inEnd=dict( argstr="--inEnd %f", ), inField=dict( argstr="--inField %f", ), inInput=dict( argstr="--inInput %s", extensions=None, ), inKernel=dict( argstr="--inKernel %f", ), inMaximum=dict( argstr="--inMaximum %d", ), inSignal=dict( argstr="--inSignal %f", ), inSubsample=dict( argstr="--inSubsample %f", ), inWeiner=dict( argstr="--inWeiner %f", ), null=dict( argstr="--null %s", ), outInhomogeneity=dict( argstr="--outInhomogeneity %s", hash_files=False, ), outInhomogeneity2=dict( argstr="--outInhomogeneity2 %s", hash_files=False, ), xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", usedefault=True, ), xPrefExt=dict( argstr="--xPrefExt %s", ), ) inputs = MedicAlgorithmN3.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MedicAlgorithmN3_outputs(): output_map = dict( outInhomogeneity=dict( extensions=None, ), outInhomogeneity2=dict( extensions=None, ), ) outputs = 
MedicAlgorithmN3.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmSPECTRE2010.py000066400000000000000000000121011413403311400275770ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..developer import MedicAlgorithmSPECTRE2010 def test_MedicAlgorithmSPECTRE2010_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inApply=dict( argstr="--inApply %s", ), inAtlas=dict( argstr="--inAtlas %s", extensions=None, ), inBackground=dict( argstr="--inBackground %f", ), inCoarse=dict( argstr="--inCoarse %f", ), inCost=dict( argstr="--inCost %s", ), inDegrees=dict( argstr="--inDegrees %s", ), inFind=dict( argstr="--inFind %s", ), inFine=dict( argstr="--inFine %f", ), inImage=dict( argstr="--inImage %s", ), inInhomogeneity=dict( argstr="--inInhomogeneity %s", ), inInitial=dict( argstr="--inInitial %d", ), inInitial2=dict( argstr="--inInitial2 %f", ), inInput=dict( argstr="--inInput %s", extensions=None, ), inMMC=dict( argstr="--inMMC %d", ), inMMC2=dict( argstr="--inMMC2 %d", ), inMaximum=dict( argstr="--inMaximum %f", ), inMinimum=dict( argstr="--inMinimum %f", ), inMinimum2=dict( argstr="--inMinimum2 %f", ), inMultiple=dict( argstr="--inMultiple %d", ), inMultithreading=dict( argstr="--inMultithreading %s", ), inNumber=dict( argstr="--inNumber %d", ), inNumber2=dict( argstr="--inNumber2 %d", ), inOutput=dict( argstr="--inOutput %s", ), inOutput2=dict( argstr="--inOutput2 %s", ), inOutput3=dict( argstr="--inOutput3 %s", ), inOutput4=dict( argstr="--inOutput4 %s", ), inOutput5=dict( argstr="--inOutput5 %s", ), inRegistration=dict( argstr="--inRegistration %s", ), inResample=dict( argstr="--inResample %s", ), inRun=dict( argstr="--inRun %s", ), inSkip=dict( argstr="--inSkip %s", ), inSmoothing=dict( 
argstr="--inSmoothing %f", ), inSubsample=dict( argstr="--inSubsample %s", ), inUse=dict( argstr="--inUse %s", ), null=dict( argstr="--null %s", ), outFANTASM=dict( argstr="--outFANTASM %s", hash_files=False, ), outMask=dict( argstr="--outMask %s", hash_files=False, ), outMidsagittal=dict( argstr="--outMidsagittal %s", hash_files=False, ), outOriginal=dict( argstr="--outOriginal %s", hash_files=False, ), outPrior=dict( argstr="--outPrior %s", hash_files=False, ), outSegmentation=dict( argstr="--outSegmentation %s", hash_files=False, ), outSplitHalves=dict( argstr="--outSplitHalves %s", hash_files=False, ), outStripped=dict( argstr="--outStripped %s", hash_files=False, ), outd0=dict( argstr="--outd0 %s", hash_files=False, ), xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", usedefault=True, ), xPrefExt=dict( argstr="--xPrefExt %s", ), ) inputs = MedicAlgorithmSPECTRE2010.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MedicAlgorithmSPECTRE2010_outputs(): output_map = dict( outFANTASM=dict( extensions=None, ), outMask=dict( extensions=None, ), outMidsagittal=dict( extensions=None, ), outOriginal=dict( extensions=None, ), outPrior=dict( extensions=None, ), outSegmentation=dict( extensions=None, ), outSplitHalves=dict( extensions=None, ), outStripped=dict( extensions=None, ), outd0=dict( extensions=None, ), ) outputs = MedicAlgorithmSPECTRE2010.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmThresholdToBinaryMask.py000066400000000000000000000030741413403311400324200ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..developer import MedicAlgorithmThresholdToBinaryMask def 
test_MedicAlgorithmThresholdToBinaryMask_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inLabel=dict( argstr="--inLabel %s", sep=";", ), inMaximum=dict( argstr="--inMaximum %f", ), inMinimum=dict( argstr="--inMinimum %f", ), inUse=dict( argstr="--inUse %s", ), null=dict( argstr="--null %s", ), outBinary=dict( argstr="--outBinary %s", sep=";", ), xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", usedefault=True, ), xPrefExt=dict( argstr="--xPrefExt %s", ), ) inputs = MedicAlgorithmThresholdToBinaryMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MedicAlgorithmThresholdToBinaryMask_outputs(): output_map = dict() outputs = MedicAlgorithmThresholdToBinaryMask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mipav/tests/test_auto_RandomVol.py000066400000000000000000000035161413403311400252710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..developer import RandomVol def test_RandomVol_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inField=dict( argstr="--inField %s", ), inLambda=dict( argstr="--inLambda %f", ), inMaximum=dict( argstr="--inMaximum %d", ), inMinimum=dict( argstr="--inMinimum %d", ), inSize=dict( argstr="--inSize %d", ), inSize2=dict( argstr="--inSize2 %d", ), inSize3=dict( argstr="--inSize3 %d", ), inSize4=dict( argstr="--inSize4 %d", ), inStandard=dict( argstr="--inStandard %d", ), null=dict( argstr="--null %s", ), outRand1=dict( argstr="--outRand1 %s", hash_files=False, ), xDefaultMem=dict( argstr="-xDefaultMem %d", ), xMaxProcess=dict( argstr="-xMaxProcess %d", usedefault=True, ), xPrefExt=dict( 
argstr="--xPrefExt %s", ), ) inputs = RandomVol.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RandomVol_outputs(): output_map = dict( outRand1=dict( extensions=None, ), ) outputs = RandomVol.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mixins/000077500000000000000000000000001413403311400177535ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/mixins/__init__.py000066400000000000000000000002601413403311400220620ustar00rootroot00000000000000from .reporting import ( ReportCapableInterface, ReportCapableInputSpec, ReportCapableOutputSpec, ) from .fixheader import CopyHeaderInputSpec, CopyHeaderInterface nipype-1.7.0/nipype/interfaces/mixins/fixheader.py000066400000000000000000000111751413403311400222710ustar00rootroot00000000000000from ..base import BaseInterface, BaseInterfaceInputSpec, traits from ...utils.imagemanip import copy_header as _copy_header class CopyHeaderInputSpec(BaseInterfaceInputSpec): copy_header = traits.Bool( desc="Copy headers of the input image into the output image" ) class CopyHeaderInterface(BaseInterface): """Copy headers if the copy_header input is ``True`` This interface mixin adds a post-run hook that allows for copying an input header to an output file. The subclass should specify a ``_copy_header_map`` that maps the **output** image to the **input** image whose header should be copied. This feature is intended for tools that are intended to adjust voxel data without modifying the header, but for some reason do not reliably preserve the header. 
Here we show an example interface that takes advantage of the mixin by simply setting the data block: >>> import os >>> import numpy as np >>> import nibabel as nb >>> from nipype.interfaces.base import SimpleInterface, TraitedSpec, File >>> from nipype.interfaces.mixins import CopyHeaderInputSpec, CopyHeaderInterface >>> class ZerofileInputSpec(CopyHeaderInputSpec): ... in_file = File(mandatory=True, exists=True) >>> class ZerofileOutputSpec(TraitedSpec): ... out_file = File() >>> class ZerofileInterface(SimpleInterface, CopyHeaderInterface): ... input_spec = ZerofileInputSpec ... output_spec = ZerofileOutputSpec ... _copy_header_map = {'out_file': 'in_file'} ... ... def _run_interface(self, runtime): ... img = nb.load(self.inputs.in_file) ... # Just set the data. Let the CopyHeaderInterface mixin fix the affine and header. ... nb.Nifti1Image(np.zeros(img.shape, dtype=np.uint8), None).to_filename('out.nii') ... self._results = {'out_file': os.path.abspath('out.nii')} ... return runtime Consider a file of all ones and a non-trivial affine: >>> in_file = 'test.nii' >>> nb.Nifti1Image(np.ones((5,5,5), dtype=np.int16), ... 
affine=np.diag((4, 3, 2, 1))).to_filename(in_file) The default behavior would produce a file with similar data: >>> res = ZerofileInterface(in_file=in_file).run() >>> out_img = nb.load(res.outputs.out_file) >>> out_img.shape (5, 5, 5) >>> np.all(out_img.get_fdata() == 0) True An updated data type: >>> out_img.get_data_dtype() dtype('uint8') But a different affine: >>> np.array_equal(out_img.affine, np.diag((4, 3, 2, 1))) False With ``copy_header=True``, then the affine is also equal: >>> res = ZerofileInterface(in_file=in_file, copy_header=True).run() >>> out_img = nb.load(res.outputs.out_file) >>> np.array_equal(out_img.affine, np.diag((4, 3, 2, 1))) True The data properties remain as expected: >>> out_img.shape (5, 5, 5) >>> out_img.get_data_dtype() dtype('uint8') >>> np.all(out_img.get_fdata() == 0) True By default, the data type of the output file is permitted to vary from the inputs. That is, the data type is preserved. If the data type of the original file is preferred, the ``_copy_header_map`` can indicate the output data type should **not** be preserved by providing a tuple of the input and ``False``. >>> ZerofileInterface._copy_header_map['out_file'] = ('in_file', False) >>> res = ZerofileInterface(in_file=in_file, copy_header=True).run() >>> out_img = nb.load(res.outputs.out_file) >>> out_img.get_data_dtype() dtype('>> np.array_equal(out_img.affine, np.diag((4, 3, 2, 1))) True >>> out_img.shape (5, 5, 5) >>> np.all(out_img.get_fdata() == 0) True Providing a tuple where the second value is ``True`` is also permissible to achieve the default behavior. 
""" _copy_header_map = None def _post_run_hook(self, runtime): """Copy headers for outputs, if required.""" runtime = super()._post_run_hook(runtime) if self._copy_header_map is None or not self.inputs.copy_header: return runtime inputs = self.inputs.get_traitsfree() outputs = self.aggregate_outputs(runtime=runtime).get_traitsfree() defined_outputs = set(outputs.keys()).intersection(self._copy_header_map.keys()) for out in defined_outputs: inp = self._copy_header_map[out] keep_dtype = True if isinstance(inp, tuple): inp, keep_dtype = inp _copy_header(inputs[inp], outputs[out], keep_dtype=keep_dtype) return runtime nipype-1.7.0/nipype/interfaces/mixins/reporting.py000066400000000000000000000037201413403311400223400ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ class mixin and utilities for enabling reports for nipype interfaces """ import os from abc import abstractmethod from ... 
import logging from ..base import File, BaseInterface, BaseInterfaceInputSpec, TraitedSpec iflogger = logging.getLogger("nipype.interface") class ReportCapableInputSpec(BaseInterfaceInputSpec): out_report = File( "report", usedefault=True, hash_files=False, desc="filename for the visual report", ) class ReportCapableOutputSpec(TraitedSpec): out_report = File(desc="filename for the visual report") class ReportCapableInterface(BaseInterface): """Mixin to enable reporting for Nipype interfaces""" _out_report = None def __init__(self, generate_report=False, **kwargs): super(ReportCapableInterface, self).__init__(**kwargs) self.generate_report = generate_report def _post_run_hook(self, runtime): runtime = super(ReportCapableInterface, self)._post_run_hook(runtime) # leave early if there's nothing to do if not self.generate_report: return runtime self._out_report = self.inputs.out_report if not os.path.isabs(self._out_report): self._out_report = os.path.abspath( os.path.join(runtime.cwd, self._out_report) ) self._generate_report() return runtime def _list_outputs(self): try: outputs = super(ReportCapableInterface, self)._list_outputs() except NotImplementedError: outputs = {} if self._out_report is not None: outputs["out_report"] = self._out_report return outputs @abstractmethod def _generate_report(self): """ Saves report to file identified by _out_report instance variable """ raise NotImplementedError nipype-1.7.0/nipype/interfaces/mixins/tests/000077500000000000000000000000001413403311400211155ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/mixins/tests/__init__.py000066400000000000000000000000301413403311400232170ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/mixins/tests/test_auto_CopyHeaderInterface.py000066400000000000000000000005741413403311400274300ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..fixheader import CopyHeaderInterface def test_CopyHeaderInterface_inputs(): 
input_map = dict() inputs = CopyHeaderInterface.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mixins/tests/test_auto_ReportCapableInterface.py000066400000000000000000000006051413403311400301230ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..reporting import ReportCapableInterface def test_ReportCapableInterface_inputs(): input_map = dict() inputs = ReportCapableInterface.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mne/000077500000000000000000000000001413403311400172235ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/mne/__init__.py000066400000000000000000000002301413403311400213270ustar00rootroot00000000000000# -*- coding: utf-8 -*- """MNE is a software for exploring, visualizing, and analyzing human neurophysiological data.""" from .base import WatershedBEM nipype-1.7.0/nipype/interfaces/mne/base.py000066400000000000000000000105651413403311400205160ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os.path as op import glob from ... 
import logging from ...utils.filemanip import simplify_list from ..base import traits, File, Directory, TraitedSpec, OutputMultiPath from ..freesurfer.base import FSCommand, FSTraitedSpec iflogger = logging.getLogger("nipype.interface") class WatershedBEMInputSpec(FSTraitedSpec): subject_id = traits.Str( argstr="--subject %s", mandatory=True, desc="Subject ID (must have a complete Freesurfer directory)", ) subjects_dir = Directory( exists=True, mandatory=True, usedefault=True, desc="Path to Freesurfer subjects directory", ) volume = traits.Enum( "T1", "aparc+aseg", "aseg", "brain", "orig", "brainmask", "ribbon", argstr="--volume %s", usedefault=True, desc='The volume from the "mri" directory to use (defaults to T1)', ) overwrite = traits.Bool( True, usedefault=True, argstr="--overwrite", desc="Overwrites the existing files", ) atlas_mode = traits.Bool( argstr="--atlas", desc="Use atlas mode for registration (default: no rigid alignment)", ) class WatershedBEMOutputSpec(TraitedSpec): mesh_files = OutputMultiPath( File(exists=True), desc=( "Paths to the output meshes (brain, inner " "skull, outer skull, outer skin)" ), ) brain_surface = File( exists=True, loc="bem/watershed", desc="Brain surface (in Freesurfer format)" ) inner_skull_surface = File( exists=True, loc="bem/watershed", desc="Inner skull surface (in Freesurfer format)", ) outer_skull_surface = File( exists=True, loc="bem/watershed", desc="Outer skull surface (in Freesurfer format)", ) outer_skin_surface = File( exists=True, loc="bem/watershed", desc="Outer skin surface (in Freesurfer format)", ) fif_file = File( exists=True, loc="bem", altkey="fif", desc='"fif" format file for EEG processing in MNE', ) cor_files = OutputMultiPath( File(exists=True), loc="bem/watershed/ws", altkey="COR", desc='"COR" format files', ) class WatershedBEM(FSCommand): """Uses mne_watershed_bem to get information from dicom directories Examples -------- >>> from nipype.interfaces.mne import WatershedBEM >>> bem = WatershedBEM() 
>>> bem.inputs.subject_id = 'subj1' >>> bem.inputs.subjects_dir = '.' >>> bem.cmdline 'mne watershed_bem --overwrite --subject subj1 --volume T1' >>> bem.run() # doctest: +SKIP """ _cmd = "mne watershed_bem" input_spec = WatershedBEMInputSpec output_spec = WatershedBEMOutputSpec _additional_metadata = ["loc", "altkey"] def _get_files(self, path, key, dirval, altkey=None): globsuffix = "*" globprefix = "*" keydir = op.join(path, dirval) if altkey: key = altkey globpattern = op.join(keydir, "".join((globprefix, key, globsuffix))) return glob.glob(globpattern) def _list_outputs(self): outputs = self.output_spec().get() subjects_dir = self.inputs.subjects_dir subject_path = op.join(subjects_dir, self.inputs.subject_id) output_traits = self._outputs() mesh_paths = [] for k in list(outputs.keys()): if k != "mesh_files": val = self._get_files( subject_path, k, output_traits.traits()[k].loc, output_traits.traits()[k].altkey, ) if val: value_list = simplify_list(val) if isinstance(value_list, list): out_files = [] for value in value_list: out_files.append(op.abspath(value)) elif isinstance(value_list, (str, bytes)): out_files = op.abspath(value_list) else: raise TypeError outputs[k] = out_files if not k.rfind("surface") == -1: mesh_paths.append(out_files) outputs["mesh_files"] = mesh_paths return outputs nipype-1.7.0/nipype/interfaces/mne/tests/000077500000000000000000000000001413403311400203655ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/mne/tests/__init__.py000066400000000000000000000000301413403311400224670ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/mne/tests/test_auto_WatershedBEM.py000066400000000000000000000035421413403311400253040ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import WatershedBEM def test_WatershedBEM_inputs(): input_map = dict( args=dict( argstr="%s", ), atlas_mode=dict( argstr="--atlas", ), environ=dict( nohash=True, usedefault=True, ), 
overwrite=dict( argstr="--overwrite", usedefault=True, ), subject_id=dict( argstr="--subject %s", mandatory=True, ), subjects_dir=dict( mandatory=True, usedefault=True, ), volume=dict( argstr="--volume %s", usedefault=True, ), ) inputs = WatershedBEM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_WatershedBEM_outputs(): output_map = dict( brain_surface=dict( extensions=None, loc="bem/watershed", ), cor_files=dict( altkey="COR", loc="bem/watershed/ws", ), fif_file=dict( altkey="fif", extensions=None, loc="bem", ), inner_skull_surface=dict( extensions=None, loc="bem/watershed", ), mesh_files=dict(), outer_skin_surface=dict( extensions=None, loc="bem/watershed", ), outer_skull_surface=dict( extensions=None, loc="bem/watershed", ), ) outputs = WatershedBEM.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/000077500000000000000000000000001413403311400177715ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/mrtrix/__init__.py000066400000000000000000000017041413403311400221040ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """MRTrix version 2 (DEPRECATED) -- tools to perform various types of diffusion MRI analyses.""" from .tracking import ( Tracks2Prob, FilterTracks, StreamlineTrack, DiffusionTensorStreamlineTrack, SphericallyDeconvolutedStreamlineTrack, ProbabilisticSphericallyDeconvolutedStreamlineTrack, ) from .tensors import ( FSL2MRTrix, ConstrainedSphericalDeconvolution, DWI2SphericalHarmonicsImage, EstimateResponseForSH, GenerateDirections, FindShPeaks, Directions2Amplitude, ) from .preprocess import ( MRConvert, MRMultiply, MRTrixViewer, MRTrixInfo, 
GenerateWhiteMatterMask, DWI2Tensor, Tensor2ApparentDiffusion, Tensor2FractionalAnisotropy, Tensor2Vector, MedianFilter3D, Erode, Threshold, ) from .convert import MRTrix2TrackVis nipype-1.7.0/nipype/interfaces/mrtrix/convert.py000066400000000000000000000245021413403311400220260ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os.path as op import nibabel as nb import nibabel.trackvis as trk import numpy as np from nibabel.trackvis import HeaderError from nibabel.volumeutils import native_code from nibabel.orientations import aff2axcodes from ... import logging from ...utils.filemanip import split_filename from ..base import TraitedSpec, File, isdefined from ..dipy.base import DipyBaseInterface, HAVE_DIPY as have_dipy iflogger = logging.getLogger("nipype.interface") def get_vox_dims(volume): import nibabel as nb if isinstance(volume, list): volume = volume[0] nii = nb.load(volume) hdr = nii.header voxdims = hdr.get_zooms() return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])] def get_data_dims(volume): import nibabel as nb if isinstance(volume, list): volume = volume[0] nii = nb.load(volume) hdr = nii.header datadims = hdr.get_data_shape() return [int(datadims[0]), int(datadims[1]), int(datadims[2])] def transform_to_affine(streams, header, affine): from dipy.tracking.utils import move_streamlines rotation, scale = np.linalg.qr(affine) streams = move_streamlines(streams, rotation) scale[0:3, 0:3] = np.dot(scale[0:3, 0:3], np.diag(1.0 / header["voxel_size"])) scale[0:3, 3] = abs(scale[0:3, 3]) streams = move_streamlines(streams, scale) return streams def read_mrtrix_tracks(in_file, as_generator=True): header = read_mrtrix_header(in_file) streamlines = read_mrtrix_streamlines(in_file, header, as_generator) return header, streamlines def read_mrtrix_header(in_file): fileobj = open(in_file, "rb") header = {} iflogger.info("Reading 
header data...") for line in fileobj: line = line.decode() if line == "END\n": iflogger.info("Reached the end of the header!") break elif ": " in line: line = line.replace("\n", "") line = line.replace("'", "") key = line.split(": ")[0] value = line.split(": ")[1] header[key] = value iflogger.info('...adding "%s" to header for key "%s"', value, key) fileobj.close() header["count"] = int(header["count"].replace("\n", "")) header["offset"] = int(header["file"].replace(".", "")) return header def read_mrtrix_streamlines(in_file, header, as_generator=True): offset = header["offset"] stream_count = header["count"] fileobj = open(in_file, "rb") fileobj.seek(offset) endianness = native_code f4dt = np.dtype(endianness + "f4") pt_cols = 3 bytesize = pt_cols * 4 def points_per_track(offset): track_points = [] iflogger.info("Identifying the number of points per tract...") all_str = fileobj.read() num_triplets = int(len(all_str) / bytesize) pts = np.ndarray(shape=(num_triplets, pt_cols), dtype="f4", buffer=all_str) nonfinite_list = np.where(np.invert(np.isfinite(pts[:, 2]))) nonfinite_list = list(nonfinite_list[0])[ 0:-1 ] # Converts numpy array to list, removes the last value for idx, value in enumerate(nonfinite_list): if idx == 0: track_points.append(nonfinite_list[idx]) else: track_points.append(nonfinite_list[idx] - nonfinite_list[idx - 1] - 1) return track_points, nonfinite_list def track_gen(track_points): n_streams = 0 iflogger.info("Reading tracks...") while True: try: n_pts = track_points[n_streams] except IndexError: break pts_str = fileobj.read(n_pts * bytesize) nan_str = fileobj.read(bytesize) if len(pts_str) < (n_pts * bytesize): if not n_streams == stream_count: raise HeaderError( "Expecting %s points, found only %s" % (stream_count, n_streams) ) iflogger.error( "Expecting %s points, found only %s", stream_count, n_streams ) break pts = np.ndarray(shape=(n_pts, pt_cols), dtype=f4dt, buffer=pts_str) nan_pt = np.ndarray(shape=(1, pt_cols), dtype=f4dt, 
buffer=nan_str) if np.isfinite(nan_pt[0][0]): raise ValueError break xyz = pts[:, :3] yield xyz n_streams += 1 if n_streams == stream_count: iflogger.info("100%% : %i tracks read", n_streams) raise StopIteration try: if n_streams % int(stream_count / 100) == 0: percent = int(float(n_streams) / float(stream_count) * 100) iflogger.info("%i%% : %i tracks read", percent, n_streams) except ZeroDivisionError: iflogger.info("%i stream read out of %i", n_streams, stream_count) track_points, nonfinite_list = points_per_track(offset) fileobj.seek(offset) streamlines = track_gen(track_points) if not as_generator: streamlines = list(streamlines) return streamlines class MRTrix2TrackVisInputSpec(TraitedSpec): in_file = File( exists=True, mandatory=True, desc="The input file for the tracks in MRTrix (.tck) format", ) image_file = File(exists=True, desc="The image the tracks were generated from") matrix_file = File( exists=True, desc="A transformation matrix to apply to the tracts after they have been generated (from FLIRT - affine transformation from image_file to registration_image_file)", ) registration_image_file = File( exists=True, desc="The final image the tracks should be registered to." 
) out_filename = File( "converted.trk", genfile=True, usedefault=True, desc="The output filename for the tracks in TrackVis (.trk) format", ) class MRTrix2TrackVisOutputSpec(TraitedSpec): out_file = File(exists=True) class MRTrix2TrackVis(DipyBaseInterface): """ Converts MRtrix (.tck) tract files into TrackVis (.trk) format using functions from dipy Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> tck2trk = mrt.MRTrix2TrackVis() >>> tck2trk.inputs.in_file = 'dwi_CSD_tracked.tck' >>> tck2trk.inputs.image_file = 'diffusion.nii' >>> tck2trk.run() # doctest: +SKIP """ input_spec = MRTrix2TrackVisInputSpec output_spec = MRTrix2TrackVisOutputSpec def _run_interface(self, runtime): from dipy.tracking.utils import move_streamlines, affine_from_fsl_mat_file dx, dy, dz = get_data_dims(self.inputs.image_file) vx, vy, vz = get_vox_dims(self.inputs.image_file) image_file = nb.load(self.inputs.image_file) affine = image_file.affine out_filename = op.abspath(self.inputs.out_filename) # Reads MRTrix tracks header, streamlines = read_mrtrix_tracks(self.inputs.in_file, as_generator=True) iflogger.info("MRTrix Header:") iflogger.info(header) # Writes to Trackvis trk_header = nb.trackvis.empty_header() trk_header["dim"] = [dx, dy, dz] trk_header["voxel_size"] = [vx, vy, vz] trk_header["n_count"] = header["count"] if isdefined(self.inputs.matrix_file) and isdefined( self.inputs.registration_image_file ): iflogger.info( "Applying transformation from matrix file %s", self.inputs.matrix_file ) xfm = np.genfromtxt(self.inputs.matrix_file) iflogger.info(xfm) registration_image_file = nb.load(self.inputs.registration_image_file) reg_affine = registration_image_file.affine r_dx, r_dy, r_dz = get_data_dims(self.inputs.registration_image_file) r_vx, r_vy, r_vz = get_vox_dims(self.inputs.registration_image_file) iflogger.info( "Using affine from registration image file %s", self.inputs.registration_image_file, ) iflogger.info(reg_affine) trk_header["vox_to_ras"] = reg_affine 
trk_header["dim"] = [r_dx, r_dy, r_dz] trk_header["voxel_size"] = [r_vx, r_vy, r_vz] affine = np.dot(affine, np.diag(1.0 / np.array([vx, vy, vz, 1]))) transformed_streamlines = transform_to_affine( streamlines, trk_header, affine ) aff = affine_from_fsl_mat_file(xfm, [vx, vy, vz], [r_vx, r_vy, r_vz]) iflogger.info(aff) axcode = aff2axcodes(reg_affine) trk_header["voxel_order"] = axcode[0] + axcode[1] + axcode[2] final_streamlines = move_streamlines(transformed_streamlines, aff) trk_tracks = ((ii, None, None) for ii in final_streamlines) trk.write(out_filename, trk_tracks, trk_header) iflogger.info("Saving transformed Trackvis file as %s", out_filename) iflogger.info("New TrackVis Header:") iflogger.info(trk_header) else: iflogger.info( "Applying transformation from scanner coordinates to %s", self.inputs.image_file, ) axcode = aff2axcodes(affine) trk_header["voxel_order"] = axcode[0] + axcode[1] + axcode[2] trk_header["vox_to_ras"] = affine transformed_streamlines = transform_to_affine( streamlines, trk_header, affine ) trk_tracks = ((ii, None, None) for ii in transformed_streamlines) trk.write(out_filename, trk_tracks, trk_header) iflogger.info("Saving Trackvis file as %s", out_filename) iflogger.info("TrackVis Header:") iflogger.info(trk_header) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = op.abspath(self.inputs.out_filename) return outputs def _gen_filename(self, name): if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + ".trk" nipype-1.7.0/nipype/interfaces/mrtrix/defhdr.mat000066400000000000000000000010251413403311400217260ustar00rootroot00000000000000MATLAB 5.0 MAT-file, Platform: GLNXA64, Created on: Mon Jul 18 07:53:57 2011 IMx햽N0m*RՁ&V0bj-vdvx q[׹P'˗M; UmTJħ?!Gw *= j\21S9Ne1V ]_uHZiYI+a?RP J'DuJE2bb QR"Wj9aȬ62FܽA A)f7wP}U 0.( zFV2p_ZH7W | w06cE8%|A<#۹5.[u5r}h;޷-{}w݈[ = 'ߗ-{|~a} 
Gknipype-1.7.0/nipype/interfaces/mrtrix/preprocess.py000066400000000000000000000677301413403311400225450ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os.path as op from ...utils.filemanip import split_filename from ..base import ( CommandLineInputSpec, CommandLine, traits, TraitedSpec, File, InputMultiPath, isdefined, ) class MRConvertInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="voxel-order data filename", ) out_filename = File(genfile=True, argstr="%s", position=-1, desc="Output filename") extract_at_axis = traits.Enum( 1, 2, 3, argstr="-coord %s", position=1, desc='"Extract data only at the coordinates specified. This option specifies the Axis. Must be used in conjunction with extract_at_coordinate.', ) extract_at_coordinate = traits.List( traits.Float, argstr="%s", sep=",", position=2, minlen=1, maxlen=3, desc='"Extract data only at the coordinates specified. This option specifies the coordinates. Must be used in conjunction with extract_at_axis. Three comma-separated numbers giving the size of each voxel in mm.', ) voxel_dims = traits.List( traits.Float, argstr="-vox %s", sep=",", position=3, minlen=3, maxlen=3, desc="Three comma-separated numbers giving the size of each voxel in mm.", ) output_datatype = traits.Enum( "nii", "float", "char", "short", "int", "long", "double", argstr="-output %s", position=2, desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"', ) # , usedefault=True) extension = traits.Enum( "mif", "nii", "float", "char", "short", "int", "long", "double", position=2, desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"', usedefault=True, ) layout = traits.Enum( "nii", "float", "char", "short", "int", "long", "double", argstr="-output %s", position=2, desc="specify the layout of the data in memory. 
The actual layout produced will depend on whether the output image format can support it.", ) resample = traits.Float( argstr="-scale %d", position=3, units="mm", desc="Apply scaling to the intensity values.", ) offset_bias = traits.Float( argstr="-scale %d", position=3, units="mm", desc="Apply offset to the intensity values.", ) replace_NaN_with_zero = traits.Bool( argstr="-zero", position=3, desc="Replace all NaN values with zero." ) prs = traits.Bool( argstr="-prs", position=3, desc="Assume that the DW gradients are specified in the PRS frame (Siemens DICOM only).", ) class MRConvertOutputSpec(TraitedSpec): converted = File(exists=True, desc="path/name of 4D volume in voxel order") class MRConvert(CommandLine): """ Perform conversion between different file types and optionally extract a subset of the input image. If used correctly, this program can be a very useful workhorse. In addition to converting images between different formats, it can be used to extract specific studies from a data set, extract a specific region of interest, flip the images, or to scale the intensity of the images. Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> mrconvert = mrt.MRConvert() >>> mrconvert.inputs.in_file = 'dwi_FA.mif' >>> mrconvert.inputs.out_filename = 'dwi_FA.nii' >>> mrconvert.run() # doctest: +SKIP """ _cmd = "mrconvert" input_spec = MRConvertInputSpec output_spec = MRConvertOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["converted"] = self.inputs.out_filename if not isdefined(outputs["converted"]): outputs["converted"] = op.abspath(self._gen_outfilename()) else: outputs["converted"] = op.abspath(outputs["converted"]) return outputs def _gen_filename(self, name): if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) if isdefined(self.inputs.out_filename): outname = self.inputs.out_filename else: outname = name + "_mrconvert." 
+ self.inputs.extension return outname class DWI2TensorInputSpec(CommandLineInputSpec): in_file = InputMultiPath( File(exists=True), argstr="%s", mandatory=True, position=-2, desc="Diffusion-weighted images", ) out_filename = File( name_template="%s_tensor.mif", name_source="in_file", output_name="tensor", argstr="%s", desc="Output tensor filename", position=-1, ) encoding_file = File( argstr="-grad %s", position=2, desc=( "Encoding file supplied as a 4xN text file with " "each line is in the format [ X Y Z b ], where " "[ X Y Z ] describe the direction of the applied " "gradient, and b gives the b-value in units " "(1000 s/mm^2). See FSL2MRTrix()" ), ) ignore_slice_by_volume = traits.List( traits.Int, argstr="-ignoreslices %s", sep=" ", position=2, minlen=2, maxlen=2, desc=( "Requires two values (i.e. [34 " "1] for [Slice Volume] Ignores " "the image slices specified " "when computing the tensor. " "Slice here means the z " "coordinate of the slice to be " "ignored." ), ) ignore_volumes = traits.List( traits.Int, argstr="-ignorevolumes %s", sep=" ", position=2, minlen=1, desc=( "Requires two values (i.e. [2 5 6] for " "[Volumes] Ignores the image volumes " "specified when computing the tensor." ), ) mask = File( exists=True, argstr="-mask %s", desc="Only perform computation within the specified binary brain mask image.", ) quiet = traits.Bool( argstr="-quiet", position=1, desc=("Do not display information messages or progress " "status."), ) debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class DWI2TensorOutputSpec(TraitedSpec): tensor = File(exists=True, desc="path/name of output diffusion tensor image") class DWI2Tensor(CommandLine): """ Converts diffusion-weighted images to tensor images. 
Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> dwi2tensor = mrt.DWI2Tensor() >>> dwi2tensor.inputs.in_file = 'dwi.mif' >>> dwi2tensor.inputs.encoding_file = 'encoding.txt' >>> dwi2tensor.cmdline 'dwi2tensor -grad encoding.txt dwi.mif dwi_tensor.mif' >>> dwi2tensor.run() # doctest: +SKIP """ _cmd = "dwi2tensor" input_spec = DWI2TensorInputSpec output_spec = DWI2TensorOutputSpec class Tensor2VectorInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="Diffusion tensor image", ) out_filename = File( genfile=True, argstr="%s", position=-1, desc="Output vector filename" ) quiet = traits.Bool( argstr="-quiet", position=1, desc="Do not display information messages or progress status.", ) debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class Tensor2VectorOutputSpec(TraitedSpec): vector = File( exists=True, desc="the output image of the major eigenvectors of the diffusion tensor image.", ) class Tensor2Vector(CommandLine): """ Generates a map of the major eigenvectors of the tensors in each voxel. 
Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> tensor2vector = mrt.Tensor2Vector() >>> tensor2vector.inputs.in_file = 'dwi_tensor.mif' >>> tensor2vector.run() # doctest: +SKIP """ _cmd = "tensor2vector" input_spec = Tensor2VectorInputSpec output_spec = Tensor2VectorOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["vector"] = self.inputs.out_filename if not isdefined(outputs["vector"]): outputs["vector"] = op.abspath(self._gen_outfilename()) else: outputs["vector"] = op.abspath(outputs["vector"]) return outputs def _gen_filename(self, name): if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_vector.mif" class Tensor2FractionalAnisotropyInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="Diffusion tensor image", ) out_filename = File( genfile=True, argstr="%s", position=-1, desc="Output Fractional Anisotropy filename", ) quiet = traits.Bool( argstr="-quiet", position=1, desc="Do not display information messages or progress status.", ) debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class Tensor2FractionalAnisotropyOutputSpec(TraitedSpec): FA = File( exists=True, desc="the output image of the major eigenvectors of the diffusion tensor image.", ) class Tensor2FractionalAnisotropy(CommandLine): """ Generates a map of the fractional anisotropy in each voxel. 
Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> tensor2FA = mrt.Tensor2FractionalAnisotropy() >>> tensor2FA.inputs.in_file = 'dwi_tensor.mif' >>> tensor2FA.run() # doctest: +SKIP """ _cmd = "tensor2FA" input_spec = Tensor2FractionalAnisotropyInputSpec output_spec = Tensor2FractionalAnisotropyOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["FA"] = self.inputs.out_filename if not isdefined(outputs["FA"]): outputs["FA"] = op.abspath(self._gen_outfilename()) else: outputs["FA"] = op.abspath(outputs["FA"]) return outputs def _gen_filename(self, name): if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_FA.mif" class Tensor2ApparentDiffusionInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="Diffusion tensor image", ) out_filename = File( genfile=True, argstr="%s", position=-1, desc="Output Fractional Anisotropy filename", ) quiet = traits.Bool( argstr="-quiet", position=1, desc="Do not display information messages or progress status.", ) debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class Tensor2ApparentDiffusionOutputSpec(TraitedSpec): ADC = File( exists=True, desc="the output image of the major eigenvectors of the diffusion tensor image.", ) class Tensor2ApparentDiffusion(CommandLine): """ Generates a map of the apparent diffusion coefficient (ADC) in each voxel Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> tensor2ADC = mrt.Tensor2ApparentDiffusion() >>> tensor2ADC.inputs.in_file = 'dwi_tensor.mif' >>> tensor2ADC.run() # doctest: +SKIP """ _cmd = "tensor2ADC" input_spec = Tensor2ApparentDiffusionInputSpec output_spec = Tensor2ApparentDiffusionOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["ADC"] = self.inputs.out_filename if not isdefined(outputs["ADC"]): 
outputs["ADC"] = op.abspath(self._gen_outfilename()) else: outputs["ADC"] = op.abspath(outputs["ADC"]) return outputs def _gen_filename(self, name): if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_ADC.mif" class MRMultiplyInputSpec(CommandLineInputSpec): in_files = InputMultiPath( File(exists=True), argstr="%s", mandatory=True, position=-2, desc="Input images to be multiplied", ) out_filename = File( genfile=True, argstr="%s", position=-1, desc="Output image filename" ) quiet = traits.Bool( argstr="-quiet", position=1, desc="Do not display information messages or progress status.", ) debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class MRMultiplyOutputSpec(TraitedSpec): out_file = File(exists=True, desc="the output image of the multiplication") class MRMultiply(CommandLine): """ Multiplies two images. Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> MRmult = mrt.MRMultiply() >>> MRmult.inputs.in_files = ['dwi.mif', 'dwi_WMProb.mif'] >>> MRmult.run() # doctest: +SKIP """ _cmd = "mrmult" input_spec = MRMultiplyInputSpec output_spec = MRMultiplyOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = self.inputs.out_filename if not isdefined(outputs["out_file"]): outputs["out_file"] = op.abspath(self._gen_outfilename()) else: outputs["out_file"] = op.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_files[0]) return name + "_MRMult.mif" class MRTrixViewerInputSpec(CommandLineInputSpec): in_files = InputMultiPath( File(exists=True), argstr="%s", mandatory=True, position=-2, desc="Input images to be viewed", ) quiet = traits.Bool( argstr="-quiet", position=1, desc="Do not display information 
messages or progress status.", ) debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class MRTrixViewerOutputSpec(TraitedSpec): pass class MRTrixViewer(CommandLine): """ Loads the input images in the MRTrix Viewer. Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> MRview = mrt.MRTrixViewer() >>> MRview.inputs.in_files = 'dwi.mif' >>> MRview.run() # doctest: +SKIP """ _cmd = "mrview" input_spec = MRTrixViewerInputSpec output_spec = MRTrixViewerOutputSpec def _list_outputs(self): return class MRTrixInfoInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="Input images to be read", ) class MRTrixInfoOutputSpec(TraitedSpec): pass class MRTrixInfo(CommandLine): """ Prints out relevant header information found in the image specified. Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> MRinfo = mrt.MRTrixInfo() >>> MRinfo.inputs.in_file = 'dwi.mif' >>> MRinfo.run() # doctest: +SKIP """ _cmd = "mrinfo" input_spec = MRTrixInfoInputSpec output_spec = MRTrixInfoOutputSpec def _list_outputs(self): return class GenerateWhiteMatterMaskInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-3, desc="Diffusion-weighted images", ) binary_mask = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="Binary brain mask" ) out_WMProb_filename = File( genfile=True, argstr="%s", position=-1, desc="Output WM probability image filename", ) encoding_file = File( exists=True, argstr="-grad %s", mandatory=True, position=1, desc="Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). 
See FSL2MRTrix", ) noise_level_margin = traits.Float( argstr="-margin %s", desc="Specify the width of the margin on either side of the image to be used to estimate the noise level (default = 10)", ) class GenerateWhiteMatterMaskOutputSpec(TraitedSpec): WMprobabilitymap = File(exists=True, desc="WMprobabilitymap") class GenerateWhiteMatterMask(CommandLine): """ Generates a white matter probability mask from the DW images. Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> genWM = mrt.GenerateWhiteMatterMask() >>> genWM.inputs.in_file = 'dwi.mif' >>> genWM.inputs.encoding_file = 'encoding.txt' >>> genWM.run() # doctest: +SKIP """ _cmd = "gen_WM_mask" input_spec = GenerateWhiteMatterMaskInputSpec output_spec = GenerateWhiteMatterMaskOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["WMprobabilitymap"] = op.abspath(self._gen_outfilename()) return outputs def _gen_filename(self, name): if name == "out_WMProb_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_WMProb.mif" class ErodeInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="Input mask image to be eroded", ) out_filename = File( genfile=True, argstr="%s", position=-1, desc="Output image filename" ) number_of_passes = traits.Int( argstr="-npass %s", desc="the number of passes (default: 1)" ) dilate = traits.Bool( argstr="-dilate", position=1, desc="Perform dilation rather than erosion" ) quiet = traits.Bool( argstr="-quiet", position=1, desc="Do not display information messages or progress status.", ) debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class ErodeOutputSpec(TraitedSpec): out_file = File(exists=True, desc="the output image") class Erode(CommandLine): """ Erode (or dilates) a mask (i.e. 
binary) image Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> erode = mrt.Erode() >>> erode.inputs.in_file = 'mask.mif' >>> erode.run() # doctest: +SKIP """ _cmd = "erode" input_spec = ErodeInputSpec output_spec = ErodeOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = self.inputs.out_filename if not isdefined(outputs["out_file"]): outputs["out_file"] = op.abspath(self._gen_outfilename()) else: outputs["out_file"] = op.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_erode.mif" class ThresholdInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="The input image to be thresholded", ) out_filename = File( genfile=True, argstr="%s", position=-1, desc="The output binary image mask." ) absolute_threshold_value = traits.Float( argstr="-abs %s", desc="Specify threshold value as absolute intensity." ) percentage_threshold_value = traits.Float( argstr="-percent %s", desc="Specify threshold value as a percentage of the peak intensity in the input image.", ) invert = traits.Bool(argstr="-invert", position=1, desc="Invert output binary mask") replace_zeros_with_NaN = traits.Bool( argstr="-nan", position=1, desc="Replace all zero values with NaN" ) quiet = traits.Bool( argstr="-quiet", position=1, desc="Do not display information messages or progress status.", ) debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class ThresholdOutputSpec(TraitedSpec): out_file = File(exists=True, desc="The output binary image mask.") class Threshold(CommandLine): """ Create bitwise image by thresholding image intensity. By default, the threshold level is determined using a histogram analysis to cut out the background. 
Otherwise, the threshold intensity can be specified using command line options. Note that only the first study is used for thresholding. Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> thresh = mrt.Threshold() >>> thresh.inputs.in_file = 'wm_mask.mif' >>> thresh.run() # doctest: +SKIP """ _cmd = "threshold" input_spec = ThresholdInputSpec output_spec = ThresholdOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = self.inputs.out_filename if not isdefined(outputs["out_file"]): outputs["out_file"] = op.abspath(self._gen_outfilename()) else: outputs["out_file"] = op.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_thresh.mif" class MedianFilter3DInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="Input images to be smoothed", ) out_filename = File( genfile=True, argstr="%s", position=-1, desc="Output image filename" ) quiet = traits.Bool( argstr="-quiet", position=1, desc="Do not display information messages or progress status.", ) debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class MedianFilter3DOutputSpec(TraitedSpec): out_file = File(exists=True, desc="the output image") class MedianFilter3D(CommandLine): """ Smooth images using a 3x3x3 median filter. 
Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> median3d = mrt.MedianFilter3D() >>> median3d.inputs.in_file = 'mask.mif' >>> median3d.run() # doctest: +SKIP """ _cmd = "median3D" input_spec = MedianFilter3DInputSpec output_spec = MedianFilter3DOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = self.inputs.out_filename if not isdefined(outputs["out_file"]): outputs["out_file"] = op.abspath(self._gen_outfilename()) else: outputs["out_file"] = op.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_median3D.mif" class MRTransformInputSpec(CommandLineInputSpec): in_files = InputMultiPath( File(exists=True), argstr="%s", mandatory=True, position=-2, desc="Input images to be transformed", ) out_filename = File(genfile=True, argstr="%s", position=-1, desc="Output image") invert = traits.Bool( argstr="-inverse", position=1, desc="Invert the specified transform before using it", ) linear_transform = File( exists=True, argstr="-linear %s", position=1, desc=( "Specify a linear transform to apply, in the form of a 3x4 or 4x4 ascii file. " "Note the standard reverse convention is used, " "where the transform maps points in the template image to the moving image. " "Note that the reverse convention is still assumed even if no -template image is supplied." 
), ) replace_transform = traits.Bool( argstr="-replace", position=1, desc="replace the current transform by that specified, rather than applying it to the current transform", ) transformation_file = File( exists=True, argstr="-transform %s", position=1, desc="The transform to apply, in the form of a 4x4 ascii file.", ) template_image = File( exists=True, argstr="-template %s", position=1, desc="Reslice the input image to match the specified template image.", ) reference_image = File( exists=True, argstr="-reference %s", position=1, desc="in case the transform supplied maps from the input image onto a reference image, use this option to specify the reference. Note that this implicitly sets the -replace option.", ) flip_x = traits.Bool( argstr="-flipx", position=1, desc="assume the transform is supplied assuming a coordinate system with the x-axis reversed relative to the MRtrix convention (i.e. x increases from right to left). This is required to handle transform matrices produced by FSL's FLIRT command. 
This is only used in conjunction with the -reference option.", ) quiet = traits.Bool( argstr="-quiet", position=1, desc="Do not display information messages or progress status.", ) debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class MRTransformOutputSpec(TraitedSpec): out_file = File(exists=True, desc="the output image of the transformation") class MRTransform(CommandLine): """ Apply spatial transformations or reslice images Example ------- >>> MRxform = MRTransform() >>> MRxform.inputs.in_files = 'anat_coreg.mif' >>> MRxform.run() # doctest: +SKIP """ _cmd = "mrtransform" input_spec = MRTransformInputSpec output_spec = MRTransformOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = self.inputs.out_filename if not isdefined(outputs["out_file"]): outputs["out_file"] = op.abspath(self._gen_outfilename()) else: outputs["out_file"] = op.abspath(outputs["out_file"]) return outputs def _gen_filename(self, name): if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_files[0]) return name + "_MRTransform.mif" nipype-1.7.0/nipype/interfaces/mrtrix/tensors.py000066400000000000000000000531331413403311400220450ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os.path as op import numpy as np from ... 
import logging from ...utils.filemanip import split_filename from ..base import ( CommandLineInputSpec, CommandLine, BaseInterface, traits, File, TraitedSpec, isdefined, ) iflogger = logging.getLogger("nipype.interface") class DWI2SphericalHarmonicsImageInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="Diffusion-weighted images", ) out_filename = File(genfile=True, argstr="%s", position=-1, desc="Output filename") encoding_file = File( exists=True, argstr="-grad %s", mandatory=True, position=1, desc="Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix", ) maximum_harmonic_order = traits.Float( argstr="-lmax %s", desc="set the maximum harmonic order for the output series. By default, the program will use the highest possible lmax given the number of diffusion-weighted images.", ) normalise = traits.Bool( argstr="-normalise", position=3, desc="normalise the DW signal to the b=0 image" ) class DWI2SphericalHarmonicsImageOutputSpec(TraitedSpec): spherical_harmonics_image = File(exists=True, desc="Spherical harmonics image") class DWI2SphericalHarmonicsImage(CommandLine): """ Convert base diffusion-weighted images to their spherical harmonic representation. This program outputs the spherical harmonic decomposition for the set measured signal attenuations. The signal attenuations are calculated by identifying the b-zero images from the diffusion encoding supplied (i.e. those with zero as the b-value), and dividing the remaining signals by the mean b-zero signal intensity. The spherical harmonic decomposition is then calculated by least-squares linear fitting. Note that this program makes use of implied symmetries in the diffusion profile. First, the fact the signal attenuation profile is real implies that it has conjugate symmetry, i.e. 
Y(l,-m) = Y(l,m)* (where * denotes the complex conjugate). Second, the diffusion profile should be antipodally symmetric (i.e. S(x) = S(-x)), implying that all odd l components should be zero. Therefore, this program only computes the even elements. Note that the spherical harmonics equations used here differ slightly from those conventionally used, in that the (-1)^m factor has been omitted. This should be taken into account in all subsequent calculations. Each volume in the output image corresponds to a different spherical harmonic component, according to the following convention: * [0] Y(0,0) * [1] Im {Y(2,2)} * [2] Im {Y(2,1)} * [3] Y(2,0) * [4] Re {Y(2,1)} * [5] Re {Y(2,2)} * [6] Im {Y(4,4)} * [7] Im {Y(4,3)} Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> dwi2SH = mrt.DWI2SphericalHarmonicsImage() >>> dwi2SH.inputs.in_file = 'diffusion.nii' >>> dwi2SH.inputs.encoding_file = 'encoding.txt' >>> dwi2SH.run() # doctest: +SKIP """ _cmd = "dwi2SH" input_spec = DWI2SphericalHarmonicsImageInputSpec output_spec = DWI2SphericalHarmonicsImageOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["spherical_harmonics_image"] = self.inputs.out_filename if not isdefined(outputs["spherical_harmonics_image"]): outputs["spherical_harmonics_image"] = op.abspath(self._gen_outfilename()) else: outputs["spherical_harmonics_image"] = op.abspath( outputs["spherical_harmonics_image"] ) return outputs def _gen_filename(self, name): if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_SH.mif" class ConstrainedSphericalDeconvolutionInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-3, desc="diffusion-weighted image", ) response_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="the diffusion-weighted signal response function for a single fibre population (see 
EstimateResponse)", ) out_filename = File(genfile=True, argstr="%s", position=-1, desc="Output filename") mask_image = File( exists=True, argstr="-mask %s", position=2, desc="only perform computation within the specified binary brain mask image", ) encoding_file = File( exists=True, argstr="-grad %s", position=1, desc="Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix", ) filter_file = File( exists=True, argstr="-filter %s", position=-2, desc="a text file containing the filtering coefficients for each even harmonic order." "the linear frequency filtering parameters used for the initial linear spherical deconvolution step (default = [ 1 1 1 0 0 ]).", ) lambda_value = traits.Float( argstr="-lambda %s", desc="the regularisation parameter lambda that controls the strength of the constraint (default = 1.0).", ) maximum_harmonic_order = traits.Int( argstr="-lmax %s", desc="set the maximum harmonic order for the output series. 
By default, the program will use the highest possible lmax given the number of diffusion-weighted images.", ) threshold_value = traits.Float( argstr="-threshold %s", desc="the threshold below which the amplitude of the FOD is assumed to be zero, expressed as a fraction of the mean value of the initial FOD (default = 0.1)", ) iterations = traits.Int( argstr="-niter %s", desc="the maximum number of iterations to perform for each voxel (default = 50)", ) debug = traits.Bool(argstr="-debug", desc="Display debugging messages.") directions_file = File( exists=True, argstr="-directions %s", position=-2, desc="a text file containing the [ el az ] pairs for the directions: Specify the directions over which to apply the non-negativity constraint (by default, the built-in 300 direction set is used)", ) normalise = traits.Bool( argstr="-normalise", position=3, desc="normalise the DW signal to the b=0 image" ) class ConstrainedSphericalDeconvolutionOutputSpec(TraitedSpec): spherical_harmonics_image = File(exists=True, desc="Spherical harmonics image") class ConstrainedSphericalDeconvolution(CommandLine): """ Perform non-negativity constrained spherical deconvolution. Note that this program makes use of implied symmetries in the diffusion profile. First, the fact the signal attenuation profile is real implies that it has conjugate symmetry, i.e. Y(l,-m) = Y(l,m)* (where * denotes the complex conjugate). Second, the diffusion profile should be antipodally symmetric (i.e. S(x) = S(-x)), implying that all odd l components should be zero. Therefore, this program only computes the even elements. Note that the spherical harmonics equations used here differ slightly from those conventionally used, in that the (-1)^m factor has been omitted. This should be taken into account in all subsequent calculations. 
Each volume in the output image corresponds to a different spherical harmonic component, according to the following convention: * [0] Y(0,0) * [1] Im {Y(2,2)} * [2] Im {Y(2,1)} * [3] Y(2,0) * [4] Re {Y(2,1)} * [5] Re {Y(2,2)} * [6] Im {Y(4,4)} * [7] Im {Y(4,3)} Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> csdeconv = mrt.ConstrainedSphericalDeconvolution() >>> csdeconv.inputs.in_file = 'dwi.mif' >>> csdeconv.inputs.encoding_file = 'encoding.txt' >>> csdeconv.run() # doctest: +SKIP """ _cmd = "csdeconv" input_spec = ConstrainedSphericalDeconvolutionInputSpec output_spec = ConstrainedSphericalDeconvolutionOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["spherical_harmonics_image"] = self.inputs.out_filename if not isdefined(outputs["spherical_harmonics_image"]): outputs["spherical_harmonics_image"] = op.abspath(self._gen_outfilename()) else: outputs["spherical_harmonics_image"] = op.abspath( outputs["spherical_harmonics_image"] ) return outputs def _gen_filename(self, name): if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_CSD.mif" class EstimateResponseForSHInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-3, desc="Diffusion-weighted images", ) mask_image = File( exists=True, mandatory=True, argstr="%s", position=-2, desc="only perform computation within the specified binary brain mask image", ) out_filename = File(genfile=True, argstr="%s", position=-1, desc="Output filename") encoding_file = File( exists=True, argstr="-grad %s", mandatory=True, position=1, desc="Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). 
See FSL2MRTrix", ) maximum_harmonic_order = traits.Int( argstr="-lmax %s", desc="set the maximum harmonic order for the output series. By default, the program will use the highest possible lmax given the number of diffusion-weighted images.", ) normalise = traits.Bool( argstr="-normalise", desc="normalise the DW signal to the b=0 image" ) quiet = traits.Bool( argstr="-quiet", desc="Do not display information messages or progress status." ) debug = traits.Bool(argstr="-debug", desc="Display debugging messages.") class EstimateResponseForSHOutputSpec(TraitedSpec): response = File(exists=True, desc="Spherical harmonics image") class EstimateResponseForSH(CommandLine): """ Estimates the fibre response function for use in spherical deconvolution. Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> estresp = mrt.EstimateResponseForSH() >>> estresp.inputs.in_file = 'dwi.mif' >>> estresp.inputs.mask_image = 'dwi_WMProb.mif' >>> estresp.inputs.encoding_file = 'encoding.txt' >>> estresp.run() # doctest: +SKIP """ _cmd = "estimate_response" input_spec = EstimateResponseForSHInputSpec output_spec = EstimateResponseForSHOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["response"] = self.inputs.out_filename if not isdefined(outputs["response"]): outputs["response"] = op.abspath(self._gen_outfilename()) else: outputs["response"] = op.abspath(outputs["response"]) return outputs def _gen_filename(self, name): if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_ER.txt" def concat_files(bvec_file, bval_file, invert_x, invert_y, invert_z): bvecs = np.loadtxt(bvec_file) bvals = np.loadtxt(bval_file) if np.shape(bvecs)[0] > np.shape(bvecs)[1]: bvecs = np.transpose(bvecs) if invert_x: bvecs[0, :] = -bvecs[0, :] iflogger.info("Inverting b-vectors in the x direction") if invert_y: bvecs[1, :] = -bvecs[1, :] iflogger.info("Inverting 
b-vectors in the y direction") if invert_z: bvecs[2, :] = -bvecs[2, :] iflogger.info("Inverting b-vectors in the z direction") iflogger.info(np.shape(bvecs)) iflogger.info(np.shape(bvals)) encoding = np.transpose(np.vstack((bvecs, bvals))) _, bvec, _ = split_filename(bvec_file) _, bval, _ = split_filename(bval_file) out_encoding_file = bvec + "_" + bval + ".txt" np.savetxt(out_encoding_file, encoding) return out_encoding_file class FSL2MRTrixInputSpec(TraitedSpec): bvec_file = File( exists=True, mandatory=True, desc="FSL b-vectors file (3xN text file)" ) bval_file = File( exists=True, mandatory=True, desc="FSL b-values file (1xN text file)" ) invert_x = traits.Bool( False, usedefault=True, desc="Inverts the b-vectors along the x-axis" ) invert_y = traits.Bool( False, usedefault=True, desc="Inverts the b-vectors along the y-axis" ) invert_z = traits.Bool( False, usedefault=True, desc="Inverts the b-vectors along the z-axis" ) out_encoding_file = File(genfile=True, desc="Output encoding filename") class FSL2MRTrixOutputSpec(TraitedSpec): encoding_file = File( desc="The gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient" "and b gives the b-value in units (1000 s/mm^2)." ) class FSL2MRTrix(BaseInterface): """ Converts separate b-values and b-vectors from text files (FSL style) into a 4xN text file in which each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). 
Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> fsl2mrtrix = mrt.FSL2MRTrix() >>> fsl2mrtrix.inputs.bvec_file = 'bvecs' >>> fsl2mrtrix.inputs.bval_file = 'bvals' >>> fsl2mrtrix.inputs.invert_y = True >>> fsl2mrtrix.run() # doctest: +SKIP """ input_spec = FSL2MRTrixInputSpec output_spec = FSL2MRTrixOutputSpec def _run_interface(self, runtime): encoding = concat_files( self.inputs.bvec_file, self.inputs.bval_file, self.inputs.invert_x, self.inputs.invert_y, self.inputs.invert_z, ) return runtime def _list_outputs(self): outputs = self.output_spec().get() outputs["encoding_file"] = op.abspath(self._gen_filename("out_encoding_file")) return outputs def _gen_filename(self, name): if name == "out_encoding_file": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, bvec, _ = split_filename(self.inputs.bvec_file) _, bval, _ = split_filename(self.inputs.bval_file) return bvec + "_" + bval + ".txt" class GenerateDirectionsInputSpec(CommandLineInputSpec): num_dirs = traits.Int( mandatory=True, argstr="%s", position=-2, desc="the number of directions to generate.", ) power = traits.Float( argstr="-power %s", desc="specify exponent to use for repulsion power law." ) niter = traits.Int( argstr="-niter %s", desc="specify the maximum number of iterations to perform." ) display_info = traits.Bool(argstr="-info", desc="Display information messages.") quiet_display = traits.Bool( argstr="-quiet", desc="do not display information messages or progress status." ) display_debug = traits.Bool(argstr="-debug", desc="Display debugging messages.") out_file = File( name_source=["num_dirs"], name_template="directions_%d.txt", argstr="%s", hash_files=False, position=-1, desc="the text file to write the directions to, as [ az el ] pairs.", ) class GenerateDirectionsOutputSpec(TraitedSpec): out_file = File(exists=True, desc="directions file") class GenerateDirections(CommandLine): """ generate a set of directions evenly distributed over a hemisphere. 
Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> gendir = mrt.GenerateDirections() >>> gendir.inputs.num_dirs = 300 >>> gendir.run() # doctest: +SKIP """ _cmd = "gendir" input_spec = GenerateDirectionsInputSpec output_spec = GenerateDirectionsOutputSpec class FindShPeaksInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-3, desc="the input image of SH coefficients.", ) directions_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="the set of directions to use as seeds for the peak finding", ) peaks_image = File( exists=True, argstr="-peaks %s", desc="the program will try to find the peaks that most closely match those in the image provided", ) num_peaks = traits.Int( argstr="-num %s", desc="the number of peaks to extract (default is 3)" ) peak_directions = traits.List( traits.Float, argstr="-direction %s", sep=" ", minlen=2, maxlen=2, desc="phi theta. the direction of a peak to estimate. The algorithm will attempt to find the same number of peaks as have been specified using this option " " phi: the azimuthal angle of the direction (in degrees). theta: the elevation angle of the direction (in degrees, from the vertical z-axis)", ) peak_threshold = traits.Float( argstr="-threshold %s", desc="only peak amplitudes greater than the threshold will be considered", ) display_info = traits.Bool(argstr="-info", desc="Display information messages.") quiet_display = traits.Bool( argstr="-quiet", desc="do not display information messages or progress status." ) display_debug = traits.Bool(argstr="-debug", desc="Display debugging messages.") out_file = File( name_template="%s_peak_dirs.mif", keep_extension=False, argstr="%s", hash_files=False, position=-1, desc="the output image. 
Each volume corresponds to the x, y & z component of each peak direction vector in turn", name_source=["in_file"], ) class FindShPeaksOutputSpec(TraitedSpec): out_file = File(exists=True, desc="Peak directions image") class FindShPeaks(CommandLine): """ identify the orientations of the N largest peaks of a SH profile Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> shpeaks = mrt.FindShPeaks() >>> shpeaks.inputs.in_file = 'csd.mif' >>> shpeaks.inputs.directions_file = 'dirs.txt' >>> shpeaks.inputs.num_peaks = 2 >>> shpeaks.run() # doctest: +SKIP """ _cmd = "find_SH_peaks" input_spec = FindShPeaksInputSpec output_spec = FindShPeaksOutputSpec class Directions2AmplitudeInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="the input directions image. Each volume corresponds to the x, y & z component of each direction vector in turn.", ) peaks_image = File( exists=True, argstr="-peaks %s", desc="the program will try to find the peaks that most closely match those in the image provided", ) num_peaks = traits.Int( argstr="-num %s", desc="the number of peaks to extract (default is 3)" ) peak_directions = traits.List( traits.Float, argstr="-direction %s", sep=" ", minlen=2, maxlen=2, desc="phi theta. the direction of a peak to estimate. The algorithm will attempt to find the same number of peaks as have been specified using this option " " phi: the azimuthal angle of the direction (in degrees). theta: the elevation angle of the direction (in degrees, from the vertical z-axis)", ) display_info = traits.Bool(argstr="-info", desc="Display information messages.") quiet_display = traits.Bool( argstr="-quiet", desc="do not display information messages or progress status." 
) display_debug = traits.Bool(argstr="-debug", desc="Display debugging messages.") out_file = File( name_template="%s_amplitudes.mif", keep_extension=False, argstr="%s", hash_files=False, position=-1, desc="the output amplitudes image", name_source=["in_file"], ) class Directions2AmplitudeOutputSpec(TraitedSpec): out_file = File(exists=True, desc="amplitudes image") class Directions2Amplitude(CommandLine): """ convert directions image to amplitudes Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> amplitudes = mrt.Directions2Amplitude() >>> amplitudes.inputs.in_file = 'peak_directions.mif' >>> amplitudes.run() # doctest: +SKIP """ _cmd = "dir2amp" input_spec = Directions2AmplitudeInputSpec output_spec = Directions2AmplitudeOutputSpec nipype-1.7.0/nipype/interfaces/mrtrix/tests/000077500000000000000000000000001413403311400211335ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/mrtrix/tests/__init__.py000066400000000000000000000000301413403311400232350ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_ConstrainedSphericalDeconvolution.py000066400000000000000000000044471413403311400324620ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tensors import ConstrainedSphericalDeconvolution def test_ConstrainedSphericalDeconvolution_inputs(): input_map = dict( args=dict( argstr="%s", ), debug=dict( argstr="-debug", ), directions_file=dict( argstr="-directions %s", extensions=None, position=-2, ), encoding_file=dict( argstr="-grad %s", extensions=None, position=1, ), environ=dict( nohash=True, usedefault=True, ), filter_file=dict( argstr="-filter %s", extensions=None, position=-2, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), iterations=dict( argstr="-niter %s", ), lambda_value=dict( argstr="-lambda %s", ), mask_image=dict( argstr="-mask %s", extensions=None, position=2, ), maximum_harmonic_order=dict( argstr="-lmax %s", ), 
normalise=dict( argstr="-normalise", position=3, ), out_filename=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), response_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), threshold_value=dict( argstr="-threshold %s", ), ) inputs = ConstrainedSphericalDeconvolution.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ConstrainedSphericalDeconvolution_outputs(): output_map = dict( spherical_harmonics_image=dict( extensions=None, ), ) outputs = ConstrainedSphericalDeconvolution.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_DWI2SphericalHarmonicsImage.py000066400000000000000000000030071413403311400307430ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tensors import DWI2SphericalHarmonicsImage def test_DWI2SphericalHarmonicsImage_inputs(): input_map = dict( args=dict( argstr="%s", ), encoding_file=dict( argstr="-grad %s", extensions=None, mandatory=True, position=1, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), maximum_harmonic_order=dict( argstr="-lmax %s", ), normalise=dict( argstr="-normalise", position=3, ), out_filename=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), ) inputs = DWI2SphericalHarmonicsImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DWI2SphericalHarmonicsImage_outputs(): output_map = dict( spherical_harmonics_image=dict( extensions=None, ), ) outputs = DWI2SphericalHarmonicsImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_DWI2Tensor.py000066400000000000000000000034271413403311400255020ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import DWI2Tensor def test_DWI2Tensor_inputs(): input_map = dict( args=dict( argstr="%s", ), debug=dict( argstr="-debug", position=1, ), encoding_file=dict( argstr="-grad %s", extensions=None, position=2, ), environ=dict( nohash=True, usedefault=True, ), ignore_slice_by_volume=dict( argstr="-ignoreslices %s", position=2, sep=" ", ), ignore_volumes=dict( argstr="-ignorevolumes %s", position=2, sep=" ", ), in_file=dict( argstr="%s", mandatory=True, position=-2, ), mask=dict( argstr="-mask %s", extensions=None, ), out_filename=dict( argstr="%s", extensions=None, name_source="in_file", name_template="%s_tensor.mif", output_name="tensor", position=-1, ), quiet=dict( argstr="-quiet", position=1, ), ) inputs = DWI2Tensor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DWI2Tensor_outputs(): output_map = dict( tensor=dict( extensions=None, ), ) outputs = DWI2Tensor.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_DiffusionTensorStreamlineTrack.py000066400000000000000000000077731413403311400317440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tracking import DiffusionTensorStreamlineTrack def test_DiffusionTensorStreamlineTrack_inputs(): input_map = dict( args=dict( argstr="%s", ), cutoff_value=dict( argstr="-cutoff %s", units="NA", ), desired_number_of_tracks=dict( argstr="-number %d", ), do_not_precompute=dict( argstr="-noprecomputed", ), environ=dict( 
nohash=True, usedefault=True, ), exclude_file=dict( argstr="-exclude %s", extensions=None, xor=["exclude_file", "exclude_spec"], ), exclude_spec=dict( argstr="-exclude %s", position=2, sep=",", units="mm", xor=["exclude_file", "exclude_spec"], ), gradient_encoding_file=dict( argstr="-grad %s", extensions=None, mandatory=True, position=-2, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), include_file=dict( argstr="-include %s", extensions=None, xor=["include_file", "include_spec"], ), include_spec=dict( argstr="-include %s", position=2, sep=",", units="mm", xor=["include_file", "include_spec"], ), initial_cutoff_value=dict( argstr="-initcutoff %s", units="NA", ), initial_direction=dict( argstr="-initdirection %s", units="voxels", ), inputmodel=dict( argstr="%s", position=-3, usedefault=True, ), mask_file=dict( argstr="-mask %s", extensions=None, xor=["mask_file", "mask_spec"], ), mask_spec=dict( argstr="-mask %s", position=2, sep=",", units="mm", xor=["mask_file", "mask_spec"], ), maximum_number_of_tracks=dict( argstr="-maxnum %d", ), maximum_tract_length=dict( argstr="-length %s", units="mm", ), minimum_radius_of_curvature=dict( argstr="-curvature %s", units="mm", ), minimum_tract_length=dict( argstr="-minlength %s", units="mm", ), no_mask_interpolation=dict( argstr="-nomaskinterp", ), out_file=dict( argstr="%s", extensions=None, name_source=["in_file"], name_template="%s_tracked.tck", output_name="tracked", position=-1, ), seed_file=dict( argstr="-seed %s", extensions=None, xor=["seed_file", "seed_spec"], ), seed_spec=dict( argstr="-seed %s", position=2, sep=",", units="mm", xor=["seed_file", "seed_spec"], ), step_size=dict( argstr="-step %s", units="mm", ), stop=dict( argstr="-stop", ), unidirectional=dict( argstr="-unidirectional", ), ) inputs = DiffusionTensorStreamlineTrack.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value 
def test_DiffusionTensorStreamlineTrack_outputs(): output_map = dict( tracked=dict( extensions=None, ), ) outputs = DiffusionTensorStreamlineTrack.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_Directions2Amplitude.py000066400000000000000000000033261413403311400276320ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tensors import Directions2Amplitude def test_Directions2Amplitude_inputs(): input_map = dict( args=dict( argstr="%s", ), display_debug=dict( argstr="-debug", ), display_info=dict( argstr="-info", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), num_peaks=dict( argstr="-num %s", ), out_file=dict( argstr="%s", extensions=None, hash_files=False, keep_extension=False, name_source=["in_file"], name_template="%s_amplitudes.mif", position=-1, ), peak_directions=dict( argstr="-direction %s", sep=" ", ), peaks_image=dict( argstr="-peaks %s", extensions=None, ), quiet_display=dict( argstr="-quiet", ), ) inputs = Directions2Amplitude.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Directions2Amplitude_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Directions2Amplitude.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_Erode.py000066400000000000000000000026211413403311400246330ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Erode def test_Erode_inputs(): input_map = dict( args=dict( argstr="%s", ), debug=dict( 
argstr="-debug", position=1, ), dilate=dict( argstr="-dilate", position=1, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), number_of_passes=dict( argstr="-npass %s", ), out_filename=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), quiet=dict( argstr="-quiet", position=1, ), ) inputs = Erode.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Erode_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Erode.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_EstimateResponseForSH.py000066400000000000000000000033071413403311400277730ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tensors import EstimateResponseForSH def test_EstimateResponseForSH_inputs(): input_map = dict( args=dict( argstr="%s", ), debug=dict( argstr="-debug", ), encoding_file=dict( argstr="-grad %s", extensions=None, mandatory=True, position=1, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), mask_image=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), maximum_harmonic_order=dict( argstr="-lmax %s", ), normalise=dict( argstr="-normalise", ), out_filename=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), quiet=dict( argstr="-quiet", ), ) inputs = EstimateResponseForSH.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_EstimateResponseForSH_outputs(): output_map = dict( response=dict( extensions=None, ), ) outputs = 
EstimateResponseForSH.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_FSL2MRTrix.py000066400000000000000000000022221413403311400254060ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tensors import FSL2MRTrix def test_FSL2MRTrix_inputs(): input_map = dict( bval_file=dict( extensions=None, mandatory=True, ), bvec_file=dict( extensions=None, mandatory=True, ), invert_x=dict( usedefault=True, ), invert_y=dict( usedefault=True, ), invert_z=dict( usedefault=True, ), out_encoding_file=dict( extensions=None, genfile=True, ), ) inputs = FSL2MRTrix.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FSL2MRTrix_outputs(): output_map = dict( encoding_file=dict( extensions=None, ), ) outputs = FSL2MRTrix.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_FilterTracks.py000066400000000000000000000044031413403311400261720ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tracking import FilterTracks def test_FilterTracks_inputs(): input_map = dict( args=dict( argstr="%s", ), debug=dict( argstr="-debug", position=1, ), environ=dict( nohash=True, usedefault=True, ), exclude_file=dict( argstr="-exclude %s", extensions=None, xor=["exclude_file", "exclude_spec"], ), exclude_spec=dict( argstr="-exclude %s", position=2, sep=",", units="mm", xor=["exclude_file", "exclude_spec"], ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), include_file=dict( argstr="-include %s", extensions=None, xor=["include_file", "include_spec"], ), 
include_spec=dict( argstr="-include %s", position=2, sep=",", units="mm", xor=["include_file", "include_spec"], ), invert=dict( argstr="-invert", ), minimum_tract_length=dict( argstr="-minlength %s", units="mm", ), no_mask_interpolation=dict( argstr="-nomaskinterp", ), out_file=dict( argstr="%s", extensions=None, hash_files=False, name_source=["in_file"], name_template="%s_filt", position=-1, ), quiet=dict( argstr="-quiet", position=1, ), ) inputs = FilterTracks.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FilterTracks_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = FilterTracks.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_FindShPeaks.py000066400000000000000000000036101413403311400257330ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tensors import FindShPeaks def test_FindShPeaks_inputs(): input_map = dict( args=dict( argstr="%s", ), directions_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), display_debug=dict( argstr="-debug", ), display_info=dict( argstr="-info", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), num_peaks=dict( argstr="-num %s", ), out_file=dict( argstr="%s", extensions=None, hash_files=False, keep_extension=False, name_source=["in_file"], name_template="%s_peak_dirs.mif", position=-1, ), peak_directions=dict( argstr="-direction %s", sep=" ", ), peak_threshold=dict( argstr="-threshold %s", ), peaks_image=dict( argstr="-peaks %s", extensions=None, ), quiet_display=dict( argstr="-quiet", ), ) inputs = FindShPeaks.input_spec() for key, metadata in list(input_map.items()): for metakey, 
value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FindShPeaks_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = FindShPeaks.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_GenerateDirections.py000066400000000000000000000030101413403311400273440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tensors import GenerateDirections def test_GenerateDirections_inputs(): input_map = dict( args=dict( argstr="%s", ), display_debug=dict( argstr="-debug", ), display_info=dict( argstr="-info", ), environ=dict( nohash=True, usedefault=True, ), niter=dict( argstr="-niter %s", ), num_dirs=dict( argstr="%s", mandatory=True, position=-2, ), out_file=dict( argstr="%s", extensions=None, hash_files=False, name_source=["num_dirs"], name_template="directions_%d.txt", position=-1, ), power=dict( argstr="-power %s", ), quiet_display=dict( argstr="-quiet", ), ) inputs = GenerateDirections.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GenerateDirections_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = GenerateDirections.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_GenerateWhiteMatterMask.py000066400000000000000000000030461413403311400303230ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import GenerateWhiteMatterMask def test_GenerateWhiteMatterMask_inputs(): input_map = dict( args=dict( argstr="%s", ), binary_mask=dict( argstr="%s", 
extensions=None, mandatory=True, position=-2, ), encoding_file=dict( argstr="-grad %s", extensions=None, mandatory=True, position=1, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), noise_level_margin=dict( argstr="-margin %s", ), out_WMProb_filename=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), ) inputs = GenerateWhiteMatterMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GenerateWhiteMatterMask_outputs(): output_map = dict( WMprobabilitymap=dict( extensions=None, ), ) outputs = GenerateWhiteMatterMask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_MRConvert.py000066400000000000000000000041201413403311400254500ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import MRConvert def test_MRConvert_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), extension=dict( position=2, usedefault=True, ), extract_at_axis=dict( argstr="-coord %s", position=1, ), extract_at_coordinate=dict( argstr="%s", position=2, sep=",", ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), layout=dict( argstr="-output %s", position=2, ), offset_bias=dict( argstr="-scale %d", position=3, units="mm", ), out_filename=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), output_datatype=dict( argstr="-output %s", position=2, ), prs=dict( argstr="-prs", position=3, ), replace_NaN_with_zero=dict( argstr="-zero", position=3, ), resample=dict( argstr="-scale %d", position=3, units="mm", ), voxel_dims=dict( argstr="-vox %s", position=3, sep=",", ), ) inputs = 
MRConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRConvert_outputs(): output_map = dict( converted=dict( extensions=None, ), ) outputs = MRConvert.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_MRMultiply.py000066400000000000000000000023561413403311400256600ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import MRMultiply def test_MRMultiply_inputs(): input_map = dict( args=dict( argstr="%s", ), debug=dict( argstr="-debug", position=1, ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( argstr="%s", mandatory=True, position=-2, ), out_filename=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), quiet=dict( argstr="-quiet", position=1, ), ) inputs = MRMultiply.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRMultiply_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MRMultiply.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_MRTransform.py000066400000000000000000000040111413403311400260020ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import MRTransform def test_MRTransform_inputs(): input_map = dict( args=dict( argstr="%s", ), debug=dict( argstr="-debug", position=1, ), environ=dict( nohash=True, usedefault=True, ), flip_x=dict( argstr="-flipx", position=1, ), in_files=dict( argstr="%s", mandatory=True, position=-2, ), 
invert=dict( argstr="-inverse", position=1, ), linear_transform=dict( argstr="-linear %s", extensions=None, position=1, ), out_filename=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), quiet=dict( argstr="-quiet", position=1, ), reference_image=dict( argstr="-reference %s", extensions=None, position=1, ), replace_transform=dict( argstr="-replace", position=1, ), template_image=dict( argstr="-template %s", extensions=None, position=1, ), transformation_file=dict( argstr="-transform %s", extensions=None, position=1, ), ) inputs = MRTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRTransform_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MRTransform.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_MRTrix2TrackVis.py000066400000000000000000000021641413403311400265150ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..convert import MRTrix2TrackVis def test_MRTrix2TrackVis_inputs(): input_map = dict( image_file=dict( extensions=None, ), in_file=dict( extensions=None, mandatory=True, ), matrix_file=dict( extensions=None, ), out_filename=dict( extensions=None, genfile=True, usedefault=True, ), registration_image_file=dict( extensions=None, ), ) inputs = MRTrix2TrackVis.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRTrix2TrackVis_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MRTrix2TrackVis.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == 
value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_MRTrixInfo.py000066400000000000000000000016171413403311400256020ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import MRTrixInfo def test_MRTrixInfo_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), ) inputs = MRTrixInfo.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRTrixInfo_outputs(): output_map = dict() outputs = MRTrixInfo.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_MRTrixViewer.py000066400000000000000000000020451413403311400261440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import MRTrixViewer def test_MRTrixViewer_inputs(): input_map = dict( args=dict( argstr="%s", ), debug=dict( argstr="-debug", position=1, ), environ=dict( nohash=True, usedefault=True, ), in_files=dict( argstr="%s", mandatory=True, position=-2, ), quiet=dict( argstr="-quiet", position=1, ), ) inputs = MRTrixViewer.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRTrixViewer_outputs(): output_map = dict() outputs = MRTrixViewer.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_MedianFilter3D.py000066400000000000000000000024361413403311400263330ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT 
from ..preprocess import MedianFilter3D def test_MedianFilter3D_inputs(): input_map = dict( args=dict( argstr="%s", ), debug=dict( argstr="-debug", position=1, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), out_filename=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), quiet=dict( argstr="-quiet", position=1, ), ) inputs = MedianFilter3D.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MedianFilter3D_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MedianFilter3D.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value test_auto_ProbabilisticSphericallyDeconvolutedStreamlineTrack.py000066400000000000000000000100261413403311400360670ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/mrtrix/tests# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tracking import ProbabilisticSphericallyDeconvolutedStreamlineTrack def test_ProbabilisticSphericallyDeconvolutedStreamlineTrack_inputs(): input_map = dict( args=dict( argstr="%s", ), cutoff_value=dict( argstr="-cutoff %s", units="NA", ), desired_number_of_tracks=dict( argstr="-number %d", ), do_not_precompute=dict( argstr="-noprecomputed", ), environ=dict( nohash=True, usedefault=True, ), exclude_file=dict( argstr="-exclude %s", extensions=None, xor=["exclude_file", "exclude_spec"], ), exclude_spec=dict( argstr="-exclude %s", position=2, sep=",", units="mm", xor=["exclude_file", "exclude_spec"], ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), include_file=dict( argstr="-include %s", extensions=None, xor=["include_file", "include_spec"], ), include_spec=dict( argstr="-include %s", position=2, sep=",", units="mm", xor=["include_file", 
"include_spec"], ), initial_cutoff_value=dict( argstr="-initcutoff %s", units="NA", ), initial_direction=dict( argstr="-initdirection %s", units="voxels", ), inputmodel=dict( argstr="%s", position=-3, usedefault=True, ), mask_file=dict( argstr="-mask %s", extensions=None, xor=["mask_file", "mask_spec"], ), mask_spec=dict( argstr="-mask %s", position=2, sep=",", units="mm", xor=["mask_file", "mask_spec"], ), maximum_number_of_tracks=dict( argstr="-maxnum %d", ), maximum_number_of_trials=dict( argstr="-trials %s", ), maximum_tract_length=dict( argstr="-length %s", units="mm", ), minimum_radius_of_curvature=dict( argstr="-curvature %s", units="mm", ), minimum_tract_length=dict( argstr="-minlength %s", units="mm", ), no_mask_interpolation=dict( argstr="-nomaskinterp", ), out_file=dict( argstr="%s", extensions=None, name_source=["in_file"], name_template="%s_tracked.tck", output_name="tracked", position=-1, ), seed_file=dict( argstr="-seed %s", extensions=None, xor=["seed_file", "seed_spec"], ), seed_spec=dict( argstr="-seed %s", position=2, sep=",", units="mm", xor=["seed_file", "seed_spec"], ), step_size=dict( argstr="-step %s", units="mm", ), stop=dict( argstr="-stop", ), unidirectional=dict( argstr="-unidirectional", ), ) inputs = ProbabilisticSphericallyDeconvolutedStreamlineTrack.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ProbabilisticSphericallyDeconvolutedStreamlineTrack_outputs(): output_map = dict( tracked=dict( extensions=None, ), ) outputs = ProbabilisticSphericallyDeconvolutedStreamlineTrack.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_SphericallyDeconvolutedStreamlineTrack.py000066400000000000000000000076021413403311400334450ustar00rootroot00000000000000# 
AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tracking import SphericallyDeconvolutedStreamlineTrack def test_SphericallyDeconvolutedStreamlineTrack_inputs(): input_map = dict( args=dict( argstr="%s", ), cutoff_value=dict( argstr="-cutoff %s", units="NA", ), desired_number_of_tracks=dict( argstr="-number %d", ), do_not_precompute=dict( argstr="-noprecomputed", ), environ=dict( nohash=True, usedefault=True, ), exclude_file=dict( argstr="-exclude %s", extensions=None, xor=["exclude_file", "exclude_spec"], ), exclude_spec=dict( argstr="-exclude %s", position=2, sep=",", units="mm", xor=["exclude_file", "exclude_spec"], ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), include_file=dict( argstr="-include %s", extensions=None, xor=["include_file", "include_spec"], ), include_spec=dict( argstr="-include %s", position=2, sep=",", units="mm", xor=["include_file", "include_spec"], ), initial_cutoff_value=dict( argstr="-initcutoff %s", units="NA", ), initial_direction=dict( argstr="-initdirection %s", units="voxels", ), inputmodel=dict( argstr="%s", position=-3, usedefault=True, ), mask_file=dict( argstr="-mask %s", extensions=None, xor=["mask_file", "mask_spec"], ), mask_spec=dict( argstr="-mask %s", position=2, sep=",", units="mm", xor=["mask_file", "mask_spec"], ), maximum_number_of_tracks=dict( argstr="-maxnum %d", ), maximum_tract_length=dict( argstr="-length %s", units="mm", ), minimum_radius_of_curvature=dict( argstr="-curvature %s", units="mm", ), minimum_tract_length=dict( argstr="-minlength %s", units="mm", ), no_mask_interpolation=dict( argstr="-nomaskinterp", ), out_file=dict( argstr="%s", extensions=None, name_source=["in_file"], name_template="%s_tracked.tck", output_name="tracked", position=-1, ), seed_file=dict( argstr="-seed %s", extensions=None, xor=["seed_file", "seed_spec"], ), seed_spec=dict( argstr="-seed %s", position=2, sep=",", units="mm", xor=["seed_file", "seed_spec"], ), step_size=dict( argstr="-step %s", 
units="mm", ), stop=dict( argstr="-stop", ), unidirectional=dict( argstr="-unidirectional", ), ) inputs = SphericallyDeconvolutedStreamlineTrack.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SphericallyDeconvolutedStreamlineTrack_outputs(): output_map = dict( tracked=dict( extensions=None, ), ) outputs = SphericallyDeconvolutedStreamlineTrack.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_StreamlineTrack.py000066400000000000000000000074171413403311400266750ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tracking import StreamlineTrack def test_StreamlineTrack_inputs(): input_map = dict( args=dict( argstr="%s", ), cutoff_value=dict( argstr="-cutoff %s", units="NA", ), desired_number_of_tracks=dict( argstr="-number %d", ), do_not_precompute=dict( argstr="-noprecomputed", ), environ=dict( nohash=True, usedefault=True, ), exclude_file=dict( argstr="-exclude %s", extensions=None, xor=["exclude_file", "exclude_spec"], ), exclude_spec=dict( argstr="-exclude %s", position=2, sep=",", units="mm", xor=["exclude_file", "exclude_spec"], ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), include_file=dict( argstr="-include %s", extensions=None, xor=["include_file", "include_spec"], ), include_spec=dict( argstr="-include %s", position=2, sep=",", units="mm", xor=["include_file", "include_spec"], ), initial_cutoff_value=dict( argstr="-initcutoff %s", units="NA", ), initial_direction=dict( argstr="-initdirection %s", units="voxels", ), inputmodel=dict( argstr="%s", position=-3, usedefault=True, ), mask_file=dict( argstr="-mask %s", extensions=None, xor=["mask_file", "mask_spec"], ), mask_spec=dict( argstr="-mask %s", 
position=2, sep=",", units="mm", xor=["mask_file", "mask_spec"], ), maximum_number_of_tracks=dict( argstr="-maxnum %d", ), maximum_tract_length=dict( argstr="-length %s", units="mm", ), minimum_radius_of_curvature=dict( argstr="-curvature %s", units="mm", ), minimum_tract_length=dict( argstr="-minlength %s", units="mm", ), no_mask_interpolation=dict( argstr="-nomaskinterp", ), out_file=dict( argstr="%s", extensions=None, name_source=["in_file"], name_template="%s_tracked.tck", output_name="tracked", position=-1, ), seed_file=dict( argstr="-seed %s", extensions=None, xor=["seed_file", "seed_spec"], ), seed_spec=dict( argstr="-seed %s", position=2, sep=",", units="mm", xor=["seed_file", "seed_spec"], ), step_size=dict( argstr="-step %s", units="mm", ), stop=dict( argstr="-stop", ), unidirectional=dict( argstr="-unidirectional", ), ) inputs = StreamlineTrack.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_StreamlineTrack_outputs(): output_map = dict( tracked=dict( extensions=None, ), ) outputs = StreamlineTrack.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_Tensor2ApparentDiffusion.py000066400000000000000000000025131413403311400304730ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Tensor2ApparentDiffusion def test_Tensor2ApparentDiffusion_inputs(): input_map = dict( args=dict( argstr="%s", ), debug=dict( argstr="-debug", position=1, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), out_filename=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), quiet=dict( argstr="-quiet", position=1, ), ) inputs = 
Tensor2ApparentDiffusion.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Tensor2ApparentDiffusion_outputs(): output_map = dict( ADC=dict( extensions=None, ), ) outputs = Tensor2ApparentDiffusion.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_Tensor2FractionalAnisotropy.py000066400000000000000000000025311413403311400312240ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Tensor2FractionalAnisotropy def test_Tensor2FractionalAnisotropy_inputs(): input_map = dict( args=dict( argstr="%s", ), debug=dict( argstr="-debug", position=1, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), out_filename=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), quiet=dict( argstr="-quiet", position=1, ), ) inputs = Tensor2FractionalAnisotropy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Tensor2FractionalAnisotropy_outputs(): output_map = dict( FA=dict( extensions=None, ), ) outputs = Tensor2FractionalAnisotropy.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_Tensor2Vector.py000066400000000000000000000024271413403311400263200ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Tensor2Vector def test_Tensor2Vector_inputs(): input_map = dict( args=dict( argstr="%s", ), debug=dict( argstr="-debug", position=1, ), 
environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), out_filename=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), quiet=dict( argstr="-quiet", position=1, ), ) inputs = Tensor2Vector.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Tensor2Vector_outputs(): output_map = dict( vector=dict( extensions=None, ), ) outputs = Tensor2Vector.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_Threshold.py000066400000000000000000000031441413403311400255320ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Threshold def test_Threshold_inputs(): input_map = dict( absolute_threshold_value=dict( argstr="-abs %s", ), args=dict( argstr="%s", ), debug=dict( argstr="-debug", position=1, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), invert=dict( argstr="-invert", position=1, ), out_filename=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), percentage_threshold_value=dict( argstr="-percent %s", ), quiet=dict( argstr="-quiet", position=1, ), replace_zeros_with_NaN=dict( argstr="-nan", position=1, ), ) inputs = Threshold.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Threshold_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Threshold.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/mrtrix/tests/test_auto_Tracks2Prob.py000066400000000000000000000033371413403311400257360ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tracking import Tracks2Prob def test_Tracks2Prob_inputs(): input_map = dict( args=dict( argstr="%s", ), colour=dict( argstr="-colour", position=3, ), environ=dict( nohash=True, usedefault=True, ), fraction=dict( argstr="-fraction", position=3, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), out_filename=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), output_datatype=dict( argstr="-datatype %s", position=2, ), resample=dict( argstr="-resample %d", position=3, units="mm", ), template_file=dict( argstr="-template %s", extensions=None, position=1, ), voxel_dims=dict( argstr="-vox %s", position=2, sep=",", ), ) inputs = Tracks2Prob.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Tracks2Prob_outputs(): output_map = dict( tract_image=dict( extensions=None, ), ) outputs = Tracks2Prob.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix/tracking.py000066400000000000000000000372601413403311400221550ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import os.path as op from ...utils.filemanip import split_filename from ..base import ( CommandLineInputSpec, CommandLine, traits, TraitedSpec, File, isdefined, ) class FilterTracksInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="input tracks to be filtered", ) include_xor = ["include_file", "include_spec"] include_file = File( 
exists=True, argstr="-include %s", desc="inclusion file", xor=include_xor ) include_spec = traits.List( traits.Float, desc="inclusion specification in mm and radius (x y z r)", position=2, argstr="-include %s", minlen=4, maxlen=4, sep=",", units="mm", xor=include_xor, ) exclude_xor = ["exclude_file", "exclude_spec"] exclude_file = File( exists=True, argstr="-exclude %s", desc="exclusion file", xor=exclude_xor ) exclude_spec = traits.List( traits.Float, desc="exclusion specification in mm and radius (x y z r)", position=2, argstr="-exclude %s", minlen=4, maxlen=4, sep=",", units="mm", xor=exclude_xor, ) minimum_tract_length = traits.Float( argstr="-minlength %s", units="mm", desc="Sets the minimum length of any track in millimeters (default is 10 mm).", ) out_file = File( argstr="%s", position=-1, desc="Output filtered track filename", name_source=["in_file"], hash_files=False, name_template="%s_filt", ) no_mask_interpolation = traits.Bool( argstr="-nomaskinterp", desc="Turns off trilinear interpolation of mask images." ) invert = traits.Bool( argstr="-invert", desc="invert the matching process, so that tracks that would" "otherwise have been included are now excluded and vice-versa.", ) quiet = traits.Bool( argstr="-quiet", position=1, desc="Do not display information messages or progress status.", ) debug = traits.Bool(argstr="-debug", position=1, desc="Display debugging messages.") class FilterTracksOutputSpec(TraitedSpec): out_file = File(exists=True, desc="the output filtered tracks") class FilterTracks(CommandLine): """ Use regions-of-interest to select a subset of tracks from a given MRtrix track file. 
Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> filt = mrt.FilterTracks() >>> filt.inputs.in_file = 'tracks.tck' >>> filt.run() # doctest: +SKIP """ _cmd = "filter_tracks" input_spec = FilterTracksInputSpec output_spec = FilterTracksOutputSpec class Tracks2ProbInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="tract file" ) template_file = File( exists=True, argstr="-template %s", position=1, desc="an image file to be used as a template for the output (the output image wil have the same transform and field of view)", ) voxel_dims = traits.List( traits.Float, argstr="-vox %s", sep=",", position=2, minlen=3, maxlen=3, desc="Three comma-separated numbers giving the size of each voxel in mm.", ) colour = traits.Bool( argstr="-colour", position=3, desc="add colour to the output image according to the direction of the tracks.", ) fraction = traits.Bool( argstr="-fraction", position=3, desc="produce an image of the fraction of fibres through each voxel (as a proportion of the total number in the file), rather than the count.", ) output_datatype = traits.Enum( "Bit", "Int8", "UInt8", "Int16", "UInt16", "Int32", "UInt32", "float32", "float64", argstr="-datatype %s", position=2, desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"', ) # , usedefault=True) resample = traits.Float( argstr="-resample %d", position=3, units="mm", desc="resample the tracks at regular intervals using Hermite interpolation. 
If omitted, the program will select an appropriate interpolation factor automatically.", ) out_filename = File(genfile=True, argstr="%s", position=-1, desc="output data file") class Tracks2ProbOutputSpec(TraitedSpec): tract_image = File(exists=True, desc="Output tract count or track density image") class Tracks2Prob(CommandLine): """ Convert a tract file into a map of the fraction of tracks to enter each voxel - also known as a tract density image (TDI) - in MRtrix's image format (.mif). This can be viewed using MRview or converted to Nifti using MRconvert. Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> tdi = mrt.Tracks2Prob() >>> tdi.inputs.in_file = 'dwi_CSD_tracked.tck' >>> tdi.inputs.colour = True >>> tdi.run() # doctest: +SKIP """ _cmd = "tracks2prob" input_spec = Tracks2ProbInputSpec output_spec = Tracks2ProbOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["tract_image"] = self.inputs.out_filename if not isdefined(outputs["tract_image"]): outputs["tract_image"] = op.abspath(self._gen_outfilename()) else: outputs["tract_image"] = os.path.abspath(outputs["tract_image"]) return outputs def _gen_filename(self, name): if name == "out_filename": return self._gen_outfilename() else: return None def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_TDI.mif" class StreamlineTrackInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="the image containing the source data." "The type of data required depends on the type of tracking as set in the preceeding argument. For DT methods, " "the base DWI are needed. 
For SD methods, the SH harmonic coefficients of the FOD are needed.", ) seed_xor = ["seed_file", "seed_spec"] seed_file = File(exists=True, argstr="-seed %s", desc="seed file", xor=seed_xor) seed_spec = traits.List( traits.Float, desc="seed specification in mm and radius (x y z r)", position=2, argstr="-seed %s", minlen=4, maxlen=4, sep=",", units="mm", xor=seed_xor, ) include_xor = ["include_file", "include_spec"] include_file = File( exists=True, argstr="-include %s", desc="inclusion file", xor=include_xor ) include_spec = traits.List( traits.Float, desc="inclusion specification in mm and radius (x y z r)", position=2, argstr="-include %s", minlen=4, maxlen=4, sep=",", units="mm", xor=include_xor, ) exclude_xor = ["exclude_file", "exclude_spec"] exclude_file = File( exists=True, argstr="-exclude %s", desc="exclusion file", xor=exclude_xor ) exclude_spec = traits.List( traits.Float, desc="exclusion specification in mm and radius (x y z r)", position=2, argstr="-exclude %s", minlen=4, maxlen=4, sep=",", units="mm", xor=exclude_xor, ) mask_xor = ["mask_file", "mask_spec"] mask_file = File( exists=True, argstr="-mask %s", desc="mask file. Only tracks within mask.", xor=mask_xor, ) mask_spec = traits.List( traits.Float, desc="Mask specification in mm and radius (x y z r). Tracks will be terminated when they leave the ROI.", position=2, argstr="-mask %s", minlen=4, maxlen=4, sep=",", units="mm", xor=mask_xor, ) inputmodel = traits.Enum( "DT_STREAM", "SD_PROB", "SD_STREAM", argstr="%s", desc="input model type", usedefault=True, position=-3, ) stop = traits.Bool( argstr="-stop", desc="stop track as soon as it enters any of the include regions.", ) do_not_precompute = traits.Bool( argstr="-noprecomputed", desc="Turns off precomputation of the legendre polynomial values. 
Warning: this will slow down the algorithm by a factor of approximately 4.", ) unidirectional = traits.Bool( argstr="-unidirectional", desc="Track from the seed point in one direction only (default is to track in both directions).", ) no_mask_interpolation = traits.Bool( argstr="-nomaskinterp", desc="Turns off trilinear interpolation of mask images." ) step_size = traits.Float( argstr="-step %s", units="mm", desc="Set the step size of the algorithm in mm (default is 0.2).", ) minimum_radius_of_curvature = traits.Float( argstr="-curvature %s", units="mm", desc="Set the minimum radius of curvature (default is 2 mm for DT_STREAM, 0 for SD_STREAM, 1 mm for SD_PROB and DT_PROB)", ) desired_number_of_tracks = traits.Int( argstr="-number %d", desc="Sets the desired number of tracks." "The program will continue to generate tracks until this number of tracks have been selected and written to the output file" "(default is 100 for ``*_STREAM`` methods, 1000 for ``*_PROB`` methods).", ) maximum_number_of_tracks = traits.Int( argstr="-maxnum %d", desc="Sets the maximum number of tracks to generate." 
"The program will not generate more tracks than this number, even if the desired number of tracks hasn't yet been reached" "(default is 100 x number).", ) minimum_tract_length = traits.Float( argstr="-minlength %s", units="mm", desc="Sets the minimum length of any track in millimeters (default is 10 mm).", ) maximum_tract_length = traits.Float( argstr="-length %s", units="mm", desc="Sets the maximum length of any track in millimeters (default is 200 mm).", ) cutoff_value = traits.Float( argstr="-cutoff %s", units="NA", desc="Set the FA or FOD amplitude cutoff for terminating tracks (default is 0.1).", ) initial_cutoff_value = traits.Float( argstr="-initcutoff %s", units="NA", desc="Sets the minimum FA or FOD amplitude for initiating tracks (default is twice the normal cutoff).", ) initial_direction = traits.List( traits.Int, desc="Specify the initial tracking direction as a vector", argstr="-initdirection %s", minlen=2, maxlen=2, units="voxels", ) out_file = File( argstr="%s", position=-1, name_source=["in_file"], name_template="%s_tracked.tck", output_name="tracked", desc="output data file", ) class StreamlineTrackOutputSpec(TraitedSpec): tracked = File(exists=True, desc="output file containing reconstructed tracts") class StreamlineTrack(CommandLine): """ Performs tractography using one of the following models: 'dt_prob', 'dt_stream', 'sd_prob', 'sd_stream', Where 'dt' stands for diffusion tensor, 'sd' stands for spherical deconvolution, and 'prob' stands for probabilistic. 
Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> strack = mrt.StreamlineTrack() >>> strack.inputs.inputmodel = 'SD_PROB' >>> strack.inputs.in_file = 'data.Bfloat' >>> strack.inputs.seed_file = 'seed_mask.nii' >>> strack.inputs.mask_file = 'mask.nii' >>> strack.cmdline 'streamtrack -mask mask.nii -seed seed_mask.nii SD_PROB data.Bfloat data_tracked.tck' >>> strack.run() # doctest: +SKIP """ _cmd = "streamtrack" input_spec = StreamlineTrackInputSpec output_spec = StreamlineTrackOutputSpec class DiffusionTensorStreamlineTrackInputSpec(StreamlineTrackInputSpec): gradient_encoding_file = File( exists=True, argstr="-grad %s", mandatory=True, position=-2, desc="Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix", ) class DiffusionTensorStreamlineTrack(StreamlineTrack): """ Specialized interface to StreamlineTrack. This interface is used for streamline tracking from diffusion tensor data, and calls the MRtrix function 'streamtrack' with the option 'DT_STREAM' Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> dtstrack = mrt.DiffusionTensorStreamlineTrack() >>> dtstrack.inputs.in_file = 'data.Bfloat' >>> dtstrack.inputs.seed_file = 'seed_mask.nii' >>> dtstrack.run() # doctest: +SKIP """ input_spec = DiffusionTensorStreamlineTrackInputSpec def __init__(self, command=None, **inputs): inputs["inputmodel"] = "DT_STREAM" return super(DiffusionTensorStreamlineTrack, self).__init__(command, **inputs) class ProbabilisticSphericallyDeconvolutedStreamlineTrackInputSpec( StreamlineTrackInputSpec ): maximum_number_of_trials = traits.Int( argstr="-trials %s", desc="Set the maximum number of sampling trials at each point (only used for probabilistic tracking).", ) class ProbabilisticSphericallyDeconvolutedStreamlineTrack(StreamlineTrack): """ Performs probabilistic tracking using spherically 
deconvolved data Specialized interface to StreamlineTrack. This interface is used for probabilistic tracking from spherically deconvolved data, and calls the MRtrix function 'streamtrack' with the option 'SD_PROB' Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> sdprobtrack = mrt.ProbabilisticSphericallyDeconvolutedStreamlineTrack() >>> sdprobtrack.inputs.in_file = 'data.Bfloat' >>> sdprobtrack.inputs.seed_file = 'seed_mask.nii' >>> sdprobtrack.run() # doctest: +SKIP """ input_spec = ProbabilisticSphericallyDeconvolutedStreamlineTrackInputSpec def __init__(self, command=None, **inputs): inputs["inputmodel"] = "SD_PROB" return super( ProbabilisticSphericallyDeconvolutedStreamlineTrack, self ).__init__(command, **inputs) class SphericallyDeconvolutedStreamlineTrack(StreamlineTrack): """ Performs streamline tracking using spherically deconvolved data Specialized interface to StreamlineTrack. This interface is used for streamline tracking from spherically deconvolved data, and calls the MRtrix function 'streamtrack' with the option 'SD_STREAM' Example ------- >>> import nipype.interfaces.mrtrix as mrt >>> sdtrack = mrt.SphericallyDeconvolutedStreamlineTrack() >>> sdtrack.inputs.in_file = 'data.Bfloat' >>> sdtrack.inputs.seed_file = 'seed_mask.nii' >>> sdtrack.run() # doctest: +SKIP """ input_spec = StreamlineTrackInputSpec def __init__(self, command=None, **inputs): inputs["inputmodel"] = "SD_STREAM" return super(SphericallyDeconvolutedStreamlineTrack, self).__init__( command, **inputs ) nipype-1.7.0/nipype/interfaces/mrtrix3/000077500000000000000000000000001413403311400200545ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/mrtrix3/__init__.py000066400000000000000000000013731413403311400221710ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: # -*- coding: utf-8 -*- """MRTrix3 provides software tools to perform various types of diffusion MRI analyses.""" 
from .utils import ( Mesh2PVE, Generate5tt, BrainMask, TensorMetrics, ComputeTDI, TCK2VTK, MRMath, MRConvert, MRResize, DWIExtract, SHConv, SH2Amp, ) from .preprocess import ( ResponseSD, ACTPrepareFSL, ReplaceFSwithFIRST, DWIPreproc, DWIDenoise, MRDeGibbs, DWIBiasCorrect, ) from .tracking import Tractography from .reconst import FitTensor, EstimateFOD, ConstrainedSphericalDeconvolution from .connectivity import LabelConfig, LabelConvert, BuildConnectome nipype-1.7.0/nipype/interfaces/mrtrix3/base.py000066400000000000000000000073151413403311400213460ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: # -*- coding: utf-8 -*- from ... import logging, LooseVersion from ...utils.filemanip import which from ..base import ( CommandLineInputSpec, CommandLine, traits, File, isdefined, PackageInfo, ) iflogger = logging.getLogger("nipype.interface") class Info(PackageInfo): version_cmd = "mrconvert --version" @staticmethod def parse_version(raw_info): # info is like: "== mrconvert 0.3.15-githash" for line in raw_info.splitlines(): if line.startswith("== mrconvert "): v_string = line.split()[2] break else: return None # -githash may or may not be appended v_string = v_string.split("-")[0] return ".".join(v_string.split(".")[:3]) @classmethod def looseversion(cls): """Return a comparable version object If no version found, use LooseVersion('0.0.0') """ return LooseVersion(cls.version() or "0.0.0") class MRTrix3BaseInputSpec(CommandLineInputSpec): nthreads = traits.Int( argstr="-nthreads %d", desc="number of threads. 
if zero, the number" " of available cpus will be used", nohash=True, ) # DW gradient table import options grad_file = File( exists=True, argstr="-grad %s", desc="dw gradient scheme (MRTrix format)", xor=["grad_fsl"], ) grad_fsl = traits.Tuple( File(exists=True), File(exists=True), argstr="-fslgrad %s %s", desc="(bvecs, bvals) dw gradient scheme (FSL format)", xor=["grad_file"], ) bval_scale = traits.Enum( "yes", "no", argstr="-bvalue_scaling %s", desc="specifies whether the b - values should be scaled by the square" " of the corresponding DW gradient norm, as often required for " "multishell or DSI DW acquisition schemes. The default action " "can also be set in the MRtrix config file, under the " "BValueScaling entry. Valid choices are yes / no, true / " "false, 0 / 1 (default: true).", ) in_bvec = File( exists=True, argstr="-fslgrad %s %s", desc="bvecs file in FSL format" ) in_bval = File(exists=True, desc="bvals file in FSL format") class MRTrix3Base(CommandLine): def _format_arg(self, name, trait_spec, value): if name == "nthreads" and value == 0: value = 1 try: from multiprocessing import cpu_count value = cpu_count() except: iflogger.warning("Number of threads could not be computed") pass return trait_spec.argstr % value if name == "in_bvec": return trait_spec.argstr % (value, self.inputs.in_bval) return super(MRTrix3Base, self)._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): if skip is None: skip = [] try: if isdefined(self.inputs.grad_file) or isdefined(self.inputs.grad_fsl): skip += ["in_bvec", "in_bval"] is_bvec = isdefined(self.inputs.in_bvec) is_bval = isdefined(self.inputs.in_bval) if is_bvec or is_bval: if not is_bvec or not is_bval: raise RuntimeError( "If using bvecs and bvals inputs, both" "should be defined" ) skip += ["in_bval"] except AttributeError: pass return super(MRTrix3Base, self)._parse_inputs(skip=skip) @property def version(self): return Info.version() 
nipype-1.7.0/nipype/interfaces/mrtrix3/connectivity.py000066400000000000000000000235001413403311400231440ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: # -*- coding: utf-8 -*- import os import os.path as op from ..base import CommandLineInputSpec, traits, TraitedSpec, File, isdefined from .base import MRTrix3Base class BuildConnectomeInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-3, desc="input tractography" ) in_parc = File(exists=True, argstr="%s", position=-2, desc="parcellation file") out_file = File( "connectome.csv", argstr="%s", mandatory=True, position=-1, usedefault=True, desc="output file after processing", ) nthreads = traits.Int( argstr="-nthreads %d", desc="number of threads. if zero, the number" " of available cpus will be used", nohash=True, ) vox_lookup = traits.Bool( argstr="-assignment_voxel_lookup", desc="use a simple voxel lookup value at each streamline endpoint", ) search_radius = traits.Float( argstr="-assignment_radial_search %f", desc="perform a radial search from each streamline endpoint to locate " "the nearest node. Argument is the maximum radius in mm; if no node is" " found within this radius, the streamline endpoint is not assigned to" " any node.", ) search_reverse = traits.Float( argstr="-assignment_reverse_search %f", desc="traverse from each streamline endpoint inwards along the " "streamline, in search of the last node traversed by the streamline. " "Argument is the maximum traversal length in mm (set to 0 to allow " "search to continue to the streamline midpoint).", ) search_forward = traits.Float( argstr="-assignment_forward_search %f", desc="project the streamline forwards from the endpoint in search of a" "parcellation node voxel. 
Argument is the maximum traversal length in " "mm.", ) metric = traits.Enum( "count", "meanlength", "invlength", "invnodevolume", "mean_scalar", "invlength_invnodevolume", argstr="-metric %s", desc="specify the edge" " weight metric", ) in_scalar = File( exists=True, argstr="-image %s", desc="provide the associated image " "for the mean_scalar metric", ) in_weights = File( exists=True, argstr="-tck_weights_in %s", desc="specify a text scalar " "file containing the streamline weights", ) keep_unassigned = traits.Bool( argstr="-keep_unassigned", desc="By default, the program discards the" " information regarding those streamlines that are not successfully " "assigned to a node pair. Set this option to keep these values (will " "be the first row/column in the output matrix)", ) zero_diagonal = traits.Bool( argstr="-zero_diagonal", desc="set all diagonal entries in the matrix " "to zero (these represent streamlines that connect to the same node at" " both ends)", ) class BuildConnectomeOutputSpec(TraitedSpec): out_file = File(exists=True, desc="the output response file") class BuildConnectome(MRTrix3Base): """ Generate a connectome matrix from a streamlines file and a node parcellation image Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> mat = mrt.BuildConnectome() >>> mat.inputs.in_file = 'tracks.tck' >>> mat.inputs.in_parc = 'aparc+aseg.nii' >>> mat.cmdline # doctest: +ELLIPSIS 'tck2connectome tracks.tck aparc+aseg.nii connectome.csv' >>> mat.run() # doctest: +SKIP """ _cmd = "tck2connectome" input_spec = BuildConnectomeInputSpec output_spec = BuildConnectomeOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class LabelConfigInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-3, desc="input anatomical image", ) in_config = File( exists=True, argstr="%s", position=-2, desc="connectome configuration file" ) out_file 
= File( "parcellation.mif", argstr="%s", mandatory=True, position=-1, usedefault=True, desc="output file after processing", ) lut_basic = File( argstr="-lut_basic %s", desc="get information from " "a basic lookup table consisting of index / name pairs", ) lut_fs = File( argstr="-lut_freesurfer %s", desc="get information from " 'a FreeSurfer lookup table(typically "FreeSurferColorLUT' '.txt")', ) lut_aal = File( argstr="-lut_aal %s", desc="get information from the AAL " 'lookup table (typically "ROI_MNI_V4.txt")', ) lut_itksnap = File( argstr="-lut_itksnap %s", desc="get information from an" " ITK - SNAP lookup table(this includes the IIT atlas " 'file "LUT_GM.txt")', ) spine = File( argstr="-spine %s", desc="provide a manually-defined " "segmentation of the base of the spine where the streamlines" " terminate, so that this can become a node in the connection" " matrix.", ) nthreads = traits.Int( argstr="-nthreads %d", desc="number of threads. if zero, the number" " of available cpus will be used", nohash=True, ) class LabelConfigOutputSpec(TraitedSpec): out_file = File(exists=True, desc="the output response file") class LabelConfig(MRTrix3Base): """ Re-configure parcellation to be incrementally defined. 
Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> labels = mrt.LabelConfig() >>> labels.inputs.in_file = 'aparc+aseg.nii' >>> labels.inputs.in_config = 'mrtrix3_labelconfig.txt' >>> labels.cmdline # doctest: +ELLIPSIS 'labelconfig aparc+aseg.nii mrtrix3_labelconfig.txt parcellation.mif' >>> labels.run() # doctest: +SKIP """ _cmd = "labelconfig" input_spec = LabelConfigInputSpec output_spec = LabelConfigOutputSpec def _parse_inputs(self, skip=None): if skip is None: skip = [] if not isdefined(self.inputs.in_config): from distutils.spawn import find_executable path = find_executable(self._cmd) if path is None: path = os.getenv(MRTRIX3_HOME, "/opt/mrtrix3") else: path = op.dirname(op.dirname(path)) self.inputs.in_config = op.join( path, "src/dwi/tractography/connectomics/" "example_configs/fs_default.txt", ) return super(LabelConfig, self)._parse_inputs(skip=skip) def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class LabelConvertInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-4, desc="input anatomical image", ) in_lut = File( exists=True, argstr="%s", mandatory=True, position=-3, desc="get information from " "a basic lookup table consisting of index / name pairs", ) in_config = File( exists=True, argstr="%s", position=-2, desc="connectome configuration file" ) out_file = File( "parcellation.mif", argstr="%s", mandatory=True, position=-1, usedefault=True, desc="output file after processing", ) spine = File( argstr="-spine %s", desc="provide a manually-defined " "segmentation of the base of the spine where the streamlines" " terminate, so that this can become a node in the connection" " matrix.", ) num_threads = traits.Int( argstr="-nthreads %d", desc="number of threads. 
if zero, the number" " of available cpus will be used", nohash=True, ) class LabelConvertOutputSpec(TraitedSpec): out_file = File(exists=True, desc="the output response file") class LabelConvert(MRTrix3Base): """ Re-configure parcellation to be incrementally defined. Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> labels = mrt.LabelConvert() >>> labels.inputs.in_file = 'aparc+aseg.nii' >>> labels.inputs.in_config = 'mrtrix3_labelconfig.txt' >>> labels.inputs.in_lut = 'FreeSurferColorLUT.txt' >>> labels.cmdline 'labelconvert aparc+aseg.nii FreeSurferColorLUT.txt mrtrix3_labelconfig.txt parcellation.mif' >>> labels.run() # doctest: +SKIP """ _cmd = "labelconvert" input_spec = LabelConvertInputSpec output_spec = LabelConvertOutputSpec def _parse_inputs(self, skip=None): if skip is None: skip = [] if not isdefined(self.inputs.in_config): from nipype.utils.filemanip import which path = which(self._cmd) if path is None: path = os.getenv(MRTRIX3_HOME, "/opt/mrtrix3") else: path = op.dirname(op.dirname(path)) self.inputs.in_config = op.join( path, "src/dwi/tractography/connectomics/" "example_configs/fs_default.txt", ) return super(LabelConvert, self)._parse_inputs(skip=skip) def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs nipype-1.7.0/nipype/interfaces/mrtrix3/preprocess.py000066400000000000000000000444601413403311400226230ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: # -*- coding: utf-8 -*- import os.path as op from ..base import ( CommandLineInputSpec, CommandLine, traits, TraitedSpec, File, isdefined, Undefined, InputMultiObject, ) from .base import MRTrix3BaseInputSpec, MRTrix3Base class DWIDenoiseInputSpec(MRTrix3BaseInputSpec): in_file = File( exists=True, argstr="%s", position=-2, mandatory=True, desc="input DWI image" ) mask = File(exists=True, argstr="-mask %s", 
position=1, desc="mask image") extent = traits.Tuple( (traits.Int, traits.Int, traits.Int), argstr="-extent %d,%d,%d", desc="set the window size of the denoising filter. (default = 5,5,5)", ) noise = File( argstr="-noise %s", name_template="%s_noise", name_source="in_file", keep_extension=True, desc="the output noise map", ) out_file = File( argstr="%s", position=-1, name_template="%s_denoised", name_source="in_file", keep_extension=True, desc="the output denoised DWI image", ) class DWIDenoiseOutputSpec(TraitedSpec): noise = File(desc="the output noise map", exists=True) out_file = File(desc="the output denoised DWI image", exists=True) class DWIDenoise(MRTrix3Base): """ Denoise DWI data and estimate the noise level based on the optimal threshold for PCA. DWI data denoising and noise map estimation by exploiting data redundancy in the PCA domain using the prior knowledge that the eigenspectrum of random covariance matrices is described by the universal Marchenko Pastur distribution. Important note: image denoising must be performed as the first step of the image processing pipeline. The routine will fail if interpolation or smoothing has been applied to the data prior to denoising. Note that this function does not correct for non-Gaussian noise biases. 
For more information, see Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> denoise = mrt.DWIDenoise() >>> denoise.inputs.in_file = 'dwi.mif' >>> denoise.inputs.mask = 'mask.mif' >>> denoise.inputs.noise = 'noise.mif' >>> denoise.cmdline # doctest: +ELLIPSIS 'dwidenoise -mask mask.mif -noise noise.mif dwi.mif dwi_denoised.mif' >>> denoise.run() # doctest: +SKIP """ _cmd = "dwidenoise" input_spec = DWIDenoiseInputSpec output_spec = DWIDenoiseOutputSpec class MRDeGibbsInputSpec(MRTrix3BaseInputSpec): in_file = File( exists=True, argstr="%s", position=-2, mandatory=True, desc="input DWI image" ) axes = traits.ListInt( default_value=[0, 1], usedefault=True, sep=",", minlen=2, maxlen=2, argstr="-axes %s", desc="indicate the plane in which the data was acquired (axial = 0,1; " "coronal = 0,2; sagittal = 1,2", ) nshifts = traits.Int( default_value=20, usedefault=True, argstr="-nshifts %d", desc="discretization of subpixel spacing (default = 20)", ) minW = traits.Int( default_value=1, usedefault=True, argstr="-minW %d", desc="left border of window used for total variation (TV) computation " "(default = 1)", ) maxW = traits.Int( default_value=3, usedefault=True, argstr="-maxW %d", desc="right border of window used for total variation (TV) computation " "(default = 3)", ) out_file = File( name_template="%s_unr", name_source="in_file", keep_extension=True, argstr="%s", position=-1, desc="the output unringed DWI image", ) class MRDeGibbsOutputSpec(TraitedSpec): out_file = File(desc="the output unringed DWI image", exists=True) class MRDeGibbs(MRTrix3Base): """ Remove Gibbs ringing artifacts. This application attempts to remove Gibbs ringing artefacts from MRI images using the method of local subvoxel-shifts proposed by Kellner et al. This command is designed to run on data directly after it has been reconstructed by the scanner, before any interpolation of any kind has taken place. You should not run this command after any form of motion correction (e.g. 
not after dwipreproc). Similarly, if you intend running dwidenoise, you should run this command afterwards, since it has the potential to alter the noise structure, which would impact on dwidenoise's performance. Note that this method is designed to work on images acquired with full k-space coverage. Running this method on partial Fourier ('half-scan') data may lead to suboptimal and/or biased results, as noted in the original reference below. There is currently no means of dealing with this; users should exercise caution when using this method on partial Fourier data, and inspect its output for any obvious artefacts. For more information, see Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> unring = mrt.MRDeGibbs() >>> unring.inputs.in_file = 'dwi.mif' >>> unring.cmdline 'mrdegibbs -axes 0,1 -maxW 3 -minW 1 -nshifts 20 dwi.mif dwi_unr.mif' >>> unring.run() # doctest: +SKIP """ _cmd = "mrdegibbs" input_spec = MRDeGibbsInputSpec output_spec = MRDeGibbsOutputSpec class DWIBiasCorrectInputSpec(MRTrix3BaseInputSpec): in_file = File( exists=True, argstr="%s", position=-2, mandatory=True, desc="input DWI image" ) in_mask = File(argstr="-mask %s", desc="input mask image for bias field estimation") use_ants = traits.Bool( argstr="ants", mandatory=True, desc="use ANTS N4 to estimate the inhomogeneity field", position=0, xor=["use_fsl"], ) use_fsl = traits.Bool( argstr="fsl", mandatory=True, desc="use FSL FAST to estimate the inhomogeneity field", position=0, xor=["use_ants"], ) bias = File(argstr="-bias %s", desc="bias field") out_file = File( name_template="%s_biascorr", name_source="in_file", keep_extension=True, argstr="%s", position=-1, desc="the output bias corrected DWI image", genfile=True, ) class DWIBiasCorrectOutputSpec(TraitedSpec): bias = File(desc="the output bias field", exists=True) out_file = File(desc="the output bias corrected DWI image", exists=True) class DWIBiasCorrect(MRTrix3Base): """ Perform B1 field inhomogeneity correction for a DWI 
volume series. For more information, see Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> bias_correct = mrt.DWIBiasCorrect() >>> bias_correct.inputs.in_file = 'dwi.mif' >>> bias_correct.inputs.use_ants = True >>> bias_correct.cmdline 'dwibiascorrect ants dwi.mif dwi_biascorr.mif' >>> bias_correct.run() # doctest: +SKIP """ _cmd = "dwibiascorrect" input_spec = DWIBiasCorrectInputSpec output_spec = DWIBiasCorrectOutputSpec def _format_arg(self, name, trait_spec, value): if name in ("use_ants", "use_fsl"): ver = self.version # Changed in version 3.0, after release candidates if ver is not None and (ver[0] < "3" or ver.startswith("3.0_RC")): return f"-{trait_spec.argstr}" return super()._format_arg(name, trait_spec, value) class DWIPreprocInputSpec(MRTrix3BaseInputSpec): in_file = File( exists=True, argstr="%s", position=0, mandatory=True, desc="input DWI image" ) out_file = File( "preproc.mif", argstr="%s", mandatory=True, position=1, usedefault=True, desc="output file after preprocessing", ) rpe_options = traits.Enum( "none", "pair", "all", "header", argstr="-rpe_%s", position=2, mandatory=True, desc='Specify acquisition phase-encoding design. "none" for no reversed phase-encoding image, "all" for all DWIs have opposing phase-encoding acquisition, "pair" for using a pair of b0 volumes for inhomogeneity field estimation only, and "header" for phase-encoding information can be found in the image header(s)', ) pe_dir = traits.Str( argstr="-pe_dir %s", mandatory=True, desc="Specify the phase encoding direction of the input series, can be a signed axis number (e.g. -0, 1, +2), an axis designator (e.g. RL, PA, IS), or NIfTI axis codes (e.g. 
i-, j, k)", ) ro_time = traits.Float( argstr="-readout_time %f", desc="Total readout time of input series (in seconds)", ) in_epi = File( exists=True, argstr="-se_epi %s", desc="Provide an additional image series consisting of spin-echo EPI images, which is to be used exclusively by topup for estimating the inhomogeneity field (i.e. it will not form part of the output image series)", ) align_seepi = traits.Bool( argstr="-align_seepi", desc="Achieve alignment between the SE-EPI images used for inhomogeneity field estimation, and the DWIs", ) eddy_options = traits.Str( argstr='-eddy_options "%s"', desc="Manually provide additional command-line options to the eddy command", ) topup_options = traits.Str( argstr='-topup_options "%s"', desc="Manually provide additional command-line options to the topup command", ) export_grad_mrtrix = traits.Bool( argstr="-export_grad_mrtrix", desc="export new gradient files in mrtrix format" ) export_grad_fsl = traits.Bool( argstr="-export_grad_fsl", desc="export gradient files in FSL format" ) out_grad_mrtrix = File( "grad.b", argstr="%s", usedefault=True, requires=["export_grad_mrtrix"], desc="name of new gradient file", ) out_grad_fsl = traits.Tuple( File("grad.bvecs", usedefault=True, desc="bvecs"), File("grad.bvals", usedefault=True, desc="bvals"), argstr="%s, %s", requires=["export_grad_fsl"], desc="Output (bvecs, bvals) gradients FSL format", ) class DWIPreprocOutputSpec(TraitedSpec): out_file = File(argstr="%s", desc="output preprocessed image series") out_grad_mrtrix = File( "grad.b", argstr="%s", usedefault=True, desc="preprocessed gradient file in mrtrix3 format", ) out_fsl_bvec = File( "grad.bvecs", argstr="%s", usedefault=True, desc="exported fsl gradient bvec file", ) out_fsl_bval = File( "grad.bvals", argstr="%s", usedefault=True, desc="exported fsl gradient bval file", ) class DWIPreproc(MRTrix3Base): """ Perform diffusion image pre-processing using FSL's eddy tool; including inhomogeneity distortion correction using 
FSL's topup tool if possible For more information, see Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> preproc = mrt.DWIPreproc() >>> preproc.inputs.in_file = 'dwi.mif' >>> preproc.inputs.rpe_options = 'none' >>> preproc.inputs.out_file = "preproc.mif" >>> preproc.inputs.eddy_options = '--slm=linear --repol' # linear second level model and replace outliers >>> preproc.inputs.export_grad_mrtrix = True # export final gradient table in MRtrix format >>> preproc.inputs.ro_time = 0.165240 # 'TotalReadoutTime' in BIDS JSON metadata files >>> preproc.inputs.pe_dir = 'j' # 'PhaseEncodingDirection' in BIDS JSON metadata files >>> preproc.cmdline 'dwifslpreproc dwi.mif preproc.mif -rpe_none -eddy_options "--slm=linear --repol" -export_grad_mrtrix grad.b -pe_dir j -readout_time 0.165240' >>> preproc.run() # doctest: +SKIP """ _cmd = "dwifslpreproc" input_spec = DWIPreprocInputSpec output_spec = DWIPreprocOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) if self.inputs.export_grad_mrtrix: outputs["out_grad_mrtrix"] = op.abspath(self.inputs.out_grad_mrtrix) if self.inputs.export_grad_fsl: outputs["out_fsl_bvec"] = op.abspath(self.inputs.out_grad_fsl[0]) outputs["out_fsl_bval"] = op.abspath(self.inputs.out_grad_fsl[1]) return outputs class ResponseSDInputSpec(MRTrix3BaseInputSpec): algorithm = traits.Enum( "msmt_5tt", "dhollander", "tournier", "tax", argstr="%s", position=1, mandatory=True, desc="response estimation algorithm (multi-tissue)", ) in_file = File( exists=True, argstr="%s", position=-5, mandatory=True, desc="input DWI image" ) mtt_file = File(argstr="%s", position=-4, desc="input 5tt image") wm_file = File( "wm.txt", argstr="%s", position=-3, usedefault=True, desc="output WM response text file", ) gm_file = File(argstr="%s", position=-2, desc="output GM response text file") csf_file = File(argstr="%s", position=-1, desc="output CSF response text file") in_mask = 
File(exists=True, argstr="-mask %s", desc="provide initial mask image") max_sh = InputMultiObject( traits.Int, argstr="-lmax %s", sep=",", desc=( "maximum harmonic degree of response function - single value for " "single-shell response, list for multi-shell response" ), ) class ResponseSDOutputSpec(TraitedSpec): wm_file = File(argstr="%s", desc="output WM response text file") gm_file = File(argstr="%s", desc="output GM response text file") csf_file = File(argstr="%s", desc="output CSF response text file") class ResponseSD(MRTrix3Base): """ Estimate response function(s) for spherical deconvolution using the specified algorithm. Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> resp = mrt.ResponseSD() >>> resp.inputs.in_file = 'dwi.mif' >>> resp.inputs.algorithm = 'tournier' >>> resp.inputs.grad_fsl = ('bvecs', 'bvals') >>> resp.cmdline # doctest: +ELLIPSIS 'dwi2response tournier -fslgrad bvecs bvals dwi.mif wm.txt' >>> resp.run() # doctest: +SKIP # We can also pass in multiple harmonic degrees in the case of multi-shell >>> resp.inputs.max_sh = [6,8,10] >>> resp.cmdline 'dwi2response tournier -fslgrad bvecs bvals -lmax 6,8,10 dwi.mif wm.txt' """ _cmd = "dwi2response" input_spec = ResponseSDInputSpec output_spec = ResponseSDOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["wm_file"] = op.abspath(self.inputs.wm_file) if self.inputs.gm_file != Undefined: outputs["gm_file"] = op.abspath(self.inputs.gm_file) if self.inputs.csf_file != Undefined: outputs["csf_file"] = op.abspath(self.inputs.csf_file) return outputs class ACTPrepareFSLInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="input anatomical image", ) out_file = File( "act_5tt.mif", argstr="%s", mandatory=True, position=-1, usedefault=True, desc="output file after processing", ) class ACTPrepareFSLOutputSpec(TraitedSpec): out_file = File(exists=True, desc="the output response file") class 
ACTPrepareFSL(CommandLine): """ Generate anatomical information necessary for Anatomically Constrained Tractography (ACT). Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> prep = mrt.ACTPrepareFSL() >>> prep.inputs.in_file = 'T1.nii.gz' >>> prep.cmdline # doctest: +ELLIPSIS 'act_anat_prepare_fsl T1.nii.gz act_5tt.mif' >>> prep.run() # doctest: +SKIP """ _cmd = "act_anat_prepare_fsl" input_spec = ACTPrepareFSLInputSpec output_spec = ACTPrepareFSLOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class ReplaceFSwithFIRSTInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-4, desc="input anatomical image", ) in_t1w = File( exists=True, argstr="%s", mandatory=True, position=-3, desc="input T1 image" ) in_config = File( exists=True, argstr="%s", position=-2, desc="connectome configuration file" ) out_file = File( "aparc+first.mif", argstr="%s", mandatory=True, position=-1, usedefault=True, desc="output file after processing", ) class ReplaceFSwithFIRSTOutputSpec(TraitedSpec): out_file = File(exists=True, desc="the output response file") class ReplaceFSwithFIRST(CommandLine): """ Replace deep gray matter structures segmented with FSL FIRST in a FreeSurfer parcellation. 
Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> prep = mrt.ReplaceFSwithFIRST() >>> prep.inputs.in_file = 'aparc+aseg.nii' >>> prep.inputs.in_t1w = 'T1.nii.gz' >>> prep.inputs.in_config = 'mrtrix3_labelconfig.txt' >>> prep.cmdline # doctest: +ELLIPSIS 'fs_parc_replace_sgm_first aparc+aseg.nii T1.nii.gz \ mrtrix3_labelconfig.txt aparc+first.mif' >>> prep.run() # doctest: +SKIP """ _cmd = "fs_parc_replace_sgm_first" input_spec = ReplaceFSwithFIRSTInputSpec output_spec = ReplaceFSwithFIRSTOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs nipype-1.7.0/nipype/interfaces/mrtrix3/reconst.py000066400000000000000000000204401413403311400221030ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: # -*- coding: utf-8 -*- import os.path as op from ..base import traits, TraitedSpec, File, InputMultiObject, isdefined from .base import MRTrix3BaseInputSpec, MRTrix3Base class FitTensorInputSpec(MRTrix3BaseInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="input diffusion weighted images", ) out_file = File( "dti.mif", argstr="%s", mandatory=True, position=-1, usedefault=True, desc="the output diffusion tensor image", ) # General options in_mask = File( exists=True, argstr="-mask %s", desc=( "only perform computation within the specified " "binary brain mask image" ), ) method = traits.Enum( "nonlinear", "loglinear", "sech", "rician", argstr="-method %s", desc=("select method used to perform the fitting"), ) reg_term = traits.Float( argstr="-regularisation %f", max_ver="0.3.13", desc=( "specify the strength of the regularisation term on the " "magnitude of the tensor elements (default = 5000). 
This " "only applies to the non-linear methods" ), ) predicted_signal = File( argstr="-predicted_signal %s", desc=( "specify a file to contain the predicted signal from the tensor " "fits. This can be used to calculate the residual signal" ), ) class FitTensorOutputSpec(TraitedSpec): out_file = File(exists=True, desc="the output DTI file") predicted_signal = File(desc="Predicted signal from fitted tensors") class FitTensor(MRTrix3Base): """ Convert diffusion-weighted images to tensor images Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> tsr = mrt.FitTensor() >>> tsr.inputs.in_file = 'dwi.mif' >>> tsr.inputs.in_mask = 'mask.nii.gz' >>> tsr.inputs.grad_fsl = ('bvecs', 'bvals') >>> tsr.cmdline # doctest: +ELLIPSIS 'dwi2tensor -fslgrad bvecs bvals -mask mask.nii.gz dwi.mif dti.mif' >>> tsr.run() # doctest: +SKIP """ _cmd = "dwi2tensor" input_spec = FitTensorInputSpec output_spec = FitTensorOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) if isdefined(self.inputs.predicted_signal): outputs["predicted_signal"] = op.abspath(self.inputs.predicted_signal) return outputs class EstimateFODInputSpec(MRTrix3BaseInputSpec): algorithm = traits.Enum( "csd", "msmt_csd", argstr="%s", position=-8, mandatory=True, desc="FOD algorithm", ) in_file = File( exists=True, argstr="%s", position=-7, mandatory=True, desc="input DWI image" ) wm_txt = File( argstr="%s", position=-6, mandatory=True, desc="WM response text file" ) wm_odf = File( "wm.mif", argstr="%s", position=-5, usedefault=True, mandatory=True, desc="output WM ODF", ) gm_txt = File(argstr="%s", position=-4, desc="GM response text file") gm_odf = File( "gm.mif", usedefault=True, argstr="%s", position=-3, desc="output GM ODF" ) csf_txt = File(argstr="%s", position=-2, desc="CSF response text file") csf_odf = File( "csf.mif", usedefault=True, argstr="%s", position=-1, desc="output CSF ODF" ) mask_file = File(exists=True, argstr="-mask %s", 
desc="mask image") # DW Shell selection options shell = traits.List( traits.Float, sep=",", argstr="-shell %s", desc="specify one or more dw gradient shells", ) max_sh = InputMultiObject( traits.Int, value=[8], usedefault=True, argstr="-lmax %s", sep=",", desc=( "maximum harmonic degree of response function - single value for single-shell response, list for multi-shell response" ), ) in_dirs = File( exists=True, argstr="-directions %s", desc=( "specify the directions over which to apply the non-negativity " "constraint (by default, the built-in 300 direction set is " "used). These should be supplied as a text file containing the " "[ az el ] pairs for the directions." ), ) predicted_signal = File( argstr="-predicted_signal %s", desc=( "specify a file to contain the predicted signal from the FOD " "estimates. This can be used to calculate the residual signal." "Note that this is only valid if algorithm == 'msmt_csd'. " "For single shell reconstructions use a combination of SHConv " "and SH2Amp instead." ), ) class EstimateFODOutputSpec(TraitedSpec): wm_odf = File(argstr="%s", desc="output WM ODF") gm_odf = File(argstr="%s", desc="output GM ODF") csf_odf = File(argstr="%s", desc="output CSF ODF") predicted_signal = File(desc="output predicted signal") class EstimateFOD(MRTrix3Base): """ Estimate fibre orientation distributions from diffusion data using spherical deconvolution .. warning:: The CSD algorithm does not work as intended, but fixing it in this interface could break existing workflows. This interface has been superseded by :py:class:`.ConstrainedSphericalDecomposition`. 
Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> fod = mrt.EstimateFOD() >>> fod.inputs.algorithm = 'msmt_csd' >>> fod.inputs.in_file = 'dwi.mif' >>> fod.inputs.wm_txt = 'wm.txt' >>> fod.inputs.grad_fsl = ('bvecs', 'bvals') >>> fod.cmdline 'dwi2fod -fslgrad bvecs bvals -lmax 8 msmt_csd dwi.mif wm.txt wm.mif gm.mif csf.mif' >>> fod.run() # doctest: +SKIP """ _cmd = "dwi2fod" input_spec = EstimateFODInputSpec output_spec = EstimateFODOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["wm_odf"] = op.abspath(self.inputs.wm_odf) if isdefined(self.inputs.gm_odf): outputs["gm_odf"] = op.abspath(self.inputs.gm_odf) if isdefined(self.inputs.csf_odf): outputs["csf_odf"] = op.abspath(self.inputs.csf_odf) if isdefined(self.inputs.predicted_signal): if self.inputs.algorithm != "msmt_csd": raise Exception( "'predicted_signal' option can only be used with " "the 'msmt_csd' algorithm" ) outputs["predicted_signal"] = op.abspath(self.inputs.predicted_signal) return outputs class ConstrainedSphericalDeconvolutionInputSpec(EstimateFODInputSpec): gm_odf = File(argstr="%s", position=-3, desc="output GM ODF") csf_odf = File(argstr="%s", position=-1, desc="output CSF ODF") max_sh = InputMultiObject( traits.Int, argstr="-lmax %s", sep=",", desc=( "maximum harmonic degree of response function - single value for single-shell response, list for multi-shell response" ), ) class ConstrainedSphericalDeconvolution(EstimateFOD): """ Estimate fibre orientation distributions from diffusion data using spherical deconvolution This interface supersedes :py:class:`.EstimateFOD`. The old interface has contained a bug when using the CSD algorithm as opposed to the MSMT CSD algorithm, but fixing it could potentially break existing workflows. 
The new interface works the same, but does not populate the following inputs by default: * ``gm_odf`` * ``csf_odf`` * ``max_sh`` Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> fod = mrt.ConstrainedSphericalDeconvolution() >>> fod.inputs.algorithm = 'csd' >>> fod.inputs.in_file = 'dwi.mif' >>> fod.inputs.wm_txt = 'wm.txt' >>> fod.inputs.grad_fsl = ('bvecs', 'bvals') >>> fod.cmdline 'dwi2fod -fslgrad bvecs bvals csd dwi.mif wm.txt wm.mif' >>> fod.run() # doctest: +SKIP """ input_spec = ConstrainedSphericalDeconvolutionInputSpec nipype-1.7.0/nipype/interfaces/mrtrix3/tests/000077500000000000000000000000001413403311400212165ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/mrtrix3/tests/__init__.py000066400000000000000000000000301413403311400233200ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_ACTPrepareFSL.py000066400000000000000000000022141413403311400261510ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import ACTPrepareFSL def test_ACTPrepareFSL_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), ) inputs = ACTPrepareFSL.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ACTPrepareFSL_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ACTPrepareFSL.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_BrainMask.py000066400000000000000000000032421413403311400255270ustar00rootroot00000000000000# AUTO-GENERATED 
by tools/checkspecs.py - DO NOT EDIT from ..utils import BrainMask def test_BrainMask_inputs(): input_map = dict( args=dict( argstr="%s", ), bval_scale=dict( argstr="-bvalue_scaling %s", ), environ=dict( nohash=True, usedefault=True, ), grad_file=dict( argstr="-grad %s", extensions=None, xor=["grad_fsl"], ), grad_fsl=dict( argstr="-fslgrad %s %s", xor=["grad_file"], ), in_bval=dict( extensions=None, ), in_bvec=dict( argstr="-fslgrad %s %s", extensions=None, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), ) inputs = BrainMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BrainMask_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = BrainMask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_BuildConnectome.py000066400000000000000000000041621413403311400267340ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..connectivity import BuildConnectome def test_BuildConnectome_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), in_parc=dict( argstr="%s", extensions=None, position=-2, ), in_scalar=dict( argstr="-image %s", extensions=None, ), in_weights=dict( argstr="-tck_weights_in %s", extensions=None, ), keep_unassigned=dict( argstr="-keep_unassigned", ), metric=dict( argstr="-metric %s", ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, 
usedefault=True, ), search_forward=dict( argstr="-assignment_forward_search %f", ), search_radius=dict( argstr="-assignment_radial_search %f", ), search_reverse=dict( argstr="-assignment_reverse_search %f", ), vox_lookup=dict( argstr="-assignment_voxel_lookup", ), zero_diagonal=dict( argstr="-zero_diagonal", ), ) inputs = BuildConnectome.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BuildConnectome_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = BuildConnectome.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_ComputeTDI.py000066400000000000000000000045671413403311400256500ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ComputeTDI def test_ComputeTDI_inputs(): input_map = dict( args=dict( argstr="%s", ), contrast=dict( argstr="-constrast %s", ), data_type=dict( argstr="-datatype %s", ), dixel=dict( argstr="-dixel %s", extensions=None, ), ends_only=dict( argstr="-ends_only", ), environ=dict( nohash=True, usedefault=True, ), fwhm_tck=dict( argstr="-fwhm_tck %f", ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), in_map=dict( argstr="-image %s", extensions=None, ), map_zero=dict( argstr="-map_zero", ), max_tod=dict( argstr="-tod %d", ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), out_file=dict( argstr="%s", extensions=None, position=-1, usedefault=True, ), precise=dict( argstr="-precise", ), reference=dict( argstr="-template %s", extensions=None, ), stat_tck=dict( argstr="-stat_tck %s", ), stat_vox=dict( argstr="-stat_vox %s", ), tck_weights=dict( argstr="-tck_weights_in %s", extensions=None, ), upsample=dict( argstr="-upsample %d", ), use_dec=dict( 
argstr="-dec", ), vox_size=dict( argstr="-vox %s", sep=",", ), ) inputs = ComputeTDI.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ComputeTDI_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ComputeTDI.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_ConstrainedSphericalDeconvolution.py000066400000000000000000000062731413403311400325440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..reconst import ConstrainedSphericalDeconvolution def test_ConstrainedSphericalDeconvolution_inputs(): input_map = dict( algorithm=dict( argstr="%s", mandatory=True, position=-8, ), args=dict( argstr="%s", ), bval_scale=dict( argstr="-bvalue_scaling %s", ), csf_odf=dict( argstr="%s", extensions=None, position=-1, ), csf_txt=dict( argstr="%s", extensions=None, position=-2, ), environ=dict( nohash=True, usedefault=True, ), gm_odf=dict( argstr="%s", extensions=None, position=-3, ), gm_txt=dict( argstr="%s", extensions=None, position=-4, ), grad_file=dict( argstr="-grad %s", extensions=None, xor=["grad_fsl"], ), grad_fsl=dict( argstr="-fslgrad %s %s", xor=["grad_file"], ), in_bval=dict( extensions=None, ), in_bvec=dict( argstr="-fslgrad %s %s", extensions=None, ), in_dirs=dict( argstr="-directions %s", extensions=None, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-7, ), mask_file=dict( argstr="-mask %s", extensions=None, ), max_sh=dict( argstr="-lmax %s", sep=",", ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), predicted_signal=dict( argstr="-predicted_signal %s", extensions=None, ), shell=dict( argstr="-shell %s", sep=",", ), wm_odf=dict( argstr="%s", extensions=None, mandatory=True, position=-5, 
usedefault=True, ), wm_txt=dict( argstr="%s", extensions=None, mandatory=True, position=-6, ), ) inputs = ConstrainedSphericalDeconvolution.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ConstrainedSphericalDeconvolution_outputs(): output_map = dict( csf_odf=dict( argstr="%s", extensions=None, ), gm_odf=dict( argstr="%s", extensions=None, ), predicted_signal=dict( extensions=None, ), wm_odf=dict( argstr="%s", extensions=None, ), ) outputs = ConstrainedSphericalDeconvolution.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_DWIBiasCorrect.py000066400000000000000000000044331413403311400264270ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import DWIBiasCorrect def test_DWIBiasCorrect_inputs(): input_map = dict( args=dict( argstr="%s", ), bias=dict( argstr="-bias %s", extensions=None, ), bval_scale=dict( argstr="-bvalue_scaling %s", ), environ=dict( nohash=True, usedefault=True, ), grad_file=dict( argstr="-grad %s", extensions=None, xor=["grad_fsl"], ), grad_fsl=dict( argstr="-fslgrad %s %s", xor=["grad_file"], ), in_bval=dict( extensions=None, ), in_bvec=dict( argstr="-fslgrad %s %s", extensions=None, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), in_mask=dict( argstr="-mask %s", extensions=None, ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), out_file=dict( argstr="%s", extensions=None, genfile=True, keep_extension=True, name_source="in_file", name_template="%s_biascorr", position=-1, ), use_ants=dict( argstr="ants", mandatory=True, position=0, xor=["use_fsl"], ), use_fsl=dict( argstr="fsl", mandatory=True, position=0, xor=["use_ants"], ), ) inputs = DWIBiasCorrect.input_spec() for key, 
metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DWIBiasCorrect_outputs(): output_map = dict( bias=dict( extensions=None, ), out_file=dict( extensions=None, ), ) outputs = DWIBiasCorrect.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_DWIDenoise.py000066400000000000000000000042331413403311400256130ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import DWIDenoise def test_DWIDenoise_inputs(): input_map = dict( args=dict( argstr="%s", ), bval_scale=dict( argstr="-bvalue_scaling %s", ), environ=dict( nohash=True, usedefault=True, ), extent=dict( argstr="-extent %d,%d,%d", ), grad_file=dict( argstr="-grad %s", extensions=None, xor=["grad_fsl"], ), grad_fsl=dict( argstr="-fslgrad %s %s", xor=["grad_file"], ), in_bval=dict( extensions=None, ), in_bvec=dict( argstr="-fslgrad %s %s", extensions=None, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), mask=dict( argstr="-mask %s", extensions=None, position=1, ), noise=dict( argstr="-noise %s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_noise", ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), out_file=dict( argstr="%s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_denoised", position=-1, ), ) inputs = DWIDenoise.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DWIDenoise_outputs(): output_map = dict( noise=dict( extensions=None, ), out_file=dict( extensions=None, ), ) outputs = DWIDenoise.output_spec() for key, metadata in list(output_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_DWIExtract.py000066400000000000000000000036431413403311400256430ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import DWIExtract def test_DWIExtract_inputs(): input_map = dict( args=dict( argstr="%s", ), bval_scale=dict( argstr="-bvalue_scaling %s", ), bzero=dict( argstr="-bzero", ), environ=dict( nohash=True, usedefault=True, ), grad_file=dict( argstr="-grad %s", extensions=None, xor=["grad_fsl"], ), grad_fsl=dict( argstr="-fslgrad %s %s", xor=["grad_file"], ), in_bval=dict( extensions=None, ), in_bvec=dict( argstr="-fslgrad %s %s", extensions=None, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), nobzero=dict( argstr="-no_bzero", ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, ), shell=dict( argstr="-shell %s", sep=",", ), singleshell=dict( argstr="-singleshell", ), ) inputs = DWIExtract.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DWIExtract_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = DWIExtract.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_DWIPreproc.py000066400000000000000000000061171413403311400256420ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import DWIPreproc def test_DWIPreproc_inputs(): input_map = dict( align_seepi=dict( argstr="-align_seepi", ), args=dict( argstr="%s", ), bval_scale=dict( argstr="-bvalue_scaling %s", ), eddy_options=dict( argstr='-eddy_options "%s"', ), environ=dict( 
nohash=True, usedefault=True, ), export_grad_fsl=dict( argstr="-export_grad_fsl", ), export_grad_mrtrix=dict( argstr="-export_grad_mrtrix", ), grad_file=dict( argstr="-grad %s", extensions=None, xor=["grad_fsl"], ), grad_fsl=dict( argstr="-fslgrad %s %s", xor=["grad_file"], ), in_bval=dict( extensions=None, ), in_bvec=dict( argstr="-fslgrad %s %s", extensions=None, ), in_epi=dict( argstr="-se_epi %s", extensions=None, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=0, ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=1, usedefault=True, ), out_grad_fsl=dict( argstr="%s, %s", requires=["export_grad_fsl"], ), out_grad_mrtrix=dict( argstr="%s", extensions=None, requires=["export_grad_mrtrix"], usedefault=True, ), pe_dir=dict( argstr="-pe_dir %s", mandatory=True, ), ro_time=dict( argstr="-readout_time %f", ), rpe_options=dict( argstr="-rpe_%s", mandatory=True, position=2, ), topup_options=dict( argstr='-topup_options "%s"', ), ) inputs = DWIPreproc.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DWIPreproc_outputs(): output_map = dict( out_file=dict( argstr="%s", extensions=None, ), out_fsl_bval=dict( argstr="%s", extensions=None, usedefault=True, ), out_fsl_bvec=dict( argstr="%s", extensions=None, usedefault=True, ), out_grad_mrtrix=dict( argstr="%s", extensions=None, usedefault=True, ), ) outputs = DWIPreproc.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_EstimateFOD.py000066400000000000000000000062441413403311400257710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..reconst import EstimateFOD def test_EstimateFOD_inputs(): input_map = dict( 
algorithm=dict( argstr="%s", mandatory=True, position=-8, ), args=dict( argstr="%s", ), bval_scale=dict( argstr="-bvalue_scaling %s", ), csf_odf=dict( argstr="%s", extensions=None, position=-1, usedefault=True, ), csf_txt=dict( argstr="%s", extensions=None, position=-2, ), environ=dict( nohash=True, usedefault=True, ), gm_odf=dict( argstr="%s", extensions=None, position=-3, usedefault=True, ), gm_txt=dict( argstr="%s", extensions=None, position=-4, ), grad_file=dict( argstr="-grad %s", extensions=None, xor=["grad_fsl"], ), grad_fsl=dict( argstr="-fslgrad %s %s", xor=["grad_file"], ), in_bval=dict( extensions=None, ), in_bvec=dict( argstr="-fslgrad %s %s", extensions=None, ), in_dirs=dict( argstr="-directions %s", extensions=None, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-7, ), mask_file=dict( argstr="-mask %s", extensions=None, ), max_sh=dict( argstr="-lmax %s", sep=",", usedefault=True, ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), predicted_signal=dict( argstr="-predicted_signal %s", extensions=None, ), shell=dict( argstr="-shell %s", sep=",", ), wm_odf=dict( argstr="%s", extensions=None, mandatory=True, position=-5, usedefault=True, ), wm_txt=dict( argstr="%s", extensions=None, mandatory=True, position=-6, ), ) inputs = EstimateFOD.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_EstimateFOD_outputs(): output_map = dict( csf_odf=dict( argstr="%s", extensions=None, ), gm_odf=dict( argstr="%s", extensions=None, ), predicted_signal=dict( extensions=None, ), wm_odf=dict( argstr="%s", extensions=None, ), ) outputs = EstimateFOD.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_FitTensor.py000066400000000000000000000041441413403311400255770ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..reconst import FitTensor def test_FitTensor_inputs(): input_map = dict( args=dict( argstr="%s", ), bval_scale=dict( argstr="-bvalue_scaling %s", ), environ=dict( nohash=True, usedefault=True, ), grad_file=dict( argstr="-grad %s", extensions=None, xor=["grad_fsl"], ), grad_fsl=dict( argstr="-fslgrad %s %s", xor=["grad_file"], ), in_bval=dict( extensions=None, ), in_bvec=dict( argstr="-fslgrad %s %s", extensions=None, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), in_mask=dict( argstr="-mask %s", extensions=None, ), method=dict( argstr="-method %s", ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), predicted_signal=dict( argstr="-predicted_signal %s", extensions=None, ), reg_term=dict( argstr="-regularisation %f", max_ver="0.3.13", ), ) inputs = FitTensor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FitTensor_outputs(): output_map = dict( out_file=dict( extensions=None, ), predicted_signal=dict( extensions=None, ), ) outputs = FitTensor.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_Generate5tt.py000066400000000000000000000034001413403311400260430ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Generate5tt def test_Generate5tt_inputs(): input_map = dict( algorithm=dict( argstr="%s", mandatory=True, position=-3, ), args=dict( argstr="%s", ), bval_scale=dict( argstr="-bvalue_scaling %s", ), 
environ=dict( nohash=True, usedefault=True, ), grad_file=dict( argstr="-grad %s", extensions=None, xor=["grad_fsl"], ), grad_fsl=dict( argstr="-fslgrad %s %s", xor=["grad_file"], ), in_bval=dict( extensions=None, ), in_bvec=dict( argstr="-fslgrad %s %s", extensions=None, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, ), ) inputs = Generate5tt.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Generate5tt_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Generate5tt.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_LabelConfig.py000066400000000000000000000035021413403311400260240ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..connectivity import LabelConfig def test_LabelConfig_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_config=dict( argstr="%s", extensions=None, position=-2, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), lut_aal=dict( argstr="-lut_aal %s", extensions=None, ), lut_basic=dict( argstr="-lut_basic %s", extensions=None, ), lut_fs=dict( argstr="-lut_freesurfer %s", extensions=None, ), lut_itksnap=dict( argstr="-lut_itksnap %s", extensions=None, ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), spine=dict( argstr="-spine %s", extensions=None, ), ) inputs = LabelConfig.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_LabelConfig_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = LabelConfig.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_LabelConvert.py000066400000000000000000000031031413403311400262340ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..connectivity import LabelConvert def test_LabelConvert_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_config=dict( argstr="%s", extensions=None, position=-2, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-4, ), in_lut=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), num_threads=dict( argstr="-nthreads %d", nohash=True, ), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), spine=dict( argstr="-spine %s", extensions=None, ), ) inputs = LabelConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_LabelConvert_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = LabelConvert.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_MRConvert.py000066400000000000000000000037601413403311400255440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import MRConvert def test_MRConvert_inputs(): input_map = dict( args=dict( argstr="%s", ), axes=dict( argstr="-axes %s", sep=",", ), bval_scale=dict( argstr="-bvalue_scaling %s", ), 
coord=dict( argstr="-coord %s", sep=" ", ), environ=dict( nohash=True, usedefault=True, ), grad_file=dict( argstr="-grad %s", extensions=None, xor=["grad_fsl"], ), grad_fsl=dict( argstr="-fslgrad %s %s", xor=["grad_file"], ), in_bval=dict( extensions=None, ), in_bvec=dict( argstr="-fslgrad %s %s", extensions=None, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), scaling=dict( argstr="-scaling %s", sep=",", ), vox=dict( argstr="-vox %s", sep=",", ), ) inputs = MRConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRConvert_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MRConvert.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_MRDeGibbs.py000066400000000000000000000042051413403311400254160ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import MRDeGibbs def test_MRDeGibbs_inputs(): input_map = dict( args=dict( argstr="%s", ), axes=dict( argstr="-axes %s", maxlen=2, minlen=2, sep=",", usedefault=True, ), bval_scale=dict( argstr="-bvalue_scaling %s", ), environ=dict( nohash=True, usedefault=True, ), grad_file=dict( argstr="-grad %s", extensions=None, xor=["grad_fsl"], ), grad_fsl=dict( argstr="-fslgrad %s %s", xor=["grad_file"], ), in_bval=dict( extensions=None, ), in_bvec=dict( argstr="-fslgrad %s %s", extensions=None, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), maxW=dict( argstr="-maxW %d", usedefault=True, ), minW=dict( argstr="-minW %d", usedefault=True, ), nshifts=dict( 
argstr="-nshifts %d", usedefault=True, ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), out_file=dict( argstr="%s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_unr", position=-1, ), ) inputs = MRDeGibbs.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRDeGibbs_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MRDeGibbs.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_MRMath.py000066400000000000000000000034441413403311400250140ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import MRMath def test_MRMath_inputs(): input_map = dict( args=dict( argstr="%s", ), axis=dict( argstr="-axis %d", ), bval_scale=dict( argstr="-bvalue_scaling %s", ), environ=dict( nohash=True, usedefault=True, ), grad_file=dict( argstr="-grad %s", extensions=None, xor=["grad_fsl"], ), grad_fsl=dict( argstr="-fslgrad %s %s", xor=["grad_file"], ), in_bval=dict( extensions=None, ), in_bvec=dict( argstr="-fslgrad %s %s", extensions=None, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), operation=dict( argstr="%s", mandatory=True, position=-2, ), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, ), ) inputs = MRMath.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRMath_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MRMath.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_MRResize.py000066400000000000000000000043701413403311400253630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import MRResize def test_MRResize_inputs(): input_map = dict( args=dict( argstr="%s", ), bval_scale=dict( argstr="-bvalue_scaling %s", ), environ=dict( nohash=True, usedefault=True, ), grad_file=dict( argstr="-grad %s", extensions=None, xor=["grad_fsl"], ), grad_fsl=dict( argstr="-fslgrad %s %s", xor=["grad_file"], ), image_size=dict( argstr="-size %d,%d,%d", mandatory=True, xor=["voxel_size", "scale_factor"], ), in_bval=dict( extensions=None, ), in_bvec=dict( argstr="-fslgrad %s %s", extensions=None, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), interpolation=dict( argstr="-interp %s", usedefault=True, ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), out_file=dict( argstr="%s", extensions=None, keep_extension=True, name_source=["in_file"], name_template="%s_resized", position=-1, ), scale_factor=dict( argstr="-scale %g,%g,%g", mandatory=True, xor=["image_size", "voxel_size"], ), voxel_size=dict( argstr="-voxel %g,%g,%g", mandatory=True, xor=["image_size", "scale_factor"], ), ) inputs = MRResize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MRResize_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MRResize.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_MRTrix3Base.py000066400000000000000000000007621413403311400257270ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import MRTrix3Base def 
test_MRTrix3Base_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), ) inputs = MRTrix3Base.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_Mesh2PVE.py000066400000000000000000000025331413403311400252130ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Mesh2PVE def test_Mesh2PVE_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), in_first=dict( argstr="-first %s", extensions=None, ), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), reference=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), ) inputs = Mesh2PVE.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Mesh2PVE_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Mesh2PVE.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_ReplaceFSwithFIRST.py000066400000000000000000000026421413403311400271730ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import ReplaceFSwithFIRST def test_ReplaceFSwithFIRST_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_config=dict( argstr="%s", extensions=None, position=-2, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-4, ), in_t1w=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), 
out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), ) inputs = ReplaceFSwithFIRST.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ReplaceFSwithFIRST_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ReplaceFSwithFIRST.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_ResponseSD.py000066400000000000000000000047111413403311400257070ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import ResponseSD def test_ResponseSD_inputs(): input_map = dict( algorithm=dict( argstr="%s", mandatory=True, position=1, ), args=dict( argstr="%s", ), bval_scale=dict( argstr="-bvalue_scaling %s", ), csf_file=dict( argstr="%s", extensions=None, position=-1, ), environ=dict( nohash=True, usedefault=True, ), gm_file=dict( argstr="%s", extensions=None, position=-2, ), grad_file=dict( argstr="-grad %s", extensions=None, xor=["grad_fsl"], ), grad_fsl=dict( argstr="-fslgrad %s %s", xor=["grad_file"], ), in_bval=dict( extensions=None, ), in_bvec=dict( argstr="-fslgrad %s %s", extensions=None, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-5, ), in_mask=dict( argstr="-mask %s", extensions=None, ), max_sh=dict( argstr="-lmax %s", sep=",", ), mtt_file=dict( argstr="%s", extensions=None, position=-4, ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), wm_file=dict( argstr="%s", extensions=None, position=-3, usedefault=True, ), ) inputs = ResponseSD.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ResponseSD_outputs(): output_map = dict( 
csf_file=dict( argstr="%s", extensions=None, ), gm_file=dict( argstr="%s", extensions=None, ), wm_file=dict( argstr="%s", extensions=None, ), ) outputs = ResponseSD.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_SH2Amp.py000066400000000000000000000025541413403311400247170ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import SH2Amp def test_SH2Amp_inputs(): input_map = dict( args=dict( argstr="%s", ), directions=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), nonnegative=dict( argstr="-nonnegative", ), out_file=dict( argstr="%s", extensions=None, name_source=["in_file"], name_template="%s_amp.mif", position=-1, usedefault=True, ), ) inputs = SH2Amp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SH2Amp_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = SH2Amp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_SHConv.py000066400000000000000000000024451413403311400250240ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import SHConv def test_SHConv_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-3, ), out_file=dict( argstr="%s", extensions=None, name_source=["in_file"], name_template="%s_shconv.mif", position=-1, usedefault=True, ), 
response=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), ) inputs = SHConv.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SHConv_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = SHConv.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_TCK2VTK.py000066400000000000000000000025471413403311400247570ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import TCK2VTK def test_TCK2VTK_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), out_file=dict( argstr="%s", extensions=None, position=-1, usedefault=True, ), reference=dict( argstr="-image %s", extensions=None, ), voxel=dict( argstr="-image %s", extensions=None, ), ) inputs = TCK2VTK.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TCK2VTK_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TCK2VTK.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_TensorMetrics.py000066400000000000000000000034411413403311400264620ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import TensorMetrics def test_TensorMetrics_inputs(): input_map = dict( args=dict( argstr="%s", ), component=dict( argstr="-num %s", sep=",", 
usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, ), in_mask=dict( argstr="-mask %s", extensions=None, ), modulate=dict( argstr="-modulate %s", ), out_adc=dict( argstr="-adc %s", extensions=None, ), out_eval=dict( argstr="-value %s", extensions=None, ), out_evec=dict( argstr="-vector %s", extensions=None, ), out_fa=dict( argstr="-fa %s", extensions=None, ), ) inputs = TensorMetrics.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TensorMetrics_outputs(): output_map = dict( out_adc=dict( extensions=None, ), out_eval=dict( extensions=None, ), out_evec=dict( extensions=None, ), out_fa=dict( extensions=None, ), ) outputs = TensorMetrics.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tests/test_auto_Tractography.py000066400000000000000000000112061413403311400263260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..tracking import Tractography def test_Tractography_inputs(): input_map = dict( act_file=dict( argstr="-act %s", extensions=None, ), algorithm=dict( argstr="-algorithm %s", usedefault=True, ), angle=dict( argstr="-angle %f", ), args=dict( argstr="%s", ), backtrack=dict( argstr="-backtrack", ), bval_scale=dict( argstr="-bvalue_scaling %s", ), crop_at_gmwmi=dict( argstr="-crop_at_gmwmi", ), cutoff=dict( argstr="-cutoff %f", ), cutoff_init=dict( argstr="-initcutoff %f", ), downsample=dict( argstr="-downsample %f", ), environ=dict( nohash=True, usedefault=True, ), grad_file=dict( argstr="-grad %s", extensions=None, xor=["grad_fsl"], ), grad_fsl=dict( argstr="-fslgrad %s %s", xor=["grad_file"], ), in_bval=dict( extensions=None, ), in_bvec=dict( argstr="-fslgrad %s %s", 
extensions=None, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=-2, ), init_dir=dict( argstr="-initdirection %f,%f,%f", ), max_length=dict( argstr="-maxlength %f", ), max_seed_attempts=dict( argstr="-max_seed_attempts %d", ), max_tracks=dict( argstr="-maxnum %d", ), min_length=dict( argstr="-minlength %f", ), n_samples=dict( argstr="-samples %d", usedefault=True, ), n_tracks=dict( argstr="-number %d", max_ver="0.4", ), n_trials=dict( argstr="-trials %d", ), noprecompt=dict( argstr="-noprecomputed", ), nthreads=dict( argstr="-nthreads %d", nohash=True, ), out_file=dict( argstr="%s", extensions=None, mandatory=True, position=-1, usedefault=True, ), out_seeds=dict( argstr="-output_seeds %s", extensions=None, usedefault=True, ), power=dict( argstr="-power %d", ), roi_excl=dict( argstr="-exclude %s", ), roi_incl=dict( argstr="-include %s", ), roi_mask=dict( argstr="-mask %s", ), seed_dynamic=dict( argstr="-seed_dynamic %s", extensions=None, ), seed_gmwmi=dict( argstr="-seed_gmwmi %s", extensions=None, requires=["act_file"], ), seed_grid_voxel=dict( argstr="-seed_grid_per_voxel %s %d", xor=["seed_image", "seed_rnd_voxel"], ), seed_image=dict( argstr="-seed_image %s", extensions=None, ), seed_rejection=dict( argstr="-seed_rejection %s", extensions=None, ), seed_rnd_voxel=dict( argstr="-seed_random_per_voxel %s %d", xor=["seed_image", "seed_grid_voxel"], ), seed_sphere=dict( argstr="-seed_sphere %f,%f,%f,%f", ), select=dict( argstr="-select %d", min_ver="3", ), sph_trait=dict( argstr="%f,%f,%f,%f", ), step_size=dict( argstr="-step %f", ), stop=dict( argstr="-stop", ), unidirectional=dict( argstr="-unidirectional", ), use_rk4=dict( argstr="-rk4", ), ) inputs = Tractography.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Tractography_outputs(): output_map = dict( out_file=dict( extensions=None, ), out_seeds=dict( extensions=None, 
), ) outputs = Tractography.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/mrtrix3/tracking.py000066400000000000000000000274671413403311400222500ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: # -*- coding: utf-8 -*- import os.path as op from ..base import traits, TraitedSpec, File from .base import MRTrix3BaseInputSpec, MRTrix3Base class TractographyInputSpec(MRTrix3BaseInputSpec): sph_trait = traits.Tuple( traits.Float, traits.Float, traits.Float, traits.Float, argstr="%f,%f,%f,%f" ) in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="input file to be processed", ) out_file = File( "tracked.tck", argstr="%s", mandatory=True, position=-1, usedefault=True, desc="output file containing tracks", ) algorithm = traits.Enum( "iFOD2", "FACT", "iFOD1", "Nulldist", "SD_Stream", "Tensor_Det", "Tensor_Prob", usedefault=True, argstr="-algorithm %s", desc="Tractography algorithm to be used -- References:" "[FACT]_, [iFOD1]_, [iFOD2]_, [Nulldist]_, [Tensor_Det]_, [Tensor_Prob]_.", ) # ROIs processing options roi_incl = traits.Either( File(exists=True), sph_trait, argstr="-include %s", desc=( "specify an inclusion region of interest, streamlines must" " traverse ALL inclusion regions to be accepted" ), ) roi_excl = traits.Either( File(exists=True), sph_trait, argstr="-exclude %s", desc=( "specify an exclusion region of interest, streamlines that" " enter ANY exclude region will be discarded" ), ) roi_mask = traits.Either( File(exists=True), sph_trait, argstr="-mask %s", desc=( "specify a masking region of interest. 
If defined," "streamlines exiting the mask will be truncated" ), ) # Streamlines tractography options step_size = traits.Float( argstr="-step %f", desc=( "set the step size of the algorithm in mm (default is 0.1" " x voxelsize; for iFOD2: 0.5 x voxelsize)" ), ) angle = traits.Float( argstr="-angle %f", desc=( "set the maximum angle between successive steps (default " "is 90deg x stepsize / voxelsize)" ), ) n_tracks = traits.Int( argstr="-number %d", max_ver="0.4", desc=( "set the desired number of tracks. The program will continue" " to generate tracks until this number of tracks have been " "selected and written to the output file" ), ) select = traits.Int( argstr="-select %d", min_ver="3", desc=( "set the desired number of tracks. The program will continue" " to generate tracks until this number of tracks have been " "selected and written to the output file" ), ) max_tracks = traits.Int( argstr="-maxnum %d", desc=( "set the maximum number of tracks to generate. The program " "will not generate more tracks than this number, even if " "the desired number of tracks hasn't yet been reached " "(default is 100 x number)" ), ) max_length = traits.Float( argstr="-maxlength %f", desc=( "set the maximum length of any track in mm (default is " "100 x voxelsize)" ), ) min_length = traits.Float( argstr="-minlength %f", desc=( "set the minimum length of any track in mm (default is " "5 x voxelsize)" ), ) cutoff = traits.Float( argstr="-cutoff %f", desc=( "set the FA or FOD amplitude cutoff for terminating " "tracks (default is 0.1)" ), ) cutoff_init = traits.Float( argstr="-initcutoff %f", desc=( "set the minimum FA or FOD amplitude for initiating " "tracks (default is the same as the normal cutoff)" ), ) n_trials = traits.Int( argstr="-trials %d", desc=( "set the maximum number of sampling trials at each point" " (only used for probabilistic tracking)" ), ) unidirectional = traits.Bool( argstr="-unidirectional", desc=( "track from the seed point in one direction only " 
"(default is to track in both directions)" ), ) init_dir = traits.Tuple( traits.Float, traits.Float, traits.Float, argstr="-initdirection %f,%f,%f", desc=( "specify an initial direction for the tracking (this " "should be supplied as a vector of 3 comma-separated values" ), ) noprecompt = traits.Bool( argstr="-noprecomputed", desc=( "do NOT pre-compute legendre polynomial values. Warning: this " "will slow down the algorithm by a factor of approximately 4" ), ) power = traits.Int( argstr="-power %d", desc=("raise the FOD to the power specified (default is 1/nsamples)"), ) n_samples = traits.Int( 4, usedefault=True, argstr="-samples %d", desc=( "set the number of FOD samples to take per step for the 2nd " "order (iFOD2) method" ), ) use_rk4 = traits.Bool( argstr="-rk4", desc=( "use 4th-order Runge-Kutta integration (slower, but eliminates" " curvature overshoot in 1st-order deterministic methods)" ), ) stop = traits.Bool( argstr="-stop", desc=( "stop propagating a streamline once it has traversed all " "include regions" ), ) downsample = traits.Float( argstr="-downsample %f", desc="downsample the generated streamlines to reduce output file size", ) # Anatomically-Constrained Tractography options act_file = File( exists=True, argstr="-act %s", desc=( "use the Anatomically-Constrained Tractography framework during" " tracking; provided image must be in the 5TT " "(five - tissue - type) format" ), ) backtrack = traits.Bool(argstr="-backtrack", desc="allow tracks to be truncated") crop_at_gmwmi = traits.Bool( argstr="-crop_at_gmwmi", desc=( "crop streamline endpoints more " "precisely as they cross the GM-WM interface" ), ) # Tractography seeding options seed_sphere = traits.Tuple( traits.Float, traits.Float, traits.Float, traits.Float, argstr="-seed_sphere %f,%f,%f,%f", desc="spherical seed", ) seed_image = File( exists=True, argstr="-seed_image %s", desc="seed streamlines entirely at random within mask", ) seed_rnd_voxel = traits.Tuple( File(exists=True), 
traits.Int(), argstr="-seed_random_per_voxel %s %d", xor=["seed_image", "seed_grid_voxel"], desc=( "seed a fixed number of streamlines per voxel in a mask " "image; random placement of seeds in each voxel" ), ) seed_grid_voxel = traits.Tuple( File(exists=True), traits.Int(), argstr="-seed_grid_per_voxel %s %d", xor=["seed_image", "seed_rnd_voxel"], desc=( "seed a fixed number of streamlines per voxel in a mask " "image; place seeds on a 3D mesh grid (grid_size argument " "is per axis; so a grid_size of 3 results in 27 seeds per" " voxel)" ), ) seed_rejection = File( exists=True, argstr="-seed_rejection %s", desc=( "seed from an image using rejection sampling (higher " "values = more probable to seed from" ), ) seed_gmwmi = File( exists=True, argstr="-seed_gmwmi %s", requires=["act_file"], desc=( "seed from the grey matter - white matter interface (only " "valid if using ACT framework)" ), ) seed_dynamic = File( exists=True, argstr="-seed_dynamic %s", desc=( "determine seed points dynamically using the SIFT model " "(must not provide any other seeding mechanism). Note that" " while this seeding mechanism improves the distribution of" " reconstructed streamlines density, it should NOT be used " "as a substitute for the SIFT method itself." ), ) max_seed_attempts = traits.Int( argstr="-max_seed_attempts %d", desc=( "set the maximum number of times that the tracking " "algorithm should attempt to find an appropriate tracking" " direction from a given seed point" ), ) out_seeds = File( "out_seeds.nii.gz", usedefault=True, argstr="-output_seeds %s", desc=("output the seed location of all successful streamlines to" " a file"), ) class TractographyOutputSpec(TraitedSpec): out_file = File(exists=True, desc="the output filtered tracks") out_seeds = File( desc=("output the seed location of all successful" " streamlines to a file") ) class Tractography(MRTrix3Base): """ Performs streamlines tractography after selecting the appropriate algorithm. References ---------- .. 
[FACT] Mori, S.; Crain, B. J.; Chacko, V. P. & van Zijl, P. C. M. Three-dimensional tracking of axonal projections in the brain by magnetic resonance imaging. Annals of Neurology, 1999, 45, 265-269 .. [iFOD1] Tournier, J.-D.; Calamante, F. & Connelly, A. MRtrix: Diffusion tractography in crossing fiber regions. Int. J. Imaging Syst. Technol., 2012, 22, 53-66 .. [iFOD2] Tournier, J.-D.; Calamante, F. & Connelly, A. Improved probabilistic streamlines tractography by 2nd order integration over fibre orientation distributions. Proceedings of the International Society for Magnetic Resonance in Medicine, 2010, 1670 .. [Nulldist] Morris, D. M.; Embleton, K. V. & Parker, G. J. Probabilistic fibre tracking: Differentiation of connections from chance events. NeuroImage, 2008, 42, 1329-1339 .. [Tensor_Det] Basser, P. J.; Pajevic, S.; Pierpaoli, C.; Duda, J. and Aldroubi, A. In vivo fiber tractography using DT-MRI data. Magnetic Resonance in Medicine, 2000, 44, 625-632 .. [Tensor_Prob] Jones, D. Tractography Gone Wild: Probabilistic Fibre Tracking Using the Wild Bootstrap With Diffusion Tensor MRI. 
IEEE Transactions on Medical Imaging, 2008, 27, 1268-1274 Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> tk = mrt.Tractography() >>> tk.inputs.in_file = 'fods.mif' >>> tk.inputs.roi_mask = 'mask.nii.gz' >>> tk.inputs.seed_sphere = (80, 100, 70, 10) >>> tk.cmdline # doctest: +ELLIPSIS 'tckgen -algorithm iFOD2 -samples 4 -output_seeds out_seeds.nii.gz \ -mask mask.nii.gz -seed_sphere \ 80.000000,100.000000,70.000000,10.000000 fods.mif tracked.tck' >>> tk.run() # doctest: +SKIP """ _cmd = "tckgen" input_spec = TractographyInputSpec output_spec = TractographyOutputSpec def _format_arg(self, name, trait_spec, value): if "roi_" in name and isinstance(value, tuple): value = ["%f" % v for v in value] return trait_spec.argstr % ",".join(value) return super(Tractography, self)._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs nipype-1.7.0/nipype/interfaces/mrtrix3/utils.py000066400000000000000000000636731413403311400216050ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: # -*- coding: utf-8 -*- import os.path as op from ..base import ( CommandLineInputSpec, CommandLine, traits, TraitedSpec, File, InputMultiPath, isdefined, ) from .base import MRTrix3BaseInputSpec, MRTrix3Base class BrainMaskInputSpec(MRTrix3BaseInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="input diffusion weighted images", ) out_file = File( "brainmask.mif", argstr="%s", mandatory=True, position=-1, usedefault=True, desc="output brain mask", ) class BrainMaskOutputSpec(TraitedSpec): out_file = File(exists=True, desc="the output response file") class BrainMask(CommandLine): """ Convert a mesh surface to a partial volume estimation image Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> bmsk = mrt.BrainMask() >>> 
bmsk.inputs.in_file = 'dwi.mif' >>> bmsk.cmdline # doctest: +ELLIPSIS 'dwi2mask dwi.mif brainmask.mif' >>> bmsk.run() # doctest: +SKIP """ _cmd = "dwi2mask" input_spec = BrainMaskInputSpec output_spec = BrainMaskOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class Mesh2PVEInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-3, desc="input mesh" ) reference = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="input reference image", ) in_first = File( exists=True, argstr="-first %s", desc="indicates that the mesh file is provided by FSL FIRST", ) out_file = File( "mesh2volume.nii.gz", argstr="%s", mandatory=True, position=-1, usedefault=True, desc="output file containing SH coefficients", ) class Mesh2PVEOutputSpec(TraitedSpec): out_file = File(exists=True, desc="the output response file") class Mesh2PVE(CommandLine): """ Convert a mesh surface to a partial volume estimation image Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> m2p = mrt.Mesh2PVE() >>> m2p.inputs.in_file = 'surf1.vtk' >>> m2p.inputs.reference = 'dwi.mif' >>> m2p.inputs.in_first = 'T1.nii.gz' >>> m2p.cmdline # doctest: +ELLIPSIS 'mesh2pve -first T1.nii.gz surf1.vtk dwi.mif mesh2volume.nii.gz' >>> m2p.run() # doctest: +SKIP """ _cmd = "mesh2pve" input_spec = Mesh2PVEInputSpec output_spec = Mesh2PVEOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class Generate5ttInputSpec(MRTrix3BaseInputSpec): algorithm = traits.Enum( "fsl", "gif", "freesurfer", argstr="%s", position=-3, mandatory=True, desc="tissue segmentation algorithm", ) in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="input image" ) out_file = File(argstr="%s", mandatory=True, position=-1, desc="output image") class Generate5ttOutputSpec(TraitedSpec): 
out_file = File(exists=True, desc="output image") class Generate5tt(MRTrix3Base): """ Generate a 5TT image suitable for ACT using the selected algorithm Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> gen5tt = mrt.Generate5tt() >>> gen5tt.inputs.in_file = 'T1.nii.gz' >>> gen5tt.inputs.algorithm = 'fsl' >>> gen5tt.inputs.out_file = '5tt.mif' >>> gen5tt.cmdline # doctest: +ELLIPSIS '5ttgen fsl T1.nii.gz 5tt.mif' >>> gen5tt.run() # doctest: +SKIP """ _cmd = "5ttgen" input_spec = Generate5ttInputSpec output_spec = Generate5ttOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class TensorMetricsInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-1, desc="input DTI image" ) out_fa = File(argstr="-fa %s", desc="output FA file") out_adc = File(argstr="-adc %s", desc="output ADC file") out_evec = File(argstr="-vector %s", desc="output selected eigenvector(s) file") out_eval = File(argstr="-value %s", desc="output selected eigenvalue(s) file") component = traits.List( [1], usedefault=True, argstr="-num %s", sep=",", desc=( "specify the desired eigenvalue/eigenvector(s). 
Note that " "several eigenvalues can be specified as a number sequence" ), ) in_mask = File( exists=True, argstr="-mask %s", desc=( "only perform computation within the specified binary" " brain mask image" ), ) modulate = traits.Enum( "FA", "none", "eval", argstr="-modulate %s", desc=("how to modulate the magnitude of the" " eigenvectors"), ) class TensorMetricsOutputSpec(TraitedSpec): out_fa = File(desc="output FA file") out_adc = File(desc="output ADC file") out_evec = File(desc="output selected eigenvector(s) file") out_eval = File(desc="output selected eigenvalue(s) file") class TensorMetrics(CommandLine): """ Compute metrics from tensors Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> comp = mrt.TensorMetrics() >>> comp.inputs.in_file = 'dti.mif' >>> comp.inputs.out_fa = 'fa.mif' >>> comp.cmdline # doctest: +ELLIPSIS 'tensor2metric -num 1 -fa fa.mif dti.mif' >>> comp.run() # doctest: +SKIP """ _cmd = "tensor2metric" input_spec = TensorMetricsInputSpec output_spec = TensorMetricsOutputSpec def _list_outputs(self): outputs = self.output_spec().get() for k in list(outputs.keys()): if isdefined(getattr(self.inputs, k)): outputs[k] = op.abspath(getattr(self.inputs, k)) return outputs class ComputeTDIInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="input tractography" ) out_file = File( "tdi.mif", argstr="%s", usedefault=True, position=-1, desc="output TDI file" ) reference = File( exists=True, argstr="-template %s", desc="a reference" "image to be used as template", ) vox_size = traits.List( traits.Int, argstr="-vox %s", sep=",", desc="voxel dimensions" ) data_type = traits.Enum( "float", "unsigned int", argstr="-datatype %s", desc="specify output image data type", ) use_dec = traits.Bool(argstr="-dec", desc="perform mapping in DEC space") dixel = File( argstr="-dixel %s", desc="map streamlines to" "dixels within each voxel. 
Directions are stored as" "azimuth elevation pairs.", ) max_tod = traits.Int( argstr="-tod %d", desc="generate a Track Orientation " "Distribution (TOD) in each voxel.", ) contrast = traits.Enum( "tdi", "length", "invlength", "scalar_map", "scalar_map_conut", "fod_amp", "curvature", argstr="-constrast %s", desc="define the desired " "form of contrast for the output image", ) in_map = File( exists=True, argstr="-image %s", desc="provide the" "scalar image map for generating images with " "'scalar_map' contrasts, or the SHs image for fod_amp", ) stat_vox = traits.Enum( "sum", "min", "mean", "max", argstr="-stat_vox %s", desc="define the statistic for choosing the final" "voxel intesities for a given contrast", ) stat_tck = traits.Enum( "mean", "sum", "min", "max", "median", "mean_nonzero", "gaussian", "ends_min", "ends_mean", "ends_max", "ends_prod", argstr="-stat_tck %s", desc="define the statistic for choosing " "the contribution to be made by each streamline as a function of" " the samples taken along their lengths.", ) fwhm_tck = traits.Float( argstr="-fwhm_tck %f", desc="define the statistic for choosing the" " contribution to be made by each streamline as a function of the " "samples taken along their lengths", ) map_zero = traits.Bool( argstr="-map_zero", desc="if a streamline has zero contribution based " "on the contrast & statistic, typically it is not mapped; use this " "option to still contribute to the map even if this is the case " "(these non-contributing voxels can then influence the mean value in " "each voxel of the map)", ) upsample = traits.Int( argstr="-upsample %d", desc="upsample the tracks by" " some ratio using Hermite interpolation before " "mappping", ) precise = traits.Bool( argstr="-precise", desc="use a more precise streamline mapping " "strategy, that accurately quantifies the length through each voxel " "(these lengths are then taken into account during TWI calculation)", ) ends_only = traits.Bool( argstr="-ends_only", desc="only map 
the streamline" " endpoints to the image" ) tck_weights = File( exists=True, argstr="-tck_weights_in %s", desc="specify" " a text scalar file containing the streamline weights", ) nthreads = traits.Int( argstr="-nthreads %d", desc="number of threads. if zero, the number" " of available cpus will be used", nohash=True, ) class ComputeTDIOutputSpec(TraitedSpec): out_file = File(desc="output TDI file") class ComputeTDI(MRTrix3Base): """ Use track data as a form of contrast for producing a high-resolution image. .. admonition:: References * For TDI or DEC TDI: Calamante, F.; Tournier, J.-D.; Jackson, G. D. & Connelly, A. Track-density imaging (TDI): Super-resolution white matter imaging using whole-brain track-density mapping. NeuroImage, 2010, 53, 1233-1243 * If using -contrast length and -stat_vox mean: Pannek, K.; Mathias, J. L.; Bigler, E. D.; Brown, G.; Taylor, J. D. & Rose, S. E. The average pathlength map: A diffusion MRI tractography-derived index for studying brain pathology. NeuroImage, 2011, 55, 133-141 * If using -dixel option with TDI contrast only: Smith, R.E., Tournier, J-D., Calamante, F., Connelly, A. A novel paradigm for automated segmentation of very large whole-brain probabilistic tractography data sets. In proc. ISMRM, 2011, 19, 673 * If using -dixel option with any other contrast: Pannek, K., Raffelt, D., Salvado, O., Rose, S. Incorporating directional information in diffusion tractography derived maps: angular track imaging (ATI). In Proc. ISMRM, 2012, 20, 1912 * If using -tod option: Dhollander, T., Emsell, L., Van Hecke, W., Maes, F., Sunaert, S., Suetens, P. Track Orientation Density Imaging (TODI) and Track Orientation Distribution (TOD) based tractography. NeuroImage, 2014, 94, 312-336 * If using other contrasts / statistics: Calamante, F.; Tournier, J.-D.; Smith, R. E. & Connelly, A. A generalised framework for super-resolution track-weighted imaging. NeuroImage, 2012, 59, 2494-2503 * If using -precise mapping option: Smith, R. 
E.; Tournier, J.-D.; Calamante, F. & Connelly, A. SIFT: Spherical-deconvolution informed filtering of tractograms. NeuroImage, 2013, 67, 298-312 (Appendix 3) Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> tdi = mrt.ComputeTDI() >>> tdi.inputs.in_file = 'dti.mif' >>> tdi.cmdline # doctest: +ELLIPSIS 'tckmap dti.mif tdi.mif' >>> tdi.run() # doctest: +SKIP """ _cmd = "tckmap" input_spec = ComputeTDIInputSpec output_spec = ComputeTDIOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class TCK2VTKInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="input tractography" ) out_file = File( "tracks.vtk", argstr="%s", usedefault=True, position=-1, desc="output VTK file" ) reference = File( exists=True, argstr="-image %s", desc="if specified, the properties of" " this image will be used to convert track point positions from real " "(scanner) coordinates into image coordinates (in mm).", ) voxel = File( exists=True, argstr="-image %s", desc="if specified, the properties of" " this image will be used to convert track point positions from real " "(scanner) coordinates into image coordinates.", ) nthreads = traits.Int( argstr="-nthreads %d", desc="number of threads. 
if zero, the number" " of available cpus will be used", nohash=True, ) class TCK2VTKOutputSpec(TraitedSpec): out_file = File(desc="output VTK file") class TCK2VTK(MRTrix3Base): """ Convert a track file to a vtk format, cave: coordinates are in XYZ coordinates not reference Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> vtk = mrt.TCK2VTK() >>> vtk.inputs.in_file = 'tracks.tck' >>> vtk.inputs.reference = 'b0.nii' >>> vtk.cmdline # doctest: +ELLIPSIS 'tck2vtk -image b0.nii tracks.tck tracks.vtk' >>> vtk.run() # doctest: +SKIP """ _cmd = "tck2vtk" input_spec = TCK2VTKInputSpec output_spec = TCK2VTKOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class DWIExtractInputSpec(MRTrix3BaseInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="input image" ) out_file = File(argstr="%s", mandatory=True, position=-1, desc="output image") bzero = traits.Bool(argstr="-bzero", desc="extract b=0 volumes") nobzero = traits.Bool(argstr="-no_bzero", desc="extract non b=0 volumes") singleshell = traits.Bool( argstr="-singleshell", desc="extract volumes with a specific shell" ) shell = traits.List( traits.Float, sep=",", argstr="-shell %s", desc="specify one or more gradient shells", ) class DWIExtractOutputSpec(TraitedSpec): out_file = File(exists=True, desc="output image") class DWIExtract(MRTrix3Base): """ Extract diffusion-weighted volumes, b=0 volumes, or certain shells from a DWI dataset Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> dwiextract = mrt.DWIExtract() >>> dwiextract.inputs.in_file = 'dwi.mif' >>> dwiextract.inputs.bzero = True >>> dwiextract.inputs.out_file = 'b0vols.mif' >>> dwiextract.inputs.grad_fsl = ('bvecs', 'bvals') >>> dwiextract.cmdline # doctest: +ELLIPSIS 'dwiextract -bzero -fslgrad bvecs bvals dwi.mif b0vols.mif' >>> dwiextract.run() # doctest: +SKIP """ _cmd = "dwiextract" input_spec = 
DWIExtractInputSpec output_spec = DWIExtractOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class MRConvertInputSpec(MRTrix3BaseInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-2, desc="input image" ) out_file = File( "dwi.mif", argstr="%s", mandatory=True, position=-1, usedefault=True, desc="output image", ) coord = traits.List( traits.Int, sep=" ", argstr="-coord %s", desc="extract data at the specified coordinates", ) vox = traits.List( traits.Float, sep=",", argstr="-vox %s", desc="change the voxel dimensions" ) axes = traits.List( traits.Int, sep=",", argstr="-axes %s", desc="specify the axes that will be used", ) scaling = traits.List( traits.Float, sep=",", argstr="-scaling %s", desc="specify the data scaling parameter", ) class MRConvertOutputSpec(TraitedSpec): out_file = File(exists=True, desc="output image") class MRConvert(MRTrix3Base): """ Perform conversion between different file types and optionally extract a subset of the input image Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> mrconvert = mrt.MRConvert() >>> mrconvert.inputs.in_file = 'dwi.nii.gz' >>> mrconvert.inputs.grad_fsl = ('bvecs', 'bvals') >>> mrconvert.cmdline # doctest: +ELLIPSIS 'mrconvert -fslgrad bvecs bvals dwi.nii.gz dwi.mif' >>> mrconvert.run() # doctest: +SKIP """ _cmd = "mrconvert" input_spec = MRConvertInputSpec output_spec = MRConvertOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class MRMathInputSpec(MRTrix3BaseInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-3, desc="input image" ) out_file = File(argstr="%s", mandatory=True, position=-1, desc="output image") operation = traits.Enum( "mean", "median", "sum", "product", "rms", "norm", "var", "std", "min", "max", "absmax", "magmax", argstr="%s", position=-2, 
mandatory=True, desc="operation to computer along a specified axis", ) axis = traits.Int( 0, argstr="-axis %d", desc="specfied axis to perform the operation along" ) class MRMathOutputSpec(TraitedSpec): out_file = File(exists=True, desc="output image") class MRMath(MRTrix3Base): """ Compute summary statistic on image intensities along a specified axis of a single image Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> mrmath = mrt.MRMath() >>> mrmath.inputs.in_file = 'dwi.mif' >>> mrmath.inputs.operation = 'mean' >>> mrmath.inputs.axis = 3 >>> mrmath.inputs.out_file = 'dwi_mean.mif' >>> mrmath.inputs.grad_fsl = ('bvecs', 'bvals') >>> mrmath.cmdline # doctest: +ELLIPSIS 'mrmath -axis 3 -fslgrad bvecs bvals dwi.mif mean dwi_mean.mif' >>> mrmath.run() # doctest: +SKIP """ _cmd = "mrmath" input_spec = MRMathInputSpec output_spec = MRMathOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class MRResizeInputSpec(MRTrix3BaseInputSpec): in_file = File( exists=True, argstr="%s", position=-2, mandatory=True, desc="input DWI image" ) image_size = traits.Tuple( (traits.Int, traits.Int, traits.Int), argstr="-size %d,%d,%d", mandatory=True, desc="Number of voxels in each dimension of output image", xor=["voxel_size", "scale_factor"], ) voxel_size = traits.Tuple( (traits.Float, traits.Float, traits.Float), argstr="-voxel %g,%g,%g", mandatory=True, desc="Desired voxel size in mm for the output image", xor=["image_size", "scale_factor"], ) scale_factor = traits.Tuple( (traits.Float, traits.Float, traits.Float), argstr="-scale %g,%g,%g", mandatory=True, desc="Scale factors to rescale the image by in each dimension", xor=["image_size", "voxel_size"], ) interpolation = traits.Enum( "cubic", "nearest", "linear", "sinc", argstr="-interp %s", usedefault=True, desc="set the interpolation method to use when resizing (choices: " "nearest, linear, cubic, sinc. 
Default: cubic).", ) out_file = File( argstr="%s", name_template="%s_resized", name_source=["in_file"], keep_extension=True, position=-1, desc="the output resized DWI image", ) class MRResizeOutputSpec(TraitedSpec): out_file = File(desc="the output resized DWI image", exists=True) class MRResize(MRTrix3Base): """ Resize an image by defining the new image resolution, voxel size or a scale factor. If the image is 4D, then only the first 3 dimensions can be resized. Also, if the image is down-sampled, the appropriate smoothing is automatically applied using Gaussian smoothing. For more information, see Example ------- >>> import nipype.interfaces.mrtrix3 as mrt Defining the new image resolution: >>> image_resize = mrt.MRResize() >>> image_resize.inputs.in_file = 'dwi.mif' >>> image_resize.inputs.image_size = (256, 256, 144) >>> image_resize.cmdline # doctest: +ELLIPSIS 'mrresize -size 256,256,144 -interp cubic dwi.mif dwi_resized.mif' >>> image_resize.run() # doctest: +SKIP Defining the new image's voxel size: >>> voxel_resize = mrt.MRResize() >>> voxel_resize.inputs.in_file = 'dwi.mif' >>> voxel_resize.inputs.voxel_size = (1, 1, 1) >>> voxel_resize.cmdline # doctest: +ELLIPSIS 'mrresize -interp cubic -voxel 1,1,1 dwi.mif dwi_resized.mif' >>> voxel_resize.run() # doctest: +SKIP Defining the scale factor of each image dimension: >>> scale_resize = mrt.MRResize() >>> scale_resize.inputs.in_file = 'dwi.mif' >>> scale_resize.inputs.scale_factor = (0.5,0.5,0.5) >>> scale_resize.cmdline # doctest: +ELLIPSIS 'mrresize -interp cubic -scale 0.5,0.5,0.5 dwi.mif dwi_resized.mif' >>> scale_resize.run() # doctest: +SKIP """ _cmd = "mrresize" input_spec = MRResizeInputSpec output_spec = MRResizeOutputSpec class SHConvInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-3, desc="input ODF image", ) # General options response = File( exists=True, mandatory=True, argstr="%s", position=-2, desc=("The response function"), ) out_file = 
File( name_template="%s_shconv.mif", name_source=["in_file"], argstr="%s", position=-1, usedefault=True, desc="the output spherical harmonics", ) class SHConvOutputSpec(TraitedSpec): out_file = File(exists=True, desc="the output convoluted spherical harmonics file") class SHConv(CommandLine): """ Convolve spherical harmonics with a tissue response function. Useful for checking residuals of ODF estimates. Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> sh = mrt.SHConv() >>> sh.inputs.in_file = 'csd.mif' >>> sh.inputs.response = 'response.txt' >>> sh.cmdline 'shconv csd.mif response.txt csd_shconv.mif' >>> sh.run() # doctest: +SKIP """ _cmd = "shconv" input_spec = SHConvInputSpec output_spec = SHConvOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs class SH2AmpInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="%s", mandatory=True, position=-3, desc="input ODF image", ) # General options directions = File( exists=True, mandatory=True, argstr="%s", position=-2, desc=( "The gradient directions along which to sample the spherical " "harmonics MRtrix format" ), ) out_file = File( name_template="%s_amp.mif", name_source=["in_file"], argstr="%s", position=-1, usedefault=True, desc="the output spherical harmonics", ) nonnegative = traits.Bool( argstr="-nonnegative", desc="cap all negative amplitudes to zero" ) class SH2AmpOutputSpec(TraitedSpec): out_file = File(exists=True, desc="the output convoluted spherical harmonics file") class SH2Amp(CommandLine): """ Sample spherical harmonics on a set of gradient orientations. Useful for checking residuals of ODF estimates. 
Example ------- >>> import nipype.interfaces.mrtrix3 as mrt >>> sh = mrt.SH2Amp() >>> sh.inputs.in_file = 'sh.mif' >>> sh.inputs.directions = 'grads.txt' >>> sh.cmdline 'sh2amp sh.mif grads.txt sh_amp.mif' >>> sh.run() # doctest: +SKIP """ _cmd = "sh2amp" input_spec = SH2AmpInputSpec output_spec = SH2AmpOutputSpec def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = op.abspath(self.inputs.out_file) return outputs nipype-1.7.0/nipype/interfaces/niftyfit/000077500000000000000000000000001413403311400203005ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/niftyfit/__init__.py000066400000000000000000000006561413403311400224200ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ NiftyFit is a software package for multi-parametric model-fitting of 4D MRI. The niftyfit module provides classes for interfacing with the `NiftyFit `__ command line tools. """ from .asl import FitAsl from .dwi import FitDwi, DwiTool from .qt1 import FitQt1 nipype-1.7.0/nipype/interfaces/niftyfit/asl.py000066400000000000000000000145501413403311400214360ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ The ASL module of niftyfit, which wraps the fitting methods in NiftyFit. """ from ..base import File, TraitedSpec, traits, CommandLineInputSpec from .base import NiftyFitCommand from ..niftyreg.base import get_custom_path class FitAslInputSpec(CommandLineInputSpec): """Input Spec for FitAsl.""" desc = "Filename of the 4D ASL (control/label) source image (mandatory)." 
source_file = File( position=1, exists=True, argstr="-source %s", mandatory=True, desc=desc ) pasl = traits.Bool(desc="Fit PASL ASL data [default]", argstr="-pasl") pcasl = traits.Bool(desc="Fit PCASL ASL data", argstr="-pcasl") # *** Output options: desc = "Filename of the Cerebral Blood Flow map (in ml/100g/min)." cbf_file = File( name_source=["source_file"], name_template="%s_cbf.nii.gz", argstr="-cbf %s", desc=desc, ) error_file = File( name_source=["source_file"], name_template="%s_error.nii.gz", argstr="-error %s", desc="Filename of the CBF error map.", ) syn_file = File( name_source=["source_file"], name_template="%s_syn.nii.gz", argstr="-syn %s", desc="Filename of the synthetic ASL data.", ) # *** Input options (see also fit_qt1 for generic T1 fitting): desc = "Filename of the estimated input T1 map (in ms)." t1map = File(exists=True, argstr="-t1map %s", desc=desc) desc = "Filename of the estimated input M0 map." m0map = File(exists=True, argstr="-m0map %s", desc=desc) desc = "Filename of the estimated input M0 map error." m0mape = File(exists=True, argstr="-m0mape %s", desc=desc) desc = "Filename of a [1,2,5]s Inversion Recovery volume (T1/M0 fitting \ carried out internally)." ir_volume = File(exists=True, argstr="-IRvolume %s", desc=desc) desc = "Output of [1,2,5]s Inversion Recovery fitting." ir_output = File(exists=True, argstr="-IRoutput %s", desc=desc) # *** Experimental options (Choose those suitable for the model!): mask = File( position=2, exists=True, desc="Filename of image mask.", argstr="-mask %s" ) t1_art_cmp = traits.Float( desc="T1 of arterial component [1650ms].", argstr="-T1a %f" ) desc = "Single plasma/tissue partition coefficient [0.9ml/g]." 
plasma_coeff = traits.Float(desc=desc, argstr="-L %f") desc = "Labelling efficiency [0.99 (pasl), 0.85 (pcasl)], ensure any \ background suppression pulses are included in -eff" eff = traits.Float(desc=desc, argstr="-eff %f") desc = "Outlier rejection for multi CL volumes (enter z-score threshold \ (e.g. 2.5)) [off]." out = traits.Float(desc=desc, argstr="-out %f") # *** PCASL options (Choose those suitable for the model!): pld = traits.Float(desc="Post Labelling Delay [2000ms].", argstr="-PLD %f") ldd = traits.Float(desc="Labelling Duration [1800ms].", argstr="-LDD %f") desc = "Difference in labelling delay per slice [0.0 ms/slice." dpld = traits.Float(desc=desc, argstr="-dPLD %f") # *** PASL options (Choose those suitable for the model!): t_inv1 = traits.Float(desc="Saturation pulse time [800ms].", argstr="-Tinv1 %f") t_inv2 = traits.Float(desc="Inversion time [2000ms].", argstr="-Tinv2 %f") desc = "Difference in inversion time per slice [0ms/slice]." dt_inv2 = traits.Float(desc=desc, argstr="-dTinv2 %f") # *** Other experimental assumptions: # Not programmed yet # desc = 'Slope and intercept for Arterial Transit Time.' # ATT = traits.Float(desc=desc, argstr='-ATT %f') gm_t1 = traits.Float(desc="T1 of GM [1150ms].", argstr="-gmT1 %f") gm_plasma = traits.Float( desc="Plasma/GM water partition [0.95ml/g].", argstr="-gmL %f" ) gm_ttt = traits.Float(desc="Time to GM [ATT+0ms].", argstr="-gmTTT %f") wm_t1 = traits.Float(desc="T1 of WM [800ms].", argstr="-wmT1 %f") wm_plasma = traits.Float( desc="Plasma/WM water partition [0.82ml/g].", argstr="-wmL %f" ) wm_ttt = traits.Float(desc="Time to WM [ATT+0ms].", argstr="-wmTTT %f") # *** Segmentation options: desc = "Filename of the 4D segmentation (in ASL space) for L/T1 \ estimation and PV correction {WM,GM,CSF}." seg = File(exists=True, argstr="-seg %s", desc=desc) desc = "Use sigmoid to estimate L from T1: L(T1|gmL,wmL) [Off]." 
sig = traits.Bool(desc=desc, argstr="-sig") desc = "Simple PV correction (CBF=vg*CBFg + vw*CBFw, with CBFw=f*CBFg) \ [0.25]." pv0 = traits.Int(desc=desc, argstr="-pv0 %d") pv2 = traits.Int(desc="In plane PV kernel size [3x3].", argstr="-pv2 %d") pv3 = traits.Tuple( traits.Int, traits.Int, traits.Int, desc="3D kernel size [3x3x1].", argstr="-pv3 %d %d %d", ) desc = "Multiply CBF by this value (e.g. if CL are mislabelled use -1.0)." mul = traits.Float(desc=desc, argstr="-mul %f") mulgm = traits.Bool(desc="Multiply CBF by segmentation [Off].", argstr="-sig") desc = "Set PV threshold for switching off LSQR [O.05]." pv_threshold = traits.Bool(desc=desc, argstr="-pvthreshold") segstyle = traits.Bool(desc="Set CBF as [gm,wm] not [wm,gm].", argstr="-segstyle") class FitAslOutputSpec(TraitedSpec): """Output Spec for FitAsl.""" desc = "Filename of the Cerebral Blood Flow map (in ml/100g/min)." cbf_file = File(exists=True, desc=desc) desc = "Filename of the CBF error map." error_file = File(exists=True, desc=desc) desc = "Filename of the synthetic ASL data." syn_file = File(exists=True, desc=desc) class FitAsl(NiftyFitCommand): """Interface for executable fit_asl from Niftyfit platform. Use NiftyFit to perform ASL fitting. ASL fitting routines (following EU Cost Action White Paper recommendations) Fits Cerebral Blood Flow maps in the first instance. 
`Source code `_ Examples -------- >>> from nipype.interfaces import niftyfit >>> node = niftyfit.FitAsl() >>> node.inputs.source_file = 'asl.nii.gz' >>> node.cmdline 'fit_asl -source asl.nii.gz -cbf asl_cbf.nii.gz -error asl_error.nii.gz \ -syn asl_syn.nii.gz' """ _cmd = get_custom_path("fit_asl", env_dir="NIFTYFITDIR") input_spec = FitAslInputSpec output_spec = FitAslOutputSpec _suffix = "_fit_asl" nipype-1.7.0/nipype/interfaces/niftyfit/base.py000066400000000000000000000026741413403311400215750ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ The niftyfit module provide an interface with the niftyfit software developed in TIG, UCL. Software available at: https://cmiclab.cs.ucl.ac.uk/CMIC/NiftyFit-Release Version used for this version of the interfaces (git): commit c6232e4c4223c3d19f7a32906409da5af36299a2 Date: Fri Jan 6 13:34:02 2017 +0000 Examples -------- See the docstrings of the individual classes for examples. """ import os from ..base import CommandLine from ...utils.filemanip import split_filename class NiftyFitCommand(CommandLine): """ Base support interface for NiftyFit commands. """ _suffix = "_nf" def __init__(self, **inputs): """Init method calling super. No version to be checked.""" super(NiftyFitCommand, self).__init__(**inputs) def _gen_fname(self, basename, out_dir=None, suffix=None, ext=None): if basename == "": msg = "Unable to generate filename for command %s. " % self.cmd msg += "basename is not set!" 
raise ValueError(msg) _, final_bn, final_ext = split_filename(basename) if out_dir is None: out_dir = os.getcwd() if ext is not None: final_ext = ext if suffix is not None: final_bn = "".join((final_bn, suffix)) return os.path.abspath(os.path.join(out_dir, final_bn + final_ext)) nipype-1.7.0/nipype/interfaces/niftyfit/dwi.py000066400000000000000000000440501413403311400214400ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ The dwi module of niftyfit, which wraps the fitting methods in NiftyFit. """ from ..base import File, TraitedSpec, traits, isdefined, CommandLineInputSpec from .base import NiftyFitCommand from ..niftyreg.base import get_custom_path class FitDwiInputSpec(CommandLineInputSpec): """Input Spec for FitDwi.""" # Inputs options source_file = File( position=1, exists=True, argstr="-source %s", mandatory=True, desc="The source image containing the dwi data.", ) desc = "The file containing the bvalues of the source DWI." bval_file = File( position=2, exists=True, argstr="-bval %s", mandatory=True, desc=desc ) desc = "The file containing the bvectors of the source DWI." bvec_file = File( position=3, exists=True, argstr="-bvec %s", mandatory=True, desc=desc ) te_file = File( exists=True, argstr="-TE %s", desc="Filename of TEs (ms).", xor=["te_file"] ) te_value = File( exists=True, argstr="-TE %s", desc="Value of TEs (ms).", xor=["te_file"] ) mask_file = File(exists=True, desc="The image mask", argstr="-mask %s") desc = "Filename of parameter priors for -ball and -nod." prior_file = File(exists=True, argstr="-prior %s", desc=desc) desc = "Rotate the output tensors according to the q/s form of the image \ (resulting tensors will be in mm coordinates, default: 0)." 
rot_sform_flag = traits.Int(desc=desc, argstr="-rotsform %d") # generic output options: error_file = File( name_source=["source_file"], name_template="%s_error.nii.gz", desc="Filename of parameter error maps.", argstr="-error %s", ) res_file = File( name_source=["source_file"], name_template="%s_resmap.nii.gz", desc="Filename of model residual map.", argstr="-res %s", ) syn_file = File( name_source=["source_file"], name_template="%s_syn.nii.gz", desc="Filename of synthetic image.", argstr="-syn %s", ) nodiff_file = File( name_source=["source_file"], name_template="%s_no_diff.nii.gz", desc="Filename of average no diffusion image.", argstr="-nodiff %s", ) # Output options, with templated output names based on the source image mcmap_file = File( name_source=["source_file"], name_template="%s_mcmap.nii.gz", desc="Filename of multi-compartment model parameter map " "(-ivim,-ball,-nod)", argstr="-mcmap %s", requires=["nodv_flag"], ) # Model Specific Output options: mdmap_file = File( name_source=["source_file"], name_template="%s_mdmap.nii.gz", desc="Filename of MD map/ADC", argstr="-mdmap %s", ) famap_file = File( name_source=["source_file"], name_template="%s_famap.nii.gz", desc="Filename of FA map", argstr="-famap %s", ) v1map_file = File( name_source=["source_file"], name_template="%s_v1map.nii.gz", desc="Filename of PDD map [x,y,z]", argstr="-v1map %s", ) rgbmap_file = File( name_source=["source_file"], name_template="%s_rgbmap.nii.gz", desc="Filename of colour-coded FA map", argstr="-rgbmap %s", requires=["dti_flag"], ) desc = "Use lower triangular (tenmap2) or diagonal, off-diagonal tensor \ format" ten_type = traits.Enum("lower-tri", "diag-off-diag", desc=desc, usedefault=True) tenmap_file = File( name_source=["source_file"], name_template="%s_tenmap.nii.gz", desc="Filename of tensor map [diag,offdiag].", argstr="-tenmap %s", requires=["dti_flag"], ) tenmap2_file = File( name_source=["source_file"], name_template="%s_tenmap2.nii.gz", desc="Filename of tensor map 
[lower tri]", argstr="-tenmap2 %s", requires=["dti_flag"], ) # Methods options desc = "Fit single exponential to non-directional data [default with \ no b-vectors]" mono_flag = traits.Bool( desc=desc, argstr="-mono", position=4, xor=[ "ivim_flag", "dti_flag", "ball_flag", "ballv_flag", "nod_flag", "nodv_flag", ], ) ivim_flag = traits.Bool( desc="Fit IVIM model to non-directional data.", argstr="-ivim", position=4, xor=[ "mono_flag", "dti_flag", "ball_flag", "ballv_flag", "nod_flag", "nodv_flag", ], ) desc = "Fit the tensor model [default with b-vectors]." dti_flag = traits.Bool( desc=desc, argstr="-dti", position=4, xor=[ "mono_flag", "ivim_flag", "ball_flag", "ballv_flag", "nod_flag", "nodv_flag", ], ) ball_flag = traits.Bool( desc="Fit the ball and stick model.", argstr="-ball", position=4, xor=[ "mono_flag", "ivim_flag", "dti_flag", "ballv_flag", "nod_flag", "nodv_flag", ], ) desc = "Fit the ball and stick model with optimised PDD." ballv_flag = traits.Bool( desc=desc, argstr="-ballv", position=4, xor=[ "mono_flag", "ivim_flag", "dti_flag", "ball_flag", "nod_flag", "nodv_flag", ], ) nod_flag = traits.Bool( desc="Fit the NODDI model", argstr="-nod", position=4, xor=[ "mono_flag", "ivim_flag", "dti_flag", "ball_flag", "ballv_flag", "nodv_flag", ], ) nodv_flag = traits.Bool( desc="Fit the NODDI model with optimised PDD", argstr="-nodv", position=4, xor=[ "mono_flag", "ivim_flag", "dti_flag", "ball_flag", "ballv_flag", "nod_flag", ], ) # Experimental options desc = "Maximum number of non-linear LSQR iterations [100x2 passes])" maxit_val = traits.Int(desc=desc, argstr="-maxit %d", requires=["gn_flag"]) desc = "LM parameters (initial value, decrease rate) [100,1.2]." lm_vals = traits.Tuple( traits.Float, traits.Float, argstr="-lm %f %f", requires=["gn_flag"], desc=desc ) desc = "Use Gauss-Newton algorithm [Levenberg-Marquardt]." 
gn_flag = traits.Bool(desc=desc, argstr="-gn", xor=["wls_flag"]) desc = "Use Variational Bayes fitting with known prior (currently \ identity covariance...)." vb_flag = traits.Bool(desc=desc, argstr="-vb") cov_file = File( exists=True, desc="Filename of ithe nc*nc covariance matrix [I]", argstr="-cov %s", ) wls_flag = traits.Bool(desc=desc, argstr="-wls", xor=["gn_flag"]) desc = "Use location-weighted least squares for DTI fitting [3x3 Gaussian]" swls_val = traits.Float(desc=desc, argstr="-swls %f") slice_no = traits.Int(desc="Fit to single slice number.", argstr="-slice %d") voxel = traits.Tuple( traits.Int, traits.Int, traits.Int, desc="Fit to single voxel only.", argstr="-voxel %d %d %d", ) diso_val = traits.Float( desc="Isotropic diffusivity for -nod [3e-3]", argstr="-diso %f" ) dpr_val = traits.Float( desc="Parallel diffusivity for -nod [1.7e-3].", argstr="-dpr %f" ) wm_t2_val = traits.Float(desc="White matter T2 value [80ms].", argstr="-wmT2 %f") csf_t2_val = traits.Float(desc="CSF T2 value [400ms].", argstr="-csfT2 %f") desc = "Threshold for perfusion/diffsuion effects [100]." 
perf_thr = traits.Float(desc=desc, argstr="-perfthreshold %f") # MCMC options: mcout = File( name_source=["source_file"], name_template="%s_mcout.txt", desc="Filename of mc samples (ascii text file)", argstr="-mcout %s", ) mcsamples = traits.Int( desc="Number of samples to keep [100].", argstr="-mcsamples %d" ) mcmaxit = traits.Int( desc="Number of iterations to run [10,000].", argstr="-mcmaxit %d" ) acceptance = traits.Float( desc="Fraction of iterations to accept [0.23].", argstr="-accpetance %f" ) class FitDwiOutputSpec(TraitedSpec): """Output Spec for FitDwi.""" error_file = File(desc="Filename of parameter error maps") res_file = File(desc="Filename of model residual map") syn_file = File(desc="Filename of synthetic image") nodiff_file = File(desc="Filename of average no diffusion image.") mdmap_file = File(desc="Filename of MD map/ADC") famap_file = File(desc="Filename of FA map") v1map_file = File(desc="Filename of PDD map [x,y,z]") rgbmap_file = File(desc="Filename of colour FA map") tenmap_file = File(desc="Filename of tensor map") tenmap2_file = File(desc="Filename of tensor map [lower tri]") mcmap_file = File( desc="Filename of multi-compartment model " "parameter map (-ivim,-ball,-nod)." ) mcout = File(desc="Filename of mc samples (ascii text file)") class FitDwi(NiftyFitCommand): """Interface for executable fit_dwi from Niftyfit platform. Use NiftyFit to perform diffusion model fitting. Diffusion-weighted MR Fitting. Fits DWI parameter maps to multi-shell, multi-directional data. 
`Source code `_ Examples -------- >>> from nipype.interfaces import niftyfit >>> fit_dwi = niftyfit.FitDwi(dti_flag=True) >>> fit_dwi.inputs.source_file = 'dwi.nii.gz' >>> fit_dwi.inputs.bvec_file = 'bvecs' >>> fit_dwi.inputs.bval_file = 'bvals' >>> fit_dwi.inputs.rgbmap_file = 'rgb.nii.gz' >>> fit_dwi.cmdline 'fit_dwi -source dwi.nii.gz -bval bvals -bvec bvecs -dti \ -error dwi_error.nii.gz -famap dwi_famap.nii.gz -mcout dwi_mcout.txt \ -mdmap dwi_mdmap.nii.gz -nodiff dwi_no_diff.nii.gz -res dwi_resmap.nii.gz \ -rgbmap rgb.nii.gz -syn dwi_syn.nii.gz -tenmap2 dwi_tenmap2.nii.gz \ -v1map dwi_v1map.nii.gz' """ _cmd = get_custom_path("fit_dwi", env_dir="NIFTYFITDIR") input_spec = FitDwiInputSpec output_spec = FitDwiOutputSpec _suffix = "_fit_dwi" def _format_arg(self, name, trait_spec, value): if name == "tenmap_file" and self.inputs.ten_type != "diag-off-diag": return "" if name == "tenmap2_file" and self.inputs.ten_type != "lower-tri": return "" return super(FitDwi, self)._format_arg(name, trait_spec, value) class DwiToolInputSpec(CommandLineInputSpec): """Input Spec for DwiTool.""" desc = "The source image containing the fitted model." source_file = File( position=1, exists=True, desc=desc, argstr="-source %s", mandatory=True ) desc = "The file containing the bvalues of the source DWI." bval_file = File( position=2, exists=True, desc=desc, argstr="-bval %s", mandatory=True ) desc = "The file containing the bvectors of the source DWI." 
bvec_file = File(position=3, exists=True, desc=desc, argstr="-bvec %s") b0_file = File( position=4, exists=True, desc="The B0 image corresponding to the source DWI", argstr="-b0 %s", ) mask_file = File(position=5, exists=True, desc="The image mask", argstr="-mask %s") # Output options, with templated output names based on the source image desc = "Filename of multi-compartment model parameter map \ (-ivim,-ball,-nod)" mcmap_file = File( name_source=["source_file"], name_template="%s_mcmap.nii.gz", desc=desc, argstr="-mcmap %s", ) desc = "Filename of synthetic image. Requires: bvec_file/b0_file." syn_file = File( name_source=["source_file"], name_template="%s_syn.nii.gz", desc=desc, argstr="-syn %s", requires=["bvec_file", "b0_file"], ) mdmap_file = File( name_source=["source_file"], name_template="%s_mdmap.nii.gz", desc="Filename of MD map/ADC", argstr="-mdmap %s", ) famap_file = File( name_source=["source_file"], name_template="%s_famap.nii.gz", desc="Filename of FA map", argstr="-famap %s", ) v1map_file = File( name_source=["source_file"], name_template="%s_v1map.nii.gz", desc="Filename of PDD map [x,y,z]", argstr="-v1map %s", ) rgbmap_file = File( name_source=["source_file"], name_template="%s_rgbmap.nii.gz", desc="Filename of colour FA map.", argstr="-rgbmap %s", ) logdti_file = File( name_source=["source_file"], name_template="%s_logdti2.nii.gz", desc="Filename of output logdti map.", argstr="-logdti2 %s", ) # Methods options desc = "Input is a single exponential to non-directional data \ [default with no b-vectors]" mono_flag = traits.Bool( desc=desc, position=6, argstr="-mono", xor=[ "ivim_flag", "dti_flag", "dti_flag2", "ball_flag", "ballv_flag", "nod_flag", "nodv_flag", ], ) desc = "Inputs is an IVIM model to non-directional data." 
ivim_flag = traits.Bool( desc=desc, position=6, argstr="-ivim", xor=[ "mono_flag", "dti_flag", "dti_flag2", "ball_flag", "ballv_flag", "nod_flag", "nodv_flag", ], ) dti_flag = traits.Bool( desc="Input is a tensor model diag/off-diag.", position=6, argstr="-dti", xor=[ "mono_flag", "ivim_flag", "dti_flag2", "ball_flag", "ballv_flag", "nod_flag", "nodv_flag", ], ) dti_flag2 = traits.Bool( desc="Input is a tensor model lower triangular", position=6, argstr="-dti2", xor=[ "mono_flag", "ivim_flag", "dti_flag", "ball_flag", "ballv_flag", "nod_flag", "nodv_flag", ], ) ball_flag = traits.Bool( desc="Input is a ball and stick model.", position=6, argstr="-ball", xor=[ "mono_flag", "ivim_flag", "dti_flag", "dti_flag2", "ballv_flag", "nod_flag", "nodv_flag", ], ) desc = "Input is a ball and stick model with optimised PDD." ballv_flag = traits.Bool( desc=desc, position=6, argstr="-ballv", xor=[ "mono_flag", "ivim_flag", "dti_flag", "dti_flag2", "ball_flag", "nod_flag", "nodv_flag", ], ) nod_flag = traits.Bool( desc="Input is a NODDI model", position=6, argstr="-nod", xor=[ "mono_flag", "ivim_flag", "dti_flag", "dti_flag2", "ball_flag", "ballv_flag", "nodv_flag", ], ) nodv_flag = traits.Bool( desc="Input is a NODDI model with optimised PDD", position=6, argstr="-nodv", xor=[ "mono_flag", "ivim_flag", "dti_flag", "dti_flag2", "ball_flag", "ballv_flag", "nod_flag", ], ) # Experimental options diso_val = traits.Float( desc="Isotropic diffusivity for -nod [3e-3]", argstr="-diso %f" ) dpr_val = traits.Float( desc="Parallel diffusivity for -nod [1.7e-3].", argstr="-dpr %f" ) class DwiToolOutputSpec(TraitedSpec): """Output Spec for DwiTool.""" desc = "Filename of multi-compartment model parameter map \ (-ivim,-ball,-nod)" mcmap_file = File(desc=desc) syn_file = File(desc="Filename of synthetic image") mdmap_file = File(desc="Filename of MD map/ADC") famap_file = File(desc="Filename of FA map") v1map_file = File(desc="Filename of PDD map [x,y,z]") rgbmap_file = File(desc="Filename of 
colour FA map") logdti_file = File(desc="Filename of output logdti map") class DwiTool(NiftyFitCommand): """Interface for executable dwi_tool from Niftyfit platform. Use DwiTool. Diffusion-Weighted MR Prediction. Predicts DWI from previously fitted models and calculates model derived maps. `Source code `_ Examples -------- >>> from nipype.interfaces import niftyfit >>> dwi_tool = niftyfit.DwiTool(dti_flag=True) >>> dwi_tool.inputs.source_file = 'dwi.nii.gz' >>> dwi_tool.inputs.bvec_file = 'bvecs' >>> dwi_tool.inputs.bval_file = 'bvals' >>> dwi_tool.inputs.mask_file = 'mask.nii.gz' >>> dwi_tool.inputs.b0_file = 'b0.nii.gz' >>> dwi_tool.inputs.rgbmap_file = 'rgb_map.nii.gz' >>> dwi_tool.cmdline 'dwi_tool -source dwi.nii.gz -bval bvals -bvec bvecs -b0 b0.nii.gz \ -mask mask.nii.gz -dti -famap dwi_famap.nii.gz -logdti2 dwi_logdti2.nii.gz \ -mcmap dwi_mcmap.nii.gz -mdmap dwi_mdmap.nii.gz -rgbmap rgb_map.nii.gz \ -syn dwi_syn.nii.gz -v1map dwi_v1map.nii.gz' """ _cmd = get_custom_path("dwi_tool", env_dir="NIFTYFITDIR") input_spec = DwiToolInputSpec output_spec = DwiToolOutputSpec _suffix = "_dwi_tool" def _format_arg(self, name, trait_spec, value): if name == "syn_file": if not isdefined(self.inputs.bvec_file) or not isdefined( self.inputs.b0_file ): return "" if name in ["logdti_file", "rgbmap_file"]: if not isdefined(self.inputs.dti_flag) and not isdefined( self.inputs.dti_flag2 ): return "" return super(DwiTool, self)._format_arg(name, trait_spec, value) nipype-1.7.0/nipype/interfaces/niftyfit/qt1.py000066400000000000000000000146351413403311400213700ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ The QT1 module of niftyfit, which wraps the Multi-Echo T1 fitting methods in NiftyFit. 
""" from ..base import TraitedSpec, File, traits, CommandLineInputSpec from .base import NiftyFitCommand from ..niftyreg.base import get_custom_path class FitQt1InputSpec(CommandLineInputSpec): """Input Spec for FitQt1.""" desc = "Filename of the 4D Multi-Echo T1 source image." source_file = File( position=1, exists=True, desc=desc, argstr="-source %s", mandatory=True ) # Output options: t1map_file = File( name_source=["source_file"], name_template="%s_t1map.nii.gz", argstr="-t1map %s", desc="Filename of the estimated output T1 map (in ms).", ) m0map_file = File( name_source=["source_file"], name_template="%s_m0map.nii.gz", argstr="-m0map %s", desc="Filename of the estimated input M0 map.", ) desc = "Filename of the estimated output multi-parameter map." mcmap_file = File( name_source=["source_file"], name_template="%s_mcmap.nii.gz", argstr="-mcmap %s", desc=desc, ) comp_file = File( name_source=["source_file"], name_template="%s_comp.nii.gz", argstr="-comp %s", desc="Filename of the estimated multi-component T1 map.", ) desc = "Filename of the error map (symmetric matrix, [Diag,OffDiag])." 
error_file = File( name_source=["source_file"], name_template="%s_error.nii.gz", argstr="-error %s", desc=desc, ) syn_file = File( name_source=["source_file"], name_template="%s_syn.nii.gz", argstr="-syn %s", desc="Filename of the synthetic ASL data.", ) res_file = File( name_source=["source_file"], name_template="%s_res.nii.gz", argstr="-res %s", desc="Filename of the model fit residuals", ) # Other options: mask = File( position=2, exists=True, desc="Filename of image mask.", argstr="-mask %s" ) prior = File( position=3, exists=True, desc="Filename of parameter prior.", argstr="-prior %s" ) te_value = traits.Float(desc="TE Echo Time [0ms!].", argstr="-TE %f", position=4) tr_value = traits.Float( desc="TR Repetition Time [10s!].", argstr="-TR %f", position=5 ) desc = "Number of components to fit [1] (currently IR/SR only)" # set position to be ahead of TIs nb_comp = traits.Int(desc=desc, position=6, argstr="-nc %d") desc = "Set LM parameters (initial value, decrease rate) [100,1.2]." lm_val = traits.Tuple( traits.Float, traits.Float, desc=desc, argstr="-lm %f %f", position=7 ) desc = "Use Gauss-Newton algorithm [Levenberg-Marquardt]." gn_flag = traits.Bool(desc=desc, argstr="-gn", position=8) slice_no = traits.Int( desc="Fit to single slice number.", argstr="-slice %d", position=9 ) voxel = traits.Tuple( traits.Int, traits.Int, traits.Int, desc="Fit to single voxel only.", argstr="-voxel %d %d %d", position=10, ) maxit = traits.Int(desc="NLSQR iterations [100].", argstr="-maxit %d", position=11) # IR options: sr_flag = traits.Bool( desc="Saturation Recovery fitting [default].", argstr="-SR", position=12 ) ir_flag = traits.Bool( desc="Inversion Recovery fitting [default].", argstr="-IR", position=13 ) tis = traits.List( traits.Float, position=14, desc="Inversion times for T1 data [1s,2s,5s].", argstr="-TIs %s", sep=" ", ) tis_list = File( exists=True, argstr="-TIlist %s", desc="Filename of list of pre-defined TIs." 
) t1_list = File( exists=True, argstr="-T1list %s", desc="Filename of list of pre-defined T1s" ) t1min = traits.Float(desc="Minimum tissue T1 value [400ms].", argstr="-T1min %f") t1max = traits.Float(desc="Maximum tissue T1 value [4000ms].", argstr="-T1max %f") # SPGR options spgr = traits.Bool(desc="Spoiled Gradient Echo fitting", argstr="-SPGR") flips = traits.List(traits.Float, desc="Flip angles", argstr="-flips %s", sep=" ") desc = "Filename of list of pre-defined flip angles (deg)." flips_list = File(exists=True, argstr="-fliplist %s", desc=desc) desc = "Filename of B1 estimate for fitting (or include in prior)." b1map = File(exists=True, argstr="-b1map %s", desc=desc) # MCMC options: mcout = File( exists=True, desc="Filename of mc samples (ascii text file)", argstr="-mcout %s" ) mcsamples = traits.Int( desc="Number of samples to keep [100].", argstr="-mcsamples %d" ) mcmaxit = traits.Int( desc="Number of iterations to run [10,000].", argstr="-mcmaxit %d" ) acceptance = traits.Float( desc="Fraction of iterations to accept [0.23].", argstr="-acceptance %f" ) class FitQt1OutputSpec(TraitedSpec): """Output Spec for FitQt1.""" t1map_file = File(desc="Filename of the estimated output T1 map (in ms)") m0map_file = File(desc="Filename of the m0 map") desc = "Filename of the estimated output multi-parameter map" mcmap_file = File(desc=desc) comp_file = File(desc="Filename of the estimated multi-component T1 map.") desc = "Filename of the error map (symmetric matrix, [Diag,OffDiag])" error_file = File(desc=desc) syn_file = File(desc="Filename of the synthetic ASL data") res_file = File(desc="Filename of the model fit residuals") class FitQt1(NiftyFitCommand): """Interface for executable fit_qt1 from Niftyfit platform. Use NiftyFit to perform Qt1 fitting. T1 Fitting Routine (To inversion recovery or spgr data). Fits single component T1 maps in the first instance. 
`Source code `_ Examples -------- >>> from nipype.interfaces.niftyfit import FitQt1 >>> fit_qt1 = FitQt1() >>> fit_qt1.inputs.source_file = 'TI4D.nii.gz' >>> fit_qt1.cmdline 'fit_qt1 -source TI4D.nii.gz -comp TI4D_comp.nii.gz \ -error TI4D_error.nii.gz -m0map TI4D_m0map.nii.gz -mcmap TI4D_mcmap.nii.gz \ -res TI4D_res.nii.gz -syn TI4D_syn.nii.gz -t1map TI4D_t1map.nii.gz' """ _cmd = get_custom_path("fit_qt1", env_dir="NIFTYFITDIR") input_spec = FitQt1InputSpec output_spec = FitQt1OutputSpec _suffix = "_fit_qt1" nipype-1.7.0/nipype/interfaces/niftyfit/tests/000077500000000000000000000000001413403311400214425ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/niftyfit/tests/__init__.py000066400000000000000000000000001413403311400235410ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/niftyfit/tests/test_asl.py000066400000000000000000000037731413403311400236440ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import pytest from ....testing import example_data from ...niftyreg import get_custom_path from ..asl import FitAsl from ...niftyreg.tests.test_regutils import no_nifty_tool @pytest.mark.skipif(no_nifty_tool(cmd="fit_asl"), reason="niftyfit is not installed") def test_fit_asl(): """Testing FitAsl interface.""" # Create the test node fit_asl = FitAsl() # Check if the command is properly defined cmd = get_custom_path("fit_asl", env_dir="NIFTYFIT_DIR") assert fit_asl.cmd == cmd # test raising error with mandatory args absent with pytest.raises(ValueError): fit_asl.run() # Tests on the interface: # Runs cbf fitting assuming all tissue is GM! 
in_file = example_data("asl.nii.gz") fit_asl.inputs.source_file = in_file cmd_tmp = "{cmd} -source {in_file} -cbf {cbf} -error {error} -syn {syn}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, cbf="asl_cbf.nii.gz", error="asl_error.nii.gz", syn="asl_syn.nii.gz", ) assert fit_asl.cmdline == expected_cmd # Runs cbf fitting using IR/SR T1 data to estimate the local T1 and uses # the segmentation data to fit tissue specific blood flow parameters # (lambda,transit times,T1) fit_asl2 = FitAsl(sig=True) in_file = example_data("asl.nii.gz") t1map = example_data("T1map.nii.gz") seg = example_data("segmentation0.nii.gz") fit_asl2.inputs.source_file = in_file fit_asl2.inputs.t1map = t1map fit_asl2.inputs.seg = seg cmd_tmp = "{cmd} -source {in_file} -cbf {cbf} -error {error} \ -seg {seg} -sig -syn {syn} -t1map {t1map}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, t1map=t1map, seg=seg, cbf="asl_cbf.nii.gz", error="asl_error.nii.gz", syn="asl_syn.nii.gz", ) assert fit_asl2.cmdline == expected_cmd nipype-1.7.0/nipype/interfaces/niftyfit/tests/test_auto_DwiTool.py000066400000000000000000000134051413403311400254670ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dwi import DwiTool def test_DwiTool_inputs(): input_map = dict( args=dict( argstr="%s", ), b0_file=dict( argstr="-b0 %s", extensions=None, position=4, ), ball_flag=dict( argstr="-ball", position=6, xor=[ "mono_flag", "ivim_flag", "dti_flag", "dti_flag2", "ballv_flag", "nod_flag", "nodv_flag", ], ), ballv_flag=dict( argstr="-ballv", position=6, xor=[ "mono_flag", "ivim_flag", "dti_flag", "dti_flag2", "ball_flag", "nod_flag", "nodv_flag", ], ), bval_file=dict( argstr="-bval %s", extensions=None, mandatory=True, position=2, ), bvec_file=dict( argstr="-bvec %s", extensions=None, position=3, ), diso_val=dict( argstr="-diso %f", ), dpr_val=dict( argstr="-dpr %f", ), dti_flag=dict( argstr="-dti", position=6, xor=[ "mono_flag", "ivim_flag", "dti_flag2", "ball_flag", 
"ballv_flag", "nod_flag", "nodv_flag", ], ), dti_flag2=dict( argstr="-dti2", position=6, xor=[ "mono_flag", "ivim_flag", "dti_flag", "ball_flag", "ballv_flag", "nod_flag", "nodv_flag", ], ), environ=dict( nohash=True, usedefault=True, ), famap_file=dict( argstr="-famap %s", extensions=None, name_source=["source_file"], name_template="%s_famap.nii.gz", ), ivim_flag=dict( argstr="-ivim", position=6, xor=[ "mono_flag", "dti_flag", "dti_flag2", "ball_flag", "ballv_flag", "nod_flag", "nodv_flag", ], ), logdti_file=dict( argstr="-logdti2 %s", extensions=None, name_source=["source_file"], name_template="%s_logdti2.nii.gz", ), mask_file=dict( argstr="-mask %s", extensions=None, position=5, ), mcmap_file=dict( argstr="-mcmap %s", extensions=None, name_source=["source_file"], name_template="%s_mcmap.nii.gz", ), mdmap_file=dict( argstr="-mdmap %s", extensions=None, name_source=["source_file"], name_template="%s_mdmap.nii.gz", ), mono_flag=dict( argstr="-mono", position=6, xor=[ "ivim_flag", "dti_flag", "dti_flag2", "ball_flag", "ballv_flag", "nod_flag", "nodv_flag", ], ), nod_flag=dict( argstr="-nod", position=6, xor=[ "mono_flag", "ivim_flag", "dti_flag", "dti_flag2", "ball_flag", "ballv_flag", "nodv_flag", ], ), nodv_flag=dict( argstr="-nodv", position=6, xor=[ "mono_flag", "ivim_flag", "dti_flag", "dti_flag2", "ball_flag", "ballv_flag", "nod_flag", ], ), rgbmap_file=dict( argstr="-rgbmap %s", extensions=None, name_source=["source_file"], name_template="%s_rgbmap.nii.gz", ), source_file=dict( argstr="-source %s", extensions=None, mandatory=True, position=1, ), syn_file=dict( argstr="-syn %s", extensions=None, name_source=["source_file"], name_template="%s_syn.nii.gz", requires=["bvec_file", "b0_file"], ), v1map_file=dict( argstr="-v1map %s", extensions=None, name_source=["source_file"], name_template="%s_v1map.nii.gz", ), ) inputs = DwiTool.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value def test_DwiTool_outputs(): output_map = dict( famap_file=dict( extensions=None, ), logdti_file=dict( extensions=None, ), mcmap_file=dict( extensions=None, ), mdmap_file=dict( extensions=None, ), rgbmap_file=dict( extensions=None, ), syn_file=dict( extensions=None, ), v1map_file=dict( extensions=None, ), ) outputs = DwiTool.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyfit/tests/test_auto_FitAsl.py000066400000000000000000000075241413403311400252750ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..asl import FitAsl def test_FitAsl_inputs(): input_map = dict( args=dict( argstr="%s", ), cbf_file=dict( argstr="-cbf %s", extensions=None, name_source=["source_file"], name_template="%s_cbf.nii.gz", ), dpld=dict( argstr="-dPLD %f", ), dt_inv2=dict( argstr="-dTinv2 %f", ), eff=dict( argstr="-eff %f", ), environ=dict( nohash=True, usedefault=True, ), error_file=dict( argstr="-error %s", extensions=None, name_source=["source_file"], name_template="%s_error.nii.gz", ), gm_plasma=dict( argstr="-gmL %f", ), gm_t1=dict( argstr="-gmT1 %f", ), gm_ttt=dict( argstr="-gmTTT %f", ), ir_output=dict( argstr="-IRoutput %s", extensions=None, ), ir_volume=dict( argstr="-IRvolume %s", extensions=None, ), ldd=dict( argstr="-LDD %f", ), m0map=dict( argstr="-m0map %s", extensions=None, ), m0mape=dict( argstr="-m0mape %s", extensions=None, ), mask=dict( argstr="-mask %s", extensions=None, position=2, ), mul=dict( argstr="-mul %f", ), mulgm=dict( argstr="-sig", ), out=dict( argstr="-out %f", ), pasl=dict( argstr="-pasl", ), pcasl=dict( argstr="-pcasl", ), plasma_coeff=dict( argstr="-L %f", ), pld=dict( argstr="-PLD %f", ), pv0=dict( argstr="-pv0 %d", ), pv2=dict( argstr="-pv2 %d", ), pv3=dict( argstr="-pv3 %d %d %d", ), pv_threshold=dict( 
argstr="-pvthreshold", ), seg=dict( argstr="-seg %s", extensions=None, ), segstyle=dict( argstr="-segstyle", ), sig=dict( argstr="-sig", ), source_file=dict( argstr="-source %s", extensions=None, mandatory=True, position=1, ), syn_file=dict( argstr="-syn %s", extensions=None, name_source=["source_file"], name_template="%s_syn.nii.gz", ), t1_art_cmp=dict( argstr="-T1a %f", ), t1map=dict( argstr="-t1map %s", extensions=None, ), t_inv1=dict( argstr="-Tinv1 %f", ), t_inv2=dict( argstr="-Tinv2 %f", ), wm_plasma=dict( argstr="-wmL %f", ), wm_t1=dict( argstr="-wmT1 %f", ), wm_ttt=dict( argstr="-wmTTT %f", ), ) inputs = FitAsl.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FitAsl_outputs(): output_map = dict( cbf_file=dict( extensions=None, ), error_file=dict( extensions=None, ), syn_file=dict( extensions=None, ), ) outputs = FitAsl.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyfit/tests/test_auto_FitDwi.py000066400000000000000000000200151413403311400252670ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dwi import FitDwi def test_FitDwi_inputs(): input_map = dict( acceptance=dict( argstr="-accpetance %f", ), args=dict( argstr="%s", ), ball_flag=dict( argstr="-ball", position=4, xor=[ "mono_flag", "ivim_flag", "dti_flag", "ballv_flag", "nod_flag", "nodv_flag", ], ), ballv_flag=dict( argstr="-ballv", position=4, xor=[ "mono_flag", "ivim_flag", "dti_flag", "ball_flag", "nod_flag", "nodv_flag", ], ), bval_file=dict( argstr="-bval %s", extensions=None, mandatory=True, position=2, ), bvec_file=dict( argstr="-bvec %s", extensions=None, mandatory=True, position=3, ), cov_file=dict( argstr="-cov %s", extensions=None, ), csf_t2_val=dict( argstr="-csfT2 %f", ), 
diso_val=dict( argstr="-diso %f", ), dpr_val=dict( argstr="-dpr %f", ), dti_flag=dict( argstr="-dti", position=4, xor=[ "mono_flag", "ivim_flag", "ball_flag", "ballv_flag", "nod_flag", "nodv_flag", ], ), environ=dict( nohash=True, usedefault=True, ), error_file=dict( argstr="-error %s", extensions=None, name_source=["source_file"], name_template="%s_error.nii.gz", ), famap_file=dict( argstr="-famap %s", extensions=None, name_source=["source_file"], name_template="%s_famap.nii.gz", ), gn_flag=dict( argstr="-gn", xor=["wls_flag"], ), ivim_flag=dict( argstr="-ivim", position=4, xor=[ "mono_flag", "dti_flag", "ball_flag", "ballv_flag", "nod_flag", "nodv_flag", ], ), lm_vals=dict( argstr="-lm %f %f", requires=["gn_flag"], ), mask_file=dict( argstr="-mask %s", extensions=None, ), maxit_val=dict( argstr="-maxit %d", requires=["gn_flag"], ), mcmap_file=dict( argstr="-mcmap %s", extensions=None, name_source=["source_file"], name_template="%s_mcmap.nii.gz", requires=["nodv_flag"], ), mcmaxit=dict( argstr="-mcmaxit %d", ), mcout=dict( argstr="-mcout %s", extensions=None, name_source=["source_file"], name_template="%s_mcout.txt", ), mcsamples=dict( argstr="-mcsamples %d", ), mdmap_file=dict( argstr="-mdmap %s", extensions=None, name_source=["source_file"], name_template="%s_mdmap.nii.gz", ), mono_flag=dict( argstr="-mono", position=4, xor=[ "ivim_flag", "dti_flag", "ball_flag", "ballv_flag", "nod_flag", "nodv_flag", ], ), nod_flag=dict( argstr="-nod", position=4, xor=[ "mono_flag", "ivim_flag", "dti_flag", "ball_flag", "ballv_flag", "nodv_flag", ], ), nodiff_file=dict( argstr="-nodiff %s", extensions=None, name_source=["source_file"], name_template="%s_no_diff.nii.gz", ), nodv_flag=dict( argstr="-nodv", position=4, xor=[ "mono_flag", "ivim_flag", "dti_flag", "ball_flag", "ballv_flag", "nod_flag", ], ), perf_thr=dict( argstr="-perfthreshold %f", ), prior_file=dict( argstr="-prior %s", extensions=None, ), res_file=dict( argstr="-res %s", extensions=None, 
name_source=["source_file"], name_template="%s_resmap.nii.gz", ), rgbmap_file=dict( argstr="-rgbmap %s", extensions=None, name_source=["source_file"], name_template="%s_rgbmap.nii.gz", requires=["dti_flag"], ), rot_sform_flag=dict( argstr="-rotsform %d", ), slice_no=dict( argstr="-slice %d", ), source_file=dict( argstr="-source %s", extensions=None, mandatory=True, position=1, ), swls_val=dict( argstr="-swls %f", ), syn_file=dict( argstr="-syn %s", extensions=None, name_source=["source_file"], name_template="%s_syn.nii.gz", ), te_file=dict( argstr="-TE %s", extensions=None, xor=["te_file"], ), te_value=dict( argstr="-TE %s", extensions=None, xor=["te_file"], ), ten_type=dict( usedefault=True, ), tenmap2_file=dict( argstr="-tenmap2 %s", extensions=None, name_source=["source_file"], name_template="%s_tenmap2.nii.gz", requires=["dti_flag"], ), tenmap_file=dict( argstr="-tenmap %s", extensions=None, name_source=["source_file"], name_template="%s_tenmap.nii.gz", requires=["dti_flag"], ), v1map_file=dict( argstr="-v1map %s", extensions=None, name_source=["source_file"], name_template="%s_v1map.nii.gz", ), vb_flag=dict( argstr="-vb", ), voxel=dict( argstr="-voxel %d %d %d", ), wls_flag=dict( argstr="-wls", xor=["gn_flag"], ), wm_t2_val=dict( argstr="-wmT2 %f", ), ) inputs = FitDwi.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FitDwi_outputs(): output_map = dict( error_file=dict( extensions=None, ), famap_file=dict( extensions=None, ), mcmap_file=dict( extensions=None, ), mcout=dict( extensions=None, ), mdmap_file=dict( extensions=None, ), nodiff_file=dict( extensions=None, ), res_file=dict( extensions=None, ), rgbmap_file=dict( extensions=None, ), syn_file=dict( extensions=None, ), tenmap2_file=dict( extensions=None, ), tenmap_file=dict( extensions=None, ), v1map_file=dict( extensions=None, ), ) outputs = FitDwi.output_spec() for key, metadata in 
list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyfit/tests/test_auto_FitQt1.py000066400000000000000000000112501413403311400252120ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..qt1 import FitQt1 def test_FitQt1_inputs(): input_map = dict( acceptance=dict( argstr="-acceptance %f", ), args=dict( argstr="%s", ), b1map=dict( argstr="-b1map %s", extensions=None, ), comp_file=dict( argstr="-comp %s", extensions=None, name_source=["source_file"], name_template="%s_comp.nii.gz", ), environ=dict( nohash=True, usedefault=True, ), error_file=dict( argstr="-error %s", extensions=None, name_source=["source_file"], name_template="%s_error.nii.gz", ), flips=dict( argstr="-flips %s", sep=" ", ), flips_list=dict( argstr="-fliplist %s", extensions=None, ), gn_flag=dict( argstr="-gn", position=8, ), ir_flag=dict( argstr="-IR", position=13, ), lm_val=dict( argstr="-lm %f %f", position=7, ), m0map_file=dict( argstr="-m0map %s", extensions=None, name_source=["source_file"], name_template="%s_m0map.nii.gz", ), mask=dict( argstr="-mask %s", extensions=None, position=2, ), maxit=dict( argstr="-maxit %d", position=11, ), mcmap_file=dict( argstr="-mcmap %s", extensions=None, name_source=["source_file"], name_template="%s_mcmap.nii.gz", ), mcmaxit=dict( argstr="-mcmaxit %d", ), mcout=dict( argstr="-mcout %s", extensions=None, ), mcsamples=dict( argstr="-mcsamples %d", ), nb_comp=dict( argstr="-nc %d", position=6, ), prior=dict( argstr="-prior %s", extensions=None, position=3, ), res_file=dict( argstr="-res %s", extensions=None, name_source=["source_file"], name_template="%s_res.nii.gz", ), slice_no=dict( argstr="-slice %d", position=9, ), source_file=dict( argstr="-source %s", extensions=None, mandatory=True, position=1, ), spgr=dict( argstr="-SPGR", ), sr_flag=dict( argstr="-SR", position=12, ), syn_file=dict( argstr="-syn %s", 
extensions=None, name_source=["source_file"], name_template="%s_syn.nii.gz", ), t1_list=dict( argstr="-T1list %s", extensions=None, ), t1map_file=dict( argstr="-t1map %s", extensions=None, name_source=["source_file"], name_template="%s_t1map.nii.gz", ), t1max=dict( argstr="-T1max %f", ), t1min=dict( argstr="-T1min %f", ), te_value=dict( argstr="-TE %f", position=4, ), tis=dict( argstr="-TIs %s", position=14, sep=" ", ), tis_list=dict( argstr="-TIlist %s", extensions=None, ), tr_value=dict( argstr="-TR %f", position=5, ), voxel=dict( argstr="-voxel %d %d %d", position=10, ), ) inputs = FitQt1.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FitQt1_outputs(): output_map = dict( comp_file=dict( extensions=None, ), error_file=dict( extensions=None, ), m0map_file=dict( extensions=None, ), mcmap_file=dict( extensions=None, ), res_file=dict( extensions=None, ), syn_file=dict( extensions=None, ), t1map_file=dict( extensions=None, ), ) outputs = FitQt1.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyfit/tests/test_auto_NiftyFitCommand.py000066400000000000000000000007761413403311400271500ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import NiftyFitCommand def test_NiftyFitCommand_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), ) inputs = NiftyFitCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyfit/tests/test_dwi.py000066400000000000000000000064321413403311400236430ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; 
indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import pytest from ....testing import example_data from ...niftyreg import get_custom_path from ..dwi import FitDwi, DwiTool from ...niftyreg.tests.test_regutils import no_nifty_tool @pytest.mark.skipif(no_nifty_tool(cmd="fit_dwi"), reason="niftyfit is not installed") def test_fit_dwi(): """Testing FitDwi interface.""" # Create a node object fit_dwi = FitDwi() # Check if the command is properly defined cmd = get_custom_path("fit_dwi", env_dir="NIFTYFITDIR") assert fit_dwi.cmd == cmd # test raising error with mandatory args absent with pytest.raises(ValueError): fit_dwi.run() # Assign some input data in_file = example_data("dwi.nii.gz") bval_file = example_data("bvals") bvec_file = example_data("bvecs") fit_dwi.inputs.source_file = in_file fit_dwi.inputs.bval_file = bval_file fit_dwi.inputs.bvec_file = bvec_file fit_dwi.inputs.dti_flag = True cmd_tmp = "{cmd} -source {in_file} -bval {bval} -bvec {bvec} -dti \ -error {error} -famap {fa} -mcmap {mc} -mcout {mcout} -mdmap {md} -nodiff \ {nodiff} -res {res} -rgbmap {rgb} -syn {syn} -tenmap2 {ten2} -v1map {v1}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, bval=bval_file, bvec=bvec_file, error="dwi_error.nii.gz", fa="dwi_famap.nii.gz", mc="dwi_mcmap.nii.gz", md="dwi_mdmap.nii.gz", nodiff="dwi_no_diff.nii.gz", res="dwi_resmap.nii.gz", rgb="dwi_rgbmap.nii.gz", syn="dwi_syn.nii.gz", ten2="dwi_tenmap2.nii.gz", v1="dwi_v1map.nii.gz", mcout="dwi_mcout.txt", ) assert fit_dwi.cmdline == expected_cmd @pytest.mark.skipif(no_nifty_tool(cmd="dwi_tool"), reason="niftyfit is not installed") def test_dwi_tool(): """Testing DwiTool interface.""" # Create a node object dwi_tool = DwiTool() # Check if the command is properly defined cmd = get_custom_path("dwi_tool", env_dir="NIFTYFITDIR") assert dwi_tool.cmd == cmd # test raising error with mandatory args absent with pytest.raises(ValueError): dwi_tool.run() # Assign some input data in_file = 
example_data("dwi.nii.gz") bval_file = example_data("bvals") bvec_file = example_data("bvecs") b0_file = example_data("b0.nii") mask_file = example_data("mask.nii.gz") dwi_tool.inputs.source_file = in_file dwi_tool.inputs.mask_file = mask_file dwi_tool.inputs.bval_file = bval_file dwi_tool.inputs.bvec_file = bvec_file dwi_tool.inputs.b0_file = b0_file dwi_tool.inputs.dti_flag = True cmd_tmp = "{cmd} -source {in_file} -bval {bval} -bvec {bvec} -b0 {b0} \ -mask {mask} -dti -famap {fa} -logdti2 {log} -mcmap {mc} -mdmap {md} \ -rgbmap {rgb} -syn {syn} -v1map {v1}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, bval=bval_file, bvec=bvec_file, b0=b0_file, mask=mask_file, fa="dwi_famap.nii.gz", log="dwi_logdti2.nii.gz", mc="dwi_mcmap.nii.gz", md="dwi_mdmap.nii.gz", rgb="dwi_rgbmap.nii.gz", syn="dwi_syn.nii.gz", v1="dwi_v1map.nii.gz", ) assert dwi_tool.cmdline == expected_cmd nipype-1.7.0/nipype/interfaces/niftyfit/tests/test_qt1.py000066400000000000000000000054041413403311400235630ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import pytest from ....testing import example_data from ...niftyreg import get_custom_path from ...niftyreg.tests.test_regutils import no_nifty_tool from ..qt1 import FitQt1 @pytest.mark.skipif(no_nifty_tool(cmd="fit_qt1"), reason="niftyfit is not installed") def test_fit_qt1(): """Testing FitQt1 interface.""" # Create a node object fit_qt1 = FitQt1() # Check if the command is properly defined cmd = get_custom_path("fit_qt1", env_dir="NIFTYFITDIR") assert fit_qt1.cmd == cmd # test raising error with mandatory args absent with pytest.raises(ValueError): fit_qt1.run() # Regular test: in_file = example_data("TI4D.nii.gz") fit_qt1.inputs.source_file = in_file cmd_tmp = "{cmd} -source {in_file} -comp {comp} -error {error} -m0map \ {map0} -mcmap {cmap} -res {res} -syn {syn} -t1map {t1map}" expected_cmd = cmd_tmp.format( 
cmd=cmd, in_file=in_file, comp="TI4D_comp.nii.gz", map0="TI4D_m0map.nii.gz", error="TI4D_error.nii.gz", cmap="TI4D_mcmap.nii.gz", res="TI4D_res.nii.gz", t1map="TI4D_t1map.nii.gz", syn="TI4D_syn.nii.gz", ) assert fit_qt1.cmdline == expected_cmd # Runs T1 fitting to inversion and saturation recovery data (NLSQR) fit_qt1_2 = FitQt1(tis=[1, 2, 5], ir_flag=True) in_file = example_data("TI4D.nii.gz") fit_qt1_2.inputs.source_file = in_file cmd_tmp = "{cmd} -source {in_file} -IR -TIs 1.0 2.0 5.0 \ -comp {comp} -error {error} -m0map {map0} -mcmap {cmap} -res {res} \ -syn {syn} -t1map {t1map}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, comp="TI4D_comp.nii.gz", map0="TI4D_m0map.nii.gz", error="TI4D_error.nii.gz", cmap="TI4D_mcmap.nii.gz", res="TI4D_res.nii.gz", t1map="TI4D_t1map.nii.gz", syn="TI4D_syn.nii.gz", ) assert fit_qt1_2.cmdline == expected_cmd # Runs T1 fitting to spoiled gradient echo (SPGR) data (NLSQR) fit_qt1_3 = FitQt1(flips=[2, 4, 8], spgr=True) in_file = example_data("TI4D.nii.gz") fit_qt1_3.inputs.source_file = in_file cmd_tmp = "{cmd} -source {in_file} -comp {comp} -error {error} \ -flips 2.0 4.0 8.0 -m0map {map0} -mcmap {cmap} -res {res} -SPGR -syn {syn} \ -t1map {t1map}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, comp="TI4D_comp.nii.gz", map0="TI4D_m0map.nii.gz", error="TI4D_error.nii.gz", cmap="TI4D_mcmap.nii.gz", res="TI4D_res.nii.gz", t1map="TI4D_t1map.nii.gz", syn="TI4D_syn.nii.gz", ) assert fit_qt1_3.cmdline == expected_cmd nipype-1.7.0/nipype/interfaces/niftyreg/000077500000000000000000000000001413403311400202735ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/niftyreg/__init__.py000066400000000000000000000010551413403311400224050ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ NiftyReg is an open-source software for efficient medical image registration. 
The niftyreg module provides classes for interfacing with the `NiftyReg `_ command line tools. """ from .base import get_custom_path from .reg import RegAladin, RegF3D from .regutils import ( RegResample, RegJacobian, RegAverage, RegTools, RegTransform, RegMeasure, ) nipype-1.7.0/nipype/interfaces/niftyreg/base.py000066400000000000000000000113601413403311400215600ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ The niftyreg module provides classes for interfacing with `niftyreg `_ command line tools. These are the base tools for working with niftyreg. Registration tools are found in niftyreg/reg.py Every other tool is found in niftyreg/regutils.py Examples -------- See the docstrings of the individual classes for examples. """ from distutils.version import StrictVersion import os from ... import logging from ..base import CommandLine, CommandLineInputSpec, traits, Undefined, PackageInfo from ...utils.filemanip import split_filename iflogger = logging.getLogger("nipype.interface") def get_custom_path(command, env_dir="NIFTYREGDIR"): return os.path.join(os.getenv(env_dir, ""), command) class Info(PackageInfo): version_cmd = get_custom_path("reg_aladin") + " --version" @staticmethod def parse_version(raw_info): return raw_info class NiftyRegCommandInputSpec(CommandLineInputSpec): """Input Spec for niftyreg interfaces.""" # Set the number of omp thread to use omp_core_val = traits.Int( int(os.environ.get("OMP_NUM_THREADS", "1")), desc="Number of openmp thread to use", argstr="-omp %i", usedefault=True, ) class NiftyRegCommand(CommandLine): """ Base support interface for NiftyReg commands. 
""" _suffix = "_nr" _min_version = "1.5.30" input_spec = NiftyRegCommandInputSpec def __init__(self, required_version=None, **inputs): self.num_threads = 1 super(NiftyRegCommand, self).__init__(**inputs) self.required_version = required_version _version = self.version if _version: if self._min_version is not None and StrictVersion( _version ) < StrictVersion(self._min_version): msg = "A later version of Niftyreg is required (%s < %s)" iflogger.warning(msg, _version, self._min_version) if required_version is not None: if StrictVersion(_version) != StrictVersion(required_version): msg = "The version of NiftyReg differs from the required" msg += "(%s != %s)" iflogger.warning(msg, _version, self.required_version) self.inputs.on_trait_change(self._omp_update, "omp_core_val") self.inputs.on_trait_change(self._environ_update, "environ") self._omp_update() def _omp_update(self): if self.inputs.omp_core_val: self.inputs.environ["OMP_NUM_THREADS"] = str(self.inputs.omp_core_val) self.num_threads = self.inputs.omp_core_val else: if "OMP_NUM_THREADS" in self.inputs.environ: del self.inputs.environ["OMP_NUM_THREADS"] self.num_threads = 1 def _environ_update(self): if self.inputs.environ: if "OMP_NUM_THREADS" in self.inputs.environ: self.inputs.omp_core_val = int(self.inputs.environ["OMP_NUM_THREADS"]) else: self.inputs.omp_core_val = Undefined else: self.inputs.omp_core_val = Undefined def check_version(self): _version = self.version if not _version: raise Exception("Niftyreg not found") if StrictVersion(_version) < StrictVersion(self._min_version): err = "A later version of Niftyreg is required (%s < %s)" raise ValueError(err % (_version, self._min_version)) if self.required_version: if StrictVersion(_version) != StrictVersion(self.required_version): err = "The version of NiftyReg differs from the required" err += "(%s != %s)" raise ValueError(err % (_version, self.required_version)) @property def version(self): return Info.version() def exists(self): return self.version is 
not None def _format_arg(self, name, spec, value): if name == "omp_core_val": self.numthreads = value return super(NiftyRegCommand, self)._format_arg(name, spec, value) def _gen_fname(self, basename, out_dir=None, suffix=None, ext=None): if basename == "": msg = "Unable to generate filename for command %s. " % self.cmd msg += "basename is not set!" raise ValueError(msg) _, final_bn, final_ext = split_filename(basename) if out_dir is None: out_dir = os.getcwd() if ext is not None: final_ext = ext if suffix is not None: final_bn = "".join((final_bn, suffix)) return os.path.abspath(os.path.join(out_dir, final_bn + final_ext)) nipype-1.7.0/nipype/interfaces/niftyreg/reg.py000066400000000000000000000360421413403311400214270ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ The reg module provides classes for interfacing with the `niftyreg `_ registration command line tools. 
The interfaces were written to work with niftyreg version 1.5.10 """ import os from ..base import TraitedSpec, File, traits, isdefined from .base import get_custom_path, NiftyRegCommand, NiftyRegCommandInputSpec from ...utils.filemanip import split_filename class RegAladinInputSpec(NiftyRegCommandInputSpec): """Input Spec for RegAladin.""" # Input reference file ref_file = File( exists=True, desc="The input reference/target image", argstr="-ref %s", mandatory=True, ) # Input floating file flo_file = File( exists=True, desc="The input floating/source image", argstr="-flo %s", mandatory=True, ) # No symmetric flag nosym_flag = traits.Bool(argstr="-noSym", desc="Turn off symmetric registration") # Rigid only registration rig_only_flag = traits.Bool(argstr="-rigOnly", desc="Do only a rigid registration") # Directly optimise affine flag desc = "Directly optimise the affine parameters" aff_direct_flag = traits.Bool(argstr="-affDirect", desc=desc) # Input affine in_aff_file = File( exists=True, desc="The input affine transformation", argstr="-inaff %s" ) # Input reference mask rmask_file = File(exists=True, desc="The input reference mask", argstr="-rmask %s") # Input floating mask fmask_file = File(exists=True, desc="The input floating mask", argstr="-fmask %s") # Maximum number of iterations maxit_val = traits.Range( desc="Maximum number of iterations", argstr="-maxit %d", low=0 ) # Multiresolution levels ln_val = traits.Range( desc="Number of resolution levels to create", argstr="-ln %d", low=0 ) # Number of resolution levels to process lp_val = traits.Range( desc="Number of resolution levels to perform", argstr="-lp %d", low=0 ) # Smoothing to apply on reference image desc = "Amount of smoothing to apply to reference image" smoo_r_val = traits.Float(desc=desc, argstr="-smooR %f") # Smoothing to apply on floating image desc = "Amount of smoothing to apply to floating image" smoo_f_val = traits.Float(desc=desc, argstr="-smooF %f") # Use nifti header to initialise 
transformation desc = "Use nifti header to initialise transformation" nac_flag = traits.Bool(desc=desc, argstr="-nac") # Use the input masks centre of mass to initialise the transformation desc = "Use the masks centre of mass to initialise the transformation" cog_flag = traits.Bool(desc=desc, argstr="-cog") # Percent of blocks that are considered active. v_val = traits.Range( desc="Percent of blocks that are active", argstr="-pv %d", low=0 ) # Percent of inlier blocks i_val = traits.Range(desc="Percent of inlier blocks", argstr="-pi %d", low=0) # Lower threshold on reference image ref_low_val = traits.Float( desc="Lower threshold value on reference image", argstr="-refLowThr %f" ) # Upper threshold on reference image ref_up_val = traits.Float( desc="Upper threshold value on reference image", argstr="-refUpThr %f" ) # Lower threshold on floating image flo_low_val = traits.Float( desc="Lower threshold value on floating image", argstr="-floLowThr %f" ) # Upper threshold on floating image flo_up_val = traits.Float( desc="Upper threshold value on floating image", argstr="-floUpThr %f" ) # Platform to use platform_val = traits.Int(desc="Platform index", argstr="-platf %i") # Platform to use gpuid_val = traits.Int(desc="Device to use id", argstr="-gpuid %i") # Verbosity off verbosity_off_flag = traits.Bool(argstr="-voff", desc="Turn off verbose output") # Affine output transformation matrix file aff_file = File( name_source=["flo_file"], name_template="%s_aff.txt", desc="The output affine matrix file", argstr="-aff %s", ) # Result warped image file res_file = File( name_source=["flo_file"], name_template="%s_res.nii.gz", desc="The affine transformed floating image", argstr="-res %s", ) class RegAladinOutputSpec(TraitedSpec): """Output Spec for RegAladin.""" aff_file = File(desc="The output affine file") res_file = File(desc="The output transformed image") desc = "Output string in the format for reg_average" avg_output = traits.String(desc=desc) class 
RegAladin(NiftyRegCommand): """Interface for executable reg_aladin from NiftyReg platform. Block Matching algorithm for symmetric global registration. Based on Modat et al., "Global image registration using asymmetric block-matching approach" J. Med. Img. 1(2) 024003, 2014, doi: 10.1117/1.JMI.1.2.024003 `Source code `_ Examples -------- >>> from nipype.interfaces import niftyreg >>> node = niftyreg.RegAladin() >>> node.inputs.ref_file = 'im1.nii' >>> node.inputs.flo_file = 'im2.nii' >>> node.inputs.rmask_file = 'mask.nii' >>> node.inputs.omp_core_val = 4 >>> node.cmdline 'reg_aladin -aff im2_aff.txt -flo im2.nii -omp 4 -ref im1.nii \ -res im2_res.nii.gz -rmask mask.nii' """ _cmd = get_custom_path("reg_aladin") input_spec = RegAladinInputSpec output_spec = RegAladinOutputSpec def _list_outputs(self): outputs = super(RegAladin, self)._list_outputs() # Make a list of the linear transformation file and the input image aff = os.path.abspath(outputs["aff_file"]) flo = os.path.abspath(self.inputs.flo_file) outputs["avg_output"] = "%s %s" % (aff, flo) return outputs class RegF3DInputSpec(NiftyRegCommandInputSpec): """Input Spec for RegF3D.""" # Input reference file ref_file = File( exists=True, desc="The input reference/target image", argstr="-ref %s", mandatory=True, ) # Input floating file flo_file = File( exists=True, desc="The input floating/source image", argstr="-flo %s", mandatory=True, ) # Input Affine file aff_file = File( exists=True, desc="The input affine transformation file", argstr="-aff %s" ) # Input cpp file incpp_file = File( exists=True, desc="The input cpp transformation file", argstr="-incpp %s" ) # Reference mask rmask_file = File(exists=True, desc="Reference image mask", argstr="-rmask %s") # Smoothing kernel for reference desc = "Smoothing kernel width for reference image" ref_smooth_val = traits.Float(desc=desc, argstr="-smooR %f") # Smoothing kernel for floating desc = "Smoothing kernel width for floating image" flo_smooth_val = 
traits.Float(desc=desc, argstr="-smooF %f") # Lower threshold for reference image rlwth_thr_val = traits.Float( desc="Lower threshold for reference image", argstr="--rLwTh %f" ) # Upper threshold for reference image rupth_thr_val = traits.Float( desc="Upper threshold for reference image", argstr="--rUpTh %f" ) # Lower threshold for reference image flwth_thr_val = traits.Float( desc="Lower threshold for floating image", argstr="--fLwTh %f" ) # Upper threshold for reference image fupth_thr_val = traits.Float( desc="Upper threshold for floating image", argstr="--fUpTh %f" ) # Lower threshold for reference image desc = "Lower threshold for reference image at the specified time point" rlwth2_thr_val = traits.Tuple( traits.Range(low=0), traits.Float, desc=desc, argstr="-rLwTh %d %f" ) # Upper threshold for reference image desc = "Upper threshold for reference image at the specified time point" rupth2_thr_val = traits.Tuple( traits.Range(low=0), traits.Float, desc=desc, argstr="-rUpTh %d %f" ) # Lower threshold for reference image desc = "Lower threshold for floating image at the specified time point" flwth2_thr_val = traits.Tuple( traits.Range(low=0), traits.Float, desc=desc, argstr="-fLwTh %d %f" ) # Upper threshold for reference image desc = "Upper threshold for floating image at the specified time point" fupth2_thr_val = traits.Tuple( traits.Range(low=0), traits.Float, desc=desc, argstr="-fUpTh %d %f" ) # Final grid spacing along the 3 axes sx_val = traits.Float(desc="Final grid spacing along the x axes", argstr="-sx %f") sy_val = traits.Float(desc="Final grid spacing along the y axes", argstr="-sy %f") sz_val = traits.Float(desc="Final grid spacing along the z axes", argstr="-sz %f") # Regularisation options be_val = traits.Float(desc="Bending energy value", argstr="-be %f") le_val = traits.Float(desc="Linear elasticity penalty term", argstr="-le %f") jl_val = traits.Float( desc="Log of jacobian of deformation penalty value", argstr="-jl %f" ) desc = "Do not 
approximate the log of jacobian penalty at control points \ only" no_app_jl_flag = traits.Bool(argstr="-noAppJL", desc=desc) # Similarity measure options desc = "use NMI even when other options are specified" nmi_flag = traits.Bool(argstr="--nmi", desc=desc) desc = "Number of bins in the histogram for reference image" rbn_val = traits.Range(low=0, desc=desc, argstr="--rbn %d") desc = "Number of bins in the histogram for reference image" fbn_val = traits.Range(low=0, desc=desc, argstr="--fbn %d") desc = "Number of bins in the histogram for reference image for given \ time point" rbn2_val = traits.Tuple( traits.Range(low=0), traits.Range(low=0), desc=desc, argstr="-rbn %d %d" ) desc = "Number of bins in the histogram for reference image for given \ time point" fbn2_val = traits.Tuple( traits.Range(low=0), traits.Range(low=0), desc=desc, argstr="-fbn %d %d" ) lncc_val = traits.Float( desc="SD of the Gaussian for computing LNCC", argstr="--lncc %f" ) desc = "SD of the Gaussian for computing LNCC for a given time point" lncc2_val = traits.Tuple( traits.Range(low=0), traits.Float, desc=desc, argstr="-lncc %d %f" ) ssd_flag = traits.Bool(desc="Use SSD as the similarity measure", argstr="--ssd") desc = "Use SSD as the similarity measure for a given time point" ssd2_flag = traits.Range(low=0, desc=desc, argstr="-ssd %d") kld_flag = traits.Bool( desc="Use KL divergence as the similarity measure", argstr="--kld" ) desc = "Use KL divergence as the similarity measure for a given time point" kld2_flag = traits.Range(low=0, desc=desc, argstr="-kld %d") amc_flag = traits.Bool(desc="Use additive NMI", argstr="-amc") nox_flag = traits.Bool(desc="Don't optimise in x direction", argstr="-nox") noy_flag = traits.Bool(desc="Don't optimise in y direction", argstr="-noy") noz_flag = traits.Bool(desc="Don't optimise in z direction", argstr="-noz") # Optimization options maxit_val = traits.Range( low=0, argstr="-maxit %d", desc="Maximum number of iterations per level" ) ln_val = 
traits.Range( low=0, argstr="-ln %d", desc="Number of resolution levels to create" ) lp_val = traits.Range( low=0, argstr="-lp %d", desc="Number of resolution levels to perform" ) nopy_flag = traits.Bool( desc="Do not use the multiresolution approach", argstr="-nopy" ) noconj_flag = traits.Bool(desc="Use simple GD optimization", argstr="-noConj") desc = "Add perturbation steps after each optimization step" pert_val = traits.Range(low=0, desc=desc, argstr="-pert %d") # F3d2 options vel_flag = traits.Bool(desc="Use velocity field integration", argstr="-vel") fmask_file = File(exists=True, desc="Floating image mask", argstr="-fmask %s") # Other options desc = "Kernel width for smoothing the metric gradient" smooth_grad_val = traits.Float(desc=desc, argstr="-smoothGrad %f") # Padding value pad_val = traits.Float(desc="Padding value", argstr="-pad %f") # verbosity off verbosity_off_flag = traits.Bool(argstr="-voff", desc="Turn off verbose output") # Output CPP image file cpp_file = File( name_source=["flo_file"], name_template="%s_cpp.nii.gz", desc="The output CPP file", argstr="-cpp %s", ) # Output warped image file res_file = File( name_source=["flo_file"], name_template="%s_res.nii.gz", desc="The output resampled image", argstr="-res %s", ) class RegF3DOutputSpec(TraitedSpec): """Output Spec for RegF3D.""" cpp_file = File(desc="The output CPP file") res_file = File(desc="The output resampled image") invcpp_file = File(desc="The output inverse CPP file") invres_file = File(desc="The output inverse res file") desc = "Output string in the format for reg_average" avg_output = traits.String(desc=desc) class RegF3D(NiftyRegCommand): """Interface for executable reg_f3d from NiftyReg platform. Fast Free-Form Deformation (F3D) algorithm for non-rigid registration. 
Initially based on Modat et al., "Fast Free-Form Deformation using graphics processing units", CMPB, 2010 `Source code `_ Examples -------- >>> from nipype.interfaces import niftyreg >>> node = niftyreg.RegF3D() >>> node.inputs.ref_file = 'im1.nii' >>> node.inputs.flo_file = 'im2.nii' >>> node.inputs.rmask_file = 'mask.nii' >>> node.inputs.omp_core_val = 4 >>> node.cmdline 'reg_f3d -cpp im2_cpp.nii.gz -flo im2.nii -omp 4 -ref im1.nii \ -res im2_res.nii.gz -rmask mask.nii' """ _cmd = get_custom_path("reg_f3d") input_spec = RegF3DInputSpec output_spec = RegF3DOutputSpec @staticmethod def _remove_extension(in_file): dn, bn, _ = split_filename(in_file) return os.path.join(dn, bn) def _list_outputs(self): outputs = super(RegF3D, self)._list_outputs() if self.inputs.vel_flag is True: res_name = self._remove_extension(outputs["res_file"]) cpp_name = self._remove_extension(outputs["cpp_file"]) outputs["invres_file"] = "%s_backward.nii.gz" % res_name outputs["invcpp_file"] = "%s_backward.nii.gz" % cpp_name # Make a list of the linear transformation file and the input image if self.inputs.vel_flag is True and isdefined(self.inputs.aff_file): cpp_file = os.path.abspath(outputs["cpp_file"]) flo_file = os.path.abspath(self.inputs.flo_file) outputs["avg_output"] = "%s %s %s" % ( self.inputs.aff_file, cpp_file, flo_file, ) else: cpp_file = os.path.abspath(outputs["cpp_file"]) flo_file = os.path.abspath(self.inputs.flo_file) outputs["avg_output"] = "%s %s" % (cpp_file, flo_file) return outputs nipype-1.7.0/nipype/interfaces/niftyreg/regutils.py000066400000000000000000000657111413403311400225150ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The regutils module provides classes for interfacing with the `niftyreg `_ utility command line tools. 
The interfaces were written to work with niftyreg version 1.5.10 """ import os from ..base import TraitedSpec, File, traits, isdefined from .base import get_custom_path, NiftyRegCommand, NiftyRegCommandInputSpec from ...utils.filemanip import split_filename class RegResampleInputSpec(NiftyRegCommandInputSpec): """Input Spec for RegResample.""" # Input reference file ref_file = File( exists=True, desc="The input reference/target image", argstr="-ref %s", mandatory=True, ) # Input floating file flo_file = File( exists=True, desc="The input floating/source image", argstr="-flo %s", mandatory=True, ) # Input deformation field trans_file = File( exists=True, desc="The input transformation file", argstr="-trans %s" ) type = traits.Enum( "res", "blank", argstr="-%s", position=-2, usedefault=True, desc="Type of output", ) # Output file name out_file = File( name_source=["flo_file"], name_template="%s", argstr="%s", position=-1, desc="The output filename of the transformed image", ) # Interpolation type inter_val = traits.Enum( "NN", "LIN", "CUB", "SINC", desc="Interpolation type", argstr="-inter %d" ) # Padding value pad_val = traits.Float(desc="Padding value", argstr="-pad %f") # Tensor flag tensor_flag = traits.Bool(desc="Resample Tensor Map", argstr="-tensor ") # Verbosity off verbosity_off_flag = traits.Bool(argstr="-voff", desc="Turn off verbose output") # PSF flag desc = "Perform the resampling in two steps to resample an image to a \ lower resolution" psf_flag = traits.Bool(argstr="-psf", desc=desc) desc = "Minimise the matrix metric (0) or the determinant (1) when \ estimating the PSF [0]" psf_alg = traits.Enum(0, 1, argstr="-psf_alg %d", desc=desc) class RegResampleOutputSpec(TraitedSpec): """Output Spec for RegResample.""" out_file = File(desc="The output filename of the transformed image") class RegResample(NiftyRegCommand): """Interface for executable reg_resample from NiftyReg platform. 
Tool to resample floating image in the space of a defined reference image given a transformation parametrisation generated by reg_aladin, reg_f3d or reg_transform `Source code `_ Examples -------- >>> from nipype.interfaces import niftyreg >>> node = niftyreg.RegResample() >>> node.inputs.ref_file = 'im1.nii' >>> node.inputs.flo_file = 'im2.nii' >>> node.inputs.trans_file = 'warpfield.nii' >>> node.inputs.inter_val = 'LIN' >>> node.inputs.omp_core_val = 4 >>> node.cmdline 'reg_resample -flo im2.nii -inter 1 -omp 4 -ref im1.nii -trans \ warpfield.nii -res im2_res.nii.gz' """ _cmd = get_custom_path("reg_resample") input_spec = RegResampleInputSpec output_spec = RegResampleOutputSpec # Need this overload to properly constraint the interpolation type input def _format_arg(self, name, spec, value): if name == "inter_val": inter_val = {"NN": 0, "LIN": 1, "CUB": 3, "SINC": 4} return spec.argstr % inter_val[value] else: return super(RegResample, self)._format_arg(name, spec, value) def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) suffix = self.inputs.type return os.path.join(path, "{0}_{1}.nii.gz".format(base, suffix)) class RegJacobianInputSpec(NiftyRegCommandInputSpec): """Input Spec for RegJacobian.""" # Reference file name desc = "Reference/target file (required if specifying CPP transformations." 
ref_file = File(exists=True, desc=desc, argstr="-ref %s") # Input transformation file trans_file = File( exists=True, desc="The input non-rigid transformation", argstr="-trans %s", mandatory=True, ) type = traits.Enum( "jac", "jacL", "jacM", usedefault=True, argstr="-%s", position=-2, desc="Type of jacobian outcome", ) out_file = File( name_source=["trans_file"], name_template="%s", desc="The output jacobian determinant file name", argstr="%s", position=-1, ) class RegJacobianOutputSpec(TraitedSpec): """Output Spec for RegJacobian.""" out_file = File(desc="The output file") class RegJacobian(NiftyRegCommand): """Interface for executable reg_resample from NiftyReg platform. Tool to generate Jacobian determinant maps from transformation parametrisation generated by reg_f3d `Source code `_ Examples -------- >>> from nipype.interfaces import niftyreg >>> node = niftyreg.RegJacobian() >>> node.inputs.ref_file = 'im1.nii' >>> node.inputs.trans_file = 'warpfield.nii' >>> node.inputs.omp_core_val = 4 >>> node.cmdline 'reg_jacobian -omp 4 -ref im1.nii -trans warpfield.nii -jac \ warpfield_jac.nii.gz' """ _cmd = get_custom_path("reg_jacobian") input_spec = RegJacobianInputSpec output_spec = RegJacobianOutputSpec def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) suffix = self.inputs.type return os.path.join(path, "{0}_{1}.nii.gz".format(base, suffix)) class RegToolsInputSpec(NiftyRegCommandInputSpec): """Input Spec for RegTools.""" # Input image file in_file = File( exists=True, desc="The input image file path", argstr="-in %s", mandatory=True ) # Output file path out_file = File( name_source=["in_file"], name_template="%s_tools.nii.gz", desc="The output file name", argstr="-out %s", ) # Make the output image isotropic iso_flag = traits.Bool(argstr="-iso", desc="Make output image isotropic") # Set scale, slope to 0 and 1. 
noscl_flag = traits.Bool(argstr="-noscl", desc="Set scale, slope to 0 and 1") # Values outside the mask are set to NaN mask_file = File( exists=True, desc="Values outside the mask are set to NaN", argstr="-nan %s" ) # Threshold the input image desc = "Binarise the input image with the given threshold" thr_val = traits.Float(desc=desc, argstr="-thr %f") # Binarise the input image bin_flag = traits.Bool(argstr="-bin", desc="Binarise the input image") # Compute the mean RMS between the two images rms_val = File( exists=True, desc="Compute the mean RMS between the images", argstr="-rms %s" ) # Perform division by image or value div_val = traits.Either( traits.Float, File(exists=True), desc="Divide the input by image or value", argstr="-div %s", ) # Perform multiplication by image or value mul_val = traits.Either( traits.Float, File(exists=True), desc="Multiply the input by image or value", argstr="-mul %s", ) # Perform addition by image or value add_val = traits.Either( traits.Float, File(exists=True), desc="Add to the input image or value", argstr="-add %s", ) # Perform subtraction by image or value sub_val = traits.Either( traits.Float, File(exists=True), desc="Add to the input image or value", argstr="-sub %s", ) # Downsample the image by a factor of 2. 
down_flag = traits.Bool( desc="Downsample the image by a factor of 2", argstr="-down" ) # Smoothing using spline kernel desc = "Smooth the input image using a cubic spline kernel" smo_s_val = traits.Tuple( traits.Float, traits.Float, traits.Float, desc=desc, argstr="-smoS %f %f %f" ) # Change the resolution of the input image chg_res_val = traits.Tuple( traits.Float, traits.Float, traits.Float, desc="Change the resolution of the input image", argstr="-chgres %f %f %f", ) # Smoothing using Gaussian kernel desc = "Smooth the input image using a Gaussian kernel" smo_g_val = traits.Tuple( traits.Float, traits.Float, traits.Float, desc=desc, argstr="-smoG %f %f %f" ) # Interpolation type inter_val = traits.Enum( "NN", "LIN", "CUB", "SINC", desc="Interpolation order to use to warp the floating image", argstr="-interp %d", ) class RegToolsOutputSpec(TraitedSpec): """Output Spec for RegTools.""" out_file = File(desc="The output file", exists=True) class RegTools(NiftyRegCommand): """Interface for executable reg_tools from NiftyReg platform. Tool delivering various actions related to registration such as resampling the input image to a chosen resolution or remove the nan and inf in the input image by a specified value. 
`Source code `_ Examples -------- >>> from nipype.interfaces import niftyreg >>> node = niftyreg.RegTools() >>> node.inputs.in_file = 'im1.nii' >>> node.inputs.mul_val = 4 >>> node.inputs.omp_core_val = 4 >>> node.cmdline 'reg_tools -in im1.nii -mul 4.0 -omp 4 -out im1_tools.nii.gz' """ _cmd = get_custom_path("reg_tools") input_spec = RegToolsInputSpec output_spec = RegToolsOutputSpec _suffix = "_tools" # Need this overload to properly constraint the interpolation type input def _format_arg(self, name, spec, value): if name == "inter_val": inter_val = {"NN": 0, "LIN": 1, "CUB": 3, "SINC": 4} return spec.argstr % inter_val[value] else: return super(RegTools, self)._format_arg(name, spec, value) class RegAverageInputSpec(NiftyRegCommandInputSpec): """Input Spec for RegAverage.""" avg_files = traits.List( File(exist=True), position=1, argstr="-avg %s", sep=" ", xor=[ "avg_lts_files", "avg_ref_file", "demean1_ref_file", "demean2_ref_file", "demean3_ref_file", "warp_files", ], desc="Averaging of images/affine transformations", ) desc = "Robust average of affine transformations" avg_lts_files = traits.List( File(exist=True), position=1, argstr="-avg_lts %s", sep=" ", xor=[ "avg_files", "avg_ref_file", "demean1_ref_file", "demean2_ref_file", "demean3_ref_file", "warp_files", ], desc=desc, ) desc = "All input images are resampled into the space of \ and averaged. 
A cubic spline interpolation scheme is used for resampling" avg_ref_file = File( position=1, argstr="-avg_tran %s", xor=[ "avg_files", "avg_lts_files", "demean1_ref_file", "demean2_ref_file", "demean3_ref_file", ], requires=["warp_files"], desc=desc, ) desc = "Average images and demean average image that have affine \ transformations to a common space" demean1_ref_file = File( position=1, argstr="-demean1 %s", xor=[ "avg_files", "avg_lts_files", "avg_ref_file", "demean2_ref_file", "demean3_ref_file", ], requires=["warp_files"], desc=desc, ) desc = "Average images and demean average image that have non-rigid \ transformations to a common space" demean2_ref_file = File( position=1, argstr="-demean2 %s", xor=[ "avg_files", "avg_lts_files", "avg_ref_file", "demean1_ref_file", "demean3_ref_file", ], requires=["warp_files"], desc=desc, ) desc = "Average images and demean average image that have linear and \ non-rigid transformations to a common space" demean3_ref_file = File( position=1, argstr="-demean3 %s", xor=[ "avg_files", "avg_lts_files", "avg_ref_file", "demean1_ref_file", "demean2_ref_file", ], requires=["warp_files"], desc=desc, ) desc = "transformation files and floating image pairs/triplets to the \ reference space" warp_files = traits.List( File(exist=True), position=-1, argstr="%s", sep=" ", xor=["avg_files", "avg_lts_files"], desc=desc, ) out_file = File(genfile=True, position=0, desc="Output file name", argstr="%s") class RegAverageOutputSpec(TraitedSpec): """Output Spec for RegAverage.""" out_file = File(desc="Output file name") class RegAverage(NiftyRegCommand): """Interface for executable reg_average from NiftyReg platform. Compute average matrix or image from a list of matrices or image. The tool can be use to resample images given input transformation parametrisation as well as to demean transformations in Euclidean or log-Euclidean space. 
This interface is different than the others in the way that the options will be written in a command file that is given as a parameter. `Source code `_ Examples -------- >>> from nipype.interfaces import niftyreg >>> node = niftyreg.RegAverage() >>> one_file = 'im1.nii' >>> two_file = 'im2.nii' >>> three_file = 'im3.nii' >>> node.inputs.avg_files = [one_file, two_file, three_file] >>> node.cmdline # doctest: +ELLIPSIS 'reg_average --cmd_file .../reg_average_cmd' """ _cmd = get_custom_path("reg_average") input_spec = RegAverageInputSpec output_spec = RegAverageOutputSpec _suffix = "avg_out" def _gen_filename(self, name): if name == "out_file": if isdefined(self.inputs.avg_lts_files): return self._gen_fname(self._suffix, ext=".txt") elif isdefined(self.inputs.avg_files): _, _, _ext = split_filename(self.inputs.avg_files[0]) if _ext not in [".nii", ".nii.gz", ".hdr", ".img", ".img.gz"]: return self._gen_fname(self._suffix, ext=_ext) return self._gen_fname(self._suffix, ext=".nii.gz") return None def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_file): outputs["out_file"] = self.inputs.out_file else: outputs["out_file"] = self._gen_filename("out_file") return outputs @property def cmdline(self): """Rewrite the cmdline to write options in text_file.""" argv = super(RegAverage, self).cmdline reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd") with open(reg_average_cmd, "w") as f: f.write(argv) return "%s --cmd_file %s" % (self.cmd, reg_average_cmd) class RegTransformInputSpec(NiftyRegCommandInputSpec): """Input Spec for RegTransform.""" ref1_file = File( exists=True, desc="The input reference/target image", argstr="-ref %s", position=0, ) ref2_file = File( exists=True, desc="The input second reference/target image", argstr="-ref2 %s", position=1, requires=["ref1_file"], ) def_input = File( exists=True, argstr="-def %s", position=-2, desc="Compute deformation field from transformation", xor=[ "disp_input", "flow_input", 
"comp_input", "upd_s_form_input", "inv_aff_input", "inv_nrr_input", "half_input", "make_aff_input", "aff_2_rig_input", "flirt_2_nr_input", ], ) disp_input = File( exists=True, argstr="-disp %s", position=-2, desc="Compute displacement field from transformation", xor=[ "def_input", "flow_input", "comp_input", "upd_s_form_input", "inv_aff_input", "inv_nrr_input", "half_input", "make_aff_input", "aff_2_rig_input", "flirt_2_nr_input", ], ) flow_input = File( exists=True, argstr="-flow %s", position=-2, desc="Compute flow field from spline SVF", xor=[ "def_input", "disp_input", "comp_input", "upd_s_form_input", "inv_aff_input", "inv_nrr_input", "half_input", "make_aff_input", "aff_2_rig_input", "flirt_2_nr_input", ], ) comp_input = File( exists=True, argstr="-comp %s", position=-3, desc="compose two transformations", xor=[ "def_input", "disp_input", "flow_input", "upd_s_form_input", "inv_aff_input", "inv_nrr_input", "half_input", "make_aff_input", "aff_2_rig_input", "flirt_2_nr_input", ], requires=["comp_input2"], ) comp_input2 = File( exists=True, argstr="%s", position=-2, desc="compose two transformations" ) desc = "Update s-form using the affine transformation" upd_s_form_input = File( exists=True, argstr="-updSform %s", position=-3, desc=desc, xor=[ "def_input", "disp_input", "flow_input", "comp_input", "inv_aff_input", "inv_nrr_input", "half_input", "make_aff_input", "aff_2_rig_input", "flirt_2_nr_input", ], requires=["upd_s_form_input2"], ) desc = "Update s-form using the affine transformation" upd_s_form_input2 = File( exists=True, argstr="%s", position=-2, desc=desc, requires=["upd_s_form_input"] ) inv_aff_input = File( exists=True, argstr="-invAff %s", position=-2, desc="Invert an affine transformation", xor=[ "def_input", "disp_input", "flow_input", "comp_input", "upd_s_form_input", "inv_nrr_input", "half_input", "make_aff_input", "aff_2_rig_input", "flirt_2_nr_input", ], ) inv_nrr_input = traits.Tuple( File(exists=True), File(exists=True), desc="Invert a 
non-linear transformation", argstr="-invNrr %s %s", position=-2, xor=[ "def_input", "disp_input", "flow_input", "comp_input", "upd_s_form_input", "inv_aff_input", "half_input", "make_aff_input", "aff_2_rig_input", "flirt_2_nr_input", ], ) half_input = File( exists=True, argstr="-half %s", position=-2, desc="Half way to the input transformation", xor=[ "def_input", "disp_input", "flow_input", "comp_input", "upd_s_form_input", "inv_aff_input", "inv_nrr_input", "make_aff_input", "aff_2_rig_input", "flirt_2_nr_input", ], ) argstr_tmp = "-makeAff %f %f %f %f %f %f %f %f %f %f %f %f" make_aff_input = traits.Tuple( traits.Float, traits.Float, traits.Float, traits.Float, traits.Float, traits.Float, traits.Float, traits.Float, traits.Float, traits.Float, traits.Float, traits.Float, argstr=argstr_tmp, position=-2, desc="Make an affine transformation matrix", xor=[ "def_input", "disp_input", "flow_input", "comp_input", "upd_s_form_input", "inv_aff_input", "inv_nrr_input", "half_input", "aff_2_rig_input", "flirt_2_nr_input", ], ) desc = "Extract the rigid component from affine transformation" aff_2_rig_input = File( exists=True, argstr="-aff2rig %s", position=-2, desc=desc, xor=[ "def_input", "disp_input", "flow_input", "comp_input", "upd_s_form_input", "inv_aff_input", "inv_nrr_input", "half_input", "make_aff_input", "flirt_2_nr_input", ], ) desc = "Convert a FLIRT affine transformation to niftyreg affine \ transformation" flirt_2_nr_input = traits.Tuple( File(exists=True), File(exists=True), File(exists=True), argstr="-flirtAff2NR %s %s %s", position=-2, desc=desc, xor=[ "def_input", "disp_input", "flow_input", "comp_input", "upd_s_form_input", "inv_aff_input", "inv_nrr_input", "half_input", "make_aff_input", "aff_2_rig_input", ], ) out_file = File( genfile=True, position=-1, argstr="%s", desc="transformation file to write" ) class RegTransformOutputSpec(TraitedSpec): """Output Spec for RegTransform.""" out_file = File(desc="Output File (transformation in any format)") class 
RegTransform(NiftyRegCommand): """Interface for executable reg_transform from NiftyReg platform. Tools to convert transformation parametrisation from one type to another as well as to compose, inverse or half transformations. `Source code `_ Examples -------- >>> from nipype.interfaces import niftyreg >>> node = niftyreg.RegTransform() >>> node.inputs.def_input = 'warpfield.nii' >>> node.inputs.omp_core_val = 4 >>> node.cmdline # doctest: +ELLIPSIS 'reg_transform -omp 4 -def warpfield.nii .../warpfield_trans.nii.gz' """ _cmd = get_custom_path("reg_transform") input_spec = RegTransformInputSpec output_spec = RegTransformOutputSpec _suffix = "_trans" def _find_input(self): inputs = [ self.inputs.def_input, self.inputs.disp_input, self.inputs.flow_input, self.inputs.comp_input, self.inputs.comp_input2, self.inputs.upd_s_form_input, self.inputs.inv_aff_input, self.inputs.inv_nrr_input, self.inputs.half_input, self.inputs.make_aff_input, self.inputs.aff_2_rig_input, self.inputs.flirt_2_nr_input, ] entries = [] for entry in inputs: if isdefined(entry): entries.append(entry) _, _, ext = split_filename(entry) if ext == ".nii" or ext == ".nii.gz" or ext == ".hdr": return entry if len(entries): return entries[0] return None def _gen_filename(self, name): if name == "out_file": if isdefined(self.inputs.make_aff_input): return self._gen_fname("matrix", suffix=self._suffix, ext=".txt") if isdefined(self.inputs.comp_input) and isdefined(self.inputs.comp_input2): _, bn1, ext1 = split_filename(self.inputs.comp_input) _, _, ext2 = split_filename(self.inputs.comp_input2) if ext1 in [".nii", ".nii.gz", ".hdr", ".img", ".img.gz"] or ext2 in [ ".nii", ".nii.gz", ".hdr", ".img", ".img.gz", ]: return self._gen_fname(bn1, suffix=self._suffix, ext=".nii.gz") else: return self._gen_fname(bn1, suffix=self._suffix, ext=ext1) if isdefined(self.inputs.flirt_2_nr_input): return self._gen_fname( self.inputs.flirt_2_nr_input[0], suffix=self._suffix, ext=".txt" ) input_to_use = self._find_input() 
_, _, ext = split_filename(input_to_use) if ext not in [".nii", ".nii.gz", ".hdr", ".img", ".img.gz"]: return self._gen_fname(input_to_use, suffix=self._suffix, ext=ext) else: return self._gen_fname(input_to_use, suffix=self._suffix, ext=".nii.gz") return None def _list_outputs(self): outputs = self.output_spec().get() if isdefined(self.inputs.out_file): outputs["out_file"] = self.inputs.out_file else: outputs["out_file"] = self._gen_filename("out_file") return outputs class RegMeasureInputSpec(NiftyRegCommandInputSpec): """Input Spec for RegMeasure.""" # Input reference file ref_file = File( exists=True, desc="The input reference/target image", argstr="-ref %s", mandatory=True, ) # Input floating file flo_file = File( exists=True, desc="The input floating/source image", argstr="-flo %s", mandatory=True, ) measure_type = traits.Enum( "ncc", "lncc", "nmi", "ssd", mandatory=True, argstr="-%s", desc="Measure of similarity to compute", ) out_file = File( name_source=["flo_file"], name_template="%s", argstr="-out %s", desc="The output text file containing the measure", ) class RegMeasureOutputSpec(TraitedSpec): """Output Spec for RegMeasure.""" out_file = File(desc="The output text file containing the measure") class RegMeasure(NiftyRegCommand): """Interface for executable reg_measure from NiftyReg platform. 
Given two input images, compute the specified measure(s) of similarity `Source code `_ Examples -------- >>> from nipype.interfaces import niftyreg >>> node = niftyreg.RegMeasure() >>> node.inputs.ref_file = 'im1.nii' >>> node.inputs.flo_file = 'im2.nii' >>> node.inputs.measure_type = 'lncc' >>> node.inputs.omp_core_val = 4 >>> node.cmdline 'reg_measure -flo im2.nii -lncc -omp 4 -out im2_lncc.txt -ref im1.nii' """ _cmd = get_custom_path("reg_measure") input_spec = RegMeasureInputSpec output_spec = RegMeasureOutputSpec def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) suffix = self.inputs.measure_type return os.path.join(path, "{0}_{1}.txt".format(base, suffix)) nipype-1.7.0/nipype/interfaces/niftyreg/tests/000077500000000000000000000000001413403311400214355ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/niftyreg/tests/__init__.py000066400000000000000000000000301413403311400235370ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/niftyreg/tests/test_auto_NiftyRegCommand.py000066400000000000000000000011371413403311400271260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import NiftyRegCommand def test_NiftyRegCommand_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), omp_core_val=dict( argstr="-omp %i", usedefault=True, ), ) inputs = NiftyRegCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyreg/tests/test_auto_RegAladin.py000066400000000000000000000061361413403311400257320ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..reg import RegAladin def test_RegAladin_inputs(): input_map = dict( aff_direct_flag=dict( argstr="-affDirect", ), aff_file=dict( argstr="-aff %s", extensions=None, 
name_source=["flo_file"], name_template="%s_aff.txt", ), args=dict( argstr="%s", ), cog_flag=dict( argstr="-cog", ), environ=dict( nohash=True, usedefault=True, ), flo_file=dict( argstr="-flo %s", extensions=None, mandatory=True, ), flo_low_val=dict( argstr="-floLowThr %f", ), flo_up_val=dict( argstr="-floUpThr %f", ), fmask_file=dict( argstr="-fmask %s", extensions=None, ), gpuid_val=dict( argstr="-gpuid %i", ), i_val=dict( argstr="-pi %d", ), in_aff_file=dict( argstr="-inaff %s", extensions=None, ), ln_val=dict( argstr="-ln %d", ), lp_val=dict( argstr="-lp %d", ), maxit_val=dict( argstr="-maxit %d", ), nac_flag=dict( argstr="-nac", ), nosym_flag=dict( argstr="-noSym", ), omp_core_val=dict( argstr="-omp %i", usedefault=True, ), platform_val=dict( argstr="-platf %i", ), ref_file=dict( argstr="-ref %s", extensions=None, mandatory=True, ), ref_low_val=dict( argstr="-refLowThr %f", ), ref_up_val=dict( argstr="-refUpThr %f", ), res_file=dict( argstr="-res %s", extensions=None, name_source=["flo_file"], name_template="%s_res.nii.gz", ), rig_only_flag=dict( argstr="-rigOnly", ), rmask_file=dict( argstr="-rmask %s", extensions=None, ), smoo_f_val=dict( argstr="-smooF %f", ), smoo_r_val=dict( argstr="-smooR %f", ), v_val=dict( argstr="-pv %d", ), verbosity_off_flag=dict( argstr="-voff", ), ) inputs = RegAladin.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RegAladin_outputs(): output_map = dict( aff_file=dict( extensions=None, ), avg_output=dict(), res_file=dict( extensions=None, ), ) outputs = RegAladin.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyreg/tests/test_auto_RegAverage.py000066400000000000000000000064701413403311400261150ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - 
DO NOT EDIT from ..regutils import RegAverage def test_RegAverage_inputs(): input_map = dict( args=dict( argstr="%s", ), avg_files=dict( argstr="-avg %s", position=1, sep=" ", xor=[ "avg_lts_files", "avg_ref_file", "demean1_ref_file", "demean2_ref_file", "demean3_ref_file", "warp_files", ], ), avg_lts_files=dict( argstr="-avg_lts %s", position=1, sep=" ", xor=[ "avg_files", "avg_ref_file", "demean1_ref_file", "demean2_ref_file", "demean3_ref_file", "warp_files", ], ), avg_ref_file=dict( argstr="-avg_tran %s", extensions=None, position=1, requires=["warp_files"], xor=[ "avg_files", "avg_lts_files", "demean1_ref_file", "demean2_ref_file", "demean3_ref_file", ], ), demean1_ref_file=dict( argstr="-demean1 %s", extensions=None, position=1, requires=["warp_files"], xor=[ "avg_files", "avg_lts_files", "avg_ref_file", "demean2_ref_file", "demean3_ref_file", ], ), demean2_ref_file=dict( argstr="-demean2 %s", extensions=None, position=1, requires=["warp_files"], xor=[ "avg_files", "avg_lts_files", "avg_ref_file", "demean1_ref_file", "demean3_ref_file", ], ), demean3_ref_file=dict( argstr="-demean3 %s", extensions=None, position=1, requires=["warp_files"], xor=[ "avg_files", "avg_lts_files", "avg_ref_file", "demean1_ref_file", "demean2_ref_file", ], ), environ=dict( nohash=True, usedefault=True, ), omp_core_val=dict( argstr="-omp %i", usedefault=True, ), out_file=dict( argstr="%s", extensions=None, genfile=True, position=0, ), warp_files=dict( argstr="%s", position=-1, sep=" ", xor=["avg_files", "avg_lts_files"], ), ) inputs = RegAverage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RegAverage_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = RegAverage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/niftyreg/tests/test_auto_RegF3D.py000066400000000000000000000114311413403311400251100ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..reg import RegF3D def test_RegF3D_inputs(): input_map = dict( aff_file=dict( argstr="-aff %s", extensions=None, ), amc_flag=dict( argstr="-amc", ), args=dict( argstr="%s", ), be_val=dict( argstr="-be %f", ), cpp_file=dict( argstr="-cpp %s", extensions=None, name_source=["flo_file"], name_template="%s_cpp.nii.gz", ), environ=dict( nohash=True, usedefault=True, ), fbn2_val=dict( argstr="-fbn %d %d", ), fbn_val=dict( argstr="--fbn %d", ), flo_file=dict( argstr="-flo %s", extensions=None, mandatory=True, ), flo_smooth_val=dict( argstr="-smooF %f", ), flwth2_thr_val=dict( argstr="-fLwTh %d %f", ), flwth_thr_val=dict( argstr="--fLwTh %f", ), fmask_file=dict( argstr="-fmask %s", extensions=None, ), fupth2_thr_val=dict( argstr="-fUpTh %d %f", ), fupth_thr_val=dict( argstr="--fUpTh %f", ), incpp_file=dict( argstr="-incpp %s", extensions=None, ), jl_val=dict( argstr="-jl %f", ), kld2_flag=dict( argstr="-kld %d", ), kld_flag=dict( argstr="--kld", ), le_val=dict( argstr="-le %f", ), ln_val=dict( argstr="-ln %d", ), lncc2_val=dict( argstr="-lncc %d %f", ), lncc_val=dict( argstr="--lncc %f", ), lp_val=dict( argstr="-lp %d", ), maxit_val=dict( argstr="-maxit %d", ), nmi_flag=dict( argstr="--nmi", ), no_app_jl_flag=dict( argstr="-noAppJL", ), noconj_flag=dict( argstr="-noConj", ), nopy_flag=dict( argstr="-nopy", ), nox_flag=dict( argstr="-nox", ), noy_flag=dict( argstr="-noy", ), noz_flag=dict( argstr="-noz", ), omp_core_val=dict( argstr="-omp %i", usedefault=True, ), pad_val=dict( argstr="-pad %f", ), pert_val=dict( argstr="-pert %d", ), rbn2_val=dict( argstr="-rbn %d %d", ), rbn_val=dict( argstr="--rbn %d", ), ref_file=dict( argstr="-ref %s", extensions=None, mandatory=True, ), ref_smooth_val=dict( argstr="-smooR %f", ), res_file=dict( argstr="-res %s", extensions=None, 
name_source=["flo_file"], name_template="%s_res.nii.gz", ), rlwth2_thr_val=dict( argstr="-rLwTh %d %f", ), rlwth_thr_val=dict( argstr="--rLwTh %f", ), rmask_file=dict( argstr="-rmask %s", extensions=None, ), rupth2_thr_val=dict( argstr="-rUpTh %d %f", ), rupth_thr_val=dict( argstr="--rUpTh %f", ), smooth_grad_val=dict( argstr="-smoothGrad %f", ), ssd2_flag=dict( argstr="-ssd %d", ), ssd_flag=dict( argstr="--ssd", ), sx_val=dict( argstr="-sx %f", ), sy_val=dict( argstr="-sy %f", ), sz_val=dict( argstr="-sz %f", ), vel_flag=dict( argstr="-vel", ), verbosity_off_flag=dict( argstr="-voff", ), ) inputs = RegF3D.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RegF3D_outputs(): output_map = dict( avg_output=dict(), cpp_file=dict( extensions=None, ), invcpp_file=dict( extensions=None, ), invres_file=dict( extensions=None, ), res_file=dict( extensions=None, ), ) outputs = RegF3D.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyreg/tests/test_auto_RegJacobian.py000066400000000000000000000026541413403311400262510ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..regutils import RegJacobian def test_RegJacobian_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), omp_core_val=dict( argstr="-omp %i", usedefault=True, ), out_file=dict( argstr="%s", extensions=None, name_source=["trans_file"], name_template="%s", position=-1, ), ref_file=dict( argstr="-ref %s", extensions=None, ), trans_file=dict( argstr="-trans %s", extensions=None, mandatory=True, ), type=dict( argstr="-%s", position=-2, usedefault=True, ), ) inputs = RegJacobian.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RegJacobian_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = RegJacobian.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyreg/tests/test_auto_RegMeasure.py000066400000000000000000000026271413403311400261440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..regutils import RegMeasure def test_RegMeasure_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), flo_file=dict( argstr="-flo %s", extensions=None, mandatory=True, ), measure_type=dict( argstr="-%s", mandatory=True, ), omp_core_val=dict( argstr="-omp %i", usedefault=True, ), out_file=dict( argstr="-out %s", extensions=None, name_source=["flo_file"], name_template="%s", ), ref_file=dict( argstr="-ref %s", extensions=None, mandatory=True, ), ) inputs = RegMeasure.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RegMeasure_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = RegMeasure.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyreg/tests/test_auto_RegResample.py000066400000000000000000000036611413403311400263120ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..regutils import RegResample def test_RegResample_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), flo_file=dict( argstr="-flo %s", extensions=None, mandatory=True, ), inter_val=dict( argstr="-inter %d", ), omp_core_val=dict( 
argstr="-omp %i", usedefault=True, ), out_file=dict( argstr="%s", extensions=None, name_source=["flo_file"], name_template="%s", position=-1, ), pad_val=dict( argstr="-pad %f", ), psf_alg=dict( argstr="-psf_alg %d", ), psf_flag=dict( argstr="-psf", ), ref_file=dict( argstr="-ref %s", extensions=None, mandatory=True, ), tensor_flag=dict( argstr="-tensor ", ), trans_file=dict( argstr="-trans %s", extensions=None, ), type=dict( argstr="-%s", position=-2, usedefault=True, ), verbosity_off_flag=dict( argstr="-voff", ), ) inputs = RegResample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RegResample_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = RegResample.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyreg/tests/test_auto_RegTools.py000066400000000000000000000043211413403311400256340ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..regutils import RegTools def test_RegTools_inputs(): input_map = dict( add_val=dict( argstr="-add %s", ), args=dict( argstr="%s", ), bin_flag=dict( argstr="-bin", ), chg_res_val=dict( argstr="-chgres %f %f %f", ), div_val=dict( argstr="-div %s", ), down_flag=dict( argstr="-down", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, ), inter_val=dict( argstr="-interp %d", ), iso_flag=dict( argstr="-iso", ), mask_file=dict( argstr="-nan %s", extensions=None, ), mul_val=dict( argstr="-mul %s", ), noscl_flag=dict( argstr="-noscl", ), omp_core_val=dict( argstr="-omp %i", usedefault=True, ), out_file=dict( argstr="-out %s", extensions=None, name_source=["in_file"], name_template="%s_tools.nii.gz", ), rms_val=dict( argstr="-rms %s", extensions=None, 
), smo_g_val=dict( argstr="-smoG %f %f %f", ), smo_s_val=dict( argstr="-smoS %f %f %f", ), sub_val=dict( argstr="-sub %s", ), thr_val=dict( argstr="-thr %f", ), ) inputs = RegTools.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RegTools_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = RegTools.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyreg/tests/test_auto_RegTransform.py000066400000000000000000000154761413403311400265240ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..regutils import RegTransform def test_RegTransform_inputs(): input_map = dict( aff_2_rig_input=dict( argstr="-aff2rig %s", extensions=None, position=-2, xor=[ "def_input", "disp_input", "flow_input", "comp_input", "upd_s_form_input", "inv_aff_input", "inv_nrr_input", "half_input", "make_aff_input", "flirt_2_nr_input", ], ), args=dict( argstr="%s", ), comp_input=dict( argstr="-comp %s", extensions=None, position=-3, requires=["comp_input2"], xor=[ "def_input", "disp_input", "flow_input", "upd_s_form_input", "inv_aff_input", "inv_nrr_input", "half_input", "make_aff_input", "aff_2_rig_input", "flirt_2_nr_input", ], ), comp_input2=dict( argstr="%s", extensions=None, position=-2, ), def_input=dict( argstr="-def %s", extensions=None, position=-2, xor=[ "disp_input", "flow_input", "comp_input", "upd_s_form_input", "inv_aff_input", "inv_nrr_input", "half_input", "make_aff_input", "aff_2_rig_input", "flirt_2_nr_input", ], ), disp_input=dict( argstr="-disp %s", extensions=None, position=-2, xor=[ "def_input", "flow_input", "comp_input", "upd_s_form_input", "inv_aff_input", "inv_nrr_input", "half_input", "make_aff_input", "aff_2_rig_input", "flirt_2_nr_input", ], ), 
environ=dict( nohash=True, usedefault=True, ), flirt_2_nr_input=dict( argstr="-flirtAff2NR %s %s %s", position=-2, xor=[ "def_input", "disp_input", "flow_input", "comp_input", "upd_s_form_input", "inv_aff_input", "inv_nrr_input", "half_input", "make_aff_input", "aff_2_rig_input", ], ), flow_input=dict( argstr="-flow %s", extensions=None, position=-2, xor=[ "def_input", "disp_input", "comp_input", "upd_s_form_input", "inv_aff_input", "inv_nrr_input", "half_input", "make_aff_input", "aff_2_rig_input", "flirt_2_nr_input", ], ), half_input=dict( argstr="-half %s", extensions=None, position=-2, xor=[ "def_input", "disp_input", "flow_input", "comp_input", "upd_s_form_input", "inv_aff_input", "inv_nrr_input", "make_aff_input", "aff_2_rig_input", "flirt_2_nr_input", ], ), inv_aff_input=dict( argstr="-invAff %s", extensions=None, position=-2, xor=[ "def_input", "disp_input", "flow_input", "comp_input", "upd_s_form_input", "inv_nrr_input", "half_input", "make_aff_input", "aff_2_rig_input", "flirt_2_nr_input", ], ), inv_nrr_input=dict( argstr="-invNrr %s %s", position=-2, xor=[ "def_input", "disp_input", "flow_input", "comp_input", "upd_s_form_input", "inv_aff_input", "half_input", "make_aff_input", "aff_2_rig_input", "flirt_2_nr_input", ], ), make_aff_input=dict( argstr="-makeAff %f %f %f %f %f %f %f %f %f %f %f %f", position=-2, xor=[ "def_input", "disp_input", "flow_input", "comp_input", "upd_s_form_input", "inv_aff_input", "inv_nrr_input", "half_input", "aff_2_rig_input", "flirt_2_nr_input", ], ), omp_core_val=dict( argstr="-omp %i", usedefault=True, ), out_file=dict( argstr="%s", extensions=None, genfile=True, position=-1, ), ref1_file=dict( argstr="-ref %s", extensions=None, position=0, ), ref2_file=dict( argstr="-ref2 %s", extensions=None, position=1, requires=["ref1_file"], ), upd_s_form_input=dict( argstr="-updSform %s", extensions=None, position=-3, requires=["upd_s_form_input2"], xor=[ "def_input", "disp_input", "flow_input", "comp_input", "inv_aff_input", 
"inv_nrr_input", "half_input", "make_aff_input", "aff_2_rig_input", "flirt_2_nr_input", ], ), upd_s_form_input2=dict( argstr="%s", extensions=None, position=-2, requires=["upd_s_form_input"], ), ) inputs = RegTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RegTransform_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = RegTransform.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyreg/tests/test_reg.py000066400000000000000000000051011413403311400236200ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import pytest from ....testing import example_data from .. import get_custom_path, RegAladin, RegF3D from .test_regutils import no_nifty_tool @pytest.mark.skipif( no_nifty_tool(cmd="reg_aladin"), reason="niftyreg is not installed. 
reg_aladin not found.", ) def test_reg_aladin(): """tests for reg_aladin interface""" # Create a reg_aladin object nr_aladin = RegAladin() # Check if the command is properly defined assert nr_aladin.cmd == get_custom_path("reg_aladin") # test raising error with mandatory args absent with pytest.raises(ValueError): nr_aladin.run() # Assign some input data ref_file = example_data("im1.nii") flo_file = example_data("im2.nii") rmask_file = example_data("mask.nii") nr_aladin.inputs.ref_file = ref_file nr_aladin.inputs.flo_file = flo_file nr_aladin.inputs.rmask_file = rmask_file nr_aladin.inputs.omp_core_val = 4 cmd_tmp = "{cmd} -aff {aff} -flo {flo} -omp 4 -ref {ref} -res {res} \ -rmask {rmask}" expected_cmd = cmd_tmp.format( cmd=get_custom_path("reg_aladin"), aff="im2_aff.txt", flo=flo_file, ref=ref_file, res="im2_res.nii.gz", rmask=rmask_file, ) assert nr_aladin.cmdline == expected_cmd @pytest.mark.skipif( no_nifty_tool(cmd="reg_f3d"), reason="niftyreg is not installed. reg_f3d not found." ) def test_reg_f3d(): """tests for reg_f3d interface""" # Create a reg_f3d object nr_f3d = RegF3D() # Check if the command is properly defined assert nr_f3d.cmd == get_custom_path("reg_f3d") # test raising error with mandatory args absent with pytest.raises(ValueError): nr_f3d.run() # Assign some input data ref_file = example_data("im1.nii") flo_file = example_data("im2.nii") rmask_file = example_data("mask.nii") nr_f3d.inputs.ref_file = ref_file nr_f3d.inputs.flo_file = flo_file nr_f3d.inputs.rmask_file = rmask_file nr_f3d.inputs.omp_core_val = 4 nr_f3d.inputs.vel_flag = True nr_f3d.inputs.be_val = 0.1 nr_f3d.inputs.le_val = 0.1 cmd_tmp = "{cmd} -be 0.100000 -cpp {cpp} -flo {flo} -le 0.100000 -omp 4 \ -ref {ref} -res {res} -rmask {rmask} -vel" expected_cmd = cmd_tmp.format( cmd=get_custom_path("reg_f3d"), cpp="im2_cpp.nii.gz", flo=flo_file, ref=ref_file, res="im2_res.nii.gz", rmask=rmask_file, ) assert nr_f3d.cmdline == expected_cmd 
nipype-1.7.0/nipype/interfaces/niftyreg/tests/test_regutils.py000066400000000000000000000401261413403311400247070ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import pytest from ....utils.filemanip import which from ....testing import example_data from .. import ( get_custom_path, RegAverage, RegResample, RegJacobian, RegTools, RegMeasure, RegTransform, ) def no_nifty_tool(cmd=None): return which(cmd) is None @pytest.mark.skipif( no_nifty_tool(cmd="reg_resample"), reason="niftyreg is not installed. reg_resample not found.", ) def test_reg_resample_res(): """tests for reg_resample interface""" # Create a reg_resample object nr_resample = RegResample() # Check if the command is properly defined assert nr_resample.cmd == get_custom_path("reg_resample") # test raising error with mandatory args absent with pytest.raises(ValueError): nr_resample.run() # Resample res ref_file = example_data("im1.nii") flo_file = example_data("im2.nii") trans_file = example_data("warpfield.nii") nr_resample.inputs.ref_file = ref_file nr_resample.inputs.flo_file = flo_file nr_resample.inputs.trans_file = trans_file nr_resample.inputs.inter_val = "LIN" nr_resample.inputs.omp_core_val = 4 cmd_tmp = "{cmd} -flo {flo} -inter 1 -omp 4 -ref {ref} -trans {trans} \ -res {res}" expected_cmd = cmd_tmp.format( cmd=get_custom_path("reg_resample"), flo=flo_file, ref=ref_file, trans=trans_file, res="im2_res.nii.gz", ) assert nr_resample.cmdline == expected_cmd # test_reg_resample_blank() nr_resample_2 = RegResample(type="blank", inter_val="LIN", omp_core_val=4) ref_file = example_data("im1.nii") flo_file = example_data("im2.nii") trans_file = example_data("warpfield.nii") nr_resample_2.inputs.ref_file = ref_file nr_resample_2.inputs.flo_file = flo_file nr_resample_2.inputs.trans_file = trans_file cmd_tmp = "{cmd} -flo {flo} -inter 1 -omp 4 -ref {ref} -trans {trans} \ -blank 
{blank}" expected_cmd = cmd_tmp.format( cmd=get_custom_path("reg_resample"), flo=flo_file, ref=ref_file, trans=trans_file, blank="im2_blank.nii.gz", ) assert nr_resample_2.cmdline == expected_cmd @pytest.mark.skipif( no_nifty_tool(cmd="reg_jacobian"), reason="niftyreg is not installed. reg_jacobian not found.", ) def test_reg_jacobian_jac(): """Test interface for RegJacobian""" # Create a reg_jacobian object nr_jacobian = RegJacobian() # Check if the command is properly defined assert nr_jacobian.cmd == get_custom_path("reg_jacobian") # test raising error with mandatory args absent with pytest.raises(ValueError): nr_jacobian.run() # Test Reg Jacobian: jac ref_file = example_data("im1.nii") trans_file = example_data("warpfield.nii") nr_jacobian.inputs.ref_file = ref_file nr_jacobian.inputs.trans_file = trans_file nr_jacobian.inputs.omp_core_val = 4 cmd_tmp = "{cmd} -omp 4 -ref {ref} -trans {trans} -jac {jac}" expected_cmd = cmd_tmp.format( cmd=get_custom_path("reg_jacobian"), ref=ref_file, trans=trans_file, jac="warpfield_jac.nii.gz", ) assert nr_jacobian.cmdline == expected_cmd # Test Reg Jacobian: jac m nr_jacobian_2 = RegJacobian(type="jacM", omp_core_val=4) ref_file = example_data("im1.nii") trans_file = example_data("warpfield.nii") nr_jacobian_2.inputs.ref_file = ref_file nr_jacobian_2.inputs.trans_file = trans_file cmd_tmp = "{cmd} -omp 4 -ref {ref} -trans {trans} -jacM {jac}" expected_cmd = cmd_tmp.format( cmd=get_custom_path("reg_jacobian"), ref=ref_file, trans=trans_file, jac="warpfield_jacM.nii.gz", ) assert nr_jacobian_2.cmdline == expected_cmd # Test Reg Jacobian: jac l nr_jacobian_3 = RegJacobian(type="jacL", omp_core_val=4) ref_file = example_data("im1.nii") trans_file = example_data("warpfield.nii") nr_jacobian_3.inputs.ref_file = ref_file nr_jacobian_3.inputs.trans_file = trans_file cmd_tmp = "{cmd} -omp 4 -ref {ref} -trans {trans} -jacL {jac}" expected_cmd = cmd_tmp.format( cmd=get_custom_path("reg_jacobian"), ref=ref_file, trans=trans_file, 
jac="warpfield_jacL.nii.gz", ) assert nr_jacobian_3.cmdline == expected_cmd @pytest.mark.skipif( no_nifty_tool(cmd="reg_tools"), reason="niftyreg is not installed. reg_tools not found.", ) def test_reg_tools_mul(): """tests for reg_tools interface""" # Create a reg_tools object nr_tools = RegTools() # Check if the command is properly defined assert nr_tools.cmd == get_custom_path("reg_tools") # test raising error with mandatory args absent with pytest.raises(ValueError): nr_tools.run() # Test reg_tools: mul in_file = example_data("im1.nii") nr_tools.inputs.in_file = in_file nr_tools.inputs.mul_val = 4 nr_tools.inputs.omp_core_val = 4 cmd_tmp = "{cmd} -in {in_file} -mul 4.0 -omp 4 -out {out_file}" expected_cmd = cmd_tmp.format( cmd=get_custom_path("reg_tools"), in_file=in_file, out_file="im1_tools.nii.gz" ) assert nr_tools.cmdline == expected_cmd # Test reg_tools: iso nr_tools_2 = RegTools(iso_flag=True, omp_core_val=4) in_file = example_data("im1.nii") nr_tools_2.inputs.in_file = in_file cmd_tmp = "{cmd} -in {in_file} -iso -omp 4 -out {out_file}" expected_cmd = cmd_tmp.format( cmd=get_custom_path("reg_tools"), in_file=in_file, out_file="im1_tools.nii.gz" ) assert nr_tools_2.cmdline == expected_cmd @pytest.mark.skipif( no_nifty_tool(cmd="reg_average"), reason="niftyreg is not installed. 
reg_average not found.", ) def test_reg_average(): """tests for reg_average interface""" # Create a reg_average object nr_average = RegAverage() # Check if the command is properly defined assert nr_average.cmd == get_custom_path("reg_average") # Average niis one_file = example_data("im1.nii") two_file = example_data("im2.nii") three_file = example_data("im3.nii") nr_average.inputs.avg_files = [one_file, two_file, three_file] nr_average.inputs.omp_core_val = 1 generated_cmd = nr_average.cmdline # Read the reg_average_cmd reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd") with open(reg_average_cmd, "rb") as f_obj: argv = f_obj.read() os.remove(reg_average_cmd) expected_argv = "%s %s -avg %s %s %s -omp 1" % ( get_custom_path("reg_average"), os.path.join(os.getcwd(), "avg_out.nii.gz"), one_file, two_file, three_file, ) assert argv.decode("utf-8") == expected_argv # Test command line with text file expected_cmd = "%s --cmd_file %s" % ( get_custom_path("reg_average"), reg_average_cmd, ) assert generated_cmd == expected_cmd # Test Reg Average: average txt nr_average_2 = RegAverage() one_file = example_data("TransformParameters.0.txt") two_file = example_data("ants_Affine.txt") three_file = example_data("elastix.txt") nr_average_2.inputs.avg_files = [one_file, two_file, three_file] nr_average_2.inputs.omp_core_val = 1 generated_cmd = nr_average_2.cmdline # Read the reg_average_cmd reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd") with open(reg_average_cmd, "rb") as f_obj: argv = f_obj.read() os.remove(reg_average_cmd) expected_argv = "%s %s -avg %s %s %s -omp 1" % ( get_custom_path("reg_average"), os.path.join(os.getcwd(), "avg_out.txt"), one_file, two_file, three_file, ) assert argv.decode("utf-8") == expected_argv # Test Reg Average: average list nr_average_3 = RegAverage() one_file = example_data("TransformParameters.0.txt") two_file = example_data("ants_Affine.txt") three_file = example_data("elastix.txt") nr_average_3.inputs.avg_lts_files = 
[one_file, two_file, three_file] nr_average_3.inputs.omp_core_val = 1 generated_cmd = nr_average_3.cmdline # Read the reg_average_cmd reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd") with open(reg_average_cmd, "rb") as f_obj: argv = f_obj.read() os.remove(reg_average_cmd) expected_argv = "%s %s -avg_lts %s %s %s -omp 1" % ( get_custom_path("reg_average"), os.path.join(os.getcwd(), "avg_out.txt"), one_file, two_file, three_file, ) assert argv.decode("utf-8") == expected_argv # Test Reg Average: average ref nr_average_4 = RegAverage() ref_file = example_data("anatomical.nii") one_file = example_data("im1.nii") two_file = example_data("im2.nii") three_file = example_data("im3.nii") trans1_file = example_data("roi01.nii") trans2_file = example_data("roi02.nii") trans3_file = example_data("roi03.nii") nr_average_4.inputs.warp_files = [ trans1_file, one_file, trans2_file, two_file, trans3_file, three_file, ] nr_average_4.inputs.avg_ref_file = ref_file nr_average_4.inputs.omp_core_val = 1 generated_cmd = nr_average_4.cmdline # Read the reg_average_cmd reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd") with open(reg_average_cmd, "rb") as f_obj: argv = f_obj.read() os.remove(reg_average_cmd) expected_argv = "%s %s -avg_tran %s -omp 1 %s %s %s %s %s %s" % ( get_custom_path("reg_average"), os.path.join(os.getcwd(), "avg_out.nii.gz"), ref_file, trans1_file, one_file, trans2_file, two_file, trans3_file, three_file, ) assert argv.decode("utf-8") == expected_argv # Test Reg Average: demean3 nr_average_5 = RegAverage() ref_file = example_data("anatomical.nii") one_file = example_data("im1.nii") two_file = example_data("im2.nii") three_file = example_data("im3.nii") aff1_file = example_data("TransformParameters.0.txt") aff2_file = example_data("ants_Affine.txt") aff3_file = example_data("elastix.txt") trans1_file = example_data("roi01.nii") trans2_file = example_data("roi02.nii") trans3_file = example_data("roi03.nii") nr_average_5.inputs.warp_files = [ 
aff1_file, trans1_file, one_file, aff2_file, trans2_file, two_file, aff3_file, trans3_file, three_file, ] nr_average_5.inputs.demean3_ref_file = ref_file nr_average_5.inputs.omp_core_val = 1 generated_cmd = nr_average_5.cmdline # Read the reg_average_cmd reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd") with open(reg_average_cmd, "rb") as f_obj: argv = f_obj.read() os.remove(reg_average_cmd) expected_argv = "%s %s -demean3 %s -omp 1 %s %s %s %s %s %s %s %s %s" % ( get_custom_path("reg_average"), os.path.join(os.getcwd(), "avg_out.nii.gz"), ref_file, aff1_file, trans1_file, one_file, aff2_file, trans2_file, two_file, aff3_file, trans3_file, three_file, ) assert argv.decode("utf-8") == expected_argv @pytest.mark.skipif( no_nifty_tool(cmd="reg_transform"), reason="niftyreg is not installed. reg_transform not found.", ) def test_reg_transform_def(): """tests for reg_transform interface""" # Create a reg_transform object nr_transform = RegTransform() # Check if the command is properly defined assert nr_transform.cmd == get_custom_path("reg_transform") # Assign some input data trans_file = example_data("warpfield.nii") nr_transform.inputs.def_input = trans_file nr_transform.inputs.omp_core_val = 4 cmd_tmp = "{cmd} -omp 4 -def {trans_file} {out_file}" expected_cmd = cmd_tmp.format( cmd=get_custom_path("reg_transform"), trans_file=trans_file, out_file=os.path.join(os.getcwd(), "warpfield_trans.nii.gz"), ) assert nr_transform.cmdline == expected_cmd # Test reg_transform: def ref nr_transform_2 = RegTransform(omp_core_val=4) ref_file = example_data("im1.nii") trans_file = example_data("warpfield.nii") nr_transform_2.inputs.ref1_file = ref_file nr_transform_2.inputs.def_input = trans_file cmd_tmp = "{cmd} -ref {ref_file} -omp 4 -def {trans_file} {out_file}" expected_cmd = cmd_tmp.format( cmd=get_custom_path("reg_transform"), ref_file=ref_file, trans_file=trans_file, out_file=os.path.join(os.getcwd(), "warpfield_trans.nii.gz"), ) assert nr_transform_2.cmdline == 
expected_cmd # Test reg_transform: comp nii nr_transform_3 = RegTransform(omp_core_val=4) ref_file = example_data("im1.nii") trans_file = example_data("warpfield.nii") trans2_file = example_data("anatomical.nii") nr_transform_3.inputs.ref1_file = ref_file nr_transform_3.inputs.comp_input2 = trans2_file nr_transform_3.inputs.comp_input = trans_file cmd_tmp = "{cmd} -ref {ref_file} -omp 4 -comp {trans1} {trans2} {out_file}" expected_cmd = cmd_tmp.format( cmd=get_custom_path("reg_transform"), ref_file=ref_file, trans1=trans_file, trans2=trans2_file, out_file=os.path.join(os.getcwd(), "warpfield_trans.nii.gz"), ) assert nr_transform_3.cmdline == expected_cmd # Test reg_transform: comp txt nr_transform_4 = RegTransform(omp_core_val=4) aff1_file = example_data("ants_Affine.txt") aff2_file = example_data("elastix.txt") nr_transform_4.inputs.comp_input2 = aff2_file nr_transform_4.inputs.comp_input = aff1_file cmd_tmp = "{cmd} -omp 4 -comp {aff1} {aff2} {out_file}" expected_cmd = cmd_tmp.format( cmd=get_custom_path("reg_transform"), aff1=aff1_file, aff2=aff2_file, out_file=os.path.join(os.getcwd(), "ants_Affine_trans.txt"), ) assert nr_transform_4.cmdline == expected_cmd # Test reg_transform: comp nr_transform_5 = RegTransform(omp_core_val=4) trans_file = example_data("warpfield.nii") aff_file = example_data("elastix.txt") nr_transform_5.inputs.comp_input2 = trans_file nr_transform_5.inputs.comp_input = aff_file cmd_tmp = "{cmd} -omp 4 -comp {aff} {trans} {out_file}" expected_cmd = cmd_tmp.format( cmd=get_custom_path("reg_transform"), aff=aff_file, trans=trans_file, out_file=os.path.join(os.getcwd(), "elastix_trans.nii.gz"), ) assert nr_transform_5.cmdline == expected_cmd # Test reg_transform: flirt nr_transform_6 = RegTransform(omp_core_val=4) aff_file = example_data("elastix.txt") ref_file = example_data("im1.nii") in_file = example_data("im2.nii") nr_transform_6.inputs.flirt_2_nr_input = (aff_file, ref_file, in_file) cmd_tmp = "{cmd} -omp 4 -flirtAff2NR {aff} {ref} 
{in_file} {out_file}" expected_cmd = cmd_tmp.format( cmd=get_custom_path("reg_transform"), aff=aff_file, ref=ref_file, in_file=in_file, out_file=os.path.join(os.getcwd(), "elastix_trans.txt"), ) assert nr_transform_6.cmdline == expected_cmd @pytest.mark.skipif( no_nifty_tool(cmd="reg_measure"), reason="niftyreg is not installed. reg_measure not found.", ) def test_reg_measure(): """tests for reg_measure interface""" # Create a reg_measure object nr_measure = RegMeasure() # Check if the command is properly defined assert nr_measure.cmd == get_custom_path("reg_measure") # test raising error with mandatory args absent with pytest.raises(ValueError): nr_measure.run() # Assign some input data ref_file = example_data("im1.nii") flo_file = example_data("im2.nii") nr_measure.inputs.ref_file = ref_file nr_measure.inputs.flo_file = flo_file nr_measure.inputs.measure_type = "lncc" nr_measure.inputs.omp_core_val = 4 cmd_tmp = "{cmd} -flo {flo} -lncc -omp 4 -out {out} -ref {ref}" expected_cmd = cmd_tmp.format( cmd=get_custom_path("reg_measure"), flo=flo_file, out="im2_lncc.txt", ref=ref_file, ) assert nr_measure.cmdline == expected_cmd nipype-1.7.0/nipype/interfaces/niftyseg/000077500000000000000000000000001413403311400202745ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/niftyseg/__init__.py000066400000000000000000000010571413403311400224100ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ The niftyseg module provides classes for interfacing with the `NIFTYSEG `_ command line tools. Top-level namespace for niftyseg. 
""" from .em import EM from .label_fusion import LabelFusion, CalcTopNCC from .lesions import FillLesions from .maths import UnaryMaths, BinaryMaths, BinaryMathsInteger, TupleMaths, Merge from .patchmatch import PatchMatch from .stats import UnaryStats, BinaryStats nipype-1.7.0/nipype/interfaces/niftyseg/base.py000066400000000000000000000022361413403311400215630ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ The niftyseg module provides classes for interfacing with `niftyseg `_ command line tools. These are the base tools for working with niftyseg. EM Statistical Segmentation tool is found in niftyseg/em.py Fill lesions tool is found in niftyseg/lesions.py Mathematical operation tool is found in niftyseg/maths.py Patch Match tool is found in niftyseg/patchmatch.py Statistical operation tool is found in niftyseg/stats.py Label Fusion and CalcTopNcc tools are in niftyseg/steps.py Examples -------- See the docstrings of the individual classes for examples. """ from ..niftyfit.base import NiftyFitCommand class NiftySegCommand(NiftyFitCommand): """ Base support interface for NiftySeg commands. """ _suffix = "_ns" _min_version = None def __init__(self, **inputs): super(NiftySegCommand, self).__init__(**inputs) def get_version(self): return super(NiftySegCommand, self).version_from_command( cmd="seg_EM", flag="--version" ) nipype-1.7.0/nipype/interfaces/niftyseg/em.py000066400000000000000000000117301413403311400212510ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Nipype interface for seg_EM. The em module provides higher-level interfaces to some of the operations that can be performed with the seg_em command-line program. Examples -------- See the docstrings of the individual classes for examples. 
""" from ..base import TraitedSpec, File, traits, CommandLineInputSpec, InputMultiPath from .base import NiftySegCommand from ..niftyreg.base import get_custom_path class EMInputSpec(CommandLineInputSpec): """Input Spec for EM.""" in_file = File( argstr="-in %s", exists=True, mandatory=True, desc="Input image to segment", position=4, ) mask_file = File( argstr="-mask %s", exists=True, desc="Filename of the ROI for label fusion" ) # Priors no_prior = traits.Int( argstr="-nopriors %s", mandatory=True, desc="Number of classes to use without prior", xor=["prior_4D", "priors"], ) prior_4D = File( argstr="-prior4D %s", exists=True, mandatory=True, desc="4D file containing the priors", xor=["no_prior", "priors"], ) priors = InputMultiPath( argstr="%s", mandatory=True, desc="List of priors filepaths.", xor=["no_prior", "prior_4D"], ) # iterations max_iter = traits.Int( argstr="-max_iter %s", default_value=100, usedefault=True, desc="Maximum number of iterations", ) min_iter = traits.Int( argstr="-min_iter %s", default_value=0, usedefault=True, desc="Minimum number of iterations", ) # other options bc_order_val = traits.Int( argstr="-bc_order %s", default_value=3, usedefault=True, desc="Polynomial order for the bias field", ) mrf_beta_val = traits.Float( argstr="-mrf_beta %s", desc="Weight of the Markov Random Field" ) desc = "Bias field correction will run only if the ratio of improvement \ is below bc_thresh. (default=0 [OFF])" bc_thresh_val = traits.Float( argstr="-bc_thresh %s", default_value=0, usedefault=True, desc=desc ) desc = "Amount of regularization over the diagonal of the covariance \ matrix [above 1]" reg_val = traits.Float(argstr="-reg %s", desc=desc) desc = "Outlier detection as in (Van Leemput TMI 2003). 
is the \ Mahalanobis threshold [recommended between 3 and 7] is a convergence \ ratio below which the outlier detection is going to be done [recommended 0.01]" outlier_val = traits.Tuple( traits.Float(), traits.Float(), argstr="-outlier %s %s", desc=desc ) desc = "Relax Priors [relaxation factor: 00 (recommended=2.0)] /only 3D/" relax_priors = traits.Tuple( traits.Float(), traits.Float(), argstr="-rf %s %s", desc=desc ) # outputs out_file = File( name_source=["in_file"], name_template="%s_em.nii.gz", argstr="-out %s", desc="Output segmentation", ) out_bc_file = File( name_source=["in_file"], name_template="%s_bc_em.nii.gz", argstr="-bc_out %s", desc="Output bias corrected image", ) out_outlier_file = File( name_source=["in_file"], name_template="%s_outlier_em.nii.gz", argstr="-out_outlier %s", desc="Output outlierness image", ) class EMOutputSpec(TraitedSpec): """Output Spec for EM.""" out_file = File(desc="Output segmentation") out_bc_file = File(desc="Output bias corrected image") out_outlier_file = File(desc="Output outlierness image") class EM(NiftySegCommand): """Interface for executable seg_EM from NiftySeg platform. seg_EM is a general purpose intensity based image segmentation tool. In it's simplest form, it takes in one 2D or 3D image and segments it in n classes. 
`Source code `_ | `Documentation `_ Examples -------- >>> from nipype.interfaces import niftyseg >>> node = niftyseg.EM() >>> node.inputs.in_file = 'im1.nii' >>> node.inputs.no_prior = 4 >>> node.cmdline 'seg_EM -in im1.nii -bc_order 3 -bc_thresh 0 -max_iter 100 -min_iter 0 -nopriors 4 \ -bc_out im1_bc_em.nii.gz -out im1_em.nii.gz -out_outlier im1_outlier_em.nii.gz' """ _cmd = get_custom_path("seg_EM", env_dir="NIFTYSEGDIR") _suffix = "_em" input_spec = EMInputSpec output_spec = EMOutputSpec def _format_arg(self, opt, spec, val): """Convert input to appropriate format for seg_EM.""" if opt == "priors": _nb_priors = len(self.inputs.priors) return "-priors %d %s" % (_nb_priors, " ".join(self.inputs.priors)) else: return super(EM, self)._format_arg(opt, spec, val) nipype-1.7.0/nipype/interfaces/niftyseg/label_fusion.py000066400000000000000000000304571413403311400233210ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ The fusion module provides higher-level interfaces to some of the operations that can be performed with the seg_LabFusion command-line program. 
""" import os import warnings from ..base import ( TraitedSpec, File, traits, isdefined, CommandLineInputSpec, NipypeInterfaceError, ) from .base import NiftySegCommand from ..niftyreg.base import get_custom_path from ...utils.filemanip import load_json, save_json, split_filename warn = warnings.warn warnings.filterwarnings("always", category=UserWarning) class LabelFusionInput(CommandLineInputSpec): """Input Spec for LabelFusion.""" in_file = File( argstr="-in %s", exists=True, mandatory=True, position=1, desc="Filename of the 4D integer label image.", ) template_file = File(exists=True, desc="Registered templates (4D Image)") file_to_seg = File( exists=True, mandatory=True, desc="Original image to segment (3D Image)" ) mask_file = File( argstr="-mask %s", exists=True, desc="Filename of the ROI for label fusion" ) out_file = File( argstr="-out %s", name_source=["in_file"], name_template="%s", desc="Output consensus segmentation", ) prob_flag = traits.Bool( desc="Probabilistic/Fuzzy segmented image", argstr="-outProb" ) desc = "Verbose level [0 = off, 1 = on, 2 = debug] (default = 0)" verbose = traits.Enum("0", "1", "2", desc=desc, argstr="-v %s") desc = "Only consider non-consensus voxels to calculate statistics" unc = traits.Bool(desc=desc, argstr="-unc") classifier_type = traits.Enum( "STEPS", "STAPLE", "MV", "SBA", argstr="-%s", mandatory=True, position=2, desc="Type of Classifier Fusion.", ) desc = "Gaussian kernel size in mm to compute the local similarity" kernel_size = traits.Float(desc=desc) template_num = traits.Int(desc="Number of labels to use") # STAPLE and MV options sm_ranking = traits.Enum( "ALL", "GNCC", "ROINCC", "LNCC", argstr="-%s", usedefault=True, position=3, desc="Ranking for STAPLE and MV", ) dilation_roi = traits.Int(desc="Dilation of the ROI ( d>=1 )") # STAPLE and STEPS options desc = "Proportion of the label (only for single labels)." 
proportion = traits.Float(argstr="-prop %s", desc=desc) desc = "Update label proportions at each iteration" prob_update_flag = traits.Bool(desc=desc, argstr="-prop_update") desc = "Value of P and Q [ 0 < (P,Q) < 1 ] (default = 0.99 0.99)" set_pq = traits.Tuple(traits.Float, traits.Float, argstr="-setPQ %f %f", desc=desc) mrf_value = traits.Float( argstr="-MRF_beta %f", desc="MRF prior strength (between 0 and 5)" ) desc = "Maximum number of iterations (default = 15)." max_iter = traits.Int(argstr="-max_iter %d", desc=desc) desc = "If percent of labels agree, then area is not uncertain." unc_thresh = traits.Float(argstr="-uncthres %f", desc=desc) desc = "Ratio for convergence (default epsilon = 10^-5)." conv = traits.Float(argstr="-conv %f", desc=desc) class LabelFusionOutput(TraitedSpec): """Output Spec for LabelFusion.""" out_file = File(exists=True, desc="image written after calculations") class LabelFusion(NiftySegCommand): """Interface for executable seg_LabelFusion from NiftySeg platform using type STEPS as classifier Fusion. This executable implements 4 fusion strategies (-STEPS, -STAPLE, -MV or - SBA), all of them using either a global (-GNCC), ROI-based (-ROINCC), local (-LNCC) or no image similarity (-ALL). Combinations of fusion algorithms and similarity metrics give rise to different variants of known algorithms. As an example, using LNCC and MV as options will run a locally weighted voting strategy with LNCC derived weights, while using STAPLE and LNCC is equivalent to running STEPS as per its original formulation. A few other options pertaining the use of an MRF (-MRF beta), the initial sensitivity and specificity estimates and the use of only non-consensus voxels (-unc) for the STAPLE and STEPS algorithm. All processing can be masked (-mask), greatly reducing memory consumption. 
As an example, the command to use STEPS should be: seg_LabFusion -in 4D_Propragated_Labels_to_fuse.nii -out \ FusedSegmentation.nii -STEPS 2 15 TargetImage.nii \ 4D_Propagated_Intensities.nii `Source code `_ | `Documentation `_ Examples -------- >>> from nipype.interfaces import niftyseg >>> node = niftyseg.LabelFusion() >>> node.inputs.in_file = 'im1.nii' >>> node.inputs.kernel_size = 2.0 >>> node.inputs.file_to_seg = 'im2.nii' >>> node.inputs.template_file = 'im3.nii' >>> node.inputs.template_num = 2 >>> node.inputs.classifier_type = 'STEPS' >>> node.cmdline 'seg_LabFusion -in im1.nii -STEPS 2.000000 2 im2.nii im3.nii -out im1_steps.nii' """ _cmd = get_custom_path("seg_LabFusion", env_dir="NIFTYSEGDIR") input_spec = LabelFusionInput output_spec = LabelFusionOutput _suffix = "_label_fused" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for seg_maths.""" # Remove options if not STAPLE or STEPS as fusion type: if ( opt in [ "proportion", "prob_update_flag", "set_pq", "mrf_value", "max_iter", "unc_thresh", "conv", ] and self.inputs.classifier_type not in ["STAPLE", "STEPS"] ): return "" if opt == "sm_ranking": return self.get_staple_args(val) # Return options string if STEPS: if opt == "classifier_type" and val == "STEPS": return self.get_steps_args() return super(LabelFusion, self)._format_arg(opt, spec, val) def get_steps_args(self): if not isdefined(self.inputs.template_file): err = "LabelFusion requires a value for input 'template_file' \ when 'classifier_type' is set to 'STEPS'." raise NipypeInterfaceError(err) if not isdefined(self.inputs.kernel_size): err = "LabelFusion requires a value for input 'kernel_size' when \ 'classifier_type' is set to 'STEPS'." raise NipypeInterfaceError(err) if not isdefined(self.inputs.template_num): err = "LabelFusion requires a value for input 'template_num' when \ 'classifier_type' is set to 'STEPS'." 
raise NipypeInterfaceError(err) return "-STEPS %f %d %s %s" % ( self.inputs.kernel_size, self.inputs.template_num, self.inputs.file_to_seg, self.inputs.template_file, ) def get_staple_args(self, ranking): classtype = self.inputs.classifier_type if classtype not in ["STAPLE", "MV"]: return None if ranking == "ALL": return "-ALL" if not isdefined(self.inputs.template_file): err = "LabelFusion requires a value for input 'tramplate_file' \ when 'classifier_type' is set to '%s' and 'sm_ranking' is set to '%s'." raise NipypeInterfaceError(err % (classtype, ranking)) if not isdefined(self.inputs.template_num): err = "LabelFusion requires a value for input 'template-num' when \ 'classifier_type' is set to '%s' and 'sm_ranking' is set to '%s'." raise NipypeInterfaceError(err % (classtype, ranking)) if ranking == "GNCC": if not isdefined(self.inputs.template_num): err = "LabelFusion requires a value for input 'template_num' \ when 'classifier_type' is set to '%s' and 'sm_ranking' is set to '%s'." raise NipypeInterfaceError(err % (classtype, ranking)) return "-%s %d %s %s" % ( ranking, self.inputs.template_num, self.inputs.file_to_seg, self.inputs.template_file, ) elif ranking == "ROINCC": if not isdefined(self.inputs.dilation_roi): err = "LabelFusion requires a value for input 'dilation_roi' \ when 'classifier_type' is set to '%s' and 'sm_ranking' is set to '%s'." raise NipypeInterfaceError(err % (classtype, ranking)) elif self.inputs.dilation_roi < 1: err = "The 'dilation_roi' trait of a LabelFusionInput \ instance must be an integer >= 1, but a value of '%s' was specified." raise NipypeInterfaceError(err % self.inputs.dilation_roi) return "-%s %d %d %s %s" % ( ranking, self.inputs.dilation_roi, self.inputs.template_num, self.inputs.file_to_seg, self.inputs.template_file, ) elif ranking == "LNCC": if not isdefined(self.inputs.kernel_size): err = "LabelFusion requires a value for input 'kernel_size' \ when 'classifier_type' is set to '%s' and 'sm_ranking' is set to '%s'." 
raise NipypeInterfaceError(err % (classtype, ranking)) return "-%s %f %d %s %s" % ( ranking, self.inputs.kernel_size, self.inputs.template_num, self.inputs.file_to_seg, self.inputs.template_file, ) def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) _, _, ext = split_filename(self.inputs.in_file) suffix = self.inputs.classifier_type.lower() return os.path.join(path, "{0}_{1}{2}".format(base, suffix, ext)) class CalcTopNCCInputSpec(CommandLineInputSpec): """Input Spec for CalcTopNCC.""" in_file = File( argstr="-target %s", exists=True, mandatory=True, desc="Target file", position=1 ) num_templates = traits.Int( argstr="-templates %s", mandatory=True, position=2, desc="Number of Templates" ) in_templates = traits.List( File(exists=True), argstr="%s", position=3, mandatory=True ) top_templates = traits.Int( argstr="-n %s", mandatory=True, position=4, desc="Number of Top Templates" ) mask_file = File( argstr="-mask %s", exists=True, desc="Filename of the ROI for label fusion" ) class CalcTopNCCOutputSpec(TraitedSpec): """Output Spec for CalcTopNCC.""" out_files = traits.Any(File(exists=True)) class CalcTopNCC(NiftySegCommand): """Interface for executable seg_CalcTopNCC from NiftySeg platform. 
Examples -------- >>> from nipype.interfaces import niftyseg >>> node = niftyseg.CalcTopNCC() >>> node.inputs.in_file = 'im1.nii' >>> node.inputs.num_templates = 2 >>> node.inputs.in_templates = ['im2.nii', 'im3.nii'] >>> node.inputs.top_templates = 1 >>> node.cmdline 'seg_CalcTopNCC -target im1.nii -templates 2 im2.nii im3.nii -n 1' """ _cmd = get_custom_path("seg_CalcTopNCC", env_dir="NIFTYSEGDIR") _suffix = "_topNCC" input_spec = CalcTopNCCInputSpec output_spec = CalcTopNCCOutputSpec def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() # local caching for backward compatibility outfile = os.path.join(os.getcwd(), "CalcTopNCC.json") if runtime is None or not runtime.stdout: try: out_files = load_json(outfile)["files"] except IOError: return self.run().outputs else: out_files = [] for line in runtime.stdout.split("\n"): if line: values = line.split() if len(values) > 1: out_files.append([str(val) for val in values]) else: out_files.extend([str(val) for val in values]) if len(out_files) == 1: out_files = out_files[0] save_json(outfile, dict(files=out_files)) outputs.out_files = out_files return outputs nipype-1.7.0/nipype/interfaces/niftyseg/lesions.py000066400000000000000000000075501413403311400223310ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Nipype interface for seg_FillLesions. The fusion module provides higher-level interfaces to some of the operations that can be performed with the seg_FillLesions command-line program. Examples -------- See the docstrings of the individual classes for examples. 
""" import warnings from ..base import TraitedSpec, File, traits, CommandLineInputSpec from .base import NiftySegCommand from ..niftyreg.base import get_custom_path warn = warnings.warn warnings.filterwarnings("always", category=UserWarning) class FillLesionsInputSpec(CommandLineInputSpec): """Input Spec for FillLesions.""" # Mandatory input arguments in_file = File( argstr="-i %s", exists=True, mandatory=True, desc="Input image to fill lesions", position=1, ) lesion_mask = File( argstr="-l %s", exists=True, mandatory=True, desc="Lesion mask", position=2 ) # Output file name out_file = File( name_source=["in_file"], name_template="%s_lesions_filled.nii.gz", desc="The output filename of the fill lesions results", argstr="-o %s", position=3, ) # Optional arguments desc = "Dilate the mask times (in voxels, by default 0)" in_dilation = traits.Int(desc=desc, argstr="-dil %d") desc = "Percentage of minimum number of voxels between patches \ (by default 0.5)." match = traits.Float(desc=desc, argstr="-match %f") desc = "Minimum percentage of valid voxels in target patch \ (by default 0)." search = traits.Float(desc=desc, argstr="-search %f") desc = "Smoothing by (in minimal 6-neighbourhood voxels \ (by default 0.1))." smooth = traits.Float(desc=desc, argstr="-smo %f") desc = "Search regions size respect biggest patch size (by default 4)." size = traits.Int(desc=desc, argstr="-size %d") desc = "Patch cardinality weighting factor (by default 2)." cwf = traits.Float(desc=desc, argstr="-cwf %f") desc = "Give a binary mask with the valid search areas." bin_mask = File(desc=desc, argstr="-mask %s") desc = "Guizard et al. (FIN 2015) method, it doesn't include the \ multiresolution/hierarchical inpainting part, this part needs to be done \ with some external software such as reg_tools and reg_resample from NiftyReg. \ By default it uses the method presented in Prados et al. (Neuroimage 2016)." 
other = traits.Bool(desc=desc, argstr="-other") use_2d = traits.Bool( desc="Uses 2D patches in the Z axis, by default 3D.", argstr="-2D" ) debug = traits.Bool( desc="Save all intermidium files (by default OFF).", argstr="-debug" ) desc = "Set output (char, short, int, uchar, ushort, uint, \ float, double)." out_datatype = traits.String(desc=desc, argstr="-odt %s") verbose = traits.Bool(desc="Verbose (by default OFF).", argstr="-v") class FillLesionsOutputSpec(TraitedSpec): """Output Spec for FillLesions.""" out_file = File(desc="Output segmentation") class FillLesions(NiftySegCommand): """Interface for executable seg_FillLesions from NiftySeg platform. Fill all the masked lesions with WM intensity average. `Source code `_ | `Documentation `_ Examples -------- >>> from nipype.interfaces import niftyseg >>> node = niftyseg.FillLesions() >>> node.inputs.in_file = 'im1.nii' >>> node.inputs.lesion_mask = 'im2.nii' >>> node.cmdline 'seg_FillLesions -i im1.nii -l im2.nii -o im1_lesions_filled.nii.gz' """ _cmd = get_custom_path("seg_FillLesions", env_dir="NIFTYSEGDIR") input_spec = FillLesionsInputSpec output_spec = FillLesionsOutputSpec nipype-1.7.0/nipype/interfaces/niftyseg/maths.py000066400000000000000000000456761413403311400220040ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Nipype interface for seg_maths. The maths module provides higher-level interfaces to some of the operations that can be performed with the niftysegmaths (seg_maths) command-line program. 
""" import os from ..base import ( TraitedSpec, File, traits, isdefined, CommandLineInputSpec, NipypeInterfaceError, ) from .base import NiftySegCommand from ..niftyreg.base import get_custom_path from ...utils.filemanip import split_filename class MathsInput(CommandLineInputSpec): """Input Spec for seg_maths interfaces.""" in_file = File( position=2, argstr="%s", exists=True, mandatory=True, desc="image to operate on" ) out_file = File( name_source=["in_file"], name_template="%s", position=-2, argstr="%s", desc="image to write", ) desc = "datatype to use for output (default uses input type)" output_datatype = traits.Enum( "float", "char", "int", "short", "double", "input", position=-3, argstr="-odt %s", desc=desc, ) class MathsOutput(TraitedSpec): """Output Spec for seg_maths interfaces.""" out_file = File(desc="image written after calculations") class MathsCommand(NiftySegCommand): """ Base Command Interface for seg_maths interfaces. The executable seg_maths enables the sequential execution of arithmetic operations, like multiplication (-mul), division (-div) or addition (-add), binarisation (-bin) or thresholding (-thr) operations and convolution by a Gaussian kernel (-smo). It also alows mathematical morphology based operations like dilation (-dil), erosion (-ero), connected components (-lconcomp) and hole filling (-fill), Euclidean (- euc) and geodesic (-geo) distance transforms, local image similarity metric calculation (-lncc and -lssd). Finally, it allows multiple operations over the dimensionality of the image, from merging 3D images together as a 4D image (-merge) or splitting (-split or -tp) 4D images into several 3D images, to estimating the maximum, minimum and average over all time-points, etc. 
""" _cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") input_spec = MathsInput output_spec = MathsOutput _suffix = "_maths" def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) _, _, ext = split_filename(self.inputs.in_file) suffix = self._suffix if suffix != "_merged" and isdefined(self.inputs.operation): suffix = "_" + self.inputs.operation return os.path.join(path, "{0}{1}{2}".format(base, suffix, ext)) class UnaryMathsInput(MathsInput): """Input Spec for seg_maths Unary operations.""" operation = traits.Enum( "sqrt", "exp", "log", "recip", "abs", "bin", "otsu", "lconcomp", "concomp6", "concomp26", "fill", "euc", "tpmax", "tmean", "tmax", "tmin", "splitlab", "removenan", "isnan", "subsamp2", "scl", "4to5", "range", argstr="-%s", position=4, mandatory=True, desc="""\ Operation to perform: * sqrt - Square root of the image). * exp - Exponential root of the image. * log - Log of the image. * recip - Reciprocal (1/I) of the image. * abs - Absolute value of the image. * bin - Binarise the image. * otsu - Otsu thresholding of the current image. * lconcomp - Take the largest connected component * concomp6 - Label the different connected components with a 6NN kernel * concomp26 - Label the different connected components with a 26NN kernel * fill - Fill holes in binary object (e.g. fill ventricle in brain mask). * euc - Euclidean distance transform * tpmax - Get the time point with the highest value (binarise 4D probabilities) * tmean - Mean value of all time points. * tmax - Max value of all time points. * tmin - Mean value of all time points. * splitlab - Split the integer labels into multiple timepoints * removenan - Remove all NaNs and replace then with 0 * isnan - Binary image equal to 1 if the value is NaN and 0 otherwise * subsamp2 - Subsample the image by 2 using NN sampling (qform and sform scaled) * scl - Reset scale and slope info. * 4to5 - Flip the 4th and 5th dimension. * range - Reset the image range to the min max. 
""", ) class UnaryMaths(MathsCommand): """Unary mathematical operations. See Also -------- `Source code `__ -- `Documentation `__ Examples -------- >>> import copy >>> from nipype.interfaces import niftyseg >>> unary = niftyseg.UnaryMaths() >>> unary.inputs.output_datatype = 'float' >>> unary.inputs.in_file = 'im1.nii' >>> # Test sqrt operation >>> unary_sqrt = copy.deepcopy(unary) >>> unary_sqrt.inputs.operation = 'sqrt' >>> unary_sqrt.cmdline 'seg_maths im1.nii -sqrt -odt float im1_sqrt.nii' >>> unary_sqrt.run() # doctest: +SKIP >>> # Test sqrt operation >>> unary_abs = copy.deepcopy(unary) >>> unary_abs.inputs.operation = 'abs' >>> unary_abs.cmdline 'seg_maths im1.nii -abs -odt float im1_abs.nii' >>> unary_abs.run() # doctest: +SKIP >>> # Test bin operation >>> unary_bin = copy.deepcopy(unary) >>> unary_bin.inputs.operation = 'bin' >>> unary_bin.cmdline 'seg_maths im1.nii -bin -odt float im1_bin.nii' >>> unary_bin.run() # doctest: +SKIP >>> # Test otsu operation >>> unary_otsu = copy.deepcopy(unary) >>> unary_otsu.inputs.operation = 'otsu' >>> unary_otsu.cmdline 'seg_maths im1.nii -otsu -odt float im1_otsu.nii' >>> unary_otsu.run() # doctest: +SKIP >>> # Test isnan operation >>> unary_isnan = copy.deepcopy(unary) >>> unary_isnan.inputs.operation = 'isnan' >>> unary_isnan.cmdline 'seg_maths im1.nii -isnan -odt float im1_isnan.nii' >>> unary_isnan.run() # doctest: +SKIP """ input_spec = UnaryMathsInput class BinaryMathsInput(MathsInput): """Input Spec for seg_maths Binary operations.""" operation = traits.Enum( "mul", "div", "add", "sub", "pow", "thr", "uthr", "smo", "edge", "sobel3", "sobel5", "min", "smol", "geo", "llsnorm", "masknan", "hdr_copy", "splitinter", mandatory=True, argstr="-%s", position=4, desc="""\ Operation to perform: * mul - - Multiply image value or by other image. * div - - Divide image by or by other image. * add - - Add image by or by other image. * sub - - Subtract image by or by other image. * pow - - Image to the power of . 
* thr - - Threshold the image below . * uthr - - Threshold image above . * smo - - Gaussian smoothing by std (in voxels and up to 4-D). * edge - - Calculate the edges of the image using a threshold . * sobel3 - - Calculate the edges of all timepoints using a Sobel filter with a 3x3x3 kernel and applying gaussian smoothing. * sobel5 - - Calculate the edges of all timepoints using a Sobel filter with a 5x5x5 kernel and applying gaussian smoothing. * min - - Get the min per voxel between and . * smol - - Gaussian smoothing of a 3D label image. * geo - - Geodesic distance according to the speed function * llsnorm - Linear LS normalisation between current and * masknan - Assign everything outside the mask (mask==0) with NaNs * hdr_copy - Copy header from working image to and save in . * splitinter - Split interleaved slices in direction into separate time points """, ) operand_file = File( exists=True, argstr="%s", mandatory=True, position=5, xor=["operand_value", "operand_str"], desc="second image to perform operation with", ) operand_value = traits.Float( argstr="%.8f", mandatory=True, position=5, xor=["operand_file", "operand_str"], desc="float value to perform operation with", ) desc = "string value to perform operation splitinter" operand_str = traits.Enum( "x", "y", "z", argstr="%s", mandatory=True, position=5, xor=["operand_value", "operand_file"], desc=desc, ) class BinaryMaths(MathsCommand): """Binary mathematical operations. 
See Also -------- `Source code `__ -- `Documentation `__ Examples -------- >>> import copy >>> from nipype.interfaces import niftyseg >>> binary = niftyseg.BinaryMaths() >>> binary.inputs.in_file = 'im1.nii' >>> binary.inputs.output_datatype = 'float' >>> # Test sub operation >>> binary_sub = copy.deepcopy(binary) >>> binary_sub.inputs.operation = 'sub' >>> binary_sub.inputs.operand_file = 'im2.nii' >>> binary_sub.cmdline 'seg_maths im1.nii -sub im2.nii -odt float im1_sub.nii' >>> binary_sub.run() # doctest: +SKIP >>> # Test mul operation >>> binary_mul = copy.deepcopy(binary) >>> binary_mul.inputs.operation = 'mul' >>> binary_mul.inputs.operand_value = 2.0 >>> binary_mul.cmdline 'seg_maths im1.nii -mul 2.00000000 -odt float im1_mul.nii' >>> binary_mul.run() # doctest: +SKIP >>> # Test llsnorm operation >>> binary_llsnorm = copy.deepcopy(binary) >>> binary_llsnorm.inputs.operation = 'llsnorm' >>> binary_llsnorm.inputs.operand_file = 'im2.nii' >>> binary_llsnorm.cmdline 'seg_maths im1.nii -llsnorm im2.nii -odt float im1_llsnorm.nii' >>> binary_llsnorm.run() # doctest: +SKIP >>> # Test splitinter operation >>> binary_splitinter = copy.deepcopy(binary) >>> binary_splitinter.inputs.operation = 'splitinter' >>> binary_splitinter.inputs.operand_str = 'z' >>> binary_splitinter.cmdline 'seg_maths im1.nii -splitinter z -odt float im1_splitinter.nii' >>> binary_splitinter.run() # doctest: +SKIP """ input_spec = BinaryMathsInput def _format_arg(self, opt, spec, val): """Convert input to appropriate format for seg_maths.""" if opt == "operand_str" and self.inputs.operation != "splitinter": err = 'operand_str set but with an operation different than \ "splitinter"' raise NipypeInterfaceError(err) if opt == "operation": # Only float if val in ["pow", "thr", "uthr", "smo", "edge", "sobel3", "sobel5", "smol"]: if not isdefined(self.inputs.operand_value): err = "operand_value not set for {0}.".format(val) raise NipypeInterfaceError(err) # only files elif val in ["min", "llsnorm", 
"masknan", "hdr_copy"]: if not isdefined(self.inputs.operand_file): err = "operand_file not set for {0}.".format(val) raise NipypeInterfaceError(err) # splitinter: elif val == "splitinter": if not isdefined(self.inputs.operand_str): err = "operand_str not set for splitinter." raise NipypeInterfaceError(err) if opt == "operand_value" and float(val) == 0.0: return "0" return super(BinaryMaths, self)._format_arg(opt, spec, val) def _overload_extension(self, value, name=None): if self.inputs.operation == "hdr_copy": path, base, _ = split_filename(value) _, base, ext = split_filename(self.inputs.operand_file) suffix = self.inputs.operation return os.path.join(path, "{0}{1}{2}".format(base, suffix, ext)) else: return super(BinaryMaths, self)._overload_extension(value, name) class BinaryMathsInputInteger(MathsInput): """Input Spec for seg_maths Binary operations that require integer.""" operation = traits.Enum( "dil", "ero", "tp", "equal", "pad", "crop", mandatory=True, argstr="-%s", position=4, desc="""\ Operation to perform: * equal - - Get voxels equal to * dil - - Dilate the image times (in voxels). * ero - - Erode the image times (in voxels). * tp - - Extract time point * crop - - Crop voxels around each 3D volume. * pad - - Pad voxels with NaN value around each 3D volume. """, ) operand_value = traits.Int( argstr="%d", mandatory=True, position=5, desc="int value to perform operation with", ) class BinaryMathsInteger(MathsCommand): """Integer mathematical operations. 
See Also -------- `Source code `__ -- `Documentation `__ Examples -------- >>> import copy >>> from nipype.interfaces.niftyseg import BinaryMathsInteger >>> binaryi = BinaryMathsInteger() >>> binaryi.inputs.in_file = 'im1.nii' >>> binaryi.inputs.output_datatype = 'float' >>> # Test dil operation >>> binaryi_dil = copy.deepcopy(binaryi) >>> binaryi_dil.inputs.operation = 'dil' >>> binaryi_dil.inputs.operand_value = 2 >>> binaryi_dil.cmdline 'seg_maths im1.nii -dil 2 -odt float im1_dil.nii' >>> binaryi_dil.run() # doctest: +SKIP >>> # Test dil operation >>> binaryi_ero = copy.deepcopy(binaryi) >>> binaryi_ero.inputs.operation = 'ero' >>> binaryi_ero.inputs.operand_value = 1 >>> binaryi_ero.cmdline 'seg_maths im1.nii -ero 1 -odt float im1_ero.nii' >>> binaryi_ero.run() # doctest: +SKIP >>> # Test pad operation >>> binaryi_pad = copy.deepcopy(binaryi) >>> binaryi_pad.inputs.operation = 'pad' >>> binaryi_pad.inputs.operand_value = 4 >>> binaryi_pad.cmdline 'seg_maths im1.nii -pad 4 -odt float im1_pad.nii' >>> binaryi_pad.run() # doctest: +SKIP """ input_spec = BinaryMathsInputInteger class TupleMathsInput(MathsInput): """Input Spec for seg_maths Tuple operations.""" operation = traits.Enum( "lncc", "lssd", "lltsnorm", mandatory=True, argstr="-%s", position=4, desc="""\ Operation to perform: * lncc Local CC between current img and on a kernel with * lssd Local SSD between current img and on a kernel with * lltsnorm Linear LTS normalisation assuming percent outliers """, ) operand_file1 = File( exists=True, argstr="%s", mandatory=True, position=5, xor=["operand_value1"], desc="image to perform operation 1 with", ) desc = "float value to perform operation 1 with" operand_value1 = traits.Float( argstr="%.8f", mandatory=True, position=5, xor=["operand_file1"], desc=desc ) operand_file2 = File( exists=True, argstr="%s", mandatory=True, position=6, xor=["operand_value2"], desc="image to perform operation 2 with", ) desc = "float value to perform operation 2 with" 
operand_value2 = traits.Float( argstr="%.8f", mandatory=True, position=6, xor=["operand_file2"], desc=desc ) class TupleMaths(MathsCommand): """Mathematical operations on tuples. See Also -------- `Source code `__ -- `Documentation `__ Examples -------- >>> import copy >>> from nipype.interfaces import niftyseg >>> tuple = niftyseg.TupleMaths() >>> tuple.inputs.in_file = 'im1.nii' >>> tuple.inputs.output_datatype = 'float' >>> # Test lncc operation >>> tuple_lncc = copy.deepcopy(tuple) >>> tuple_lncc.inputs.operation = 'lncc' >>> tuple_lncc.inputs.operand_file1 = 'im2.nii' >>> tuple_lncc.inputs.operand_value2 = 2.0 >>> tuple_lncc.cmdline 'seg_maths im1.nii -lncc im2.nii 2.00000000 -odt float im1_lncc.nii' >>> tuple_lncc.run() # doctest: +SKIP >>> # Test lssd operation >>> tuple_lssd = copy.deepcopy(tuple) >>> tuple_lssd.inputs.operation = 'lssd' >>> tuple_lssd.inputs.operand_file1 = 'im2.nii' >>> tuple_lssd.inputs.operand_value2 = 1.0 >>> tuple_lssd.cmdline 'seg_maths im1.nii -lssd im2.nii 1.00000000 -odt float im1_lssd.nii' >>> tuple_lssd.run() # doctest: +SKIP >>> # Test lltsnorm operation >>> tuple_lltsnorm = copy.deepcopy(tuple) >>> tuple_lltsnorm.inputs.operation = 'lltsnorm' >>> tuple_lltsnorm.inputs.operand_file1 = 'im2.nii' >>> tuple_lltsnorm.inputs.operand_value2 = 0.01 >>> tuple_lltsnorm.cmdline 'seg_maths im1.nii -lltsnorm im2.nii 0.01000000 -odt float im1_lltsnorm.nii' >>> tuple_lltsnorm.run() # doctest: +SKIP """ input_spec = TupleMathsInput class MergeInput(MathsInput): """Input Spec for seg_maths merge operation.""" dimension = traits.Int(mandatory=True, desc="Dimension to merge the images.") merge_files = traits.List( File(exists=True), argstr="%s", mandatory=True, position=4, desc="List of images to merge to the working image .", ) class Merge(MathsCommand): """Merge image files. 
See Also -------- `Source code `__ -- `Documentation `__ Examples -------- >>> from nipype.interfaces import niftyseg >>> node = niftyseg.Merge() >>> node.inputs.in_file = 'im1.nii' >>> files = ['im2.nii', 'im3.nii'] >>> node.inputs.merge_files = files >>> node.inputs.dimension = 2 >>> node.inputs.output_datatype = 'float' >>> node.cmdline 'seg_maths im1.nii -merge 2 2 im2.nii im3.nii -odt float im1_merged.nii' """ input_spec = MergeInput _suffix = "_merged" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for seg_maths.""" if opt == "merge_files": return "-merge %d %d %s" % (len(val), self.inputs.dimension, " ".join(val)) return super(Merge, self)._format_arg(opt, spec, val) nipype-1.7.0/nipype/interfaces/niftyseg/patchmatch.py000066400000000000000000000064701413403311400227710ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ The fusion module provides higher-level interfaces to some of the operations that can be performed with the seg_DetectLesions command-line program. 
""" import warnings from ..base import TraitedSpec, File, traits, CommandLineInputSpec from .base import NiftySegCommand from ..niftyreg.base import get_custom_path warn = warnings.warn warnings.filterwarnings("always", category=UserWarning) class PatchMatchInputSpec(CommandLineInputSpec): """Input Spec for PatchMatch.""" # Mandatory input arguments in_file = File( argstr="-i %s", exists=True, mandatory=True, desc="Input image to segment", position=1, ) mask_file = File( argstr="-m %s", exists=True, mandatory=True, desc="Input mask for the area where applies PatchMatch", position=2, ) database_file = File( argstr="-db %s", exists=True, mandatory=True, desc="Database with the segmentations", position=3, ) # Output file name out_file = File( name_source=["in_file"], name_template="%s_pm.nii.gz", desc="The output filename of the patchmatch results", argstr="-o %s", position=4, ) # Optional arguments patch_size = traits.Int(desc="Patch size, #voxels", argstr="-size %i") desc = "Constrained search area size, number of times bigger than the \ patchsize" cs_size = traits.Int(desc=desc, argstr="-cs %i") match_num = traits.Int(desc="Number of better matching", argstr="-match %i") pm_num = traits.Int(desc="Number of patchmatch executions", argstr="-pm %i") desc = "Number of iterations for the patchmatch algorithm" it_num = traits.Int(desc=desc, argstr="-it %i") class PatchMatchOutputSpec(TraitedSpec): """OutputSpec for PatchMatch.""" out_file = File(desc="Output segmentation") class PatchMatch(NiftySegCommand): """Interface for executable seg_PatchMatch from NiftySeg platform. The database file is a text file and in each line we have a template file, a mask with the search region to consider and a file with the label to propagate. Input image, input mask, template images from database and masks from database must have the same 4D resolution (same number of XxYxZ voxels, modalities and/or time-points). 
Label files from database must have the same 3D resolution (XxYxZ voxels) than input image but can have different number of volumes than the input image allowing to propagate multiple labels in the same execution. `Source code `_ | `Documentation `_ Examples -------- >>> from nipype.interfaces import niftyseg >>> node = niftyseg.PatchMatch() >>> node.inputs.in_file = 'im1.nii' >>> node.inputs.mask_file = 'im2.nii' >>> node.inputs.database_file = 'db.xml' >>> node.cmdline 'seg_PatchMatch -i im1.nii -m im2.nii -db db.xml -o im1_pm.nii.gz' """ _cmd = get_custom_path("seg_PatchMatch", env_dir="NIFTYSEGDIR") input_spec = PatchMatchInputSpec output_spec = PatchMatchOutputSpec _suffix = "_pm" nipype-1.7.0/nipype/interfaces/niftyseg/stats.py000066400000000000000000000176311413403311400220140ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ The stats module provides higher-level interfaces to some of the operations that can be performed with the niftyseg stats (seg_stats) command-line program. """ import numpy as np from ..base import TraitedSpec, File, traits, CommandLineInputSpec from .base import NiftySegCommand from ..niftyreg.base import get_custom_path class StatsInput(CommandLineInputSpec): """Input Spec for seg_stats interfaces.""" in_file = File( position=2, argstr="%s", exists=True, mandatory=True, desc="image to operate on" ) # Constrains mask_file = File( exists=True, position=-2, argstr="-m %s", desc="statistics within the masked area", ) desc = "Only estimate statistics if voxel is larger than " larger_voxel = traits.Float(argstr="-t %f", position=-3, desc=desc) class StatsOutput(TraitedSpec): """Output Spec for seg_stats interfaces.""" output = traits.Array(desc="Output array from seg_stats") class StatsCommand(NiftySegCommand): """ Base Command Interface for seg_stats interfaces. 
The executable seg_stats enables the estimation of image statistics on continuous voxel intensities (average, standard deviation, min/max, robust range, percentiles, sum, probabilistic volume, entropy, etc) either over the full image or on a per slice basis (slice axis can be specified), statistics over voxel coordinates (location of max, min and centre of mass, bounding box, etc) and statistics over categorical images (e.g. per region volume, count, average, Dice scores, etc). These statistics are robust to the presence of NaNs, and can be constrained by a mask and/or thresholded at a certain level. """ _cmd = get_custom_path("seg_stats", env_dir="NIFTYSEGDIR") input_spec = StatsInput output_spec = StatsOutput def _parse_stdout(self, stdout): out = [] for string_line in stdout.split("\n"): if string_line.startswith("#"): continue if len(string_line) <= 1: continue line = [float(s) for s in string_line.split()] out.append(line) return np.array(out).squeeze() def _run_interface(self, runtime): new_runtime = super(StatsCommand, self)._run_interface(runtime) self.output = self._parse_stdout(new_runtime.stdout) return new_runtime def _list_outputs(self): outputs = self.output_spec().get() outputs["output"] = self.output return outputs class UnaryStatsInput(StatsInput): """Input Spec for seg_stats unary operations.""" operation = traits.Enum( "r", "R", "a", "s", "v", "vl", "vp", "n", "np", "e", "ne", "x", "X", "c", "B", "xvox", "xdim", argstr="-%s", position=4, mandatory=True, desc="""\ Operation to perform: * r - The range of all voxels. 
* R - The robust range (assuming 2% outliers on both sides) of all voxels * a - Average of all voxels * s - Standard deviation of all voxels * v - Volume of all voxels above 0 (<# voxels> * ) * vl - Volume of each integer label (<# voxels per label> x ) * vp - Volume of all probabilsitic voxels (sum() x ) * n - Count of all voxels above 0 (<# voxels>) * np - Sum of all fuzzy voxels (sum()) * e - Entropy of all voxels * ne - Normalized entropy of all voxels * x - Location (i j k x y z) of the smallest value in the image * X - Location (i j k x y z) of the largest value in the image * c - Location (i j k x y z) of the centre of mass of the object * B - Bounding box of all nonzero voxels [ xmin xsize ymin ysize zmin zsize ] * xvox - Output the number of voxels in the x direction. Replace x with y/z for other directions. * xdim - Output the voxel dimention in the x direction. Replace x with y/z for other directions. """, ) class UnaryStats(StatsCommand): """Unary statistical operations. See Also -------- `Source code `__ -- `Documentation `__ Examples -------- >>> import copy >>> from nipype.interfaces import niftyseg >>> unary = niftyseg.UnaryStats() >>> unary.inputs.in_file = 'im1.nii' >>> # Test v operation >>> unary_v = copy.deepcopy(unary) >>> unary_v.inputs.operation = 'v' >>> unary_v.cmdline 'seg_stats im1.nii -v' >>> unary_v.run() # doctest: +SKIP >>> # Test vl operation >>> unary_vl = copy.deepcopy(unary) >>> unary_vl.inputs.operation = 'vl' >>> unary_vl.cmdline 'seg_stats im1.nii -vl' >>> unary_vl.run() # doctest: +SKIP >>> # Test x operation >>> unary_x = copy.deepcopy(unary) >>> unary_x.inputs.operation = 'x' >>> unary_x.cmdline 'seg_stats im1.nii -x' >>> unary_x.run() # doctest: +SKIP """ input_spec = UnaryStatsInput class BinaryStatsInput(StatsInput): """Input Spec for seg_stats Binary operations.""" operation = traits.Enum( "p", "sa", "ss", "svp", "al", "d", "ncc", "nmi", "Vl", "Nl", mandatory=True, argstr="-%s", position=4, desc="""\ Operation to 
perform: * p - - The th percentile of all voxels intensity (float=[0,100]) * sa - - Average of all voxels * ss - - Standard deviation of all voxels * svp - - Volume of all probabilsitic voxels (sum() x ) * al - - Average value in for each label in * d - - Calculate the Dice score between all classes in and * ncc - - Normalized cross correlation between and * nmi - - Normalized Mutual Information between and * Vl - - Volume of each integer label . Save to file. * Nl - - Count of each label . Save to file. """, ) operand_file = File( exists=True, argstr="%s", mandatory=True, position=5, xor=["operand_value"], desc="second image to perform operation with", ) operand_value = traits.Float( argstr="%.8f", mandatory=True, position=5, xor=["operand_file"], desc="value to perform operation with", ) class BinaryStats(StatsCommand): """Binary statistical operations. See Also -------- `Source code `__ -- `Documentation `__ Examples -------- >>> import copy >>> from nipype.interfaces import niftyseg >>> binary = niftyseg.BinaryStats() >>> binary.inputs.in_file = 'im1.nii' >>> # Test sa operation >>> binary_sa = copy.deepcopy(binary) >>> binary_sa.inputs.operation = 'sa' >>> binary_sa.inputs.operand_value = 2.0 >>> binary_sa.cmdline 'seg_stats im1.nii -sa 2.00000000' >>> binary_sa.run() # doctest: +SKIP >>> # Test ncc operation >>> binary_ncc = copy.deepcopy(binary) >>> binary_ncc.inputs.operation = 'ncc' >>> binary_ncc.inputs.operand_file = 'im2.nii' >>> binary_ncc.cmdline 'seg_stats im1.nii -ncc im2.nii' >>> binary_ncc.run() # doctest: +SKIP >>> # Test Nl operation >>> binary_nl = copy.deepcopy(binary) >>> binary_nl.inputs.operation = 'Nl' >>> binary_nl.inputs.operand_file = 'output.csv' >>> binary_nl.cmdline 'seg_stats im1.nii -Nl output.csv' >>> binary_nl.run() # doctest: +SKIP """ input_spec = BinaryStatsInput 
nipype-1.7.0/nipype/interfaces/niftyseg/tests/000077500000000000000000000000001413403311400214365ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/niftyseg/tests/__init__.py000066400000000000000000000000001413403311400235350ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_auto_BinaryMaths.py000066400000000000000000000035471413403311400263310ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import BinaryMaths def test_BinaryMaths_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), operand_file=dict( argstr="%s", extensions=None, mandatory=True, position=5, xor=["operand_value", "operand_str"], ), operand_str=dict( argstr="%s", mandatory=True, position=5, xor=["operand_value", "operand_file"], ), operand_value=dict( argstr="%.8f", mandatory=True, position=5, xor=["operand_file", "operand_str"], ), operation=dict( argstr="-%s", mandatory=True, position=4, ), out_file=dict( argstr="%s", extensions=None, name_source=["in_file"], name_template="%s", position=-2, ), output_datatype=dict( argstr="-odt %s", position=-3, ), ) inputs = BinaryMaths.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BinaryMaths_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = BinaryMaths.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_auto_BinaryMathsInteger.py000066400000000000000000000027601413403311400276430ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import BinaryMathsInteger def test_BinaryMathsInteger_inputs(): input_map = 
dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), operand_value=dict( argstr="%d", mandatory=True, position=5, ), operation=dict( argstr="-%s", mandatory=True, position=4, ), out_file=dict( argstr="%s", extensions=None, name_source=["in_file"], name_template="%s", position=-2, ), output_datatype=dict( argstr="-odt %s", position=-3, ), ) inputs = BinaryMathsInteger.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BinaryMathsInteger_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = BinaryMathsInteger.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_auto_BinaryStats.py000066400000000000000000000030661413403311400263470ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..stats import BinaryStats def test_BinaryStats_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), larger_voxel=dict( argstr="-t %f", position=-3, ), mask_file=dict( argstr="-m %s", extensions=None, position=-2, ), operand_file=dict( argstr="%s", extensions=None, mandatory=True, position=5, xor=["operand_value"], ), operand_value=dict( argstr="%.8f", mandatory=True, position=5, xor=["operand_file"], ), operation=dict( argstr="-%s", mandatory=True, position=4, ), ) inputs = BinaryStats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BinaryStats_outputs(): output_map = dict( output=dict(), ) outputs = 
BinaryStats.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_auto_CalcTopNCC.py000066400000000000000000000025771413403311400257630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..label_fusion import CalcTopNCC def test_CalcTopNCC_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-target %s", extensions=None, mandatory=True, position=1, ), in_templates=dict( argstr="%s", mandatory=True, position=3, ), mask_file=dict( argstr="-mask %s", extensions=None, ), num_templates=dict( argstr="-templates %s", mandatory=True, position=2, ), top_templates=dict( argstr="-n %s", mandatory=True, position=4, ), ) inputs = CalcTopNCC.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CalcTopNCC_outputs(): output_map = dict( out_files=dict(), ) outputs = CalcTopNCC.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_auto_EM.py000066400000000000000000000054121413403311400244020ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..em import EM def test_EM_inputs(): input_map = dict( args=dict( argstr="%s", ), bc_order_val=dict( argstr="-bc_order %s", usedefault=True, ), bc_thresh_val=dict( argstr="-bc_thresh %s", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, position=4, ), mask_file=dict( argstr="-mask %s", extensions=None, ), max_iter=dict( argstr="-max_iter %s", usedefault=True, ), min_iter=dict( argstr="-min_iter 
%s", usedefault=True, ), mrf_beta_val=dict( argstr="-mrf_beta %s", ), no_prior=dict( argstr="-nopriors %s", mandatory=True, xor=["prior_4D", "priors"], ), out_bc_file=dict( argstr="-bc_out %s", extensions=None, name_source=["in_file"], name_template="%s_bc_em.nii.gz", ), out_file=dict( argstr="-out %s", extensions=None, name_source=["in_file"], name_template="%s_em.nii.gz", ), out_outlier_file=dict( argstr="-out_outlier %s", extensions=None, name_source=["in_file"], name_template="%s_outlier_em.nii.gz", ), outlier_val=dict( argstr="-outlier %s %s", ), prior_4D=dict( argstr="-prior4D %s", extensions=None, mandatory=True, xor=["no_prior", "priors"], ), priors=dict( argstr="%s", mandatory=True, xor=["no_prior", "prior_4D"], ), reg_val=dict( argstr="-reg %s", ), relax_priors=dict( argstr="-rf %s %s", ), ) inputs = EM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_EM_outputs(): output_map = dict( out_bc_file=dict( extensions=None, ), out_file=dict( extensions=None, ), out_outlier_file=dict( extensions=None, ), ) outputs = EM.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_auto_FillLesions.py000066400000000000000000000040761413403311400263310ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..lesions import FillLesions def test_FillLesions_inputs(): input_map = dict( args=dict( argstr="%s", ), bin_mask=dict( argstr="-mask %s", extensions=None, ), cwf=dict( argstr="-cwf %f", ), debug=dict( argstr="-debug", ), environ=dict( nohash=True, usedefault=True, ), in_dilation=dict( argstr="-dil %d", ), in_file=dict( argstr="-i %s", extensions=None, mandatory=True, position=1, ), lesion_mask=dict( argstr="-l %s", extensions=None, mandatory=True, position=2, ), 
match=dict( argstr="-match %f", ), other=dict( argstr="-other", ), out_datatype=dict( argstr="-odt %s", ), out_file=dict( argstr="-o %s", extensions=None, name_source=["in_file"], name_template="%s_lesions_filled.nii.gz", position=3, ), search=dict( argstr="-search %f", ), size=dict( argstr="-size %d", ), smooth=dict( argstr="-smo %f", ), use_2d=dict( argstr="-2D", ), verbose=dict( argstr="-v", ), ) inputs = FillLesions.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FillLesions_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = FillLesions.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_auto_LabelFusion.py000066400000000000000000000045311413403311400263050ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..label_fusion import LabelFusion def test_LabelFusion_inputs(): input_map = dict( args=dict( argstr="%s", ), classifier_type=dict( argstr="-%s", mandatory=True, position=2, ), conv=dict( argstr="-conv %f", ), dilation_roi=dict(), environ=dict( nohash=True, usedefault=True, ), file_to_seg=dict( extensions=None, mandatory=True, ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, position=1, ), kernel_size=dict(), mask_file=dict( argstr="-mask %s", extensions=None, ), max_iter=dict( argstr="-max_iter %d", ), mrf_value=dict( argstr="-MRF_beta %f", ), out_file=dict( argstr="-out %s", extensions=None, name_source=["in_file"], name_template="%s", ), prob_flag=dict( argstr="-outProb", ), prob_update_flag=dict( argstr="-prop_update", ), proportion=dict( argstr="-prop %s", ), set_pq=dict( argstr="-setPQ %f %f", ), sm_ranking=dict( argstr="-%s", position=3, usedefault=True, ), template_file=dict( extensions=None, 
), template_num=dict(), unc=dict( argstr="-unc", ), unc_thresh=dict( argstr="-uncthres %f", ), verbose=dict( argstr="-v %s", ), ) inputs = LabelFusion.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_LabelFusion_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = LabelFusion.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_auto_MathsCommand.py000066400000000000000000000023551413403311400264570ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import MathsCommand def test_MathsCommand_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), out_file=dict( argstr="%s", extensions=None, name_source=["in_file"], name_template="%s", position=-2, ), output_datatype=dict( argstr="-odt %s", position=-3, ), ) inputs = MathsCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MathsCommand_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MathsCommand.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_auto_Merge.py000066400000000000000000000025731413403311400251450ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import Merge def test_Merge_inputs(): input_map = dict( args=dict( argstr="%s", ), dimension=dict( mandatory=True, ), environ=dict( nohash=True, 
usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), merge_files=dict( argstr="%s", mandatory=True, position=4, ), out_file=dict( argstr="%s", extensions=None, name_source=["in_file"], name_template="%s", position=-2, ), output_datatype=dict( argstr="-odt %s", position=-3, ), ) inputs = Merge.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Merge_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Merge.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_auto_NiftySegCommand.py000066400000000000000000000007761413403311400271400ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import NiftySegCommand def test_NiftySegCommand_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), ) inputs = NiftySegCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_auto_PatchMatch.py000066400000000000000000000033721413403311400261200ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..patchmatch import PatchMatch def test_PatchMatch_inputs(): input_map = dict( args=dict( argstr="%s", ), cs_size=dict( argstr="-cs %i", ), database_file=dict( argstr="-db %s", extensions=None, mandatory=True, position=3, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-i %s", extensions=None, mandatory=True, position=1, ), it_num=dict( argstr="-it %i", ), mask_file=dict( argstr="-m %s", extensions=None, mandatory=True, position=2, ), 
match_num=dict( argstr="-match %i", ), out_file=dict( argstr="-o %s", extensions=None, name_source=["in_file"], name_template="%s_pm.nii.gz", position=4, ), patch_size=dict( argstr="-size %i", ), pm_num=dict( argstr="-pm %i", ), ) inputs = PatchMatch.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_PatchMatch_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = PatchMatch.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_auto_StatsCommand.py000066400000000000000000000021771413403311400265030ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..stats import StatsCommand def test_StatsCommand_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), larger_voxel=dict( argstr="-t %f", position=-3, ), mask_file=dict( argstr="-m %s", extensions=None, position=-2, ), ) inputs = StatsCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_StatsCommand_outputs(): output_map = dict( output=dict(), ) outputs = StatsCommand.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_auto_TupleMaths.py000066400000000000000000000037621413403311400261750ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import TupleMaths def test_TupleMaths_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( 
nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), operand_file1=dict( argstr="%s", extensions=None, mandatory=True, position=5, xor=["operand_value1"], ), operand_file2=dict( argstr="%s", extensions=None, mandatory=True, position=6, xor=["operand_value2"], ), operand_value1=dict( argstr="%.8f", mandatory=True, position=5, xor=["operand_file1"], ), operand_value2=dict( argstr="%.8f", mandatory=True, position=6, xor=["operand_file2"], ), operation=dict( argstr="-%s", mandatory=True, position=4, ), out_file=dict( argstr="%s", extensions=None, name_source=["in_file"], name_template="%s", position=-2, ), output_datatype=dict( argstr="-odt %s", position=-3, ), ) inputs = TupleMaths.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TupleMaths_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = TupleMaths.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_auto_UnaryMaths.py000066400000000000000000000025241413403311400261750ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maths import UnaryMaths def test_UnaryMaths_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), operation=dict( argstr="-%s", mandatory=True, position=4, ), out_file=dict( argstr="%s", extensions=None, name_source=["in_file"], name_template="%s", position=-2, ), output_datatype=dict( argstr="-odt %s", position=-3, ), ) inputs = UnaryMaths.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], 
metakey) == value def test_UnaryMaths_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = UnaryMaths.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_auto_UnaryStats.py000066400000000000000000000023461413403311400262210ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..stats import UnaryStats def test_UnaryStats_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), larger_voxel=dict( argstr="-t %f", position=-3, ), mask_file=dict( argstr="-m %s", extensions=None, position=-2, ), operation=dict( argstr="-%s", mandatory=True, position=4, ), ) inputs = UnaryStats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_UnaryStats_outputs(): output_map = dict( output=dict(), ) outputs = UnaryStats.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_em_interfaces.py000066400000000000000000000022201413403311400256470ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import pytest from ....testing import example_data from ...niftyreg import get_custom_path from ...niftyreg.tests.test_regutils import no_nifty_tool from .. 
import EM @pytest.mark.skipif(no_nifty_tool(cmd="seg_EM"), reason="niftyseg is not installed") def test_seg_em(): # Create a node object seg_em = EM() # Check if the command is properly defined cmd = get_custom_path("seg_EM", env_dir="NIFTYSEGDIR") assert seg_em.cmd == cmd # test raising error with mandatory args absent with pytest.raises(ValueError): seg_em.run() # Assign some input data in_file = example_data("im1.nii") seg_em.inputs.in_file = in_file seg_em.inputs.no_prior = 4 cmd_tmp = "{cmd} -in {in_file} -nopriors 4 -bc_out {bc_out} -out \ {out_file} -out_outlier {out_outlier}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, out_file="im1_em.nii.gz", bc_out="im1_bc_em.nii.gz", out_outlier="im1_outlier_em.nii.gz", ) assert seg_em.cmdline == expected_cmd nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_extra_PatchMatch.py000066400000000000000000000024611413403311400262710ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import pytest from ....testing import example_data from ...niftyreg import get_custom_path from ...niftyreg.tests.test_regutils import no_nifty_tool from .. 
import PatchMatch @pytest.mark.skipif( no_nifty_tool(cmd="seg_PatchMatch"), reason="niftyseg is not installed" ) def test_seg_patchmatch(): # Create a node object seg_patchmatch = PatchMatch() # Check if the command is properly defined cmd = get_custom_path("seg_PatchMatch", env_dir="NIFTYSEGDIR") assert seg_patchmatch.cmd == cmd # test raising error with mandatory args absent with pytest.raises(ValueError): seg_patchmatch.run() # Assign some input data in_file = example_data("im1.nii") mask_file = example_data("im2.nii") db_file = example_data("db.xml") seg_patchmatch.inputs.in_file = in_file seg_patchmatch.inputs.mask_file = mask_file seg_patchmatch.inputs.database_file = db_file cmd_tmp = "{cmd} -i {in_file} -m {mask_file} -db {db} -o {out_file}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, mask_file=mask_file, db=db_file, out_file="im1_pm.nii.gz", ) assert seg_patchmatch.cmdline == expected_cmd nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_label_fusion.py000066400000000000000000000074631413403311400255230ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import pytest from ....testing import example_data from ...niftyreg import get_custom_path from ...niftyreg.tests.test_regutils import no_nifty_tool from .. 
import LabelFusion, CalcTopNCC @pytest.mark.skipif( no_nifty_tool(cmd="seg_LabFusion"), reason="niftyseg is not installed" ) def test_seg_lab_fusion(): """Test interfaces for seg_labfusion""" # Create a node object steps = LabelFusion() # Check if the command is properly defined cmd = get_custom_path("seg_LabFusion", env_dir="NIFTYSEGDIR") assert steps.cmd == cmd # test raising error with mandatory args absent with pytest.raises(ValueError): steps.run() # Assign some input data in_file = example_data("im1.nii") file_to_seg = example_data("im2.nii") template_file = example_data("im3.nii") steps.inputs.in_file = in_file steps.inputs.kernel_size = 2.0 steps.inputs.file_to_seg = file_to_seg steps.inputs.template_file = template_file steps.inputs.template_num = 2 steps.inputs.classifier_type = "STEPS" cmd_tmp = "{cmd} -in {in_file} -STEPS 2.000000 2 {file_to_seg} \ {template_file} -out {out_file}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, file_to_seg=file_to_seg, template_file=template_file, out_file="im1_steps.nii", ) assert steps.cmdline == expected_cmd # Staple staple = LabelFusion(kernel_size=2.0, template_num=2, classifier_type="STAPLE") in_file = example_data("im1.nii") file_to_seg = example_data("im2.nii") template_file = example_data("im3.nii") staple.inputs.in_file = in_file staple.inputs.file_to_seg = file_to_seg staple.inputs.template_file = template_file cmd_tmp = "{cmd} -in {in_file} -STAPLE -ALL -out {out_file}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, file_to_seg=file_to_seg, template_file=template_file, out_file="im1_staple.nii", ) assert staple.cmdline == expected_cmd # Assign some input data mv_node = LabelFusion( template_num=2, classifier_type="MV", sm_ranking="ROINCC", dilation_roi=2 ) in_file = example_data("im1.nii") file_to_seg = example_data("im2.nii") template_file = example_data("im3.nii") mv_node.inputs.in_file = in_file mv_node.inputs.file_to_seg = file_to_seg mv_node.inputs.template_file = template_file 
cmd_tmp = "{cmd} -in {in_file} -MV -ROINCC 2 2 {file_to_seg} \ {template_file} -out {out_file}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, file_to_seg=file_to_seg, template_file=template_file, out_file="im1_mv.nii", ) assert mv_node.cmdline == expected_cmd @pytest.mark.skipif( no_nifty_tool(cmd="seg_CalcTopNCC"), reason="niftyseg is not installed" ) def test_seg_calctopncc(): """Test interfaces for seg_CalctoNCC""" # Create a node object calctopncc = CalcTopNCC() # Check if the command is properly defined cmd = get_custom_path("seg_CalcTopNCC", env_dir="NIFTYSEGDIR") assert calctopncc.cmd == cmd # test raising error with mandatory args absent with pytest.raises(ValueError): calctopncc.run() # Assign some input data in_file = example_data("im1.nii") file1 = example_data("im2.nii") file2 = example_data("im3.nii") calctopncc.inputs.in_file = in_file calctopncc.inputs.num_templates = 2 calctopncc.inputs.in_templates = [file1, file2] calctopncc.inputs.top_templates = 1 cmd_tmp = "{cmd} -target {in_file} -templates 2 {file1} {file2} -n 1" expected_cmd = cmd_tmp.format(cmd=cmd, in_file=in_file, file1=file1, file2=file2) assert calctopncc.cmdline == expected_cmd nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_lesions.py000066400000000000000000000022401413403311400245210ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import pytest from ....testing import example_data from ...niftyreg import get_custom_path from ...niftyreg.tests.test_regutils import no_nifty_tool from .. 
import FillLesions @pytest.mark.skipif( no_nifty_tool(cmd="seg_FillLesions"), reason="niftyseg is not installed" ) def test_seg_filllesions(): # Create a node object seg_fill = FillLesions() # Check if the command is properly defined cmd = get_custom_path("seg_FillLesions", env_dir="NIFTYSEGDIR") assert seg_fill.cmd == cmd # test raising error with mandatory args absent with pytest.raises(ValueError): seg_fill.run() # Assign some input data in_file = example_data("im1.nii") lesion_mask = example_data("im2.nii") seg_fill.inputs.in_file = in_file seg_fill.inputs.lesion_mask = lesion_mask expected_cmd = "{cmd} -i {in_file} -l {lesion_mask} -o {out_file}".format( cmd=cmd, in_file=in_file, lesion_mask=lesion_mask, out_file="im1_lesions_filled.nii.gz", ) assert seg_fill.cmdline == expected_cmd nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_maths.py000066400000000000000000000111451413403311400241650ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import pytest from ....testing import example_data from ...niftyreg import get_custom_path from ...niftyreg.tests.test_regutils import no_nifty_tool from .. 
import UnaryMaths, BinaryMaths, BinaryMathsInteger, TupleMaths, Merge @pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed") def test_unary_maths(): # Create a node object unarym = UnaryMaths() # Check if the command is properly defined cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") assert unarym.cmd == cmd # test raising error with mandatory args absent with pytest.raises(ValueError): unarym.run() # Assign some input data in_file = example_data("im1.nii") unarym.inputs.in_file = in_file unarym.inputs.operation = "otsu" unarym.inputs.output_datatype = "float" expected_cmd = "{cmd} {in_file} -otsu -odt float {out_file}".format( cmd=cmd, in_file=in_file, out_file="im1_otsu.nii" ) assert unarym.cmdline == expected_cmd @pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed") def test_binary_maths(): # Create a node object binarym = BinaryMaths() # Check if the command is properly defined cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") assert binarym.cmd == cmd # test raising error with mandatory args absent with pytest.raises(ValueError): binarym.run() # Assign some input data in_file = example_data("im1.nii") binarym.inputs.in_file = in_file binarym.inputs.operand_value = 2.0 binarym.inputs.operation = "sub" binarym.inputs.output_datatype = "float" cmd_tmp = "{cmd} {in_file} -sub 2.00000000 -odt float {out_file}" expected_cmd = cmd_tmp.format(cmd=cmd, in_file=in_file, out_file="im1_sub.nii") assert binarym.cmdline == expected_cmd @pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed") def test_int_binary_maths(): # Create a node object ibinarym = BinaryMathsInteger() # Check if the command is properly defined cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") assert ibinarym.cmd == cmd # test raising error with mandatory args absent with pytest.raises(ValueError): ibinarym.run() # Assign some input data in_file = example_data("im1.nii") 
ibinarym.inputs.in_file = in_file ibinarym.inputs.operand_value = 2 ibinarym.inputs.operation = "dil" ibinarym.inputs.output_datatype = "float" expected_cmd = "{cmd} {in_file} -dil 2 -odt float {out_file}".format( cmd=cmd, in_file=in_file, out_file="im1_dil.nii" ) assert ibinarym.cmdline == expected_cmd @pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed") def test_tuple_maths(): # Create a node object tuplem = TupleMaths() # Check if the command is properly defined cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") assert tuplem.cmd == cmd # test raising error with mandatory args absent with pytest.raises(ValueError): tuplem.run() # Assign some input data in_file = example_data("im1.nii") op_file = example_data("im2.nii") tuplem.inputs.in_file = in_file tuplem.inputs.operation = "lncc" tuplem.inputs.operand_file1 = op_file tuplem.inputs.operand_value2 = 2.0 tuplem.inputs.output_datatype = "float" cmd_tmp = "{cmd} {in_file} -lncc {op} 2.00000000 -odt float {out_file}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, op=op_file, out_file="im1_lncc.nii" ) assert tuplem.cmdline == expected_cmd @pytest.mark.skipif(no_nifty_tool(cmd="seg_maths"), reason="niftyseg is not installed") def test_merge(): # Create a node object merge = Merge() # Check if the command is properly defined cmd = get_custom_path("seg_maths", env_dir="NIFTYSEGDIR") assert merge.cmd == cmd # test raising error with mandatory args absent with pytest.raises(ValueError): merge.run() # Assign some input data in_file = example_data("im1.nii") file1 = example_data("im2.nii") file2 = example_data("im3.nii") merge.inputs.in_file = in_file merge.inputs.merge_files = [file1, file2] merge.inputs.dimension = 2 merge.inputs.output_datatype = "float" cmd_tmp = "{cmd} {in_file} -merge 2 2 {f1} {f2} -odt float {out_file}" expected_cmd = cmd_tmp.format( cmd=cmd, in_file=in_file, f1=file1, f2=file2, out_file="im1_merged.nii" ) assert merge.cmdline == expected_cmd 
nipype-1.7.0/nipype/interfaces/niftyseg/tests/test_stats.py000066400000000000000000000034001413403311400242020ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import pytest from ....testing import example_data from ...niftyreg import get_custom_path from ...niftyreg.tests.test_regutils import no_nifty_tool from .. import UnaryStats, BinaryStats @pytest.mark.skipif(no_nifty_tool(cmd="seg_stats"), reason="niftyseg is not installed") def test_unary_stats(): """Test for the seg_stats interfaces""" # Create a node object unarys = UnaryStats() # Check if the command is properly defined cmd = get_custom_path("seg_stats", env_dir="NIFTYSEGDIR") assert unarys.cmd == cmd # test raising error with mandatory args absent with pytest.raises(ValueError): unarys.run() # Assign some input data in_file = example_data("im1.nii") unarys.inputs.in_file = in_file unarys.inputs.operation = "a" expected_cmd = "{cmd} {in_file} -a".format(cmd=cmd, in_file=in_file) assert unarys.cmdline == expected_cmd @pytest.mark.skipif(no_nifty_tool(cmd="seg_stats"), reason="niftyseg is not installed") def test_binary_stats(): """Test for the seg_stats interfaces""" # Create a node object binarys = BinaryStats() # Check if the command is properly defined cmd = get_custom_path("seg_stats", env_dir="NIFTYSEGDIR") assert binarys.cmd == cmd # test raising error with mandatory args absent with pytest.raises(ValueError): binarys.run() # Assign some input data in_file = example_data("im1.nii") binarys.inputs.in_file = in_file binarys.inputs.operand_value = 2 binarys.inputs.operation = "sa" expected_cmd = "{cmd} {in_file} -sa 2.00000000".format(cmd=cmd, in_file=in_file) assert binarys.cmdline == expected_cmd nipype-1.7.0/nipype/interfaces/nilearn.py000066400000000000000000000143041413403311400204500ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil 
-*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Nilearn is a Python library for fast and easy statistical learning on NeuroImaging data.""" import os import numpy as np import nibabel as nb from ..interfaces.base import ( traits, TraitedSpec, LibraryBaseInterface, SimpleInterface, BaseInterfaceInputSpec, File, InputMultiPath, ) class NilearnBaseInterface(LibraryBaseInterface): _pkg = "nilearn" class SignalExtractionInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc="4-D fMRI nii file") label_files = InputMultiPath( File(exists=True), mandatory=True, desc="a 3-D label image, with 0 denoting " "background, or a list of 3-D probability " "maps (one per label) or the equivalent 4D " "file.", ) class_labels = traits.List( mandatory=True, desc="Human-readable labels for each segment " "in the label file, in order. The length of " "class_labels must be equal to the number of " "segments (background excluded). This list " "corresponds to the class labels in label_file " "in ascending order", ) out_file = File( "signals.tsv", usedefault=True, exists=False, desc="The name of the file to output to. " "signals.tsv by default", ) incl_shared_variance = traits.Bool( True, usedefault=True, desc="By default " "(True), returns simple time series calculated from each " "region independently (e.g., for noise regression). If " "False, returns unique signals for each region, discarding " "shared variance (e.g., for connectivity. Only has effect " "with 4D probability maps.", ) include_global = traits.Bool( False, usedefault=True, desc="If True, include an extra column " 'labeled "GlobalSignal", with values calculated from the entire brain ' "(instead of just regions).", ) detrend = traits.Bool( False, usedefault=True, desc="If True, perform detrending using nilearn." 
) class SignalExtractionOutputSpec(TraitedSpec): out_file = File( exists=True, desc="tsv file containing the computed " "signals, with as many columns as there are labels and as " "many rows as there are timepoints in in_file, plus a " "header row with values from class_labels", ) class SignalExtraction(NilearnBaseInterface, SimpleInterface): """ Extracts signals over tissue classes or brain regions >>> seinterface = SignalExtraction() >>> seinterface.inputs.in_file = 'functional.nii' >>> seinterface.inputs.label_files = 'segmentation0.nii.gz' >>> seinterface.inputs.out_file = 'means.tsv' >>> segments = ['CSF', 'GrayMatter', 'WhiteMatter'] >>> seinterface.inputs.class_labels = segments >>> seinterface.inputs.detrend = True >>> seinterface.inputs.include_global = True """ input_spec = SignalExtractionInputSpec output_spec = SignalExtractionOutputSpec def _run_interface(self, runtime): maskers = self._process_inputs() signals = [] for masker in maskers: signals.append(masker.fit_transform(self.inputs.in_file)) region_signals = np.hstack(signals) output = np.vstack((self.inputs.class_labels, region_signals.astype(str))) # save output self._results["out_file"] = os.path.join(runtime.cwd, self.inputs.out_file) np.savetxt(self._results["out_file"], output, fmt=b"%s", delimiter="\t") return runtime def _process_inputs(self): """validate and process inputs into useful form. 
Returns a list of nilearn maskers and the list of corresponding label names.""" import nilearn.input_data as nl import nilearn.image as nli label_data = nli.concat_imgs(self.inputs.label_files) maskers = [] # determine form of label files, choose appropriate nilearn masker if np.amax(label_data.dataobj) > 1: # 3d label file n_labels = np.amax(label_data.dataobj) maskers.append(nl.NiftiLabelsMasker(label_data)) else: # 4d labels n_labels = label_data.shape[3] if self.inputs.incl_shared_variance: # independent computation for img in nli.iter_img(label_data): maskers.append( nl.NiftiMapsMasker(self._4d(img.dataobj, img.affine)) ) else: # one computation fitting all maskers.append(nl.NiftiMapsMasker(label_data)) # check label list size if not np.isclose(int(n_labels), n_labels): raise ValueError( "The label files {} contain invalid value {}. Check input.".format( self.inputs.label_files, n_labels ) ) if len(self.inputs.class_labels) != n_labels: raise ValueError( "The length of class_labels {} does not " "match the number of regions {} found in " "label_files {}".format( self.inputs.class_labels, n_labels, self.inputs.label_files ) ) if self.inputs.include_global: global_label_data = label_data.dataobj.sum(axis=3) # sum across all regions global_label_data = ( np.rint(global_label_data).astype(int).clip(0, 1) ) # binarize global_label_data = self._4d(global_label_data, label_data.affine) global_masker = nl.NiftiLabelsMasker( global_label_data, detrend=self.inputs.detrend ) maskers.insert(0, global_masker) self.inputs.class_labels.insert(0, "GlobalSignal") for masker in maskers: masker.set_params(detrend=self.inputs.detrend) return maskers def _4d(self, array, affine): """takes a 3-dimensional numpy array and an affine, returns the equivalent 4th dimensional nifti file""" return nb.Nifti1Image(array[:, :, :, np.newaxis], affine) 
nipype-1.7.0/nipype/interfaces/nipy/000077500000000000000000000000001413403311400174235ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/nipy/__init__.py000066400000000000000000000003661413403311400215410ustar00rootroot00000000000000# -*- coding: utf-8 -*- """NIPY is a python project for analysis of structural and functional neuroimaging data.""" from .model import FitGLM, EstimateContrast from .preprocess import ComputeMask, SpaceTimeRealigner from .utils import Similarity nipype-1.7.0/nipype/interfaces/nipy/base.py000066400000000000000000000010341413403311400207050ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Base interface for nipy """ from ..base import LibraryBaseInterface from ...utils.misc import package_check # Originally set in model, preprocess and utils # Set here to be imported, in case anybody depends on its presence # Remove in 2.0 have_nipy = True try: package_check("nipy") except ImportError: have_nipy = False class NipyBaseInterface(LibraryBaseInterface): _pkg = "nipy" nipype-1.7.0/nipype/interfaces/nipy/model.py000066400000000000000000000302251413403311400210770ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os from .base import NipyBaseInterface from ..base import ( TraitedSpec, traits, File, OutputMultiPath, BaseInterfaceInputSpec, isdefined, ) class FitGLMInputSpec(BaseInterfaceInputSpec): session_info = traits.List( minlen=1, maxlen=1, mandatory=True, desc=( "Session specific information generated by" " ``modelgen.SpecifyModel``, FitGLM does " "not support multiple runs uless they are " "concatenated (see SpecifyModel options)" ), ) hrf_model = traits.Enum( "Canonical", "Canonical With Derivative", "FIR", desc=( "that specifies the hemodynamic reponse " "function it can be 'Canonical', 'Canonical " "With Derivative' or 'FIR'" ), usedefault=True, ) drift_model = traits.Enum( "Cosine", "Polynomial", 
"Blank", desc=( "string that specifies the desired drift " "model, to be chosen among 'Polynomial', " "'Cosine', 'Blank'" ), usedefault=True, ) TR = traits.Float(mandatory=True) model = traits.Enum( "ar1", "spherical", desc=("autoregressive mode is available only for the " "kalman method"), usedefault=True, ) method = traits.Enum( "kalman", "ols", desc=( "method to fit the model, ols or kalma; kalman " "is more time consuming but it supports " "autoregressive model" ), usedefault=True, ) mask = File( exists=True, desc=("restrict the fitting only to the region defined " "by this mask"), ) normalize_design_matrix = traits.Bool( False, desc=("normalize (zscore) the " "regressors before fitting"), usedefault=True, ) save_residuals = traits.Bool(False, usedefault=True) plot_design_matrix = traits.Bool(False, usedefault=True) class FitGLMOutputSpec(TraitedSpec): beta = File(exists=True) nvbeta = traits.Any() s2 = File(exists=True) dof = traits.Any() constants = traits.Any() axis = traits.Any() reg_names = traits.List() residuals = File() a = File(exists=True) class FitGLM(NipyBaseInterface): """ Fit GLM model based on the specified design. Supports only single or concatenated runs. 
""" input_spec = FitGLMInputSpec output_spec = FitGLMOutputSpec def _run_interface(self, runtime): import nibabel as nb import numpy as np import nipy.modalities.fmri.glm as GLM import nipy.modalities.fmri.design_matrix as dm try: BlockParadigm = dm.BlockParadigm except AttributeError: from nipy.modalities.fmri.experimental_paradigm import BlockParadigm session_info = self.inputs.session_info functional_runs = self.inputs.session_info[0]["scans"] if isinstance(functional_runs, (str, bytes)): functional_runs = [functional_runs] nii = nb.load(functional_runs[0]) data = nii.get_fdata(caching="unchanged") if isdefined(self.inputs.mask): mask = np.asanyarray(nb.load(self.inputs.mask).dataobj) > 0 else: mask = np.ones(nii.shape[:3]) == 1 timeseries = data[mask, :] del data for functional_run in functional_runs[1:]: nii = nb.load(functional_run, mmap=False) npdata = np.asarray(nii.dataobj) timeseries = np.concatenate((timeseries, npdata[mask, :]), axis=1) del npdata nscans = timeseries.shape[1] if "hpf" in list(session_info[0].keys()): hpf = session_info[0]["hpf"] drift_model = self.inputs.drift_model else: hpf = 0 drift_model = "Blank" reg_names = [] for reg in session_info[0]["regress"]: reg_names.append(reg["name"]) reg_vals = np.zeros((nscans, len(reg_names))) for i in range(len(reg_names)): reg_vals[:, i] = np.array(session_info[0]["regress"][i]["val"]).reshape( 1, -1 ) frametimes = np.linspace(0, (nscans - 1) * self.inputs.TR, nscans) conditions = [] onsets = [] duration = [] for i, cond in enumerate(session_info[0]["cond"]): onsets += cond["onset"] conditions += [cond["name"]] * len(cond["onset"]) if len(cond["duration"]) == 1: duration += cond["duration"] * len(cond["onset"]) else: duration += cond["duration"] if conditions: paradigm = BlockParadigm(con_id=conditions, onset=onsets, duration=duration) else: paradigm = None design_matrix, self._reg_names = dm.dmtx_light( frametimes, paradigm, drift_model=drift_model, hfcut=hpf, hrf_model=self.inputs.hrf_model, 
add_regs=reg_vals, add_reg_names=reg_names, ) if self.inputs.normalize_design_matrix: for i in range(len(self._reg_names) - 1): design_matrix[:, i] = ( design_matrix[:, i] - design_matrix[:, i].mean() ) / design_matrix[:, i].std() if self.inputs.plot_design_matrix: import pylab pylab.pcolor(design_matrix) pylab.savefig("design_matrix.pdf") pylab.close() pylab.clf() glm = GLM.GeneralLinearModel() glm.fit( timeseries.T, design_matrix, method=self.inputs.method, model=self.inputs.model, ) self._beta_file = os.path.abspath("beta.nii") beta = np.zeros(mask.shape + (glm.beta.shape[0],)) beta[mask, :] = glm.beta.T nb.save(nb.Nifti1Image(beta, nii.affine), self._beta_file) self._s2_file = os.path.abspath("s2.nii") s2 = np.zeros(mask.shape) s2[mask] = glm.s2 nb.save(nb.Nifti1Image(s2, nii.affine), self._s2_file) if self.inputs.save_residuals: explained = np.dot(design_matrix, glm.beta) residuals = np.zeros(mask.shape + (nscans,)) residuals[mask, :] = timeseries - explained.T self._residuals_file = os.path.abspath("residuals.nii") nb.save(nb.Nifti1Image(residuals, nii.affine), self._residuals_file) self._nvbeta = glm.nvbeta self._dof = glm.dof self._constants = glm._constants self._axis = glm._axis if self.inputs.model == "ar1": self._a_file = os.path.abspath("a.nii") a = np.zeros(mask.shape) a[mask] = glm.a.squeeze() nb.save(nb.Nifti1Image(a, nii.affine), self._a_file) self._model = glm.model self._method = glm.method return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["beta"] = self._beta_file outputs["nvbeta"] = self._nvbeta outputs["s2"] = self._s2_file outputs["dof"] = self._dof outputs["constants"] = self._constants outputs["axis"] = self._axis outputs["reg_names"] = self._reg_names if self.inputs.model == "ar1": outputs["a"] = self._a_file if self.inputs.save_residuals: outputs["residuals"] = self._residuals_file return outputs class EstimateContrastInputSpec(BaseInterfaceInputSpec): contrasts = traits.List( traits.Either( traits.Tuple( 
traits.Str, traits.Enum("T"), traits.List(traits.Str), traits.List(traits.Float), ), traits.Tuple( traits.Str, traits.Enum("T"), traits.List(traits.Str), traits.List(traits.Float), traits.List(traits.Float), ), traits.Tuple( traits.Str, traits.Enum("F"), traits.List( traits.Either( traits.Tuple( traits.Str, traits.Enum("T"), traits.List(traits.Str), traits.List(traits.Float), ), traits.Tuple( traits.Str, traits.Enum("T"), traits.List(traits.Str), traits.List(traits.Float), traits.List(traits.Float), ), ) ), ), ), desc="""List of contrasts with each contrast being a list of the form: [('name', 'stat', [condition list], [weight list], [session list])]. if session list is None or not provided, all sessions are used. For F contrasts, the condition list should contain previously defined T-contrasts.""", mandatory=True, ) beta = File( exists=True, desc="beta coefficients of the fitted model", mandatory=True ) nvbeta = traits.Any(mandatory=True) s2 = File(exists=True, desc="squared variance of the residuals", mandatory=True) dof = traits.Any(desc="degrees of freedom", mandatory=True) constants = traits.Any(mandatory=True) axis = traits.Any(mandatory=True) reg_names = traits.List(mandatory=True) mask = File(exists=True) class EstimateContrastOutputSpec(TraitedSpec): stat_maps = OutputMultiPath(File(exists=True)) z_maps = OutputMultiPath(File(exists=True)) p_maps = OutputMultiPath(File(exists=True)) class EstimateContrast(NipyBaseInterface): """ Estimate contrast of a fitted model. 
""" input_spec = EstimateContrastInputSpec output_spec = EstimateContrastOutputSpec def _run_interface(self, runtime): import nibabel as nb import numpy as np import nipy.modalities.fmri.glm as GLM beta_nii = nb.load(self.inputs.beta) if isdefined(self.inputs.mask): mask = np.asanyarray(nb.load(self.inputs.mask).dataobj) > 0 else: mask = np.ones(beta_nii.shape[:3]) == 1 glm = GLM.GeneralLinearModel() glm.beta = np.array(beta_nii.dataobj)[mask, :].T glm.nvbeta = self.inputs.nvbeta glm.s2 = np.array(nb.load(self.inputs.s2).dataobj)[mask] glm.dof = self.inputs.dof glm._axis = self.inputs.axis glm._constants = self.inputs.constants reg_names = self.inputs.reg_names self._stat_maps = [] self._p_maps = [] self._z_maps = [] for contrast_def in self.inputs.contrasts: name = contrast_def[0] contrast = np.zeros(len(reg_names)) for i, reg_name in enumerate(reg_names): if reg_name in contrast_def[2]: idx = contrast_def[2].index(reg_name) contrast[i] = contrast_def[3][idx] est_contrast = glm.contrast(contrast) stat_map = np.zeros(mask.shape) stat_map[mask] = est_contrast.stat().T stat_map_file = os.path.abspath(name + "_stat_map.nii") nb.save(nb.Nifti1Image(stat_map, beta_nii.affine), stat_map_file) self._stat_maps.append(stat_map_file) p_map = np.zeros(mask.shape) p_map[mask] = est_contrast.pvalue().T p_map_file = os.path.abspath(name + "_p_map.nii") nb.save(nb.Nifti1Image(p_map, nii.affine), p_map_file) self._p_maps.append(p_map_file) z_map = np.zeros(mask.shape) z_map[mask] = est_contrast.zscore().T z_map_file = os.path.abspath(name + "_z_map.nii") nb.save(nb.Nifti1Image(z_map, nii.affine), z_map_file) self._z_maps.append(z_map_file) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["stat_maps"] = self._stat_maps outputs["p_maps"] = self._p_maps outputs["z_maps"] = self._z_maps return outputs nipype-1.7.0/nipype/interfaces/nipy/preprocess.py000066400000000000000000000215621413403311400221700ustar00rootroot00000000000000# -*- coding: utf-8 -*- 
import os import nibabel as nb import numpy as np from ...utils.filemanip import split_filename, fname_presuffix from .base import NipyBaseInterface, have_nipy from ..base import ( TraitedSpec, traits, BaseInterfaceInputSpec, isdefined, File, InputMultiPath, OutputMultiPath, ) class ComputeMaskInputSpec(BaseInterfaceInputSpec): mean_volume = File( exists=True, mandatory=True, desc="mean EPI image, used to compute the threshold for the mask", ) reference_volume = File( exists=True, desc=( "reference volume used to compute the mask. " "If none is give, the mean volume is used." ), ) m = traits.Float(desc="lower fraction of the histogram to be discarded") M = traits.Float(desc="upper fraction of the histogram to be discarded") cc = traits.Bool(desc="Keep only the largest connected component") class ComputeMaskOutputSpec(TraitedSpec): brain_mask = File(exists=True) class ComputeMask(NipyBaseInterface): input_spec = ComputeMaskInputSpec output_spec = ComputeMaskOutputSpec def _run_interface(self, runtime): from nipy.labs.mask import compute_mask args = {} for key in [ k for k, _ in list(self.inputs.items()) if k not in BaseInterfaceInputSpec().trait_names() ]: value = getattr(self.inputs, key) if isdefined(value): if key in ["mean_volume", "reference_volume"]: value = np.asanyarray(nb.load(value).dataobj) args[key] = value brain_mask = compute_mask(**args) _, name, ext = split_filename(self.inputs.mean_volume) self._brain_mask_path = os.path.abspath("%s_mask.%s" % (name, ext)) nb.save( nb.Nifti1Image(brain_mask.astype(np.uint8), nii.affine), self._brain_mask_path, ) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["brain_mask"] = self._brain_mask_path return outputs class SpaceTimeRealignerInputSpec(BaseInterfaceInputSpec): in_file = InputMultiPath( File(exists=True), mandatory=True, min_ver="0.4.0.dev", desc="File to realign" ) tr = traits.Float(desc="TR in seconds", requires=["slice_times"]) slice_times = traits.Either( 
traits.List(traits.Float()), traits.Enum( "asc_alt_2", "asc_alt_2_1", "asc_alt_half", "asc_alt_siemens", "ascending", "desc_alt_2", "desc_alt_half", "descending", ), desc=("Actual slice acquisition times."), ) slice_info = traits.Either( traits.Int, traits.List(min_len=2, max_len=2), desc=( "Single integer or length 2 sequence " "If int, the axis in `images` that is the " "slice axis. In a 4D image, this will " "often be axis = 2. If a 2 sequence, then" " elements are ``(slice_axis, " "slice_direction)``, where ``slice_axis`` " "is the slice axis in the image as above, " "and ``slice_direction`` is 1 if the " "slices were acquired slice 0 first, slice" " -1 last, or -1 if acquired slice -1 " "first, slice 0 last. If `slice_info` is " "an int, assume " "``slice_direction`` == 1." ), requires=["slice_times"], ) class SpaceTimeRealignerOutputSpec(TraitedSpec): out_file = OutputMultiPath(File(exists=True), desc="Realigned files") par_file = OutputMultiPath( File(exists=True), desc=("Motion parameter files. Angles are not " "euler angles"), ) class SpaceTimeRealigner(NipyBaseInterface): """Simultaneous motion and slice timing correction algorithm If slice_times is not specified, this algorithm performs spatial motion correction This interface wraps nipy's SpaceTimeRealign algorithm [Roche2011]_ or simply the SpatialRealign algorithm when timing info is not provided. Examples -------- >>> from nipype.interfaces.nipy import SpaceTimeRealigner >>> #Run spatial realignment only >>> realigner = SpaceTimeRealigner() >>> realigner.inputs.in_file = ['functional.nii'] >>> res = realigner.run() # doctest: +SKIP >>> realigner = SpaceTimeRealigner() >>> realigner.inputs.in_file = ['functional.nii'] >>> realigner.inputs.tr = 2 >>> realigner.inputs.slice_times = list(range(0, 3, 67)) >>> realigner.inputs.slice_info = 2 >>> res = realigner.run() # doctest: +SKIP References ---------- .. [Roche2011] Roche A. 
A four-dimensional registration algorithm with \ application to joint correction of motion and slice timing \ in fMRI. IEEE Trans Med Imaging. 2011 Aug;30(8):1546-54. DOI_. .. _DOI: https://doi.org/10.1109/TMI.2011.2131152 """ input_spec = SpaceTimeRealignerInputSpec output_spec = SpaceTimeRealignerOutputSpec keywords = ["slice timing", "motion correction"] def _run_interface(self, runtime): from nipy import save_image, load_image all_ims = [load_image(fname) for fname in self.inputs.in_file] if not isdefined(self.inputs.slice_times): from nipy.algorithms.registration.groupwise_registration import SpaceRealign R = SpaceRealign(all_ims) else: from nipy.algorithms.registration import SpaceTimeRealign R = SpaceTimeRealign( all_ims, tr=self.inputs.tr, slice_times=self.inputs.slice_times, slice_info=self.inputs.slice_info, ) R.estimate(refscan=None) corr_run = R.resample() self._out_file_path = [] self._par_file_path = [] for j, corr in enumerate(corr_run): self._out_file_path.append( os.path.abspath( "corr_%s.nii.gz" % (split_filename(self.inputs.in_file[j])[1]) ) ) save_image(corr, self._out_file_path[j]) self._par_file_path.append( os.path.abspath("%s.par" % (os.path.split(self.inputs.in_file[j])[1])) ) mfile = open(self._par_file_path[j], "w") motion = R._transforms[j] # nipy does not encode euler angles. 
return in original form of # translation followed by rotation vector see: # http://en.wikipedia.org/wiki/Rodrigues'_rotation_formula for i, mo in enumerate(motion): params = [ "%.10f" % item for item in np.hstack((mo.translation, mo.rotation)) ] string = " ".join(params) + "\n" mfile.write(string) mfile.close() return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = self._out_file_path outputs["par_file"] = self._par_file_path return outputs class TrimInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc="EPI image to trim") begin_index = traits.Int(0, usedefault=True, desc="first volume") end_index = traits.Int( 0, usedefault=True, desc="last volume indexed as in python (and 0 for last)" ) out_file = File(desc="output filename") suffix = traits.Str( "_trim", usedefault=True, desc="suffix for out_file to use if no out_file provided", ) class TrimOutputSpec(TraitedSpec): out_file = File(exists=True) class Trim(NipyBaseInterface): """Simple interface to trim a few volumes from a 4d fmri nifti file Examples -------- >>> from nipype.interfaces.nipy.preprocess import Trim >>> trim = Trim() >>> trim.inputs.in_file = 'functional.nii' >>> trim.inputs.begin_index = 3 # remove 3 first volumes >>> res = trim.run() # doctest: +SKIP """ input_spec = TrimInputSpec output_spec = TrimOutputSpec def _run_interface(self, runtime): out_file = self._list_outputs()["out_file"] nii = nb.load(self.inputs.in_file) if self.inputs.end_index == 0: s = slice(self.inputs.begin_index, nii.shape[3]) else: s = slice(self.inputs.begin_index, self.inputs.end_index) nii2 = nb.Nifti1Image(nii.dataobj[..., s], nii.affine, nii.header) nb.save(nii2, out_file) return runtime def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = self.inputs.out_file if not isdefined(outputs["out_file"]): outputs["out_file"] = fname_presuffix( self.inputs.in_file, newpath=os.getcwd(), suffix=self.inputs.suffix ) 
outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs nipype-1.7.0/nipype/interfaces/nipy/tests/000077500000000000000000000000001413403311400205655ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/nipy/tests/__init__.py000066400000000000000000000000301413403311400226670ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/nipy/tests/test_auto_ComputeMask.py000066400000000000000000000016341413403311400254620ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import ComputeMask def test_ComputeMask_inputs(): input_map = dict( M=dict(), cc=dict(), m=dict(), mean_volume=dict( extensions=None, mandatory=True, ), reference_volume=dict( extensions=None, ), ) inputs = ComputeMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ComputeMask_outputs(): output_map = dict( brain_mask=dict( extensions=None, ), ) outputs = ComputeMask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/nipy/tests/test_auto_EstimateContrast.py000066400000000000000000000024561413403311400265260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import EstimateContrast def test_EstimateContrast_inputs(): input_map = dict( axis=dict( mandatory=True, ), beta=dict( extensions=None, mandatory=True, ), constants=dict( mandatory=True, ), contrasts=dict( mandatory=True, ), dof=dict( mandatory=True, ), mask=dict( extensions=None, ), nvbeta=dict( mandatory=True, ), reg_names=dict( mandatory=True, ), s2=dict( extensions=None, mandatory=True, ), ) inputs = EstimateContrast.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value def test_EstimateContrast_outputs(): output_map = dict( p_maps=dict(), stat_maps=dict(), z_maps=dict(), ) outputs = EstimateContrast.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/nipy/tests/test_auto_FitGLM.py000066400000000000000000000031151413403311400243100ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import FitGLM def test_FitGLM_inputs(): input_map = dict( TR=dict( mandatory=True, ), drift_model=dict( usedefault=True, ), hrf_model=dict( usedefault=True, ), mask=dict( extensions=None, ), method=dict( usedefault=True, ), model=dict( usedefault=True, ), normalize_design_matrix=dict( usedefault=True, ), plot_design_matrix=dict( usedefault=True, ), save_residuals=dict( usedefault=True, ), session_info=dict( mandatory=True, ), ) inputs = FitGLM.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FitGLM_outputs(): output_map = dict( a=dict( extensions=None, ), axis=dict(), beta=dict( extensions=None, ), constants=dict(), dof=dict(), nvbeta=dict(), reg_names=dict(), residuals=dict( extensions=None, ), s2=dict( extensions=None, ), ) outputs = FitGLM.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/nipy/tests/test_auto_NipyBaseInterface.py000066400000000000000000000005611413403311400265630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import NipyBaseInterface def test_NipyBaseInterface_inputs(): input_map = dict() inputs = NipyBaseInterface.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/nipy/tests/test_auto_Similarity.py000066400000000000000000000017711413403311400253620ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Similarity def test_Similarity_inputs(): input_map = dict( mask1=dict( extensions=None, ), mask2=dict( extensions=None, ), metric=dict( usedefault=True, ), volume1=dict( extensions=None, mandatory=True, ), volume2=dict( extensions=None, mandatory=True, ), ) inputs = Similarity.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Similarity_outputs(): output_map = dict( similarity=dict(), ) outputs = Similarity.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/nipy/tests/test_auto_SpaceTimeRealigner.py000066400000000000000000000017321413403311400267340ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import SpaceTimeRealigner def test_SpaceTimeRealigner_inputs(): input_map = dict( in_file=dict( mandatory=True, min_ver="0.4.0.dev", ), slice_info=dict( requires=["slice_times"], ), slice_times=dict(), tr=dict( requires=["slice_times"], ), ) inputs = SpaceTimeRealigner.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SpaceTimeRealigner_outputs(): output_map = dict( out_file=dict(), par_file=dict(), ) outputs = SpaceTimeRealigner.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/nipy/tests/test_auto_Trim.py000066400000000000000000000017631413403311400241500ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Trim def test_Trim_inputs(): input_map = dict( begin_index=dict( usedefault=True, ), end_index=dict( usedefault=True, ), in_file=dict( extensions=None, mandatory=True, ), out_file=dict( extensions=None, ), suffix=dict( usedefault=True, ), ) inputs = Trim.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Trim_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Trim.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/nipy/utils.py000066400000000000000000000064311413403311400211410ustar00rootroot00000000000000# -*- coding: utf-8 -*- import warnings import numpy as np import nibabel as nb from .base import NipyBaseInterface, have_nipy from ..base import TraitedSpec, traits, BaseInterfaceInputSpec, File, isdefined class SimilarityInputSpec(BaseInterfaceInputSpec): volume1 = File(exists=True, desc="3D volume", mandatory=True) volume2 = File(exists=True, desc="3D volume", mandatory=True) mask1 = File(exists=True, desc="3D volume") mask2 = File(exists=True, desc="3D volume") metric = traits.Either( traits.Enum("cc", "cr", "crl1", "mi", "nmi", "slr"), traits.Callable(), desc="""str or callable Cost-function for assessing image similarity. If a string, one of 'cc': correlation coefficient, 'cr': correlation ratio, 'crl1': L1-norm based correlation ratio, 'mi': mutual information, 'nmi': normalized mutual information, 'slr': supervised log-likelihood ratio. 
If a callable, it should take a two-dimensional array representing the image joint histogram as an input and return a float.""", usedefault=True, ) class SimilarityOutputSpec(TraitedSpec): similarity = traits.Float(desc="Similarity between volume 1 and 2") class Similarity(NipyBaseInterface): """Calculates similarity between two 3D volumes. Both volumes have to be in the same coordinate system, same space within that coordinate system and with the same voxel dimensions. .. deprecated:: 0.10.0 Use :py:class:`nipype.algorithms.metrics.Similarity` instead. Example ------- >>> from nipype.interfaces.nipy.utils import Similarity >>> similarity = Similarity() >>> similarity.inputs.volume1 = 'rc1s1.nii' >>> similarity.inputs.volume2 = 'rc1s2.nii' >>> similarity.inputs.mask1 = 'mask.nii' >>> similarity.inputs.mask2 = 'mask.nii' >>> similarity.inputs.metric = 'cr' >>> res = similarity.run() # doctest: +SKIP """ input_spec = SimilarityInputSpec output_spec = SimilarityOutputSpec def __init__(self, **inputs): warnings.warn( ( "This interface is deprecated since 0.10.0." 
" Please use nipype.algorithms.metrics.Similarity" ), DeprecationWarning, ) super(Similarity, self).__init__(**inputs) def _run_interface(self, runtime): from nipy.algorithms.registration.histogram_registration import ( HistogramRegistration, ) from nipy.algorithms.registration.affine import Affine vol1_nii = nb.load(self.inputs.volume1) vol2_nii = nb.load(self.inputs.volume2) if isdefined(self.inputs.mask1): mask1 = np.asanyarray(nb.load(self.inputs.mask1).dataobj) == 1 else: mask1 = None if isdefined(self.inputs.mask2): mask2 = np.asanyarray(nb.load(self.inputs.mask2).dataobj) == 1 else: mask2 = None histreg = HistogramRegistration( from_img=vol1_nii, to_img=vol2_nii, similarity=self.inputs.metric, from_mask=mask1, to_mask=mask2, ) self._similarity = histreg.eval(Affine()) return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["similarity"] = self._similarity return outputs nipype-1.7.0/nipype/interfaces/nitime/000077500000000000000000000000001413403311400177315ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/nitime/__init__.py000066400000000000000000000005261413403311400220450ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Nitime is a library for time-series analysis of data from neuroscience experiments.""" from .analysis import ( CoherenceAnalyzerInputSpec, CoherenceAnalyzerOutputSpec, CoherenceAnalyzer, ) nipype-1.7.0/nipype/interfaces/nitime/analysis.py000066400000000000000000000226161413403311400221350ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Interfaces to functionality from nitime for time-series analysis of fmri data - nitime.analysis.CoherenceAnalyzer: Coherence/y - nitime.fmri.io: - nitime.viz.drawmatrix_channels """ import numpy as np import tempfile from ...utils.misc 
import package_check from ...utils.filemanip import fname_presuffix from .base import NitimeBaseInterface from ..base import ( TraitedSpec, File, Undefined, traits, isdefined, BaseInterfaceInputSpec, ) have_nitime = True try: package_check("nitime") except ImportError: have_nitime = False class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec): # Input either csv file, or time-series object and use _xor_inputs to # discriminate _xor_inputs = ("in_file", "in_TS") in_file = File( desc=( "csv file with ROIs on the columns and " "time-points on the rows. ROI names at the top row" ), exists=True, requires=("TR",), ) # If you gave just a file name, you need to specify the sampling_rate: TR = traits.Float( desc=("The TR used to collect the data in your csv file ") ) in_TS = traits.Any(desc="a nitime TimeSeries object") NFFT = traits.Range( low=32, value=64, usedefault=True, desc=( "This is the size of the window used for " "the spectral estimation. Use values between " "32 and the number of samples in your time-series." "(Defaults to 64.)" ), ) n_overlap = traits.Range( low=0, value=0, usedefault=True, desc=( "The number of samples which overlap" "between subsequent windows.(Defaults to 0)" ), ) frequency_range = traits.List( value=[0.02, 0.15], usedefault=True, minlen=2, maxlen=2, desc=( "The range of frequencies over" "which the analysis will average." "[low,high] (Default [0.02,0.15]" ), ) output_csv_file = File( desc="File to write outputs (coherence,time-delay) with file-names: " "``file_name_{coherence,timedelay}``" ) output_figure_file = File( desc="""\ File to write output figures (coherence,time-delay) with file-names: ``file_name_{coherence,timedelay}``. Possible formats: .png,.svg,.pdf,.jpg,...""" ) figure_type = traits.Enum( "matrix", "network", usedefault=True, desc=( "The type of plot to generate, where " "'matrix' denotes a matrix image and" "'network' denotes a graph representation." 
" Default: 'matrix'" ), ) class CoherenceAnalyzerOutputSpec(TraitedSpec): coherence_array = traits.Array( desc=("The pairwise coherence values between the ROIs") ) timedelay_array = traits.Array( desc=("The pairwise time delays between the ROIs (in seconds)") ) coherence_csv = File(desc=("A csv file containing the pairwise coherence values")) timedelay_csv = File(desc=("A csv file containing the pairwise time delay values")) coherence_fig = File(desc=("Figure representing coherence values")) timedelay_fig = File(desc=("Figure representing coherence values")) class CoherenceAnalyzer(NitimeBaseInterface): """Wraps nitime.analysis.CoherenceAnalyzer: Coherence/y""" input_spec = CoherenceAnalyzerInputSpec output_spec = CoherenceAnalyzerOutputSpec def _read_csv(self): """ Read from csv in_file and return an array and ROI names The input file should have a first row containing the names of the ROIs (strings) the rest of the data will be read in and transposed so that the rows (TRs) will becomes the second (and last) dimension of the array """ # Check that input conforms to expectations: first_row = open(self.inputs.in_file).readline() if not first_row[1].isalpha(): raise ValueError( "First row of in_file should contain ROI names as strings of characters" ) roi_names = ( open(self.inputs.in_file).readline().replace('"', "").strip("\n").split(",") ) # Transpose, so that the time is the last dimension: data = np.loadtxt(self.inputs.in_file, skiprows=1, delimiter=",").T return data, roi_names def _csv2ts(self): """Read data from the in_file and generate a nitime TimeSeries object""" from nitime.timeseries import TimeSeries data, roi_names = self._read_csv() TS = TimeSeries(data=data, sampling_interval=self.inputs.TR, time_unit="s") TS.metadata = dict(ROIs=roi_names) return TS # Rewrite _run_interface, but not run def _run_interface(self, runtime): import nitime.analysis as nta lb, ub = self.inputs.frequency_range if self.inputs.in_TS is Undefined: # get TS form csv and 
inputs.TR TS = self._csv2ts() else: # get TS from inputs.in_TS TS = self.inputs.in_TS # deal with creating or storing ROI names: if "ROIs" not in TS.metadata: self.ROIs = ["roi_%d" % x for x, _ in enumerate(TS.data)] else: self.ROIs = TS.metadata["ROIs"] A = nta.CoherenceAnalyzer( TS, method=dict( this_method="welch", NFFT=self.inputs.NFFT, n_overlap=self.inputs.n_overlap, ), ) freq_idx = np.where( (A.frequencies > self.inputs.frequency_range[0]) * (A.frequencies < self.inputs.frequency_range[1]) )[0] # Get the coherence matrix from the analyzer, averaging on the last # (frequency) dimension: (roi X roi array) self.coherence = np.mean(A.coherence[:, :, freq_idx], -1) # Get the time delay from analyzer, (roi X roi array) self.delay = np.mean(A.delay[:, :, freq_idx], -1) return runtime # Rewrite _list_outputs (look at BET) def _list_outputs(self): outputs = self.output_spec().get() # if isdefined(self.inputs.output_csv_file): # write to a csv file and assign a value to self.coherence_file (a # file name + path) # Always defined (the arrays): outputs["coherence_array"] = self.coherence outputs["timedelay_array"] = self.delay # Conditional if isdefined(self.inputs.output_csv_file) and hasattr(self, "coherence"): # we need to make a function that we call here that writes the # coherence values to this file "coherence_csv" and makes the # time_delay csv file?? self._make_output_files() outputs["coherence_csv"] = fname_presuffix( self.inputs.output_csv_file, suffix="_coherence" ) outputs["timedelay_csv"] = fname_presuffix( self.inputs.output_csv_file, suffix="_delay" ) if isdefined(self.inputs.output_figure_file) and hasattr(self, "coherence"): self._make_output_figures() outputs["coherence_fig"] = fname_presuffix( self.inputs.output_figure_file, suffix="_coherence" ) outputs["timedelay_fig"] = fname_presuffix( self.inputs.output_figure_file, suffix="_delay" ) return outputs def _make_output_files(self): """ Generate the output csv files. 
""" for this in zip([self.coherence, self.delay], ["coherence", "delay"]): tmp_f = tempfile.mkstemp()[1] np.savetxt(tmp_f, this[0], delimiter=",") fid = open( fname_presuffix(self.inputs.output_csv_file, suffix="_%s" % this[1]), "w+", ) # this writes ROIs as header line fid.write("," + ",".join(self.ROIs) + "\n") # this writes ROI and data to a line for r, line in zip(self.ROIs, open(tmp_f)): fid.write("%s,%s" % (r, line)) fid.close() def _make_output_figures(self): """ Generate the desired figure and save the files according to self.inputs.output_figure_file """ import nitime.viz as viz if self.inputs.figure_type == "matrix": fig_coh = viz.drawmatrix_channels( self.coherence, channel_names=self.ROIs, color_anchor=0 ) fig_coh.savefig( fname_presuffix(self.inputs.output_figure_file, suffix="_coherence") ) fig_dt = viz.drawmatrix_channels( self.delay, channel_names=self.ROIs, color_anchor=0 ) fig_dt.savefig( fname_presuffix(self.inputs.output_figure_file, suffix="_delay") ) else: fig_coh = viz.drawgraph_channels(self.coherence, channel_names=self.ROIs) fig_coh.savefig( fname_presuffix(self.inputs.output_figure_file, suffix="_coherence") ) fig_dt = viz.drawgraph_channels(self.delay, channel_names=self.ROIs) fig_dt.savefig( fname_presuffix(self.inputs.output_figure_file, suffix="_delay") ) nipype-1.7.0/nipype/interfaces/nitime/base.py000066400000000000000000000004341413403311400212160ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Base interface for nitime """ from ..base import LibraryBaseInterface class NitimeBaseInterface(LibraryBaseInterface): _pkg = "nitime" nipype-1.7.0/nipype/interfaces/nitime/tests/000077500000000000000000000000001413403311400210735ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/nitime/tests/__init__.py000066400000000000000000000000301413403311400231750ustar00rootroot00000000000000# -*- coding: utf-8 -*- 
nipype-1.7.0/nipype/interfaces/nitime/tests/test_auto_CoherenceAnalyzer.py000066400000000000000000000030021413403311400271300ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..analysis import CoherenceAnalyzer def test_CoherenceAnalyzer_inputs(): input_map = dict( NFFT=dict( usedefault=True, ), TR=dict(), figure_type=dict( usedefault=True, ), frequency_range=dict( usedefault=True, ), in_TS=dict(), in_file=dict( extensions=None, requires=("TR",), ), n_overlap=dict( usedefault=True, ), output_csv_file=dict( extensions=None, ), output_figure_file=dict( extensions=None, ), ) inputs = CoherenceAnalyzer.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CoherenceAnalyzer_outputs(): output_map = dict( coherence_array=dict(), coherence_csv=dict( extensions=None, ), coherence_fig=dict( extensions=None, ), timedelay_array=dict(), timedelay_csv=dict( extensions=None, ), timedelay_fig=dict( extensions=None, ), ) outputs = CoherenceAnalyzer.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/nitime/tests/test_auto_NitimeBaseInterface.py000066400000000000000000000005671413403311400274050ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import NitimeBaseInterface def test_NitimeBaseInterface_inputs(): input_map = dict() inputs = NitimeBaseInterface.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/nitime/tests/test_nitime.py000066400000000000000000000051511413403311400237730ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: 
set ft=python sts=4 ts=4 sw=4 et: import os import tempfile import numpy as np import pytest from nipype.testing import example_data import nipype.interfaces.nitime as nitime no_nitime = not nitime.analysis.have_nitime display_available = "DISPLAY" in os.environ and os.environ["DISPLAY"] @pytest.mark.skipif(no_nitime, reason="nitime is not installed") def test_read_csv(): """Test that reading the data from csv file gives you back a reasonable time-series object""" CA = nitime.CoherenceAnalyzer() CA.inputs.TR = 1.89 # bogus value just to pass traits test CA.inputs.in_file = example_data("fmri_timeseries_nolabels.csv") with pytest.raises(ValueError): CA._read_csv() CA.inputs.in_file = example_data("fmri_timeseries.csv") data, roi_names = CA._read_csv() assert data[0][0] == 10125.9 assert roi_names[0] == "WM" @pytest.mark.skipif(no_nitime, reason="nitime is not installed") def test_coherence_analysis(tmpdir): """Test that the coherence analyzer works""" import nitime.analysis as nta import nitime.timeseries as ts tmpdir.chdir() # This is the nipype interface analysis: CA = nitime.CoherenceAnalyzer() CA.inputs.TR = 1.89 CA.inputs.in_file = example_data("fmri_timeseries.csv") if display_available: tmp_png = tempfile.mkstemp(suffix=".png")[1] CA.inputs.output_figure_file = tmp_png tmp_csv = tempfile.mkstemp(suffix=".csv")[1] CA.inputs.output_csv_file = tmp_csv o = CA.run() assert o.outputs.coherence_array.shape == (31, 31) # This is the nitime analysis: TR = 1.89 data_rec = np.recfromcsv(example_data("fmri_timeseries.csv")) roi_names = np.array(data_rec.dtype.names) n_samples = data_rec.shape[0] data = np.zeros((len(roi_names), n_samples)) for n_idx, roi in enumerate(roi_names): data[n_idx] = data_rec[roi] T = ts.TimeSeries(data, sampling_interval=TR) assert (CA._csv2ts().data == T.data).all() T.metadata["roi"] = roi_names C = nta.CoherenceAnalyzer( T, method=dict( this_method="welch", NFFT=CA.inputs.NFFT, n_overlap=CA.inputs.n_overlap ), ) freq_idx = np.where( 
(C.frequencies > CA.inputs.frequency_range[0]) * (C.frequencies < CA.inputs.frequency_range[1]) )[0] # Extract the coherence and average across these frequency bands: # Averaging is done on the last dimension coh = np.mean(C.coherence[:, :, freq_idx], -1) assert (o.outputs.coherence_array == coh).all() nipype-1.7.0/nipype/interfaces/petpvc.py000066400000000000000000000164571413403311400203340ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """PETPVC is a toolbox for partial volume correction in positron emission tomography.""" import os from .base import ( TraitedSpec, CommandLineInputSpec, CommandLine, File, isdefined, traits, ) from ..utils.filemanip import fname_presuffix from ..external.due import BibTeX pvc_methods = [ "GTM", "IY", "IY+RL", "IY+VC", "LABBE", "LABBE+MTC", "LABBE+MTC+RL", "LABBE+MTC+VC", "LABBE+RBV", "LABBE+RBV+RL", "LABBE+RBV+VC", "MG", "MG+RL", "MG+VC", "MTC", "MTC+RL", "MTC+VC", "RBV", "RBV+RL", "RBV+VC", "RL", "VC", ] class PETPVCInputSpec(CommandLineInputSpec): in_file = File(desc="PET image file", exists=True, mandatory=True, argstr="-i %s") out_file = File(desc="Output file", genfile=True, hash_files=False, argstr="-o %s") mask_file = File( desc="Mask image file", exists=True, mandatory=True, argstr="-m %s" ) pvc = traits.Enum( pvc_methods, mandatory=True, argstr="-p %s", desc="""\ Desired PVC method: * Geometric transfer matrix -- ``GTM`` * Labbe approach -- ``LABBE`` * Richardson-Lucy -- ``RL`` * Van-Cittert -- ``VC`` * Region-based voxel-wise correction -- ``RBV`` * RBV with Labbe -- ``LABBE+RBV`` * RBV with Van-Cittert -- ``RBV+VC`` * RBV with Richardson-Lucy -- ``RBV+RL`` * RBV with Labbe and Van-Cittert -- ``LABBE+RBV+VC`` * RBV with Labbe and Richardson-Lucy -- ``LABBE+RBV+RL`` * Multi-target correction -- ``MTC`` * MTC with Labbe -- ``LABBE+MTC`` * MTC with Van-Cittert -- ``MTC+VC`` * MTC with Richardson-Lucy -- 
``MTC+RL`` * MTC with Labbe and Van-Cittert -- ``LABBE+MTC+VC`` * MTC with Labbe and Richardson-Lucy -- ``LABBE+MTC+RL`` * Iterative Yang -- ``IY`` * Iterative Yang with Van-Cittert -- ``IY+VC`` * Iterative Yang with Richardson-Lucy -- ``IY+RL`` * Muller Gartner -- ``MG`` * Muller Gartner with Van-Cittert -- ``MG+VC`` * Muller Gartner with Richardson-Lucy -- ``MG+RL`` """, ) fwhm_x = traits.Float( desc="The full-width at half maximum in mm along x-axis", mandatory=True, argstr="-x %.4f", ) fwhm_y = traits.Float( desc="The full-width at half maximum in mm along y-axis", mandatory=True, argstr="-y %.4f", ) fwhm_z = traits.Float( desc="The full-width at half maximum in mm along z-axis", mandatory=True, argstr="-z %.4f", ) debug = traits.Bool( desc="Prints debug information", usedefault=True, default_value=False, argstr="-d", ) n_iter = traits.Int( desc="Number of iterations", default_value=10, usedefault=True, argstr="-n %d" ) n_deconv = traits.Int( desc="Number of deconvolution iterations", default_value=10, usedefault=True, argstr="-k %d", ) alpha = traits.Float( desc="Alpha value", default_value=1.5, usedefault=True, argstr="-a %.4f" ) stop_crit = traits.Float( desc="Stopping criterion", default_value=0.01, usedefault=True, argstr="-s %.4f" ) class PETPVCOutputSpec(TraitedSpec): out_file = File(desc="Output file") class PETPVC(CommandLine): """Use PETPVC for partial volume correction of PET images. PETPVC ([1]_, [2]_) is a software from the Nuclear Medicine Department of the UCL University Hospital, London, UK. Examples -------- >>> from ..testing import example_data >>> #TODO get data for PETPVC >>> pvc = PETPVC() >>> pvc.inputs.in_file = 'pet.nii.gz' >>> pvc.inputs.mask_file = 'tissues.nii.gz' >>> pvc.inputs.out_file = 'pet_pvc_rbv.nii.gz' >>> pvc.inputs.pvc = 'RBV' >>> pvc.inputs.fwhm_x = 2.0 >>> pvc.inputs.fwhm_y = 2.0 >>> pvc.inputs.fwhm_z = 2.0 >>> outs = pvc.run() #doctest: +SKIP References ---------- .. [1] K. Erlandsson, I. Buvat, P. H. Pretorius, B. A. 
Thomas, and B. F. Hutton, "A review of partial volume correction techniques for emission tomography and their applications in neurology, cardiology and oncology," Phys. Med. Biol., vol. 57, no. 21, p. R119, 2012. .. [2] https://github.com/UCL/PETPVC """ input_spec = PETPVCInputSpec output_spec = PETPVCOutputSpec _cmd = "petpvc" _references = [ { "entry": BibTeX( "@article{0031-9155-61-22-7975," "author={Benjamin A Thomas and Vesna Cuplov and Alexandre Bousse and " "Adriana Mendes and Kris Thielemans and Brian F Hutton and Kjell Erlandsson}," "title={PETPVC: a toolbox for performing partial volume correction " "techniques in positron emission tomography}," "journal={Physics in Medicine and Biology}," "volume={61}," "number={22}," "pages={7975}," "url={http://stacks.iop.org/0031-9155/61/i=22/a=7975}," "doi={https://doi.org/10.1088/0031-9155/61/22/7975}," "year={2016}," "}" ), "description": "PETPVC software implementation publication", "tags": ["implementation"], } ] def _list_outputs(self): outputs = self.output_spec().get() outputs["out_file"] = self.inputs.out_file if not isdefined(outputs["out_file"]): method_name = self.inputs.pvc.lower() outputs["out_file"] = self._gen_fname( self.inputs.in_file, suffix="_{}_pvc".format(method_name) ) outputs["out_file"] = os.path.abspath(outputs["out_file"]) return outputs def _gen_fname( self, basename, cwd=None, suffix=None, change_ext=True, ext=".nii.gz" ): """Generate a filename based on the given parameters. The filename will take the form: cwd/basename. If change_ext is True, it will use the extentions specified in intputs.output_type. Parameters ---------- basename : str Filename to base the new filename on. cwd : str Path to prefix to the new filename. (default is os.getcwd()) suffix : str Suffix to add to the `basename`. (defaults is '' ) change_ext : bool Flag to change the filename extension to the given `ext`. (Default is False) Returns ------- fname : str New filename based on given parameters. 
""" if basename == "": msg = "Unable to generate filename for command %s. " % self.cmd msg += "basename is not set!" raise ValueError(msg) if cwd is None: cwd = os.getcwd() if change_ext: if suffix: suffix = "".join((suffix, ext)) else: suffix = ext if suffix is None: suffix = "" fname = fname_presuffix(basename, suffix=suffix, use_ext=False, newpath=cwd) return fname def _gen_filename(self, name): if name == "out_file": return self._list_outputs()["out_file"] return None nipype-1.7.0/nipype/interfaces/quickshear.py000066400000000000000000000061011413403311400211530ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Quickshear is a simple geometric defacing algorithm.""" from .base import CommandLineInputSpec, CommandLine, traits, TraitedSpec, File from ..external.due import BibTeX class QuickshearInputSpec(CommandLineInputSpec): in_file = File( exists=True, position=1, argstr="%s", mandatory=True, desc="neuroimage to deface", ) mask_file = File( exists=True, position=2, argstr="%s", desc="brain mask", mandatory=True ) out_file = File( name_template="%s_defaced", name_source="in_file", position=3, argstr="%s", desc="defaced output image", keep_extension=True, ) buff = traits.Int( position=4, argstr="%d", desc="buffer size (in voxels) between shearing " "plane and the brain", ) class QuickshearOutputSpec(TraitedSpec): out_file = File(exists=True, desc="defaced output image") class Quickshear(CommandLine): """ Quickshear is a simple geometric defacing algorithm Given an anatomical image and a reasonable brainmask, Quickshear estimates a shearing plane with the brain mask on one side and the face on the other, zeroing out the face side. 
>>> from nipype.interfaces.quickshear import Quickshear >>> qs = Quickshear(in_file='T1.nii', mask_file='brain_mask.nii') >>> qs.cmdline 'quickshear T1.nii brain_mask.nii T1_defaced.nii' In the absence of a precomputed mask, a simple pipeline can be generated with any tool that generates brain masks: >>> from nipype.pipeline import engine as pe >>> from nipype.interfaces import utility as niu >>> from nipype.interfaces.fsl import BET >>> deface_wf = pe.Workflow('deface_wf') >>> inputnode = pe.Node(niu.IdentityInterface(['in_file']), ... name='inputnode') >>> outputnode = pe.Node(niu.IdentityInterface(['out_file']), ... name='outputnode') >>> bet = pe.Node(BET(mask=True), name='bet') >>> quickshear = pe.Node(Quickshear(), name='quickshear') >>> deface_wf.connect([ ... (inputnode, bet, [('in_file', 'in_file')]), ... (inputnode, quickshear, [('in_file', 'in_file')]), ... (bet, quickshear, [('mask_file', 'mask_file')]), ... (quickshear, outputnode, [('out_file', 'out_file')]), ... ]) >>> inputnode.inputs.in_file = 'T1.nii' >>> res = deface_wf.run() # doctest: +SKIP """ _cmd = "quickshear" input_spec = QuickshearInputSpec output_spec = QuickshearOutputSpec _references = [ { "entry": BibTeX( "@inproceedings{Schimke2011," "address = {San Francisco}," "author = {Schimke, Nakeisha and Hale, John}," "booktitle = {Proceedings of the 2nd USENIX Conference on " "Health Security and Privacy}," "title = {{Quickshear Defacing for Neuroimages}}," "year = {2011}," "month = sep}" ), "tags": ["implementation"], } ] nipype-1.7.0/nipype/interfaces/r.py000066400000000000000000000066351413403311400172710ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Interfaces to run R scripts.""" import os from shutil import which from .. 
import config from .base import ( CommandLineInputSpec, InputMultiPath, isdefined, CommandLine, traits, File, Directory, ) def get_r_command(): if "NIPYPE_NO_R" in os.environ: return None r_cmd = os.getenv("RCMD", default="R") return r_cmd if which(r_cmd) else None no_r = get_r_command() is None class RInputSpec(CommandLineInputSpec): """Basic expected inputs to R interface""" script = traits.Str( argstr='-e "%s"', desc="R code to run", mandatory=True, position=-1 ) # non-commandline options rfile = traits.Bool(True, desc="Run R using R script", usedefault=True) script_file = File( "pyscript.R", usedefault=True, desc="Name of file to write R code to" ) class RCommand(CommandLine): """Interface that runs R code >>> import nipype.interfaces.r as r >>> r = r.RCommand(rfile=False) # doctest: +SKIP >>> r.inputs.script = "Sys.getenv('USER')" # doctest: +SKIP >>> out = r.run() # doctest: +SKIP """ _cmd = get_r_command() input_spec = RInputSpec def __init__(self, r_cmd=None, **inputs): """initializes interface to r (default 'R') """ super(RCommand, self).__init__(**inputs) if r_cmd and isdefined(r_cmd): self._cmd = r_cmd # For r commands force all output to be returned since r # does not have a clean way of notifying an error self.terminal_output = "allatonce" def set_default_r_cmd(self, r_cmd): """Set the default R command line for R classes. This method is used to set values for all R subclasses. """ self._cmd = r_cmd def set_default_rfile(self, rfile): """Set the default R script file format for R classes. This method is used to set values for all R subclasses. 
""" self._rfile = rfile def _run_interface(self, runtime): self.terminal_output = "allatonce" runtime = super(RCommand, self)._run_interface(runtime) if "R code threw an exception" in runtime.stderr: self.raise_exception(runtime) return runtime def _format_arg(self, name, trait_spec, value): if name in ["script"]: argstr = trait_spec.argstr return self._gen_r_command(argstr, value) return super(RCommand, self)._format_arg(name, trait_spec, value) def _gen_r_command(self, argstr, script_lines): """Generates commands and, if rfile specified, writes it to disk.""" if not self.inputs.rfile: # replace newlines with ;, strip comments script = "; ".join( [ line for line in script_lines.split("\n") if not line.strip().startswith("#") ] ) # escape " and $ script = script.replace('"', '\\"') script = script.replace("$", "\\$") else: script_path = os.path.join(os.getcwd(), self.inputs.script_file) with open(script_path, "wt") as rfile: rfile.write(script_lines) script = "source('%s')" % script_path return argstr % script nipype-1.7.0/nipype/interfaces/semtools/000077500000000000000000000000001413403311400203115ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/__init__.py000066400000000000000000000006301413403311400224210ustar00rootroot00000000000000# -*- coding: utf-8 -*- """SEM Tools are useful tools for Structural Equation Modeling.""" from .diffusion import * from .featurecreator import GenerateCsfClippedFromClassifiedImage from .segmentation import * from .filtering import * from .brains import * from .testing import * from .utilities import * from .legacy import * from .registration import * from .converters import DWISimpleCompare, DWICompare nipype-1.7.0/nipype/interfaces/semtools/brains/000077500000000000000000000000001413403311400215675ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/brains/__init__.py000066400000000000000000000004131413403311400236760ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .segmentation import 
SimilarityIndex, BRAINSTalairach, BRAINSTalairachMask from .utilities import ( HistogramMatchingFilter, GenerateEdgeMapImage, GeneratePurePlugMask, ) from .classify import BRAINSPosteriorToContinuousClass nipype-1.7.0/nipype/interfaces/semtools/brains/classify.py000066400000000000000000000050751413403311400237650ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ...base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class BRAINSPosteriorToContinuousClassInputSpec(CommandLineInputSpec): inputWhiteVolume = File( desc="White Matter Posterior Volume", exists=True, argstr="--inputWhiteVolume %s", ) inputBasalGmVolume = File( desc="Basal Grey Matter Posterior Volume", exists=True, argstr="--inputBasalGmVolume %s", ) inputSurfaceGmVolume = File( desc="Surface Grey Matter Posterior Volume", exists=True, argstr="--inputSurfaceGmVolume %s", ) inputCsfVolume = File( desc="CSF Posterior Volume", exists=True, argstr="--inputCsfVolume %s" ) inputVbVolume = File( desc="Venous Blood Posterior Volume", exists=True, argstr="--inputVbVolume %s" ) inputCrblGmVolume = File( desc="Cerebellum Grey Matter Posterior Volume", exists=True, argstr="--inputCrblGmVolume %s", ) inputCrblWmVolume = File( desc="Cerebellum White Matter Posterior Volume", exists=True, argstr="--inputCrblWmVolume %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Continuous Tissue Classified Image", argstr="--outputVolume %s", ) class BRAINSPosteriorToContinuousClassOutputSpec(TraitedSpec): outputVolume = File(desc="Output Continuous Tissue Classified Image", exists=True) class BRAINSPosteriorToContinuousClass(SEMLikeCommandLine): """title: Tissue Classification category: BRAINS.Classify description: This program will 
generate an 8-bit continuous tissue classified image based on BRAINSABC posterior images. version: 3.0 documentation-url: http://www.nitrc.org/plugins/mwiki/index.php/brains:BRAINSClassify license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Vincent A. Magnotta acknowledgements: Funding for this work was provided by NIH/NINDS award NS050568 """ input_spec = BRAINSPosteriorToContinuousClassInputSpec output_spec = BRAINSPosteriorToContinuousClassOutputSpec _cmd = " BRAINSPosteriorToContinuousClass " _outputs_filenames = {"outputVolume": "outputVolume"} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/brains/segmentation.py000066400000000000000000000150511413403311400246400ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ...base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class SimilarityIndexInputSpec(CommandLineInputSpec): outputCSVFilename = File( desc="output CSV Filename", exists=True, argstr="--outputCSVFilename %s" ) ANNContinuousVolume = File( desc="ANN Continuous volume to be compared to the manual volume", exists=True, argstr="--ANNContinuousVolume %s", ) inputManualVolume = File( desc="input manual(reference) volume", exists=True, argstr="--inputManualVolume %s", ) thresholdInterval = traits.Float( desc="Threshold interval to compute similarity index between zero and one", argstr="--thresholdInterval %f", ) class SimilarityIndexOutputSpec(TraitedSpec): pass class SimilarityIndex(SEMLikeCommandLine): """title: BRAINSCut:SimilarityIndexComputation category: BRAINS.Segmentation description: Automatic analysis of BRAINSCut Output version: 1.0 license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Eunyoung Regin Kim """ 
input_spec = SimilarityIndexInputSpec output_spec = SimilarityIndexOutputSpec _cmd = " SimilarityIndex " _outputs_filenames = {} _redirect_x = False class BRAINSTalairachInputSpec(CommandLineInputSpec): AC = InputMultiPath( traits.Float, desc="Location of AC Point ", sep=",", argstr="--AC %s" ) ACisIndex = traits.Bool(desc="AC Point is Index", argstr="--ACisIndex ") PC = InputMultiPath( traits.Float, desc="Location of PC Point ", sep=",", argstr="--PC %s" ) PCisIndex = traits.Bool(desc="PC Point is Index", argstr="--PCisIndex ") SLA = InputMultiPath( traits.Float, desc="Location of SLA Point ", sep=",", argstr="--SLA %s" ) SLAisIndex = traits.Bool(desc="SLA Point is Index", argstr="--SLAisIndex ") IRP = InputMultiPath( traits.Float, desc="Location of IRP Point ", sep=",", argstr="--IRP %s" ) IRPisIndex = traits.Bool(desc="IRP Point is Index", argstr="--IRPisIndex ") inputVolume = File( desc="Input image used to define physical space of images", exists=True, argstr="--inputVolume %s", ) outputBox = traits.Either( traits.Bool, File(), hash_files=False, desc="Name of the resulting Talairach Bounding Box file", argstr="--outputBox %s", ) outputGrid = traits.Either( traits.Bool, File(), hash_files=False, desc="Name of the resulting Talairach Grid file", argstr="--outputGrid %s", ) class BRAINSTalairachOutputSpec(TraitedSpec): outputBox = File( desc="Name of the resulting Talairach Bounding Box file", exists=True ) outputGrid = File(desc="Name of the resulting Talairach Grid file", exists=True) class BRAINSTalairach(SEMLikeCommandLine): """title: BRAINS Talairach category: BRAINS.Segmentation description: This program creates a VTK structured grid defining the Talairach coordinate system based on four points: AC, PC, IRP, and SLA. The resulting structred grid can be written as either a classic VTK file or the new VTK XML file format. Two representations of the resulting grid can be written. 
The first is a bounding box representation that also contains the location of the AC and PC points. The second representation is the full Talairach grid representation that includes the additional rows of boxes added to the inferior allowing full coverage of the cerebellum. version: 0.1 documentation-url: http://www.nitrc.org/plugins/mwiki/index.php/brains:BRAINSTalairach license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Steven Dunn and Vincent Magnotta acknowledgements: Funding for this work was provided by NIH/NINDS award NS050568 """ input_spec = BRAINSTalairachInputSpec output_spec = BRAINSTalairachOutputSpec _cmd = " BRAINSTalairach " _outputs_filenames = {"outputGrid": "outputGrid", "outputBox": "outputBox"} _redirect_x = False class BRAINSTalairachMaskInputSpec(CommandLineInputSpec): inputVolume = File( desc="Input image used to define physical space of resulting mask", exists=True, argstr="--inputVolume %s", ) talairachParameters = File( desc="Name of the Talairach parameter file.", exists=True, argstr="--talairachParameters %s", ) talairachBox = File( desc="Name of the Talairach box file.", exists=True, argstr="--talairachBox %s" ) hemisphereMode = traits.Enum( "left", "right", "both", desc="Mode for box creation: left, right, both", argstr="--hemisphereMode %s", ) expand = traits.Bool( desc="Expand exterior box to include surface CSF", argstr="--expand " ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output filename for the resulting binary image", argstr="--outputVolume %s", ) class BRAINSTalairachMaskOutputSpec(TraitedSpec): outputVolume = File( desc="Output filename for the resulting binary image", exists=True ) class BRAINSTalairachMask(SEMLikeCommandLine): """title: Talairach Mask category: BRAINS.Segmentation description: This program creates a binary image representing the specified Talairach region. 
The input is an example image to define the physical space for the resulting image, the Talairach grid representation in VTK format, and the file containing the Talairach box definitions to be generated. These can be combined in BRAINS to create a label map using the procedure Brains::WorkupUtils::CreateLabelMapFromBinaryImages. version: 0.1 documentation-url: http://www.nitrc.org/plugins/mwiki/index.php/brains:BRAINSTalairachMask license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Steven Dunn and Vincent Magnotta acknowledgements: Funding for this work was provided by NIH/NINDS award NS050568 """ input_spec = BRAINSTalairachMaskInputSpec output_spec = BRAINSTalairachMaskOutputSpec _cmd = " BRAINSTalairachMask " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/brains/tests/000077500000000000000000000000001413403311400227315ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/brains/tests/__init__.py000066400000000000000000000000301413403311400250330ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSPosteriorToContinuousClass.py000066400000000000000000000035161413403311400336440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..classify import BRAINSPosteriorToContinuousClass def test_BRAINSPosteriorToContinuousClass_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputBasalGmVolume=dict( argstr="--inputBasalGmVolume %s", extensions=None, ), inputCrblGmVolume=dict( argstr="--inputCrblGmVolume %s", extensions=None, ), inputCrblWmVolume=dict( argstr="--inputCrblWmVolume %s", extensions=None, ), inputCsfVolume=dict( argstr="--inputCsfVolume %s", extensions=None, ), inputSurfaceGmVolume=dict( argstr="--inputSurfaceGmVolume %s", extensions=None, ), inputVbVolume=dict( argstr="--inputVbVolume 
%s", extensions=None, ), inputWhiteVolume=dict( argstr="--inputWhiteVolume %s", extensions=None, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = BRAINSPosteriorToContinuousClass.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSPosteriorToContinuousClass_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = BRAINSPosteriorToContinuousClass.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairach.py000066400000000000000000000034671413403311400301730ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..segmentation import BRAINSTalairach def test_BRAINSTalairach_inputs(): input_map = dict( AC=dict( argstr="--AC %s", sep=",", ), ACisIndex=dict( argstr="--ACisIndex ", ), IRP=dict( argstr="--IRP %s", sep=",", ), IRPisIndex=dict( argstr="--IRPisIndex ", ), PC=dict( argstr="--PC %s", sep=",", ), PCisIndex=dict( argstr="--PCisIndex ", ), SLA=dict( argstr="--SLA %s", sep=",", ), SLAisIndex=dict( argstr="--SLAisIndex ", ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), outputBox=dict( argstr="--outputBox %s", hash_files=False, ), outputGrid=dict( argstr="--outputGrid %s", hash_files=False, ), ) inputs = BRAINSTalairach.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSTalairach_outputs(): output_map = dict( outputBox=dict( extensions=None, ), outputGrid=dict( extensions=None, ), ) outputs = BRAINSTalairach.output_spec() for key, metadata in 
list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/brains/tests/test_auto_BRAINSTalairachMask.py000066400000000000000000000027051413403311400310010ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..segmentation import BRAINSTalairachMask def test_BRAINSTalairachMask_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), expand=dict( argstr="--expand ", ), hemisphereMode=dict( argstr="--hemisphereMode %s", ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), talairachBox=dict( argstr="--talairachBox %s", extensions=None, ), talairachParameters=dict( argstr="--talairachParameters %s", extensions=None, ), ) inputs = BRAINSTalairachMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSTalairachMask_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = BRAINSTalairachMask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/brains/tests/test_auto_GenerateEdgeMapImage.py000066400000000000000000000035161413403311400313170ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utilities import GenerateEdgeMapImage def test_GenerateEdgeMapImage_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputMRVolumes=dict( argstr="--inputMRVolumes %s...", ), inputMask=dict( argstr="--inputMask %s", extensions=None, ), lowerPercentileMatching=dict( argstr="--lowerPercentileMatching %f", ), maximumOutputRange=dict( 
argstr="--maximumOutputRange %d", ), minimumOutputRange=dict( argstr="--minimumOutputRange %d", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputEdgeMap=dict( argstr="--outputEdgeMap %s", hash_files=False, ), outputMaximumGradientImage=dict( argstr="--outputMaximumGradientImage %s", hash_files=False, ), upperPercentileMatching=dict( argstr="--upperPercentileMatching %f", ), ) inputs = GenerateEdgeMapImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GenerateEdgeMapImage_outputs(): output_map = dict( outputEdgeMap=dict( extensions=None, ), outputMaximumGradientImage=dict( extensions=None, ), ) outputs = GenerateEdgeMapImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/brains/tests/test_auto_GeneratePurePlugMask.py000066400000000000000000000024061413403311400314260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utilities import GeneratePurePlugMask def test_GeneratePurePlugMask_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputImageModalities=dict( argstr="--inputImageModalities %s...", ), numberOfSubSamples=dict( argstr="--numberOfSubSamples %s", sep=",", ), outputMaskFile=dict( argstr="--outputMaskFile %s", hash_files=False, ), threshold=dict( argstr="--threshold %f", ), ) inputs = GeneratePurePlugMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GeneratePurePlugMask_outputs(): output_map = dict( outputMaskFile=dict( extensions=None, ), ) outputs = GeneratePurePlugMask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/brains/tests/test_auto_HistogramMatchingFilter.py000066400000000000000000000035551413403311400321600ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utilities import HistogramMatchingFilter def test_HistogramMatchingFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), histogramAlgorithm=dict( argstr="--histogramAlgorithm %s", ), inputBinaryVolume=dict( argstr="--inputBinaryVolume %s", extensions=None, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), numberOfHistogramBins=dict( argstr="--numberOfHistogramBins %d", ), numberOfMatchPoints=dict( argstr="--numberOfMatchPoints %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), referenceBinaryVolume=dict( argstr="--referenceBinaryVolume %s", extensions=None, ), referenceVolume=dict( argstr="--referenceVolume %s", extensions=None, ), verbose=dict( argstr="--verbose ", ), writeHistogram=dict( argstr="--writeHistogram %s", ), ) inputs = HistogramMatchingFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_HistogramMatchingFilter_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = HistogramMatchingFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/brains/tests/test_auto_SimilarityIndex.py000066400000000000000000000023311413403311400305070ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..segmentation import SimilarityIndex def test_SimilarityIndex_inputs(): input_map = dict( ANNContinuousVolume=dict( 
argstr="--ANNContinuousVolume %s", extensions=None, ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputManualVolume=dict( argstr="--inputManualVolume %s", extensions=None, ), outputCSVFilename=dict( argstr="--outputCSVFilename %s", extensions=None, ), thresholdInterval=dict( argstr="--thresholdInterval %f", ), ) inputs = SimilarityIndex.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SimilarityIndex_outputs(): output_map = dict() outputs = SimilarityIndex.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/brains/utilities.py000066400000000000000000000146571413403311400241710ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ...base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class HistogramMatchingFilterInputSpec(CommandLineInputSpec): inputVolume = File( desc="The Input image to be computed for statistics", exists=True, argstr="--inputVolume %s", ) referenceVolume = File( desc="The Input image to be computed for statistics", exists=True, argstr="--referenceVolume %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Image File Name", argstr="--outputVolume %s", ) referenceBinaryVolume = File( desc="referenceBinaryVolume", exists=True, argstr="--referenceBinaryVolume %s" ) inputBinaryVolume = File( desc="inputBinaryVolume", exists=True, argstr="--inputBinaryVolume %s" ) numberOfMatchPoints = traits.Int( desc=" number of histogram matching points", 
argstr="--numberOfMatchPoints %d" ) numberOfHistogramBins = traits.Int( desc=" number of histogram bin", argstr="--numberOfHistogramBins %d" ) writeHistogram = traits.Str( desc=" decide if histogram data would be written with prefixe of the file name", argstr="--writeHistogram %s", ) histogramAlgorithm = traits.Enum( "OtsuHistogramMatching", desc=" histogram algrithm selection", argstr="--histogramAlgorithm %s", ) verbose = traits.Bool( desc=" verbose mode running for debbuging", argstr="--verbose " ) class HistogramMatchingFilterOutputSpec(TraitedSpec): outputVolume = File(desc="Output Image File Name", exists=True) class HistogramMatchingFilter(SEMLikeCommandLine): """title: Write Out Image Intensities category: BRAINS.Utilities description: For Analysis version: 0.1 contributor: University of Iowa Department of Psychiatry, http:://www.psychiatry.uiowa.edu """ input_spec = HistogramMatchingFilterInputSpec output_spec = HistogramMatchingFilterOutputSpec _cmd = " HistogramMatchingFilter " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class GenerateEdgeMapImageInputSpec(CommandLineInputSpec): inputMRVolumes = InputMultiPath( File(exists=True), desc="List of input structural MR volumes to create the maximum edgemap", argstr="--inputMRVolumes %s...", ) inputMask = File( desc="Input mask file name. If set, image histogram percentiles will be calculated within the mask", exists=True, argstr="--inputMask %s", ) minimumOutputRange = traits.Int( desc="Map lower quantile and below to minimum output range. It should be a small number greater than zero. Default is 1", argstr="--minimumOutputRange %d", ) maximumOutputRange = traits.Int( desc="Map upper quantile and above to maximum output range. Default is 255 that is the maximum range of unsigned char", argstr="--maximumOutputRange %d", ) lowerPercentileMatching = traits.Float( desc="Map lower quantile and below to minOutputRange. 
It should be a value between zero and one", argstr="--lowerPercentileMatching %f", ) upperPercentileMatching = traits.Float( desc="Map upper quantile and above to maxOutputRange. It should be a value between zero and one", argstr="--upperPercentileMatching %f", ) outputEdgeMap = traits.Either( traits.Bool, File(), hash_files=False, desc="output edgemap file name", argstr="--outputEdgeMap %s", ) outputMaximumGradientImage = traits.Either( traits.Bool, File(), hash_files=False, desc="output gradient image file name", argstr="--outputMaximumGradientImage %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class GenerateEdgeMapImageOutputSpec(TraitedSpec): outputEdgeMap = File(desc="(required) output file name", exists=True) outputMaximumGradientImage = File( desc="output gradient image file name", exists=True ) class GenerateEdgeMapImage(SEMLikeCommandLine): """title: GenerateEdgeMapImage category: BRAINS.Utilities description: Automatic edgemap generation for edge-guided super-resolution reconstruction version: 1.0 contributor: Ali Ghayoor """ input_spec = GenerateEdgeMapImageInputSpec output_spec = GenerateEdgeMapImageOutputSpec _cmd = " GenerateEdgeMapImage " _outputs_filenames = { "outputEdgeMap": "outputEdgeMap", "outputMaximumGradientImage": "outputMaximumGradientImage", } _redirect_x = False class GeneratePurePlugMaskInputSpec(CommandLineInputSpec): inputImageModalities = InputMultiPath( File(exists=True), desc="List of input image file names to create pure plugs mask", argstr="--inputImageModalities %s...", ) threshold = traits.Float( desc="threshold value to define class membership", argstr="--threshold %f" ) numberOfSubSamples = InputMultiPath( traits.Int, desc="Number of continous index samples taken at each direction of lattice space for each plug volume", sep=",", argstr="--numberOfSubSamples %s", ) outputMaskFile = traits.Either( traits.Bool, File(), hash_files=False, 
desc="Output binary mask file name", argstr="--outputMaskFile %s", ) class GeneratePurePlugMaskOutputSpec(TraitedSpec): outputMaskFile = File(desc="(required) Output binary mask file name", exists=True) class GeneratePurePlugMask(SEMLikeCommandLine): """title: GeneratePurePlugMask category: BRAINS.Utilities description: This program gets several modality image files and returns a binary mask that defines the pure plugs version: 1.0 contributor: Ali Ghayoor """ input_spec = GeneratePurePlugMaskInputSpec output_spec = GeneratePurePlugMaskOutputSpec _cmd = " GeneratePurePlugMask " _outputs_filenames = {"outputMaskFile": "outputMaskFile"} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/converters.py000066400000000000000000000066411413403311400230640ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ..base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class DWISimpleCompareInputSpec(CommandLineInputSpec): inputVolume1 = File( desc="First input volume (.nhdr or .nrrd)", exists=True, argstr="--inputVolume1 %s", ) inputVolume2 = File( desc="Second input volume (.nhdr or .nrrd)", exists=True, argstr="--inputVolume2 %s", ) checkDWIData = traits.Bool( desc="check for existence of DWI data, and if present, compare it", argstr="--checkDWIData ", ) class DWISimpleCompareOutputSpec(TraitedSpec): pass class DWISimpleCompare(SEMLikeCommandLine): """title: Nrrd DWI comparison category: Converters description: Compares two nrrd format DWI images and verifies that gradient magnitudes, gradient directions, measurement frame, and max B0 value are identicle. Used for testing DWIConvert. 
version: 0.1.0.$Revision: 916 $(alpha) documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DWIConvert license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Mark Scully (UIowa) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Additional support for DTI data produced on Philips scanners was contributed by Vincent Magnotta and Hans Johnson at the University of Iowa. """ input_spec = DWISimpleCompareInputSpec output_spec = DWISimpleCompareOutputSpec _cmd = " DWISimpleCompare " _outputs_filenames = {} _redirect_x = False class DWICompareInputSpec(CommandLineInputSpec): inputVolume1 = File( desc="First input volume (.nhdr or .nrrd)", exists=True, argstr="--inputVolume1 %s", ) inputVolume2 = File( desc="Second input volume (.nhdr or .nrrd)", exists=True, argstr="--inputVolume2 %s", ) class DWICompareOutputSpec(TraitedSpec): pass class DWICompare(SEMLikeCommandLine): """title: Nrrd DWI comparison category: Converters description: Compares two nrrd format DWI images and verifies that gradient magnitudes, gradient directions, measurement frame, and max B0 value are identicle. Used for testing DWIConvert. version: 0.1.0.$Revision: 916 $(alpha) documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DWIConvert license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Mark Scully (UIowa) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Additional support for DTI data produced on Philips scanners was contributed by Vincent Magnotta and Hans Johnson at the University of Iowa. 
""" input_spec = DWICompareInputSpec output_spec = DWICompareOutputSpec _cmd = " DWICompare " _outputs_filenames = {} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/diffusion/000077500000000000000000000000001413403311400222775ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/diffusion/__init__.py000066400000000000000000000015241413403311400244120ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .diffusion import dtiaverage, dtiestim, dtiprocess, DWIConvert from .tractography import * from .gtract import ( gtractTransformToDisplacementField, gtractInvertBSplineTransform, gtractConcatDwi, gtractAverageBvalues, gtractCoregBvalues, gtractResampleAnisotropy, gtractResampleCodeImage, gtractCopyImageOrientation, gtractCreateGuideFiber, gtractAnisotropyMap, gtractClipAnisotropy, gtractResampleB0, gtractInvertRigidTransform, gtractImageConformity, compareTractInclusion, gtractFastMarchingTracking, gtractInvertDisplacementField, gtractCoRegAnatomy, gtractResampleDWIInPlace, gtractCostFastMarching, gtractFiberTracking, extractNrrdVectorIndex, gtractResampleFibers, gtractTensor, ) from .maxcurvature import maxcurvature nipype-1.7.0/nipype/interfaces/semtools/diffusion/diffusion.py000066400000000000000000000712261413403311400246470ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ...base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class dtiaverageInputSpec(CommandLineInputSpec): inputs = InputMultiPath( File(exists=True), desc="List of all the tensor fields to be averaged", argstr="--inputs %s...", ) tensor_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Averaged tensor volume", argstr="--tensor_output %s", ) DTI_double = traits.Bool( desc="Tensor 
components are saved as doubles (cannot be visualized in Slicer)", argstr="--DTI_double ", ) verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") class dtiaverageOutputSpec(TraitedSpec): tensor_output = File(desc="Averaged tensor volume", exists=True) class dtiaverage(SEMLikeCommandLine): """title: DTIAverage (DTIProcess) category: Diffusion.Diffusion Tensor Images.CommandLineOnly description: dtiaverage is a program that allows to compute the average of an arbitrary number of tensor fields (listed after the --inputs option) This program is used in our pipeline as the last step of the atlas building processing. When all the tensor fields have been deformed in the same space, to create the average tensor field (--tensor_output) we use dtiaverage. Several average method can be used (specified by the --method option): euclidian, log-euclidian and pga. The default being euclidian. version: 1.0.0 documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess license: Copyright (c) Casey Goodlett. All rights reserved. See http://www.ia.unc.edu/dev/Copyright.htm for details. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the above copyright notices for more information. 
contributor: Casey Goodlett """ input_spec = dtiaverageInputSpec output_spec = dtiaverageOutputSpec _cmd = " dtiaverage " _outputs_filenames = {"tensor_output": "tensor_output.nii"} _redirect_x = False class dtiestimInputSpec(CommandLineInputSpec): dwi_image = File( desc="DWI image volume (required)", exists=True, argstr="--dwi_image %s" ) tensor_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Tensor OutputImage", argstr="--tensor_output %s", ) B0 = traits.Either( traits.Bool, File(), hash_files=False, desc="Baseline image, average of all baseline images", argstr="--B0 %s", ) idwi = traits.Either( traits.Bool, File(), hash_files=False, desc="idwi output image. Image with isotropic diffusion-weighted information = geometric mean of diffusion images", argstr="--idwi %s", ) B0_mask_output = traits.Either( traits.Bool, File(), hash_files=False, desc="B0 mask used for the estimation. B0 thresholded either with the -t option value or the automatic OTSU value", argstr="--B0_mask_output %s", ) brain_mask = File( desc="Brain mask. Image where for every voxel == 0 the tensors are not estimated. Be aware that in addition a threshold based masking will be performed by default. If such an additional threshold masking is NOT desired, then use option -t 0.", exists=True, argstr="--brain_mask %s", ) bad_region_mask = File( desc="Bad region mask. Image where for every voxel > 0 the tensors are not estimated", exists=True, argstr="--bad_region_mask %s", ) method = traits.Enum( "lls", "wls", "nls", "ml", desc="Esitmation method (lls:linear least squares, wls:weighted least squares, nls:non-linear least squares, ml:maximum likelihood)", argstr="--method %s", ) correction = traits.Enum( "none", "zero", "abs", "nearest", desc="Correct the tensors if computed tensor is not semi-definite positive", argstr="--correction %s", ) threshold = traits.Int( desc="Baseline threshold for estimation. 
If not specified calculated using an OTSU threshold on the baseline image.", argstr="--threshold %d", ) weight_iterations = traits.Int( desc="Number of iterations to recaluate weightings from tensor estimate", argstr="--weight_iterations %d", ) step = traits.Float( desc="Gradient descent step size (for nls and ml methods)", argstr="--step %f" ) sigma = traits.Float(argstr="--sigma %f") DTI_double = traits.Bool( desc="Tensor components are saved as doubles (cannot be visualized in Slicer)", argstr="--DTI_double ", ) verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") defaultTensor = InputMultiPath( traits.Float, desc="Default tensor used if estimated tensor is below a given threshold", sep=",", argstr="--defaultTensor %s", ) shiftNeg = traits.Bool( desc="Shift eigenvalues so all are positive (accounts for bad tensors related to noise or acquisition error). This is the same option as the one available in DWIToDTIEstimation in Slicer (but instead of just adding the minimum eigenvalue to all the eigenvalues if it is smaller than 0, we use a coefficient to have stictly positive eigenvalues", argstr="--shiftNeg ", ) shiftNegCoeff = traits.Float( desc="Shift eigenvalues so all are positive (accounts for bad tensors related to noise or acquisition error). Instead of just adding the minimum eigenvalue to all the eigenvalues if it is smaller than 0, we use a coefficient to have stictly positive eigenvalues. Coefficient must be between 1.0 and 1.001 (included).", argstr="--shiftNegCoeff %f", ) class dtiestimOutputSpec(TraitedSpec): tensor_output = File(desc="Tensor OutputImage", exists=True) B0 = File(desc="Baseline image, average of all baseline images", exists=True) idwi = File( desc="idwi output image. Image with isotropic diffusion-weighted information = geometric mean of diffusion images", exists=True, ) B0_mask_output = File( desc="B0 mask used for the estimation. 
B0 thresholded either with the -t option value or the automatic OTSU value", exists=True, ) class dtiestim(SEMLikeCommandLine): """title: DTIEstim (DTIProcess) category: Diffusion.Diffusion Weighted Images description: dtiestim is a tool that takes in a set of DWIs (with --dwi_image option) in nrrd format and estimates a tensor field out of it. The output tensor file name is specified with the --tensor_output option There are several methods to estimate the tensors which you can specify with the option --method lls|wls|nls|ml . Here is a short description of the different methods: lls Linear least squares. Standard estimation technique that recovers the tensor parameters by multiplying the log of the normalized signal intensities by the pseudo-inverse of the gradient matrix. Default option. wls Weighted least squares. This method is similar to the linear least squares method except that the gradient matrix is weighted by the original lls estimate. (See Salvador, R., Pena, A., Menon, D. K., Carpenter, T. A., Pickard, J. D., and Bullmore, E. T. Formal characterization and extension of the linearized diffusion tensor model. Human Brain Mapping 24, 2 (Feb. 2005), 144-155. for more information on this method). This method is recommended for most applications. The weight for each iteration can be specified with the --weight_iterations. It is not currently the default due to occasional matrix singularities. nls Non-linear least squares. This method does not take the log of the signal and requires an optimization based on levenberg-marquadt to optimize the parameters of the signal. The lls estimate is used as an initialization. For this method the step size can be specified with the --step option. ml Maximum likelihood estimation. This method is experimental and is not currently recommended. For this ml method the sigma can be specified with the option --sigma and the step size can be specified with the --step option. 
You can set a threshold (--threshold) to have the tensor estimated to only a subset of voxels. All the baseline voxel value higher than the threshold define the voxels where the tensors are computed. If not specified the threshold is calculated using an OTSU threshold on the baseline image.The masked generated by the -t option or by the otsu value can be saved with the --B0_mask_output option. dtiestim also can extract a few scalar images out of the DWI set of images: - the average baseline image (--B0) which is the average of all the B0s. - the IDWI (--idwi)which is the geometric mean of the diffusion images. You can also load a mask if you want to compute the tensors only where the voxels are non-zero (--brain_mask) or a negative mask and the tensors will be estimated where the negative mask has zero values (--bad_region_mask) version: 1.2.0 documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess license: Copyright (c) Casey Goodlett. All rights reserved. See http://www.ia.unc.edu/dev/Copyright.htm for details. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the above copyright notices for more information. contributor: Casey Goodlett, Francois Budin acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependancies on boost and a fortran compiler. 
""" input_spec = dtiestimInputSpec output_spec = dtiestimOutputSpec _cmd = " dtiestim " _outputs_filenames = { "B0": "B0.nii", "idwi": "idwi.nii", "tensor_output": "tensor_output.nii", "B0_mask_output": "B0_mask_output.nii", } _redirect_x = False class dtiprocessInputSpec(CommandLineInputSpec): dti_image = File(desc="DTI tensor volume", exists=True, argstr="--dti_image %s") fa_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Fractional Anisotropy output file", argstr="--fa_output %s", ) md_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Mean Diffusivity output file", argstr="--md_output %s", ) sigma = traits.Float(desc="Scale of gradients", argstr="--sigma %f") fa_gradient_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Fractional Anisotropy Gradient output file", argstr="--fa_gradient_output %s", ) fa_gradmag_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Fractional Anisotropy Gradient Magnitude output file", argstr="--fa_gradmag_output %s", ) color_fa_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Color Fractional Anisotropy output file", argstr="--color_fa_output %s", ) principal_eigenvector_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Principal Eigenvectors Output", argstr="--principal_eigenvector_output %s", ) negative_eigenvector_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Negative Eigenvectors Output: create a binary image where if any of the eigen value is below zero, the voxel is set to 1, otherwise 0.", argstr="--negative_eigenvector_output %s", ) frobenius_norm_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Frobenius Norm Output", argstr="--frobenius_norm_output %s", ) lambda1_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Axial Diffusivity - Lambda 1 (largest eigenvalue) output", argstr="--lambda1_output %s", ) lambda2_output = traits.Either( traits.Bool, 
File(), hash_files=False, desc="Lambda 2 (middle eigenvalue) output", argstr="--lambda2_output %s", ) lambda3_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Lambda 3 (smallest eigenvalue) output", argstr="--lambda3_output %s", ) RD_output = traits.Either( traits.Bool, File(), hash_files=False, desc="RD (Radial Diffusivity 1/2*(lambda2+lambda3)) output", argstr="--RD_output %s", ) rot_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Rotated tensor output file. Must also specify the dof file.", argstr="--rot_output %s", ) affineitk_file = File( desc="Transformation file for affine transformation. ITK format.", exists=True, argstr="--affineitk_file %s", ) dof_file = File( desc="Transformation file for affine transformation. This can be ITK format (or the outdated RView).", exists=True, argstr="--dof_file %s", ) newdof_file = File( desc="Transformation file for affine transformation. RView NEW format. (txt file output of dof2mat)", exists=True, argstr="--newdof_file %s", ) mask = File( desc="Mask tensors. Specify --outmask if you want to save the masked tensor field, otherwise the mask is applied just for the current processing ", exists=True, argstr="--mask %s", ) outmask = traits.Either( traits.Bool, File(), hash_files=False, desc="Name of the masked tensor field.", argstr="--outmask %s", ) hField = traits.Bool( desc="forward and inverse transformations are h-fields instead of displacement fields", argstr="--hField ", ) forward = File( desc="Forward transformation. Assumed to be a deformation field in world coordinates, unless the --h-field option is specified.", exists=True, argstr="--forward %s", ) deformation_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Warped tensor field based on a deformation field. 
This option requires the --forward,-F transformation to be specified.", argstr="--deformation_output %s", ) interpolation = traits.Enum( "nearestneighbor", "linear", "cubic", desc="Interpolation type (nearestneighbor, linear, cubic)", argstr="--interpolation %s", ) reorientation = traits.Enum( "fs", "ppd", desc="Reorientation type (fs, ppd)", argstr="--reorientation %s" ) correction = traits.Enum( "none", "zero", "abs", "nearest", desc="Correct the tensors if computed tensor is not semi-definite positive", argstr="--correction %s", ) scalar_float = traits.Bool( desc="Write scalar [FA,MD] as unscaled float (with their actual values, otherwise scaled by 10 000). Also causes FA to be unscaled [0..1].", argstr="--scalar_float ", ) DTI_double = traits.Bool( desc="Tensor components are saved as doubles (cannot be visualized in Slicer)", argstr="--DTI_double ", ) verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") class dtiprocessOutputSpec(TraitedSpec): fa_output = File(desc="Fractional Anisotropy output file", exists=True) md_output = File(desc="Mean Diffusivity output file", exists=True) fa_gradient_output = File( desc="Fractional Anisotropy Gradient output file", exists=True ) fa_gradmag_output = File( desc="Fractional Anisotropy Gradient Magnitude output file", exists=True ) color_fa_output = File(desc="Color Fractional Anisotropy output file", exists=True) principal_eigenvector_output = File( desc="Principal Eigenvectors Output", exists=True ) negative_eigenvector_output = File( desc="Negative Eigenvectors Output: create a binary image where if any of the eigen value is below zero, the voxel is set to 1, otherwise 0.", exists=True, ) frobenius_norm_output = File(desc="Frobenius Norm Output", exists=True) lambda1_output = File( desc="Axial Diffusivity - Lambda 1 (largest eigenvalue) output", exists=True ) lambda2_output = File(desc="Lambda 2 (middle eigenvalue) output", exists=True) lambda3_output = File(desc="Lambda 3 (smallest eigenvalue) 
output", exists=True) RD_output = File( desc="RD (Radial Diffusivity 1/2*(lambda2+lambda3)) output", exists=True ) rot_output = File( desc="Rotated tensor output file. Must also specify the dof file.", exists=True ) outmask = File(desc="Name of the masked tensor field.", exists=True) deformation_output = File( desc="Warped tensor field based on a deformation field. This option requires the --forward,-F transformation to be specified.", exists=True, ) class dtiprocess(SEMLikeCommandLine): """title: DTIProcess (DTIProcess) category: Diffusion.Diffusion Tensor Images description: dtiprocess is a tool that handles tensor fields. It takes as an input a tensor field in nrrd format. It can generate diffusion scalar properties out of the tensor field such as : FA (--fa_output), Gradient FA image (--fa_gradient_output), color FA (--color_fa_output), MD (--md_output), Frobenius norm (--frobenius_norm_output), lbd1, lbd2, lbd3 (--lambda{1,2,3}_output), binary map of voxel where if any of the eigenvalue is negative, the voxel is set to 1 (--negative_eigenvector_output) It also creates 4D images out of the tensor field such as: Highest eigenvector map (highest eigenvector at each voxel) (--principal_eigenvector_output) Masking capabilities: For any of the processing done with dtiprocess, it's possible to apply it on a masked region of the tensor field. You need to use the --mask option for any of the option to be applied on that tensor field sub-region only. If you want to save the masked tensor field use the option --outmask and specify the new masked tensor field file name. dtiprocess also allows a range of transformations on the tensor fields. The transformed tensor field file name is specified with the option --deformation_output. 
There are 3 resampling interpolation methods specified with the tag --interpolation followed by the type to use (nearestneighbor, linear, cubic) Then you have several transformations possible to apply: - Affine transformations using as an input - itk affine transformation file (based on the itkAffineTransform class) - Affine transformations using rview (details and download at http://www.doc.ic.ac.uk/~dr/software/). There are 2 versions of rview both creating transformation files called dof files. The old version of rview outputs text files containing the transformation parameters. It can be read in with the --dof_file option. The new version outputs binary dof files. These dof files can be transformed into human readable file with the dof2mat tool which is part of the rview package. So you need to save the output of dof2mat into a text file which can then be used with the -- newdof_file option. Usage example: dof2mat mynewdoffile.dof >> mynewdoffile.txt dtiprocess --dti_image mytensorfield.nhdr --newdof_file mynewdoffile.txt --rot_output myaffinetensorfield.nhdr Non linear transformations as an input: The default transformation file type is d-field (displacement field) in nrrd format. The option to use is --forward with the name of the file. If the transformation file is a h-field you have to add the option --hField. version: 1.0.1 documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess license: Copyright (c) Casey Goodlett. All rights reserved. See http://www.ia.unc.edu/dev/Copyright.htm for details. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the above copyright notices for more information. 
contributor: Casey Goodlett """ input_spec = dtiprocessInputSpec output_spec = dtiprocessOutputSpec _cmd = " dtiprocess " _outputs_filenames = { "fa_gradmag_output": "fa_gradmag_output.nii", "fa_gradient_output": "fa_gradient_output.nii", "lambda1_output": "lambda1_output.nii", "lambda2_output": "lambda2_output.nii", "color_fa_output": "color_fa_output.nii", "fa_output": "fa_output.nii", "frobenius_norm_output": "frobenius_norm_output.nii", "principal_eigenvector_output": "principal_eigenvector_output.nii", "outmask": "outmask.nii", "lambda3_output": "lambda3_output.nii", "negative_eigenvector_output": "negative_eigenvector_output.nii", "md_output": "md_output.nii", "RD_output": "RD_output.nii", "deformation_output": "deformation_output.nii", "rot_output": "rot_output.nii", } _redirect_x = False class DWIConvertInputSpec(CommandLineInputSpec): conversionMode = traits.Enum( "DicomToNrrd", "DicomToFSL", "NrrdToFSL", "FSLToNrrd", desc="Determine which conversion to perform. DicomToNrrd (default): Convert DICOM series to NRRD DicomToFSL: Convert DICOM series to NIfTI File + gradient/bvalue text files NrrdToFSL: Convert DWI NRRD file to NIfTI File + gradient/bvalue text files FSLToNrrd: Convert NIfTI File + gradient/bvalue text files to NRRD file.", argstr="--conversionMode %s", ) inputVolume = File( desc="Input DWI volume -- not used for DicomToNrrd mode.", exists=True, argstr="--inputVolume %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output filename (.nhdr or .nrrd)", argstr="--outputVolume %s", ) inputDicomDirectory = Directory( desc="Directory holding Dicom series", exists=True, argstr="--inputDicomDirectory %s", ) fslNIFTIFile = File( desc="4D NIfTI file containing gradient volumes", exists=True, argstr="--fslNIFTIFile %s", ) inputBValues = File( desc="The B Values are stored in FSL .bval text file format", exists=True, argstr="--inputBValues %s", ) inputBVectors = File( desc="The Gradient Vectors are stored in FSL .bvec text 
file format", exists=True, argstr="--inputBVectors %s", ) outputBValues = traits.Either( traits.Bool, File(), hash_files=False, desc="The B Values are stored in FSL .bval text file format (defaults to .bval)", argstr="--outputBValues %s", ) outputBVectors = traits.Either( traits.Bool, File(), hash_files=False, desc="The Gradient Vectors are stored in FSL .bvec text file format (defaults to .bvec)", argstr="--outputBVectors %s", ) fMRI = traits.Bool( desc="Output a NRRD file, but without gradients", argstr="--fMRI " ) writeProtocolGradientsFile = traits.Bool( desc="Write the protocol gradients to a file suffixed by '.txt' as they were specified in the procol by multiplying each diffusion gradient direction by the measurement frame. This file is for debugging purposes only, the format is not fixed, and will likely change as debugging of new dicom formats is necessary.", argstr="--writeProtocolGradientsFile ", ) useIdentityMeaseurementFrame = traits.Bool( desc="Adjust all the gradients so that the measurement frame is an identity matrix.", argstr="--useIdentityMeaseurementFrame ", ) useBMatrixGradientDirections = traits.Bool( desc="Fill the nhdr header with the gradient directions and bvalues computed out of the BMatrix. Only changes behavior for Siemens data. In some cases the standard public gradients are not properly computed. The gradients can emperically computed from the private BMatrix fields. 
In some cases the private BMatrix is consistent with the public grandients, but not in all cases, when it exists BMatrix is usually most robust.", argstr="--useBMatrixGradientDirections ", ) outputDirectory = traits.Either( traits.Bool, Directory(), hash_files=False, desc="Directory holding the output NRRD file", argstr="--outputDirectory %s", ) gradientVectorFile = traits.Either( traits.Bool, File(), hash_files=False, desc="Text file giving gradient vectors", argstr="--gradientVectorFile %s", ) smallGradientThreshold = traits.Float( desc="If a gradient magnitude is greater than 0 and less than smallGradientThreshold, then DWIConvert will display an error message and quit, unless the useBMatrixGradientDirections option is set.", argstr="--smallGradientThreshold %f", ) allowLossyConversion = traits.Bool( desc="The only supported output type is 'short'. Conversion from images of a different type may cause data loss due to rounding or truncation. Use with caution!", argstr="--allowLossyConversion ", ) transposeInputBVectors = traits.Bool( desc="FSL input BVectors are expected to be encoded in the input file as one vector per line. 
If it is not the case, use this option to transpose the file as it is read.", argstr="--transposeInputBVectors ", ) class DWIConvertOutputSpec(TraitedSpec): outputVolume = File(desc="Output filename (.nhdr or .nrrd)", exists=True) outputBValues = File( desc="The B Values are stored in FSL .bval text file format (defaults to .bval)", exists=True, ) outputBVectors = File( desc="The Gradient Vectors are stored in FSL .bvec text file format (defaults to .bvec)", exists=True, ) outputDirectory = Directory( desc="Directory holding the output NRRD file", exists=True ) gradientVectorFile = File(desc="Text file giving gradient vectors", exists=True) class DWIConvert(SEMLikeCommandLine): """title: DWIConverter category: Diffusion.Diffusion Data Conversion description: Converts diffusion weighted MR images in dicom series into Nrrd format for analysis in Slicer. This program has been tested on only a limited subset of DTI dicom formats available from Siemens, GE, and Phillips scanners. Work in progress to support dicom multi-frame data. The program parses dicom header to extract necessary information about measurement frame, diffusion weighting directions, b-values, etc, and write out a nrrd image. For non-diffusion weighted dicom images, it loads in an entire dicom series and writes out a single dicom volume in a .nhdr/.raw pair. version: Version 1.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DWIConverter license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Vince Magnotta (UIowa), Hans Johnson (UIowa), Joy Matsui (UIowa), Kent Williams (UIowa), Mark Scully (Uiowa), Xiaodong Tao (GE) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
Additional support for DTI data produced on Philips scanners was contributed by Vincent Magnotta and Hans Johnson at the University of Iowa. """ input_spec = DWIConvertInputSpec output_spec = DWIConvertOutputSpec _cmd = " DWIConvert " _outputs_filenames = { "outputVolume": "outputVolume.nii", "outputDirectory": "outputDirectory", "outputBValues": "outputBValues.bval", "gradientVectorFile": "gradientVectorFile", "outputBVectors": "outputBVectors.bvec", } _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/diffusion/gtract.py000066400000000000000000002236111413403311400241420ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ...base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class gtractTransformToDisplacementFieldInputSpec(CommandLineInputSpec): inputTransform = File( desc="Input Transform File Name", exists=True, argstr="--inputTransform %s" ) inputReferenceVolume = File( desc="Required: input image file name to exemplify the anatomical space over which to vcl_express the transform as a displacement field.", exists=True, argstr="--inputReferenceVolume %s", ) outputDeformationFieldVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output deformation field", argstr="--outputDeformationFieldVolume %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractTransformToDisplacementFieldOutputSpec(TraitedSpec): outputDeformationFieldVolume = File(desc="Output deformation field", exists=True) class gtractTransformToDisplacementField(SEMLikeCommandLine): """title: Create Displacement Field category: Diffusion.GTRACT description: This program will compute forward deformation from the given 
Transform. The size of the DF is equal to MNI space version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta, Madhura Ingalhalikar, and Greg Harris acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractTransformToDisplacementFieldInputSpec output_spec = gtractTransformToDisplacementFieldOutputSpec _cmd = " gtractTransformToDisplacementField " _outputs_filenames = { "outputDeformationFieldVolume": "outputDeformationFieldVolume.nii" } _redirect_x = False class gtractInvertBSplineTransformInputSpec(CommandLineInputSpec): inputReferenceVolume = File( desc="Required: input image file name to exemplify the anatomical space to interpolate over.", exists=True, argstr="--inputReferenceVolume %s", ) inputTransform = File( desc="Required: input B-Spline transform file name", exists=True, argstr="--inputTransform %s", ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output transform file name", argstr="--outputTransform %s", ) landmarkDensity = InputMultiPath( traits.Int, desc="Number of landmark subdivisions in all 3 directions", sep=",", argstr="--landmarkDensity %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractInvertBSplineTransformOutputSpec(TraitedSpec): outputTransform = File(desc="Required: output transform file name", exists=True) class gtractInvertBSplineTransform(SEMLikeCommandLine): """title: B-Spline Transform Inversion category: Diffusion.GTRACT description: This program will invert a B-Spline transform using a thin-plate spline approximation. 
version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractInvertBSplineTransformInputSpec output_spec = gtractInvertBSplineTransformOutputSpec _cmd = " gtractInvertBSplineTransform " _outputs_filenames = {"outputTransform": "outputTransform.h5"} _redirect_x = False class gtractConcatDwiInputSpec(CommandLineInputSpec): inputVolume = InputMultiPath( File(exists=True), desc="Required: input file containing the first diffusion weighted image", argstr="--inputVolume %s...", ) ignoreOrigins = traits.Bool( desc="If image origins are different force all images to origin of first image", argstr="--ignoreOrigins ", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing the combined diffusion weighted images.", argstr="--outputVolume %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractConcatDwiOutputSpec(TraitedSpec): outputVolume = File( desc="Required: name of output NRRD file containing the combined diffusion weighted images.", exists=True, ) class gtractConcatDwi(SEMLikeCommandLine): """title: Concat DWI Images category: Diffusion.GTRACT description: This program will concatenate two DTI runs together. version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractConcatDwiInputSpec output_spec = gtractConcatDwiOutputSpec _cmd = " gtractConcatDwi " _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractAverageBvaluesInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input image file name containing multiple baseline gradients to average", exists=True, argstr="--inputVolume %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing directly averaged baseline images", argstr="--outputVolume %s", ) directionsTolerance = traits.Float( desc="Tolerance for matching identical gradient direction pairs", argstr="--directionsTolerance %f", ) averageB0only = traits.Bool( desc="Average only baseline gradients. All other gradient directions are not averaged, but retained in the outputVolume", argstr="--averageB0only ", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractAverageBvaluesOutputSpec(TraitedSpec): outputVolume = File( desc="Required: name of output NRRD file containing directly averaged baseline images", exists=True, ) class gtractAverageBvalues(SEMLikeCommandLine): """title: Average B-Values category: Diffusion.GTRACT description: This program will directly average together the baseline gradients (b value equals 0) within a DWI scan. This is usually used after gtractCoregBvalues. version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractAverageBvaluesInputSpec output_spec = gtractAverageBvaluesOutputSpec _cmd = " gtractAverageBvalues " _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractCoregBvaluesInputSpec(CommandLineInputSpec): movingVolume = File( desc="Required: input moving image file name. In order to register gradients within a scan to its first gradient, set the movingVolume and fixedVolume as the same image.", exists=True, argstr="--movingVolume %s", ) fixedVolume = File( desc="Required: input fixed image file name. It is recommended that this image should either contain or be a b0 image.", exists=True, argstr="--fixedVolume %s", ) fixedVolumeIndex = traits.Int( desc="Index in the fixed image for registration. It is recommended that this image should be a b0 image.", argstr="--fixedVolumeIndex %d", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing moving images individually resampled and fit to the specified fixed image index.", argstr="--outputVolume %s", ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Registration 3D transforms concatenated in a single output file. There are no tools that can use this, but can be used for debugging purposes.", argstr="--outputTransform %s", ) eddyCurrentCorrection = traits.Bool( desc="Flag to perform eddy current corection in addition to motion correction (recommended)", argstr="--eddyCurrentCorrection ", ) numberOfIterations = traits.Int( desc="Number of iterations in each 3D fit", argstr="--numberOfIterations %d" ) numberOfSpatialSamples = traits.Int( desc="The number of voxels sampled for mutual information computation. Increase this for a slower, more careful fit. NOTE that it is suggested to use samplingPercentage instead of this option. 
However, if set, it overwrites the samplingPercentage option. ", argstr="--numberOfSpatialSamples %d", ) samplingPercentage = traits.Float( desc="This is a number in (0.0,1.0] interval that shows the percentage of the input fixed image voxels that are sampled for mutual information computation. Increase this for a slower, more careful fit. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation. The default is to use approximately 5% of voxels (for backwards compatibility 5% ~= 500000/(256*256*256)). Typical values range from 1% for low detail images to 20% for high detail images.", argstr="--samplingPercentage %f", ) relaxationFactor = traits.Float( desc="Fraction of gradient from Jacobian to attempt to move in each 3D fit step (adjust when eddyCurrentCorrection is enabled; suggested value = 0.25)", argstr="--relaxationFactor %f", ) maximumStepSize = traits.Float( desc="Maximum permitted step size to move in each 3D fit step (adjust when eddyCurrentCorrection is enabled; suggested value = 0.1)", argstr="--maximumStepSize %f", ) minimumStepSize = traits.Float( desc="Minimum required step size to move in each 3D fit step without converging -- decrease this to make the fit more exacting", argstr="--minimumStepSize %f", ) spatialScale = traits.Float( desc="How much to scale up changes in position compared to unit rotational changes in radians -- decrease this to put more rotation in the fit", argstr="--spatialScale %f", ) registerB0Only = traits.Bool( desc="Register the B0 images only", argstr="--registerB0Only " ) debugLevel = traits.Int( desc="Display debug messages, and produce debug intermediate results. 
0=OFF, 1=Minimal, 10=Maximum debugging.", argstr="--debugLevel %d", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractCoregBvaluesOutputSpec(TraitedSpec): outputVolume = File( desc="Required: name of output NRRD file containing moving images individually resampled and fit to the specified fixed image index.", exists=True, ) outputTransform = File( desc="Registration 3D transforms concatenated in a single output file. There are no tools that can use this, but can be used for debugging purposes.", exists=True, ) class gtractCoregBvalues(SEMLikeCommandLine): """title: Coregister B-Values category: Diffusion.GTRACT description: This step should be performed after converting DWI scans from DICOM to NRRD format. This program will register all gradients in a NRRD diffusion weighted 4D vector image (moving image) to a specified index in a fixed image. It also supports co-registration with a T2 weighted image or field map in the same plane as the DWI data. The fixed image for the registration should be a b0 image. A mutual information metric cost function is used for the registration because of the differences in signal intensity as a result of the diffusion gradients. The full affine allows the registration procedure to correct for eddy current distortions that may exist in the data. If the eddyCurrentCorrection is enabled, relaxationFactor (0.25) and maximumStepSize (0.1) should be adjusted. version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractCoregBvaluesInputSpec output_spec = gtractCoregBvaluesOutputSpec _cmd = " gtractCoregBvalues " _outputs_filenames = { "outputVolume": "outputVolume.nrrd", "outputTransform": "outputTransform.h5", } _redirect_x = False class gtractResampleAnisotropyInputSpec(CommandLineInputSpec): inputAnisotropyVolume = File( desc="Required: input file containing the anisotropy image", exists=True, argstr="--inputAnisotropyVolume %s", ) inputAnatomicalVolume = File( desc="Required: input file containing the anatomical image whose characteristics will be cloned.", exists=True, argstr="--inputAnatomicalVolume %s", ) inputTransform = File( desc="Required: input Rigid OR Bspline transform file name", exists=True, argstr="--inputTransform %s", ) transformType = traits.Enum( "Rigid", "B-Spline", desc="Transform type: Rigid, B-Spline", argstr="--transformType %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing the resampled transformed anisotropy image.", argstr="--outputVolume %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractResampleAnisotropyOutputSpec(TraitedSpec): outputVolume = File( desc="Required: name of output NRRD file containing the resampled transformed anisotropy image.", exists=True, ) class gtractResampleAnisotropy(SEMLikeCommandLine): """title: Resample Anisotropy category: Diffusion.GTRACT description: This program will resample a floating point image using either the Rigid or B-Spline transform. You may want to save the aligned B0 image after each of the anisotropy map co-registration steps with the anatomical image to check the registration quality with another tool. 
version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractResampleAnisotropyInputSpec output_spec = gtractResampleAnisotropyOutputSpec _cmd = " gtractResampleAnisotropy " _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractResampleCodeImageInputSpec(CommandLineInputSpec): inputCodeVolume = File( desc="Required: input file containing the code image", exists=True, argstr="--inputCodeVolume %s", ) inputReferenceVolume = File( desc="Required: input file containing the standard image to clone the characteristics of.", exists=True, argstr="--inputReferenceVolume %s", ) inputTransform = File( desc="Required: input Rigid or Inverse-B-Spline transform file name", exists=True, argstr="--inputTransform %s", ) transformType = traits.Enum( "Rigid", "Affine", "B-Spline", "Inverse-B-Spline", "None", desc="Transform type: Rigid or Inverse-B-Spline", argstr="--transformType %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing the resampled code image in acquisition space.", argstr="--outputVolume %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractResampleCodeImageOutputSpec(TraitedSpec): outputVolume = File( desc="Required: name of output NRRD file containing the resampled code image in acquisition space.", exists=True, ) class gtractResampleCodeImage(SEMLikeCommandLine): """title: Resample Code Image category: Diffusion.GTRACT description: This program will resample a short integer code image using either the Rigid or Inverse-B-Spline transform. 
The reference image is the DTI tensor anisotropy image space, and the input code image is in anatomical space. version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractResampleCodeImageInputSpec output_spec = gtractResampleCodeImageOutputSpec _cmd = " gtractResampleCodeImage " _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractCopyImageOrientationInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input file containing the signed short image to reorient without resampling.", exists=True, argstr="--inputVolume %s", ) inputReferenceVolume = File( desc="Required: input file containing orietation that will be cloned.", exists=True, argstr="--inputReferenceVolume %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD or Nifti file containing the reoriented image in reference image space.", argstr="--outputVolume %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractCopyImageOrientationOutputSpec(TraitedSpec): outputVolume = File( desc="Required: name of output NRRD or Nifti file containing the reoriented image in reference image space.", exists=True, ) class gtractCopyImageOrientation(SEMLikeCommandLine): """title: Copy Image Orientation category: Diffusion.GTRACT description: This program will copy the orientation from the reference image into the moving image. Currently, the registration process requires that the diffusion weighted images and the anatomical images have the same image orientation (i.e. Axial, Coronal, Sagittal). 
It is suggested that you copy the image orientation from the diffusion weighted images and apply this to the anatomical image. This image can be subsequently removed after the registration step is complete. We anticipate that this limitation will be removed in future versions of the registration programs. version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractCopyImageOrientationInputSpec output_spec = gtractCopyImageOrientationOutputSpec _cmd = " gtractCopyImageOrientation " _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractCreateGuideFiberInputSpec(CommandLineInputSpec): inputFiber = File( desc="Required: input fiber tract file name", exists=True, argstr="--inputFiber %s", ) numberOfPoints = traits.Int( desc="Number of points in output guide fiber", argstr="--numberOfPoints %d" ) outputFiber = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output guide fiber file name", argstr="--outputFiber %s", ) writeXMLPolyDataFile = traits.Bool( desc="Flag to make use of XML files when reading and writing vtkPolyData.", argstr="--writeXMLPolyDataFile ", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractCreateGuideFiberOutputSpec(TraitedSpec): outputFiber = File(desc="Required: output guide fiber file name", exists=True) class gtractCreateGuideFiber(SEMLikeCommandLine): """title: Create Guide Fiber category: Diffusion.GTRACT description: This program will create a guide fiber by averaging fibers from a previously generated tract. 
version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractCreateGuideFiberInputSpec output_spec = gtractCreateGuideFiberOutputSpec _cmd = " gtractCreateGuideFiber " _outputs_filenames = {"outputFiber": "outputFiber.vtk"} _redirect_x = False class gtractAnisotropyMapInputSpec(CommandLineInputSpec): inputTensorVolume = File( desc="Required: input file containing the diffusion tensor image", exists=True, argstr="--inputTensorVolume %s", ) anisotropyType = traits.Enum( "ADC", "FA", "RA", "VR", "AD", "RD", "LI", desc="Anisotropy Mapping Type: ADC, FA, RA, VR, AD, RD, LI", argstr="--anisotropyType %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing the selected kind of anisotropy scalar.", argstr="--outputVolume %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractAnisotropyMapOutputSpec(TraitedSpec): outputVolume = File( desc="Required: name of output NRRD file containing the selected kind of anisotropy scalar.", exists=True, ) class gtractAnisotropyMap(SEMLikeCommandLine): """title: Anisotropy Map category: Diffusion.GTRACT description: This program will generate a scalar map of anisotropy, given a tensor representation. Anisotropy images are used for fiber tracking, but the anisotropy scalars are not defined along the path. Instead, the tensor representation is included as point data allowing all of these metrics to be computed using only the fiber tract point data. 
The images can be saved in any ITK supported format, but it is suggested that you use an image format that supports the definition of the image origin. This includes NRRD, NifTI, and Meta formats. These images can also be used for scalar analysis including regional anisotropy measures or VBM style analysis. version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractAnisotropyMapInputSpec output_spec = gtractAnisotropyMapOutputSpec _cmd = " gtractAnisotropyMap " _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractClipAnisotropyInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input image file name", exists=True, argstr="--inputVolume %s" ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing the clipped anisotropy image", argstr="--outputVolume %s", ) clipFirstSlice = traits.Bool( desc="Clip the first slice of the anisotropy image", argstr="--clipFirstSlice " ) clipLastSlice = traits.Bool( desc="Clip the last slice of the anisotropy image", argstr="--clipLastSlice " ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractClipAnisotropyOutputSpec(TraitedSpec): outputVolume = File( desc="Required: name of output NRRD file containing the clipped anisotropy image", exists=True, ) class gtractClipAnisotropy(SEMLikeCommandLine): """title: Clip Anisotropy category: Diffusion.GTRACT description: This program will zero the first and/or last slice of an anisotropy image, creating a clipped anisotropy image. 
version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractClipAnisotropyInputSpec output_spec = gtractClipAnisotropyOutputSpec _cmd = " gtractClipAnisotropy " _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractResampleB0InputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input file containing the 4D image", exists=True, argstr="--inputVolume %s", ) inputAnatomicalVolume = File( desc="Required: input file containing the anatomical image defining the origin, spacing and size of the resampled image (template)", exists=True, argstr="--inputAnatomicalVolume %s", ) inputTransform = File( desc="Required: input Rigid OR Bspline transform file name", exists=True, argstr="--inputTransform %s", ) vectorIndex = traits.Int( desc="Index in the diffusion weighted image set for the B0 image", argstr="--vectorIndex %d", ) transformType = traits.Enum( "Rigid", "B-Spline", desc="Transform type: Rigid, B-Spline", argstr="--transformType %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing the resampled input image.", argstr="--outputVolume %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractResampleB0OutputSpec(TraitedSpec): outputVolume = File( desc="Required: name of output NRRD file containing the resampled input image.", exists=True, ) class gtractResampleB0(SEMLikeCommandLine): """title: Resample B0 category: Diffusion.GTRACT description: This program will resample a signed short image using either a Rigid or B-Spline transform. 
The user must specify a template image that will be used to define the origin, orientation, spacing, and size of the resampled image. version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractResampleB0InputSpec output_spec = gtractResampleB0OutputSpec _cmd = " gtractResampleB0 " _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractInvertRigidTransformInputSpec(CommandLineInputSpec): inputTransform = File( desc="Required: input rigid transform file name", exists=True, argstr="--inputTransform %s", ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output transform file name", argstr="--outputTransform %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractInvertRigidTransformOutputSpec(TraitedSpec): outputTransform = File(desc="Required: output transform file name", exists=True) class gtractInvertRigidTransform(SEMLikeCommandLine): """title: Rigid Transform Inversion category: Diffusion.GTRACT description: This program will invert a Rigid transform. version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractInvertRigidTransformInputSpec output_spec = gtractInvertRigidTransformOutputSpec _cmd = " gtractInvertRigidTransform " _outputs_filenames = {"outputTransform": "outputTransform.h5"} _redirect_x = False class gtractImageConformityInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input file containing the signed short image to reorient without resampling.", exists=True, argstr="--inputVolume %s", ) inputReferenceVolume = File( desc="Required: input file containing the standard image to clone the characteristics of.", exists=True, argstr="--inputReferenceVolume %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: name of output Nrrd or Nifti file containing the reoriented image in reference image space.", argstr="--outputVolume %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractImageConformityOutputSpec(TraitedSpec): outputVolume = File( desc="Required: name of output Nrrd or Nifti file containing the reoriented image in reference image space.", exists=True, ) class gtractImageConformity(SEMLikeCommandLine): """title: Image Conformity category: Diffusion.GTRACT description: This program will straighten out the Direction and Origin to match the Reference Image. version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractImageConformityInputSpec output_spec = gtractImageConformityOutputSpec _cmd = " gtractImageConformity " _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class compareTractInclusionInputSpec(CommandLineInputSpec): testFiber = File( desc="Required: test fiber tract file name", exists=True, argstr="--testFiber %s", ) standardFiber = File( desc="Required: standard fiber tract file name", exists=True, argstr="--standardFiber %s", ) closeness = traits.Float( desc="Closeness of every test fiber to some fiber in the standard tract, computed as a sum of squares of spatial differences of standard points", argstr="--closeness %f", ) numberOfPoints = traits.Int( desc="Number of points in comparison fiber pairs", argstr="--numberOfPoints %d" ) testForBijection = traits.Bool( desc="Flag to apply the closeness criterion both ways", argstr="--testForBijection ", ) testForFiberCardinality = traits.Bool( desc="Flag to require the same number of fibers in both tracts", argstr="--testForFiberCardinality ", ) writeXMLPolyDataFile = traits.Bool( desc="Flag to make use of XML files when reading and writing vtkPolyData.", argstr="--writeXMLPolyDataFile ", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class compareTractInclusionOutputSpec(TraitedSpec): pass class compareTractInclusion(SEMLikeCommandLine): """title: Compare Tracts category: Diffusion.GTRACT description: This program will halt with a status code indicating whether a test tract is nearly enough included in a standard tract in the sense that every fiber in the test tract has a low enough sum of squares distance to some fiber in the standard tract modulo spline resampling of every fiber to a fixed number of points. 
version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = compareTractInclusionInputSpec output_spec = compareTractInclusionOutputSpec _cmd = " compareTractInclusion " _outputs_filenames = {} _redirect_x = False class gtractFastMarchingTrackingInputSpec(CommandLineInputSpec): inputTensorVolume = File( desc="Required: input tensor image file name", exists=True, argstr="--inputTensorVolume %s", ) inputAnisotropyVolume = File( desc="Required: input anisotropy image file name", exists=True, argstr="--inputAnisotropyVolume %s", ) inputCostVolume = File( desc="Required: input vcl_cost image file name", exists=True, argstr="--inputCostVolume %s", ) inputStartingSeedsLabelMapVolume = File( desc="Required: input starting seeds LabelMap image file name", exists=True, argstr="--inputStartingSeedsLabelMapVolume %s", ) startingSeedsLabel = traits.Int( desc="Label value for Starting Seeds", argstr="--startingSeedsLabel %d" ) outputTract = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", argstr="--outputTract %s", ) writeXMLPolyDataFile = traits.Bool( desc="Flag to make use of the XML format for vtkPolyData fiber tracts.", argstr="--writeXMLPolyDataFile ", ) numberOfIterations = traits.Int( desc="Number of iterations used for the optimization", argstr="--numberOfIterations %d", ) seedThreshold = traits.Float( desc="Anisotropy threshold used for seed selection", argstr="--seedThreshold %f" ) trackingThreshold = traits.Float( desc="Anisotropy threshold used for fiber tracking", argstr="--trackingThreshold %f", ) costStepSize = traits.Float( desc="Cost image 
sub-voxel sampling", argstr="--costStepSize %f" ) maximumStepSize = traits.Float( desc="Maximum step size to move when tracking", argstr="--maximumStepSize %f" ) minimumStepSize = traits.Float( desc="Minimum step size to move when tracking", argstr="--minimumStepSize %f" ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractFastMarchingTrackingOutputSpec(TraitedSpec): outputTract = File( desc="Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", exists=True, ) class gtractFastMarchingTracking(SEMLikeCommandLine): """title: Fast Marching Tracking category: Diffusion.GTRACT description: This program will use a fast marching fiber tracking algorithm to identify fiber tracts from a tensor image. This program is the second portion of the algorithm. The user must first run gtractCostFastMarching to generate the vcl_cost image. The second step of the algorithm implemented here is a gradient descent soplution from the defined ending region back to the seed points specified in gtractCostFastMarching. This algorithm is roughly based on the work by G. Parker et al. from IEEE Transactions On Medical Imaging, 21(5): 505-512, 2002. An additional feature of including anisotropy into the vcl_cost function calculation is included. version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. The original code here was developed by Daisy Espino. 
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractFastMarchingTrackingInputSpec output_spec = gtractFastMarchingTrackingOutputSpec _cmd = " gtractFastMarchingTracking " _outputs_filenames = {"outputTract": "outputTract.vtk"} _redirect_x = False class gtractInvertDisplacementFieldInputSpec(CommandLineInputSpec): baseImage = File( desc="Required: base image used to define the size of the inverse field", exists=True, argstr="--baseImage %s", ) deformationImage = File( desc="Required: Displacement field image", exists=True, argstr="--deformationImage %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: Output deformation field", argstr="--outputVolume %s", ) subsamplingFactor = traits.Int( desc="Subsampling factor for the deformation field", argstr="--subsamplingFactor %d", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractInvertDisplacementFieldOutputSpec(TraitedSpec): outputVolume = File(desc="Required: Output deformation field", exists=True) class gtractInvertDisplacementField(SEMLikeCommandLine): """title: Invert Displacement Field category: Diffusion.GTRACT description: This program will invert a deformatrion field. The size of the deformation field is defined by an example image provided by the user version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta. 
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractInvertDisplacementFieldInputSpec output_spec = gtractInvertDisplacementFieldOutputSpec _cmd = " gtractInvertDisplacementField " _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False class gtractCoRegAnatomyInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input vector image file name. It is recommended that the input volume is the skull stripped baseline image of the DWI scan.", exists=True, argstr="--inputVolume %s", ) inputAnatomicalVolume = File( desc="Required: input anatomical image file name. It is recommended that that the input anatomical image has been skull stripped and has the same orientation as the DWI scan.", exists=True, argstr="--inputAnatomicalVolume %s", ) vectorIndex = traits.Int( desc="Vector image index in the moving image (within the DWI) to be used for registration.", argstr="--vectorIndex %d", ) inputRigidTransform = File( desc="Required (for B-Spline type co-registration): input rigid transform file name. 
Used as a starting point for the anatomical B-Spline registration.", exists=True, argstr="--inputRigidTransform %s", ) outputTransformName = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: filename for the fit transform.", argstr="--outputTransformName %s", ) transformType = traits.Enum( "Rigid", "Bspline", desc="Transform Type: Rigid|Bspline", argstr="--transformType %s", ) numberOfIterations = traits.Int( desc="Number of iterations in the selected 3D fit", argstr="--numberOfIterations %d", ) gridSize = InputMultiPath( traits.Int, desc="Number of grid subdivisions in all 3 directions", sep=",", argstr="--gridSize %s", ) borderSize = traits.Int(desc="Size of border", argstr="--borderSize %d") numberOfHistogramBins = traits.Int( desc="Number of histogram bins", argstr="--numberOfHistogramBins %d" ) spatialScale = traits.Int( desc="Scales the number of voxels in the image by this value to specify the number of voxels used in the registration", argstr="--spatialScale %d", ) convergence = traits.Float(desc="Convergence Factor", argstr="--convergence %f") gradientTolerance = traits.Float( desc="Gradient Tolerance", argstr="--gradientTolerance %f" ) maxBSplineDisplacement = traits.Float( desc=" Sets the maximum allowed displacements in image physical coordinates for BSpline control grid along each axis. A value of 0.0 indicates that the problem should be unbounded. NOTE: This only constrains the BSpline portion, and does not limit the displacement from the associated bulk transform. 
This can lead to a substantial reduction in computation time in the BSpline optimizer., ", argstr="--maxBSplineDisplacement %f", ) maximumStepSize = traits.Float( desc="Maximum permitted step size to move in the selected 3D fit", argstr="--maximumStepSize %f", ) minimumStepSize = traits.Float( desc="Minimum required step size to move in the selected 3D fit without converging -- decrease this to make the fit more exacting", argstr="--minimumStepSize %f", ) translationScale = traits.Float( desc="How much to scale up changes in position compared to unit rotational changes in radians -- decrease this to put more translation in the fit", argstr="--translationScale %f", ) relaxationFactor = traits.Float( desc="Fraction of gradient from Jacobian to attempt to move in the selected 3D fit", argstr="--relaxationFactor %f", ) numberOfSamples = traits.Int( desc="The number of voxels sampled for mutual information computation. Increase this for a slower, more careful fit. NOTE that it is suggested to use samplingPercentage instead of this option. However, if set, it overwrites the samplingPercentage option. ", argstr="--numberOfSamples %d", ) samplingPercentage = traits.Float( desc="This is a number in (0.0,1.0] interval that shows the percentage of the input fixed image voxels that are sampled for mutual information computation. Increase this for a slower, more careful fit. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation. The default is to use approximately 5% of voxels (for backwards compatibility 5% ~= 500000/(256*256*256)). Typical values range from 1% for low detail images to 20% for high detail images.", argstr="--samplingPercentage %f", ) useMomentsAlign = traits.Bool( desc="MomentsAlign assumes that the center of mass of the images represent similar structures. Perform a MomentsAlign registration as part of the sequential registration steps. 
This option MUST come first, and CAN NOT be used with either CenterOfHeadLAlign, GeometryAlign, or initialTransform file. This family of options superceeds the use of transformType if any of them are set.", argstr="--useMomentsAlign ", ) useGeometryAlign = traits.Bool( desc="GeometryAlign on assumes that the center of the voxel lattice of the images represent similar structures. Perform a GeometryCenterAlign registration as part of the sequential registration steps. This option MUST come first, and CAN NOT be used with either MomentsAlign, CenterOfHeadAlign, or initialTransform file. This family of options superceeds the use of transformType if any of them are set.", argstr="--useGeometryAlign ", ) useCenterOfHeadAlign = traits.Bool( desc="CenterOfHeadAlign attempts to find a hemisphere full of foreground voxels from the superior direction as an estimate of where the center of a head shape would be to drive a center of mass estimate. Perform a CenterOfHeadAlign registration as part of the sequential registration steps. This option MUST come first, and CAN NOT be used with either MomentsAlign, GeometryAlign, or initialTransform file. This family of options superceeds the use of transformType if any of them are set.", argstr="--useCenterOfHeadAlign ", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractCoRegAnatomyOutputSpec(TraitedSpec): outputTransformName = File( desc="Required: filename for the fit transform.", exists=True ) class gtractCoRegAnatomy(SEMLikeCommandLine): """title: Coregister B0 to Anatomy B-Spline category: Diffusion.GTRACT description: This program will register a Nrrd diffusion weighted 4D vector image to a fixed anatomical image. Two registration methods are supported for alignment with anatomical images: Rigid and B-Spline. 
The rigid registration performs a rigid body registration with the anatomical images and should be done as well to initialize the B-Spline transform. The B-SPline transform is the deformable transform, where the user can control the amount of deformation based on the number of control points as well as the maximum distance that these points can move. The B-Spline registration places a low dimensional grid in the image, which is deformed. This allows for some susceptibility related distortions to be removed from the diffusion weighted images. In general the amount of motion in the slice selection and read-out directions direction should be kept low. The distortion is in the phase encoding direction in the images. It is recommended that skull stripped (i.e. image containing only brain with skull removed) images shoud be used for image co-registration with the B-Spline transform. version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. 
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractCoRegAnatomyInputSpec output_spec = gtractCoRegAnatomyOutputSpec _cmd = " gtractCoRegAnatomy " _outputs_filenames = {"outputTransformName": "outputTransformName.h5"} _redirect_x = False class gtractResampleDWIInPlaceInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input image is a 4D NRRD image.", exists=True, argstr="--inputVolume %s", ) referenceVolume = File( desc="If provided, resample to the final space of the referenceVolume 3D data set.", exists=True, argstr="--referenceVolume %s", ) outputResampledB0 = traits.Either( traits.Bool, File(), hash_files=False, desc="Convenience function for extracting the first index location (assumed to be the B0)", argstr="--outputResampledB0 %s", ) inputTransform = File( desc="Required: transform file derived from rigid registration of b0 image to reference structural image.", exists=True, argstr="--inputTransform %s", ) warpDWITransform = File( desc="Optional: transform file to warp gradient volumes.", exists=True, argstr="--warpDWITransform %s", ) debugLevel = traits.Int( desc="Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", argstr="--debugLevel %d", ) imageOutputSize = InputMultiPath( traits.Int, desc="The voxel lattice for the output image, padding is added if necessary. 
NOTE: if 0,0,0, then the inputVolume size is used.", sep=",", argstr="--imageOutputSize %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image (NRRD file) that has been rigidly transformed into the space of the structural image and padded if image padding was changed from 0,0,0 default.", argstr="--outputVolume %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractResampleDWIInPlaceOutputSpec(TraitedSpec): outputResampledB0 = File( desc="Convenience function for extracting the first index location (assumed to be the B0)", exists=True, ) outputVolume = File( desc="Required: output image (NRRD file) that has been rigidly transformed into the space of the structural image and padded if image padding was changed from 0,0,0 default.", exists=True, ) class gtractResampleDWIInPlace(SEMLikeCommandLine): """title: Resample DWI In Place category: Diffusion.GTRACT description: Resamples DWI image to structural image. version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta, Greg Harris, Hans Johnson, and Joy Matsui. 
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractResampleDWIInPlaceInputSpec output_spec = gtractResampleDWIInPlaceOutputSpec _cmd = " gtractResampleDWIInPlace " _outputs_filenames = { "outputResampledB0": "outputResampledB0.nii", "outputVolume": "outputVolume.nii", } _redirect_x = False class gtractCostFastMarchingInputSpec(CommandLineInputSpec): inputTensorVolume = File( desc="Required: input tensor image file name", exists=True, argstr="--inputTensorVolume %s", ) inputAnisotropyVolume = File( desc="Required: input anisotropy image file name", exists=True, argstr="--inputAnisotropyVolume %s", ) inputStartingSeedsLabelMapVolume = File( desc="Required: input starting seeds LabelMap image file name", exists=True, argstr="--inputStartingSeedsLabelMapVolume %s", ) startingSeedsLabel = traits.Int( desc="Label value for Starting Seeds", argstr="--startingSeedsLabel %d" ) outputCostVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output vcl_cost image", argstr="--outputCostVolume %s", ) outputSpeedVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output speed image", argstr="--outputSpeedVolume %s", ) anisotropyWeight = traits.Float( desc="Anisotropy weight used for vcl_cost function calculations", argstr="--anisotropyWeight %f", ) stoppingValue = traits.Float( desc="Terminiating value for vcl_cost function estimation", argstr="--stoppingValue %f", ) seedThreshold = traits.Float( desc="Anisotropy threshold used for seed selection", argstr="--seedThreshold %f" ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractCostFastMarchingOutputSpec(TraitedSpec): outputCostVolume = File(desc="Output vcl_cost image", exists=True) outputSpeedVolume = File(desc="Output speed image", exists=True) class gtractCostFastMarching(SEMLikeCommandLine): """title: Cost Fast Marching 
category: Diffusion.GTRACT description: This program will use a fast marching fiber tracking algorithm to identify fiber tracts from a tensor image. This program is the first portion of the algorithm. The user must first run gtractFastMarchingTracking to generate the actual fiber tracts. This algorithm is roughly based on the work by G. Parker et al. from IEEE Transactions On Medical Imaging, 21(5): 505-512, 2002. An additional feature of including anisotropy into the vcl_cost function calculation is included. version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. The original code here was developed by Daisy Espino. acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractCostFastMarchingInputSpec output_spec = gtractCostFastMarchingOutputSpec _cmd = " gtractCostFastMarching " _outputs_filenames = { "outputCostVolume": "outputCostVolume.nrrd", "outputSpeedVolume": "outputSpeedVolume.nrrd", } _redirect_x = False class gtractFiberTrackingInputSpec(CommandLineInputSpec): inputTensorVolume = File( desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input tensor image file name", exists=True, argstr="--inputTensorVolume %s", ) inputAnisotropyVolume = File( desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input anisotropy image file name", exists=True, argstr="--inputAnisotropyVolume %s", ) inputStartingSeedsLabelMapVolume = File( desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input starting seeds LabelMap image file name", exists=True, argstr="--inputStartingSeedsLabelMapVolume %s", ) startingSeedsLabel = traits.Int( desc="Label value for Starting Seeds (required if Label number used to create seed point 
in Slicer was not 1)", argstr="--startingSeedsLabel %d", ) inputEndingSeedsLabelMapVolume = File( desc="Required (for Streamline, GraphSearch, and Guided fiber tracking methods): input ending seeds LabelMap image file name", exists=True, argstr="--inputEndingSeedsLabelMapVolume %s", ) endingSeedsLabel = traits.Int( desc="Label value for Ending Seeds (required if Label number used to create seed point in Slicer was not 1)", argstr="--endingSeedsLabel %d", ) inputTract = File( desc="Required (for Guided fiber tracking method): guide fiber in vtkPolydata file containing one tract line.", exists=True, argstr="--inputTract %s", ) outputTract = traits.Either( traits.Bool, File(), hash_files=False, desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): name of output vtkPolydata file containing tract lines and the point data collected along them.", argstr="--outputTract %s", ) writeXMLPolyDataFile = traits.Bool( desc="Flag to make use of the XML format for vtkPolyData fiber tracts.", argstr="--writeXMLPolyDataFile ", ) trackingMethod = traits.Enum( "Guided", "Free", "Streamline", "GraphSearch", desc="Fiber tracking Filter Type: Guided|Free|Streamline|GraphSearch", argstr="--trackingMethod %s", ) guidedCurvatureThreshold = traits.Float( desc="Guided Curvature Threshold (Degrees)", argstr="--guidedCurvatureThreshold %f", ) maximumGuideDistance = traits.Float( desc="Maximum distance for using the guide fiber direction", argstr="--maximumGuideDistance %f", ) seedThreshold = traits.Float( desc="Anisotropy threshold for seed selection (recommended for Free fiber tracking)", argstr="--seedThreshold %f", ) trackingThreshold = traits.Float( desc="Anisotropy threshold for fiber tracking (anisotropy values of the next point along the path)", argstr="--trackingThreshold %f", ) curvatureThreshold = traits.Float( desc="Curvature threshold in degrees (recommended for Free fiber tracking)", argstr="--curvatureThreshold %f", ) branchingThreshold = 
traits.Float( desc="Anisotropy Branching threshold (recommended for GraphSearch fiber tracking method)", argstr="--branchingThreshold %f", ) maximumBranchPoints = traits.Int( desc="Maximum branch points (recommended for GraphSearch fiber tracking method)", argstr="--maximumBranchPoints %d", ) useRandomWalk = traits.Bool( desc="Flag to use random walk.", argstr="--useRandomWalk " ) randomSeed = traits.Int( desc="Random number generator seed", argstr="--randomSeed %d" ) branchingAngle = traits.Float( desc="Branching angle in degrees (recommended for GraphSearch fiber tracking method)", argstr="--branchingAngle %f", ) minimumLength = traits.Float( desc="Minimum fiber length. Helpful for filtering invalid tracts.", argstr="--minimumLength %f", ) maximumLength = traits.Float( desc="Maximum fiber length (voxels)", argstr="--maximumLength %f" ) stepSize = traits.Float(desc="Fiber tracking step size", argstr="--stepSize %f") useLoopDetection = traits.Bool( desc="Flag to make use of loop detection.", argstr="--useLoopDetection " ) useTend = traits.Bool( desc="Flag to make use of Tend F and Tend G parameters.", argstr="--useTend " ) tendF = traits.Float(desc="Tend F parameter", argstr="--tendF %f") tendG = traits.Float(desc="Tend G parameter", argstr="--tendG %f") numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractFiberTrackingOutputSpec(TraitedSpec): outputTract = File( desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): name of output vtkPolydata file containing tract lines and the point data collected along them.", exists=True, ) class gtractFiberTracking(SEMLikeCommandLine): """title: Fiber Tracking category: Diffusion.GTRACT description: This program implements four fiber tracking methods (Free, Streamline, GraphSearch, Guided). The output of the fiber tracking is vtkPolyData (i.e. Polylines) that can be loaded into Slicer3 for visualization. 
The poly data can be saved in either old VTK format files (.vtk) or in the new VTK XML format (.xml). The polylines contain point data that defines ther Tensor at each point along the fiber tract. This can then be used to rendered as glyphs in Slicer3 and can be used to define severeal scalar measures without referencing back to the anisotropy images. (1) Free tracking is a basic streamlines algorithm. This is a direct implementation of the method original proposed by Basser et al. The tracking follows the primarty eigenvector. The tracking begins with seed points in the starting region. Only those voxels above the specified anisotropy threshold in the starting region are used as seed points. Tracking terminates either as a result of maximum fiber length, low ansiotropy, or large curvature. This is a great way to explore your data. (2) The streamlines algorithm is a direct implementation of the method originally proposed by Basser et al. The tracking follows the primary eigenvector. The tracking begins with seed points in the starting region. Only those voxels above the specified anisotropy threshold in the starting region are used as seed points. Tracking terminates either by reaching the ending region or reaching some stopping criteria. Stopping criteria are specified using the following parameters: tracking threshold, curvature threshold, and max length. Only paths terminating in the ending region are kept in this method. The TEND algorithm proposed by Lazar et al. (Human Brain Mapping 18:306-321, 2003) has been instrumented. This can be enabled using the --useTend option while performing Streamlines tracking. This utilizes the entire diffusion tensor to deflect the incoming vector instead of simply following the primary eigenvector. The TEND parameters are set using the --tendF and --tendG options. (3) Graph Search tracking is the first step in the full GTRACT algorithm developed by Cheng et al. 
(NeuroImage 31(3): 1075-1085, 2006) for finding the tracks in a tensor image. This method was developed to generate fibers in a Tensor representation where crossing fibers occur. The graph search algorithm follows the primary eigenvector in non-ambigous regions and utilizes branching and a graph search algorithm in ambigous regions. Ambiguous tracking regions are defined based on two criteria: Branching Al Threshold (anisotropy values below this value and above the traching threshold) and Curvature Major Eigen (angles of the primary eigenvector direction and the current tracking direction). In regions that meet this criteria, two or three tracking paths are considered. The first is the standard primary eigenvector direction. The second is the seconadary eigenvector direction. This is based on the assumption that these regions may be prolate regions. If the Random Walk option is selected then a third direction is also considered. This direction is defined by a cone pointing from the current position to the centroid of the ending region. The interior angle of the cone is specified by the user with the Branch/Guide Angle parameter. A vector contained inside of the cone is selected at random and used as the third direction. This method can also utilize the TEND option where the primary tracking direction is that specified by the TEND method instead of the primary eigenvector. The parameter '--maximumBranchPoints' allows the tracking to have this number of branches being considered at a time. If this number of branch points is exceeded at any time, then the algorithm will revert back to a streamline alogrithm until the number of branches is reduced. This allows the user to constrain the computational complexity of the algorithm. (4) The second phase of the GTRACT algorithm is Guided Tracking. This method incorporates anatomical information about the track orientation using an initial guess of the fiber track. 
In the originally proposed GTRACT method, this would be created from the fibers resulting from the Graph Search tracking. However, in practice this can be created using any method and could be defined manually. To create the guide fiber the program gtractCreateGuideFiber can be used. This program will load a fiber tract that has been generated and create a centerline representation of the fiber tract (i.e. a single fiber). In this method, the fiber tracking follows the primary eigenvector direction unless it deviates from the guide fiber track by a angle greater than that specified by the '--guidedCurvatureThreshold' parameter. The user must specify the guide fiber when running this program. version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta, Greg Harris and Yongqiang Zhao. acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractFiberTrackingInputSpec output_spec = gtractFiberTrackingOutputSpec _cmd = " gtractFiberTracking " _outputs_filenames = {"outputTract": "outputTract.vtk"} _redirect_x = False class extractNrrdVectorIndexInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input file containing the vector that will be extracted", exists=True, argstr="--inputVolume %s", ) vectorIndex = traits.Int( desc="Index in the vector image to extract", argstr="--vectorIndex %d" ) setImageOrientation = traits.Enum( "AsAcquired", "Axial", "Coronal", "Sagittal", desc="Sets the image orientation of the extracted vector (Axial, Coronal, Sagittal)", argstr="--setImageOrientation %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing the vector image at the given index", argstr="--outputVolume %s", ) numberOfThreads = traits.Int( desc="Explicitly 
specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class extractNrrdVectorIndexOutputSpec(TraitedSpec): outputVolume = File( desc="Required: name of output NRRD file containing the vector image at the given index", exists=True, ) class extractNrrdVectorIndex(SEMLikeCommandLine): """title: Extract Nrrd Index category: Diffusion.GTRACT description: This program will extract a 3D image (single vector) from a vector 3D image at a given vector index. version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = extractNrrdVectorIndexInputSpec output_spec = extractNrrdVectorIndexOutputSpec _cmd = " extractNrrdVectorIndex " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class gtractResampleFibersInputSpec(CommandLineInputSpec): inputForwardDeformationFieldVolume = File( desc="Required: input forward deformation field image file name", exists=True, argstr="--inputForwardDeformationFieldVolume %s", ) inputReverseDeformationFieldVolume = File( desc="Required: input reverse deformation field image file name", exists=True, argstr="--inputReverseDeformationFieldVolume %s", ) inputTract = File( desc="Required: name of input vtkPolydata file containing tract lines.", exists=True, argstr="--inputTract %s", ) outputTract = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", argstr="--outputTract %s", ) writeXMLPolyDataFile = traits.Bool( desc="Flag to make use of the XML format for vtkPolyData fiber tracts.", argstr="--writeXMLPolyDataFile ", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum 
number of threads to use.", argstr="--numberOfThreads %d", ) class gtractResampleFibersOutputSpec(TraitedSpec): outputTract = File( desc="Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", exists=True, ) class gtractResampleFibers(SEMLikeCommandLine): """title: Resample Fibers category: Diffusion.GTRACT description: This program will resample a fiber tract with respect to a pair of deformation fields that represent the forward and reverse deformation fields. version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractResampleFibersInputSpec output_spec = gtractResampleFibersOutputSpec _cmd = " gtractResampleFibers " _outputs_filenames = {"outputTract": "outputTract.vtk"} _redirect_x = False class gtractTensorInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input image 4D NRRD image. Must contain data based on at least 6 distinct diffusion directions. The inputVolume is allowed to have multiple b0 and gradient direction images. Averaging of the b0 image is done internally in this step. Prior averaging of the DWIs is not required.", exists=True, argstr="--inputVolume %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing the Tensor vector image", argstr="--outputVolume %s", ) medianFilterSize = InputMultiPath( traits.Int, desc="Median filter radius in all 3 directions", sep=",", argstr="--medianFilterSize %s", ) maskProcessingMode = traits.Enum( "NOMASK", "ROIAUTO", "ROI", desc="ROIAUTO: mask is implicitly defined using a otsu forground and hole filling algorithm. 
ROI: Uses the masks to define what parts of the image should be used for computing the transform. NOMASK: no mask used", argstr="--maskProcessingMode %s", ) maskVolume = File( desc="Mask Image, if maskProcessingMode is ROI", exists=True, argstr="--maskVolume %s", ) backgroundSuppressingThreshold = traits.Int( desc="Image threshold to suppress background. This sets a threshold used on the b0 image to remove background voxels from processing. Typically, values of 100 and 500 work well for Siemens and GE DTI data, respectively. Check your data particularly in the globus pallidus to make sure the brain tissue is not being eliminated with this threshold.", argstr="--backgroundSuppressingThreshold %d", ) resampleIsotropic = traits.Bool( desc="Flag to resample to isotropic voxels. Enabling this feature is recommended if fiber tracking will be performed.", argstr="--resampleIsotropic ", ) size = traits.Float(desc="Isotropic voxel size to resample to", argstr="--size %f") b0Index = traits.Int( desc="Index in input vector index to extract", argstr="--b0Index %d" ) applyMeasurementFrame = traits.Bool( desc="Flag to apply the measurement frame to the gradient directions", argstr="--applyMeasurementFrame ", ) ignoreIndex = InputMultiPath( traits.Int, desc="Ignore diffusion gradient index. Used to remove specific gradient directions with artifacts.", sep=",", argstr="--ignoreIndex %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class gtractTensorOutputSpec(TraitedSpec): outputVolume = File( desc="Required: name of output NRRD file containing the Tensor vector image", exists=True, ) class gtractTensor(SEMLikeCommandLine): """title: Tensor Estimation category: Diffusion.GTRACT description: This step will convert a b-value averaged diffusion tensor image to a 3x3 tensor voxel image. 
This step takes the diffusion tensor image data and generates a tensor representation of the data based on the signal intensity decay, b values applied, and the diffusion difrections. The apparent diffusion coefficient for a given orientation is computed on a pixel-by-pixel basis by fitting the image data (voxel intensities) to the Stejskal-Tanner equation. If at least 6 diffusion directions are used, then the diffusion tensor can be computed. This program uses itk::DiffusionTensor3DReconstructionImageFilter. The user can adjust background threshold, median filter, and isotropic resampling. version: 4.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt contributor: This tool was developed by Vincent Magnotta and Greg Harris. acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1 """ input_spec = gtractTensorInputSpec output_spec = gtractTensorOutputSpec _cmd = " gtractTensor " _outputs_filenames = {"outputVolume": "outputVolume.nrrd"} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/diffusion/maxcurvature.py000066400000000000000000000047331413403311400254060ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ...base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class maxcurvatureInputSpec(CommandLineInputSpec): image = File(desc="FA Image", exists=True, argstr="--image %s") output = traits.Either( traits.Bool, File(), hash_files=False, desc="Output File", argstr="--output %s" ) sigma = traits.Float(desc="Scale of Gradients", argstr="--sigma %f") verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") class 
maxcurvatureOutputSpec(TraitedSpec): output = File(desc="Output File", exists=True) class maxcurvature(SEMLikeCommandLine): """title: MaxCurvature-Hessian (DTIProcess) category: Diffusion description: This program computes the Hessian of the FA image (--image). We use this scalar image as a registration input when doing DTI atlas building. For most adult FA we use a sigma of 2 whereas for neonate or primate images and sigma of 1 or 1.5 is more appropriate. For really noisy images, 2.5 - 4 can be considered. The final image (--output) shows the main feature of the input image. version: 1.1.0 documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess license: Copyright (c) Casey Goodlett. All rights reserved. See http://www.ia.unc.edu/dev/Copyright.htm for details. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the above copyright notices for more information. contributor: Casey Goodlett acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependancies on boost and a fortran compiler. 
""" input_spec = maxcurvatureInputSpec output_spec = maxcurvatureOutputSpec _cmd = " maxcurvature " _outputs_filenames = {"output": "output.nii"} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/000077500000000000000000000000001413403311400234415ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/__init__.py000066400000000000000000000000301413403311400255430ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_DWIConvert.py000066400000000000000000000055441413403311400300760ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..diffusion import DWIConvert def test_DWIConvert_inputs(): input_map = dict( allowLossyConversion=dict( argstr="--allowLossyConversion ", ), args=dict( argstr="%s", ), conversionMode=dict( argstr="--conversionMode %s", ), environ=dict( nohash=True, usedefault=True, ), fMRI=dict( argstr="--fMRI ", ), fslNIFTIFile=dict( argstr="--fslNIFTIFile %s", extensions=None, ), gradientVectorFile=dict( argstr="--gradientVectorFile %s", hash_files=False, ), inputBValues=dict( argstr="--inputBValues %s", extensions=None, ), inputBVectors=dict( argstr="--inputBVectors %s", extensions=None, ), inputDicomDirectory=dict( argstr="--inputDicomDirectory %s", ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), outputBValues=dict( argstr="--outputBValues %s", hash_files=False, ), outputBVectors=dict( argstr="--outputBVectors %s", hash_files=False, ), outputDirectory=dict( argstr="--outputDirectory %s", hash_files=False, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), smallGradientThreshold=dict( argstr="--smallGradientThreshold %f", ), transposeInputBVectors=dict( argstr="--transposeInputBVectors ", ), useBMatrixGradientDirections=dict( argstr="--useBMatrixGradientDirections ", ), useIdentityMeaseurementFrame=dict( argstr="--useIdentityMeaseurementFrame ", ), 
writeProtocolGradientsFile=dict( argstr="--writeProtocolGradientsFile ", ), ) inputs = DWIConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DWIConvert_outputs(): output_map = dict( gradientVectorFile=dict( extensions=None, ), outputBValues=dict( extensions=None, ), outputBVectors=dict( extensions=None, ), outputDirectory=dict(), outputVolume=dict( extensions=None, ), ) outputs = DWIConvert.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_compareTractInclusion.py000066400000000000000000000030101413403311400324040ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import compareTractInclusion def test_compareTractInclusion_inputs(): input_map = dict( args=dict( argstr="%s", ), closeness=dict( argstr="--closeness %f", ), environ=dict( nohash=True, usedefault=True, ), numberOfPoints=dict( argstr="--numberOfPoints %d", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), standardFiber=dict( argstr="--standardFiber %s", extensions=None, ), testFiber=dict( argstr="--testFiber %s", extensions=None, ), testForBijection=dict( argstr="--testForBijection ", ), testForFiberCardinality=dict( argstr="--testForFiberCardinality ", ), writeXMLPolyDataFile=dict( argstr="--writeXMLPolyDataFile ", ), ) inputs = compareTractInclusion.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_compareTractInclusion_outputs(): output_map = dict() outputs = compareTractInclusion.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) 
== value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiaverage.py000066400000000000000000000022101413403311400302100ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..diffusion import dtiaverage def test_dtiaverage_inputs(): input_map = dict( DTI_double=dict( argstr="--DTI_double ", ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputs=dict( argstr="--inputs %s...", ), tensor_output=dict( argstr="--tensor_output %s", hash_files=False, ), verbose=dict( argstr="--verbose ", ), ) inputs = dtiaverage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_dtiaverage_outputs(): output_map = dict( tensor_output=dict( extensions=None, ), ) outputs = dtiaverage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiestim.py000066400000000000000000000047661413403311400277410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..diffusion import dtiestim def test_dtiestim_inputs(): input_map = dict( B0=dict( argstr="--B0 %s", hash_files=False, ), B0_mask_output=dict( argstr="--B0_mask_output %s", hash_files=False, ), DTI_double=dict( argstr="--DTI_double ", ), args=dict( argstr="%s", ), bad_region_mask=dict( argstr="--bad_region_mask %s", extensions=None, ), brain_mask=dict( argstr="--brain_mask %s", extensions=None, ), correction=dict( argstr="--correction %s", ), defaultTensor=dict( argstr="--defaultTensor %s", sep=",", ), dwi_image=dict( argstr="--dwi_image %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), idwi=dict( argstr="--idwi %s", hash_files=False, ), method=dict( argstr="--method %s", ), shiftNeg=dict( argstr="--shiftNeg ", ), 
shiftNegCoeff=dict( argstr="--shiftNegCoeff %f", ), sigma=dict( argstr="--sigma %f", ), step=dict( argstr="--step %f", ), tensor_output=dict( argstr="--tensor_output %s", hash_files=False, ), threshold=dict( argstr="--threshold %d", ), verbose=dict( argstr="--verbose ", ), weight_iterations=dict( argstr="--weight_iterations %d", ), ) inputs = dtiestim.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_dtiestim_outputs(): output_map = dict( B0=dict( extensions=None, ), B0_mask_output=dict( extensions=None, ), idwi=dict( extensions=None, ), tensor_output=dict( extensions=None, ), ) outputs = dtiestim.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_dtiprocess.py000066400000000000000000000111721413403311400302630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..diffusion import dtiprocess def test_dtiprocess_inputs(): input_map = dict( DTI_double=dict( argstr="--DTI_double ", ), RD_output=dict( argstr="--RD_output %s", hash_files=False, ), affineitk_file=dict( argstr="--affineitk_file %s", extensions=None, ), args=dict( argstr="%s", ), color_fa_output=dict( argstr="--color_fa_output %s", hash_files=False, ), correction=dict( argstr="--correction %s", ), deformation_output=dict( argstr="--deformation_output %s", hash_files=False, ), dof_file=dict( argstr="--dof_file %s", extensions=None, ), dti_image=dict( argstr="--dti_image %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), fa_gradient_output=dict( argstr="--fa_gradient_output %s", hash_files=False, ), fa_gradmag_output=dict( argstr="--fa_gradmag_output %s", hash_files=False, ), fa_output=dict( argstr="--fa_output %s", hash_files=False, ), forward=dict( 
argstr="--forward %s", extensions=None, ), frobenius_norm_output=dict( argstr="--frobenius_norm_output %s", hash_files=False, ), hField=dict( argstr="--hField ", ), interpolation=dict( argstr="--interpolation %s", ), lambda1_output=dict( argstr="--lambda1_output %s", hash_files=False, ), lambda2_output=dict( argstr="--lambda2_output %s", hash_files=False, ), lambda3_output=dict( argstr="--lambda3_output %s", hash_files=False, ), mask=dict( argstr="--mask %s", extensions=None, ), md_output=dict( argstr="--md_output %s", hash_files=False, ), negative_eigenvector_output=dict( argstr="--negative_eigenvector_output %s", hash_files=False, ), newdof_file=dict( argstr="--newdof_file %s", extensions=None, ), outmask=dict( argstr="--outmask %s", hash_files=False, ), principal_eigenvector_output=dict( argstr="--principal_eigenvector_output %s", hash_files=False, ), reorientation=dict( argstr="--reorientation %s", ), rot_output=dict( argstr="--rot_output %s", hash_files=False, ), scalar_float=dict( argstr="--scalar_float ", ), sigma=dict( argstr="--sigma %f", ), verbose=dict( argstr="--verbose ", ), ) inputs = dtiprocess.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_dtiprocess_outputs(): output_map = dict( RD_output=dict( extensions=None, ), color_fa_output=dict( extensions=None, ), deformation_output=dict( extensions=None, ), fa_gradient_output=dict( extensions=None, ), fa_gradmag_output=dict( extensions=None, ), fa_output=dict( extensions=None, ), frobenius_norm_output=dict( extensions=None, ), lambda1_output=dict( extensions=None, ), lambda2_output=dict( extensions=None, ), lambda3_output=dict( extensions=None, ), md_output=dict( extensions=None, ), negative_eigenvector_output=dict( extensions=None, ), outmask=dict( extensions=None, ), principal_eigenvector_output=dict( extensions=None, ), rot_output=dict( extensions=None, ), ) outputs = 
dtiprocess.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_extractNrrdVectorIndex.py000066400000000000000000000025241413403311400325600ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import extractNrrdVectorIndex def test_extractNrrdVectorIndex_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), setImageOrientation=dict( argstr="--setImageOrientation %s", ), vectorIndex=dict( argstr="--vectorIndex %d", ), ) inputs = extractNrrdVectorIndex.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_extractNrrdVectorIndex_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = extractNrrdVectorIndex.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAnisotropyMap.py000066400000000000000000000023731413403311400321210ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractAnisotropyMap def test_gtractAnisotropyMap_inputs(): input_map = dict( anisotropyType=dict( argstr="--anisotropyType %s", ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputTensorVolume=dict( argstr="--inputTensorVolume %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputVolume=dict( argstr="--outputVolume %s", 
hash_files=False, ), ) inputs = gtractAnisotropyMap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractAnisotropyMap_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = gtractAnisotropyMap.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractAverageBvalues.py000066400000000000000000000025141413403311400322050ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractAverageBvalues def test_gtractAverageBvalues_inputs(): input_map = dict( args=dict( argstr="%s", ), averageB0only=dict( argstr="--averageB0only ", ), directionsTolerance=dict( argstr="--directionsTolerance %f", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = gtractAverageBvalues.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractAverageBvalues_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = gtractAverageBvalues.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractClipAnisotropy.py000066400000000000000000000025001413403311400322630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractClipAnisotropy def test_gtractClipAnisotropy_inputs(): 
input_map = dict( args=dict( argstr="%s", ), clipFirstSlice=dict( argstr="--clipFirstSlice ", ), clipLastSlice=dict( argstr="--clipLastSlice ", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = gtractClipAnisotropy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractClipAnisotropy_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = gtractClipAnisotropy.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoRegAnatomy.py000066400000000000000000000057661413403311400316550ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractCoRegAnatomy def test_gtractCoRegAnatomy_inputs(): input_map = dict( args=dict( argstr="%s", ), borderSize=dict( argstr="--borderSize %d", ), convergence=dict( argstr="--convergence %f", ), environ=dict( nohash=True, usedefault=True, ), gradientTolerance=dict( argstr="--gradientTolerance %f", ), gridSize=dict( argstr="--gridSize %s", sep=",", ), inputAnatomicalVolume=dict( argstr="--inputAnatomicalVolume %s", extensions=None, ), inputRigidTransform=dict( argstr="--inputRigidTransform %s", extensions=None, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), maxBSplineDisplacement=dict( argstr="--maxBSplineDisplacement %f", ), maximumStepSize=dict( argstr="--maximumStepSize %f", ), minimumStepSize=dict( argstr="--minimumStepSize %f", ), numberOfHistogramBins=dict( argstr="--numberOfHistogramBins %d", ), numberOfIterations=dict( 
argstr="--numberOfIterations %d", ), numberOfSamples=dict( argstr="--numberOfSamples %d", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputTransformName=dict( argstr="--outputTransformName %s", hash_files=False, ), relaxationFactor=dict( argstr="--relaxationFactor %f", ), samplingPercentage=dict( argstr="--samplingPercentage %f", ), spatialScale=dict( argstr="--spatialScale %d", ), transformType=dict( argstr="--transformType %s", ), translationScale=dict( argstr="--translationScale %f", ), useCenterOfHeadAlign=dict( argstr="--useCenterOfHeadAlign ", ), useGeometryAlign=dict( argstr="--useGeometryAlign ", ), useMomentsAlign=dict( argstr="--useMomentsAlign ", ), vectorIndex=dict( argstr="--vectorIndex %d", ), ) inputs = gtractCoRegAnatomy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractCoRegAnatomy_outputs(): output_map = dict( outputTransformName=dict( extensions=None, ), ) outputs = gtractCoRegAnatomy.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractConcatDwi.py000066400000000000000000000022751413403311400311700ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractConcatDwi def test_gtractConcatDwi_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), ignoreOrigins=dict( argstr="--ignoreOrigins ", ), inputVolume=dict( argstr="--inputVolume %s...", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = gtractConcatDwi.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], 
metakey) == value def test_gtractConcatDwi_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = gtractConcatDwi.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCopyImageOrientation.py000066400000000000000000000024731413403311400334060ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractCopyImageOrientation def test_gtractCopyImageOrientation_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputReferenceVolume=dict( argstr="--inputReferenceVolume %s", extensions=None, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = gtractCopyImageOrientation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractCopyImageOrientation_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = gtractCopyImageOrientation.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCoregBvalues.py000066400000000000000000000045531413403311400316770ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractCoregBvalues def test_gtractCoregBvalues_inputs(): input_map = dict( args=dict( argstr="%s", ), debugLevel=dict( argstr="--debugLevel %d", ), eddyCurrentCorrection=dict( argstr="--eddyCurrentCorrection ", ), environ=dict( nohash=True, usedefault=True, ), 
fixedVolume=dict( argstr="--fixedVolume %s", extensions=None, ), fixedVolumeIndex=dict( argstr="--fixedVolumeIndex %d", ), maximumStepSize=dict( argstr="--maximumStepSize %f", ), minimumStepSize=dict( argstr="--minimumStepSize %f", ), movingVolume=dict( argstr="--movingVolume %s", extensions=None, ), numberOfIterations=dict( argstr="--numberOfIterations %d", ), numberOfSpatialSamples=dict( argstr="--numberOfSpatialSamples %d", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputTransform=dict( argstr="--outputTransform %s", hash_files=False, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), registerB0Only=dict( argstr="--registerB0Only ", ), relaxationFactor=dict( argstr="--relaxationFactor %f", ), samplingPercentage=dict( argstr="--samplingPercentage %f", ), spatialScale=dict( argstr="--spatialScale %f", ), ) inputs = gtractCoregBvalues.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractCoregBvalues_outputs(): output_map = dict( outputTransform=dict( extensions=None, ), outputVolume=dict( extensions=None, ), ) outputs = gtractCoregBvalues.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCostFastMarching.py000066400000000000000000000037421413403311400325140ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractCostFastMarching def test_gtractCostFastMarching_inputs(): input_map = dict( anisotropyWeight=dict( argstr="--anisotropyWeight %f", ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputAnisotropyVolume=dict( argstr="--inputAnisotropyVolume %s", extensions=None, ), inputStartingSeedsLabelMapVolume=dict( 
argstr="--inputStartingSeedsLabelMapVolume %s", extensions=None, ), inputTensorVolume=dict( argstr="--inputTensorVolume %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputCostVolume=dict( argstr="--outputCostVolume %s", hash_files=False, ), outputSpeedVolume=dict( argstr="--outputSpeedVolume %s", hash_files=False, ), seedThreshold=dict( argstr="--seedThreshold %f", ), startingSeedsLabel=dict( argstr="--startingSeedsLabel %d", ), stoppingValue=dict( argstr="--stoppingValue %f", ), ) inputs = gtractCostFastMarching.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractCostFastMarching_outputs(): output_map = dict( outputCostVolume=dict( extensions=None, ), outputSpeedVolume=dict( extensions=None, ), ) outputs = gtractCostFastMarching.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractCreateGuideFiber.py000066400000000000000000000025251413403311400324440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractCreateGuideFiber def test_gtractCreateGuideFiber_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputFiber=dict( argstr="--inputFiber %s", extensions=None, ), numberOfPoints=dict( argstr="--numberOfPoints %d", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputFiber=dict( argstr="--outputFiber %s", hash_files=False, ), writeXMLPolyDataFile=dict( argstr="--writeXMLPolyDataFile ", ), ) inputs = gtractCreateGuideFiber.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def 
test_gtractCreateGuideFiber_outputs(): output_map = dict( outputFiber=dict( extensions=None, ), ) outputs = gtractCreateGuideFiber.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFastMarchingTracking.py000066400000000000000000000043701413403311400333440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractFastMarchingTracking def test_gtractFastMarchingTracking_inputs(): input_map = dict( args=dict( argstr="%s", ), costStepSize=dict( argstr="--costStepSize %f", ), environ=dict( nohash=True, usedefault=True, ), inputAnisotropyVolume=dict( argstr="--inputAnisotropyVolume %s", extensions=None, ), inputCostVolume=dict( argstr="--inputCostVolume %s", extensions=None, ), inputStartingSeedsLabelMapVolume=dict( argstr="--inputStartingSeedsLabelMapVolume %s", extensions=None, ), inputTensorVolume=dict( argstr="--inputTensorVolume %s", extensions=None, ), maximumStepSize=dict( argstr="--maximumStepSize %f", ), minimumStepSize=dict( argstr="--minimumStepSize %f", ), numberOfIterations=dict( argstr="--numberOfIterations %d", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputTract=dict( argstr="--outputTract %s", hash_files=False, ), seedThreshold=dict( argstr="--seedThreshold %f", ), startingSeedsLabel=dict( argstr="--startingSeedsLabel %d", ), trackingThreshold=dict( argstr="--trackingThreshold %f", ), writeXMLPolyDataFile=dict( argstr="--writeXMLPolyDataFile ", ), ) inputs = gtractFastMarchingTracking.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractFastMarchingTracking_outputs(): output_map = dict( outputTract=dict( extensions=None, ), ) outputs = gtractFastMarchingTracking.output_spec() for key, 
metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractFiberTracking.py000066400000000000000000000065541413403311400320330ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractFiberTracking def test_gtractFiberTracking_inputs(): input_map = dict( args=dict( argstr="%s", ), branchingAngle=dict( argstr="--branchingAngle %f", ), branchingThreshold=dict( argstr="--branchingThreshold %f", ), curvatureThreshold=dict( argstr="--curvatureThreshold %f", ), endingSeedsLabel=dict( argstr="--endingSeedsLabel %d", ), environ=dict( nohash=True, usedefault=True, ), guidedCurvatureThreshold=dict( argstr="--guidedCurvatureThreshold %f", ), inputAnisotropyVolume=dict( argstr="--inputAnisotropyVolume %s", extensions=None, ), inputEndingSeedsLabelMapVolume=dict( argstr="--inputEndingSeedsLabelMapVolume %s", extensions=None, ), inputStartingSeedsLabelMapVolume=dict( argstr="--inputStartingSeedsLabelMapVolume %s", extensions=None, ), inputTensorVolume=dict( argstr="--inputTensorVolume %s", extensions=None, ), inputTract=dict( argstr="--inputTract %s", extensions=None, ), maximumBranchPoints=dict( argstr="--maximumBranchPoints %d", ), maximumGuideDistance=dict( argstr="--maximumGuideDistance %f", ), maximumLength=dict( argstr="--maximumLength %f", ), minimumLength=dict( argstr="--minimumLength %f", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputTract=dict( argstr="--outputTract %s", hash_files=False, ), randomSeed=dict( argstr="--randomSeed %d", ), seedThreshold=dict( argstr="--seedThreshold %f", ), startingSeedsLabel=dict( argstr="--startingSeedsLabel %d", ), stepSize=dict( argstr="--stepSize %f", ), tendF=dict( argstr="--tendF %f", ), tendG=dict( argstr="--tendG %f", ), trackingMethod=dict( argstr="--trackingMethod %s", ), trackingThreshold=dict( 
argstr="--trackingThreshold %f", ), useLoopDetection=dict( argstr="--useLoopDetection ", ), useRandomWalk=dict( argstr="--useRandomWalk ", ), useTend=dict( argstr="--useTend ", ), writeXMLPolyDataFile=dict( argstr="--writeXMLPolyDataFile ", ), ) inputs = gtractFiberTracking.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractFiberTracking_outputs(): output_map = dict( outputTract=dict( extensions=None, ), ) outputs = gtractFiberTracking.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractImageConformity.py000066400000000000000000000024421413403311400324050ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractImageConformity def test_gtractImageConformity_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputReferenceVolume=dict( argstr="--inputReferenceVolume %s", extensions=None, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = gtractImageConformity.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractImageConformity_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = gtractImageConformity.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertBSplineTransform.py000066400000000000000000000026751413403311400337410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractInvertBSplineTransform def test_gtractInvertBSplineTransform_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputReferenceVolume=dict( argstr="--inputReferenceVolume %s", extensions=None, ), inputTransform=dict( argstr="--inputTransform %s", extensions=None, ), landmarkDensity=dict( argstr="--landmarkDensity %s", sep=",", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputTransform=dict( argstr="--outputTransform %s", hash_files=False, ), ) inputs = gtractInvertBSplineTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractInvertBSplineTransform_outputs(): output_map = dict( outputTransform=dict( extensions=None, ), ) outputs = gtractInvertBSplineTransform.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertDisplacementField.py000066400000000000000000000026261413403311400340610ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractInvertDisplacementField def test_gtractInvertDisplacementField_inputs(): input_map = dict( args=dict( argstr="%s", ), baseImage=dict( argstr="--baseImage %s", extensions=None, ), deformationImage=dict( argstr="--deformationImage %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), subsamplingFactor=dict( 
argstr="--subsamplingFactor %d", ), ) inputs = gtractInvertDisplacementField.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractInvertDisplacementField_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = gtractInvertDisplacementField.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractInvertRigidTransform.py000066400000000000000000000023171413403311400334340ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractInvertRigidTransform def test_gtractInvertRigidTransform_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputTransform=dict( argstr="--inputTransform %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputTransform=dict( argstr="--outputTransform %s", hash_files=False, ), ) inputs = gtractInvertRigidTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractInvertRigidTransform_outputs(): output_map = dict( outputTransform=dict( extensions=None, ), ) outputs = gtractInvertRigidTransform.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleAnisotropy.py000066400000000000000000000030061413403311400331460ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractResampleAnisotropy def test_gtractResampleAnisotropy_inputs(): 
input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputAnatomicalVolume=dict( argstr="--inputAnatomicalVolume %s", extensions=None, ), inputAnisotropyVolume=dict( argstr="--inputAnisotropyVolume %s", extensions=None, ), inputTransform=dict( argstr="--inputTransform %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), transformType=dict( argstr="--transformType %s", ), ) inputs = gtractResampleAnisotropy.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractResampleAnisotropy_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = gtractResampleAnisotropy.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleB0.py000066400000000000000000000030261413403311400312420ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractResampleB0 def test_gtractResampleB0_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputAnatomicalVolume=dict( argstr="--inputAnatomicalVolume %s", extensions=None, ), inputTransform=dict( argstr="--inputTransform %s", extensions=None, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), transformType=dict( argstr="--transformType %s", ), vectorIndex=dict( argstr="--vectorIndex %d", ), ) inputs = gtractResampleB0.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value def test_gtractResampleB0_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = gtractResampleB0.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleCodeImage.py000066400000000000000000000027631413403311400326250ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractResampleCodeImage def test_gtractResampleCodeImage_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputCodeVolume=dict( argstr="--inputCodeVolume %s", extensions=None, ), inputReferenceVolume=dict( argstr="--inputReferenceVolume %s", extensions=None, ), inputTransform=dict( argstr="--inputTransform %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), transformType=dict( argstr="--transformType %s", ), ) inputs = gtractResampleCodeImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractResampleCodeImage_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = gtractResampleCodeImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleDWIInPlace.py000066400000000000000000000035721413403311400326660ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractResampleDWIInPlace def test_gtractResampleDWIInPlace_inputs(): input_map = dict( args=dict( argstr="%s", 
), debugLevel=dict( argstr="--debugLevel %d", ), environ=dict( nohash=True, usedefault=True, ), imageOutputSize=dict( argstr="--imageOutputSize %s", sep=",", ), inputTransform=dict( argstr="--inputTransform %s", extensions=None, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputResampledB0=dict( argstr="--outputResampledB0 %s", hash_files=False, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), referenceVolume=dict( argstr="--referenceVolume %s", extensions=None, ), warpDWITransform=dict( argstr="--warpDWITransform %s", extensions=None, ), ) inputs = gtractResampleDWIInPlace.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractResampleDWIInPlace_outputs(): output_map = dict( outputResampledB0=dict( extensions=None, ), outputVolume=dict( extensions=None, ), ) outputs = gtractResampleDWIInPlace.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractResampleFibers.py000066400000000000000000000030471413403311400322160ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractResampleFibers def test_gtractResampleFibers_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputForwardDeformationFieldVolume=dict( argstr="--inputForwardDeformationFieldVolume %s", extensions=None, ), inputReverseDeformationFieldVolume=dict( argstr="--inputReverseDeformationFieldVolume %s", extensions=None, ), inputTract=dict( argstr="--inputTract %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputTract=dict( argstr="--outputTract %s", hash_files=False, ), 
writeXMLPolyDataFile=dict( argstr="--writeXMLPolyDataFile ", ), ) inputs = gtractResampleFibers.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractResampleFibers_outputs(): output_map = dict( outputTract=dict( extensions=None, ), ) outputs = gtractResampleFibers.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_gtractTensor.py000066400000000000000000000036571413403311400305740ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractTensor def test_gtractTensor_inputs(): input_map = dict( applyMeasurementFrame=dict( argstr="--applyMeasurementFrame ", ), args=dict( argstr="%s", ), b0Index=dict( argstr="--b0Index %d", ), backgroundSuppressingThreshold=dict( argstr="--backgroundSuppressingThreshold %d", ), environ=dict( nohash=True, usedefault=True, ), ignoreIndex=dict( argstr="--ignoreIndex %s", sep=",", ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), maskProcessingMode=dict( argstr="--maskProcessingMode %s", ), maskVolume=dict( argstr="--maskVolume %s", extensions=None, ), medianFilterSize=dict( argstr="--medianFilterSize %s", sep=",", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), resampleIsotropic=dict( argstr="--resampleIsotropic ", ), size=dict( argstr="--size %f", ), ) inputs = gtractTensor.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractTensor_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = gtractTensor.output_spec() for key, metadata in 
list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value test_auto_gtractTransformToDisplacementField.py000066400000000000000000000026311413403311400350050ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..gtract import gtractTransformToDisplacementField def test_gtractTransformToDisplacementField_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputReferenceVolume=dict( argstr="--inputReferenceVolume %s", extensions=None, ), inputTransform=dict( argstr="--inputTransform %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputDeformationFieldVolume=dict( argstr="--outputDeformationFieldVolume %s", hash_files=False, ), ) inputs = gtractTransformToDisplacementField.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_gtractTransformToDisplacementField_outputs(): output_map = dict( outputDeformationFieldVolume=dict( extensions=None, ), ) outputs = gtractTransformToDisplacementField.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tests/test_auto_maxcurvature.py000066400000000000000000000022201413403311400306240ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..maxcurvature import maxcurvature def test_maxcurvature_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), image=dict( argstr="--image %s", extensions=None, ), output=dict( argstr="--output %s", hash_files=False, ), sigma=dict( argstr="--sigma %f", ), verbose=dict( argstr="--verbose ", ), ) inputs = 
maxcurvature.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_maxcurvature_outputs(): output_map = dict( output=dict( extensions=None, ), ) outputs = maxcurvature.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tractography/000077500000000000000000000000001413403311400250065ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/diffusion/tractography/__init__.py000066400000000000000000000002671413403311400271240ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .commandlineonly import fiberstats from .fiberprocess import fiberprocess from .fibertrack import fibertrack from .ukftractography import UKFTractography nipype-1.7.0/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py000066400000000000000000000036231413403311400305540ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ....base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class fiberstatsInputSpec(CommandLineInputSpec): fiber_file = File(desc="DTI Fiber File", exists=True, argstr="--fiber_file %s") verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") class fiberstatsOutputSpec(TraitedSpec): pass class fiberstats(SEMLikeCommandLine): """title: FiberStats (DTIProcess) category: Diffusion.Tractography.CommandLineOnly description: Obsolete tool - Not used anymore version: 1.1.0 documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess license: Copyright (c) Casey 
Goodlett. All rights reserved. See http://www.ia.unc.edu/dev/Copyright.htm for details. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the above copyright notices for more information. contributor: Casey Goodlett acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependancies on boost and a fortran compiler. """ input_spec = fiberstatsInputSpec output_spec = fiberstatsOutputSpec _cmd = " fiberstats " _outputs_filenames = {} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/diffusion/tractography/fiberprocess.py000066400000000000000000000141471413403311400300550ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ....base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class fiberprocessInputSpec(CommandLineInputSpec): fiber_file = File(desc="DTI fiber file", exists=True, argstr="--fiber_file %s") fiber_output = traits.Either( traits.Bool, File(), hash_files=False, desc="Output fiber file. May be warped or updated with new data depending on other options used.", argstr="--fiber_output %s", ) tensor_volume = File( desc="Interpolate tensor values from the given field", exists=True, argstr="--tensor_volume %s", ) h_field = File( desc="HField for warp and statistics lookup. 
If this option is used tensor-volume must also be specified.", exists=True, argstr="--h_field %s", ) displacement_field = File( desc="Displacement Field for warp and statistics lookup. If this option is used tensor-volume must also be specified.", exists=True, argstr="--displacement_field %s", ) saveProperties = traits.Bool( desc="save the tensor property as scalar data into the vtk (only works for vtk fiber files). ", argstr="--saveProperties ", ) no_warp = traits.Bool( desc="Do not warp the geometry of the tensors only obtain the new statistics.", argstr="--no_warp ", ) fiber_radius = traits.Float( desc="set radius of all fibers to this value", argstr="--fiber_radius %f" ) index_space = traits.Bool( desc="Use index-space for fiber output coordinates, otherwise us world space for fiber output coordinates (from tensor file).", argstr="--index_space ", ) voxelize = traits.Either( traits.Bool, File(), hash_files=False, desc="Voxelize fiber into a label map (the labelmap filename is the argument of -V). The tensor file must be specified using -T for information about the size, origin, spacing of the image. The deformation is applied before the voxelization ", argstr="--voxelize %s", ) voxelize_count_fibers = traits.Bool( desc="Count number of fibers per-voxel instead of just setting to 1", argstr="--voxelize_count_fibers ", ) voxel_label = traits.Int( desc="Label for voxelized fiber", argstr="--voxel_label %d" ) verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") noDataChange = traits.Bool(desc="Do not change data ??? ", argstr="--noDataChange ") class fiberprocessOutputSpec(TraitedSpec): fiber_output = File( desc="Output fiber file. May be warped or updated with new data depending on other options used.", exists=True, ) voxelize = File( desc="Voxelize fiber into a label map (the labelmap filename is the argument of -V). The tensor file must be specified using -T for information about the size, origin, spacing of the image. 
The deformation is applied before the voxelization ", exists=True, ) class fiberprocess(SEMLikeCommandLine): """title: FiberProcess (DTIProcess) category: Diffusion.Tractography description: fiberprocess is a tool that manage fiber files extracted from the fibertrack tool or any fiber tracking algorithm. It takes as an input .fib and .vtk files (--fiber_file) and saves the changed fibers (--fiber_output) into the 2 same formats. The main purpose of this tool is to deform the fiber file with a transformation field as an input (--displacement_field or --h_field depending if you deal with dfield or hfield). To use that option you need to specify the tensor field from which the fiber file was extracted with the option --tensor_volume. The transformation applied on the fiber file is the inverse of the one input. If the transformation is from one case to an atlas, fiberprocess assumes that the fiber file is in the atlas space and you want it in the original case space, so it's the inverse of the transformation which has been computed. You have 2 options for fiber modification. You can either deform the fibers (their geometry) into the space OR you can keep the same geometry but map the diffusion properties (fa, md, lbd's...) of the original tensor field along the fibers at the corresponding locations. This is triggered by the --no_warp option. To use the previous example: when you have a tensor field in the original space and the deformed tensor field in the atlas space, you want to track the fibers in the atlas space, keeping this geometry but with the original case diffusion properties. Then you can specify the transformations field (from original case -> atlas) and the original tensor field with the --tensor_volume option. With fiberprocess you can also binarize a fiber file. Using the --voxelize option will create an image where each voxel through which a fiber is passing is set to 1. 
The output is going to be a binary image with the values 0 or 1 by default but the 1 value voxel can be set to any number with the --voxel_label option. Finally you can create an image where the value at the voxel is the number of fiber passing through. (--voxelize_count_fibers) version: 1.0.0 documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess license: Copyright (c) Casey Goodlett. All rights reserved. See http://www.ia.unc.edu/dev/Copyright.htm for details. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the above copyright notices for more information. contributor: Casey Goodlett """ input_spec = fiberprocessInputSpec output_spec = fiberprocessOutputSpec _cmd = " fiberprocess " _outputs_filenames = { "fiber_output": "fiber_output.vtk", "voxelize": "voxelize.nii", } _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py000066400000000000000000000127351413403311400275040ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ....base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class fibertrackInputSpec(CommandLineInputSpec): input_tensor_file = File( desc="Tensor Image", exists=True, argstr="--input_tensor_file %s" ) input_roi_file = File( desc="The filename of the image which contains the labels used for seeding and constraining the algorithm.", exists=True, argstr="--input_roi_file %s", ) output_fiber_file = traits.Either( traits.Bool, File(), hash_files=False, desc="The filename for the fiber file produced by the algorithm. 
This file must end in a .fib or .vtk extension for ITK spatial object and vtkPolyData formats respectively.", argstr="--output_fiber_file %s", ) source_label = traits.Int( desc="The label of voxels in the labelfile to use for seeding tractography. One tract is seeded from the center of each voxel with this label", argstr="--source_label %d", ) target_label = traits.Int( desc="The label of voxels in the labelfile used to constrain tractography. Tracts that do not pass through a voxel with this label are rejected. Set this keep all tracts.", argstr="--target_label %d", ) forbidden_label = traits.Int(desc="Forbidden label", argstr="--forbidden_label %d") whole_brain = traits.Bool( desc="If this option is enabled all voxels in the image are used to seed tractography. When this option is enabled both source and target labels function as target labels", argstr="--whole_brain ", ) max_angle = traits.Float( desc="Maximum angle of change in radians", argstr="--max_angle %f" ) step_size = traits.Float( desc="Step size in mm for the tracking algorithm", argstr="--step_size %f" ) min_fa = traits.Float( desc="The minimum FA threshold to continue tractography", argstr="--min_fa %f" ) force = traits.Bool(desc="Ignore sanity checks.", argstr="--force ") verbose = traits.Bool(desc="produce verbose output", argstr="--verbose ") really_verbose = traits.Bool( desc="Follow detail of fiber tracking algorithm", argstr="--really_verbose " ) class fibertrackOutputSpec(TraitedSpec): output_fiber_file = File( desc="The filename for the fiber file produced by the algorithm. This file must end in a .fib or .vtk extension for ITK spatial object and vtkPolyData formats respectively.", exists=True, ) class fibertrack(SEMLikeCommandLine): """title: FiberTrack (DTIProcess) category: Diffusion.Tractography description: This program implements a simple streamline tractography method based on the principal eigenvector of the tensor field. 
A fourth order Runge-Kutta integration rule used to advance the streamlines. As a first parameter you have to input the tensor field (with the --input_tensor_file option). Then the region of interest image file is set with the --input_roi_file. Next you want to set the output fiber file name after the --output_fiber_file option. You can specify the label value in the input_roi_file with the --target_label, --source_label and --fobidden_label options. By default target label is 1, source label is 2 and forbidden label is 0. The source label is where the streamlines are seeded, the target label defines the voxels through which the fibers must pass by to be kept in the final fiber file and the forbidden label defines the voxels where the streamlines are stopped if they pass through it. There is also a --whole_brain option which, if enabled, consider both target and source labels of the roi image as target labels and all the voxels of the image are considered as sources. During the tractography, the --fa_min parameter is used as the minimum value needed at different voxel for the tracking to keep going along a streamline. The --step_size parameter is used for each iteration of the tracking algorithm and defines the length of each step. The --max_angle option defines the maximum angle allowed between two successive segments along the tracked fiber. version: 1.1.0 documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess license: Copyright (c) Casey Goodlett. All rights reserved. See http://www.ia.unc.edu/dev/Copyright.htm for details. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the above copyright notices for more information. 
contributor: Casey Goodlett acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering) provided conversions to make DTIProcess compatible with Slicer execution, and simplified the stand-alone build requirements by removing the dependancies on boost and a fortran compiler. """ input_spec = fibertrackInputSpec output_spec = fibertrackOutputSpec _cmd = " fibertrack " _outputs_filenames = {"output_fiber_file": "output_fiber_file.vtk"} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/diffusion/tractography/tests/000077500000000000000000000000001413403311400261505ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/diffusion/tractography/tests/__init__.py000066400000000000000000000000301413403311400302520ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_UKFTractography.py000066400000000000000000000067641413403311400336430ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..ukftractography import UKFTractography def test_UKFTractography_inputs(): input_map = dict( Ql=dict( argstr="--Ql %f", ), Qm=dict( argstr="--Qm %f", ), Qw=dict( argstr="--Qw %f", ), Rs=dict( argstr="--Rs %f", ), args=dict( argstr="%s", ), dwiFile=dict( argstr="--dwiFile %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), freeWater=dict( argstr="--freeWater ", ), fullTensorModel=dict( argstr="--fullTensorModel ", ), labels=dict( argstr="--labels %s", sep=",", ), maskFile=dict( argstr="--maskFile %s", extensions=None, ), maxBranchingAngle=dict( argstr="--maxBranchingAngle %f", ), maxHalfFiberLength=dict( argstr="--maxHalfFiberLength %f", ), minBranchingAngle=dict( argstr="--minBranchingAngle %f", ), minFA=dict( argstr="--minFA %f", ), minGA=dict( argstr="--minGA %f", 
), numTensor=dict( argstr="--numTensor %s", ), numThreads=dict( argstr="--numThreads %d", ), recordCovariance=dict( argstr="--recordCovariance ", ), recordFA=dict( argstr="--recordFA ", ), recordFreeWater=dict( argstr="--recordFreeWater ", ), recordLength=dict( argstr="--recordLength %f", ), recordNMSE=dict( argstr="--recordNMSE ", ), recordState=dict( argstr="--recordState ", ), recordTensors=dict( argstr="--recordTensors ", ), recordTrace=dict( argstr="--recordTrace ", ), seedFALimit=dict( argstr="--seedFALimit %f", ), seedsFile=dict( argstr="--seedsFile %s", extensions=None, ), seedsPerVoxel=dict( argstr="--seedsPerVoxel %d", ), stepLength=dict( argstr="--stepLength %f", ), storeGlyphs=dict( argstr="--storeGlyphs ", ), tracts=dict( argstr="--tracts %s", hash_files=False, ), tractsWithSecondTensor=dict( argstr="--tractsWithSecondTensor %s", hash_files=False, ), writeAsciiTracts=dict( argstr="--writeAsciiTracts ", ), writeUncompressedTracts=dict( argstr="--writeUncompressedTracts ", ), ) inputs = UKFTractography.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_UKFTractography_outputs(): output_map = dict( tracts=dict( extensions=None, ), tractsWithSecondTensor=dict( extensions=None, ), ) outputs = UKFTractography.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberprocess.py000066400000000000000000000041641413403311400333040ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..fiberprocess import fiberprocess def test_fiberprocess_inputs(): input_map = dict( args=dict( argstr="%s", ), displacement_field=dict( argstr="--displacement_field %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), 
fiber_file=dict( argstr="--fiber_file %s", extensions=None, ), fiber_output=dict( argstr="--fiber_output %s", hash_files=False, ), fiber_radius=dict( argstr="--fiber_radius %f", ), h_field=dict( argstr="--h_field %s", extensions=None, ), index_space=dict( argstr="--index_space ", ), noDataChange=dict( argstr="--noDataChange ", ), no_warp=dict( argstr="--no_warp ", ), saveProperties=dict( argstr="--saveProperties ", ), tensor_volume=dict( argstr="--tensor_volume %s", extensions=None, ), verbose=dict( argstr="--verbose ", ), voxel_label=dict( argstr="--voxel_label %d", ), voxelize=dict( argstr="--voxelize %s", hash_files=False, ), voxelize_count_fibers=dict( argstr="--voxelize_count_fibers ", ), ) inputs = fiberprocess.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_fiberprocess_outputs(): output_map = dict( fiber_output=dict( extensions=None, ), voxelize=dict( extensions=None, ), ) outputs = fiberprocess.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fiberstats.py000066400000000000000000000016611413403311400327630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..commandlineonly import fiberstats def test_fiberstats_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fiber_file=dict( argstr="--fiber_file %s", extensions=None, ), verbose=dict( argstr="--verbose ", ), ) inputs = fiberstats.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_fiberstats_outputs(): output_map = dict() outputs = fiberstats.output_spec() for key, metadata in 
list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tractography/tests/test_auto_fibertrack.py000066400000000000000000000036061413403311400327320ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..fibertrack import fibertrack def test_fibertrack_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), forbidden_label=dict( argstr="--forbidden_label %d", ), force=dict( argstr="--force ", ), input_roi_file=dict( argstr="--input_roi_file %s", extensions=None, ), input_tensor_file=dict( argstr="--input_tensor_file %s", extensions=None, ), max_angle=dict( argstr="--max_angle %f", ), min_fa=dict( argstr="--min_fa %f", ), output_fiber_file=dict( argstr="--output_fiber_file %s", hash_files=False, ), really_verbose=dict( argstr="--really_verbose ", ), source_label=dict( argstr="--source_label %d", ), step_size=dict( argstr="--step_size %f", ), target_label=dict( argstr="--target_label %d", ), verbose=dict( argstr="--verbose ", ), whole_brain=dict( argstr="--whole_brain ", ), ) inputs = fibertrack.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_fibertrack_outputs(): output_map = dict( output_fiber_file=dict( extensions=None, ), ) outputs = fibertrack.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py000066400000000000000000000170311413403311400305770ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from 
....base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class UKFTractographyInputSpec(CommandLineInputSpec): dwiFile = File(desc="Input DWI volume", exists=True, argstr="--dwiFile %s") seedsFile = File( desc="Seeds for diffusion. If not specified, full brain tractography will be performed, and the algorithm will start from every voxel in the brain mask where the Generalized Anisotropy is bigger than 0.18", exists=True, argstr="--seedsFile %s", ) labels = InputMultiPath( traits.Int, desc="A vector of the ROI labels to be used", sep=",", argstr="--labels %s", ) maskFile = File( desc="Mask for diffusion tractography", exists=True, argstr="--maskFile %s" ) tracts = traits.Either( traits.Bool, File(), hash_files=False, desc="Tracts generated, with first tensor output", argstr="--tracts %s", ) writeAsciiTracts = traits.Bool( desc="Write tract file as a VTK binary data file", argstr="--writeAsciiTracts " ) writeUncompressedTracts = traits.Bool( desc="Write tract file as a VTK uncompressed data file", argstr="--writeUncompressedTracts ", ) seedsPerVoxel = traits.Int( desc=" Each seed generates a fiber, thus using more seeds generates more fibers. In general use 1 or 2 seeds, and for a more thorough result use 5 or 10 (depending on your machine this may take up to 2 days to run)., ", argstr="--seedsPerVoxel %d", ) numTensor = traits.Enum( "1", "2", desc="Number of tensors used", argstr="--numTensor %s" ) freeWater = traits.Bool( desc="Adds a term for free water difusion to the model. (Note for experts: if checked, the 1T simple model is forced) ", argstr="--freeWater ", ) recordFA = traits.Bool( desc="Whether to store FA. Attaches field 'FA', and 'FA2' for 2-tensor case to fiber. ", argstr="--recordFA ", ) recordFreeWater = traits.Bool( desc="Whether to store the fraction of free water. 
Attaches field 'FreeWater' to fiber.", argstr="--recordFreeWater ", ) recordTrace = traits.Bool( desc="Whether to store Trace. Attaches field 'Trace', and 'Trace2' for 2-tensor case to fiber.", argstr="--recordTrace ", ) recordTensors = traits.Bool( desc="Recording the tensors enables Slicer to color the fiber bundles by FA, orientation, and so on. The fields will be called 'TensorN', where N is the tensor number. ", argstr="--recordTensors ", ) recordNMSE = traits.Bool( desc="Whether to store NMSE. Attaches field 'NMSE' to fiber. ", argstr="--recordNMSE ", ) recordState = traits.Bool( desc="Whether to attach the states to the fiber. Will generate field 'state'.", argstr="--recordState ", ) recordCovariance = traits.Bool( desc="Whether to store the covariance. Will generate field 'covariance' in fiber.", argstr="--recordCovariance ", ) recordLength = traits.Float( desc="Record length of tractography, in millimeters", argstr="--recordLength %f" ) minFA = traits.Float( desc="Abort the tractography when the Fractional Anisotropy is less than this value", argstr="--minFA %f", ) minGA = traits.Float( desc="Abort the tractography when the Generalized Anisotropy is less than this value", argstr="--minGA %f", ) fullTensorModel = traits.Bool( desc="Whether to use the full tensor model. If unchecked, use the default simple tensor model", argstr="--fullTensorModel ", ) numThreads = traits.Int( desc="Number of threads used during computation. Set to the number of cores on your workstation for optimal speed. If left undefined the number of cores detected will be used. ", argstr="--numThreads %d", ) stepLength = traits.Float( desc="Step length of tractography, in millimeters", argstr="--stepLength %f" ) maxHalfFiberLength = traits.Float( desc="The max length limit of the half fibers generated during tractography. 
Here the fiber is 'half' because the tractography goes in only one direction from one seed point at a time", argstr="--maxHalfFiberLength %f", ) seedFALimit = traits.Float( desc="Seed points whose FA are below this value are excluded", argstr="--seedFALimit %f", ) Qm = traits.Float(desc="Process noise for angles/direction", argstr="--Qm %f") Ql = traits.Float(desc="Process noise for eigenvalues", argstr="--Ql %f") Qw = traits.Float( desc="Process noise for free water weights, ignored if no free water estimation", argstr="--Qw %f", ) Rs = traits.Float(desc="Measurement noise", argstr="--Rs %f") maxBranchingAngle = traits.Float( desc="Maximum branching angle, in degrees. When using multiple tensors, a new branch will be created when the tensors' major directions form an angle between (minBranchingAngle, maxBranchingAngle). Branching is supressed when this maxBranchingAngle is set to 0.0", argstr="--maxBranchingAngle %f", ) minBranchingAngle = traits.Float( desc="Minimum branching angle, in degrees. When using multiple tensors, a new branch will be created when the tensors' major directions form an angle between (minBranchingAngle, maxBranchingAngle)", argstr="--minBranchingAngle %f", ) tractsWithSecondTensor = traits.Either( traits.Bool, File(), hash_files=False, desc="Tracts generated, with second tensor output (if there is one)", argstr="--tractsWithSecondTensor %s", ) storeGlyphs = traits.Bool( desc="Store tensors' main directions as two-point lines in a separate file named glyphs_{tracts}. 
When using multiple tensors, only the major tensors' main directions are stored", argstr="--storeGlyphs ", ) class UKFTractographyOutputSpec(TraitedSpec): tracts = File(desc="Tracts generated, with first tensor output", exists=True) tractsWithSecondTensor = File( desc="Tracts generated, with second tensor output (if there is one)", exists=True, ) class UKFTractography(SEMLikeCommandLine): """title: UKF Tractography category: Diffusion.Tractography description: This module traces fibers in a DWI Volume using the multiple tensor unscented Kalman Filter methology. For more informations check the documentation. version: 1.0 documentation-url: http://www.nitrc.org/plugins/mwiki/index.php/ukftractography:MainPage contributor: Yogesh Rathi, Stefan Lienhard, Yinpeng Li, Martin Styner, Ipek Oguz, Yundi Shi, Christian Baumgartner, Kent Williams, Hans Johnson, Peter Savadjiev, Carl-Fredrik Westin. acknowledgements: The development of this module was supported by NIH grants R01 MH097979 (PI Rathi), R01 MH092862 (PIs Westin and Verma), U01 NS083223 (PI Westin), R01 MH074794 (PI Westin) and P41 EB015902 (PI Kikinis). 
""" input_spec = UKFTractographyInputSpec output_spec = UKFTractographyOutputSpec _cmd = " UKFTractography " _outputs_filenames = { "tracts": "tracts.vtp", "tractsWithSecondTensor": "tractsWithSecondTensor.vtp", } _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/featurecreator.py000066400000000000000000000032201413403311400236730ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ..base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class GenerateCsfClippedFromClassifiedImageInputSpec(CommandLineInputSpec): inputCassifiedVolume = File( desc="Required: input tissue label image", exists=True, argstr="--inputCassifiedVolume %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", argstr="--outputVolume %s", ) class GenerateCsfClippedFromClassifiedImageOutputSpec(TraitedSpec): outputVolume = File(desc="Required: output image", exists=True) class GenerateCsfClippedFromClassifiedImage(SEMLikeCommandLine): """title: GenerateCsfClippedFromClassifiedImage category: FeatureCreator description: Get the distance from a voxel to the nearest voxel of a given tissue type. version: 0.1.0.$Revision: 1 $(alpha) documentation-url: http:://www.na-mic.org/ license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was written by Hans J. Johnson. 
""" input_spec = GenerateCsfClippedFromClassifiedImageInputSpec output_spec = GenerateCsfClippedFromClassifiedImageOutputSpec _cmd = " GenerateCsfClippedFromClassifiedImage " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/filtering/000077500000000000000000000000001413403311400222745ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/filtering/__init__.py000066400000000000000000000010661413403311400244100ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .denoising import UnbiasedNonLocalMeans from .featuredetection import ( GenerateSummedGradientImage, CannySegmentationLevelSetImageFilter, DilateImage, TextureFromNoiseImageFilter, FlippedDifference, ErodeImage, GenerateBrainClippedImage, NeighborhoodMedian, GenerateTestImage, NeighborhoodMean, HammerAttributeCreator, TextureMeasureFilter, DilateMask, DumpBinaryTrainingVectors, DistanceMaps, STAPLEAnalysis, GradientAnisotropicDiffusionImageFilter, CannyEdge, ) nipype-1.7.0/nipype/interfaces/semtools/filtering/denoising.py000066400000000000000000000113571413403311400246340ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ...base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class UnbiasedNonLocalMeansInputSpec(CommandLineInputSpec): sigma = traits.Float( desc="The root power of noise (sigma) in the complex Gaussian process the Rician comes from. If it is underestimated, the algorithm fails to remove the noise. 
If it is overestimated, over-blurring is likely to occur.", argstr="--sigma %f", ) rs = InputMultiPath( traits.Int, desc="The algorithm search for similar voxels in a neighborhood of this radius (radii larger than 5,5,5 are very slow, and the results can be only marginally better. Small radii may fail to effectively remove the noise).", sep=",", argstr="--rs %s", ) rc = InputMultiPath( traits.Int, desc="Similarity between blocks is computed as the difference between mean values and gradients. These parameters are computed fitting a hyperplane with LS inside a neighborhood of this size", sep=",", argstr="--rc %s", ) hp = traits.Float( desc="This parameter is related to noise; the larger the parameter, the more aggressive the filtering. Should be near 1, and only values between 0.8 and 1.2 are allowed", argstr="--hp %f", ) ps = traits.Float( desc="To accelerate computations, preselection is used: if the normalized difference is above this threshold, the voxel will be discarded (non used for average)", argstr="--ps %f", ) inputVolume = File(position=-2, desc="Input MRI volume.", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output (filtered) MRI volume.", argstr="%s", ) class UnbiasedNonLocalMeansOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Output (filtered) MRI volume.", exists=True) class UnbiasedNonLocalMeans(SEMLikeCommandLine): """title: Unbiased NLM for MRI category: Filtering.Denoising description: This module implements a fast version of the popular Non-Local Means filter for image denoising. This algorithm filters each pixel as a weighted average of its neighbors in a large vicinity. The weights are computed based on the similarity of each neighbor with the voxel to be denoised. In the original formulation a patch with a certain radius is centered in each of the voxels, and the Mean Squared Error between each pair of corresponding voxels is computed. 
In this implementation, only the mean value and gradient components are compared. This, together with an efficient memory management, can attain a speed-up of nearly 20x. Besides, the filtering is more accurate than the original with poor SNR. This code is intended for its use with MRI (or any other Rician-distributed modality): the second order moment is estimated, then we subtract twice the squared power of noise, and finally we take the square root of the result to remove the Rician bias. The original implementation of the NLM filter may be found in: A. Buades, B. Coll, J. Morel, "A review of image denoising algorithms, with a new one", Multiscale Modelling and Simulation 4(2): 490-530. 2005. The correction of the Rician bias is described in the following reference (among others): S. Aja-Fernandez, K. Krissian, "An unbiased Non-Local Means scheme for DWI filtering", in: Proceedings of the MICCAI Workshop on Computational Diffusion MRI, 2008, pp. 277-284. The whole description of this version may be found in the following paper (please, cite it if you are willing to use this software): A. Tristan-Vega, V. Garcia Perez, S. Aja-Fenandez, and C.-F. Westin, "Efficient and Robust Nonlocal Means Denoising of MR Data Based on Salient Features Matching", Computer Methods and Programs in Biomedicine. (Accepted for publication) 2011. 
version: 0.0.1.$Revision: 1 $(beta) documentation-url: http://www.slicer.org/slicerWiki/index.php/Modules:UnbiasedNonLocalMeans-Documentation-3.6 contributor: Antonio Tristan Vega, Veronica Garcia-Perez, Santiago Aja-Fernandez, Carl-Fredrik Westin acknowledgements: Supported by grant number FMECD-2010/71131616E from the Spanish Ministry of Education/Fulbright Committee """ input_spec = UnbiasedNonLocalMeansInputSpec output_spec = UnbiasedNonLocalMeansOutputSpec _cmd = " UnbiasedNonLocalMeans " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/filtering/featuredetection.py000066400000000000000000000637671413403311400262230ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ...base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class GenerateSummedGradientImageInputSpec(CommandLineInputSpec): inputVolume1 = File( desc="input volume 1, usally t1 image", exists=True, argstr="--inputVolume1 %s" ) inputVolume2 = File( desc="input volume 2, usally t2 image", exists=True, argstr="--inputVolume2 %s" ) outputFileName = traits.Either( traits.Bool, File(), hash_files=False, desc="(required) output file name", argstr="--outputFileName %s", ) MaximumGradient = traits.Bool( desc="If set this flag, it will compute maximum gradient between two input volumes instead of sum of it.", argstr="--MaximumGradient ", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class GenerateSummedGradientImageOutputSpec(TraitedSpec): outputFileName = File(desc="(required) output file name", exists=True) class GenerateSummedGradientImage(SEMLikeCommandLine): """title: GenerateSummedGradient 
category: Filtering.FeatureDetection description: Automatic FeatureImages using neural networks version: 1.0 license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Greg Harris, Eun Young Kim """ input_spec = GenerateSummedGradientImageInputSpec output_spec = GenerateSummedGradientImageOutputSpec _cmd = " GenerateSummedGradientImage " _outputs_filenames = {"outputFileName": "outputFileName"} _redirect_x = False class CannySegmentationLevelSetImageFilterInputSpec(CommandLineInputSpec): inputVolume = File(exists=True, argstr="--inputVolume %s") initialModel = File(exists=True, argstr="--initialModel %s") outputVolume = traits.Either( traits.Bool, File(), hash_files=False, argstr="--outputVolume %s" ) outputSpeedVolume = traits.Either( traits.Bool, File(), hash_files=False, argstr="--outputSpeedVolume %s" ) cannyThreshold = traits.Float( desc="Canny Threshold Value", argstr="--cannyThreshold %f" ) cannyVariance = traits.Float(desc="Canny variance", argstr="--cannyVariance %f") advectionWeight = traits.Float( desc="Controls the smoothness of the resulting mask, small number are more smooth, large numbers allow more sharp corners. ", argstr="--advectionWeight %f", ) initialModelIsovalue = traits.Float( desc="The identification of the input model iso-surface. (for a binary image with 0s and 1s use 0.5) (for a binary image with 0s and 255's use 127.5).", argstr="--initialModelIsovalue %f", ) maxIterations = traits.Int(desc="The", argstr="--maxIterations %d") class CannySegmentationLevelSetImageFilterOutputSpec(TraitedSpec): outputVolume = File(exists=True) outputSpeedVolume = File(exists=True) class CannySegmentationLevelSetImageFilter(SEMLikeCommandLine): """title: Canny Level Set Image Filter category: Filtering.FeatureDetection description: The CannySegmentationLevelSet is commonly used to refine a manually generated manual mask. 
version: 0.3.0 license: CC contributor: Regina Kim acknowledgements: This command module was derived from Insight/Examples/Segmentation/CannySegmentationLevelSetImageFilter.cxx (copyright) Insight Software Consortium. See http://wiki.na-mic.org/Wiki/index.php/Slicer3:Execution_Model_Documentation for more detailed descriptions. """ input_spec = CannySegmentationLevelSetImageFilterInputSpec output_spec = CannySegmentationLevelSetImageFilterOutputSpec _cmd = " CannySegmentationLevelSetImageFilter " _outputs_filenames = { "outputVolume": "outputVolume.nii", "outputSpeedVolume": "outputSpeedVolume.nii", } _redirect_x = False class DilateImageInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input image", exists=True, argstr="--inputVolume %s" ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, argstr="--inputMaskVolume %s", ) inputRadius = traits.Int( desc="Required: input neighborhood radius", argstr="--inputRadius %d" ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", argstr="--outputVolume %s", ) class DilateImageOutputSpec(TraitedSpec): outputVolume = File(desc="Required: output image", exists=True) class DilateImage(SEMLikeCommandLine): """title: Dilate Image category: Filtering.FeatureDetection description: Uses mathematical morphology to dilate the input images. version: 0.1.0.$Revision: 1 $(alpha) documentation-url: http:://www.na-mic.org/ license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was developed by Mark Scully and Jeremy Bockholt. 
""" input_spec = DilateImageInputSpec output_spec = DilateImageOutputSpec _cmd = " DilateImage " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class TextureFromNoiseImageFilterInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input image", exists=True, argstr="--inputVolume %s" ) inputRadius = traits.Int( desc="Required: input neighborhood radius", argstr="--inputRadius %d" ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", argstr="--outputVolume %s", ) class TextureFromNoiseImageFilterOutputSpec(TraitedSpec): outputVolume = File(desc="Required: output image", exists=True) class TextureFromNoiseImageFilter(SEMLikeCommandLine): """title: TextureFromNoiseImageFilter category: Filtering.FeatureDetection description: Calculate the local noise in an image. version: 0.1.0.$Revision: 1 $(alpha) documentation-url: http:://www.na-mic.org/ license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was developed by Eunyoung Regina Kim """ input_spec = TextureFromNoiseImageFilterInputSpec output_spec = TextureFromNoiseImageFilterOutputSpec _cmd = " TextureFromNoiseImageFilter " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class FlippedDifferenceInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input image", exists=True, argstr="--inputVolume %s" ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, argstr="--inputMaskVolume %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", argstr="--outputVolume %s", ) class FlippedDifferenceOutputSpec(TraitedSpec): outputVolume = File(desc="Required: output image", exists=True) class FlippedDifference(SEMLikeCommandLine): """title: Flip Image category: Filtering.FeatureDetection description: Difference between an image and the axially flipped version of that image. 
version: 0.1.0.$Revision: 1 $(alpha) documentation-url: http:://www.na-mic.org/ license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was developed by Mark Scully and Jeremy Bockholt. """ input_spec = FlippedDifferenceInputSpec output_spec = FlippedDifferenceOutputSpec _cmd = " FlippedDifference " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class ErodeImageInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input image", exists=True, argstr="--inputVolume %s" ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, argstr="--inputMaskVolume %s", ) inputRadius = traits.Int( desc="Required: input neighborhood radius", argstr="--inputRadius %d" ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", argstr="--outputVolume %s", ) class ErodeImageOutputSpec(TraitedSpec): outputVolume = File(desc="Required: output image", exists=True) class ErodeImage(SEMLikeCommandLine): """title: Erode Image category: Filtering.FeatureDetection description: Uses mathematical morphology to erode the input images. version: 0.1.0.$Revision: 1 $(alpha) documentation-url: http:://www.na-mic.org/ license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was developed by Mark Scully and Jeremy Bockholt. 
""" input_spec = ErodeImageInputSpec output_spec = ErodeImageOutputSpec _cmd = " ErodeImage " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class GenerateBrainClippedImageInputSpec(CommandLineInputSpec): inputImg = File( desc="input volume 1, usally t1 image", exists=True, argstr="--inputImg %s" ) inputMsk = File( desc="input volume 2, usally t2 image", exists=True, argstr="--inputMsk %s" ) outputFileName = traits.Either( traits.Bool, File(), hash_files=False, desc="(required) output file name", argstr="--outputFileName %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class GenerateBrainClippedImageOutputSpec(TraitedSpec): outputFileName = File(desc="(required) output file name", exists=True) class GenerateBrainClippedImage(SEMLikeCommandLine): """title: GenerateBrainClippedImage category: Filtering.FeatureDetection description: Automatic FeatureImages using neural networks version: 1.0 license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Eun Young Kim """ input_spec = GenerateBrainClippedImageInputSpec output_spec = GenerateBrainClippedImageOutputSpec _cmd = " GenerateBrainClippedImage " _outputs_filenames = {"outputFileName": "outputFileName"} _redirect_x = False class NeighborhoodMedianInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input image", exists=True, argstr="--inputVolume %s" ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, argstr="--inputMaskVolume %s", ) inputRadius = traits.Int( desc="Required: input neighborhood radius", argstr="--inputRadius %d" ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", argstr="--outputVolume %s", ) class NeighborhoodMedianOutputSpec(TraitedSpec): outputVolume = File(desc="Required: output image", exists=True) class NeighborhoodMedian(SEMLikeCommandLine): """title: 
Neighborhood Median category: Filtering.FeatureDetection description: Calculates the median, for the given neighborhood size, at each voxel of the input image. version: 0.1.0.$Revision: 1 $(alpha) documentation-url: http:://www.na-mic.org/ license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was developed by Mark Scully and Jeremy Bockholt. """ input_spec = NeighborhoodMedianInputSpec output_spec = NeighborhoodMedianOutputSpec _cmd = " NeighborhoodMedian " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class GenerateTestImageInputSpec(CommandLineInputSpec): inputVolume = File( desc="input volume 1, usally t1 image", exists=True, argstr="--inputVolume %s" ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="(required) output file name", argstr="--outputVolume %s", ) lowerBoundOfOutputVolume = traits.Float(argstr="--lowerBoundOfOutputVolume %f") upperBoundOfOutputVolume = traits.Float(argstr="--upperBoundOfOutputVolume %f") outputVolumeSize = traits.Float( desc="output Volume Size", argstr="--outputVolumeSize %f" ) class GenerateTestImageOutputSpec(TraitedSpec): outputVolume = File(desc="(required) output file name", exists=True) class GenerateTestImage(SEMLikeCommandLine): """title: DownSampleImage category: Filtering.FeatureDetection description: Down sample image for testing version: 1.0 license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Eun Young Kim """ input_spec = GenerateTestImageInputSpec output_spec = GenerateTestImageOutputSpec _cmd = " GenerateTestImage " _outputs_filenames = {"outputVolume": "outputVolume"} _redirect_x = False class NeighborhoodMeanInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input image", exists=True, argstr="--inputVolume %s" ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, argstr="--inputMaskVolume %s", ) inputRadius = traits.Int( desc="Required: 
input neighborhood radius", argstr="--inputRadius %d" ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", argstr="--outputVolume %s", ) class NeighborhoodMeanOutputSpec(TraitedSpec): outputVolume = File(desc="Required: output image", exists=True) class NeighborhoodMean(SEMLikeCommandLine): """title: Neighborhood Mean category: Filtering.FeatureDetection description: Calculates the mean, for the given neighborhood size, at each voxel of the T1, T2, and FLAIR. version: 0.1.0.$Revision: 1 $(alpha) documentation-url: http:://www.na-mic.org/ license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was developed by Mark Scully and Jeremy Bockholt. """ input_spec = NeighborhoodMeanInputSpec output_spec = NeighborhoodMeanOutputSpec _cmd = " NeighborhoodMean " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class HammerAttributeCreatorInputSpec(CommandLineInputSpec): Scale = traits.Int(desc="Determine Scale of Ball", argstr="--Scale %d") Strength = traits.Float(desc="Determine Strength of Edges", argstr="--Strength %f") inputGMVolume = File( desc="Required: input grey matter posterior image", exists=True, argstr="--inputGMVolume %s", ) inputWMVolume = File( desc="Required: input white matter posterior image", exists=True, argstr="--inputWMVolume %s", ) inputCSFVolume = File( desc="Required: input CSF posterior image", exists=True, argstr="--inputCSFVolume %s", ) outputVolumeBase = traits.Str( desc="Required: output image base name to be appended for each feature vector.", argstr="--outputVolumeBase %s", ) class HammerAttributeCreatorOutputSpec(TraitedSpec): pass class HammerAttributeCreator(SEMLikeCommandLine): """title: HAMMER Feature Vectors category: Filtering.FeatureDetection description: Create the feature vectors used by HAMMER. 
version: 0.1.0.$Revision: 1 $(alpha) documentation-url: http:://www.na-mic.org/ license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This was extracted from the Hammer Registration source code, and wrapped up by Hans J. Johnson. """ input_spec = HammerAttributeCreatorInputSpec output_spec = HammerAttributeCreatorOutputSpec _cmd = " HammerAttributeCreator " _outputs_filenames = {} _redirect_x = False class TextureMeasureFilterInputSpec(CommandLineInputSpec): inputVolume = File(exists=True, argstr="--inputVolume %s") inputMaskVolume = File(exists=True, argstr="--inputMaskVolume %s") distance = traits.Int(argstr="--distance %d") insideROIValue = traits.Float(argstr="--insideROIValue %f") outputFilename = traits.Either( traits.Bool, File(), hash_files=False, argstr="--outputFilename %s" ) class TextureMeasureFilterOutputSpec(TraitedSpec): outputFilename = File(exists=True) class TextureMeasureFilter(SEMLikeCommandLine): """title: Canny Level Set Image Filter category: Filtering.FeatureDetection description: The CannySegmentationLevelSet is commonly used to refine a manually generated manual mask. version: 0.3.0 license: CC contributor: Regina Kim acknowledgements: This command module was derived from Insight/Examples/Segmentation/CannySegmentationLevelSetImageFilter.cxx (copyright) Insight Software Consortium. See http://wiki.na-mic.org/Wiki/index.php/Slicer3:Execution_Model_Documentation for more detailed descriptions. """ input_spec = TextureMeasureFilterInputSpec output_spec = TextureMeasureFilterOutputSpec _cmd = " TextureMeasureFilter " _outputs_filenames = {"outputFilename": "outputFilename"} _redirect_x = False class DilateMaskInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input image", exists=True, argstr="--inputVolume %s" ) inputBinaryVolume = File( desc="Required: input brain mask image", exists=True, argstr="--inputBinaryVolume %s", ) sizeStructuralElement = traits.Int( desc="size of structural element. 
sizeStructuralElement=1 means that 3x3x3 structuring element for 3D", argstr="--sizeStructuralElement %d", ) lowerThreshold = traits.Float( desc="Required: lowerThreshold value", argstr="--lowerThreshold %f" ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", argstr="--outputVolume %s", ) class DilateMaskOutputSpec(TraitedSpec): outputVolume = File(desc="Required: output image", exists=True) class DilateMask(SEMLikeCommandLine): """title: Dilate Image category: Filtering.FeatureDetection description: Uses mathematical morphology to dilate the input images. version: 0.1.0.$Revision: 1 $(alpha) documentation-url: http:://www.na-mic.org/ license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was developed by Mark Scully and Jeremy Bockholt. """ input_spec = DilateMaskInputSpec output_spec = DilateMaskOutputSpec _cmd = " DilateMask " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class DumpBinaryTrainingVectorsInputSpec(CommandLineInputSpec): inputHeaderFilename = File( desc="Required: input header file name", exists=True, argstr="--inputHeaderFilename %s", ) inputVectorFilename = File( desc="Required: input vector filename", exists=True, argstr="--inputVectorFilename %s", ) class DumpBinaryTrainingVectorsOutputSpec(TraitedSpec): pass class DumpBinaryTrainingVectors(SEMLikeCommandLine): """title: Erode Image category: Filtering.FeatureDetection description: Uses mathematical morphology to erode the input images. version: 0.1.0.$Revision: 1 $(alpha) documentation-url: http:://www.na-mic.org/ license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was developed by Mark Scully and Jeremy Bockholt. 
""" input_spec = DumpBinaryTrainingVectorsInputSpec output_spec = DumpBinaryTrainingVectorsOutputSpec _cmd = " DumpBinaryTrainingVectors " _outputs_filenames = {} _redirect_x = False class DistanceMapsInputSpec(CommandLineInputSpec): inputLabelVolume = File( desc="Required: input tissue label image", exists=True, argstr="--inputLabelVolume %s", ) inputMaskVolume = File( desc="Required: input brain mask image", exists=True, argstr="--inputMaskVolume %s", ) inputTissueLabel = traits.Int( desc="Required: input integer value of tissue type used to calculate distance", argstr="--inputTissueLabel %d", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", argstr="--outputVolume %s", ) class DistanceMapsOutputSpec(TraitedSpec): outputVolume = File(desc="Required: output image", exists=True) class DistanceMaps(SEMLikeCommandLine): """title: Mauerer Distance category: Filtering.FeatureDetection description: Get the distance from a voxel to the nearest voxel of a given tissue type. version: 0.1.0.$Revision: 1 $(alpha) documentation-url: http:://www.na-mic.org/ license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was developed by Mark Scully and Jeremy Bockholt. 
""" input_spec = DistanceMapsInputSpec output_spec = DistanceMapsOutputSpec _cmd = " DistanceMaps " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class STAPLEAnalysisInputSpec(CommandLineInputSpec): inputDimension = traits.Int( desc="Required: input image Dimension 2 or 3", argstr="--inputDimension %d" ) inputLabelVolume = InputMultiPath( File(exists=True), desc="Required: input label volume", argstr="--inputLabelVolume %s...", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", argstr="--outputVolume %s", ) class STAPLEAnalysisOutputSpec(TraitedSpec): outputVolume = File(desc="Required: output image", exists=True) class STAPLEAnalysis(SEMLikeCommandLine): """title: Dilate Image category: Filtering.FeatureDetection description: Uses mathematical morphology to dilate the input images. version: 0.1.0.$Revision: 1 $(alpha) documentation-url: http:://www.na-mic.org/ license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was developed by Mark Scully and Jeremy Bockholt. 
""" input_spec = STAPLEAnalysisInputSpec output_spec = STAPLEAnalysisOutputSpec _cmd = " STAPLEAnalysis " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class GradientAnisotropicDiffusionImageFilterInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input image", exists=True, argstr="--inputVolume %s" ) numberOfIterations = traits.Int( desc="Optional value for number of Iterations", argstr="--numberOfIterations %d" ) timeStep = traits.Float( desc="Time step for diffusion process", argstr="--timeStep %f" ) conductance = traits.Float( desc="Conductance for diffusion process", argstr="--conductance %f" ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", argstr="--outputVolume %s", ) class GradientAnisotropicDiffusionImageFilterOutputSpec(TraitedSpec): outputVolume = File(desc="Required: output image", exists=True) class GradientAnisotropicDiffusionImageFilter(SEMLikeCommandLine): """title: GradientAnisopropicDiffusionFilter category: Filtering.FeatureDetection description: Image Smoothing using Gradient Anisotropic Diffuesion Filer contributor: This tool was developed by Eun Young Kim by modifying ITK Example """ input_spec = GradientAnisotropicDiffusionImageFilterInputSpec output_spec = GradientAnisotropicDiffusionImageFilterOutputSpec _cmd = " GradientAnisotropicDiffusionImageFilter " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class CannyEdgeInputSpec(CommandLineInputSpec): inputVolume = File( desc="Required: input tissue label image", exists=True, argstr="--inputVolume %s", ) variance = traits.Float( desc="Variance and Maximum error are used in the Gaussian smoothing of the input image. See itkDiscreteGaussianImageFilter for information on these parameters.", argstr="--variance %f", ) upperThreshold = traits.Float( desc="Threshold is the lowest allowed value in the output image. 
Its data type is the same as the data type of the output image. Any values below the Threshold level will be replaced with the OutsideValue parameter value, whose default is zero. ", argstr="--upperThreshold %f", ) lowerThreshold = traits.Float( desc="Threshold is the lowest allowed value in the output image. Its data type is the same as the data type of the output image. Any values below the Threshold level will be replaced with the OutsideValue parameter value, whose default is zero. ", argstr="--lowerThreshold %f", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output image", argstr="--outputVolume %s", ) class CannyEdgeOutputSpec(TraitedSpec): outputVolume = File(desc="Required: output image", exists=True) class CannyEdge(SEMLikeCommandLine): """title: Canny Edge Detection category: Filtering.FeatureDetection description: Get the distance from a voxel to the nearest voxel of a given tissue type. version: 0.1.0.(alpha) documentation-url: http:://www.na-mic.org/ license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was written by Hans J. Johnson. 
""" input_spec = CannyEdgeInputSpec output_spec = CannyEdgeOutputSpec _cmd = " CannyEdge " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/000077500000000000000000000000001413403311400234365ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/__init__.py000066400000000000000000000000301413403311400255400ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/test_auto_CannyEdge.py000066400000000000000000000024131413403311400277340ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import CannyEdge def test_CannyEdge_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), lowerThreshold=dict( argstr="--lowerThreshold %f", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), upperThreshold=dict( argstr="--upperThreshold %f", ), variance=dict( argstr="--variance %f", ), ) inputs = CannyEdge.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CannyEdge_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = CannyEdge.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value test_auto_CannySegmentationLevelSetImageFilter.py000066400000000000000000000035651413403311400352340ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/filtering/tests# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import CannySegmentationLevelSetImageFilter def test_CannySegmentationLevelSetImageFilter_inputs(): input_map = dict( advectionWeight=dict( 
argstr="--advectionWeight %f", ), args=dict( argstr="%s", ), cannyThreshold=dict( argstr="--cannyThreshold %f", ), cannyVariance=dict( argstr="--cannyVariance %f", ), environ=dict( nohash=True, usedefault=True, ), initialModel=dict( argstr="--initialModel %s", extensions=None, ), initialModelIsovalue=dict( argstr="--initialModelIsovalue %f", ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), maxIterations=dict( argstr="--maxIterations %d", ), outputSpeedVolume=dict( argstr="--outputSpeedVolume %s", hash_files=False, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = CannySegmentationLevelSetImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CannySegmentationLevelSetImageFilter_outputs(): output_map = dict( outputSpeedVolume=dict( extensions=None, ), outputVolume=dict( extensions=None, ), ) outputs = CannySegmentationLevelSetImageFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/test_auto_DilateImage.py000066400000000000000000000023501413403311400302440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import DilateImage def test_DilateImage_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputMaskVolume=dict( argstr="--inputMaskVolume %s", extensions=None, ), inputRadius=dict( argstr="--inputRadius %d", ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = DilateImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == 
value def test_DilateImage_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = DilateImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/test_auto_DilateMask.py000066400000000000000000000025151413403311400301200ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import DilateMask def test_DilateMask_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputBinaryVolume=dict( argstr="--inputBinaryVolume %s", extensions=None, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), lowerThreshold=dict( argstr="--lowerThreshold %f", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), sizeStructuralElement=dict( argstr="--sizeStructuralElement %d", ), ) inputs = DilateMask.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DilateMask_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = DilateMask.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/test_auto_DistanceMaps.py000066400000000000000000000024011413403311400304470ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import DistanceMaps def test_DistanceMaps_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputLabelVolume=dict( argstr="--inputLabelVolume %s", extensions=None, ), inputMaskVolume=dict( argstr="--inputMaskVolume %s", extensions=None, ), 
inputTissueLabel=dict( argstr="--inputTissueLabel %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = DistanceMaps.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DistanceMaps_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = DistanceMaps.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/test_auto_DumpBinaryTrainingVectors.py000066400000000000000000000021061413403311400332120ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import DumpBinaryTrainingVectors def test_DumpBinaryTrainingVectors_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputHeaderFilename=dict( argstr="--inputHeaderFilename %s", extensions=None, ), inputVectorFilename=dict( argstr="--inputVectorFilename %s", extensions=None, ), ) inputs = DumpBinaryTrainingVectors.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DumpBinaryTrainingVectors_outputs(): output_map = dict() outputs = DumpBinaryTrainingVectors.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/test_auto_ErodeImage.py000066400000000000000000000023431413403311400301020ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import ErodeImage def test_ErodeImage_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, 
usedefault=True, ), inputMaskVolume=dict( argstr="--inputMaskVolume %s", extensions=None, ), inputRadius=dict( argstr="--inputRadius %d", ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = ErodeImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ErodeImage_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = ErodeImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/test_auto_FlippedDifference.py000066400000000000000000000022721413403311400314400ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import FlippedDifference def test_FlippedDifference_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputMaskVolume=dict( argstr="--inputMaskVolume %s", extensions=None, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = FlippedDifference.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FlippedDifference_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = FlippedDifference.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateBrainClippedImage.py000066400000000000000000000024501413403311400330520ustar00rootroot00000000000000# 
AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import GenerateBrainClippedImage def test_GenerateBrainClippedImage_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputImg=dict( argstr="--inputImg %s", extensions=None, ), inputMsk=dict( argstr="--inputMsk %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputFileName=dict( argstr="--outputFileName %s", hash_files=False, ), ) inputs = GenerateBrainClippedImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GenerateBrainClippedImage_outputs(): output_map = dict( outputFileName=dict( extensions=None, ), ) outputs = GenerateBrainClippedImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateSummedGradientImage.py000066400000000000000000000026241413403311400334310ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import GenerateSummedGradientImage def test_GenerateSummedGradientImage_inputs(): input_map = dict( MaximumGradient=dict( argstr="--MaximumGradient ", ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume1=dict( argstr="--inputVolume1 %s", extensions=None, ), inputVolume2=dict( argstr="--inputVolume2 %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputFileName=dict( argstr="--outputFileName %s", hash_files=False, ), ) inputs = GenerateSummedGradientImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GenerateSummedGradientImage_outputs(): output_map = 
dict( outputFileName=dict( extensions=None, ), ) outputs = GenerateSummedGradientImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/test_auto_GenerateTestImage.py000066400000000000000000000025531413403311400314410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import GenerateTestImage def test_GenerateTestImage_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), lowerBoundOfOutputVolume=dict( argstr="--lowerBoundOfOutputVolume %f", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), outputVolumeSize=dict( argstr="--outputVolumeSize %f", ), upperBoundOfOutputVolume=dict( argstr="--upperBoundOfOutputVolume %f", ), ) inputs = GenerateTestImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GenerateTestImage_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = GenerateTestImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value test_auto_GradientAnisotropicDiffusionImageFilter.py000066400000000000000000000026431413403311400357550ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/filtering/tests# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import GradientAnisotropicDiffusionImageFilter def test_GradientAnisotropicDiffusionImageFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), conductance=dict( argstr="--conductance %f", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( 
argstr="--inputVolume %s", extensions=None, ), numberOfIterations=dict( argstr="--numberOfIterations %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), timeStep=dict( argstr="--timeStep %f", ), ) inputs = GradientAnisotropicDiffusionImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GradientAnisotropicDiffusionImageFilter_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = GradientAnisotropicDiffusionImageFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/test_auto_HammerAttributeCreator.py000066400000000000000000000025521413403311400325200ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import HammerAttributeCreator def test_HammerAttributeCreator_inputs(): input_map = dict( Scale=dict( argstr="--Scale %d", ), Strength=dict( argstr="--Strength %f", ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputCSFVolume=dict( argstr="--inputCSFVolume %s", extensions=None, ), inputGMVolume=dict( argstr="--inputGMVolume %s", extensions=None, ), inputWMVolume=dict( argstr="--inputWMVolume %s", extensions=None, ), outputVolumeBase=dict( argstr="--outputVolumeBase %s", ), ) inputs = HammerAttributeCreator.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_HammerAttributeCreator_outputs(): output_map = dict() outputs = HammerAttributeCreator.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMean.py000066400000000000000000000024011413403311400313040ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import NeighborhoodMean def test_NeighborhoodMean_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputMaskVolume=dict( argstr="--inputMaskVolume %s", extensions=None, ), inputRadius=dict( argstr="--inputRadius %d", ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = NeighborhoodMean.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_NeighborhoodMean_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = NeighborhoodMean.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/test_auto_NeighborhoodMedian.py000066400000000000000000000024131413403311400316240ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import NeighborhoodMedian def test_NeighborhoodMedian_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputMaskVolume=dict( argstr="--inputMaskVolume %s", extensions=None, ), inputRadius=dict( argstr="--inputRadius %d", ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = NeighborhoodMedian.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def 
test_NeighborhoodMedian_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = NeighborhoodMedian.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/test_auto_STAPLEAnalysis.py000066400000000000000000000021741413403311400305770ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import STAPLEAnalysis def test_STAPLEAnalysis_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputDimension=dict( argstr="--inputDimension %d", ), inputLabelVolume=dict( argstr="--inputLabelVolume %s...", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = STAPLEAnalysis.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_STAPLEAnalysis_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = STAPLEAnalysis.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/test_auto_TextureFromNoiseImageFilter.py000066400000000000000000000023071413403311400334740ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import TextureFromNoiseImageFilter def test_TextureFromNoiseImageFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputRadius=dict( argstr="--inputRadius %d", ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = TextureFromNoiseImageFilter.input_spec() for key, 
metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TextureFromNoiseImageFilter_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = TextureFromNoiseImageFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/test_auto_TextureMeasureFilter.py000066400000000000000000000025471413403311400322370ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featuredetection import TextureMeasureFilter def test_TextureMeasureFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), distance=dict( argstr="--distance %d", ), environ=dict( nohash=True, usedefault=True, ), inputMaskVolume=dict( argstr="--inputMaskVolume %s", extensions=None, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), insideROIValue=dict( argstr="--insideROIValue %f", ), outputFilename=dict( argstr="--outputFilename %s", hash_files=False, ), ) inputs = TextureMeasureFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TextureMeasureFilter_outputs(): output_map = dict( outputFilename=dict( extensions=None, ), ) outputs = TextureMeasureFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/filtering/tests/test_auto_UnbiasedNonLocalMeans.py000066400000000000000000000027261413403311400322520ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..denoising import UnbiasedNonLocalMeans def test_UnbiasedNonLocalMeans_inputs(): input_map = dict( args=dict( 
argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), hp=dict( argstr="--hp %f", ), inputVolume=dict( argstr="%s", extensions=None, position=-2, ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), ps=dict( argstr="--ps %f", ), rc=dict( argstr="--rc %s", sep=",", ), rs=dict( argstr="--rs %s", sep=",", ), sigma=dict( argstr="--sigma %f", ), ) inputs = UnbiasedNonLocalMeans.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_UnbiasedNonLocalMeans_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = UnbiasedNonLocalMeans.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/generated.sh000066400000000000000000000005741413403311400226110ustar00rootroot00000000000000local_generate_classes.py --python_paths=/scratch/johnsonhj/src/NEP-11/NIPYPE --program_paths=/scratch/johnsonhj/src/NEP-11/bin:/usr/local/bin:/opt/ogs/bin/darwin-x64:/bin:/sbin:/usr/local/bin:/usr/bin:/usr/sbin:/usr/texbin:/Shared/sinapse/sharedopt/20120722/Darwin_i386/vv/bin:/usr/texbin:/scratch/johnsonhj/bin --output_path=/scratch/johnsonhj/src/NEP-11/BRAINSTools/AutoWorkup nipype-1.7.0/nipype/interfaces/semtools/legacy/000077500000000000000000000000001413403311400215555ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/legacy/__init__.py000066400000000000000000000001021413403311400236570ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .registration import scalartransform nipype-1.7.0/nipype/interfaces/semtools/legacy/registration.py000066400000000000000000000047071413403311400246510ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list 
and/or change the generator.""" import os from ...base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class scalartransformInputSpec(CommandLineInputSpec): input_image = File(desc="Image to tranform", exists=True, argstr="--input_image %s") output_image = traits.Either( traits.Bool, File(), hash_files=False, desc="The transformed image", argstr="--output_image %s", ) transformation = traits.Either( traits.Bool, File(), hash_files=False, desc="Output file for transformation parameters", argstr="--transformation %s", ) invert = traits.Bool(desc="Invert tranform before applying.", argstr="--invert ") deformation = File( desc="Deformation field.", exists=True, argstr="--deformation %s" ) h_field = traits.Bool(desc="The deformation is an h-field.", argstr="--h_field ") interpolation = traits.Enum( "nearestneighbor", "linear", "cubic", desc="Interpolation type (nearestneighbor, linear, cubic)", argstr="--interpolation %s", ) class scalartransformOutputSpec(TraitedSpec): output_image = File(desc="The transformed image", exists=True) transformation = File(desc="Output file for transformation parameters", exists=True) class scalartransform(SEMLikeCommandLine): """title: ScalarTransform (DTIProcess) category: Legacy.Registration version: 1.0.0 documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/Nightly/Extensions/DTIProcess license: Copyright (c) Casey Goodlett. All rights reserved. See http://www.ia.unc.edu/dev/Copyright.htm for details. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the above copyright notices for more information. 
contributor: Casey Goodlett """ input_spec = scalartransformInputSpec output_spec = scalartransformOutputSpec _cmd = " scalartransform " _outputs_filenames = { "output_image": "output_image.nii", "transformation": "transformation", } _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/legacy/tests/000077500000000000000000000000001413403311400227175ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/legacy/tests/__init__.py000066400000000000000000000000301413403311400250210ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/semtools/legacy/tests/test_auto_scalartransform.py000066400000000000000000000030531413403311400305620ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import scalartransform def test_scalartransform_inputs(): input_map = dict( args=dict( argstr="%s", ), deformation=dict( argstr="--deformation %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), h_field=dict( argstr="--h_field ", ), input_image=dict( argstr="--input_image %s", extensions=None, ), interpolation=dict( argstr="--interpolation %s", ), invert=dict( argstr="--invert ", ), output_image=dict( argstr="--output_image %s", hash_files=False, ), transformation=dict( argstr="--transformation %s", hash_files=False, ), ) inputs = scalartransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_scalartransform_outputs(): output_map = dict( output_image=dict( extensions=None, ), transformation=dict( extensions=None, ), ) outputs = scalartransform.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/semtools/registration/000077500000000000000000000000001413403311400230235ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/registration/__init__.py000066400000000000000000000003441413403311400251350ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .specialized import VBRAINSDemonWarp, BRAINSDemonWarp, BRAINSTransformFromFiducials from .brainsresample import BRAINSResample from .brainsfit import BRAINSFit from .brainsresize import BRAINSResize nipype-1.7.0/nipype/interfaces/semtools/registration/brainsfit.py000066400000000000000000000610551413403311400253650ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ...base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class BRAINSFitInputSpec(CommandLineInputSpec): fixedVolume = File( desc="Input fixed image (the moving image will be transformed into this image space).", exists=True, argstr="--fixedVolume %s", ) movingVolume = File( desc="Input moving image (this image will be transformed into the fixed image space).", exists=True, argstr="--movingVolume %s", ) samplingPercentage = traits.Float( desc="Fraction of voxels of the fixed image that will be used for registration. The number has to be larger than zero and less or equal to one. Higher values increase the computation time but may give more accurate results. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation. The default is 0.002 (use approximately 0.2% of voxels, resulting in 100000 samples in a 512x512x192 volume) to provide a very fast registration in most cases. 
Typical values range from 0.01 (1%) for low detail images to 0.2 (20%) for high detail images.", argstr="--samplingPercentage %f", ) splineGridSize = InputMultiPath( traits.Int, desc="Number of BSpline grid subdivisions along each axis of the fixed image, centered on the image space. Values must be 3 or higher for the BSpline to be correctly computed.", sep=",", argstr="--splineGridSize %s", ) linearTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="(optional) Output estimated transform - in case the computed transform is not BSpline. NOTE: You must set at least one output object (transform and/or output volume).", argstr="--linearTransform %s", ) bsplineTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="(optional) Output estimated transform - in case the computed transform is BSpline. NOTE: You must set at least one output object (transform and/or output volume).", argstr="--bsplineTransform %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="(optional) Output image: the moving image warped to the fixed image space. NOTE: You must set at least one output object (transform and/or output volume).", argstr="--outputVolume %s", ) initialTransform = File( desc="Transform to be applied to the moving image to initialize the registration. This can only be used if Initialize Transform Mode is Off.", exists=True, argstr="--initialTransform %s", ) initializeTransformMode = traits.Enum( "Off", "useMomentsAlign", "useCenterOfHeadAlign", "useGeometryAlign", "useCenterOfROIAlign", desc="Determine how to initialize the transform center. useMomentsAlign assumes that the center of mass of the images represent similar structures. useCenterOfHeadAlign attempts to use the top of head and shape of neck to drive a center of mass estimate. useGeometryAlign on assumes that the center of the voxel lattice of the images represent similar structures. Off assumes that the physical space of the images are close. 
This flag is mutually exclusive with the Initialization transform.", argstr="--initializeTransformMode %s", ) useRigid = traits.Bool( desc="Perform a rigid registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", argstr="--useRigid ", ) useScaleVersor3D = traits.Bool( desc="Perform a ScaleVersor3D registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", argstr="--useScaleVersor3D ", ) useScaleSkewVersor3D = traits.Bool( desc="Perform a ScaleSkewVersor3D registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", argstr="--useScaleSkewVersor3D ", ) useAffine = traits.Bool( desc="Perform an Affine registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", argstr="--useAffine ", ) useBSpline = traits.Bool( desc="Perform a BSpline registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", argstr="--useBSpline ", ) useSyN = traits.Bool( desc="Perform a SyN registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", argstr="--useSyN ", ) useComposite = traits.Bool( desc="Perform a Composite registration as part of the sequential registration steps. This family of options overrides the use of transformType if any of them are set.", argstr="--useComposite ", ) maskProcessingMode = traits.Enum( "NOMASK", "ROIAUTO", "ROI", desc="Specifies a mask to only consider a certain image region for the registration. If ROIAUTO is chosen, then the mask is computed using Otsu thresholding and hole filling. 
If ROI is chosen then the mask has to be specified as in input.", argstr="--maskProcessingMode %s", ) fixedBinaryVolume = File( desc="Fixed Image binary mask volume, required if Masking Option is ROI. Image areas where the mask volume has zero value are ignored during the registration.", exists=True, argstr="--fixedBinaryVolume %s", ) movingBinaryVolume = File( desc="Moving Image binary mask volume, required if Masking Option is ROI. Image areas where the mask volume has zero value are ignored during the registration.", exists=True, argstr="--movingBinaryVolume %s", ) outputFixedVolumeROI = traits.Either( traits.Bool, File(), hash_files=False, desc="ROI that is automatically computed from the fixed image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", argstr="--outputFixedVolumeROI %s", ) outputMovingVolumeROI = traits.Either( traits.Bool, File(), hash_files=False, desc="ROI that is automatically computed from the moving image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", argstr="--outputMovingVolumeROI %s", ) useROIBSpline = traits.Bool( desc="If enabled then the bounding box of the input ROIs defines the BSpline grid support region. Otherwise the BSpline grid support region is the whole fixed image.", argstr="--useROIBSpline ", ) histogramMatch = traits.Bool( desc="Apply histogram matching operation for the input images to make them more similar. This is suitable for images of the same modality that may have different brightness or contrast, but the same overall intensity profile. Do NOT use if registering images from different modalities.", argstr="--histogramMatch ", ) medianFilterSize = InputMultiPath( traits.Int, desc="Apply median filtering to reduce noise in the input volumes. 
The 3 values specify the radius for the optional MedianImageFilter preprocessing in all 3 directions (in voxels).", sep=",", argstr="--medianFilterSize %s", ) removeIntensityOutliers = traits.Float( desc="Remove very high and very low intensity voxels from the input volumes. The parameter specifies the half percentage to decide outliers of image intensities. The default value is zero, which means no outlier removal. If the value of 0.005 is given, the 0.005% of both tails will be thrown away, so 0.01% of intensities in total would be ignored in the statistic calculation.", argstr="--removeIntensityOutliers %f", ) fixedVolume2 = File( desc="Input fixed image that will be used for multimodal registration. (the moving image will be transformed into this image space).", exists=True, argstr="--fixedVolume2 %s", ) movingVolume2 = File( desc="Input moving image that will be used for multimodal registration(this image will be transformed into the fixed image space).", exists=True, argstr="--movingVolume2 %s", ) outputVolumePixelType = traits.Enum( "float", "short", "ushort", "int", "uint", "uchar", desc="Data type for representing a voxel of the Output Volume.", argstr="--outputVolumePixelType %s", ) backgroundFillValue = traits.Float( desc="This value will be used for filling those areas of the output image that have no corresponding voxels in the input moving image.", argstr="--backgroundFillValue %f", ) scaleOutputValues = traits.Bool( desc="If true, and the voxel values do not fit within the minimum and maximum values of the desired outputVolumePixelType, then linearly scale the min/max output image voxel values to fit within the min/max range of the outputVolumePixelType.", argstr="--scaleOutputValues ", ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", "ResampleInPlace", "BSpline", "WindowedSinc", "Hamming", "Cosine", "Welch", "Lanczos", "Blackman", desc="Type of interpolation to be used when applying transform to moving volume. 
Options are Linear, NearestNeighbor, BSpline, WindowedSinc, Hamming, Cosine, Welch, Lanczos, or ResampleInPlace. The ResampleInPlace option will create an image with the same discrete voxel values and will adjust the origin and direction of the physical space interpretation.", argstr="--interpolationMode %s", ) numberOfIterations = InputMultiPath( traits.Int, desc="The maximum number of iterations to try before stopping the optimization. When using a lower value (500-1000) then the registration is forced to terminate earlier but there is a higher risk of stopping before an optimal solution is reached.", sep=",", argstr="--numberOfIterations %s", ) maximumStepLength = traits.Float( desc="Starting step length of the optimizer. In general, higher values allow for recovering larger initial misalignments but there is an increased chance that the registration will not converge.", argstr="--maximumStepLength %f", ) minimumStepLength = InputMultiPath( traits.Float, desc="Each step in the optimization takes steps at least this big. When none are possible, registration is complete. Smaller values allows the optimizer to make smaller adjustments, but the registration time may increase.", sep=",", argstr="--minimumStepLength %s", ) relaxationFactor = traits.Float( desc="Specifies how quickly the optimization step length is decreased during registration. The value must be larger than 0 and smaller than 1. Larger values result in slower step size decrease, which allow for recovering larger initial misalignments but it increases the registration time and the chance that the registration will not converge.", argstr="--relaxationFactor %f", ) translationScale = traits.Float( desc="How much to scale up changes in position (in mm) compared to unit rotational changes (in radians) -- decrease this to allow for more rotation in the search pattern.", argstr="--translationScale %f", ) reproportionScale = traits.Float( desc="ScaleVersor3D 'Scale' compensation factor. 
Increase this to allow for more rescaling in a ScaleVersor3D or ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", argstr="--reproportionScale %f", ) skewScale = traits.Float( desc="ScaleSkewVersor3D Skew compensation factor. Increase this to allow for more skew in a ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", argstr="--skewScale %f", ) maxBSplineDisplacement = traits.Float( desc="Maximum allowed displacements in image physical coordinates (mm) for BSpline control grid along each axis. A value of 0.0 indicates that the problem should be unbounded. NOTE: This only constrains the BSpline portion, and does not limit the displacement from the associated bulk transform. This can lead to a substantial reduction in computation time in the BSpline optimizer., ", argstr="--maxBSplineDisplacement %f", ) fixedVolumeTimeIndex = traits.Int( desc="The index in the time series for the 3D fixed image to fit. Only allowed if the fixed input volume is 4-dimensional.", argstr="--fixedVolumeTimeIndex %d", ) movingVolumeTimeIndex = traits.Int( desc="The index in the time series for the 3D moving image to fit. Only allowed if the moving input volume is 4-dimensional", argstr="--movingVolumeTimeIndex %d", ) numberOfHistogramBins = traits.Int( desc="The number of histogram levels used for mutual information metric estimation.", argstr="--numberOfHistogramBins %d", ) numberOfMatchPoints = traits.Int( desc="Number of histogram match points used for mutual information metric estimation.", argstr="--numberOfMatchPoints %d", ) costMetric = traits.Enum( "MMI", "MSE", "NC", "MIH", desc="The cost metric to be used during fitting. Defaults to MMI. 
Options are MMI (Mattes Mutual Information), MSE (Mean Square Error), NC (Normalized Correlation), MC (Match Cardinality for binary images)", argstr="--costMetric %s", ) maskInferiorCutOffFromCenter = traits.Float( desc="If Initialize Transform Mode is set to useCenterOfHeadAlign or Masking Option is ROIAUTO then this value defines the how much is cut of from the inferior part of the image. The cut-off distance is specified in millimeters, relative to the image center. If the value is 1000 or larger then no cut-off performed.", argstr="--maskInferiorCutOffFromCenter %f", ) ROIAutoDilateSize = traits.Float( desc="This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. A setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.", argstr="--ROIAutoDilateSize %f", ) ROIAutoClosingSize = traits.Float( desc="This flag is only relevant when using ROIAUTO mode for initializing masks. It defines the hole closing size in mm. It is rounded up to the nearest whole pixel size in each direction. The default is to use a closing size of 9mm. For mouse data this value may need to be reset to 0.9 or smaller.", argstr="--ROIAutoClosingSize %f", ) numberOfSamples = traits.Int( desc="The number of voxels sampled for mutual information computation. Increase this for higher accuracy, at the cost of longer computation time., NOTE that it is suggested to use samplingPercentage instead of this option. However, if set to non-zero, numberOfSamples overwrites the samplingPercentage option. ", argstr="--numberOfSamples %d", ) strippedOutputTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. 
NOTE: This value is overridden if either bsplineTransform or linearTransform is set.", argstr="--strippedOutputTransform %s", ) transformType = InputMultiPath( traits.Str, desc="Specifies a list of registration types to be used. The valid types are, Rigid, ScaleVersor3D, ScaleSkewVersor3D, Affine, BSpline and SyN. Specifying more than one in a comma separated list will initialize the next stage with the previous results. If registrationClass flag is used, it overrides this parameter setting.", sep=",", argstr="--transformType %s", ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="(optional) Filename to which save the (optional) estimated transform. NOTE: You must select either the outputTransform or the outputVolume option.", argstr="--outputTransform %s", ) initializeRegistrationByCurrentGenericTransform = traits.Bool( desc="If this flag is ON, the current generic composite transform, resulted from the linear registration stages, is set to initialize the follow nonlinear registration process. However, by the default behaviour, the moving image is first warped based on the existant transform before it is passed to the BSpline registration filter. It is done to speed up the BSpline registration by reducing the computations of composite transform Jacobian.", argstr="--initializeRegistrationByCurrentGenericTransform ", ) failureExitCode = traits.Int( desc="If the fit fails, exit with this status code. (It can be used to force a successfult exit status of (0) if the registration fails due to reaching the maximum number of iterations.", argstr="--failureExitCode %d", ) writeTransformOnFailure = traits.Bool( desc="Flag to save the final transform even if the numberOfIterations are reached without convergence. (Intended for use when --failureExitCode 0 )", argstr="--writeTransformOnFailure ", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use. 
(default is auto-detected)", argstr="--numberOfThreads %d", ) debugLevel = traits.Int( desc="Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", argstr="--debugLevel %d", ) costFunctionConvergenceFactor = traits.Float( desc="From itkLBFGSBOptimizer.h: Set/Get the CostFunctionConvergenceFactor. Algorithm terminates when the reduction in cost function is less than (factor * epsmcj) where epsmch is the machine precision. Typical values for factor: 1e+12 for low accuracy; 1e+7 for moderate accuracy and 1e+1 for extremely high accuracy. 1e+9 seems to work well., ", argstr="--costFunctionConvergenceFactor %f", ) projectedGradientTolerance = traits.Float( desc="From itkLBFGSBOptimizer.h: Set/Get the ProjectedGradientTolerance. Algorithm terminates when the project gradient is below the tolerance. Default lbfgsb value is 1e-5, but 1e-4 seems to work well., ", argstr="--projectedGradientTolerance %f", ) maximumNumberOfEvaluations = traits.Int( desc="Maximum number of evaluations for line search in lbfgsb optimizer.", argstr="--maximumNumberOfEvaluations %d", ) maximumNumberOfCorrections = traits.Int( desc="Maximum number of corrections in lbfgsb optimizer.", argstr="--maximumNumberOfCorrections %d", ) gui = traits.Bool( desc="Display intermediate image volumes for debugging. NOTE: This is not part of the standard build sytem, and probably does nothing on your installation.", argstr="--gui ", ) promptUser = traits.Bool( desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", argstr="--promptUser ", ) metricSamplingStrategy = traits.Enum( "Random", desc="It defines the method that registration filter uses to sample the input fixed image. 
Only Random is supported for now.", argstr="--metricSamplingStrategy %s", ) logFileReport = traits.Either( traits.Bool, File(), hash_files=False, desc="A file to write out final information report in CSV file: MetricName,MetricValue,FixedImageName,FixedMaskName,MovingImageName,MovingMaskName", argstr="--logFileReport %s", ) writeOutputTransformInFloat = traits.Bool( desc="By default, the output registration transforms (either the output composite transform or each transform component) are written to the disk in double precision. If this flag is ON, the output transforms will be written in single (float) precision. It is especially important if the output transform is a displacement field transform, or it is a composite transform that includes several displacement fields.", argstr="--writeOutputTransformInFloat ", ) class BRAINSFitOutputSpec(TraitedSpec): linearTransform = File( desc="(optional) Output estimated transform - in case the computed transform is not BSpline. NOTE: You must set at least one output object (transform and/or output volume).", exists=True, ) bsplineTransform = File( desc="(optional) Output estimated transform - in case the computed transform is BSpline. NOTE: You must set at least one output object (transform and/or output volume).", exists=True, ) outputVolume = File( desc="(optional) Output image: the moving image warped to the fixed image space. NOTE: You must set at least one output object (transform and/or output volume).", exists=True, ) outputFixedVolumeROI = File( desc="ROI that is automatically computed from the fixed image. Only available if Masking Option is ROIAUTO. Image areas where the mask volume has zero value are ignored during the registration.", exists=True, ) outputMovingVolumeROI = File( desc="ROI that is automatically computed from the moving image. Only available if Masking Option is ROIAUTO. 
Image areas where the mask volume has zero value are ignored during the registration.", exists=True, ) strippedOutputTransform = File( desc="Rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overridden if either bsplineTransform or linearTransform is set.", exists=True, ) outputTransform = File( desc="(optional) Filename to which save the (optional) estimated transform. NOTE: You must select either the outputTransform or the outputVolume option.", exists=True, ) logFileReport = File( desc="A file to write out final information report in CSV file: MetricName,MetricValue,FixedImageName,FixedMaskName,MovingImageName,MovingMaskName", exists=True, ) class BRAINSFit(SEMLikeCommandLine): """title: General Registration (BRAINS) category: Registration description: Register a three-dimensional volume to a reference volume (Mattes Mutual Information by default). Full documentation avalable here: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSFit. Method described in BRAINSFit: Mutual Information Registrations of Whole-Brain 3D Images, Using the Insight Toolkit, Johnson H.J., Harris G., Williams K., The Insight Journal, 2007. http://hdl.handle.net/1926/1291 version: 3.0.0 documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSFit license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Hans J. 
Johnson, hans-johnson -at- uiowa.edu, http://www.psychiatry.uiowa.edu acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); Gregory Harris(1), Vincent Magnotta(1,2,3); Andriy Fedorov(5) 1=University of Iowa Department of Psychiatry, 2=University of Iowa Department of Radiology, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering, 5=Surgical Planning Lab, Harvard """ input_spec = BRAINSFitInputSpec output_spec = BRAINSFitOutputSpec _cmd = " BRAINSFit " _outputs_filenames = { "outputVolume": "outputVolume.nii", "bsplineTransform": "bsplineTransform.h5", "outputTransform": "outputTransform.h5", "outputFixedVolumeROI": "outputFixedVolumeROI.nii", "strippedOutputTransform": "strippedOutputTransform.h5", "outputMovingVolumeROI": "outputMovingVolumeROI.nii", "linearTransform": "linearTransform.h5", "logFileReport": "logFileReport", } _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/registration/brainsresample.py000066400000000000000000000106651413403311400264140ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ...base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class BRAINSResampleInputSpec(CommandLineInputSpec): inputVolume = File(desc="Image To Warp", exists=True, argstr="--inputVolume %s") referenceVolume = File( desc="Reference image used only to define the output space. 
If not specified, the warping is done in the same space as the image to warp.", exists=True, argstr="--referenceVolume %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Resulting deformed image", argstr="--outputVolume %s", ) pixelType = traits.Enum( "float", "short", "ushort", "int", "uint", "uchar", "binary", desc="Specifies the pixel type for the input/output images. The 'binary' pixel type uses a modified algorithm whereby the image is read in as unsigned char, a signed distance map is created, signed distance map is resampled, and then a thresholded image of type unsigned char is written to disk.", argstr="--pixelType %s", ) deformationVolume = File( desc="Displacement Field to be used to warp the image (ITKv3 or earlier)", exists=True, argstr="--deformationVolume %s", ) warpTransform = File( desc="Filename for the BRAINSFit transform (ITKv3 or earlier) or composite transform file (ITKv4)", exists=True, argstr="--warpTransform %s", ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", "ResampleInPlace", "BSpline", "WindowedSinc", "Hamming", "Cosine", "Welch", "Lanczos", "Blackman", desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", argstr="--interpolationMode %s", ) inverseTransform = traits.Bool( desc="True/False is to compute inverse of given transformation. Default is false", argstr="--inverseTransform ", ) defaultValue = traits.Float(desc="Default voxel value", argstr="--defaultValue %f") gridSpacing = InputMultiPath( traits.Int, desc="Add warped grid to output image to help show the deformation that occured with specified spacing. A spacing of 0 in a dimension indicates that grid lines should be rendered to fall exactly (i.e. do not allow displacements off that plane). 
This is useful for makeing a 2D image of grid lines from the 3D space", sep=",", argstr="--gridSpacing %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class BRAINSResampleOutputSpec(TraitedSpec): outputVolume = File(desc="Resulting deformed image", exists=True) class BRAINSResample(SEMLikeCommandLine): """title: Resample Image (BRAINS) category: Registration description: This program collects together three common image processing tasks that all involve resampling an image volume: Resampling to a new resolution and spacing, applying a transformation (using an ITK transform IO mechanisms) and Warping (using a vector image deformation field). Full documentation available here: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSResample. version: 3.0.0 documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSResample license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was developed by Vincent Magnotta, Greg Harris, and Hans Johnson. acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. 
""" input_spec = BRAINSResampleInputSpec output_spec = BRAINSResampleOutputSpec _cmd = " BRAINSResample " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/registration/brainsresize.py000066400000000000000000000042761413403311400261060ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ...base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class BRAINSResizeInputSpec(CommandLineInputSpec): inputVolume = File(desc="Image To Scale", exists=True, argstr="--inputVolume %s") outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Resulting scaled image", argstr="--outputVolume %s", ) pixelType = traits.Enum( "float", "short", "ushort", "int", "uint", "uchar", "binary", desc="Specifies the pixel type for the input/output images. The 'binary' pixel type uses a modified algorithm whereby the image is read in as unsigned char, a signed distance map is created, signed distance map is resampled, and then a thresholded image of type unsigned char is written to disk.", argstr="--pixelType %s", ) scaleFactor = traits.Float( desc="The scale factor for the image spacing.", argstr="--scaleFactor %f" ) class BRAINSResizeOutputSpec(TraitedSpec): outputVolume = File(desc="Resulting scaled image", exists=True) class BRAINSResize(SEMLikeCommandLine): """title: Resize Image (BRAINS) category: Registration description: This program is useful for downsampling an image by a constant scale factor. version: 3.0.0 license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was developed by Hans Johnson. 
acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. """ input_spec = BRAINSResizeInputSpec output_spec = BRAINSResizeOutputSpec _cmd = " BRAINSResize " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/registration/specialized.py000066400000000000000000000562741413403311400257070ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ...base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class VBRAINSDemonWarpInputSpec(CommandLineInputSpec): movingVolume = InputMultiPath( File(exists=True), desc="Required: input moving image", argstr="--movingVolume %s...", ) fixedVolume = InputMultiPath( File(exists=True), desc="Required: input fixed (target) image", argstr="--fixedVolume %s...", ) inputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", desc="Input volumes will be typecast to this format: float|short|ushort|int|uchar", argstr="--inputPixelType %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", argstr="--outputVolume %s", ) outputDisplacementFieldVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", argstr="--outputDisplacementFieldVolume %s", ) outputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", desc="outputVolume will be typecast to this format: float|short|ushort|int|uchar", 
argstr="--outputPixelType %s", ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", "ResampleInPlace", "BSpline", "WindowedSinc", "Hamming", "Cosine", "Welch", "Lanczos", "Blackman", desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", argstr="--interpolationMode %s", ) registrationFilterType = traits.Enum( "Demons", "FastSymmetricForces", "Diffeomorphic", "LogDemons", "SymmetricLogDemons", desc="Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic|LogDemons|SymmetricLogDemons", argstr="--registrationFilterType %s", ) smoothDisplacementFieldSigma = traits.Float( desc="A gaussian smoothing value to be applied to the deformation feild at each iteration.", argstr="--smoothDisplacementFieldSigma %f", ) numberOfPyramidLevels = traits.Int( desc="Number of image pyramid levels to use in the multi-resolution registration.", argstr="--numberOfPyramidLevels %d", ) minimumFixedPyramid = InputMultiPath( traits.Int, desc="The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", argstr="--minimumFixedPyramid %s", ) minimumMovingPyramid = InputMultiPath( traits.Int, desc="The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", argstr="--minimumMovingPyramid %s", ) arrayOfPyramidLevelIterations = InputMultiPath( traits.Int, desc="The number of iterations for each pyramid level", sep=",", argstr="--arrayOfPyramidLevelIterations %s", ) histogramMatch = traits.Bool( desc="Histogram Match the input images. 
This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", argstr="--histogramMatch ", ) numberOfHistogramBins = traits.Int( desc="The number of histogram levels", argstr="--numberOfHistogramBins %d" ) numberOfMatchPoints = traits.Int( desc="The number of match points for histrogramMatch", argstr="--numberOfMatchPoints %d", ) medianFilterSize = InputMultiPath( traits.Int, desc="Median filter radius in all 3 directions. When images have a lot of salt and pepper noise, this step can improve the registration.", sep=",", argstr="--medianFilterSize %s", ) initializeWithDisplacementField = File( desc="Initial deformation field vector image file name", exists=True, argstr="--initializeWithDisplacementField %s", ) initializeWithTransform = File( desc="Initial Transform filename", exists=True, argstr="--initializeWithTransform %s", ) makeBOBF = traits.Bool( desc="Flag to make Brain-Only Background-Filled versions of the input and target volumes.", argstr="--makeBOBF ", ) fixedBinaryVolume = File( desc="Mask filename for desired region of interest in the Fixed image.", exists=True, argstr="--fixedBinaryVolume %s", ) movingBinaryVolume = File( desc="Mask filename for desired region of interest in the Moving image.", exists=True, argstr="--movingBinaryVolume %s", ) lowerThresholdForBOBF = traits.Int( desc="Lower threshold for performing BOBF", argstr="--lowerThresholdForBOBF %d" ) upperThresholdForBOBF = traits.Int( desc="Upper threshold for performing BOBF", argstr="--upperThresholdForBOBF %d" ) backgroundFillValue = traits.Int( desc="Replacement value to overwrite background when performing BOBF", argstr="--backgroundFillValue %d", ) seedForBOBF = InputMultiPath( traits.Int, desc="coordinates in all 3 directions for Seed when performing BOBF", sep=",", argstr="--seedForBOBF %s", ) neighborhoodForBOBF = InputMultiPath( traits.Int, desc="neighborhood in all 3 directions to be included when performing 
BOBF", sep=",", argstr="--neighborhoodForBOBF %s", ) outputDisplacementFieldPrefix = traits.Str( desc="Displacement field filename prefix for writing separate x, y, and z component images", argstr="--outputDisplacementFieldPrefix %s", ) outputCheckerboardVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", argstr="--outputCheckerboardVolume %s", ) checkerboardPatternSubdivisions = InputMultiPath( traits.Int, desc="Number of Checkerboard subdivisions in all 3 directions", sep=",", argstr="--checkerboardPatternSubdivisions %s", ) outputNormalized = traits.Bool( desc="Flag to warp and write the normalized images to output. In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", argstr="--outputNormalized ", ) outputDebug = traits.Bool( desc="Flag to write debugging images after each step.", argstr="--outputDebug " ) weightFactors = InputMultiPath( traits.Float, desc="Weight fatctors for each input images", sep=",", argstr="--weightFactors %s", ) gradient_type = traits.Enum( "0", "1", "2", desc="Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", argstr="--gradient_type %s", ) upFieldSmoothing = traits.Float( desc="Smoothing sigma for the update field at each iteration", argstr="--upFieldSmoothing %f", ) max_step_length = traits.Float( desc="Maximum length of an update vector (0: no restriction)", argstr="--max_step_length %f", ) use_vanilla_dem = traits.Bool( desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem " ) gui = traits.Bool( desc="Display intermediate image volumes for debugging", argstr="--gui " ) promptUser = traits.Bool( desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", argstr="--promptUser ", ) numberOfBCHApproximationTerms = traits.Int( desc="Number of terms in the BCH expansion", 
argstr="--numberOfBCHApproximationTerms %d", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class VBRAINSDemonWarpOutputSpec(TraitedSpec): outputVolume = File( desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", exists=True, ) outputDisplacementFieldVolume = File( desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", exists=True, ) outputCheckerboardVolume = File( desc="Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", exists=True, ) class VBRAINSDemonWarp(SEMLikeCommandLine): """title: Vector Demon Registration (BRAINS) category: Registration.Specialized description: This program finds a deformation field to warp a moving image onto a fixed image. The images must be of the same signal kind, and contain an image of the same kind of object. This program uses the Thirion Demons warp software in ITK, the Insight Toolkit. Additional information is available at: http://www.nitrc.org/projects/brainsdemonwarp. version: 3.0.0 documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSDemonWarp license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was developed by Hans J. Johnson and Greg Harris. acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. 
""" input_spec = VBRAINSDemonWarpInputSpec output_spec = VBRAINSDemonWarpOutputSpec _cmd = " VBRAINSDemonWarp " _outputs_filenames = { "outputVolume": "outputVolume.nii", "outputCheckerboardVolume": "outputCheckerboardVolume.nii", "outputDisplacementFieldVolume": "outputDisplacementFieldVolume.nrrd", } _redirect_x = False class BRAINSDemonWarpInputSpec(CommandLineInputSpec): movingVolume = File( desc="Required: input moving image", exists=True, argstr="--movingVolume %s" ) fixedVolume = File( desc="Required: input fixed (target) image", exists=True, argstr="--fixedVolume %s", ) inputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", desc="Input volumes will be typecast to this format: float|short|ushort|int|uchar", argstr="--inputPixelType %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", argstr="--outputVolume %s", ) outputDisplacementFieldVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", argstr="--outputDisplacementFieldVolume %s", ) outputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", desc="outputVolume will be typecast to this format: float|short|ushort|int|uchar", argstr="--outputPixelType %s", ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", "ResampleInPlace", "BSpline", "WindowedSinc", "Hamming", "Cosine", "Welch", "Lanczos", "Blackman", desc="Type of interpolation to be used when applying transform to moving volume. 
Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", argstr="--interpolationMode %s", ) registrationFilterType = traits.Enum( "Demons", "FastSymmetricForces", "Diffeomorphic", desc="Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic", argstr="--registrationFilterType %s", ) smoothDisplacementFieldSigma = traits.Float( desc="A gaussian smoothing value to be applied to the deformation feild at each iteration.", argstr="--smoothDisplacementFieldSigma %f", ) numberOfPyramidLevels = traits.Int( desc="Number of image pyramid levels to use in the multi-resolution registration.", argstr="--numberOfPyramidLevels %d", ) minimumFixedPyramid = InputMultiPath( traits.Int, desc="The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", argstr="--minimumFixedPyramid %s", ) minimumMovingPyramid = InputMultiPath( traits.Int, desc="The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", argstr="--minimumMovingPyramid %s", ) arrayOfPyramidLevelIterations = InputMultiPath( traits.Int, desc="The number of iterations for each pyramid level", sep=",", argstr="--arrayOfPyramidLevelIterations %s", ) histogramMatch = traits.Bool( desc="Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", argstr="--histogramMatch ", ) numberOfHistogramBins = traits.Int( desc="The number of histogram levels", argstr="--numberOfHistogramBins %d" ) numberOfMatchPoints = traits.Int( desc="The number of match points for histrogramMatch", argstr="--numberOfMatchPoints %d", ) medianFilterSize = InputMultiPath( traits.Int, desc="Median filter radius in all 3 directions. 
When images have a lot of salt and pepper noise, this step can improve the registration.", sep=",", argstr="--medianFilterSize %s", ) initializeWithDisplacementField = File( desc="Initial deformation field vector image file name", exists=True, argstr="--initializeWithDisplacementField %s", ) initializeWithTransform = File( desc="Initial Transform filename", exists=True, argstr="--initializeWithTransform %s", ) maskProcessingMode = traits.Enum( "NOMASK", "ROIAUTO", "ROI", "BOBF", desc="What mode to use for using the masks: NOMASK|ROIAUTO|ROI|BOBF. If ROIAUTO is choosen, then the mask is implicitly defined using a otsu forground and hole filling algorithm. Where the Region Of Interest mode uses the masks to define what parts of the image should be used for computing the deformation field. Brain Only Background Fill uses the masks to pre-process the input images by clipping and filling in the background with a predefined value.", argstr="--maskProcessingMode %s", ) fixedBinaryVolume = File( desc="Mask filename for desired region of interest in the Fixed image.", exists=True, argstr="--fixedBinaryVolume %s", ) movingBinaryVolume = File( desc="Mask filename for desired region of interest in the Moving image.", exists=True, argstr="--movingBinaryVolume %s", ) lowerThresholdForBOBF = traits.Int( desc="Lower threshold for performing BOBF", argstr="--lowerThresholdForBOBF %d" ) upperThresholdForBOBF = traits.Int( desc="Upper threshold for performing BOBF", argstr="--upperThresholdForBOBF %d" ) backgroundFillValue = traits.Int( desc="Replacement value to overwrite background when performing BOBF", argstr="--backgroundFillValue %d", ) seedForBOBF = InputMultiPath( traits.Int, desc="coordinates in all 3 directions for Seed when performing BOBF", sep=",", argstr="--seedForBOBF %s", ) neighborhoodForBOBF = InputMultiPath( traits.Int, desc="neighborhood in all 3 directions to be included when performing BOBF", sep=",", argstr="--neighborhoodForBOBF %s", ) 
outputDisplacementFieldPrefix = traits.Str( desc="Displacement field filename prefix for writing separate x, y, and z component images", argstr="--outputDisplacementFieldPrefix %s", ) outputCheckerboardVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", argstr="--outputCheckerboardVolume %s", ) checkerboardPatternSubdivisions = InputMultiPath( traits.Int, desc="Number of Checkerboard subdivisions in all 3 directions", sep=",", argstr="--checkerboardPatternSubdivisions %s", ) outputNormalized = traits.Bool( desc="Flag to warp and write the normalized images to output. In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", argstr="--outputNormalized ", ) outputDebug = traits.Bool( desc="Flag to write debugging images after each step.", argstr="--outputDebug " ) gradient_type = traits.Enum( "0", "1", "2", desc="Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", argstr="--gradient_type %s", ) upFieldSmoothing = traits.Float( desc="Smoothing sigma for the update field at each iteration", argstr="--upFieldSmoothing %f", ) max_step_length = traits.Float( desc="Maximum length of an update vector (0: no restriction)", argstr="--max_step_length %f", ) use_vanilla_dem = traits.Bool( desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem " ) gui = traits.Bool( desc="Display intermediate image volumes for debugging", argstr="--gui " ) promptUser = traits.Bool( desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", argstr="--promptUser ", ) numberOfBCHApproximationTerms = traits.Int( desc="Number of terms in the BCH expansion", argstr="--numberOfBCHApproximationTerms %d", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class 
BRAINSDemonWarpOutputSpec(TraitedSpec): outputVolume = File( desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", exists=True, ) outputDisplacementFieldVolume = File( desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", exists=True, ) outputCheckerboardVolume = File( desc="Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", exists=True, ) class BRAINSDemonWarp(SEMLikeCommandLine): """title: Demon Registration (BRAINS) category: Registration.Specialized description: This program finds a deformation field to warp a moving image onto a fixed image. The images must be of the same signal kind, and contain an image of the same kind of object. This program uses the Thirion Demons warp software in ITK, the Insight Toolkit. Additional information is available at: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSDemonWarp. version: 3.0.0 documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BRAINSDemonWarp license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was developed by Hans J. Johnson and Greg Harris. acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. 
""" input_spec = BRAINSDemonWarpInputSpec output_spec = BRAINSDemonWarpOutputSpec _cmd = " BRAINSDemonWarp " _outputs_filenames = { "outputVolume": "outputVolume.nii", "outputCheckerboardVolume": "outputCheckerboardVolume.nii", "outputDisplacementFieldVolume": "outputDisplacementFieldVolume.nrrd", } _redirect_x = False class BRAINSTransformFromFiducialsInputSpec(CommandLineInputSpec): fixedLandmarks = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="Ordered list of landmarks in the fixed image", argstr="--fixedLandmarks %s...", ) movingLandmarks = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="Ordered list of landmarks in the moving image", argstr="--movingLandmarks %s...", ) saveTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Save the transform that results from registration", argstr="--saveTransform %s", ) transformType = traits.Enum( "Translation", "Rigid", "Similarity", desc="Type of transform to produce", argstr="--transformType %s", ) fixedLandmarksFile = File( desc="An fcsv formatted file with a list of landmark points.", exists=True, argstr="--fixedLandmarksFile %s", ) movingLandmarksFile = File( desc="An fcsv formatted file with a list of landmark points.", exists=True, argstr="--movingLandmarksFile %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class BRAINSTransformFromFiducialsOutputSpec(TraitedSpec): saveTransform = File( desc="Save the transform that results from registration", exists=True ) class BRAINSTransformFromFiducials(SEMLikeCommandLine): """title: Fiducial Registration (BRAINS) category: Registration.Specialized description: Computes a rigid, similarity or affine transform from a matched list of fiducials version: 0.1.0.$Revision$ documentation-url: http://www.slicer.org/slicerWiki/index.php/Modules:TransformFromFiducials-Documentation-3.6 contributor: Casey B Goodlett acknowledgements: 
This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. """ input_spec = BRAINSTransformFromFiducialsInputSpec output_spec = BRAINSTransformFromFiducialsOutputSpec _cmd = " BRAINSTransformFromFiducials " _outputs_filenames = {"saveTransform": "saveTransform.h5"} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/registration/tests/000077500000000000000000000000001413403311400241655ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/registration/tests/__init__.py000066400000000000000000000000301413403311400262670ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSDemonWarp.py000066400000000000000000000117271413403311400314310ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import BRAINSDemonWarp def test_BRAINSDemonWarp_inputs(): input_map = dict( args=dict( argstr="%s", ), arrayOfPyramidLevelIterations=dict( argstr="--arrayOfPyramidLevelIterations %s", sep=",", ), backgroundFillValue=dict( argstr="--backgroundFillValue %d", ), checkerboardPatternSubdivisions=dict( argstr="--checkerboardPatternSubdivisions %s", sep=",", ), environ=dict( nohash=True, usedefault=True, ), fixedBinaryVolume=dict( argstr="--fixedBinaryVolume %s", extensions=None, ), fixedVolume=dict( argstr="--fixedVolume %s", extensions=None, ), gradient_type=dict( argstr="--gradient_type %s", ), gui=dict( argstr="--gui ", ), histogramMatch=dict( argstr="--histogramMatch ", ), initializeWithDisplacementField=dict( argstr="--initializeWithDisplacementField %s", extensions=None, ), initializeWithTransform=dict( argstr="--initializeWithTransform %s", extensions=None, ), inputPixelType=dict( argstr="--inputPixelType %s", ), interpolationMode=dict( argstr="--interpolationMode %s", ), lowerThresholdForBOBF=dict( 
argstr="--lowerThresholdForBOBF %d", ), maskProcessingMode=dict( argstr="--maskProcessingMode %s", ), max_step_length=dict( argstr="--max_step_length %f", ), medianFilterSize=dict( argstr="--medianFilterSize %s", sep=",", ), minimumFixedPyramid=dict( argstr="--minimumFixedPyramid %s", sep=",", ), minimumMovingPyramid=dict( argstr="--minimumMovingPyramid %s", sep=",", ), movingBinaryVolume=dict( argstr="--movingBinaryVolume %s", extensions=None, ), movingVolume=dict( argstr="--movingVolume %s", extensions=None, ), neighborhoodForBOBF=dict( argstr="--neighborhoodForBOBF %s", sep=",", ), numberOfBCHApproximationTerms=dict( argstr="--numberOfBCHApproximationTerms %d", ), numberOfHistogramBins=dict( argstr="--numberOfHistogramBins %d", ), numberOfMatchPoints=dict( argstr="--numberOfMatchPoints %d", ), numberOfPyramidLevels=dict( argstr="--numberOfPyramidLevels %d", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputCheckerboardVolume=dict( argstr="--outputCheckerboardVolume %s", hash_files=False, ), outputDebug=dict( argstr="--outputDebug ", ), outputDisplacementFieldPrefix=dict( argstr="--outputDisplacementFieldPrefix %s", ), outputDisplacementFieldVolume=dict( argstr="--outputDisplacementFieldVolume %s", hash_files=False, ), outputNormalized=dict( argstr="--outputNormalized ", ), outputPixelType=dict( argstr="--outputPixelType %s", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), promptUser=dict( argstr="--promptUser ", ), registrationFilterType=dict( argstr="--registrationFilterType %s", ), seedForBOBF=dict( argstr="--seedForBOBF %s", sep=",", ), smoothDisplacementFieldSigma=dict( argstr="--smoothDisplacementFieldSigma %f", ), upFieldSmoothing=dict( argstr="--upFieldSmoothing %f", ), upperThresholdForBOBF=dict( argstr="--upperThresholdForBOBF %d", ), use_vanilla_dem=dict( argstr="--use_vanilla_dem ", ), ) inputs = BRAINSDemonWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSDemonWarp_outputs(): output_map = dict( outputCheckerboardVolume=dict( extensions=None, ), outputDisplacementFieldVolume=dict( extensions=None, ), outputVolume=dict( extensions=None, ), ) outputs = BRAINSDemonWarp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSFit.py000066400000000000000000000166771413403311400302700ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsfit import BRAINSFit def test_BRAINSFit_inputs(): input_map = dict( ROIAutoClosingSize=dict( argstr="--ROIAutoClosingSize %f", ), ROIAutoDilateSize=dict( argstr="--ROIAutoDilateSize %f", ), args=dict( argstr="%s", ), backgroundFillValue=dict( argstr="--backgroundFillValue %f", ), bsplineTransform=dict( argstr="--bsplineTransform %s", hash_files=False, ), costFunctionConvergenceFactor=dict( argstr="--costFunctionConvergenceFactor %f", ), costMetric=dict( argstr="--costMetric %s", ), debugLevel=dict( argstr="--debugLevel %d", ), environ=dict( nohash=True, usedefault=True, ), failureExitCode=dict( argstr="--failureExitCode %d", ), fixedBinaryVolume=dict( argstr="--fixedBinaryVolume %s", extensions=None, ), fixedVolume=dict( argstr="--fixedVolume %s", extensions=None, ), fixedVolume2=dict( argstr="--fixedVolume2 %s", extensions=None, ), fixedVolumeTimeIndex=dict( argstr="--fixedVolumeTimeIndex %d", ), gui=dict( argstr="--gui ", ), histogramMatch=dict( argstr="--histogramMatch ", ), initialTransform=dict( argstr="--initialTransform %s", extensions=None, ), initializeRegistrationByCurrentGenericTransform=dict( argstr="--initializeRegistrationByCurrentGenericTransform ", ), initializeTransformMode=dict( argstr="--initializeTransformMode %s", ), interpolationMode=dict( 
argstr="--interpolationMode %s", ), linearTransform=dict( argstr="--linearTransform %s", hash_files=False, ), logFileReport=dict( argstr="--logFileReport %s", hash_files=False, ), maskInferiorCutOffFromCenter=dict( argstr="--maskInferiorCutOffFromCenter %f", ), maskProcessingMode=dict( argstr="--maskProcessingMode %s", ), maxBSplineDisplacement=dict( argstr="--maxBSplineDisplacement %f", ), maximumNumberOfCorrections=dict( argstr="--maximumNumberOfCorrections %d", ), maximumNumberOfEvaluations=dict( argstr="--maximumNumberOfEvaluations %d", ), maximumStepLength=dict( argstr="--maximumStepLength %f", ), medianFilterSize=dict( argstr="--medianFilterSize %s", sep=",", ), metricSamplingStrategy=dict( argstr="--metricSamplingStrategy %s", ), minimumStepLength=dict( argstr="--minimumStepLength %s", sep=",", ), movingBinaryVolume=dict( argstr="--movingBinaryVolume %s", extensions=None, ), movingVolume=dict( argstr="--movingVolume %s", extensions=None, ), movingVolume2=dict( argstr="--movingVolume2 %s", extensions=None, ), movingVolumeTimeIndex=dict( argstr="--movingVolumeTimeIndex %d", ), numberOfHistogramBins=dict( argstr="--numberOfHistogramBins %d", ), numberOfIterations=dict( argstr="--numberOfIterations %s", sep=",", ), numberOfMatchPoints=dict( argstr="--numberOfMatchPoints %d", ), numberOfSamples=dict( argstr="--numberOfSamples %d", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputFixedVolumeROI=dict( argstr="--outputFixedVolumeROI %s", hash_files=False, ), outputMovingVolumeROI=dict( argstr="--outputMovingVolumeROI %s", hash_files=False, ), outputTransform=dict( argstr="--outputTransform %s", hash_files=False, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), outputVolumePixelType=dict( argstr="--outputVolumePixelType %s", ), projectedGradientTolerance=dict( argstr="--projectedGradientTolerance %f", ), promptUser=dict( argstr="--promptUser ", ), relaxationFactor=dict( argstr="--relaxationFactor %f", ), 
removeIntensityOutliers=dict( argstr="--removeIntensityOutliers %f", ), reproportionScale=dict( argstr="--reproportionScale %f", ), samplingPercentage=dict( argstr="--samplingPercentage %f", ), scaleOutputValues=dict( argstr="--scaleOutputValues ", ), skewScale=dict( argstr="--skewScale %f", ), splineGridSize=dict( argstr="--splineGridSize %s", sep=",", ), strippedOutputTransform=dict( argstr="--strippedOutputTransform %s", hash_files=False, ), transformType=dict( argstr="--transformType %s", sep=",", ), translationScale=dict( argstr="--translationScale %f", ), useAffine=dict( argstr="--useAffine ", ), useBSpline=dict( argstr="--useBSpline ", ), useComposite=dict( argstr="--useComposite ", ), useROIBSpline=dict( argstr="--useROIBSpline ", ), useRigid=dict( argstr="--useRigid ", ), useScaleSkewVersor3D=dict( argstr="--useScaleSkewVersor3D ", ), useScaleVersor3D=dict( argstr="--useScaleVersor3D ", ), useSyN=dict( argstr="--useSyN ", ), writeOutputTransformInFloat=dict( argstr="--writeOutputTransformInFloat ", ), writeTransformOnFailure=dict( argstr="--writeTransformOnFailure ", ), ) inputs = BRAINSFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSFit_outputs(): output_map = dict( bsplineTransform=dict( extensions=None, ), linearTransform=dict( extensions=None, ), logFileReport=dict( extensions=None, ), outputFixedVolumeROI=dict( extensions=None, ), outputMovingVolumeROI=dict( extensions=None, ), outputTransform=dict( extensions=None, ), outputVolume=dict( extensions=None, ), strippedOutputTransform=dict( extensions=None, ), ) outputs = BRAINSFit.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResample.py000066400000000000000000000036021413403311400312760ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsresample import BRAINSResample def test_BRAINSResample_inputs(): input_map = dict( args=dict( argstr="%s", ), defaultValue=dict( argstr="--defaultValue %f", ), deformationVolume=dict( argstr="--deformationVolume %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), gridSpacing=dict( argstr="--gridSpacing %s", sep=",", ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), interpolationMode=dict( argstr="--interpolationMode %s", ), inverseTransform=dict( argstr="--inverseTransform ", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), pixelType=dict( argstr="--pixelType %s", ), referenceVolume=dict( argstr="--referenceVolume %s", extensions=None, ), warpTransform=dict( argstr="--warpTransform %s", extensions=None, ), ) inputs = BRAINSResample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSResample_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = BRAINSResample.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSResize.py000066400000000000000000000023001413403311400307610ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsresize import BRAINSResize def test_BRAINSResize_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), outputVolume=dict( 
argstr="--outputVolume %s", hash_files=False, ), pixelType=dict( argstr="--pixelType %s", ), scaleFactor=dict( argstr="--scaleFactor %f", ), ) inputs = BRAINSResize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSResize_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = BRAINSResize.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/registration/tests/test_auto_BRAINSTransformFromFiducials.py000066400000000000000000000031251413403311400341510ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import BRAINSTransformFromFiducials def test_BRAINSTransformFromFiducials_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fixedLandmarks=dict( argstr="--fixedLandmarks %s...", ), fixedLandmarksFile=dict( argstr="--fixedLandmarksFile %s", extensions=None, ), movingLandmarks=dict( argstr="--movingLandmarks %s...", ), movingLandmarksFile=dict( argstr="--movingLandmarksFile %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), saveTransform=dict( argstr="--saveTransform %s", hash_files=False, ), transformType=dict( argstr="--transformType %s", ), ) inputs = BRAINSTransformFromFiducials.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSTransformFromFiducials_outputs(): output_map = dict( saveTransform=dict( extensions=None, ), ) outputs = BRAINSTransformFromFiducials.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/semtools/registration/tests/test_auto_VBRAINSDemonWarp.py000066400000000000000000000117671413403311400315630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import VBRAINSDemonWarp def test_VBRAINSDemonWarp_inputs(): input_map = dict( args=dict( argstr="%s", ), arrayOfPyramidLevelIterations=dict( argstr="--arrayOfPyramidLevelIterations %s", sep=",", ), backgroundFillValue=dict( argstr="--backgroundFillValue %d", ), checkerboardPatternSubdivisions=dict( argstr="--checkerboardPatternSubdivisions %s", sep=",", ), environ=dict( nohash=True, usedefault=True, ), fixedBinaryVolume=dict( argstr="--fixedBinaryVolume %s", extensions=None, ), fixedVolume=dict( argstr="--fixedVolume %s...", ), gradient_type=dict( argstr="--gradient_type %s", ), gui=dict( argstr="--gui ", ), histogramMatch=dict( argstr="--histogramMatch ", ), initializeWithDisplacementField=dict( argstr="--initializeWithDisplacementField %s", extensions=None, ), initializeWithTransform=dict( argstr="--initializeWithTransform %s", extensions=None, ), inputPixelType=dict( argstr="--inputPixelType %s", ), interpolationMode=dict( argstr="--interpolationMode %s", ), lowerThresholdForBOBF=dict( argstr="--lowerThresholdForBOBF %d", ), makeBOBF=dict( argstr="--makeBOBF ", ), max_step_length=dict( argstr="--max_step_length %f", ), medianFilterSize=dict( argstr="--medianFilterSize %s", sep=",", ), minimumFixedPyramid=dict( argstr="--minimumFixedPyramid %s", sep=",", ), minimumMovingPyramid=dict( argstr="--minimumMovingPyramid %s", sep=",", ), movingBinaryVolume=dict( argstr="--movingBinaryVolume %s", extensions=None, ), movingVolume=dict( argstr="--movingVolume %s...", ), neighborhoodForBOBF=dict( argstr="--neighborhoodForBOBF %s", sep=",", ), numberOfBCHApproximationTerms=dict( argstr="--numberOfBCHApproximationTerms %d", ), numberOfHistogramBins=dict( argstr="--numberOfHistogramBins %d", ), numberOfMatchPoints=dict( 
argstr="--numberOfMatchPoints %d", ), numberOfPyramidLevels=dict( argstr="--numberOfPyramidLevels %d", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputCheckerboardVolume=dict( argstr="--outputCheckerboardVolume %s", hash_files=False, ), outputDebug=dict( argstr="--outputDebug ", ), outputDisplacementFieldPrefix=dict( argstr="--outputDisplacementFieldPrefix %s", ), outputDisplacementFieldVolume=dict( argstr="--outputDisplacementFieldVolume %s", hash_files=False, ), outputNormalized=dict( argstr="--outputNormalized ", ), outputPixelType=dict( argstr="--outputPixelType %s", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), promptUser=dict( argstr="--promptUser ", ), registrationFilterType=dict( argstr="--registrationFilterType %s", ), seedForBOBF=dict( argstr="--seedForBOBF %s", sep=",", ), smoothDisplacementFieldSigma=dict( argstr="--smoothDisplacementFieldSigma %f", ), upFieldSmoothing=dict( argstr="--upFieldSmoothing %f", ), upperThresholdForBOBF=dict( argstr="--upperThresholdForBOBF %d", ), use_vanilla_dem=dict( argstr="--use_vanilla_dem ", ), weightFactors=dict( argstr="--weightFactors %s", sep=",", ), ) inputs = VBRAINSDemonWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_VBRAINSDemonWarp_outputs(): output_map = dict( outputCheckerboardVolume=dict( extensions=None, ), outputDisplacementFieldVolume=dict( extensions=None, ), outputVolume=dict( extensions=None, ), ) outputs = VBRAINSDemonWarp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/semtools/segmentation/000077500000000000000000000000001413403311400230065ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/segmentation/__init__.py000066400000000000000000000003731413403311400251220ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .specialized import ( BRAINSCut, BRAINSROIAuto, BRAINSConstellationDetector, BRAINSCreateLabelMapFromProbabilityMaps, BinaryMaskEditorBasedOnLandmarks, BRAINSMultiSTAPLE, BRAINSABC, ESLR, ) nipype-1.7.0/nipype/interfaces/semtools/segmentation/specialized.py000066400000000000000000001174361413403311400256700ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ...base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class BRAINSCutInputSpec(CommandLineInputSpec): netConfiguration = File( desc="XML File defining BRAINSCut parameters. OLD NAME. PLEASE USE modelConfigurationFilename instead.", exists=True, argstr="--netConfiguration %s", ) modelConfigurationFilename = File( desc="XML File defining BRAINSCut parameters", exists=True, argstr="--modelConfigurationFilename %s", ) trainModelStartIndex = traits.Int( desc="Starting iteration for training", argstr="--trainModelStartIndex %d" ) verbose = traits.Int( desc="print out some debugging information", argstr="--verbose %d" ) multiStructureThreshold = traits.Bool( desc="multiStructureThreshold module to deal with overlaping area", argstr="--multiStructureThreshold ", ) histogramEqualization = traits.Bool( desc="A Histogram Equalization process could be added to the creating/applying process from Subject To Atlas. Default is false, which genreate input vectors without Histogram Equalization. 
", argstr="--histogramEqualization ", ) computeSSEOn = traits.Bool( desc="compute Sum of Square Error (SSE) along the trained model until the number of iteration given in the modelConfigurationFilename file", argstr="--computeSSEOn ", ) generateProbability = traits.Bool( desc="Generate probability map", argstr="--generateProbability " ) createVectors = traits.Bool( desc="create vectors for training neural net", argstr="--createVectors " ) trainModel = traits.Bool(desc="train the neural net", argstr="--trainModel ") NoTrainingVectorShuffling = traits.Bool( desc="If this flag is on, there will be no shuffling.", argstr="--NoTrainingVectorShuffling ", ) applyModel = traits.Bool(desc="apply the neural net", argstr="--applyModel ") validate = traits.Bool( desc="validate data set.Just need for the first time run ( This is for validation of xml file and not working yet )", argstr="--validate ", ) method = traits.Enum("RandomForest", "ANN", argstr="--method %s") numberOfTrees = traits.Int( desc=" Random tree: number of trees. This is to be used when only one model with specified depth wish to be created. ", argstr="--numberOfTrees %d", ) randomTreeDepth = traits.Int( desc=" Random tree depth. This is to be used when only one model with specified depth wish to be created. 
", argstr="--randomTreeDepth %d", ) modelFilename = traits.Str( desc=" model file name given from user (not by xml configuration file) ", argstr="--modelFilename %s", ) class BRAINSCutOutputSpec(TraitedSpec): pass class BRAINSCut(SEMLikeCommandLine): """title: BRAINSCut (BRAINS) category: Segmentation.Specialized description: Automatic Segmentation using neural networks version: 1.0 license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Vince Magnotta, Hans Johnson, Greg Harris, Kent Williams, Eunyoung Regina Kim """ input_spec = BRAINSCutInputSpec output_spec = BRAINSCutOutputSpec _cmd = " BRAINSCut " _outputs_filenames = {} _redirect_x = False class BRAINSROIAutoInputSpec(CommandLineInputSpec): inputVolume = File( desc="The input image for finding the largest region filled mask.", exists=True, argstr="--inputVolume %s", ) outputROIMaskVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="The ROI automatically found from the input image.", argstr="--outputROIMaskVolume %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="The inputVolume with optional [maskOutput|cropOutput] to the region of the brain mask.", argstr="--outputVolume %s", ) maskOutput = traits.Bool( desc="The inputVolume multiplied by the ROI mask.", argstr="--maskOutput " ) cropOutput = traits.Bool( desc="The inputVolume cropped to the region of the ROI mask.", argstr="--cropOutput ", ) otsuPercentileThreshold = traits.Float( desc="Parameter to the Otsu threshold algorithm.", argstr="--otsuPercentileThreshold %f", ) thresholdCorrectionFactor = traits.Float( desc="A factor to scale the Otsu algorithm's result threshold, in case clipping mangles the image.", argstr="--thresholdCorrectionFactor %f", ) closingSize = traits.Float( desc="The Closing Size (in millimeters) for largest connected filled mask. 
This value is divided by image spacing and rounded to the next largest voxel number.", argstr="--closingSize %f", ) ROIAutoDilateSize = traits.Float( desc="This flag is only relavent when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. At setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.", argstr="--ROIAutoDilateSize %f", ) outputVolumePixelType = traits.Enum( "float", "short", "ushort", "int", "uint", "uchar", desc="The output image Pixel Type is the scalar datatype for representation of the Output Volume.", argstr="--outputVolumePixelType %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class BRAINSROIAutoOutputSpec(TraitedSpec): outputROIMaskVolume = File( desc="The ROI automatically found from the input image.", exists=True ) outputVolume = File( desc="The inputVolume with optional [maskOutput|cropOutput] to the region of the brain mask.", exists=True, ) class BRAINSROIAuto(SEMLikeCommandLine): """title: Foreground masking (BRAINS) category: Segmentation.Specialized description: This program is used to create a mask over the most prominant forground region in an image. This is accomplished via a combination of otsu thresholding and a closing operation. More documentation is available here: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ForegroundMasking. version: 2.4.1 license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Hans J. 
Johnson, hans-johnson -at- uiowa.edu, http://www.psychiatry.uiowa.edu acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); Gregory Harris(1), Vincent Magnotta(1,2,3); Andriy Fedorov(5), fedorov -at- bwh.harvard.edu (Slicer integration); (1=University of Iowa Department of Psychiatry, 2=University of Iowa Department of Radiology, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering, 5=Surgical Planning Lab, Harvard) """ input_spec = BRAINSROIAutoInputSpec output_spec = BRAINSROIAutoOutputSpec _cmd = " BRAINSROIAuto " _outputs_filenames = { "outputVolume": "outputVolume.nii", "outputROIMaskVolume": "outputROIMaskVolume.nii", } _redirect_x = False class BRAINSConstellationDetectorInputSpec(CommandLineInputSpec): houghEyeDetectorMode = traits.Int( desc=", This flag controls the mode of Hough eye detector. By default, value of 1 is for T1W images, while the value of 0 is for T2W and PD images., ", argstr="--houghEyeDetectorMode %d", ) inputTemplateModel = File( desc="User-specified template model., ", exists=True, argstr="--inputTemplateModel %s", ) LLSModel = File( desc="Linear least squares model filename in HD5 format", exists=True, argstr="--LLSModel %s", ) inputVolume = File( desc="Input image in which to find ACPC points", exists=True, argstr="--inputVolume %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="ACPC-aligned output image with the same voxels, but updated origin, and direction cosign so that the AC point would fall at the physical location (0.0,0.0,0.0), and the mid-sagital plane is the plane where physical L/R coordinate is 0.0.", argstr="--outputVolume %s", ) outputResampledVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="ACPC-aligned output image in a resampled unifor space. 
Currently this is a 1mm, 256^3, Identity direction image.", argstr="--outputResampledVolume %s", ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="The filename for the original space to ACPC alignment to be written (in .h5 format)., ", argstr="--outputTransform %s", ) outputLandmarksInInputSpace = traits.Either( traits.Bool, File(), hash_files=False, desc=", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the original image space (the detected RP, AC, PC, and VN4) in it to be written., ", argstr="--outputLandmarksInInputSpace %s", ) outputLandmarksInACPCAlignedSpace = traits.Either( traits.Bool, File(), hash_files=False, desc=", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the output image space (the detected RP, AC, PC, and VN4) in it to be written., ", argstr="--outputLandmarksInACPCAlignedSpace %s", ) outputMRML = traits.Either( traits.Bool, File(), hash_files=False, desc=", The filename for the new subject-specific scene definition file in the same format produced by Slicer3 (in .mrml format). Only the components that were specified by the user on command line would be generated. Compatible components include inputVolume, outputVolume, outputLandmarksInInputSpace, outputLandmarksInACPCAlignedSpace, and outputTransform., ", argstr="--outputMRML %s", ) outputVerificationScript = traits.Either( traits.Bool, File(), hash_files=False, desc=", The filename for the Slicer3 script that verifies the aligned landmarks against the aligned image file. This will happen only in conjunction with saveOutputLandmarks and an outputVolume., ", argstr="--outputVerificationScript %s", ) mspQualityLevel = traits.Int( desc=", Flag cotrols how agressive the MSP is estimated. 
0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds), NOTE: -1= Prealigned so no estimate!., ", argstr="--mspQualityLevel %d", ) otsuPercentileThreshold = traits.Float( desc=", This is a parameter to FindLargestForegroundFilledMask, which is employed when acLowerBound is set and an outputUntransformedClippedVolume is requested., ", argstr="--otsuPercentileThreshold %f", ) acLowerBound = traits.Float( desc=", When generating a resampled output image, replace the image with the BackgroundFillValue everywhere below the plane This Far in physical units (millimeters) below (inferior to) the AC point (as found by the model.) The oversize default was chosen to have no effect. Based on visualizing a thousand masks in the IPIG study, we recommend a limit no smaller than 80.0 mm., ", argstr="--acLowerBound %f", ) cutOutHeadInOutputVolume = traits.Bool( desc=", Flag to cut out just the head tissue when producing an (un)transformed clipped volume., ", argstr="--cutOutHeadInOutputVolume ", ) outputUntransformedClippedVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output image in which to store neck-clipped input image, with the use of --acLowerBound and maybe --cutOutHeadInUntransformedVolume.", argstr="--outputUntransformedClippedVolume %s", ) rescaleIntensities = traits.Bool( desc=", Flag to turn on rescaling image intensities on input., ", argstr="--rescaleIntensities ", ) trimRescaledIntensities = traits.Float( desc=", Turn on clipping the rescaled image one-tailed on input. Units of standard deviations above the mean. Very large values are very permissive. Non-positive value turns clipping off. Defaults to removing 0.00001 of a normal tail above the mean., ", argstr="--trimRescaledIntensities %f", ) rescaleIntensitiesOutputRange = InputMultiPath( traits.Int, desc=", This pair of integers gives the lower and upper bounds on the signal portion of the output image. 
Out-of-field voxels are taken from BackgroundFillValue., ", sep=",", argstr="--rescaleIntensitiesOutputRange %s", ) BackgroundFillValue = traits.Str( desc="Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", argstr="--BackgroundFillValue %s", ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", "ResampleInPlace", "BSpline", "WindowedSinc", "Hamming", "Cosine", "Welch", "Lanczos", "Blackman", desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", argstr="--interpolationMode %s", ) forceACPoint = InputMultiPath( traits.Float, desc=", Use this flag to manually specify the AC point from the original image on the command line., ", sep=",", argstr="--forceACPoint %s", ) forcePCPoint = InputMultiPath( traits.Float, desc=", Use this flag to manually specify the PC point from the original image on the command line., ", sep=",", argstr="--forcePCPoint %s", ) forceVN4Point = InputMultiPath( traits.Float, desc=", Use this flag to manually specify the VN4 point from the original image on the command line., ", sep=",", argstr="--forceVN4Point %s", ) forceRPPoint = InputMultiPath( traits.Float, desc=", Use this flag to manually specify the RP point from the original image on the command line., ", sep=",", argstr="--forceRPPoint %s", ) inputLandmarksEMSP = File( desc=", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (in .fcsv) with the landmarks in the estimated MSP aligned space to be loaded. 
The detector will only process landmarks not enlisted on the file., ", exists=True, argstr="--inputLandmarksEMSP %s", ) forceHoughEyeDetectorReportFailure = traits.Bool( desc=", Flag indicates whether the Hough eye detector should report failure, ", argstr="--forceHoughEyeDetectorReportFailure ", ) rmpj = traits.Float( desc=", Search radius for MPJ in unit of mm, ", argstr="--rmpj %f", ) rac = traits.Float( desc=", Search radius for AC in unit of mm, ", argstr="--rac %f", ) rpc = traits.Float( desc=", Search radius for PC in unit of mm, ", argstr="--rpc %f", ) rVN4 = traits.Float( desc=", Search radius for VN4 in unit of mm, ", argstr="--rVN4 %f", ) debug = traits.Bool( desc=", Show internal debugging information., ", argstr="--debug ", ) verbose = traits.Bool( desc=", Show more verbose output, ", argstr="--verbose ", ) writeBranded2DImage = traits.Either( traits.Bool, File(), hash_files=False, desc=", The filename for the 2D .png branded midline debugging image. This will happen only in conjunction with requesting an outputVolume., ", argstr="--writeBranded2DImage %s", ) resultsDir = traits.Either( traits.Bool, Directory(), hash_files=False, desc=", The directory for the debuging images to be written., ", argstr="--resultsDir %s", ) writedebuggingImagesLevel = traits.Int( desc=", This flag controls if debugging images are produced. By default value of 0 is no images. 
Anything greater than zero will be increasing level of debugging images., ", argstr="--writedebuggingImagesLevel %d", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) atlasVolume = File( desc="Atlas volume image to be used for BRAINSFit registration", exists=True, argstr="--atlasVolume %s", ) atlasLandmarks = File( desc="Atlas landmarks to be used for BRAINSFit registration initialization, ", exists=True, argstr="--atlasLandmarks %s", ) atlasLandmarkWeights = File( desc="Weights associated with atlas landmarks to be used for BRAINSFit registration initialization, ", exists=True, argstr="--atlasLandmarkWeights %s", ) class BRAINSConstellationDetectorOutputSpec(TraitedSpec): outputVolume = File( desc="ACPC-aligned output image with the same voxels, but updated origin, and direction cosign so that the AC point would fall at the physical location (0.0,0.0,0.0), and the mid-sagital plane is the plane where physical L/R coordinate is 0.0.", exists=True, ) outputResampledVolume = File( desc="ACPC-aligned output image in a resampled unifor space. 
Currently this is a 1mm, 256^3, Identity direction image.", exists=True, ) outputTransform = File( desc="The filename for the original space to ACPC alignment to be written (in .h5 format)., ", exists=True, ) outputLandmarksInInputSpace = File( desc=", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the original image space (the detected RP, AC, PC, and VN4) in it to be written., ", exists=True, ) outputLandmarksInACPCAlignedSpace = File( desc=", The filename for the new subject-specific landmark definition file in the same format produced by Slicer3 (.fcsv) with the landmarks in the output image space (the detected RP, AC, PC, and VN4) in it to be written., ", exists=True, ) outputMRML = File( desc=", The filename for the new subject-specific scene definition file in the same format produced by Slicer3 (in .mrml format). Only the components that were specified by the user on command line would be generated. Compatible components include inputVolume, outputVolume, outputLandmarksInInputSpace, outputLandmarksInACPCAlignedSpace, and outputTransform., ", exists=True, ) outputVerificationScript = File( desc=", The filename for the Slicer3 script that verifies the aligned landmarks against the aligned image file. This will happen only in conjunction with saveOutputLandmarks and an outputVolume., ", exists=True, ) outputUntransformedClippedVolume = File( desc="Output image in which to store neck-clipped input image, with the use of --acLowerBound and maybe --cutOutHeadInUntransformedVolume.", exists=True, ) writeBranded2DImage = File( desc=", The filename for the 2D .png branded midline debugging image. 
This will happen only in conjunction with requesting an outputVolume., ", exists=True, ) resultsDir = Directory( desc=", The directory for the debuging images to be written., ", exists=True, ) class BRAINSConstellationDetector(SEMLikeCommandLine): """title: Brain Landmark Constellation Detector (BRAINS) category: Segmentation.Specialized description: This program will find the mid-sagittal plane, a constellation of landmarks in a volume, and create an AC/PC aligned data set with the AC point at the center of the voxel lattice (labeled at the origin of the image physical space.) Part of this work is an extention of the algorithms originally described by Dr. Babak A. Ardekani, Alvin H. Bachman, Model-based automatic detection of the anterior and posterior commissures on MRI scans, NeuroImage, Volume 46, Issue 3, 1 July 2009, Pages 677-682, ISSN 1053-8119, DOI: 10.1016/j.neuroimage.2009.02.030. (http://www.sciencedirect.com/science/article/B6WNP-4VRP25C-4/2/8207b962a38aa83c822c6379bc43fe4c) version: 1.0 documentation-url: http://www.nitrc.org/projects/brainscdetector/ """ input_spec = BRAINSConstellationDetectorInputSpec output_spec = BRAINSConstellationDetectorOutputSpec _cmd = " BRAINSConstellationDetector " _outputs_filenames = { "outputVolume": "outputVolume.nii.gz", "outputMRML": "outputMRML.mrml", "resultsDir": "resultsDir", "outputResampledVolume": "outputResampledVolume.nii.gz", "outputTransform": "outputTransform.h5", "writeBranded2DImage": "writeBranded2DImage.png", "outputLandmarksInACPCAlignedSpace": "outputLandmarksInACPCAlignedSpace.fcsv", "outputLandmarksInInputSpace": "outputLandmarksInInputSpace.fcsv", "outputUntransformedClippedVolume": "outputUntransformedClippedVolume.nii.gz", "outputVerificationScript": "outputVerificationScript.sh", } _redirect_x = False class BRAINSCreateLabelMapFromProbabilityMapsInputSpec(CommandLineInputSpec): inputProbabilityVolume = InputMultiPath( File(exists=True), desc="The list of proobabilityimages.", 
argstr="--inputProbabilityVolume %s...", ) priorLabelCodes = InputMultiPath( traits.Int, desc="A list of PriorLabelCode values used for coding the output label images", sep=",", argstr="--priorLabelCodes %s", ) foregroundPriors = InputMultiPath( traits.Int, desc="A list: For each Prior Label, 1 if foreground, 0 if background", sep=",", argstr="--foregroundPriors %s", ) nonAirRegionMask = File( desc="a mask representing the 'NonAirRegion' -- Just force pixels in this region to zero", exists=True, argstr="--nonAirRegionMask %s", ) inclusionThreshold = traits.Float( desc="tolerance for inclusion", argstr="--inclusionThreshold %f" ) dirtyLabelVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="the labels prior to cleaning", argstr="--dirtyLabelVolume %s", ) cleanLabelVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="the foreground labels volume", argstr="--cleanLabelVolume %s", ) class BRAINSCreateLabelMapFromProbabilityMapsOutputSpec(TraitedSpec): dirtyLabelVolume = File(desc="the labels prior to cleaning", exists=True) cleanLabelVolume = File(desc="the foreground labels volume", exists=True) class BRAINSCreateLabelMapFromProbabilityMaps(SEMLikeCommandLine): """title: Create Label Map From Probability Maps (BRAINS) category: Segmentation.Specialized description: Given A list of Probability Maps, generate a LabelMap. 
""" input_spec = BRAINSCreateLabelMapFromProbabilityMapsInputSpec output_spec = BRAINSCreateLabelMapFromProbabilityMapsOutputSpec _cmd = " BRAINSCreateLabelMapFromProbabilityMaps " _outputs_filenames = { "dirtyLabelVolume": "dirtyLabelVolume.nii", "cleanLabelVolume": "cleanLabelVolume.nii", } _redirect_x = False class BinaryMaskEditorBasedOnLandmarksInputSpec(CommandLineInputSpec): inputBinaryVolume = File( desc="Input binary image in which to be edited", exists=True, argstr="--inputBinaryVolume %s", ) outputBinaryVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output binary image in which to be edited", argstr="--outputBinaryVolume %s", ) inputLandmarksFilename = File( desc=" The filename for the landmark definition file in the same format produced by Slicer3 (.fcsv). ", exists=True, argstr="--inputLandmarksFilename %s", ) inputLandmarkNames = InputMultiPath( traits.Str, desc=" A target input landmark name to be edited. This should be listed in the inputLandmakrFilename Given. ", sep=",", argstr="--inputLandmarkNames %s", ) setCutDirectionForLandmark = InputMultiPath( traits.Str, desc="Setting the cutting out direction of the input binary image to the one of anterior, posterior, left, right, superior or posterior. (ENUMERATION: ANTERIOR, POSTERIOR, LEFT, RIGHT, SUPERIOR, POSTERIOR) ", sep=",", argstr="--setCutDirectionForLandmark %s", ) setCutDirectionForObliquePlane = InputMultiPath( traits.Str, desc="If this is true, the mask will be thresholded out to the direction of inferior, posterior, and/or left. Default behavrior is that cutting out to the direction of superior, anterior and/or right. ", sep=",", argstr="--setCutDirectionForObliquePlane %s", ) inputLandmarkNamesForObliquePlane = InputMultiPath( traits.Str, desc=" Three subset landmark names of inputLandmarksFilename for a oblique plane computation. The plane computed for binary volume editing. 
", sep=",", argstr="--inputLandmarkNamesForObliquePlane %s", ) class BinaryMaskEditorBasedOnLandmarksOutputSpec(TraitedSpec): outputBinaryVolume = File( desc="Output binary image in which to be edited", exists=True ) class BinaryMaskEditorBasedOnLandmarks(SEMLikeCommandLine): """title: BRAINS Binary Mask Editor Based On Landmarks(BRAINS) category: Segmentation.Specialized version: 1.0 documentation-url: http://www.nitrc.org/projects/brainscdetector/ """ input_spec = BinaryMaskEditorBasedOnLandmarksInputSpec output_spec = BinaryMaskEditorBasedOnLandmarksOutputSpec _cmd = " BinaryMaskEditorBasedOnLandmarks " _outputs_filenames = {"outputBinaryVolume": "outputBinaryVolume.nii"} _redirect_x = False class BRAINSMultiSTAPLEInputSpec(CommandLineInputSpec): inputCompositeT1Volume = File( desc="Composite T1, all label maps transofrmed into the space for this image.", exists=True, argstr="--inputCompositeT1Volume %s", ) inputLabelVolume = InputMultiPath( File(exists=True), desc="The list of proobabilityimages.", argstr="--inputLabelVolume %s...", ) inputTransform = InputMultiPath( File(exists=True), desc="transforms to apply to label volumes", argstr="--inputTransform %s...", ) labelForUndecidedPixels = traits.Int( desc="Label for undecided pixels", argstr="--labelForUndecidedPixels %d" ) resampledVolumePrefix = traits.Str( desc="if given, write out resampled volumes with this prefix", argstr="--resampledVolumePrefix %s", ) skipResampling = traits.Bool( desc="Omit resampling images into reference space", argstr="--skipResampling " ) outputMultiSTAPLE = traits.Either( traits.Bool, File(), hash_files=False, desc="the MultiSTAPLE average of input label volumes", argstr="--outputMultiSTAPLE %s", ) outputConfusionMatrix = traits.Either( traits.Bool, File(), hash_files=False, desc="Confusion Matrix", argstr="--outputConfusionMatrix %s", ) class BRAINSMultiSTAPLEOutputSpec(TraitedSpec): outputMultiSTAPLE = File( desc="the MultiSTAPLE average of input label volumes", exists=True ) 
outputConfusionMatrix = File(desc="Confusion Matrix", exists=True) class BRAINSMultiSTAPLE(SEMLikeCommandLine): """title: Create best representative label map) category: Segmentation.Specialized description: given a list of label map images, create a representative/average label map. """ input_spec = BRAINSMultiSTAPLEInputSpec output_spec = BRAINSMultiSTAPLEOutputSpec _cmd = " BRAINSMultiSTAPLE " _outputs_filenames = { "outputMultiSTAPLE": "outputMultiSTAPLE.nii", "outputConfusionMatrix": "outputConfusionMatrixh5|mat|txt", } _redirect_x = False class BRAINSABCInputSpec(CommandLineInputSpec): inputVolumes = InputMultiPath( File(exists=True), desc="The list of input image files to be segmented.", argstr="--inputVolumes %s...", ) atlasDefinition = File( desc="Contains all parameters for Atlas", exists=True, argstr="--atlasDefinition %s", ) restoreState = File( desc="The initial state for the registration process", exists=True, argstr="--restoreState %s", ) saveState = traits.Either( traits.Bool, File(), hash_files=False, desc="(optional) Filename to which save the final state of the registration", argstr="--saveState %s", ) inputVolumeTypes = InputMultiPath( traits.Str, desc="The list of input image types corresponding to the inputVolumes.", sep=",", argstr="--inputVolumeTypes %s", ) outputDir = traits.Either( traits.Bool, Directory(), hash_files=False, desc="Ouput directory", argstr="--outputDir %s", ) atlasToSubjectTransformType = traits.Enum( "Identity", "Rigid", "Affine", "BSpline", "SyN", desc=" What type of linear transform type do you want to use to register the atlas to the reference subject image.", argstr="--atlasToSubjectTransformType %s", ) atlasToSubjectTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="The transform from atlas to the subject", argstr="--atlasToSubjectTransform %s", ) atlasToSubjectInitialTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="The initial transform from atlas to the subject", 
argstr="--atlasToSubjectInitialTransform %s", ) subjectIntermodeTransformType = traits.Enum( "Identity", "Rigid", "Affine", "BSpline", desc=" What type of linear transform type do you want to use to register the atlas to the reference subject image.", argstr="--subjectIntermodeTransformType %s", ) outputVolumes = traits.Either( traits.Bool, InputMultiPath(File()), hash_files=False, desc="Corrected Output Images: should specify the same number of images as inputVolume, if only one element is given, then it is used as a file pattern where %s is replaced by the imageVolumeType, and %d by the index list location.", argstr="--outputVolumes %s...", ) outputLabels = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Label Image", argstr="--outputLabels %s", ) outputDirtyLabels = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Dirty Label Image", argstr="--outputDirtyLabels %s", ) posteriorTemplate = traits.Str( desc="filename template for Posterior output files", argstr="--posteriorTemplate %s", ) outputFormat = traits.Enum( "NIFTI", "Meta", "Nrrd", desc="Output format", argstr="--outputFormat %s" ) interpolationMode = traits.Enum( "BSpline", "NearestNeighbor", "WindowedSinc", "Linear", "ResampleInPlace", "Hamming", "Cosine", "Welch", "Lanczos", "Blackman", desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, NearestNeighbor, BSpline, WindowedSinc, or ResampleInPlace. 
The ResampleInPlace option will create an image with the same discrete voxel values and will adjust the origin and direction of the physical space interpretation.", argstr="--interpolationMode %s", ) maxIterations = traits.Int(desc="Filter iterations", argstr="--maxIterations %d") medianFilterSize = InputMultiPath( traits.Int, desc="The radius for the optional MedianImageFilter preprocessing in all 3 directions.", sep=",", argstr="--medianFilterSize %s", ) filterIteration = traits.Int( desc="Filter iterations", argstr="--filterIteration %d" ) filterTimeStep = traits.Float( desc="Filter time step should be less than (PixelSpacing/(1^(DIM+1)), value is set to negative, then allow automatic setting of this value. ", argstr="--filterTimeStep %f", ) filterMethod = traits.Enum( "None", "CurvatureFlow", "GradientAnisotropicDiffusion", "Median", desc="Filter method for preprocessing of registration", argstr="--filterMethod %s", ) maxBiasDegree = traits.Int(desc="Maximum bias degree", argstr="--maxBiasDegree %d") useKNN = traits.Bool( desc="Use the KNN stage of estimating posteriors.", argstr="--useKNN " ) purePlugsThreshold = traits.Float( desc="If this threshold value is greater than zero, only pure samples are used to compute the distributions in EM classification, and only pure samples are used for KNN training. The default value is set to 0, that means not using pure plugs. 
However, a value of 0.2 is suggested if you want to activate using pure plugs option.", argstr="--purePlugsThreshold %f", ) numberOfSubSamplesInEachPlugArea = InputMultiPath( traits.Int, desc="Number of continous index samples taken at each direction of lattice space for each plug volume.", sep=",", argstr="--numberOfSubSamplesInEachPlugArea %s", ) atlasWarpingOff = traits.Bool( desc="Deformable registration of atlas to subject", argstr="--atlasWarpingOff " ) gridSize = InputMultiPath( traits.Int, desc="Grid size for atlas warping with BSplines", sep=",", argstr="--gridSize %s", ) defaultSuffix = traits.Str(argstr="--defaultSuffix %s") implicitOutputs = traits.Either( traits.Bool, InputMultiPath(File()), hash_files=False, desc="Outputs to be made available to NiPype. Needed because not all BRAINSABC outputs have command line arguments.", argstr="--implicitOutputs %s...", ) debuglevel = traits.Int( desc="Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", argstr="--debuglevel %d", ) writeLess = traits.Bool( desc="Does not write posteriors and filtered, bias corrected images", argstr="--writeLess ", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class BRAINSABCOutputSpec(TraitedSpec): saveState = File( desc="(optional) Filename to which save the final state of the registration", exists=True, ) outputDir = Directory(desc="Ouput directory", exists=True) atlasToSubjectTransform = File( desc="The transform from atlas to the subject", exists=True ) atlasToSubjectInitialTransform = File( desc="The initial transform from atlas to the subject", exists=True ) outputVolumes = OutputMultiPath( File(exists=True), desc="Corrected Output Images: should specify the same number of images as inputVolume, if only one element is given, then it is used as a file pattern where %s is replaced by the imageVolumeType, and %d by the index list location.", ) 
outputLabels = File(desc="Output Label Image", exists=True) outputDirtyLabels = File(desc="Output Dirty Label Image", exists=True) implicitOutputs = OutputMultiPath( File(exists=True), desc="Outputs to be made available to NiPype. Needed because not all BRAINSABC outputs have command line arguments.", ) class BRAINSABC(SEMLikeCommandLine): """title: Intra-subject registration, bias Correction, and tissue classification (BRAINS) category: Segmentation.Specialized description: Atlas-based tissue segmentation method. This is an algorithmic extension of work done by XXXX at UNC and Utah XXXX need more description here. """ input_spec = BRAINSABCInputSpec output_spec = BRAINSABCOutputSpec _cmd = " BRAINSABC " _outputs_filenames = { "saveState": "saveState.h5", "outputLabels": "outputLabels.nii.gz", "atlasToSubjectTransform": "atlasToSubjectTransform.h5", "atlasToSubjectInitialTransform": "atlasToSubjectInitialTransform.h5", "outputDirtyLabels": "outputDirtyLabels.nii.gz", "outputVolumes": "outputVolumes.nii.gz", "outputDir": "outputDir", "implicitOutputs": "implicitOutputs.nii.gz", } _redirect_x = False class ESLRInputSpec(CommandLineInputSpec): inputVolume = File( desc="Input Label Volume", exists=True, argstr="--inputVolume %s" ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Label Volume", argstr="--outputVolume %s", ) low = traits.Int( desc="The lower bound of the labels to be used.", argstr="--low %d" ) high = traits.Int( desc="The higher bound of the labels to be used.", argstr="--high %d" ) closingSize = traits.Int( desc="The closing size for hole filling.", argstr="--closingSize %d" ) openingSize = traits.Int( desc="The opening size for hole filling.", argstr="--openingSize %d" ) safetySize = traits.Int( desc="The safetySize size for the clipping region.", argstr="--safetySize %d" ) preserveOutside = traits.Bool( desc="For values outside the specified range, preserve those values.", argstr="--preserveOutside ", ) 
numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class ESLROutputSpec(TraitedSpec): outputVolume = File(desc="Output Label Volume", exists=True) class ESLR(SEMLikeCommandLine): """title: Clean Contiguous Label Map (BRAINS) category: Segmentation.Specialized description: From a range of label map values, extract the largest contiguous region of those labels """ input_spec = ESLRInputSpec output_spec = ESLROutputSpec _cmd = " ESLR " _outputs_filenames = {"outputVolume": "outputVolume.nii.gz"} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/segmentation/tests/000077500000000000000000000000001413403311400241505ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/segmentation/tests/__init__.py000066400000000000000000000000301413403311400262520ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSABC.py000066400000000000000000000103471413403311400301020ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import BRAINSABC def test_BRAINSABC_inputs(): input_map = dict( args=dict( argstr="%s", ), atlasDefinition=dict( argstr="--atlasDefinition %s", extensions=None, ), atlasToSubjectInitialTransform=dict( argstr="--atlasToSubjectInitialTransform %s", hash_files=False, ), atlasToSubjectTransform=dict( argstr="--atlasToSubjectTransform %s", hash_files=False, ), atlasToSubjectTransformType=dict( argstr="--atlasToSubjectTransformType %s", ), atlasWarpingOff=dict( argstr="--atlasWarpingOff ", ), debuglevel=dict( argstr="--debuglevel %d", ), defaultSuffix=dict( argstr="--defaultSuffix %s", ), environ=dict( nohash=True, usedefault=True, ), filterIteration=dict( argstr="--filterIteration %d", ), filterMethod=dict( argstr="--filterMethod %s", ), filterTimeStep=dict( argstr="--filterTimeStep %f", ), gridSize=dict( argstr="--gridSize %s", sep=",", ), 
implicitOutputs=dict( argstr="--implicitOutputs %s...", hash_files=False, ), inputVolumeTypes=dict( argstr="--inputVolumeTypes %s", sep=",", ), inputVolumes=dict( argstr="--inputVolumes %s...", ), interpolationMode=dict( argstr="--interpolationMode %s", ), maxBiasDegree=dict( argstr="--maxBiasDegree %d", ), maxIterations=dict( argstr="--maxIterations %d", ), medianFilterSize=dict( argstr="--medianFilterSize %s", sep=",", ), numberOfSubSamplesInEachPlugArea=dict( argstr="--numberOfSubSamplesInEachPlugArea %s", sep=",", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputDir=dict( argstr="--outputDir %s", hash_files=False, ), outputDirtyLabels=dict( argstr="--outputDirtyLabels %s", hash_files=False, ), outputFormat=dict( argstr="--outputFormat %s", ), outputLabels=dict( argstr="--outputLabels %s", hash_files=False, ), outputVolumes=dict( argstr="--outputVolumes %s...", hash_files=False, ), posteriorTemplate=dict( argstr="--posteriorTemplate %s", ), purePlugsThreshold=dict( argstr="--purePlugsThreshold %f", ), restoreState=dict( argstr="--restoreState %s", extensions=None, ), saveState=dict( argstr="--saveState %s", hash_files=False, ), subjectIntermodeTransformType=dict( argstr="--subjectIntermodeTransformType %s", ), useKNN=dict( argstr="--useKNN ", ), writeLess=dict( argstr="--writeLess ", ), ) inputs = BRAINSABC.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSABC_outputs(): output_map = dict( atlasToSubjectInitialTransform=dict( extensions=None, ), atlasToSubjectTransform=dict( extensions=None, ), implicitOutputs=dict(), outputDir=dict(), outputDirtyLabels=dict( extensions=None, ), outputLabels=dict( extensions=None, ), outputVolumes=dict(), saveState=dict( extensions=None, ), ) outputs = BRAINSABC.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSConstellationDetector.py000066400000000000000000000130071413403311400340210ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import BRAINSConstellationDetector def test_BRAINSConstellationDetector_inputs(): input_map = dict( BackgroundFillValue=dict( argstr="--BackgroundFillValue %s", ), LLSModel=dict( argstr="--LLSModel %s", extensions=None, ), acLowerBound=dict( argstr="--acLowerBound %f", ), args=dict( argstr="%s", ), atlasLandmarkWeights=dict( argstr="--atlasLandmarkWeights %s", extensions=None, ), atlasLandmarks=dict( argstr="--atlasLandmarks %s", extensions=None, ), atlasVolume=dict( argstr="--atlasVolume %s", extensions=None, ), cutOutHeadInOutputVolume=dict( argstr="--cutOutHeadInOutputVolume ", ), debug=dict( argstr="--debug ", ), environ=dict( nohash=True, usedefault=True, ), forceACPoint=dict( argstr="--forceACPoint %s", sep=",", ), forceHoughEyeDetectorReportFailure=dict( argstr="--forceHoughEyeDetectorReportFailure ", ), forcePCPoint=dict( argstr="--forcePCPoint %s", sep=",", ), forceRPPoint=dict( argstr="--forceRPPoint %s", sep=",", ), forceVN4Point=dict( argstr="--forceVN4Point %s", sep=",", ), houghEyeDetectorMode=dict( argstr="--houghEyeDetectorMode %d", ), inputLandmarksEMSP=dict( argstr="--inputLandmarksEMSP %s", extensions=None, ), inputTemplateModel=dict( argstr="--inputTemplateModel %s", extensions=None, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), interpolationMode=dict( argstr="--interpolationMode %s", ), mspQualityLevel=dict( argstr="--mspQualityLevel %d", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), otsuPercentileThreshold=dict( argstr="--otsuPercentileThreshold %f", ), outputLandmarksInACPCAlignedSpace=dict( argstr="--outputLandmarksInACPCAlignedSpace %s", hash_files=False, ), outputLandmarksInInputSpace=dict( 
argstr="--outputLandmarksInInputSpace %s", hash_files=False, ), outputMRML=dict( argstr="--outputMRML %s", hash_files=False, ), outputResampledVolume=dict( argstr="--outputResampledVolume %s", hash_files=False, ), outputTransform=dict( argstr="--outputTransform %s", hash_files=False, ), outputUntransformedClippedVolume=dict( argstr="--outputUntransformedClippedVolume %s", hash_files=False, ), outputVerificationScript=dict( argstr="--outputVerificationScript %s", hash_files=False, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), rVN4=dict( argstr="--rVN4 %f", ), rac=dict( argstr="--rac %f", ), rescaleIntensities=dict( argstr="--rescaleIntensities ", ), rescaleIntensitiesOutputRange=dict( argstr="--rescaleIntensitiesOutputRange %s", sep=",", ), resultsDir=dict( argstr="--resultsDir %s", hash_files=False, ), rmpj=dict( argstr="--rmpj %f", ), rpc=dict( argstr="--rpc %f", ), trimRescaledIntensities=dict( argstr="--trimRescaledIntensities %f", ), verbose=dict( argstr="--verbose ", ), writeBranded2DImage=dict( argstr="--writeBranded2DImage %s", hash_files=False, ), writedebuggingImagesLevel=dict( argstr="--writedebuggingImagesLevel %d", ), ) inputs = BRAINSConstellationDetector.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSConstellationDetector_outputs(): output_map = dict( outputLandmarksInACPCAlignedSpace=dict( extensions=None, ), outputLandmarksInInputSpace=dict( extensions=None, ), outputMRML=dict( extensions=None, ), outputResampledVolume=dict( extensions=None, ), outputTransform=dict( extensions=None, ), outputUntransformedClippedVolume=dict( extensions=None, ), outputVerificationScript=dict( extensions=None, ), outputVolume=dict( extensions=None, ), resultsDir=dict(), writeBranded2DImage=dict( extensions=None, ), ) outputs = BRAINSConstellationDetector.output_spec() for key, metadata in list(output_map.items()): for 
metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value test_auto_BRAINSCreateLabelMapFromProbabilityMaps.py000066400000000000000000000034261413403311400361050ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/segmentation/tests# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import BRAINSCreateLabelMapFromProbabilityMaps def test_BRAINSCreateLabelMapFromProbabilityMaps_inputs(): input_map = dict( args=dict( argstr="%s", ), cleanLabelVolume=dict( argstr="--cleanLabelVolume %s", hash_files=False, ), dirtyLabelVolume=dict( argstr="--dirtyLabelVolume %s", hash_files=False, ), environ=dict( nohash=True, usedefault=True, ), foregroundPriors=dict( argstr="--foregroundPriors %s", sep=",", ), inclusionThreshold=dict( argstr="--inclusionThreshold %f", ), inputProbabilityVolume=dict( argstr="--inputProbabilityVolume %s...", ), nonAirRegionMask=dict( argstr="--nonAirRegionMask %s", extensions=None, ), priorLabelCodes=dict( argstr="--priorLabelCodes %s", sep=",", ), ) inputs = BRAINSCreateLabelMapFromProbabilityMaps.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSCreateLabelMapFromProbabilityMaps_outputs(): output_map = dict( cleanLabelVolume=dict( extensions=None, ), dirtyLabelVolume=dict( extensions=None, ), ) outputs = BRAINSCreateLabelMapFromProbabilityMaps.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSCut.py000066400000000000000000000042771413403311400302550ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import BRAINSCut def test_BRAINSCut_inputs(): input_map = dict( NoTrainingVectorShuffling=dict( 
argstr="--NoTrainingVectorShuffling ", ), applyModel=dict( argstr="--applyModel ", ), args=dict( argstr="%s", ), computeSSEOn=dict( argstr="--computeSSEOn ", ), createVectors=dict( argstr="--createVectors ", ), environ=dict( nohash=True, usedefault=True, ), generateProbability=dict( argstr="--generateProbability ", ), histogramEqualization=dict( argstr="--histogramEqualization ", ), method=dict( argstr="--method %s", ), modelConfigurationFilename=dict( argstr="--modelConfigurationFilename %s", extensions=None, ), modelFilename=dict( argstr="--modelFilename %s", ), multiStructureThreshold=dict( argstr="--multiStructureThreshold ", ), netConfiguration=dict( argstr="--netConfiguration %s", extensions=None, ), numberOfTrees=dict( argstr="--numberOfTrees %d", ), randomTreeDepth=dict( argstr="--randomTreeDepth %d", ), trainModel=dict( argstr="--trainModel ", ), trainModelStartIndex=dict( argstr="--trainModelStartIndex %d", ), validate=dict( argstr="--validate ", ), verbose=dict( argstr="--verbose %d", ), ) inputs = BRAINSCut.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSCut_outputs(): output_map = dict() outputs = BRAINSCut.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSMultiSTAPLE.py000066400000000000000000000033651413403311400315220ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import BRAINSMultiSTAPLE def test_BRAINSMultiSTAPLE_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputCompositeT1Volume=dict( argstr="--inputCompositeT1Volume %s", extensions=None, ), inputLabelVolume=dict( argstr="--inputLabelVolume %s...", ), inputTransform=dict( 
argstr="--inputTransform %s...", ), labelForUndecidedPixels=dict( argstr="--labelForUndecidedPixels %d", ), outputConfusionMatrix=dict( argstr="--outputConfusionMatrix %s", hash_files=False, ), outputMultiSTAPLE=dict( argstr="--outputMultiSTAPLE %s", hash_files=False, ), resampledVolumePrefix=dict( argstr="--resampledVolumePrefix %s", ), skipResampling=dict( argstr="--skipResampling ", ), ) inputs = BRAINSMultiSTAPLE.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSMultiSTAPLE_outputs(): output_map = dict( outputConfusionMatrix=dict( extensions=None, ), outputMultiSTAPLE=dict( extensions=None, ), ) outputs = BRAINSMultiSTAPLE.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/segmentation/tests/test_auto_BRAINSROIAuto.py000066400000000000000000000036501413403311400307760ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import BRAINSROIAuto def test_BRAINSROIAuto_inputs(): input_map = dict( ROIAutoDilateSize=dict( argstr="--ROIAutoDilateSize %f", ), args=dict( argstr="%s", ), closingSize=dict( argstr="--closingSize %f", ), cropOutput=dict( argstr="--cropOutput ", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), maskOutput=dict( argstr="--maskOutput ", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), otsuPercentileThreshold=dict( argstr="--otsuPercentileThreshold %f", ), outputROIMaskVolume=dict( argstr="--outputROIMaskVolume %s", hash_files=False, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), outputVolumePixelType=dict( argstr="--outputVolumePixelType %s", ), thresholdCorrectionFactor=dict( argstr="--thresholdCorrectionFactor %f", ), ) inputs 
= BRAINSROIAuto.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSROIAuto_outputs(): output_map = dict( outputROIMaskVolume=dict( extensions=None, ), outputVolume=dict( extensions=None, ), ) outputs = BRAINSROIAuto.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value test_auto_BinaryMaskEditorBasedOnLandmarks.py000066400000000000000000000034561413403311400350420ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/segmentation/tests# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import BinaryMaskEditorBasedOnLandmarks def test_BinaryMaskEditorBasedOnLandmarks_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputBinaryVolume=dict( argstr="--inputBinaryVolume %s", extensions=None, ), inputLandmarkNames=dict( argstr="--inputLandmarkNames %s", sep=",", ), inputLandmarkNamesForObliquePlane=dict( argstr="--inputLandmarkNamesForObliquePlane %s", sep=",", ), inputLandmarksFilename=dict( argstr="--inputLandmarksFilename %s", extensions=None, ), outputBinaryVolume=dict( argstr="--outputBinaryVolume %s", hash_files=False, ), setCutDirectionForLandmark=dict( argstr="--setCutDirectionForLandmark %s", sep=",", ), setCutDirectionForObliquePlane=dict( argstr="--setCutDirectionForObliquePlane %s", sep=",", ), ) inputs = BinaryMaskEditorBasedOnLandmarks.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BinaryMaskEditorBasedOnLandmarks_outputs(): output_map = dict( outputBinaryVolume=dict( extensions=None, ), ) outputs = BinaryMaskEditorBasedOnLandmarks.output_spec() for key, metadata in list(output_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/segmentation/tests/test_auto_ESLR.py000066400000000000000000000030051413403311400273540ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import ESLR def test_ESLR_inputs(): input_map = dict( args=dict( argstr="%s", ), closingSize=dict( argstr="--closingSize %d", ), environ=dict( nohash=True, usedefault=True, ), high=dict( argstr="--high %d", ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), low=dict( argstr="--low %d", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), openingSize=dict( argstr="--openingSize %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), preserveOutside=dict( argstr="--preserveOutside ", ), safetySize=dict( argstr="--safetySize %d", ), ) inputs = ESLR.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ESLR_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = ESLR.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/testing/000077500000000000000000000000001413403311400217665ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/testing/__init__.py000066400000000000000000000002761413403311400241040ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .featuredetection import SphericalCoordinateGeneration from .landmarkscompare import LandmarksCompare from .generateaveragelmkfile import GenerateAverageLmkFile nipype-1.7.0/nipype/interfaces/semtools/testing/featuredetection.py000066400000000000000000000024211413403311400256710ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file 
- DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class SphericalCoordinateGenerationInputSpec(CommandLineInputSpec): inputAtlasImage = File( desc="Input atlas image", exists=True, argstr="--inputAtlasImage %s" ) outputPath = traits.Str( desc="Output path for rho, phi and theta images", argstr="--outputPath %s" ) class SphericalCoordinateGenerationOutputSpec(TraitedSpec): pass class SphericalCoordinateGeneration(SEMLikeCommandLine): """title: Spherical Coordinate Generation category: Testing.FeatureDetection description: get the atlas image as input and generates the rho, phi and theta images. version: 0.1.0.$Revision: 1 $(alpha) contributor: Ali Ghayoor """ input_spec = SphericalCoordinateGenerationInputSpec output_spec = SphericalCoordinateGenerationOutputSpec _cmd = " SphericalCoordinateGeneration " _outputs_filenames = {} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/testing/generateaveragelmkfile.py000066400000000000000000000032301413403311400270270ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class GenerateAverageLmkFileInputSpec(CommandLineInputSpec): inputLandmarkFiles = InputMultiPath( traits.Str, desc="Input landmark files names (.fcsv or .wts)", sep=",", argstr="--inputLandmarkFiles %s", ) outputLandmarkFile = traits.Either( traits.Bool, File(), hash_files=False, desc="Ouput landmark file name that includes average values for landmarks (.fcsv or .wts)", 
argstr="--outputLandmarkFile %s", ) class GenerateAverageLmkFileOutputSpec(TraitedSpec): outputLandmarkFile = File( desc="Ouput landmark file name that includes average values for landmarks (.fcsv or .wts)", exists=True, ) class GenerateAverageLmkFile(SEMLikeCommandLine): """title: Average Fiducials category: Testing description: This program gets several fcsv file each one contains several landmarks with the same name but slightly different coordinates. For EACH landmark we compute the average coordination. contributor: Ali Ghayoor """ input_spec = GenerateAverageLmkFileInputSpec output_spec = GenerateAverageLmkFileOutputSpec _cmd = " GenerateAverageLmkFile " _outputs_filenames = {"outputLandmarkFile": "outputLandmarkFile"} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/testing/landmarkscompare.py000066400000000000000000000026161413403311400256700ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class LandmarksCompareInputSpec(CommandLineInputSpec): inputLandmarkFile1 = File( desc="First input landmark file (.fcsv or .wts)", exists=True, argstr="--inputLandmarkFile1 %s", ) inputLandmarkFile2 = File( desc="Second input landmark file (.fcsv or .wts)", exists=True, argstr="--inputLandmarkFile2 %s", ) tolerance = traits.Float( desc="The maximum error (in mm) allowed in each direction of a landmark", argstr="--tolerance %f", ) class LandmarksCompareOutputSpec(TraitedSpec): pass class LandmarksCompare(SEMLikeCommandLine): """title: Compare Fiducials category: Testing description: Compares two .fcsv or .wts text files and verifies that they are identicle. Used for testing landmarks files. 
contributor: Ali Ghayoor """ input_spec = LandmarksCompareInputSpec output_spec = LandmarksCompareOutputSpec _cmd = " LandmarksCompare " _outputs_filenames = {} _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/tests/000077500000000000000000000000001413403311400214535ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/tests/__init__.py000066400000000000000000000000301413403311400235550ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/semtools/tests/test_auto_DWICompare.py000066400000000000000000000017311413403311400260500ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..converters import DWICompare def test_DWICompare_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume1=dict( argstr="--inputVolume1 %s", extensions=None, ), inputVolume2=dict( argstr="--inputVolume2 %s", extensions=None, ), ) inputs = DWICompare.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DWICompare_outputs(): output_map = dict() outputs = DWICompare.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/tests/test_auto_DWISimpleCompare.py000066400000000000000000000021031413403311400272140ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..converters import DWISimpleCompare def test_DWISimpleCompare_inputs(): input_map = dict( args=dict( argstr="%s", ), checkDWIData=dict( argstr="--checkDWIData ", ), environ=dict( nohash=True, usedefault=True, ), inputVolume1=dict( argstr="--inputVolume1 %s", extensions=None, ), inputVolume2=dict( argstr="--inputVolume2 %s", extensions=None, ), ) inputs = DWISimpleCompare.input_spec() for key, metadata 
in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DWISimpleCompare_outputs(): output_map = dict() outputs = DWISimpleCompare.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/tests/test_auto_GenerateCsfClippedFromClassifiedImage.py000066400000000000000000000022751413403311400333670ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..featurecreator import GenerateCsfClippedFromClassifiedImage def test_GenerateCsfClippedFromClassifiedImage_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputCassifiedVolume=dict( argstr="--inputCassifiedVolume %s", extensions=None, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = GenerateCsfClippedFromClassifiedImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GenerateCsfClippedFromClassifiedImage_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = GenerateCsfClippedFromClassifiedImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/000077500000000000000000000000001413403311400223245ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/utilities/__init__.py000066400000000000000000000011341413403311400244340ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .brains import ( BRAINSConstellationModeler, landmarksConstellationWeights, BRAINSTrimForegroundInDirection, BRAINSLmkTransform, BRAINSMush, BRAINSTransformConvert, 
landmarksConstellationAligner, BRAINSEyeDetector, BRAINSLinearModelerEPCA, BRAINSInitializedControlPoints, CleanUpOverlapLabels, BRAINSClipInferior, GenerateLabelMapFromProbabilityMap, BRAINSAlignMSP, BRAINSLandmarkInitializer, insertMidACPCpoint, BRAINSSnapShotWriter, JointHistogram, ShuffleVectorsModule, ImageRegionPlotter, ) nipype-1.7.0/nipype/interfaces/semtools/utilities/brains.py000066400000000000000000001343751413403311400241710ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" import os from ...base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) class BRAINSConstellationModelerInputSpec(CommandLineInputSpec): verbose = traits.Bool( desc=", Show more verbose output, ", argstr="--verbose ", ) inputTrainingList = File( desc=", Setup file, giving all parameters for training up a template model for each landmark., ", exists=True, argstr="--inputTrainingList %s", ) outputModel = traits.Either( traits.Bool, File(), hash_files=False, desc=", The full filename of the output model file., ", argstr="--outputModel %s", ) saveOptimizedLandmarks = traits.Bool( desc=", Flag to make a new subject-specific landmark definition file in the same format produced by Slicer3 with the optimized landmark (the detected RP, AC, and PC) in it. 
Useful to tighten the variances in the ConstellationModeler., ", argstr="--saveOptimizedLandmarks ", ) optimizedLandmarksFilenameExtender = traits.Str( desc=", If the trainingList is (indexFullPathName) and contains landmark data filenames [path]/[filename].fcsv , make the optimized landmarks filenames out of [path]/[filename](thisExtender) and the optimized version of the input trainingList out of (indexFullPathName)(thisExtender) , when you rewrite all the landmarks according to the saveOptimizedLandmarks flag., ", argstr="--optimizedLandmarksFilenameExtender %s", ) resultsDir = traits.Either( traits.Bool, Directory(), hash_files=False, desc=", The directory for the results to be written., ", argstr="--resultsDir %s", ) mspQualityLevel = traits.Int( desc=", Flag cotrols how agressive the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds)., ", argstr="--mspQualityLevel %d", ) rescaleIntensities = traits.Bool( desc=", Flag to turn on rescaling image intensities on input., ", argstr="--rescaleIntensities ", ) trimRescaledIntensities = traits.Float( desc=", Turn on clipping the rescaled image one-tailed on input. Units of standard deviations above the mean. Very large values are very permissive. Non-positive value turns clipping off. Defaults to removing 0.00001 of a normal tail above the mean., ", argstr="--trimRescaledIntensities %f", ) rescaleIntensitiesOutputRange = InputMultiPath( traits.Int, desc=", This pair of integers gives the lower and upper bounds on the signal portion of the output image. Out-of-field voxels are taken from BackgroundFillValue., ", sep=",", argstr="--rescaleIntensitiesOutputRange %s", ) BackgroundFillValue = traits.Str( desc="Fill the background of image with specified short int value. 
Enter number or use BIGNEG for a large negative number.", argstr="--BackgroundFillValue %s", ) writedebuggingImagesLevel = traits.Int( desc=", This flag controls if debugging images are produced. By default value of 0 is no images. Anything greater than zero will be increasing level of debugging images., ", argstr="--writedebuggingImagesLevel %d", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class BRAINSConstellationModelerOutputSpec(TraitedSpec): outputModel = File( desc=", The full filename of the output model file., ", exists=True, ) resultsDir = Directory( desc=", The directory for the results to be written., ", exists=True, ) class BRAINSConstellationModeler(SEMLikeCommandLine): """title: Generate Landmarks Model (BRAINS) category: Utilities.BRAINS description: Train up a model for BRAINSConstellationDetector """ input_spec = BRAINSConstellationModelerInputSpec output_spec = BRAINSConstellationModelerOutputSpec _cmd = " BRAINSConstellationModeler " _outputs_filenames = {"outputModel": "outputModel.mdl", "resultsDir": "resultsDir"} _redirect_x = False class landmarksConstellationWeightsInputSpec(CommandLineInputSpec): inputTrainingList = File( desc=", Setup file, giving all parameters for training up a Weight list for landmark., ", exists=True, argstr="--inputTrainingList %s", ) inputTemplateModel = File( desc="User-specified template model., ", exists=True, argstr="--inputTemplateModel %s", ) LLSModel = File( desc="Linear least squares model filename in HD5 format", exists=True, argstr="--LLSModel %s", ) outputWeightsList = traits.Either( traits.Bool, File(), hash_files=False, desc=", The filename of a csv file which is a list of landmarks and their corresponding weights., ", argstr="--outputWeightsList %s", ) class landmarksConstellationWeightsOutputSpec(TraitedSpec): outputWeightsList = File( desc=", The filename of a csv file which is a list of landmarks and their 
corresponding weights., ", exists=True, ) class landmarksConstellationWeights(SEMLikeCommandLine): """title: Generate Landmarks Weights (BRAINS) category: Utilities.BRAINS description: Train up a list of Weights for the Landmarks in BRAINSConstellationDetector """ input_spec = landmarksConstellationWeightsInputSpec output_spec = landmarksConstellationWeightsOutputSpec _cmd = " landmarksConstellationWeights " _outputs_filenames = {"outputWeightsList": "outputWeightsList.wts"} _redirect_x = False class BRAINSTrimForegroundInDirectionInputSpec(CommandLineInputSpec): inputVolume = File( desc="Input image to trim off the neck (and also air-filling noise.)", exists=True, argstr="--inputVolume %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output image with neck and air-filling noise trimmed isotropic image with AC at center of image.", argstr="--outputVolume %s", ) directionCode = traits.Int( desc=", This flag chooses which dimension to compare. The sign lets you flip direction., ", argstr="--directionCode %d", ) otsuPercentileThreshold = traits.Float( desc=", This is a parameter to FindLargestForegroundFilledMask, which is employed to trim off air-filling noise., ", argstr="--otsuPercentileThreshold %f", ) closingSize = traits.Int( desc=", This is a parameter to FindLargestForegroundFilledMask, ", argstr="--closingSize %d", ) headSizeLimit = traits.Float( desc=", Use this to vary from the command line our search for how much upper tissue is head for the center-of-mass calculation. Units are CCs, not cubic millimeters., ", argstr="--headSizeLimit %f", ) BackgroundFillValue = traits.Str( desc="Fill the background of image with specified short int value. 
Enter number or use BIGNEG for a large negative number.", argstr="--BackgroundFillValue %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class BRAINSTrimForegroundInDirectionOutputSpec(TraitedSpec): outputVolume = File( desc="Output image with neck and air-filling noise trimmed isotropic image with AC at center of image.", exists=True, ) class BRAINSTrimForegroundInDirection(SEMLikeCommandLine): """title: Trim Foreground In Direction (BRAINS) category: Utilities.BRAINS description: This program will trim off the neck and also air-filling noise from the inputImage. version: 0.1 documentation-url: http://www.nitrc.org/projects/art/ """ input_spec = BRAINSTrimForegroundInDirectionInputSpec output_spec = BRAINSTrimForegroundInDirectionOutputSpec _cmd = " BRAINSTrimForegroundInDirection " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class BRAINSLmkTransformInputSpec(CommandLineInputSpec): inputMovingLandmarks = File( desc="Input Moving Landmark list file in fcsv, ", exists=True, argstr="--inputMovingLandmarks %s", ) inputFixedLandmarks = File( desc="Input Fixed Landmark list file in fcsv, ", exists=True, argstr="--inputFixedLandmarks %s", ) outputAffineTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="The filename for the estimated affine transform, ", argstr="--outputAffineTransform %s", ) inputMovingVolume = File( desc="The filename of input moving volume", exists=True, argstr="--inputMovingVolume %s", ) inputReferenceVolume = File( desc="The filename of the reference volume", exists=True, argstr="--inputReferenceVolume %s", ) outputResampledVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="The filename of the output resampled volume", argstr="--outputResampledVolume %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class 
BRAINSLmkTransformOutputSpec(TraitedSpec): outputAffineTransform = File( desc="The filename for the estimated affine transform, ", exists=True, ) outputResampledVolume = File( desc="The filename of the output resampled volume", exists=True ) class BRAINSLmkTransform(SEMLikeCommandLine): """title: Landmark Transform (BRAINS) category: Utilities.BRAINS description: This utility program estimates the affine transform to align the fixed landmarks to the moving landmarks, and then generate the resampled moving image to the same physical space as that of the reference image. version: 1.0 documentation-url: http://www.nitrc.org/projects/brainscdetector/ """ input_spec = BRAINSLmkTransformInputSpec output_spec = BRAINSLmkTransformOutputSpec _cmd = " BRAINSLmkTransform " _outputs_filenames = { "outputResampledVolume": "outputResampledVolume.nii", "outputAffineTransform": "outputAffineTransform.h5", } _redirect_x = False class BRAINSMushInputSpec(CommandLineInputSpec): inputFirstVolume = File( desc="Input image (1) for mixture optimization", exists=True, argstr="--inputFirstVolume %s", ) inputSecondVolume = File( desc="Input image (2) for mixture optimization", exists=True, argstr="--inputSecondVolume %s", ) inputMaskVolume = File( desc="Input label image for mixture optimization", exists=True, argstr="--inputMaskVolume %s", ) outputWeightsFile = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Weights File", argstr="--outputWeightsFile %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="The MUSH image produced from the T1 and T2 weighted images", argstr="--outputVolume %s", ) outputMask = traits.Either( traits.Bool, File(), hash_files=False, desc="The brain volume mask generated from the MUSH image", argstr="--outputMask %s", ) seed = InputMultiPath( traits.Int, desc="Seed Point for Brain Region Filling", sep=",", argstr="--seed %s", ) desiredMean = traits.Float( desc="Desired mean within the mask for weighted sum of both 
images.", argstr="--desiredMean %f", ) desiredVariance = traits.Float( desc="Desired variance within the mask for weighted sum of both images.", argstr="--desiredVariance %f", ) lowerThresholdFactorPre = traits.Float( desc="Lower threshold factor for finding an initial brain mask", argstr="--lowerThresholdFactorPre %f", ) upperThresholdFactorPre = traits.Float( desc="Upper threshold factor for finding an initial brain mask", argstr="--upperThresholdFactorPre %f", ) lowerThresholdFactor = traits.Float( desc="Lower threshold factor for defining the brain mask", argstr="--lowerThresholdFactor %f", ) upperThresholdFactor = traits.Float( desc="Upper threshold factor for defining the brain mask", argstr="--upperThresholdFactor %f", ) boundingBoxSize = InputMultiPath( traits.Int, desc="Size of the cubic bounding box mask used when no brain mask is present", sep=",", argstr="--boundingBoxSize %s", ) boundingBoxStart = InputMultiPath( traits.Int, desc="XYZ point-coordinate for the start of the cubic bounding box mask used when no brain mask is present", sep=",", argstr="--boundingBoxStart %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class BRAINSMushOutputSpec(TraitedSpec): outputWeightsFile = File(desc="Output Weights File", exists=True) outputVolume = File( desc="The MUSH image produced from the T1 and T2 weighted images", exists=True ) outputMask = File( desc="The brain volume mask generated from the MUSH image", exists=True ) class BRAINSMush(SEMLikeCommandLine): """title: Brain Extraction from T1/T2 image (BRAINS) category: Utilities.BRAINS description: This program: 1) generates a weighted mixture image optimizing the mean and variance and 2) produces a mask of the brain volume version: 0.1.0.$Revision: 1.4 $(alpha) documentation-url: http:://mri.radiology.uiowa.edu license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool is a modification by 
Steven Dunn of a program developed by Greg Harris and Ron Pierson. acknowledgements: This work was developed by the University of Iowa Departments of Radiology and Psychiatry. This software was supported in part of NIH/NINDS award NS050568. """ input_spec = BRAINSMushInputSpec output_spec = BRAINSMushOutputSpec _cmd = " BRAINSMush " _outputs_filenames = { "outputMask": "outputMask.nii.gz", "outputWeightsFile": "outputWeightsFile.txt", "outputVolume": "outputVolume.nii.gz", } _redirect_x = False class BRAINSTransformConvertInputSpec(CommandLineInputSpec): inputTransform = File(exists=True, argstr="--inputTransform %s") referenceVolume = File(exists=True, argstr="--referenceVolume %s") outputTransformType = traits.Enum( "Affine", "VersorRigid", "ScaleVersor", "ScaleSkewVersor", "DisplacementField", "Same", desc="The target transformation type. Must be conversion-compatible with the input transform type", argstr="--outputTransformType %s", ) outputPrecisionType = traits.Enum( "double", "float", desc="Precision type of the output transform. It can be either single precision or double precision", argstr="--outputPrecisionType %s", ) displacementVolume = traits.Either( traits.Bool, File(), hash_files=False, argstr="--displacementVolume %s" ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, argstr="--outputTransform %s" ) class BRAINSTransformConvertOutputSpec(TraitedSpec): displacementVolume = File(exists=True) outputTransform = File(exists=True) class BRAINSTransformConvert(SEMLikeCommandLine): """title: BRAINS Transform Convert category: Utilities.BRAINS description: Convert ITK transforms to higher order transforms version: 1.0 documentation-url: A utility to convert between transform file formats. license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Hans J. 
Johnson,Kent Williams, Ali Ghayoor """ input_spec = BRAINSTransformConvertInputSpec output_spec = BRAINSTransformConvertOutputSpec _cmd = " BRAINSTransformConvert " _outputs_filenames = { "displacementVolume": "displacementVolume.nii", "outputTransform": "outputTransform.mat", } _redirect_x = False class landmarksConstellationAlignerInputSpec(CommandLineInputSpec): inputLandmarksPaired = File( desc="Input landmark file (.fcsv)", exists=True, argstr="--inputLandmarksPaired %s", ) outputLandmarksPaired = traits.Either( traits.Bool, File(), hash_files=False, desc="Output landmark file (.fcsv)", argstr="--outputLandmarksPaired %s", ) class landmarksConstellationAlignerOutputSpec(TraitedSpec): outputLandmarksPaired = File(desc="Output landmark file (.fcsv)", exists=True) class landmarksConstellationAligner(SEMLikeCommandLine): """title: MidACPC Landmark Insertion category: Utilities.BRAINS description: This program converts the original landmark files to the acpc-aligned landmark files contributor: Ali Ghayoor """ input_spec = landmarksConstellationAlignerInputSpec output_spec = landmarksConstellationAlignerOutputSpec _cmd = " landmarksConstellationAligner " _outputs_filenames = {"outputLandmarksPaired": "outputLandmarksPaired"} _redirect_x = False class BRAINSEyeDetectorInputSpec(CommandLineInputSpec): numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) inputVolume = File(desc="The input volume", exists=True, argstr="--inputVolume %s") outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="The output volume", argstr="--outputVolume %s", ) debugDir = traits.Str(desc="A place for debug information", argstr="--debugDir %s") class BRAINSEyeDetectorOutputSpec(TraitedSpec): outputVolume = File(desc="The output volume", exists=True) class BRAINSEyeDetector(SEMLikeCommandLine): """title: Eye Detector (BRAINS) category: Utilities.BRAINS version: 1.0 documentation-url: 
http://www.nitrc.org/projects/brainscdetector/ """ input_spec = BRAINSEyeDetectorInputSpec output_spec = BRAINSEyeDetectorOutputSpec _cmd = " BRAINSEyeDetector " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class BRAINSLinearModelerEPCAInputSpec(CommandLineInputSpec): inputTrainingList = File( desc="Input Training Landmark List Filename, ", exists=True, argstr="--inputTrainingList %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class BRAINSLinearModelerEPCAOutputSpec(TraitedSpec): pass class BRAINSLinearModelerEPCA(SEMLikeCommandLine): """title: Landmark Linear Modeler (BRAINS) category: Utilities.BRAINS description: Training linear model using EPCA. Implementation based on my MS thesis, "A METHOD FOR AUTOMATED LANDMARK CONSTELLATION DETECTION USING EVOLUTIONARY PRINCIPAL COMPONENTS AND STATISTICAL SHAPE MODELS" version: 1.0 documentation-url: http://www.nitrc.org/projects/brainscdetector/ """ input_spec = BRAINSLinearModelerEPCAInputSpec output_spec = BRAINSLinearModelerEPCAOutputSpec _cmd = " BRAINSLinearModelerEPCA " _outputs_filenames = {} _redirect_x = False class BRAINSInitializedControlPointsInputSpec(CommandLineInputSpec): inputVolume = File(desc="Input Volume", exists=True, argstr="--inputVolume %s") outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output Volume", argstr="--outputVolume %s", ) splineGridSize = InputMultiPath( traits.Int, desc="The number of subdivisions of the BSpline Grid to be centered on the image space. Each dimension must have at least 3 subdivisions for the BSpline to be correctly computed. ", sep=",", argstr="--splineGridSize %s", ) permuteOrder = InputMultiPath( traits.Int, desc="The permutation order for the images. The default is 0,1,2 (i.e. 
no permutation)", sep=",", argstr="--permuteOrder %s", ) outputLandmarksFile = traits.Str( desc="Output filename", argstr="--outputLandmarksFile %s" ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class BRAINSInitializedControlPointsOutputSpec(TraitedSpec): outputVolume = File(desc="Output Volume", exists=True) class BRAINSInitializedControlPoints(SEMLikeCommandLine): """title: Initialized Control Points (BRAINS) category: Utilities.BRAINS description: Outputs bspline control points as landmarks version: 0.1.0.$Revision: 916 $(alpha) license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Mark Scully acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Additional support for Mark Scully and Hans Johnson at the University of Iowa. """ input_spec = BRAINSInitializedControlPointsInputSpec output_spec = BRAINSInitializedControlPointsOutputSpec _cmd = " BRAINSInitializedControlPoints " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class CleanUpOverlapLabelsInputSpec(CommandLineInputSpec): inputBinaryVolumes = InputMultiPath( File(exists=True), desc="The list of binary images to be checked and cleaned up. Order is important. Binary volume given first always wins out. ", argstr="--inputBinaryVolumes %s...", ) outputBinaryVolumes = traits.Either( traits.Bool, InputMultiPath(File()), hash_files=False, desc="The output label map images, with integer values in it. Each label value specified in the inputLabels is combined into this output label map volume", argstr="--outputBinaryVolumes %s...", ) class CleanUpOverlapLabelsOutputSpec(TraitedSpec): outputBinaryVolumes = OutputMultiPath( File(exists=True), desc="The output label map images, with integer values in it. 
Each label value specified in the inputLabels is combined into this output label map volume", ) class CleanUpOverlapLabels(SEMLikeCommandLine): """title: Clean Up Overla Labels category: Utilities.BRAINS description: Take a series of input binary images and clean up for those overlapped area. Binary volumes given first always wins out version: 0.1.0 contributor: Eun Young Kim """ input_spec = CleanUpOverlapLabelsInputSpec output_spec = CleanUpOverlapLabelsOutputSpec _cmd = " CleanUpOverlapLabels " _outputs_filenames = {"outputBinaryVolumes": "outputBinaryVolumes.nii"} _redirect_x = False class BRAINSClipInferiorInputSpec(CommandLineInputSpec): inputVolume = File( desc="Input image to make a clipped short int copy from.", exists=True, argstr="--inputVolume %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output image, a short int copy of the upper portion of the input image, filled with BackgroundFillValue.", argstr="--outputVolume %s", ) acLowerBound = traits.Float( desc=", When the input image to the output image, replace the image with the BackgroundFillValue everywhere below the plane This Far in physical units (millimeters) below (inferior to) the AC point (assumed to be the voxel field middle.) The oversize default was chosen to have no effect. Based on visualizing a thousand masks in the IPIG study, we recommend a limit no smaller than 80.0 mm., ", argstr="--acLowerBound %f", ) BackgroundFillValue = traits.Str( desc="Fill the background of image with specified short int value. 
Enter number or use BIGNEG for a large negative number.", argstr="--BackgroundFillValue %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class BRAINSClipInferiorOutputSpec(TraitedSpec): outputVolume = File( desc="Output image, a short int copy of the upper portion of the input image, filled with BackgroundFillValue.", exists=True, ) class BRAINSClipInferior(SEMLikeCommandLine): """title: Clip Inferior of Center of Brain (BRAINS) category: Utilities.BRAINS description: This program will read the inputVolume as a short int image, write the BackgroundFillValue everywhere inferior to the lower bound, and write the resulting clipped short int image in the outputVolume. version: 1.0 """ input_spec = BRAINSClipInferiorInputSpec output_spec = BRAINSClipInferiorOutputSpec _cmd = " BRAINSClipInferior " _outputs_filenames = {"outputVolume": "outputVolume.nii"} _redirect_x = False class GenerateLabelMapFromProbabilityMapInputSpec(CommandLineInputSpec): inputVolumes = InputMultiPath( File(exists=True), desc="The Input probaiblity images to be computed for lable maps", argstr="--inputVolumes %s...", ) outputLabelVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="The Input binary image for region of interest", argstr="--outputLabelVolume %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class GenerateLabelMapFromProbabilityMapOutputSpec(TraitedSpec): outputLabelVolume = File( desc="The Input binary image for region of interest", exists=True ) class GenerateLabelMapFromProbabilityMap(SEMLikeCommandLine): """title: Label Map from Probability Images category: Utilities.BRAINS description: Given a list of probability maps for labels, create a discrete label map where only the highest probability region is used for the labeling. 
version: 0.1 contributor: University of Iowa Department of Psychiatry, http:://www.psychiatry.uiowa.edu """ input_spec = GenerateLabelMapFromProbabilityMapInputSpec output_spec = GenerateLabelMapFromProbabilityMapOutputSpec _cmd = " GenerateLabelMapFromProbabilityMap " _outputs_filenames = {"outputLabelVolume": "outputLabelVolume.nii.gz"} _redirect_x = False class BRAINSAlignMSPInputSpec(CommandLineInputSpec): inputVolume = File( desc=", The Image to be resampled, ", exists=True, argstr="--inputVolume %s", ) OutputresampleMSP = traits.Either( traits.Bool, File(), hash_files=False, desc=", The image to be output., ", argstr="--OutputresampleMSP %s", ) verbose = traits.Bool( desc=", Show more verbose output, ", argstr="--verbose " ) resultsDir = traits.Either( traits.Bool, Directory(), hash_files=False, desc=", The directory for the results to be written., ", argstr="--resultsDir %s", ) writedebuggingImagesLevel = traits.Int( desc=", This flag controls if debugging images are produced. By default value of 0 is no images. Anything greater than zero will be increasing level of debugging images., ", argstr="--writedebuggingImagesLevel %d", ) mspQualityLevel = traits.Int( desc=", Flag cotrols how agressive the MSP is estimated. 0=quick estimate (9 seconds), 1=normal estimate (11 seconds), 2=great estimate (22 seconds), 3=best estimate (58 seconds)., ", argstr="--mspQualityLevel %d", ) rescaleIntensities = traits.Bool( desc=", Flag to turn on rescaling image intensities on input., ", argstr="--rescaleIntensities ", ) trimRescaledIntensities = traits.Float( desc=", Turn on clipping the rescaled image one-tailed on input. Units of standard deviations above the mean. Very large values are very permissive. Non-positive value turns clipping off. 
Defaults to removing 0.00001 of a normal tail above the mean., ", argstr="--trimRescaledIntensities %f", ) rescaleIntensitiesOutputRange = InputMultiPath( traits.Int, desc=", This pair of integers gives the lower and upper bounds on the signal portion of the output image. Out-of-field voxels are taken from BackgroundFillValue., ", sep=",", argstr="--rescaleIntensitiesOutputRange %s", ) BackgroundFillValue = traits.Str( desc="Fill the background of image with specified short int value. Enter number or use BIGNEG for a large negative number.", argstr="--BackgroundFillValue %s", ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", "ResampleInPlace", "BSpline", "WindowedSinc", "Hamming", "Cosine", "Welch", "Lanczos", "Blackman", desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", argstr="--interpolationMode %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class BRAINSAlignMSPOutputSpec(TraitedSpec): OutputresampleMSP = File( desc=", The image to be output., ", exists=True ) resultsDir = Directory( desc=", The directory for the results to be written., ", exists=True, ) class BRAINSAlignMSP(SEMLikeCommandLine): """title: Align Mid Saggital Brain (BRAINS) category: Utilities.BRAINS description: Resample an image into ACPC alignement ACPCDetect """ input_spec = BRAINSAlignMSPInputSpec output_spec = BRAINSAlignMSPOutputSpec _cmd = " BRAINSAlignMSP " _outputs_filenames = { "OutputresampleMSP": "OutputresampleMSP.nii", "resultsDir": "resultsDir", } _redirect_x = False class BRAINSLandmarkInitializerInputSpec(CommandLineInputSpec): inputFixedLandmarkFilename = File( desc="input fixed landmark. *.fcsv", exists=True, argstr="--inputFixedLandmarkFilename %s", ) inputMovingLandmarkFilename = File( desc="input moving landmark. 
*.fcsv", exists=True, argstr="--inputMovingLandmarkFilename %s", ) inputWeightFilename = File( desc="Input weight file name for landmarks. Higher weighted landmark will be considered more heavily. Weights are propotional, that is the magnitude of weights will be normalized by its minimum and maximum value. ", exists=True, argstr="--inputWeightFilename %s", ) outputTransformFilename = traits.Either( traits.Bool, File(), hash_files=False, desc="output transform file name (ex: ./outputTransform.mat) ", argstr="--outputTransformFilename %s", ) class BRAINSLandmarkInitializerOutputSpec(TraitedSpec): outputTransformFilename = File( desc="output transform file name (ex: ./outputTransform.mat) ", exists=True ) class BRAINSLandmarkInitializer(SEMLikeCommandLine): """title: BRAINSLandmarkInitializer category: Utilities.BRAINS description: Create transformation file (*mat) from a pair of landmarks (*fcsv) files. version: 1.0 license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Eunyoung Regina Kim """ input_spec = BRAINSLandmarkInitializerInputSpec output_spec = BRAINSLandmarkInitializerOutputSpec _cmd = " BRAINSLandmarkInitializer " _outputs_filenames = {"outputTransformFilename": "outputTransformFilename"} _redirect_x = False class insertMidACPCpointInputSpec(CommandLineInputSpec): inputLandmarkFile = File( desc="Input landmark file (.fcsv)", exists=True, argstr="--inputLandmarkFile %s" ) outputLandmarkFile = traits.Either( traits.Bool, File(), hash_files=False, desc="Output landmark file (.fcsv)", argstr="--outputLandmarkFile %s", ) class insertMidACPCpointOutputSpec(TraitedSpec): outputLandmarkFile = File(desc="Output landmark file (.fcsv)", exists=True) class insertMidACPCpoint(SEMLikeCommandLine): """title: MidACPC Landmark Insertion category: Utilities.BRAINS description: This program gets a landmark fcsv file and adds a new landmark as the midpoint between AC and PC points to the output landmark fcsv file contributor: Ali Ghayoor """ 
input_spec = insertMidACPCpointInputSpec output_spec = insertMidACPCpointOutputSpec _cmd = " insertMidACPCpoint " _outputs_filenames = {"outputLandmarkFile": "outputLandmarkFile"} _redirect_x = False class BRAINSSnapShotWriterInputSpec(CommandLineInputSpec): inputVolumes = InputMultiPath( File(exists=True), desc="Input image volume list to be extracted as 2D image. Multiple input is possible. At least one input is required.", argstr="--inputVolumes %s...", ) inputBinaryVolumes = InputMultiPath( File(exists=True), desc="Input mask (binary) volume list to be extracted as 2D image. Multiple input is possible.", argstr="--inputBinaryVolumes %s...", ) inputSliceToExtractInPhysicalPoint = InputMultiPath( traits.Float, desc="2D slice number of input images. For autoWorkUp output, which AC-PC aligned, 0,0,0 will be the center.", sep=",", argstr="--inputSliceToExtractInPhysicalPoint %s", ) inputSliceToExtractInIndex = InputMultiPath( traits.Int, desc="2D slice number of input images. For size of 256*256*256 image, 128 is usually used.", sep=",", argstr="--inputSliceToExtractInIndex %s", ) inputSliceToExtractInPercent = InputMultiPath( traits.Int, desc="2D slice number of input images. Percentage input from 0%-100%. (ex. --inputSliceToExtractInPercent 50,50,50", sep=",", argstr="--inputSliceToExtractInPercent %s", ) inputPlaneDirection = InputMultiPath( traits.Int, desc="Plane to display. In general, 0=saggital, 1=coronal, and 2=axial plane.", sep=",", argstr="--inputPlaneDirection %s", ) outputFilename = traits.Either( traits.Bool, File(), hash_files=False, desc="2D file name of input images. Required.", argstr="--outputFilename %s", ) class BRAINSSnapShotWriterOutputSpec(TraitedSpec): outputFilename = File(desc="2D file name of input images. Required.", exists=True) class BRAINSSnapShotWriter(SEMLikeCommandLine): """title: BRAINSSnapShotWriter category: Utilities.BRAINS description: Create 2D snapshot of input images. 
Mask images are color-coded version: 1.0 license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Eunyoung Regina Kim """ input_spec = BRAINSSnapShotWriterInputSpec output_spec = BRAINSSnapShotWriterOutputSpec _cmd = " BRAINSSnapShotWriter " _outputs_filenames = {"outputFilename": "outputFilename"} _redirect_x = False class JointHistogramInputSpec(CommandLineInputSpec): inputVolumeInXAxis = File( desc="The Input image to be computed for statistics", exists=True, argstr="--inputVolumeInXAxis %s", ) inputVolumeInYAxis = File( desc="The Input image to be computed for statistics", exists=True, argstr="--inputVolumeInYAxis %s", ) inputMaskVolumeInXAxis = File( desc="Input mask volume for inputVolumeInXAxis. Histogram will be computed just for the masked region", exists=True, argstr="--inputMaskVolumeInXAxis %s", ) inputMaskVolumeInYAxis = File( desc="Input mask volume for inputVolumeInYAxis. Histogram will be computed just for the masked region", exists=True, argstr="--inputMaskVolumeInYAxis %s", ) outputJointHistogramImage = traits.Str( desc=" output joint histogram image file name. Histogram is usually 2D image. ", argstr="--outputJointHistogramImage %s", ) verbose = traits.Bool( desc=" print debugging information, ", argstr="--verbose " ) class JointHistogramOutputSpec(TraitedSpec): pass class JointHistogram(SEMLikeCommandLine): """title: Write Out Image Intensities category: Utilities.BRAINS description: For Analysis version: 0.1 contributor: University of Iowa Department of Psychiatry, http:://www.psychiatry.uiowa.edu """ input_spec = JointHistogramInputSpec output_spec = JointHistogramOutputSpec _cmd = " JointHistogram " _outputs_filenames = {} _redirect_x = False class ShuffleVectorsModuleInputSpec(CommandLineInputSpec): inputVectorFileBaseName = File( desc="input vector file name prefix. 
Usually end with .txt and header file has prost fix of .txt.hdr", exists=True, argstr="--inputVectorFileBaseName %s", ) outputVectorFileBaseName = traits.Either( traits.Bool, File(), hash_files=False, desc="output vector file name prefix. Usually end with .txt and header file has prost fix of .txt.hdr", argstr="--outputVectorFileBaseName %s", ) resampleProportion = traits.Float( desc="downsample size of 1 will be the same size as the input images, downsample size of 3 will throw 2/3 the vectors away.", argstr="--resampleProportion %f", ) class ShuffleVectorsModuleOutputSpec(TraitedSpec): outputVectorFileBaseName = File( desc="output vector file name prefix. Usually end with .txt and header file has prost fix of .txt.hdr", exists=True, ) class ShuffleVectorsModule(SEMLikeCommandLine): """title: ShuffleVectors category: Utilities.BRAINS description: Automatic Segmentation using neural networks version: 1.0 license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Hans Johnson """ input_spec = ShuffleVectorsModuleInputSpec output_spec = ShuffleVectorsModuleOutputSpec _cmd = " ShuffleVectorsModule " _outputs_filenames = {"outputVectorFileBaseName": "outputVectorFileBaseName"} _redirect_x = False class ImageRegionPlotterInputSpec(CommandLineInputSpec): inputVolume1 = File( desc="The Input image to be computed for statistics", exists=True, argstr="--inputVolume1 %s", ) inputVolume2 = File( desc="The Input image to be computed for statistics", exists=True, argstr="--inputVolume2 %s", ) inputBinaryROIVolume = File( desc="The Input binary image for region of interest", exists=True, argstr="--inputBinaryROIVolume %s", ) inputLabelVolume = File( desc="The Label Image", exists=True, argstr="--inputLabelVolume %s" ) numberOfHistogramBins = traits.Int( desc=" the number of histogram levels", argstr="--numberOfHistogramBins %d" ) outputJointHistogramData = traits.Str( desc=" output data file name", argstr="--outputJointHistogramData %s" ) useROIAUTO = 
traits.Bool( desc=" Use ROIAUTO to compute region of interest. This cannot be used with inputLabelVolume", argstr="--useROIAUTO ", ) useIntensityForHistogram = traits.Bool( desc=" Create Intensity Joint Histogram instead of Quantile Joint Histogram", argstr="--useIntensityForHistogram ", ) verbose = traits.Bool( desc=" print debugging information, ", argstr="--verbose " ) class ImageRegionPlotterOutputSpec(TraitedSpec): pass class ImageRegionPlotter(SEMLikeCommandLine): """title: Write Out Image Intensities category: Utilities.BRAINS description: For Analysis version: 0.1 contributor: University of Iowa Department of Psychiatry, http:://www.psychiatry.uiowa.edu """ input_spec = ImageRegionPlotterInputSpec output_spec = ImageRegionPlotterOutputSpec _cmd = " ImageRegionPlotter " _outputs_filenames = {} _redirect_x = False class fcsv_to_hdf5InputSpec(CommandLineInputSpec): versionID = traits.Str( desc=", Current version ID. It should be match with the version of BCD that will be using the output model file, ", argstr="--versionID %s", ) landmarksInformationFile = traits.Either( traits.Bool, File(), hash_files=False, desc=", name of HDF5 file to write matrices into, ", argstr="--landmarksInformationFile %s", ) landmarkTypesList = File( desc=", file containing list of landmark types, ", exists=True, argstr="--landmarkTypesList %s", ) modelFile = traits.Either( traits.Bool, File(), hash_files=False, desc=", name of HDF5 file containing BRAINSConstellationDetector Model file (LLSMatrices, LLSMeans and LLSSearchRadii), ", argstr="--modelFile %s", ) landmarkGlobPattern = traits.Str( desc="Glob pattern to select fcsv files", argstr="--landmarkGlobPattern %s" ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class fcsv_to_hdf5OutputSpec(TraitedSpec): landmarksInformationFile = File( desc=", name of HDF5 file to write matrices into, ", exists=True ) modelFile = File( desc=", name of HDF5 file 
containing BRAINSConstellationDetector Model file (LLSMatrices, LLSMeans and LLSSearchRadii), ", exists=True, ) class fcsv_to_hdf5(SEMLikeCommandLine): """title: fcsv_to_hdf5 (BRAINS) category: Utilities.BRAINS description: Convert a collection of fcsv files to a HDF5 format file """ input_spec = fcsv_to_hdf5InputSpec output_spec = fcsv_to_hdf5OutputSpec _cmd = " fcsv_to_hdf5 " _outputs_filenames = { "modelFile": "modelFile", "landmarksInformationFile": "landmarksInformationFile.h5", } _redirect_x = False class FindCenterOfBrainInputSpec(CommandLineInputSpec): inputVolume = File( desc="The image in which to find the center.", exists=True, argstr="--inputVolume %s", ) imageMask = File(exists=True, argstr="--imageMask %s") clippedImageMask = traits.Either( traits.Bool, File(), hash_files=False, argstr="--clippedImageMask %s" ) maximize = traits.Bool(argstr="--maximize ") axis = traits.Int(argstr="--axis %d") otsuPercentileThreshold = traits.Float(argstr="--otsuPercentileThreshold %f") closingSize = traits.Int(argstr="--closingSize %d") headSizeLimit = traits.Float(argstr="--headSizeLimit %f") headSizeEstimate = traits.Float(argstr="--headSizeEstimate %f") backgroundValue = traits.Int(argstr="--backgroundValue %d") generateDebugImages = traits.Bool(argstr="--generateDebugImages ") debugDistanceImage = traits.Either( traits.Bool, File(), hash_files=False, argstr="--debugDistanceImage %s" ) debugGridImage = traits.Either( traits.Bool, File(), hash_files=False, argstr="--debugGridImage %s" ) debugAfterGridComputationsForegroundImage = traits.Either( traits.Bool, File(), hash_files=False, argstr="--debugAfterGridComputationsForegroundImage %s", ) debugClippedImageMask = traits.Either( traits.Bool, File(), hash_files=False, argstr="--debugClippedImageMask %s" ) debugTrimmedImage = traits.Either( traits.Bool, File(), hash_files=False, argstr="--debugTrimmedImage %s" ) class FindCenterOfBrainOutputSpec(TraitedSpec): clippedImageMask = File(exists=True) debugDistanceImage = 
File(exists=True) debugGridImage = File(exists=True) debugAfterGridComputationsForegroundImage = File(exists=True) debugClippedImageMask = File(exists=True) debugTrimmedImage = File(exists=True) class FindCenterOfBrain(SEMLikeCommandLine): """title: Center Of Brain (BRAINS) category: Utilities.BRAINS description: Finds the center point of a brain version: 3.0.0 license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Hans J. Johnson, hans-johnson -at- uiowa.edu, http://wwww.psychiatry.uiowa.edu acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); (1=University of Iowa Department of Psychiatry, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering """ input_spec = FindCenterOfBrainInputSpec output_spec = FindCenterOfBrainOutputSpec _cmd = " FindCenterOfBrain " _outputs_filenames = { "debugClippedImageMask": "debugClippedImageMask.nii", "debugTrimmedImage": "debugTrimmedImage.nii", "debugDistanceImage": "debugDistanceImage.nii", "debugGridImage": "debugGridImage.nii", "clippedImageMask": "clippedImageMask.nii", "debugAfterGridComputationsForegroundImage": "debugAfterGridComputationsForegroundImage.nii", } _redirect_x = False nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/000077500000000000000000000000001413403311400234665ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/__init__.py000066400000000000000000000000301413403311400255700ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSAlignMSP.py000066400000000000000000000040111413403311400304340ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import BRAINSAlignMSP def test_BRAINSAlignMSP_inputs(): input_map = dict( BackgroundFillValue=dict( argstr="--BackgroundFillValue %s", ), OutputresampleMSP=dict( argstr="--OutputresampleMSP %s", 
hash_files=False, ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), interpolationMode=dict( argstr="--interpolationMode %s", ), mspQualityLevel=dict( argstr="--mspQualityLevel %d", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), rescaleIntensities=dict( argstr="--rescaleIntensities ", ), rescaleIntensitiesOutputRange=dict( argstr="--rescaleIntensitiesOutputRange %s", sep=",", ), resultsDir=dict( argstr="--resultsDir %s", hash_files=False, ), trimRescaledIntensities=dict( argstr="--trimRescaledIntensities %f", ), verbose=dict( argstr="--verbose ", ), writedebuggingImagesLevel=dict( argstr="--writedebuggingImagesLevel %d", ), ) inputs = BRAINSAlignMSP.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSAlignMSP_outputs(): output_map = dict( OutputresampleMSP=dict( extensions=None, ), resultsDir=dict(), ) outputs = BRAINSAlignMSP.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSClipInferior.py000066400000000000000000000025021413403311400314120ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import BRAINSClipInferior def test_BRAINSClipInferior_inputs(): input_map = dict( BackgroundFillValue=dict( argstr="--BackgroundFillValue %s", ), acLowerBound=dict( argstr="--acLowerBound %f", ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = BRAINSClipInferior.input_spec() for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSClipInferior_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = BRAINSClipInferior.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSConstellationModeler.py000066400000000000000000000043011413403311400331520ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import BRAINSConstellationModeler def test_BRAINSConstellationModeler_inputs(): input_map = dict( BackgroundFillValue=dict( argstr="--BackgroundFillValue %s", ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputTrainingList=dict( argstr="--inputTrainingList %s", extensions=None, ), mspQualityLevel=dict( argstr="--mspQualityLevel %d", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), optimizedLandmarksFilenameExtender=dict( argstr="--optimizedLandmarksFilenameExtender %s", ), outputModel=dict( argstr="--outputModel %s", hash_files=False, ), rescaleIntensities=dict( argstr="--rescaleIntensities ", ), rescaleIntensitiesOutputRange=dict( argstr="--rescaleIntensitiesOutputRange %s", sep=",", ), resultsDir=dict( argstr="--resultsDir %s", hash_files=False, ), saveOptimizedLandmarks=dict( argstr="--saveOptimizedLandmarks ", ), trimRescaledIntensities=dict( argstr="--trimRescaledIntensities %f", ), verbose=dict( argstr="--verbose ", ), writedebuggingImagesLevel=dict( argstr="--writedebuggingImagesLevel %d", ), ) inputs = BRAINSConstellationModeler.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSConstellationModeler_outputs(): output_map = dict( outputModel=dict( 
extensions=None, ), resultsDir=dict(), ) outputs = BRAINSConstellationModeler.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSEyeDetector.py000066400000000000000000000023311413403311400312410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import BRAINSEyeDetector def test_BRAINSEyeDetector_inputs(): input_map = dict( args=dict( argstr="%s", ), debugDir=dict( argstr="--debugDir %s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = BRAINSEyeDetector.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSEyeDetector_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = BRAINSEyeDetector.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSInitializedControlPoints.py000066400000000000000000000027721413403311400340410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import BRAINSInitializedControlPoints def test_BRAINSInitializedControlPoints_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputLandmarksFile=dict( argstr="--outputLandmarksFile %s", ), outputVolume=dict( argstr="--outputVolume %s", 
hash_files=False, ), permuteOrder=dict( argstr="--permuteOrder %s", sep=",", ), splineGridSize=dict( argstr="--splineGridSize %s", sep=",", ), ) inputs = BRAINSInitializedControlPoints.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSInitializedControlPoints_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = BRAINSInitializedControlPoints.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLandmarkInitializer.py000066400000000000000000000026501413403311400327660ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import BRAINSLandmarkInitializer def test_BRAINSLandmarkInitializer_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputFixedLandmarkFilename=dict( argstr="--inputFixedLandmarkFilename %s", extensions=None, ), inputMovingLandmarkFilename=dict( argstr="--inputMovingLandmarkFilename %s", extensions=None, ), inputWeightFilename=dict( argstr="--inputWeightFilename %s", extensions=None, ), outputTransformFilename=dict( argstr="--outputTransformFilename %s", hash_files=False, ), ) inputs = BRAINSLandmarkInitializer.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSLandmarkInitializer_outputs(): output_map = dict( outputTransformFilename=dict( extensions=None, ), ) outputs = BRAINSLandmarkInitializer.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLinearModelerEPCA.py000066400000000000000000000020111413403311400321730ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import BRAINSLinearModelerEPCA def test_BRAINSLinearModelerEPCA_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputTrainingList=dict( argstr="--inputTrainingList %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), ) inputs = BRAINSLinearModelerEPCA.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSLinearModelerEPCA_outputs(): output_map = dict() outputs = BRAINSLinearModelerEPCA.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSLmkTransform.py000066400000000000000000000033701413403311400314500ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import BRAINSLmkTransform def test_BRAINSLmkTransform_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputFixedLandmarks=dict( argstr="--inputFixedLandmarks %s", extensions=None, ), inputMovingLandmarks=dict( argstr="--inputMovingLandmarks %s", extensions=None, ), inputMovingVolume=dict( argstr="--inputMovingVolume %s", extensions=None, ), inputReferenceVolume=dict( argstr="--inputReferenceVolume %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputAffineTransform=dict( argstr="--outputAffineTransform %s", hash_files=False, ), outputResampledVolume=dict( argstr="--outputResampledVolume %s", hash_files=False, ), ) inputs = BRAINSLmkTransform.input_spec() for key, metadata 
in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSLmkTransform_outputs(): output_map = dict( outputAffineTransform=dict( extensions=None, ), outputResampledVolume=dict( extensions=None, ), ) outputs = BRAINSLmkTransform.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSMush.py000066400000000000000000000050221413403311400277410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import BRAINSMush def test_BRAINSMush_inputs(): input_map = dict( args=dict( argstr="%s", ), boundingBoxSize=dict( argstr="--boundingBoxSize %s", sep=",", ), boundingBoxStart=dict( argstr="--boundingBoxStart %s", sep=",", ), desiredMean=dict( argstr="--desiredMean %f", ), desiredVariance=dict( argstr="--desiredVariance %f", ), environ=dict( nohash=True, usedefault=True, ), inputFirstVolume=dict( argstr="--inputFirstVolume %s", extensions=None, ), inputMaskVolume=dict( argstr="--inputMaskVolume %s", extensions=None, ), inputSecondVolume=dict( argstr="--inputSecondVolume %s", extensions=None, ), lowerThresholdFactor=dict( argstr="--lowerThresholdFactor %f", ), lowerThresholdFactorPre=dict( argstr="--lowerThresholdFactorPre %f", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputMask=dict( argstr="--outputMask %s", hash_files=False, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), outputWeightsFile=dict( argstr="--outputWeightsFile %s", hash_files=False, ), seed=dict( argstr="--seed %s", sep=",", ), upperThresholdFactor=dict( argstr="--upperThresholdFactor %f", ), upperThresholdFactorPre=dict( argstr="--upperThresholdFactorPre %f", ), ) inputs = BRAINSMush.input_spec() for key, metadata in list(input_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSMush_outputs(): output_map = dict( outputMask=dict( extensions=None, ), outputVolume=dict( extensions=None, ), outputWeightsFile=dict( extensions=None, ), ) outputs = BRAINSMush.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSSnapShotWriter.py000066400000000000000000000032331413403311400317630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import BRAINSSnapShotWriter def test_BRAINSSnapShotWriter_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputBinaryVolumes=dict( argstr="--inputBinaryVolumes %s...", ), inputPlaneDirection=dict( argstr="--inputPlaneDirection %s", sep=",", ), inputSliceToExtractInIndex=dict( argstr="--inputSliceToExtractInIndex %s", sep=",", ), inputSliceToExtractInPercent=dict( argstr="--inputSliceToExtractInPercent %s", sep=",", ), inputSliceToExtractInPhysicalPoint=dict( argstr="--inputSliceToExtractInPhysicalPoint %s", sep=",", ), inputVolumes=dict( argstr="--inputVolumes %s...", ), outputFilename=dict( argstr="--outputFilename %s", hash_files=False, ), ) inputs = BRAINSSnapShotWriter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSSnapShotWriter_outputs(): output_map = dict( outputFilename=dict( extensions=None, ), ) outputs = BRAINSSnapShotWriter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTransformConvert.py000066400000000000000000000031211413403311400323370ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import BRAINSTransformConvert def test_BRAINSTransformConvert_inputs(): input_map = dict( args=dict( argstr="%s", ), displacementVolume=dict( argstr="--displacementVolume %s", hash_files=False, ), environ=dict( nohash=True, usedefault=True, ), inputTransform=dict( argstr="--inputTransform %s", extensions=None, ), outputPrecisionType=dict( argstr="--outputPrecisionType %s", ), outputTransform=dict( argstr="--outputTransform %s", hash_files=False, ), outputTransformType=dict( argstr="--outputTransformType %s", ), referenceVolume=dict( argstr="--referenceVolume %s", extensions=None, ), ) inputs = BRAINSTransformConvert.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSTransformConvert_outputs(): output_map = dict( displacementVolume=dict( extensions=None, ), outputTransform=dict( extensions=None, ), ) outputs = BRAINSTransformConvert.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_BRAINSTrimForegroundInDirection.py000066400000000000000000000032051413403311400341240ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import BRAINSTrimForegroundInDirection def test_BRAINSTrimForegroundInDirection_inputs(): input_map = dict( BackgroundFillValue=dict( argstr="--BackgroundFillValue %s", ), args=dict( argstr="%s", ), closingSize=dict( argstr="--closingSize %d", ), directionCode=dict( argstr="--directionCode %d", ), environ=dict( nohash=True, usedefault=True, ), headSizeLimit=dict( 
argstr="--headSizeLimit %f", ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), otsuPercentileThreshold=dict( argstr="--otsuPercentileThreshold %f", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), ) inputs = BRAINSTrimForegroundInDirection.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSTrimForegroundInDirection_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = BRAINSTrimForegroundInDirection.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_CleanUpOverlapLabels.py000066400000000000000000000020641413403311400321340ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import CleanUpOverlapLabels def test_CleanUpOverlapLabels_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputBinaryVolumes=dict( argstr="--inputBinaryVolumes %s...", ), outputBinaryVolumes=dict( argstr="--outputBinaryVolumes %s...", hash_files=False, ), ) inputs = CleanUpOverlapLabels.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CleanUpOverlapLabels_outputs(): output_map = dict( outputBinaryVolumes=dict(), ) outputs = CleanUpOverlapLabels.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_FindCenterOfBrain.py000066400000000000000000000054721413403311400314210ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import FindCenterOfBrain def test_FindCenterOfBrain_inputs(): input_map = dict( args=dict( argstr="%s", ), axis=dict( argstr="--axis %d", ), backgroundValue=dict( argstr="--backgroundValue %d", ), clippedImageMask=dict( argstr="--clippedImageMask %s", hash_files=False, ), closingSize=dict( argstr="--closingSize %d", ), debugAfterGridComputationsForegroundImage=dict( argstr="--debugAfterGridComputationsForegroundImage %s", hash_files=False, ), debugClippedImageMask=dict( argstr="--debugClippedImageMask %s", hash_files=False, ), debugDistanceImage=dict( argstr="--debugDistanceImage %s", hash_files=False, ), debugGridImage=dict( argstr="--debugGridImage %s", hash_files=False, ), debugTrimmedImage=dict( argstr="--debugTrimmedImage %s", hash_files=False, ), environ=dict( nohash=True, usedefault=True, ), generateDebugImages=dict( argstr="--generateDebugImages ", ), headSizeEstimate=dict( argstr="--headSizeEstimate %f", ), headSizeLimit=dict( argstr="--headSizeLimit %f", ), imageMask=dict( argstr="--imageMask %s", extensions=None, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), maximize=dict( argstr="--maximize ", ), otsuPercentileThreshold=dict( argstr="--otsuPercentileThreshold %f", ), ) inputs = FindCenterOfBrain.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FindCenterOfBrain_outputs(): output_map = dict( clippedImageMask=dict( extensions=None, ), debugAfterGridComputationsForegroundImage=dict( extensions=None, ), debugClippedImageMask=dict( extensions=None, ), debugDistanceImage=dict( extensions=None, ), debugGridImage=dict( extensions=None, ), debugTrimmedImage=dict( extensions=None, ), ) outputs 
= FindCenterOfBrain.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value test_auto_GenerateLabelMapFromProbabilityMap.py000066400000000000000000000023371413403311400346700ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/semtools/utilities/tests# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import GenerateLabelMapFromProbabilityMap def test_GenerateLabelMapFromProbabilityMap_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolumes=dict( argstr="--inputVolumes %s...", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputLabelVolume=dict( argstr="--outputLabelVolume %s", hash_files=False, ), ) inputs = GenerateLabelMapFromProbabilityMap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GenerateLabelMapFromProbabilityMap_outputs(): output_map = dict( outputLabelVolume=dict( extensions=None, ), ) outputs = GenerateLabelMapFromProbabilityMap.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_ImageRegionPlotter.py000066400000000000000000000032371413403311400316740ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import ImageRegionPlotter def test_ImageRegionPlotter_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputBinaryROIVolume=dict( argstr="--inputBinaryROIVolume %s", extensions=None, ), inputLabelVolume=dict( argstr="--inputLabelVolume %s", extensions=None, ), inputVolume1=dict( argstr="--inputVolume1 %s", extensions=None, ), inputVolume2=dict( argstr="--inputVolume2 
%s", extensions=None, ), numberOfHistogramBins=dict( argstr="--numberOfHistogramBins %d", ), outputJointHistogramData=dict( argstr="--outputJointHistogramData %s", ), useIntensityForHistogram=dict( argstr="--useIntensityForHistogram ", ), useROIAUTO=dict( argstr="--useROIAUTO ", ), verbose=dict( argstr="--verbose ", ), ) inputs = ImageRegionPlotter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ImageRegionPlotter_outputs(): output_map = dict() outputs = ImageRegionPlotter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_JointHistogram.py000066400000000000000000000026511413403311400310740ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import JointHistogram def test_JointHistogram_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputMaskVolumeInXAxis=dict( argstr="--inputMaskVolumeInXAxis %s", extensions=None, ), inputMaskVolumeInYAxis=dict( argstr="--inputMaskVolumeInYAxis %s", extensions=None, ), inputVolumeInXAxis=dict( argstr="--inputVolumeInXAxis %s", extensions=None, ), inputVolumeInYAxis=dict( argstr="--inputVolumeInYAxis %s", extensions=None, ), outputJointHistogramImage=dict( argstr="--outputJointHistogramImage %s", ), verbose=dict( argstr="--verbose ", ), ) inputs = JointHistogram.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_JointHistogram_outputs(): output_map = dict() outputs = JointHistogram.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], 
metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_ShuffleVectorsModule.py000066400000000000000000000023441413403311400322420ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import ShuffleVectorsModule def test_ShuffleVectorsModule_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVectorFileBaseName=dict( argstr="--inputVectorFileBaseName %s", extensions=None, ), outputVectorFileBaseName=dict( argstr="--outputVectorFileBaseName %s", hash_files=False, ), resampleProportion=dict( argstr="--resampleProportion %f", ), ) inputs = ShuffleVectorsModule.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ShuffleVectorsModule_outputs(): output_map = dict( outputVectorFileBaseName=dict( extensions=None, ), ) outputs = ShuffleVectorsModule.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_fcsv_to_hdf5.py000066400000000000000000000027641413403311400305110ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import fcsv_to_hdf5 def test_fcsv_to_hdf5_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), landmarkGlobPattern=dict( argstr="--landmarkGlobPattern %s", ), landmarkTypesList=dict( argstr="--landmarkTypesList %s", extensions=None, ), landmarksInformationFile=dict( argstr="--landmarksInformationFile %s", hash_files=False, ), modelFile=dict( argstr="--modelFile %s", hash_files=False, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), versionID=dict( argstr="--versionID %s", ), ) inputs = fcsv_to_hdf5.input_spec() for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_fcsv_to_hdf5_outputs(): output_map = dict( landmarksInformationFile=dict( extensions=None, ), modelFile=dict( extensions=None, ), ) outputs = fcsv_to_hdf5.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_insertMidACPCpoint.py000066400000000000000000000021421413403311400315650ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import insertMidACPCpoint def test_insertMidACPCpoint_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputLandmarkFile=dict( argstr="--inputLandmarkFile %s", extensions=None, ), outputLandmarkFile=dict( argstr="--outputLandmarkFile %s", hash_files=False, ), ) inputs = insertMidACPCpoint.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_insertMidACPCpoint_outputs(): output_map = dict( outputLandmarkFile=dict( extensions=None, ), ) outputs = insertMidACPCpoint.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationAligner.py000066400000000000000000000022501413403311400341430ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import landmarksConstellationAligner def test_landmarksConstellationAligner_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputLandmarksPaired=dict( argstr="--inputLandmarksPaired %s", extensions=None, ), 
outputLandmarksPaired=dict( argstr="--outputLandmarksPaired %s", hash_files=False, ), ) inputs = landmarksConstellationAligner.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_landmarksConstellationAligner_outputs(): output_map = dict( outputLandmarksPaired=dict( extensions=None, ), ) outputs = landmarksConstellationAligner.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/semtools/utilities/tests/test_auto_landmarksConstellationWeights.py000066400000000000000000000025601413403311400342000ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brains import landmarksConstellationWeights def test_landmarksConstellationWeights_inputs(): input_map = dict( LLSModel=dict( argstr="--LLSModel %s", extensions=None, ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputTemplateModel=dict( argstr="--inputTemplateModel %s", extensions=None, ), inputTrainingList=dict( argstr="--inputTrainingList %s", extensions=None, ), outputWeightsList=dict( argstr="--outputWeightsList %s", hash_files=False, ), ) inputs = landmarksConstellationWeights.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_landmarksConstellationWeights_outputs(): output_map = dict( outputWeightsList=dict( extensions=None, ), ) outputs = landmarksConstellationWeights.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/slicer/000077500000000000000000000000001413403311400177255ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/__init__.py000066400000000000000000000013051413403311400220350ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ 3D Slicer is a platform for medical image informatics processing and visualization. For an EXPERIMENTAL implementation of an interface for the ``3dSlicer`` full framework, please check `"dynamic" Slicer `__. """ from .diffusion import * from .segmentation import * from .filtering import * from .utilities import EMSegmentTransformToNewFormat from .surface import ( MergeModels, ModelToLabelMap, GrayscaleModelMaker, ProbeVolumeWithModel, LabelMapSmoothing, ModelMaker, ) from .quantification import * from .legacy import * from .registration import * from .converters import DicomToNrrdConverter, OrientScalarVolume nipype-1.7.0/nipype/interfaces/slicer/base.py000066400000000000000000000001661413403311400212140ustar00rootroot00000000000000# -*- coding: utf-8 -*- from ..base import SEMLikeCommandLine class SlicerCommandLine(SEMLikeCommandLine): pass nipype-1.7.0/nipype/interfaces/slicer/converters.py000066400000000000000000000144761413403311400225050ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class DicomToNrrdConverterInputSpec(CommandLineInputSpec): inputDicomDirectory = Directory( desc="Directory holding Dicom series", exists=True, argstr="--inputDicomDirectory %s", ) outputDirectory = traits.Either( traits.Bool, Directory(), hash_files=False, desc="Directory holding the output NRRD format", argstr="--outputDirectory %s", ) outputVolume = traits.Str( desc="Output 
filename (.nhdr or .nrrd)", argstr="--outputVolume %s" ) smallGradientThreshold = traits.Float( desc="If a gradient magnitude is greater than 0 and less than smallGradientThreshold, then DicomToNrrdConverter will display an error message and quit, unless the useBMatrixGradientDirections option is set.", argstr="--smallGradientThreshold %f", ) writeProtocolGradientsFile = traits.Bool( desc="Write the protocol gradients to a file suffixed by '.txt' as they were specified in the procol by multiplying each diffusion gradient direction by the measurement frame. This file is for debugging purposes only, the format is not fixed, and will likely change as debugging of new dicom formats is necessary.", argstr="--writeProtocolGradientsFile ", ) useIdentityMeaseurementFrame = traits.Bool( desc="Adjust all the gradients so that the measurement frame is an identity matrix.", argstr="--useIdentityMeaseurementFrame ", ) useBMatrixGradientDirections = traits.Bool( desc="Fill the nhdr header with the gradient directions and bvalues computed out of the BMatrix. Only changes behavior for Siemens data.", argstr="--useBMatrixGradientDirections ", ) class DicomToNrrdConverterOutputSpec(TraitedSpec): outputDirectory = Directory( desc="Directory holding the output NRRD format", exists=True ) class DicomToNrrdConverter(SEMLikeCommandLine): """title: DICOM to NRRD Converter category: Converters description: Converts diffusion weighted MR images in dicom series into Nrrd format for analysis in Slicer. This program has been tested on only a limited subset of DTI dicom formats available from Siemens, GE, and Phillips scanners. Work in progress to support dicom multi-frame data. The program parses dicom header to extract necessary information about measurement frame, diffusion weighting directions, b-values, etc, and write out a nrrd image. For non-diffusion weighted dicom images, it loads in an entire dicom series and writes out a single dicom volume in a .nhdr/.raw pair. 
version: 0.2.0.$Revision: 916 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DicomToNrrdConverter license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Xiaodong Tao (GE), Vince Magnotta (UIowa), Hans Johnson (UIowa) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Additional support for DTI data produced on Philips scanners was contributed by Vincent Magnotta and Hans Johnson at the University of Iowa. """ input_spec = DicomToNrrdConverterInputSpec output_spec = DicomToNrrdConverterOutputSpec _cmd = "DicomToNrrdConverter " _outputs_filenames = {"outputDirectory": "outputDirectory"} class OrientScalarVolumeInputSpec(CommandLineInputSpec): inputVolume1 = File(position=-2, desc="Input volume 1", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="The oriented volume", argstr="%s", ) orientation = traits.Enum( "Axial", "Coronal", "Sagittal", "RIP", "LIP", "RSP", "LSP", "RIA", "LIA", "RSA", "LSA", "IRP", "ILP", "SRP", "SLP", "IRA", "ILA", "SRA", "SLA", "RPI", "LPI", "RAI", "LAI", "RPS", "LPS", "RAS", "LAS", "PRI", "PLI", "ARI", "ALI", "PRS", "PLS", "ARS", "ALS", "IPR", "SPR", "IAR", "SAR", "IPL", "SPL", "IAL", "SAL", "PIR", "PSR", "AIR", "ASR", "PIL", "PSL", "AIL", "ASL", desc="Orientation choices", argstr="--orientation %s", ) class OrientScalarVolumeOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="The oriented volume", exists=True) class OrientScalarVolume(SEMLikeCommandLine): """title: Orient Scalar Volume category: Converters description: Orients an output volume. Rearranges the slices in a volume according to the selected orientation. The slices are not interpolated. They are just reordered and/or permuted. 
The resulting volume will cover the original volume. NOTE: since Slicer takes into account the orientation of a volume, the re-oriented volume will not show any difference from the original volume, To see the difference, save the volume and display it with a system that either ignores the orientation of the image (e.g. Paraview) or displays individual images. version: 0.1.0.$Revision: 19608 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/OrientImage contributor: Bill Lorensen (GE) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. """ input_spec = OrientScalarVolumeInputSpec output_spec = OrientScalarVolumeOutputSpec _cmd = "OrientScalarVolume " _outputs_filenames = {"outputVolume": "outputVolume.nii"} nipype-1.7.0/nipype/interfaces/slicer/diffusion/000077500000000000000000000000001413403311400217135ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/diffusion/__init__.py000066400000000000000000000004451413403311400240270ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .diffusion import ( ResampleDTIVolume, DWIRicianLMMSEFilter, TractographyLabelMapSeeding, DWIJointRicianLMMSEFilter, DiffusionWeightedVolumeMasking, DTIimport, DWIToDTIEstimation, DiffusionTensorScalarMeasurements, DTIexport, ) nipype-1.7.0/nipype/interfaces/slicer/diffusion/diffusion.py000066400000000000000000000627521413403311400242670ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class ResampleDTIVolumeInputSpec(CommandLineInputSpec): 
inputVolume = File( position=-2, desc="Input volume to be resampled", exists=True, argstr="%s" ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Resampled Volume", argstr="%s", ) Reference = File( desc="Reference Volume (spacing,size,orientation,origin)", exists=True, argstr="--Reference %s", ) transformationFile = File(exists=True, argstr="--transformationFile %s") defField = File( desc="File containing the deformation field (3D vector image containing vectors with 3 components)", exists=True, argstr="--defField %s", ) hfieldtype = traits.Enum( "displacement", "h-Field", desc="Set if the deformation field is an -Field", argstr="--hfieldtype %s", ) interpolation = traits.Enum( "linear", "nn", "ws", "bs", desc="Sampling algorithm (linear , nn (nearest neighborhoor), ws (WindowedSinc), bs (BSpline) )", argstr="--interpolation %s", ) correction = traits.Enum( "zero", "none", "abs", "nearest", desc="Correct the tensors if computed tensor is not semi-definite positive", argstr="--correction %s", ) transform_tensor_method = traits.Enum( "PPD", "FS", desc="Chooses between 2 methods to transform the tensors: Finite Strain (FS), faster but less accurate, or Preservation of the Principal Direction (PPD)", argstr="--transform_tensor_method %s", ) transform_order = traits.Enum( "input-to-output", "output-to-input", desc="Select in what order the transforms are read", argstr="--transform_order %s", ) notbulk = traits.Bool( desc="The transform following the BSpline transform is not set as a bulk transform for the BSpline transform", argstr="--notbulk ", ) spaceChange = traits.Bool( desc="Space Orientation between transform and image is different (RAS/LPS) (warning: if the transform is a Transform Node in Slicer3, do not select)", argstr="--spaceChange ", ) rotation_point = traits.List( desc="Center of rotation (only for rigid and affine transforms)", argstr="--rotation_point %s", ) centered_transform = traits.Bool( desc="Set the center of 
the transformation to the center of the input image (only for rigid and affine transforms)", argstr="--centered_transform ", ) image_center = traits.Enum( "input", "output", desc="Image to use to center the transform (used only if 'Centered Transform' is selected)", argstr="--image_center %s", ) Inverse_ITK_Transformation = traits.Bool( desc="Inverse the transformation before applying it from output image to input image (only for rigid and affine transforms)", argstr="--Inverse_ITK_Transformation ", ) spacing = InputMultiPath( traits.Float, desc="Spacing along each dimension (0 means use input spacing)", sep=",", argstr="--spacing %s", ) size = InputMultiPath( traits.Float, desc="Size along each dimension (0 means use input size)", sep=",", argstr="--size %s", ) origin = traits.List(desc="Origin of the output Image", argstr="--origin %s") direction_matrix = InputMultiPath( traits.Float, desc="9 parameters of the direction matrix by rows (ijk to LPS if LPS transform, ijk to RAS if RAS transform)", sep=",", argstr="--direction_matrix %s", ) number_of_thread = traits.Int( desc="Number of thread used to compute the output image", argstr="--number_of_thread %d", ) default_pixel_value = traits.Float( desc="Default pixel value for samples falling outside of the input region", argstr="--default_pixel_value %f", ) window_function = traits.Enum( "h", "c", "w", "l", "b", desc="Window Function , h = Hamming , c = Cosine , w = Welch , l = Lanczos , b = Blackman", argstr="--window_function %s", ) spline_order = traits.Int( desc="Spline Order (Spline order may be from 0 to 5)", argstr="--spline_order %d", ) transform_matrix = InputMultiPath( traits.Float, desc="12 parameters of the transform matrix by rows ( --last 3 being translation-- )", sep=",", argstr="--transform_matrix %s", ) transform = traits.Enum( "rt", "a", desc="Transform algorithm, rt = Rigid Transform, a = Affine Transform", argstr="--transform %s", ) class ResampleDTIVolumeOutputSpec(TraitedSpec): outputVolume = 
File(position=-1, desc="Resampled Volume", exists=True) class ResampleDTIVolume(SEMLikeCommandLine): """title: Resample DTI Volume category: Diffusion.Diffusion Tensor Images description: Resampling an image is a very important task in image analysis. It is especially important in the frame of image registration. This module implements DT image resampling through the use of itk Transforms. The resampling is controlled by the Output Spacing. "Resampling" is performed in space coordinates, not pixel/grid coordinates. It is quite important to ensure that image spacing is properly set on the images involved. The interpolator is required since the mapping from one space to the other will often require evaluation of the intensity of the image at non-grid positions. version: 0.1 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ResampleDTI contributor: Francois Budin (UNC) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
Information on the National Centers for Biomedical Computing can be obtained from http://nihroadmap.nih.gov/bioinformatics """ input_spec = ResampleDTIVolumeInputSpec output_spec = ResampleDTIVolumeOutputSpec _cmd = "ResampleDTIVolume " _outputs_filenames = {"outputVolume": "outputVolume.nii"} class DWIRicianLMMSEFilterInputSpec(CommandLineInputSpec): iter = traits.Int( desc="Number of iterations for the noise removal filter.", argstr="--iter %d" ) re = InputMultiPath( traits.Int, desc="Estimation radius.", sep=",", argstr="--re %s" ) rf = InputMultiPath(traits.Int, desc="Filtering radius.", sep=",", argstr="--rf %s") mnvf = traits.Int( desc="Minimum number of voxels in kernel used for filtering.", argstr="--mnvf %d", ) mnve = traits.Int( desc="Minimum number of voxels in kernel used for estimation.", argstr="--mnve %d", ) minnstd = traits.Int( desc="Minimum allowed noise standard deviation.", argstr="--minnstd %d" ) maxnstd = traits.Int( desc="Maximum allowed noise standard deviation.", argstr="--maxnstd %d" ) hrf = traits.Float( desc="How many histogram bins per unit interval.", argstr="--hrf %f" ) uav = traits.Bool( desc="Use absolute value in case of negative square.", argstr="--uav " ) inputVolume = File(position=-2, desc="Input DWI volume.", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output DWI volume.", argstr="%s", ) compressOutput = traits.Bool( desc="Compress the data of the compressed file using gzip", argstr="--compressOutput ", ) class DWIRicianLMMSEFilterOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Output DWI volume.", exists=True) class DWIRicianLMMSEFilter(SEMLikeCommandLine): """title: DWI Rician LMMSE Filter category: Diffusion.Diffusion Weighted Images description: This module reduces noise (or unwanted detail) on a set of diffusion weighted images. For this, it filters the image in the mean squared error sense using a Rician noise model. 
Images corresponding to each gradient direction, including baseline, are processed individually. The noise parameter is automatically estimated (noise estimation improved but slower). Note that this is a general purpose filter for MRi images. The module jointLMMSE has been specifically designed for DWI volumes and shows a better performance, so its use is recommended instead. A complete description of the algorithm in this module can be found in: S. Aja-Fernandez, M. Niethammer, M. Kubicki, M. Shenton, and C.-F. Westin. Restoration of DWI data using a Rician LMMSE estimator. IEEE Transactions on Medical Imaging, 27(10): pp. 1389-1403, Oct. 2008. version: 0.1.1.$Revision: 1 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/RicianLMMSEImageFilter contributor: Antonio Tristan Vega (UVa), Santiago Aja Fernandez (UVa), Marc Niethammer (UNC) acknowledgements: Partially founded by grant number TEC2007-67073/TCM from the Comision Interministerial de Ciencia y Tecnologia (Spain). 
""" input_spec = DWIRicianLMMSEFilterInputSpec output_spec = DWIRicianLMMSEFilterOutputSpec _cmd = "DWIRicianLMMSEFilter " _outputs_filenames = {"outputVolume": "outputVolume.nii"} class TractographyLabelMapSeedingInputSpec(CommandLineInputSpec): InputVolume = File(position=-2, desc="Input DTI volume", exists=True, argstr="%s") inputroi = File( desc="Label map with seeding ROIs", exists=True, argstr="--inputroi %s" ) OutputFibers = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Tractography result", argstr="%s", ) useindexspace = traits.Bool( desc="Seed at IJK voxel grid", argstr="--useindexspace " ) seedspacing = traits.Float( desc="Spacing (in mm) between seed points, only matters if use Use Index Space is off", argstr="--seedspacing %f", ) randomgrid = traits.Bool( desc="Enable random placing of seeds", argstr="--randomgrid " ) clthreshold = traits.Float( desc="Minimum Linear Measure for the seeding to start.", argstr="--clthreshold %f", ) minimumlength = traits.Float( desc="Minimum length of the fibers (in mm)", argstr="--minimumlength %f" ) maximumlength = traits.Float( desc="Maximum length of fibers (in mm)", argstr="--maximumlength %f" ) stoppingmode = traits.Enum( "LinearMeasure", "FractionalAnisotropy", desc="Tensor measurement used to stop the tractography", argstr="--stoppingmode %s", ) stoppingvalue = traits.Float( desc="Tractography will stop when the stopping measurement drops below this value", argstr="--stoppingvalue %f", ) stoppingcurvature = traits.Float( desc="Tractography will stop if radius of curvature becomes smaller than this number units are degrees per mm", argstr="--stoppingcurvature %f", ) integrationsteplength = traits.Float( desc="Distance between points on the same fiber in mm", argstr="--integrationsteplength %f", ) label = traits.Int( desc="Label value that defines seeding region.", argstr="--label %d" ) writetofile = traits.Bool( desc="Write fibers to disk or create in the scene?", argstr="--writetofile " 
) outputdirectory = traits.Either( traits.Bool, Directory(), hash_files=False, desc="Directory in which to save fiber(s)", argstr="--outputdirectory %s", ) name = traits.Str(desc="Name to use for fiber files", argstr="--name %s") class TractographyLabelMapSeedingOutputSpec(TraitedSpec): OutputFibers = File(position=-1, desc="Tractography result", exists=True) outputdirectory = Directory(desc="Directory in which to save fiber(s)", exists=True) class TractographyLabelMapSeeding(SEMLikeCommandLine): """title: Tractography Label Map Seeding category: Diffusion.Diffusion Tensor Images description: Seed tracts on a Diffusion Tensor Image (DT) from a label map version: 0.1.0.$Revision: 1892 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Seeding license: slicer3 contributor: Raul San Jose (SPL, BWH), Demian Wassermann (SPL, BWH) acknowledgements: Laboratory of Mathematics in Imaging. This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
""" input_spec = TractographyLabelMapSeedingInputSpec output_spec = TractographyLabelMapSeedingOutputSpec _cmd = "TractographyLabelMapSeeding " _outputs_filenames = { "OutputFibers": "OutputFibers.vtk", "outputdirectory": "outputdirectory", } class DWIJointRicianLMMSEFilterInputSpec(CommandLineInputSpec): re = InputMultiPath( traits.Int, desc="Estimation radius.", sep=",", argstr="--re %s" ) rf = InputMultiPath(traits.Int, desc="Filtering radius.", sep=",", argstr="--rf %s") ng = traits.Int( desc="The number of the closest gradients that are used to jointly filter a given gradient direction (0 to use all).", argstr="--ng %d", ) inputVolume = File(position=-2, desc="Input DWI volume.", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output DWI volume.", argstr="%s", ) compressOutput = traits.Bool( desc="Compress the data of the compressed file using gzip", argstr="--compressOutput ", ) class DWIJointRicianLMMSEFilterOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Output DWI volume.", exists=True) class DWIJointRicianLMMSEFilter(SEMLikeCommandLine): """title: DWI Joint Rician LMMSE Filter category: Diffusion.Diffusion Weighted Images description: This module reduces Rician noise (or unwanted detail) on a set of diffusion weighted images. For this, it filters the image in the mean squared error sense using a Rician noise model. The N closest gradient directions to the direction being processed are filtered together to improve the results: the noise-free signal is seen as an n-diemensional vector which has to be estimated with the LMMSE method from a set of corrupted measurements. To that end, the covariance matrix of the noise-free vector and the cross covariance between this signal and the noise have to be estimated, which is done taking into account the image formation process. The noise parameter is automatically estimated from a rough segmentation of the background of the image. 
In this area the signal is simply 0, so that Rician statistics reduce to Rayleigh and the noise power can be easily estimated from the mode of the histogram. A complete description of the algorithm may be found in: Antonio Tristan-Vega and Santiago Aja-Fernandez, DWI filtering using joint information for DTI and HARDI, Medical Image Analysis, Volume 14, Issue 2, Pages 205-218. 2010. version: 0.1.1.$Revision: 1 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/JointRicianLMMSEImageFilter contributor: Antonio Tristan Vega (UVa), Santiago Aja Fernandez (UVa) acknowledgements: Partially founded by grant number TEC2007-67073/TCM from the Comision Interministerial de Ciencia y Tecnologia (Spain). """ input_spec = DWIJointRicianLMMSEFilterInputSpec output_spec = DWIJointRicianLMMSEFilterOutputSpec _cmd = "DWIJointRicianLMMSEFilter " _outputs_filenames = {"outputVolume": "outputVolume.nii"} class DiffusionWeightedVolumeMaskingInputSpec(CommandLineInputSpec): inputVolume = File(position=-4, desc="Input DWI volume", exists=True, argstr="%s") outputBaseline = traits.Either( traits.Bool, File(), position=-2, hash_files=False, desc="Estimated baseline volume", argstr="%s", ) thresholdMask = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Otsu Threshold Mask", argstr="%s", ) otsuomegathreshold = traits.Float( desc="Control the sharpness of the threshold in the Otsu computation. 
0: lower threshold, 1: higher threhold", argstr="--otsuomegathreshold %f", ) removeislands = traits.Bool( desc="Remove Islands in Threshold Mask?", argstr="--removeislands " ) class DiffusionWeightedVolumeMaskingOutputSpec(TraitedSpec): outputBaseline = File(position=-2, desc="Estimated baseline volume", exists=True) thresholdMask = File(position=-1, desc="Otsu Threshold Mask", exists=True) class DiffusionWeightedVolumeMasking(SEMLikeCommandLine): """title: Diffusion Weighted Volume Masking category: Diffusion.Diffusion Weighted Images description:

Performs a mask calculation from a diffusion weighted (DW) image.

Starting from a dw image, this module computes the baseline image averaging all the images without diffusion weighting and then applies the otsu segmentation algorithm in order to produce a mask. this mask can then be used when estimating the diffusion tensor (dt) image, not to estimate tensors all over the volume.

version: 0.1.0.$Revision: 1892 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DiffusionWeightedMasking license: slicer3 contributor: Demian Wassermann (SPL, BWH) """ input_spec = DiffusionWeightedVolumeMaskingInputSpec output_spec = DiffusionWeightedVolumeMaskingOutputSpec _cmd = "DiffusionWeightedVolumeMasking " _outputs_filenames = { "outputBaseline": "outputBaseline.nii", "thresholdMask": "thresholdMask.nii", } class DTIimportInputSpec(CommandLineInputSpec): inputFile = File(position=-2, desc="Input DTI file", exists=True, argstr="%s") outputTensor = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output DTI volume", argstr="%s", ) testingmode = traits.Bool( desc="Enable testing mode. Sample helix file (helix-DTI.nhdr) will be loaded into Slicer and converted in Nifti.", argstr="--testingmode ", ) class DTIimportOutputSpec(TraitedSpec): outputTensor = File(position=-1, desc="Output DTI volume", exists=True) class DTIimport(SEMLikeCommandLine): """title: DTIimport category: Diffusion.Diffusion Data Conversion description: Import tensor datasets from various formats, including the NifTi file format version: 1.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DTIImport contributor: Sonia Pujol (SPL, BWH) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NA-MIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
""" input_spec = DTIimportInputSpec output_spec = DTIimportOutputSpec _cmd = "DTIimport " _outputs_filenames = {"outputTensor": "outputTensor.nii"} class DWIToDTIEstimationInputSpec(CommandLineInputSpec): inputVolume = File(position=-3, desc="Input DWI volume", exists=True, argstr="%s") mask = File( desc="Mask where the tensors will be computed", exists=True, argstr="--mask %s" ) outputTensor = traits.Either( traits.Bool, File(), position=-2, hash_files=False, desc="Estimated DTI volume", argstr="%s", ) outputBaseline = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Estimated baseline volume", argstr="%s", ) enumeration = traits.Enum( "LS", "WLS", desc="LS: Least Squares, WLS: Weighted Least Squares", argstr="--enumeration %s", ) shiftNeg = traits.Bool( desc="Shift eigenvalues so all are positive (accounts for bad tensors related to noise or acquisition error)", argstr="--shiftNeg ", ) class DWIToDTIEstimationOutputSpec(TraitedSpec): outputTensor = File(position=-2, desc="Estimated DTI volume", exists=True) outputBaseline = File(position=-1, desc="Estimated baseline volume", exists=True) class DWIToDTIEstimation(SEMLikeCommandLine): """title: DWI to DTI Estimation category: Diffusion.Diffusion Weighted Images description: Performs a tensor model estimation from diffusion weighted images. There are three estimation methods available: least squares, weigthed least squares and non-linear estimation. The first method is the traditional method for tensor estimation and the fastest one. Weighted least squares takes into account the noise characteristics of the MRI images to weight the DWI samples used in the estimation based on its intensity magnitude. The last method is the more complex. 
version: 0.1.0.$Revision: 1892 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DiffusionTensorEstimation license: slicer3 contributor: Raul San Jose (SPL, BWH) acknowledgements: This command module is based on the estimation functionality provided by the Teem library. This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. """ input_spec = DWIToDTIEstimationInputSpec output_spec = DWIToDTIEstimationOutputSpec _cmd = "DWIToDTIEstimation " _outputs_filenames = { "outputTensor": "outputTensor.nii", "outputBaseline": "outputBaseline.nii", } class DiffusionTensorScalarMeasurementsInputSpec(CommandLineInputSpec): inputVolume = File(position=-3, desc="Input DTI volume", exists=True, argstr="%s") outputScalar = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Scalar volume derived from tensor", argstr="%s", ) enumeration = traits.Enum( "Trace", "Determinant", "RelativeAnisotropy", "FractionalAnisotropy", "Mode", "LinearMeasure", "PlanarMeasure", "SphericalMeasure", "MinEigenvalue", "MidEigenvalue", "MaxEigenvalue", "MaxEigenvalueProjectionX", "MaxEigenvalueProjectionY", "MaxEigenvalueProjectionZ", "RAIMaxEigenvecX", "RAIMaxEigenvecY", "RAIMaxEigenvecZ", "MaxEigenvecX", "MaxEigenvecY", "MaxEigenvecZ", "D11", "D22", "D33", "ParallelDiffusivity", "PerpendicularDffusivity", desc="An enumeration of strings", argstr="--enumeration %s", ) class DiffusionTensorScalarMeasurementsOutputSpec(TraitedSpec): outputScalar = File( position=-1, desc="Scalar volume derived from tensor", exists=True ) class DiffusionTensorScalarMeasurements(SEMLikeCommandLine): """title: Diffusion Tensor Scalar Measurements category: Diffusion.Diffusion Tensor Images description: Compute a set of different scalar measurements from a tensor field, specially oriented for Diffusion Tensors where some 
rotationally invariant measurements, like Fractional Anisotropy, are highly used to describe the anistropic behaviour of the tensor. version: 0.1.0.$Revision: 1892 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DiffusionTensorMathematics contributor: Raul San Jose (SPL, BWH) acknowledgements: LMI """ input_spec = DiffusionTensorScalarMeasurementsInputSpec output_spec = DiffusionTensorScalarMeasurementsOutputSpec _cmd = "DiffusionTensorScalarMeasurements " _outputs_filenames = {"outputScalar": "outputScalar.nii"} class DTIexportInputSpec(CommandLineInputSpec): inputTensor = File(position=-2, desc="Input DTI volume", exists=True, argstr="%s") outputFile = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output DTI file", argstr="%s", ) class DTIexportOutputSpec(TraitedSpec): outputFile = File(position=-1, desc="Output DTI file", exists=True) class DTIexport(SEMLikeCommandLine): """title: DTIexport category: Diffusion.Diffusion Data Conversion description: Export DTI data to various file formats version: 1.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DTIExport contributor: Sonia Pujol (SPL, BWH) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NA-MIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
""" input_spec = DTIexportInputSpec output_spec = DTIexportOutputSpec _cmd = "DTIexport " _outputs_filenames = {"outputFile": "outputFile"} nipype-1.7.0/nipype/interfaces/slicer/diffusion/tests/000077500000000000000000000000001413403311400230555ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/diffusion/tests/__init__.py000066400000000000000000000000301413403311400251570ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIexport.py000066400000000000000000000021041413403311400273750ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..diffusion import DTIexport def test_DTIexport_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputTensor=dict( argstr="%s", extensions=None, position=-2, ), outputFile=dict( argstr="%s", hash_files=False, position=-1, ), ) inputs = DTIexport.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DTIexport_outputs(): output_map = dict( outputFile=dict( extensions=None, position=-1, ), ) outputs = DTIexport.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/diffusion/tests/test_auto_DTIimport.py000066400000000000000000000022201413403311400273650ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..diffusion import DTIimport def test_DTIimport_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputFile=dict( argstr="%s", extensions=None, position=-2, ), outputTensor=dict( argstr="%s", hash_files=False, position=-1, ), testingmode=dict( argstr="--testingmode ", ), ) inputs = DTIimport.input_spec() for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DTIimport_outputs(): output_map = dict( outputTensor=dict( extensions=None, position=-1, ), ) outputs = DTIimport.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIJointRicianLMMSEFilter.py000066400000000000000000000027001413403311400322360ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..diffusion import DWIJointRicianLMMSEFilter def test_DWIJointRicianLMMSEFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), compressOutput=dict( argstr="--compressOutput ", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="%s", extensions=None, position=-2, ), ng=dict( argstr="--ng %d", ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), re=dict( argstr="--re %s", sep=",", ), rf=dict( argstr="--rf %s", sep=",", ), ) inputs = DWIJointRicianLMMSEFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DWIJointRicianLMMSEFilter_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = DWIJointRicianLMMSEFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIRicianLMMSEFilter.py000066400000000000000000000034451413403311400312410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..diffusion import DWIRicianLMMSEFilter def test_DWIRicianLMMSEFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), 
compressOutput=dict( argstr="--compressOutput ", ), environ=dict( nohash=True, usedefault=True, ), hrf=dict( argstr="--hrf %f", ), inputVolume=dict( argstr="%s", extensions=None, position=-2, ), iter=dict( argstr="--iter %d", ), maxnstd=dict( argstr="--maxnstd %d", ), minnstd=dict( argstr="--minnstd %d", ), mnve=dict( argstr="--mnve %d", ), mnvf=dict( argstr="--mnvf %d", ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), re=dict( argstr="--re %s", sep=",", ), rf=dict( argstr="--rf %s", sep=",", ), uav=dict( argstr="--uav ", ), ) inputs = DWIRicianLMMSEFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DWIRicianLMMSEFilter_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = DWIRicianLMMSEFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/diffusion/tests/test_auto_DWIToDTIEstimation.py000066400000000000000000000030661413403311400310470ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..diffusion import DWIToDTIEstimation def test_DWIToDTIEstimation_inputs(): input_map = dict( args=dict( argstr="%s", ), enumeration=dict( argstr="--enumeration %s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="%s", extensions=None, position=-3, ), mask=dict( argstr="--mask %s", extensions=None, ), outputBaseline=dict( argstr="%s", hash_files=False, position=-1, ), outputTensor=dict( argstr="%s", hash_files=False, position=-2, ), shiftNeg=dict( argstr="--shiftNeg ", ), ) inputs = DWIToDTIEstimation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def 
test_DWIToDTIEstimation_outputs(): output_map = dict( outputBaseline=dict( extensions=None, position=-1, ), outputTensor=dict( extensions=None, position=-2, ), ) outputs = DWIToDTIEstimation.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionTensorScalarMeasurements.py000066400000000000000000000024141413403311400343570ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..diffusion import DiffusionTensorScalarMeasurements def test_DiffusionTensorScalarMeasurements_inputs(): input_map = dict( args=dict( argstr="%s", ), enumeration=dict( argstr="--enumeration %s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="%s", extensions=None, position=-3, ), outputScalar=dict( argstr="%s", hash_files=False, position=-1, ), ) inputs = DiffusionTensorScalarMeasurements.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DiffusionTensorScalarMeasurements_outputs(): output_map = dict( outputScalar=dict( extensions=None, position=-1, ), ) outputs = DiffusionTensorScalarMeasurements.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/diffusion/tests/test_auto_DiffusionWeightedVolumeMasking.py000066400000000000000000000030611413403311400336270ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..diffusion import DiffusionWeightedVolumeMasking def test_DiffusionWeightedVolumeMasking_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="%s", extensions=None, position=-4, ), 
otsuomegathreshold=dict( argstr="--otsuomegathreshold %f", ), outputBaseline=dict( argstr="%s", hash_files=False, position=-2, ), removeislands=dict( argstr="--removeislands ", ), thresholdMask=dict( argstr="%s", hash_files=False, position=-1, ), ) inputs = DiffusionWeightedVolumeMasking.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DiffusionWeightedVolumeMasking_outputs(): output_map = dict( outputBaseline=dict( extensions=None, position=-2, ), thresholdMask=dict( extensions=None, position=-1, ), ) outputs = DiffusionWeightedVolumeMasking.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/diffusion/tests/test_auto_ResampleDTIVolume.py000066400000000000000000000062271413403311400310260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..diffusion import ResampleDTIVolume def test_ResampleDTIVolume_inputs(): input_map = dict( Inverse_ITK_Transformation=dict( argstr="--Inverse_ITK_Transformation ", ), Reference=dict( argstr="--Reference %s", extensions=None, ), args=dict( argstr="%s", ), centered_transform=dict( argstr="--centered_transform ", ), correction=dict( argstr="--correction %s", ), defField=dict( argstr="--defField %s", extensions=None, ), default_pixel_value=dict( argstr="--default_pixel_value %f", ), direction_matrix=dict( argstr="--direction_matrix %s", sep=",", ), environ=dict( nohash=True, usedefault=True, ), hfieldtype=dict( argstr="--hfieldtype %s", ), image_center=dict( argstr="--image_center %s", ), inputVolume=dict( argstr="%s", extensions=None, position=-2, ), interpolation=dict( argstr="--interpolation %s", ), notbulk=dict( argstr="--notbulk ", ), number_of_thread=dict( argstr="--number_of_thread %d", ), origin=dict( argstr="--origin %s", ), 
outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), rotation_point=dict( argstr="--rotation_point %s", ), size=dict( argstr="--size %s", sep=",", ), spaceChange=dict( argstr="--spaceChange ", ), spacing=dict( argstr="--spacing %s", sep=",", ), spline_order=dict( argstr="--spline_order %d", ), transform=dict( argstr="--transform %s", ), transform_matrix=dict( argstr="--transform_matrix %s", sep=",", ), transform_order=dict( argstr="--transform_order %s", ), transform_tensor_method=dict( argstr="--transform_tensor_method %s", ), transformationFile=dict( argstr="--transformationFile %s", extensions=None, ), window_function=dict( argstr="--window_function %s", ), ) inputs = ResampleDTIVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ResampleDTIVolume_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = ResampleDTIVolume.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/diffusion/tests/test_auto_TractographyLabelMapSeeding.py000066400000000000000000000046031413403311400330650ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..diffusion import TractographyLabelMapSeeding def test_TractographyLabelMapSeeding_inputs(): input_map = dict( InputVolume=dict( argstr="%s", extensions=None, position=-2, ), OutputFibers=dict( argstr="%s", hash_files=False, position=-1, ), args=dict( argstr="%s", ), clthreshold=dict( argstr="--clthreshold %f", ), environ=dict( nohash=True, usedefault=True, ), inputroi=dict( argstr="--inputroi %s", extensions=None, ), integrationsteplength=dict( argstr="--integrationsteplength %f", ), label=dict( argstr="--label %d", ), maximumlength=dict( argstr="--maximumlength %f", ), 
minimumlength=dict( argstr="--minimumlength %f", ), name=dict( argstr="--name %s", ), outputdirectory=dict( argstr="--outputdirectory %s", hash_files=False, ), randomgrid=dict( argstr="--randomgrid ", ), seedspacing=dict( argstr="--seedspacing %f", ), stoppingcurvature=dict( argstr="--stoppingcurvature %f", ), stoppingmode=dict( argstr="--stoppingmode %s", ), stoppingvalue=dict( argstr="--stoppingvalue %f", ), useindexspace=dict( argstr="--useindexspace ", ), writetofile=dict( argstr="--writetofile ", ), ) inputs = TractographyLabelMapSeeding.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TractographyLabelMapSeeding_outputs(): output_map = dict( OutputFibers=dict( extensions=None, position=-1, ), outputdirectory=dict(), ) outputs = TractographyLabelMapSeeding.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/filtering/000077500000000000000000000000001413403311400217105ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/filtering/__init__.py000066400000000000000000000015521413403311400240240ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .morphology import GrayscaleGrindPeakImageFilter, GrayscaleFillHoleImageFilter from .denoising import ( GradientAnisotropicDiffusion, CurvatureAnisotropicDiffusion, GaussianBlurImageFilter, MedianImageFilter, ) from .arithmetic import ( MultiplyScalarVolumes, MaskScalarVolume, SubtractScalarVolumes, AddScalarVolumes, CastScalarVolume, ) from .extractskeleton import ExtractSkeleton from .histogrammatching import HistogramMatching from .thresholdscalarvolume import ThresholdScalarVolume from .n4itkbiasfieldcorrection import N4ITKBiasFieldCorrection from .checkerboardfilter import CheckerBoardFilter from .imagelabelcombine import 
ImageLabelCombine from .votingbinaryholefillingimagefilter import VotingBinaryHoleFillingImageFilter from .resamplescalarvectordwivolume import ResampleScalarVectorDWIVolume nipype-1.7.0/nipype/interfaces/slicer/filtering/arithmetic.py000066400000000000000000000216241413403311400244200ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class MultiplyScalarVolumesInputSpec(CommandLineInputSpec): inputVolume1 = File(position=-3, desc="Input volume 1", exists=True, argstr="%s") inputVolume2 = File(position=-2, desc="Input volume 2", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Volume1 * Volume2", argstr="%s", ) order = traits.Enum( "0", "1", "2", "3", desc="Interpolation order if two images are in different coordinate frames or have different sampling.", argstr="--order %s", ) class MultiplyScalarVolumesOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Volume1 * Volume2", exists=True) class MultiplyScalarVolumes(SEMLikeCommandLine): """title: Multiply Scalar Volumes category: Filtering.Arithmetic description: Multiplies two images. Although all image types are supported on input, only signed types are produced. The two images do not have to have the same dimensions. version: 0.1.0.$Revision: 8595 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Multiply contributor: Bill Lorensen (GE) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
""" input_spec = MultiplyScalarVolumesInputSpec output_spec = MultiplyScalarVolumesOutputSpec _cmd = "MultiplyScalarVolumes " _outputs_filenames = {"outputVolume": "outputVolume.nii"} class MaskScalarVolumeInputSpec(CommandLineInputSpec): InputVolume = File( position=-3, desc="Input volume to be masked", exists=True, argstr="%s" ) MaskVolume = File( position=-2, desc="Label volume containing the mask", exists=True, argstr="%s" ) OutputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output volume: Input Volume masked by label value from Mask Volume", argstr="%s", ) label = traits.Int( desc="Label value in the Mask Volume to use as the mask", argstr="--label %d" ) replace = traits.Int( desc="Value to use for the output volume outside of the mask", argstr="--replace %d", ) class MaskScalarVolumeOutputSpec(TraitedSpec): OutputVolume = File( position=-1, desc="Output volume: Input Volume masked by label value from Mask Volume", exists=True, ) class MaskScalarVolume(SEMLikeCommandLine): """title: Mask Scalar Volume category: Filtering.Arithmetic description: Masks two images. The output image is set to 0 everywhere except where the chosen label from the mask volume is present, at which point it will retain it's original values. Although all image types are supported on input, only signed types are produced. The two images do not have to have the same dimensions. version: 0.1.0.$Revision: 8595 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Mask contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
""" input_spec = MaskScalarVolumeInputSpec output_spec = MaskScalarVolumeOutputSpec _cmd = "MaskScalarVolume " _outputs_filenames = {"OutputVolume": "OutputVolume.nii"} class SubtractScalarVolumesInputSpec(CommandLineInputSpec): inputVolume1 = File(position=-3, desc="Input volume 1", exists=True, argstr="%s") inputVolume2 = File(position=-2, desc="Input volume 2", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Volume1 - Volume2", argstr="%s", ) order = traits.Enum( "0", "1", "2", "3", desc="Interpolation order if two images are in different coordinate frames or have different sampling.", argstr="--order %s", ) class SubtractScalarVolumesOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Volume1 - Volume2", exists=True) class SubtractScalarVolumes(SEMLikeCommandLine): """title: Subtract Scalar Volumes category: Filtering.Arithmetic description: Subtracts two images. Although all image types are supported on input, only signed types are produced. The two images do not have to have the same dimensions. version: 0.1.0.$Revision: 19608 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Subtract contributor: Bill Lorensen (GE) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
""" input_spec = SubtractScalarVolumesInputSpec output_spec = SubtractScalarVolumesOutputSpec _cmd = "SubtractScalarVolumes " _outputs_filenames = {"outputVolume": "outputVolume.nii"} class AddScalarVolumesInputSpec(CommandLineInputSpec): inputVolume1 = File(position=-3, desc="Input volume 1", exists=True, argstr="%s") inputVolume2 = File(position=-2, desc="Input volume 2", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Volume1 + Volume2", argstr="%s", ) order = traits.Enum( "0", "1", "2", "3", desc="Interpolation order if two images are in different coordinate frames or have different sampling.", argstr="--order %s", ) class AddScalarVolumesOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Volume1 + Volume2", exists=True) class AddScalarVolumes(SEMLikeCommandLine): """title: Add Scalar Volumes category: Filtering.Arithmetic description: Adds two images. Although all image types are supported on input, only signed types are produced. The two images do not have to have the same dimensions. version: 0.1.0.$Revision: 19608 $(alpha) documentation-url: http://slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Add contributor: Bill Lorensen (GE) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
""" input_spec = AddScalarVolumesInputSpec output_spec = AddScalarVolumesOutputSpec _cmd = "AddScalarVolumes " _outputs_filenames = {"outputVolume": "outputVolume.nii"} class CastScalarVolumeInputSpec(CommandLineInputSpec): InputVolume = File( position=-2, desc="Input volume, the volume to cast.", exists=True, argstr="%s" ) OutputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output volume, cast to the new type.", argstr="%s", ) type = traits.Enum( "Char", "UnsignedChar", "Short", "UnsignedShort", "Int", "UnsignedInt", "Float", "Double", desc="Type for the new output volume.", argstr="--type %s", ) class CastScalarVolumeOutputSpec(TraitedSpec): OutputVolume = File( position=-1, desc="Output volume, cast to the new type.", exists=True ) class CastScalarVolume(SEMLikeCommandLine): """title: Cast Scalar Volume category: Filtering.Arithmetic description: Cast a volume to a given data type. Use at your own risk when casting an input volume into a lower precision type! Allows casting to the same type as the input volume. version: 0.1.0.$Revision: 2104 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Cast contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
""" input_spec = CastScalarVolumeInputSpec output_spec = CastScalarVolumeOutputSpec _cmd = "CastScalarVolume " _outputs_filenames = {"OutputVolume": "OutputVolume.nii"} nipype-1.7.0/nipype/interfaces/slicer/filtering/checkerboardfilter.py000066400000000000000000000047531413403311400261150ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class CheckerBoardFilterInputSpec(CommandLineInputSpec): checkerPattern = InputMultiPath( traits.Int, desc="The pattern of input 1 and input 2 in the output image. The user can specify the number of checkers in each dimension. A checkerPattern of 2,2,1 means that images will alternate in every other checker in the first two dimensions. The same pattern will be used in the 3rd dimension.", sep=",", argstr="--checkerPattern %s", ) inputVolume1 = File( position=-3, desc="First Input volume", exists=True, argstr="%s" ) inputVolume2 = File( position=-2, desc="Second Input volume", exists=True, argstr="%s" ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", argstr="%s", ) class CheckerBoardFilterOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Output filtered", exists=True) class CheckerBoardFilter(SEMLikeCommandLine): """title: CheckerBoard Filter category: Filtering description: Create a checkerboard volume of two volumes. The output volume will show the two inputs alternating according to the user supplied checkerPattern. This filter is often used to compare the results of image registration. Note that the second input is resampled to the same origin, spacing and direction before it is composed with the first input. 
The scalar type of the output volume will be the same as the input image scalar type. version: 0.1.0.$Revision: 19608 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/CheckerBoard contributor: Bill Lorensen (GE) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. """ input_spec = CheckerBoardFilterInputSpec output_spec = CheckerBoardFilterOutputSpec _cmd = "CheckerBoardFilter " _outputs_filenames = {"outputVolume": "outputVolume.nii"} nipype-1.7.0/nipype/interfaces/slicer/filtering/denoising.py000066400000000000000000000225421413403311400242460ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class GradientAnisotropicDiffusionInputSpec(CommandLineInputSpec): conductance = traits.Float( desc="Conductance controls the sensitivity of the conductance term. As a general rule, the lower the value, the more strongly the filter preserves edges. A high value will cause diffusion (smoothing) across edges. Note that the number of iterations controls how much smoothing is done within regions bounded by edges.", argstr="--conductance %f", ) iterations = traits.Int( desc="The more iterations, the more smoothing. Each iteration takes the same amount of time. If it takes 10 seconds for one iteration, then it will take 100 seconds for 10 iterations. Note that the conductance controls how much each iteration smooths across edges.", argstr="--iterations %d", ) timeStep = traits.Float( desc="The time step depends on the dimensionality of the image. 
In Slicer the images are 3D and the default (.0625) time step will provide a stable solution.", argstr="--timeStep %f", ) inputVolume = File( position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", argstr="%s", ) class GradientAnisotropicDiffusionOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Output filtered", exists=True) class GradientAnisotropicDiffusion(SEMLikeCommandLine): """title: Gradient Anisotropic Diffusion category: Filtering.Denoising description: Runs gradient anisotropic diffusion on a volume. Anisotropic diffusion methods reduce noise (or unwanted detail) in images while preserving specific image features, like edges. For many applications, there is an assumption that light-dark transitions (edges) are interesting. Standard isotropic diffusion methods move and blur light-dark boundaries. Anisotropic diffusion methods are formulated to specifically preserve edges. The conductance term for this implementation is a function of the gradient magnitude of the image at each point, reducing the strength of diffusion at edges. The numerical implementation of this equation is similar to that described in the Perona-Malik paper, but uses a more robust technique for gradient magnitude estimation and has been generalized to N-dimensions. 
version: 0.1.0.$Revision: 19608 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/GradientAnisotropicDiffusion contributor: Bill Lorensen (GE) acknowledgements: This command module was derived from Insight/Examples (copyright) Insight Software Consortium """ input_spec = GradientAnisotropicDiffusionInputSpec output_spec = GradientAnisotropicDiffusionOutputSpec _cmd = "GradientAnisotropicDiffusion " _outputs_filenames = {"outputVolume": "outputVolume.nii"} class CurvatureAnisotropicDiffusionInputSpec(CommandLineInputSpec): conductance = traits.Float( desc="Conductance controls the sensitivity of the conductance term. As a general rule, the lower the value, the more strongly the filter preserves edges. A high value will cause diffusion (smoothing) across edges. Note that the number of iterations controls how much smoothing is done within regions bounded by edges.", argstr="--conductance %f", ) iterations = traits.Int( desc="The more iterations, the more smoothing. Each iteration takes the same amount of time. If it takes 10 seconds for one iteration, then it will take 100 seconds for 10 iterations. Note that the conductance controls how much each iteration smooths across edges.", argstr="--iterations %d", ) timeStep = traits.Float( desc="The time step depends on the dimensionality of the image. 
In Slicer the images are 3D and the default (.0625) time step will provide a stable solution.", argstr="--timeStep %f", ) inputVolume = File( position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", argstr="%s", ) class CurvatureAnisotropicDiffusionOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Output filtered", exists=True) class CurvatureAnisotropicDiffusion(SEMLikeCommandLine): """title: Curvature Anisotropic Diffusion category: Filtering.Denoising description: Performs anisotropic diffusion on an image using a modified curvature diffusion equation (MCDE). MCDE does not exhibit the edge enhancing properties of classic anisotropic diffusion, which can under certain conditions undergo a 'negative' diffusion, which enhances the contrast of edges. Equations of the form of MCDE always undergo positive diffusion, with the conductance term only varying the strength of that diffusion. Qualitatively, MCDE compares well with other non-linear diffusion techniques. It is less sensitive to contrast than classic Perona-Malik style diffusion, and preserves finer detailed structures in images. There is a potential speed trade-off for using this function in place of Gradient Anisotropic Diffusion. Each iteration of the solution takes roughly twice as long. Fewer iterations, however, may be required to reach an acceptable solution. 
version: 0.1.0.$Revision: 19608 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/CurvatureAnisotropicDiffusion contributor: Bill Lorensen (GE) acknowledgements: This command module was derived from Insight/Examples (copyright) Insight Software Consortium """ input_spec = CurvatureAnisotropicDiffusionInputSpec output_spec = CurvatureAnisotropicDiffusionOutputSpec _cmd = "CurvatureAnisotropicDiffusion " _outputs_filenames = {"outputVolume": "outputVolume.nii"} class GaussianBlurImageFilterInputSpec(CommandLineInputSpec): sigma = traits.Float( desc="Sigma value in physical units (e.g., mm) of the Gaussian kernel", argstr="--sigma %f", ) inputVolume = File(position=-2, desc="Input volume", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Blurred Volume", argstr="%s", ) class GaussianBlurImageFilterOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Blurred Volume", exists=True) class GaussianBlurImageFilter(SEMLikeCommandLine): """title: Gaussian Blur Image Filter category: Filtering.Denoising description: Apply a gaussian blurr to an image version: 0.1.0.$Revision: 1.1 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/GaussianBlurImageFilter contributor: Julien Jomier (Kitware), Stephen Aylward (Kitware) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
""" input_spec = GaussianBlurImageFilterInputSpec output_spec = GaussianBlurImageFilterOutputSpec _cmd = "GaussianBlurImageFilter " _outputs_filenames = {"outputVolume": "outputVolume.nii"} class MedianImageFilterInputSpec(CommandLineInputSpec): neighborhood = InputMultiPath( traits.Int, desc="The size of the neighborhood in each dimension", sep=",", argstr="--neighborhood %s", ) inputVolume = File( position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", argstr="%s", ) class MedianImageFilterOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Output filtered", exists=True) class MedianImageFilter(SEMLikeCommandLine): """title: Median Image Filter category: Filtering.Denoising description: The MedianImageFilter is commonly used as a robust approach for noise reduction. This filter is particularly efficient against "salt-and-pepper" noise. In other words, it is robust to the presence of gray-level outliers. MedianImageFilter computes the value of each output pixel as the statistical median of the neighborhood of values around the corresponding input pixel. 
version: 0.1.0.$Revision: 19608 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/MedianImageFilter contributor: Bill Lorensen (GE) acknowledgements: This command module was derived from Insight/Examples/Filtering/MedianImageFilter (copyright) Insight Software Consortium """ input_spec = MedianImageFilterInputSpec output_spec = MedianImageFilterOutputSpec _cmd = "MedianImageFilter " _outputs_filenames = {"outputVolume": "outputVolume.nii"} nipype-1.7.0/nipype/interfaces/slicer/filtering/extractskeleton.py000066400000000000000000000047561413403311400255150ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class ExtractSkeletonInputSpec(CommandLineInputSpec): InputImageFileName = File(position=-2, desc="Input image", exists=True, argstr="%s") OutputImageFileName = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Skeleton of the input image", argstr="%s", ) type = traits.Enum( "1D", "2D", desc="Type of skeleton to create", argstr="--type %s" ) dontPrune = traits.Bool( desc="Return the full skeleton, not just the maximal skeleton", argstr="--dontPrune ", ) numPoints = traits.Int( desc="Number of points used to represent the skeleton", argstr="--numPoints %d" ) pointsFile = traits.Str( desc="Name of the file to store the coordinates of the central (1D) skeleton points", argstr="--pointsFile %s", ) class ExtractSkeletonOutputSpec(TraitedSpec): OutputImageFileName = File( position=-1, desc="Skeleton of the input image", exists=True ) class ExtractSkeleton(SEMLikeCommandLine): """title: Extract Skeleton category: Filtering description: Extract the skeleton of a binary 
object. The skeleton can be limited to being a 1D curve or allowed to be a full 2D manifold. The branches of the skeleton can be pruned so that only the maximal center skeleton is returned. version: 0.1.0.$Revision: 2104 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ExtractSkeleton contributor: Pierre Seroul (UNC), Martin Styner (UNC), Guido Gerig (UNC), Stephen Aylward (Kitware) acknowledgements: The original implementation of this method was provided by ETH Zurich, Image Analysis Laboratory of Profs Olaf Kuebler, Gabor Szekely and Guido Gerig. Martin Styner at UNC, Chapel Hill made enhancements. Wrapping for Slicer was provided by Pierre Seroul and Stephen Aylward at Kitware, Inc. """ input_spec = ExtractSkeletonInputSpec output_spec = ExtractSkeletonOutputSpec _cmd = "ExtractSkeleton " _outputs_filenames = {"OutputImageFileName": "OutputImageFileName.nii"} nipype-1.7.0/nipype/interfaces/slicer/filtering/histogrammatching.py000066400000000000000000000063261413403311400260010ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class HistogramMatchingInputSpec(CommandLineInputSpec): numberOfHistogramLevels = traits.Int( desc="The number of hisogram levels to use", argstr="--numberOfHistogramLevels %d", ) numberOfMatchPoints = traits.Int( desc="The number of match points to use", argstr="--numberOfMatchPoints %d" ) threshold = traits.Bool( desc="If on, only pixels above the mean in each volume are thresholded.", argstr="--threshold ", ) inputVolume = File( position=-3, desc="Input volume to be filtered", exists=True, argstr="%s" ) referenceVolume = File( position=-2, desc="Input volume 
whose histogram will be matched", exists=True, argstr="%s", ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output volume. This is the input volume with intensities matched to the reference volume.", argstr="%s", ) class HistogramMatchingOutputSpec(TraitedSpec): outputVolume = File( position=-1, desc="Output volume. This is the input volume with intensities matched to the reference volume.", exists=True, ) class HistogramMatching(SEMLikeCommandLine): """title: Histogram Matching category: Filtering description: Normalizes the grayscale values of a source image based on the grayscale values of a reference image. This filter uses a histogram matching technique where the histograms of the two images are matched only at a specified number of quantile values. The filter was orginally designed to normalize MR images of the sameMR protocol and same body part. The algorithm works best if background pixels are excluded from both the source and reference histograms. A simple background exclusion method is to exclude all pixels whose grayscale values are smaller than the mean grayscale value. ThresholdAtMeanIntensity switches on this simple background exclusion method. Number of match points governs the number of quantile values to be matched. The filter assumes that both the source and reference are of the same type and that the input and output image type have the same number of dimension and have scalar pixel types. version: 0.1.0.$Revision: 19608 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/HistogramMatching contributor: Bill Lorensen (GE) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
""" input_spec = HistogramMatchingInputSpec output_spec = HistogramMatchingOutputSpec _cmd = "HistogramMatching " _outputs_filenames = {"outputVolume": "outputVolume.nii"} nipype-1.7.0/nipype/interfaces/slicer/filtering/imagelabelcombine.py000066400000000000000000000032011413403311400256750ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class ImageLabelCombineInputSpec(CommandLineInputSpec): InputLabelMap_A = File( position=-3, desc="Label map image", exists=True, argstr="%s" ) InputLabelMap_B = File( position=-2, desc="Label map image", exists=True, argstr="%s" ) OutputLabelMap = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Resulting Label map image", argstr="%s", ) first_overwrites = traits.Bool( desc="Use first or second label when both are present", argstr="--first_overwrites ", ) class ImageLabelCombineOutputSpec(TraitedSpec): OutputLabelMap = File(position=-1, desc="Resulting Label map image", exists=True) class ImageLabelCombine(SEMLikeCommandLine): """title: Image Label Combine category: Filtering description: Combine two label maps into one version: 0.1.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ImageLabelCombine contributor: Alex Yarmarkovich (SPL, BWH) """ input_spec = ImageLabelCombineInputSpec output_spec = ImageLabelCombineOutputSpec _cmd = "ImageLabelCombine " _outputs_filenames = {"OutputLabelMap": "OutputLabelMap.nii"} nipype-1.7.0/nipype/interfaces/slicer/filtering/morphology.py000066400000000000000000000125461413403311400244710ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If 
you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class GrayscaleGrindPeakImageFilterInputSpec(CommandLineInputSpec): inputVolume = File( position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", argstr="%s", ) class GrayscaleGrindPeakImageFilterOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Output filtered", exists=True) class GrayscaleGrindPeakImageFilter(SEMLikeCommandLine): """title: Grayscale Grind Peak Image Filter category: Filtering.Morphology description: GrayscaleGrindPeakImageFilter removes peaks in a grayscale image. Peaks are local maxima in the grayscale topography that are not connected to boundaries of the image. Gray level values adjacent to a peak are extrapolated through the peak. This filter is used to smooth over local maxima without affecting the values of local minima. If you take the difference between the output of this filter and the original image (and perhaps threshold the difference above a small value), you'll obtain a map of the local maxima. This filter uses the GrayscaleGeodesicDilateImageFilter. It provides its own input as the "mask" input to the geodesic erosion. The "marker" image for the geodesic erosion is constructed such that boundary pixels match the boundary pixels of the input image and the interior pixels are set to the minimum pixel value in the input image. This filter is the dual to the GrayscaleFillholeImageFilter which implements the Fillhole algorithm. Since it is a dual, it is somewhat superfluous but is provided as a convenience. 
Geodesic morphology and the Fillhole algorithm is described in Chapter 6 of Pierre Soille's book "Morphological Image Analysis: Principles and Applications", Second Edition, Springer, 2003. A companion filter, Grayscale Fill Hole, fills holes in grayscale images. version: 0.1.0.$Revision: 19608 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/GrayscaleGrindPeakImageFilter contributor: Bill Lorensen (GE) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. """ input_spec = GrayscaleGrindPeakImageFilterInputSpec output_spec = GrayscaleGrindPeakImageFilterOutputSpec _cmd = "GrayscaleGrindPeakImageFilter " _outputs_filenames = {"outputVolume": "outputVolume.nii"} class GrayscaleFillHoleImageFilterInputSpec(CommandLineInputSpec): inputVolume = File( position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", argstr="%s", ) class GrayscaleFillHoleImageFilterOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Output filtered", exists=True) class GrayscaleFillHoleImageFilter(SEMLikeCommandLine): """title: Grayscale Fill Hole Image Filter category: Filtering.Morphology description: GrayscaleFillholeImageFilter fills holes in a grayscale image. Holes are local minima in the grayscale topography that are not connected to boundaries of the image. Gray level values adjacent to a hole are extrapolated across the hole. This filter is used to smooth over local minima without affecting the values of local maxima. If you take the difference between the output of this filter and the original image (and perhaps threshold the difference above a small value), you'll obtain a map of the local minima. 
This filter uses the itkGrayscaleGeodesicErodeImageFilter. It provides its own input as the "mask" input to the geodesic erosion. The "marker" image for the geodesic erosion is constructed such that boundary pixels match the boundary pixels of the input image and the interior pixels are set to the maximum pixel value in the input image. Geodesic morphology and the Fillhole algorithm is described in Chapter 6 of Pierre Soille's book "Morphological Image Analysis: Principles and Applications", Second Edition, Springer, 2003. A companion filter, Grayscale Grind Peak, removes peaks in grayscale images. version: 0.1.0.$Revision: 19608 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/GrayscaleFillHoleImageFilter contributor: Bill Lorensen (GE) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. """ input_spec = GrayscaleFillHoleImageFilterInputSpec output_spec = GrayscaleFillHoleImageFilterOutputSpec _cmd = "GrayscaleFillHoleImageFilter " _outputs_filenames = {"outputVolume": "outputVolume.nii"} nipype-1.7.0/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py000066400000000000000000000117501413403311400272520ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class N4ITKBiasFieldCorrectionInputSpec(CommandLineInputSpec): inputimage = File( desc="Input image where you observe signal inhomegeneity", exists=True, argstr="--inputimage %s", ) maskimage = File( desc="Binary mask that defines the structure of your interest. 
NOTE: This parameter is OPTIONAL. If the mask is not specified, the module will use internally Otsu thresholding to define this mask. Better processing results can often be obtained when a meaningful mask is defined.", exists=True, argstr="--maskimage %s", ) outputimage = traits.Either( traits.Bool, File(), hash_files=False, desc="Result of processing", argstr="--outputimage %s", ) outputbiasfield = traits.Either( traits.Bool, File(), hash_files=False, desc="Recovered bias field (OPTIONAL)", argstr="--outputbiasfield %s", ) iterations = InputMultiPath( traits.Int, desc="Maximum number of iterations at each level of resolution. Larger values will increase execution time, but may lead to better results.", sep=",", argstr="--iterations %s", ) convergencethreshold = traits.Float( desc="Stopping criterion for the iterative bias estimation. Larger values will lead to smaller execution time.", argstr="--convergencethreshold %f", ) meshresolution = InputMultiPath( traits.Float, desc="Resolution of the initial bspline grid defined as a sequence of three numbers. The actual resolution will be defined by adding the bspline order (default is 3) to the resolution in each dimension specified here. For example, 1,1,1 will result in a 4x4x4 grid of control points. This parameter may need to be adjusted based on your input image. In the multi-resolution N4 framework, the resolution of the bspline grid at subsequent iterations will be doubled. The number of resolutions is implicitly defined by Number of iterations parameter (the size of this list is the number of resolutions)", sep=",", argstr="--meshresolution %s", ) splinedistance = traits.Float( desc="An alternative means to define the spline grid, by setting the distance between the control points. This parameter is used only if the grid resolution is not specified.", argstr="--splinedistance %f", ) shrinkfactor = traits.Int( desc="Defines how much the image should be upsampled before estimating the inhomogeneity field. 
Increase if you want to reduce the execution time. 1 corresponds to the original resolution. Larger values will significantly reduce the computation time.", argstr="--shrinkfactor %d", ) bsplineorder = traits.Int( desc="Order of B-spline used in the approximation. Larger values will lead to longer execution times, may result in overfitting and poor result.", argstr="--bsplineorder %d", ) weightimage = File(desc="Weight Image", exists=True, argstr="--weightimage %s") histogramsharpening = InputMultiPath( traits.Float, desc="A vector of up to three values. Non-zero values correspond to Bias Field Full Width at Half Maximum, Wiener filter noise, and Number of histogram bins.", sep=",", argstr="--histogramsharpening %s", ) class N4ITKBiasFieldCorrectionOutputSpec(TraitedSpec): outputimage = File(desc="Result of processing", exists=True) outputbiasfield = File(desc="Recovered bias field (OPTIONAL)", exists=True) class N4ITKBiasFieldCorrection(SEMLikeCommandLine): """title: N4ITK MRI Bias correction category: Filtering description: Performs image bias correction using N4 algorithm. This module is based on the ITK filters contributed in the following publication: Tustison N, Gee J "N4ITK: Nick's N3 ITK Implementation For MRI Bias Field Correction", The Insight Journal 2009 January-June, http://hdl.handle.net/10380/3053 version: 9 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/N4ITKBiasFieldCorrection contributor: Nick Tustison (UPenn), Andrey Fedorov (SPL, BWH), Ron Kikinis (SPL, BWH) acknowledgements: The development of this module was partially supported by NIH grants R01 AA016748-01, R01 CA111288 and U01 CA151261 as well as by NA-MIC, NAC, NCIGT and the Slicer community. 
""" input_spec = N4ITKBiasFieldCorrectionInputSpec output_spec = N4ITKBiasFieldCorrectionOutputSpec _cmd = "N4ITKBiasFieldCorrection " _outputs_filenames = { "outputimage": "outputimage.nii", "outputbiasfield": "outputbiasfield.nii", } nipype-1.7.0/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py000066400000000000000000000142571413403311400304500ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class ResampleScalarVectorDWIVolumeInputSpec(CommandLineInputSpec): inputVolume = File( position=-2, desc="Input Volume to be resampled", exists=True, argstr="%s" ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Resampled Volume", argstr="%s", ) Reference = File( desc="Reference Volume (spacing,size,orientation,origin)", exists=True, argstr="--Reference %s", ) transformationFile = File(exists=True, argstr="--transformationFile %s") defField = File( desc="File containing the deformation field (3D vector image containing vectors with 3 components)", exists=True, argstr="--defField %s", ) hfieldtype = traits.Enum( "displacement", "h-Field", desc="Set if the deformation field is an h-Field", argstr="--hfieldtype %s", ) interpolation = traits.Enum( "linear", "nn", "ws", "bs", desc="Sampling algorithm (linear or nn (nearest neighborhoor), ws (WindowedSinc), bs (BSpline) )", argstr="--interpolation %s", ) transform_order = traits.Enum( "input-to-output", "output-to-input", desc="Select in what order the transforms are read", argstr="--transform_order %s", ) notbulk = traits.Bool( desc="The transform following the BSpline transform is not set as a bulk transform for the BSpline transform", 
argstr="--notbulk ", ) spaceChange = traits.Bool( desc="Space Orientation between transform and image is different (RAS/LPS) (warning: if the transform is a Transform Node in Slicer3, do not select)", argstr="--spaceChange ", ) rotation_point = traits.List( desc="Rotation Point in case of rotation around a point (otherwise useless)", argstr="--rotation_point %s", ) centered_transform = traits.Bool( desc="Set the center of the transformation to the center of the input image", argstr="--centered_transform ", ) image_center = traits.Enum( "input", "output", desc="Image to use to center the transform (used only if 'Centered Transform' is selected)", argstr="--image_center %s", ) Inverse_ITK_Transformation = traits.Bool( desc="Inverse the transformation before applying it from output image to input image", argstr="--Inverse_ITK_Transformation ", ) spacing = InputMultiPath( traits.Float, desc="Spacing along each dimension (0 means use input spacing)", sep=",", argstr="--spacing %s", ) size = InputMultiPath( traits.Float, desc="Size along each dimension (0 means use input size)", sep=",", argstr="--size %s", ) origin = traits.List(desc="Origin of the output Image", argstr="--origin %s") direction_matrix = InputMultiPath( traits.Float, desc="9 parameters of the direction matrix by rows (ijk to LPS if LPS transform, ijk to RAS if RAS transform)", sep=",", argstr="--direction_matrix %s", ) number_of_thread = traits.Int( desc="Number of thread used to compute the output image", argstr="--number_of_thread %d", ) default_pixel_value = traits.Float( desc="Default pixel value for samples falling outside of the input region", argstr="--default_pixel_value %f", ) window_function = traits.Enum( "h", "c", "w", "l", "b", desc="Window Function , h = Hamming , c = Cosine , w = Welch , l = Lanczos , b = Blackman", argstr="--window_function %s", ) spline_order = traits.Int(desc="Spline Order", argstr="--spline_order %d") transform_matrix = InputMultiPath( traits.Float, desc="12 parameters 
of the transform matrix by rows ( --last 3 being translation-- )", sep=",", argstr="--transform_matrix %s", ) transform = traits.Enum( "rt", "a", desc="Transform algorithm, rt = Rigid Transform, a = Affine Transform", argstr="--transform %s", ) class ResampleScalarVectorDWIVolumeOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Resampled Volume", exists=True) class ResampleScalarVectorDWIVolume(SEMLikeCommandLine): """title: Resample Scalar/Vector/DWI Volume category: Filtering description: This module implements image and vector-image resampling through the use of itk Transforms.It can also handle diffusion weighted MRI image resampling. "Resampling" is performed in space coordinates, not pixel/grid coordinates. It is quite important to ensure that image spacing is properly set on the images involved. The interpolator is required since the mapping from one space to the other will often require evaluation of the intensity of the image at non-grid positions. Warning: To resample DWMR Images, use nrrd input and output files. Warning: Do not use to resample Diffusion Tensor Images, tensors would not be reoriented version: 0.1 documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ResampleScalarVectorDWIVolume contributor: Francois Budin (UNC) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
Information on the National Centers for Biomedical Computing can be obtained from http://nihroadmap.nih.gov/bioinformatics """ input_spec = ResampleScalarVectorDWIVolumeInputSpec output_spec = ResampleScalarVectorDWIVolumeOutputSpec _cmd = "ResampleScalarVectorDWIVolume " _outputs_filenames = {"outputVolume": "outputVolume.nii"} nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/000077500000000000000000000000001413403311400230525ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/__init__.py000066400000000000000000000000301413403311400251540ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_AddScalarVolumes.py000066400000000000000000000024421413403311400307060ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..arithmetic import AddScalarVolumes def test_AddScalarVolumes_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume1=dict( argstr="%s", extensions=None, position=-3, ), inputVolume2=dict( argstr="%s", extensions=None, position=-2, ), order=dict( argstr="--order %s", ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), ) inputs = AddScalarVolumes.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AddScalarVolumes_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = AddScalarVolumes.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_CastScalarVolume.py000066400000000000000000000022521413403311400307240ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..arithmetic import CastScalarVolume 
def test_CastScalarVolume_inputs(): input_map = dict( InputVolume=dict( argstr="%s", extensions=None, position=-2, ), OutputVolume=dict( argstr="%s", hash_files=False, position=-1, ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), type=dict( argstr="--type %s", ), ) inputs = CastScalarVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CastScalarVolume_outputs(): output_map = dict( OutputVolume=dict( extensions=None, position=-1, ), ) outputs = CastScalarVolume.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_CheckerBoardFilter.py000066400000000000000000000025331413403311400312000ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..checkerboardfilter import CheckerBoardFilter def test_CheckerBoardFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), checkerPattern=dict( argstr="--checkerPattern %s", sep=",", ), environ=dict( nohash=True, usedefault=True, ), inputVolume1=dict( argstr="%s", extensions=None, position=-3, ), inputVolume2=dict( argstr="%s", extensions=None, position=-2, ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), ) inputs = CheckerBoardFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CheckerBoardFilter_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = CheckerBoardFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_CurvatureAnisotropicDiffusion.py000066400000000000000000000026101413403311400335540ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..denoising import CurvatureAnisotropicDiffusion def test_CurvatureAnisotropicDiffusion_inputs(): input_map = dict( args=dict( argstr="%s", ), conductance=dict( argstr="--conductance %f", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="%s", extensions=None, position=-2, ), iterations=dict( argstr="--iterations %d", ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), timeStep=dict( argstr="--timeStep %f", ), ) inputs = CurvatureAnisotropicDiffusion.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CurvatureAnisotropicDiffusion_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = CurvatureAnisotropicDiffusion.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_ExtractSkeleton.py000066400000000000000000000026271413403311400306410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..extractskeleton import ExtractSkeleton def test_ExtractSkeleton_inputs(): input_map = dict( InputImageFileName=dict( argstr="%s", extensions=None, position=-2, ), OutputImageFileName=dict( argstr="%s", hash_files=False, position=-1, ), args=dict( argstr="%s", ), dontPrune=dict( argstr="--dontPrune ", ), environ=dict( nohash=True, usedefault=True, ), numPoints=dict( argstr="--numPoints %d", ), pointsFile=dict( argstr="--pointsFile %s", ), type=dict( argstr="--type %s", ), ) inputs = ExtractSkeleton.input_spec() for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ExtractSkeleton_outputs(): output_map = dict( OutputImageFileName=dict( extensions=None, position=-1, ), ) outputs = ExtractSkeleton.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_GaussianBlurImageFilter.py000066400000000000000000000023161413403311400322250ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..denoising import GaussianBlurImageFilter def test_GaussianBlurImageFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="%s", extensions=None, position=-2, ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), sigma=dict( argstr="--sigma %f", ), ) inputs = GaussianBlurImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GaussianBlurImageFilter_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = GaussianBlurImageFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_GradientAnisotropicDiffusion.py000066400000000000000000000026031413403311400333330ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..denoising import GradientAnisotropicDiffusion def test_GradientAnisotropicDiffusion_inputs(): input_map = dict( args=dict( argstr="%s", ), conductance=dict( argstr="--conductance %f", ), environ=dict( nohash=True, usedefault=True, ), 
inputVolume=dict( argstr="%s", extensions=None, position=-2, ), iterations=dict( argstr="--iterations %d", ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), timeStep=dict( argstr="--timeStep %f", ), ) inputs = GradientAnisotropicDiffusion.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GradientAnisotropicDiffusion_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = GradientAnisotropicDiffusion.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleFillHoleImageFilter.py000066400000000000000000000022501413403311400331540ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..morphology import GrayscaleFillHoleImageFilter def test_GrayscaleFillHoleImageFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="%s", extensions=None, position=-2, ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), ) inputs = GrayscaleFillHoleImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GrayscaleFillHoleImageFilter_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = GrayscaleFillHoleImageFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_GrayscaleGrindPeakImageFilter.py000066400000000000000000000022551413403311400333270ustar00rootroot00000000000000# 
AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..morphology import GrayscaleGrindPeakImageFilter def test_GrayscaleGrindPeakImageFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="%s", extensions=None, position=-2, ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), ) inputs = GrayscaleGrindPeakImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GrayscaleGrindPeakImageFilter_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = GrayscaleGrindPeakImageFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_HistogramMatching.py000066400000000000000000000027661413403311400311360ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..histogrammatching import HistogramMatching def test_HistogramMatching_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="%s", extensions=None, position=-3, ), numberOfHistogramLevels=dict( argstr="--numberOfHistogramLevels %d", ), numberOfMatchPoints=dict( argstr="--numberOfMatchPoints %d", ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), referenceVolume=dict( argstr="%s", extensions=None, position=-2, ), threshold=dict( argstr="--threshold ", ), ) inputs = HistogramMatching.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_HistogramMatching_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = 
HistogramMatching.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_ImageLabelCombine.py000066400000000000000000000025141413403311400307740ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..imagelabelcombine import ImageLabelCombine def test_ImageLabelCombine_inputs(): input_map = dict( InputLabelMap_A=dict( argstr="%s", extensions=None, position=-3, ), InputLabelMap_B=dict( argstr="%s", extensions=None, position=-2, ), OutputLabelMap=dict( argstr="%s", hash_files=False, position=-1, ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), first_overwrites=dict( argstr="--first_overwrites ", ), ) inputs = ImageLabelCombine.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ImageLabelCombine_outputs(): output_map = dict( OutputLabelMap=dict( extensions=None, position=-1, ), ) outputs = ImageLabelCombine.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_MaskScalarVolume.py000066400000000000000000000025431413403311400307300ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..arithmetic import MaskScalarVolume def test_MaskScalarVolume_inputs(): input_map = dict( InputVolume=dict( argstr="%s", extensions=None, position=-3, ), MaskVolume=dict( argstr="%s", extensions=None, position=-2, ), OutputVolume=dict( argstr="%s", hash_files=False, position=-1, ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), label=dict( argstr="--label %d", ), replace=dict( argstr="--replace %d", ), ) inputs = 
MaskScalarVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MaskScalarVolume_outputs(): output_map = dict( OutputVolume=dict( extensions=None, position=-1, ), ) outputs = MaskScalarVolume.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_MedianImageFilter.py000066400000000000000000000023231413403311400310210ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..denoising import MedianImageFilter def test_MedianImageFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="%s", extensions=None, position=-2, ), neighborhood=dict( argstr="--neighborhood %s", sep=",", ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), ) inputs = MedianImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MedianImageFilter_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = MedianImageFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_MultiplyScalarVolumes.py000066400000000000000000000024731413403311400320410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..arithmetic import MultiplyScalarVolumes def test_MultiplyScalarVolumes_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume1=dict( argstr="%s", extensions=None, 
position=-3, ), inputVolume2=dict( argstr="%s", extensions=None, position=-2, ), order=dict( argstr="--order %s", ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), ) inputs = MultiplyScalarVolumes.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MultiplyScalarVolumes_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = MultiplyScalarVolumes.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_N4ITKBiasFieldCorrection.py000066400000000000000000000041701413403311400321410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..n4itkbiasfieldcorrection import N4ITKBiasFieldCorrection def test_N4ITKBiasFieldCorrection_inputs(): input_map = dict( args=dict( argstr="%s", ), bsplineorder=dict( argstr="--bsplineorder %d", ), convergencethreshold=dict( argstr="--convergencethreshold %f", ), environ=dict( nohash=True, usedefault=True, ), histogramsharpening=dict( argstr="--histogramsharpening %s", sep=",", ), inputimage=dict( argstr="--inputimage %s", extensions=None, ), iterations=dict( argstr="--iterations %s", sep=",", ), maskimage=dict( argstr="--maskimage %s", extensions=None, ), meshresolution=dict( argstr="--meshresolution %s", sep=",", ), outputbiasfield=dict( argstr="--outputbiasfield %s", hash_files=False, ), outputimage=dict( argstr="--outputimage %s", hash_files=False, ), shrinkfactor=dict( argstr="--shrinkfactor %d", ), splinedistance=dict( argstr="--splinedistance %f", ), weightimage=dict( argstr="--weightimage %s", extensions=None, ), ) inputs = N4ITKBiasFieldCorrection.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value def test_N4ITKBiasFieldCorrection_outputs(): output_map = dict( outputbiasfield=dict( extensions=None, ), outputimage=dict( extensions=None, ), ) outputs = N4ITKBiasFieldCorrection.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_ResampleScalarVectorDWIVolume.py000066400000000000000000000060711413403311400333340ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..resamplescalarvectordwivolume import ResampleScalarVectorDWIVolume def test_ResampleScalarVectorDWIVolume_inputs(): input_map = dict( Inverse_ITK_Transformation=dict( argstr="--Inverse_ITK_Transformation ", ), Reference=dict( argstr="--Reference %s", extensions=None, ), args=dict( argstr="%s", ), centered_transform=dict( argstr="--centered_transform ", ), defField=dict( argstr="--defField %s", extensions=None, ), default_pixel_value=dict( argstr="--default_pixel_value %f", ), direction_matrix=dict( argstr="--direction_matrix %s", sep=",", ), environ=dict( nohash=True, usedefault=True, ), hfieldtype=dict( argstr="--hfieldtype %s", ), image_center=dict( argstr="--image_center %s", ), inputVolume=dict( argstr="%s", extensions=None, position=-2, ), interpolation=dict( argstr="--interpolation %s", ), notbulk=dict( argstr="--notbulk ", ), number_of_thread=dict( argstr="--number_of_thread %d", ), origin=dict( argstr="--origin %s", ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), rotation_point=dict( argstr="--rotation_point %s", ), size=dict( argstr="--size %s", sep=",", ), spaceChange=dict( argstr="--spaceChange ", ), spacing=dict( argstr="--spacing %s", sep=",", ), spline_order=dict( argstr="--spline_order %d", ), transform=dict( argstr="--transform %s", ), transform_matrix=dict( argstr="--transform_matrix %s", sep=",", ), 
transform_order=dict( argstr="--transform_order %s", ), transformationFile=dict( argstr="--transformationFile %s", extensions=None, ), window_function=dict( argstr="--window_function %s", ), ) inputs = ResampleScalarVectorDWIVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ResampleScalarVectorDWIVolume_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = ResampleScalarVectorDWIVolume.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_SubtractScalarVolumes.py000066400000000000000000000024731413403311400320110ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..arithmetic import SubtractScalarVolumes def test_SubtractScalarVolumes_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume1=dict( argstr="%s", extensions=None, position=-3, ), inputVolume2=dict( argstr="%s", extensions=None, position=-2, ), order=dict( argstr="--order %s", ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), ) inputs = SubtractScalarVolumes.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SubtractScalarVolumes_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = SubtractScalarVolumes.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/slicer/filtering/tests/test_auto_ThresholdScalarVolume.py000066400000000000000000000027661413403311400320000ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..thresholdscalarvolume import ThresholdScalarVolume def test_ThresholdScalarVolume_inputs(): input_map = dict( InputVolume=dict( argstr="%s", extensions=None, position=-2, ), OutputVolume=dict( argstr="%s", hash_files=False, position=-1, ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), lower=dict( argstr="--lower %d", ), outsidevalue=dict( argstr="--outsidevalue %d", ), threshold=dict( argstr="--threshold %d", ), thresholdtype=dict( argstr="--thresholdtype %s", ), upper=dict( argstr="--upper %d", ), ) inputs = ThresholdScalarVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ThresholdScalarVolume_outputs(): output_map = dict( OutputVolume=dict( extensions=None, position=-1, ), ) outputs = ThresholdScalarVolume.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value test_auto_VotingBinaryHoleFillingImageFilter.py000066400000000000000000000030411413403311400342730ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/filtering/tests# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..votingbinaryholefillingimagefilter import VotingBinaryHoleFillingImageFilter def test_VotingBinaryHoleFillingImageFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), background=dict( argstr="--background %d", ), environ=dict( nohash=True, usedefault=True, ), foreground=dict( argstr="--foreground %d", ), inputVolume=dict( argstr="%s", extensions=None, position=-2, ), majorityThreshold=dict( argstr="--majorityThreshold %d", ), outputVolume=dict( argstr="%s", hash_files=False, 
position=-1, ), radius=dict( argstr="--radius %s", sep=",", ), ) inputs = VotingBinaryHoleFillingImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_VotingBinaryHoleFillingImageFilter_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = VotingBinaryHoleFillingImageFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py000066400000000000000000000054671413403311400267100ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class ThresholdScalarVolumeInputSpec(CommandLineInputSpec): InputVolume = File(position=-2, desc="Input volume", exists=True, argstr="%s") OutputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Thresholded input volume", argstr="%s", ) threshold = traits.Int(desc="Threshold value", argstr="--threshold %d") lower = traits.Int(desc="Lower threshold value", argstr="--lower %d") upper = traits.Int(desc="Upper threshold value", argstr="--upper %d") outsidevalue = traits.Int( desc="Set the voxels to this value if they fall outside the threshold range", argstr="--outsidevalue %d", ) thresholdtype = traits.Enum( "Below", "Above", "Outside", desc="What kind of threshold to perform. If Outside is selected, uses Upper and Lower values. 
If Below is selected, uses the ThresholdValue, if Above is selected, uses the ThresholdValue.", argstr="--thresholdtype %s", ) class ThresholdScalarVolumeOutputSpec(TraitedSpec): OutputVolume = File(position=-1, desc="Thresholded input volume", exists=True) class ThresholdScalarVolume(SEMLikeCommandLine): """title: Threshold Scalar Volume category: Filtering description:

Threshold an image.

Set image values to a user-specified outside value if they are below, above, or between simple threshold values.

ThresholdAbove: The values greater than or equal to the threshold value are set to OutsideValue.

ThresholdBelow: The values less than or equal to the threshold value are set to OutsideValue.

ThresholdOutside: The values outside the range Lower-Upper are set to OutsideValue.

Although all image types are supported on input, only signed types are produced.

version: 0.1.0.$Revision: 2104 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Threshold contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. """ input_spec = ThresholdScalarVolumeInputSpec output_spec = ThresholdScalarVolumeOutputSpec _cmd = "ThresholdScalarVolume " _outputs_filenames = {"OutputVolume": "OutputVolume.nii"} nipype-1.7.0/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py000066400000000000000000000055121413403311400314260ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class VotingBinaryHoleFillingImageFilterInputSpec(CommandLineInputSpec): radius = InputMultiPath( traits.Int, desc="The radius of a hole to be filled", sep=",", argstr="--radius %s", ) majorityThreshold = traits.Int( desc="The number of pixels over 50% that will decide whether an OFF pixel will become ON or not. 
For example, if the neighborhood of a pixel has 124 pixels (excluding itself), the 50% will be 62, and if you set a Majority threshold of 5, that means that the filter will require 67 or more neighbor pixels to be ON in order to switch the current OFF pixel to ON.", argstr="--majorityThreshold %d", ) background = traits.Int( desc="The value associated with the background (not object)", argstr="--background %d", ) foreground = traits.Int( desc="The value associated with the foreground (object)", argstr="--foreground %d", ) inputVolume = File( position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", argstr="%s", ) class VotingBinaryHoleFillingImageFilterOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Output filtered", exists=True) class VotingBinaryHoleFillingImageFilter(SEMLikeCommandLine): """title: Voting Binary Hole Filling Image Filter category: Filtering description: Applies a voting operation in order to fill-in cavities. This can be used for smoothing contours and for filling holes in binary images. This technique is used frequently when segmenting complete organs that may have ducts or vasculature that may not have been included in the initial segmentation, e.g. lungs, kidneys, liver. 
version: 0.1.0.$Revision: 19608 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/VotingBinaryHoleFillingImageFilter contributor: Bill Lorensen (GE) acknowledgements: This command module was derived from Insight/Examples/Filtering/VotingBinaryHoleFillingImageFilter (copyright) Insight Software Consortium """ input_spec = VotingBinaryHoleFillingImageFilterInputSpec output_spec = VotingBinaryHoleFillingImageFilterOutputSpec _cmd = "VotingBinaryHoleFillingImageFilter " _outputs_filenames = {"outputVolume": "outputVolume.nii"} nipype-1.7.0/nipype/interfaces/slicer/generate_classes.py000066400000000000000000000547141413403311400236210ustar00rootroot00000000000000# -*- coding: utf-8 -*- """This script generates Slicer Interfaces based on the CLI modules XML. CLI modules are selected from the hardcoded list below and generated code is placed in the cli_modules.py file (and imported in __init__.py). For this to work correctly you must have your CLI executabes in $PATH""" import xml.dom.minidom import subprocess import os from shutil import rmtree import keyword python_keywords = ( keyword.kwlist ) # If c++ SEM module uses one of these key words as a command line parameter, we need to modify variable def force_to_valid_python_variable_name(old_name): """Valid c++ names are not always valid in python, so provide alternate naming >>> force_to_valid_python_variable_name('lambda') 'opt_lambda' >>> force_to_valid_python_variable_name('inputVolume') 'inputVolume' """ new_name = old_name new_name = new_name.lstrip().rstrip() if old_name in python_keywords: new_name = "opt_" + old_name return new_name def add_class_to_package(class_codes, class_names, module_name, package_dir): module_python_filename = os.path.join(package_dir, "%s.py" % module_name) f_m = open(module_python_filename, "w") f_i = open(os.path.join(package_dir, "__init__.py"), "a+") f_m.write( """# -*- coding: utf-8 -*- \"\"\"Autogenerated file - DO NOT EDIT If you spot 
a bug, please report it on the mailing list and/or change the generator.\"\"\"\n\n""" ) imports = """\ from ..base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath) import os\n\n\n""" f_m.write(imports) f_m.write("\n\n".join(class_codes)) f_i.write("from %s import %s\n" % (module_name, ", ".join(class_names))) f_m.close() f_i.close() def crawl_code_struct(code_struct, package_dir): subpackages = [] for k, v in code_struct.items(): if isinstance(v, str) or isinstance(v, (str, bytes)): module_name = k.lower() class_name = k class_code = v add_class_to_package([class_code], [class_name], module_name, package_dir) else: l1 = {} l2 = {} for key in list(v.keys()): if isinstance(v[key], str) or isinstance(v[key], (str, bytes)): l1[key] = v[key] else: l2[key] = v[key] if l2: v = l2 subpackages.append(k.lower()) f_i = open(os.path.join(package_dir, "__init__.py"), "a+") f_i.write("from %s import *\n" % k.lower()) f_i.close() new_pkg_dir = os.path.join(package_dir, k.lower()) if os.path.exists(new_pkg_dir): rmtree(new_pkg_dir) os.mkdir(new_pkg_dir) crawl_code_struct(v, new_pkg_dir) if l1: for ik, iv in l1.items(): crawl_code_struct({ik: {ik: iv}}, new_pkg_dir) elif l1: v = l1 module_name = k.lower() add_class_to_package( list(v.values()), list(v.keys()), module_name, package_dir ) if subpackages: f = open(os.path.join(package_dir, "setup.py"), "w") f.write( """# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('{pkg_name}', parent_package, top_path) {sub_pks} return config if __name__ == '__main__': from numpy.distutils.core import setup setup(**configuration(top_path='').todict()) """.format( pkg_name=package_dir.split("/")[-1], sub_pks="\n ".join( [ "config.add_data_dir('%s')" % sub_pkg for 
sub_pkg in subpackages ] ), ) ) f.close() def generate_all_classes( modules_list=[], launcher=[], redirect_x=False, mipav_hacks=False ): """modules_list contains all the SEM compliant tools that should have wrappers created for them. launcher containtains the command line prefix wrapper arugments needed to prepare a proper environment for each of the modules. """ all_code = {} for module in modules_list: print("=" * 80) print("Generating Definition for module {0}".format(module)) print("^" * 80) package, code, module = generate_class( module, launcher, redirect_x=redirect_x, mipav_hacks=mipav_hacks ) cur_package = all_code module_name = package.strip().split(" ")[0].split(".")[-1] for package in package.strip().split(" ")[0].split(".")[:-1]: if package not in cur_package: cur_package[package] = {} cur_package = cur_package[package] if module_name not in cur_package: cur_package[module_name] = {} cur_package[module_name][module] = code if os.path.exists("__init__.py"): os.unlink("__init__.py") crawl_code_struct(all_code, os.getcwd()) def generate_class( module, launcher, strip_module_name_prefix=True, redirect_x=False, mipav_hacks=False ): dom = grab_xml(module, launcher, mipav_hacks=mipav_hacks) if strip_module_name_prefix: module_name = module.split(".")[-1] else: module_name = module inputTraits = [] outputTraits = [] outputs_filenames = {} # self._outputs_nodes = [] class_string = '"""' for desc_str in [ "title", "category", "description", "version", "documentation-url", "license", "contributor", "acknowledgements", ]: el = dom.getElementsByTagName(desc_str) if el and el[0].firstChild and el[0].firstChild.nodeValue.strip(): class_string += ( desc_str + ": " + el[0].firstChild.nodeValue.strip() + "\n\n" ) if desc_str == "category": category = el[0].firstChild.nodeValue.strip() class_string += '"""' for paramGroup in dom.getElementsByTagName("parameters"): indices = paramGroup.getElementsByTagName("index") max_index = 0 for index in indices: if 
int(index.firstChild.nodeValue) > max_index: max_index = int(index.firstChild.nodeValue) for param in paramGroup.childNodes: if param.nodeName in ["label", "description", "#text", "#comment"]: continue traitsParams = {} longFlagNode = param.getElementsByTagName("longflag") if longFlagNode: # Prefer to use longFlag as name if it is given, rather than the parameter name longFlagName = longFlagNode[0].firstChild.nodeValue # SEM automatically strips prefixed "--" or "-" from from xml before processing # we need to replicate that behavior here The following # two nodes in xml have the same behavior in the program # --test # test longFlagName = longFlagName.lstrip(" -").rstrip(" ") name = longFlagName name = force_to_valid_python_variable_name(name) traitsParams["argstr"] = "--" + longFlagName + " " else: name = param.getElementsByTagName("name")[0].firstChild.nodeValue name = force_to_valid_python_variable_name(name) if param.getElementsByTagName("index"): traitsParams["argstr"] = "" else: traitsParams["argstr"] = "--" + name + " " if ( param.getElementsByTagName("description") and param.getElementsByTagName("description")[0].firstChild ): traitsParams["desc"] = ( param.getElementsByTagName("description")[0] .firstChild.nodeValue.replace('"', '\\"') .replace("\n", ", ") ) argsDict = { "directory": "%s", "file": "%s", "integer": "%d", "double": "%f", "float": "%f", "image": "%s", "transform": "%s", "boolean": "", "string-enumeration": "%s", "string": "%s", "integer-enumeration": "%s", "table": "%s", "point": "%s", "region": "%s", "geometry": "%s", } if param.nodeName.endswith("-vector"): traitsParams["argstr"] += "%s" else: traitsParams["argstr"] += argsDict[param.nodeName] index = param.getElementsByTagName("index") if index: traitsParams["position"] = int(index[0].firstChild.nodeValue) - ( max_index + 1 ) desc = param.getElementsByTagName("description") if index: traitsParams["desc"] = desc[0].firstChild.nodeValue typesDict = { "integer": "traits.Int", "double": 
"traits.Float", "float": "traits.Float", "image": "File", "transform": "File", "boolean": "traits.Bool", "string": "traits.Str", "file": "File", "geometry": "File", "directory": "Directory", "table": "File", "point": "traits.List", "region": "traits.List", } if param.nodeName.endswith("-enumeration"): type = "traits.Enum" values = [ '"%s"' % str(el.firstChild.nodeValue).replace('"', "") for el in param.getElementsByTagName("element") ] elif param.nodeName.endswith("-vector"): type = "InputMultiPath" if param.nodeName in [ "file", "directory", "image", "geometry", "transform", "table", ]: values = [ "%s(exists=True)" % typesDict[param.nodeName.replace("-vector", "")] ] else: values = [typesDict[param.nodeName.replace("-vector", "")]] if mipav_hacks is True: traitsParams["sep"] = ";" else: traitsParams["sep"] = "," elif param.getAttribute("multiple") == "true": type = "InputMultiPath" if param.nodeName in [ "file", "directory", "image", "geometry", "transform", "table", ]: values = ["%s(exists=True)" % typesDict[param.nodeName]] elif param.nodeName in ["point", "region"]: values = [ "%s(traits.Float(), minlen=3, maxlen=3)" % typesDict[param.nodeName] ] else: values = [typesDict[param.nodeName]] traitsParams["argstr"] += "..." 
else: values = [] type = typesDict[param.nodeName] if param.nodeName in [ "file", "directory", "image", "geometry", "transform", "table", ]: if not param.getElementsByTagName("channel"): raise RuntimeError( "Insufficient XML specification: each element of type 'file', 'directory', 'image', 'geometry', 'transform', or 'table' requires 'channel' field.\n{0}".format( traitsParams ) ) elif ( param.getElementsByTagName("channel")[0].firstChild.nodeValue == "output" ): traitsParams["hash_files"] = False inputTraits.append( "%s = traits.Either(traits.Bool, %s(%s), %s)" % ( name, type, parse_values(values).replace("exists=True", ""), parse_params(traitsParams), ) ) traitsParams["exists"] = True traitsParams.pop("argstr") traitsParams.pop("hash_files") outputTraits.append( "%s = %s(%s%s)" % ( name, type.replace("Input", "Output"), parse_values(values), parse_params(traitsParams), ) ) outputs_filenames[name] = gen_filename_from_param(param, name) elif ( param.getElementsByTagName("channel")[0].firstChild.nodeValue == "input" ): if ( param.nodeName in [ "file", "directory", "image", "geometry", "transform", "table", ] and type not in ["InputMultiPath", "traits.List"] ): traitsParams["exists"] = True inputTraits.append( "%s = %s(%s%s)" % (name, type, parse_values(values), parse_params(traitsParams)) ) else: raise RuntimeError( "Insufficient XML specification: each element of type 'file', 'directory', 'image', 'geometry', 'transform', or 'table' requires 'channel' field to be in ['input','output'].\n{0}".format( traitsParams ) ) else: # For all other parameter types, they are implicitly only input types inputTraits.append( "%s = %s(%s%s)" % (name, type, parse_values(values), parse_params(traitsParams)) ) if mipav_hacks: blacklisted_inputs = ["maxMemoryUsage"] inputTraits = [ trait for trait in inputTraits if trait.split()[0] not in blacklisted_inputs ] compulsory_inputs = [ 'xDefaultMem = traits.Int(desc="Set default maximum heap size", argstr="-xDefaultMem %d")', 'xMaxProcess 
= traits.Int(1, desc="Set default maximum number of processes.", argstr="-xMaxProcess %d", usedefault=True)', ] inputTraits += compulsory_inputs input_spec_code = "class " + module_name + "InputSpec(CommandLineInputSpec):\n" for trait in inputTraits: input_spec_code += " " + trait + "\n" output_spec_code = "class " + module_name + "OutputSpec(TraitedSpec):\n" if not outputTraits: output_spec_code += " pass\n" else: for trait in outputTraits: output_spec_code += " " + trait + "\n" output_filenames_code = "_outputs_filenames = {" output_filenames_code += ",".join( ["'%s':'%s'" % (key, value) for key, value in outputs_filenames.items()] ) output_filenames_code += "}" input_spec_code += "\n\n" output_spec_code += "\n\n" template = """class %module_name%(SEMLikeCommandLine): %class_str% input_spec = %module_name%InputSpec output_spec = %module_name%OutputSpec _cmd = "%launcher% %name% " %output_filenames_code%\n""" template += " _redirect_x = {0}\n".format(str(redirect_x)) main_class = ( template.replace("%class_str%", class_string) .replace("%module_name%", module_name) .replace("%name%", module) .replace("%output_filenames_code%", output_filenames_code) .replace("%launcher%", " ".join(launcher)) ) return category, input_spec_code + output_spec_code + main_class, module_name def grab_xml(module, launcher, mipav_hacks=False): # cmd = CommandLine(command = "Slicer3", args="--launch %s --xml"%module) # ret = cmd.run() command_list = launcher[:] # force copy to preserve original command_list.extend([module, "--xml"]) final_command = " ".join(command_list) xmlReturnValue = subprocess.Popen( final_command, stdout=subprocess.PIPE, shell=True ).communicate()[0] if mipav_hacks: # workaround for a jist bug https://www.nitrc.org/tracker/index.php?func=detail&aid=7234&group_id=228&atid=942 new_xml = "" replace_closing_tag = False for line in xmlReturnValue.splitlines(): if line.strip() == "": new_xml += "\n" replace_closing_tag = True elif replace_closing_tag and line.strip() == 
"": new_xml += "\n" replace_closing_tag = False else: new_xml += line + "\n" xmlReturnValue = new_xml # workaround for a JIST bug https://www.nitrc.org/tracker/index.php?func=detail&aid=7233&group_id=228&atid=942 if xmlReturnValue.strip().endswith("XML"): xmlReturnValue = xmlReturnValue.strip()[:-3] if xmlReturnValue.strip().startswith("Error: Unable to set default atlas"): xmlReturnValue = xmlReturnValue.strip()[ len("Error: Unable to set default atlas") : ] try: dom = xml.dom.minidom.parseString(xmlReturnValue.strip()) except Exception as e: print(xmlReturnValue.strip()) raise e return dom # if ret.runtime.returncode == 0: # return xml.dom.minidom.parseString(ret.runtime.stdout) # else: # raise Exception(cmd.cmdline + " failed:\n%s"%ret.runtime.stderr) def parse_params(params): list = [] for key, value in params.items(): if isinstance(value, (str, bytes)): list.append('%s="%s"' % (key, value.replace('"', "'"))) else: list.append("%s=%s" % (key, value)) return ", ".join(list) def parse_values(values): values = ["%s" % value for value in values] if len(values) > 0: retstr = ", ".join(values) + ", " else: retstr = "" return retstr def gen_filename_from_param(param, base): fileExtensions = param.getAttribute("fileExtensions") if fileExtensions: # It is possible that multiple file extensions can be specified in a # comma separated list, This will extract just the first extension firstFileExtension = fileExtensions.split(",")[0] ext = firstFileExtension else: ext = { "image": ".nii", "transform": ".mat", "file": "", "directory": "", "geometry": ".vtk", }[param.nodeName] return base + ext if __name__ == "__main__": # NOTE: For now either the launcher needs to be found on the default path, or # every tool in the modules list must be found on the default path # AND calling the module with --xml must be supported and compliant. 
modules_list = [ "MedianImageFilter", "CheckerBoardFilter", "EMSegmentCommandLine", "GrayscaleFillHoleImageFilter", # 'CreateDICOMSeries', #missing channel "TractographyLabelMapSeeding", "IntensityDifferenceMetric", "DWIToDTIEstimation", "MaskScalarVolume", "ImageLabelCombine", "DTIimport", "OtsuThresholdImageFilter", "ExpertAutomatedRegistration", "ThresholdScalarVolume", "DWIUnbiasedNonLocalMeansFilter", "BRAINSFit", "MergeModels", "ResampleDTIVolume", "MultiplyScalarVolumes", "LabelMapSmoothing", "RigidRegistration", "VotingBinaryHoleFillingImageFilter", "BRAINSROIAuto", "RobustStatisticsSegmenter", "GradientAnisotropicDiffusion", "ProbeVolumeWithModel", "ModelMaker", "ExtractSkeleton", "GrayscaleGrindPeakImageFilter", "N4ITKBiasFieldCorrection", "BRAINSResample", "DTIexport", "VBRAINSDemonWarp", "ResampleScalarVectorDWIVolume", "ResampleScalarVolume", "OtsuThresholdSegmentation", # 'ExecutionModelTour', "HistogramMatching", "BRAINSDemonWarp", "ModelToLabelMap", "GaussianBlurImageFilter", "DiffusionWeightedVolumeMasking", "GrayscaleModelMaker", "CastScalarVolume", "DicomToNrrdConverter", "AffineRegistration", "AddScalarVolumes", "LinearRegistration", "SimpleRegionGrowingSegmentation", "DWIJointRicianLMMSEFilter", "MultiResolutionAffineRegistration", "SubtractScalarVolumes", "DWIRicianLMMSEFilter", "OrientScalarVolume", "FiducialRegistration", "BSplineDeformableRegistration", "CurvatureAnisotropicDiffusion", "PETStandardUptakeValueComputation", "DiffusionTensorScalarMeasurements", "ACPCTransform", "EMSegmentTransformToNewFormat", "BSplineToDeformationField", ] # SlicerExecutionModel compliant tools that are usually statically built, and don't need the Slicer3 --launcher generate_all_classes(modules_list=modules_list, launcher=[]) # Tools compliant with SlicerExecutionModel called from the Slicer environment (for shared lib compatibility) # launcher = ['/home/raid3/gorgolewski/software/slicer/Slicer', '--launch'] # generate_all_classes(modules_list=modules_list, 
launcher=launcher) # generate_all_classes(modules_list=['BRAINSABC'], launcher=[] ) nipype-1.7.0/nipype/interfaces/slicer/legacy/000077500000000000000000000000001413403311400211715ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/legacy/__init__.py000066400000000000000000000006551413403311400233100ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .diffusion import * from .segmentation import OtsuThresholdSegmentation from .filtering import OtsuThresholdImageFilter, ResampleScalarVolume from .converters import BSplineToDeformationField from .registration import ( BSplineDeformableRegistration, AffineRegistration, MultiResolutionAffineRegistration, RigidRegistration, LinearRegistration, ExpertAutomatedRegistration, ) nipype-1.7.0/nipype/interfaces/slicer/legacy/converters.py000066400000000000000000000027141413403311400237410ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class BSplineToDeformationFieldInputSpec(CommandLineInputSpec): tfm = File(exists=True, argstr="--tfm %s") refImage = File(exists=True, argstr="--refImage %s") defImage = traits.Either( traits.Bool, File(), hash_files=False, argstr="--defImage %s" ) class BSplineToDeformationFieldOutputSpec(TraitedSpec): defImage = File(exists=True) class BSplineToDeformationField(SEMLikeCommandLine): """title: BSpline to deformation field category: Legacy.Converters description: Create a dense deformation field from a bspline+bulk transform. 
version: 0.1.0.$Revision: 2104 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BSplineToDeformationField contributor: Andrey Fedorov (SPL, BWH) acknowledgements: This work is funded by NIH grants R01 CA111288 and U01 CA151261. """ input_spec = BSplineToDeformationFieldInputSpec output_spec = BSplineToDeformationFieldOutputSpec _cmd = "BSplineToDeformationField " _outputs_filenames = {"defImage": "defImage.nii"} nipype-1.7.0/nipype/interfaces/slicer/legacy/diffusion/000077500000000000000000000000001413403311400231575ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/legacy/diffusion/__init__.py000066400000000000000000000001161413403311400252660ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .denoising import DWIUnbiasedNonLocalMeansFilter nipype-1.7.0/nipype/interfaces/slicer/legacy/diffusion/denoising.py000066400000000000000000000074641413403311400255230ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class DWIUnbiasedNonLocalMeansFilterInputSpec(CommandLineInputSpec): rs = InputMultiPath( traits.Int, desc="The algorithm search for similar voxels in a neighborhood of this size (larger sizes than the default one are extremely slow).", sep=",", argstr="--rs %s", ) rc = InputMultiPath( traits.Int, desc="Similarity between blocks is measured using windows of this size.", sep=",", argstr="--rc %s", ) hp = traits.Float( desc="This parameter is related to noise; the larger the parameter, the more agressive the filtering. 
Should be near 1, and only values between 0.8 and 1.2 are allowed", argstr="--hp %f", ) ng = traits.Int( desc="The number of the closest gradients that are used to jointly filter a given gradient direction (a maximum of 5 is allowed).", argstr="--ng %d", ) re = InputMultiPath( traits.Int, desc="A neighborhood of this size is used to compute the statistics for noise estimation.", sep=",", argstr="--re %s", ) inputVolume = File(position=-2, desc="Input DWI volume.", exists=True, argstr="%s") outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output DWI volume.", argstr="%s", ) class DWIUnbiasedNonLocalMeansFilterOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Output DWI volume.", exists=True) class DWIUnbiasedNonLocalMeansFilter(SEMLikeCommandLine): """title: DWI Unbiased Non Local Means Filter category: Legacy.Diffusion.Denoising description: This module reduces noise (or unwanted detail) on a set of diffusion weighted images. For this, it filters the images using a Unbiased Non Local Means for Rician noise algorithm. It exploits not only the spatial redundancy, but the redundancy in similar gradient directions as well; it takes into account the N closest gradient directions to the direction being processed (a maximum of 5 gradient directions is allowed to keep a reasonable computational load, since we do not use neither similarity maps nor block-wise implementation). The noise parameter is automatically estimated in the same way as in the jointLMMSE module. A complete description of the algorithm may be found in: Antonio Tristan-Vega and Santiago Aja-Fernandez, DWI filtering using joint information for DTI and HARDI, Medical Image Analysis, Volume 14, Issue 2, Pages 205-218. 2010. Please, note that the execution of this filter is extremely slow, son only very conservative parameters (block size and search size as small as possible) should be used. Even so, its execution may take several hours. 
The advantage of this filter over joint LMMSE is its better preservation of edges and fine structures. version: 0.0.1.$Revision: 1 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/UnbiasedNonLocalMeansFilterForDWI contributor: Antonio Tristan Vega (UVa), Santiago Aja Fernandez (UVa) acknowledgements: Partially founded by grant number TEC2007-67073/TCM from the Comision Interministerial de Ciencia y Tecnologia (Spain). """ input_spec = DWIUnbiasedNonLocalMeansFilterInputSpec output_spec = DWIUnbiasedNonLocalMeansFilterOutputSpec _cmd = "DWIUnbiasedNonLocalMeansFilter " _outputs_filenames = {"outputVolume": "outputVolume.nii"} nipype-1.7.0/nipype/interfaces/slicer/legacy/diffusion/tests/000077500000000000000000000000001413403311400243215ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/legacy/diffusion/tests/__init__.py000066400000000000000000000000301413403311400264230ustar00rootroot00000000000000# -*- coding: utf-8 -*- test_auto_DWIUnbiasedNonLocalMeansFilter.py000066400000000000000000000030221413403311400345560ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/legacy/diffusion/tests# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..denoising import DWIUnbiasedNonLocalMeansFilter def test_DWIUnbiasedNonLocalMeansFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), hp=dict( argstr="--hp %f", ), inputVolume=dict( argstr="%s", extensions=None, position=-2, ), ng=dict( argstr="--ng %d", ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), rc=dict( argstr="--rc %s", sep=",", ), re=dict( argstr="--re %s", sep=",", ), rs=dict( argstr="--rs %s", sep=",", ), ) inputs = DWIUnbiasedNonLocalMeansFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DWIUnbiasedNonLocalMeansFilter_outputs(): 
output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = DWIUnbiasedNonLocalMeansFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/legacy/filtering.py000066400000000000000000000135211413403311400235300ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class OtsuThresholdImageFilterInputSpec(CommandLineInputSpec): insideValue = traits.Int( desc="The value assigned to pixels that are inside the computed threshold", argstr="--insideValue %d", ) outsideValue = traits.Int( desc="The value assigned to pixels that are outside the computed threshold", argstr="--outsideValue %d", ) numberOfBins = traits.Int( desc="This is an advanced parameter. The number of bins in the histogram used to model the probability mass function of the two intensity distributions. Small numbers of bins may result in a more conservative threshold. The default should suffice for most applications. 
Experimentation is the only way to see the effect of varying this parameter.", argstr="--numberOfBins %d", ) inputVolume = File( position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", argstr="%s", ) class OtsuThresholdImageFilterOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Output filtered", exists=True) class OtsuThresholdImageFilter(SEMLikeCommandLine): """title: Otsu Threshold Image Filter category: Legacy.Filtering description: This filter creates a binary thresholded image that separates an image into foreground and background components. The filter calculates the optimum threshold separating those two classes so that their combined spread (intra-class variance) is minimal (see http://en.wikipedia.org/wiki/Otsu%27s_method). Then the filter applies that threshold to the input image using the itkBinaryThresholdImageFilter. The numberOfHistogram bins can be set for the Otsu Calculator. The insideValue and outsideValue can be set for the BinaryThresholdImageFilter. The filter produces a labeled volume. The original reference is: N.Otsu, A threshold selection method from gray level histograms, IEEE Trans.Syst.ManCybern.SMC-9,62–66 1979. 
version: 0.1.0.$Revision: 19608 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/OtsuThresholdImageFilter contributor: Bill Lorensen (GE) acknowledgements: This command module was derived from Insight/Examples (copyright) Insight Software Consortium """ input_spec = OtsuThresholdImageFilterInputSpec output_spec = OtsuThresholdImageFilterOutputSpec _cmd = "OtsuThresholdImageFilter " _outputs_filenames = {"outputVolume": "outputVolume.nii"} class ResampleScalarVolumeInputSpec(CommandLineInputSpec): spacing = InputMultiPath( traits.Float, desc="Spacing along each dimension (0 means use input spacing)", sep=",", argstr="--spacing %s", ) interpolation = traits.Enum( "linear", "nearestNeighbor", "bspline", "hamming", "cosine", "welch", "lanczos", "blackman", desc="Sampling algorithm (linear, nearest neighbor, bspline(cubic) or windowed sinc). There are several sinc algorithms available as described in the following publication: Erik H. W. Meijering, Wiro J. Niessen, Josien P. W. Pluim, Max A. Viergever: Quantitative Comparison of Sinc-Approximating Kernels for Medical Image Interpolation. MICCAI 1999, pp. 210-217. Each window has a radius of 3;", argstr="--interpolation %s", ) InputVolume = File( position=-2, desc="Input volume to be resampled", exists=True, argstr="%s" ) OutputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Resampled Volume", argstr="%s", ) class ResampleScalarVolumeOutputSpec(TraitedSpec): OutputVolume = File(position=-1, desc="Resampled Volume", exists=True) class ResampleScalarVolume(SEMLikeCommandLine): """title: Resample Scalar Volume category: Legacy.Filtering description: Resampling an image is an important task in image analysis. It is especially important in the frame of image registration. This module implements image resampling through the use of itk Transforms. This module uses an Identity Transform. The resampling is controlled by the Output Spacing. 
"Resampling" is performed in space coordinates, not pixel/grid coordinates. It is quite important to ensure that image spacing is properly set on the images involved. The interpolator is required since the mapping from one space to the other will often require evaluation of the intensity of the image at non-grid positions. Several interpolators are available: linear, nearest neighbor, bspline and five flavors of sinc. The sinc interpolators, although more precise, are much slower than the linear and nearest neighbor interpolator. To resample label volumnes, nearest neighbor interpolation should be used exclusively. version: 0.1.0.$Revision: 20594 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ResampleVolume contributor: Bill Lorensen (GE) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. """ input_spec = ResampleScalarVolumeInputSpec output_spec = ResampleScalarVolumeOutputSpec _cmd = "ResampleScalarVolume " _outputs_filenames = {"OutputVolume": "OutputVolume.nii"} nipype-1.7.0/nipype/interfaces/slicer/legacy/registration.py000066400000000000000000000741361413403311400242700ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class BSplineDeformableRegistrationInputSpec(CommandLineInputSpec): iterations = traits.Int(desc="Number of iterations", argstr="--iterations %d") gridSize = traits.Int( desc="Number of grid points on interior of the fixed image. 
Larger grid sizes allow for finer registrations.", argstr="--gridSize %d", ) histogrambins = traits.Int( desc="Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a deformable registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.", argstr="--histogrambins %d", ) spatialsamples = traits.Int( desc="Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.", argstr="--spatialsamples %d", ) constrain = traits.Bool( desc="Constrain the deformation to the amount specified in Maximum Deformation", argstr="--constrain ", ) maximumDeformation = traits.Float( desc="If Constrain Deformation is checked, limit the deformation to this amount.", argstr="--maximumDeformation %f", ) default = traits.Int( desc="Default pixel value used if resampling a pixel outside of the volume.", argstr="--default %d", ) initialtransform = File( desc="Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. This transform should be an affine or rigid transform. It is used an a bulk transform for the BSpline. Optional.", exists=True, argstr="--initialtransform %s", ) FixedImageFileName = File( position=-2, desc="Fixed image to which to register", exists=True, argstr="%s" ) MovingImageFileName = File( position=-1, desc="Moving image", exists=True, argstr="%s" ) outputtransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Transform calculated that aligns the fixed and moving image. Maps positions from the fixed coordinate frame to the moving coordinate frame. 
Optional (specify an output transform or an output volume or both).", argstr="--outputtransform %s", ) outputwarp = traits.Either( traits.Bool, File(), hash_files=False, desc="Vector field that applies an equivalent warp as the BSpline. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional.", argstr="--outputwarp %s", ) resampledmovingfilename = traits.Either( traits.Bool, File(), hash_files=False, desc="Resampled moving image to fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", argstr="--resampledmovingfilename %s", ) class BSplineDeformableRegistrationOutputSpec(TraitedSpec): outputtransform = File( desc="Transform calculated that aligns the fixed and moving image. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", exists=True, ) outputwarp = File( desc="Vector field that applies an equivalent warp as the BSpline. Maps positions from the fixed coordinate frame to the moving coordinate frame. Optional.", exists=True, ) resampledmovingfilename = File( desc="Resampled moving image to fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", exists=True, ) class BSplineDeformableRegistration(SEMLikeCommandLine): """title: BSpline Deformable Registration category: Legacy.Registration description: Registers two images together using BSpline transform and mutual information. version: 0.1.0.$Revision: 19608 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/BSplineDeformableRegistration contributor: Bill Lorensen (GE) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
""" input_spec = BSplineDeformableRegistrationInputSpec output_spec = BSplineDeformableRegistrationOutputSpec _cmd = "BSplineDeformableRegistration " _outputs_filenames = { "resampledmovingfilename": "resampledmovingfilename.nii", "outputtransform": "outputtransform.txt", "outputwarp": "outputwarp.nrrd", } class AffineRegistrationInputSpec(CommandLineInputSpec): fixedsmoothingfactor = traits.Int( desc="Amount of smoothing applied to fixed image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", argstr="--fixedsmoothingfactor %d", ) movingsmoothingfactor = traits.Int( desc="Amount of smoothing applied to moving image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", argstr="--movingsmoothingfactor %d", ) histogrambins = traits.Int( desc="Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.", argstr="--histogrambins %d", ) spatialsamples = traits.Int( desc="Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.", argstr="--spatialsamples %d", ) iterations = traits.Int(desc="Number of iterations", argstr="--iterations %d") translationscale = traits.Float( desc="Relative scale of translations to rotations, i.e. a value of 100 means 10mm = 1 degree. (Actual scale used is 1/(TranslationScale^2)). 
This parameter is used to 'weight' or 'standardized' the transform parameters and their effect on the registration objective function.", argstr="--translationscale %f", ) initialtransform = File( desc="Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. Optional.", exists=True, argstr="--initialtransform %s", ) FixedImageFileName = File( position=-2, desc="Fixed image to which to register", exists=True, argstr="%s" ) MovingImageFileName = File( position=-1, desc="Moving image", exists=True, argstr="%s" ) outputtransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", argstr="--outputtransform %s", ) resampledmovingfilename = traits.Either( traits.Bool, File(), hash_files=False, desc="Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", argstr="--resampledmovingfilename %s", ) class AffineRegistrationOutputSpec(TraitedSpec): outputtransform = File( desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", exists=True, ) resampledmovingfilename = File( desc="Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", exists=True, ) class AffineRegistration(SEMLikeCommandLine): """title: Affine Registration category: Legacy.Registration description: Registers two images together using an affine transform and mutual information. This module is often used to align images of different subjects or images of the same subject from different modalities. 
This module can smooth images prior to registration to mitigate noise and improve convergence. Many of the registration parameters require a working knowledge of the algorithm although the default parameters are sufficient for many registration tasks. version: 0.1.0.$Revision: 19608 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/AffineRegistration contributor: Daniel Blezek (GE) acknowledgements: This module was developed by Daniel Blezek while at GE Research with contributions from Jim Miller. This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. """ input_spec = AffineRegistrationInputSpec output_spec = AffineRegistrationOutputSpec _cmd = "AffineRegistration " _outputs_filenames = { "resampledmovingfilename": "resampledmovingfilename.nii", "outputtransform": "outputtransform.txt", } class MultiResolutionAffineRegistrationInputSpec(CommandLineInputSpec): fixedImage = File( position=-2, desc="Image which defines the space into which the moving image is registered", exists=True, argstr="%s", ) movingImage = File( position=-1, desc="The transform goes from the fixed image's space into the moving image's space", exists=True, argstr="%s", ) resampledImage = traits.Either( traits.Bool, File(), hash_files=False, desc="Registration results", argstr="--resampledImage %s", ) saveTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Save the output transform from the registration", argstr="--saveTransform %s", ) fixedImageMask = File( desc="Label image which defines a mask of interest for the fixed image", exists=True, argstr="--fixedImageMask %s", ) fixedImageROI = traits.List( desc="Label image which defines a ROI of interest for the fixed image", argstr="--fixedImageROI %s", ) numIterations = traits.Int( desc="Number of iterations to run at each resolution level.", 
argstr="--numIterations %d", ) numLineIterations = traits.Int( desc="Number of iterations to run at each resolution level.", argstr="--numLineIterations %d", ) stepSize = traits.Float( desc="The maximum step size of the optimizer in voxels", argstr="--stepSize %f" ) stepTolerance = traits.Float( desc="The maximum step size of the optimizer in voxels", argstr="--stepTolerance %f", ) metricTolerance = traits.Float(argstr="--metricTolerance %f") class MultiResolutionAffineRegistrationOutputSpec(TraitedSpec): resampledImage = File(desc="Registration results", exists=True) saveTransform = File( desc="Save the output transform from the registration", exists=True ) class MultiResolutionAffineRegistration(SEMLikeCommandLine): """title: Robust Multiresolution Affine Registration category: Legacy.Registration description: Provides affine registration using multiple resolution levels and decomposed affine transforms. version: 0.1.0.$Revision: 2104 $(alpha) documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/MultiResolutionAffineRegistration contributor: Casey B Goodlett (Utah) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. """ input_spec = MultiResolutionAffineRegistrationInputSpec output_spec = MultiResolutionAffineRegistrationOutputSpec _cmd = "MultiResolutionAffineRegistration " _outputs_filenames = { "resampledImage": "resampledImage.nii", "saveTransform": "saveTransform.txt", } class RigidRegistrationInputSpec(CommandLineInputSpec): fixedsmoothingfactor = traits.Int( desc="Amount of smoothing applied to fixed image prior to registration. Default is 0 (none). Range is 0-5 (unitless). 
Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", argstr="--fixedsmoothingfactor %d", ) movingsmoothingfactor = traits.Int( desc="Amount of smoothing applied to moving image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", argstr="--movingsmoothingfactor %d", ) testingmode = traits.Bool( desc="Enable testing mode. Input transform will be used to construct floating image. The floating image will be ignored if passed.", argstr="--testingmode ", ) histogrambins = traits.Int( desc="Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.", argstr="--histogrambins %d", ) spatialsamples = traits.Int( desc="Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.", argstr="--spatialsamples %d", ) iterations = InputMultiPath( traits.Int, desc="Comma separated list of iterations. Must have the same number of elements as the learning rate.", sep=",", argstr="--iterations %s", ) learningrate = InputMultiPath( traits.Float, desc="Comma separated list of learning rates. Learning rate is a scale factor on the gradient of the registration objective function (gradient with respect to the parameters of the transformation) used to update the parameters of the transformation during optimization. Smaller values cause the optimizer to take smaller steps through the parameter space. 
Larger values are typically used early in the registration process to take large jumps in parameter space followed by smaller values to home in on the optimum value of the registration objective function. Default is: 0.01, 0.005, 0.0005, 0.0002. Must have the same number of elements as iterations.", sep=",", argstr="--learningrate %s", ) translationscale = traits.Float( desc="Relative scale of translations to rotations, i.e. a value of 100 means 10mm = 1 degree. (Actual scale used 1/(TranslationScale^2)). This parameter is used to 'weight' or 'standardized' the transform parameters and their effect on the registration objective function.", argstr="--translationscale %f", ) initialtransform = File( desc="Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. Optional.", exists=True, argstr="--initialtransform %s", ) FixedImageFileName = File( position=-2, desc="Fixed image to which to register", exists=True, argstr="%s" ) MovingImageFileName = File( position=-1, desc="Moving image", exists=True, argstr="%s" ) outputtransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", argstr="--outputtransform %s", ) resampledmovingfilename = traits.Either( traits.Bool, File(), hash_files=False, desc="Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", argstr="--resampledmovingfilename %s", ) class RigidRegistrationOutputSpec(TraitedSpec): outputtransform = File( desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. 
Optional (specify an output transform or an output volume or both).", exists=True, ) resampledmovingfilename = File( desc="Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", exists=True, ) class RigidRegistration(SEMLikeCommandLine): """title: Rigid Registration category: Legacy.Registration description: Registers two images together using a rigid transform and mutual information. This module was originally distributed as "Linear registration" but has been renamed to eliminate confusion with the "Affine registration" module. This module is often used to align images of different subjects or images of the same subject from different modalities. This module can smooth images prior to registration to mitigate noise and improve convergence. Many of the registration parameters require a working knowledge of the algorithm although the default parameters are sufficient for many registration tasks. version: 0.1.0.$Revision: 19608 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/RigidRegistration contributor: Daniel Blezek (GE) acknowledgements: This module was developed by Daniel Blezek while at GE Research with contributions from Jim Miller. This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. """ input_spec = RigidRegistrationInputSpec output_spec = RigidRegistrationOutputSpec _cmd = "RigidRegistration " _outputs_filenames = { "resampledmovingfilename": "resampledmovingfilename.nii", "outputtransform": "outputtransform.txt", } class LinearRegistrationInputSpec(CommandLineInputSpec): fixedsmoothingfactor = traits.Int( desc="Amount of smoothing applied to fixed image prior to registration. Default is 0 (none). Range is 0-5 (unitless). 
Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", argstr="--fixedsmoothingfactor %d", ) movingsmoothingfactor = traits.Int( desc="Amount of smoothing applied to moving image prior to registration. Default is 0 (none). Range is 0-5 (unitless). Consider smoothing the input data if there is considerable amounts of noise or the noise pattern in the fixed and moving images is very different.", argstr="--movingsmoothingfactor %d", ) histogrambins = traits.Int( desc="Number of histogram bins to use for Mattes Mutual Information. Reduce the number of bins if a registration fails. If the number of bins is too large, the estimated PDFs will be a field of impulses and will inhibit reliable registration estimation.", argstr="--histogrambins %d", ) spatialsamples = traits.Int( desc="Number of spatial samples to use in estimating Mattes Mutual Information. Larger values yield more accurate PDFs and improved registration quality.", argstr="--spatialsamples %d", ) iterations = InputMultiPath( traits.Int, desc="Comma separated list of iterations. Must have the same number of elements as the learning rate.", sep=",", argstr="--iterations %s", ) learningrate = InputMultiPath( traits.Float, desc="Comma separated list of learning rates. Learning rate is a scale factor on the gradient of the registration objective function (gradient with respect to the parameters of the transformation) used to update the parameters of the transformation during optimization. Smaller values cause the optimizer to take smaller steps through the parameter space. Larger values are typically used early in the registration process to take large jumps in parameter space followed by smaller values to home in on the optimum value of the registration objective function. Default is: 0.01, 0.005, 0.0005, 0.0002. 
Must have the same number of elements as iterations.", sep=",", argstr="--learningrate %s", ) translationscale = traits.Float( desc="Relative scale of translations to rotations, i.e. a value of 100 means 10mm = 1 degree. (Actual scale used 1/(TranslationScale^2)). This parameter is used to 'weight' or 'standardized' the transform parameters and their effect on the registration objective function.", argstr="--translationscale %f", ) initialtransform = File( desc="Initial transform for aligning the fixed and moving image. Maps positions in the fixed coordinate frame to positions in the moving coordinate frame. Optional.", exists=True, argstr="--initialtransform %s", ) FixedImageFileName = File( position=-2, desc="Fixed image to which to register", exists=True, argstr="%s" ) MovingImageFileName = File( position=-1, desc="Moving image", exists=True, argstr="%s" ) outputtransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", argstr="--outputtransform %s", ) resampledmovingfilename = traits.Either( traits.Bool, File(), hash_files=False, desc="Resampled moving image to the fixed image coordinate frame. Optional (specify an output transform or an output volume or both).", argstr="--resampledmovingfilename %s", ) class LinearRegistrationOutputSpec(TraitedSpec): outputtransform = File( desc="Transform calculated that aligns the fixed and moving image. Maps positions in the fixed coordinate frame to the moving coordinate frame. Optional (specify an output transform or an output volume or both).", exists=True, ) resampledmovingfilename = File( desc="Resampled moving image to the fixed image coordinate frame. 
Optional (specify an output transform or an output volume or both).", exists=True, ) class LinearRegistration(SEMLikeCommandLine): """title: Linear Registration category: Legacy.Registration description: Registers two images together using a rigid transform and mutual information. version: 0.1.0.$Revision: 19608 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/LinearRegistration contributor: Daniel Blezek (GE) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. """ input_spec = LinearRegistrationInputSpec output_spec = LinearRegistrationOutputSpec _cmd = "LinearRegistration " _outputs_filenames = { "resampledmovingfilename": "resampledmovingfilename.nii", "outputtransform": "outputtransform.txt", } class ExpertAutomatedRegistrationInputSpec(CommandLineInputSpec): fixedImage = File( position=-2, desc="Image which defines the space into which the moving image is registered", exists=True, argstr="%s", ) movingImage = File( position=-1, desc="The transform goes from the fixed image's space into the moving image's space", exists=True, argstr="%s", ) resampledImage = traits.Either( traits.Bool, File(), hash_files=False, desc="Registration results", argstr="--resampledImage %s", ) loadTransform = File( desc="Load a transform that is immediately applied to the moving image", exists=True, argstr="--loadTransform %s", ) saveTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Save the transform that results from registration", argstr="--saveTransform %s", ) initialization = traits.Enum( "None", "Landmarks", "ImageCenters", "CentersOfMass", "SecondMoments", desc="Method to prime the registration process", argstr="--initialization %s", ) registration = traits.Enum( "None", "Initial", "Rigid", "Affine", "BSpline", "PipelineRigid", "PipelineAffine", 
"PipelineBSpline", desc="Method for the registration process", argstr="--registration %s", ) metric = traits.Enum( "MattesMI", "NormCorr", "MeanSqrd", desc="Method to quantify image match", argstr="--metric %s", ) expectedOffset = traits.Float( desc="Expected misalignment after initialization", argstr="--expectedOffset %f" ) expectedRotation = traits.Float( desc="Expected misalignment after initialization", argstr="--expectedRotation %f", ) expectedScale = traits.Float( desc="Expected misalignment after initialization", argstr="--expectedScale %f" ) expectedSkew = traits.Float( desc="Expected misalignment after initialization", argstr="--expectedSkew %f" ) verbosityLevel = traits.Enum( "Silent", "Standard", "Verbose", desc="Level of detail of reporting progress", argstr="--verbosityLevel %s", ) sampleFromOverlap = traits.Bool( desc="Limit metric evaluation to the fixed image region overlapped by the moving image", argstr="--sampleFromOverlap ", ) fixedImageMask = File( desc="Image which defines a mask for the fixed image", exists=True, argstr="--fixedImageMask %s", ) randomNumberSeed = traits.Int( desc="Seed to generate a consistent random number sequence", argstr="--randomNumberSeed %d", ) numberOfThreads = traits.Int( desc="Number of CPU threads to use", argstr="--numberOfThreads %d" ) minimizeMemory = traits.Bool( desc="Reduce the amount of memory required at the cost of increased computation time", argstr="--minimizeMemory ", ) interpolation = traits.Enum( "NearestNeighbor", "Linear", "BSpline", desc="Method for interpolation within the optimization process", argstr="--interpolation %s", ) fixedLandmarks = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="Ordered list of landmarks in the fixed image", argstr="--fixedLandmarks %s...", ) movingLandmarks = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="Ordered list of landmarks in the moving image", argstr="--movingLandmarks %s...", ) rigidMaxIterations = traits.Int( 
desc="Maximum number of rigid optimization iterations", argstr="--rigidMaxIterations %d", ) rigidSamplingRatio = traits.Float( desc="Portion of the image to use in computing the metric during rigid registration", argstr="--rigidSamplingRatio %f", ) affineMaxIterations = traits.Int( desc="Maximum number of affine optimization iterations", argstr="--affineMaxIterations %d", ) affineSamplingRatio = traits.Float( desc="Portion of the image to use in computing the metric during affine registration", argstr="--affineSamplingRatio %f", ) bsplineMaxIterations = traits.Int( desc="Maximum number of bspline optimization iterations", argstr="--bsplineMaxIterations %d", ) bsplineSamplingRatio = traits.Float( desc="Portion of the image to use in computing the metric during BSpline registration", argstr="--bsplineSamplingRatio %f", ) controlPointSpacing = traits.Int( desc="Number of pixels between control points", argstr="--controlPointSpacing %d", ) class ExpertAutomatedRegistrationOutputSpec(TraitedSpec): resampledImage = File(desc="Registration results", exists=True) saveTransform = File( desc="Save the transform that results from registration", exists=True ) class ExpertAutomatedRegistration(SEMLikeCommandLine): """title: Expert Automated Registration category: Legacy.Registration description: Provides rigid, affine, and BSpline registration methods via a simple GUI version: 0.1.0.$Revision: 2104 $(alpha) documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ExpertAutomatedRegistration contributor: Stephen R Aylward (Kitware), Casey B Goodlett (Kitware) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
""" input_spec = ExpertAutomatedRegistrationInputSpec output_spec = ExpertAutomatedRegistrationOutputSpec _cmd = "ExpertAutomatedRegistration " _outputs_filenames = { "resampledImage": "resampledImage.nii", "saveTransform": "saveTransform.txt", } nipype-1.7.0/nipype/interfaces/slicer/legacy/segmentation.py000066400000000000000000000062341413403311400242450ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class OtsuThresholdSegmentationInputSpec(CommandLineInputSpec): brightObjects = traits.Bool( desc="Segmenting bright objects on a dark background or dark objects on a bright background.", argstr="--brightObjects ", ) numberOfBins = traits.Int( desc="This is an advanced parameter. The number of bins in the histogram used to model the probability mass function of the two intensity distributions. Small numbers of bins may result in a more conservative threshold. The default should suffice for most applications. Experimentation is the only way to see the effect of varying this parameter.", argstr="--numberOfBins %d", ) faceConnected = traits.Bool( desc="This is an advanced parameter. Adjacent voxels are face connected. This affects the connected component algorithm. If this parameter is false, more regions are likely to be identified.", argstr="--faceConnected ", ) minimumObjectSize = traits.Int( desc="Minimum size of object to retain. 
This parameter can be used to get rid of small regions in noisy images.", argstr="--minimumObjectSize %d", ) inputVolume = File( position=-2, desc="Input volume to be segmented", exists=True, argstr="%s" ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", argstr="%s", ) class OtsuThresholdSegmentationOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Output filtered", exists=True) class OtsuThresholdSegmentation(SEMLikeCommandLine): """title: Otsu Threshold Segmentation category: Legacy.Segmentation description: This filter creates a labeled image from a grayscale image. First, it calculates an optimal threshold that separates the image into foreground and background. This threshold separates those two classes so that their intra-class variance is minimal (see http://en.wikipedia.org/wiki/Otsu%27s_method). Then the filter runs a connected component algorithm to generate unique labels for each connected region of the foreground. Finally, the resulting image is relabeled to provide consecutive numbering. version: 1.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/OtsuThresholdSegmentation contributor: Bill Lorensen (GE) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
""" input_spec = OtsuThresholdSegmentationInputSpec output_spec = OtsuThresholdSegmentationOutputSpec _cmd = "OtsuThresholdSegmentation " _outputs_filenames = {"outputVolume": "outputVolume.nii"} nipype-1.7.0/nipype/interfaces/slicer/legacy/tests/000077500000000000000000000000001413403311400223335ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/legacy/tests/__init__.py000066400000000000000000000000301413403311400244350ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/slicer/legacy/tests/test_auto_AffineRegistration.py000066400000000000000000000040441413403311400305610ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import AffineRegistration def test_AffineRegistration_inputs(): input_map = dict( FixedImageFileName=dict( argstr="%s", extensions=None, position=-2, ), MovingImageFileName=dict( argstr="%s", extensions=None, position=-1, ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fixedsmoothingfactor=dict( argstr="--fixedsmoothingfactor %d", ), histogrambins=dict( argstr="--histogrambins %d", ), initialtransform=dict( argstr="--initialtransform %s", extensions=None, ), iterations=dict( argstr="--iterations %d", ), movingsmoothingfactor=dict( argstr="--movingsmoothingfactor %d", ), outputtransform=dict( argstr="--outputtransform %s", hash_files=False, ), resampledmovingfilename=dict( argstr="--resampledmovingfilename %s", hash_files=False, ), spatialsamples=dict( argstr="--spatialsamples %d", ), translationscale=dict( argstr="--translationscale %f", ), ) inputs = AffineRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_AffineRegistration_outputs(): output_map = dict( outputtransform=dict( extensions=None, ), resampledmovingfilename=dict( extensions=None, ), ) outputs = AffineRegistration.output_spec() for 
key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineDeformableRegistration.py000066400000000000000000000044321413403311400327070ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import BSplineDeformableRegistration def test_BSplineDeformableRegistration_inputs(): input_map = dict( FixedImageFileName=dict( argstr="%s", extensions=None, position=-2, ), MovingImageFileName=dict( argstr="%s", extensions=None, position=-1, ), args=dict( argstr="%s", ), constrain=dict( argstr="--constrain ", ), default=dict( argstr="--default %d", ), environ=dict( nohash=True, usedefault=True, ), gridSize=dict( argstr="--gridSize %d", ), histogrambins=dict( argstr="--histogrambins %d", ), initialtransform=dict( argstr="--initialtransform %s", extensions=None, ), iterations=dict( argstr="--iterations %d", ), maximumDeformation=dict( argstr="--maximumDeformation %f", ), outputtransform=dict( argstr="--outputtransform %s", hash_files=False, ), outputwarp=dict( argstr="--outputwarp %s", hash_files=False, ), resampledmovingfilename=dict( argstr="--resampledmovingfilename %s", hash_files=False, ), spatialsamples=dict( argstr="--spatialsamples %d", ), ) inputs = BSplineDeformableRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BSplineDeformableRegistration_outputs(): output_map = dict( outputtransform=dict( extensions=None, ), outputwarp=dict( extensions=None, ), resampledmovingfilename=dict( extensions=None, ), ) outputs = BSplineDeformableRegistration.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/slicer/legacy/tests/test_auto_BSplineToDeformationField.py000066400000000000000000000022621413403311400317710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..converters import BSplineToDeformationField def test_BSplineToDeformationField_inputs(): input_map = dict( args=dict( argstr="%s", ), defImage=dict( argstr="--defImage %s", hash_files=False, ), environ=dict( nohash=True, usedefault=True, ), refImage=dict( argstr="--refImage %s", extensions=None, ), tfm=dict( argstr="--tfm %s", extensions=None, ), ) inputs = BSplineToDeformationField.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BSplineToDeformationField_outputs(): output_map = dict( defImage=dict( extensions=None, ), ) outputs = BSplineToDeformationField.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/legacy/tests/test_auto_ExpertAutomatedRegistration.py000066400000000000000000000067231413403311400325120ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import ExpertAutomatedRegistration def test_ExpertAutomatedRegistration_inputs(): input_map = dict( affineMaxIterations=dict( argstr="--affineMaxIterations %d", ), affineSamplingRatio=dict( argstr="--affineSamplingRatio %f", ), args=dict( argstr="%s", ), bsplineMaxIterations=dict( argstr="--bsplineMaxIterations %d", ), bsplineSamplingRatio=dict( argstr="--bsplineSamplingRatio %f", ), controlPointSpacing=dict( argstr="--controlPointSpacing %d", ), environ=dict( nohash=True, usedefault=True, ), expectedOffset=dict( argstr="--expectedOffset %f", ), expectedRotation=dict( argstr="--expectedRotation %f", ), expectedScale=dict( argstr="--expectedScale %f", ), 
expectedSkew=dict( argstr="--expectedSkew %f", ), fixedImage=dict( argstr="%s", extensions=None, position=-2, ), fixedImageMask=dict( argstr="--fixedImageMask %s", extensions=None, ), fixedLandmarks=dict( argstr="--fixedLandmarks %s...", ), initialization=dict( argstr="--initialization %s", ), interpolation=dict( argstr="--interpolation %s", ), loadTransform=dict( argstr="--loadTransform %s", extensions=None, ), metric=dict( argstr="--metric %s", ), minimizeMemory=dict( argstr="--minimizeMemory ", ), movingImage=dict( argstr="%s", extensions=None, position=-1, ), movingLandmarks=dict( argstr="--movingLandmarks %s...", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), randomNumberSeed=dict( argstr="--randomNumberSeed %d", ), registration=dict( argstr="--registration %s", ), resampledImage=dict( argstr="--resampledImage %s", hash_files=False, ), rigidMaxIterations=dict( argstr="--rigidMaxIterations %d", ), rigidSamplingRatio=dict( argstr="--rigidSamplingRatio %f", ), sampleFromOverlap=dict( argstr="--sampleFromOverlap ", ), saveTransform=dict( argstr="--saveTransform %s", hash_files=False, ), verbosityLevel=dict( argstr="--verbosityLevel %s", ), ) inputs = ExpertAutomatedRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ExpertAutomatedRegistration_outputs(): output_map = dict( resampledImage=dict( extensions=None, ), saveTransform=dict( extensions=None, ), ) outputs = ExpertAutomatedRegistration.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/legacy/tests/test_auto_LinearRegistration.py000066400000000000000000000042341413403311400306040ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import LinearRegistration def 
test_LinearRegistration_inputs(): input_map = dict( FixedImageFileName=dict( argstr="%s", extensions=None, position=-2, ), MovingImageFileName=dict( argstr="%s", extensions=None, position=-1, ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fixedsmoothingfactor=dict( argstr="--fixedsmoothingfactor %d", ), histogrambins=dict( argstr="--histogrambins %d", ), initialtransform=dict( argstr="--initialtransform %s", extensions=None, ), iterations=dict( argstr="--iterations %s", sep=",", ), learningrate=dict( argstr="--learningrate %s", sep=",", ), movingsmoothingfactor=dict( argstr="--movingsmoothingfactor %d", ), outputtransform=dict( argstr="--outputtransform %s", hash_files=False, ), resampledmovingfilename=dict( argstr="--resampledmovingfilename %s", hash_files=False, ), spatialsamples=dict( argstr="--spatialsamples %d", ), translationscale=dict( argstr="--translationscale %f", ), ) inputs = LinearRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_LinearRegistration_outputs(): output_map = dict( outputtransform=dict( extensions=None, ), resampledmovingfilename=dict( extensions=None, ), ) outputs = LinearRegistration.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/legacy/tests/test_auto_MultiResolutionAffineRegistration.py000066400000000000000000000040341413403311400336570ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import MultiResolutionAffineRegistration def test_MultiResolutionAffineRegistration_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fixedImage=dict( argstr="%s", extensions=None, position=-2, ), fixedImageMask=dict( argstr="--fixedImageMask 
%s", extensions=None, ), fixedImageROI=dict( argstr="--fixedImageROI %s", ), metricTolerance=dict( argstr="--metricTolerance %f", ), movingImage=dict( argstr="%s", extensions=None, position=-1, ), numIterations=dict( argstr="--numIterations %d", ), numLineIterations=dict( argstr="--numLineIterations %d", ), resampledImage=dict( argstr="--resampledImage %s", hash_files=False, ), saveTransform=dict( argstr="--saveTransform %s", hash_files=False, ), stepSize=dict( argstr="--stepSize %f", ), stepTolerance=dict( argstr="--stepTolerance %f", ), ) inputs = MultiResolutionAffineRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MultiResolutionAffineRegistration_outputs(): output_map = dict( resampledImage=dict( extensions=None, ), saveTransform=dict( extensions=None, ), ) outputs = MultiResolutionAffineRegistration.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdImageFilter.py000066400000000000000000000025731413403311400317230ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..filtering import OtsuThresholdImageFilter def test_OtsuThresholdImageFilter_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="%s", extensions=None, position=-2, ), insideValue=dict( argstr="--insideValue %d", ), numberOfBins=dict( argstr="--numberOfBins %d", ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), outsideValue=dict( argstr="--outsideValue %d", ), ) inputs = OtsuThresholdImageFilter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def 
test_OtsuThresholdImageFilter_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = OtsuThresholdImageFilter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/legacy/tests/test_auto_OtsuThresholdSegmentation.py000066400000000000000000000027351413403311400321700ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..segmentation import OtsuThresholdSegmentation def test_OtsuThresholdSegmentation_inputs(): input_map = dict( args=dict( argstr="%s", ), brightObjects=dict( argstr="--brightObjects ", ), environ=dict( nohash=True, usedefault=True, ), faceConnected=dict( argstr="--faceConnected ", ), inputVolume=dict( argstr="%s", extensions=None, position=-2, ), minimumObjectSize=dict( argstr="--minimumObjectSize %d", ), numberOfBins=dict( argstr="--numberOfBins %d", ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), ) inputs = OtsuThresholdSegmentation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_OtsuThresholdSegmentation_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = OtsuThresholdSegmentation.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/legacy/tests/test_auto_ResampleScalarVolume.py000066400000000000000000000024501413403311400310630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..filtering import ResampleScalarVolume def test_ResampleScalarVolume_inputs(): input_map = dict( InputVolume=dict( argstr="%s", extensions=None, position=-2, ), OutputVolume=dict( argstr="%s", 
hash_files=False, position=-1, ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), interpolation=dict( argstr="--interpolation %s", ), spacing=dict( argstr="--spacing %s", sep=",", ), ) inputs = ResampleScalarVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ResampleScalarVolume_outputs(): output_map = dict( OutputVolume=dict( extensions=None, position=-1, ), ) outputs = ResampleScalarVolume.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/legacy/tests/test_auto_RigidRegistration.py000066400000000000000000000043411413403311400304270ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..registration import RigidRegistration def test_RigidRegistration_inputs(): input_map = dict( FixedImageFileName=dict( argstr="%s", extensions=None, position=-2, ), MovingImageFileName=dict( argstr="%s", extensions=None, position=-1, ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fixedsmoothingfactor=dict( argstr="--fixedsmoothingfactor %d", ), histogrambins=dict( argstr="--histogrambins %d", ), initialtransform=dict( argstr="--initialtransform %s", extensions=None, ), iterations=dict( argstr="--iterations %s", sep=",", ), learningrate=dict( argstr="--learningrate %s", sep=",", ), movingsmoothingfactor=dict( argstr="--movingsmoothingfactor %d", ), outputtransform=dict( argstr="--outputtransform %s", hash_files=False, ), resampledmovingfilename=dict( argstr="--resampledmovingfilename %s", hash_files=False, ), spatialsamples=dict( argstr="--spatialsamples %d", ), testingmode=dict( argstr="--testingmode ", ), translationscale=dict( argstr="--translationscale %f", ), ) inputs = RigidRegistration.input_spec() for key, metadata 
in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RigidRegistration_outputs(): output_map = dict( outputtransform=dict( extensions=None, ), resampledmovingfilename=dict( extensions=None, ), ) outputs = RigidRegistration.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/quantification/000077500000000000000000000000001413403311400227435ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/quantification/__init__.py000066400000000000000000000002451413403311400250550ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .changequantification import IntensityDifferenceMetric from .petstandarduptakevaluecomputation import PETStandardUptakeValueComputation nipype-1.7.0/nipype/interfaces/slicer/quantification/changequantification.py000066400000000000000000000052351413403311400275060ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class IntensityDifferenceMetricInputSpec(CommandLineInputSpec): sensitivityThreshold = traits.Float( desc="This parameter should be between 0 and 1, and defines how sensitive the metric should be to the intensity changes.", argstr="--sensitivityThreshold %f", ) changingBandSize = traits.Int( desc="How far (in mm) from the boundary of the segmentation should the intensity changes be considered.", argstr="--changingBandSize %d", ) baselineVolume = File( position=-4, desc="Baseline volume to be compared to", exists=True, argstr="%s" ) baselineSegmentationVolume = File( 
position=-3, desc="Label volume that contains segmentation of the structure of interest in the baseline volume.", exists=True, argstr="%s", ) followupVolume = File( position=-2, desc="Followup volume to be compare to the baseline", exists=True, argstr="%s", ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output volume to keep the results of change quantification.", argstr="%s", ) reportFileName = traits.Either( traits.Bool, File(), hash_files=False, desc="Report file name", argstr="--reportFileName %s", ) class IntensityDifferenceMetricOutputSpec(TraitedSpec): outputVolume = File( position=-1, desc="Output volume to keep the results of change quantification.", exists=True, ) reportFileName = File(desc="Report file name", exists=True) class IntensityDifferenceMetric(SEMLikeCommandLine): """title: Intensity Difference Change Detection (FAST) category: Quantification.ChangeQuantification description: Quantifies the changes between two spatially aligned images based on the pixel-wise difference of image intensities. 
version: 0.1 contributor: Andrey Fedorov acknowledgements: """ input_spec = IntensityDifferenceMetricInputSpec output_spec = IntensityDifferenceMetricOutputSpec _cmd = "IntensityDifferenceMetric " _outputs_filenames = { "outputVolume": "outputVolume.nii", "reportFileName": "reportFileName", } nipype-1.7.0/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py000066400000000000000000000066711413403311400323720ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class PETStandardUptakeValueComputationInputSpec(CommandLineInputSpec): petDICOMPath = Directory( desc="Input path to a directory containing a PET volume containing DICOM header information for SUV computation", exists=True, argstr="--petDICOMPath %s", ) petVolume = File( desc="Input PET volume for SUVbw computation (must be the same volume as pointed to by the DICOM path!).", exists=True, argstr="--petVolume %s", ) labelMap = File( desc="Input label volume containing the volumes of interest", exists=True, argstr="--labelMap %s", ) color = File( desc="Color table to to map labels to colors and names", exists=True, argstr="--color %s", ) csvFile = traits.Either( traits.Bool, File(), hash_files=False, desc="A file holding the output SUV values in comma separated lines, one per label. 
Optional.", argstr="--csvFile %s", ) OutputLabel = traits.Str( desc="List of labels for which SUV values were computed", argstr="--OutputLabel %s", ) OutputLabelValue = traits.Str( desc="List of label values for which SUV values were computed", argstr="--OutputLabelValue %s", ) SUVMax = traits.Str(desc="SUV max for each label", argstr="--SUVMax %s") SUVMean = traits.Str(desc="SUV mean for each label", argstr="--SUVMean %s") SUVMin = traits.Str(desc="SUV minimum for each label", argstr="--SUVMin %s") class PETStandardUptakeValueComputationOutputSpec(TraitedSpec): csvFile = File( desc="A file holding the output SUV values in comma separated lines, one per label. Optional.", exists=True, ) class PETStandardUptakeValueComputation(SEMLikeCommandLine): """title: PET Standard Uptake Value Computation category: Quantification description: Computes the standardized uptake value based on body weight. Takes an input PET image in DICOM and NRRD format (DICOM header must contain Radiopharmaceutical parameters). Produces a CSV file that contains patientID, studyDate, dose, labelID, suvmin, suvmax, suvmean, labelName for each volume of interest. It also displays some of the information as output strings in the GUI, the CSV file is optional in that case. The CSV file is appended to on each execution of the CLI. version: 0.1.0.$Revision: 8595 $(alpha) documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ComputeSUVBodyWeight contributor: Wendy Plesniak (SPL, BWH), Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH) acknowledgements: This work is funded by the Harvard Catalyst, and the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
""" input_spec = PETStandardUptakeValueComputationInputSpec output_spec = PETStandardUptakeValueComputationOutputSpec _cmd = "PETStandardUptakeValueComputation " _outputs_filenames = {"csvFile": "csvFile.csv"} nipype-1.7.0/nipype/interfaces/slicer/quantification/tests/000077500000000000000000000000001413403311400241055ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/quantification/tests/__init__.py000066400000000000000000000000301413403311400262070ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/slicer/quantification/tests/test_auto_IntensityDifferenceMetric.py000066400000000000000000000034111413403311400336520ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..changequantification import IntensityDifferenceMetric def test_IntensityDifferenceMetric_inputs(): input_map = dict( args=dict( argstr="%s", ), baselineSegmentationVolume=dict( argstr="%s", extensions=None, position=-3, ), baselineVolume=dict( argstr="%s", extensions=None, position=-4, ), changingBandSize=dict( argstr="--changingBandSize %d", ), environ=dict( nohash=True, usedefault=True, ), followupVolume=dict( argstr="%s", extensions=None, position=-2, ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), reportFileName=dict( argstr="--reportFileName %s", hash_files=False, ), sensitivityThreshold=dict( argstr="--sensitivityThreshold %f", ), ) inputs = IntensityDifferenceMetric.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_IntensityDifferenceMetric_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), reportFileName=dict( extensions=None, ), ) outputs = IntensityDifferenceMetric.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
test_auto_PETStandardUptakeValueComputation.py000066400000000000000000000034171413403311400351770ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/quantification/tests# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..petstandarduptakevaluecomputation import PETStandardUptakeValueComputation def test_PETStandardUptakeValueComputation_inputs(): input_map = dict( OutputLabel=dict( argstr="--OutputLabel %s", ), OutputLabelValue=dict( argstr="--OutputLabelValue %s", ), SUVMax=dict( argstr="--SUVMax %s", ), SUVMean=dict( argstr="--SUVMean %s", ), SUVMin=dict( argstr="--SUVMin %s", ), args=dict( argstr="%s", ), color=dict( argstr="--color %s", extensions=None, ), csvFile=dict( argstr="--csvFile %s", hash_files=False, ), environ=dict( nohash=True, usedefault=True, ), labelMap=dict( argstr="--labelMap %s", extensions=None, ), petDICOMPath=dict( argstr="--petDICOMPath %s", ), petVolume=dict( argstr="--petVolume %s", extensions=None, ), ) inputs = PETStandardUptakeValueComputation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_PETStandardUptakeValueComputation_outputs(): output_map = dict( csvFile=dict( extensions=None, ), ) outputs = PETStandardUptakeValueComputation.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/registration/000077500000000000000000000000001413403311400224375ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/registration/__init__.py000066400000000000000000000003311413403311400245450ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .specialized import ( ACPCTransform, FiducialRegistration, VBRAINSDemonWarp, BRAINSDemonWarp, ) from .brainsresample import BRAINSResample from .brainsfit import BRAINSFit 
nipype-1.7.0/nipype/interfaces/slicer/registration/brainsfit.py000066400000000000000000000521411413403311400247750ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class BRAINSFitInputSpec(CommandLineInputSpec): fixedVolume = File( desc="The fixed image for registration by mutual information optimization.", exists=True, argstr="--fixedVolume %s", ) movingVolume = File( desc="The moving image for registration by mutual information optimization.", exists=True, argstr="--movingVolume %s", ) bsplineTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS BSpline", argstr="--bsplineTransform %s", ) linearTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS ---NOT--- BSpline", argstr="--linearTransform %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="(optional) Output image for registration. NOTE: You must select either the outputTransform or the outputVolume option.", argstr="--outputVolume %s", ) initialTransform = File( desc="Filename of transform used to initialize the registration. 
This CAN NOT be used with either CenterOfHeadLAlign, MomentsAlign, GeometryAlign, or initialTransform file.", exists=True, argstr="--initialTransform %s", ) initializeTransformMode = traits.Enum( "Off", "useMomentsAlign", "useCenterOfHeadAlign", "useGeometryAlign", "useCenterOfROIAlign", desc="Determine how to initialize the transform center. GeometryAlign on assumes that the center of the voxel lattice of the images represent similar structures. MomentsAlign assumes that the center of mass of the images represent similar structures. useCenterOfHeadAlign attempts to use the top of head and shape of neck to drive a center of mass estimate. Off assumes that the physical space of the images are close, and that centering in terms of the image Origins is a good starting point. This flag is mutually exclusive with the initialTransform flag.", argstr="--initializeTransformMode %s", ) useRigid = traits.Bool( desc="Perform a rigid registration as part of the sequential registration steps. This family of options superceeds the use of transformType if any of them are set.", argstr="--useRigid ", ) useScaleVersor3D = traits.Bool( desc="Perform a ScaleVersor3D registration as part of the sequential registration steps. This family of options superceeds the use of transformType if any of them are set.", argstr="--useScaleVersor3D ", ) useScaleSkewVersor3D = traits.Bool( desc="Perform a ScaleSkewVersor3D registration as part of the sequential registration steps. This family of options superceeds the use of transformType if any of them are set.", argstr="--useScaleSkewVersor3D ", ) useAffine = traits.Bool( desc="Perform an Affine registration as part of the sequential registration steps. This family of options superceeds the use of transformType if any of them are set.", argstr="--useAffine ", ) useBSpline = traits.Bool( desc="Perform a BSpline registration as part of the sequential registration steps. 
This family of options superceeds the use of transformType if any of them are set.", argstr="--useBSpline ", ) numberOfSamples = traits.Int( desc="The number of voxels sampled for mutual information computation. Increase this for a slower, more careful fit. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation.", argstr="--numberOfSamples %d", ) splineGridSize = InputMultiPath( traits.Int, desc="The number of subdivisions of the BSpline Grid to be centered on the image space. Each dimension must have at least 3 subdivisions for the BSpline to be correctly computed. ", sep=",", argstr="--splineGridSize %s", ) numberOfIterations = InputMultiPath( traits.Int, desc="The maximum number of iterations to try before failing to converge. Use an explicit limit like 500 or 1000 to manage risk of divergence", sep=",", argstr="--numberOfIterations %s", ) maskProcessingMode = traits.Enum( "NOMASK", "ROIAUTO", "ROI", desc="What mode to use for using the masks. If ROIAUTO is choosen, then the mask is implicitly defined using a otsu forground and hole filling algorithm. 
The Region Of Interest mode (choose ROI) uses the masks to define what parts of the image should be used for computing the transform.", argstr="--maskProcessingMode %s", ) fixedBinaryVolume = File( desc="Fixed Image binary mask volume, ONLY FOR MANUAL ROI mode.", exists=True, argstr="--fixedBinaryVolume %s", ) movingBinaryVolume = File( desc="Moving Image binary mask volume, ONLY FOR MANUAL ROI mode.", exists=True, argstr="--movingBinaryVolume %s", ) outputFixedVolumeROI = traits.Either( traits.Bool, File(), hash_files=False, desc="The ROI automatically found in fixed image, ONLY FOR ROIAUTO mode.", argstr="--outputFixedVolumeROI %s", ) outputMovingVolumeROI = traits.Either( traits.Bool, File(), hash_files=False, desc="The ROI automatically found in moving image, ONLY FOR ROIAUTO mode.", argstr="--outputMovingVolumeROI %s", ) outputVolumePixelType = traits.Enum( "float", "short", "ushort", "int", "uint", "uchar", desc="The output image Pixel Type is the scalar datatype for representation of the Output Volume.", argstr="--outputVolumePixelType %s", ) backgroundFillValue = traits.Float( desc="Background fill value for output image.", argstr="--backgroundFillValue %f", ) maskInferiorCutOffFromCenter = traits.Float( desc="For use with --useCenterOfHeadAlign (and --maskProcessingMode ROIAUTO): the cut-off below the image centers, in millimeters, ", argstr="--maskInferiorCutOffFromCenter %f", ) scaleOutputValues = traits.Bool( desc="If true, and the voxel values do not fit within the minimum and maximum values of the desired outputVolumePixelType, then linearly scale the min/max output image voxel values to fit within the min/max range of the outputVolumePixelType.", argstr="--scaleOutputValues ", ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", "ResampleInPlace", "BSpline", "WindowedSinc", "Hamming", "Cosine", "Welch", "Lanczos", "Blackman", desc="Type of interpolation to be used when applying transform to moving volume. 
Options are Linear, NearestNeighbor, BSpline, WindowedSinc, or ResampleInPlace. The ResampleInPlace option will create an image with the same discrete voxel values and will adjust the origin and direction of the physical space interpretation.", argstr="--interpolationMode %s", ) minimumStepLength = InputMultiPath( traits.Float, desc="Each step in the optimization takes steps at least this big. When none are possible, registration is complete.", sep=",", argstr="--minimumStepLength %s", ) translationScale = traits.Float( desc="How much to scale up changes in position compared to unit rotational changes in radians -- decrease this to put more rotation in the search pattern.", argstr="--translationScale %f", ) reproportionScale = traits.Float( desc="ScaleVersor3D 'Scale' compensation factor. Increase this to put more rescaling in a ScaleVersor3D or ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", argstr="--reproportionScale %f", ) skewScale = traits.Float( desc="ScaleSkewVersor3D Skew compensation factor. Increase this to put more skew in a ScaleSkewVersor3D search pattern. 1.0 works well with a translationScale of 1000.0", argstr="--skewScale %f", ) maxBSplineDisplacement = traits.Float( desc=" Sets the maximum allowed displacements in image physical coordinates for BSpline control grid along each axis. A value of 0.0 indicates that the problem should be unbounded. NOTE: This only constrains the BSpline portion, and does not limit the displacement from the associated bulk transform. This can lead to a substantial reduction in computation time in the BSpline optimizer., ", argstr="--maxBSplineDisplacement %f", ) histogramMatch = traits.Bool( desc="Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile. 
Do NOT use if registering images from different modailties.", argstr="--histogramMatch ", ) numberOfHistogramBins = traits.Int( desc="The number of histogram levels", argstr="--numberOfHistogramBins %d" ) numberOfMatchPoints = traits.Int( desc="the number of match points", argstr="--numberOfMatchPoints %d" ) strippedOutputTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="File name for the rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overwritten if either bsplineTransform or linearTransform is set.", argstr="--strippedOutputTransform %s", ) transformType = InputMultiPath( traits.Str, desc="Specifies a list of registration types to be used. The valid types are, Rigid, ScaleVersor3D, ScaleSkewVersor3D, Affine, and BSpline. Specifiying more than one in a comma separated list will initialize the next stage with the previous results. If registrationClass flag is used, it overrides this parameter setting.", sep=",", argstr="--transformType %s", ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="(optional) Filename to which save the (optional) estimated transform. NOTE: You must select either the outputTransform or the outputVolume option.", argstr="--outputTransform %s", ) fixedVolumeTimeIndex = traits.Int( desc="The index in the time series for the 3D fixed image to fit, if 4-dimensional.", argstr="--fixedVolumeTimeIndex %d", ) movingVolumeTimeIndex = traits.Int( desc="The index in the time series for the 3D moving image to fit, if 4-dimensional.", argstr="--movingVolumeTimeIndex %d", ) medianFilterSize = InputMultiPath( traits.Int, desc="The radius for the optional MedianImageFilter preprocessing in all 3 directions.", sep=",", argstr="--medianFilterSize %s", ) removeIntensityOutliers = traits.Float( desc="The half percentage to decide outliers of image intensities. The default value is zero, which means no outlier removal. 
If the value of 0.005 is given, the moduel will throw away 0.005 % of both tails, so 0.01% of intensities in total would be ignored in its statistic calculation. ", argstr="--removeIntensityOutliers %f", ) useCachingOfBSplineWeightsMode = traits.Enum( "ON", "OFF", desc="This is a 5x speed advantage at the expense of requiring much more memory. Only relevant when transformType is BSpline.", argstr="--useCachingOfBSplineWeightsMode %s", ) useExplicitPDFDerivativesMode = traits.Enum( "AUTO", "ON", "OFF", desc="Using mode AUTO means OFF for BSplineDeformableTransforms and ON for the linear transforms. The ON alternative uses more memory to sometimes do a better job.", argstr="--useExplicitPDFDerivativesMode %s", ) ROIAutoDilateSize = traits.Float( desc="This flag is only relavent when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. At setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.", argstr="--ROIAutoDilateSize %f", ) ROIAutoClosingSize = traits.Float( desc="This flag is only relavent when using ROIAUTO mode for initializing masks. It defines the hole closing size in mm. It is rounded up to the nearest whole pixel size in each direction. The default is to use a closing size of 9mm. For mouse data this value may need to be reset to 0.9 or smaller.", argstr="--ROIAutoClosingSize %f", ) relaxationFactor = traits.Float( desc="Internal debugging parameter, and should probably never be used from the command line. This will be removed in the future.", argstr="--relaxationFactor %f", ) maximumStepLength = traits.Float( desc="Internal debugging parameter, and should probably never be used from the command line. This will be removed in the future.", argstr="--maximumStepLength %f", ) failureExitCode = traits.Int( desc="If the fit fails, exit with this status code. 
(It can be used to force a successfult exit status of (0) if the registration fails due to reaching the maximum number of iterations.", argstr="--failureExitCode %d", ) writeTransformOnFailure = traits.Bool( desc="Flag to save the final transform even if the numberOfIterations are reached without convergence. (Intended for use when --failureExitCode 0 )", argstr="--writeTransformOnFailure ", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use. (default is auto-detected)", argstr="--numberOfThreads %d", ) forceMINumberOfThreads = traits.Int( desc="Force the the maximum number of threads to use for non thread safe MI metric. CAUTION: Inconsistent results my arise!", argstr="--forceMINumberOfThreads %d", ) debugLevel = traits.Int( desc="Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", argstr="--debugLevel %d", ) costFunctionConvergenceFactor = traits.Float( desc=" From itkLBFGSBOptimizer.h: Set/Get the CostFunctionConvergenceFactor. Algorithm terminates when the reduction in cost function is less than (factor * epsmcj) where epsmch is the machine precision. Typical values for factor: 1e+12 for low accuracy; 1e+7 for moderate accuracy and 1e+1 for extremely high accuracy. 1e+9 seems to work well., ", argstr="--costFunctionConvergenceFactor %f", ) projectedGradientTolerance = traits.Float( desc=" From itkLBFGSBOptimizer.h: Set/Get the ProjectedGradientTolerance. Algorithm terminates when the project gradient is below the tolerance. Default lbfgsb value is 1e-5, but 1e-4 seems to work well., ", argstr="--projectedGradientTolerance %f", ) gui = traits.Bool( desc="Display intermediate image volumes for debugging. 
NOTE: This is not part of the standard build sytem, and probably does nothing on your installation.", argstr="--gui ", ) promptUser = traits.Bool( desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", argstr="--promptUser ", ) NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_00 = traits.Bool( desc="DO NOT USE THIS FLAG", argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_00 " ) NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_01 = traits.Bool( desc="DO NOT USE THIS FLAG", argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_01 " ) NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02 = traits.Bool( desc="DO NOT USE THIS FLAG", argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02 " ) permitParameterVariation = InputMultiPath( traits.Int, desc="A bit vector to permit linear transform parameters to vary under optimization. The vector order corresponds with transform parameters, and beyond the end ones fill in as a default. For instance, you can choose to rotate only in x (pitch) with 1,0,0; this is mostly for expert use in turning on and off individual degrees of freedom in rotation, translation or scaling without multiplying the number of transform representations; this trick is probably meaningless when tried with the general affine transform.", sep=",", argstr="--permitParameterVariation %s", ) costMetric = traits.Enum( "MMI", "MSE", "NC", "MC", desc="The cost metric to be used during fitting. Defaults to MMI. Options are MMI (Mattes Mutual Information), MSE (Mean Square Error), NC (Normalized Correlation), MC (Match Cardinality for binary images)", argstr="--costMetric %s", ) writeOutputTransformInFloat = traits.Bool( desc="By default, the output registration transforms (either the output composite transform or each transform component) are written to the disk in double precision. If this flag is ON, the output transforms will be written in single (float) precision. 
It is especially important if the output transform is a displacement field transform, or it is a composite transform that includes several displacement fields.", argstr="--writeOutputTransformInFloat ", ) class BRAINSFitOutputSpec(TraitedSpec): bsplineTransform = File( desc="(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS BSpline", exists=True, ) linearTransform = File( desc="(optional) Filename to which save the estimated transform. NOTE: You must set at least one output object (either a deformed image or a transform. NOTE: USE THIS ONLY IF THE FINAL TRANSFORM IS ---NOT--- BSpline", exists=True, ) outputVolume = File( desc="(optional) Output image for registration. NOTE: You must select either the outputTransform or the outputVolume option.", exists=True, ) outputFixedVolumeROI = File( desc="The ROI automatically found in fixed image, ONLY FOR ROIAUTO mode.", exists=True, ) outputMovingVolumeROI = File( desc="The ROI automatically found in moving image, ONLY FOR ROIAUTO mode.", exists=True, ) strippedOutputTransform = File( desc="File name for the rigid component of the estimated affine transform. Can be used to rigidly register the moving image to the fixed image. NOTE: This value is overwritten if either bsplineTransform or linearTransform is set.", exists=True, ) outputTransform = File( desc="(optional) Filename to which save the (optional) estimated transform. NOTE: You must select either the outputTransform or the outputVolume option.", exists=True, ) class BRAINSFit(SEMLikeCommandLine): """title: General Registration (BRAINS) category: Registration description: Register a three-dimensional volume to a reference volume (Mattes Mutual Information by default). 
Described in BRAINSFit: Mutual Information Registrations of Whole-Brain 3D Images, Using the Insight Toolkit, Johnson H.J., Harris G., Williams K., The Insight Journal, 2007. http://hdl.handle.net/1926/1291 version: 3.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:BRAINSFit license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Hans J. Johnson, hans-johnson -at- uiowa.edu, http://wwww.psychiatry.uiowa.edu acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); Gregory Harris(1), Vincent Magnotta(1,2,3); Andriy Fedorov(5) 1=University of Iowa Department of Psychiatry, 2=University of Iowa Department of Radiology, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering, 5=Surgical Planning Lab, Harvard """ input_spec = BRAINSFitInputSpec output_spec = BRAINSFitOutputSpec _cmd = "BRAINSFit " _outputs_filenames = { "outputVolume": "outputVolume.nii", "bsplineTransform": "bsplineTransform.mat", "outputTransform": "outputTransform.mat", "outputFixedVolumeROI": "outputFixedVolumeROI.nii", "strippedOutputTransform": "strippedOutputTransform.mat", "outputMovingVolumeROI": "outputMovingVolumeROI.nii", "linearTransform": "linearTransform.mat", } nipype-1.7.0/nipype/interfaces/slicer/registration/brainsresample.py000066400000000000000000000101401413403311400260140ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class BRAINSResampleInputSpec(CommandLineInputSpec): inputVolume = File(desc="Image To Warp", exists=True, argstr="--inputVolume %s") referenceVolume = File( desc="Reference image used only to define the 
output space. If not specified, the warping is done in the same space as the image to warp.", exists=True, argstr="--referenceVolume %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Resulting deformed image", argstr="--outputVolume %s", ) pixelType = traits.Enum( "float", "short", "ushort", "int", "uint", "uchar", "binary", desc="Specifies the pixel type for the input/output images. The 'binary' pixel type uses a modified algorithm whereby the image is read in as unsigned char, a signed distance map is created, signed distance map is resampled, and then a thresholded image of type unsigned char is written to disk.", argstr="--pixelType %s", ) deformationVolume = File( desc="Displacement Field to be used to warp the image", exists=True, argstr="--deformationVolume %s", ) warpTransform = File( desc="Filename for the BRAINSFit transform used in place of the deformation field", exists=True, argstr="--warpTransform %s", ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", "ResampleInPlace", "BSpline", "WindowedSinc", "Hamming", "Cosine", "Welch", "Lanczos", "Blackman", desc="Type of interpolation to be used when applying transform to moving volume. Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", argstr="--interpolationMode %s", ) inverseTransform = traits.Bool( desc="True/False is to compute inverse of given transformation. Default is false", argstr="--inverseTransform ", ) defaultValue = traits.Float(desc="Default voxel value", argstr="--defaultValue %f") gridSpacing = InputMultiPath( traits.Int, desc="Add warped grid to output image to help show the deformation that occured with specified spacing. A spacing of 0 in a dimension indicates that grid lines should be rendered to fall exactly (i.e. do not allow displacements off that plane). 
This is useful for makeing a 2D image of grid lines from the 3D space ", sep=",", argstr="--gridSpacing %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class BRAINSResampleOutputSpec(TraitedSpec): outputVolume = File(desc="Resulting deformed image", exists=True) class BRAINSResample(SEMLikeCommandLine): """title: Resample Image (BRAINS) category: Registration description: This program resamples an image image using a deformation field or a transform (BSpline, Affine, Rigid, etc.). version: 3.0.0 documentation-url: http://www.slicer.org/slicerWiki/index.php/Modules:BRAINSResample license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was developed by Vincent Magnotta, Greg Harris, and Hans Johnson. acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. 
""" input_spec = BRAINSResampleInputSpec output_spec = BRAINSResampleOutputSpec _cmd = "BRAINSResample " _outputs_filenames = {"outputVolume": "outputVolume.nii"} nipype-1.7.0/nipype/interfaces/slicer/registration/specialized.py000066400000000000000000000622241413403311400253130ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class ACPCTransformInputSpec(CommandLineInputSpec): acpc = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="ACPC line, two fiducial points, one at the anterior commissure and one at the posterior commissure.", argstr="--acpc %s...", ) midline = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="The midline is a series of points defining the division between the hemispheres of the brain (the mid sagittal plane).", argstr="--midline %s...", ) outputTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="A transform filled in from the ACPC and Midline registration calculation", argstr="--outputTransform %s", ) debugSwitch = traits.Bool( desc="Click if wish to see debugging output", argstr="--debugSwitch " ) class ACPCTransformOutputSpec(TraitedSpec): outputTransform = File( desc="A transform filled in from the ACPC and Midline registration calculation", exists=True, ) class ACPCTransform(SEMLikeCommandLine): """title: ACPC Transform category: Registration.Specialized description:

Calculate a transformation from two lists of fiducial points.

ACPC line is two fiducial points, one at the anterior commissure and one at the posterior commissure. The resulting transform will bring the line connecting them to horizontal to the AP axis.

The midline is a series of points defining the division between the hemispheres of the brain (the mid sagittal plane). The resulting transform will put the output volume with the mid sagittal plane lined up with the AS plane.

Use the Filtering moduleResample Scalar/Vector/DWI Volumeto apply the transformation to a volume.

version: 1.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ACPCTransform license: slicer3 contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. """ input_spec = ACPCTransformInputSpec output_spec = ACPCTransformOutputSpec _cmd = "ACPCTransform " _outputs_filenames = {"outputTransform": "outputTransform.mat"} class FiducialRegistrationInputSpec(CommandLineInputSpec): fixedLandmarks = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="Ordered list of landmarks in the fixed image", argstr="--fixedLandmarks %s...", ) movingLandmarks = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="Ordered list of landmarks in the moving image", argstr="--movingLandmarks %s...", ) saveTransform = traits.Either( traits.Bool, File(), hash_files=False, desc="Save the transform that results from registration", argstr="--saveTransform %s", ) transformType = traits.Enum( "Translation", "Rigid", "Similarity", desc="Type of transform to produce", argstr="--transformType %s", ) rms = traits.Float(desc="Display RMS Error.", argstr="--rms %f") outputMessage = traits.Str( desc="Provides more information on the output", argstr="--outputMessage %s" ) class FiducialRegistrationOutputSpec(TraitedSpec): saveTransform = File( desc="Save the transform that results from registration", exists=True ) class FiducialRegistration(SEMLikeCommandLine): """title: Fiducial Registration category: Registration.Specialized description: Computes a rigid, similarity or affine transform from a matched list of fiducials version: 0.1.0.$Revision$ documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/TransformFromFiducials contributor: Casey B Goodlett (Kitware), Dominik Meier (SPL, BWH) 
acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. """ input_spec = FiducialRegistrationInputSpec output_spec = FiducialRegistrationOutputSpec _cmd = "FiducialRegistration " _outputs_filenames = {"saveTransform": "saveTransform.txt"} class VBRAINSDemonWarpInputSpec(CommandLineInputSpec): movingVolume = InputMultiPath( File(exists=True), desc="Required: input moving image", argstr="--movingVolume %s...", ) fixedVolume = InputMultiPath( File(exists=True), desc="Required: input fixed (target) image", argstr="--fixedVolume %s...", ) inputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", desc="Input volumes will be typecast to this format: float|short|ushort|int|uchar", argstr="--inputPixelType %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", argstr="--outputVolume %s", ) outputDisplacementFieldVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", argstr="--outputDisplacementFieldVolume %s", ) outputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", desc="outputVolume will be typecast to this format: float|short|ushort|int|uchar", argstr="--outputPixelType %s", ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", "ResampleInPlace", "BSpline", "WindowedSinc", "Hamming", "Cosine", "Welch", "Lanczos", "Blackman", desc="Type of interpolation to be used when applying transform to moving volume. 
Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", argstr="--interpolationMode %s", ) registrationFilterType = traits.Enum( "Demons", "FastSymmetricForces", "Diffeomorphic", "LogDemons", "SymmetricLogDemons", desc="Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic|LogDemons|SymmetricLogDemons", argstr="--registrationFilterType %s", ) smoothDisplacementFieldSigma = traits.Float( desc="A gaussian smoothing value to be applied to the deformation feild at each iteration.", argstr="--smoothDisplacementFieldSigma %f", ) numberOfPyramidLevels = traits.Int( desc="Number of image pyramid levels to use in the multi-resolution registration.", argstr="--numberOfPyramidLevels %d", ) minimumFixedPyramid = InputMultiPath( traits.Int, desc="The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", argstr="--minimumFixedPyramid %s", ) minimumMovingPyramid = InputMultiPath( traits.Int, desc="The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", argstr="--minimumMovingPyramid %s", ) arrayOfPyramidLevelIterations = InputMultiPath( traits.Int, desc="The number of iterations for each pyramid level", sep=",", argstr="--arrayOfPyramidLevelIterations %s", ) histogramMatch = traits.Bool( desc="Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", argstr="--histogramMatch ", ) numberOfHistogramBins = traits.Int( desc="The number of histogram levels", argstr="--numberOfHistogramBins %d" ) numberOfMatchPoints = traits.Int( desc="The number of match points for histrogramMatch", argstr="--numberOfMatchPoints %d", ) medianFilterSize = InputMultiPath( traits.Int, desc="Median filter radius in all 3 directions. 
When images have a lot of salt and pepper noise, this step can improve the registration.", sep=",", argstr="--medianFilterSize %s", ) initializeWithDisplacementField = File( desc="Initial deformation field vector image file name", exists=True, argstr="--initializeWithDisplacementField %s", ) initializeWithTransform = File( desc="Initial Transform filename", exists=True, argstr="--initializeWithTransform %s", ) makeBOBF = traits.Bool( desc="Flag to make Brain-Only Background-Filled versions of the input and target volumes.", argstr="--makeBOBF ", ) fixedBinaryVolume = File( desc="Mask filename for desired region of interest in the Fixed image.", exists=True, argstr="--fixedBinaryVolume %s", ) movingBinaryVolume = File( desc="Mask filename for desired region of interest in the Moving image.", exists=True, argstr="--movingBinaryVolume %s", ) lowerThresholdForBOBF = traits.Int( desc="Lower threshold for performing BOBF", argstr="--lowerThresholdForBOBF %d" ) upperThresholdForBOBF = traits.Int( desc="Upper threshold for performing BOBF", argstr="--upperThresholdForBOBF %d" ) backgroundFillValue = traits.Int( desc="Replacement value to overwrite background when performing BOBF", argstr="--backgroundFillValue %d", ) seedForBOBF = InputMultiPath( traits.Int, desc="coordinates in all 3 directions for Seed when performing BOBF", sep=",", argstr="--seedForBOBF %s", ) neighborhoodForBOBF = InputMultiPath( traits.Int, desc="neighborhood in all 3 directions to be included when performing BOBF", sep=",", argstr="--neighborhoodForBOBF %s", ) outputDisplacementFieldPrefix = traits.Str( desc="Displacement field filename prefix for writing separate x, y, and z component images", argstr="--outputDisplacementFieldPrefix %s", ) outputCheckerboardVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", argstr="--outputCheckerboardVolume %s", ) checkerboardPatternSubdivisions = 
InputMultiPath( traits.Int, desc="Number of Checkerboard subdivisions in all 3 directions", sep=",", argstr="--checkerboardPatternSubdivisions %s", ) outputNormalized = traits.Bool( desc="Flag to warp and write the normalized images to output. In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", argstr="--outputNormalized ", ) outputDebug = traits.Bool( desc="Flag to write debugging images after each step.", argstr="--outputDebug " ) weightFactors = InputMultiPath( traits.Float, desc="Weight fatctors for each input images", sep=",", argstr="--weightFactors %s", ) gradient_type = traits.Enum( "0", "1", "2", desc="Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", argstr="--gradient_type %s", ) upFieldSmoothing = traits.Float( desc="Smoothing sigma for the update field at each iteration", argstr="--upFieldSmoothing %f", ) max_step_length = traits.Float( desc="Maximum length of an update vector (0: no restriction)", argstr="--max_step_length %f", ) use_vanilla_dem = traits.Bool( desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem " ) gui = traits.Bool( desc="Display intermediate image volumes for debugging", argstr="--gui " ) promptUser = traits.Bool( desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", argstr="--promptUser ", ) numberOfBCHApproximationTerms = traits.Int( desc="Number of terms in the BCH expansion", argstr="--numberOfBCHApproximationTerms %d", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class VBRAINSDemonWarpOutputSpec(TraitedSpec): outputVolume = File( desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", exists=True, ) outputDisplacementFieldVolume = File( desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", 
exists=True, ) outputCheckerboardVolume = File( desc="Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", exists=True, ) class VBRAINSDemonWarp(SEMLikeCommandLine): """title: Vector Demon Registration (BRAINS) category: Registration.Specialized description: This program finds a deformation field to warp a moving image onto a fixed image. The images must be of the same signal kind, and contain an image of the same kind of object. This program uses the Thirion Demons warp software in ITK, the Insight Toolkit. Additional information is available at: http://www.nitrc.org/projects/brainsdemonwarp. version: 3.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:BRAINSDemonWarp license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was developed by Hans J. Johnson and Greg Harris. acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. 
""" input_spec = VBRAINSDemonWarpInputSpec output_spec = VBRAINSDemonWarpOutputSpec _cmd = "VBRAINSDemonWarp " _outputs_filenames = { "outputVolume": "outputVolume.nii", "outputCheckerboardVolume": "outputCheckerboardVolume.nii", "outputDisplacementFieldVolume": "outputDisplacementFieldVolume.nrrd", } class BRAINSDemonWarpInputSpec(CommandLineInputSpec): movingVolume = File( desc="Required: input moving image", exists=True, argstr="--movingVolume %s" ) fixedVolume = File( desc="Required: input fixed (target) image", exists=True, argstr="--fixedVolume %s", ) inputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", desc="Input volumes will be typecast to this format: float|short|ushort|int|uchar", argstr="--inputPixelType %s", ) outputVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", argstr="--outputVolume %s", ) outputDisplacementFieldVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", argstr="--outputDisplacementFieldVolume %s", ) outputPixelType = traits.Enum( "float", "short", "ushort", "int", "uchar", desc="outputVolume will be typecast to this format: float|short|ushort|int|uchar", argstr="--outputPixelType %s", ) interpolationMode = traits.Enum( "NearestNeighbor", "Linear", "ResampleInPlace", "BSpline", "WindowedSinc", "Hamming", "Cosine", "Welch", "Lanczos", "Blackman", desc="Type of interpolation to be used when applying transform to moving volume. 
Options are Linear, ResampleInPlace, NearestNeighbor, BSpline, or WindowedSinc", argstr="--interpolationMode %s", ) registrationFilterType = traits.Enum( "Demons", "FastSymmetricForces", "Diffeomorphic", desc="Registration Filter Type: Demons|FastSymmetricForces|Diffeomorphic", argstr="--registrationFilterType %s", ) smoothDisplacementFieldSigma = traits.Float( desc="A gaussian smoothing value to be applied to the deformation feild at each iteration.", argstr="--smoothDisplacementFieldSigma %f", ) numberOfPyramidLevels = traits.Int( desc="Number of image pyramid levels to use in the multi-resolution registration.", argstr="--numberOfPyramidLevels %d", ) minimumFixedPyramid = InputMultiPath( traits.Int, desc="The shrink factor for the first level of the fixed image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", argstr="--minimumFixedPyramid %s", ) minimumMovingPyramid = InputMultiPath( traits.Int, desc="The shrink factor for the first level of the moving image pyramid. (i.e. start at 1/16 scale, then 1/8, then 1/4, then 1/2, and finally full scale)", sep=",", argstr="--minimumMovingPyramid %s", ) arrayOfPyramidLevelIterations = InputMultiPath( traits.Int, desc="The number of iterations for each pyramid level", sep=",", argstr="--arrayOfPyramidLevelIterations %s", ) histogramMatch = traits.Bool( desc="Histogram Match the input images. This is suitable for images of the same modality that may have different absolute scales, but the same overall intensity profile.", argstr="--histogramMatch ", ) numberOfHistogramBins = traits.Int( desc="The number of histogram levels", argstr="--numberOfHistogramBins %d" ) numberOfMatchPoints = traits.Int( desc="The number of match points for histrogramMatch", argstr="--numberOfMatchPoints %d", ) medianFilterSize = InputMultiPath( traits.Int, desc="Median filter radius in all 3 directions. 
When images have a lot of salt and pepper noise, this step can improve the registration.", sep=",", argstr="--medianFilterSize %s", ) initializeWithDisplacementField = File( desc="Initial deformation field vector image file name", exists=True, argstr="--initializeWithDisplacementField %s", ) initializeWithTransform = File( desc="Initial Transform filename", exists=True, argstr="--initializeWithTransform %s", ) maskProcessingMode = traits.Enum( "NOMASK", "ROIAUTO", "ROI", "BOBF", desc="What mode to use for using the masks: NOMASK|ROIAUTO|ROI|BOBF. If ROIAUTO is choosen, then the mask is implicitly defined using a otsu forground and hole filling algorithm. Where the Region Of Interest mode uses the masks to define what parts of the image should be used for computing the deformation field. Brain Only Background Fill uses the masks to pre-process the input images by clipping and filling in the background with a predefined value.", argstr="--maskProcessingMode %s", ) fixedBinaryVolume = File( desc="Mask filename for desired region of interest in the Fixed image.", exists=True, argstr="--fixedBinaryVolume %s", ) movingBinaryVolume = File( desc="Mask filename for desired region of interest in the Moving image.", exists=True, argstr="--movingBinaryVolume %s", ) lowerThresholdForBOBF = traits.Int( desc="Lower threshold for performing BOBF", argstr="--lowerThresholdForBOBF %d" ) upperThresholdForBOBF = traits.Int( desc="Upper threshold for performing BOBF", argstr="--upperThresholdForBOBF %d" ) backgroundFillValue = traits.Int( desc="Replacement value to overwrite background when performing BOBF", argstr="--backgroundFillValue %d", ) seedForBOBF = InputMultiPath( traits.Int, desc="coordinates in all 3 directions for Seed when performing BOBF", sep=",", argstr="--seedForBOBF %s", ) neighborhoodForBOBF = InputMultiPath( traits.Int, desc="neighborhood in all 3 directions to be included when performing BOBF", sep=",", argstr="--neighborhoodForBOBF %s", ) 
outputDisplacementFieldPrefix = traits.Str( desc="Displacement field filename prefix for writing separate x, y, and z component images", argstr="--outputDisplacementFieldPrefix %s", ) outputCheckerboardVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", argstr="--outputCheckerboardVolume %s", ) checkerboardPatternSubdivisions = InputMultiPath( traits.Int, desc="Number of Checkerboard subdivisions in all 3 directions", sep=",", argstr="--checkerboardPatternSubdivisions %s", ) outputNormalized = traits.Bool( desc="Flag to warp and write the normalized images to output. In normalized images the image values are fit-scaled to be between 0 and the maximum storage type value.", argstr="--outputNormalized ", ) outputDebug = traits.Bool( desc="Flag to write debugging images after each step.", argstr="--outputDebug " ) gradient_type = traits.Enum( "0", "1", "2", desc="Type of gradient used for computing the demons force (0 is symmetrized, 1 is fixed image, 2 is moving image)", argstr="--gradient_type %s", ) upFieldSmoothing = traits.Float( desc="Smoothing sigma for the update field at each iteration", argstr="--upFieldSmoothing %f", ) max_step_length = traits.Float( desc="Maximum length of an update vector (0: no restriction)", argstr="--max_step_length %f", ) use_vanilla_dem = traits.Bool( desc="Run vanilla demons algorithm", argstr="--use_vanilla_dem " ) gui = traits.Bool( desc="Display intermediate image volumes for debugging", argstr="--gui " ) promptUser = traits.Bool( desc="Prompt the user to hit enter each time an image is sent to the DebugImageViewer", argstr="--promptUser ", ) numberOfBCHApproximationTerms = traits.Int( desc="Number of terms in the BCH expansion", argstr="--numberOfBCHApproximationTerms %d", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class 
BRAINSDemonWarpOutputSpec(TraitedSpec): outputVolume = File( desc="Required: output resampled moving image (will have the same physical space as the fixedVolume).", exists=True, ) outputDisplacementFieldVolume = File( desc="Output deformation field vector image (will have the same physical space as the fixedVolume).", exists=True, ) outputCheckerboardVolume = File( desc="Genete a checkerboard image volume between the fixedVolume and the deformed movingVolume.", exists=True, ) class BRAINSDemonWarp(SEMLikeCommandLine): """title: Demon Registration (BRAINS) category: Registration.Specialized description: This program finds a deformation field to warp a moving image onto a fixed image. The images must be of the same signal kind, and contain an image of the same kind of object. This program uses the Thirion Demons warp software in ITK, the Insight Toolkit. Additional information is available at: http://www.nitrc.org/projects/brainsdemonwarp. version: 3.0.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:BRAINSDemonWarp license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: This tool was developed by Hans J. Johnson and Greg Harris. acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. 
""" input_spec = BRAINSDemonWarpInputSpec output_spec = BRAINSDemonWarpOutputSpec _cmd = "BRAINSDemonWarp " _outputs_filenames = { "outputVolume": "outputVolume.nii", "outputCheckerboardVolume": "outputCheckerboardVolume.nii", "outputDisplacementFieldVolume": "outputDisplacementFieldVolume.nrrd", } nipype-1.7.0/nipype/interfaces/slicer/registration/tests/000077500000000000000000000000001413403311400236015ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/registration/tests/__init__.py000066400000000000000000000000301413403311400257030ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/slicer/registration/tests/test_auto_ACPCTransform.py000066400000000000000000000022421413403311400306440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import ACPCTransform def test_ACPCTransform_inputs(): input_map = dict( acpc=dict( argstr="--acpc %s...", ), args=dict( argstr="%s", ), debugSwitch=dict( argstr="--debugSwitch ", ), environ=dict( nohash=True, usedefault=True, ), midline=dict( argstr="--midline %s...", ), outputTransform=dict( argstr="--outputTransform %s", hash_files=False, ), ) inputs = ACPCTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ACPCTransform_outputs(): output_map = dict( outputTransform=dict( extensions=None, ), ) outputs = ACPCTransform.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSDemonWarp.py000066400000000000000000000117271413403311400310450ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import BRAINSDemonWarp def test_BRAINSDemonWarp_inputs(): input_map = dict( args=dict( argstr="%s", 
), arrayOfPyramidLevelIterations=dict( argstr="--arrayOfPyramidLevelIterations %s", sep=",", ), backgroundFillValue=dict( argstr="--backgroundFillValue %d", ), checkerboardPatternSubdivisions=dict( argstr="--checkerboardPatternSubdivisions %s", sep=",", ), environ=dict( nohash=True, usedefault=True, ), fixedBinaryVolume=dict( argstr="--fixedBinaryVolume %s", extensions=None, ), fixedVolume=dict( argstr="--fixedVolume %s", extensions=None, ), gradient_type=dict( argstr="--gradient_type %s", ), gui=dict( argstr="--gui ", ), histogramMatch=dict( argstr="--histogramMatch ", ), initializeWithDisplacementField=dict( argstr="--initializeWithDisplacementField %s", extensions=None, ), initializeWithTransform=dict( argstr="--initializeWithTransform %s", extensions=None, ), inputPixelType=dict( argstr="--inputPixelType %s", ), interpolationMode=dict( argstr="--interpolationMode %s", ), lowerThresholdForBOBF=dict( argstr="--lowerThresholdForBOBF %d", ), maskProcessingMode=dict( argstr="--maskProcessingMode %s", ), max_step_length=dict( argstr="--max_step_length %f", ), medianFilterSize=dict( argstr="--medianFilterSize %s", sep=",", ), minimumFixedPyramid=dict( argstr="--minimumFixedPyramid %s", sep=",", ), minimumMovingPyramid=dict( argstr="--minimumMovingPyramid %s", sep=",", ), movingBinaryVolume=dict( argstr="--movingBinaryVolume %s", extensions=None, ), movingVolume=dict( argstr="--movingVolume %s", extensions=None, ), neighborhoodForBOBF=dict( argstr="--neighborhoodForBOBF %s", sep=",", ), numberOfBCHApproximationTerms=dict( argstr="--numberOfBCHApproximationTerms %d", ), numberOfHistogramBins=dict( argstr="--numberOfHistogramBins %d", ), numberOfMatchPoints=dict( argstr="--numberOfMatchPoints %d", ), numberOfPyramidLevels=dict( argstr="--numberOfPyramidLevels %d", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputCheckerboardVolume=dict( argstr="--outputCheckerboardVolume %s", hash_files=False, ), outputDebug=dict( argstr="--outputDebug ", ), 
outputDisplacementFieldPrefix=dict( argstr="--outputDisplacementFieldPrefix %s", ), outputDisplacementFieldVolume=dict( argstr="--outputDisplacementFieldVolume %s", hash_files=False, ), outputNormalized=dict( argstr="--outputNormalized ", ), outputPixelType=dict( argstr="--outputPixelType %s", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), promptUser=dict( argstr="--promptUser ", ), registrationFilterType=dict( argstr="--registrationFilterType %s", ), seedForBOBF=dict( argstr="--seedForBOBF %s", sep=",", ), smoothDisplacementFieldSigma=dict( argstr="--smoothDisplacementFieldSigma %f", ), upFieldSmoothing=dict( argstr="--upFieldSmoothing %f", ), upperThresholdForBOBF=dict( argstr="--upperThresholdForBOBF %d", ), use_vanilla_dem=dict( argstr="--use_vanilla_dem ", ), ) inputs = BRAINSDemonWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSDemonWarp_outputs(): output_map = dict( outputCheckerboardVolume=dict( extensions=None, ), outputDisplacementFieldVolume=dict( extensions=None, ), outputVolume=dict( extensions=None, ), ) outputs = BRAINSDemonWarp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSFit.py000066400000000000000000000161621413403311400276710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsfit import BRAINSFit def test_BRAINSFit_inputs(): input_map = dict( NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_00=dict( argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_00 ", ), NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_01=dict( argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_01 ", ), NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02=dict( argstr="--NEVER_USE_THIS_FLAG_IT_IS_OUTDATED_02 ", ), ROIAutoClosingSize=dict( 
argstr="--ROIAutoClosingSize %f", ), ROIAutoDilateSize=dict( argstr="--ROIAutoDilateSize %f", ), args=dict( argstr="%s", ), backgroundFillValue=dict( argstr="--backgroundFillValue %f", ), bsplineTransform=dict( argstr="--bsplineTransform %s", hash_files=False, ), costFunctionConvergenceFactor=dict( argstr="--costFunctionConvergenceFactor %f", ), costMetric=dict( argstr="--costMetric %s", ), debugLevel=dict( argstr="--debugLevel %d", ), environ=dict( nohash=True, usedefault=True, ), failureExitCode=dict( argstr="--failureExitCode %d", ), fixedBinaryVolume=dict( argstr="--fixedBinaryVolume %s", extensions=None, ), fixedVolume=dict( argstr="--fixedVolume %s", extensions=None, ), fixedVolumeTimeIndex=dict( argstr="--fixedVolumeTimeIndex %d", ), forceMINumberOfThreads=dict( argstr="--forceMINumberOfThreads %d", ), gui=dict( argstr="--gui ", ), histogramMatch=dict( argstr="--histogramMatch ", ), initialTransform=dict( argstr="--initialTransform %s", extensions=None, ), initializeTransformMode=dict( argstr="--initializeTransformMode %s", ), interpolationMode=dict( argstr="--interpolationMode %s", ), linearTransform=dict( argstr="--linearTransform %s", hash_files=False, ), maskInferiorCutOffFromCenter=dict( argstr="--maskInferiorCutOffFromCenter %f", ), maskProcessingMode=dict( argstr="--maskProcessingMode %s", ), maxBSplineDisplacement=dict( argstr="--maxBSplineDisplacement %f", ), maximumStepLength=dict( argstr="--maximumStepLength %f", ), medianFilterSize=dict( argstr="--medianFilterSize %s", sep=",", ), minimumStepLength=dict( argstr="--minimumStepLength %s", sep=",", ), movingBinaryVolume=dict( argstr="--movingBinaryVolume %s", extensions=None, ), movingVolume=dict( argstr="--movingVolume %s", extensions=None, ), movingVolumeTimeIndex=dict( argstr="--movingVolumeTimeIndex %d", ), numberOfHistogramBins=dict( argstr="--numberOfHistogramBins %d", ), numberOfIterations=dict( argstr="--numberOfIterations %s", sep=",", ), numberOfMatchPoints=dict( 
argstr="--numberOfMatchPoints %d", ), numberOfSamples=dict( argstr="--numberOfSamples %d", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputFixedVolumeROI=dict( argstr="--outputFixedVolumeROI %s", hash_files=False, ), outputMovingVolumeROI=dict( argstr="--outputMovingVolumeROI %s", hash_files=False, ), outputTransform=dict( argstr="--outputTransform %s", hash_files=False, ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), outputVolumePixelType=dict( argstr="--outputVolumePixelType %s", ), permitParameterVariation=dict( argstr="--permitParameterVariation %s", sep=",", ), projectedGradientTolerance=dict( argstr="--projectedGradientTolerance %f", ), promptUser=dict( argstr="--promptUser ", ), relaxationFactor=dict( argstr="--relaxationFactor %f", ), removeIntensityOutliers=dict( argstr="--removeIntensityOutliers %f", ), reproportionScale=dict( argstr="--reproportionScale %f", ), scaleOutputValues=dict( argstr="--scaleOutputValues ", ), skewScale=dict( argstr="--skewScale %f", ), splineGridSize=dict( argstr="--splineGridSize %s", sep=",", ), strippedOutputTransform=dict( argstr="--strippedOutputTransform %s", hash_files=False, ), transformType=dict( argstr="--transformType %s", sep=",", ), translationScale=dict( argstr="--translationScale %f", ), useAffine=dict( argstr="--useAffine ", ), useBSpline=dict( argstr="--useBSpline ", ), useCachingOfBSplineWeightsMode=dict( argstr="--useCachingOfBSplineWeightsMode %s", ), useExplicitPDFDerivativesMode=dict( argstr="--useExplicitPDFDerivativesMode %s", ), useRigid=dict( argstr="--useRigid ", ), useScaleSkewVersor3D=dict( argstr="--useScaleSkewVersor3D ", ), useScaleVersor3D=dict( argstr="--useScaleVersor3D ", ), writeOutputTransformInFloat=dict( argstr="--writeOutputTransformInFloat ", ), writeTransformOnFailure=dict( argstr="--writeTransformOnFailure ", ), ) inputs = BRAINSFit.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert 
getattr(inputs.traits()[key], metakey) == value def test_BRAINSFit_outputs(): output_map = dict( bsplineTransform=dict( extensions=None, ), linearTransform=dict( extensions=None, ), outputFixedVolumeROI=dict( extensions=None, ), outputMovingVolumeROI=dict( extensions=None, ), outputTransform=dict( extensions=None, ), outputVolume=dict( extensions=None, ), strippedOutputTransform=dict( extensions=None, ), ) outputs = BRAINSFit.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/registration/tests/test_auto_BRAINSResample.py000066400000000000000000000036021413403311400307120ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..brainsresample import BRAINSResample def test_BRAINSResample_inputs(): input_map = dict( args=dict( argstr="%s", ), defaultValue=dict( argstr="--defaultValue %f", ), deformationVolume=dict( argstr="--deformationVolume %s", extensions=None, ), environ=dict( nohash=True, usedefault=True, ), gridSpacing=dict( argstr="--gridSpacing %s", sep=",", ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), interpolationMode=dict( argstr="--interpolationMode %s", ), inverseTransform=dict( argstr="--inverseTransform ", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), pixelType=dict( argstr="--pixelType %s", ), referenceVolume=dict( argstr="--referenceVolume %s", extensions=None, ), warpTransform=dict( argstr="--warpTransform %s", extensions=None, ), ) inputs = BRAINSResample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSResample_outputs(): output_map = dict( outputVolume=dict( extensions=None, ), ) outputs = BRAINSResample.output_spec() for key, metadata in 
list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/registration/tests/test_auto_FiducialRegistration.py000066400000000000000000000025651413403311400323650ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import FiducialRegistration def test_FiducialRegistration_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fixedLandmarks=dict( argstr="--fixedLandmarks %s...", ), movingLandmarks=dict( argstr="--movingLandmarks %s...", ), outputMessage=dict( argstr="--outputMessage %s", ), rms=dict( argstr="--rms %f", ), saveTransform=dict( argstr="--saveTransform %s", hash_files=False, ), transformType=dict( argstr="--transformType %s", ), ) inputs = FiducialRegistration.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FiducialRegistration_outputs(): output_map = dict( saveTransform=dict( extensions=None, ), ) outputs = FiducialRegistration.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/registration/tests/test_auto_VBRAINSDemonWarp.py000066400000000000000000000117671413403311400311770ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import VBRAINSDemonWarp def test_VBRAINSDemonWarp_inputs(): input_map = dict( args=dict( argstr="%s", ), arrayOfPyramidLevelIterations=dict( argstr="--arrayOfPyramidLevelIterations %s", sep=",", ), backgroundFillValue=dict( argstr="--backgroundFillValue %d", ), checkerboardPatternSubdivisions=dict( argstr="--checkerboardPatternSubdivisions %s", sep=",", ), environ=dict( nohash=True, usedefault=True, ), 
fixedBinaryVolume=dict( argstr="--fixedBinaryVolume %s", extensions=None, ), fixedVolume=dict( argstr="--fixedVolume %s...", ), gradient_type=dict( argstr="--gradient_type %s", ), gui=dict( argstr="--gui ", ), histogramMatch=dict( argstr="--histogramMatch ", ), initializeWithDisplacementField=dict( argstr="--initializeWithDisplacementField %s", extensions=None, ), initializeWithTransform=dict( argstr="--initializeWithTransform %s", extensions=None, ), inputPixelType=dict( argstr="--inputPixelType %s", ), interpolationMode=dict( argstr="--interpolationMode %s", ), lowerThresholdForBOBF=dict( argstr="--lowerThresholdForBOBF %d", ), makeBOBF=dict( argstr="--makeBOBF ", ), max_step_length=dict( argstr="--max_step_length %f", ), medianFilterSize=dict( argstr="--medianFilterSize %s", sep=",", ), minimumFixedPyramid=dict( argstr="--minimumFixedPyramid %s", sep=",", ), minimumMovingPyramid=dict( argstr="--minimumMovingPyramid %s", sep=",", ), movingBinaryVolume=dict( argstr="--movingBinaryVolume %s", extensions=None, ), movingVolume=dict( argstr="--movingVolume %s...", ), neighborhoodForBOBF=dict( argstr="--neighborhoodForBOBF %s", sep=",", ), numberOfBCHApproximationTerms=dict( argstr="--numberOfBCHApproximationTerms %d", ), numberOfHistogramBins=dict( argstr="--numberOfHistogramBins %d", ), numberOfMatchPoints=dict( argstr="--numberOfMatchPoints %d", ), numberOfPyramidLevels=dict( argstr="--numberOfPyramidLevels %d", ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), outputCheckerboardVolume=dict( argstr="--outputCheckerboardVolume %s", hash_files=False, ), outputDebug=dict( argstr="--outputDebug ", ), outputDisplacementFieldPrefix=dict( argstr="--outputDisplacementFieldPrefix %s", ), outputDisplacementFieldVolume=dict( argstr="--outputDisplacementFieldVolume %s", hash_files=False, ), outputNormalized=dict( argstr="--outputNormalized ", ), outputPixelType=dict( argstr="--outputPixelType %s", ), outputVolume=dict( argstr="--outputVolume %s", hash_files=False, ), 
promptUser=dict( argstr="--promptUser ", ), registrationFilterType=dict( argstr="--registrationFilterType %s", ), seedForBOBF=dict( argstr="--seedForBOBF %s", sep=",", ), smoothDisplacementFieldSigma=dict( argstr="--smoothDisplacementFieldSigma %f", ), upFieldSmoothing=dict( argstr="--upFieldSmoothing %f", ), upperThresholdForBOBF=dict( argstr="--upperThresholdForBOBF %d", ), use_vanilla_dem=dict( argstr="--use_vanilla_dem ", ), weightFactors=dict( argstr="--weightFactors %s", sep=",", ), ) inputs = VBRAINSDemonWarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_VBRAINSDemonWarp_outputs(): output_map = dict( outputCheckerboardVolume=dict( extensions=None, ), outputDisplacementFieldVolume=dict( extensions=None, ), outputVolume=dict( extensions=None, ), ) outputs = VBRAINSDemonWarp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/segmentation/000077500000000000000000000000001413403311400224225ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/segmentation/__init__.py000066400000000000000000000002751413403311400245370ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .specialized import RobustStatisticsSegmenter, EMSegmentCommandLine, BRAINSROIAuto from .simpleregiongrowingsegmentation import SimpleRegionGrowingSegmentation nipype-1.7.0/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py000066400000000000000000000060201413403311400315020ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, 
traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class SimpleRegionGrowingSegmentationInputSpec(CommandLineInputSpec): smoothingIterations = traits.Int( desc="Number of smoothing iterations", argstr="--smoothingIterations %d" ) timestep = traits.Float(desc="Timestep for curvature flow", argstr="--timestep %f") iterations = traits.Int( desc="Number of iterations of region growing", argstr="--iterations %d" ) multiplier = traits.Float( desc="Number of standard deviations to include in intensity model", argstr="--multiplier %f", ) neighborhood = traits.Int( desc="The radius of the neighborhood over which to calculate intensity model", argstr="--neighborhood %d", ) labelvalue = traits.Int( desc="The integer value (0-255) to use for the segmentation results. This will determine the color of the segmentation that will be generated by the Region growing algorithm", argstr="--labelvalue %d", ) seed = InputMultiPath( traits.List(traits.Float(), minlen=3, maxlen=3), desc="Seed point(s) for region growing", argstr="--seed %s...", ) inputVolume = File( position=-2, desc="Input volume to be filtered", exists=True, argstr="%s" ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output filtered", argstr="%s", ) class SimpleRegionGrowingSegmentationOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Output filtered", exists=True) class SimpleRegionGrowingSegmentation(SEMLikeCommandLine): """title: Simple Region Growing Segmentation category: Segmentation description: A simple region growing segmentation algorithm based on intensity statistics. To create a list of fiducials (Seeds) for this algorithm, click on the tool bar icon of an arrow pointing to a starburst fiducial to enter the 'place a new object mode' and then use the fiducials module. This module uses the Slicer Command Line Interface (CLI) and the ITK filters CurvatureFlowImageFilter and ConfidenceConnectedImageFilter. 
version: 0.1.0.$Revision: 19904 $(alpha) documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/SimpleRegionGrowingSegmentation contributor: Jim Miller (GE) acknowledgements: This command module was derived from Insight/Examples (copyright) Insight Software Consortium """ input_spec = SimpleRegionGrowingSegmentationInputSpec output_spec = SimpleRegionGrowingSegmentationOutputSpec _cmd = "SimpleRegionGrowingSegmentation " _outputs_filenames = {"outputVolume": "outputVolume.nii"} nipype-1.7.0/nipype/interfaces/slicer/segmentation/specialized.py000066400000000000000000000300641413403311400252730ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class RobustStatisticsSegmenterInputSpec(CommandLineInputSpec): expectedVolume = traits.Float( desc="The approximate volume of the object, in mL.", argstr="--expectedVolume %f", ) intensityHomogeneity = traits.Float( desc="What is the homogeneity of intensity within the object? 
Given constant intensity at 1.0 score and extreme fluctuating intensity at 0.", argstr="--intensityHomogeneity %f", ) curvatureWeight = traits.Float( desc="Given sphere 1.0 score and extreme rough bounday/surface 0 score, what is the expected smoothness of the object?", argstr="--curvatureWeight %f", ) labelValue = traits.Int( desc="Label value of the output image", argstr="--labelValue %d" ) maxRunningTime = traits.Float( desc="The program will stop if this time is reached.", argstr="--maxRunningTime %f", ) originalImageFileName = File( position=-3, desc="Original image to be segmented", exists=True, argstr="%s" ) labelImageFileName = File( position=-2, desc="Label image for initialization", exists=True, argstr="%s" ) segmentedImageFileName = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Segmented image", argstr="%s", ) class RobustStatisticsSegmenterOutputSpec(TraitedSpec): segmentedImageFileName = File(position=-1, desc="Segmented image", exists=True) class RobustStatisticsSegmenter(SEMLikeCommandLine): """title: Robust Statistics Segmenter category: Segmentation.Specialized description: Active contour segmentation using robust statistic. 
version: 1.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/RobustStatisticsSegmenter contributor: Yi Gao (gatech), Allen Tannenbaum (gatech), Ron Kikinis (SPL, BWH) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health """ input_spec = RobustStatisticsSegmenterInputSpec output_spec = RobustStatisticsSegmenterOutputSpec _cmd = "RobustStatisticsSegmenter " _outputs_filenames = {"segmentedImageFileName": "segmentedImageFileName.nii"} class EMSegmentCommandLineInputSpec(CommandLineInputSpec): mrmlSceneFileName = File( desc="Active MRML scene that contains EMSegment algorithm parameters.", exists=True, argstr="--mrmlSceneFileName %s", ) resultVolumeFileName = traits.Either( traits.Bool, File(), hash_files=False, desc="The file name that the segmentation result volume will be written to.", argstr="--resultVolumeFileName %s", ) targetVolumeFileNames = InputMultiPath( File(exists=True), desc="File names of target volumes (to be segmented). The number of target images must be equal to the number of target images specified in the parameter set, and these images must be spatially aligned.", argstr="--targetVolumeFileNames %s...", ) intermediateResultsDirectory = Directory( desc="Directory where EMSegmenter will write intermediate data (e.g., aligned atlas data).", exists=True, argstr="--intermediateResultsDirectory %s", ) parametersMRMLNodeName = traits.Str( desc="The name of the EMSegment parameters node within the active MRML scene. Leave blank for default.", argstr="--parametersMRMLNodeName %s", ) disableMultithreading = traits.Int( desc="Disable multithreading for the EMSegmenter algorithm only! Preprocessing might still run in multi-threaded mode. -1: Do not overwrite default value. 0: Disable. 1: Enable.", argstr="--disableMultithreading %d", ) dontUpdateIntermediateData = traits.Int( desc="Disable update of intermediate results. 
-1: Do not overwrite default value. 0: Disable. 1: Enable.", argstr="--dontUpdateIntermediateData %d", ) verbose = traits.Bool(desc="Enable verbose output.", argstr="--verbose ") loadTargetCentered = traits.Bool( desc="Read target files centered.", argstr="--loadTargetCentered " ) loadAtlasNonCentered = traits.Bool( desc="Read atlas files non-centered.", argstr="--loadAtlasNonCentered " ) taskPreProcessingSetting = traits.Str( desc="Specifies the different task parameter. Leave blank for default.", argstr="--taskPreProcessingSetting %s", ) keepTempFiles = traits.Bool( desc="If flag is set then at the end of command the temporary files are not removed", argstr="--keepTempFiles ", ) resultStandardVolumeFileName = File( desc="Used for testing. Compare segmentation results to this image and return EXIT_FAILURE if they do not match.", exists=True, argstr="--resultStandardVolumeFileName %s", ) dontWriteResults = traits.Bool( desc="Used for testing. Don't actually write the resulting labelmap to disk.", argstr="--dontWriteResults ", ) generateEmptyMRMLSceneAndQuit = traits.Either( traits.Bool, File(), hash_files=False, desc="Used for testing. Only write a scene with default mrml parameters.", argstr="--generateEmptyMRMLSceneAndQuit %s", ) resultMRMLSceneFileName = traits.Either( traits.Bool, File(), hash_files=False, desc="Write out the MRML scene after command line substitutions have been made.", argstr="--resultMRMLSceneFileName %s", ) disableCompression = traits.Bool( desc="Don't use compression when writing result image to disk.", argstr="--disableCompression ", ) atlasVolumeFileNames = InputMultiPath( File(exists=True), desc="Use an alternative atlas to the one that is specified by the mrml file - note the order matters ! 
", argstr="--atlasVolumeFileNames %s...", ) registrationPackage = traits.Str( desc="specify the registration package for preprocessing (CMTK or BRAINS or PLASTIMATCH or DEMONS)", argstr="--registrationPackage %s", ) registrationAffineType = traits.Int( desc="specify the accuracy of the affine registration. -2: Do not overwrite default, -1: Test, 0: Disable, 1: Fast, 2: Accurate", argstr="--registrationAffineType %d", ) registrationDeformableType = traits.Int( desc="specify the accuracy of the deformable registration. -2: Do not overwrite default, -1: Test, 0: Disable, 1: Fast, 2: Accurate", argstr="--registrationDeformableType %d", ) class EMSegmentCommandLineOutputSpec(TraitedSpec): resultVolumeFileName = File( desc="The file name that the segmentation result volume will be written to.", exists=True, ) generateEmptyMRMLSceneAndQuit = File( desc="Used for testing. Only write a scene with default mrml parameters.", exists=True, ) resultMRMLSceneFileName = File( desc="Write out the MRML scene after command line substitutions have been made.", exists=True, ) class EMSegmentCommandLine(SEMLikeCommandLine): """title: EMSegment Command-line category: Segmentation.Specialized description: This module is used to simplify the process of segmenting large collections of images by providing a command line interface to the EMSegment algorithm for script and batch processing. documentation-url: http://www.slicer.org/slicerWiki/index.php/Documentation/4.0/EMSegment_Command-line contributor: Sebastien Barre, Brad Davis, Kilian Pohl, Polina Golland, Yumin Yuan, Daniel Haehn acknowledgements: Many people and organizations have contributed to the funding, design, and development of the EMSegment algorithm and its various implementations. 
""" input_spec = EMSegmentCommandLineInputSpec output_spec = EMSegmentCommandLineOutputSpec _cmd = "EMSegmentCommandLine " _outputs_filenames = { "generateEmptyMRMLSceneAndQuit": "generateEmptyMRMLSceneAndQuit", "resultMRMLSceneFileName": "resultMRMLSceneFileName", "resultVolumeFileName": "resultVolumeFileName.mhd", } class BRAINSROIAutoInputSpec(CommandLineInputSpec): inputVolume = File( desc="The input image for finding the largest region filled mask.", exists=True, argstr="--inputVolume %s", ) outputROIMaskVolume = traits.Either( traits.Bool, File(), hash_files=False, desc="The ROI automatically found from the input image.", argstr="--outputROIMaskVolume %s", ) outputClippedVolumeROI = traits.Either( traits.Bool, File(), hash_files=False, desc="The inputVolume clipped to the region of the brain mask.", argstr="--outputClippedVolumeROI %s", ) otsuPercentileThreshold = traits.Float( desc="Parameter to the Otsu threshold algorithm.", argstr="--otsuPercentileThreshold %f", ) thresholdCorrectionFactor = traits.Float( desc="A factor to scale the Otsu algorithm's result threshold, in case clipping mangles the image.", argstr="--thresholdCorrectionFactor %f", ) closingSize = traits.Float( desc="The Closing Size (in millimeters) for largest connected filled mask. This value is divided by image spacing and rounded to the next largest voxel number.", argstr="--closingSize %f", ) ROIAutoDilateSize = traits.Float( desc="This flag is only relavent when using ROIAUTO mode for initializing masks. It defines the final dilation size to capture a bit of background outside the tissue region. 
At setting of 10mm has been shown to help regularize a BSpline registration type so that there is some background constraints to match the edges of the head better.", argstr="--ROIAutoDilateSize %f", ) outputVolumePixelType = traits.Enum( "float", "short", "ushort", "int", "uint", "uchar", desc="The output image Pixel Type is the scalar datatype for representation of the Output Volume.", argstr="--outputVolumePixelType %s", ) numberOfThreads = traits.Int( desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d", ) class BRAINSROIAutoOutputSpec(TraitedSpec): outputROIMaskVolume = File( desc="The ROI automatically found from the input image.", exists=True ) outputClippedVolumeROI = File( desc="The inputVolume clipped to the region of the brain mask.", exists=True ) class BRAINSROIAuto(SEMLikeCommandLine): """title: Foreground masking (BRAINS) category: Segmentation.Specialized description: This tool uses a combination of otsu thresholding and a closing operations to identify the most prominant foreground region in an image. version: 2.4.1 license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt contributor: Hans J. 
Johnson, hans-johnson -at- uiowa.edu, http://wwww.psychiatry.uiowa.edu acknowledgements: Hans Johnson(1,3,4); Kent Williams(1); Gregory Harris(1), Vincent Magnotta(1,2,3); Andriy Fedorov(5), fedorov -at- bwh.harvard.edu (Slicer integration); (1=University of Iowa Department of Psychiatry, 2=University of Iowa Department of Radiology, 3=University of Iowa Department of Biomedical Engineering, 4=University of Iowa Department of Electrical and Computer Engineering, 5=Surgical Planning Lab, Harvard) """ input_spec = BRAINSROIAutoInputSpec output_spec = BRAINSROIAutoOutputSpec _cmd = "BRAINSROIAuto " _outputs_filenames = { "outputROIMaskVolume": "outputROIMaskVolume.nii", "outputClippedVolumeROI": "outputClippedVolumeROI.nii", } nipype-1.7.0/nipype/interfaces/slicer/segmentation/tests/000077500000000000000000000000001413403311400235645ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/segmentation/tests/__init__.py000066400000000000000000000000301413403311400256660ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/slicer/segmentation/tests/test_auto_BRAINSROIAuto.py000066400000000000000000000034661413403311400304170ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import BRAINSROIAuto def test_BRAINSROIAuto_inputs(): input_map = dict( ROIAutoDilateSize=dict( argstr="--ROIAutoDilateSize %f", ), args=dict( argstr="%s", ), closingSize=dict( argstr="--closingSize %f", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="--inputVolume %s", extensions=None, ), numberOfThreads=dict( argstr="--numberOfThreads %d", ), otsuPercentileThreshold=dict( argstr="--otsuPercentileThreshold %f", ), outputClippedVolumeROI=dict( argstr="--outputClippedVolumeROI %s", hash_files=False, ), outputROIMaskVolume=dict( argstr="--outputROIMaskVolume %s", hash_files=False, ), outputVolumePixelType=dict( argstr="--outputVolumePixelType %s", ), thresholdCorrectionFactor=dict( 
argstr="--thresholdCorrectionFactor %f", ), ) inputs = BRAINSROIAuto.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BRAINSROIAuto_outputs(): output_map = dict( outputClippedVolumeROI=dict( extensions=None, ), outputROIMaskVolume=dict( extensions=None, ), ) outputs = BRAINSROIAuto.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/segmentation/tests/test_auto_EMSegmentCommandLine.py000066400000000000000000000062201413403311400321600ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import EMSegmentCommandLine def test_EMSegmentCommandLine_inputs(): input_map = dict( args=dict( argstr="%s", ), atlasVolumeFileNames=dict( argstr="--atlasVolumeFileNames %s...", ), disableCompression=dict( argstr="--disableCompression ", ), disableMultithreading=dict( argstr="--disableMultithreading %d", ), dontUpdateIntermediateData=dict( argstr="--dontUpdateIntermediateData %d", ), dontWriteResults=dict( argstr="--dontWriteResults ", ), environ=dict( nohash=True, usedefault=True, ), generateEmptyMRMLSceneAndQuit=dict( argstr="--generateEmptyMRMLSceneAndQuit %s", hash_files=False, ), intermediateResultsDirectory=dict( argstr="--intermediateResultsDirectory %s", ), keepTempFiles=dict( argstr="--keepTempFiles ", ), loadAtlasNonCentered=dict( argstr="--loadAtlasNonCentered ", ), loadTargetCentered=dict( argstr="--loadTargetCentered ", ), mrmlSceneFileName=dict( argstr="--mrmlSceneFileName %s", extensions=None, ), parametersMRMLNodeName=dict( argstr="--parametersMRMLNodeName %s", ), registrationAffineType=dict( argstr="--registrationAffineType %d", ), registrationDeformableType=dict( argstr="--registrationDeformableType %d", ), registrationPackage=dict( 
argstr="--registrationPackage %s", ), resultMRMLSceneFileName=dict( argstr="--resultMRMLSceneFileName %s", hash_files=False, ), resultStandardVolumeFileName=dict( argstr="--resultStandardVolumeFileName %s", extensions=None, ), resultVolumeFileName=dict( argstr="--resultVolumeFileName %s", hash_files=False, ), targetVolumeFileNames=dict( argstr="--targetVolumeFileNames %s...", ), taskPreProcessingSetting=dict( argstr="--taskPreProcessingSetting %s", ), verbose=dict( argstr="--verbose ", ), ) inputs = EMSegmentCommandLine.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_EMSegmentCommandLine_outputs(): output_map = dict( generateEmptyMRMLSceneAndQuit=dict( extensions=None, ), resultMRMLSceneFileName=dict( extensions=None, ), resultVolumeFileName=dict( extensions=None, ), ) outputs = EMSegmentCommandLine.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/segmentation/tests/test_auto_RobustStatisticsSegmenter.py000066400000000000000000000033231413403311400334310ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..specialized import RobustStatisticsSegmenter def test_RobustStatisticsSegmenter_inputs(): input_map = dict( args=dict( argstr="%s", ), curvatureWeight=dict( argstr="--curvatureWeight %f", ), environ=dict( nohash=True, usedefault=True, ), expectedVolume=dict( argstr="--expectedVolume %f", ), intensityHomogeneity=dict( argstr="--intensityHomogeneity %f", ), labelImageFileName=dict( argstr="%s", extensions=None, position=-2, ), labelValue=dict( argstr="--labelValue %d", ), maxRunningTime=dict( argstr="--maxRunningTime %f", ), originalImageFileName=dict( argstr="%s", extensions=None, position=-3, ), segmentedImageFileName=dict( argstr="%s", hash_files=False, 
position=-1, ), ) inputs = RobustStatisticsSegmenter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RobustStatisticsSegmenter_outputs(): output_map = dict( segmentedImageFileName=dict( extensions=None, position=-1, ), ) outputs = RobustStatisticsSegmenter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value test_auto_SimpleRegionGrowingSegmentation.py000066400000000000000000000033331413403311400344600ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/segmentation/tests# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..simpleregiongrowingsegmentation import SimpleRegionGrowingSegmentation def test_SimpleRegionGrowingSegmentation_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume=dict( argstr="%s", extensions=None, position=-2, ), iterations=dict( argstr="--iterations %d", ), labelvalue=dict( argstr="--labelvalue %d", ), multiplier=dict( argstr="--multiplier %f", ), neighborhood=dict( argstr="--neighborhood %d", ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), seed=dict( argstr="--seed %s...", ), smoothingIterations=dict( argstr="--smoothingIterations %d", ), timestep=dict( argstr="--timestep %f", ), ) inputs = SimpleRegionGrowingSegmentation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SimpleRegionGrowingSegmentation_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = SimpleRegionGrowingSegmentation.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/slicer/surface.py000066400000000000000000000420461413403311400217350ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class MergeModelsInputSpec(CommandLineInputSpec): Model1 = File(position=-3, desc="Model", exists=True, argstr="%s") Model2 = File(position=-2, desc="Model", exists=True, argstr="%s") ModelOutput = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Model", argstr="%s" ) class MergeModelsOutputSpec(TraitedSpec): ModelOutput = File(position=-1, desc="Model", exists=True) class MergeModels(SEMLikeCommandLine): """title: Merge Models category: Surface Models description: Merge the polydata from two input models and output a new model with the added polydata. Uses the vtkAppendPolyData filter. Works on .vtp and .vtk surface files. version: $Revision$ documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/MergeModels contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH), Daniel Haehn (SPL, BWH) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
""" input_spec = MergeModelsInputSpec output_spec = MergeModelsOutputSpec _cmd = "MergeModels " _outputs_filenames = {"ModelOutput": "ModelOutput.vtk"} class ModelToLabelMapInputSpec(CommandLineInputSpec): distance = traits.Float(desc="Sample distance", argstr="--distance %f") InputVolume = File(position=-3, desc="Input volume", exists=True, argstr="%s") surface = File(position=-2, desc="Model", exists=True, argstr="%s") OutputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="The label volume", argstr="%s", ) class ModelToLabelMapOutputSpec(TraitedSpec): OutputVolume = File(position=-1, desc="The label volume", exists=True) class ModelToLabelMap(SEMLikeCommandLine): """title: Model To Label Map category: Surface Models description: Intersects an input model with an reference volume and produces an output label map. version: 0.1.0.$Revision: 8643 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/PolyDataToLabelMap contributor: Nicole Aucoin (SPL, BWH), Xiaodong Tao (GE) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. """ input_spec = ModelToLabelMapInputSpec output_spec = ModelToLabelMapOutputSpec _cmd = "ModelToLabelMap " _outputs_filenames = {"OutputVolume": "OutputVolume.nii"} class GrayscaleModelMakerInputSpec(CommandLineInputSpec): InputVolume = File( position=-2, desc="Volume containing the input grayscale data.", exists=True, argstr="%s", ) OutputGeometry = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output that contains geometry model.", argstr="%s", ) threshold = traits.Float( desc="Grayscale threshold of isosurface. 
The resulting surface of triangles separates the volume into voxels that lie above (inside) and below (outside) the threshold.", argstr="--threshold %f", ) name = traits.Str(desc="Name to use for this model.", argstr="--name %s") smooth = traits.Int( desc="Number of smoothing iterations. If 0, no smoothing will be done.", argstr="--smooth %d", ) decimate = traits.Float( desc="Target reduction during decimation, as a decimal percentage reduction in the number of polygons. If 0, no decimation will be done.", argstr="--decimate %f", ) splitnormals = traits.Bool( desc="Splitting normals is useful for visualizing sharp features. However it creates holes in surfaces which affect measurements", argstr="--splitnormals ", ) pointnormals = traits.Bool( desc="Calculate the point normals? Calculated point normals make the surface appear smooth. Without point normals, the surface will appear faceted.", argstr="--pointnormals ", ) class GrayscaleModelMakerOutputSpec(TraitedSpec): OutputGeometry = File( position=-1, desc="Output that contains geometry model.", exists=True ) class GrayscaleModelMaker(SEMLikeCommandLine): """title: Grayscale Model Maker category: Surface Models description: Create 3D surface models from grayscale data. This module uses Marching Cubes to create an isosurface at a given threshold. The resulting surface consists of triangles that separate a volume into regions below and above the threshold. The resulting surface can be smoothed and decimated. This model works on continuous data while the module Model Maker works on labeled (or discrete) data. version: 3.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/GrayscaleModelMaker license: slicer3 contributor: Nicole Aucoin (SPL, BWH), Bill Lorensen (GE) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. 
""" input_spec = GrayscaleModelMakerInputSpec output_spec = GrayscaleModelMakerOutputSpec _cmd = "GrayscaleModelMaker " _outputs_filenames = {"OutputGeometry": "OutputGeometry.vtk"} class ProbeVolumeWithModelInputSpec(CommandLineInputSpec): InputVolume = File( position=-3, desc="Volume to use to 'paint' the model", exists=True, argstr="%s" ) InputModel = File(position=-2, desc="Input model", exists=True, argstr="%s") OutputModel = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Output 'painted' model", argstr="%s", ) class ProbeVolumeWithModelOutputSpec(TraitedSpec): OutputModel = File(position=-1, desc="Output 'painted' model", exists=True) class ProbeVolumeWithModel(SEMLikeCommandLine): """title: Probe Volume With Model category: Surface Models description: Paint a model by a volume (using vtkProbeFilter). version: 0.1.0.$Revision: 1892 $(alpha) documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ProbeVolumeWithModel contributor: Lauren O'Donnell (SPL, BWH) acknowledgements: BWH, NCIGT/LMI """ input_spec = ProbeVolumeWithModelInputSpec output_spec = ProbeVolumeWithModelOutputSpec _cmd = "ProbeVolumeWithModel " _outputs_filenames = {"OutputModel": "OutputModel.vtk"} class LabelMapSmoothingInputSpec(CommandLineInputSpec): labelToSmooth = traits.Int( desc="The label to smooth. All others will be ignored. 
If no label is selected by the user, the maximum label in the image is chosen by default.", argstr="--labelToSmooth %d", ) numberOfIterations = traits.Int( desc="The number of iterations of the level set AntiAliasing algorithm", argstr="--numberOfIterations %d", ) maxRMSError = traits.Float(desc="The maximum RMS error.", argstr="--maxRMSError %f") gaussianSigma = traits.Float( desc="The standard deviation of the Gaussian kernel", argstr="--gaussianSigma %f", ) inputVolume = File( position=-2, desc="Input label map to smooth", exists=True, argstr="%s" ) outputVolume = traits.Either( traits.Bool, File(), position=-1, hash_files=False, desc="Smoothed label map", argstr="%s", ) class LabelMapSmoothingOutputSpec(TraitedSpec): outputVolume = File(position=-1, desc="Smoothed label map", exists=True) class LabelMapSmoothing(SEMLikeCommandLine): """title: Label Map Smoothing category: Surface Models description: This filter smoothes a binary label map. With a label map as input, this filter runs an anti-alising algorithm followed by a Gaussian smoothing algorithm. The output is a smoothed label map. version: 1.0 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/LabelMapSmoothing contributor: Dirk Padfield (GE), Josh Cates (Utah), Ross Whitaker (Utah) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. This filter is based on work developed at the University of Utah, and implemented at GE Research. """ input_spec = LabelMapSmoothingInputSpec output_spec = LabelMapSmoothingOutputSpec _cmd = "LabelMapSmoothing " _outputs_filenames = {"outputVolume": "outputVolume.nii"} class ModelMakerInputSpec(CommandLineInputSpec): InputVolume = File( position=-1, desc="Input label map. 
The Input Volume drop down menu is populated with the label map volumes that are present in the scene, select one from which to generate models.", exists=True, argstr="%s", ) color = File( desc="Color table to make labels to colors and objects", exists=True, argstr="--color %s", ) modelSceneFile = traits.Either( traits.Bool, InputMultiPath(File()), hash_files=False, desc="Generated models, under a model hierarchy node. Models are imported into Slicer under a model hierarchy node, and their colors are set by the color table associated with the input label map volume. The model hierarchy node must be created before running the model maker, by selecting Create New ModelHierarchy from the Models drop down menu. If you're running from the command line, a model hierarchy node in a new mrml scene will be created for you.", argstr="--modelSceneFile %s...", ) name = traits.Str( desc="Name to use for this model. Any text entered in the entry box will be the starting string for the created model file names. The label number and the color name will also be part of the file name. If making multiple models, use this as a prefix to the label and color name.", argstr="--name %s", ) generateAll = traits.Bool( desc="Generate models for all labels in the input volume. select this option if you want to create all models that correspond to all values in a labelmap volume (using the Joint Smoothing option below is useful with this option). Ignores Labels, Start Label, End Label settings. Skips label 0.", argstr="--generateAll ", ) labels = InputMultiPath( traits.Int, desc="A comma separated list of label values from which to make models. f you specify a list of Labels, it will override any start/end label settings. If you click Generate All Models it will override the list of labels and any start/end label settings.", sep=",", argstr="--labels %s", ) start = traits.Int( desc="If you want to specify a continuous range of labels from which to generate models, enter the lower label here. 
Voxel value from which to start making models. Used instead of the label list to specify a range (make sure the label list is empty or it will over ride this).", argstr="--start %d", ) end = traits.Int( desc="If you want to specify a continuous range of labels from which to generate models, enter the higher label here. Voxel value up to which to continue making models. Skip any values with zero voxels.", argstr="--end %d", ) skipUnNamed = traits.Bool( desc="Select this to not generate models from labels that do not have names defined in the color look up table associated with the input label map. If true, only models which have an entry in the color table will be generated. If false, generate all models that exist within the label range.", argstr="--skipUnNamed ", ) jointsmooth = traits.Bool( desc="This will ensure that all resulting models fit together smoothly, like jigsaw puzzle pieces. Otherwise the models will be smoothed independently and may overlap.", argstr="--jointsmooth ", ) smooth = traits.Int( desc="Here you can set the number of smoothing iterations for Laplacian smoothing, or the degree of the polynomial approximating the windowed Sinc function. Use 0 if you wish no smoothing. ", argstr="--smooth %d", ) filtertype = traits.Enum( "Sinc", "Laplacian", desc="You can control the type of smoothing done on the models by selecting a filter type of either Sinc or Laplacian.", argstr="--filtertype %s", ) decimate = traits.Float( desc="Chose the target reduction in number of polygons as a decimal percentage (between 0 and 1) of the number of polygons. Specifies the percentage of triangles to be removed. For example, 0.1 means 10% reduction and 0.9 means 90% reduction.", argstr="--decimate %f", ) splitnormals = traits.Bool( desc="Splitting normals is useful for visualizing sharp features. 
However it creates holes in surfaces which affects measurements.", argstr="--splitnormals ", ) pointnormals = traits.Bool( desc="Turn this flag on if you wish to calculate the normal vectors for the points.", argstr="--pointnormals ", ) pad = traits.Bool( desc="Pad the input volume with zero value voxels on all 6 faces in order to ensure the production of closed surfaces. Sets the origin translation and extent translation so that the models still line up with the unpadded input volume.", argstr="--pad ", ) saveIntermediateModels = traits.Bool( desc="You can save a copy of the models after each of the intermediate steps (marching cubes, smoothing, and decimation if not joint smoothing, otherwise just after decimation). These intermediate models are not saved in the mrml file, you have to load them manually after turning off deleting temporary files in they python console (View ->Python Interactor) using the following command slicer.modules.modelmaker.cliModuleLogic().DeleteTemporaryFilesOff().", argstr="--saveIntermediateModels ", ) debug = traits.Bool( desc="turn this flag on in order to see debugging output (look in the Error Log window that is accessed via the View menu)", argstr="--debug ", ) class ModelMakerOutputSpec(TraitedSpec): modelSceneFile = OutputMultiPath( File(exists=True), desc="Generated models, under a model hierarchy node. Models are imported into Slicer under a model hierarchy node, and their colors are set by the color table associated with the input label map volume. The model hierarchy node must be created before running the model maker, by selecting Create New ModelHierarchy from the Models drop down menu. If you're running from the command line, a model hierarchy node in a new mrml scene will be created for you.", ) class ModelMaker(SEMLikeCommandLine): """title: Model Maker category: Surface Models description: Create 3D surface models from segmented data.

Models are imported into Slicer under a model hierarchy node in a MRML scene. The model colors are set by the color table associated with the input volume (these colours will only be visible if you load the model scene file).

Create Multiple:

If you specify a list of Labels, it will over ride any start/end label settings.

If you clickGenerate Allit will over ride the list of lables and any start/end label settings.

Model Maker Settings:

You can set the number of smoothing iterations, target reduction in number of polygons (decimal percentage). Use 0 and 1 if you wish no smoothing nor decimation.
You can set the flags to split normals or generate point normals in this pane as well.
You can save a copy of the models after intermediate steps (marching cubes, smoothing, and decimation if not joint smoothing, otherwise just after decimation); these models are not saved in the mrml file, turn off deleting temporary files first in the python window:
slicer.modules.modelmaker.cliModuleLogic().DeleteTemporaryFilesOff()

version: 4.1 documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ModelMaker license: slicer4 contributor: Nicole Aucoin (SPL, BWH), Ron Kikinis (SPL, BWH), Bill Lorensen (GE) acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. """ input_spec = ModelMakerInputSpec output_spec = ModelMakerOutputSpec _cmd = "ModelMaker " _outputs_filenames = {"modelSceneFile": "modelSceneFile.mrml"} nipype-1.7.0/nipype/interfaces/slicer/tests/000077500000000000000000000000001413403311400210675ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/slicer/tests/__init__.py000066400000000000000000000000301413403311400231710ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/slicer/tests/test_auto_DicomToNrrdConverter.py000066400000000000000000000030301413403311400276000ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..converters import DicomToNrrdConverter def test_DicomToNrrdConverter_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputDicomDirectory=dict( argstr="--inputDicomDirectory %s", ), outputDirectory=dict( argstr="--outputDirectory %s", hash_files=False, ), outputVolume=dict( argstr="--outputVolume %s", ), smallGradientThreshold=dict( argstr="--smallGradientThreshold %f", ), useBMatrixGradientDirections=dict( argstr="--useBMatrixGradientDirections ", ), useIdentityMeaseurementFrame=dict( argstr="--useIdentityMeaseurementFrame ", ), writeProtocolGradientsFile=dict( argstr="--writeProtocolGradientsFile ", ), ) inputs = DicomToNrrdConverter.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DicomToNrrdConverter_outputs(): output_map = dict( 
outputDirectory=dict(), ) outputs = DicomToNrrdConverter.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/tests/test_auto_EMSegmentTransformToNewFormat.py000066400000000000000000000023501413403311400313760ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utilities import EMSegmentTransformToNewFormat def test_EMSegmentTransformToNewFormat_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputMRMLFileName=dict( argstr="--inputMRMLFileName %s", extensions=None, ), outputMRMLFileName=dict( argstr="--outputMRMLFileName %s", hash_files=False, ), templateFlag=dict( argstr="--templateFlag ", ), ) inputs = EMSegmentTransformToNewFormat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_EMSegmentTransformToNewFormat_outputs(): output_map = dict( outputMRMLFileName=dict( extensions=None, ), ) outputs = EMSegmentTransformToNewFormat.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/tests/test_auto_GrayscaleModelMaker.py000066400000000000000000000030421413403311400274020ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..surface import GrayscaleModelMaker def test_GrayscaleModelMaker_inputs(): input_map = dict( InputVolume=dict( argstr="%s", extensions=None, position=-2, ), OutputGeometry=dict( argstr="%s", hash_files=False, position=-1, ), args=dict( argstr="%s", ), decimate=dict( argstr="--decimate %f", ), environ=dict( nohash=True, usedefault=True, ), name=dict( argstr="--name %s", ), pointnormals=dict( argstr="--pointnormals ", ), 
smooth=dict( argstr="--smooth %d", ), splitnormals=dict( argstr="--splitnormals ", ), threshold=dict( argstr="--threshold %f", ), ) inputs = GrayscaleModelMaker.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GrayscaleModelMaker_outputs(): output_map = dict( OutputGeometry=dict( extensions=None, position=-1, ), ) outputs = GrayscaleModelMaker.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/tests/test_auto_LabelMapSmoothing.py000066400000000000000000000026641413403311400271050ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..surface import LabelMapSmoothing def test_LabelMapSmoothing_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), gaussianSigma=dict( argstr="--gaussianSigma %f", ), inputVolume=dict( argstr="%s", extensions=None, position=-2, ), labelToSmooth=dict( argstr="--labelToSmooth %d", ), maxRMSError=dict( argstr="--maxRMSError %f", ), numberOfIterations=dict( argstr="--numberOfIterations %d", ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), ) inputs = LabelMapSmoothing.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_LabelMapSmoothing_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = LabelMapSmoothing.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/tests/test_auto_MergeModels.py000066400000000000000000000022701413403311400257340ustar00rootroot00000000000000# 
AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..surface import MergeModels def test_MergeModels_inputs(): input_map = dict( Model1=dict( argstr="%s", extensions=None, position=-3, ), Model2=dict( argstr="%s", extensions=None, position=-2, ), ModelOutput=dict( argstr="%s", hash_files=False, position=-1, ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), ) inputs = MergeModels.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MergeModels_outputs(): output_map = dict( ModelOutput=dict( extensions=None, position=-1, ), ) outputs = MergeModels.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/tests/test_auto_ModelMaker.py000066400000000000000000000042311413403311400255500ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..surface import ModelMaker def test_ModelMaker_inputs(): input_map = dict( InputVolume=dict( argstr="%s", extensions=None, position=-1, ), args=dict( argstr="%s", ), color=dict( argstr="--color %s", extensions=None, ), debug=dict( argstr="--debug ", ), decimate=dict( argstr="--decimate %f", ), end=dict( argstr="--end %d", ), environ=dict( nohash=True, usedefault=True, ), filtertype=dict( argstr="--filtertype %s", ), generateAll=dict( argstr="--generateAll ", ), jointsmooth=dict( argstr="--jointsmooth ", ), labels=dict( argstr="--labels %s", sep=",", ), modelSceneFile=dict( argstr="--modelSceneFile %s...", hash_files=False, ), name=dict( argstr="--name %s", ), pad=dict( argstr="--pad ", ), pointnormals=dict( argstr="--pointnormals ", ), saveIntermediateModels=dict( argstr="--saveIntermediateModels ", ), skipUnNamed=dict( argstr="--skipUnNamed ", ), smooth=dict( argstr="--smooth %d", ), splitnormals=dict( 
argstr="--splitnormals ", ), start=dict( argstr="--start %d", ), ) inputs = ModelMaker.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ModelMaker_outputs(): output_map = dict( modelSceneFile=dict(), ) outputs = ModelMaker.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/tests/test_auto_ModelToLabelMap.py000066400000000000000000000024321413403311400264720ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..surface import ModelToLabelMap def test_ModelToLabelMap_inputs(): input_map = dict( InputVolume=dict( argstr="%s", extensions=None, position=-3, ), OutputVolume=dict( argstr="%s", hash_files=False, position=-1, ), args=dict( argstr="%s", ), distance=dict( argstr="--distance %f", ), environ=dict( nohash=True, usedefault=True, ), surface=dict( argstr="%s", extensions=None, position=-2, ), ) inputs = ModelToLabelMap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ModelToLabelMap_outputs(): output_map = dict( OutputVolume=dict( extensions=None, position=-1, ), ) outputs = ModelToLabelMap.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/tests/test_auto_OrientScalarVolume.py000066400000000000000000000023031413403311400273040ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..converters import OrientScalarVolume def test_OrientScalarVolume_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), inputVolume1=dict( 
argstr="%s", extensions=None, position=-2, ), orientation=dict( argstr="--orientation %s", ), outputVolume=dict( argstr="%s", hash_files=False, position=-1, ), ) inputs = OrientScalarVolume.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_OrientScalarVolume_outputs(): output_map = dict( outputVolume=dict( extensions=None, position=-1, ), ) outputs = OrientScalarVolume.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/tests/test_auto_ProbeVolumeWithModel.py000066400000000000000000000023561413403311400276120ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..surface import ProbeVolumeWithModel def test_ProbeVolumeWithModel_inputs(): input_map = dict( InputModel=dict( argstr="%s", extensions=None, position=-2, ), InputVolume=dict( argstr="%s", extensions=None, position=-3, ), OutputModel=dict( argstr="%s", hash_files=False, position=-1, ), args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), ) inputs = ProbeVolumeWithModel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ProbeVolumeWithModel_outputs(): output_map = dict( OutputModel=dict( extensions=None, position=-1, ), ) outputs = ProbeVolumeWithModel.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/tests/test_auto_SlicerCommandLine.py000066400000000000000000000010041413403311400270530ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import SlicerCommandLine def 
test_SlicerCommandLine_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), ) inputs = SlicerCommandLine.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/slicer/utilities.py000066400000000000000000000040241413403311400223120ustar00rootroot00000000000000# -*- coding: utf-8 -*- # -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" from nipype.interfaces.base import ( CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath, ) import os class EMSegmentTransformToNewFormatInputSpec(CommandLineInputSpec): inputMRMLFileName = File( desc="Active MRML scene that contains EMSegment algorithm parameters in the format before 3.6.3 - please include absolute file name in path.", exists=True, argstr="--inputMRMLFileName %s", ) outputMRMLFileName = traits.Either( traits.Bool, File(), hash_files=False, desc="Write out the MRML scene after transformation to format 3.6.3 has been made. - has to be in the same directory as the input MRML file due to Slicer Core bug - please include absolute file name in path ", argstr="--outputMRMLFileName %s", ) templateFlag = traits.Bool( desc="Set to true if the transformed mrml file should be used as template file ", argstr="--templateFlag ", ) class EMSegmentTransformToNewFormatOutputSpec(TraitedSpec): outputMRMLFileName = File( desc="Write out the MRML scene after transformation to format 3.6.3 has been made. 
- has to be in the same directory as the input MRML file due to Slicer Core bug - please include absolute file name in path ", exists=True, ) class EMSegmentTransformToNewFormat(SEMLikeCommandLine): """title: Transform MRML Files to New EMSegmenter Standard category: Utilities description: Transform MRML Files to New EMSegmenter Standard """ input_spec = EMSegmentTransformToNewFormatInputSpec output_spec = EMSegmentTransformToNewFormatOutputSpec _cmd = "EMSegmentTransformToNewFormat " _outputs_filenames = {"outputMRMLFileName": "outputMRMLFileName.mrml"} nipype-1.7.0/nipype/interfaces/spm/000077500000000000000000000000001413403311400172435ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/spm/__init__.py000066400000000000000000000016621413403311400213610ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """SPM is a software package for the analysis of brain imaging data sequences.""" from .base import Info, SPMCommand, logger, no_spm, scans_for_fname, scans_for_fnames from .preprocess import ( FieldMap, SliceTiming, Realign, RealignUnwarp, Coregister, Normalize, Normalize12, Segment, Smooth, NewSegment, MultiChannelNewSegment, DARTEL, DARTELNorm2MNI, CreateWarped, VBMSegment, ) from .model import ( Level1Design, EstimateModel, EstimateContrast, Threshold, OneSampleTTestDesign, TwoSampleTTestDesign, PairedTTestDesign, MultipleRegressionDesign, ) from .utils import ( Analyze2nii, CalcCoregAffine, ApplyTransform, Reslice, ApplyInverseDeformation, ResliceToReference, DicomImport, ) nipype-1.7.0/nipype/interfaces/spm/base.py000066400000000000000000000517011413403311400205330ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The spm module provides basic functions for interfacing with SPM tools. 
In order to use the standalone MCR version of spm, you need to ensure that the following commands are executed at the beginning of your script:: from nipype.interfaces import spm matlab_cmd = '/path/to/run_spm8.sh /path/to/Compiler_Runtime/v713/ script' spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True) you can test by calling:: spm.SPMCommand().version """ # Standard library imports import os from copy import deepcopy # Third-party imports from nibabel import load import numpy as np # Local imports from ... import logging from ...utils import spm_docs as sd from ..base import ( BaseInterface, traits, isdefined, InputMultiPath, BaseInterfaceInputSpec, Directory, Undefined, ImageFile, PackageInfo, ) from ..base.traits_extension import NoDefaultSpecified from ..matlab import MatlabCommand from ...external.due import due, Doi, BibTeX __docformat__ = "restructuredtext" logger = logging.getLogger("nipype.interface") def func_is_3d(in_file): """Checks if input functional files are 3d.""" if isinstance(in_file, list): return func_is_3d(in_file[0]) else: img = load(in_file) shape = img.shape if len(shape) == 3 or (len(shape) == 4 and shape[3] == 1): return True else: return False def get_first_3dfile(in_files): if not func_is_3d(in_files): return None if isinstance(in_files[0], list): return in_files[0] return in_files def scans_for_fname(fname): """Reads a nifti file and converts it to a numpy array storing individual nifti volumes. Opens images so will fail if they are not found. 
""" if isinstance(fname, list): scans = np.zeros((len(fname),), dtype=object) for sno, f in enumerate(fname): scans[sno] = "%s,1" % f return scans img = load(fname) if len(img.shape) == 3: return np.array(("%s,1" % fname,), dtype=object) else: n_scans = img.shape[3] scans = np.zeros((n_scans,), dtype=object) for sno in range(n_scans): scans[sno] = "%s,%d" % (fname, sno + 1) return scans def scans_for_fnames(fnames, keep4d=False, separate_sessions=False): """Converts a list of files to a concatenated numpy array for each volume. keep4d : boolean keeps the entries of the numpy array as 4d files instead of extracting the individual volumes. separate_sessions: boolean if 4d nifti files are being used, then separate_sessions ensures a cell array per session is created in the structure. """ flist = None if not isinstance(fnames[0], list): if func_is_3d(fnames[0]): fnames = [fnames] if separate_sessions or keep4d: flist = np.zeros((len(fnames),), dtype=object) for i, f in enumerate(fnames): if separate_sessions: if keep4d: if isinstance(f, list): flist[i] = np.array(f, dtype=object) else: flist[i] = np.array([f], dtype=object) else: flist[i] = scans_for_fname(f) else: if keep4d: flist[i] = f else: scans = scans_for_fname(f) if flist is None: flist = scans else: flist = np.concatenate((flist, scans)) return flist class Info(PackageInfo): """Handles SPM version information If you use `SPMCommand.set_mlab_paths` to set alternate entries for matlab_cmd, paths, and use_mcr, then you will need to use the same entries to any call in the Info class to maintain memoization. Otherwise, it will default to the parameters in the `getinfo` function below. 
""" _path = None _name = None _command = None _paths = None _version = None @classmethod def path(klass, matlab_cmd=None, paths=None, use_mcr=None): klass.getinfo(matlab_cmd, paths, use_mcr) return klass._path @classmethod def version(klass, matlab_cmd=None, paths=None, use_mcr=None): klass.getinfo(matlab_cmd, paths, use_mcr) return klass._version @classmethod def name(klass, matlab_cmd=None, paths=None, use_mcr=None): klass.getinfo(matlab_cmd, paths, use_mcr) return klass._name @classmethod def getinfo(klass, matlab_cmd=None, paths=None, use_mcr=None): """ Returns the path to the SPM directory in the Matlab path If path not found, returns None. Parameters ---------- matlab_cmd: str Sets the default matlab command. If None, the value of the environment variable SPMMCRCMD will be used if set and use_mcr is True or the environment variable FORCE_SPMMCR is set. If one of FORCE_SPMMCR or SPMMCRCMD is not set, the existence of the environment variable MATLABCMD is checked and its value is used as the matlab command if possible. If none of the above was successful, the fallback value of 'matlab -nodesktop -nosplash' will be used. paths : str Add paths to matlab session use_mcr : bool Whether to use the MATLAB Common Runtime. In this case, the matlab_cmd is expected to be a valid MCR call. Returns ------- spm_path : string representing path to SPM directory returns None of path not found """ use_mcr = use_mcr or "FORCE_SPMMCR" in os.environ matlab_cmd = matlab_cmd or ( (use_mcr and os.getenv("SPMMCRCMD")) or os.getenv("MATLABCMD", "matlab -nodesktop -nosplash") ) if ( klass._name and klass._path and klass._version and klass._command == matlab_cmd and klass._paths == paths ): return {"name": klass._name, "path": klass._path, "release": klass._version} logger.debug("matlab command or path has changed. 
recomputing version.") mlab = MatlabCommand(matlab_cmd=matlab_cmd, resource_monitor=False) mlab.inputs.mfile = False if paths: mlab.inputs.paths = paths if use_mcr: mlab.inputs.nodesktop = Undefined mlab.inputs.nosplash = Undefined mlab.inputs.single_comp_thread = Undefined mlab.inputs.mfile = True mlab.inputs.uses_mcr = True mlab.inputs.script = """ if isempty(which('spm')), throw(MException('SPMCheck:NotFound','SPM not in matlab path')); end; spm_path = spm('dir'); [name, version] = spm('ver'); fprintf(1, 'NIPYPE path:%s|name:%s|release:%s', spm_path, name, version); exit; """ try: out = mlab.run() except (IOError, RuntimeError) as e: # if no Matlab at all -- exception could be raised # No Matlab -- no spm logger.debug("%s", e) klass._version = None klass._path = None klass._name = None klass._command = matlab_cmd klass._paths = paths return None out = sd._strip_header(out.runtime.stdout) out_dict = {} for part in out.split("|"): key, val = part.split(":") out_dict[key] = val klass._version = out_dict["release"] klass._path = out_dict["path"] klass._name = out_dict["name"] klass._command = matlab_cmd klass._paths = paths return out_dict def no_spm(): """Checks if SPM is NOT installed used with pytest.mark.skipif decorator to skip tests that will fail if spm is not installed""" if "NIPYPE_NO_MATLAB" in os.environ or Info.version() is None: return True else: return False class SPMCommandInputSpec(BaseInterfaceInputSpec): matlab_cmd = traits.Str(desc="matlab command to use") paths = InputMultiPath(Directory(), desc="Paths to add to matlabpath") mfile = traits.Bool(True, desc="Run m-code using m-file", usedefault=True) use_mcr = traits.Bool(desc="Run m-code using SPM MCR") use_v8struct = traits.Bool( True, min_ver="8", usedefault=True, desc=("Generate SPM8 and higher " "compatible jobs"), ) class SPMCommand(BaseInterface): """Extends `BaseInterface` class to implement SPM specific interfaces. 
WARNING: Pseudo prototype class, meant to be subclassed """ input_spec = SPMCommandInputSpec _additional_metadata = ["field"] _jobtype = "basetype" _jobname = "basename" _matlab_cmd = None _paths = None _use_mcr = None _references = [ { "entry": BibTeX( "@book{FrackowiakFristonFrithDolanMazziotta1997," "author={R.S.J. Frackowiak, K.J. Friston, C.D. Frith, R.J. Dolan, and J.C. Mazziotta}," "title={Human Brain Function}," "publisher={Academic Press USA}," "year={1997}," "}" ), "description": "The fundamental text on Statistical Parametric Mapping (SPM)", # 'path': "nipype.interfaces.spm", "tags": ["implementation"], } ] def __init__(self, **inputs): super(SPMCommand, self).__init__(**inputs) self.inputs.on_trait_change( self._matlab_cmd_update, ["matlab_cmd", "mfile", "paths", "use_mcr"] ) self._find_mlab_cmd_defaults() self._check_mlab_inputs() self._matlab_cmd_update() @classmethod def set_mlab_paths(cls, matlab_cmd=None, paths=None, use_mcr=None): cls._matlab_cmd = matlab_cmd cls._paths = paths cls._use_mcr = use_mcr info_dict = Info.getinfo(matlab_cmd=matlab_cmd, paths=paths, use_mcr=use_mcr) def _find_mlab_cmd_defaults(self): # check if the user has set environment variables to enforce # the standalone (MCR) version of SPM if self._use_mcr or "FORCE_SPMMCR" in os.environ: self._use_mcr = True if self._matlab_cmd is None: try: self._matlab_cmd = os.environ["SPMMCRCMD"] except KeyError: pass def _matlab_cmd_update(self): # MatlabCommand has to be created here, # because matlab_cmd is not a proper input # and can be set only during init self.mlab = MatlabCommand( matlab_cmd=self.inputs.matlab_cmd, mfile=self.inputs.mfile, paths=self.inputs.paths, resource_monitor=False, ) self.mlab.inputs.script_file = ( "pyscript_%s.m" % self.__class__.__name__.split(".")[-1].lower() ) if isdefined(self.inputs.use_mcr) and self.inputs.use_mcr: self.mlab.inputs.nodesktop = Undefined self.mlab.inputs.nosplash = Undefined self.mlab.inputs.single_comp_thread = Undefined 
self.mlab.inputs.uses_mcr = True self.mlab.inputs.mfile = True @property def version(self): info_dict = Info.getinfo( matlab_cmd=self.inputs.matlab_cmd, paths=self.inputs.paths, use_mcr=self.inputs.use_mcr, ) if info_dict: return "%s.%s" % (info_dict["name"].split("SPM")[-1], info_dict["release"]) @property def jobtype(self): return self._jobtype @property def jobname(self): return self._jobname def _check_mlab_inputs(self): if not isdefined(self.inputs.matlab_cmd) and self._matlab_cmd: self.inputs.matlab_cmd = self._matlab_cmd if not isdefined(self.inputs.paths) and self._paths: self.inputs.paths = self._paths if not isdefined(self.inputs.use_mcr) and self._use_mcr: self.inputs.use_mcr = self._use_mcr def _run_interface(self, runtime): """Executes the SPM function using MATLAB.""" self.mlab.inputs.script = self._make_matlab_command( deepcopy(self._parse_inputs()) ) results = self.mlab.run() runtime.returncode = results.runtime.returncode if self.mlab.inputs.uses_mcr: if "Skipped" in results.runtime.stdout: self.raise_exception(runtime) runtime.stdout = results.runtime.stdout runtime.stderr = results.runtime.stderr runtime.merged = results.runtime.merged return runtime def _list_outputs(self): """Determine the expected outputs based on inputs.""" raise NotImplementedError def _format_arg(self, opt, spec, val): """Convert input to appropriate format for SPM.""" if spec.is_trait_type(traits.Bool): return int(val) elif spec.is_trait_type(traits.Tuple): return list(val) else: return val def _parse_inputs(self, skip=()): spmdict = {} metadata = dict(field=lambda t: t is not None) for name, spec in list(self.inputs.traits(**metadata).items()): if skip and name in skip: continue value = getattr(self.inputs, name) if not isdefined(value): continue field = spec.field if "." 
in field: fields = field.split(".") dictref = spmdict for f in fields[:-1]: if f not in list(dictref.keys()): dictref[f] = {} dictref = dictref[f] dictref[fields[-1]] = self._format_arg(name, spec, value) else: spmdict[field] = self._format_arg(name, spec, value) return [spmdict] def _reformat_dict_for_savemat(self, contents): """Encloses a dict representation within hierarchical lists. In order to create an appropriate SPM job structure, a Python dict storing the job needs to be modified so that each dict embedded in dict needs to be enclosed as a list element. Examples -------- >>> a = SPMCommand()._reformat_dict_for_savemat(dict(a=1, ... b=dict(c=2, d=3))) >>> a == [{'a': 1, 'b': [{'c': 2, 'd': 3}]}] True """ newdict = {} try: for key, value in list(contents.items()): if isinstance(value, dict): if value: newdict[key] = self._reformat_dict_for_savemat(value) # if value is None, skip else: newdict[key] = value return [newdict] except TypeError: print("Requires dict input") def _generate_job(self, prefix="", contents=None): """Recursive function to generate spm job specification as a string Parameters ---------- prefix : string A string that needs to get contents : dict A non-tuple Python structure containing spm job information gets converted to an appropriate sequence of matlab commands. 
""" jobstring = "" if contents is None: return jobstring if isinstance(contents, list): for i, value in enumerate(contents): if prefix.endswith(")"): newprefix = "%s,%d)" % (prefix[:-1], i + 1) else: newprefix = "%s(%d)" % (prefix, i + 1) jobstring += self._generate_job(newprefix, value) return jobstring if isinstance(contents, dict): for key, value in list(contents.items()): newprefix = "%s.%s" % (prefix, key) jobstring += self._generate_job(newprefix, value) return jobstring if isinstance(contents, np.ndarray): if contents.dtype == np.dtype(object): if prefix: jobstring += "%s = {...\n" % (prefix) else: jobstring += "{...\n" for i, val in enumerate(contents): if isinstance(val, np.ndarray): jobstring += self._generate_job(prefix=None, contents=val) elif isinstance(val, list): items_format = [] for el in val: items_format += [ "{}" if not isinstance(el, (str, bytes)) else "'{}'" ] val_format = ", ".join(items_format).format jobstring += "[{}];...\n".format(val_format(*val)) elif isinstance(val, (str, bytes)): jobstring += "'{}';...\n".format(val) else: jobstring += "%s;...\n" % str(val) jobstring += "};\n" else: for i, val in enumerate(contents): for field in val.dtype.fields: if prefix: newprefix = "%s(%d).%s" % (prefix, i + 1, field) else: newprefix = "(%d).%s" % (i + 1, field) jobstring += self._generate_job(newprefix, val[field]) return jobstring if isinstance(contents, (str, bytes)): jobstring += "%s = '%s';\n" % (prefix, contents) return jobstring jobstring += "%s = %s;\n" % (prefix, str(contents)) return jobstring def _make_matlab_command(self, contents, postscript=None): """Generates a mfile to build job structure Parameters ---------- contents : list a list of dicts generated by _parse_inputs in each subclass cwd : string default os.getcwd() Returns ------- mscript : string contents of a script called by matlab """ cwd = os.getcwd() mscript = """ %% Generated by nipype.interfaces.spm if isempty(which('spm')), throw(MException('SPMCheck:NotFound', 'SPM not 
in matlab path')); end [name, version] = spm('ver'); fprintf('SPM version: %s Release: %s\\n',name, version); fprintf('SPM path: %s\\n', which('spm')); spm('Defaults','fMRI'); if strcmp(name, 'SPM8') || strcmp(name(1:5), 'SPM12'), spm_jobman('initcfg'); spm_get_defaults('cmdline', 1); end\n """ if self.mlab.inputs.mfile: if isdefined(self.inputs.use_v8struct) and self.inputs.use_v8struct: mscript += self._generate_job( "jobs{1}.spm.%s.%s" % (self.jobtype, self.jobname), contents[0] ) else: if self.jobname in [ "st", "smooth", "preproc", "preproc8", "fmri_spec", "fmri_est", "factorial_design", "defs", ]: # parentheses mscript += self._generate_job( "jobs{1}.%s{1}.%s(1)" % (self.jobtype, self.jobname), contents[0], ) else: # curly brackets mscript += self._generate_job( "jobs{1}.%s{1}.%s{1}" % (self.jobtype, self.jobname), contents[0], ) else: from scipy.io import savemat jobdef = { "jobs": [ { self.jobtype: [ {self.jobname: self.reformat_dict_for_savemat(contents[0])} ] } ] } savemat(os.path.join(cwd, "pyjobs_%s.mat" % self.jobname), jobdef) mscript += "load pyjobs_%s;\n\n" % self.jobname mscript += """ spm_jobman(\'run\', jobs);\n """ if self.inputs.use_mcr: mscript += """ if strcmp(name, 'SPM8') || strcmp(name(1:5), 'SPM12'), close(\'all\', \'force\'); end; """ if postscript is not None: mscript += postscript return mscript class ImageFileSPM(ImageFile): """Defines a trait whose value must be a NIfTI file.""" def __init__( self, value=NoDefaultSpecified, exists=False, resolve=False, **metadata ): """Create an ImageFileSPM trait.""" super(ImageFileSPM, self).__init__( value=value, exists=exists, types=["nifti1", "nifti2"], allow_compressed=False, resolve=resolve, **metadata ) nipype-1.7.0/nipype/interfaces/spm/model.py000066400000000000000000001203431413403311400207200ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The spm module provides basic 
functions for interfacing with matlab and spm to access spm tools. """ # Standard library imports import os from glob import glob # Third-party imports import numpy as np # Local imports from ... import logging from ...utils.filemanip import ensure_list, simplify_list, split_filename from ..base import ( Bunch, traits, TraitedSpec, File, Directory, OutputMultiPath, InputMultiPath, isdefined, ) from .base import SPMCommand, SPMCommandInputSpec, scans_for_fnames, ImageFileSPM __docformat__ = "restructuredtext" iflogger = logging.getLogger("nipype.interface") class Level1DesignInputSpec(SPMCommandInputSpec): spm_mat_dir = Directory( exists=True, field="dir", desc="directory to store SPM.mat file (opt)" ) timing_units = traits.Enum( "secs", "scans", field="timing.units", desc="units for specification of onsets", mandatory=True, ) interscan_interval = traits.Float( field="timing.RT", desc="Interscan interval in secs", mandatory=True ) microtime_resolution = traits.Int( field="timing.fmri_t", desc=("Number of time-bins per scan in secs (opt)") ) microtime_onset = traits.Float( field="timing.fmri_t0", desc=("The onset/time-bin in seconds for alignment (opt)"), ) session_info = traits.Any( field="sess", desc=("Session specific information generated by ``modelgen.SpecifyModel``"), mandatory=True, ) factor_info = traits.List( traits.Dict(traits.Enum("name", "levels")), field="fact", desc=("Factor specific information file (opt)"), ) bases = traits.Dict( traits.Enum("hrf", "fourier", "fourier_han", "gamma", "fir"), field="bases", desc="""\ Dictionary names of the basis function to parameters: * hrf * derivs -- (2-element list) Model HRF Derivatives. 
No derivatives: [0,0], Time derivatives : [1,0], Time and Dispersion derivatives: [1,1] * fourier, fourier_han, gamma, or fir: * length -- (int) Post-stimulus window length (in seconds) * order -- (int) Number of basis functions """, mandatory=True, ) volterra_expansion_order = traits.Enum( 1, 2, field="volt", desc=("Model interactions - no:1, yes:2") ) global_intensity_normalization = traits.Enum( "none", "scaling", field="global", desc=("Global intensity normalization - scaling or none"), ) mask_image = File( exists=True, field="mask", desc="Image for explicitly masking the analysis" ) mask_threshold = traits.Either( traits.Enum("-Inf"), traits.Float(), desc="Thresholding for the mask", default="-Inf", usedefault=True, ) model_serial_correlations = traits.Enum( "AR(1)", "FAST", "none", field="cvi", desc=( "Model serial correlations " "AR(1), FAST or none. FAST " "is available in SPM12" ), ) flags = traits.Dict( desc="Additional arguments to the job, e.g., a common SPM operation is to " "modify the default masking threshold (mthresh)" ) class Level1DesignOutputSpec(TraitedSpec): spm_mat_file = File(exists=True, desc="SPM mat file") class Level1Design(SPMCommand): """Generate an SPM design matrix http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=59 Examples -------- >>> level1design = Level1Design() >>> level1design.inputs.timing_units = 'secs' >>> level1design.inputs.interscan_interval = 2.5 >>> level1design.inputs.bases = {'hrf':{'derivs': [0,0]}} >>> level1design.inputs.session_info = 'session_info.npz' >>> level1design.inputs.flags = {'mthresh': 0.4} >>> level1design.run() # doctest: +SKIP """ input_spec = Level1DesignInputSpec output_spec = Level1DesignOutputSpec _jobtype = "stats" _jobname = "fmri_spec" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt in ["spm_mat_dir", "mask_image"]: return np.array([str(val)], dtype=object) if opt in ["session_info"]: # , 'factor_info']: if isinstance(val, dict): return 
[val] else: return val return super(Level1Design, self)._format_arg(opt, spec, val) def _parse_inputs(self): """validate spm realign options if set to None ignore""" einputs = super(Level1Design, self)._parse_inputs( skip=("mask_threshold", "flags") ) if isdefined(self.inputs.flags): einputs[0].update({flag: val for (flag, val) in self.inputs.flags.items()}) for sessinfo in einputs[0]["sess"]: sessinfo["scans"] = scans_for_fnames( ensure_list(sessinfo["scans"]), keep4d=False ) if not isdefined(self.inputs.spm_mat_dir): einputs[0]["dir"] = np.array([str(os.getcwd())], dtype=object) return einputs def _make_matlab_command(self, content): """validates spm options and generates job structure if mfile is True uses matlab .m file else generates a job structure and saves in .mat """ if isdefined(self.inputs.mask_image): # SPM doesn't handle explicit masking properly, especially # when you want to use the entire mask image postscript = "load SPM;\n" postscript += "SPM.xM.VM = spm_vol('%s');\n" % simplify_list( self.inputs.mask_image ) postscript += "SPM.xM.I = 0;\n" postscript += "SPM.xM.T = [];\n" postscript += ( "SPM.xM.TH = ones(size(SPM.xM.TH))*(%s);\n" % self.inputs.mask_threshold ) postscript += "SPM.xM.xs = struct('Masking', 'explicit masking only');\n" postscript += "save SPM SPM;\n" else: postscript = None return super(Level1Design, self)._make_matlab_command( content, postscript=postscript ) def _list_outputs(self): outputs = self._outputs().get() spm = os.path.join(os.getcwd(), "SPM.mat") outputs["spm_mat_file"] = spm return outputs class EstimateModelInputSpec(SPMCommandInputSpec): spm_mat_file = File( exists=True, field="spmmat", copyfile=True, mandatory=True, desc="Absolute path to SPM.mat", ) estimation_method = traits.Dict( traits.Enum("Classical", "Bayesian2", "Bayesian"), field="method", mandatory=True, desc=("Dictionary of either Classical: 1, Bayesian: 1, or Bayesian2: 1 (dict)"), ) write_residuals = traits.Bool( field="write_residuals", desc="Write 
individual residual images" ) flags = traits.Dict(desc="Additional arguments") class EstimateModelOutputSpec(TraitedSpec): mask_image = ImageFileSPM(exists=True, desc="binary mask to constrain estimation") beta_images = OutputMultiPath( ImageFileSPM(exists=True), desc="design parameter estimates" ) residual_image = ImageFileSPM( exists=True, desc="Mean-squared image of the residuals" ) residual_images = OutputMultiPath( ImageFileSPM(exists=True), desc="individual residual images (requires `write_residuals`", ) RPVimage = ImageFileSPM(exists=True, desc="Resels per voxel image") spm_mat_file = File(exists=True, desc="Updated SPM mat file") labels = ImageFileSPM(exists=True, desc="label file") SDerror = OutputMultiPath( ImageFileSPM(exists=True), desc="Images of the standard deviation of the error" ) ARcoef = OutputMultiPath( ImageFileSPM(exists=True), desc="Images of the AR coefficient" ) Cbetas = OutputMultiPath( ImageFileSPM(exists=True), desc="Images of the parameter posteriors" ) SDbetas = OutputMultiPath( ImageFileSPM(exists=True), desc="Images of the standard deviation of parameter posteriors", ) class EstimateModel(SPMCommand): """Use spm_spm to estimate the parameters of a model http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=69 Examples -------- >>> est = EstimateModel() >>> est.inputs.spm_mat_file = 'SPM.mat' >>> est.inputs.estimation_method = {'Classical': 1} >>> est.run() # doctest: +SKIP """ input_spec = EstimateModelInputSpec output_spec = EstimateModelOutputSpec _jobtype = "stats" _jobname = "fmri_est" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt == "spm_mat_file": return np.array([str(val)], dtype=object) if opt == "estimation_method": if isinstance(val, (str, bytes)): return {"{}".format(val): 1} else: return val return super(EstimateModel, self)._format_arg(opt, spec, val) def _parse_inputs(self): """validate spm realign options if set to None ignore""" einputs = super(EstimateModel, 
self)._parse_inputs(skip=("flags")) if isdefined(self.inputs.flags): einputs[0].update({flag: val for (flag, val) in self.inputs.flags.items()}) return einputs def _list_outputs(self): import scipy.io as sio outputs = self._outputs().get() pth = os.path.dirname(self.inputs.spm_mat_file) outtype = "nii" if "12" in self.version.split(".")[0] else "img" spm = sio.loadmat(self.inputs.spm_mat_file, struct_as_record=False) betas = [vbeta.fname[0] for vbeta in spm["SPM"][0, 0].Vbeta[0]] if ( "Bayesian" in self.inputs.estimation_method.keys() or "Bayesian2" in self.inputs.estimation_method.keys() ): outputs["labels"] = os.path.join(pth, "labels.{}".format(outtype)) outputs["SDerror"] = glob(os.path.join(pth, "Sess*_SDerror*")) outputs["ARcoef"] = glob(os.path.join(pth, "Sess*_AR_*")) if betas: outputs["Cbetas"] = [ os.path.join(pth, "C{}".format(beta)) for beta in betas ] outputs["SDbetas"] = [ os.path.join(pth, "SD{}".format(beta)) for beta in betas ] if "Classical" in self.inputs.estimation_method.keys(): outputs["residual_image"] = os.path.join(pth, "ResMS.{}".format(outtype)) outputs["RPVimage"] = os.path.join(pth, "RPV.{}".format(outtype)) if self.inputs.write_residuals: outputs["residual_images"] = glob(os.path.join(pth, "Res_*")) if betas: outputs["beta_images"] = [os.path.join(pth, beta) for beta in betas] outputs["mask_image"] = os.path.join(pth, "mask.{}".format(outtype)) outputs["spm_mat_file"] = os.path.join(pth, "SPM.mat") return outputs class EstimateContrastInputSpec(SPMCommandInputSpec): spm_mat_file = File( exists=True, field="spmmat", desc="Absolute path to SPM.mat", copyfile=True, mandatory=True, ) contrasts = traits.List( traits.Either( traits.Tuple( traits.Str, traits.Enum("T"), traits.List(traits.Str), traits.List(traits.Float), ), traits.Tuple( traits.Str, traits.Enum("T"), traits.List(traits.Str), traits.List(traits.Float), traits.List(traits.Float), ), traits.Tuple( traits.Str, traits.Enum("F"), traits.List( traits.Either( traits.Tuple( traits.Str, 
traits.Enum("T"), traits.List(traits.Str), traits.List(traits.Float), ), traits.Tuple( traits.Str, traits.Enum("T"), traits.List(traits.Str), traits.List(traits.Float), traits.List(traits.Float), ), ) ), ), ), desc="""List of contrasts with each contrast being a list of the form: [('name', 'stat', [condition list], [weight list], [session list])] If session list is None or not provided, all sessions are used. For F contrasts, the condition list should contain previously defined T-contrasts.""", mandatory=True, ) beta_images = InputMultiPath( File(exists=True), desc=("Parameter estimates of the design matrix"), copyfile=False, mandatory=True, ) residual_image = File( exists=True, desc="Mean-squared image of the residuals", copyfile=False, mandatory=True, ) use_derivs = traits.Bool( desc="use derivatives for estimation", xor=["group_contrast"] ) group_contrast = traits.Bool(desc="higher level contrast", xor=["use_derivs"]) class EstimateContrastOutputSpec(TraitedSpec): con_images = OutputMultiPath( File(exists=True), desc="contrast images from a t-contrast" ) spmT_images = OutputMultiPath( File(exists=True), desc="stat images from a t-contrast" ) ess_images = OutputMultiPath( File(exists=True), desc="contrast images from an F-contrast" ) spmF_images = OutputMultiPath( File(exists=True), desc="stat images from an F-contrast" ) spm_mat_file = File(exists=True, desc="Updated SPM mat file") class EstimateContrast(SPMCommand): """Use spm_contrasts to estimate contrasts of interest Examples -------- >>> import nipype.interfaces.spm as spm >>> est = spm.EstimateContrast() >>> est.inputs.spm_mat_file = 'SPM.mat' >>> cont1 = ('Task>Baseline','T', ['Task-Odd','Task-Even'],[0.5,0.5]) >>> cont2 = ('Task-Odd>Task-Even','T', ['Task-Odd','Task-Even'],[1,-1]) >>> contrasts = [cont1,cont2] >>> est.inputs.contrasts = contrasts >>> est.run() # doctest: +SKIP """ input_spec = EstimateContrastInputSpec output_spec = EstimateContrastOutputSpec _jobtype = "stats" _jobname = "con" def 
_make_matlab_command(self, _): """Validate spm options and generate job structure.""" contrasts = [] cname = [] for i, cont in enumerate(self.inputs.contrasts): cname.insert(i, cont[0]) contrasts.insert( i, Bunch( name=cont[0], stat=cont[1], conditions=cont[2], weights=None, sessions=None, ), ) if len(cont) >= 4: contrasts[i].weights = cont[3] if len(cont) >= 5: contrasts[i].sessions = cont[4] script = [ """\ %% generated by nipype.interfaces.spm spm_defaults; jobs{1}.stats{1}.con.spmmat = {'%s'}; load(jobs{1}.stats{1}.con.spmmat{:}); SPM.swd = '%s'; save(jobs{1}.stats{1}.con.spmmat{:},'SPM'); names = SPM.xX.name;""" % (self.inputs.spm_mat_file, os.getcwd()) ] # get names for columns if isdefined(self.inputs.group_contrast) and self.inputs.group_contrast: script += ["condnames=names;"] else: if self.inputs.use_derivs: script += [r"pat = 'Sn\([0-9]*\) (.*)';"] else: script += [ r"pat = 'Sn\([0-9]*\) (.*)\*bf\(1\)|Sn\([0-9]*\) " r".*\*bf\([2-9]\)|Sn\([0-9]*\) (.*)';" ] script += ["t = regexp(names,pat,'tokens');"] # get sessidx for columns script += [r"pat1 = 'Sn\(([0-9].*)\)\s.*';"] script += ["t1 = regexp(names,pat1,'tokens');"] script += [ """\ for i0=1:numel(t) condnames{i0}=''; condsess(i0)=0; if ~isempty(t{i0}{1}) condnames{i0} = t{i0}{1}{1}; condsess(i0)=str2num(t1{i0}{1}{1}); end; end;""" ] # BUILD CONTRAST SESSION STRUCTURE for i, contrast in enumerate(contrasts): if contrast.stat == "T": script += ["consess{%d}.tcon.name = '%s';" % (i + 1, contrast.name)] script += ["consess{%d}.tcon.convec = zeros(1,numel(names));" % (i + 1)] for c0, cond in enumerate(contrast.conditions): script += ["idx = strmatch('%s',condnames,'exact');" % cond] script += [ """\ if isempty(idx) throw(MException('CondName:Chk', sprintf('Condition %%s not found in design','%s'))); end;""" % cond ] if contrast.sessions: for sno, sw in enumerate(contrast.sessions): script += ["sidx = find(condsess(idx)==%d);" % (sno + 1)] script += [ "consess{%d}.tcon.convec(idx(sidx)) = %f;" % (i + 1, sw 
* contrast.weights[c0]) ] else: script += [ "consess{%d}.tcon.convec(idx) = %f;" % (i + 1, contrast.weights[c0]) ] for i, contrast in enumerate(contrasts): if contrast.stat == "F": script += ["consess{%d}.fcon.name = '%s';" % (i + 1, contrast.name)] for cl0, fcont in enumerate(contrast.conditions): tidx = cname.index(fcont[0]) script += [ "consess{%d}.fcon.convec{%d} = consess{%d}.tcon.convec;" % (i + 1, cl0 + 1, tidx + 1) ] script += ["jobs{1}.stats{1}.con.consess = consess;"] script += [ """\ if strcmp(spm('ver'),'SPM8') spm_jobman('initcfg'); jobs=spm_jobman('spm5tospm8',{jobs}); end;""" ] script += ["spm_jobman('run',jobs);"] return "\n".join(script) def _list_outputs(self): import scipy.io as sio outputs = self._outputs().get() pth, _ = os.path.split(self.inputs.spm_mat_file) spm = sio.loadmat(self.inputs.spm_mat_file, struct_as_record=False) con_images = [] spmT_images = [] for con in spm["SPM"][0, 0].xCon[0]: con_images.append(str(os.path.join(pth, con.Vcon[0, 0].fname[0]))) spmT_images.append(str(os.path.join(pth, con.Vspm[0, 0].fname[0]))) if con_images: outputs["con_images"] = con_images outputs["spmT_images"] = spmT_images spm12 = "12" in self.version.split(".")[0] if spm12: ess = glob(os.path.join(pth, "ess*.nii")) else: ess = glob(os.path.join(pth, "ess*.img")) if len(ess) > 0: outputs["ess_images"] = sorted(ess) if spm12: spmf = glob(os.path.join(pth, "spmF*.nii")) else: spmf = glob(os.path.join(pth, "spmF*.img")) if len(spmf) > 0: outputs["spmF_images"] = sorted(spmf) outputs["spm_mat_file"] = self.inputs.spm_mat_file return outputs class ThresholdInputSpec(SPMCommandInputSpec): spm_mat_file = File( exists=True, desc="absolute path to SPM.mat", copyfile=True, mandatory=True ) stat_image = File(exists=True, desc="stat image", copyfile=False, mandatory=True) contrast_index = traits.Int( mandatory=True, desc="which contrast in the SPM.mat to use" ) use_fwe_correction = traits.Bool( True, usedefault=True, desc=( "whether to use FWE (Bonferroni) " 
"correction for initial threshold " "(height_threshold_type has to be " "set to p-value)" ), ) use_topo_fdr = traits.Bool( True, usedefault=True, desc=("whether to use FDR over cluster extent probabilities"), ) height_threshold = traits.Float( 0.05, usedefault=True, desc=("value for initial thresholding (defining clusters)"), ) height_threshold_type = traits.Enum( "p-value", "stat", usedefault=True, desc=("Is the cluster forming threshold a stat value or p-value?"), ) extent_fdr_p_threshold = traits.Float( 0.05, usedefault=True, desc=("p threshold on FDR corrected cluster size probabilities"), ) extent_threshold = traits.Int( 0, usedefault=True, desc="Minimum cluster size in voxels" ) force_activation = traits.Bool( False, usedefault=True, desc=( "In case no clusters survive the " "topological inference step this " "will pick a culster with the highes " "sum of t-values. Use with care." ), ) class ThresholdOutputSpec(TraitedSpec): thresholded_map = File(exists=True) n_clusters = traits.Int() pre_topo_fdr_map = File(exists=True) pre_topo_n_clusters = traits.Int() activation_forced = traits.Bool() cluster_forming_thr = traits.Float() class Threshold(SPMCommand): """Topological FDR thresholding based on cluster extent/size. Smoothness is estimated from GLM residuals but is assumed to be the same for all of the voxels. 
Examples -------- >>> thresh = Threshold() >>> thresh.inputs.spm_mat_file = 'SPM.mat' >>> thresh.inputs.stat_image = 'spmT_0001.img' >>> thresh.inputs.contrast_index = 1 >>> thresh.inputs.extent_fdr_p_threshold = 0.05 >>> thresh.run() # doctest: +SKIP """ input_spec = ThresholdInputSpec output_spec = ThresholdOutputSpec def _gen_thresholded_map_filename(self): _, fname, ext = split_filename(self.inputs.stat_image) return os.path.abspath(fname + "_thr" + ext) def _gen_pre_topo_map_filename(self): _, fname, ext = split_filename(self.inputs.stat_image) return os.path.abspath(fname + "_pre_topo_thr" + ext) def _make_matlab_command(self, _): script = "con_index = %d;\n" % self.inputs.contrast_index script += "cluster_forming_thr = %f;\n" % self.inputs.height_threshold if self.inputs.use_fwe_correction: script += "thresDesc = 'FWE';\n" else: script += "thresDesc = 'none';\n" if self.inputs.use_topo_fdr: script += "use_topo_fdr = 1;\n" else: script += "use_topo_fdr = 0;\n" if self.inputs.force_activation: script += "force_activation = 1;\n" else: script += "force_activation = 0;\n" script += ( "cluster_extent_p_fdr_thr = %f;\n" % self.inputs.extent_fdr_p_threshold ) script += "stat_filename = '%s';\n" % self.inputs.stat_image script += "height_threshold_type = '%s';\n" % self.inputs.height_threshold_type script += "extent_threshold = %d;\n" % self.inputs.extent_threshold script += "load %s;\n" % self.inputs.spm_mat_file script += """ FWHM = SPM.xVol.FWHM; df = [SPM.xCon(con_index).eidf SPM.xX.erdf]; STAT = SPM.xCon(con_index).STAT; R = SPM.xVol.R; S = SPM.xVol.S; n = 1; switch thresDesc case 'FWE' cluster_forming_thr = spm_uc(cluster_forming_thr,df,STAT,R,n,S); case 'none' if strcmp(height_threshold_type, 'p-value') cluster_forming_thr = spm_u(cluster_forming_thr^(1/n),df,STAT); end end stat_map_vol = spm_vol(stat_filename); [stat_map_data, stat_map_XYZmm] = spm_read_vols(stat_map_vol); Z = stat_map_data(:)'; [x,y,z] = 
ind2sub(size(stat_map_data),(1:numel(stat_map_data))'); XYZ = cat(1, x', y', z'); XYZth = XYZ(:, Z >= cluster_forming_thr); Zth = Z(Z >= cluster_forming_thr); """ script += ( "spm_write_filtered(Zth,XYZth,stat_map_vol.dim'," "stat_map_vol.mat,'thresholded map', '%s');\n" ) % self._gen_pre_topo_map_filename() script += """ max_size = 0; max_size_index = 0; th_nclusters = 0; nclusters = 0; if isempty(XYZth) thresholded_XYZ = []; thresholded_Z = []; else if use_topo_fdr V2R = 1/prod(FWHM(stat_map_vol.dim > 1)); [uc,Pc,ue] = spm_uc_clusterFDR(cluster_extent_p_fdr_thr,df,STAT,R,n,Z,XYZ,V2R,cluster_forming_thr); end voxel_labels = spm_clusters(XYZth); nclusters = max(voxel_labels); thresholded_XYZ = []; thresholded_Z = []; for i = 1:nclusters cluster_size = sum(voxel_labels==i); if cluster_size > extent_threshold && (~use_topo_fdr || (cluster_size - uc) > -1) thresholded_XYZ = cat(2, thresholded_XYZ, XYZth(:,voxel_labels == i)); thresholded_Z = cat(2, thresholded_Z, Zth(voxel_labels == i)); th_nclusters = th_nclusters + 1; end if force_activation cluster_sum = sum(Zth(voxel_labels == i)); if cluster_sum > max_size max_size = cluster_sum; max_size_index = i; end end end end activation_forced = 0; if isempty(thresholded_XYZ) if force_activation && max_size ~= 0 thresholded_XYZ = XYZth(:,voxel_labels == max_size_index); thresholded_Z = Zth(voxel_labels == max_size_index); th_nclusters = 1; activation_forced = 1; else thresholded_Z = [0]; thresholded_XYZ = [1 1 1]'; th_nclusters = 0; end end fprintf('activation_forced = %d\\n',activation_forced); fprintf('pre_topo_n_clusters = %d\\n',nclusters); fprintf('n_clusters = %d\\n',th_nclusters); fprintf('cluster_forming_thr = %f\\n',cluster_forming_thr); """ script += ( "spm_write_filtered(thresholded_Z,thresholded_XYZ," "stat_map_vol.dim',stat_map_vol.mat,'thresholded map'," " '%s');\n" ) % self._gen_thresholded_map_filename() return script def aggregate_outputs(self, runtime=None): outputs = self._outputs() setattr(outputs, 
"thresholded_map", self._gen_thresholded_map_filename()) setattr(outputs, "pre_topo_fdr_map", self._gen_pre_topo_map_filename()) for line in runtime.stdout.split("\n"): if line.startswith("activation_forced = "): setattr( outputs, "activation_forced", line[len("activation_forced = ") :].strip() == "1", ) elif line.startswith("n_clusters = "): setattr( outputs, "n_clusters", int(line[len("n_clusters = ") :].strip()) ) elif line.startswith("pre_topo_n_clusters = "): setattr( outputs, "pre_topo_n_clusters", int(line[len("pre_topo_n_clusters = ") :].strip()), ) elif line.startswith("cluster_forming_thr = "): setattr( outputs, "cluster_forming_thr", float(line[len("cluster_forming_thr = ") :].strip()), ) return outputs def _list_outputs(self): outputs = self._outputs().get() outputs["thresholded_map"] = self._gen_thresholded_map_filename() outputs["pre_topo_fdr_map"] = self._gen_pre_topo_map_filename() return outputs class ThresholdStatisticsInputSpec(SPMCommandInputSpec): spm_mat_file = File( exists=True, desc="absolute path to SPM.mat", copyfile=True, mandatory=True ) stat_image = File(exists=True, desc="stat image", copyfile=False, mandatory=True) contrast_index = traits.Int( mandatory=True, desc="which contrast in the SPM.mat to use" ) height_threshold = traits.Float( desc=("stat value for initial thresholding (defining clusters)"), mandatory=True ) extent_threshold = traits.Int( 0, usedefault=True, desc="Minimum cluster size in voxels" ) class ThresholdStatisticsOutputSpec(TraitedSpec): voxelwise_P_Bonf = traits.Float() voxelwise_P_RF = traits.Float() voxelwise_P_uncor = traits.Float() voxelwise_P_FDR = traits.Float() clusterwise_P_RF = traits.Float() clusterwise_P_FDR = traits.Float() class ThresholdStatistics(SPMCommand): """Given height and cluster size threshold calculate theoretical probabilities concerning false positives Examples -------- >>> thresh = ThresholdStatistics() >>> thresh.inputs.spm_mat_file = 'SPM.mat' >>> thresh.inputs.stat_image = 
'spmT_0001.img' >>> thresh.inputs.contrast_index = 1 >>> thresh.inputs.height_threshold = 4.56 >>> thresh.run() # doctest: +SKIP """ input_spec = ThresholdStatisticsInputSpec output_spec = ThresholdStatisticsOutputSpec def _make_matlab_command(self, _): script = "con_index = %d;\n" % self.inputs.contrast_index script += "cluster_forming_thr = %f;\n" % self.inputs.height_threshold script += "stat_filename = '%s';\n" % self.inputs.stat_image script += "extent_threshold = %d;\n" % self.inputs.extent_threshold script += "load '%s'\n" % self.inputs.spm_mat_file script += """ FWHM = SPM.xVol.FWHM; df = [SPM.xCon(con_index).eidf SPM.xX.erdf]; STAT = SPM.xCon(con_index).STAT; R = SPM.xVol.R; S = SPM.xVol.S; n = 1; voxelwise_P_Bonf = spm_P_Bonf(cluster_forming_thr,df,STAT,S,n) voxelwise_P_RF = spm_P_RF(1,0,cluster_forming_thr,df,STAT,R,n) stat_map_vol = spm_vol(stat_filename); [stat_map_data, stat_map_XYZmm] = spm_read_vols(stat_map_vol); Z = stat_map_data(:); Zum = Z; switch STAT case 'Z' VPs = (1-spm_Ncdf(Zum)).^n; voxelwise_P_uncor = (1-spm_Ncdf(cluster_forming_thr)).^n case 'T' VPs = (1 - spm_Tcdf(Zum,df(2))).^n; voxelwise_P_uncor = (1 - spm_Tcdf(cluster_forming_thr,df(2))).^n case 'X' VPs = (1-spm_Xcdf(Zum,df(2))).^n; voxelwise_P_uncor = (1-spm_Xcdf(cluster_forming_thr,df(2))).^n case 'F' VPs = (1 - spm_Fcdf(Zum,df)).^n; voxelwise_P_uncor = (1 - spm_Fcdf(cluster_forming_thr,df)).^n end VPs = sort(VPs); voxelwise_P_FDR = spm_P_FDR(cluster_forming_thr,df,STAT,n,VPs) V2R = 1/prod(FWHM(stat_map_vol.dim > 1)); clusterwise_P_RF = spm_P_RF(1,extent_threshold*V2R,cluster_forming_thr,df,STAT,R,n) [x,y,z] = ind2sub(size(stat_map_data),(1:numel(stat_map_data))'); XYZ = cat(1, x', y', z'); [u, CPs, ue] = spm_uc_clusterFDR(0.05,df,STAT,R,n,Z,XYZ,V2R,cluster_forming_thr); clusterwise_P_FDR = spm_P_clusterFDR(extent_threshold*V2R,df,STAT,R,n,cluster_forming_thr,CPs') """ return script def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() cur_output 
= "" for line in runtime.stdout.split("\n"): if cur_output != "" and len(line.split()) != 0: setattr(outputs, cur_output, float(line)) cur_output = "" continue if len(line.split()) != 0 and line.split()[0] in [ "clusterwise_P_FDR", "clusterwise_P_RF", "voxelwise_P_Bonf", "voxelwise_P_FDR", "voxelwise_P_RF", "voxelwise_P_uncor", ]: cur_output = line.split()[0] continue return outputs class FactorialDesignInputSpec(SPMCommandInputSpec): spm_mat_dir = Directory( exists=True, field="dir", desc="directory to store SPM.mat file (opt)" ) # Need to make an alias of InputMultiPath; the inputs below are not Path covariates = InputMultiPath( traits.Dict( key_trait=traits.Enum("vector", "name", "interaction", "centering") ), field="cov", desc=("covariate dictionary {vector, name, interaction, centering}"), ) threshold_mask_none = traits.Bool( field="masking.tm.tm_none", xor=["threshold_mask_absolute", "threshold_mask_relative"], desc="do not use threshold masking", ) threshold_mask_absolute = traits.Float( field="masking.tm.tma.athresh", xor=["threshold_mask_none", "threshold_mask_relative"], desc="use an absolute threshold", ) threshold_mask_relative = traits.Float( field="masking.tm.tmr.rthresh", xor=["threshold_mask_absolute", "threshold_mask_none"], desc=("threshold using a proportion of the global value"), ) use_implicit_threshold = traits.Bool( field="masking.im", desc=("use implicit mask NaNs or zeros to threshold") ) explicit_mask_file = File( field="masking.em", # requires cell desc="use an implicit mask file to threshold", ) global_calc_omit = traits.Bool( field="globalc.g_omit", xor=["global_calc_mean", "global_calc_values"], desc="omit global calculation", ) global_calc_mean = traits.Bool( field="globalc.g_mean", xor=["global_calc_omit", "global_calc_values"], desc="use mean for global calculation", ) global_calc_values = traits.List( traits.Float, field="globalc.g_user.global_uval", xor=["global_calc_mean", "global_calc_omit"], desc="omit global calculation", ) 
no_grand_mean_scaling = traits.Bool( field="globalm.gmsca.gmsca_no", desc=("do not perform grand mean scaling") ) global_normalization = traits.Enum( 1, 2, 3, field="globalm.glonorm", desc=("global normalization None-1, Proportional-2, ANCOVA-3"), ) class FactorialDesignOutputSpec(TraitedSpec): spm_mat_file = File(exists=True, desc="SPM mat file") class FactorialDesign(SPMCommand): """Base class for factorial designs http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=77 """ input_spec = FactorialDesignInputSpec output_spec = FactorialDesignOutputSpec _jobtype = "stats" _jobname = "factorial_design" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt in ["spm_mat_dir", "explicit_mask_file"]: return np.array([str(val)], dtype=object) if opt in ["covariates"]: outlist = [] mapping = { "name": "cname", "vector": "c", "interaction": "iCFI", "centering": "iCC", } for dictitem in val: outdict = {} for key, keyval in list(dictitem.items()): outdict[mapping[key]] = keyval outlist.append(outdict) return outlist return super(FactorialDesign, self)._format_arg(opt, spec, val) def _parse_inputs(self): """validate spm realign options if set to None ignore""" einputs = super(FactorialDesign, self)._parse_inputs() if not isdefined(self.inputs.spm_mat_dir): einputs[0]["dir"] = np.array([str(os.getcwd())], dtype=object) return einputs def _list_outputs(self): outputs = self._outputs().get() spm = os.path.join(os.getcwd(), "SPM.mat") outputs["spm_mat_file"] = spm return outputs class OneSampleTTestDesignInputSpec(FactorialDesignInputSpec): in_files = traits.List( File(exists=True), field="des.t1.scans", mandatory=True, minlen=2, desc="input files", ) class OneSampleTTestDesign(FactorialDesign): """Create SPM design for one sample t-test Examples -------- >>> ttest = OneSampleTTestDesign() >>> ttest.inputs.in_files = ['cont1.nii', 'cont2.nii'] >>> ttest.run() # doctest: +SKIP """ input_spec = OneSampleTTestDesignInputSpec def 
_format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt in ["in_files"]: return np.array(val, dtype=object) return super(OneSampleTTestDesign, self)._format_arg(opt, spec, val) class TwoSampleTTestDesignInputSpec(FactorialDesignInputSpec): # very unlikely that you will have a single image in one group, so setting # parameters to require at least two files in each group [SG] group1_files = traits.List( File(exists=True), field="des.t2.scans1", mandatory=True, minlen=2, desc="Group 1 input files", ) group2_files = traits.List( File(exists=True), field="des.t2.scans2", mandatory=True, minlen=2, desc="Group 2 input files", ) dependent = traits.Bool( field="des.t2.dept", desc=("Are the measurements dependent between levels") ) unequal_variance = traits.Bool( field="des.t2.variance", desc=("Are the variances equal or unequal between groups"), ) class TwoSampleTTestDesign(FactorialDesign): """Create SPM design for two sample t-test Examples -------- >>> ttest = TwoSampleTTestDesign() >>> ttest.inputs.group1_files = ['cont1.nii', 'cont2.nii'] >>> ttest.inputs.group2_files = ['cont1a.nii', 'cont2a.nii'] >>> ttest.run() # doctest: +SKIP """ input_spec = TwoSampleTTestDesignInputSpec def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt in ["group1_files", "group2_files"]: return np.array(val, dtype=object) return super(TwoSampleTTestDesign, self)._format_arg(opt, spec, val) class PairedTTestDesignInputSpec(FactorialDesignInputSpec): paired_files = traits.List( traits.List(File(exists=True), minlen=2, maxlen=2), field="des.pt.pair", mandatory=True, minlen=2, desc="List of paired files", ) grand_mean_scaling = traits.Bool( field="des.pt.gmsca", desc="Perform grand mean scaling" ) ancova = traits.Bool( field="des.pt.ancova", desc="Specify ancova-by-factor regressors" ) class PairedTTestDesign(FactorialDesign): """Create SPM design for paired t-test Examples -------- >>> pttest = PairedTTestDesign() 
>>> pttest.inputs.paired_files = [['cont1.nii','cont1a.nii'],['cont2.nii','cont2a.nii']] >>> pttest.run() # doctest: +SKIP """ input_spec = PairedTTestDesignInputSpec def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt in ["paired_files"]: return [dict(scans=np.array(files, dtype=object)) for files in val] return super(PairedTTestDesign, self)._format_arg(opt, spec, val) class MultipleRegressionDesignInputSpec(FactorialDesignInputSpec): in_files = traits.List( File(exists=True), field="des.mreg.scans", mandatory=True, minlen=2, desc="List of files", ) include_intercept = traits.Bool( True, field="des.mreg.incint", usedefault=True, desc="Include intercept in design", ) user_covariates = InputMultiPath( traits.Dict(key_trait=traits.Enum("vector", "name", "centering")), field="des.mreg.mcov", desc=("covariate dictionary {vector, name, centering}"), ) class MultipleRegressionDesign(FactorialDesign): """Create SPM design for multiple regression Examples -------- >>> mreg = MultipleRegressionDesign() >>> mreg.inputs.in_files = ['cont1.nii','cont2.nii'] >>> mreg.run() # doctest: +SKIP """ input_spec = MultipleRegressionDesignInputSpec def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt in ["in_files"]: return np.array(val, dtype=object) if opt in ["user_covariates"]: outlist = [] mapping = {"name": "cname", "vector": "c", "centering": "iCC"} for dictitem in val: outdict = {} for key, keyval in list(dictitem.items()): outdict[mapping[key]] = keyval outlist.append(outdict) return outlist return super(MultipleRegressionDesign, self)._format_arg(opt, spec, val) nipype-1.7.0/nipype/interfaces/spm/preprocess.py000066400000000000000000003035051413403311400220100ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """SPM wrappers for preprocessing data """ import os from copy 
import deepcopy # Third-party imports import numpy as np # Local imports from ...utils.filemanip import ( fname_presuffix, ensure_list, simplify_list, split_filename, ) from ..base import ( OutputMultiPath, TraitedSpec, isdefined, traits, InputMultiPath, InputMultiObject, File, Str, ) from .base import ( SPMCommand, scans_for_fname, func_is_3d, scans_for_fnames, SPMCommandInputSpec, ImageFileSPM, ) __docformat__ = "restructuredtext" class FieldMapInputSpec(SPMCommandInputSpec): jobtype = traits.Enum( "calculatevdm", "applyvdm", usedefault=True, desc="one of: calculatevdm, applyvdm", ) phase_file = File( mandatory=True, exists=True, copyfile=False, field="subj.data.presubphasemag.phase", desc="presubstracted phase file", ) magnitude_file = File( mandatory=True, exists=True, copyfile=False, field="subj.data.presubphasemag.magnitude", desc="presubstracted magnitude file", ) echo_times = traits.Tuple( traits.Float, traits.Float, mandatory=True, field="subj.defaults.defaultsval.et", desc="short and long echo times", ) maskbrain = traits.Bool( True, usedefault=True, field="subj.defaults.defaultsval.maskbrain", desc="masking or no masking of the brain", ) blip_direction = traits.Enum( 1, -1, mandatory=True, field="subj.defaults.defaultsval.blipdir", desc="polarity of the phase-encode blips", ) total_readout_time = traits.Float( mandatory=True, field="subj.defaults.defaultsval.tert", desc="total EPI readout time", ) epifm = traits.Bool( False, usedefault=True, field="subj.defaults.defaultsval.epifm", desc="epi-based field map", ) jacobian_modulation = traits.Bool( False, usedefault=True, field="subj.defaults.defaultsval.ajm", desc="jacobian modulation", ) # Unwarping defaults parameters method = traits.Enum( "Mark3D", "Mark2D", "Huttonish", usedefault=True, desc="One of: Mark3D, Mark2D, Huttonish", field="subj.defaults.defaultsval.uflags.method", ) unwarp_fwhm = traits.Range( low=0, value=10, usedefault=True, field="subj.defaults.defaultsval.uflags.fwhm", desc="gaussian 
smoothing kernel width", ) pad = traits.Range( low=0, value=0, usedefault=True, field="subj.defaults.defaultsval.uflags.pad", desc="padding kernel width", ) ws = traits.Bool( True, usedefault=True, field="subj.defaults.defaultsval.uflags.ws", desc="weighted smoothing", ) # Brain mask defaults parameters template = File( copyfile=False, exists=True, field="subj.defaults.defaultsval.mflags.template", desc="template image for brain masking", ) mask_fwhm = traits.Range( low=0, value=5, usedefault=True, field="subj.defaults.defaultsval.mflags.fwhm", desc="gaussian smoothing kernel width", ) nerode = traits.Range( low=0, value=2, usedefault=True, field="subj.defaults.defaultsval.mflags.nerode", desc="number of erosions", ) ndilate = traits.Range( low=0, value=4, usedefault=True, field="subj.defaults.defaultsval.mflags.ndilate", desc="number of erosions", ) thresh = traits.Float( 0.5, usedefault=True, field="subj.defaults.defaultsval.mflags.thresh", desc="threshold used to create brain mask from segmented data", ) reg = traits.Float( 0.02, usedefault=True, field="subj.defaults.defaultsval.mflags.reg", desc="regularization value used in the segmentation", ) # EPI unwarping for quality check epi_file = File( copyfile=False, exists=True, mandatory=True, field="subj.session.epi", desc="EPI to unwarp", ) matchvdm = traits.Bool( True, usedefault=True, field="subj.matchvdm", desc="match VDM to EPI" ) sessname = Str( "_run-", usedefault=True, field="subj.sessname", desc="VDM filename extension" ) writeunwarped = traits.Bool( False, usedefault=True, field="subj.writeunwarped", desc="write unwarped EPI" ) anat_file = File( copyfile=False, exists=True, field="subj.anat", desc="anatomical image for comparison", ) matchanat = traits.Bool( True, usedefault=True, field="subj.matchanat", desc="match anatomical image to EPI", ) class FieldMapOutputSpec(TraitedSpec): vdm = File(exists=True, desc="voxel difference map") class FieldMap(SPMCommand): """Use the fieldmap toolbox from spm to 
calculate the voxel displacement map (VDM). http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=173 .. important:: This interface does not deal with real/imag magnitude images nor with the two phase files case. Examples -------- >>> from nipype.interfaces.spm import FieldMap >>> fm = FieldMap() >>> fm.inputs.phase_file = 'phase.nii' >>> fm.inputs.magnitude_file = 'magnitude.nii' >>> fm.inputs.echo_times = (5.19, 7.65) >>> fm.inputs.blip_direction = 1 >>> fm.inputs.total_readout_time = 15.6 >>> fm.inputs.epi_file = 'epi.nii' >>> fm.run() # doctest: +SKIP """ input_spec = FieldMapInputSpec output_spec = FieldMapOutputSpec _jobtype = "tools" _jobname = "fieldmap" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt in ["phase_file", "magnitude_file", "anat_file", "epi_file"]: return scans_for_fname(ensure_list(val)) return super(FieldMap, self)._format_arg(opt, spec, val) def _parse_inputs(self): """validate spm fieldmap options if set to None ignore""" einputs = super(FieldMap, self)._parse_inputs() return [{self.inputs.jobtype: einputs[0]}] def _list_outputs(self): outputs = self._outputs().get() jobtype = self.inputs.jobtype if jobtype == "calculatevdm": outputs["vdm"] = fname_presuffix(self.inputs.phase_file, prefix="vdm5_sc") return outputs class SliceTimingInputSpec(SPMCommandInputSpec): in_files = InputMultiPath( traits.Either( traits.List(ImageFileSPM(exists=True)), ImageFileSPM(exists=True) ), field="scans", desc="list of filenames to apply slice timing", mandatory=True, copyfile=False, ) num_slices = traits.Int( field="nslices", desc="number of slices in a volume", mandatory=True ) time_repetition = traits.Float( field="tr", desc=("time between volume acquisitions (start to start time)"), mandatory=True, ) time_acquisition = traits.Float( field="ta", desc=("time of volume acquisition. 
usually calculated as TR-(TR/num_slices)"), mandatory=True, ) slice_order = traits.List( traits.Either(traits.Int(), traits.Float()), field="so", desc=("1-based order or onset (in ms) in which slices are acquired"), mandatory=True, ) ref_slice = traits.Either( traits.Int(), traits.Float(), field="refslice", desc="1-based Number of the reference slice or " "reference time point if slice_order is in " "onsets (ms)", mandatory=True, ) out_prefix = traits.String( "a", field="prefix", usedefault=True, desc="slicetimed output prefix" ) class SliceTimingOutputSpec(TraitedSpec): timecorrected_files = OutputMultiPath( traits.Either(traits.List(File(exists=True)), File(exists=True)), desc="slice time corrected files", ) class SliceTiming(SPMCommand): """Use spm to perform slice timing correction. http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=19 Examples -------- >>> from nipype.interfaces.spm import SliceTiming >>> st = SliceTiming() >>> st.inputs.in_files = 'functional.nii' >>> st.inputs.num_slices = 32 >>> st.inputs.time_repetition = 6.0 >>> st.inputs.time_acquisition = 6. - 6./32. 
>>> st.inputs.slice_order = list(range(32,0,-1)) >>> st.inputs.ref_slice = 1 >>> st.run() # doctest: +SKIP """ input_spec = SliceTimingInputSpec output_spec = SliceTimingOutputSpec _jobtype = "temporal" _jobname = "st" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt == "in_files": return scans_for_fnames( ensure_list(val), keep4d=False, separate_sessions=True ) return super(SliceTiming, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() outputs["timecorrected_files"] = [] filelist = ensure_list(self.inputs.in_files) for f in filelist: if isinstance(f, list): run = [ fname_presuffix(in_f, prefix=self.inputs.out_prefix) for in_f in f ] else: run = fname_presuffix(f, prefix=self.inputs.out_prefix) outputs["timecorrected_files"].append(run) return outputs class RealignInputSpec(SPMCommandInputSpec): in_files = InputMultiPath( traits.Either( ImageFileSPM(exists=True), traits.List(ImageFileSPM(exists=True)) ), field="data", mandatory=True, copyfile=True, desc="list of filenames to realign", ) jobtype = traits.Enum( "estwrite", "estimate", "write", desc="one of: estimate, write, estwrite", usedefault=True, ) quality = traits.Range( low=0.0, high=1.0, field="eoptions.quality", desc="0.1 = fast, 1.0 = precise" ) fwhm = traits.Range( low=0.0, field="eoptions.fwhm", desc="gaussian smoothing kernel width" ) separation = traits.Range( low=0.0, field="eoptions.sep", desc="sampling separation in mm" ) register_to_mean = traits.Bool( field="eoptions.rtm", desc=("Indicate whether realignment is done to the mean image"), ) weight_img = File( exists=True, field="eoptions.weight", desc="filename of weighting image" ) interp = traits.Range( low=0, high=7, field="eoptions.interp", desc="degree of b-spline used for interpolation", ) wrap = traits.List( traits.Int(), minlen=3, maxlen=3, field="eoptions.wrap", desc="Check if interpolation should wrap in [x,y,z]", ) write_which = traits.ListInt( [2, 1], 
field="roptions.which", minlen=2, maxlen=2, usedefault=True, desc="determines which images to reslice", ) write_interp = traits.Range( low=0, high=7, field="roptions.interp", desc=("degree of b-spline used for interpolation"), ) write_wrap = traits.List( traits.Int(), minlen=3, maxlen=3, field="roptions.wrap", desc=("Check if interpolation should wrap in [x,y,z]"), ) write_mask = traits.Bool(field="roptions.mask", desc="True/False mask output image") out_prefix = traits.String( "r", field="roptions.prefix", usedefault=True, desc="realigned output prefix" ) class RealignOutputSpec(TraitedSpec): mean_image = File(exists=True, desc="Mean image file from the realignment") modified_in_files = OutputMultiPath( traits.Either(traits.List(File(exists=True)), File(exists=True)), desc=( "Copies of all files passed to " "in_files. Headers will have " "been modified to align all " "images with the first, or " "optionally to first do that, " "extract a mean image, and " "re-align to that mean image." ), ) realigned_files = OutputMultiPath( traits.Either(traits.List(File(exists=True)), File(exists=True)), desc=( "If jobtype is write or estwrite, " "these will be the resliced files." " Otherwise, they will be copies " "of in_files that have had their " "headers rewritten." 
), ) realignment_parameters = OutputMultiPath( File(exists=True), desc=("Estimated translation and rotation parameters") ) class Realign(SPMCommand): """Use spm_realign for estimating within modality rigid body alignment http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=25 Examples -------- >>> import nipype.interfaces.spm as spm >>> realign = spm.Realign() >>> realign.inputs.in_files = 'functional.nii' >>> realign.inputs.register_to_mean = True >>> realign.run() # doctest: +SKIP """ input_spec = RealignInputSpec output_spec = RealignOutputSpec _jobtype = "spatial" _jobname = "realign" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt == "in_files": if self.inputs.jobtype == "write": separate_sessions = False else: separate_sessions = True return scans_for_fnames( val, keep4d=False, separate_sessions=separate_sessions ) return super(Realign, self)._format_arg(opt, spec, val) def _parse_inputs(self): """validate spm realign options if set to None ignore""" einputs = super(Realign, self)._parse_inputs() return [{"%s" % (self.inputs.jobtype): einputs[0]}] def _list_outputs(self): outputs = self._outputs().get() resliced_all = self.inputs.write_which[0] > 0 resliced_mean = self.inputs.write_which[1] > 0 if self.inputs.jobtype != "write": if isdefined(self.inputs.in_files): outputs["realignment_parameters"] = [] for imgf in self.inputs.in_files: if isinstance(imgf, list): tmp_imgf = imgf[0] else: tmp_imgf = imgf outputs["realignment_parameters"].append( fname_presuffix( tmp_imgf, prefix="rp_", suffix=".txt", use_ext=False ) ) if not isinstance(imgf, list) and func_is_3d(imgf): break if self.inputs.jobtype == "estimate": outputs["realigned_files"] = self.inputs.in_files if self.inputs.jobtype == "estimate" or self.inputs.jobtype == "estwrite": outputs["modified_in_files"] = self.inputs.in_files if self.inputs.jobtype == "write" or self.inputs.jobtype == "estwrite": if isinstance(self.inputs.in_files[0], list): first_image 
= self.inputs.in_files[0][0] else: first_image = self.inputs.in_files[0] if resliced_mean: outputs["mean_image"] = fname_presuffix(first_image, prefix="mean") if resliced_all: outputs["realigned_files"] = [] for idx, imgf in enumerate(ensure_list(self.inputs.in_files)): realigned_run = [] if isinstance(imgf, list): for i, inner_imgf in enumerate(ensure_list(imgf)): newfile = fname_presuffix( inner_imgf, prefix=self.inputs.out_prefix ) realigned_run.append(newfile) else: realigned_run = fname_presuffix( imgf, prefix=self.inputs.out_prefix ) outputs["realigned_files"].append(realigned_run) return outputs class RealignUnwarpInputSpec(SPMCommandInputSpec): in_files = InputMultiObject( traits.Either( ImageFileSPM(exists=True), traits.List(ImageFileSPM(exists=True)) ), field="data.scans", mandatory=True, copyfile=True, desc="list of filenames to realign and unwarp", ) phase_map = File( field="data.pmscan", desc="Voxel displacement map to use in unwarping. Unlike SPM standard " "behaviour, the same map will be used for all sessions", copyfile=False, ) quality = traits.Range( low=0.0, high=1.0, field="eoptions.quality", desc="0.1 = fast, 1.0 = precise" ) fwhm = traits.Range( low=0.0, field="eoptions.fwhm", desc="gaussian smoothing kernel width" ) separation = traits.Range( low=0.0, field="eoptions.sep", desc="sampling separation in mm" ) register_to_mean = traits.Bool( field="eoptions.rtm", desc="Indicate whether realignment is done to the mean image", ) weight_img = File( exists=True, field="eoptions.weight", desc="filename of weighting image" ) interp = traits.Range( low=0, high=7, field="eoptions.einterp", desc="degree of b-spline used for interpolation", ) wrap = traits.List( traits.Int(), minlen=3, maxlen=3, field="eoptions.ewrap", desc="Check if interpolation should wrap in [x,y,z]", ) est_basis_func = traits.List( traits.Int(), minlen=2, maxlen=2, field="uweoptions.basfcn", desc="Number of basis functions to use for each dimension", ) est_reg_order = traits.Range( 
low=0, high=3, field="uweoptions.regorder", desc=( "This parameter determines how to balance the compromise between likelihood " "maximization and smoothness maximization of the estimated field." ), ) est_reg_factor = traits.ListInt( [100000], field="uweoptions.lambda", minlen=1, maxlen=1, usedefault=True, desc="Regularisation factor. Default: 100000 (medium).", ) est_jacobian_deformations = traits.Bool( field="uweoptions.jm", desc=( "Jacobian deformations. In theory a good idea to include them, " " in practice a bad idea. Default: No." ), ) est_first_order_effects = traits.List( traits.Int(), minlen=1, maxlen=6, field="uweoptions.fot", desc="First order effects should only depend on pitch and roll, i.e. [4 5]", ) est_second_order_effects = traits.List( traits.Int(), minlen=1, maxlen=6, field="uweoptions.sot", desc="List of second order terms to model second derivatives of.", ) est_unwarp_fwhm = traits.Range( low=0.0, field="uweoptions.uwfwhm", desc="gaussian smoothing kernel width for unwarp", ) est_re_est_mov_par = traits.Bool( field="uweoptions.rem", desc="Re-estimate movement parameters at each unwarping iteration.", ) est_num_of_iterations = traits.ListInt( [5], field="uweoptions.noi", minlen=1, maxlen=1, usedefault=True, desc="Number of iterations.", ) est_taylor_expansion_point = traits.String( "Average", field="uweoptions.expround", usedefault=True, desc="Point in position space to perform Taylor-expansion around.", ) reslice_which = traits.ListInt( [2, 1], field="uwroptions.uwwhich", minlen=2, maxlen=2, usedefault=True, desc="determines which images to reslice", ) reslice_interp = traits.Range( low=0, high=7, field="uwroptions.rinterp", desc="degree of b-spline used for interpolation", ) reslice_wrap = traits.List( traits.Int(), minlen=3, maxlen=3, field="uwroptions.wrap", desc="Check if interpolation should wrap in [x,y,z]", ) reslice_mask = traits.Bool( field="uwroptions.mask", desc="True/False mask output image" ) out_prefix = traits.String( "u", 
field="uwroptions.prefix", usedefault=True, desc="realigned and unwarped output prefix", ) class RealignUnwarpOutputSpec(TraitedSpec): mean_image = File( exists=True, desc="Mean image file from the realignment & unwarping" ) modified_in_files = OutputMultiPath( traits.Either(traits.List(File(exists=True)), File(exists=True)), desc=( "Copies of all files passed to " "in_files. Headers will have " "been modified to align all " "images with the first, or " "optionally to first do that, " "extract a mean image, and " "re-align to that mean image." ), ) realigned_unwarped_files = OutputMultiPath( traits.Either(traits.List(File(exists=True)), File(exists=True)), desc="Realigned and unwarped files written to disc.", ) realignment_parameters = OutputMultiPath( File(exists=True), desc="Estimated translation and rotation parameters" ) class RealignUnwarp(SPMCommand): """Use spm_uw_estimate for estimating within subject registration and unwarping of time series. Function accepts only one single field map. If in_files is a list of files they will be treated as separate sessions but associated to the same fieldmap. 
http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=31 Examples -------- >>> import nipype.interfaces.spm as spm >>> realignUnwarp = spm.RealignUnwarp() >>> realignUnwarp.inputs.in_files = ['functional.nii', 'functional2.nii'] >>> realignUnwarp.inputs.phase_map = 'voxeldisplacemap.vdm' >>> realignUnwarp.inputs.register_to_mean = True >>> realignUnwarp.run() # doctest: +SKIP """ input_spec = RealignUnwarpInputSpec output_spec = RealignUnwarpOutputSpec _jobtype = "spatial" _jobname = "realignunwarp" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt == "in_files": return scans_for_fnames( ensure_list(val), keep4d=False, separate_sessions=True ) return super(RealignUnwarp, self)._format_arg(opt, spec, val) def _parse_inputs(self, skip=()): spmdict = super(RealignUnwarp, self)._parse_inputs(skip=())[0] if isdefined(self.inputs.phase_map): pmscan = spmdict["data"]["pmscan"] else: pmscan = "" if isdefined(self.inputs.in_files): if isinstance(self.inputs.in_files, list): data = [ dict(scans=sess, pmscan=pmscan) for sess in spmdict["data"]["scans"] ] else: data = [dict(scans=spmdict["data"]["scans"], pmscan=pmscan)] spmdict["data"] = data return [spmdict] def _list_outputs(self): outputs = self._outputs().get() resliced_all = self.inputs.reslice_which[0] > 0 resliced_mean = self.inputs.reslice_which[1] > 0 if isdefined(self.inputs.in_files): outputs["realignment_parameters"] = [] for imgf in self.inputs.in_files: if isinstance(imgf, list): tmp_imgf = imgf[0] else: tmp_imgf = imgf outputs["realignment_parameters"].append( fname_presuffix(tmp_imgf, prefix="rp_", suffix=".txt", use_ext=False) ) if not isinstance(imgf, list) and func_is_3d(imgf): break if isinstance(self.inputs.in_files[0], list): first_image = self.inputs.in_files[0][0] else: first_image = self.inputs.in_files[0] if resliced_mean: outputs["mean_image"] = fname_presuffix(first_image, prefix="meanu") if resliced_all: outputs["realigned_unwarped_files"] = [] for idx, 
imgf in enumerate(ensure_list(self.inputs.in_files)): realigned_run = [] if isinstance(imgf, list): for i, inner_imgf in enumerate(ensure_list(imgf)): newfile = fname_presuffix( inner_imgf, prefix=self.inputs.out_prefix ) realigned_run.append(newfile) else: realigned_run = fname_presuffix(imgf, prefix=self.inputs.out_prefix) outputs["realigned_unwarped_files"].append(realigned_run) return outputs class CoregisterInputSpec(SPMCommandInputSpec): target = ImageFileSPM( exists=True, mandatory=True, field="ref", desc="reference file to register to", copyfile=False, ) source = InputMultiPath( ImageFileSPM(exists=True), field="source", desc="file to register to target", copyfile=True, mandatory=True, ) jobtype = traits.Enum( "estwrite", "estimate", "write", desc="one of: estimate, write, estwrite", usedefault=True, ) apply_to_files = InputMultiPath( File(exists=True), field="other", desc="files to apply transformation to", copyfile=True, ) cost_function = traits.Enum( "mi", "nmi", "ecc", "ncc", field="eoptions.cost_fun", desc="""cost function, one of: 'mi' - Mutual Information, 'nmi' - Normalised Mutual Information, 'ecc' - Entropy Correlation Coefficient, 'ncc' - Normalised Cross Correlation""", ) fwhm = traits.List( traits.Float(), minlen=2, maxlen=2, field="eoptions.fwhm", desc="gaussian smoothing kernel width (mm)", ) separation = traits.List( traits.Float(), field="eoptions.sep", desc="sampling separation in mm" ) tolerance = traits.List( traits.Float(), field="eoptions.tol", desc="acceptable tolerance for each of 12 params", ) write_interp = traits.Range( low=0, high=7, field="roptions.interp", desc=("degree of b-spline used for interpolation"), ) write_wrap = traits.List( traits.Int(), minlen=3, maxlen=3, field="roptions.wrap", desc=("Check if interpolation should wrap in [x,y,z]"), ) write_mask = traits.Bool(field="roptions.mask", desc="True/False mask output image") out_prefix = traits.String( "r", field="roptions.prefix", usedefault=True, desc="coregistered 
output prefix" ) class CoregisterOutputSpec(TraitedSpec): coregistered_source = OutputMultiPath( File(exists=True), desc="Coregistered source files" ) coregistered_files = OutputMultiPath( File(exists=True), desc="Coregistered other files" ) class Coregister(SPMCommand): """Use spm_coreg for estimating cross-modality rigid body alignment http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=39 Examples -------- >>> import nipype.interfaces.spm as spm >>> coreg = spm.Coregister() >>> coreg.inputs.target = 'functional.nii' >>> coreg.inputs.source = 'structural.nii' >>> coreg.run() # doctest: +SKIP """ input_spec = CoregisterInputSpec output_spec = CoregisterOutputSpec _jobtype = "spatial" _jobname = "coreg" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt == "target" or (opt == "source" and self.inputs.jobtype != "write"): return scans_for_fnames(ensure_list(val), keep4d=True) if opt == "apply_to_files": return np.array(ensure_list(val), dtype=object) if opt == "source" and self.inputs.jobtype == "write": if isdefined(self.inputs.apply_to_files): return scans_for_fnames(val + self.inputs.apply_to_files) else: return scans_for_fnames(val) return super(Coregister, self)._format_arg(opt, spec, val) def _parse_inputs(self): """validate spm coregister options if set to None ignore""" if self.inputs.jobtype == "write": einputs = super(Coregister, self)._parse_inputs( skip=("jobtype", "apply_to_files") ) else: einputs = super(Coregister, self)._parse_inputs(skip=("jobtype")) jobtype = self.inputs.jobtype return [{"%s" % (jobtype): einputs[0]}] def _list_outputs(self): outputs = self._outputs().get() if self.inputs.jobtype == "estimate": if isdefined(self.inputs.apply_to_files): outputs["coregistered_files"] = self.inputs.apply_to_files outputs["coregistered_source"] = self.inputs.source elif self.inputs.jobtype == "write" or self.inputs.jobtype == "estwrite": if isdefined(self.inputs.apply_to_files): 
outputs["coregistered_files"] = [] for imgf in ensure_list(self.inputs.apply_to_files): ( outputs["coregistered_files"].append( fname_presuffix(imgf, prefix=self.inputs.out_prefix) ) ) outputs["coregistered_source"] = [] for imgf in ensure_list(self.inputs.source): ( outputs["coregistered_source"].append( fname_presuffix(imgf, prefix=self.inputs.out_prefix) ) ) return outputs class NormalizeInputSpec(SPMCommandInputSpec): template = File( exists=True, field="eoptions.template", desc="template file to normalize to", mandatory=True, xor=["parameter_file"], copyfile=False, ) source = InputMultiPath( ImageFileSPM(exists=True), field="subj.source", xor=["parameter_file"], desc="file to normalize to template", mandatory=True, copyfile=True, ) jobtype = traits.Enum( "estwrite", "est", "write", usedefault=True, desc="Estimate, Write or do both" ) apply_to_files = InputMultiPath( traits.Either(File(exists=True), traits.List(File(exists=True))), field="subj.resample", desc="files to apply transformation to", copyfile=True, ) parameter_file = File( field="subj.matname", mandatory=True, xor=["source", "template"], desc="normalization parameter file*_sn.mat", copyfile=False, ) source_weight = File( field="subj.wtsrc", desc="name of weighting image for source", copyfile=False ) template_weight = File( field="eoptions.weight", desc="name of weighting image for template", copyfile=False, ) source_image_smoothing = traits.Float( field="eoptions.smosrc", desc="source smoothing" ) template_image_smoothing = traits.Float( field="eoptions.smoref", desc="template smoothing" ) affine_regularization_type = traits.Enum( "mni", "size", "none", field="eoptions.regtype", desc="mni, size, none" ) DCT_period_cutoff = traits.Float( field="eoptions.cutoff", desc="Cutoff of for DCT bases" ) nonlinear_iterations = traits.Int( field="eoptions.nits", desc=("Number of iterations of nonlinear warping") ) nonlinear_regularization = traits.Float( field="eoptions.reg", desc=( "the amount of the " 
"regularization for the " "nonlinear part of the " "normalization" ), ) write_preserve = traits.Bool( field="roptions.preserve", desc="True/False warped images are modulated" ) write_bounding_box = traits.List( traits.List(traits.Float(), minlen=3, maxlen=3), field="roptions.bb", minlen=2, maxlen=2, desc="3x2-element list of lists", ) write_voxel_sizes = traits.List( traits.Float(), field="roptions.vox", minlen=3, maxlen=3, desc="3-element list" ) write_interp = traits.Range( low=0, high=7, field="roptions.interp", desc=("degree of b-spline used for interpolation"), ) write_wrap = traits.List( traits.Int(), field="roptions.wrap", desc=("Check if interpolation should wrap in [x,y,z] - list of bools"), ) out_prefix = traits.String( "w", field="roptions.prefix", usedefault=True, desc="normalized output prefix" ) class NormalizeOutputSpec(TraitedSpec): normalization_parameters = OutputMultiPath( File(exists=True), desc=("MAT files containing the normalization parameters") ) normalized_source = OutputMultiPath( File(exists=True), desc="Normalized source files" ) normalized_files = OutputMultiPath(File(exists=True), desc="Normalized other files") class Normalize(SPMCommand): """use spm_normalise for warping an image to a template http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=203 Examples -------- >>> import nipype.interfaces.spm as spm >>> norm = spm.Normalize() >>> norm.inputs.source = 'functional.nii' >>> norm.run() # doctest: +SKIP """ input_spec = NormalizeInputSpec output_spec = NormalizeOutputSpec _jobtype = "spatial" _jobname = "normalise" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt == "template": return scans_for_fname(ensure_list(val)) if opt == "source": return scans_for_fname(ensure_list(val)) if opt == "apply_to_files": return scans_for_fnames(ensure_list(val)) if opt == "parameter_file": return np.array([simplify_list(val)], dtype=object) if opt in ["write_wrap"]: if len(val) != 3: raise 
ValueError("%s must have 3 elements" % opt) return super(Normalize, self)._format_arg(opt, spec, val) def _parse_inputs(self): """Validate spm normalize options if set to None ignore""" einputs = super(Normalize, self)._parse_inputs( skip=("jobtype", "apply_to_files") ) if isdefined(self.inputs.apply_to_files): inputfiles = deepcopy(self.inputs.apply_to_files) if isdefined(self.inputs.source): inputfiles.extend(self.inputs.source) einputs[0]["subj"]["resample"] = scans_for_fnames(inputfiles) jobtype = self.inputs.jobtype if jobtype in ["estwrite", "write"]: if not isdefined(self.inputs.apply_to_files): if isdefined(self.inputs.source): einputs[0]["subj"]["resample"] = scans_for_fname(self.inputs.source) return [{"%s" % (jobtype): einputs[0]}] def _list_outputs(self): outputs = self._outputs().get() jobtype = self.inputs.jobtype if jobtype.startswith("est"): outputs["normalization_parameters"] = [] for imgf in ensure_list(self.inputs.source): outputs["normalization_parameters"].append( fname_presuffix(imgf, suffix="_sn.mat", use_ext=False) ) outputs["normalization_parameters"] = simplify_list( outputs["normalization_parameters"] ) if self.inputs.jobtype == "estimate": if isdefined(self.inputs.apply_to_files): outputs["normalized_files"] = self.inputs.apply_to_files outputs["normalized_source"] = self.inputs.source elif "write" in self.inputs.jobtype: if isdefined(self.inputs.write_preserve) and self.inputs.write_preserve: prefixNorm = "".join(["m", self.inputs.out_prefix]) else: prefixNorm = self.inputs.out_prefix outputs["normalized_files"] = [] if isdefined(self.inputs.apply_to_files): filelist = ensure_list(self.inputs.apply_to_files) for f in filelist: if isinstance(f, list): run = [fname_presuffix(in_f, prefix=prefixNorm) for in_f in f] else: run = [fname_presuffix(f, prefix=prefixNorm)] outputs["normalized_files"].extend(run) if isdefined(self.inputs.source): outputs["normalized_source"] = [] for imgf in ensure_list(self.inputs.source): 
outputs["normalized_source"].append( fname_presuffix(imgf, prefix=prefixNorm) ) return outputs class Normalize12InputSpec(SPMCommandInputSpec): image_to_align = ImageFileSPM( exists=True, field="subj.vol", desc=("file to estimate normalization parameters with"), xor=["deformation_file"], mandatory=True, copyfile=True, ) apply_to_files = InputMultiPath( traits.Either( ImageFileSPM(exists=True), traits.List(ImageFileSPM(exists=True)) ), field="subj.resample", desc="files to apply transformation to", copyfile=True, ) deformation_file = ImageFileSPM( field="subj.def", mandatory=True, xor=["image_to_align", "tpm"], copyfile=False, desc=( "file y_*.nii containing 3 deformation " "fields for the deformation in x, y and z " "dimension" ), ) jobtype = traits.Enum( "estwrite", "est", "write", usedefault=True, desc="Estimate, Write or do Both" ) bias_regularization = traits.Enum( 0, 0.00001, 0.0001, 0.001, 0.01, 0.1, 1, 10, field="eoptions.biasreg", desc="no(0) - extremely heavy (10)", ) bias_fwhm = traits.Enum( 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150, "Inf", field="eoptions.biasfwhm", desc="FWHM of Gaussian smoothness of bias", ) tpm = File( exists=True, field="eoptions.tpm", desc=("template in form of tissue probablitiy maps to normalize to"), xor=["deformation_file"], copyfile=False, ) affine_regularization_type = traits.Enum( "mni", "size", "none", field="eoptions.affreg", desc="mni, size, none" ) warping_regularization = traits.List( traits.Float(), field="eoptions.reg", minlen=5, maxlen=5, desc=("controls balance between parameters and data"), ) smoothness = traits.Float( field="eoptions.fwhm", desc=("value (in mm) to smooth the data before normalization"), ) sampling_distance = traits.Float( field="eoptions.samp", desc=("Sampling distance on data for parameter estimation"), ) write_bounding_box = traits.List( traits.List(traits.Float(), minlen=3, maxlen=3), field="woptions.bb", minlen=2, maxlen=2, desc=( "3x2-element list of lists " "representing the 
bounding box " "(in mm) to be written" ), ) write_voxel_sizes = traits.List( traits.Float(), field="woptions.vox", minlen=3, maxlen=3, desc=( "3-element list representing the " "voxel sizes (in mm) of the written " "normalised images" ), ) write_interp = traits.Range( low=0, high=7, field="woptions.interp", desc=("degree of b-spline used for interpolation"), ) out_prefix = traits.String( "w", field="woptions.prefix", usedefault=True, desc="Normalized output prefix" ) class Normalize12OutputSpec(TraitedSpec): deformation_field = OutputMultiPath( File(exists=True), desc=( "NIfTI file containing 3 " "deformation fields for the " "deformation in x, y and z " "dimension" ), ) normalized_image = OutputMultiPath( File(exists=True), desc=("Normalized file that needed to be aligned") ) normalized_files = OutputMultiPath(File(exists=True), desc="Normalized other files") class Normalize12(SPMCommand): """uses SPM12's new Normalise routine for warping an image to a template. Spatial normalisation is now done via the segmentation routine (which was known as ``New Segment`` in SPM8). Note that the normalisation in SPM12 is done towards a file containing multiple tissue probability maps, which was not the case in SPM8. 
http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=49 Examples -------- >>> import nipype.interfaces.spm as spm >>> norm12 = spm.Normalize12() >>> norm12.inputs.image_to_align = 'structural.nii' >>> norm12.inputs.apply_to_files = 'functional.nii' >>> norm12.run() # doctest: +SKIP """ input_spec = Normalize12InputSpec output_spec = Normalize12OutputSpec _jobtype = "spatial" _jobname = "normalise" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt == "tpm": return scans_for_fname(ensure_list(val)) if opt == "image_to_align": return scans_for_fname(ensure_list(val)) if opt == "apply_to_files": return scans_for_fnames(ensure_list(val)) if opt == "deformation_file": return np.array([simplify_list(val)], dtype=object) if opt in ["nonlinear_regularization"]: if len(val) != 5: raise ValueError("%s must have 5 elements" % opt) return super(Normalize12, self)._format_arg(opt, spec, val) def _parse_inputs(self, skip=()): """validate spm normalize options if set to None ignore""" einputs = super(Normalize12, self)._parse_inputs( skip=("jobtype", "apply_to_files") ) if isdefined(self.inputs.apply_to_files): inputfiles = deepcopy(self.inputs.apply_to_files) if isdefined(self.inputs.image_to_align): inputfiles.extend([self.inputs.image_to_align]) einputs[0]["subj"]["resample"] = scans_for_fnames(inputfiles) jobtype = self.inputs.jobtype if jobtype in ["estwrite", "write"]: if not isdefined(self.inputs.apply_to_files): if isdefined(self.inputs.image_to_align): einputs[0]["subj"]["resample"] = scans_for_fname( self.inputs.image_to_align ) return [{"%s" % (jobtype): einputs[0]}] def _list_outputs(self): outputs = self._outputs().get() jobtype = self.inputs.jobtype if jobtype.startswith("est"): outputs["deformation_field"] = [] for imgf in ensure_list(self.inputs.image_to_align): outputs["deformation_field"].append(fname_presuffix(imgf, prefix="y_")) outputs["deformation_field"] = simplify_list(outputs["deformation_field"]) if 
self.inputs.jobtype == "estimate": if isdefined(self.inputs.apply_to_files): outputs["normalized_files"] = self.inputs.apply_to_files outputs["normalized_image"] = fname_presuffix( self.inputs.image_to_align, prefix="w" ) elif "write" in self.inputs.jobtype: outputs["normalized_files"] = [] if isdefined(self.inputs.apply_to_files): filelist = ensure_list(self.inputs.apply_to_files) for f in filelist: if isinstance(f, list): run = [fname_presuffix(in_f, prefix="w") for in_f in f] else: run = [fname_presuffix(f, prefix="w")] outputs["normalized_files"].extend(run) if isdefined(self.inputs.image_to_align): outputs["normalized_image"] = fname_presuffix( self.inputs.image_to_align, prefix="w" ) return outputs class SegmentInputSpec(SPMCommandInputSpec): data = InputMultiPath( ImageFileSPM(exists=True), field="data", desc="one scan per subject", copyfile=False, mandatory=True, ) gm_output_type = traits.List( traits.Bool(), minlen=3, maxlen=3, field="output.GM", desc="""Options to produce grey matter images: c1*.img, wc1*.img and mwc1*.img. None: [False,False,False], Native Space: [False,False,True], Unmodulated Normalised: [False,True,False], Modulated Normalised: [True,False,False], Native + Unmodulated Normalised: [False,True,True], Native + Modulated Normalised: [True,False,True], Native + Modulated + Unmodulated: [True,True,True], Modulated + Unmodulated Normalised: [True,True,False]""", ) wm_output_type = traits.List( traits.Bool(), minlen=3, maxlen=3, field="output.WM", desc=""" Options to produce white matter images: c2*.img, wc2*.img and mwc2*.img. 
None: [False,False,False], Native Space: [False,False,True], Unmodulated Normalised: [False,True,False], Modulated Normalised: [True,False,False], Native + Unmodulated Normalised: [False,True,True], Native + Modulated Normalised: [True,False,True], Native + Modulated + Unmodulated: [True,True,True], Modulated + Unmodulated Normalised: [True,True,False]""", ) csf_output_type = traits.List( traits.Bool(), minlen=3, maxlen=3, field="output.CSF", desc=""" Options to produce CSF images: c3*.img, wc3*.img and mwc3*.img. None: [False,False,False], Native Space: [False,False,True], Unmodulated Normalised: [False,True,False], Modulated Normalised: [True,False,False], Native + Unmodulated Normalised: [False,True,True], Native + Modulated Normalised: [True,False,True], Native + Modulated + Unmodulated: [True,True,True], Modulated + Unmodulated Normalised: [True,True,False]""", ) save_bias_corrected = traits.Bool( field="output.biascor", desc=("True/False produce a bias corrected image") ) clean_masks = traits.Enum( "no", "light", "thorough", field="output.cleanup", desc=("clean using estimated brain mask ('no','light','thorough')"), ) tissue_prob_maps = traits.List( File(exists=True), field="opts.tpm", desc=("list of gray, white & csf prob. 
(opt,)"), ) gaussians_per_class = traits.List( traits.Int(), field="opts.ngaus", desc=("num Gaussians capture intensity distribution"), ) affine_regularization = traits.Enum( "mni", "eastern", "subj", "none", "", field="opts.regtype", desc=( 'Possible options: "mni", ' '"eastern", "subj", "none" ' '(no reguralisation), "" ' "(no affine registration)" ), ) warping_regularization = traits.Float( field="opts.warpreg", desc=("Controls balance between parameters and data") ) warp_frequency_cutoff = traits.Float( field="opts.warpco", desc="Cutoff of DCT bases" ) bias_regularization = traits.Enum( 0, 0.00001, 0.0001, 0.001, 0.01, 0.1, 1, 10, field="opts.biasreg", desc="no(0) - extremely heavy (10)", ) bias_fwhm = traits.Enum( 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, "Inf", field="opts.biasfwhm", desc="FWHM of Gaussian smoothness of bias", ) sampling_distance = traits.Float( field="opts.samp", desc=("Sampling distance on data for parameter estimation") ) mask_image = File( exists=True, field="opts.msk", desc="Binary image to restrict parameter estimation ", ) class SegmentOutputSpec(TraitedSpec): native_gm_image = File(desc="native space grey probability map") normalized_gm_image = File(desc="normalized grey probability map") modulated_gm_image = File(desc=("modulated, normalized grey probability map")) native_wm_image = File(desc="native space white probability map") normalized_wm_image = File(desc="normalized white probability map") modulated_wm_image = File(desc=("modulated, normalized white probability map")) native_csf_image = File(desc="native space csf probability map") normalized_csf_image = File(desc="normalized csf probability map") modulated_csf_image = File(desc=("modulated, normalized csf probability map")) modulated_input_image = File( deprecated="0.10", new_name="bias_corrected_image", desc="bias-corrected version of input image", ) bias_corrected_image = File(desc="bias-corrected version of input image") transformation_mat = File(exists=True, 
desc="Normalization transformation") inverse_transformation_mat = File(exists=True, desc="Inverse normalization info") class Segment(SPMCommand): """use spm_segment to separate structural images into different tissue classes. http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=209 Examples -------- >>> import nipype.interfaces.spm as spm >>> seg = spm.Segment() >>> seg.inputs.data = 'structural.nii' >>> seg.run() # doctest: +SKIP """ input_spec = SegmentInputSpec output_spec = SegmentOutputSpec def __init__(self, **inputs): _local_version = SPMCommand().version if _local_version and "12." in _local_version: self._jobtype = "tools" self._jobname = "oldseg" else: self._jobtype = "spatial" self._jobname = "preproc" SPMCommand.__init__(self, **inputs) def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" clean_masks_dict = {"no": 0, "light": 1, "thorough": 2} if opt in ["data", "tissue_prob_maps"]: if isinstance(val, list): return scans_for_fnames(val) else: return scans_for_fname(val) if "output_type" in opt: return [int(v) for v in val] if opt == "mask_image": return scans_for_fname(val) if opt == "clean_masks": return clean_masks_dict[val] return super(Segment, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() f = self.inputs.data[0] for tidx, tissue in enumerate(["gm", "wm", "csf"]): outtype = "%s_output_type" % tissue if isdefined(getattr(self.inputs, outtype)): for idx, (image, prefix) in enumerate( [("modulated", "mw"), ("normalized", "w"), ("native", "")] ): if getattr(self.inputs, outtype)[idx]: outfield = "%s_%s_image" % (image, tissue) outputs[outfield] = fname_presuffix( f, prefix="%sc%d" % (prefix, tidx + 1) ) if ( isdefined(self.inputs.save_bias_corrected) and self.inputs.save_bias_corrected ): outputs["bias_corrected_image"] = fname_presuffix(f, prefix="m") t_mat = fname_presuffix(f, suffix="_seg_sn.mat", use_ext=False) outputs["transformation_mat"] = t_mat invt_mat = 
fname_presuffix(f, suffix="_seg_inv_sn.mat", use_ext=False) outputs["inverse_transformation_mat"] = invt_mat return outputs class NewSegmentInputSpec(SPMCommandInputSpec): channel_files = InputMultiPath( ImageFileSPM(exists=True), mandatory=True, desc="A list of files to be segmented", field="channel", copyfile=False, ) channel_info = traits.Tuple( traits.Float(), traits.Float(), traits.Tuple(traits.Bool, traits.Bool), desc="""A tuple with the following fields: - bias reguralisation (0-10) - FWHM of Gaussian smoothness of bias - which maps to save (Field, Corrected) - a tuple of two boolean values""", field="channel", ) tissues = traits.List( traits.Tuple( traits.Tuple(ImageFileSPM(exists=True), traits.Int()), traits.Int(), traits.Tuple(traits.Bool, traits.Bool), traits.Tuple(traits.Bool, traits.Bool), ), desc="""A list of tuples (one per tissue) with the following fields: - tissue probability map (4D), 1-based index to frame - number of gaussians - which maps to save [Native, DARTEL] - a tuple of two boolean values - which maps to save [Unmodulated, Modulated] - a tuple of two boolean values""", field="tissue", ) affine_regularization = traits.Enum( "mni", "eastern", "subj", "none", field="warp.affreg", desc="mni, eastern, subj, none ", ) warping_regularization = traits.Either( traits.List(traits.Float(), minlen=5, maxlen=5), traits.Float(), field="warp.reg", desc=( "Warping regularization " "parameter(s). 
Accepts float " "or list of floats (the " "latter is required by " "SPM12)" ), ) sampling_distance = traits.Float( field="warp.samp", desc=("Sampling distance on data for parameter estimation") ) write_deformation_fields = traits.List( traits.Bool(), minlen=2, maxlen=2, field="warp.write", desc=("Which deformation fields to write:[Inverse, Forward]"), ) class NewSegmentOutputSpec(TraitedSpec): native_class_images = traits.List( traits.List(File(exists=True)), desc="native space probability maps" ) dartel_input_images = traits.List( traits.List(File(exists=True)), desc="dartel imported class images" ) normalized_class_images = traits.List( traits.List(File(exists=True)), desc="normalized class images" ) modulated_class_images = traits.List( traits.List(File(exists=True)), desc=("modulated+normalized class images") ) transformation_mat = OutputMultiPath( File(exists=True), desc="Normalization transformation" ) bias_corrected_images = OutputMultiPath( File(exists=True), desc="bias corrected images" ) bias_field_images = OutputMultiPath(File(exists=True), desc="bias field images") forward_deformation_field = OutputMultiPath(File(exists=True)) inverse_deformation_field = OutputMultiPath(File(exists=True)) class NewSegment(SPMCommand): """Use spm_preproc8 (New Segment) to separate structural images into different tissue classes. Supports multiple modalities. 
NOTE: This interface currently supports single channel input only http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=43 Examples -------- >>> import nipype.interfaces.spm as spm >>> seg = spm.NewSegment() >>> seg.inputs.channel_files = 'structural.nii' >>> seg.inputs.channel_info = (0.0001, 60, (True, True)) >>> seg.run() # doctest: +SKIP For VBM pre-processing [http://www.fil.ion.ucl.ac.uk/~john/misc/VBMclass10.pdf], TPM.nii should be replaced by /path/to/spm8/toolbox/Seg/TPM.nii >>> seg = NewSegment() >>> seg.inputs.channel_files = 'structural.nii' >>> tissue1 = (('TPM.nii', 1), 2, (True,True), (False, False)) >>> tissue2 = (('TPM.nii', 2), 2, (True,True), (False, False)) >>> tissue3 = (('TPM.nii', 3), 2, (True,False), (False, False)) >>> tissue4 = (('TPM.nii', 4), 2, (False,False), (False, False)) >>> tissue5 = (('TPM.nii', 5), 2, (False,False), (False, False)) >>> seg.inputs.tissues = [tissue1, tissue2, tissue3, tissue4, tissue5] >>> seg.run() # doctest: +SKIP """ input_spec = NewSegmentInputSpec output_spec = NewSegmentOutputSpec def __init__(self, **inputs): _local_version = SPMCommand().version if _local_version and "12." 
in _local_version: self._jobtype = "spatial" self._jobname = "preproc" else: self._jobtype = "tools" self._jobname = "preproc8" SPMCommand.__init__(self, **inputs) def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt in ["channel_files", "channel_info"]: # structure have to be recreated because of some weird traits error new_channel = {} new_channel["vols"] = scans_for_fnames(self.inputs.channel_files) if isdefined(self.inputs.channel_info): info = self.inputs.channel_info new_channel["biasreg"] = info[0] new_channel["biasfwhm"] = info[1] new_channel["write"] = [int(info[2][0]), int(info[2][1])] return [new_channel] elif opt == "tissues": new_tissues = [] for tissue in val: new_tissue = {} new_tissue["tpm"] = np.array( [",".join([tissue[0][0], str(tissue[0][1])])], dtype=object ) new_tissue["ngaus"] = tissue[1] new_tissue["native"] = [int(tissue[2][0]), int(tissue[2][1])] new_tissue["warped"] = [int(tissue[3][0]), int(tissue[3][1])] new_tissues.append(new_tissue) return new_tissues elif opt == "write_deformation_fields": return super(NewSegment, self)._format_arg( opt, spec, [int(val[0]), int(val[1])] ) else: return super(NewSegment, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() outputs["native_class_images"] = [] outputs["dartel_input_images"] = [] outputs["normalized_class_images"] = [] outputs["modulated_class_images"] = [] outputs["transformation_mat"] = [] outputs["bias_corrected_images"] = [] outputs["bias_field_images"] = [] outputs["inverse_deformation_field"] = [] outputs["forward_deformation_field"] = [] n_classes = 5 if isdefined(self.inputs.tissues): n_classes = len(self.inputs.tissues) for i in range(n_classes): outputs["native_class_images"].append([]) outputs["dartel_input_images"].append([]) outputs["normalized_class_images"].append([]) outputs["modulated_class_images"].append([]) for filename in self.inputs.channel_files: pth, base, ext = 
split_filename(filename) if isdefined(self.inputs.tissues): for i, tissue in enumerate(self.inputs.tissues): if tissue[2][0]: outputs["native_class_images"][i].append( os.path.join(pth, "c%d%s.nii" % (i + 1, base)) ) if tissue[2][1]: outputs["dartel_input_images"][i].append( os.path.join(pth, "rc%d%s.nii" % (i + 1, base)) ) if tissue[3][0]: outputs["normalized_class_images"][i].append( os.path.join(pth, "wc%d%s.nii" % (i + 1, base)) ) if tissue[3][1]: outputs["modulated_class_images"][i].append( os.path.join(pth, "mwc%d%s.nii" % (i + 1, base)) ) else: for i in range(n_classes): outputs["native_class_images"][i].append( os.path.join(pth, "c%d%s.nii" % (i + 1, base)) ) outputs["transformation_mat"].append( os.path.join(pth, "%s_seg8.mat" % base) ) if isdefined(self.inputs.write_deformation_fields): if self.inputs.write_deformation_fields[0]: outputs["inverse_deformation_field"].append( os.path.join(pth, "iy_%s.nii" % base) ) if self.inputs.write_deformation_fields[1]: outputs["forward_deformation_field"].append( os.path.join(pth, "y_%s.nii" % base) ) if isdefined(self.inputs.channel_info): if self.inputs.channel_info[2][0]: outputs["bias_field_images"].append( os.path.join(pth, "BiasField_%s.nii" % (base)) ) if self.inputs.channel_info[2][1]: outputs["bias_corrected_images"].append( os.path.join(pth, "m%s.nii" % (base)) ) return outputs class MultiChannelNewSegmentInputSpec(SPMCommandInputSpec): channels = traits.List( traits.Tuple( InputMultiPath( ImageFileSPM(exists=True), mandatory=True, desc="A list of files to be segmented", field="channel", copyfile=False, ), traits.Tuple( traits.Float(), traits.Float(), traits.Tuple(traits.Bool, traits.Bool), desc="""A tuple with the following fields: - bias reguralisation (0-10) - FWHM of Gaussian smoothness of bias - which maps to save (Field, Corrected) - a tuple of two boolean values""", field="channel", ), ), desc="""A list of tuples (one per each channel) with the following fields: - a list of channel files (only 1rst 
channel files will be segmented) - a tuple with the following channel-specific info fields: - bias reguralisation (0-10) - FWHM of Gaussian smoothness of bias - which maps to save (Field, Corrected) - a tuple of two boolean values""", field="channel", ) tissues = traits.List( traits.Tuple( traits.Tuple(ImageFileSPM(exists=True), traits.Int()), traits.Int(), traits.Tuple(traits.Bool, traits.Bool), traits.Tuple(traits.Bool, traits.Bool), ), desc="""A list of tuples (one per tissue) with the following fields: - tissue probability map (4D), 1-based index to frame - number of gaussians - which maps to save [Native, DARTEL] - a tuple of two boolean values - which maps to save [Unmodulated, Modulated] - a tuple of two boolean values""", field="tissue", ) affine_regularization = traits.Enum( "mni", "eastern", "subj", "none", field="warp.affreg", desc="mni, eastern, subj, none ", ) warping_regularization = traits.Either( traits.List(traits.Float(), minlen=5, maxlen=5), traits.Float(), field="warp.reg", desc=( "Warping regularization " "parameter(s). 
Accepts float " "or list of floats (the " "latter is required by " "SPM12)" ), ) sampling_distance = traits.Float( field="warp.samp", desc=("Sampling distance on data for parameter estimation") ) write_deformation_fields = traits.List( traits.Bool(), minlen=2, maxlen=2, field="warp.write", desc=("Which deformation fields to write:[Inverse, Forward]"), ) class MultiChannelNewSegmentOutputSpec(TraitedSpec): native_class_images = traits.List( traits.List(File(exists=True)), desc="native space probability maps" ) dartel_input_images = traits.List( traits.List(File(exists=True)), desc="dartel imported class images" ) normalized_class_images = traits.List( traits.List(File(exists=True)), desc="normalized class images" ) modulated_class_images = traits.List( traits.List(File(exists=True)), desc=("modulated+normalized class images") ) transformation_mat = OutputMultiPath( File(exists=True), desc="Normalization transformation" ) bias_corrected_images = OutputMultiPath( File(exists=True), desc="bias corrected images" ) bias_field_images = OutputMultiPath(File(exists=True), desc="bias field images") forward_deformation_field = OutputMultiPath(File(exists=True)) inverse_deformation_field = OutputMultiPath(File(exists=True)) class MultiChannelNewSegment(SPMCommand): """Use spm_preproc8 (New Segment) to separate structural images into different tissue classes. Supports multiple modalities and multichannel inputs. 
http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=45 Examples -------- >>> import nipype.interfaces.spm as spm >>> seg = spm.MultiChannelNewSegment() >>> seg.inputs.channels = [('structural.nii',(0.0001, 60, (True, True)))] >>> seg.run() # doctest: +SKIP For VBM pre-processing [http://www.fil.ion.ucl.ac.uk/~john/misc/VBMclass10.pdf], TPM.nii should be replaced by /path/to/spm8/toolbox/Seg/TPM.nii >>> seg = MultiChannelNewSegment() >>> channel1= ('T1.nii',(0.0001, 60, (True, True))) >>> channel2= ('T2.nii',(0.0001, 60, (True, True))) >>> seg.inputs.channels = [channel1, channel2] >>> tissue1 = (('TPM.nii', 1), 2, (True,True), (False, False)) >>> tissue2 = (('TPM.nii', 2), 2, (True,True), (False, False)) >>> tissue3 = (('TPM.nii', 3), 2, (True,False), (False, False)) >>> tissue4 = (('TPM.nii', 4), 2, (False,False), (False, False)) >>> tissue5 = (('TPM.nii', 5), 2, (False,False), (False, False)) >>> seg.inputs.tissues = [tissue1, tissue2, tissue3, tissue4, tissue5] >>> seg.run() # doctest: +SKIP """ input_spec = MultiChannelNewSegmentInputSpec output_spec = MultiChannelNewSegmentOutputSpec def __init__(self, **inputs): _local_version = SPMCommand().version if _local_version and "12." 
in _local_version: self._jobtype = "spatial" self._jobname = "preproc" else: self._jobtype = "tools" self._jobname = "preproc8" SPMCommand.__init__(self, **inputs) def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt == "channels": # structure have to be recreated because of some weird traits error new_channels = [] for channel in val: new_channel = {} new_channel["vols"] = scans_for_fnames(channel[0]) if isdefined(channel[1]): info = channel[1] new_channel["biasreg"] = info[0] new_channel["biasfwhm"] = info[1] new_channel["write"] = [int(info[2][0]), int(info[2][1])] new_channels.append(new_channel) return new_channels elif opt == "tissues": new_tissues = [] for tissue in val: new_tissue = {} new_tissue["tpm"] = np.array( [",".join([tissue[0][0], str(tissue[0][1])])], dtype=object ) new_tissue["ngaus"] = tissue[1] new_tissue["native"] = [int(tissue[2][0]), int(tissue[2][1])] new_tissue["warped"] = [int(tissue[3][0]), int(tissue[3][1])] new_tissues.append(new_tissue) return new_tissues elif opt == "write_deformation_fields": return super(MultiChannelNewSegment, self)._format_arg( opt, spec, [int(val[0]), int(val[1])] ) else: return super(MultiChannelNewSegment, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() outputs["native_class_images"] = [] outputs["dartel_input_images"] = [] outputs["normalized_class_images"] = [] outputs["modulated_class_images"] = [] outputs["transformation_mat"] = [] outputs["bias_corrected_images"] = [] outputs["bias_field_images"] = [] outputs["inverse_deformation_field"] = [] outputs["forward_deformation_field"] = [] n_classes = 5 if isdefined(self.inputs.tissues): n_classes = len(self.inputs.tissues) for i in range(n_classes): outputs["native_class_images"].append([]) outputs["dartel_input_images"].append([]) outputs["normalized_class_images"].append([]) outputs["modulated_class_images"].append([]) # main outputs are generated for the first channel 
images only for filename in self.inputs.channels[0][0]: pth, base, ext = split_filename(filename) if isdefined(self.inputs.tissues): for i, tissue in enumerate(self.inputs.tissues): if tissue[2][0]: outputs["native_class_images"][i].append( os.path.join(pth, "c%d%s.nii" % (i + 1, base)) ) if tissue[2][1]: outputs["dartel_input_images"][i].append( os.path.join(pth, "rc%d%s.nii" % (i + 1, base)) ) if tissue[3][0]: outputs["normalized_class_images"][i].append( os.path.join(pth, "wc%d%s.nii" % (i + 1, base)) ) if tissue[3][1]: outputs["modulated_class_images"][i].append( os.path.join(pth, "mwc%d%s.nii" % (i + 1, base)) ) else: for i in range(n_classes): outputs["native_class_images"][i].append( os.path.join(pth, "c%d%s.nii" % (i + 1, base)) ) outputs["transformation_mat"].append( os.path.join(pth, "%s_seg8.mat" % base) ) if isdefined(self.inputs.write_deformation_fields): if self.inputs.write_deformation_fields[0]: outputs["inverse_deformation_field"].append( os.path.join(pth, "iy_%s.nii" % base) ) if self.inputs.write_deformation_fields[1]: outputs["forward_deformation_field"].append( os.path.join(pth, "y_%s.nii" % base) ) # bias field related images are generated for images in all channels for channel in self.inputs.channels: for filename in channel[0]: pth, base, ext = split_filename(filename) if isdefined(channel[1]): if channel[1][2][0]: outputs["bias_field_images"].append( os.path.join(pth, "BiasField_%s.nii" % (base)) ) if channel[1][2][1]: outputs["bias_corrected_images"].append( os.path.join(pth, "m%s.nii" % (base)) ) return outputs class SmoothInputSpec(SPMCommandInputSpec): in_files = InputMultiPath( ImageFileSPM(exists=True), field="data", desc="list of files to smooth", mandatory=True, copyfile=False, ) fwhm = traits.Either( traits.List(traits.Float(), minlen=3, maxlen=3), traits.Float(), field="fwhm", desc="3-list of fwhm for each dimension", ) data_type = traits.Int(field="dtype", desc="Data type of the output images") implicit_masking = traits.Bool( 
field="im", desc=("A mask implied by a particular voxel value") ) out_prefix = traits.String( "s", field="prefix", usedefault=True, desc="smoothed output prefix" ) class SmoothOutputSpec(TraitedSpec): smoothed_files = OutputMultiPath(File(exists=True), desc="smoothed files") class Smooth(SPMCommand): """Use spm_smooth for 3D Gaussian smoothing of image volumes. http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=55 Examples -------- >>> import nipype.interfaces.spm as spm >>> smooth = spm.Smooth() >>> smooth.inputs.in_files = 'functional.nii' >>> smooth.inputs.fwhm = [4, 4, 4] >>> smooth.run() # doctest: +SKIP """ input_spec = SmoothInputSpec output_spec = SmoothOutputSpec _jobtype = "spatial" _jobname = "smooth" def _format_arg(self, opt, spec, val): if opt in ["in_files"]: return scans_for_fnames(ensure_list(val)) if opt == "fwhm": if not isinstance(val, list): return [val, val, val] if isinstance(val, list): if len(val) == 1: return [val[0], val[0], val[0]] else: return val return super(Smooth, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() outputs["smoothed_files"] = [] for imgf in ensure_list(self.inputs.in_files): outputs["smoothed_files"].append( fname_presuffix(imgf, prefix=self.inputs.out_prefix) ) return outputs class DARTELInputSpec(SPMCommandInputSpec): image_files = traits.List( traits.List(ImageFileSPM(exists=True)), desc="A list of files to be segmented", field="warp.images", copyfile=False, mandatory=True, ) template_prefix = traits.Str( "Template", usedefault=True, field="warp.settings.template", desc="Prefix for template", ) regularization_form = traits.Enum( "Linear", "Membrane", "Bending", field="warp.settings.rform", desc=("Form of regularization energy term"), ) iteration_parameters = traits.List( traits.Tuple( traits.Range(1, 10), traits.Tuple(traits.Float, traits.Float, traits.Float), traits.Enum(1, 2, 4, 8, 16, 32, 64, 128, 256, 512), traits.Enum(0, 0.5, 1, 2, 4, 8, 16, 32), ), minlen=3, 
maxlen=12, field="warp.settings.param", desc="""\ List of tuples for each iteration * Inner iterations * Regularization parameters * Time points for deformation model * smoothing parameter """, ) optimization_parameters = traits.Tuple( traits.Float, traits.Range(1, 8), traits.Range(1, 8), field="warp.settings.optim", desc="""\ Optimization settings a tuple: * LM regularization * cycles of multigrid solver * relaxation iterations """, ) class DARTELOutputSpec(TraitedSpec): final_template_file = File(exists=True, desc="final DARTEL template") template_files = traits.List( File(exists=True), desc=("Templates from different stages of iteration") ) dartel_flow_fields = traits.List(File(exists=True), desc="DARTEL flow fields") class DARTEL(SPMCommand): """Use spm DARTEL to create a template and flow fields http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=185 Examples -------- >>> import nipype.interfaces.spm as spm >>> dartel = spm.DARTEL() >>> dartel.inputs.image_files = [['rc1s1.nii','rc1s2.nii'],['rc2s1.nii', 'rc2s2.nii']] >>> dartel.run() # doctest: +SKIP """ input_spec = DARTELInputSpec output_spec = DARTELOutputSpec _jobtype = "tools" _jobname = "dartel" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt in ["image_files"]: return scans_for_fnames(val, keep4d=True, separate_sessions=True) elif opt == "regularization_form": mapper = {"Linear": 0, "Membrane": 1, "Bending": 2} return mapper[val] elif opt == "iteration_parameters": params = [] for param in val: new_param = {} new_param["its"] = param[0] new_param["rparam"] = list(param[1]) new_param["K"] = param[2] new_param["slam"] = param[3] params.append(new_param) return params elif opt == "optimization_parameters": new_param = {} new_param["lmreg"] = val[0] new_param["cyc"] = val[1] new_param["its"] = val[2] return [new_param] else: return super(DARTEL, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() 
outputs["template_files"] = [] for i in range(6): outputs["template_files"].append( os.path.realpath("%s_%d.nii" % (self.inputs.template_prefix, i + 1)) ) outputs["final_template_file"] = os.path.realpath( "%s_6.nii" % self.inputs.template_prefix ) outputs["dartel_flow_fields"] = [] for filename in self.inputs.image_files[0]: pth, base, ext = split_filename(filename) outputs["dartel_flow_fields"].append( os.path.realpath("u_%s_%s%s" % (base, self.inputs.template_prefix, ext)) ) return outputs class DARTELNorm2MNIInputSpec(SPMCommandInputSpec): template_file = ImageFileSPM( exists=True, copyfile=False, mandatory=True, desc="DARTEL template", field="mni_norm.template", ) flowfield_files = InputMultiPath( ImageFileSPM(exists=True), mandatory=True, desc="DARTEL flow fields u_rc1*", field="mni_norm.data.subjs.flowfields", ) apply_to_files = InputMultiPath( ImageFileSPM(exists=True), desc="Files to apply the transform to", field="mni_norm.data.subjs.images", mandatory=True, copyfile=False, ) voxel_size = traits.Tuple( traits.Float, traits.Float, traits.Float, desc="Voxel sizes for output file", field="mni_norm.vox", ) bounding_box = traits.Tuple( traits.Float, traits.Float, traits.Float, traits.Float, traits.Float, traits.Float, desc="Voxel sizes for output file", field="mni_norm.bb", ) modulate = traits.Bool( field="mni_norm.preserve", desc=("Modulate out images - no modulation preserves concentrations"), ) fwhm = traits.Either( traits.List(traits.Float(), minlen=3, maxlen=3), traits.Float(), field="mni_norm.fwhm", desc="3-list of fwhm for each dimension", ) class DARTELNorm2MNIOutputSpec(TraitedSpec): normalized_files = OutputMultiPath( File(exists=True), desc="Normalized files in MNI space" ) normalization_parameter_file = File( exists=True, desc=("Transform parameters to MNI space") ) class DARTELNorm2MNI(SPMCommand): """Use spm DARTEL to normalize data to MNI space http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=188 Examples -------- >>> import 
nipype.interfaces.spm as spm >>> nm = spm.DARTELNorm2MNI() >>> nm.inputs.template_file = 'Template_6.nii' >>> nm.inputs.flowfield_files = ['u_rc1s1_Template.nii', 'u_rc1s3_Template.nii'] >>> nm.inputs.apply_to_files = ['c1s1.nii', 'c1s3.nii'] >>> nm.inputs.modulate = True >>> nm.run() # doctest: +SKIP """ input_spec = DARTELNorm2MNIInputSpec output_spec = DARTELNorm2MNIOutputSpec _jobtype = "tools" _jobname = "dartel" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt in ["template_file"]: return np.array([val], dtype=object) elif opt in ["flowfield_files"]: return scans_for_fnames(val, keep4d=True) elif opt in ["apply_to_files"]: return scans_for_fnames(val, keep4d=True, separate_sessions=True) elif opt == "voxel_size": return list(val) elif opt == "bounding_box": return list(val) elif opt == "fwhm": if isinstance(val, list): return val else: return [val, val, val] else: return super(DARTELNorm2MNI, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() pth, base, ext = split_filename(self.inputs.template_file) outputs["normalization_parameter_file"] = os.path.realpath(base + "_2mni.mat") outputs["normalized_files"] = [] prefix = "w" if isdefined(self.inputs.modulate) and self.inputs.modulate: prefix = "m" + prefix if not isdefined(self.inputs.fwhm) or self.inputs.fwhm > 0: prefix = "s" + prefix for filename in self.inputs.apply_to_files: pth, base, ext = split_filename(filename) outputs["normalized_files"].append( os.path.realpath("%s%s%s" % (prefix, base, ext)) ) return outputs class CreateWarpedInputSpec(SPMCommandInputSpec): image_files = InputMultiPath( ImageFileSPM(exists=True), mandatory=True, desc="A list of files to be warped", field="crt_warped.images", copyfile=False, ) flowfield_files = InputMultiPath( ImageFileSPM(exists=True), copyfile=False, desc="DARTEL flow fields u_rc1*", field="crt_warped.flowfields", mandatory=True, ) iterations = traits.Range( low=0, high=9, 
desc=("The number of iterations: log2(number of time steps)"), field="crt_warped.K", ) interp = traits.Range( low=0, high=7, field="crt_warped.interp", desc="degree of b-spline used for interpolation", ) modulate = traits.Bool(field="crt_warped.jactransf", desc="Modulate images") class CreateWarpedOutputSpec(TraitedSpec): warped_files = traits.List(File(exists=True, desc="final warped files")) class CreateWarped(SPMCommand): """Apply a flow field estimated by DARTEL to create warped images http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=190 Examples -------- >>> import nipype.interfaces.spm as spm >>> create_warped = spm.CreateWarped() >>> create_warped.inputs.image_files = ['rc1s1.nii', 'rc1s2.nii'] >>> create_warped.inputs.flowfield_files = ['u_rc1s1_Template.nii', 'u_rc1s2_Template.nii'] >>> create_warped.run() # doctest: +SKIP """ input_spec = CreateWarpedInputSpec output_spec = CreateWarpedOutputSpec _jobtype = "tools" _jobname = "dartel" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt in ["image_files"]: return scans_for_fnames(val, keep4d=True, separate_sessions=True) if opt in ["flowfield_files"]: return scans_for_fnames(val, keep4d=True) else: return super(CreateWarped, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() outputs["warped_files"] = [] for filename in self.inputs.image_files: pth, base, ext = split_filename(filename) if isdefined(self.inputs.modulate) and self.inputs.modulate: outputs["warped_files"].append(os.path.realpath("mw%s%s" % (base, ext))) else: outputs["warped_files"].append(os.path.realpath("w%s%s" % (base, ext))) return outputs class ApplyDeformationFieldInputSpec(SPMCommandInputSpec): in_files = InputMultiPath(ImageFileSPM(exists=True), mandatory=True, field="fnames") deformation_field = File(exists=True, mandatory=True, field="comp{1}.def") reference_volume = ImageFileSPM( exists=True, mandatory=True, field="comp{2}.id.space" ) interp = 
traits.Range( low=0, high=7, field="interp", desc="degree of b-spline used for interpolation" ) class ApplyDeformationFieldOutputSpec(TraitedSpec): out_files = OutputMultiPath(File(exists=True)) class ApplyDeformations(SPMCommand): input_spec = ApplyDeformationFieldInputSpec output_spec = ApplyDeformationFieldOutputSpec _jobtype = "util" _jobname = "defs" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt in ["deformation_field", "reference_volume"]: val = [val] if opt in ["deformation_field"]: return scans_for_fnames(val, keep4d=True, separate_sessions=False) if opt in ["in_files", "reference_volume"]: return scans_for_fnames(val, keep4d=False, separate_sessions=False) else: return super(ApplyDeformations, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() outputs["out_files"] = [] for filename in self.inputs.in_files: _, fname = os.path.split(filename) outputs["out_files"].append(os.path.realpath("w%s" % fname)) return outputs class VBMSegmentInputSpec(SPMCommandInputSpec): in_files = InputMultiPath( ImageFileSPM(exists=True), desc="A list of files to be segmented", field="estwrite.data", copyfile=False, mandatory=True, ) tissues = ImageFileSPM( exists=True, field="estwrite.tpm", desc="tissue probability map" ) gaussians_per_class = traits.Tuple( (2, 2, 2, 3, 4, 2), *([traits.Int()] * 6), usedefault=True, desc="number of gaussians for each tissue class" ) bias_regularization = traits.Enum( 0.0001, (0, 0.00001, 0.0001, 0.001, 0.01, 0.1, 1, 10), field="estwrite.opts.biasreg", usedefault=True, desc="no(0) - extremely heavy (10)", ) bias_fwhm = traits.Enum( 60, (30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, "Inf"), field="estwrite.opts.biasfwhm", usedefault=True, desc="FWHM of Gaussian smoothness of bias", ) sampling_distance = traits.Float( 3, usedefault=True, field="estwrite.opts.samp", desc="Sampling distance on data for parameter estimation", ) warping_regularization = 
traits.Float( 4, usedefault=True, field="estwrite.opts.warpreg", desc="Controls balance between parameters and data", ) spatial_normalization = traits.Enum("high", "low", usedefault=True) dartel_template = ImageFileSPM( exists=True, field="estwrite.extopts.dartelwarp.normhigh.darteltpm" ) use_sanlm_denoising_filter = traits.Range( 0, 2, 2, usedefault=True, field="estwrite.extopts.sanlm", desc="0=No denoising, 1=denoising,2=denoising multi-threaded", ) mrf_weighting = traits.Float(0.15, usedefault=True, field="estwrite.extopts.mrf") cleanup_partitions = traits.Int( 1, usedefault=True, field="estwrite.extopts.cleanup", desc="0=None,1=light,2=thorough", ) display_results = traits.Bool(True, usedefault=True, field="estwrite.extopts.print") gm_native = traits.Bool(False, usedefault=True, field="estwrite.output.GM.native") gm_normalized = traits.Bool( False, usedefault=True, field="estwrite.output.GM.warped" ) gm_modulated_normalized = traits.Range( 0, 2, 2, usedefault=True, field="estwrite.output.GM.modulated", desc="0=none,1=affine+non-linear(SPM8 default),2=non-linear only", ) gm_dartel = traits.Range( 0, 2, 0, usedefault=True, field="estwrite.output.GM.dartel", desc="0=None,1=rigid(SPM8 default),2=affine", ) wm_native = traits.Bool(False, usedefault=True, field="estwrite.output.WM.native") wm_normalized = traits.Bool( False, usedefault=True, field="estwrite.output.WM.warped" ) wm_modulated_normalized = traits.Range( 0, 2, 2, usedefault=True, field="estwrite.output.WM.modulated", desc="0=none,1=affine+non-linear(SPM8 default),2=non-linear only", ) wm_dartel = traits.Range( 0, 2, 0, usedefault=True, field="estwrite.output.WM.dartel", desc="0=None,1=rigid(SPM8 default),2=affine", ) csf_native = traits.Bool(False, usedefault=True, field="estwrite.output.CSF.native") csf_normalized = traits.Bool( False, usedefault=True, field="estwrite.output.CSF.warped" ) csf_modulated_normalized = traits.Range( 0, 2, 2, usedefault=True, field="estwrite.output.CSF.modulated", 
desc="0=none,1=affine+non-linear(SPM8 default),2=non-linear only", ) csf_dartel = traits.Range( 0, 2, 0, usedefault=True, field="estwrite.output.CSF.dartel", desc="0=None,1=rigid(SPM8 default),2=affine", ) bias_corrected_native = traits.Bool( False, usedefault=True, field="estwrite.output.bias.native" ) bias_corrected_normalized = traits.Bool( True, usedefault=True, field="estwrite.output.bias.warped" ) bias_corrected_affine = traits.Bool( False, usedefault=True, field="estwrite.output.bias.affine" ) pve_label_native = traits.Bool( False, usedefault=True, field="estwrite.output.label.native" ) pve_label_normalized = traits.Bool( False, usedefault=True, field="estwrite.output.label.warped" ) pve_label_dartel = traits.Range( 0, 2, 0, usedefault=True, field="estwrite.output.label.dartel", desc="0=None,1=rigid(SPM8 default),2=affine", ) jacobian_determinant = traits.Bool( False, usedefault=True, field="estwrite.jacobian.warped" ) deformation_field = traits.Tuple( (0, 0), traits.Bool, traits.Bool, usedefault=True, field="estwrite.output.warps", desc="forward and inverse field", ) class VBMSegmentOuputSpec(TraitedSpec): native_class_images = traits.List( traits.List(File(exists=True)), desc="native space probability maps" ) dartel_input_images = traits.List( traits.List(File(exists=True)), desc="dartel imported class images" ) normalized_class_images = traits.List( traits.List(File(exists=True)), desc="normalized class images" ) modulated_class_images = traits.List( traits.List(File(exists=True)), desc=("modulated+normalized class images") ) transformation_mat = OutputMultiPath( File(exists=True), desc="Normalization transformation" ) bias_corrected_images = OutputMultiPath( File(exists=True), desc="bias corrected images" ) normalized_bias_corrected_images = OutputMultiPath( File(exists=True), desc="bias corrected images" ) pve_label_native_images = OutputMultiPath(File(exists=True)) pve_label_normalized_images = OutputMultiPath(File(exists=True)) 
pve_label_registered_images = OutputMultiPath(File(exists=True)) forward_deformation_field = OutputMultiPath(File(exists=True)) inverse_deformation_field = OutputMultiPath(File(exists=True)) jacobian_determinant_images = OutputMultiPath(File(exists=True)) class VBMSegment(SPMCommand): """Use VBM8 toolbox to separate structural images into different tissue classes. Example ------- >>> import nipype.interfaces.spm as spm >>> seg = spm.VBMSegment() >>> seg.inputs.tissues = 'TPM.nii' >>> seg.inputs.dartel_template = 'Template_1_IXI550_MNI152.nii' >>> seg.inputs.bias_corrected_native = True >>> seg.inputs.gm_native = True >>> seg.inputs.wm_native = True >>> seg.inputs.csf_native = True >>> seg.inputs.pve_label_native = True >>> seg.inputs.deformation_field = (True, False) >>> seg.run() # doctest: +SKIP """ input_spec = VBMSegmentInputSpec output_spec = VBMSegmentOuputSpec _jobtype = "tools" _jobname = "vbm8" def _list_outputs(self): outputs = self._outputs().get() do_dartel = self.inputs.spatial_normalization dartel_px = "" if do_dartel: dartel_px = "r" outputs["native_class_images"] = [[], [], []] outputs["dartel_input_images"] = [[], [], []] outputs["normalized_class_images"] = [[], [], []] outputs["modulated_class_images"] = [[], [], []] outputs["transformation_mat"] = [] outputs["bias_corrected_images"] = [] outputs["normalized_bias_corrected_images"] = [] outputs["inverse_deformation_field"] = [] outputs["forward_deformation_field"] = [] outputs["jacobian_determinant_images"] = [] outputs["pve_label_native_images"] = [] outputs["pve_label_normalized_images"] = [] outputs["pve_label_registered_images"] = [] for filename in self.inputs.in_files: pth, base, ext = split_filename(filename) outputs["transformation_mat"].append( os.path.join(pth, "%s_seg8.mat" % base) ) for i, tis in enumerate(["gm", "wm", "csf"]): # native space if getattr(self.inputs, "%s_native" % tis): outputs["native_class_images"][i].append( os.path.join(pth, "p%d%s.nii" % (i + 1, base)) ) if 
getattr(self.inputs, "%s_dartel" % tis) == 1: outputs["dartel_input_images"][i].append( os.path.join(pth, "rp%d%s.nii" % (i + 1, base)) ) elif getattr(self.inputs, "%s_dartel" % tis) == 2: outputs["dartel_input_images"][i].append( os.path.join(pth, "rp%d%s_affine.nii" % (i + 1, base)) ) # normalized space if getattr(self.inputs, "%s_normalized" % tis): outputs["normalized_class_images"][i].append( os.path.join(pth, "w%sp%d%s.nii" % (dartel_px, i + 1, base)) ) if getattr(self.inputs, "%s_modulated_normalized" % tis) == 1: outputs["modulated_class_images"][i].append( os.path.join(pth, "mw%sp%d%s.nii" % (dartel_px, i + 1, base)) ) elif getattr(self.inputs, "%s_modulated_normalized" % tis) == 2: outputs["normalized_class_images"][i].append( os.path.join(pth, "m0w%sp%d%s.nii" % (dartel_px, i + 1, base)) ) if self.inputs.pve_label_native: outputs["pve_label_native_images"].append( os.path.join(pth, "p0%s.nii" % (base)) ) if self.inputs.pve_label_normalized: outputs["pve_label_normalized_images"].append( os.path.join(pth, "w%sp0%s.nii" % (dartel_px, base)) ) if self.inputs.pve_label_dartel == 1: outputs["pve_label_registered_images"].append( os.path.join(pth, "rp0%s.nii" % (base)) ) elif self.inputs.pve_label_dartel == 2: outputs["pve_label_registered_images"].append( os.path.join(pth, "rp0%s_affine.nii" % (base)) ) if self.inputs.bias_corrected_native: outputs["bias_corrected_images"].append( os.path.join(pth, "m%s.nii" % (base)) ) if self.inputs.bias_corrected_normalized: outputs["normalized_bias_corrected_images"].append( os.path.join(pth, "wm%s%s.nii" % (dartel_px, base)) ) if self.inputs.deformation_field[0]: outputs["forward_deformation_field"].append( os.path.join(pth, "y_%s%s.nii" % (dartel_px, base)) ) if self.inputs.deformation_field[1]: outputs["inverse_deformation_field"].append( os.path.join(pth, "iy_%s%s.nii" % (dartel_px, base)) ) if self.inputs.jacobian_determinant and do_dartel: outputs["jacobian_determinant_images"].append( os.path.join(pth, 
"jac_wrp1%s.nii" % (base)) ) return outputs def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt in ["in_files"]: return scans_for_fnames(val, keep4d=True) elif opt in ["spatial_normalization"]: if val == "low": return {"normlow": []} elif opt in ["dartel_template"]: return np.array([val], dtype=object) elif opt in ["deformation_field"]: return super(VBMSegment, self)._format_arg( opt, spec, [int(val[0]), int(val[1])] ) else: return super(VBMSegment, self)._format_arg(opt, spec, val) def _parse_inputs(self): if self.inputs.spatial_normalization == "low": einputs = super(VBMSegment, self)._parse_inputs( skip=("spatial_normalization", "dartel_template") ) einputs[0]["estwrite"]["extopts"]["dartelwarp"] = {"normlow": 1} return einputs else: return super(VBMSegment, self)._parse_inputs(skip=("spatial_normalization")) nipype-1.7.0/nipype/interfaces/spm/tests/000077500000000000000000000000001413403311400204055ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/spm/tests/__init__.py000066400000000000000000000000301413403311400225070ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_Analyze2nii.py000066400000000000000000000023341413403311400252350ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Analyze2nii def test_Analyze2nii_inputs(): input_map = dict( analyze_file=dict( extensions=None, mandatory=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), paths=dict(), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = Analyze2nii.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Analyze2nii_outputs(): output_map = dict( matlab_cmd=dict(), mfile=dict( usedefault=True, ), nifti_file=dict( extensions=None, ), paths=dict(), use_mcr=dict(), use_v8struct=dict( min_ver="8", 
usedefault=True, ), ) outputs = Analyze2nii.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_ApplyDeformations.py000066400000000000000000000025331413403311400265110ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import ApplyDeformations def test_ApplyDeformations_inputs(): input_map = dict( deformation_field=dict( extensions=None, field="comp{1}.def", mandatory=True, ), in_files=dict( field="fnames", mandatory=True, ), interp=dict( field="interp", ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), paths=dict(), reference_volume=dict( extensions=[".hdr", ".img", ".img.gz", ".nii"], field="comp{2}.id.space", mandatory=True, ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = ApplyDeformations.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ApplyDeformations_outputs(): output_map = dict( out_files=dict(), ) outputs = ApplyDeformations.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_ApplyInverseDeformation.py000066400000000000000000000032331413403311400276600ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ApplyInverseDeformation def test_ApplyInverseDeformation_inputs(): input_map = dict( bounding_box=dict( field="comp{1}.inv.comp{1}.sn2def.bb", ), deformation=dict( extensions=None, field="comp{1}.inv.comp{1}.sn2def.matname", xor=["deformation_field"], ), deformation_field=dict( extensions=None, field="comp{1}.inv.comp{1}.def", xor=["deformation"], ), in_files=dict( field="fnames", 
mandatory=True, ), interpolation=dict( field="interp", ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), paths=dict(), target=dict( extensions=None, field="comp{1}.inv.space", ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), voxel_sizes=dict( field="comp{1}.inv.comp{1}.sn2def.vox", ), ) inputs = ApplyInverseDeformation.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ApplyInverseDeformation_outputs(): output_map = dict( out_files=dict(), ) outputs = ApplyInverseDeformation.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_ApplyTransform.py000066400000000000000000000023151413403311400260300ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ApplyTransform def test_ApplyTransform_inputs(): input_map = dict( in_file=dict( copyfile=True, extensions=None, mandatory=True, ), mat=dict( extensions=None, mandatory=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), out_file=dict( extensions=None, genfile=True, ), paths=dict(), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = ApplyTransform.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ApplyTransform_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ApplyTransform.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_CalcCoregAffine.py000066400000000000000000000024531413403311400260050ustar00rootroot00000000000000# AUTO-GENERATED 
by tools/checkspecs.py - DO NOT EDIT from ..utils import CalcCoregAffine def test_CalcCoregAffine_inputs(): input_map = dict( invmat=dict( extensions=None, ), mat=dict( extensions=None, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), moving=dict( copyfile=False, extensions=None, mandatory=True, ), paths=dict(), target=dict( extensions=None, mandatory=True, ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = CalcCoregAffine.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CalcCoregAffine_outputs(): output_map = dict( invmat=dict( extensions=None, ), mat=dict( extensions=None, ), ) outputs = CalcCoregAffine.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_Coregister.py000066400000000000000000000036671413403311400251700ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Coregister def test_Coregister_inputs(): input_map = dict( apply_to_files=dict( copyfile=True, field="other", ), cost_function=dict( field="eoptions.cost_fun", ), fwhm=dict( field="eoptions.fwhm", ), jobtype=dict( usedefault=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), out_prefix=dict( field="roptions.prefix", usedefault=True, ), paths=dict(), separation=dict( field="eoptions.sep", ), source=dict( copyfile=True, field="source", mandatory=True, ), target=dict( copyfile=False, extensions=[".hdr", ".img", ".img.gz", ".nii"], field="ref", mandatory=True, ), tolerance=dict( field="eoptions.tol", ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), write_interp=dict( field="roptions.interp", ), write_mask=dict( field="roptions.mask", ), write_wrap=dict( field="roptions.wrap", ), ) inputs = 
Coregister.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Coregister_outputs(): output_map = dict( coregistered_files=dict(), coregistered_source=dict(), ) outputs = Coregister.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_CreateWarped.py000066400000000000000000000025531413403311400254210ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import CreateWarped def test_CreateWarped_inputs(): input_map = dict( flowfield_files=dict( copyfile=False, field="crt_warped.flowfields", mandatory=True, ), image_files=dict( copyfile=False, field="crt_warped.images", mandatory=True, ), interp=dict( field="crt_warped.interp", ), iterations=dict( field="crt_warped.K", ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), modulate=dict( field="crt_warped.jactransf", ), paths=dict(), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = CreateWarped.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CreateWarped_outputs(): output_map = dict( warped_files=dict(), ) outputs = CreateWarped.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_DARTEL.py000066400000000000000000000027121413403311400240230ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import DARTEL def test_DARTEL_inputs(): input_map = dict( image_files=dict( copyfile=False, field="warp.images", mandatory=True, ), iteration_parameters=dict( 
field="warp.settings.param", ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), optimization_parameters=dict( field="warp.settings.optim", ), paths=dict(), regularization_form=dict( field="warp.settings.rform", ), template_prefix=dict( field="warp.settings.template", usedefault=True, ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = DARTEL.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DARTEL_outputs(): output_map = dict( dartel_flow_fields=dict(), final_template_file=dict( extensions=None, ), template_files=dict(), ) outputs = DARTEL.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_DARTELNorm2MNI.py000066400000000000000000000033051413403311400253040ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import DARTELNorm2MNI def test_DARTELNorm2MNI_inputs(): input_map = dict( apply_to_files=dict( copyfile=False, field="mni_norm.data.subjs.images", mandatory=True, ), bounding_box=dict( field="mni_norm.bb", ), flowfield_files=dict( field="mni_norm.data.subjs.flowfields", mandatory=True, ), fwhm=dict( field="mni_norm.fwhm", ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), modulate=dict( field="mni_norm.preserve", ), paths=dict(), template_file=dict( copyfile=False, extensions=[".hdr", ".img", ".img.gz", ".nii"], field="mni_norm.template", mandatory=True, ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), voxel_size=dict( field="mni_norm.vox", ), ) inputs = DARTELNorm2MNI.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DARTELNorm2MNI_outputs(): output_map = dict( 
normalization_parameter_file=dict( extensions=None, ), normalized_files=dict(), ) outputs = DARTELNorm2MNI.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_DicomImport.py000066400000000000000000000025221413403311400252750ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import DicomImport def test_DicomImport_inputs(): input_map = dict( format=dict( field="convopts.format", usedefault=True, ), icedims=dict( field="convopts.icedims", usedefault=True, ), in_files=dict( field="data", mandatory=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), output_dir=dict( field="outdir", usedefault=True, ), output_dir_struct=dict( field="root", usedefault=True, ), paths=dict(), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = DicomImport.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DicomImport_outputs(): output_map = dict( out_files=dict(), ) outputs = DicomImport.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_EstimateContrast.py000066400000000000000000000031441413403311400263410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import EstimateContrast def test_EstimateContrast_inputs(): input_map = dict( beta_images=dict( copyfile=False, mandatory=True, ), contrasts=dict( mandatory=True, ), group_contrast=dict( xor=["use_derivs"], ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), paths=dict(), residual_image=dict( copyfile=False, extensions=None, mandatory=True, ), spm_mat_file=dict( copyfile=True, 
extensions=None, field="spmmat", mandatory=True, ), use_derivs=dict( xor=["group_contrast"], ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = EstimateContrast.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_EstimateContrast_outputs(): output_map = dict( con_images=dict(), ess_images=dict(), spmF_images=dict(), spmT_images=dict(), spm_mat_file=dict( extensions=None, ), ) outputs = EstimateContrast.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_EstimateModel.py000066400000000000000000000034351413403311400256070ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import EstimateModel def test_EstimateModel_inputs(): input_map = dict( estimation_method=dict( field="method", mandatory=True, ), flags=dict(), matlab_cmd=dict(), mfile=dict( usedefault=True, ), paths=dict(), spm_mat_file=dict( copyfile=True, extensions=None, field="spmmat", mandatory=True, ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), write_residuals=dict( field="write_residuals", ), ) inputs = EstimateModel.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_EstimateModel_outputs(): output_map = dict( ARcoef=dict(), Cbetas=dict(), RPVimage=dict( extensions=[".hdr", ".img", ".img.gz", ".nii"], ), SDbetas=dict(), SDerror=dict(), beta_images=dict(), labels=dict( extensions=[".hdr", ".img", ".img.gz", ".nii"], ), mask_image=dict( extensions=[".hdr", ".img", ".img.gz", ".nii"], ), residual_image=dict( extensions=[".hdr", ".img", ".img.gz", ".nii"], ), residual_images=dict(), spm_mat_file=dict( extensions=None, ), ) 
outputs = EstimateModel.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_FactorialDesign.py000066400000000000000000000044351413403311400261120ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import FactorialDesign def test_FactorialDesign_inputs(): input_map = dict( covariates=dict( field="cov", ), explicit_mask_file=dict( extensions=None, field="masking.em", ), global_calc_mean=dict( field="globalc.g_mean", xor=["global_calc_omit", "global_calc_values"], ), global_calc_omit=dict( field="globalc.g_omit", xor=["global_calc_mean", "global_calc_values"], ), global_calc_values=dict( field="globalc.g_user.global_uval", xor=["global_calc_mean", "global_calc_omit"], ), global_normalization=dict( field="globalm.glonorm", ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), no_grand_mean_scaling=dict( field="globalm.gmsca.gmsca_no", ), paths=dict(), spm_mat_dir=dict( field="dir", ), threshold_mask_absolute=dict( field="masking.tm.tma.athresh", xor=["threshold_mask_none", "threshold_mask_relative"], ), threshold_mask_none=dict( field="masking.tm.tm_none", xor=["threshold_mask_absolute", "threshold_mask_relative"], ), threshold_mask_relative=dict( field="masking.tm.tmr.rthresh", xor=["threshold_mask_absolute", "threshold_mask_none"], ), use_implicit_threshold=dict( field="masking.im", ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = FactorialDesign.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FactorialDesign_outputs(): output_map = dict( spm_mat_file=dict( extensions=None, ), ) outputs = FactorialDesign.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): 
assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_FieldMap.py000066400000000000000000000075751413403311400245450ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import FieldMap def test_FieldMap_inputs(): input_map = dict( anat_file=dict( copyfile=False, extensions=None, field="subj.anat", ), blip_direction=dict( field="subj.defaults.defaultsval.blipdir", mandatory=True, ), echo_times=dict( field="subj.defaults.defaultsval.et", mandatory=True, ), epi_file=dict( copyfile=False, extensions=None, field="subj.session.epi", mandatory=True, ), epifm=dict( field="subj.defaults.defaultsval.epifm", usedefault=True, ), jacobian_modulation=dict( field="subj.defaults.defaultsval.ajm", usedefault=True, ), jobtype=dict( usedefault=True, ), magnitude_file=dict( copyfile=False, extensions=None, field="subj.data.presubphasemag.magnitude", mandatory=True, ), mask_fwhm=dict( field="subj.defaults.defaultsval.mflags.fwhm", usedefault=True, ), maskbrain=dict( field="subj.defaults.defaultsval.maskbrain", usedefault=True, ), matchanat=dict( field="subj.matchanat", usedefault=True, ), matchvdm=dict( field="subj.matchvdm", usedefault=True, ), matlab_cmd=dict(), method=dict( field="subj.defaults.defaultsval.uflags.method", usedefault=True, ), mfile=dict( usedefault=True, ), ndilate=dict( field="subj.defaults.defaultsval.mflags.ndilate", usedefault=True, ), nerode=dict( field="subj.defaults.defaultsval.mflags.nerode", usedefault=True, ), pad=dict( field="subj.defaults.defaultsval.uflags.pad", usedefault=True, ), paths=dict(), phase_file=dict( copyfile=False, extensions=None, field="subj.data.presubphasemag.phase", mandatory=True, ), reg=dict( field="subj.defaults.defaultsval.mflags.reg", usedefault=True, ), sessname=dict( field="subj.sessname", usedefault=True, ), template=dict( copyfile=False, extensions=None, field="subj.defaults.defaultsval.mflags.template", ), thresh=dict( 
field="subj.defaults.defaultsval.mflags.thresh", usedefault=True, ), total_readout_time=dict( field="subj.defaults.defaultsval.tert", mandatory=True, ), unwarp_fwhm=dict( field="subj.defaults.defaultsval.uflags.fwhm", usedefault=True, ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), writeunwarped=dict( field="subj.writeunwarped", usedefault=True, ), ws=dict( field="subj.defaults.defaultsval.uflags.ws", usedefault=True, ), ) inputs = FieldMap.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_FieldMap_outputs(): output_map = dict( vdm=dict( extensions=None, ), ) outputs = FieldMap.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_Level1Design.py000066400000000000000000000037351413403311400253400ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import Level1Design def test_Level1Design_inputs(): input_map = dict( bases=dict( field="bases", mandatory=True, ), factor_info=dict( field="fact", ), flags=dict(), global_intensity_normalization=dict( field="global", ), interscan_interval=dict( field="timing.RT", mandatory=True, ), mask_image=dict( extensions=None, field="mask", ), mask_threshold=dict( usedefault=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), microtime_onset=dict( field="timing.fmri_t0", ), microtime_resolution=dict( field="timing.fmri_t", ), model_serial_correlations=dict( field="cvi", ), paths=dict(), session_info=dict( field="sess", mandatory=True, ), spm_mat_dir=dict( field="dir", ), timing_units=dict( field="timing.units", mandatory=True, ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), volterra_expansion_order=dict( field="volt", ), ) inputs = Level1Design.input_spec() for 
key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Level1Design_outputs(): output_map = dict( spm_mat_file=dict( extensions=None, ), ) outputs = Level1Design.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_MultiChannelNewSegment.py000066400000000000000000000032431413403311400274300ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import MultiChannelNewSegment def test_MultiChannelNewSegment_inputs(): input_map = dict( affine_regularization=dict( field="warp.affreg", ), channels=dict( field="channel", ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), paths=dict(), sampling_distance=dict( field="warp.samp", ), tissues=dict( field="tissue", ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), warping_regularization=dict( field="warp.reg", ), write_deformation_fields=dict( field="warp.write", ), ) inputs = MultiChannelNewSegment.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MultiChannelNewSegment_outputs(): output_map = dict( bias_corrected_images=dict(), bias_field_images=dict(), dartel_input_images=dict(), forward_deformation_field=dict(), inverse_deformation_field=dict(), modulated_class_images=dict(), native_class_images=dict(), normalized_class_images=dict(), transformation_mat=dict(), ) outputs = MultiChannelNewSegment.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_MultipleRegressionDesign.py000066400000000000000000000051451413403311400300410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import MultipleRegressionDesign def test_MultipleRegressionDesign_inputs(): input_map = dict( covariates=dict( field="cov", ), explicit_mask_file=dict( extensions=None, field="masking.em", ), global_calc_mean=dict( field="globalc.g_mean", xor=["global_calc_omit", "global_calc_values"], ), global_calc_omit=dict( field="globalc.g_omit", xor=["global_calc_mean", "global_calc_values"], ), global_calc_values=dict( field="globalc.g_user.global_uval", xor=["global_calc_mean", "global_calc_omit"], ), global_normalization=dict( field="globalm.glonorm", ), in_files=dict( field="des.mreg.scans", mandatory=True, ), include_intercept=dict( field="des.mreg.incint", usedefault=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), no_grand_mean_scaling=dict( field="globalm.gmsca.gmsca_no", ), paths=dict(), spm_mat_dir=dict( field="dir", ), threshold_mask_absolute=dict( field="masking.tm.tma.athresh", xor=["threshold_mask_none", "threshold_mask_relative"], ), threshold_mask_none=dict( field="masking.tm.tm_none", xor=["threshold_mask_absolute", "threshold_mask_relative"], ), threshold_mask_relative=dict( field="masking.tm.tmr.rthresh", xor=["threshold_mask_absolute", "threshold_mask_none"], ), use_implicit_threshold=dict( field="masking.im", ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), user_covariates=dict( field="des.mreg.mcov", ), ) inputs = MultipleRegressionDesign.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MultipleRegressionDesign_outputs(): output_map = dict( spm_mat_file=dict( extensions=None, ), ) outputs = MultipleRegressionDesign.output_spec() for key, metadata in list(output_map.items()): for 
metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_NewSegment.py000066400000000000000000000033471413403311400251310ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import NewSegment def test_NewSegment_inputs(): input_map = dict( affine_regularization=dict( field="warp.affreg", ), channel_files=dict( copyfile=False, field="channel", mandatory=True, ), channel_info=dict( field="channel", ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), paths=dict(), sampling_distance=dict( field="warp.samp", ), tissues=dict( field="tissue", ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), warping_regularization=dict( field="warp.reg", ), write_deformation_fields=dict( field="warp.write", ), ) inputs = NewSegment.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_NewSegment_outputs(): output_map = dict( bias_corrected_images=dict(), bias_field_images=dict(), dartel_input_images=dict(), forward_deformation_field=dict(), inverse_deformation_field=dict(), modulated_class_images=dict(), native_class_images=dict(), normalized_class_images=dict(), transformation_mat=dict(), ) outputs = NewSegment.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_Normalize.py000066400000000000000000000055561413403311400250210ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Normalize def test_Normalize_inputs(): input_map = dict( DCT_period_cutoff=dict( field="eoptions.cutoff", ), affine_regularization_type=dict( field="eoptions.regtype", ), apply_to_files=dict( copyfile=True, field="subj.resample", 
), jobtype=dict( usedefault=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), nonlinear_iterations=dict( field="eoptions.nits", ), nonlinear_regularization=dict( field="eoptions.reg", ), out_prefix=dict( field="roptions.prefix", usedefault=True, ), parameter_file=dict( copyfile=False, extensions=None, field="subj.matname", mandatory=True, xor=["source", "template"], ), paths=dict(), source=dict( copyfile=True, field="subj.source", mandatory=True, xor=["parameter_file"], ), source_image_smoothing=dict( field="eoptions.smosrc", ), source_weight=dict( copyfile=False, extensions=None, field="subj.wtsrc", ), template=dict( copyfile=False, extensions=None, field="eoptions.template", mandatory=True, xor=["parameter_file"], ), template_image_smoothing=dict( field="eoptions.smoref", ), template_weight=dict( copyfile=False, extensions=None, field="eoptions.weight", ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), write_bounding_box=dict( field="roptions.bb", ), write_interp=dict( field="roptions.interp", ), write_preserve=dict( field="roptions.preserve", ), write_voxel_sizes=dict( field="roptions.vox", ), write_wrap=dict( field="roptions.wrap", ), ) inputs = Normalize.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Normalize_outputs(): output_map = dict( normalization_parameters=dict(), normalized_files=dict(), normalized_source=dict(), ) outputs = Normalize.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_Normalize12.py000066400000000000000000000047641413403311400251640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Normalize12 def test_Normalize12_inputs(): input_map = dict( 
affine_regularization_type=dict( field="eoptions.affreg", ), apply_to_files=dict( copyfile=True, field="subj.resample", ), bias_fwhm=dict( field="eoptions.biasfwhm", ), bias_regularization=dict( field="eoptions.biasreg", ), deformation_file=dict( copyfile=False, extensions=[".hdr", ".img", ".img.gz", ".nii"], field="subj.def", mandatory=True, xor=["image_to_align", "tpm"], ), image_to_align=dict( copyfile=True, extensions=[".hdr", ".img", ".img.gz", ".nii"], field="subj.vol", mandatory=True, xor=["deformation_file"], ), jobtype=dict( usedefault=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), out_prefix=dict( field="woptions.prefix", usedefault=True, ), paths=dict(), sampling_distance=dict( field="eoptions.samp", ), smoothness=dict( field="eoptions.fwhm", ), tpm=dict( copyfile=False, extensions=None, field="eoptions.tpm", xor=["deformation_file"], ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), warping_regularization=dict( field="eoptions.reg", ), write_bounding_box=dict( field="woptions.bb", ), write_interp=dict( field="woptions.interp", ), write_voxel_sizes=dict( field="woptions.vox", ), ) inputs = Normalize12.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Normalize12_outputs(): output_map = dict( deformation_field=dict(), normalized_files=dict(), normalized_image=dict(), ) outputs = Normalize12.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_OneSampleTTestDesign.py000066400000000000000000000046261413403311400270570ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import OneSampleTTestDesign def test_OneSampleTTestDesign_inputs(): input_map = dict( covariates=dict( field="cov", ), 
explicit_mask_file=dict( extensions=None, field="masking.em", ), global_calc_mean=dict( field="globalc.g_mean", xor=["global_calc_omit", "global_calc_values"], ), global_calc_omit=dict( field="globalc.g_omit", xor=["global_calc_mean", "global_calc_values"], ), global_calc_values=dict( field="globalc.g_user.global_uval", xor=["global_calc_mean", "global_calc_omit"], ), global_normalization=dict( field="globalm.glonorm", ), in_files=dict( field="des.t1.scans", mandatory=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), no_grand_mean_scaling=dict( field="globalm.gmsca.gmsca_no", ), paths=dict(), spm_mat_dir=dict( field="dir", ), threshold_mask_absolute=dict( field="masking.tm.tma.athresh", xor=["threshold_mask_none", "threshold_mask_relative"], ), threshold_mask_none=dict( field="masking.tm.tm_none", xor=["threshold_mask_absolute", "threshold_mask_relative"], ), threshold_mask_relative=dict( field="masking.tm.tmr.rthresh", xor=["threshold_mask_absolute", "threshold_mask_none"], ), use_implicit_threshold=dict( field="masking.im", ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = OneSampleTTestDesign.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_OneSampleTTestDesign_outputs(): output_map = dict( spm_mat_file=dict( extensions=None, ), ) outputs = OneSampleTTestDesign.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_PairedTTestDesign.py000066400000000000000000000050331413403311400263710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import PairedTTestDesign def test_PairedTTestDesign_inputs(): input_map = dict( ancova=dict( field="des.pt.ancova", ), covariates=dict( field="cov", ), 
explicit_mask_file=dict( extensions=None, field="masking.em", ), global_calc_mean=dict( field="globalc.g_mean", xor=["global_calc_omit", "global_calc_values"], ), global_calc_omit=dict( field="globalc.g_omit", xor=["global_calc_mean", "global_calc_values"], ), global_calc_values=dict( field="globalc.g_user.global_uval", xor=["global_calc_mean", "global_calc_omit"], ), global_normalization=dict( field="globalm.glonorm", ), grand_mean_scaling=dict( field="des.pt.gmsca", ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), no_grand_mean_scaling=dict( field="globalm.gmsca.gmsca_no", ), paired_files=dict( field="des.pt.pair", mandatory=True, ), paths=dict(), spm_mat_dir=dict( field="dir", ), threshold_mask_absolute=dict( field="masking.tm.tma.athresh", xor=["threshold_mask_none", "threshold_mask_relative"], ), threshold_mask_none=dict( field="masking.tm.tm_none", xor=["threshold_mask_absolute", "threshold_mask_relative"], ), threshold_mask_relative=dict( field="masking.tm.tmr.rthresh", xor=["threshold_mask_absolute", "threshold_mask_none"], ), use_implicit_threshold=dict( field="masking.im", ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = PairedTTestDesign.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_PairedTTestDesign_outputs(): output_map = dict( spm_mat_file=dict( extensions=None, ), ) outputs = PairedTTestDesign.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_Realign.py000066400000000000000000000041761413403311400244370ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Realign def test_Realign_inputs(): input_map = dict( fwhm=dict( field="eoptions.fwhm", ), in_files=dict( copyfile=True, 
field="data", mandatory=True, ), interp=dict( field="eoptions.interp", ), jobtype=dict( usedefault=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), out_prefix=dict( field="roptions.prefix", usedefault=True, ), paths=dict(), quality=dict( field="eoptions.quality", ), register_to_mean=dict( field="eoptions.rtm", ), separation=dict( field="eoptions.sep", ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), weight_img=dict( extensions=None, field="eoptions.weight", ), wrap=dict( field="eoptions.wrap", ), write_interp=dict( field="roptions.interp", ), write_mask=dict( field="roptions.mask", ), write_which=dict( field="roptions.which", maxlen=2, minlen=2, usedefault=True, ), write_wrap=dict( field="roptions.wrap", ), ) inputs = Realign.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Realign_outputs(): output_map = dict( mean_image=dict( extensions=None, ), modified_in_files=dict(), realigned_files=dict(), realignment_parameters=dict(), ) outputs = Realign.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_RealignUnwarp.py000066400000000000000000000063571413403311400256370ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import RealignUnwarp def test_RealignUnwarp_inputs(): input_map = dict( est_basis_func=dict( field="uweoptions.basfcn", ), est_first_order_effects=dict( field="uweoptions.fot", ), est_jacobian_deformations=dict( field="uweoptions.jm", ), est_num_of_iterations=dict( field="uweoptions.noi", maxlen=1, minlen=1, usedefault=True, ), est_re_est_mov_par=dict( field="uweoptions.rem", ), est_reg_factor=dict( field="uweoptions.lambda", maxlen=1, minlen=1, usedefault=True, ), est_reg_order=dict( 
field="uweoptions.regorder", ), est_second_order_effects=dict( field="uweoptions.sot", ), est_taylor_expansion_point=dict( field="uweoptions.expround", usedefault=True, ), est_unwarp_fwhm=dict( field="uweoptions.uwfwhm", ), fwhm=dict( field="eoptions.fwhm", ), in_files=dict( copyfile=True, field="data.scans", mandatory=True, ), interp=dict( field="eoptions.einterp", ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), out_prefix=dict( field="uwroptions.prefix", usedefault=True, ), paths=dict(), phase_map=dict( copyfile=False, extensions=None, field="data.pmscan", ), quality=dict( field="eoptions.quality", ), register_to_mean=dict( field="eoptions.rtm", ), reslice_interp=dict( field="uwroptions.rinterp", ), reslice_mask=dict( field="uwroptions.mask", ), reslice_which=dict( field="uwroptions.uwwhich", maxlen=2, minlen=2, usedefault=True, ), reslice_wrap=dict( field="uwroptions.wrap", ), separation=dict( field="eoptions.sep", ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), weight_img=dict( extensions=None, field="eoptions.weight", ), wrap=dict( field="eoptions.ewrap", ), ) inputs = RealignUnwarp.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_RealignUnwarp_outputs(): output_map = dict( mean_image=dict( extensions=None, ), modified_in_files=dict(), realigned_unwarped_files=dict(), realignment_parameters=dict(), ) outputs = RealignUnwarp.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_Reslice.py000066400000000000000000000022751413403311400244420ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import Reslice def test_Reslice_inputs(): input_map = dict( in_file=dict( extensions=None, mandatory=True, ), interp=dict( 
usedefault=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), out_file=dict( extensions=None, ), paths=dict(), space_defining=dict( extensions=None, mandatory=True, ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = Reslice.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Reslice_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Reslice.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_ResliceToReference.py000066400000000000000000000024641413403311400265640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..utils import ResliceToReference def test_ResliceToReference_inputs(): input_map = dict( bounding_box=dict( field="comp{2}.idbbvox.bb", ), in_files=dict( field="fnames", mandatory=True, ), interpolation=dict( field="interp", ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), paths=dict(), target=dict( extensions=None, field="comp{1}.id.space", ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), voxel_sizes=dict( field="comp{2}.idbbvox.vox", ), ) inputs = ResliceToReference.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ResliceToReference_outputs(): output_map = dict( out_files=dict(), ) outputs = ResliceToReference.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_SPMCommand.py000066400000000000000000000011021413403311400247760ustar00rootroot00000000000000# AUTO-GENERATED by 
tools/checkspecs.py - DO NOT EDIT from ..base import SPMCommand def test_SPMCommand_inputs(): input_map = dict( matlab_cmd=dict(), mfile=dict( usedefault=True, ), paths=dict(), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = SPMCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_Segment.py000066400000000000000000000057761413403311400244670ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Segment def test_Segment_inputs(): input_map = dict( affine_regularization=dict( field="opts.regtype", ), bias_fwhm=dict( field="opts.biasfwhm", ), bias_regularization=dict( field="opts.biasreg", ), clean_masks=dict( field="output.cleanup", ), csf_output_type=dict( field="output.CSF", ), data=dict( copyfile=False, field="data", mandatory=True, ), gaussians_per_class=dict( field="opts.ngaus", ), gm_output_type=dict( field="output.GM", ), mask_image=dict( extensions=None, field="opts.msk", ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), paths=dict(), sampling_distance=dict( field="opts.samp", ), save_bias_corrected=dict( field="output.biascor", ), tissue_prob_maps=dict( field="opts.tpm", ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), warp_frequency_cutoff=dict( field="opts.warpco", ), warping_regularization=dict( field="opts.warpreg", ), wm_output_type=dict( field="output.WM", ), ) inputs = Segment.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Segment_outputs(): output_map = dict( bias_corrected_image=dict( extensions=None, ), inverse_transformation_mat=dict( extensions=None, ), modulated_csf_image=dict( extensions=None, ), modulated_gm_image=dict( extensions=None, ), 
modulated_input_image=dict( deprecated="0.10", extensions=None, new_name="bias_corrected_image", ), modulated_wm_image=dict( extensions=None, ), native_csf_image=dict( extensions=None, ), native_gm_image=dict( extensions=None, ), native_wm_image=dict( extensions=None, ), normalized_csf_image=dict( extensions=None, ), normalized_gm_image=dict( extensions=None, ), normalized_wm_image=dict( extensions=None, ), transformation_mat=dict( extensions=None, ), ) outputs = Segment.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_SliceTiming.py000066400000000000000000000030441413403311400252560ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import SliceTiming def test_SliceTiming_inputs(): input_map = dict( in_files=dict( copyfile=False, field="scans", mandatory=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), num_slices=dict( field="nslices", mandatory=True, ), out_prefix=dict( field="prefix", usedefault=True, ), paths=dict(), ref_slice=dict( field="refslice", mandatory=True, ), slice_order=dict( field="so", mandatory=True, ), time_acquisition=dict( field="ta", mandatory=True, ), time_repetition=dict( field="tr", mandatory=True, ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = SliceTiming.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SliceTiming_outputs(): output_map = dict( timecorrected_files=dict(), ) outputs = SliceTiming.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_Smooth.py000066400000000000000000000023571413403311400243260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import Smooth def test_Smooth_inputs(): input_map = dict( data_type=dict( field="dtype", ), fwhm=dict( field="fwhm", ), implicit_masking=dict( field="im", ), in_files=dict( copyfile=False, field="data", mandatory=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), out_prefix=dict( field="prefix", usedefault=True, ), paths=dict(), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = Smooth.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Smooth_outputs(): output_map = dict( smoothed_files=dict(), ) outputs = Smooth.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_Threshold.py000066400000000000000000000036241413403311400250070ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import Threshold def test_Threshold_inputs(): input_map = dict( contrast_index=dict( mandatory=True, ), extent_fdr_p_threshold=dict( usedefault=True, ), extent_threshold=dict( usedefault=True, ), force_activation=dict( usedefault=True, ), height_threshold=dict( usedefault=True, ), height_threshold_type=dict( usedefault=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), paths=dict(), spm_mat_file=dict( copyfile=True, extensions=None, mandatory=True, ), stat_image=dict( copyfile=False, extensions=None, mandatory=True, ), use_fwe_correction=dict( usedefault=True, ), use_mcr=dict(), use_topo_fdr=dict( usedefault=True, ), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = Threshold.input_spec() for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Threshold_outputs(): output_map = dict( activation_forced=dict(), cluster_forming_thr=dict(), n_clusters=dict(), pre_topo_fdr_map=dict( extensions=None, ), pre_topo_n_clusters=dict(), thresholded_map=dict( extensions=None, ), ) outputs = Threshold.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_ThresholdStatistics.py000066400000000000000000000030141413403311400270530ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import ThresholdStatistics def test_ThresholdStatistics_inputs(): input_map = dict( contrast_index=dict( mandatory=True, ), extent_threshold=dict( usedefault=True, ), height_threshold=dict( mandatory=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), paths=dict(), spm_mat_file=dict( copyfile=True, extensions=None, mandatory=True, ), stat_image=dict( copyfile=False, extensions=None, mandatory=True, ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = ThresholdStatistics.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ThresholdStatistics_outputs(): output_map = dict( clusterwise_P_FDR=dict(), clusterwise_P_RF=dict(), voxelwise_P_Bonf=dict(), voxelwise_P_FDR=dict(), voxelwise_P_RF=dict(), voxelwise_P_uncor=dict(), ) outputs = ThresholdStatistics.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_TwoSampleTTestDesign.py000066400000000000000000000052231413403311400271010ustar00rootroot00000000000000# 
AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..model import TwoSampleTTestDesign def test_TwoSampleTTestDesign_inputs(): input_map = dict( covariates=dict( field="cov", ), dependent=dict( field="des.t2.dept", ), explicit_mask_file=dict( extensions=None, field="masking.em", ), global_calc_mean=dict( field="globalc.g_mean", xor=["global_calc_omit", "global_calc_values"], ), global_calc_omit=dict( field="globalc.g_omit", xor=["global_calc_mean", "global_calc_values"], ), global_calc_values=dict( field="globalc.g_user.global_uval", xor=["global_calc_mean", "global_calc_omit"], ), global_normalization=dict( field="globalm.glonorm", ), group1_files=dict( field="des.t2.scans1", mandatory=True, ), group2_files=dict( field="des.t2.scans2", mandatory=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), no_grand_mean_scaling=dict( field="globalm.gmsca.gmsca_no", ), paths=dict(), spm_mat_dir=dict( field="dir", ), threshold_mask_absolute=dict( field="masking.tm.tma.athresh", xor=["threshold_mask_none", "threshold_mask_relative"], ), threshold_mask_none=dict( field="masking.tm.tm_none", xor=["threshold_mask_absolute", "threshold_mask_relative"], ), threshold_mask_relative=dict( field="masking.tm.tmr.rthresh", xor=["threshold_mask_absolute", "threshold_mask_none"], ), unequal_variance=dict( field="des.t2.variance", ), use_implicit_threshold=dict( field="masking.im", ), use_mcr=dict(), use_v8struct=dict( min_ver="8", usedefault=True, ), ) inputs = TwoSampleTTestDesign.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_TwoSampleTTestDesign_outputs(): output_map = dict( spm_mat_file=dict( extensions=None, ), ) outputs = TwoSampleTTestDesign.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/spm/tests/test_auto_VBMSegment.py000066400000000000000000000122251413403311400250170ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..preprocess import VBMSegment def test_VBMSegment_inputs(): input_map = dict( bias_corrected_affine=dict( field="estwrite.output.bias.affine", usedefault=True, ), bias_corrected_native=dict( field="estwrite.output.bias.native", usedefault=True, ), bias_corrected_normalized=dict( field="estwrite.output.bias.warped", usedefault=True, ), bias_fwhm=dict( field="estwrite.opts.biasfwhm", usedefault=True, ), bias_regularization=dict( field="estwrite.opts.biasreg", usedefault=True, ), cleanup_partitions=dict( field="estwrite.extopts.cleanup", usedefault=True, ), csf_dartel=dict( field="estwrite.output.CSF.dartel", usedefault=True, ), csf_modulated_normalized=dict( field="estwrite.output.CSF.modulated", usedefault=True, ), csf_native=dict( field="estwrite.output.CSF.native", usedefault=True, ), csf_normalized=dict( field="estwrite.output.CSF.warped", usedefault=True, ), dartel_template=dict( extensions=[".hdr", ".img", ".img.gz", ".nii"], field="estwrite.extopts.dartelwarp.normhigh.darteltpm", ), deformation_field=dict( field="estwrite.output.warps", usedefault=True, ), display_results=dict( field="estwrite.extopts.print", usedefault=True, ), gaussians_per_class=dict( usedefault=True, ), gm_dartel=dict( field="estwrite.output.GM.dartel", usedefault=True, ), gm_modulated_normalized=dict( field="estwrite.output.GM.modulated", usedefault=True, ), gm_native=dict( field="estwrite.output.GM.native", usedefault=True, ), gm_normalized=dict( field="estwrite.output.GM.warped", usedefault=True, ), in_files=dict( copyfile=False, field="estwrite.data", mandatory=True, ), jacobian_determinant=dict( field="estwrite.jacobian.warped", usedefault=True, ), matlab_cmd=dict(), mfile=dict( usedefault=True, ), mrf_weighting=dict( field="estwrite.extopts.mrf", usedefault=True, ), paths=dict(), 
pve_label_dartel=dict( field="estwrite.output.label.dartel", usedefault=True, ), pve_label_native=dict( field="estwrite.output.label.native", usedefault=True, ), pve_label_normalized=dict( field="estwrite.output.label.warped", usedefault=True, ), sampling_distance=dict( field="estwrite.opts.samp", usedefault=True, ), spatial_normalization=dict( usedefault=True, ), tissues=dict( extensions=[".hdr", ".img", ".img.gz", ".nii"], field="estwrite.tpm", ), use_mcr=dict(), use_sanlm_denoising_filter=dict( field="estwrite.extopts.sanlm", usedefault=True, ), use_v8struct=dict( min_ver="8", usedefault=True, ), warping_regularization=dict( field="estwrite.opts.warpreg", usedefault=True, ), wm_dartel=dict( field="estwrite.output.WM.dartel", usedefault=True, ), wm_modulated_normalized=dict( field="estwrite.output.WM.modulated", usedefault=True, ), wm_native=dict( field="estwrite.output.WM.native", usedefault=True, ), wm_normalized=dict( field="estwrite.output.WM.warped", usedefault=True, ), ) inputs = VBMSegment.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_VBMSegment_outputs(): output_map = dict( bias_corrected_images=dict(), dartel_input_images=dict(), forward_deformation_field=dict(), inverse_deformation_field=dict(), jacobian_determinant_images=dict(), modulated_class_images=dict(), native_class_images=dict(), normalized_bias_corrected_images=dict(), normalized_class_images=dict(), pve_label_native_images=dict(), pve_label_normalized_images=dict(), pve_label_registered_images=dict(), transformation_mat=dict(), ) outputs = VBMSegment.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/spm/tests/test_base.py000066400000000000000000000122401413403311400227270ustar00rootroot00000000000000# -*- coding: utf-8 -*- # 
emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import numpy as np import pytest from nipype.testing.fixtures import create_files_in_directory import nipype.interfaces.spm.base as spm from nipype.interfaces.spm import no_spm import nipype.interfaces.matlab as mlab from nipype.interfaces.spm.base import SPMCommandInputSpec from nipype.interfaces.base import traits mlab.MatlabCommand.set_default_matlab_cmd(os.getenv("MATLABCMD", "matlab")) def test_scan_for_fnames(create_files_in_directory): filelist, outdir = create_files_in_directory names = spm.scans_for_fnames(filelist, keep4d=True) assert names[0] == filelist[0] assert names[1] == filelist[1] save_time = False if not save_time: @pytest.mark.skipif(no_spm(), reason="spm is not installed") def test_spm_path(): spm_path = spm.Info.path() if spm_path is not None: assert isinstance(spm_path, (str, bytes)) assert "spm" in spm_path.lower() def test_use_mfile(): class TestClass(spm.SPMCommand): input_spec = spm.SPMCommandInputSpec dc = TestClass() # dc = derived_class assert dc.inputs.mfile def test_find_mlab_cmd_defaults(): saved_env = dict(os.environ) class TestClass(spm.SPMCommand): pass # test without FORCE_SPMMCR, SPMMCRCMD set for varname in ["FORCE_SPMMCR", "SPMMCRCMD"]: try: del os.environ[varname] except KeyError: pass dc = TestClass() assert dc._use_mcr is None assert dc._matlab_cmd is None # test with only FORCE_SPMMCR set os.environ["FORCE_SPMMCR"] = "1" dc = TestClass() assert dc._use_mcr assert dc._matlab_cmd is None # test with both, FORCE_SPMMCR and SPMMCRCMD set os.environ["SPMMCRCMD"] = "spmcmd" dc = TestClass() assert dc._use_mcr assert dc._matlab_cmd == "spmcmd" # restore environment os.environ.clear() os.environ.update(saved_env) @pytest.mark.skipif(no_spm(), reason="spm is not installed") def test_cmd_update(): class TestClass(spm.SPMCommand): input_spec = spm.SPMCommandInputSpec dc = TestClass() # dc = derived_class 
dc.inputs.matlab_cmd = "foo" assert dc.mlab._cmd == "foo" def test_cmd_update2(): class TestClass(spm.SPMCommand): _jobtype = "jobtype" _jobname = "jobname" input_spec = spm.SPMCommandInputSpec dc = TestClass() # dc = derived_class assert dc.jobtype == "jobtype" assert dc.jobname == "jobname" def test_reformat_dict_for_savemat(): class TestClass(spm.SPMCommand): input_spec = spm.SPMCommandInputSpec dc = TestClass() # dc = derived_class out = dc._reformat_dict_for_savemat({"a": {"b": {"c": []}}}) assert out == [{"a": [{"b": [{"c": []}]}]}] def test_generate_job(create_files_in_directory): class TestClass(spm.SPMCommand): input_spec = spm.SPMCommandInputSpec dc = TestClass() # dc = derived_class out = dc._generate_job() assert out == "" # struct array contents = {"contents": [1, 2, 3, 4]} out = dc._generate_job(contents=contents) assert out == ( ".contents(1) = 1;\n.contents(2) = 2;" "\n.contents(3) = 3;\n.contents(4) = 4;\n" ) # cell array of strings filelist, outdir = create_files_in_directory names = spm.scans_for_fnames(filelist, keep4d=True) contents = {"files": names} out = dc._generate_job(prefix="test", contents=contents) assert out == "test.files = {...\n'a.nii';...\n'b.nii';...\n};\n" # string assignment contents = "foo" out = dc._generate_job(prefix="test", contents=contents) assert out == "test = 'foo';\n" # cell array of vectors contents = {"onsets": np.array((1,), dtype=object)} contents["onsets"][0] = [1, 2, 3, 4] out = dc._generate_job(prefix="test", contents=contents) assert out == "test.onsets = {...\n[1, 2, 3, 4];...\n};\n" def test_bool(): class TestClassInputSpec(SPMCommandInputSpec): test_in = include_intercept = traits.Bool(field="testfield") class TestClass(spm.SPMCommand): input_spec = TestClassInputSpec _jobtype = "jobtype" _jobname = "jobname" dc = TestClass() # dc = derived_class dc.inputs.test_in = True out = dc._make_matlab_command(dc._parse_inputs()) assert out.find("jobs{1}.spm.jobtype.jobname.testfield = 1;") > 0, 1 
dc.inputs.use_v8struct = False out = dc._make_matlab_command(dc._parse_inputs()) assert out.find("jobs{1}.jobtype{1}.jobname{1}.testfield = 1;") > 0, 1 def test_make_matlab_command(create_files_in_directory): class TestClass(spm.SPMCommand): _jobtype = "jobtype" _jobname = "jobname" input_spec = spm.SPMCommandInputSpec dc = TestClass() # dc = derived_class filelist, outdir = create_files_in_directory contents = {"contents": [1, 2, 3, 4]} script = dc._make_matlab_command([contents]) assert "jobs{1}.spm.jobtype.jobname.contents(3) = 3;" in script dc.inputs.use_v8struct = False script = dc._make_matlab_command([contents]) assert "jobs{1}.jobtype{1}.jobname{1}.contents(3) = 3;" in script nipype-1.7.0/nipype/interfaces/spm/tests/test_model.py000066400000000000000000000023741413403311400231240ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import nipype.interfaces.spm.model as spm import nipype.interfaces.matlab as mlab mlab.MatlabCommand.set_default_matlab_cmd(os.getenv("MATLABCMD", "matlab")) def test_level1design(): assert spm.Level1Design._jobtype == "stats" assert spm.Level1Design._jobname == "fmri_spec" def test_estimatemodel(): assert spm.EstimateModel._jobtype == "stats" assert spm.EstimateModel._jobname == "fmri_est" def test_estimatecontrast(): assert spm.EstimateContrast._jobtype == "stats" assert spm.EstimateContrast._jobname == "con" def test_threshold(): assert spm.Threshold._jobtype == "basetype" assert spm.Threshold._jobname == "basename" def test_factorialdesign(): assert spm.FactorialDesign._jobtype == "stats" assert spm.FactorialDesign._jobname == "factorial_design" def test_onesamplettestdesign(): assert spm.OneSampleTTestDesign._jobtype == "stats" assert spm.OneSampleTTestDesign._jobname == "factorial_design" def test_twosamplettestdesign(): assert spm.TwoSampleTTestDesign._jobtype == "stats" assert 
spm.TwoSampleTTestDesign._jobname == "factorial_design" nipype-1.7.0/nipype/interfaces/spm/tests/test_preprocess.py000066400000000000000000000076531413403311400242160ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import pytest from nipype.testing.fixtures import create_files_in_directory import nipype.interfaces.spm as spm from nipype.interfaces.spm import no_spm import nipype.interfaces.matlab as mlab mlab.MatlabCommand.set_default_matlab_cmd(os.getenv("MATLABCMD", "matlab")) def test_slicetiming(): assert spm.SliceTiming._jobtype == "temporal" assert spm.SliceTiming._jobname == "st" def test_slicetiming_list_outputs(create_files_in_directory): filelist, outdir = create_files_in_directory st = spm.SliceTiming(in_files=filelist[0]) assert st._list_outputs()["timecorrected_files"][0][0] == "a" def test_realign(): assert spm.Realign._jobtype == "spatial" assert spm.Realign._jobname == "realign" assert spm.Realign().inputs.jobtype == "estwrite" def test_realign_list_outputs(create_files_in_directory): filelist, outdir = create_files_in_directory rlgn = spm.Realign(in_files=filelist[0]) assert rlgn._list_outputs()["realignment_parameters"][0].startswith("rp_") assert rlgn._list_outputs()["realigned_files"][0].startswith("r") assert rlgn._list_outputs()["mean_image"].startswith("mean") def test_coregister(): assert spm.Coregister._jobtype == "spatial" assert spm.Coregister._jobname == "coreg" assert spm.Coregister().inputs.jobtype == "estwrite" def test_coregister_list_outputs(create_files_in_directory): filelist, outdir = create_files_in_directory coreg = spm.Coregister(source=filelist[0]) assert coreg._list_outputs()["coregistered_source"][0].startswith("r") coreg = spm.Coregister(source=filelist[0], apply_to_files=filelist[1]) assert coreg._list_outputs()["coregistered_files"][0].startswith("r") def test_normalize(): assert 
spm.Normalize._jobtype == "spatial" assert spm.Normalize._jobname == "normalise" assert spm.Normalize().inputs.jobtype == "estwrite" def test_normalize_list_outputs(create_files_in_directory): filelist, outdir = create_files_in_directory norm = spm.Normalize(source=filelist[0]) assert norm._list_outputs()["normalized_source"][0].startswith("w") norm = spm.Normalize(source=filelist[0], apply_to_files=filelist[1]) assert norm._list_outputs()["normalized_files"][0].startswith("w") def test_normalize12(): assert spm.Normalize12._jobtype == "spatial" assert spm.Normalize12._jobname == "normalise" assert spm.Normalize12().inputs.jobtype == "estwrite" def test_normalize12_list_outputs(create_files_in_directory): filelist, outdir = create_files_in_directory norm12 = spm.Normalize12(image_to_align=filelist[0]) assert norm12._list_outputs()["normalized_image"][0].startswith("w") norm12 = spm.Normalize12(image_to_align=filelist[0], apply_to_files=filelist[1]) assert norm12._list_outputs()["normalized_files"][0].startswith("w") @pytest.mark.skipif(no_spm(), reason="spm is not installed") def test_segment(): if spm.Info.name() == "SPM12": assert spm.Segment()._jobtype == "tools" assert spm.Segment()._jobname == "oldseg" else: assert spm.Segment()._jobtype == "spatial" assert spm.Segment()._jobname == "preproc" @pytest.mark.skipif(no_spm(), reason="spm is not installed") def test_newsegment(): if spm.Info.name() == "SPM12": assert spm.NewSegment()._jobtype == "spatial" assert spm.NewSegment()._jobname == "preproc" else: assert spm.NewSegment()._jobtype == "tools" assert spm.NewSegment()._jobname == "preproc8" def test_smooth(): assert spm.Smooth._jobtype == "spatial" assert spm.Smooth._jobname == "smooth" def test_dartel(): assert spm.DARTEL._jobtype == "tools" assert spm.DARTEL._jobname == "dartel" def test_dartelnorm2mni(): assert spm.DARTELNorm2MNI._jobtype == "tools" assert spm.DARTELNorm2MNI._jobname == "dartel" 
nipype-1.7.0/nipype/interfaces/spm/tests/test_utils.py000066400000000000000000000062551413403311400231660ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import pytest from nipype.testing import example_data import nipype.interfaces.spm.utils as spmu from nipype.interfaces.base import isdefined from nipype.utils.filemanip import split_filename, fname_presuffix from nipype.interfaces.base import TraitError def test_coreg(): moving = example_data(infile="functional.nii") target = example_data(infile="T1.nii") mat = example_data(infile="trans.mat") coreg = spmu.CalcCoregAffine(matlab_cmd="mymatlab") coreg.inputs.target = target assert coreg.inputs.matlab_cmd == "mymatlab" coreg.inputs.moving = moving assert not isdefined(coreg.inputs.mat) pth, mov, _ = split_filename(moving) _, tgt, _ = split_filename(target) mat = os.path.join(pth, "%s_to_%s.mat" % (mov, tgt)) invmat = fname_presuffix(mat, prefix="inverse_") scrpt = coreg._make_matlab_command(None) assert coreg.inputs.mat == mat assert coreg.inputs.invmat == invmat def test_apply_transform(): moving = example_data(infile="functional.nii") mat = example_data(infile="trans.mat") applymat = spmu.ApplyTransform(matlab_cmd="mymatlab") assert applymat.inputs.matlab_cmd == "mymatlab" applymat.inputs.in_file = moving applymat.inputs.mat = mat scrpt = applymat._make_matlab_command(None) expected = "[p n e v] = spm_fileparts(V.fname);" assert expected in scrpt expected = "V.mat = transform.M * V.mat;" assert expected in scrpt def test_reslice(): moving = example_data(infile="functional.nii") space_defining = example_data(infile="T1.nii") reslice = spmu.Reslice(matlab_cmd="mymatlab_version") assert reslice.inputs.matlab_cmd == "mymatlab_version" reslice.inputs.in_file = moving reslice.inputs.space_defining = space_defining assert reslice.inputs.interp == 0 with pytest.raises(TraitError): 
reslice.inputs.trait_set(interp="nearest") with pytest.raises(TraitError): reslice.inputs.trait_set(interp=10) reslice.inputs.interp = 1 script = reslice._make_matlab_command(None) outfile = fname_presuffix(moving, prefix="r") assert reslice.inputs.out_file == outfile expected = "\nflags.mean=0;\nflags.which=1;\nflags.mask=0;" assert expected in script.replace(" ", "") expected_interp = "flags.interp = 1;\n" assert expected_interp in script assert "spm_reslice(invols, flags);" in script def test_dicom_import(): dicom = example_data(infile="dicomdir/123456-1-1.dcm") di = spmu.DicomImport(matlab_cmd="mymatlab") assert di.inputs.matlab_cmd == "mymatlab" assert di.inputs.output_dir_struct == "flat" assert di.inputs.output_dir == "./converted_dicom" assert di.inputs.format == "nii" assert not di.inputs.icedims with pytest.raises(TraitError): di.inputs.trait_set(output_dir_struct="wrong") with pytest.raises(TraitError): di.inputs.trait_set(format="FAT") with pytest.raises(TraitError): di.inputs.trait_set(in_files=["does_sfd_not_32fn_exist.dcm"]) di.inputs.in_files = [dicom] assert di.inputs.in_files == [dicom] nipype-1.7.0/nipype/interfaces/spm/utils.py000066400000000000000000000374231413403311400207660ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import numpy as np from ...utils.filemanip import ( split_filename, fname_presuffix, ensure_list, simplify_list, ) from ..base import TraitedSpec, isdefined, File, traits, OutputMultiPath, InputMultiPath from .base import SPMCommandInputSpec, SPMCommand, scans_for_fnames, scans_for_fname class Analyze2niiInputSpec(SPMCommandInputSpec): analyze_file = File(exists=True, mandatory=True) class Analyze2niiOutputSpec(SPMCommandInputSpec): nifti_file = File(exists=True) class Analyze2nii(SPMCommand): input_spec = Analyze2niiInputSpec output_spec = Analyze2niiOutputSpec def _make_matlab_command(self, _): 
script = "V = spm_vol('%s');\n" % self.inputs.analyze_file _, name, _ = split_filename(self.inputs.analyze_file) self.output_name = os.path.join(os.getcwd(), name + ".nii") script += "[Y, XYZ] = spm_read_vols(V);\n" script += "V.fname = '%s';\n" % self.output_name script += "spm_write_vol(V, Y);\n" return script def _list_outputs(self): outputs = self._outputs().get() outputs["nifti_file"] = self.output_name return outputs class CalcCoregAffineInputSpec(SPMCommandInputSpec): target = File( exists=True, mandatory=True, desc="target for generating affine transform" ) moving = File( exists=True, mandatory=True, copyfile=False, desc=("volume transform can be applied to register with " "target"), ) mat = File(desc="Filename used to store affine matrix") invmat = File(desc="Filename used to store inverse affine matrix") class CalcCoregAffineOutputSpec(TraitedSpec): mat = File(exists=True, desc="Matlab file holding transform") invmat = File(desc="Matlab file holding inverse transform") class CalcCoregAffine(SPMCommand): """Uses SPM (spm_coreg) to calculate the transform mapping moving to target. Saves Transform in mat (matlab binary file) Also saves inverse transform Examples -------- >>> import nipype.interfaces.spm.utils as spmu >>> coreg = spmu.CalcCoregAffine(matlab_cmd='matlab-spm8') >>> coreg.inputs.target = 'structural.nii' >>> coreg.inputs.moving = 'functional.nii' >>> coreg.inputs.mat = 'func_to_struct.mat' >>> coreg.run() # doctest: +SKIP .. 
note:: * the output file mat is saves as a matlab binary file * calculating the transforms does NOT change either input image it does not **move** the moving image, only calculates the transform that can be used to move it """ input_spec = CalcCoregAffineInputSpec output_spec = CalcCoregAffineOutputSpec def _make_inv_file(self): """makes filename to hold inverse transform if not specified""" invmat = fname_presuffix(self.inputs.mat, prefix="inverse_") return invmat def _make_mat_file(self): """makes name for matfile if doesn exist""" pth, mv, _ = split_filename(self.inputs.moving) _, tgt, _ = split_filename(self.inputs.target) mat = os.path.join(pth, "%s_to_%s.mat" % (mv, tgt)) return mat def _make_matlab_command(self, _): """checks for SPM, generates script""" if not isdefined(self.inputs.mat): self.inputs.mat = self._make_mat_file() if not isdefined(self.inputs.invmat): self.inputs.invmat = self._make_inv_file() script = """ target = '%s'; moving = '%s'; targetv = spm_vol(target); movingv = spm_vol(moving); x = spm_coreg(targetv, movingv); M = spm_matrix(x); save('%s' , 'M' ); M = inv(M); save('%s','M') """ % ( self.inputs.target, self.inputs.moving, self.inputs.mat, self.inputs.invmat, ) return script def _list_outputs(self): outputs = self._outputs().get() outputs["mat"] = os.path.abspath(self.inputs.mat) outputs["invmat"] = os.path.abspath(self.inputs.invmat) return outputs class ApplyTransformInputSpec(SPMCommandInputSpec): in_file = File( exists=True, mandatory=True, copyfile=True, desc="file to apply transform to, (only updates header)", ) mat = File(exists=True, mandatory=True, desc="file holding transform to apply") out_file = File(desc="output file name for transformed data", genfile=True) class ApplyTransformOutputSpec(TraitedSpec): out_file = File(exists=True, desc="Transformed image file") class ApplyTransform(SPMCommand): """Uses SPM to apply transform stored in a .mat file to given file Examples -------- >>> import nipype.interfaces.spm.utils as 
spmu >>> applymat = spmu.ApplyTransform() >>> applymat.inputs.in_file = 'functional.nii' >>> applymat.inputs.mat = 'func_to_struct.mat' >>> applymat.run() # doctest: +SKIP """ input_spec = ApplyTransformInputSpec output_spec = ApplyTransformOutputSpec def _make_matlab_command(self, _): """checks for SPM, generates script""" outputs = self._list_outputs() self.inputs.out_file = outputs["out_file"] script = """ infile = '%s'; outfile = '%s' transform = load('%s'); V = spm_vol(infile); X = spm_read_vols(V); [p n e v] = spm_fileparts(V.fname); V.mat = transform.M * V.mat; V.fname = fullfile(outfile); spm_write_vol(V,X); """ % ( self.inputs.in_file, self.inputs.out_file, self.inputs.mat, ) # img_space = spm_get_space(infile); # spm_get_space(infile, transform.M * img_space); return script def _list_outputs(self): outputs = self.output_spec().get() if not isdefined(self.inputs.out_file): outputs["out_file"] = os.path.abspath(self._gen_outfilename()) else: outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + "_trans.nii" class ResliceInputSpec(SPMCommandInputSpec): in_file = File( exists=True, mandatory=True, desc="file to apply transform to, (only updates header)", ) space_defining = File( exists=True, mandatory=True, desc="Volume defining space to slice in_file into" ) interp = traits.Range( low=0, high=7, usedefault=True, desc="degree of b-spline used for interpolation" "0 is nearest neighbor (default)", ) out_file = File(desc="Optional file to save resliced volume") class ResliceOutputSpec(TraitedSpec): out_file = File(exists=True, desc="resliced volume") class Reslice(SPMCommand): """uses spm_reslice to resample in_file into space of space_defining""" input_spec = ResliceInputSpec output_spec = ResliceOutputSpec def _make_matlab_command(self, _): """generates script""" if not isdefined(self.inputs.out_file): self.inputs.out_file = 
fname_presuffix(self.inputs.in_file, prefix="r") script = """ flags.mean = 0; flags.which = 1; flags.mask = 0; flags.interp = %d; infiles = strvcat(\'%s\', \'%s\'); invols = spm_vol(infiles); spm_reslice(invols, flags); """ % ( self.inputs.interp, self.inputs.space_defining, self.inputs.in_file, ) return script def _list_outputs(self): outputs = self._outputs().get() outputs["out_file"] = os.path.abspath(self.inputs.out_file) return outputs class ApplyInverseDeformationInput(SPMCommandInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, field="fnames", desc="Files on which deformation is applied", ) target = File( exists=True, field="comp{1}.inv.space", desc="File defining target space" ) deformation = File( exists=True, field="comp{1}.inv.comp{1}.sn2def.matname", desc="SN SPM deformation file", xor=["deformation_field"], ) deformation_field = File( exists=True, field="comp{1}.inv.comp{1}.def", desc="SN SPM deformation file", xor=["deformation"], ) interpolation = traits.Range( low=0, high=7, field="interp", desc="degree of b-spline used for interpolation" ) bounding_box = traits.List( traits.Float(), field="comp{1}.inv.comp{1}.sn2def.bb", minlen=6, maxlen=6, desc="6-element list (opt)", ) voxel_sizes = traits.List( traits.Float(), field="comp{1}.inv.comp{1}.sn2def.vox", minlen=3, maxlen=3, desc="3-element list (opt)", ) class ApplyInverseDeformationOutput(TraitedSpec): out_files = OutputMultiPath(File(exists=True), desc="Transformed files") class ApplyInverseDeformation(SPMCommand): """Uses spm to apply inverse deformation stored in a .mat file or a deformation field to a given file Examples -------- >>> import nipype.interfaces.spm.utils as spmu >>> inv = spmu.ApplyInverseDeformation() >>> inv.inputs.in_files = 'functional.nii' >>> inv.inputs.deformation = 'struct_to_func.mat' >>> inv.inputs.target = 'structural.nii' >>> inv.run() # doctest: +SKIP """ input_spec = ApplyInverseDeformationInput output_spec = ApplyInverseDeformationOutput 
_jobtype = "util" _jobname = "defs" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt == "in_files": return scans_for_fnames(ensure_list(val)) if opt == "target": return scans_for_fname(ensure_list(val)) if opt == "deformation": return np.array([simplify_list(val)], dtype=object) if opt == "deformation_field": return np.array([simplify_list(val)], dtype=object) return val def _list_outputs(self): outputs = self._outputs().get() outputs["out_files"] = [] for filename in self.inputs.in_files: _, fname = os.path.split(filename) outputs["out_files"].append(os.path.realpath("w%s" % fname)) return outputs class ResliceToReferenceInput(SPMCommandInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, field="fnames", desc="Files on which deformation is applied", ) target = File( exists=True, field="comp{1}.id.space", desc="File defining target space" ) interpolation = traits.Range( low=0, high=7, field="interp", desc="degree of b-spline used for interpolation" ) bounding_box = traits.List( traits.Float(), field="comp{2}.idbbvox.bb", minlen=6, maxlen=6, desc="6-element list (opt)", ) voxel_sizes = traits.List( traits.Float(), field="comp{2}.idbbvox.vox", minlen=3, maxlen=3, desc="3-element list (opt)", ) class ResliceToReferenceOutput(TraitedSpec): out_files = OutputMultiPath(File(exists=True), desc="Transformed files") class ResliceToReference(SPMCommand): """Uses spm to reslice a volume to a target image space or to a provided voxel size and bounding box Examples -------- >>> import nipype.interfaces.spm.utils as spmu >>> r2ref = spmu.ResliceToReference() >>> r2ref.inputs.in_files = 'functional.nii' >>> r2ref.inputs.target = 'structural.nii' >>> r2ref.run() # doctest: +SKIP """ input_spec = ResliceToReferenceInput output_spec = ResliceToReferenceOutput _jobtype = "util" _jobname = "defs" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt == "in_files": return 
scans_for_fnames(ensure_list(val)) if opt == "target": return scans_for_fname(ensure_list(val)) if opt == "deformation": return np.array([simplify_list(val)], dtype=object) if opt == "deformation_field": return np.array([simplify_list(val)], dtype=object) return val def _list_outputs(self): outputs = self._outputs().get() outputs["out_files"] = [] for filename in self.inputs.in_files: _, fname = os.path.split(filename) outputs["out_files"].append(os.path.realpath("w%s" % fname)) return outputs class DicomImportInputSpec(SPMCommandInputSpec): in_files = InputMultiPath( File(exists=True), mandatory=True, field="data", desc="dicom files to be converted", ) output_dir_struct = traits.Enum( "flat", "series", "patname", "patid_date", "patid", "date_time", field="root", usedefault=True, desc="directory structure for the output.", ) output_dir = traits.Str( "./converted_dicom", field="outdir", usedefault=True, desc="output directory." ) format = traits.Enum( "nii", "img", field="convopts.format", usedefault=True, desc="output format." ) icedims = traits.Bool( False, field="convopts.icedims", usedefault=True, desc=( "If image sorting fails, one can try using " "the additional SIEMENS ICEDims information " "to create unique filenames. Use this only if " "there would be multiple volumes with exactly " "the same file names." ), ) class DicomImportOutputSpec(TraitedSpec): out_files = OutputMultiPath(File(exists=True), desc="converted files") class DicomImport(SPMCommand): """Uses spm to convert DICOM files to nii or img+hdr. 
Examples -------- >>> import nipype.interfaces.spm.utils as spmu >>> di = spmu.DicomImport() >>> di.inputs.in_files = ['functional_1.dcm', 'functional_2.dcm'] >>> di.run() # doctest: +SKIP """ input_spec = DicomImportInputSpec output_spec = DicomImportOutputSpec _jobtype = "util" _jobname = "dicom" def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt == "in_files": return np.array(val, dtype=object) if opt == "output_dir": return np.array([val], dtype=object) if opt == "output_dir": return os.path.abspath(val) if opt == "icedims": if val: return 1 return 0 return super(DicomImport, self)._format_arg(opt, spec, val) def _run_interface(self, runtime): od = os.path.abspath(self.inputs.output_dir) if not os.path.isdir(od): os.mkdir(od) return super(DicomImport, self)._run_interface(runtime) def _list_outputs(self): from glob import glob outputs = self._outputs().get() od = os.path.abspath(self.inputs.output_dir) ext = self.inputs.format if self.inputs.output_dir_struct == "flat": outputs["out_files"] = glob(os.path.join(od, "*.%s" % ext)) elif self.inputs.output_dir_struct == "series": outputs["out_files"] = glob( os.path.join(od, os.path.join("*", "*.%s" % ext)) ) elif self.inputs.output_dir_struct in ["patid", "date_time", "patname"]: outputs["out_files"] = glob( os.path.join(od, os.path.join("*", "*", "*.%s" % ext)) ) elif self.inputs.output_dir_struct == "patid_date": outputs["out_files"] = glob( os.path.join(od, os.path.join("*", "*", "*", "*.%s" % ext)) ) return outputs nipype-1.7.0/nipype/interfaces/tests/000077500000000000000000000000001413403311400176065ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/tests/__init__.py000066400000000000000000000000301413403311400217100ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/tests/test_auto_BIDSDataGrabber.py000066400000000000000000000016541413403311400250550ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO 
NOT EDIT from ..io import BIDSDataGrabber def test_BIDSDataGrabber_inputs(): input_map = dict( base_dir=dict( mandatory=True, ), extra_derivatives=dict(), index_derivatives=dict( mandatory=True, usedefault=True, ), output_query=dict(), raise_on_empty=dict( usedefault=True, ), ) inputs = BIDSDataGrabber.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_BIDSDataGrabber_outputs(): output_map = dict() outputs = BIDSDataGrabber.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_Bru2.py000066400000000000000000000023711413403311400230640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..bru2nii import Bru2 def test_Bru2_inputs(): input_map = dict( actual_size=dict( argstr="-a", ), append_protocol_name=dict( argstr="-p", ), args=dict( argstr="%s", ), compress=dict( argstr="-z", ), environ=dict( nohash=True, usedefault=True, ), force_conversion=dict( argstr="-f", ), input_dir=dict( argstr="%s", mandatory=True, position=-1, ), output_filename=dict( argstr="-o %s", genfile=True, ), ) inputs = Bru2.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Bru2_outputs(): output_map = dict( nii_file=dict( extensions=None, ), ) outputs = Bru2.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_C3d.py000066400000000000000000000032611413403311400226620ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..c3 import C3d def test_C3d_inputs(): input_map = dict( args=dict( 
argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", mandatory=True, position=1, ), interp=dict( argstr="-interpolation %s", ), is_4d=dict( usedefault=True, ), multicomp_split=dict( argstr="-mcr", position=0, usedefault=True, ), out_file=dict( argstr="-o %s", extensions=None, position=-1, xor=["out_files"], ), out_files=dict( argstr="-oo %s", position=-1, xor=["out_file"], ), pix_type=dict( argstr="-type %s", ), resample=dict( argstr="-resample %s", ), scale=dict( argstr="-scale %s", ), shift=dict( argstr="-shift %s", ), smooth=dict( argstr="-smooth %s", ), ) inputs = C3d.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_C3d_outputs(): output_map = dict( out_files=dict(), ) outputs = C3d.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_C3dAffineTool.py000066400000000000000000000026201413403311400246270ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..c3 import C3dAffineTool def test_C3dAffineTool_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), fsl2ras=dict( argstr="-fsl2ras", position=4, ), itk_transform=dict( argstr="-oitk %s", hash_files=False, position=5, ), reference_file=dict( argstr="-ref %s", extensions=None, position=1, ), source_file=dict( argstr="-src %s", extensions=None, position=2, ), transform_file=dict( argstr="%s", extensions=None, position=3, ), ) inputs = C3dAffineTool.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_C3dAffineTool_outputs(): output_map = dict( itk_transform=dict( extensions=None, ), ) outputs = 
C3dAffineTool.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_CopyMeta.py000066400000000000000000000016451413403311400237760ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dcmstack import CopyMeta def test_CopyMeta_inputs(): input_map = dict( dest_file=dict( extensions=None, mandatory=True, ), exclude_classes=dict(), include_classes=dict(), src_file=dict( extensions=None, mandatory=True, ), ) inputs = CopyMeta.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CopyMeta_outputs(): output_map = dict( dest_file=dict( extensions=None, ), ) outputs = CopyMeta.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_DataFinder.py000066400000000000000000000016061413403311400242530ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..io import DataFinder def test_DataFinder_inputs(): input_map = dict( ignore_regexes=dict(), match_regex=dict( usedefault=True, ), max_depth=dict(), min_depth=dict(), root_paths=dict( mandatory=True, ), unpack_single=dict( usedefault=True, ), ) inputs = DataFinder.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DataFinder_outputs(): output_map = dict() outputs = DataFinder.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/tests/test_auto_DataGrabber.py000066400000000000000000000016761413403311400244170ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..io import DataGrabber def test_DataGrabber_inputs(): input_map = dict( base_directory=dict(), drop_blank_outputs=dict( usedefault=True, ), raise_on_empty=dict( usedefault=True, ), sort_filelist=dict( mandatory=True, ), template=dict( mandatory=True, ), template_args=dict(), ) inputs = DataGrabber.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DataGrabber_outputs(): output_map = dict() outputs = DataGrabber.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_DataSink.py000066400000000000000000000021241413403311400237440ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..io import DataSink def test_DataSink_inputs(): input_map = dict( _outputs=dict( usedefault=True, ), base_directory=dict(), bucket=dict(), container=dict(), creds_path=dict(), encrypt_bucket_keys=dict(), local_copy=dict(), parameterization=dict( usedefault=True, ), regexp_substitutions=dict(), remove_dest_dir=dict( usedefault=True, ), strip_dir=dict(), substitutions=dict(), ) inputs = DataSink.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DataSink_outputs(): output_map = dict( out_file=dict(), ) outputs = DataSink.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/tests/test_auto_Dcm2nii.py000066400000000000000000000051241413403311400235360ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dcm2nii import Dcm2nii def test_Dcm2nii_inputs(): input_map = dict( anonymize=dict( argstr="-a", usedefault=True, ), args=dict( argstr="%s", ), collapse_folders=dict( argstr="-c", usedefault=True, ), config_file=dict( argstr="-b %s", extensions=None, genfile=True, ), convert_all_pars=dict( argstr="-v", usedefault=True, ), date_in_filename=dict( argstr="-d", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), events_in_filename=dict( argstr="-e", usedefault=True, ), gzip_output=dict( argstr="-g", usedefault=True, ), id_in_filename=dict( argstr="-i", usedefault=True, ), nii_output=dict( argstr="-n", usedefault=True, ), output_dir=dict( argstr="-o %s", genfile=True, ), protocol_in_filename=dict( argstr="-p", usedefault=True, ), reorient=dict( argstr="-r", ), reorient_and_crop=dict( argstr="-x", usedefault=True, ), source_dir=dict( argstr="%s", mandatory=True, position=-1, xor=["source_names"], ), source_in_filename=dict( argstr="-f", usedefault=True, ), source_names=dict( argstr="%s", copyfile=False, mandatory=True, position=-1, xor=["source_dir"], ), spm_analyze=dict( argstr="-s", xor=["nii_output"], ), ) inputs = Dcm2nii.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Dcm2nii_outputs(): output_map = dict( bvals=dict(), bvecs=dict(), converted_files=dict(), reoriented_and_cropped_files=dict(), reoriented_files=dict(), ) outputs = Dcm2nii.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_Dcm2niix.py000066400000000000000000000046171413403311400237340ustar00rootroot00000000000000# 
AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dcm2nii import Dcm2niix def test_Dcm2niix_inputs(): input_map = dict( anon_bids=dict( argstr="-ba", requires=["bids_format"], ), args=dict( argstr="%s", ), bids_format=dict( argstr="-b", usedefault=True, ), comment=dict( argstr="-c %s", ), compress=dict( argstr="-z %s", usedefault=True, ), compression=dict( argstr="-%d", ), crop=dict( argstr="-x", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), has_private=dict( argstr="-t", usedefault=True, ), ignore_deriv=dict( argstr="-i", ), merge_imgs=dict( argstr="-m", usedefault=True, ), out_filename=dict( argstr="-f %s", ), output_dir=dict( argstr="-o %s", usedefault=True, ), philips_float=dict( argstr="-p", ), series_numbers=dict( argstr="-n %s...", ), single_file=dict( argstr="-s", usedefault=True, ), source_dir=dict( argstr="%s", mandatory=True, position=-1, xor=["source_names"], ), source_names=dict( argstr="%s", copyfile=False, mandatory=True, position=-1, xor=["source_dir"], ), to_nrrd=dict( argstr="-e", ), verbose=dict( argstr="-v", usedefault=True, ), ) inputs = Dcm2niix.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Dcm2niix_outputs(): output_map = dict( bids=dict(), bvals=dict(), bvecs=dict(), converted_files=dict(), ) outputs = Dcm2niix.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_DcmStack.py000066400000000000000000000017741413403311400237510ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dcmstack import DcmStack def test_DcmStack_inputs(): input_map = dict( dicom_files=dict( mandatory=True, ), embed_meta=dict(), exclude_regexes=dict(), force_read=dict( usedefault=True, ), include_regexes=dict(), out_ext=dict( 
usedefault=True, ), out_format=dict(), out_path=dict(), ) inputs = DcmStack.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_DcmStack_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = DcmStack.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_ExportFile.py000066400000000000000000000017041413403311400243320ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..io import ExportFile def test_ExportFile_inputs(): input_map = dict( check_extension=dict( usedefault=True, ), clobber=dict(), in_file=dict( extensions=None, mandatory=True, ), out_file=dict( extensions=None, mandatory=True, ), ) inputs = ExportFile.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_ExportFile_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = ExportFile.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_FreeSurferSource.py000066400000000000000000000072661413403311400255130ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..io import FreeSurferSource def test_FreeSurferSource_inputs(): input_map = dict( hemi=dict( usedefault=True, ), subject_id=dict( mandatory=True, ), subjects_dir=dict( mandatory=True, ), ) inputs = FreeSurferSource.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def 
test_FreeSurferSource_outputs(): output_map = dict( BA_stats=dict( altkey="BA", loc="stats", ), T1=dict( extensions=None, loc="mri", ), annot=dict( altkey="*annot", loc="label", ), aparc_a2009s_stats=dict( altkey="aparc.a2009s", loc="stats", ), aparc_aseg=dict( altkey="aparc*aseg", loc="mri", ), aparc_stats=dict( altkey="aparc", loc="stats", ), area_pial=dict( altkey="area.pial", loc="surf", ), aseg=dict( extensions=None, loc="mri", ), aseg_stats=dict( altkey="aseg", loc="stats", ), avg_curv=dict( loc="surf", ), brain=dict( extensions=None, loc="mri", ), brainmask=dict( extensions=None, loc="mri", ), curv=dict( loc="surf", ), curv_pial=dict( altkey="curv.pial", loc="surf", ), curv_stats=dict( altkey="curv", loc="stats", ), entorhinal_exvivo_stats=dict( altkey="entorhinal_exvivo", loc="stats", ), filled=dict( extensions=None, loc="mri", ), graymid=dict( altkey=["graymid", "midthickness"], loc="surf", ), inflated=dict( loc="surf", ), jacobian_white=dict( loc="surf", ), label=dict( altkey="*label", loc="label", ), norm=dict( extensions=None, loc="mri", ), nu=dict( extensions=None, loc="mri", ), orig=dict( extensions=None, loc="mri", ), pial=dict( loc="surf", ), rawavg=dict( extensions=None, loc="mri", ), ribbon=dict( altkey="*ribbon", loc="mri", ), smoothwm=dict( loc="surf", ), sphere=dict( loc="surf", ), sphere_reg=dict( altkey="sphere.reg", loc="surf", ), sulc=dict( loc="surf", ), thickness=dict( loc="surf", ), volume=dict( loc="surf", ), white=dict( loc="surf", ), wm=dict( extensions=None, loc="mri", ), wmparc=dict( extensions=None, loc="mri", ), wmparc_stats=dict( altkey="wmparc", loc="stats", ), ) outputs = FreeSurferSource.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_GroupAndStack.py000066400000000000000000000017571413403311400247660ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py 
- DO NOT EDIT from ..dcmstack import GroupAndStack def test_GroupAndStack_inputs(): input_map = dict( dicom_files=dict( mandatory=True, ), embed_meta=dict(), exclude_regexes=dict(), force_read=dict( usedefault=True, ), include_regexes=dict(), out_ext=dict( usedefault=True, ), out_format=dict(), out_path=dict(), ) inputs = GroupAndStack.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_GroupAndStack_outputs(): output_map = dict( out_list=dict(), ) outputs = GroupAndStack.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_IOBase.py000066400000000000000000000005161413403311400233530ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..io import IOBase def test_IOBase_inputs(): input_map = dict() inputs = IOBase.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_JSONFileGrabber.py000066400000000000000000000013351413403311400251070ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..io import JSONFileGrabber def test_JSONFileGrabber_inputs(): input_map = dict( defaults=dict(), in_file=dict( extensions=None, ), ) inputs = JSONFileGrabber.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_JSONFileGrabber_outputs(): output_map = dict() outputs = JSONFileGrabber.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/tests/test_auto_JSONFileSink.py000066400000000000000000000015671413403311400244560ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..io import JSONFileSink def test_JSONFileSink_inputs(): input_map = dict( _outputs=dict( usedefault=True, ), in_dict=dict( usedefault=True, ), out_file=dict( extensions=None, ), ) inputs = JSONFileSink.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_JSONFileSink_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = JSONFileSink.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_LookupMeta.py000066400000000000000000000014141413403311400243270ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dcmstack import LookupMeta def test_LookupMeta_inputs(): input_map = dict( in_file=dict( extensions=None, mandatory=True, ), meta_keys=dict( mandatory=True, ), ) inputs = LookupMeta.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_LookupMeta_outputs(): output_map = dict() outputs = LookupMeta.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_MatlabCommand.py000066400000000000000000000027371413403311400247570ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..matlab import MatlabCommand def test_MatlabCommand_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), logfile=dict( 
argstr="-logfile %s", extensions=None, ), mfile=dict( usedefault=True, ), nodesktop=dict( argstr="-nodesktop", nohash=True, usedefault=True, ), nosplash=dict( argstr="-nosplash", nohash=True, usedefault=True, ), paths=dict(), postscript=dict( usedefault=True, ), prescript=dict( usedefault=True, ), script=dict( argstr='-r "%s;exit"', mandatory=True, position=-1, ), script_file=dict( extensions=None, usedefault=True, ), single_comp_thread=dict( argstr="-singleCompThread", nohash=True, ), uses_mcr=dict( nohash=True, xor=["nodesktop", "nosplash", "single_comp_thread"], ), ) inputs = MatlabCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_MergeNifti.py000066400000000000000000000016341413403311400243040ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dcmstack import MergeNifti def test_MergeNifti_inputs(): input_map = dict( in_files=dict( mandatory=True, ), merge_dim=dict(), out_ext=dict( usedefault=True, ), out_format=dict(), out_path=dict(), sort_order=dict(), ) inputs = MergeNifti.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MergeNifti_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = MergeNifti.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_MeshFix.py000066400000000000000000000074571413403311400236270ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..meshfix import MeshFix def test_MeshFix_inputs(): input_map = dict( args=dict( argstr="%s", ), cut_inner=dict( argstr="--cut-inner %d", ), cut_outer=dict( 
argstr="--cut-outer %d", ), decouple_inin=dict( argstr="--decouple-inin %d", ), decouple_outin=dict( argstr="--decouple-outin %d", ), decouple_outout=dict( argstr="--decouple-outout %d", ), dilation=dict( argstr="--dilate %d", ), dont_clean=dict( argstr="--no-clean", ), environ=dict( nohash=True, usedefault=True, ), epsilon_angle=dict( argstr="-a %f", ), finetuning_distance=dict( argstr="%f", position=-2, requires=["finetuning_substeps"], ), finetuning_inwards=dict( argstr="--fineTuneIn ", position=-3, requires=["finetuning_distance", "finetuning_substeps"], ), finetuning_outwards=dict( argstr="--fineTuneOut ", position=-3, requires=["finetuning_distance", "finetuning_substeps"], xor=["finetuning_inwards"], ), finetuning_substeps=dict( argstr="%d", position=-1, requires=["finetuning_distance"], ), in_file1=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), in_file2=dict( argstr="%s", extensions=None, position=2, ), join_closest_components=dict( argstr="-jc", xor=["join_closest_components"], ), join_overlapping_largest_components=dict( argstr="-j", xor=["join_closest_components"], ), laplacian_smoothing_steps=dict( argstr="--smooth %d", ), number_of_biggest_shells=dict( argstr="--shells %d", ), out_filename=dict( argstr="-o %s", extensions=None, genfile=True, ), output_type=dict( usedefault=True, ), quiet_mode=dict( argstr="-q", ), remove_handles=dict( argstr="--remove-handles", ), save_as_freesurfer_mesh=dict( argstr="--fsmesh", xor=["save_as_vrml", "save_as_stl"], ), save_as_stl=dict( argstr="--stl", xor=["save_as_vrml", "save_as_freesurfer_mesh"], ), save_as_vrml=dict( argstr="--wrl", xor=["save_as_stl", "save_as_freesurfer_mesh"], ), set_intersections_to_one=dict( argstr="--intersect", ), uniform_remeshing_steps=dict( argstr="-u %d", requires=["uniform_remeshing_vertices"], ), uniform_remeshing_vertices=dict( argstr="--vertices %d", requires=["uniform_remeshing_steps"], ), x_shift=dict( argstr="--smooth %d", ), ) inputs = MeshFix.input_spec() for 
key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MeshFix_outputs(): output_map = dict( mesh_file=dict( extensions=None, ), ) outputs = MeshFix.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_MySQLSink.py000066400000000000000000000014461413403311400240460ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..io import MySQLSink def test_MySQLSink_inputs(): input_map = dict( config=dict( extensions=None, mandatory=True, xor=["host"], ), database_name=dict( mandatory=True, ), host=dict( mandatory=True, requires=["username", "password"], usedefault=True, xor=["config"], ), password=dict(), table_name=dict( mandatory=True, ), username=dict(), ) inputs = MySQLSink.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_NiftiGeneratorBase.py000066400000000000000000000005701413403311400257640ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dcmstack import NiftiGeneratorBase def test_NiftiGeneratorBase_inputs(): input_map = dict() inputs = NiftiGeneratorBase.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_NilearnBaseInterface.py000066400000000000000000000005751413403311400262620ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..nilearn import NilearnBaseInterface def test_NilearnBaseInterface_inputs(): input_map = dict() inputs = NilearnBaseInterface.input_spec() for key, metadata in 
list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_PETPVC.py000066400000000000000000000037241413403311400232560ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..petpvc import PETPVC def test_PETPVC_inputs(): input_map = dict( alpha=dict( argstr="-a %.4f", usedefault=True, ), args=dict( argstr="%s", ), debug=dict( argstr="-d", usedefault=True, ), environ=dict( nohash=True, usedefault=True, ), fwhm_x=dict( argstr="-x %.4f", mandatory=True, ), fwhm_y=dict( argstr="-y %.4f", mandatory=True, ), fwhm_z=dict( argstr="-z %.4f", mandatory=True, ), in_file=dict( argstr="-i %s", extensions=None, mandatory=True, ), mask_file=dict( argstr="-m %s", extensions=None, mandatory=True, ), n_deconv=dict( argstr="-k %d", usedefault=True, ), n_iter=dict( argstr="-n %d", usedefault=True, ), out_file=dict( argstr="-o %s", extensions=None, genfile=True, hash_files=False, ), pvc=dict( argstr="-p %s", mandatory=True, ), stop_crit=dict( argstr="-s %.4f", usedefault=True, ), ) inputs = PETPVC.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_PETPVC_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = PETPVC.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_Quickshear.py000066400000000000000000000026121413403311400243470ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..quickshear import Quickshear def test_Quickshear_inputs(): input_map = dict( args=dict( argstr="%s", ), buff=dict( argstr="%d", position=4, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, 
mandatory=True, position=1, ), mask_file=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), out_file=dict( argstr="%s", extensions=None, keep_extension=True, name_source="in_file", name_template="%s_defaced", position=3, ), ) inputs = Quickshear.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Quickshear_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Quickshear.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_RCommand.py000066400000000000000000000013641413403311400237530ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..r import RCommand def test_RCommand_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), rfile=dict( usedefault=True, ), script=dict( argstr='-e "%s"', mandatory=True, position=-1, ), script_file=dict( extensions=None, usedefault=True, ), ) inputs = RCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_Reorient.py000066400000000000000000000016061413403311400240410ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..image import Reorient def test_Reorient_inputs(): input_map = dict( in_file=dict( extensions=None, mandatory=True, ), orientation=dict( usedefault=True, ), ) inputs = Reorient.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Reorient_outputs(): output_map = dict( out_file=dict( extensions=None, ), transform=dict( 
extensions=None, ), ) outputs = Reorient.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_Rescale.py000066400000000000000000000016621413403311400236320ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..image import Rescale def test_Rescale_inputs(): input_map = dict( in_file=dict( extensions=None, mandatory=True, ), invert=dict(), percentile=dict( usedefault=True, ), ref_file=dict( extensions=None, mandatory=True, ), ) inputs = Rescale.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Rescale_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Rescale.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_S3DataGrabber.py000066400000000000000000000021661413403311400246200ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..io import S3DataGrabber def test_S3DataGrabber_inputs(): input_map = dict( anon=dict( usedefault=True, ), bucket=dict( mandatory=True, ), bucket_path=dict( usedefault=True, ), local_directory=dict(), raise_on_empty=dict( usedefault=True, ), region=dict( usedefault=True, ), sort_filelist=dict( mandatory=True, ), template=dict( mandatory=True, ), template_args=dict(), ) inputs = S3DataGrabber.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_S3DataGrabber_outputs(): output_map = dict() outputs = S3DataGrabber.output_spec() for key, metadata in list(output_map.items()): for metakey, value in 
list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_SQLiteSink.py000066400000000000000000000007771413403311400242500ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..io import SQLiteSink def test_SQLiteSink_inputs(): input_map = dict( database_file=dict( extensions=None, mandatory=True, ), table_name=dict( mandatory=True, ), ) inputs = SQLiteSink.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_SSHDataGrabber.py000066400000000000000000000024671413403311400247740ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..io import SSHDataGrabber def test_SSHDataGrabber_inputs(): input_map = dict( base_directory=dict( mandatory=True, ), download_files=dict( usedefault=True, ), drop_blank_outputs=dict( usedefault=True, ), hostname=dict( mandatory=True, ), password=dict(), raise_on_empty=dict( usedefault=True, ), sort_filelist=dict( mandatory=True, ), ssh_log_to_file=dict( usedefault=True, ), template=dict( mandatory=True, ), template_args=dict(), template_expression=dict( usedefault=True, ), username=dict(), ) inputs = SSHDataGrabber.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SSHDataGrabber_outputs(): output_map = dict() outputs = SSHDataGrabber.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_SelectFiles.py000066400000000000000000000015341413403311400244540ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..io import SelectFiles def 
test_SelectFiles_inputs(): input_map = dict( base_directory=dict(), force_lists=dict( usedefault=True, ), raise_on_empty=dict( usedefault=True, ), sort_filelist=dict( usedefault=True, ), ) inputs = SelectFiles.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SelectFiles_outputs(): output_map = dict() outputs = SelectFiles.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_SignalExtraction.py000066400000000000000000000023331413403311400255260ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..nilearn import SignalExtraction def test_SignalExtraction_inputs(): input_map = dict( class_labels=dict( mandatory=True, ), detrend=dict( usedefault=True, ), in_file=dict( extensions=None, mandatory=True, ), incl_shared_variance=dict( usedefault=True, ), include_global=dict( usedefault=True, ), label_files=dict( mandatory=True, ), out_file=dict( extensions=None, usedefault=True, ), ) inputs = SignalExtraction.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SignalExtraction_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = SignalExtraction.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_SlicerCommandLine.py000066400000000000000000000015011413403311400255740ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dynamic_slicer import SlicerCommandLine def test_SlicerCommandLine_inputs(): input_map = dict( args=dict( argstr="%s", ), 
environ=dict( nohash=True, usedefault=True, ), module=dict(), ) inputs = SlicerCommandLine.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SlicerCommandLine_outputs(): output_map = dict() outputs = SlicerCommandLine.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_SplitNifti.py000066400000000000000000000015671413403311400243450ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..dcmstack import SplitNifti def test_SplitNifti_inputs(): input_map = dict( in_file=dict( extensions=None, mandatory=True, ), out_ext=dict( usedefault=True, ), out_format=dict(), out_path=dict(), split_dim=dict(), ) inputs = SplitNifti.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_SplitNifti_outputs(): output_map = dict( out_list=dict(), ) outputs = SplitNifti.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_XNATSink.py000066400000000000000000000021451413403311400236500ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..io import XNATSink def test_XNATSink_inputs(): input_map = dict( _outputs=dict( usedefault=True, ), assessor_id=dict( xor=["reconstruction_id"], ), cache_dir=dict(), config=dict( extensions=None, mandatory=True, xor=["server"], ), experiment_id=dict( mandatory=True, ), project_id=dict( mandatory=True, ), pwd=dict(), reconstruction_id=dict( xor=["assessor_id"], ), server=dict( mandatory=True, requires=["user", "pwd"], xor=["config"], ), share=dict( 
usedefault=True, ), subject_id=dict( mandatory=True, ), user=dict(), ) inputs = XNATSink.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_auto_XNATSource.py000066400000000000000000000020611413403311400242010ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..io import XNATSource def test_XNATSource_inputs(): input_map = dict( cache_dir=dict(), config=dict( extensions=None, mandatory=True, xor=["server"], ), pwd=dict(), query_template=dict( mandatory=True, ), query_template_args=dict( usedefault=True, ), server=dict( mandatory=True, requires=["user", "pwd"], xor=["config"], ), user=dict(), ) inputs = XNATSource.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_XNATSource_outputs(): output_map = dict() outputs = XNATSource.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/tests/test_extra_dcm2nii.py000066400000000000000000000035601413403311400237530ustar00rootroot00000000000000import os import pytest import shutil from nipype.interfaces.dcm2nii import Dcm2niix no_dcm2niix = not bool(Dcm2niix().version) no_datalad = False try: from datalad import api # to pull and grab data from datalad.support.exceptions import IncompleteResultsError except ImportError: no_datalad = True DICOM_DIR = "http://datasets-tests.datalad.org/dicoms/dcm2niix-tests" @pytest.fixture def fetch_data(): def _fetch_data(datadir, dicoms): try: """Fetches some test DICOMs using datalad""" api.install(path=datadir, source=DICOM_DIR) data = os.path.join(datadir, dicoms) api.get(path=data, dataset=datadir) except IncompleteResultsError as exc: 
pytest.skip("Failed to fetch test data: %s" % str(exc)) return data return _fetch_data @pytest.mark.skipif(no_datalad, reason="Datalad required") @pytest.mark.skipif(no_dcm2niix, reason="Dcm2niix required") def test_dcm2niix_dti(fetch_data, tmpdir): tmpdir.chdir() datadir = tmpdir.mkdir("data").strpath dicoms = fetch_data(datadir, "Siemens_Sag_DTI_20160825_145811") def assert_dti(res): "Some assertions we will make" assert res.outputs.converted_files assert res.outputs.bvals assert res.outputs.bvecs outputs = [y for x, y in res.outputs.get().items()] if res.inputs.get("bids_format"): # ensure all outputs are of equal lengths assert len(set(map(len, outputs))) == 1 else: assert not res.outputs.bids dcm = Dcm2niix() dcm.inputs.source_dir = dicoms dcm.inputs.out_filename = "%u%z" assert_dti(dcm.run()) # now run specifying output directory and removing BIDS option outdir = tmpdir.mkdir("conversion").strpath dcm.inputs.output_dir = outdir dcm.inputs.bids_format = False assert_dti(dcm.run()) nipype-1.7.0/nipype/interfaces/tests/test_image.py000066400000000000000000000047641413403311400223140ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import numpy as np import nibabel as nb import pytest from nibabel.orientations import axcodes2ornt, ornt_transform from ..image import _as_reoriented_backport, _orientations from ... 
import LooseVersion nibabel24 = LooseVersion(nb.__version__) >= LooseVersion("2.4.0") @pytest.mark.skipif(not nibabel24, reason="Old nibabel - can't directly compare") def test_reorientation_backport(): pixdims = ((1, 1, 1), (2, 2, 3)) data = np.random.normal(size=(17, 18, 19, 2)) for pixdim in pixdims: # Generate a randomly rotated affine angles = np.random.uniform(-np.pi, np.pi, 3) * [1, 0.5, 1] rot = nb.eulerangles.euler2mat(*angles) scale = np.diag(pixdim) translation = np.array((17, 18, 19)) / 2 affine = nb.affines.from_matvec(rot.dot(scale), translation) # Create image img = nb.Nifti1Image(data, affine) dim_info = {"freq": 0, "phase": 1, "slice": 2} img.header.set_dim_info(**dim_info) # Find a random, non-identity transform targ_ornt = orig_ornt = nb.io_orientation(affine) while np.array_equal(targ_ornt, orig_ornt): new_code = np.random.choice(_orientations) targ_ornt = axcodes2ornt(new_code) identity = ornt_transform(orig_ornt, orig_ornt) transform = ornt_transform(orig_ornt, targ_ornt) # Identity transform returns exact image assert img.as_reoriented(identity) is img assert _as_reoriented_backport(img, identity) is img reoriented_a = img.as_reoriented(transform) reoriented_b = _as_reoriented_backport(img, transform) flips_only = img.shape == reoriented_a.shape # Reorientation changes affine and data array assert not np.allclose(img.affine, reoriented_a.affine) assert not ( flips_only and np.allclose(img.get_fdata(), reoriented_a.get_fdata()) ) # Dimension info changes iff axes are reordered assert flips_only == np.array_equal( img.header.get_dim_info(), reoriented_a.header.get_dim_info() ) # Both approaches produce equivalent images assert np.allclose(reoriented_a.affine, reoriented_b.affine) assert np.array_equal(reoriented_a.get_fdata(), reoriented_b.get_fdata()) assert np.array_equal( reoriented_a.header.get_dim_info(), reoriented_b.header.get_dim_info() ) 
nipype-1.7.0/nipype/interfaces/tests/test_io.py000066400000000000000000000544051413403311400216360ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import copy import simplejson import glob import os.path as op from subprocess import Popen import hashlib from collections import namedtuple import pytest import nipype import nipype.interfaces.io as nio from nipype.interfaces.base.traits_extension import isdefined from nipype.interfaces.base import Undefined, TraitError from nipype.utils.filemanip import dist_is_editable # Check for boto noboto = False try: import boto from boto.s3.connection import S3Connection, OrdinaryCallingFormat except ImportError: noboto = True # Check for boto3 noboto3 = False try: import boto3 from botocore.utils import fix_s3_host except ImportError: noboto3 = True # Check for paramiko try: import paramiko no_paramiko = False # Check for localhost SSH Server # FIXME: Tests requiring this are never run on CI try: proxy = None client = paramiko.SSHClient() client.load_system_host_keys() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) client.connect("127.0.0.1", username=os.getenv("USER"), sock=proxy, timeout=10) no_local_ssh = False except ( paramiko.SSHException, paramiko.ssh_exception.NoValidConnectionsError, OSError, ): no_local_ssh = True except ImportError: no_paramiko = True no_local_ssh = True # Check for fakes3 from subprocess import check_call, CalledProcessError try: ret_code = check_call(["which", "fakes3"], stdout=open(os.devnull, "wb")) fakes3 = ret_code == 0 except CalledProcessError: fakes3 = False # check for bids have_pybids = True try: import bids filepath = os.path.realpath(os.path.dirname(bids.__file__)) datadir = os.path.realpath(os.path.join(filepath, "tests/data/")) except ImportError: have_pybids = False def test_datagrabber(): dg = nio.DataGrabber() assert dg.inputs.template == 
Undefined assert dg.inputs.base_directory == Undefined assert dg.inputs.template_args == {"outfiles": []} @pytest.mark.skipif(noboto, reason="boto library is not available") def test_s3datagrabber(): dg = nio.S3DataGrabber() assert dg.inputs.template == Undefined assert dg.inputs.local_directory == Undefined assert dg.inputs.template_args == {"outfiles": []} templates1 = { "model": "interfaces/{package}/model.py", "preprocess": "interfaces/{package}/pre*.py", } templates2 = {"converter": "interfaces/dcm{to!s}nii.py"} templates3 = {"model": "interfaces/{package.name}/model.py"} @pytest.mark.parametrize( "SF_args, inputs_att, expected", [ ( {"templates": templates1}, {"package": "fsl"}, { "infields": ["package"], "outfields": ["model", "preprocess"], "run_output": { "model": op.join( op.dirname(nipype.__file__), "interfaces/fsl/model.py" ), "preprocess": op.join( op.dirname(nipype.__file__), "interfaces/fsl/preprocess.py" ), }, "node_output": ["model", "preprocess"], }, ), ( {"templates": templates1, "force_lists": True}, {"package": "spm"}, { "infields": ["package"], "outfields": ["model", "preprocess"], "run_output": { "model": [ op.join(op.dirname(nipype.__file__), "interfaces/spm/model.py") ], "preprocess": [ op.join( op.dirname(nipype.__file__), "interfaces/spm/preprocess.py" ) ], }, "node_output": ["model", "preprocess"], }, ), ( {"templates": templates1}, {"package": "fsl", "force_lists": ["model"]}, { "infields": ["package"], "outfields": ["model", "preprocess"], "run_output": { "model": [ op.join(op.dirname(nipype.__file__), "interfaces/fsl/model.py") ], "preprocess": op.join( op.dirname(nipype.__file__), "interfaces/fsl/preprocess.py" ), }, "node_output": ["model", "preprocess"], }, ), ( {"templates": templates2}, {"to": 2}, { "infields": ["to"], "outfields": ["converter"], "run_output": { "converter": op.join( op.dirname(nipype.__file__), "interfaces/dcm2nii.py" ) }, "node_output": ["converter"], }, ), ( {"templates": templates3}, {"package": 
namedtuple("package", ["name"])("fsl")}, { "infields": ["package"], "outfields": ["model"], "run_output": { "model": op.join( op.dirname(nipype.__file__), "interfaces/fsl/model.py" ) }, "node_output": ["model"], }, ), ], ) def test_selectfiles(tmpdir, SF_args, inputs_att, expected): tmpdir.chdir() base_dir = op.dirname(nipype.__file__) dg = nio.SelectFiles(base_directory=base_dir, **SF_args) for key, val in inputs_att.items(): setattr(dg.inputs, key, val) assert dg._infields == expected["infields"] assert sorted(dg._outfields) == expected["outfields"] assert sorted(dg._outputs().get()) == expected["node_output"] res = dg.run() for key, val in expected["run_output"].items(): assert getattr(res.outputs, key) == val def test_selectfiles_valueerror(): """Test ValueError when force_lists has field that isn't in template.""" base_dir = op.dirname(nipype.__file__) templates = { "model": "interfaces/{package}/model.py", "preprocess": "interfaces/{package}/pre*.py", } force_lists = ["model", "preprocess", "registration"] sf = nio.SelectFiles(templates, base_directory=base_dir, force_lists=force_lists) with pytest.raises(ValueError): sf.run() @pytest.mark.skipif(noboto, reason="boto library is not available") def test_s3datagrabber_communication(tmpdir): dg = nio.S3DataGrabber( infields=["subj_id", "run_num"], outfields=["func", "struct"] ) dg.inputs.anon = True dg.inputs.bucket = "openfmri" dg.inputs.bucket_path = "ds001/" dg.inputs.local_directory = tmpdir.strpath dg.inputs.sort_filelist = True dg.inputs.template = "*" dg.inputs.field_template = dict( func="%s/BOLD/task001_%s/bold.nii.gz", struct="%s/anatomy/highres001_brain.nii.gz", ) dg.inputs.subj_id = ["sub001", "sub002"] dg.inputs.run_num = ["run001", "run003"] dg.inputs.template_args = dict(func=[["subj_id", "run_num"]], struct=[["subj_id"]]) res = dg.run() func_outfiles = res.outputs.func struct_outfiles = res.outputs.struct # check for all files assert ( os.path.join( dg.inputs.local_directory, 
"/sub001/BOLD/task001_run001/bold.nii.gz" ) in func_outfiles[0] ) assert os.path.exists(func_outfiles[0]) assert ( os.path.join( dg.inputs.local_directory, "/sub001/anatomy/highres001_brain.nii.gz" ) in struct_outfiles[0] ) assert os.path.exists(struct_outfiles[0]) assert ( os.path.join( dg.inputs.local_directory, "/sub002/BOLD/task001_run003/bold.nii.gz" ) in func_outfiles[1] ) assert os.path.exists(func_outfiles[1]) assert ( os.path.join( dg.inputs.local_directory, "/sub002/anatomy/highres001_brain.nii.gz" ) in struct_outfiles[1] ) assert os.path.exists(struct_outfiles[1]) def test_datagrabber_order(tmpdir): for file_name in [ "sub002_L1_R1.q", "sub002_L1_R2.q", "sub002_L2_R1.q", "sub002_L2_R2.qd", "sub002_L3_R10.q", "sub002_L3_R2.q", ]: tmpdir.join(file_name).open("a").close() dg = nio.DataGrabber(infields=["sid"]) dg.inputs.base_directory = tmpdir.strpath dg.inputs.template = "%s_L%d_R*.q*" dg.inputs.template_args = {"outfiles": [["sid", 1], ["sid", 2], ["sid", 3]]} dg.inputs.sid = "sub002" dg.inputs.sort_filelist = True res = dg.run() outfiles = res.outputs.outfiles assert "sub002_L1_R1" in outfiles[0][0] assert "sub002_L1_R2" in outfiles[0][1] assert "sub002_L2_R1" in outfiles[1][0] assert "sub002_L2_R2" in outfiles[1][1] assert "sub002_L3_R2" in outfiles[2][0] assert "sub002_L3_R10" in outfiles[2][1] def test_datasink(): ds = nio.DataSink() assert ds.inputs.parameterization assert ds.inputs.base_directory == Undefined assert ds.inputs.strip_dir == Undefined assert ds.inputs._outputs == {} ds = nio.DataSink(base_directory="foo") assert ds.inputs.base_directory == "foo" ds = nio.DataSink(infields=["test"]) assert "test" in ds.inputs.copyable_trait_names() # Make dummy input file @pytest.fixture(scope="module") def dummy_input(request, tmpdir_factory): """ Function to create a dummy file """ # Init variables input_path = tmpdir_factory.mktemp("input_data").join("datasink_test_s3.txt") # Create input file input_path.write_binary(b"ABCD1234") # Return path return 
str(input_path) # Test datasink writes to s3 properly @pytest.mark.skipif( noboto3 or not fakes3, reason="boto3 or fakes3 library is not available" ) def test_datasink_to_s3(dummy_input, tmpdir): """ This function tests to see if the S3 functionality of a DataSink works properly """ # Init variables ds = nio.DataSink() bucket_name = "test" container = "outputs" attr_folder = "text_file" output_dir = "s3://" + bucket_name # Local temporary filepaths for testing fakes3_dir = tmpdir.strpath input_path = dummy_input # Start up fake-S3 server proc = Popen( ["fakes3", "-r", fakes3_dir, "-p", "4567"], stdout=open(os.devnull, "wb") ) # Init boto3 s3 resource to talk with fakes3 resource = boto3.resource( aws_access_key_id="mykey", aws_secret_access_key="mysecret", service_name="s3", endpoint_url="http://127.0.0.1:4567", use_ssl=False, ) resource.meta.client.meta.events.unregister("before-sign.s3", fix_s3_host) # Create bucket bucket = resource.create_bucket(Bucket=bucket_name) # Prep datasink ds.inputs.base_directory = output_dir ds.inputs.container = container ds.inputs.bucket = bucket setattr(ds.inputs, attr_folder, input_path) # Run datasink ds.run() # Get MD5sums and compare key = "/".join([container, attr_folder, os.path.basename(input_path)]) obj = bucket.Object(key=key) dst_md5 = obj.e_tag.replace('"', "") src_md5 = hashlib.md5(open(input_path, "rb").read()).hexdigest() # Kill fakes3 proc.kill() # Make sure md5sums match assert src_md5 == dst_md5 # Test AWS creds read from env vars @pytest.mark.skipif( noboto3 or not fakes3, reason="boto3 or fakes3 library is not available" ) def test_aws_keys_from_env(): """ Function to ensure the DataSink can successfully read in AWS credentials from the environment variables """ # Init variables ds = nio.DataSink() aws_access_key_id = "ABCDACCESS" aws_secret_access_key = "DEFGSECRET" # Set env vars os.environ["AWS_ACCESS_KEY_ID"] = aws_access_key_id os.environ["AWS_SECRET_ACCESS_KEY"] = aws_secret_access_key # Call function to 
return creds access_key_test, secret_key_test = ds._return_aws_keys() # Assert match assert aws_access_key_id == access_key_test assert aws_secret_access_key == secret_key_test # Test the local copy attribute def test_datasink_localcopy(dummy_input, tmpdir): """ Function to validate DataSink will make local copy via local_copy attribute """ # Init variables local_dir = tmpdir.strpath container = "outputs" attr_folder = "text_file" # Make dummy input file and datasink input_path = dummy_input ds = nio.DataSink() # Set up datasink ds.inputs.container = container ds.inputs.local_copy = local_dir setattr(ds.inputs, attr_folder, input_path) # Expected local copy path local_copy = os.path.join( local_dir, container, attr_folder, os.path.basename(input_path) ) # Run the datasink ds.run() # Check md5sums of both src_md5 = hashlib.md5(open(input_path, "rb").read()).hexdigest() dst_md5 = hashlib.md5(open(local_copy, "rb").read()).hexdigest() # Perform test assert src_md5 == dst_md5 def test_datasink_substitutions(tmpdir): indir = tmpdir.mkdir("-Tmp-nipype_ds_subs_in") outdir = tmpdir.mkdir("-Tmp-nipype_ds_subs_out") files = [] for n in ["ababab.n", "xabababyz.n"]: f = str(indir.join(n)) files.append(f) open(f, "w") ds = nio.DataSink( parameterization=False, base_directory=str(outdir), substitutions=[("ababab", "ABABAB")], # end archoring ($) is used to assure operation on the filename # instead of possible temporary directories names matches # Patterns should be more comprehendable in the real-world usage # cases since paths would be quite more sensible regexp_substitutions=[ (r"xABABAB(\w*)\.n$", r"a-\1-b.n"), ("(.*%s)[-a]([^%s]*)$" % ((os.path.sep,) * 2), r"\1!\2"), ], ) setattr(ds.inputs, "@outdir", files) ds.run() assert sorted( [os.path.basename(x) for x in glob.glob(os.path.join(str(outdir), "*"))] ) == [ "!-yz-b.n", "ABABAB.n", ] # so we got re used 2nd and both patterns @pytest.fixture() def _temp_analyze_files(tmpdir): """Generate temporary analyze file pair.""" 
img_dir = tmpdir.mkdir("img") orig_img = img_dir.join("orig.img") orig_hdr = img_dir.join("orig.hdr") orig_img.open("w") orig_hdr.open("w") return orig_img.strpath, orig_hdr.strpath def test_datasink_copydir_1(_temp_analyze_files, tmpdir): orig_img, orig_hdr = _temp_analyze_files outdir = tmpdir pth, fname = os.path.split(orig_img) ds = nio.DataSink( base_directory=outdir.mkdir("basedir").strpath, parameterization=False ) setattr(ds.inputs, "@outdir", pth) ds.run() sep = os.path.sep assert tmpdir.join("basedir", pth.split(sep)[-1], fname).check() def test_datasink_copydir_2(_temp_analyze_files, tmpdir): orig_img, orig_hdr = _temp_analyze_files pth, fname = os.path.split(orig_img) ds = nio.DataSink( base_directory=tmpdir.mkdir("basedir").strpath, parameterization=False ) ds.inputs.remove_dest_dir = True setattr(ds.inputs, "outdir", pth) ds.run() sep = os.path.sep assert not tmpdir.join("basedir", pth.split(sep)[-1], fname).check() assert tmpdir.join("basedir", "outdir", pth.split(sep)[-1], fname).check() def test_datafinder_depth(tmpdir): outdir = tmpdir.strpath os.makedirs(os.path.join(outdir, "0", "1", "2", "3")) df = nio.DataFinder() df.inputs.root_paths = os.path.join(outdir, "0") for min_depth in range(4): for max_depth in range(min_depth, 4): df.inputs.min_depth = min_depth df.inputs.max_depth = max_depth result = df.run() expected = ["{}".format(x) for x in range(min_depth, max_depth + 1)] for path, exp_fname in zip(result.outputs.out_paths, expected): _, fname = os.path.split(path) assert fname == exp_fname def test_datafinder_unpack(tmpdir): outdir = tmpdir.strpath single_res = os.path.join(outdir, "findme.txt") open(single_res, "a").close() open(os.path.join(outdir, "dontfindme"), "a").close() df = nio.DataFinder() df.inputs.root_paths = outdir df.inputs.match_regex = r".+/(?P.+)\.txt" df.inputs.unpack_single = True result = df.run() print(result.outputs.out_paths) assert result.outputs.out_paths == single_res def test_freesurfersource(): fss = 
nio.FreeSurferSource() assert fss.inputs.hemi == "both" assert fss.inputs.subject_id == Undefined assert fss.inputs.subjects_dir == Undefined def test_freesurfersource_incorrectdir(): fss = nio.FreeSurferSource() with pytest.raises(TraitError) as err: fss.inputs.subjects_dir = "path/to/no/existing/directory" def test_jsonsink_input(): ds = nio.JSONFileSink() assert ds.inputs._outputs == {} ds = nio.JSONFileSink(in_dict={"foo": "var"}) assert ds.inputs.in_dict == {"foo": "var"} ds = nio.JSONFileSink(infields=["test"]) assert "test" in ds.inputs.copyable_trait_names() @pytest.mark.parametrize( "inputs_attributes", [{"new_entry": "someValue"}, {"new_entry": "someValue", "test": "testInfields"}], ) def test_jsonsink(tmpdir, inputs_attributes): tmpdir.chdir() js = nio.JSONFileSink(infields=["test"], in_dict={"foo": "var"}) setattr(js.inputs, "contrasts.alt", "someNestedValue") expected_data = {"contrasts": {"alt": "someNestedValue"}, "foo": "var"} for key, val in inputs_attributes.items(): setattr(js.inputs, key, val) expected_data[key] = val res = js.run() with open(res.outputs.out_file, "r") as f: data = simplejson.load(f) assert data == expected_data # There are three reasons these tests will be skipped: @pytest.mark.skipif(not have_pybids, reason="Pybids is not installed") @pytest.mark.skipif( not dist_is_editable("pybids"), reason="Pybids is not installed in editable mode" ) def test_bids_grabber(tmpdir): tmpdir.chdir() bg = nio.BIDSDataGrabber() bg.inputs.base_dir = os.path.join(datadir, "ds005") bg.inputs.subject = "01" results = bg.run() assert "sub-01_T1w.nii.gz" in map(os.path.basename, results.outputs.T1w) assert "sub-01_task-mixedgamblestask_run-01_bold.nii.gz" in map( os.path.basename, results.outputs.bold ) @pytest.mark.skipif(not have_pybids, reason="Pybids is not installed") @pytest.mark.skipif( not dist_is_editable("pybids"), reason="Pybids is not installed in editable mode" ) def test_bids_fields(tmpdir): tmpdir.chdir() bg = 
nio.BIDSDataGrabber(infields=["subject"], outfields=["dwi"]) bg.inputs.base_dir = os.path.join(datadir, "ds005") bg.inputs.subject = "01" bg.inputs.output_query["dwi"] = dict(datatype="dwi") results = bg.run() assert "sub-01_dwi.nii.gz" in map(os.path.basename, results.outputs.dwi) @pytest.mark.skipif(not have_pybids, reason="Pybids is not installed") @pytest.mark.skipif( not dist_is_editable("pybids"), reason="Pybids is not installed in editable mode" ) def test_bids_infields_outfields(tmpdir): tmpdir.chdir() infields = ["infield1", "infield2"] outfields = ["outfield1", "outfield2"] bg = nio.BIDSDataGrabber(infields=infields) for outfield in outfields: bg.inputs.output_query[outfield] = {"key": "value"} for infield in infields: assert infield in bg.inputs.traits() assert not (isdefined(bg.inputs.get()[infield])) for outfield in outfields: assert outfield in bg._outputs().traits() # now try without defining outfields bg = nio.BIDSDataGrabber() for outfield in ["T1w", "bold"]: assert outfield in bg._outputs().traits() @pytest.mark.skipif(no_paramiko, reason="paramiko library is not available") @pytest.mark.skipif(no_local_ssh, reason="SSH Server is not running") def test_SSHDataGrabber(tmpdir): """Test SSHDataGrabber by connecting to localhost and collecting some data.""" old_cwd = tmpdir.chdir() source_dir = tmpdir.mkdir("source") source_hdr = source_dir.join("somedata.hdr") source_dat = source_dir.join("somedata.img") source_hdr.ensure() # create source_dat.ensure() # create # ssh client that connects to localhost, current user, regardless of # ~/.ssh/config def _mock_get_ssh_client(self): proxy = None client = paramiko.SSHClient() client.load_system_host_keys() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) client.connect("127.0.0.1", username=os.getenv("USER"), sock=proxy, timeout=10) return client MockSSHDataGrabber = copy.copy(nio.SSHDataGrabber) MockSSHDataGrabber._get_ssh_client = _mock_get_ssh_client # grabber to get files from source_dir 
matching test.hdr ssh_grabber = MockSSHDataGrabber(infields=["test"], outfields=["test_file"]) ssh_grabber.inputs.base_directory = str(source_dir) ssh_grabber.inputs.hostname = "127.0.0.1" ssh_grabber.inputs.field_template = dict(test_file="%s.hdr") ssh_grabber.inputs.template = "" ssh_grabber.inputs.template_args = dict(test_file=[["test"]]) ssh_grabber.inputs.test = "somedata" ssh_grabber.inputs.sort_filelist = True runtime = ssh_grabber.run() # did we successfully get the header? assert runtime.outputs.test_file == str(tmpdir.join(source_hdr.basename)) # did we successfully get the data? assert ( tmpdir.join(source_hdr.basename) # header file .new(ext=".img") # data file .check(file=True, exists=True) ) # exists? old_cwd.chdir() def test_ExportFile(tmp_path): testin = tmp_path / "in.txt" testin.write_text("test string") i = nio.ExportFile() i.inputs.in_file = str(testin) i.inputs.out_file = str(tmp_path / "out.tsv") i.inputs.check_extension = True with pytest.raises(RuntimeError): i.run() i.inputs.check_extension = False i.run() assert (tmp_path / "out.tsv").read_text() == "test string" i.inputs.out_file = str(tmp_path / "out.txt") i.inputs.check_extension = True i.run() assert (tmp_path / "out.txt").read_text() == "test string" with pytest.raises(FileExistsError): i.run() i.inputs.clobber = True i.run() assert (tmp_path / "out.txt").read_text() == "test string" nipype-1.7.0/nipype/interfaces/tests/test_matlab.py000066400000000000000000000110601413403311400224550ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import pytest import nipype.interfaces.matlab as mlab matlab_cmd = mlab.get_matlab_command() no_matlab = matlab_cmd is None if not no_matlab: mlab.MatlabCommand.set_default_matlab_cmd(matlab_cmd) def clean_workspace_and_get_default_script_file(): # Make sure things are clean. 
default_script_file = mlab.MatlabInputSpec().script_file if os.path.exists(default_script_file): os.remove( default_script_file ) # raise Exception('Default script file needed for tests; please remove %s!' % default_script_file) return default_script_file @pytest.mark.skipif(no_matlab, reason="matlab is not available") def test_cmdline(): default_script_file = clean_workspace_and_get_default_script_file() mi = mlab.MatlabCommand(script="whos", script_file="testscript", mfile=False) assert mi.cmdline == matlab_cmd + ( ' -nodesktop -nosplash -singleCompThread -r "fprintf(1,' "'Executing code at %s:\\n',datestr(now));ver,try," "whos,catch ME,fprintf(2,'MATLAB code threw an " "exception:\\n');fprintf(2,'%s\\n',ME.message);if " "length(ME.stack) ~= 0, fprintf(2,'File:%s\\nName:%s\\n" "Line:%d\\n',ME.stack.file,ME.stack.name," 'ME.stack.line);, end;end;;exit"' ) assert mi.inputs.script == "whos" assert mi.inputs.script_file == "testscript" assert not os.path.exists(mi.inputs.script_file), "scriptfile should not exist" assert not os.path.exists( default_script_file ), "default scriptfile should not exist." 
@pytest.mark.skipif(no_matlab, reason="matlab is not available") def test_mlab_inputspec(): default_script_file = clean_workspace_and_get_default_script_file() spec = mlab.MatlabInputSpec() for k in [ "paths", "script", "nosplash", "mfile", "logfile", "script_file", "nodesktop", ]: assert k in spec.copyable_trait_names() assert spec.nodesktop assert spec.nosplash assert spec.mfile assert spec.script_file == default_script_file @pytest.mark.skipif(no_matlab, reason="matlab is not available") def test_mlab_init(): default_script_file = clean_workspace_and_get_default_script_file() assert mlab.MatlabCommand._cmd == "matlab" assert mlab.MatlabCommand.input_spec == mlab.MatlabInputSpec assert mlab.MatlabCommand().cmd == matlab_cmd mc = mlab.MatlabCommand(matlab_cmd="foo_m") assert mc.cmd == "foo_m" @pytest.mark.skipif(no_matlab, reason="matlab is not available") def test_run_interface(tmpdir): default_script_file = clean_workspace_and_get_default_script_file() mc = mlab.MatlabCommand(matlab_cmd="foo_m") assert not os.path.exists(default_script_file), "scriptfile should not exist 1." with pytest.raises(ValueError): mc.run() # script is mandatory assert not os.path.exists(default_script_file), "scriptfile should not exist 2." if os.path.exists(default_script_file): # cleanup os.remove(default_script_file) mc.inputs.script = "a=1;" assert not os.path.exists(default_script_file), "scriptfile should not exist 3." with pytest.raises(IOError): mc.run() # foo_m is not an executable assert os.path.exists(default_script_file), "scriptfile should exist 3." if os.path.exists(default_script_file): # cleanup os.remove(default_script_file) cwd = tmpdir.chdir() # bypasses ubuntu dash issue mc = mlab.MatlabCommand(script="foo;", paths=[tmpdir.strpath], mfile=True) assert not os.path.exists(default_script_file), "scriptfile should not exist 4." with pytest.raises(OSError): mc.run() assert os.path.exists(default_script_file), "scriptfile should exist 4." 
if os.path.exists(default_script_file): # cleanup os.remove(default_script_file) # bypasses ubuntu dash issue res = mlab.MatlabCommand(script="a=1;", paths=[tmpdir.strpath], mfile=True).run() assert res.runtime.returncode == 0 assert os.path.exists(default_script_file), "scriptfile should exist 5." cwd.chdir() @pytest.mark.skipif(no_matlab, reason="matlab is not available") def test_set_matlabcmd(): default_script_file = clean_workspace_and_get_default_script_file() mi = mlab.MatlabCommand() mi.set_default_matlab_cmd("foo") assert not os.path.exists(default_script_file), "scriptfile should not exist." assert mi._default_matlab_cmd == "foo" mi.set_default_matlab_cmd(matlab_cmd) nipype-1.7.0/nipype/interfaces/tests/test_nilearn.py000066400000000000000000000162251413403311400226550ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import numpy as np from ...testing import utils from .. import nilearn as iface from ...pipeline import engine as pe import pytest import numpy.testing as npt no_nilearn = True try: __import__("nilearn") no_nilearn = False except ImportError: pass @pytest.mark.skipif(no_nilearn, reason="the nilearn library is not available") class TestSignalExtraction: filenames = { "in_file": "fmri.nii", "label_files": "labels.nii", "4d_label_file": "4dlabels.nii", "out_file": "signals.tsv", } labels = ["CSF", "GrayMatter", "WhiteMatter"] global_labels = ["GlobalSignal"] + labels @pytest.fixture(autouse=True, scope="class") def setup_class(self, tmpdir_factory): tempdir = tmpdir_factory.mktemp("test") self.orig_dir = tempdir.chdir() utils.save_toy_nii(self.fake_fmri_data, self.filenames["in_file"]) utils.save_toy_nii(self.fake_label_data, self.filenames["label_files"]) def test_signal_extract_no_shared(self): # run iface.SignalExtraction( in_file=self.filenames["in_file"], label_files=self.filenames["label_files"], class_labels=self.labels, 
incl_shared_variance=False, ).run() # assert self.assert_expected_output(self.labels, self.base_wanted) def test_signal_extr_bad_label_list(self): # run with pytest.raises(ValueError): iface.SignalExtraction( in_file=self.filenames["in_file"], label_files=self.filenames["label_files"], class_labels=["bad"], incl_shared_variance=False, ).run() def test_signal_extr_equiv_4d_no_shared(self): self._test_4d_label( self.base_wanted, self.fake_equiv_4d_label_data, incl_shared_variance=False ) def test_signal_extr_4d_no_shared(self): # set up & run & assert self._test_4d_label( self.fourd_wanted, self.fake_4d_label_data, incl_shared_variance=False ) def test_signal_extr_global_no_shared(self): # set up wanted_global = [[-4.0 / 6], [-1.0 / 6], [3.0 / 6], [-1.0 / 6], [-7.0 / 6]] for i, vals in enumerate(self.base_wanted): wanted_global[i].extend(vals) # run iface.SignalExtraction( in_file=self.filenames["in_file"], label_files=self.filenames["label_files"], class_labels=self.labels, include_global=True, incl_shared_variance=False, ).run() # assert self.assert_expected_output(self.global_labels, wanted_global) def test_signal_extr_4d_global_no_shared(self): # set up wanted_global = [[3.0 / 8], [-3.0 / 8], [1.0 / 8], [-7.0 / 8], [-9.0 / 8]] for i, vals in enumerate(self.fourd_wanted): wanted_global[i].extend(vals) # run & assert self._test_4d_label( wanted_global, self.fake_4d_label_data, include_global=True, incl_shared_variance=False, ) def test_signal_extr_shared(self): # set up wanted = [] for vol in range(self.fake_fmri_data.shape[3]): volume = self.fake_fmri_data[:, :, :, vol].flatten() wanted_row = [] for reg in range(self.fake_4d_label_data.shape[3]): region = self.fake_4d_label_data[:, :, :, reg].flatten() wanted_row.append((volume * region).sum() / (region * region).sum()) wanted.append(wanted_row) # run & assert self._test_4d_label(wanted, self.fake_4d_label_data) def test_signal_extr_traits_valid(self): """Test a node using the SignalExtraction interface. 
Unlike interface.run(), node.run() checks the traits """ # run node = pe.Node( iface.SignalExtraction( in_file=os.path.abspath(self.filenames["in_file"]), label_files=os.path.abspath(self.filenames["label_files"]), class_labels=self.labels, incl_shared_variance=False, ), name="SignalExtraction", ) node.run() # assert # just checking that it passes trait validations def _test_4d_label( self, wanted, fake_labels, include_global=False, incl_shared_variance=True ): # set up utils.save_toy_nii(fake_labels, self.filenames["4d_label_file"]) # run iface.SignalExtraction( in_file=self.filenames["in_file"], label_files=self.filenames["4d_label_file"], class_labels=self.labels, incl_shared_variance=incl_shared_variance, include_global=include_global, ).run() wanted_labels = self.global_labels if include_global else self.labels # assert self.assert_expected_output(wanted_labels, wanted) def assert_expected_output(self, labels, wanted): with open(self.filenames["out_file"], "r") as output: got = [line.split() for line in output] labels_got = got.pop(0) # remove header assert labels_got == labels assert len(got) == self.fake_fmri_data.shape[3], "num rows and num volumes" # convert from string to float got = [[float(num) for num in row] for row in got] for i, time in enumerate(got): assert len(labels) == len(time) for j, segment in enumerate(time): npt.assert_almost_equal(segment, wanted[i][j], decimal=1) # dj: self doesnt have orig_dir at this point, not sure how to change it. 
# should work without it # def teardown_class(self): # self.orig_dir.chdir() fake_fmri_data = np.array( [ [ [[2, -1, 4, -2, 3], [4, -2, -5, -1, 0]], [[-2, 0, 1, 4, 4], [-5, 3, -3, 1, -5]], ], [ [[2, -2, -1, -2, -5], [3, 0, 3, -5, -2]], [[-4, -2, -2, 1, -2], [3, 1, 4, -3, -2]], ], ] ) fake_label_data = np.array([[[1, 0], [3, 1]], [[2, 0], [1, 3]]]) fake_equiv_4d_label_data = np.array( [ [[[1.0, 0.0, 0.0], [0.0, 0.0, 0.0]], [[0.0, 0.0, 1.0], [1.0, 0.0, 0.0]]], [[[0.0, 1.0, 0.0], [0.0, 0.0, 0.0]], [[1.0, 0.0, 0.0], [0.0, 0.0, 1.0]]], ] ) base_wanted = [ [-2.33333, 2, 0.5], [0, -2, 0.5], [-0.3333333, -1, 2.5], [0, -2, 0.5], [-1.3333333, -5, 1], ] fake_4d_label_data = np.array( [ [[[0.2, 0.3, 0.5], [0.1, 0.1, 0.8]], [[0.1, 0.3, 0.6], [0.3, 0.4, 0.3]]], [[[0.2, 0.2, 0.6], [0.0, 0.3, 0.7]], [[0.3, 0.3, 0.4], [0.3, 0.4, 0.3]]], ] ) fourd_wanted = [ [-5.0652173913, -5.44565217391, 5.50543478261], [-7.02173913043, 11.1847826087, -4.33152173913], [-19.0869565217, 21.2391304348, -4.57608695652], [5.19565217391, -3.66304347826, -1.51630434783], [-12.0, 3.0, 0.5], ] nipype-1.7.0/nipype/interfaces/tests/test_r.py000066400000000000000000000041311413403311400214570ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import pytest from nipype.interfaces import r no_r = r.no_r @pytest.mark.skipif(no_r, reason="R is not available") def test_cmdline(tmp_path): default_script_file = str(tmp_path / "testscript") ri = r.RCommand(script="1 + 1", script_file=default_script_file, rfile=False) r_cmd = r.get_r_command() assert ri.cmdline == r_cmd + (' -e "1 + 1"') assert ri.inputs.script == "1 + 1" assert ri.inputs.script_file == default_script_file assert not os.path.exists(ri.inputs.script_file), "scriptfile should not exist" assert not os.path.exists( default_script_file ), "default scriptfile should not exist." 
@pytest.mark.skipif(no_r, reason="R is not available") def test_run_interface(tmpdir): cwd = tmpdir.chdir() default_script_file = r.RInputSpec().script_file rc = r.RCommand(r_cmd="foo_m") assert not os.path.exists(default_script_file), "scriptfile should not exist 1." with pytest.raises(ValueError): rc.run() # script is mandatory assert not os.path.exists(default_script_file), "scriptfile should not exist 2." if os.path.exists(default_script_file): # cleanup os.remove(default_script_file) rc.inputs.script = "a=1;" assert not os.path.exists(default_script_file), "scriptfile should not exist 3." with pytest.raises(IOError): rc.run() # foo_m is not an executable assert os.path.exists(default_script_file), "scriptfile should exist 3." if os.path.exists(default_script_file): # cleanup os.remove(default_script_file) cwd.chdir() @pytest.mark.skipif(no_r, reason="R is not available") def test_set_rcmd(tmpdir): cwd = tmpdir.chdir() default_script_file = r.RInputSpec().script_file ri = r.RCommand() _default_r_cmd = ri._cmd ri.set_default_r_cmd("foo") assert not os.path.exists(default_script_file), "scriptfile should not exist." 
assert ri._cmd == "foo" ri.set_default_r_cmd(_default_r_cmd) cwd.chdir() nipype-1.7.0/nipype/interfaces/utility/000077500000000000000000000000001413403311400201475ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/utility/__init__.py000066400000000000000000000006161413403311400222630ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Package contains interfaces for using existing functionality in other packages Requires Packages to be installed """ from .base import IdentityInterface, Rename, Select, Split, Merge, AssertEqual from .csv import CSVReader from .wrappers import Function nipype-1.7.0/nipype/interfaces/utility/base.py000066400000000000000000000333471413403311400214450ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ # changing to temporary directories >>> tmp = getfixture('tmpdir') >>> old = tmp.chdir() """ import os import re import numpy as np from ..base import ( traits, TraitedSpec, DynamicTraitedSpec, File, Undefined, isdefined, OutputMultiPath, InputMultiPath, BaseInterface, BaseInterfaceInputSpec, Str, SimpleInterface, ) from ..io import IOBase, add_traits from ...utils.filemanip import ensure_list, copyfile, split_filename class IdentityInterface(IOBase): """Basic interface class generates identity mappings Examples -------- >>> from nipype.interfaces.utility import IdentityInterface >>> ii = IdentityInterface(fields=['a', 'b'], mandatory_inputs=False) >>> ii.inputs.a >>> ii.inputs.a = 'foo' >>> out = ii._outputs() >>> out.a >>> out = ii.run() >>> out.outputs.a 'foo' >>> ii2 = IdentityInterface(fields=['a', 'b'], mandatory_inputs=True) >>> ii2.inputs.a = 'foo' >>> out = ii2.run() # doctest: +SKIP ValueError: IdentityInterface requires a value for input 'b' because it was listed in 'fields' Interface 
IdentityInterface failed to run. """ input_spec = DynamicTraitedSpec output_spec = DynamicTraitedSpec def __init__(self, fields=None, mandatory_inputs=True, **inputs): super(IdentityInterface, self).__init__(**inputs) if fields is None or not fields: raise ValueError("Identity Interface fields must be a non-empty list") # Each input must be in the fields. for in_field in inputs: if in_field not in fields: raise ValueError( "Identity Interface input is not in the fields: %s" % in_field ) self._fields = fields self._mandatory_inputs = mandatory_inputs add_traits(self.inputs, fields) # Adding any traits wipes out all input values set in superclass initialization, # even it the trait is not in the add_traits argument. The work-around is to reset # the values after adding the traits. self.inputs.trait_set(**inputs) def _add_output_traits(self, base): return add_traits(base, self._fields) def _list_outputs(self): # manual mandatory inputs check if self._fields and self._mandatory_inputs: for key in self._fields: value = getattr(self.inputs, key) if not isdefined(value): msg = ( "%s requires a value for input '%s' because it was listed in 'fields'. \ You can turn off mandatory inputs checking by passing mandatory_inputs = False to the constructor." 
% (self.__class__.__name__, key) ) raise ValueError(msg) outputs = self._outputs().get() for key in self._fields: val = getattr(self.inputs, key) if isdefined(val): outputs[key] = val return outputs class MergeInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): axis = traits.Enum( "vstack", "hstack", usedefault=True, desc="direction in which to merge, hstack requires same number of elements in each input", ) no_flatten = traits.Bool( False, usedefault=True, desc="append to outlist instead of extending in vstack mode", ) ravel_inputs = traits.Bool( False, usedefault=True, desc="ravel inputs when no_flatten is False" ) class MergeOutputSpec(TraitedSpec): out = traits.List(desc="Merged output") def _ravel(in_val): if not isinstance(in_val, list): return in_val flat_list = [] for val in in_val: raveled_val = _ravel(val) if isinstance(raveled_val, list): flat_list.extend(raveled_val) else: flat_list.append(raveled_val) return flat_list class Merge(IOBase): """Basic interface class to merge inputs into a single list ``Merge(1)`` will merge a list of lists Examples -------- >>> from nipype.interfaces.utility import Merge >>> mi = Merge(3) >>> mi.inputs.in1 = 1 >>> mi.inputs.in2 = [2, 5] >>> mi.inputs.in3 = 3 >>> out = mi.run() >>> out.outputs.out [1, 2, 5, 3] >>> merge = Merge(1) >>> merge.inputs.in1 = [1, [2, 5], 3] >>> out = merge.run() >>> out.outputs.out [1, [2, 5], 3] >>> merge = Merge(1) >>> merge.inputs.in1 = [1, [2, 5], 3] >>> merge.inputs.ravel_inputs = True >>> out = merge.run() >>> out.outputs.out [1, 2, 5, 3] >>> merge = Merge(1) >>> merge.inputs.in1 = [1, [2, 5], 3] >>> merge.inputs.no_flatten = True >>> out = merge.run() >>> out.outputs.out [[1, [2, 5], 3]] """ input_spec = MergeInputSpec output_spec = MergeOutputSpec def __init__(self, numinputs=0, **inputs): super(Merge, self).__init__(**inputs) self._numinputs = numinputs if numinputs >= 1: input_names = ["in%d" % (i + 1) for i in range(numinputs)] else: input_names = [] add_traits(self.inputs, 
input_names) def _list_outputs(self): outputs = self._outputs().get() out = [] if self._numinputs < 1: return outputs else: getval = lambda idx: getattr(self.inputs, "in%d" % (idx + 1)) values = [ getval(idx) for idx in range(self._numinputs) if isdefined(getval(idx)) ] if self.inputs.axis == "vstack": for value in values: if isinstance(value, list) and not self.inputs.no_flatten: out.extend(_ravel(value) if self.inputs.ravel_inputs else value) else: out.append(value) else: lists = [ensure_list(val) for val in values] out = [[val[i] for val in lists] for i in range(len(lists[0]))] outputs["out"] = out return outputs class RenameInputSpec(DynamicTraitedSpec): in_file = File(exists=True, mandatory=True, desc="file to rename") keep_ext = traits.Bool( desc=("Keep in_file extension, replace " "non-extension component of name") ) format_string = Str( mandatory=True, desc="Python formatting string for output template" ) parse_string = Str( desc="Python regexp parse string to define " "replacement inputs" ) use_fullpath = traits.Bool( False, usedefault=True, desc="Use full path as input to regex parser" ) class RenameOutputSpec(TraitedSpec): out_file = File(exists=True, desc="softlink to original file with new name") class Rename(SimpleInterface, IOBase): """Change the name of a file based on a mapped format string. To use additional inputs that will be defined at run-time, the class constructor must be called with the format template, and the fields identified will become inputs to the interface. Additionally, you may set the parse_string input, which will be run over the input filename with a regular expressions search, and will fill in additional input fields from matched groups. Fields set with inputs have precedence over fields filled in with the regexp match. 
Examples -------- >>> from nipype.interfaces.utility import Rename >>> rename1 = Rename() >>> rename1.inputs.in_file = os.path.join(datadir, "zstat1.nii.gz") # datadir is a directory with exemplary files, defined in conftest.py >>> rename1.inputs.format_string = "Faces-Scenes.nii.gz" >>> res = rename1.run() # doctest: +SKIP >>> res.outputs.out_file # doctest: +SKIP 'Faces-Scenes.nii.gz" # doctest: +SKIP >>> rename2 = Rename(format_string="%(subject_id)s_func_run%(run)02d") >>> rename2.inputs.in_file = os.path.join(datadir, "functional.nii") >>> rename2.inputs.keep_ext = True >>> rename2.inputs.subject_id = "subj_201" >>> rename2.inputs.run = 2 >>> res = rename2.run() # doctest: +SKIP >>> res.outputs.out_file # doctest: +SKIP 'subj_201_func_run02.nii' # doctest: +SKIP >>> rename3 = Rename(format_string="%(subject_id)s_%(seq)s_run%(run)02d.nii") >>> rename3.inputs.in_file = os.path.join(datadir, "func_epi_1_1.nii") >>> rename3.inputs.parse_string = r"func_(?P\w*)_.*" >>> rename3.inputs.subject_id = "subj_201" >>> rename3.inputs.run = 2 >>> res = rename3.run() # doctest: +SKIP >>> res.outputs.out_file # doctest: +SKIP 'subj_201_epi_run02.nii' # doctest: +SKIP """ input_spec = RenameInputSpec output_spec = RenameOutputSpec def __init__(self, format_string=None, **inputs): super(Rename, self).__init__(**inputs) if format_string is not None: self.inputs.format_string = format_string self.fmt_fields = re.findall(r"%\((.+?)\)", format_string) add_traits(self.inputs, self.fmt_fields) else: self.fmt_fields = [] def _rename(self): fmt_dict = dict() if isdefined(self.inputs.parse_string): if isdefined(self.inputs.use_fullpath) and self.inputs.use_fullpath: m = re.search(self.inputs.parse_string, self.inputs.in_file) else: m = re.search( self.inputs.parse_string, os.path.split(self.inputs.in_file)[1] ) if m: fmt_dict.update(m.groupdict()) for field in self.fmt_fields: val = getattr(self.inputs, field) if isdefined(val): fmt_dict[field] = getattr(self.inputs, field) if 
self.inputs.keep_ext: fmt_string = "".join( [self.inputs.format_string, split_filename(self.inputs.in_file)[2]] ) else: fmt_string = self.inputs.format_string return fmt_string % fmt_dict def _run_interface(self, runtime): runtime.returncode = 0 out_file = os.path.join(runtime.cwd, self._rename()) _ = copyfile(self.inputs.in_file, out_file) self._results["out_file"] = out_file return runtime class SplitInputSpec(BaseInterfaceInputSpec): inlist = traits.List(traits.Any, mandatory=True, desc="list of values to split") splits = traits.List( traits.Int, mandatory=True, desc="Number of outputs in each split - should add to number of inputs", ) squeeze = traits.Bool( False, usedefault=True, desc="unfold one-element splits removing the list" ) class Split(IOBase): """Basic interface class to split lists into multiple outputs Examples -------- >>> from nipype.interfaces.utility import Split >>> sp = Split() >>> _ = sp.inputs.trait_set(inlist=[1, 2, 3], splits=[2, 1]) >>> out = sp.run() >>> out.outputs.out1 [1, 2] """ input_spec = SplitInputSpec output_spec = DynamicTraitedSpec def _add_output_traits(self, base): undefined_traits = {} for i in range(len(self.inputs.splits)): key = "out%d" % (i + 1) base.add_trait(key, traits.Any) undefined_traits[key] = Undefined base.trait_set(trait_change_notify=False, **undefined_traits) return base def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.splits): if sum(self.inputs.splits) != len(self.inputs.inlist): raise RuntimeError("sum of splits != num of list elements") splits = [0] splits.extend(self.inputs.splits) splits = np.cumsum(splits) for i in range(len(splits) - 1): val = np.array(self.inputs.inlist, dtype=object)[ splits[i] : splits[i + 1] ].tolist() if self.inputs.squeeze and len(val) == 1: val = val[0] outputs["out%d" % (i + 1)] = val return outputs class SelectInputSpec(BaseInterfaceInputSpec): inlist = InputMultiPath( traits.Any, mandatory=True, desc="list of values to choose from" ) index = 
InputMultiPath( traits.Int, mandatory=True, desc="0-based indices of values to choose" ) class SelectOutputSpec(TraitedSpec): out = OutputMultiPath(traits.Any, desc="list of selected values") class Select(IOBase): """Basic interface class to select specific elements from a list Examples -------- >>> from nipype.interfaces.utility import Select >>> sl = Select() >>> _ = sl.inputs.trait_set(inlist=[1, 2, 3, 4, 5], index=[3]) >>> out = sl.run() >>> out.outputs.out 4 >>> _ = sl.inputs.trait_set(inlist=[1, 2, 3, 4, 5], index=[3, 4]) >>> out = sl.run() >>> out.outputs.out [4, 5] """ input_spec = SelectInputSpec output_spec = SelectOutputSpec def _list_outputs(self): outputs = self._outputs().get() out = np.array(self.inputs.inlist, dtype=object)[ np.array(self.inputs.index) ].tolist() outputs["out"] = out return outputs class AssertEqualInputSpec(BaseInterfaceInputSpec): volume1 = File(exists=True, mandatory=True) volume2 = File(exists=True, mandatory=True) class AssertEqual(BaseInterface): input_spec = AssertEqualInputSpec def _run_interface(self, runtime): import nibabel as nb data1 = np.asanyarray(nb.load(self.inputs.volume1)) data2 = np.asanyarray(nb.load(self.inputs.volume2)) if not np.array_equal(data1, data2): raise RuntimeError("Input images are not exactly equal") return runtime nipype-1.7.0/nipype/interfaces/utility/csv.py000066400000000000000000000060331413403311400213160ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """CSV Handling utilities """ from ..base import traits, TraitedSpec, DynamicTraitedSpec, File, BaseInterface from ..io import add_traits class CSVReaderInputSpec(DynamicTraitedSpec, TraitedSpec): in_file = File( exists=True, mandatory=True, desc="Input comma-seperated value (CSV) file" ) header = traits.Bool( False, usedefault=True, desc="True if the first line is a column header" ) class CSVReader(BaseInterface): """ Examples 
-------- >>> reader = CSVReader() # doctest: +SKIP >>> reader.inputs.in_file = 'noHeader.csv' # doctest: +SKIP >>> out = reader.run() # doctest: +SKIP >>> out.outputs.column_0 == ['foo', 'bar', 'baz'] # doctest: +SKIP True >>> out.outputs.column_1 == ['hello', 'world', 'goodbye'] # doctest: +SKIP True >>> out.outputs.column_2 == ['300.1', '5', '0.3'] # doctest: +SKIP True >>> reader = CSVReader() # doctest: +SKIP >>> reader.inputs.in_file = 'header.csv' # doctest: +SKIP >>> reader.inputs.header = True # doctest: +SKIP >>> out = reader.run() # doctest: +SKIP >>> out.outputs.files == ['foo', 'bar', 'baz'] # doctest: +SKIP True >>> out.outputs.labels == ['hello', 'world', 'goodbye'] # doctest: +SKIP True >>> out.outputs.erosion == ['300.1', '5', '0.3'] # doctest: +SKIP True """ input_spec = CSVReaderInputSpec output_spec = DynamicTraitedSpec _always_run = True def _append_entry(self, outputs, entry): for key, value in zip(self._outfields, entry): outputs[key].append(value) return outputs def _parse_line(self, line): line = line.replace("\n", "") entry = [x.strip() for x in line.split(",")] return entry def _get_outfields(self): with open(self.inputs.in_file, "r") as fid: entry = self._parse_line(fid.readline()) if self.inputs.header: self._outfields = tuple(entry) else: self._outfields = tuple(["column_" + str(x) for x in range(len(entry))]) return self._outfields def _run_interface(self, runtime): self._get_outfields() return runtime def _outputs(self): return self._add_output_traits(super(CSVReader, self)._outputs()) def _add_output_traits(self, base): return add_traits(base, self._get_outfields()) def _list_outputs(self): outputs = self.output_spec().get() isHeader = True for key in self._outfields: outputs[key] = [] # initialize outfields with open(self.inputs.in_file, "r") as fid: for line in fid.readlines(): if self.inputs.header and isHeader: # skip header line isHeader = False continue entry = self._parse_line(line) outputs = self._append_entry(outputs, entry) 
return outputs nipype-1.7.0/nipype/interfaces/utility/tests/000077500000000000000000000000001413403311400213115ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/utility/tests/__init__.py000066400000000000000000000000301413403311400234130ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/utility/tests/test_auto_AssertEqual.py000066400000000000000000000010301413403311400261750ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import AssertEqual def test_AssertEqual_inputs(): input_map = dict( volume1=dict( extensions=None, mandatory=True, ), volume2=dict( extensions=None, mandatory=True, ), ) inputs = AssertEqual.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/utility/tests/test_auto_CSVReader.py000066400000000000000000000014001413403311400255230ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..csv import CSVReader def test_CSVReader_inputs(): input_map = dict( header=dict( usedefault=True, ), in_file=dict( extensions=None, mandatory=True, ), ) inputs = CSVReader.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CSVReader_outputs(): output_map = dict() outputs = CSVReader.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/utility/tests/test_auto_Function.py000066400000000000000000000012531413403311400255400ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..wrappers import Function def test_Function_inputs(): input_map = dict( function_str=dict( mandatory=True, ), ) inputs = Function.input_spec() for key, 
metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Function_outputs(): output_map = dict() outputs = Function.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/utility/tests/test_auto_IdentityInterface.py000066400000000000000000000012151413403311400273630ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import IdentityInterface def test_IdentityInterface_inputs(): input_map = dict() inputs = IdentityInterface.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_IdentityInterface_outputs(): output_map = dict() outputs = IdentityInterface.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/utility/tests/test_auto_Merge.py000066400000000000000000000014561413403311400250170ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import Merge def test_Merge_inputs(): input_map = dict( axis=dict( usedefault=True, ), no_flatten=dict( usedefault=True, ), ravel_inputs=dict( usedefault=True, ), ) inputs = Merge.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Merge_outputs(): output_map = dict( out=dict(), ) outputs = Merge.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value 
nipype-1.7.0/nipype/interfaces/utility/tests/test_auto_Rename.py000066400000000000000000000016651413403311400251710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import Rename def test_Rename_inputs(): input_map = dict( format_string=dict( mandatory=True, ), in_file=dict( extensions=None, mandatory=True, ), keep_ext=dict(), parse_string=dict(), use_fullpath=dict( usedefault=True, ), ) inputs = Rename.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Rename_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Rename.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/utility/tests/test_auto_Select.py000066400000000000000000000013531413403311400251730ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import Select def test_Select_inputs(): input_map = dict( index=dict( mandatory=True, ), inlist=dict( mandatory=True, ), ) inputs = Select.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Select_outputs(): output_map = dict( out=dict(), ) outputs = Select.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/utility/tests/test_auto_Split.py000066400000000000000000000014141413403311400250450ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import Split def test_Split_inputs(): input_map = dict( inlist=dict( mandatory=True, ), splits=dict( mandatory=True, ), squeeze=dict( usedefault=True, ), ) inputs = 
Split.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Split_outputs(): output_map = dict() outputs = Split.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/utility/tests/test_base.py000066400000000000000000000047001413403311400236350ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import pytest from nipype.interfaces import utility from nipype.interfaces.base import isdefined import nipype.pipeline.engine as pe def test_rename(tmpdir): tmpdir.chdir() # Test very simple rename _ = open("file.txt", "w").close() rn = utility.Rename(in_file="file.txt", format_string="test_file1.txt") res = rn.run() outfile = tmpdir.join("test_file1.txt").strpath assert res.outputs.out_file == outfile assert os.path.exists(outfile) # Now a string-formatting version rn = utility.Rename( in_file="file.txt", format_string="%(field1)s_file%(field2)d", keep_ext=True ) # Test .input field creation assert hasattr(rn.inputs, "field1") assert hasattr(rn.inputs, "field2") # Set the inputs rn.inputs.field1 = "test" rn.inputs.field2 = 2 res = rn.run() outfile = tmpdir.join("test_file2.txt").strpath assert res.outputs.out_file == outfile assert os.path.exists(outfile) @pytest.mark.parametrize( "args, expected", [({}, ([0], [1, 2, 3])), ({"squeeze": True}, (0, [1, 2, 3]))] ) def test_split(tmpdir, args, expected): tmpdir.chdir() node = pe.Node( utility.Split(inlist=list(range(4)), splits=[1, 3], **args), name="split_squeeze", ) res = node.run() assert res.outputs.out1 == expected[0] assert res.outputs.out2 == expected[1] @pytest.mark.parametrize( "args, kwargs, in_lists, expected", [ ([3], {}, [0, [1, 2], [3, 4, 
5]], [0, 1, 2, 3, 4, 5]), ([0], {}, None, None), ([], {}, [], []), ([], {}, [0, [1, 2], [3, 4, 5]], [0, [1, 2], [3, 4, 5]]), ([3], {"axis": "hstack"}, [[0], [1, 2], [3, 4, 5]], [[0, 1, 3]]), ([3], {"axis": "hstack"}, [[0, 1], [2, 3], [4, 5]], [[0, 2, 4], [1, 3, 5]]), ([3], {"axis": "hstack"}, [[0, 1], [2, 3], [4, 5]], [[0, 2, 4], [1, 3, 5]]), ], ) def test_merge(tmpdir, args, kwargs, in_lists, expected): tmpdir.chdir() node = pe.Node(utility.Merge(*args, **kwargs), name="merge") numinputs = args[0] if args else 0 if numinputs >= 1: for i in range(1, numinputs + 1): setattr(node.inputs, "in{:d}".format(i), in_lists[i - 1]) res = node.run() if numinputs < 1: assert not isdefined(res.outputs.out) else: assert res.outputs.out == expected nipype-1.7.0/nipype/interfaces/utility/tests/test_csv.py000066400000000000000000000022251413403311400235160ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from nipype.interfaces import utility def test_csvReader(tmpdir): header = "files,labels,erosion\n" lines = ["foo,hello,300.1\n", "bar,world,5\n", "baz,goodbye,0.3\n"] for x in range(2): name = tmpdir.join("testfile.csv").strpath with open(name, "w") as fid: reader = utility.CSVReader() if x % 2 == 0: fid.write(header) reader.inputs.header = True fid.writelines(lines) fid.flush() reader.inputs.in_file = name out = reader.run() if x % 2 == 0: assert out.outputs.files == ["foo", "bar", "baz"] assert out.outputs.labels == ["hello", "world", "goodbye"] assert out.outputs.erosion == ["300.1", "5", "0.3"] else: assert out.outputs.column_0 == ["foo", "bar", "baz"] assert out.outputs.column_1 == ["hello", "world", "goodbye"] assert out.outputs.column_2 == ["300.1", "5", "0.3"] nipype-1.7.0/nipype/interfaces/utility/tests/test_wrappers.py000066400000000000000000000065631413403311400245770ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; 
py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import pytest from nipype.interfaces import utility import nipype.pipeline.engine as pe concat_sort = """\ def concat_sort(in_arrays): import numpy as np all_vals = np.concatenate([arr.flatten() for arr in in_arrays]) return np.sort(all_vals) """ def test_function(tmpdir): tmpdir.chdir() def gen_random_array(size): import numpy as np return np.random.rand(size, size) f1 = pe.MapNode( utility.Function( input_names=["size"], output_names=["random_array"], function=gen_random_array, ), name="random_array", iterfield=["size"], ) f1.inputs.size = [2, 3, 5] wf = pe.Workflow(name="test_workflow") def increment_array(in_array): return in_array + 1 f2 = pe.MapNode( utility.Function(function=increment_array), name="increment_array", iterfield=["in_array"], ) wf.connect(f1, "random_array", f2, "in_array") f3 = pe.Node(utility.Function(function=concat_sort), name="concat_sort") wf.connect(f2, "out", f3, "in_arrays") wf.run() def make_random_array(size): return np.random.randn(size, size) # noqa def should_fail(tmp): tmp.chdir() node = pe.Node( utility.Function( input_names=["size"], output_names=["random_array"], function=make_random_array, ), name="should_fail", ) node.inputs.size = 10 node.run() def test_should_fail(tmpdir): with pytest.raises(pe.nodes.NodeExecutionError): should_fail(tmpdir) def test_function_with_imports(tmpdir): tmpdir.chdir() node = pe.Node( utility.Function( input_names=["size"], output_names=["random_array"], function=make_random_array, imports=["import numpy as np"], ), name="should_not_fail", ) print(node.inputs.function_str) node.inputs.size = 10 node.run() def test_aux_connect_function(tmpdir): """This tests excution nodes with multiple inputs and auxiliary function inside the Workflow connect function. 
""" tmpdir.chdir() wf = pe.Workflow(name="test_workflow") def _gen_tuple(size): return [1] * size def _sum_and_sub_mul(a, b, c): return (a + b) * c, (a - b) * c def _inc(x): return x + 1 params = pe.Node(utility.IdentityInterface(fields=["size", "num"]), name="params") params.inputs.num = 42 params.inputs.size = 1 gen_tuple = pe.Node( utility.Function( input_names=["size"], output_names=["tuple"], function=_gen_tuple ), name="gen_tuple", ) ssm = pe.Node( utility.Function( input_names=["a", "b", "c"], output_names=["sum", "sub"], function=_sum_and_sub_mul, ), name="sum_and_sub_mul", ) split = pe.Node(utility.Split(splits=[1, 1], squeeze=True), name="split") wf.connect( [ (params, gen_tuple, [(("size", _inc), "size")]), (params, ssm, [(("num", _inc), "c")]), (gen_tuple, split, [("tuple", "inlist")]), (split, ssm, [(("out1", _inc), "a"), ("out2", "b")]), ] ) wf.run() nipype-1.7.0/nipype/interfaces/utility/wrappers.py000066400000000000000000000126351413403311400223730ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ # changing to temporary directories >>> tmp = getfixture('tmpdir') >>> old = tmp.chdir() """ from ... 
import logging from ..base import ( traits, DynamicTraitedSpec, Undefined, isdefined, BaseInterfaceInputSpec, ) from ..io import IOBase, add_traits from ...utils.filemanip import ensure_list from ...utils.functions import getsource, create_function_from_source iflogger = logging.getLogger("nipype.interface") class FunctionInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): function_str = traits.Str(mandatory=True, desc="code for function") class Function(IOBase): """Runs arbitrary function as an interface Examples -------- >>> func = 'def func(arg1, arg2=5): return arg1 + arg2' >>> fi = Function(input_names=['arg1', 'arg2'], output_names=['out']) >>> fi.inputs.function_str = func >>> res = fi.run(arg1=1) >>> res.outputs.out 6 """ input_spec = FunctionInputSpec output_spec = DynamicTraitedSpec def __init__( self, input_names=None, output_names="out", function=None, imports=None, **inputs ): """ Parameters ---------- input_names: single str or list or None names corresponding to function inputs if ``None``, derive input names from function argument names output_names: single str or list names corresponding to function outputs (default: 'out'). if list of length > 1, has to match the number of outputs function : callable callable python object. 
must be able to execute in an isolated namespace (possibly in concert with the ``imports`` parameter) imports : list of strings list of import statements that allow the function to execute in an otherwise empty namespace """ super(Function, self).__init__(**inputs) if function: if hasattr(function, "__call__"): try: self.inputs.function_str = getsource(function) except IOError: raise Exception( "Interface Function does not accept " "function objects defined interactively " "in a python session" ) else: if input_names is None: fninfo = function.__code__ elif isinstance(function, (str, bytes)): self.inputs.function_str = function if input_names is None: fninfo = create_function_from_source(function, imports).__code__ else: raise Exception("Unknown type of function") if input_names is None: input_names = fninfo.co_varnames[: fninfo.co_argcount] self.inputs.on_trait_change(self._set_function_string, "function_str") self._input_names = ensure_list(input_names) self._output_names = ensure_list(output_names) add_traits(self.inputs, [name for name in self._input_names]) self.imports = imports self._out = {} for name in self._output_names: self._out[name] = None def _set_function_string(self, obj, name, old, new): if name == "function_str": if hasattr(new, "__call__"): function_source = getsource(new) fninfo = new.__code__ elif isinstance(new, (str, bytes)): function_source = new fninfo = create_function_from_source(new, self.imports).__code__ self.inputs.trait_set( trait_change_notify=False, **{"%s" % name: function_source} ) # Update input traits input_names = fninfo.co_varnames[: fninfo.co_argcount] new_names = set(input_names) - set(self._input_names) add_traits(self.inputs, list(new_names)) self._input_names.extend(new_names) def _add_output_traits(self, base): undefined_traits = {} for key in self._output_names: base.add_trait(key, traits.Any) undefined_traits[key] = Undefined base.trait_set(trait_change_notify=False, **undefined_traits) return base def 
_run_interface(self, runtime): # Create function handle function_handle = create_function_from_source( self.inputs.function_str, self.imports ) # Get function args args = {} for name in self._input_names: value = getattr(self.inputs, name) if isdefined(value): args[name] = value out = function_handle(**args) if len(self._output_names) == 1: self._out[self._output_names[0]] = out else: if isinstance(out, tuple) and (len(out) != len(self._output_names)): raise RuntimeError("Mismatch in number of expected outputs") else: for idx, name in enumerate(self._output_names): self._out[name] = out[idx] return runtime def _list_outputs(self): outputs = self._outputs().get() for key in self._output_names: outputs[key] = self._out[key] return outputs nipype-1.7.0/nipype/interfaces/vista/000077500000000000000000000000001413403311400175725ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/vista/__init__.py000066400000000000000000000004051413403311400217020ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """VistaSoft contains Matlab code to perform a variety of analysis on MRI data.""" from .vista import Vnifti2Image, VtoMat nipype-1.7.0/nipype/interfaces/vista/tests/000077500000000000000000000000001413403311400207345ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/vista/tests/__init__.py000066400000000000000000000000301413403311400230360ustar00rootroot00000000000000# -*- coding: utf-8 -*- nipype-1.7.0/nipype/interfaces/vista/tests/test_auto_Vnifti2Image.py000066400000000000000000000025201413403311400256600ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..vista import Vnifti2Image def test_Vnifti2Image_inputs(): input_map = dict( args=dict( argstr="%s", ), attributes=dict( argstr="-attr %s", extensions=None, position=2, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-in %s", 
extensions=None, mandatory=True, position=1, ), out_file=dict( argstr="-out %s", extensions=None, hash_files=False, keep_extension=False, name_source=["in_file"], name_template="%s.v", position=-1, ), ) inputs = Vnifti2Image.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_Vnifti2Image_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = Vnifti2Image.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/vista/tests/test_auto_VtoMat.py000066400000000000000000000022741413403311400246140ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..vista import VtoMat def test_VtoMat_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="-in %s", extensions=None, mandatory=True, position=1, ), out_file=dict( argstr="-out %s", extensions=None, hash_files=False, keep_extension=False, name_source=["in_file"], name_template="%s.mat", position=-1, ), ) inputs = VtoMat.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_VtoMat_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = VtoMat.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/vista/vista.py000066400000000000000000000040051413403311400212710ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from ..base import CommandLineInputSpec, CommandLine, 
TraitedSpec, File class Vnifti2ImageInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="-in %s", mandatory=True, position=1, desc="in file" ) attributes = File(exists=True, argstr="-attr %s", position=2, desc="attribute file") out_file = File( name_template="%s.v", keep_extension=False, argstr="-out %s", hash_files=False, position=-1, desc="output data file", name_source=["in_file"], ) class Vnifti2ImageOutputSpec(TraitedSpec): out_file = File(exists=True, desc="Output vista file") class Vnifti2Image(CommandLine): """ Convert a nifti file into a vista file. Example ------- >>> vimage = Vnifti2Image() >>> vimage.inputs.in_file = 'image.nii' >>> vimage.cmdline 'vnifti2image -in image.nii -out image.v' >>> vimage.run() # doctest: +SKIP """ _cmd = "vnifti2image" input_spec = Vnifti2ImageInputSpec output_spec = Vnifti2ImageOutputSpec class VtoMatInputSpec(CommandLineInputSpec): in_file = File( exists=True, argstr="-in %s", mandatory=True, position=1, desc="in file" ) out_file = File( name_template="%s.mat", keep_extension=False, argstr="-out %s", hash_files=False, position=-1, desc="output mat file", name_source=["in_file"], ) class VtoMatOutputSpec(TraitedSpec): out_file = File(exists=True, desc="Output mat file") class VtoMat(CommandLine): """ Convert a nifti file into a vista file. Example ------- >>> vimage = VtoMat() >>> vimage.inputs.in_file = 'image.v' >>> vimage.cmdline 'vtomat -in image.v -out image.mat' >>> vimage.run() # doctest: +SKIP """ _cmd = "vtomat" input_spec = VtoMatInputSpec output_spec = VtoMatOutputSpec nipype-1.7.0/nipype/interfaces/vtkbase.py000066400000000000000000000037471413403311400204700ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ vtkbase provides some helpers to use VTK through the tvtk package (mayavi) Code using tvtk should import it through this module """ import os from .. 
import logging iflogger = logging.getLogger("nipype.interface") # Check that VTK can be imported and get version _vtk_version = None try: import vtk _vtk_version = ( vtk.vtkVersion.GetVTKMajorVersion(), vtk.vtkVersion.GetVTKMinorVersion(), ) except ImportError: iflogger.warning("VTK was not found") # Ensure that tvtk is loaded with the appropriate ETS_TOOLKIT env var old_ets = os.getenv("ETS_TOOLKIT") os.environ["ETS_TOOLKIT"] = "null" _have_tvtk = False try: from tvtk.api import tvtk _have_tvtk = True except ImportError: iflogger.warning("tvtk wasn't found") tvtk = None finally: if old_ets is not None: os.environ["ETS_TOOLKIT"] = old_ets else: del os.environ["ETS_TOOLKIT"] def vtk_version(): """Get VTK version""" global _vtk_version return _vtk_version def no_vtk(): """Checks if VTK is installed and the python wrapper is functional""" global _vtk_version return _vtk_version is None def no_tvtk(): """Checks if tvtk was found""" global _have_tvtk return not _have_tvtk def vtk_old(): """Checks if VTK uses the old-style pipeline (VTK<6.0)""" global _vtk_version if _vtk_version is None: raise RuntimeException("VTK is not correctly installed.") return _vtk_version[0] < 6 def configure_input_data(obj, data): """ Configure the input data for vtk pipeline object obj. Copied from latest version of mayavi """ if vtk_old(): obj.input = data else: obj.set_input_data(data) def vtk_output(obj): """Configure the input data for vtk pipeline object obj.""" if vtk_old(): return obj.output return obj.get_output() nipype-1.7.0/nipype/interfaces/workbench/000077500000000000000000000000001413403311400204265ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/workbench/__init__.py000066400000000000000000000004551413403311400225430ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Connectome Workbench is a visualization for neuroimaging data, esp. 
derived from HCP data.""" from .metric import MetricResample from .cifti import CiftiSmooth nipype-1.7.0/nipype/interfaces/workbench/base.py000066400000000000000000000036551413403311400217230ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ The workbench module provides classes for interfacing with `connectome workbench `_ tools. Connectome Workbench is an open source, freely available visualization and discovery tool used to map neuroimaging data, especially data generated by the Human Connectome Project. """ import os import re from ... import logging from ...utils.filemanip import split_filename from ..base import CommandLine, PackageInfo iflogger = logging.getLogger("nipype.interface") class Info(PackageInfo): """Handle Connectome Workbench version information.""" version_cmd = "wb_command -version" @staticmethod def parse_version(raw_info): m = re.search(r"\nVersion (\S+)", raw_info) return m.groups()[0] if m else None class WBCommand(CommandLine): """Base support for workbench commands.""" @property def version(self): return Info.version() def _gen_filename(self, name, outdir=None, suffix="", ext=None): """Generate a filename based on the given parameters. The filename will take the form: . Parameters ---------- name : str Filename to base the new filename on. suffix : str Suffix to add to the `basename`. (defaults is '' ) ext : str Extension to use for the new filename. Returns ------- fname : str New filename based on given parameters. """ if not name: raise ValueError("Cannot generate filename - filename not set") _, fname, fext = split_filename(name) if ext is None: ext = fext if outdir is None: outdir = "." 
return os.path.join(outdir, fname + suffix + ext) nipype-1.7.0/nipype/interfaces/workbench/cifti.py000066400000000000000000000123411413403311400220770ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """This module provides interfaces for workbench CIFTI commands""" from ..base import TraitedSpec, File, traits, CommandLineInputSpec from .base import WBCommand from ... import logging iflogger = logging.getLogger("nipype.interface") class CiftiSmoothInputSpec(CommandLineInputSpec): in_file = File( exists=True, mandatory=True, argstr="%s", position=0, desc="The input CIFTI file", ) sigma_surf = traits.Float( mandatory=True, argstr="%s", position=1, desc="the sigma for the gaussian surface smoothing kernel, in mm", ) sigma_vol = traits.Float( mandatory=True, argstr="%s", position=2, desc="the sigma for the gaussian volume smoothing kernel, in mm", ) direction = traits.Enum( "ROW", "COLUMN", mandatory=True, argstr="%s", position=3, desc="which dimension to smooth along, ROW or COLUMN", ) out_file = File( name_source=["in_file"], name_template="smoothed_%s.nii", keep_extension=True, argstr="%s", position=4, desc="The output CIFTI", ) left_surf = File( exists=True, mandatory=True, position=5, argstr="-left-surface %s", desc="Specify the left surface to use", ) left_corrected_areas = File( exists=True, position=6, argstr="-left-corrected-areas %s", desc="vertex areas (as a metric) to use instead of computing them from " "the left surface.", ) right_surf = File( exists=True, mandatory=True, position=7, argstr="-right-surface %s", desc="Specify the right surface to use", ) right_corrected_areas = File( exists=True, position=8, argstr="-right-corrected-areas %s", desc="vertex areas (as a metric) to use instead of computing them from " "the right surface", ) cerebellum_surf = File( exists=True, position=9, argstr="-cerebellum-surface %s", desc="specify the 
cerebellum surface to use", ) cerebellum_corrected_areas = File( exists=True, position=10, requires=["cerebellum_surf"], argstr="cerebellum-corrected-areas %s", desc="vertex areas (as a metric) to use instead of computing them from " "the cerebellum surface", ) cifti_roi = File( exists=True, position=11, argstr="-cifti-roi %s", desc="CIFTI file for ROI smoothing", ) fix_zeros_vol = traits.Bool( position=12, argstr="-fix-zeros-volume", desc="treat values of zero in the volume as missing data", ) fix_zeros_surf = traits.Bool( position=13, argstr="-fix-zeros-surface", desc="treat values of zero on the surface as missing data", ) merged_volume = traits.Bool( position=14, argstr="-merged-volume", desc="smooth across subcortical structure boundaries", ) class CiftiSmoothOutputSpec(TraitedSpec): out_file = File(exists=True, desc="output CIFTI file") class CiftiSmooth(WBCommand): """ Smooth a CIFTI file The input cifti file must have a brain models mapping on the chosen dimension, columns for .dtseries, and either for .dconn. By default, data in different structures is smoothed independently (i.e., "parcel constrained" smoothing), so volume structures that touch do not smooth across this boundary. Specify ``merged_volume`` to ignore these boundaries. Surface smoothing uses the ``GEO_GAUSS_AREA`` smoothing method. The ``*_corrected_areas`` options are intended for when it is unavoidable to smooth on group average surfaces, it is only an approximate correction for the reduction of structure in a group average surface. It is better to smooth the data on individuals before averaging, when feasible. The ``fix_zeros_*`` options will treat values of zero as lack of data, and not use that value when generating the smoothed values, but will fill zeros with extrapolated values. The ROI should have a brain models mapping along columns, exactly matching the mapping of the chosen direction in the input file. Data outside the ROI is ignored. 
>>> from nipype.interfaces.workbench import CiftiSmooth >>> smooth = CiftiSmooth() >>> smooth.inputs.in_file = 'sub-01_task-rest.dtseries.nii' >>> smooth.inputs.sigma_surf = 4 >>> smooth.inputs.sigma_vol = 4 >>> smooth.inputs.direction = 'COLUMN' >>> smooth.inputs.right_surf = 'sub-01.R.midthickness.32k_fs_LR.surf.gii' >>> smooth.inputs.left_surf = 'sub-01.L.midthickness.32k_fs_LR.surf.gii' >>> smooth.cmdline 'wb_command -cifti-smoothing sub-01_task-rest.dtseries.nii 4.0 4.0 COLUMN \ smoothed_sub-01_task-rest.dtseries.nii \ -left-surface sub-01.L.midthickness.32k_fs_LR.surf.gii \ -right-surface sub-01.R.midthickness.32k_fs_LR.surf.gii' """ input_spec = CiftiSmoothInputSpec output_spec = CiftiSmoothOutputSpec _cmd = "wb_command -cifti-smoothing" nipype-1.7.0/nipype/interfaces/workbench/metric.py000066400000000000000000000151351413403311400222700ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """This module provides interfaces for workbench surface commands""" import os from ..base import TraitedSpec, File, traits, CommandLineInputSpec from .base import WBCommand from ... 
import logging iflogger = logging.getLogger("nipype.interface") class MetricResampleInputSpec(CommandLineInputSpec): in_file = File( exists=True, mandatory=True, argstr="%s", position=0, desc="The metric file to resample", ) current_sphere = File( exists=True, mandatory=True, argstr="%s", position=1, desc="A sphere surface with the mesh that the metric is currently on", ) new_sphere = File( exists=True, mandatory=True, argstr="%s", position=2, desc="A sphere surface that is in register with and" " has the desired output mesh", ) method = traits.Enum( "ADAP_BARY_AREA", "BARYCENTRIC", argstr="%s", mandatory=True, position=3, desc="The method name - ADAP_BARY_AREA method is recommended for" " ordinary metric data, because it should use all data while" " downsampling, unlike BARYCENTRIC. If ADAP_BARY_AREA is used," " exactly one of area_surfs or area_metrics must be specified", ) out_file = File( name_source=["new_sphere"], name_template="%s.out", keep_extension=True, argstr="%s", position=4, desc="The output metric", ) area_surfs = traits.Bool( position=5, argstr="-area-surfs", xor=["area_metrics"], desc="Specify surfaces to do vertex area correction based on", ) area_metrics = traits.Bool( position=5, argstr="-area-metrics", xor=["area_surfs"], desc="Specify vertex area metrics to do area correction based on", ) current_area = File( exists=True, position=6, argstr="%s", desc="A relevant anatomical surface with mesh OR" " a metric file with vertex areas for mesh", ) new_area = File( exists=True, position=7, argstr="%s", desc="A relevant anatomical surface with mesh OR" " a metric file with vertex areas for mesh", ) roi_metric = File( exists=True, position=8, argstr="-current-roi %s", desc="Input roi on the current mesh used to exclude non-data vertices", ) valid_roi_out = traits.Bool( position=9, argstr="-valid-roi-out", desc="Output the ROI of vertices that got data from valid source vertices", ) largest = traits.Bool( position=10, argstr="-largest", desc="Use only 
the value of the vertex with the largest weight", ) class MetricResampleOutputSpec(TraitedSpec): out_file = File(exists=True, desc="the output metric") roi_file = File(desc="ROI of vertices that got data from valid source vertices") class MetricResample(WBCommand): """ Resample a metric file to a different mesh Resamples a metric file, given two spherical surfaces that are in register. If ``ADAP_BARY_AREA`` is used, exactly one of -area-surfs or ``-area-metrics`` must be specified. The ``ADAP_BARY_AREA`` method is recommended for ordinary metric data, because it should use all data while downsampling, unlike ``BARYCENTRIC``. The recommended areas option for most data is individual midthicknesses for individual data, and averaged vertex area metrics from individual midthicknesses for group average data. The ``-current-roi`` option only masks the input, the output may be slightly dilated in comparison, consider using ``-metric-mask`` on the output when using ``-current-roi``. The ``-largest option`` results in nearest vertex behavior when used with ``BARYCENTRIC``. When resampling a binary metric, consider thresholding at 0.5 after resampling rather than using ``-largest``. 
>>> from nipype.interfaces.workbench import MetricResample >>> metres = MetricResample() >>> metres.inputs.in_file = 'sub-01_task-rest_bold_space-fsaverage5.L.func.gii' >>> metres.inputs.method = 'ADAP_BARY_AREA' >>> metres.inputs.current_sphere = 'fsaverage5_std_sphere.L.10k_fsavg_L.surf.gii' >>> metres.inputs.new_sphere = 'fs_LR-deformed_to-fsaverage.L.sphere.32k_fs_LR.surf.gii' >>> metres.inputs.area_metrics = True >>> metres.inputs.current_area = 'fsaverage5.L.midthickness_va_avg.10k_fsavg_L.shape.gii' >>> metres.inputs.new_area = 'fs_LR.L.midthickness_va_avg.32k_fs_LR.shape.gii' >>> metres.cmdline 'wb_command -metric-resample sub-01_task-rest_bold_space-fsaverage5.L.func.gii \ fsaverage5_std_sphere.L.10k_fsavg_L.surf.gii \ fs_LR-deformed_to-fsaverage.L.sphere.32k_fs_LR.surf.gii \ ADAP_BARY_AREA fs_LR-deformed_to-fsaverage.L.sphere.32k_fs_LR.surf.out \ -area-metrics fsaverage5.L.midthickness_va_avg.10k_fsavg_L.shape.gii \ fs_LR.L.midthickness_va_avg.32k_fs_LR.shape.gii' """ input_spec = MetricResampleInputSpec output_spec = MetricResampleOutputSpec _cmd = "wb_command -metric-resample" def _format_arg(self, opt, spec, val): if opt in ["current_area", "new_area"]: if not self.inputs.area_surfs and not self.inputs.area_metrics: raise ValueError( "{} was set but neither area_surfs or" " area_metrics were set".format(opt) ) if opt == "method": if ( val == "ADAP_BARY_AREA" and not self.inputs.area_surfs and not self.inputs.area_metrics ): raise ValueError( "Exactly one of area_surfs or area_metrics" " must be specified" ) if opt == "valid_roi_out" and val: # generate a filename and add it to argstr roi_out = self._gen_filename(self.inputs.in_file, suffix="_roi") iflogger.info("Setting roi output file as", roi_out) spec.argstr += " " + roi_out return super(MetricResample, self)._format_arg(opt, spec, val) def _list_outputs(self): outputs = super(MetricResample, self)._list_outputs() if self.inputs.valid_roi_out: roi_file = self._gen_filename(self.inputs.in_file, 
suffix="_roi") outputs["roi_file"] = os.path.abspath(roi_file) return outputs nipype-1.7.0/nipype/interfaces/workbench/tests/000077500000000000000000000000001413403311400215705ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/workbench/tests/__init__.py000066400000000000000000000000001413403311400236670ustar00rootroot00000000000000nipype-1.7.0/nipype/interfaces/workbench/tests/test_auto_CiftiSmooth.py000066400000000000000000000055531413403311400264710ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..cifti import CiftiSmooth def test_CiftiSmooth_inputs(): input_map = dict( args=dict( argstr="%s", ), cerebellum_corrected_areas=dict( argstr="cerebellum-corrected-areas %s", extensions=None, position=10, requires=["cerebellum_surf"], ), cerebellum_surf=dict( argstr="-cerebellum-surface %s", extensions=None, position=9, ), cifti_roi=dict( argstr="-cifti-roi %s", extensions=None, position=11, ), direction=dict( argstr="%s", mandatory=True, position=3, ), environ=dict( nohash=True, usedefault=True, ), fix_zeros_surf=dict( argstr="-fix-zeros-surface", position=13, ), fix_zeros_vol=dict( argstr="-fix-zeros-volume", position=12, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=0, ), left_corrected_areas=dict( argstr="-left-corrected-areas %s", extensions=None, position=6, ), left_surf=dict( argstr="-left-surface %s", extensions=None, mandatory=True, position=5, ), merged_volume=dict( argstr="-merged-volume", position=14, ), out_file=dict( argstr="%s", extensions=None, keep_extension=True, name_source=["in_file"], name_template="smoothed_%s.nii", position=4, ), right_corrected_areas=dict( argstr="-right-corrected-areas %s", extensions=None, position=8, ), right_surf=dict( argstr="-right-surface %s", extensions=None, mandatory=True, position=7, ), sigma_surf=dict( argstr="%s", mandatory=True, position=1, ), sigma_vol=dict( argstr="%s", mandatory=True, position=2, ), ) inputs = CiftiSmooth.input_spec() for 
key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_CiftiSmooth_outputs(): output_map = dict( out_file=dict( extensions=None, ), ) outputs = CiftiSmooth.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/workbench/tests/test_auto_MetricResample.py000066400000000000000000000046551413403311400271570ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..metric import MetricResample def test_MetricResample_inputs(): input_map = dict( area_metrics=dict( argstr="-area-metrics", position=5, xor=["area_surfs"], ), area_surfs=dict( argstr="-area-surfs", position=5, xor=["area_metrics"], ), args=dict( argstr="%s", ), current_area=dict( argstr="%s", extensions=None, position=6, ), current_sphere=dict( argstr="%s", extensions=None, mandatory=True, position=1, ), environ=dict( nohash=True, usedefault=True, ), in_file=dict( argstr="%s", extensions=None, mandatory=True, position=0, ), largest=dict( argstr="-largest", position=10, ), method=dict( argstr="%s", mandatory=True, position=3, ), new_area=dict( argstr="%s", extensions=None, position=7, ), new_sphere=dict( argstr="%s", extensions=None, mandatory=True, position=2, ), out_file=dict( argstr="%s", extensions=None, keep_extension=True, name_source=["new_sphere"], name_template="%s.out", position=4, ), roi_metric=dict( argstr="-current-roi %s", extensions=None, position=8, ), valid_roi_out=dict( argstr="-valid-roi-out", position=9, ), ) inputs = MetricResample.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value def test_MetricResample_outputs(): output_map = dict( out_file=dict( extensions=None, ), roi_file=dict( extensions=None, ), ) outputs = 
MetricResample.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value nipype-1.7.0/nipype/interfaces/workbench/tests/test_auto_WBCommand.py000066400000000000000000000007541413403311400260460ustar00rootroot00000000000000# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ..base import WBCommand def test_WBCommand_inputs(): input_map = dict( args=dict( argstr="%s", ), environ=dict( nohash=True, usedefault=True, ), ) inputs = WBCommand.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value nipype-1.7.0/nipype/pipeline/000077500000000000000000000000001413403311400161265ustar00rootroot00000000000000nipype-1.7.0/nipype/pipeline/__init__.py000066400000000000000000000004571413403311400202450ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Package contains modules for generating pipelines using interfaces """ __docformat__ = "restructuredtext" from .engine import Node, MapNode, JoinNode, Workflow nipype-1.7.0/nipype/pipeline/engine/000077500000000000000000000000001413403311400173735ustar00rootroot00000000000000nipype-1.7.0/nipype/pipeline/engine/__init__.py000066400000000000000000000006061413403311400215060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Package contains modules for generating pipelines using interfaces """ __docformat__ = "restructuredtext" from .workflows import Workflow from .nodes import Node, MapNode, JoinNode from .utils import generate_expanded_graph nipype-1.7.0/nipype/pipeline/engine/base.py000066400000000000000000000066621413403311400206710ustar00rootroot00000000000000# 
-*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Defines functionality for pipelined execution of interfaces.""" from copy import deepcopy import re import numpy as np from ... import config from ...interfaces.base import DynamicTraitedSpec from ...utils.filemanip import loadpkl, savepkl class EngineBase(object): """ Defines common attributes and functions for workflows and nodes. Implements the more general view of a task. """ def __init__(self, name=None, base_dir=None): """ Initialize base parameters of a workflow or node. Parameters ---------- name : string (mandatory) Name of this node. Name must be alphanumeric and not contain any special characters (e.g., '.', '@'). base_dir : string base output directory (will be hashed before creations) default=None, which results in the use of mkdtemp """ self._name = None self._hierarchy = None self.name = name self._id = self.name # for compatibility with node expansion using iterables self.base_dir = base_dir """Define the work directory for this instance of workflow element.""" self.config = deepcopy(config._sections) @property def name(self): """Set the unique name of this workflow element.""" return self._name @name.setter def name(self, name): if not name or not re.match(r"^[\w-]+$", name): raise ValueError('[Workflow|Node] name "%s" is not valid.' % name) self._name = name @property def fullname(self): """Build the full name down the hierarchy.""" if self._hierarchy: return "%s.%s" % (self._hierarchy, self.name) return self.name @property def inputs(self): raise NotImplementedError @property def outputs(self): raise NotImplementedError @property def itername(self): """Get the name of the expanded iterable.""" itername = self._id if self._hierarchy: itername = "%s.%s" % (self._hierarchy, self._id) return itername def clone(self, name): """ Clone an EngineBase object. 
Parameters ---------- name : string (mandatory) A clone of node or workflow must have a new name """ if name == self.name: raise ValueError('Cloning requires a new name, "%s" is ' "in use." % name) clone = deepcopy(self) clone.name = name if hasattr(clone, "_id"): clone._id = name return clone def _check_outputs(self, parameter): return hasattr(self.outputs, parameter) def _check_inputs(self, parameter): if isinstance(self.inputs, DynamicTraitedSpec): return True return hasattr(self.inputs, parameter) def __str__(self): """Convert to string.""" return self.fullname def __repr__(self): """Get Python representation.""" return self.itername def save(self, filename=None): """Store this workflow element to a file.""" if filename is None: filename = "temp.pklz" savepkl(filename, self) @staticmethod def load(filename): """Load this workflow element from a file.""" return loadpkl(filename) nipype-1.7.0/nipype/pipeline/engine/nodes.py000066400000000000000000001457141413403311400210710ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Defines functionality for pipelined execution of interfaces The `Node` class provides core functionality for batch processing. """ from collections import OrderedDict, defaultdict import os import os.path as op from pathlib import Path import shutil import socket from copy import deepcopy from glob import glob from logging import INFO from tempfile import mkdtemp from ... 
import config, logging from ...utils.misc import flatten, unflatten, str2bool, dict_diff from ...utils.filemanip import ( md5, ensure_list, simplify_list, copyfiles, fnames_presuffix, loadpkl, split_filename, load_json, emptydirs, savepkl, silentrm, ) from ...interfaces.base import ( traits, InputMultiPath, CommandLine, Undefined, DynamicTraitedSpec, Bunch, InterfaceResult, Interface, isdefined, ) from ...interfaces.base.specs import get_filecopy_info from .utils import ( _parameterization_dir, save_hashfile as _save_hashfile, load_resultfile as _load_resultfile, save_resultfile as _save_resultfile, nodelist_runner as _node_runner, strip_temp as _strip_temp, write_node_report, clean_working_directory, merge_dict, evaluate_connect_function, ) from .base import EngineBase logger = logging.getLogger("nipype.workflow") class NodeExecutionError(RuntimeError): """A nipype-specific name for exceptions when executing a Node.""" class Node(EngineBase): """ Wraps interface objects for use in pipeline A Node creates a sandbox-like directory for executing the underlying interface. It will copy or link inputs into this directory to ensure that input data are not overwritten. A hash of the input state is used to determine if the Node inputs have changed and whether the node needs to be re-executed. 
Examples -------- >>> from nipype import Node >>> from nipype.interfaces import spm >>> realign = Node(spm.Realign(), 'realign') >>> realign.inputs.in_files = 'functional.nii' >>> realign.inputs.register_to_mean = True >>> realign.run() # doctest: +SKIP """ def __init__( self, interface, name, iterables=None, itersource=None, synchronize=False, overwrite=None, needed_outputs=None, run_without_submitting=False, n_procs=None, mem_gb=0.20, **kwargs, ): """ Parameters ---------- interface : interface object node specific interface (fsl.Bet(), spm.Coregister()) name : alphanumeric string node specific name iterables : generator Input field and list to iterate using the pipeline engine for example to iterate over different frac values in fsl.Bet() for a single field the input can be a tuple, otherwise a list of tuples :: node.iterables = ('frac',[0.5,0.6,0.7]) node.iterables = [('fwhm',[2,4]),('fieldx',[0.5,0.6,0.7])] If this node has an itersource, then the iterables values is a dictionary which maps an iterable source field value to the target iterables field values, e.g.: :: inputspec.iterables = ('images',['img1.nii', 'img2.nii']]) node.itersource = ('inputspec', ['frac']) node.iterables = ('frac', {'img1.nii': [0.5, 0.6], 'img2.nii': [0.6, 0.7]}) If this node's synchronize flag is set, then an alternate form of the iterables is a [fields, values] list, where fields is the list of iterated fields and values is the list of value tuples for the given fields, e.g.: :: node.synchronize = True node.iterables = [('frac', 'threshold'), [(0.5, True), (0.6, False)]] itersource: tuple The (name, fields) iterables source which specifies the name of the predecessor iterable node and the input fields to use from that source node. The output field values comprise the key to the iterables parameter value mapping dictionary. synchronize: boolean Flag indicating whether iterables are synchronized. 
If the iterables are synchronized, then this iterable node is expanded once per iteration over all of the iterables values. Otherwise, this iterable node is expanded once per each permutation of the iterables values. overwrite : Boolean Whether to overwrite contents of output directory if it already exists. If directory exists and hash matches it assumes that process has been executed needed_outputs : list of output_names Force the node to keep only specific outputs. By default all outputs are kept. Setting this attribute will delete any output files and directories from the node's working directory that are not part of the `needed_outputs`. run_without_submitting : boolean Run the node without submitting to a job engine or to a multiprocessing pool """ # Make sure an interface is set, and that it is an Interface if interface is None: raise IOError("Interface must be provided") if not isinstance(interface, Interface): raise IOError("interface must be an instance of an Interface") super(Node, self).__init__(name, kwargs.get("base_dir")) self._interface = interface self._hierarchy = None self._got_inputs = False self._originputs = None self._output_dir = None self.iterables = iterables self.synchronize = synchronize self.itersource = itersource self.overwrite = overwrite self.parameterization = [] self.input_source = {} self.plugin_args = {} self.run_without_submitting = run_without_submitting self._mem_gb = mem_gb self._n_procs = n_procs # Downstream n_procs if hasattr(self._interface.inputs, "num_threads") and self._n_procs is not None: self._interface.inputs.num_threads = self._n_procs # Initialize needed_outputs and hashes self._hashvalue = None self._hashed_inputs = None self._needed_outputs = [] self.needed_outputs = needed_outputs self.config = None @property def interface(self): """Return the underlying interface object""" return self._interface @property def result(self): """Get result from result file (do not hold it in memory)""" return _load_resultfile( 
op.join(self.output_dir(), "result_%s.pklz" % self.name) ) @property def inputs(self): """Return the inputs of the underlying interface""" return self._interface.inputs @property def outputs(self): """Return the output fields of the underlying interface""" return self._interface._outputs() @property def needed_outputs(self): return self._needed_outputs @needed_outputs.setter def needed_outputs(self, new_outputs): """Needed outputs changes the hash, refresh if changed""" new_outputs = sorted(list(set(new_outputs or []))) if new_outputs != self._needed_outputs: # Reset hash self._hashvalue = None self._hashed_inputs = None self._needed_outputs = new_outputs @property def mem_gb(self): """Get estimated memory (GB)""" if hasattr(self._interface, "estimated_memory_gb"): self._mem_gb = self._interface.estimated_memory_gb logger.warning( 'Setting "estimated_memory_gb" on Interfaces has been ' "deprecated as of nipype 1.0, please use Node.mem_gb." ) return self._mem_gb @property def n_procs(self): """Get the estimated number of processes/threads""" if self._n_procs is not None: return self._n_procs if hasattr(self._interface.inputs, "num_threads") and isdefined( self._interface.inputs.num_threads ): return self._interface.inputs.num_threads return 1 @n_procs.setter def n_procs(self, value): """Set an estimated number of processes/threads""" self._n_procs = value # Overwrite interface's dynamic input of num_threads if hasattr(self._interface.inputs, "num_threads"): self._interface.inputs.num_threads = self._n_procs def output_dir(self): """Return the location of the output directory for the node""" # Output dir is cached if self._output_dir: return self._output_dir # Calculate & cache otherwise if self.base_dir is None: self.base_dir = mkdtemp() outputdir = self.base_dir if self._hierarchy: outputdir = op.join(outputdir, *self._hierarchy.split(".")) if self.parameterization: params_str = ["{}".format(p) for p in self.parameterization] if not 
str2bool(self.config["execution"]["parameterize_dirs"]): params_str = [_parameterization_dir(p) for p in params_str] outputdir = op.join(outputdir, *params_str) self._output_dir = op.realpath(op.join(outputdir, self.name)) return self._output_dir def set_input(self, parameter, val): """Set interface input value""" logger.debug( "[Node] %s - setting input %s = %s", self.name, parameter, str(val) ) setattr(self.inputs, parameter, deepcopy(val)) def get_output(self, parameter): """Retrieve a particular output of the node""" return getattr(self.result.outputs, parameter, None) def help(self): """Print interface help""" self._interface.help() def is_cached(self, rm_outdated=False): """ Check if the interface has been run previously, and whether cached results are up-to-date. """ outdir = self.output_dir() # The output folder does not exist: not cached if not op.exists(outdir) or not op.exists( op.join(outdir, "result_%s.pklz" % self.name) ): logger.debug('[Node] Not cached "%s".', outdir) return False, False # Check if there are hashfiles globhashes = glob(op.join(outdir, "_0x*.json")) unfinished = [path for path in globhashes if path.endswith("_unfinished.json")] hashfiles = list(set(globhashes) - set(unfinished)) # Update hash hashed_inputs, hashvalue = self._get_hashval() hashfile = op.join(outdir, "_0x%s.json" % hashvalue) logger.debug( "[Node] Hashes: %s, %s, %s, %s", hashed_inputs, hashvalue, hashfile, hashfiles, ) cached = hashfile in hashfiles # No previous hashfiles found, we're all set. 
if cached and len(hashfiles) == 1: assert hashfile == hashfiles[0] logger.debug('[Node] Up-to-date cache found for "%s".', self.fullname) return True, True # Cached and updated if len(hashfiles) > 1: if cached: hashfiles.remove(hashfile) # Do not clean up the node, if cached logger.warning( "[Node] Found %d previous hashfiles indicating that the working " 'directory of node "%s" is stale, deleting old hashfiles.', len(hashfiles), self.fullname, ) for rmfile in hashfiles: os.remove(rmfile) hashfiles = [hashfile] if cached else [] if not hashfiles: logger.debug('[Node] No hashfiles found in "%s".', outdir) assert not cached return False, False # At this point only one hashfile is in the folder # and we directly check whether it is updated updated = hashfile == hashfiles[0] if not updated: # Report differences depending on log verbosity cached = True logger.info('[Node] Outdated cache found for "%s".', self.fullname) # If logging is more verbose than INFO (20), print diff between hashes loglevel = logger.getEffectiveLevel() if loglevel < INFO: # Lazy logging: only < INFO exp_hash_file_base = split_filename(hashfiles[0])[1] exp_hash = exp_hash_file_base[len("_0x") :] logger.log( loglevel, "[Node] Old/new hashes = %s/%s", exp_hash, hashvalue ) try: prev_inputs = load_json(hashfiles[0]) except Exception: pass else: logger.log(loglevel, dict_diff(prev_inputs, hashed_inputs, 10)) if rm_outdated: os.remove(hashfiles[0]) assert cached # At this point, node is cached (may not be up-to-date) return cached, updated def hash_exists(self, updatehash=False): """ Decorate the new `is_cached` method with hash updating to maintain backwards compatibility. """ # Get a dictionary with hashed filenames and a hashvalue # of the dictionary itself. 
cached, updated = self.is_cached(rm_outdated=True) outdir = self.output_dir() hashfile = op.join(outdir, "_0x%s.json" % self._hashvalue) if updated: return True, self._hashvalue, hashfile, self._hashed_inputs # Update only possible if it exists if cached and updatehash: logger.debug("[Node] Updating hash: %s", self._hashvalue) _save_hashfile(hashfile, self._hashed_inputs) return cached, self._hashvalue, hashfile, self._hashed_inputs def run(self, updatehash=False): """ Execute the node in its directory. Parameters ---------- updatehash: boolean When the hash stored in the output directory as a result of a previous run does not match that calculated for this execution, updatehash=True only updates the hash without re-running. """ if self.config is None: self.config = {} self.config = merge_dict(deepcopy(config._sections), self.config) outdir = self.output_dir() force_run = self.overwrite or ( self.overwrite is None and self._interface.always_run ) # Check hash, check whether run should be enforced if not isinstance(self, MapNode): logger.info(f'[Node] Setting-up "{self.fullname}" in "{outdir}".') cached, updated = self.is_cached() # If the node is cached, check on pklz files and finish if not force_run and (updated or (not updated and updatehash)): logger.debug("Only updating node hashes or skipping execution") inputs_file = op.join(outdir, "_inputs.pklz") if not op.exists(inputs_file): logger.debug("Creating inputs file %s", inputs_file) savepkl(inputs_file, self.inputs.get_traitsfree()) node_file = op.join(outdir, "_node.pklz") if not op.exists(node_file): logger.debug("Creating node file %s", node_file) savepkl(node_file, self) result = self._run_interface( execute=False, updatehash=updatehash and not updated ) logger.info( '[Node] "%s" found cached%s.', self.fullname, " (and hash updated)" * (updatehash and not updated), ) return result if cached and updated and not isinstance(self, MapNode): logger.debug('[Node] Rerunning cached, up-to-date node "%s"', 
self.fullname) if not force_run and str2bool( self.config["execution"]["stop_on_first_rerun"] ): raise Exception( 'Cannot rerun when "stop_on_first_rerun" is set to True' ) # Remove any hashfile that exists at this point (re)running. if cached: for outdatedhash in glob(op.join(self.output_dir(), "_0x*.json")): os.remove(outdatedhash) # _get_hashval needs to be called before running. When there is a valid (or seemingly # valid cache), the is_cached() member updates the hashval via _get_hashval. # However, if this node's folder doesn't exist or the result file is not found, then # the hashval needs to be generated here. See #3026 for a larger context. self._get_hashval() # Hashfile while running hashfile_unfinished = op.join(outdir, "_0x%s_unfinished.json" % self._hashvalue) # Delete directory contents if this is not a MapNode or can't resume can_resume = not (self._interface.can_resume and op.isfile(hashfile_unfinished)) if can_resume and not isinstance(self, MapNode): emptydirs(outdir, noexist_ok=True) else: logger.debug( "[%sNode] Resume - hashfile=%s", "Map" * int(isinstance(self, MapNode)), hashfile_unfinished, ) if isinstance(self, MapNode): # remove old json files for filename in glob(op.join(outdir, "_0x*.json")): os.remove(filename) # Make sure outdir is created os.makedirs(outdir, exist_ok=True) # Store runtime-hashfile, pre-execution report, the node and the inputs set. _save_hashfile(hashfile_unfinished, self._hashed_inputs) write_node_report(self, is_mapnode=isinstance(self, MapNode)) savepkl(op.join(outdir, "_node.pklz"), self) savepkl(op.join(outdir, "_inputs.pklz"), self.inputs.get_traitsfree()) try: result = self._run_interface(execute=True) except Exception: logger.warning('[Node] Error on "%s" (%s)', self.fullname, outdir) # Tear-up after error if not silentrm(hashfile_unfinished): logger.warning( """\ Interface finished unexpectedly and the corresponding unfinished hashfile %s \ does not exist. 
Another nipype instance may be running against the same work \ directory. Please ensure no other concurrent workflows are racing""", hashfile_unfinished, ) raise # Tear-up after success shutil.move(hashfile_unfinished, hashfile_unfinished.replace("_unfinished", "")) write_node_report(self, result=result, is_mapnode=isinstance(self, MapNode)) return result def _get_hashval(self): """Return a hash of the input state""" self._get_inputs() if self._hashvalue is None and self._hashed_inputs is None: self._hashed_inputs, self._hashvalue = self.inputs.get_hashval( hash_method=self.config["execution"]["hash_method"] ) rm_extra = self.config["execution"]["remove_unnecessary_outputs"] if str2bool(rm_extra) and self.needed_outputs: hashobject = md5() hashobject.update(self._hashvalue.encode()) hashobject.update(str(self.needed_outputs).encode()) self._hashvalue = hashobject.hexdigest() self._hashed_inputs.append(("needed_outputs", self.needed_outputs)) return self._hashed_inputs, self._hashvalue def _get_inputs(self): """ Retrieve inputs from pointers to results files. This mechanism can be easily extended/replaced to retrieve data from other data sources (e.g., XNAT, HTTP, etc.,.) 
""" if self._got_inputs: # Inputs cached return if not self.input_source: # No previous nodes self._got_inputs = True return prev_results = defaultdict(list) for key, info in list(self.input_source.items()): prev_results[info[0]].append((key, info[1])) logger.debug( '[Node] Setting %d connected inputs of node "%s" from %d previous nodes.', len(self.input_source), self.name, len(prev_results), ) for results_fname, connections in list(prev_results.items()): outputs = None try: outputs = _load_resultfile(results_fname).outputs except AttributeError as e: logger.critical("%s", e) if outputs is None: raise NodeExecutionError( """\ Error populating the inputs of node "%s": the results file of the source node \ (%s) does not contain any outputs.""" % (self.name, results_fname) ) for key, conn in connections: output_value = Undefined if isinstance(conn, tuple): value = getattr(outputs, conn[0]) if isdefined(value): output_value = evaluate_connect_function( conn[1], conn[2], value ) else: output_name = conn try: output_value = outputs.trait_get()[output_name] except AttributeError: output_value = outputs.dictcopy()[output_name] logger.debug("output: %s", output_name) try: self.set_input(key, deepcopy(output_value)) except traits.TraitError as e: msg = ( e.args[0], "", "Error setting node input:", "Node: %s" % self.name, "input: %s" % key, "results_file: %s" % results_fname, "value: %s" % str(output_value), ) e.args = ("\n".join(msg),) raise # Successfully set inputs self._got_inputs = True def _update_hash(self): for outdatedhash in glob(op.join(self.output_dir(), "_0x*.json")): os.remove(outdatedhash) _save_hashfile(self._hashvalue, self._hashed_inputs) def _run_interface(self, execute=True, updatehash=False): if updatehash: self._update_hash() return self._load_results() return self._run_command(execute) def _load_results(self): cwd = self.output_dir() try: result = _load_resultfile(op.join(cwd, "result_%s.pklz" % self.name)) except (traits.TraitError, EOFError): 
logger.debug("Error populating inputs/outputs, (re)aggregating results...") except (AttributeError, ImportError) as err: logger.debug( "attribute error: %s probably using " "different trait pickled file", str(err), ) old_inputs = loadpkl(op.join(cwd, "_inputs.pklz")) self.inputs.trait_set(**old_inputs) else: return result # try aggregating first if not isinstance(self, MapNode): self._copyfiles_to_wd(linksonly=True) aggouts = self._interface.aggregate_outputs( needed_outputs=self.needed_outputs ) runtime = Bunch( cwd=cwd, returncode=0, environ=dict(os.environ), hostname=socket.gethostname(), ) result = InterfaceResult( interface=self._interface.__class__, runtime=runtime, inputs=self._interface.inputs.get_traitsfree(), outputs=aggouts, ) _save_resultfile( result, cwd, self.name, rebase=str2bool(self.config["execution"]["use_relative_paths"]), ) else: logger.debug("aggregating mapnode results") result = self._run_interface() return result def _run_command(self, execute, copyfiles=True): if not execute: try: result = self._load_results() except (FileNotFoundError, AttributeError): # if aggregation does not work, rerun the node logger.info( "[Node] Some of the outputs were not found: " "rerunning node." ) copyfiles = False # OE: this was like this before, execute = True # I'll keep them for safety else: logger.info( '[Node] Cached "%s" - collecting precomputed outputs', self.fullname ) return result outdir = Path(self.output_dir()) if copyfiles: self._originputs = deepcopy(self._interface.inputs) self._copyfiles_to_wd(execute=execute) # Run command: either execute is true or load_results failed. logger.info( f'[Node] Executing "{self.name}" <{self._interface.__module__}' f".{self._interface.__class__.__name__}>" ) # Invoke core run method of the interface ignoring exceptions result = self._interface.run(cwd=outdir, ignore_exception=True) logger.info( f'[Node] Finished "{self.name}", elapsed time {result.runtime.duration}s.' 
) if issubclass(self._interface.__class__, CommandLine): # Write out command line as it happened Path.write_text(outdir / "command.txt", f"{result.runtime.cmdline}\n") exc_tb = getattr(result.runtime, "traceback", None) if not exc_tb: # Clean working directory if no errors dirs2keep = None if isinstance(self, MapNode): dirs2keep = [op.join(outdir, "mapflow")] result.outputs = clean_working_directory( result.outputs, outdir, self._interface.inputs, self.needed_outputs, self.config, dirs2keep=dirs2keep, ) # Store results file under all circumstances _save_resultfile( result, outdir, self.name, rebase=str2bool(self.config["execution"]["use_relative_paths"]), ) if exc_tb: raise NodeExecutionError( f"Exception raised while executing Node {self.name}.\n\n{result.runtime.traceback}" ) return result def _copyfiles_to_wd(self, execute=True, linksonly=False): """copy files over and change the inputs""" filecopy_info = get_filecopy_info(self.interface) if not filecopy_info: # Nothing to be done return logger.debug( "copying files to wd [execute=%s, linksonly=%s]", execute, linksonly ) outdir = self.output_dir() if execute and linksonly: olddir = outdir outdir = op.join(outdir, "_tempinput") os.makedirs(outdir, exist_ok=True) for info in filecopy_info: files = self.inputs.trait_get().get(info["key"]) if not isdefined(files) or not files: continue infiles = ensure_list(files) if execute: if linksonly: if not info["copy"]: newfiles = copyfiles( infiles, [outdir], copy=info["copy"], create_new=True ) else: newfiles = fnames_presuffix(infiles, newpath=outdir) newfiles = _strip_temp( newfiles, op.abspath(olddir).split(op.sep)[-1] ) else: newfiles = copyfiles( infiles, [outdir], copy=info["copy"], create_new=True ) else: newfiles = fnames_presuffix(infiles, newpath=outdir) if not isinstance(files, list): newfiles = simplify_list(newfiles) setattr(self.inputs, info["key"], newfiles) if execute and linksonly: emptydirs(outdir, noexist_ok=True) def update(self, **opts): """Update 
inputs""" self.inputs.update(**opts) class JoinNode(Node): """Wraps interface objects that join inputs into a list. Examples -------- >>> import nipype.pipeline.engine as pe >>> from nipype import Node, JoinNode, Workflow >>> from nipype.interfaces.utility import IdentityInterface >>> from nipype.interfaces import (ants, dcm2nii, fsl) >>> wf = Workflow(name='preprocess') >>> inputspec = Node(IdentityInterface(fields=['image']), ... name='inputspec') >>> inputspec.iterables = [('image', ... ['img1.nii', 'img2.nii', 'img3.nii'])] >>> img2flt = Node(fsl.ImageMaths(out_data_type='float'), ... name='img2flt') >>> wf.connect(inputspec, 'image', img2flt, 'in_file') >>> average = JoinNode(ants.AverageImages(), joinsource='inputspec', ... joinfield='images', name='average') >>> wf.connect(img2flt, 'out_file', average, 'images') >>> realign = Node(fsl.FLIRT(), name='realign') >>> wf.connect(img2flt, 'out_file', realign, 'in_file') >>> wf.connect(average, 'output_average_image', realign, 'reference') >>> strip = Node(fsl.BET(), name='strip') >>> wf.connect(realign, 'out_file', strip, 'in_file') """ def __init__( self, interface, name, joinsource, joinfield=None, unique=False, **kwargs ): """ Parameters ---------- interface : interface object node specific interface (fsl.Bet(), spm.Coregister()) name : alphanumeric string node specific name joinsource : node name name of the join predecessor iterable node joinfield : string or list of strings name(s) of list input fields that will be aggregated. The default is all of the join node input fields. unique : flag indicating whether to ignore duplicate input values See Node docstring for additional keyword arguments. 
""" super(JoinNode, self).__init__(interface, name, **kwargs) self._joinsource = None # The member should be defined self.joinsource = joinsource # Let the setter do the job """the join predecessor iterable node""" if not joinfield: # default is the interface fields joinfield = self._interface.inputs.copyable_trait_names() elif isinstance(joinfield, (str, bytes)): joinfield = [joinfield] self.joinfield = joinfield """the fields to join""" self._inputs = self._override_join_traits( self._interface.inputs, self.joinfield ) """the override inputs""" self._unique = unique """flag indicating whether to ignore duplicate input values""" self._next_slot_index = 0 """the joinfield index assigned to an iterated input""" @property def joinsource(self): return self._joinsource @joinsource.setter def joinsource(self, value): """Set the joinsource property. If the given value is a Node, then the joinsource is set to the node name. """ if isinstance(value, Node): value = value.name self._joinsource = value @property def inputs(self): """The JoinNode inputs include the join field overrides.""" return self._inputs def _add_join_item_fields(self): """Add new join item fields assigned to the next iterated input This method is intended solely for workflow graph expansion. Examples -------- >>> from nipype.interfaces.utility import IdentityInterface >>> import nipype.pipeline.engine as pe >>> from nipype import Node, JoinNode, Workflow >>> inputspec = Node(IdentityInterface(fields=['image']), ... name='inputspec'), >>> join = JoinNode(IdentityInterface(fields=['images', 'mask']), ... 
joinsource='inputspec', joinfield='images', name='join') >>> join._add_join_item_fields() {'images': 'imagesJ1'} Return the {base field: slot field} dictionary """ # create the new join item fields idx = self._next_slot_index newfields = dict( [(field, self._add_join_item_field(field, idx)) for field in self.joinfield] ) # increment the join slot index logger.debug("Added the %s join item fields %s.", self, newfields) self._next_slot_index += 1 return newfields def _add_join_item_field(self, field, index): """Add new join item fields qualified by the given index Return the new field name """ # the new field name name = "%sJ%d" % (field, index + 1) # make a copy of the join trait trait = self._inputs.trait(field, False, True) # add the join item trait to the override traits self._inputs.add_trait(name, trait) return name def _override_join_traits(self, basetraits, fields): """Convert the given join fields to accept an input that is a list item rather than a list. Non-join fields delegate to the interface traits. Return the override DynamicTraitedSpec """ dyntraits = DynamicTraitedSpec() if fields is None: fields = basetraits.copyable_trait_names() else: # validate the fields for field in fields: if not basetraits.trait(field): raise ValueError( "The JoinNode %s does not have a field" " named %s" % (self.name, field) ) for name, trait in list(basetraits.items()): # if a join field has a single inner trait, then the item # trait is that inner trait. Otherwise, the item trait is # a new Any trait. 
if name in fields and len(trait.inner_traits) == 1: item_trait = trait.inner_traits[0] dyntraits.add_trait(name, item_trait) setattr(dyntraits, name, Undefined) logger.debug( "Converted the join node %s field %s trait type from %s to %s", self, name, trait.trait_type.info(), item_trait.info(), ) else: dyntraits.add_trait(name, traits.Any) setattr(dyntraits, name, Undefined) return dyntraits def _run_command(self, execute, copyfiles=True): """Collates the join inputs prior to delegating to the superclass.""" self._collate_join_field_inputs() return super(JoinNode, self)._run_command(execute, copyfiles) def _collate_join_field_inputs(self): """ Collects each override join item field into the interface join field input.""" for field in self.inputs.copyable_trait_names(): if field in self.joinfield: # collate the join field val = self._collate_input_value(field) try: setattr(self._interface.inputs, field, val) except Exception as e: raise ValueError( ">>JN %s %s %s %s %s: %s" % ( self, field, val, self.inputs.copyable_trait_names(), self.joinfield, e, ) ) elif hasattr(self._interface.inputs, field): # copy the non-join field val = getattr(self._inputs, field) if isdefined(val): setattr(self._interface.inputs, field, val) logger.debug( "Collated %d inputs into the %s node join fields", self._next_slot_index, self, ) def _collate_input_value(self, field): """ Collects the join item field values into a list or set value for the given field, as follows: - If the field trait is a Set, then the values are collected into a set. - Otherwise, the values are collected into a list which preserves the iterables order. If the ``unique`` flag is set, then duplicate values are removed but the iterables order is preserved. 
""" val = [self._slot_value(field, idx) for idx in range(self._next_slot_index)] basetrait = self._interface.inputs.trait(field) if isinstance(basetrait.trait_type, traits.Set): return set(val) if self._unique: return list(OrderedDict.fromkeys(val)) return val def _slot_value(self, field, index): slot_field = "%sJ%d" % (field, index + 1) try: return getattr(self._inputs, slot_field) except AttributeError as e: raise AttributeError( "The join node %s does not have a slot field %s" " to hold the %s value at index %d: %s" % (self, slot_field, field, index, e) ) class MapNode(Node): """Wraps interface objects that need to be iterated on a list of inputs. Examples -------- >>> from nipype import MapNode >>> from nipype.interfaces import fsl >>> realign = MapNode(fsl.MCFLIRT(), 'in_file', 'realign') >>> realign.inputs.in_file = ['functional.nii', ... 'functional2.nii', ... 'functional3.nii'] >>> realign.run() # doctest: +SKIP """ def __init__( self, interface, iterfield, name, serial=False, nested=False, **kwargs ): """ Parameters ---------- interface : interface object node specific interface (fsl.Bet(), spm.Coregister()) iterfield : string or list of strings name(s) of input fields that will receive a list of whatever kind of input they take. the node will be run separately for each value in these lists. for more than one input, the values are paired (i.e. it does not compute a combinatorial product). name : alphanumeric string node specific name serial : boolean flag to enforce executing the jobs of the mapnode in a serial manner rather than parallel nested : boolean support for nested lists. If set, the input list will be flattened before running and the nested list structure of the outputs will be resored. See Node docstring for additional keyword arguments. 
""" super(MapNode, self).__init__(interface, name, **kwargs) if isinstance(iterfield, (str, bytes)): iterfield = [iterfield] self.iterfield = iterfield self.nested = nested self._inputs = self._create_dynamic_traits( self._interface.inputs, fields=self.iterfield ) self._inputs.on_trait_change(self._set_mapnode_input) self._got_inputs = False self._serial = serial def _create_dynamic_traits(self, basetraits, fields=None, nitems=None): """Convert specific fields of a trait to accept multiple inputs""" output = DynamicTraitedSpec() if fields is None: fields = basetraits.copyable_trait_names() for name, spec in list(basetraits.items()): if name in fields and ((nitems is None) or (nitems > 1)): logger.debug("adding multipath trait: %s", name) if self.nested: output.add_trait(name, InputMultiPath(traits.Any())) else: output.add_trait(name, InputMultiPath(spec.trait_type)) else: output.add_trait(name, traits.Trait(spec)) setattr(output, name, Undefined) value = getattr(basetraits, name) if isdefined(value): setattr(output, name, value) value = getattr(output, name) return output def set_input(self, parameter, val): """ Set interface input value or nodewrapper attribute Priority goes to interface. 
""" logger.debug( "setting nodelevel(%s) input %s = %s", str(self), parameter, str(val) ) self._set_mapnode_input(parameter, deepcopy(val)) def _set_mapnode_input(self, name, newvalue): logger.debug( "setting mapnode(%s) input: %s -> %s", str(self), name, str(newvalue) ) if name in self.iterfield: setattr(self._inputs, name, newvalue) else: setattr(self._interface.inputs, name, newvalue) def _get_hashval(self): """Compute hash including iterfield lists.""" self._get_inputs() if self._hashvalue is not None and self._hashed_inputs is not None: return self._hashed_inputs, self._hashvalue self._check_iterfield() hashinputs = deepcopy(self._interface.inputs) for name in self.iterfield: hashinputs.remove_trait(name) hashinputs.add_trait( name, InputMultiPath(self._interface.inputs.traits()[name].trait_type) ) logger.debug("setting hashinput %s-> %s", name, getattr(self._inputs, name)) if self.nested: setattr(hashinputs, name, flatten(getattr(self._inputs, name))) else: setattr(hashinputs, name, getattr(self._inputs, name)) hashed_inputs, hashvalue = hashinputs.get_hashval( hash_method=self.config["execution"]["hash_method"] ) rm_extra = self.config["execution"]["remove_unnecessary_outputs"] if str2bool(rm_extra) and self.needed_outputs: hashobject = md5() hashobject.update(hashvalue.encode()) sorted_outputs = sorted(self.needed_outputs) hashobject.update(str(sorted_outputs).encode()) hashvalue = hashobject.hexdigest() hashed_inputs.append(("needed_outputs", sorted_outputs)) self._hashed_inputs, self._hashvalue = hashed_inputs, hashvalue return self._hashed_inputs, self._hashvalue @property def inputs(self): return self._inputs @property def outputs(self): if self._interface._outputs(): return Bunch(self._interface._outputs().trait_get()) def _make_nodes(self, cwd=None): if cwd is None: cwd = self.output_dir() if self.nested: nitems = len(flatten(ensure_list(getattr(self.inputs, self.iterfield[0])))) else: nitems = len(ensure_list(getattr(self.inputs, self.iterfield[0]))) 
for i in range(nitems): nodename = "_%s%d" % (self.name, i) node = Node( deepcopy(self._interface), n_procs=self._n_procs, mem_gb=self._mem_gb, overwrite=self.overwrite, needed_outputs=self.needed_outputs, run_without_submitting=self.run_without_submitting, base_dir=op.join(cwd, "mapflow"), name=nodename, ) node.plugin_args = self.plugin_args node.interface.inputs.trait_set( **deepcopy(self._interface.inputs.trait_get()) ) node.interface.resource_monitor = self._interface.resource_monitor for field in self.iterfield: if self.nested: fieldvals = flatten(ensure_list(getattr(self.inputs, field))) else: fieldvals = ensure_list(getattr(self.inputs, field)) logger.debug("setting input %d %s %s", i, field, fieldvals[i]) setattr(node.inputs, field, fieldvals[i]) node.config = self.config yield i, node def _collate_results(self, nodes): finalresult = InterfaceResult( interface=[], runtime=[], provenance=[], inputs=[], outputs=self.outputs ) returncode = [] for i, nresult, err in nodes: finalresult.runtime.insert(i, None) returncode.insert(i, err) if nresult: if hasattr(nresult, "runtime"): finalresult.interface.insert(i, nresult.interface) finalresult.inputs.insert(i, nresult.inputs) finalresult.runtime[i] = nresult.runtime if hasattr(nresult, "provenance"): finalresult.provenance.insert(i, nresult.provenance) if self.outputs: for key, _ in list(self.outputs.items()): rm_extra = self.config["execution"]["remove_unnecessary_outputs"] if str2bool(rm_extra) and self.needed_outputs: if key not in self.needed_outputs: continue values = getattr(finalresult.outputs, key) if not isdefined(values): values = [] if nresult and nresult.outputs: values.insert(i, nresult.outputs.trait_get()[key]) else: values.insert(i, None) defined_vals = [isdefined(val) for val in values] if any(defined_vals) and finalresult.outputs: setattr(finalresult.outputs, key, values) if self.nested: for key, _ in list(self.outputs.items()): values = getattr(finalresult.outputs, key) if isdefined(values): values 
= unflatten( values, ensure_list(getattr(self.inputs, self.iterfield[0])) ) setattr(finalresult.outputs, key, values) if returncode and any([code is not None for code in returncode]): msg = [] for i, code in enumerate(returncode): if code is not None: msg += ["Subnode %d failed" % i] msg += ["Error: %s" % str(code)] raise NodeExecutionError( "Subnodes of node: %s failed:\n%s" % (self.name, "\n".join(msg)) ) return finalresult def get_subnodes(self): """Generate subnodes of a mapnode and write pre-execution report""" self._get_inputs() self._check_iterfield() write_node_report(self, result=None, is_mapnode=True) return [node for _, node in self._make_nodes()] def num_subnodes(self): """Get the number of subnodes to iterate in this MapNode""" self._get_inputs() self._check_iterfield() if self._serial: return 1 if self.nested: return len(ensure_list(flatten(getattr(self.inputs, self.iterfield[0])))) return len(ensure_list(getattr(self.inputs, self.iterfield[0]))) def _get_inputs(self): old_inputs = self._inputs.trait_get() self._inputs = self._create_dynamic_traits( self._interface.inputs, fields=self.iterfield ) self._inputs.trait_set(**old_inputs) super(MapNode, self)._get_inputs() def _check_iterfield(self): """Checks iterfield * iterfield must be in inputs * number of elements must match across iterfield """ for iterfield in self.iterfield: if not isdefined(getattr(self.inputs, iterfield)): raise ValueError( ("Input %s was not set but it is listed " "in iterfields.") % iterfield ) if len(self.iterfield) > 1: first_len = len(ensure_list(getattr(self.inputs, self.iterfield[0]))) for iterfield in self.iterfield[1:]: if first_len != len(ensure_list(getattr(self.inputs, iterfield))): raise ValueError( ( "All iterfields of a MapNode have to " "have the same length. %s" ) % str(self.inputs) ) def _run_interface(self, execute=True, updatehash=False): """Run the mapnode interface This is primarily intended for serial execution of mapnode. 
A parallel execution requires creation of new nodes that can be spawned """ self._check_iterfield() cwd = self.output_dir() if not execute: return self._load_results() # Set up mapnode folder names if self.nested: nitems = len(ensure_list(flatten(getattr(self.inputs, self.iterfield[0])))) else: nitems = len(ensure_list(getattr(self.inputs, self.iterfield[0]))) nnametpl = "_%s{}" % self.name nodenames = [nnametpl.format(i) for i in range(nitems)] # Run mapnode outdir = self.output_dir() result = InterfaceResult( interface=self._interface.__class__, runtime=Bunch( cwd=outdir, returncode=1, environ=dict(os.environ), hostname=socket.gethostname(), ), inputs=self._interface.inputs.get_traitsfree(), ) try: result = self._collate_results( _node_runner( self._make_nodes(cwd), updatehash=updatehash, stop_first=str2bool( self.config["execution"]["stop_on_first_crash"] ), ) ) except Exception as msg: result.runtime.stderr = "%s\n\n%s".format( getattr(result.runtime, "stderr", ""), msg ) _save_resultfile( result, outdir, self.name, rebase=str2bool(self.config["execution"]["use_relative_paths"]), ) raise # And store results _save_resultfile(result, cwd, self.name, rebase=False) # remove any node directories no longer required dirs2remove = [] for path in glob(op.join(cwd, "mapflow", "*")): if op.isdir(path): if path.split(op.sep)[-1] not in nodenames: dirs2remove.append(path) for path in dirs2remove: logger.debug('[MapNode] Removing folder "%s".', path) shutil.rmtree(path) return result nipype-1.7.0/nipype/pipeline/engine/report_template.html000066400000000000000000000150341413403311400234720ustar00rootroot00000000000000

Flare imports
hierarchical edge bundling

tension:
nipype-1.7.0/nipype/pipeline/engine/report_template2.html000066400000000000000000000061031413403311400235510ustar00rootroot00000000000000 Sankey Diagram

Nipype workflow: Sankey Diagram

nipype-1.7.0/nipype/pipeline/engine/tests/000077500000000000000000000000001413403311400205355ustar00rootroot00000000000000nipype-1.7.0/nipype/pipeline/engine/tests/__init__.py000066400000000000000000000002121413403311400226410ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: nipype-1.7.0/nipype/pipeline/engine/tests/test_base.py000066400000000000000000000045511413403311400230650ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import pytest from ..base import EngineBase from ....interfaces import base as nib from ....interfaces import utility as niu from ... import engine as pe class InputSpec(nib.TraitedSpec): input1 = nib.traits.Int(desc="a random int") input2 = nib.traits.Int(desc="a random int") input_file = nib.File(desc="Random File") class OutputSpec(nib.TraitedSpec): output1 = nib.traits.List(nib.traits.Int, desc="outputs") class EngineTestInterface(nib.SimpleInterface): input_spec = InputSpec output_spec = OutputSpec def _run_interface(self, runtime): runtime.returncode = 0 self._results["output1"] = [1, self.inputs.input1] return runtime @pytest.mark.parametrize("name", ["valid1", "valid_node", "valid-node", "ValidNode0"]) def test_create(name): base = EngineBase(name=name) assert base.name == name @pytest.mark.parametrize( "name", ["invalid*1", "invalid.1", "invalid@", "in/valid", None] ) def test_create_invalid(name): with pytest.raises(ValueError): EngineBase(name=name) def test_hierarchy(): base = EngineBase(name="nodename") base._hierarchy = "some.history.behind" assert base.name == "nodename" assert base.fullname == "some.history.behind.nodename" def test_clone(): base = EngineBase(name="nodename") base2 = base.clone("newnodename") assert ( base.base_dir == base2.base_dir and base.config == base2.config and base2.name 
== "newnodename" ) with pytest.raises(ValueError): base.clone("nodename") def test_clone_node_iterables(tmpdir): tmpdir.chdir() def addstr(string): return "%s + 2" % string subject_list = ["sub-001", "sub-002"] inputnode = pe.Node(niu.IdentityInterface(fields=["subject"]), name="inputnode") inputnode.iterables = [("subject", subject_list)] node_1 = pe.Node( niu.Function(input_names="string", output_names="string", function=addstr), name="node_1", ) node_2 = node_1.clone("node_2") workflow = pe.Workflow(name="iter_clone_wf") workflow.connect( [ (inputnode, node_1, [("subject", "string")]), (node_1, node_2, [("string", "string")]), ] ) workflow.run() nipype-1.7.0/nipype/pipeline/engine/tests/test_engine.py000066400000000000000000000562601413403311400234240ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for the engine module """ from copy import deepcopy from glob import glob import os import pytest from ... import engine as pe from .test_base import EngineTestInterface # Test graph expansion. The following set tests the building blocks # of the graph expansion routine. 
# XXX - SG I'll create a graphical version of these tests and actually # ensure that all connections are tested later @pytest.mark.parametrize( "iterables, expected", [ ({"1": None}, (1, 0)), # test1 ({"1": dict(input1=lambda: [1, 2], input2=lambda: [1, 2])}, (4, 0)), # test2 ], ) def test_1mod(iterables, expected): pipe = pe.Workflow(name="pipe") mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") setattr(mod1, "iterables", iterables["1"]) pipe.add_nodes([mod1]) pipe._flatgraph = pipe._create_flat_graph() pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph)) assert len(pipe._execgraph.nodes()) == expected[0] assert len(pipe._execgraph.edges()) == expected[1] @pytest.mark.parametrize( "iterables, expected", [ ({"1": {}, "2": dict(input1=lambda: [1, 2])}, (3, 2)), # test3 ({"1": dict(input1=lambda: [1, 2]), "2": {}}, (4, 2)), # test4 ( {"1": dict(input1=lambda: [1, 2]), "2": dict(input1=lambda: [1, 2])}, (6, 4), ), # test5 ], ) def test_2mods(iterables, expected): pipe = pe.Workflow(name="pipe") mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") mod2 = pe.Node(interface=EngineTestInterface(), name="mod2") for nr in ["1", "2"]: setattr(eval("mod" + nr), "iterables", iterables[nr]) pipe.connect([(mod1, mod2, [("output1", "input2")])]) pipe._flatgraph = pipe._create_flat_graph() pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph)) assert len(pipe._execgraph.nodes()) == expected[0] assert len(pipe._execgraph.edges()) == expected[1] @pytest.mark.parametrize( "iterables, expected, connect", [ ( {"1": {}, "2": dict(input1=lambda: [1, 2]), "3": {}}, (5, 4), ("1-2", "2-3"), ), # test6 ( {"1": dict(input1=lambda: [1, 2]), "2": {}, "3": {}}, (5, 4), ("1-3", "2-3"), ), # test7 ( { "1": dict(input1=lambda: [1, 2]), "2": dict(input1=lambda: [1, 2]), "3": {}, }, (8, 8), ("1-3", "2-3"), ), # test8 ], ) def test_3mods(iterables, expected, connect): pipe = pe.Workflow(name="pipe") mod1 = pe.Node(interface=EngineTestInterface(), 
name="mod1") mod2 = pe.Node(interface=EngineTestInterface(), name="mod2") mod3 = pe.Node(interface=EngineTestInterface(), name="mod3") for nr in ["1", "2", "3"]: setattr(eval("mod" + nr), "iterables", iterables[nr]) if connect == ("1-2", "2-3"): pipe.connect( [ (mod1, mod2, [("output1", "input2")]), (mod2, mod3, [("output1", "input2")]), ] ) elif connect == ("1-3", "2-3"): pipe.connect( [ (mod1, mod3, [("output1", "input1")]), (mod2, mod3, [("output1", "input2")]), ] ) else: raise Exception( "connect pattern is not implemented yet within the test function" ) pipe._flatgraph = pipe._create_flat_graph() pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph)) assert len(pipe._execgraph.nodes()) == expected[0] assert len(pipe._execgraph.edges()) == expected[1] edgenum = sorted( [ (len(pipe._execgraph.in_edges(node)) + len(pipe._execgraph.out_edges(node))) for node in pipe._execgraph.nodes() ] ) assert edgenum[0] > 0 def test_expansion(): pipe1 = pe.Workflow(name="pipe1") mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") mod2 = pe.Node(interface=EngineTestInterface(), name="mod2") pipe1.connect([(mod1, mod2, [("output1", "input2")])]) pipe2 = pe.Workflow(name="pipe2") mod3 = pe.Node(interface=EngineTestInterface(), name="mod3") mod4 = pe.Node(interface=EngineTestInterface(), name="mod4") pipe2.connect([(mod3, mod4, [("output1", "input2")])]) pipe3 = pe.Workflow(name="pipe3") pipe3.connect([(pipe1, pipe2, [("mod2.output1", "mod4.input1")])]) pipe4 = pe.Workflow(name="pipe4") mod5 = pe.Node(interface=EngineTestInterface(), name="mod5") pipe4.add_nodes([mod5]) pipe5 = pe.Workflow(name="pipe5") pipe5.add_nodes([pipe4]) pipe6 = pe.Workflow(name="pipe6") pipe6.connect([(pipe5, pipe3, [("pipe4.mod5.output1", "pipe2.mod3.input1")])]) pipe6._flatgraph = pipe6._create_flat_graph() def test_iterable_expansion(): wf1 = pe.Workflow(name="test") node1 = pe.Node(EngineTestInterface(), name="node1") node2 = pe.Node(EngineTestInterface(), name="node2") 
node1.iterables = ("input1", [1, 2]) wf1.connect(node1, "output1", node2, "input2") wf3 = pe.Workflow(name="group") for i in [0, 1, 2]: wf3.add_nodes([wf1.clone(name="test%d" % i)]) wf3._flatgraph = wf3._create_flat_graph() assert len(pe.generate_expanded_graph(wf3._flatgraph).nodes()) == 12 def test_synchronize_expansion(): wf1 = pe.Workflow(name="test") node1 = pe.Node(EngineTestInterface(), name="node1") node1.iterables = [("input1", [1, 2]), ("input2", [3, 4, 5])] node1.synchronize = True node2 = pe.Node(EngineTestInterface(), name="node2") wf1.connect(node1, "output1", node2, "input2") wf3 = pe.Workflow(name="group") for i in [0, 1, 2]: wf3.add_nodes([wf1.clone(name="test%d" % i)]) wf3._flatgraph = wf3._create_flat_graph() # Each expanded graph clone has: # 3 node1 expansion nodes and # 1 node2 replicate per node1 replicate # => 2 * 3 = 6 nodes per expanded subgraph # => 18 nodes in the group assert len(pe.generate_expanded_graph(wf3._flatgraph).nodes()) == 18 def test_synchronize_tuples_expansion(): wf1 = pe.Workflow(name="test") node1 = pe.Node(EngineTestInterface(), name="node1") node2 = pe.Node(EngineTestInterface(), name="node2") node1.iterables = [("input1", "input2"), [(1, 3), (2, 4), (None, 5)]] node1.synchronize = True wf1.connect(node1, "output1", node2, "input2") wf3 = pe.Workflow(name="group") for i in [0, 1, 2]: wf3.add_nodes([wf1.clone(name="test%d" % i)]) wf3._flatgraph = wf3._create_flat_graph() # Identical to test_synchronize_expansion assert len(pe.generate_expanded_graph(wf3._flatgraph).nodes()) == 18 def test_itersource_expansion(): wf1 = pe.Workflow(name="test") node1 = pe.Node(EngineTestInterface(), name="node1") node1.iterables = ("input1", [1, 2]) node2 = pe.Node(EngineTestInterface(), name="node2") wf1.connect(node1, "output1", node2, "input1") node3 = pe.Node(EngineTestInterface(), name="node3") node3.itersource = ("node1", "input1") node3.iterables = [("input1", {1: [3, 4], 2: [5, 6, 7]})] wf1.connect(node2, "output1", node3, 
"input1") node4 = pe.Node(EngineTestInterface(), name="node4") wf1.connect(node3, "output1", node4, "input1") wf3 = pe.Workflow(name="group") for i in [0, 1, 2]: wf3.add_nodes([wf1.clone(name="test%d" % i)]) wf3._flatgraph = wf3._create_flat_graph() # each expanded graph clone has: # 2 node1 expansion nodes, # 1 node2 per node1 replicate, # 2 node3 replicates for the node1 input1 value 1, # 3 node3 replicates for the node1 input1 value 2 and # 1 node4 successor per node3 replicate # => 2 + 2 + (2 + 3) + 5 = 14 nodes per expanded graph clone # => 3 * 14 = 42 nodes in the group assert len(pe.generate_expanded_graph(wf3._flatgraph).nodes()) == 42 def test_itersource_synchronize1_expansion(): wf1 = pe.Workflow(name="test") node1 = pe.Node(EngineTestInterface(), name="node1") node1.iterables = [("input1", [1, 2]), ("input2", [3, 4])] node1.synchronize = True node2 = pe.Node(EngineTestInterface(), name="node2") wf1.connect(node1, "output1", node2, "input1") node3 = pe.Node(EngineTestInterface(), name="node3") node3.itersource = ("node1", ["input1", "input2"]) node3.iterables = [ ("input1", {(1, 3): [5, 6]}), ("input2", {(1, 3): [7, 8], (2, 4): [9]}), ] wf1.connect(node2, "output1", node3, "input1") node4 = pe.Node(EngineTestInterface(), name="node4") wf1.connect(node3, "output1", node4, "input1") wf3 = pe.Workflow(name="group") for i in [0, 1, 2]: wf3.add_nodes([wf1.clone(name="test%d" % i)]) wf3._flatgraph = wf3._create_flat_graph() # each expanded graph clone has: # 2 node1 expansion nodes, # 1 node2 per node1 replicate, # 2 node3 replicates for the node1 input1 value 1, # 3 node3 replicates for the node1 input1 value 2 and # 1 node4 successor per node3 replicate # => 2 + 2 + (2 + 3) + 5 = 14 nodes per expanded graph clone # => 3 * 14 = 42 nodes in the group assert len(pe.generate_expanded_graph(wf3._flatgraph).nodes()) == 42 def test_itersource_synchronize2_expansion(): wf1 = pe.Workflow(name="test") node1 = pe.Node(EngineTestInterface(), name="node1") node1.iterables 
= [("input1", [1, 2]), ("input2", [3, 4])] node1.synchronize = True node2 = pe.Node(EngineTestInterface(), name="node2") wf1.connect(node1, "output1", node2, "input1") node3 = pe.Node(EngineTestInterface(), name="node3") node3.itersource = ("node1", ["input1", "input2"]) node3.synchronize = True node3.iterables = [ ("input1", "input2"), {(1, 3): [(5, 7), (6, 8)], (2, 4): [(None, 9)]}, ] wf1.connect(node2, "output1", node3, "input1") node4 = pe.Node(EngineTestInterface(), name="node4") wf1.connect(node3, "output1", node4, "input1") wf3 = pe.Workflow(name="group") for i in [0, 1, 2]: wf3.add_nodes([wf1.clone(name="test%d" % i)]) wf3._flatgraph = wf3._create_flat_graph() # each expanded graph clone has: # 2 node1 expansion nodes, # 1 node2 per node1 replicate, # 2 node3 replicates for the node1 input1 value 1, # 1 node3 replicates for the node1 input1 value 2 and # 1 node4 successor per node3 replicate # => 2 + 2 + (2 + 1) + 3 = 10 nodes per expanded graph clone # => 3 * 10 = 30 nodes in the group assert len(pe.generate_expanded_graph(wf3._flatgraph).nodes()) == 30 def test_old_config(tmpdir): tmpdir.chdir() wd = os.getcwd() from nipype.interfaces.utility import Function def func1(): return 1 def func2(a): return a + 1 n1 = pe.Node( Function(input_names=[], output_names=["a"], function=func1), name="n1" ) n2 = pe.Node( Function(input_names=["a"], output_names=["b"], function=func2), name="n2" ) w1 = pe.Workflow(name="test") modify = lambda x: x + 1 n1.inputs.a = 1 w1.connect(n1, ("a", modify), n2, "a") w1.base_dir = wd w1.config["execution"]["crashdump_dir"] = wd # generate outputs w1.run(plugin="Linear") def test_mapnode_json(tmpdir): """Tests that mapnodes don't generate excess jsons""" tmpdir.chdir() wd = os.getcwd() from nipype import MapNode, Function, Workflow def func1(in1): return in1 + 1 n1 = MapNode( Function(input_names=["in1"], output_names=["out"], function=func1), iterfield=["in1"], name="n1", ) n1.inputs.in1 = [1] w1 = Workflow(name="test") w1.base_dir 
= wd w1.config["execution"]["crashdump_dir"] = wd w1.add_nodes([n1]) w1.run() n1.inputs.in1 = [2] w1.run() # should rerun n1.inputs.in1 = [1] eg = w1.run() node = list(eg.nodes())[0] outjson = glob(os.path.join(node.output_dir(), "_0x*.json")) assert len(outjson) == 1 # check that multiple json's don't trigger rerun with open(os.path.join(node.output_dir(), "test.json"), "wt") as fp: fp.write("dummy file") w1.config["execution"].update(**{"stop_on_first_rerun": True}) w1.run() def test_parameterize_dirs_false(tmpdir): from ....interfaces.utility import IdentityInterface from ....testing import example_data input_file = example_data("fsl_motion_outliers_fd.txt") n1 = pe.Node(EngineTestInterface(), name="Node1") n1.iterables = ("input_file", (input_file, input_file)) n1.interface.inputs.input1 = 1 n2 = pe.Node(IdentityInterface(fields="in1"), name="Node2") wf = pe.Workflow(name="Test") wf.base_dir = tmpdir.strpath wf.config["execution"]["parameterize_dirs"] = False wf.connect([(n1, n2, [("output1", "in1")])]) wf.run() def test_serial_input(tmpdir): tmpdir.chdir() wd = os.getcwd() from nipype import MapNode, Function, Workflow def func1(in1): return in1 n1 = MapNode( Function(input_names=["in1"], output_names=["out"], function=func1), iterfield=["in1"], name="n1", ) n1.inputs.in1 = [1, 2, 3] w1 = Workflow(name="test") w1.base_dir = wd w1.add_nodes([n1]) # set local check w1.config["execution"] = { "stop_on_first_crash": "true", "local_hash_check": "true", "crashdump_dir": wd, "poll_sleep_duration": 2, } # test output of num_subnodes method when serial is default (False) assert n1.num_subnodes() == len(n1.inputs.in1) # test running the workflow on default conditions w1.run(plugin="MultiProc") # test output of num_subnodes method when serial is True n1._serial = True assert n1.num_subnodes() == 1 # test running the workflow on serial conditions w1.run(plugin="MultiProc") def test_write_graph_runs(tmpdir): tmpdir.chdir() for graph in ("orig", "flat", "exec", 
"hierarchical", "colored"): for simple in (True, False): pipe = pe.Workflow(name="pipe") mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") mod2 = pe.Node(interface=EngineTestInterface(), name="mod2") pipe.connect([(mod1, mod2, [("output1", "input1")])]) try: pipe.write_graph(graph2use=graph, simple_form=simple, format="dot") except Exception: assert False, "Failed to plot {} {} graph".format( "simple" if simple else "detailed", graph ) assert os.path.exists("graph.dot") or os.path.exists("graph_detailed.dot") try: os.remove("graph.dot") except OSError: pass try: os.remove("graph_detailed.dot") except OSError: pass def test_deep_nested_write_graph_runs(tmpdir): tmpdir.chdir() for graph in ("orig", "flat", "exec", "hierarchical", "colored"): for simple in (True, False): pipe = pe.Workflow(name="pipe") parent = pipe for depth in range(10): sub = pe.Workflow(name="pipe_nest_{}".format(depth)) parent.add_nodes([sub]) parent = sub mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") parent.add_nodes([mod1]) try: pipe.write_graph(graph2use=graph, simple_form=simple, format="dot") except Exception as e: assert False, "Failed to plot {} {} deep graph: {!s}".format( "simple" if simple else "detailed", graph, e ) assert os.path.exists("graph.dot") or os.path.exists("graph_detailed.dot") try: os.remove("graph.dot") except OSError: pass try: os.remove("graph_detailed.dot") except OSError: pass import networkx # Format of the graph has slightly changed graph_str = '""' if int(networkx.__version__.split(".")[0]) == 1 else "" # examples of dot files used in the following test dotfile_orig = [ "strict digraph " + graph_str + " {\n", '"mod1 (engine)";\n', '"mod2 (engine)";\n', '"mod1 (engine)" -> "mod2 (engine)";\n', "}\n", ] dotfile_detailed_orig = [ "digraph structs {\n", "node [shape=record];\n", 'pipemod1 [label="{IN}|{ mod1 | engine | }|{OUT| output1}"];\n', 'pipemod2 [label="{IN| input1}|{ mod2 | engine | }|{OUT}"];\n', "pipemod1:outoutput1:e -> 
pipemod2:ininput1:w;\n", "}", ] dotfile_hierarchical = [ "digraph pipe{\n", ' label="pipe";\n', ' pipe_mod1[label="mod1 (engine)"];\n', ' pipe_mod2[label="mod2 (engine)"];\n', " pipe_mod1 -> pipe_mod2;\n", "}", ] dotfile_colored = [ "digraph pipe{\n", ' label="pipe";\n', ' pipe_mod1[label="mod1 (engine)", style=filled, fillcolor="#FFFFC8"];\n', ' pipe_mod2[label="mod2 (engine)", style=filled, fillcolor="#FFFFC8"];\n', " pipe_mod1 -> pipe_mod2;\n", "}", ] dotfiles = { "orig": dotfile_orig, "flat": dotfile_orig, "exec": dotfile_orig, "hierarchical": dotfile_hierarchical, "colored": dotfile_colored, } @pytest.mark.parametrize("simple", [True, False]) @pytest.mark.parametrize( "graph_type", ["orig", "flat", "exec", "hierarchical", "colored"] ) def test_write_graph_dotfile(tmpdir, graph_type, simple): """checking dot files for a workflow without iterables""" tmpdir.chdir() pipe = pe.Workflow(name="pipe") mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") mod2 = pe.Node(interface=EngineTestInterface(), name="mod2") pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.write_graph(graph2use=graph_type, simple_form=simple, format="dot") with open("graph.dot") as f: graph_str = f.read() if simple: for line in dotfiles[graph_type]: assert line in graph_str else: # if simple=False graph.dot uses longer names for line in dotfiles[graph_type]: if graph_type in ["hierarchical", "colored"]: assert ( line.replace( "mod1 (engine)", "mod1.EngineTestInterface.engine" ).replace("mod2 (engine)", "mod2.EngineTestInterface.engine") in graph_str ) else: assert ( line.replace( "mod1 (engine)", "pipe.mod1.EngineTestInterface.engine" ).replace("mod2 (engine)", "pipe.mod2.EngineTestInterface.engine") in graph_str ) # graph_detailed is the same for orig, flat, exec (if no iterables) # graph_detailed is not created for hierachical or colored if graph_type not in ["hierarchical", "colored"]: with open("graph_detailed.dot") as f: graph_str = f.read() for line in 
dotfile_detailed_orig: assert line in graph_str # examples of dot files used in the following test dotfile_detailed_iter_exec = [ "digraph structs {\n", "node [shape=record];\n", 'pipemod1aIa1 [label="{IN}|{ a1 | engine | mod1.aI }|{OUT| output1}"];\n', 'pipemod2a1 [label="{IN| input1}|{ a1 | engine | mod2 }|{OUT}"];\n', 'pipemod1aIa0 [label="{IN}|{ a0 | engine | mod1.aI }|{OUT| output1}"];\n', 'pipemod2a0 [label="{IN| input1}|{ a0 | engine | mod2 }|{OUT}"];\n', "pipemod1aIa0:outoutput1:e -> pipemod2a0:ininput1:w;\n", "pipemod1aIa1:outoutput1:e -> pipemod2a1:ininput1:w;\n", "}", ] dotfile_iter_hierarchical = [ "digraph pipe{\n", ' label="pipe";\n', ' pipe_mod1[label="mod1 (engine)", shape=box3d,style=filled, color=black, colorscheme=greys7 fillcolor=2];\n', ' pipe_mod2[label="mod2 (engine)"];\n', " pipe_mod1 -> pipe_mod2;\n", "}", ] dotfile_iter_colored = [ "digraph pipe{\n", ' label="pipe";\n', ' pipe_mod1[label="mod1 (engine)", shape=box3d,style=filled, color=black, colorscheme=greys7 fillcolor=2];\n', ' pipe_mod2[label="mod2 (engine)", style=filled, fillcolor="#FFFFC8"];\n', " pipe_mod1 -> pipe_mod2;\n", "}", ] dotfiles_iter = { "orig": dotfile_orig, "flat": dotfile_orig, "exec": dotfile_orig, "hierarchical": dotfile_iter_hierarchical, "colored": dotfile_iter_colored, } dotfiles_detailed_iter = { "orig": dotfile_detailed_orig, "flat": dotfile_detailed_orig, "exec": dotfile_detailed_iter_exec, } @pytest.mark.parametrize("simple", [True, False]) @pytest.mark.parametrize( "graph_type", ["orig", "flat", "exec", "hierarchical", "colored"] ) def test_write_graph_dotfile_iterables(tmpdir, graph_type, simple): """checking dot files for a workflow with iterables""" tmpdir.chdir() pipe = pe.Workflow(name="pipe") mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") mod1.iterables = ("input1", [1, 2]) mod2 = pe.Node(interface=EngineTestInterface(), name="mod2") pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.write_graph(graph2use=graph_type, 
simple_form=simple, format="dot") with open("graph.dot") as f: graph_str = f.read() if simple: for line in dotfiles_iter[graph_type]: assert line in graph_str else: # if simple=False graph.dot uses longer names for line in dotfiles_iter[graph_type]: if graph_type in ["hierarchical", "colored"]: assert ( line.replace( "mod1 (engine)", "mod1.EngineTestInterface.engine" ).replace("mod2 (engine)", "mod2.EngineTestInterface.engine") in graph_str ) else: assert ( line.replace( "mod1 (engine)", "pipe.mod1.EngineTestInterface.engine" ).replace("mod2 (engine)", "pipe.mod2.EngineTestInterface.engine") in graph_str ) # graph_detailed is not created for hierachical or colored if graph_type not in ["hierarchical", "colored"]: with open("graph_detailed.dot") as f: graph_str = f.read() for line in dotfiles_detailed_iter[graph_type]: assert line in graph_str def test_io_subclass(): """Ensure any io subclass allows dynamic traits""" from nipype.interfaces.io import IOBase from nipype.interfaces.base import DynamicTraitedSpec class TestKV(IOBase): _always_run = True output_spec = DynamicTraitedSpec def _list_outputs(self): outputs = {} outputs["test"] = 1 outputs["foo"] = "bar" return outputs wf = pe.Workflow("testkv") def testx2(test): return test * 2 kvnode = pe.Node(TestKV(), name="testkv") from nipype.interfaces.utility import Function func = pe.Node( Function(input_names=["test"], output_names=["test2"], function=testx2), name="func", ) exception_not_raised = True try: wf.connect(kvnode, "test", func, "test") except Exception as e: if "Module testkv has no output called test" in e: exception_not_raised = False assert exception_not_raised nipype-1.7.0/nipype/pipeline/engine/tests/test_join.py000066400000000000000000000547141413403311400231200ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for join expansion """ import pytest from .... 
import config from ... import engine as pe from ....interfaces import base as nib from ....interfaces.utility import IdentityInterface, Function, Merge from ....interfaces.base import traits, File class PickFirstSpec(nib.TraitedSpec): in_files = traits.List(File(exists=True), argstr="%s", position=2, mandatory=True) class PickFirstOutSpec(nib.TraitedSpec): output1 = File(exists=True) class PickFirst(nib.BaseInterface): input_spec = PickFirstSpec output_spec = PickFirstOutSpec def _run_interface(self, runtime): runtime.returncode = 0 return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["output1"] = self.inputs.in_files[0] return outputs class IncrementInputSpec(nib.TraitedSpec): input1 = nib.traits.Int(mandatory=True, desc="input") inc = nib.traits.Int(usedefault=True, default_value=1, desc="increment") class IncrementOutputSpec(nib.TraitedSpec): output1 = nib.traits.Int(desc="ouput") class IncrementInterface(nib.SimpleInterface): input_spec = IncrementInputSpec output_spec = IncrementOutputSpec def _run_interface(self, runtime): runtime.returncode = 0 self._results["output1"] = self.inputs.input1 + self.inputs.inc return runtime _sums = [] _sum_operands = [] class SumInputSpec(nib.TraitedSpec): input1 = nib.traits.List(nib.traits.Int, mandatory=True, desc="input") class SumOutputSpec(nib.TraitedSpec): output1 = nib.traits.Int(desc="ouput") operands = nib.traits.List(nib.traits.Int, desc="operands") class SumInterface(nib.SimpleInterface): input_spec = SumInputSpec output_spec = SumOutputSpec def _run_interface(self, runtime): global _sum global _sum_operands runtime.returncode = 0 self._results["operands"] = self.inputs.input1 self._results["output1"] = sum(self.inputs.input1) _sum_operands.append(self.inputs.input1) _sums.append(sum(self.inputs.input1)) return runtime _set_len = None """The Set interface execution result.""" class SetInputSpec(nib.TraitedSpec): input1 = nib.traits.Set(nib.traits.Int, mandatory=True, desc="input") class 
SetOutputSpec(nib.TraitedSpec): output1 = nib.traits.Int(desc="ouput") class SetInterface(nib.BaseInterface): input_spec = SetInputSpec output_spec = SetOutputSpec def _run_interface(self, runtime): runtime.returncode = 0 return runtime def _list_outputs(self): global _set_len outputs = self._outputs().get() _set_len = outputs["output1"] = len(self.inputs.input1) return outputs _products = [] """The Products interface execution results.""" class ProductInputSpec(nib.TraitedSpec): input1 = nib.traits.Int(mandatory=True, desc="input1") input2 = nib.traits.Int(mandatory=True, desc="input2") class ProductOutputSpec(nib.TraitedSpec): output1 = nib.traits.Int(mandatory=True, desc="output") class ProductInterface(nib.BaseInterface): input_spec = ProductInputSpec output_spec = ProductOutputSpec def _run_interface(self, runtime): runtime.returncode = 0 return runtime def _list_outputs(self): global _products outputs = self._outputs().get() outputs["output1"] = self.inputs.input1 * self.inputs.input2 _products.append(outputs["output1"]) return outputs @pytest.mark.parametrize("needed_outputs", ["true", "false"]) def test_join_expansion(tmpdir, needed_outputs): global _sums global _sum_operands global _products tmpdir.chdir() # Clean up, just in case some other test modified them _products = [] _sum_operands = [] _sums = [] prev_state = config.get("execution", "remove_unnecessary_outputs") config.set("execution", "remove_unnecessary_outputs", needed_outputs) # Make the workflow. 
wf = pe.Workflow(name="test") # the iterated input node inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") inputspec.iterables = [("n", [1, 2])] # a pre-join node in the iterated path pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") # another pre-join node in the iterated path pre_join2 = pe.Node(IncrementInterface(), name="pre_join2") # the join node join = pe.JoinNode( SumInterface(), joinsource="inputspec", joinfield="input1", name="join" ) # an uniterated post-join node post_join1 = pe.Node(IncrementInterface(), name="post_join1") # a post-join node in the iterated path post_join2 = pe.Node(ProductInterface(), name="post_join2") wf.connect( [ (inputspec, pre_join1, [("n", "input1")]), (pre_join1, pre_join2, [("output1", "input1")]), (pre_join1, post_join2, [("output1", "input2")]), (pre_join2, join, [("output1", "input1")]), (join, post_join1, [("output1", "input1")]), (join, post_join2, [("output1", "input1")]), ] ) result = wf.run() # the two expanded pre-join predecessor nodes feed into one join node joins = [node for node in result.nodes() if node.name == "join"] assert len(joins) == 1, "The number of join result nodes is incorrect." # the expanded graph contains 2 * 2 = 4 iteration pre-join nodes, 1 join # node, 1 non-iterated post-join node and 2 * 1 iteration post-join nodes. # Nipype factors away the IdentityInterface. assert len(result.nodes()) == 8, "The number of expanded nodes is incorrect." # the join Sum result is (1 + 1 + 1) + (2 + 1 + 1) assert len(_sums) == 1, "The number of join outputs is incorrect" assert _sums[0] == 7, "The join Sum output value is incorrect: %s." % _sums[0] # the join input preserves the iterables input order assert _sum_operands[0] == [3, 4], ( "The join Sum input is incorrect: %s." 
% _sum_operands[0] ) # there are two iterations of the post-join node in the iterable path assert len(_products) == 2, "The number of iterated post-join outputs is incorrect" config.set("execution", "remove_unnecessary_outputs", prev_state) def test_node_joinsource(tmpdir): """Test setting the joinsource to a Node.""" tmpdir.chdir() # the iterated input node inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") inputspec.iterables = [("n", [1, 2])] # the join node join = pe.JoinNode( SetInterface(), joinsource=inputspec, joinfield="input1", name="join" ) # the joinsource is the inputspec name assert ( join.joinsource == inputspec.name ), "The joinsource is not set to the node name." def test_set_join_node(tmpdir): """Test collecting join inputs to a set.""" tmpdir.chdir() # Make the workflow. wf = pe.Workflow(name="test") # the iterated input node inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") inputspec.iterables = [("n", [1, 2, 1, 3, 2])] # a pre-join node in the iterated path pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") wf.connect(inputspec, "n", pre_join1, "input1") # the set join node join = pe.JoinNode( SetInterface(), joinsource="inputspec", joinfield="input1", name="join" ) wf.connect(pre_join1, "output1", join, "input1") wf.run() # the join length is the number of unique inputs assert _set_len == 3, "The join Set output value is incorrect: %s." % _set_len def test_unique_join_node(tmpdir): """Test join with the ``unique`` flag set to True.""" global _sum_operands _sum_operands = [] tmpdir.chdir() # Make the workflow. 
wf = pe.Workflow(name="test") # the iterated input node inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") inputspec.iterables = [("n", [3, 1, 2, 1, 3])] # a pre-join node in the iterated path pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") wf.connect(inputspec, "n", pre_join1, "input1") # the set join node join = pe.JoinNode( SumInterface(), joinsource="inputspec", joinfield="input1", unique=True, name="join", ) wf.connect(pre_join1, "output1", join, "input1") wf.run() assert _sum_operands[0] == [4, 2, 3], ( "The unique join output value is incorrect: %s." % _sum_operands[0] ) def test_multiple_join_nodes(tmpdir): """Test two join nodes, one downstream of the other.""" global _products _products = [] tmpdir.chdir() # Make the workflow. wf = pe.Workflow(name="test") # the iterated input node inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") inputspec.iterables = [("n", [1, 2, 3])] # a pre-join node in the iterated path pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") wf.connect(inputspec, "n", pre_join1, "input1") # the first join node join1 = pe.JoinNode( IdentityInterface(fields=["vector"]), joinsource="inputspec", joinfield="vector", name="join1", ) wf.connect(pre_join1, "output1", join1, "vector") # an uniterated post-join node post_join1 = pe.Node(SumInterface(), name="post_join1") wf.connect(join1, "vector", post_join1, "input1") # the downstream join node connected to both an upstream join # path output and a separate input in the iterated path join2 = pe.JoinNode( IdentityInterface(fields=["vector", "scalar"]), joinsource="inputspec", joinfield="vector", name="join2", ) wf.connect(pre_join1, "output1", join2, "vector") wf.connect(post_join1, "output1", join2, "scalar") # a second post-join node post_join2 = pe.Node(SumInterface(), name="post_join2") wf.connect(join2, "vector", post_join2, "input1") # a third post-join node post_join3 = pe.Node(ProductInterface(), name="post_join3") 
wf.connect(post_join2, "output1", post_join3, "input1") wf.connect(join2, "scalar", post_join3, "input2") result = wf.run() # The expanded graph contains one pre_join1 replicate per inputspec # replicate and one of each remaining node = 3 + 5 = 8 nodes. # The replicated inputspec nodes are factored out of the expansion. assert len(result.nodes()) == 8, "The number of expanded nodes is incorrect." # The outputs are: # pre_join1: [2, 3, 4] # post_join1: 9 # join2: [2, 3, 4] and 9 # post_join2: 9 # post_join3: 9 * 9 = 81 assert _products == [81], "The post-join product is incorrect" def test_identity_join_node(tmpdir): """Test an IdentityInterface join.""" global _sum_operands _sum_operands = [] tmpdir.chdir() # Make the workflow. wf = pe.Workflow(name="test") # the iterated input node inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") inputspec.iterables = [("n", [1, 2, 3])] # a pre-join node in the iterated path pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") wf.connect(inputspec, "n", pre_join1, "input1") # the IdentityInterface join node join = pe.JoinNode( IdentityInterface(fields=["vector"]), joinsource="inputspec", joinfield="vector", name="join", ) wf.connect(pre_join1, "output1", join, "vector") # an uniterated post-join node post_join1 = pe.Node(SumInterface(), name="post_join1") wf.connect(join, "vector", post_join1, "input1") result = wf.run() # the expanded graph contains 1 * 3 iteration pre-join nodes, 1 join # node and 1 post-join node. Nipype factors away the iterable input # IdentityInterface but keeps the join IdentityInterface. assert len(result.nodes()) == 5, "The number of expanded nodes is incorrect." assert _sum_operands[0] == [2, 3, 4], ( "The join Sum input is incorrect: %s." % _sum_operands[0] ) def test_multifield_join_node(tmpdir): """Test join on several fields.""" global _products _products = [] tmpdir.chdir() # Make the workflow. 
wf = pe.Workflow(name="test") # the iterated input node inputspec = pe.Node(IdentityInterface(fields=["m", "n"]), name="inputspec") inputspec.iterables = [("m", [1, 2]), ("n", [3, 4])] # two pre-join nodes in a parallel iterated path inc1 = pe.Node(IncrementInterface(), name="inc1") wf.connect(inputspec, "m", inc1, "input1") inc2 = pe.Node(IncrementInterface(), name="inc2") wf.connect(inputspec, "n", inc2, "input1") # the join node join = pe.JoinNode( IdentityInterface(fields=["vector1", "vector2"]), joinsource="inputspec", name="join", ) wf.connect(inc1, "output1", join, "vector1") wf.connect(inc2, "output1", join, "vector2") # a post-join node prod = pe.MapNode(ProductInterface(), name="prod", iterfield=["input1", "input2"]) wf.connect(join, "vector1", prod, "input1") wf.connect(join, "vector2", prod, "input2") result = wf.run() # the iterables are expanded as the cartesian product of the iterables values. # thus, the expanded graph contains 2 * (2 * 2) iteration pre-join nodes, 1 join # node and 1 post-join node. assert len(result.nodes()) == 10, "The number of expanded nodes is incorrect." # the product inputs are [2, 4], [2, 5], [3, 4], [3, 5] assert set(_products) == set([8, 10, 12, 15]), ( "The post-join products is incorrect: %s." % _products ) def test_synchronize_join_node(tmpdir): """Test join on an input node which has the ``synchronize`` flag set to True.""" global _products _products = [] tmpdir.chdir() # Make the workflow. 
wf = pe.Workflow(name="test") # the iterated input node inputspec = pe.Node(IdentityInterface(fields=["m", "n"]), name="inputspec") inputspec.iterables = [("m", [1, 2]), ("n", [3, 4])] inputspec.synchronize = True # two pre-join nodes in a parallel iterated path inc1 = pe.Node(IncrementInterface(), name="inc1") wf.connect(inputspec, "m", inc1, "input1") inc2 = pe.Node(IncrementInterface(), name="inc2") wf.connect(inputspec, "n", inc2, "input1") # the join node join = pe.JoinNode( IdentityInterface(fields=["vector1", "vector2"]), joinsource="inputspec", name="join", ) wf.connect(inc1, "output1", join, "vector1") wf.connect(inc2, "output1", join, "vector2") # a post-join node prod = pe.MapNode(ProductInterface(), name="prod", iterfield=["input1", "input2"]) wf.connect(join, "vector1", prod, "input1") wf.connect(join, "vector2", prod, "input2") result = wf.run() # there are 3 iterables expansions. # thus, the expanded graph contains 2 * 2 iteration pre-join nodes, 1 join # node and 1 post-join node. assert len(result.nodes()) == 6, "The number of expanded nodes is incorrect." # the product inputs are [2, 3] and [4, 5] assert _products == [8, 15], "The post-join products is incorrect: %s." % _products def test_itersource_join_source_node(tmpdir): """Test join on an input node which has an ``itersource``.""" tmpdir.chdir() # Make the workflow. 
wf = pe.Workflow(name="test") # the iterated input node inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") inputspec.iterables = [("n", [1, 2])] # an intermediate node in the first iteration path pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") wf.connect(inputspec, "n", pre_join1, "input1") # an iterable pre-join node with an itersource pre_join2 = pe.Node(ProductInterface(), name="pre_join2") pre_join2.itersource = ("inputspec", "n") pre_join2.iterables = ("input1", {1: [3, 4], 2: [5, 6]}) wf.connect(pre_join1, "output1", pre_join2, "input2") # an intermediate node in the second iteration path pre_join3 = pe.Node(IncrementInterface(), name="pre_join3") wf.connect(pre_join2, "output1", pre_join3, "input1") # the join node join = pe.JoinNode( IdentityInterface(fields=["vector"]), joinsource="pre_join2", joinfield="vector", name="join", ) wf.connect(pre_join3, "output1", join, "vector") # a join successor node post_join1 = pe.Node(SumInterface(), name="post_join1") wf.connect(join, "vector", post_join1, "input1") result = wf.run() # the expanded graph contains # 1 pre_join1 replicate for each inputspec iteration, # 2 pre_join2 replicates for each inputspec iteration, # 1 pre_join3 for each pre_join2 iteration, # 1 join replicate for each inputspec iteration and # 1 post_join1 replicate for each join replicate = # 2 + (2 * 2) + 4 + 2 + 2 = 14 expansion graph nodes. # Nipype factors away the iterable input # IdentityInterface but keeps the join IdentityInterface. assert len(result.nodes()) == 14, "The number of expanded nodes is incorrect." # The first join inputs are: # 1 + (3 * 2) and 1 + (4 * 2) # The second join inputs are: # 1 + (5 * 3) and 1 + (6 * 3) # the post-join nodes execution order is indeterminate; # therefore, compare the lists item-wise. assert [16, 19] in _sum_operands, ( "The join Sum input is incorrect: %s." % _sum_operands ) assert [7, 9] in _sum_operands, ( "The join Sum input is incorrect: %s." 
% _sum_operands ) def test_itersource_two_join_nodes(tmpdir): """Test join with a midstream ``itersource`` and an upstream iterable.""" tmpdir.chdir() # Make the workflow. wf = pe.Workflow(name="test") # the iterated input node inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") inputspec.iterables = [("n", [1, 2])] # an intermediate node in the first iteration path pre_join1 = pe.Node(IncrementInterface(), name="pre_join1") wf.connect(inputspec, "n", pre_join1, "input1") # an iterable pre-join node with an itersource pre_join2 = pe.Node(ProductInterface(), name="pre_join2") pre_join2.itersource = ("inputspec", "n") pre_join2.iterables = ("input1", {1: [3, 4], 2: [5, 6]}) wf.connect(pre_join1, "output1", pre_join2, "input2") # an intermediate node in the second iteration path pre_join3 = pe.Node(IncrementInterface(), name="pre_join3") wf.connect(pre_join2, "output1", pre_join3, "input1") # the first join node join1 = pe.JoinNode( IdentityInterface(fields=["vector"]), joinsource="pre_join2", joinfield="vector", name="join1", ) wf.connect(pre_join3, "output1", join1, "vector") # a join successor node post_join1 = pe.Node(SumInterface(), name="post_join1") wf.connect(join1, "vector", post_join1, "input1") # a summary join node join2 = pe.JoinNode( IdentityInterface(fields=["vector"]), joinsource="inputspec", joinfield="vector", name="join2", ) wf.connect(post_join1, "output1", join2, "vector") result = wf.run() # the expanded graph contains the 14 test_itersource_join_source_node # nodes plus the summary join node. assert len(result.nodes()) == 15, "The number of expanded nodes is incorrect." def test_set_join_node_file_input(tmpdir): """Test collecting join inputs to a set.""" tmpdir.chdir() open("test.nii", "w+").close() open("test2.nii", "w+").close() # Make the workflow. 
wf = pe.Workflow(name="test") # the iterated input node inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") inputspec.iterables = [ ("n", [tmpdir.join("test.nii").strpath, tmpdir.join("test2.nii").strpath]) ] # a pre-join node in the iterated path pre_join1 = pe.Node(IdentityInterface(fields=["n"]), name="pre_join1") wf.connect(inputspec, "n", pre_join1, "n") # the set join node join = pe.JoinNode( PickFirst(), joinsource="inputspec", joinfield="in_files", name="join" ) wf.connect(pre_join1, "n", join, "in_files") wf.run() def test_nested_workflow_join(tmpdir): """Test collecting join inputs within a nested workflow""" tmpdir.chdir() # Make the nested workflow def nested_wf(i, name="smallwf"): # iterables with list of nums inputspec = pe.Node(IdentityInterface(fields=["n"]), name="inputspec") inputspec.iterables = [("n", i)] # increment each iterable before joining pre_join = pe.Node(IncrementInterface(), name="pre_join") # rejoin nums into list join = pe.JoinNode( IdentityInterface(fields=["n"]), joinsource="inputspec", joinfield="n", name="join", ) # define and connect nested workflow wf = pe.Workflow(name="wf_%d" % i[0]) wf.connect(inputspec, "n", pre_join, "input1") wf.connect(pre_join, "output1", join, "n") return wf # master wf meta_wf = pe.Workflow(name="meta", base_dir=".") # add each mini-workflow to master for i in [[1, 3], [2, 4]]: mini_wf = nested_wf(i) meta_wf.add_nodes([mini_wf]) result = meta_wf.run() # there should be six nodes in total assert len(result.nodes()) == 6, "The number of expanded nodes is incorrect." 
def test_name_prefix_join(tmpdir): tmpdir.chdir() def sq(x): return x ** 2 wf = pe.Workflow("wf", base_dir=tmpdir.strpath) square = pe.Node(Function(function=sq), name="square") square.iterables = [("x", [1, 2])] square_join = pe.JoinNode( Merge(1, ravel_inputs=True), name="square_join", joinsource="square", joinfield=["in1"], ) wf.connect(square, "out", square_join, "in1") wf.run() def test_join_nestediters(tmpdir): tmpdir.chdir() def exponent(x, p): return x ** p wf = pe.Workflow("wf", base_dir=tmpdir.strpath) xs = pe.Node(IdentityInterface(["x"]), iterables=[("x", [1, 2])], name="xs") ps = pe.Node(IdentityInterface(["p"]), iterables=[("p", [3, 4])], name="ps") exp = pe.Node(Function(function=exponent), name="exp") exp_joinx = pe.JoinNode( Merge(1, ravel_inputs=True), name="exp_joinx", joinsource="xs", joinfield=["in1"], ) exp_joinp = pe.JoinNode( Merge(1, ravel_inputs=True), name="exp_joinp", joinsource="ps", joinfield=["in1"], ) wf.connect( [ (xs, exp, [("x", "x")]), (ps, exp, [("p", "p")]), (exp, exp_joinx, [("out", "in1")]), (exp_joinx, exp_joinp, [("out", "in1")]), ] ) wf.run() nipype-1.7.0/nipype/pipeline/engine/tests/test_nodes.py000066400000000000000000000227201413403311400232610ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os from copy import deepcopy import pytest from .... import config from ....interfaces import utility as niu from ... 
import engine as pe from ..utils import merge_dict from .test_base import EngineTestInterface from .test_utils import UtilsTestInterface """ Test for order of iterables import nipype.pipeline.engine as pe import nipype.interfaces.utility as niu wf1 = pe.Workflow(name='wf1') node1 = pe.Node(interface=niu.IdentityInterface(fields=['a1','b1']), name='node1') node1.iterables = ('a1', [1,2]) wf1.add_nodes([node1]) wf2 = pe.Workflow(name='wf2') node2 = pe.Node(interface=niu.IdentityInterface(fields=['a2','b2']), name='node2') wf2.add_nodes([node2]) wf1.connect(node1, 'a1', wf2, 'node2.a2') node4 = pe.Node(interface=niu.IdentityInterface(fields=['a4','b4']), name='node4') #node4.iterables = ('a4', [5,6]) wf2.connect(node2, 'b2', node4, 'b4') wf3 = pe.Workflow(name='wf3') node3 = pe.Node(interface=niu.IdentityInterface(fields=['a3','b3']), name='node3') node3.iterables = ('b3', [3,4]) wf3.add_nodes([node3]) wf1.connect(wf3, 'node3.b3', wf2, 'node2.b2') wf1.base_dir = os.path.join(os.getcwd(),'testit') wf1.run(inseries=True, createdirsonly=True) wf1.write_graph(graph2use='exec') """ ''' import nipype.pipeline.engine as pe import nipype.interfaces.spm as spm import os from io import StringIO from nipype.utils.config import config config.readfp(StringIO(""" [execution] remove_unnecessary_outputs = true """)) segment = pe.Node(interface=spm.Segment(), name="segment") segment.inputs.data = os.path.abspath("data/T1.nii") segment.inputs.gm_output_type = [True, True, True] segment.inputs.wm_output_type = [True, True, True] smooth_gm = pe.Node(interface=spm.Smooth(), name="smooth_gm") workflow = pe.Workflow(name="workflow_cleanup_test") workflow.base_dir = os.path.abspath('./workflow_cleanup_test') workflow.connect([(segment, smooth_gm, [('native_gm_image','in_files')])]) workflow.run() #adding new node that uses one of the previously deleted outputs of segment; this should force segment to rerun smooth_wm = pe.Node(interface=spm.Smooth(), name="smooth_wm") 
workflow.connect([(segment, smooth_wm, [('native_wm_image','in_files')])]) workflow.run() workflow.run() ''' # Node def test_node_init(): with pytest.raises(TypeError): pe.Node() with pytest.raises(IOError): pe.Node(EngineTestInterface, name="test") def test_node_get_output(): mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") mod1.inputs.input1 = 1 mod1.run() assert mod1.get_output("output1") == [1, 1] mod1._result = None assert mod1.get_output("output1") == [1, 1] def test_mapnode_iterfield_check(): mod1 = pe.MapNode(EngineTestInterface(), iterfield=["input1"], name="mod1") with pytest.raises(ValueError): mod1._check_iterfield() mod1 = pe.MapNode( EngineTestInterface(), iterfield=["input1", "input2"], name="mod1" ) mod1.inputs.input1 = [1, 2] mod1.inputs.input2 = 3 with pytest.raises(ValueError): mod1._check_iterfield() @pytest.mark.parametrize( "x_inp, f_exp", [ (3, [6]), ([2, 3], [4, 6]), ((2, 3), [4, 6]), (range(3), [0, 2, 4]), ("Str", ["StrStr"]), (["Str1", "Str2"], ["Str1Str1", "Str2Str2"]), ], ) def test_mapnode_iterfield_type(x_inp, f_exp): from nipype import MapNode, Function def double_func(x): return 2 * x double = Function(["x"], ["f_x"], double_func) double_node = MapNode(double, name="double", iterfield=["x"]) double_node.inputs.x = x_inp res = double_node.run() assert res.outputs.f_x == f_exp def test_mapnode_nested(tmpdir): tmpdir.chdir() from nipype import MapNode, Function def func1(in1): return in1 + 1 n1 = MapNode( Function(input_names=["in1"], output_names=["out"], function=func1), iterfield=["in1"], nested=True, name="n1", ) n1.inputs.in1 = [[1, [2]], 3, [4, 5]] n1.run() assert n1.get_output("out") == [[2, [3]], 4, [5, 6]] n2 = MapNode( Function(input_names=["in1"], output_names=["out"], function=func1), iterfield=["in1"], nested=False, name="n1", ) n2.inputs.in1 = [[1, [2]], 3, [4, 5]] with pytest.raises(Exception) as excinfo: n2.run() assert "can only concatenate list" in str(excinfo.value) def test_mapnode_expansion(tmpdir): 
tmpdir.chdir() from nipype import MapNode, Function def func1(in1): return in1 + 1 mapnode = MapNode( Function(function=func1), iterfield="in1", name="mapnode", n_procs=2, mem_gb=2 ) mapnode.inputs.in1 = [1, 2] for idx, node in mapnode._make_nodes(): for attr in ("overwrite", "run_without_submitting", "plugin_args"): assert getattr(node, attr) == getattr(mapnode, attr) for attr in ("_n_procs", "_mem_gb"): assert getattr(node, attr) == getattr(mapnode, attr) def test_node_hash(tmpdir): from nipype.interfaces.utility import Function tmpdir.chdir() config.set_default_config() config.set("execution", "stop_on_first_crash", True) config.set("execution", "crashdump_dir", os.getcwd()) def func1(): return 1 def func2(a): return a + 1 n1 = pe.Node( Function(input_names=[], output_names=["a"], function=func1), name="n1" ) n2 = pe.Node( Function(input_names=["a"], output_names=["b"], function=func2), name="n2" ) w1 = pe.Workflow(name="test") def modify(x): return x + 1 n1.inputs.a = 1 w1.connect(n1, ("a", modify), n2, "a") w1.base_dir = os.getcwd() # create dummy distributed plugin class from nipype.pipeline.plugins.base import DistributedPluginBase # create a custom exception class EngineTestException(Exception): pass class RaiseError(DistributedPluginBase): def _submit_job(self, node, updatehash=False): raise EngineTestException( "Submit called - cached=%s, updated=%s" % node.is_cached() ) # check if a proper exception is raised with pytest.raises(EngineTestException) as excinfo: w1.run(plugin=RaiseError()) assert str(excinfo.value).startswith("Submit called") # generate outputs w1.run(plugin="Linear") # ensure plugin is being called config.set("execution", "local_hash_check", False) # rerun to ensure we have outputs w1.run(plugin="Linear") # set local check config.set("execution", "local_hash_check", True) w1 = pe.Workflow(name="test") w1.connect(n1, ("a", modify), n2, "a") w1.base_dir = os.getcwd() w1.run(plugin=RaiseError()) def test_outputs_removal(tmpdir): def 
test_function(arg1): import os file1 = os.path.join(os.getcwd(), "file1.txt") file2 = os.path.join(os.getcwd(), "file2.txt") with open(file1, "wt") as fp: fp.write("%d" % arg1) with open(file2, "wt") as fp: fp.write("%d" % arg1) return file1, file2 n1 = pe.Node( niu.Function( input_names=["arg1"], output_names=["file1", "file2"], function=test_function, ), base_dir=tmpdir.strpath, name="testoutputs", ) n1.inputs.arg1 = 1 n1.config = {"execution": {"remove_unnecessary_outputs": True}} n1.config = merge_dict(deepcopy(config._sections), n1.config) n1.run() assert tmpdir.join(n1.name, "file1.txt").check() assert tmpdir.join(n1.name, "file1.txt").check() n1.needed_outputs = ["file2"] n1.run() assert not tmpdir.join(n1.name, "file1.txt").check() assert tmpdir.join(n1.name, "file2.txt").check() def test_inputs_removal(tmpdir): file1 = tmpdir.join("file1.txt") file1.write("dummy_file") n1 = pe.Node(UtilsTestInterface(), base_dir=tmpdir.strpath, name="testinputs") n1.inputs.in_file = file1.strpath n1.config = {"execution": {"keep_inputs": True}} n1.config = merge_dict(deepcopy(config._sections), n1.config) n1.run() assert tmpdir.join(n1.name, "file1.txt").check() n1.inputs.in_file = file1.strpath n1.config = {"execution": {"keep_inputs": False}} n1.config = merge_dict(deepcopy(config._sections), n1.config) n1.overwrite = True n1.run() assert not tmpdir.join(n1.name, "file1.txt").check() def test_outputmultipath_collapse(tmpdir): """Test an OutputMultiPath whose initial value is ``[[x]]`` to ensure that it is returned as ``[x]``, regardless of how accessed.""" select_if = niu.Select(inlist=[[1, 2, 3], [4]], index=1) select_nd = pe.Node(niu.Select(inlist=[[1, 2, 3], [4]], index=1), name="select_nd") ifres = select_if.run() ndres = select_nd.run() assert ifres.outputs.out == [4] assert ndres.outputs.out == [4] assert select_nd.result.outputs.out == [4] @pytest.mark.timeout(30) def test_mapnode_single(tmpdir): tmpdir.chdir() def _producer(num=1, deadly_num=7): if num == 
deadly_num: raise RuntimeError("Got the deadly num (%d)." % num) return num + 1 pnode = pe.MapNode( niu.Function(function=_producer), name="ProducerNode", iterfield=["num"] ) pnode.inputs.num = [7] wf = pe.Workflow(name="PC_Workflow") wf.add_nodes([pnode]) wf.base_dir = os.path.abspath("./test_output") with pytest.raises(RuntimeError): wf.run(plugin="MultiProc") nipype-1.7.0/nipype/pipeline/engine/tests/test_utils.py000066400000000000000000000254531413403311400233170ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for the engine utils module """ import os from copy import deepcopy import pytest from ... import engine as pe from ....interfaces import base as nib from ....interfaces import utility as niu from .... import config from ..utils import ( clean_working_directory, write_workflow_prov, load_resultfile, format_node, ) class InputSpec(nib.TraitedSpec): in_file = nib.File(exists=True, copyfile=True) class OutputSpec(nib.TraitedSpec): output1 = nib.traits.List(nib.traits.Int, desc="outputs") class UtilsTestInterface(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec def _run_interface(self, runtime): runtime.returncode = 0 return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["output1"] = [1] return outputs def test_identitynode_removal(tmpdir): def test_function(arg1, arg2, arg3): import numpy as np return (np.array(arg1) + arg2 + arg3).tolist() wf = pe.Workflow(name="testidentity", base_dir=tmpdir.strpath) n1 = pe.Node( niu.IdentityInterface(fields=["a", "b"]), name="src", base_dir=tmpdir.strpath ) n1.iterables = ("b", [0, 1, 2, 3]) n1.inputs.a = [0, 1, 2, 3] n2 = pe.Node(niu.Select(), name="selector", base_dir=tmpdir.strpath) wf.connect(n1, ("a", test_function, 1, -1), n2, "inlist") wf.connect(n1, "b", n2, "index") n3 = pe.Node( niu.IdentityInterface(fields=["c", "d"]), name="passer", 
base_dir=tmpdir.strpath ) n3.inputs.c = [1, 2, 3, 4] wf.connect(n2, "out", n3, "d") n4 = pe.Node(niu.Select(), name="selector2", base_dir=tmpdir.strpath) wf.connect(n3, ("c", test_function, 1, -1), n4, "inlist") wf.connect(n3, "d", n4, "index") fg = wf._create_flat_graph() wf._set_needed_outputs(fg) eg = pe.generate_expanded_graph(deepcopy(fg)) assert len(eg.nodes()) == 8 def test_clean_working_directory(tmpdir): class OutputSpec(nib.TraitedSpec): files = nib.traits.List(nib.File) others = nib.File() class InputSpec(nib.TraitedSpec): infile = nib.File() outputs = OutputSpec() inputs = InputSpec() filenames = [ "file.hdr", "file.img", "file.BRIK", "file.HEAD", "_0x1234.json", "foo.txt", ] outfiles = [] for filename in filenames: outfile = tmpdir.join(filename) outfile.write("dummy") outfiles.append(outfile.strpath) outputs.files = outfiles[:4:2] outputs.others = outfiles[5] inputs.infile = outfiles[-1] needed_outputs = ["files"] config.set_default_config() assert os.path.exists(outfiles[5]) config.set_default_config() config.set("execution", "remove_unnecessary_outputs", False) out = clean_working_directory( outputs, tmpdir.strpath, inputs, needed_outputs, deepcopy(config._sections) ) assert os.path.exists(outfiles[5]) assert out.others == outfiles[5] config.set("execution", "remove_unnecessary_outputs", True) out = clean_working_directory( outputs, tmpdir.strpath, inputs, needed_outputs, deepcopy(config._sections) ) assert os.path.exists(outfiles[1]) assert os.path.exists(outfiles[3]) assert os.path.exists(outfiles[4]) assert not os.path.exists(outfiles[5]) assert out.others == nib.Undefined assert len(out.files) == 2 config.set_default_config() def create_wf(name): """Creates a workflow for the following tests""" def fwhm(fwhm): return fwhm pipe = pe.Workflow(name=name) process = pe.Node( niu.Function(input_names=["fwhm"], output_names=["fwhm"], function=fwhm), name="proc", ) process.iterables = ("fwhm", [0]) process2 = pe.Node( niu.Function(input_names=["fwhm"], 
output_names=["fwhm"], function=fwhm), name="proc2", ) process2.iterables = ("fwhm", [0]) pipe.connect(process, "fwhm", process2, "fwhm") return pipe def test_multi_disconnected_iterable(tmpdir): metawf = pe.Workflow(name="meta") metawf.base_dir = tmpdir.strpath metawf.add_nodes([create_wf("wf%d" % i) for i in range(30)]) eg = metawf.run(plugin="Linear") assert len(eg.nodes()) == 60 def test_provenance(tmpdir): metawf = pe.Workflow(name="meta") metawf.base_dir = tmpdir.strpath metawf.add_nodes([create_wf("wf%d" % i) for i in range(1)]) eg = metawf.run(plugin="Linear") prov_base = tmpdir.join("workflow_provenance_test").strpath psg = write_workflow_prov(eg, prov_base, format="all") assert len(psg.bundles) == 2 assert len(psg.get_records()) == 7 def dummy_func(value): return value + 1 def test_mapnode_crash(tmpdir): """Test mapnode crash when stop_on_first_crash is True""" cwd = os.getcwd() node = pe.MapNode( niu.Function( input_names=["WRONG"], output_names=["newstring"], function=dummy_func ), iterfield=["WRONG"], name="myfunc", ) node.inputs.WRONG = ["string{}".format(i) for i in range(3)] node.config = deepcopy(config._sections) node.config["execution"]["stop_on_first_crash"] = True node.base_dir = tmpdir.strpath with pytest.raises(pe.nodes.NodeExecutionError): node.run() os.chdir(cwd) def test_mapnode_crash2(tmpdir): """Test mapnode crash when stop_on_first_crash is False""" cwd = os.getcwd() node = pe.MapNode( niu.Function( input_names=["WRONG"], output_names=["newstring"], function=dummy_func ), iterfield=["WRONG"], name="myfunc", ) node.inputs.WRONG = ["string{}".format(i) for i in range(3)] node.base_dir = tmpdir.strpath with pytest.raises(Exception): node.run() os.chdir(cwd) def test_mapnode_crash3(tmpdir): """Test mapnode crash when mapnode is embedded in a workflow""" tmpdir.chdir() node = pe.MapNode( niu.Function( input_names=["WRONG"], output_names=["newstring"], function=dummy_func ), iterfield=["WRONG"], name="myfunc", ) node.inputs.WRONG = 
["string{}".format(i) for i in range(3)] wf = pe.Workflow("testmapnodecrash") wf.add_nodes([node]) wf.base_dir = tmpdir.strpath # changing crashdump dir to current working directory (to avoid problems with read-only systems) wf.config["execution"]["crashdump_dir"] = os.getcwd() with pytest.raises(RuntimeError): wf.run(plugin="Linear") class StrPathConfuserInputSpec(nib.TraitedSpec): in_str = nib.traits.String() class StrPathConfuserOutputSpec(nib.TraitedSpec): out_tuple = nib.traits.Tuple(nib.File, nib.traits.String) out_dict_path = nib.traits.Dict(nib.traits.String, nib.File(exists=True)) out_dict_str = nib.traits.DictStrStr() out_list = nib.traits.List(nib.traits.String) out_str = nib.traits.String() out_path = nib.File(exists=True) class StrPathConfuser(nib.SimpleInterface): input_spec = StrPathConfuserInputSpec output_spec = StrPathConfuserOutputSpec def _run_interface(self, runtime): out_path = os.path.abspath(os.path.basename(self.inputs.in_str) + "_path") open(out_path, "w").close() self._results["out_str"] = self.inputs.in_str self._results["out_path"] = out_path self._results["out_tuple"] = (out_path, self.inputs.in_str) self._results["out_dict_path"] = {self.inputs.in_str: out_path} self._results["out_dict_str"] = {self.inputs.in_str: self.inputs.in_str} self._results["out_list"] = [self.inputs.in_str] * 2 return runtime def test_modify_paths_bug(tmpdir): """ There was a bug in which, if the current working directory contained a file with the name of an output String, the string would get transformed into a path, and generally wreak havoc. This attempts to replicate that condition, using an object with strings and paths in various trait configurations, to ensure that the guards added resolve the issue. Please see https://github.com/nipy/nipype/issues/2944 for more details. 
""" tmpdir.chdir() spc = pe.Node(StrPathConfuser(in_str="2"), name="spc") open("2", "w").close() outputs = spc.run().outputs # Basic check that string was not manipulated out_str = outputs.out_str assert out_str == "2" # Check path exists and is absolute out_path = outputs.out_path assert os.path.isabs(out_path) # Assert data structures pass through correctly assert outputs.out_tuple == (out_path, out_str) assert outputs.out_dict_path == {out_str: out_path} assert outputs.out_dict_str == {out_str: out_str} assert outputs.out_list == [out_str] * 2 @pytest.mark.parametrize("use_relative", [True, False]) def test_save_load_resultfile(tmpdir, use_relative): """Test minimally the save/load functions for result files.""" from shutil import copytree, rmtree tmpdir.chdir() old_use_relative = config.getboolean("execution", "use_relative_paths") config.set("execution", "use_relative_paths", use_relative) spc = pe.Node(StrPathConfuser(in_str="2"), name="spc") spc.base_dir = tmpdir.mkdir("node").strpath result = spc.run() loaded_result = load_resultfile( tmpdir.join("node").join("spc").join("result_spc.pklz").strpath ) assert result.runtime.dictcopy() == loaded_result.runtime.dictcopy() assert result.inputs == loaded_result.inputs assert result.outputs.get() == loaded_result.outputs.get() # Test the mobility of the result file. 
copytree(tmpdir.join("node").strpath, tmpdir.join("node2").strpath) rmtree(tmpdir.join("node").strpath) if use_relative: loaded_result2 = load_resultfile( tmpdir.join("node2").join("spc").join("result_spc.pklz").strpath ) assert result.runtime.dictcopy() == loaded_result2.runtime.dictcopy() assert result.inputs == loaded_result2.inputs assert loaded_result2.outputs.get() != result.outputs.get() newpath = result.outputs.out_path.replace("/node/", "/node2/") assert loaded_result2.outputs.out_path == newpath assert loaded_result2.outputs.out_tuple[0] == newpath assert loaded_result2.outputs.out_dict_path["2"] == newpath else: with pytest.raises(nib.TraitError): load_resultfile( tmpdir.join("node2").join("spc").join("result_spc.pklz").strpath ) config.set("execution", "use_relative_paths", old_use_relative) def test_format_node(): node = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="node") serialized = format_node(node) workspace = {"Node": pe.Node} exec("\n".join(serialized), workspace) assert workspace["node"].interface._fields == node.interface._fields nipype-1.7.0/nipype/pipeline/engine/tests/test_workflows.py000066400000000000000000000221651413403311400242110ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for the engine workflows module """ from glob import glob import os from shutil import rmtree from itertools import product import pytest import networkx as nx from .... import config from ....interfaces import utility as niu from ... 
import engine as pe from .test_base import EngineTestInterface from .test_utils import UtilsTestInterface def test_init(): with pytest.raises(TypeError): pe.Workflow() pipe = pe.Workflow(name="pipe") assert type(pipe._graph) == nx.DiGraph def test_connect(): pipe = pe.Workflow(name="pipe") mod2 = pe.Node(EngineTestInterface(), name="mod2") mod1 = pe.Node(EngineTestInterface(), name="mod1") pipe.connect([(mod1, mod2, [("output1", "input1")])]) assert mod1 in pipe._graph.nodes() assert mod2 in pipe._graph.nodes() assert pipe._graph.get_edge_data(mod1, mod2) == {"connect": [("output1", "input1")]} def test_add_nodes(): pipe = pe.Workflow(name="pipe") mod1 = pe.Node(EngineTestInterface(), name="mod1") mod2 = pe.Node(EngineTestInterface(), name="mod2") pipe.add_nodes([mod1, mod2]) assert mod1 in pipe._graph.nodes() assert mod2 in pipe._graph.nodes() def test_disconnect(): a = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="a") b = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="b") flow1 = pe.Workflow(name="test") flow1.connect(a, "a", b, "a") flow1.disconnect(a, "a", b, "a") assert list(flow1._graph.edges()) == [] def test_workflow_add(): n1 = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="n1") n2 = pe.Node(niu.IdentityInterface(fields=["c", "d"]), name="n2") n3 = pe.Node(niu.IdentityInterface(fields=["c", "d"]), name="n1") w1 = pe.Workflow(name="test") w1.connect(n1, "a", n2, "c") for node in [n1, n2, n3]: with pytest.raises(IOError): w1.add_nodes([node]) with pytest.raises(IOError): w1.connect([(w1, n2, [("n1.a", "d")])]) def test_doubleconnect(): a = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="a") b = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="b") flow1 = pe.Workflow(name="test") flow1.connect(a, "a", b, "a") with pytest.raises(Exception) as excinfo: flow1.connect(a, "b", b, "a") assert "Trying to connect" in str(excinfo.value) c = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="c") flow1 = 
pe.Workflow(name="test2") with pytest.raises(Exception) as excinfo: flow1.connect([(a, c, [("b", "b")]), (b, c, [("a", "b")])]) assert "Trying to connect" in str(excinfo.value) def test_nested_workflow_doubleconnect(): # double input with nested workflows a = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="a") b = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="b") c = pe.Node(niu.IdentityInterface(fields=["a", "b"]), name="c") flow1 = pe.Workflow(name="test1") flow2 = pe.Workflow(name="test2") flow3 = pe.Workflow(name="test3") flow1.add_nodes([b]) flow2.connect(a, "a", flow1, "b.a") with pytest.raises(Exception) as excinfo: flow3.connect(c, "a", flow2, "test1.b.a") assert "Some connections were not found" in str(excinfo.value) flow3.connect(c, "b", flow2, "test1.b.b") def test_duplicate_node_check(): wf = pe.Workflow(name="testidentity") original_list = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] selector1 = pe.Node(niu.Select(), name="selector1") selector1.inputs.index = original_list[:-1] selector1.inputs.inlist = original_list selector2 = pe.Node(niu.Select(), name="selector2") selector2.inputs.index = original_list[:-2] selector3 = pe.Node(niu.Select(), name="selector3") selector3.inputs.index = original_list[:-3] selector4 = pe.Node(niu.Select(), name="selector3") selector4.inputs.index = original_list[:-4] wf_connections = [ (selector1, selector2, [("out", "inlist")]), (selector2, selector3, [("out", "inlist")]), (selector3, selector4, [("out", "inlist")]), ] with pytest.raises(IOError) as excinfo: wf.connect(wf_connections) assert 'Duplicate node name "selector3" found.' 
== str(excinfo.value) def _test_function(arg1): import os file1 = os.path.join(os.getcwd(), "file1.txt") file2 = os.path.join(os.getcwd(), "file2.txt") file3 = os.path.join(os.getcwd(), "file3.txt") file4 = os.path.join(os.getcwd(), "subdir", "file4.txt") os.mkdir("subdir") for filename in [file1, file2, file3, file4]: with open(filename, "wt") as fp: fp.write("%d" % arg1) return file1, file2, os.path.join(os.getcwd(), "subdir") def _test_function2(in_file, arg): import os with open(in_file, "rt") as fp: in_arg = fp.read() file1 = os.path.join(os.getcwd(), "file1.txt") file2 = os.path.join(os.getcwd(), "file2.txt") file3 = os.path.join(os.getcwd(), "file3.txt") files = [file1, file2, file3] for filename in files: with open(filename, "wt") as fp: fp.write("%d" % arg + in_arg) return file1, file2, 1 def _test_function3(arg): return arg @pytest.mark.parametrize( "plugin, remove_unnecessary_outputs, keep_inputs", list(product(["Linear", "MultiProc"], [False, True], [True, False])), ) def test_outputs_removal_wf(tmpdir, plugin, remove_unnecessary_outputs, keep_inputs): config.set_default_config() config.set("execution", "remove_unnecessary_outputs", remove_unnecessary_outputs) config.set("execution", "keep_inputs", keep_inputs) n1 = pe.Node( niu.Function( output_names=["out_file1", "out_file2", "dir"], function=_test_function ), name="n1", base_dir=tmpdir.strpath, ) n1.inputs.arg1 = 1 n2 = pe.Node( niu.Function( output_names=["out_file1", "out_file2", "n"], function=_test_function2 ), name="n2", base_dir=tmpdir.strpath, ) n2.inputs.arg = 2 n3 = pe.Node( niu.Function(output_names=["n"], function=_test_function3), name="n3", base_dir=tmpdir.strpath, ) wf = pe.Workflow(name="node_rem_test" + plugin, base_dir=tmpdir.strpath) wf.connect(n1, "out_file1", n2, "in_file") wf.run(plugin=plugin) # Necessary outputs HAVE to exist assert os.path.exists(os.path.join(wf.base_dir, wf.name, n1.name, "file1.txt")) assert os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, 
"file1.txt")) assert os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, "file2.txt")) # Unnecessary outputs exist only iff remove_unnecessary_outputs is True assert ( os.path.exists(os.path.join(wf.base_dir, wf.name, n1.name, "file2.txt")) is not remove_unnecessary_outputs ) assert ( os.path.exists( os.path.join(wf.base_dir, wf.name, n1.name, "subdir", "file4.txt") ) is not remove_unnecessary_outputs ) assert ( os.path.exists(os.path.join(wf.base_dir, wf.name, n1.name, "file3.txt")) is not remove_unnecessary_outputs ) assert ( os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, "file3.txt")) is not remove_unnecessary_outputs ) n4 = pe.Node(UtilsTestInterface(), name="n4", base_dir=tmpdir.strpath) wf.connect(n2, "out_file1", n4, "in_file") def pick_first(l): return l[0] wf.connect(n4, ("output1", pick_first), n3, "arg") rmtree(os.path.join(wf.base_dir, wf.name)) wf.run(plugin=plugin) # Test necessary outputs assert os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, "file1.txt")) assert os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, "file1.txt")) # Test unnecessary outputs assert ( os.path.exists(os.path.join(wf.base_dir, wf.name, n2.name, "file2.txt")) is not remove_unnecessary_outputs ) # Test keep_inputs assert ( os.path.exists(os.path.join(wf.base_dir, wf.name, n4.name, "file1.txt")) is keep_inputs ) def _test_function4(): raise FileNotFoundError("Generic error") def test_config_setting(tmpdir): tmpdir.chdir() wf = pe.Workflow("config") wf.base_dir = os.getcwd() crashdir = os.path.join(os.getcwd(), "crashdir") os.mkdir(crashdir) wf.config = {"execution": {"crashdump_dir": crashdir}} n1 = pe.Node(niu.Function(function=_test_function4), name="errorfunc") wf.add_nodes([n1]) try: wf.run() except RuntimeError: pass fl = glob(os.path.join(crashdir, "crash*")) assert len(fl) == 1 # Now test node overwrite crashdir2 = os.path.join(os.getcwd(), "crashdir2") os.mkdir(crashdir2) crashdir3 = os.path.join(os.getcwd(), "crashdir3") 
os.mkdir(crashdir3) wf.config = {"execution": {"crashdump_dir": crashdir3}} n1.config = {"execution": {"crashdump_dir": crashdir2}} try: wf.run() except RuntimeError: pass fl = glob(os.path.join(crashdir2, "crash*")) assert len(fl) == 1 fl = glob(os.path.join(crashdir3, "crash*")) assert len(fl) == 0 nipype-1.7.0/nipype/pipeline/engine/utils.py000066400000000000000000001715641413403311400211230ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Utility routines for workflow graphs""" import os import sys import pickle from collections import defaultdict import re from copy import deepcopy from glob import glob from pathlib import Path from traceback import format_exception from hashlib import sha1 from functools import reduce import numpy as np from ... import logging, config, LooseVersion from ...utils.filemanip import ( indirectory, relpath, fname_presuffix, ensure_list, get_related_files, save_json, savepkl, loadpkl, write_rst_header, write_rst_dict, write_rst_list, ) from ...utils.misc import str2bool from ...utils.functions import create_function_from_source from ...interfaces.base.traits_extension import ( rebase_path_traits, resolve_path_traits, OutputMultiPath, isdefined, Undefined, ) from ...interfaces.base.support import Bunch, InterfaceResult from ...interfaces.base import CommandLine from ...interfaces.utility import IdentityInterface from ...utils.provenance import ProvStore, pm, nipype_ns, get_id from inspect import signature logger = logging.getLogger("nipype.workflow") def _parameterization_dir(param): """ Returns the directory name for the given parameterization string as follows: - If the parameterization is longer than 32 characters, then return the SHA-1 hex digest. - Otherwise, return the parameterization unchanged. 
""" if len(param) > 32: return sha1(param.encode()).hexdigest() return param def save_hashfile(hashfile, hashed_inputs): """Store a hashfile""" try: save_json(hashfile, hashed_inputs) except (IOError, TypeError): err_type = sys.exc_info()[0] if err_type is TypeError: # XXX - SG current workaround is to just # create the hashed file and not put anything # in it with open(hashfile, "wt") as fd: fd.writelines(str(hashed_inputs)) logger.debug("Unable to write a particular type to the json file") else: logger.critical("Unable to open the file in write mode: %s", hashfile) def nodelist_runner(nodes, updatehash=False, stop_first=False): """ A generator that iterates and over a list of ``nodes`` and executes them. """ for i, node in nodes: err = None result = None try: result = node.run(updatehash=updatehash) except Exception: if stop_first: raise result = node.result err = [] if result.runtime and hasattr(result.runtime, "traceback"): err = [result.runtime.traceback] err += format_exception(*sys.exc_info()) err = "\n".join(err) finally: yield i, result, err def write_node_report(node, result=None, is_mapnode=False): """Write a report file for a node.""" if not str2bool(node.config["execution"]["create_report"]): return cwd = node.output_dir() report_file = Path(cwd) / "_report" / "report.rst" report_file.parent.mkdir(exist_ok=True, parents=True) lines = [ write_rst_header("Node: %s" % get_print_name(node), level=0), write_rst_list(["Hierarchy : %s" % node.fullname, "Exec ID : %s" % node._id]), write_rst_header("Original Inputs", level=1), write_rst_dict(node.inputs.trait_get()), ] if result is None: logger.debug('[Node] Writing pre-exec report to "%s"', report_file) report_file.write_text("\n".join(lines)) return logger.debug('[Node] Writing post-exec report to "%s"', report_file) lines += [ write_rst_header("Execution Inputs", level=1), write_rst_dict(node.inputs.trait_get()), write_rst_header("Execution Outputs", level=1), ] outputs = result.outputs if outputs is None: 
lines += ["None"] report_file.write_text("\n".join(lines)) return if isinstance(outputs, Bunch): lines.append(write_rst_dict(outputs.dictcopy())) elif outputs: lines.append(write_rst_dict(outputs.trait_get())) else: lines += ["Outputs object was empty."] if is_mapnode: lines.append(write_rst_header("Subnode reports", level=1)) nitems = len(ensure_list(getattr(node.inputs, node.iterfield[0]))) subnode_report_files = [] for i in range(nitems): subnode_file = ( Path(cwd) / "mapflow" / ("_%s%d" % (node.name, i)) / "_report" / "report.rst" ) subnode_report_files.append("subnode %d : %s" % (i, subnode_file)) lines.append(write_rst_list(subnode_report_files)) report_file.write_text("\n".join(lines)) return lines.append(write_rst_header("Runtime info", level=1)) # Init rst dictionary of runtime stats rst_dict = { "hostname": result.runtime.hostname, "duration": result.runtime.duration, "working_dir": result.runtime.cwd, "prev_wd": getattr(result.runtime, "prevcwd", ""), } for prop in ("cmdline", "mem_peak_gb", "cpu_percent"): if hasattr(result.runtime, prop): rst_dict[prop] = getattr(result.runtime, prop) lines.append(write_rst_dict(rst_dict)) # Collect terminal output if hasattr(result.runtime, "merged"): lines += [ write_rst_header("Terminal output", level=2), write_rst_list(result.runtime.merged), ] if hasattr(result.runtime, "stdout"): lines += [ write_rst_header("Terminal - standard output", level=2), write_rst_list(result.runtime.stdout), ] if hasattr(result.runtime, "stderr"): lines += [ write_rst_header("Terminal - standard error", level=2), write_rst_list(result.runtime.stderr), ] # Store environment if hasattr(result.runtime, "environ"): lines += [ write_rst_header("Environment", level=2), write_rst_dict(result.runtime.environ), ] report_file.write_text("\n".join(lines)) def write_report(node, report_type=None, is_mapnode=False): """Write a report file for a node - DEPRECATED""" if report_type not in ("preexec", "postexec"): logger.warning('[Node] Unknown report 
type "%s".', report_type) return write_node_report( node, is_mapnode=is_mapnode, result=node.result if report_type == "postexec" else None, ) def save_resultfile(result, cwd, name, rebase=None): """Save a result pklz file to ``cwd``.""" if rebase is None: rebase = config.getboolean("execution", "use_relative_paths") cwd = os.path.abspath(cwd) resultsfile = os.path.join(cwd, "result_%s.pklz" % name) logger.debug("Saving results file: '%s'", resultsfile) if result.outputs is None: logger.warning("Storing result file without outputs") savepkl(resultsfile, result) return try: output_names = result.outputs.copyable_trait_names() except AttributeError: logger.debug("Storing non-traited results, skipping rebase of paths") savepkl(resultsfile, result) return if not rebase: savepkl(resultsfile, result) return backup_traits = {} try: with indirectory(cwd): # All the magic to fix #2944 resides here: for key in output_names: old = getattr(result.outputs, key) if isdefined(old): if result.outputs.trait(key).is_trait_type(OutputMultiPath): old = result.outputs.trait(key).handler.get_value( result.outputs, key ) backup_traits[key] = old val = rebase_path_traits(result.outputs.trait(key), old, cwd) setattr(result.outputs, key, val) savepkl(resultsfile, result) finally: # Restore resolved paths from the outputs dict no matter what for key, val in list(backup_traits.items()): setattr(result.outputs, key, val) def load_resultfile(results_file, resolve=True): """ Load InterfaceResult file from path. Parameters ---------- results_file : pathlike Path to an existing pickle (``result_.pklz``) created with ``save_resultfile``. Raises ``FileNotFoundError`` if ``results_file`` does not exist. resolve : bool Determines whether relative paths will be resolved to absolute (default is ``True``). Returns ------- result : InterfaceResult A Nipype object containing the runtime, inputs, outputs and other interface information such as a traceback in the case of errors. 
""" results_file = Path(results_file) if not results_file.exists(): raise FileNotFoundError(results_file) result = loadpkl(results_file) if resolve and getattr(result, "outputs", None): try: outputs = result.outputs.get() except TypeError: # This is a Bunch logger.debug("Outputs object of loaded result %s is a Bunch.", results_file) return result logger.debug("Resolving paths in outputs loaded from results file.") for trait_name, old in list(outputs.items()): if isdefined(old): if result.outputs.trait(trait_name).is_trait_type(OutputMultiPath): old = result.outputs.trait(trait_name).handler.get_value( result.outputs, trait_name ) value = resolve_path_traits( result.outputs.trait(trait_name), old, results_file.parent ) setattr(result.outputs, trait_name, value) return result def strip_temp(files, wd): """Remove temp from a list of file paths""" out = [] for f in files: if isinstance(f, list): out.append(strip_temp(f, wd)) else: out.append(f.replace(os.path.join(wd, "_tempinput"), wd)) return out def _write_inputs(node): lines = [] nodename = node.fullname.replace(".", "_") for key, _ in list(node.inputs.items()): val = getattr(node.inputs, key) if isdefined(val): if isinstance(val, (str, bytes)): try: func = create_function_from_source(val) except RuntimeError: lines.append("%s.inputs.%s = '%s'" % (nodename, key, val)) else: funcname = [ name for name in func.__globals__ if name != "__builtins__" ][0] lines.append(pickle.loads(val)) if funcname == nodename: lines[-1] = lines[-1].replace( " %s(" % funcname, " %s_1(" % funcname ) funcname = "%s_1" % funcname lines.append("from nipype.utils.functions import getsource") lines.append( "%s.inputs.%s = getsource(%s)" % (nodename, key, funcname) ) else: lines.append("%s.inputs.%s = %s" % (nodename, key, val)) return lines def format_node(node, format="python", include_config=False): """Format a node in a given output syntax.""" from .nodes import MapNode lines = [] name = node.fullname.replace(".", "_") if format == 
"python": klass = node.interface importline = "from %s import %s" % (klass.__module__, klass.__class__.__name__) comment = "# Node: %s" % node.fullname spec = signature(node.interface.__init__) filled_args = [] for param in spec.parameters.values(): val = getattr(node.interface, f"_{param.name}", None) if val is not None: filled_args.append(f"{param.name}={val!r}") args = ", ".join(filled_args) klass_name = klass.__class__.__name__ if isinstance(node, MapNode): nodedef = '%s = MapNode(%s(%s), iterfield=%s, name="%s")' % ( name, klass_name, args, node.iterfield, name, ) else: nodedef = '%s = Node(%s(%s), name="%s")' % (name, klass_name, args, name) lines = [importline, comment, nodedef] if include_config: lines = [ importline, "from collections import OrderedDict", comment, nodedef, ] lines.append("%s.config = %s" % (name, node.config)) if node.iterables is not None: lines.append("%s.iterables = %s" % (name, node.iterables)) lines.extend(_write_inputs(node)) return lines def modify_paths(object, relative=True, basedir=None): """Convert paths in data structure to either full paths or relative paths Supports combinations of lists, dicts, tuples, strs Parameters ---------- relative : boolean indicating whether paths should be set relative to the current directory basedir : default os.getcwd() what base directory to use as default """ if not basedir: basedir = os.getcwd() if isinstance(object, dict): out = {} for key, val in sorted(object.items()): if isdefined(val): out[key] = modify_paths(val, relative=relative, basedir=basedir) elif isinstance(object, (list, tuple)): out = [] for val in object: if isdefined(val): out.append(modify_paths(val, relative=relative, basedir=basedir)) if isinstance(object, tuple): out = tuple(out) else: if isdefined(object): if isinstance(object, (str, bytes)) and os.path.isfile(object): if relative: if config.getboolean("execution", "use_relative_paths"): out = relpath(object, start=basedir) else: out = object else: out = 
os.path.abspath(os.path.join(basedir, object)) if not os.path.exists(out): raise IOError("File %s not found" % out) else: out = object else: raise TypeError("Object {} is undefined".format(object)) return out def get_print_name(node, simple_form=True): """Get the name of the node For example, a node containing an instance of interfaces.fsl.BET would be called nodename.BET.fsl """ name = node.fullname if hasattr(node, "_interface"): pkglist = node.interface.__class__.__module__.split(".") interface = node.interface.__class__.__name__ destclass = "" if len(pkglist) > 2: destclass = ".%s" % pkglist[2] if simple_form: name = node.fullname + destclass else: name = ".".join([node.fullname, interface]) + destclass if simple_form: parts = name.split(".") if len(parts) > 2: return " (".join(parts[1:]) + ")" elif len(parts) == 2: return parts[1] return name def _create_dot_graph(graph, show_connectinfo=False, simple_form=True): """Create a graph that can be pickled. Ensures that edge info is pickleable. 
""" logger.debug("creating dot graph") import networkx as nx pklgraph = nx.DiGraph() for edge in graph.edges(): data = graph.get_edge_data(*edge) srcname = get_print_name(edge[0], simple_form=simple_form) destname = get_print_name(edge[1], simple_form=simple_form) if show_connectinfo: pklgraph.add_edge(srcname, destname, l=str(data["connect"])) else: pklgraph.add_edge(srcname, destname) return pklgraph def _write_detailed_dot(graph, dotfilename): r""" Create a dot file with connection info :: digraph structs { node [shape=record]; struct1 [label=" left| middle| right"]; struct2 [label=" one| two"]; struct3 [label="hello\nworld |{ b |{c| d|e}| f}| g | h"]; struct1:f1 -> struct2:f0; struct1:f0 -> struct2:f1; struct1:f2 -> struct3:here; } """ import networkx as nx text = ["digraph structs {", "node [shape=record];"] # write nodes edges = [] for n in nx.topological_sort(graph): nodename = n.itername inports = [] for u, v, d in graph.in_edges(nbunch=n, data=True): for cd in d["connect"]: if isinstance(cd[0], (str, bytes)): outport = cd[0] else: outport = cd[0][0] inport = cd[1] ipstrip = "in%s" % _replacefunk(inport) opstrip = "out%s" % _replacefunk(outport) edges.append( "%s:%s:e -> %s:%s:w;" % ( u.itername.replace(".", ""), opstrip, v.itername.replace(".", ""), ipstrip, ) ) if inport not in inports: inports.append(inport) inputstr = ( ["{IN"] + ["| %s" % (_replacefunk(ip), ip) for ip in sorted(inports)] + ["}"] ) outports = [] for u, v, d in graph.out_edges(nbunch=n, data=True): for cd in d["connect"]: if isinstance(cd[0], (str, bytes)): outport = cd[0] else: outport = cd[0][0] if outport not in outports: outports.append(outport) outputstr = ( ["{OUT"] + [ "| %s" % (_replacefunk(oport), oport) for oport in sorted(outports) ] + ["}"] ) srcpackage = "" if hasattr(n, "_interface"): pkglist = n.interface.__class__.__module__.split(".") if len(pkglist) > 2: srcpackage = pkglist[2] srchierarchy = ".".join(nodename.split(".")[1:-1]) nodenamestr = "{ %s | %s | %s }" % ( 
nodename.split(".")[-1], srcpackage, srchierarchy, ) text += [ '%s [label="%s|%s|%s"];' % ( nodename.replace(".", ""), "".join(inputstr), nodenamestr, "".join(outputstr), ) ] # write edges for edge in sorted(edges): text.append(edge) text.append("}") with open(dotfilename, "wt") as filep: filep.write("\n".join(text)) return text def _replacefunk(x): return x.replace("_", "").replace(".", "").replace("@", "").replace("-", "") # Graph manipulations for iterable expansion def _get_valid_pathstr(pathstr): """Remove disallowed characters from path Removes: [][ (){}?:<>#!|"';] Replaces: ',' -> '.' """ if not isinstance(pathstr, (str, bytes)): pathstr = str(pathstr) pathstr = pathstr.replace(os.sep, "..") pathstr = re.sub(r"""[][ (){}?:<>#!|"';]""", "", pathstr) pathstr = pathstr.replace(",", ".") return pathstr def expand_iterables(iterables, synchronize=False): if synchronize: return synchronize_iterables(iterables) return list(walk(list(iterables.items()))) def count_iterables(iterables, synchronize=False): """Return the number of iterable expansion nodes. If synchronize is True, then the count is the maximum number of iterables value lists. Otherwise, the count is the product of the iterables value list sizes. """ op = max if synchronize else lambda x, y: x * y return reduce(op, [len(func()) for _, func in list(iterables.items())]) def walk(children, level=0, path=None, usename=True): """Generate all the full paths in a tree, as a dict. 
Examples -------- >>> from nipype.pipeline.engine.utils import walk >>> iterables = [('a', lambda: [1, 2]), ('b', lambda: [3, 4])] >>> [val['a'] for val in walk(iterables)] [1, 1, 2, 2] >>> [val['b'] for val in walk(iterables)] [3, 4, 3, 4] """ # Entry point if level == 0: path = {} # Exit condition if not children: yield path.copy() return # Tree recursion head, tail = children[0], children[1:] name, func = head for child in func(): # We can use the arg name or the tree level as a key if usename: path[name] = child else: path[level] = child # Recurse into the next level for child_paths in walk(tail, level + 1, path, usename): yield child_paths def synchronize_iterables(iterables): """Synchronize the given iterables in item-wise order. Return: the {field: value} dictionary list Examples -------- >>> from nipype.pipeline.engine.utils import synchronize_iterables >>> iterables = dict(a=lambda: [1, 2], b=lambda: [3, 4]) >>> synced = synchronize_iterables(iterables) >>> synced == [{'a': 1, 'b': 3}, {'a': 2, 'b': 4}] True >>> iterables = dict(a=lambda: [1, 2], b=lambda: [3], c=lambda: [4, 5, 6]) >>> synced = synchronize_iterables(iterables) >>> synced == [{'a': 1, 'b': 3, 'c': 4}, {'a': 2, 'c': 5}, {'c': 6}] True """ out_list = [] iterable_items = [ (field, iter(fvals())) for field, fvals in sorted(iterables.items()) ] while True: cur_dict = {} for field, iter_values in iterable_items: try: cur_dict[field] = next(iter_values) except StopIteration: pass if cur_dict: out_list.append(cur_dict) else: break return out_list def evaluate_connect_function(function_source, args, first_arg): func = create_function_from_source(function_source) try: output_value = func(first_arg, *list(args)) except NameError as e: if e.args[0].startswith("global name") and e.args[0].endswith("is not defined"): e.args = ( e.args[0], ( "Due to engine constraints all imports have to be done " "inside each function definition" ), ) raise e return output_value def get_levels(G): import networkx as nx 
levels = {} for n in nx.topological_sort(G): levels[n] = 0 for pred in G.predecessors(n): levels[n] = max(levels[n], levels[pred] + 1) return levels def _merge_graphs( supergraph, nodes, subgraph, nodeid, iterables, prefix, synchronize=False ): """Merges two graphs that share a subset of nodes. If the subgraph needs to be replicated for multiple iterables, the merge happens with every copy of the subgraph. Assumes that edges between nodes of supergraph and subgraph contain data. Parameters ---------- supergraph : networkx graph Parent graph from which subgraph was selected nodes : networkx nodes Nodes of the parent graph from which the subgraph was initially constructed. subgraph : networkx graph A subgraph that contains as a subset nodes from the supergraph. These nodes connect the subgraph to the supergraph nodeid : string Identifier of a node for which parameterization has been sought iterables : dict of functions see `pipeline.NodeWrapper` for iterable requirements Returns ------- Returns a merged graph containing copies of the subgraph with appropriate edge connections to the supergraph. """ # Retrieve edge information connecting nodes of the subgraph to other # nodes of the supergraph. supernodes = supergraph.nodes() ids = [n._hierarchy + n._id for n in supernodes] if len(set(ids)) != len(ids): # This should trap the problem of miswiring when multiple iterables are # used at the same level. The use of the template below for naming # updates to nodes is the general solution. raise Exception( ( "Execution graph does not have a unique set of node " "names. 
Please rerun the workflow" ) ) edgeinfo = {} for n in list(subgraph.nodes()): nidx = ids.index(n._hierarchy + n._id) for edge in supergraph.in_edges(list(supernodes)[nidx]): # make sure edge is not part of subgraph if edge[0] not in subgraph.nodes(): if n._hierarchy + n._id not in list(edgeinfo.keys()): edgeinfo[n._hierarchy + n._id] = [] edgeinfo[n._hierarchy + n._id].append( (edge[0], supergraph.get_edge_data(*edge)) ) supergraph.remove_nodes_from(nodes) # Add copies of the subgraph depending on the number of iterables iterable_params = expand_iterables(iterables, synchronize) # If there are no iterable subgraphs, then return if not iterable_params: return supergraph # Make an iterable subgraph node id template count = len(iterable_params) template = ".%s%%0%dd" % (prefix, np.ceil(np.log10(count))) # Copy the iterable subgraphs for i, params in enumerate(iterable_params): Gc = deepcopy(subgraph) ids = [n._hierarchy + n._id for n in Gc.nodes()] nodeidx = ids.index(nodeid) rootnode = list(Gc.nodes())[nodeidx] paramstr = "" for key, val in sorted(params.items()): paramstr = "{}_{}_{}".format( paramstr, _get_valid_pathstr(key), _get_valid_pathstr(val) ) rootnode.set_input(key, val) logger.debug("Parameterization: paramstr=%s", paramstr) levels = get_levels(Gc) for n in Gc.nodes(): # update parameterization of the node to reflect the location of # the output directory. For example, if the iterables along a # path of the directed graph consisted of the variables 'a' and # 'b', then every node in the path including and after the node # with iterable 'b' will be placed in a directory # _a_aval/_b_bval/. 
path_length = levels[n] # enter as negative numbers so that earlier iterables with longer # path lengths get precedence in a sort paramlist = [(-path_length, paramstr)] if n.parameterization: n.parameterization = paramlist + n.parameterization else: n.parameterization = paramlist supergraph.add_nodes_from(Gc.nodes()) supergraph.add_edges_from(Gc.edges(data=True)) for node in Gc.nodes(): if node._hierarchy + node._id in list(edgeinfo.keys()): for info in edgeinfo[node._hierarchy + node._id]: supergraph.add_edges_from([(info[0], node, info[1])]) node._id += template % i return supergraph def _connect_nodes(graph, srcnode, destnode, connection_info): """Add a connection between two nodes""" data = graph.get_edge_data(srcnode, destnode, default=None) if not data: data = {"connect": connection_info} graph.add_edges_from([(srcnode, destnode, data)]) else: data["connect"].extend(connection_info) def _remove_nonjoin_identity_nodes(graph, keep_iterables=False): """Remove non-join identity nodes from the given graph Iterable nodes are retained if and only if the keep_iterables flag is set to True. """ # if keep_iterables is False, then include the iterable # and join nodes in the nodes to delete for node in _identity_nodes(graph, not keep_iterables): if not hasattr(node, "joinsource"): _remove_identity_node(graph, node) return graph def _identity_nodes(graph, include_iterables): """Return the IdentityInterface nodes in the graph The nodes are in topological sort order. The iterable nodes are included if and only if the include_iterables flag is set to True. 
""" import networkx as nx return [ node for node in nx.topological_sort(graph) if isinstance(node.interface, IdentityInterface) and (include_iterables or getattr(node, "iterables") is None) ] def _remove_identity_node(graph, node): """Remove identity nodes from an execution graph""" portinputs, portoutputs = _node_ports(graph, node) for field, connections in list(portoutputs.items()): if portinputs: _propagate_internal_output(graph, node, field, connections, portinputs) else: _propagate_root_output(graph, node, field, connections) graph.remove_nodes_from([node]) logger.debug("Removed the identity node %s from the graph.", node) def _node_ports(graph, node): """Return the given node's input and output ports The return value is the (inputs, outputs) dictionaries. The inputs is a {destination field: (source node, source field)} dictionary. The outputs is a {source field: destination items} dictionary, where each destination item is a (destination node, destination field, source field) tuple. 
""" portinputs = {} portoutputs = {} for u, _, d in graph.in_edges(node, data=True): for src, dest in d["connect"]: portinputs[dest] = (u, src) for _, v, d in graph.out_edges(node, data=True): for src, dest in d["connect"]: if isinstance(src, tuple): srcport = src[0] else: srcport = src if srcport not in portoutputs: portoutputs[srcport] = [] portoutputs[srcport].append((v, dest, src)) return (portinputs, portoutputs) def _propagate_root_output(graph, node, field, connections): """Propagates the given graph root node output port field connections to the out-edge destination nodes.""" for destnode, inport, src in connections: value = getattr(node.inputs, field) if isinstance(src, tuple): value = evaluate_connect_function(src[1], src[2], value) destnode.set_input(inport, value) def _propagate_internal_output(graph, node, field, connections, portinputs): """Propagates the given graph internal node output port field connections to the out-edge source node and in-edge destination nodes.""" for destnode, inport, src in connections: if field in portinputs: srcnode, srcport = portinputs[field] if isinstance(srcport, tuple) and isinstance(src, tuple): src_func = srcport[1].split("\\n")[0] dst_func = src[1].split("\\n")[0] raise ValueError( "Does not support two inline functions " "in series ('{}' and '{}'), found when " "connecting {} to {}. 
Please use a Function " "node.".format(src_func, dst_func, srcnode, destnode) ) connect = graph.get_edge_data(srcnode, destnode, default={"connect": []}) if isinstance(src, tuple): connect["connect"].append(((srcport, src[1], src[2]), inport)) else: connect = {"connect": [(srcport, inport)]} old_connect = graph.get_edge_data( srcnode, destnode, default={"connect": []} ) old_connect["connect"] += connect["connect"] graph.add_edges_from([(srcnode, destnode, old_connect)]) else: value = getattr(node.inputs, field) if isinstance(src, tuple): value = evaluate_connect_function(src[1], src[2], value) destnode.set_input(inport, value) def generate_expanded_graph(graph_in): """Generates an expanded graph based on node parameterization Parameterization is controlled using the `iterables` field of the pipeline elements. Thus if there are two nodes with iterables a=[1,2] and b=[3,4] this procedure will generate a graph with sub-graphs parameterized as (a=1,b=3), (a=1,b=4), (a=2,b=3) and (a=2,b=4). """ import networkx as nx try: dfs_preorder = nx.dfs_preorder except AttributeError: dfs_preorder = nx.dfs_preorder_nodes logger.debug("PE: expanding iterables") graph_in = _remove_nonjoin_identity_nodes(graph_in, keep_iterables=True) # standardize the iterables as {(field, function)} dictionaries for node in graph_in.nodes(): if node.iterables: _standardize_iterables(node) allprefixes = list("abcdefghijklmnopqrstuvwxyz") # the iterable nodes inodes = _iterable_nodes(graph_in) logger.debug("Detected iterable nodes %s", inodes) # while there is an iterable node, expand the iterable node's # subgraphs while inodes: inode = inodes[0] logger.debug("Expanding the iterable node %s...", inode) # the join successor nodes of the current iterable node jnodes = [ node for node in graph_in.nodes() if hasattr(node, "joinsource") and inode.name == node.joinsource and nx.has_path(graph_in, inode, node) ] # excise the join in-edges. 
save the excised edges in a # {jnode: {source name: (destination name, edge data)}} # dictionary jedge_dict = {} for jnode in jnodes: in_edges = jedge_dict[jnode] = {} edges2remove = [] for src, dest, data in graph_in.in_edges(jnode, True): in_edges[src.itername] = data edges2remove.append((src, dest)) for src, dest in edges2remove: graph_in.remove_edge(src, dest) logger.debug("Excised the %s -> %s join node in-edge.", src, dest) if inode.itersource: # the itersource is a (node name, fields) tuple src_name, src_fields = inode.itersource # convert a single field to a list if isinstance(src_fields, (str, bytes)): src_fields = [src_fields] # find the unique iterable source node in the graph try: iter_src = next( ( node for node in graph_in.nodes() if node.name == src_name and nx.has_path(graph_in, node, inode) ) ) except StopIteration: raise ValueError( "The node %s itersource %s was not found" " among the iterable predecessor nodes" % (inode, src_name) ) logger.debug("The node %s has iterable source node %s", inode, iter_src) # look up the iterables for this particular itersource descendant # using the iterable source ancestor values as a key iterables = {} # the source node iterables values src_values = [getattr(iter_src.inputs, field) for field in src_fields] # if there is one source field, then the key is the the source value, # otherwise the key is the tuple of source values if len(src_values) == 1: key = src_values[0] else: key = tuple(src_values) # The itersource iterables is a {field: lookup} dictionary, where the # lookup is a {source key: iteration list} dictionary. Look up the # current iterable value using the predecessor itersource input values. 
iter_dict = dict( [ (field, lookup[key]) for field, lookup in inode.iterables if key in lookup ] ) # convert the iterables to the standard {field: function} format def make_field_func(*pair): return pair[0], lambda: pair[1] iterables = dict( [make_field_func(*pair) for pair in list(iter_dict.items())] ) else: iterables = inode.iterables.copy() inode.iterables = None logger.debug("node: %s iterables: %s", inode, iterables) # collect the subnodes to expand subnodes = [s for s in dfs_preorder(graph_in, inode)] prior_prefix = [re.findall(r"\.(.)I", s._id) for s in subnodes if s._id] prior_prefix = sorted([l for item in prior_prefix for l in item]) if not prior_prefix: iterable_prefix = "a" else: if prior_prefix[-1] == "z": raise ValueError("Too many iterables in the workflow") iterable_prefix = allprefixes[allprefixes.index(prior_prefix[-1]) + 1] logger.debug(("subnodes:", subnodes)) # append a suffix to the iterable node id inode._id += ".%sI" % iterable_prefix # merge the iterated subgraphs # dj: the behaviour of .copy changes in version 2 if LooseVersion(nx.__version__) < LooseVersion("2"): subgraph = graph_in.subgraph(subnodes) else: subgraph = graph_in.subgraph(subnodes).copy() graph_in = _merge_graphs( graph_in, subnodes, subgraph, inode._hierarchy + inode._id, iterables, iterable_prefix, inode.synchronize, ) # reconnect the join nodes for jnode in jnodes: # the {node id: edge data} dictionary for edges connecting # to the join node in the unexpanded graph old_edge_dict = jedge_dict[jnode] # the edge source node replicates expansions = defaultdict(list) for node in graph_in: for src_id in list(old_edge_dict.keys()): # Drop the original JoinNodes; only concerned with # generated Nodes itername = node.itername if hasattr(node, "joinfield") and itername == src_id: continue # Patterns: # - src_id : Non-iterable node # - src_id.[a-z]\d+ : # IdentityInterface w/ iterables or nested JoinNode # - src_id.[a-z]I.[a-z]\d+ : # Non-IdentityInterface w/ iterables # - 
src_idJ\d+ : JoinNode(IdentityInterface) if itername.startswith(src_id): suffix = itername[len(src_id) :] if re.fullmatch(r"((\.[a-z](I\.[a-z])?|J)\d+)?", suffix): expansions[src_id].append(node) for in_id, in_nodes in list(expansions.items()): logger.debug( "The join node %s input %s was expanded" " to %d nodes.", jnode, in_id, len(in_nodes), ) # preserve the node iteration order by sorting on the node id for in_nodes in list(expansions.values()): in_nodes.sort(key=lambda node: node._id) # the number of join source replicates. iter_cnt = count_iterables(iterables, inode.synchronize) # make new join node fields to connect to each replicated # join in-edge source node. slot_dicts = [jnode._add_join_item_fields() for _ in range(iter_cnt)] # for each join in-edge, connect every expanded source node # which matches on the in-edge source name to the destination # join node. Qualify each edge connect join field name by # appending the next join slot index, e.g. the connect # from two expanded nodes from field 'out_file' to join # field 'in' are qualified as ('out_file', 'in1') and # ('out_file', 'in2'), resp. This preserves connection port # integrity. for old_id, in_nodes in list(expansions.items()): # reconnect each replication of the current join in-edge # source for in_idx, in_node in enumerate(in_nodes): olddata = old_edge_dict[old_id] newdata = deepcopy(olddata) # the (source, destination) field tuples connects = newdata["connect"] # the join fields connected to the source join_fields = [ field for _, field in connects if field in jnode.joinfield ] # the {field: slot fields} maps assigned to the input # node, e.g. 
{'image': 'imageJ3', 'mask': 'maskJ3'} # for the third join source expansion replicate of a # join node with join fields image and mask slots = slot_dicts[in_idx] for con_idx, connect in enumerate(connects): src_field, dest_field = connect # qualify a join destination field name if dest_field in slots: slot_field = slots[dest_field] connects[con_idx] = (src_field, slot_field) logger.debug( "Qualified the %s -> %s join field %s as %s.", in_node, jnode, dest_field, slot_field, ) graph_in.add_edge(in_node, jnode, **newdata) logger.debug( "Connected the join node %s subgraph to the" " expanded join point %s", jnode, in_node, ) # nx.write_dot(graph_in, '%s_post.dot' % node) # the remaining iterable nodes inodes = _iterable_nodes(graph_in) for node in graph_in.nodes(): if node.parameterization: node.parameterization = [ param for _, param in sorted(node.parameterization) ] logger.debug("PE: expanding iterables ... done") return _remove_nonjoin_identity_nodes(graph_in) def _iterable_nodes(graph_in): """Returns the iterable nodes in the given graph and their join dependencies. 
The nodes are ordered as follows: - nodes without an itersource precede nodes with an itersource - nodes without an itersource are sorted in reverse topological order - nodes with an itersource are sorted in topological order This order implies the following: - every iterable node without an itersource is expanded before any node with an itersource - every iterable node without an itersource is expanded before any of it's predecessor iterable nodes without an itersource - every node with an itersource is expanded before any of it's successor nodes with an itersource Return the iterable nodes list """ import networkx as nx nodes = nx.topological_sort(graph_in) inodes = [node for node in nodes if node.iterables is not None] inodes_no_src = [node for node in inodes if not node.itersource] inodes_src = [node for node in inodes if node.itersource] inodes_no_src.reverse() return inodes_no_src + inodes_src def _standardize_iterables(node): """Converts the given iterables to a {field: function} dictionary, if necessary, where the function returns a list.""" if not node.iterables: return iterables = node.iterables # The candidate iterable fields fields = set(node.inputs.copyable_trait_names()) # A synchronize iterables node without an itersource can be in # [fields, value tuples] format rather than # [(field, value list), (field, value list), ...] if node.synchronize: if len(iterables) == 2: first, last = iterables if all( (isinstance(item, (str, bytes)) and item in fields for item in first) ): iterables = _transpose_iterables(first, last) # Convert a tuple to a list if isinstance(iterables, tuple): iterables = [iterables] # Validate the standard [(field, values)] format _validate_iterables(node, iterables, fields) # Convert a list to a dictionary if isinstance(iterables, list): # Convert a values list to a function. This is a legacy # Nipype requirement with unknown rationale. 
if not node.itersource: def make_field_func(*pair): return pair[0], lambda: pair[1] iter_items = [make_field_func(*field_value1) for field_value1 in iterables] iterables = dict(iter_items) node.iterables = iterables def _validate_iterables(node, iterables, fields): """ Raise TypeError if an iterables member is not iterable. Raise ValueError if an iterables member is not a (field, values) pair. Raise ValueError if an iterable field is not in the inputs. """ # The iterables can be a {field: value list} dictionary. if isinstance(iterables, dict): iterables = list(iterables.items()) elif not isinstance(iterables, tuple) and not isinstance(iterables, list): raise ValueError( "The %s iterables type is not a list or a dictionary:" " %s" % (node.name, iterables.__class__) ) for item in iterables: try: if len(item) != 2: raise ValueError( "The %s iterables is not a [(field, values)]" " list" % node.name ) except TypeError as e: raise TypeError( "A %s iterables member is not iterable: %s" % (node.name, e) ) field, _ = item if field not in fields: raise ValueError( "The %s iterables field is unrecognized: %s" % (node.name, field) ) def _transpose_iterables(fields, values): """ Converts the given fields and tuple values into a standardized iterables value. If the input values is a synchronize iterables dictionary, then the result is a (field, {key: values}) list. Otherwise, the result is a list of (field: value list) pairs. 
""" if isinstance(values, dict): transposed = dict([(field, defaultdict(list)) for field in fields]) for key, tuples in list(values.items()): for kvals in tuples: for idx, val in enumerate(kvals): if val is not None: transposed[fields[idx]][key].append(val) return list(transposed.items()) return list( zip( fields, [ [v for v in list(transpose) if v is not None] for transpose in zip(*values) ], ) ) def export_graph( graph_in, base_dir=None, show=False, use_execgraph=False, show_connectinfo=False, dotfilename="graph.dot", format="png", simple_form=True, ): """Displays the graph layout of the pipeline This function requires that pygraphviz and matplotlib are available on the system. Parameters ---------- show : boolean Indicate whether to generate pygraphviz output fromn networkx. default [False] use_execgraph : boolean Indicates whether to use the specification graph or the execution graph. default [False] show_connectioninfo : boolean Indicates whether to show the edge data on the graph. This makes the graph rather cluttered. 
default [False] """ import networkx as nx graph = deepcopy(graph_in) if use_execgraph: graph = generate_expanded_graph(graph) logger.debug("using execgraph") else: logger.debug("using input graph") if base_dir is None: base_dir = os.getcwd() os.makedirs(base_dir, exist_ok=True) out_dot = fname_presuffix( dotfilename, suffix="_detailed.dot", use_ext=False, newpath=base_dir ) _write_detailed_dot(graph, out_dot) # Convert .dot if format != 'dot' outfname, res = _run_dot(out_dot, format_ext=format) if res is not None and res.runtime.returncode: logger.warning("dot2png: %s", res.runtime.stderr) pklgraph = _create_dot_graph(graph, show_connectinfo, simple_form) simple_dot = fname_presuffix( dotfilename, suffix=".dot", use_ext=False, newpath=base_dir ) nx.drawing.nx_pydot.write_dot(pklgraph, simple_dot) # Convert .dot if format != 'dot' simplefname, res = _run_dot(simple_dot, format_ext=format) if res is not None and res.runtime.returncode: logger.warning("dot2png: %s", res.runtime.stderr) if show: pos = nx.graphviz_layout(pklgraph, prog="dot") nx.draw(pklgraph, pos) if show_connectinfo: nx.draw_networkx_edge_labels(pklgraph, pos) return simplefname if simple_form else outfname def format_dot(dotfilename, format="png"): """Dump a directed graph (Linux only; install via `brew` on OSX)""" try: formatted_dot, _ = _run_dot(dotfilename, format_ext=format) except IOError as ioe: if "could not be found" in str(ioe): raise IOError("Cannot draw directed graph; executable 'dot' is unavailable") else: raise ioe return formatted_dot def _run_dot(dotfilename, format_ext): if format_ext == "dot": return dotfilename, None dot_base = os.path.splitext(dotfilename)[0] formatted_dot = "{}.{}".format(dot_base, format_ext) cmd = 'dot -T{} -o"{}" "{}"'.format(format_ext, formatted_dot, dotfilename) res = CommandLine(cmd, terminal_output="allatonce", resource_monitor=False).run() return formatted_dot, res def get_all_files(infile): files = [infile] if infile.endswith(".img"): 
files.append(infile[:-4] + ".hdr") files.append(infile[:-4] + ".mat") if infile.endswith(".img.gz"): files.append(infile[:-7] + ".hdr.gz") return files def walk_outputs(object): """Extract every file and directory from a python structure""" out = [] if isinstance(object, dict): for _, val in sorted(object.items()): if isdefined(val): out.extend(walk_outputs(val)) elif isinstance(object, (list, tuple)): for val in object: if isdefined(val): out.extend(walk_outputs(val)) else: if isdefined(object) and isinstance(object, (str, bytes)): if os.path.islink(object) or os.path.isfile(object): out = [(filename, "f") for filename in get_all_files(object)] elif os.path.isdir(object): out = [(object, "d")] return out def walk_files(cwd): for path, _, files in os.walk(cwd): for f in files: yield os.path.join(path, f) def clean_working_directory( outputs, cwd, inputs, needed_outputs, config, files2keep=None, dirs2keep=None ): """Removes all files not needed for further analysis from the directory""" if not outputs: return outputs_to_keep = list(outputs.trait_get().keys()) if needed_outputs and str2bool(config["execution"]["remove_unnecessary_outputs"]): outputs_to_keep = needed_outputs # build a list of needed files output_files = [] outputdict = outputs.trait_get() for output in outputs_to_keep: output_files.extend(walk_outputs(outputdict[output])) needed_files = [path for path, type in output_files if type == "f"] if str2bool(config["execution"]["keep_inputs"]): input_files = [] inputdict = inputs.trait_get() input_files.extend(walk_outputs(inputdict)) needed_files += [path for path, type in input_files if type == "f"] for extra in [ "_0x*.json", "provenance.*", "pyscript*.m", "pyjobs*.mat", "command.txt", "result*.pklz", "_inputs.pklz", "_node.pklz", ".proc-*", ]: needed_files.extend(glob(os.path.join(cwd, extra))) if files2keep: needed_files.extend(ensure_list(files2keep)) needed_dirs = [path for path, type in output_files if type == "d"] if dirs2keep: 
needed_dirs.extend(ensure_list(dirs2keep)) for extra in ["_nipype", "_report"]: needed_dirs.extend(glob(os.path.join(cwd, extra))) temp = [] for filename in needed_files: temp.extend(get_related_files(filename)) needed_files = temp logger.debug("Needed files: %s", ";".join(needed_files)) logger.debug("Needed dirs: %s", ";".join(needed_dirs)) files2remove = [] if str2bool(config["execution"]["remove_unnecessary_outputs"]): for f in walk_files(cwd): if f not in needed_files: if not needed_dirs: files2remove.append(f) elif not any([f.startswith(dname) for dname in needed_dirs]): files2remove.append(f) else: if not str2bool(config["execution"]["keep_inputs"]): input_files = [] inputdict = inputs.trait_get() input_files.extend(walk_outputs(inputdict)) input_files = [path for path, type in input_files if type == "f"] for f in walk_files(cwd): if f in input_files and f not in needed_files: files2remove.append(f) logger.debug("Removing files: %s", ";".join(files2remove)) for f in files2remove: os.remove(f) for key in outputs.copyable_trait_names(): if key not in outputs_to_keep: setattr(outputs, key, Undefined) return outputs def merge_dict(d1, d2, merge=lambda x, y: y): """ Merges two dictionaries, non-destructively, combining values on duplicate keys as defined by the optional merge function. The default behavior replaces the values in d1 with corresponding values in d2. (There is no other generally applicable merge strategy, but often you'll have homogeneous types in your dicts, so specifying a merge technique can be valuable.) 
Examples: >>> d1 = {'a': 1, 'c': 3, 'b': 2} >>> d2 = merge_dict(d1, d1) >>> len(d2) 3 >>> [d2[k] for k in ['a', 'b', 'c']] [1, 2, 3] >>> d3 = merge_dict(d1, d1, lambda x,y: x+y) >>> len(d3) 3 >>> [d3[k] for k in ['a', 'b', 'c']] [2, 4, 6] """ if not isinstance(d1, dict): return merge(d1, d2) result = dict(d1) if d2 is None: return result for k, v in list(d2.items()): if k in result: result[k] = merge_dict(result[k], v, merge=merge) else: result[k] = v return result def merge_bundles(g1, g2): for rec in g2.get_records(): g1._add_record(rec) return g1 def write_workflow_prov(graph, filename=None, format="all"): """Write W3C PROV Model JSON file""" if not filename: filename = os.path.join(os.getcwd(), "workflow_provenance") ps = ProvStore() processes = [] nodes = graph.nodes() for node in nodes: result = node.result classname = node.interface.__class__.__name__ _, hashval, _, _ = node.hash_exists() attrs = { pm.PROV["type"]: nipype_ns[classname], pm.PROV["label"]: "_".join((classname, node.name)), nipype_ns["hashval"]: hashval, } process = ps.g.activity(get_id(), None, None, attrs) if isinstance(result.runtime, list): process.add_attributes({pm.PROV["type"]: nipype_ns["MapNode"]}) # add info about sub processes for idx, runtime in enumerate(result.runtime): subresult = InterfaceResult(result.interface[idx], runtime, outputs={}) if result.inputs: if idx < len(result.inputs): subresult.inputs = result.inputs[idx] if result.outputs: for key, _ in list(result.outputs.items()): values = getattr(result.outputs, key) if isdefined(values) and idx < len(values): subresult.outputs[key] = values[idx] sub_doc = ProvStore().add_results(subresult) sub_bundle = pm.ProvBundle(sub_doc.get_records(), identifier=get_id()) ps.g.add_bundle(sub_bundle) bundle_entity = ps.g.entity( sub_bundle.identifier, other_attributes={"prov:type": pm.PROV_BUNDLE}, ) ps.g.wasGeneratedBy(bundle_entity, process) else: process.add_attributes({pm.PROV["type"]: nipype_ns["Node"]}) if result.provenance: 
prov_doc = result.provenance else: prov_doc = ProvStore().add_results(result) result_bundle = pm.ProvBundle(prov_doc.get_records(), identifier=get_id()) ps.g.add_bundle(result_bundle) bundle_entity = ps.g.entity( result_bundle.identifier, other_attributes={"prov:type": pm.PROV_BUNDLE} ) ps.g.wasGeneratedBy(bundle_entity, process) processes.append(process) # add dependencies (edges) # Process->Process for idx, edgeinfo in enumerate(graph.in_edges()): ps.g.wasStartedBy( processes[list(nodes).index(edgeinfo[1])], starter=processes[list(nodes).index(edgeinfo[0])], ) # write provenance ps.write_provenance(filename, format=format) return ps.g def write_workflow_resources(graph, filename=None, append=None): """ Generate a JSON file with profiling traces that can be loaded in a pandas DataFrame or processed with JavaScript like D3.js """ import simplejson as json # Overwrite filename if nipype config is set filename = config.get("monitoring", "summary_file", filename) # If filename still does not make sense, store in $PWD if not filename: filename = os.path.join(os.getcwd(), "resource_monitor.json") if append is None: append = str2bool(config.get("monitoring", "summary_append", "true")) big_dict = { "time": [], "name": [], "interface": [], "rss_GiB": [], "vms_GiB": [], "cpus": [], "mapnode": [], "params": [], } # If file exists, just append new profile information # If we append different runs, then we will see different # "bursts" of timestamps corresponding to those executions. 
if append and os.path.isfile(filename): with open(filename, "r") as rsf: big_dict = json.load(rsf) for _, node in enumerate(graph.nodes()): nodename = node.fullname classname = node.interface.__class__.__name__ params = "" if node.parameterization: params = "_".join(["{}".format(p) for p in node.parameterization]) try: rt_list = node.result.runtime except Exception: logger.warning( "Could not access runtime info for node %s" " (%s interface)", nodename, classname, ) continue if not isinstance(rt_list, list): rt_list = [rt_list] for subidx, runtime in enumerate(rt_list): try: nsamples = len(runtime.prof_dict["time"]) except AttributeError: logger.warning( 'Could not retrieve profiling information for node "%s" ' "(mapflow %d/%d).", nodename, subidx + 1, len(rt_list), ) continue for key in ["time", "cpus", "rss_GiB", "vms_GiB"]: big_dict[key] += runtime.prof_dict[key] big_dict["interface"] += [classname] * nsamples big_dict["name"] += [nodename] * nsamples big_dict["mapnode"] += [subidx] * nsamples big_dict["params"] += [params] * nsamples with open(filename, "w") as rsf: json.dump(big_dict, rsf, ensure_ascii=False) return filename def topological_sort(graph, depth_first=False): """Returns a depth first sorted order if depth_first is True""" import networkx as nx nodesort = list(nx.topological_sort(graph)) if not depth_first: return nodesort, None logger.debug("Performing depth first search") nodes = [] groups = [] group = 0 G = nx.Graph() G.add_nodes_from(graph.nodes()) G.add_edges_from(graph.edges()) components = nx.connected_components(G) for desc in components: group += 1 indices = [] for node in desc: indices.append(nodesort.index(node)) nodes.extend( np.array(nodesort)[np.array(indices)[np.argsort(indices)]].tolist() ) for node in desc: nodesort.remove(node) groups.extend([group] * len(desc)) return nodes, groups nipype-1.7.0/nipype/pipeline/engine/workflows.py000066400000000000000000001275231413403311400220140ustar00rootroot00000000000000#!/usr/bin/env python 
# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Defines functionality for pipelined execution of interfaces The `Workflow` class provides core functionality for batch processing. """ import os import os.path as op import sys from datetime import datetime from copy import deepcopy import pickle import shutil import numpy as np from ... import config, logging from ...utils.misc import str2bool from ...utils.functions import getsource, create_function_from_source from ...interfaces.base import traits, TraitedSpec, TraitDictObject, TraitListObject from ...utils.filemanip import save_json from .utils import ( generate_expanded_graph, export_graph, write_workflow_prov, write_workflow_resources, format_dot, topological_sort, get_print_name, merge_dict, format_node, ) from .base import EngineBase from .nodes import MapNode logger = logging.getLogger("nipype.workflow") class Workflow(EngineBase): """Controls the setup and execution of a pipeline of processes.""" def __init__(self, name, base_dir=None): """Create a workflow object. Parameters ---------- name : alphanumeric string unique identifier for the workflow base_dir : string, optional path to workflow storage """ import networkx as nx super(Workflow, self).__init__(name, base_dir) self._graph = nx.DiGraph() self._nodes_cache = set() self._nested_workflows_cache = set() # PUBLIC API def clone(self, name): """Clone a workflow .. note:: Will reset attributes used for executing workflow. See _init_runtime_fields. Parameters ---------- name: alphanumeric name unique name for the workflow """ clone = super(Workflow, self).clone(name) clone._reset_hierarchy() return clone # Graph creation functions def connect(self, *args, **kwargs): """Connect nodes in the pipeline. This routine also checks if inputs and outputs are actually provided by the nodes that are being connected. 
Creates edges in the directed graph using the nodes and edges specified in the `connection_list`. Uses the NetworkX method DiGraph.add_edges_from. Parameters ---------- args : list or a set of four positional arguments Four positional arguments of the form:: connect(source, sourceoutput, dest, destinput) source : nodewrapper node sourceoutput : string (must be in source.outputs) dest : nodewrapper node destinput : string (must be in dest.inputs) A list of 3-tuples of the following form:: [(source, target, [('sourceoutput/attribute', 'targetinput'), ...]), ...] Or:: [(source, target, [(('sourceoutput1', func, arg2, ...), 'targetinput'), ...]), ...] sourceoutput1 will always be the first argument to func and func will be evaluated and the results sent ot targetinput currently func needs to define all its needed imports within the function as we use the inspect module to get at the source code and execute it remotely """ if len(args) == 1: connection_list = args[0] elif len(args) == 4: connection_list = [(args[0], args[2], [(args[1], args[3])])] else: raise TypeError( "connect() takes either 4 arguments, or 1 list of" " connection tuples (%d args given)" % len(args) ) disconnect = False if kwargs: disconnect = kwargs.get("disconnect", False) if disconnect: self.disconnect(connection_list) return newnodes = set() for srcnode, destnode, _ in connection_list: if self in [srcnode, destnode]: msg = ( "Workflow connect cannot contain itself as node:" " src[%s] dest[%s] workflow[%s]" ) % (srcnode, destnode, self.name) raise IOError(msg) if (srcnode not in newnodes) and not self._has_node(srcnode): newnodes.add(srcnode) if (destnode not in newnodes) and not self._has_node(destnode): newnodes.add(destnode) if newnodes: self._check_nodes(newnodes) for node in newnodes: if node._hierarchy is None: node._hierarchy = self.name not_found = [] connected_ports = {} for srcnode, destnode, connects in connection_list: if destnode not in connected_ports: connected_ports[destnode] = 
set() # check to see which ports of destnode are already # connected. if not disconnect and (destnode in self._graph.nodes()): for edge in self._graph.in_edges(destnode): data = self._graph.get_edge_data(*edge) connected_ports[destnode].update( destname for _, destname in data["connect"] ) for source, dest in connects: # Currently datasource/sink/grabber.io modules # determine their inputs/outputs depending on # connection settings. Skip these modules in the check if dest in connected_ports[destnode]: raise Exception( """\ Trying to connect %s:%s to %s:%s but input '%s' of node '%s' is already connected. """ % (srcnode, source, destnode, dest, dest, destnode) ) if not ( hasattr(destnode, "_interface") and ( ".io" in str(destnode._interface.__class__) or any( [ ".io" in str(val) for val in destnode._interface.__class__.__bases__ ] ) ) ): if not destnode._check_inputs(dest): not_found.append(["in", destnode.name, dest]) if not ( hasattr(srcnode, "_interface") and ( ".io" in str(srcnode._interface.__class__) or any( [ ".io" in str(val) for val in srcnode._interface.__class__.__bases__ ] ) ) ): if isinstance(source, tuple): # handles the case that source is specified # with a function sourcename = source[0] elif isinstance(source, (str, bytes)): sourcename = source else: raise Exception( ( "Unknown source specification in " "connection from output of %s" ) % srcnode.name ) if sourcename and not srcnode._check_outputs(sourcename): not_found.append(["out", srcnode.name, sourcename]) connected_ports[destnode].add(dest) infostr = [] for info in not_found: infostr += [ "Module %s has no %sput called %s\n" % (info[1], info[0], info[2]) ] if not_found: raise Exception("\n".join(["Some connections were not found"] + infostr)) # turn functions into strings for srcnode, destnode, connects in connection_list: for idx, (src, dest) in enumerate(connects): if isinstance(src, tuple) and not isinstance(src[1], (str, bytes)): function_source = getsource(src[1]) connects[idx] = 
((src[0], function_source, src[2:]), dest) # add connections for srcnode, destnode, connects in connection_list: edge_data = self._graph.get_edge_data(srcnode, destnode, None) if edge_data: logger.debug( "(%s, %s): Edge data exists: %s", srcnode, destnode, str(edge_data) ) for data in connects: if data not in edge_data["connect"]: edge_data["connect"].append(data) if disconnect: logger.debug("Removing connection: %s", str(data)) edge_data["connect"].remove(data) if edge_data["connect"]: self._graph.add_edges_from([(srcnode, destnode, edge_data)]) else: # pass logger.debug("Removing connection: %s->%s", srcnode, destnode) self._graph.remove_edges_from([(srcnode, destnode)]) elif not disconnect: logger.debug("(%s, %s): No edge data", srcnode, destnode) self._graph.add_edges_from([(srcnode, destnode, {"connect": connects})]) edge_data = self._graph.get_edge_data(srcnode, destnode) logger.debug( "(%s, %s): new edge data: %s", srcnode, destnode, str(edge_data) ) if newnodes: self._update_node_cache() def disconnect(self, *args): """Disconnect nodes See the docstring for connect for format. 
""" if len(args) == 1: connection_list = args[0] elif len(args) == 4: connection_list = [(args[0], args[2], [(args[1], args[3])])] else: raise TypeError( "disconnect() takes either 4 arguments, or 1 list " "of connection tuples (%d args given)" % len(args) ) for srcnode, dstnode, conn in connection_list: logger.debug("disconnect(): %s->%s %s", srcnode, dstnode, str(conn)) if self in [srcnode, dstnode]: raise IOError( "Workflow connect cannot contain itself as node: src[%s] " "dest[%s] workflow[%s]" ) % (srcnode, dstnode, self.name) # If node is not in the graph, not connected if not self._has_node(srcnode) or not self._has_node(dstnode): continue edge_data = self._graph.get_edge_data(srcnode, dstnode, {"connect": []}) ed_conns = [(c[0], c[1]) for c in edge_data["connect"]] remove = [] for edge in conn: if edge in ed_conns: # idx = ed_conns.index(edge) remove.append((edge[0], edge[1])) logger.debug("disconnect(): remove list %s", str(remove)) for el in remove: edge_data["connect"].remove(el) logger.debug("disconnect(): removed connection %s", str(el)) if not edge_data["connect"]: self._graph.remove_edge(srcnode, dstnode) else: self._graph.add_edges_from([(srcnode, dstnode, edge_data)]) def add_nodes(self, nodes): """Add nodes to a workflow Parameters ---------- nodes : list A list of EngineBase-based objects """ newnodes = [] all_nodes = self._get_all_nodes() for node in nodes: if node in all_nodes: raise IOError("Node %s already exists in the workflow" % node) if isinstance(node, Workflow): for subnode in node._get_all_nodes(): if subnode in all_nodes: raise IOError( ("Subnode %s of node %s already exists " "in the workflow") % (subnode, node) ) newnodes.append(node) if not newnodes: logger.debug("no new nodes to add") return for node in newnodes: if not issubclass(node.__class__, EngineBase): raise Exception("Node %s must be a subclass of EngineBase", node) self._check_nodes(newnodes) for node in newnodes: if node._hierarchy is None: node._hierarchy = self.name 
self._graph.add_nodes_from(newnodes) self._update_node_cache() def remove_nodes(self, nodes): """Remove nodes from a workflow Parameters ---------- nodes : list A list of EngineBase-based objects """ self._graph.remove_nodes_from(nodes) self._update_node_cache() # Input-Output access @property def inputs(self): return self._get_inputs() @property def outputs(self): return self._get_outputs() def get_node(self, name): """Return an internal node by name""" nodenames = name.split(".") nodename = nodenames[0] outnode = [ node for node in self._graph.nodes() if str(node).endswith("." + nodename) ] if outnode: outnode = outnode[0] if nodenames[1:] and issubclass(outnode.__class__, Workflow): outnode = outnode.get_node(".".join(nodenames[1:])) else: outnode = None return outnode def list_node_names(self): """List names of all nodes in a workflow""" import networkx as nx outlist = [] for node in nx.topological_sort(self._graph): if isinstance(node, Workflow): outlist.extend( [ ".".join((node.name, nodename)) for nodename in node.list_node_names() ] ) else: outlist.append(node.name) return sorted(outlist) def write_graph( self, dotfilename="graph.dot", graph2use="hierarchical", format="png", simple_form=True, ): """Generates a graphviz dot file and a png file Parameters ---------- graph2use: 'orig', 'hierarchical' (default), 'flat', 'exec', 'colored' orig - creates a top level graph without expanding internal workflow nodes; flat - expands workflow nodes recursively; hierarchical - expands workflow nodes recursively with a notion on hierarchy; colored - expands workflow nodes recursively with a notion on hierarchy in color; exec - expands workflows to depict iterables format: 'png', 'svg' simple_form: boolean (default: True) Determines if the node name used in the graph should be of the form 'nodename (package)' when True or 'nodename.Class.package' when False. 
""" graphtypes = ["orig", "flat", "hierarchical", "exec", "colored"] if graph2use not in graphtypes: raise ValueError( "Unknown graph2use keyword. Must be one of: " + str(graphtypes) ) base_dir, dotfilename = op.split(dotfilename) if base_dir == "": if self.base_dir: base_dir = self.base_dir if self.name: base_dir = op.join(base_dir, self.name) else: base_dir = os.getcwd() os.makedirs(base_dir, exist_ok=True) if graph2use in ["hierarchical", "colored"]: if self.name[:1].isdigit(): # these graphs break if int raise ValueError( "{} graph failed, workflow name cannot begin " "with a number".format(graph2use) ) dotfilename = op.join(base_dir, dotfilename) self.write_hierarchical_dotfile( dotfilename=dotfilename, colored=graph2use == "colored", simple_form=simple_form, ) outfname = format_dot(dotfilename, format=format) else: graph = self._graph if graph2use in ["flat", "exec"]: graph = self._create_flat_graph() if graph2use == "exec": graph = generate_expanded_graph(deepcopy(graph)) outfname = export_graph( graph, base_dir, dotfilename=dotfilename, format=format, simple_form=simple_form, ) logger.info( "Generated workflow graph: %s (graph2use=%s, simple_form=%s)." 
% (outfname, graph2use, simple_form) ) return outfname def write_hierarchical_dotfile( self, dotfilename=None, colored=False, simple_form=True ): dotlist = ["digraph %s{" % self.name] dotlist.append( self._get_dot(prefix=" ", colored=colored, simple_form=simple_form) ) dotlist.append("}") dotstr = "\n".join(dotlist) if dotfilename: fp = open(dotfilename, "wt") fp.writelines(dotstr) fp.close() else: logger.info(dotstr) def export( self, filename=None, prefix="output", format="python", include_config=False ): """Export object into a different format Parameters ---------- filename: string file to save the code to; overrides prefix prefix: string prefix to use for output file format: string one of "python" include_config: boolean whether to include node and workflow config values """ import networkx as nx formats = ["python"] if format not in formats: raise ValueError("format must be one of: %s" % "|".join(formats)) flatgraph = self._create_flat_graph() nodes = nx.topological_sort(flatgraph) all_lines = None lines = ["# Workflow"] importlines = ["from nipype.pipeline.engine import Workflow, " "Node, MapNode"] functions = {} if format == "python": connect_template = '%s.connect(%%s, %%s, %%s, "%%s")' % self.name connect_template2 = '%s.connect(%%s, "%%s", %%s, "%%s")' % self.name wfdef = '%s = Workflow("%s")' % (self.name, self.name) lines.append(wfdef) if include_config: lines.append("%s.config = %s" % (self.name, self.config)) for idx, node in enumerate(nodes): nodename = node.fullname.replace(".", "_") # write nodes nodelines = format_node( node, format="python", include_config=include_config ) for line in nodelines: if line.startswith("from"): if line not in importlines: importlines.append(line) else: lines.append(line) # write connections for u, _, d in flatgraph.in_edges(nbunch=node, data=True): for cd in d["connect"]: if isinstance(cd[0], tuple): args = list(cd[0]) if args[1] in functions: funcname = functions[args[1]] else: func = 
create_function_from_source(args[1]) funcname = [ name for name in func.__globals__ if name != "__builtins__" ][0] functions[args[1]] = funcname args[1] = funcname args = tuple([arg for arg in args if arg]) line_args = ( u.fullname.replace(".", "_"), args, nodename, cd[1], ) line = connect_template % line_args line = line.replace("'%s'" % funcname, funcname) lines.append(line) else: line_args = ( u.fullname.replace(".", "_"), cd[0], nodename, cd[1], ) lines.append(connect_template2 % line_args) functionlines = ["# Functions"] for function in functions: functionlines.append(pickle.loads(function).rstrip()) all_lines = importlines + functionlines + lines if not filename: filename = "%s%s.py" % (prefix, self.name) with open(filename, "wt") as fp: fp.writelines("\n".join(all_lines)) return all_lines def run(self, plugin=None, plugin_args=None, updatehash=False): """Execute the workflow Parameters ---------- plugin: plugin name or object Plugin to use for execution. You can create your own plugins for execution. plugin_args : dictionary containing arguments to be sent to plugin constructor. see individual plugin doc strings for details. 
""" if plugin is None: plugin = config.get("execution", "plugin") if not isinstance(plugin, (str, bytes)): runner = plugin plugin = runner.__class__.__name__[: -len("Plugin")] plugin_args = runner.plugin_args else: name = ".".join(__name__.split(".")[:-2] + ["plugins"]) try: __import__(name) except ImportError: msg = "Could not import plugin module: %s" % name logger.error(msg) raise ImportError(msg) else: plugin_mod = getattr(sys.modules[name], "%sPlugin" % plugin) runner = plugin_mod(plugin_args=plugin_args) flatgraph = self._create_flat_graph() self.config = merge_dict(deepcopy(config._sections), self.config) logger.info("Workflow %s settings: %s", self.name, str(sorted(self.config))) self._set_needed_outputs(flatgraph) execgraph = generate_expanded_graph(deepcopy(flatgraph)) for index, node in enumerate(execgraph.nodes()): node.config = merge_dict(deepcopy(self.config), node.config) node.base_dir = self.base_dir node.index = index if isinstance(node, MapNode): node.use_plugin = (plugin, plugin_args) self._configure_exec_nodes(execgraph) if str2bool(self.config["execution"]["create_report"]): self._write_report_info(self.base_dir, self.name, execgraph) runner.run(execgraph, updatehash=updatehash, config=self.config) datestr = datetime.utcnow().strftime("%Y%m%dT%H%M%S") if str2bool(self.config["execution"]["write_provenance"]): prov_base = op.join(self.base_dir, "workflow_provenance_%s" % datestr) logger.info("Provenance file prefix: %s" % prov_base) write_workflow_prov(execgraph, prov_base, format="all") if config.resource_monitor: base_dir = self.base_dir or os.getcwd() write_workflow_resources( execgraph, filename=op.join(base_dir, self.name, "resource_monitor.json"), ) return execgraph # PRIVATE API AND FUNCTIONS def _write_report_info(self, workingdir, name, graph): if workingdir is None: workingdir = os.getcwd() report_dir = op.join(workingdir, name) os.makedirs(report_dir, exist_ok=True) shutil.copyfile( op.join(op.dirname(__file__), 
"report_template.html"), op.join(report_dir, "index.html"), ) shutil.copyfile( op.join(op.dirname(__file__), "..", "..", "external", "d3.js"), op.join(report_dir, "d3.js"), ) nodes, groups = topological_sort(graph, depth_first=True) graph_file = op.join(report_dir, "graph1.json") json_dict = {"nodes": [], "links": [], "groups": [], "maxN": 0} for i, node in enumerate(nodes): report_file = "%s/_report/report.rst" % node.output_dir().replace( report_dir, "" ) result_file = "%s/result_%s.pklz" % ( node.output_dir().replace(report_dir, ""), node.name, ) json_dict["nodes"].append( dict( name="%d_%s" % (i, node.name), report=report_file, result=result_file, group=groups[i], ) ) maxN = 0 for gid in np.unique(groups): procs = [i for i, val in enumerate(groups) if val == gid] N = len(procs) if N > maxN: maxN = N json_dict["groups"].append( dict(procs=procs, total=N, name="Group_%05d" % gid) ) json_dict["maxN"] = maxN for u, v in graph.in_edges(): json_dict["links"].append( dict(source=nodes.index(u), target=nodes.index(v), value=1) ) save_json(graph_file, json_dict) graph_file = op.join(report_dir, "graph.json") # Avoid RuntimeWarning: divide by zero encountered in log10 num_nodes = len(nodes) if num_nodes > 0: index_name = np.ceil(np.log10(num_nodes)).astype(int) else: index_name = 0 template = "%%0%dd_" % index_name def getname(u, i): name_parts = u.fullname.split(".") # return '.'.join(name_parts[:-1] + [template % i + name_parts[-1]]) return template % i + name_parts[-1] json_dict = [] for i, node in enumerate(nodes): imports = [] for u, v in graph.in_edges(nbunch=node): imports.append(getname(u, nodes.index(u))) json_dict.append( dict(name=getname(node, i), size=1, group=groups[i], imports=imports) ) save_json(graph_file, json_dict) def _set_needed_outputs(self, graph): """Initialize node with list of which outputs are needed.""" rm_outputs = self.config["execution"]["remove_unnecessary_outputs"] if not str2bool(rm_outputs): return for node in graph.nodes(): 
node.needed_outputs = [] for edge in graph.out_edges(node): data = graph.get_edge_data(*edge) sourceinfo = [ v1[0] if isinstance(v1, tuple) else v1 for v1, v2 in data["connect"] ] node.needed_outputs += [ v for v in sourceinfo if v not in node.needed_outputs ] if node.needed_outputs: node.needed_outputs = sorted(node.needed_outputs) def _configure_exec_nodes(self, graph): """Ensure that each node knows where to get inputs from""" for node in graph.nodes(): node.input_source = {} for edge in graph.in_edges(node): data = graph.get_edge_data(*edge) for sourceinfo, field in data["connect"]: node.input_source[field] = ( op.join(edge[0].output_dir(), "result_%s.pklz" % edge[0].name), sourceinfo, ) def _check_nodes(self, nodes): """Checks if any of the nodes are already in the graph""" node_names = [node.name for node in self._graph.nodes()] node_lineage = [node._hierarchy for node in self._graph.nodes()] for node in nodes: if node.name in node_names: idx = node_names.index(node.name) try: this_node_lineage = node_lineage[idx] except IndexError: raise IOError('Duplicate node name "%s" found.' % node.name) else: if this_node_lineage in [node._hierarchy, self.name]: raise IOError('Duplicate node name "%s" found.' 
% node.name) else: node_names.append(node.name) def _has_attr(self, parameter, subtype="in"): """Checks if a parameter is available as an input or output""" hierarchy = parameter.split(".") # Connecting to a workflow needs at least two values, # the name of the child node and the name of the input/output if len(hierarchy) < 2: return False attrname = hierarchy.pop() nodename = hierarchy.pop() def _check_is_already_connected(workflow, node, attrname): for _, _, d in workflow._graph.in_edges(nbunch=node, data=True): for cd in d["connect"]: if attrname == cd[1]: return False return True targetworkflow = self while hierarchy: workflowname = hierarchy.pop(0) workflow = None for node in targetworkflow._graph.nodes(): if node.name == workflowname: if isinstance(node, Workflow): workflow = node break if workflow is None: return False # Verify input does not already have an incoming connection # in the hierarchy of workflows if subtype == "in": hierattrname = ".".join(hierarchy + [nodename, attrname]) if not _check_is_already_connected( targetworkflow, workflow, hierattrname ): return False targetworkflow = workflow targetnode = None for node in targetworkflow._graph.nodes(): if node.name == nodename: if isinstance(node, Workflow): return False else: targetnode = node break if targetnode is None: return False if subtype == "in": if not hasattr(targetnode.inputs, attrname): return False else: if not hasattr(targetnode.outputs, attrname): return False # Verify input does not already have an incoming connection # in the target workflow if subtype == "in": if not _check_is_already_connected(targetworkflow, targetnode, attrname): return False return True def _check_outputs(self, parameter): return self._has_attr(parameter, subtype="out") def _check_inputs(self, parameter): return self._has_attr(parameter, subtype="in") def _get_inputs(self): """Returns the inputs of a workflow This function does not return any input ports that are already connected """ inputdict = TraitedSpec() 
for node in self._graph.nodes(): inputdict.add_trait(node.name, traits.Instance(TraitedSpec)) if isinstance(node, Workflow): setattr(inputdict, node.name, node.inputs) else: taken_inputs = [] for _, _, d in self._graph.in_edges(nbunch=node, data=True): for cd in d["connect"]: taken_inputs.append(cd[1]) unconnectedinputs = TraitedSpec() for key, trait in list(node.inputs.items()): if key not in taken_inputs: unconnectedinputs.add_trait(key, traits.Trait(trait, node=node)) value = getattr(node.inputs, key) setattr(unconnectedinputs, key, value) setattr(inputdict, node.name, unconnectedinputs) getattr(inputdict, node.name).on_trait_change(self._set_input) return inputdict def _get_outputs(self): """Returns all possible output ports that are not already connected""" outputdict = TraitedSpec() for node in self._graph.nodes(): outputdict.add_trait(node.name, traits.Instance(TraitedSpec)) if isinstance(node, Workflow): setattr(outputdict, node.name, node.outputs) elif node.outputs: outputs = TraitedSpec() for key, _ in list(node.outputs.items()): outputs.add_trait(key, traits.Any(node=node)) setattr(outputs, key, None) setattr(outputdict, node.name, outputs) return outputdict def _set_input(self, objekt, name, newvalue): """Trait callback function to update a node input""" objekt.traits()[name].node.set_input(name, newvalue) def _set_node_input(self, node, param, source, sourceinfo): """Set inputs of a node given the edge connection""" if isinstance(sourceinfo, (str, bytes)): val = source.get_output(sourceinfo) elif isinstance(sourceinfo, tuple): if callable(sourceinfo[1]): val = sourceinfo[1](source.get_output(sourceinfo[0]), *sourceinfo[2:]) newval = val if isinstance(val, TraitDictObject): newval = dict(val) if isinstance(val, TraitListObject): newval = val[:] logger.debug("setting node input: %s->%s", param, str(newval)) node.set_input(param, deepcopy(newval)) def _get_all_nodes(self): allnodes = self._nodes_cache - self._nested_workflows_cache for node in 
self._nested_workflows_cache: allnodes |= node._get_all_nodes() return allnodes def _update_node_cache(self): nodes = set(self._graph) added_nodes = nodes.difference(self._nodes_cache) removed_nodes = self._nodes_cache.difference(nodes) self._nodes_cache = nodes self._nested_workflows_cache.difference_update(removed_nodes) for node in added_nodes: if isinstance(node, Workflow): self._nested_workflows_cache.add(node) def _has_node(self, wanted_node): return wanted_node in self._nodes_cache or any( wf._has_node(wanted_node) for wf in self._nested_workflows_cache ) def _create_flat_graph(self): """Make a simple DAG where no node is a workflow.""" logger.debug("Creating flat graph for workflow: %s", self.name) workflowcopy = deepcopy(self) workflowcopy._generate_flatgraph() return workflowcopy._graph def _reset_hierarchy(self): """Reset the hierarchy on a graph""" for node in self._graph.nodes(): if isinstance(node, Workflow): node._reset_hierarchy() for innernode in node._graph.nodes(): innernode._hierarchy = ".".join((self.name, innernode._hierarchy)) else: node._hierarchy = self.name def _generate_flatgraph(self): """Generate a graph containing only Nodes or MapNodes""" import networkx as nx logger.debug("expanding workflow: %s", self) nodes2remove = [] if not nx.is_directed_acyclic_graph(self._graph): raise Exception( ("Workflow: %s is not a directed acyclic graph " "(DAG)") % self.name ) nodes = list(self._graph.nodes) for node in nodes: logger.debug("processing node: %s", node) if isinstance(node, Workflow): nodes2remove.append(node) # use in_edges instead of in_edges_iter to allow # disconnections to take place properly. otherwise, the # edge dict is modified. 
# dj: added list() for networkx ver.2 for u, _, d in list(self._graph.in_edges(nbunch=node, data=True)): logger.debug("in: connections-> %s", str(d["connect"])) for cd in deepcopy(d["connect"]): logger.debug("in: %s", str(cd)) dstnode = node.get_node(cd[1].rsplit(".", 1)[0]) srcnode = u srcout = cd[0] dstin = cd[1].split(".")[-1] logger.debug( "in edges: %s %s %s %s", srcnode, srcout, dstnode, dstin ) self.disconnect(u, cd[0], node, cd[1]) self.connect(srcnode, srcout, dstnode, dstin) # do not use out_edges_iter for reasons stated in in_edges # dj: for ver 2 use list(out_edges) for _, v, d in list(self._graph.out_edges(nbunch=node, data=True)): logger.debug("out: connections-> %s", str(d["connect"])) for cd in deepcopy(d["connect"]): logger.debug("out: %s", str(cd)) dstnode = v if isinstance(cd[0], tuple): parameter = cd[0][0] else: parameter = cd[0] srcnode = node.get_node(parameter.rsplit(".", 1)[0]) if isinstance(cd[0], tuple): srcout = list(cd[0]) srcout[0] = parameter.split(".")[-1] srcout = tuple(srcout) else: srcout = parameter.split(".")[-1] dstin = cd[1] logger.debug( "out edges: %s %s %s %s", srcnode, srcout, dstnode, dstin ) self.disconnect(node, cd[0], v, cd[1]) self.connect(srcnode, srcout, dstnode, dstin) # expand the workflow node # logger.debug('expanding workflow: %s', node) node._generate_flatgraph() for innernode in node._graph.nodes(): innernode._hierarchy = ".".join((self.name, innernode._hierarchy)) self._graph.add_nodes_from(node._graph.nodes()) self._graph.add_edges_from(node._graph.edges(data=True)) if nodes2remove: self._graph.remove_nodes_from(nodes2remove) logger.debug("finished expanding workflow: %s", self) def _get_dot( self, prefix=None, hierarchy=None, colored=False, simple_form=True, level=0 ): """Create a dot file with connection info""" import networkx as nx if prefix is None: prefix = " " if hierarchy is None: hierarchy = [] colorset = [ "#FFFFC8", # Y "#0000FF", "#B4B4FF", "#E6E6FF", # B "#FF0000", "#FFB4B4", "#FFE6E6", # R 
"#00A300", "#B4FFB4", "#E6FFE6", # G "#0000FF", "#B4B4FF", ] # loop B if level > len(colorset) - 2: level = 3 # Loop back to blue dotlist = ['%slabel="%s";' % (prefix, self.name)] for node in nx.topological_sort(self._graph): fullname = ".".join(hierarchy + [node.fullname]) nodename = fullname.replace(".", "_") if not isinstance(node, Workflow): node_class_name = get_print_name(node, simple_form=simple_form) if not simple_form: node_class_name = ".".join(node_class_name.split(".")[1:]) if hasattr(node, "iterables") and node.iterables: dotlist.append( ( '%s[label="%s", shape=box3d,' "style=filled, color=black, colorscheme" "=greys7 fillcolor=2];" ) % (nodename, node_class_name) ) else: if colored: dotlist.append( ('%s[label="%s", style=filled,' ' fillcolor="%s"];') % (nodename, node_class_name, colorset[level]) ) else: dotlist.append( ('%s[label="%s"];') % (nodename, node_class_name) ) for node in nx.topological_sort(self._graph): if isinstance(node, Workflow): fullname = ".".join(hierarchy + [node.fullname]) nodename = fullname.replace(".", "_") dotlist.append("subgraph cluster_%s {" % nodename) if colored: dotlist.append( prefix + prefix + 'edge [color="%s"];' % (colorset[level + 1]) ) dotlist.append(prefix + prefix + "style=filled;") dotlist.append( prefix + prefix + 'fillcolor="%s";' % (colorset[level + 2]) ) dotlist.append( node._get_dot( prefix=prefix + prefix, hierarchy=hierarchy + [self.name], colored=colored, simple_form=simple_form, level=level + 3, ) ) dotlist.append("}") else: for subnode in self._graph.successors(node): if node._hierarchy != subnode._hierarchy: continue if not isinstance(subnode, Workflow): nodefullname = ".".join(hierarchy + [node.fullname]) subnodefullname = ".".join(hierarchy + [subnode.fullname]) nodename = nodefullname.replace(".", "_") subnodename = subnodefullname.replace(".", "_") for _ in self._graph.get_edge_data(node, subnode)["connect"]: dotlist.append("%s -> %s;" % (nodename, subnodename)) logger.debug("connection: %s", 
dotlist[-1]) # add between workflow connections for u, v, d in self._graph.edges(data=True): uname = ".".join(hierarchy + [u.fullname]) vname = ".".join(hierarchy + [v.fullname]) for src, dest in d["connect"]: uname1 = uname vname1 = vname if isinstance(src, tuple): srcname = src[0] else: srcname = src if "." in srcname: uname1 += "." + ".".join(srcname.split(".")[:-1]) if "." in dest and "@" not in dest: if not isinstance(v, Workflow): if "datasink" not in str(v._interface.__class__).lower(): vname1 += "." + ".".join(dest.split(".")[:-1]) else: vname1 += "." + ".".join(dest.split(".")[:-1]) if uname1.split(".")[:-1] != vname1.split(".")[:-1]: dotlist.append( "%s -> %s;" % (uname1.replace(".", "_"), vname1.replace(".", "_")) ) logger.debug("cross connection: %s", dotlist[-1]) return ("\n" + prefix).join(dotlist) nipype-1.7.0/nipype/pipeline/plugins/000077500000000000000000000000001413403311400176075ustar00rootroot00000000000000nipype-1.7.0/nipype/pipeline/plugins/__init__.py000066400000000000000000000013261413403311400217220ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from .debug import DebugPlugin from .linear import LinearPlugin from .pbs import PBSPlugin from .oar import OARPlugin from .sge import SGEPlugin from .condor import CondorPlugin from .dagman import CondorDAGManPlugin from .multiproc import MultiProcPlugin from .legacymultiproc import LegacyMultiProcPlugin from .ipython import IPythonPlugin from .somaflow import SomaFlowPlugin from .pbsgraph import PBSGraphPlugin from .sgegraph import SGEGraphPlugin from .lsf import LSFPlugin from .slurm import SLURMPlugin from .slurmgraph import SLURMGraphPlugin from . 
import semaphore_singleton nipype-1.7.0/nipype/pipeline/plugins/base.py000066400000000000000000000620131413403311400210750ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Common graph operations for execution.""" import sys from copy import deepcopy from glob import glob import os import shutil from time import sleep, time from traceback import format_exception import numpy as np from ... import logging from ...utils.misc import str2bool from ..engine.utils import topological_sort, load_resultfile from ..engine import MapNode from .tools import report_crash, report_nodes_not_run, create_pyscript logger = logging.getLogger("nipype.workflow") class PluginBase(object): """Base class for plugins.""" def __init__(self, plugin_args=None): if plugin_args is None: plugin_args = {} self.plugin_args = plugin_args self._config = None self._status_callback = plugin_args.get("status_callback") def run(self, graph, config, updatehash=False): """ Instruct the plugin to execute the workflow graph. The core plugin member that should be implemented by all plugins. Parameters ---------- graph : a networkx, flattened :abbr:`DAG (Directed Acyclic Graph)` to be executed config : :obj:`~nipype.config` a nipype.config object updatehash : :obj:`bool` whether cached nodes with stale hash should be just updated. 
""" raise NotImplementedError class DistributedPluginBase(PluginBase): """ Execute workflow with a distribution engine Combinations of ``proc_done`` and ``proc_pending``: +------------+---------------+--------------------------------+ | proc_done | proc_pending | outcome | +============+===============+================================+ | True | False | Process is finished | +------------+---------------+--------------------------------+ | True | True | Process is currently being run | +------------+---------------+--------------------------------+ | False | False | Process is queued | +------------+---------------+--------------------------------+ | False | True | INVALID COMBINATION | +------------+---------------+--------------------------------+ Attributes ---------- procs : :obj:`list` list (N) of underlying interface elements to be processed proc_done : :obj:`numpy.ndarray` a boolean numpy array (N,) signifying whether a process has been submitted for execution proc_pending : :obj:`numpy.ndarray` a boolean numpy array (N,) signifying whether a process is currently running. depidx : :obj:`numpy.matrix` a boolean matrix (NxN) storing the dependency structure accross processes. Process dependencies are derived from each column. 
""" def __init__(self, plugin_args=None): """ Initialize runtime attributes to none """ super(DistributedPluginBase, self).__init__(plugin_args=plugin_args) self.procs = None self.depidx = None self.refidx = None self.mapnodes = None self.mapnodesubids = None self.proc_done = None self.proc_pending = None self.pending_tasks = [] self.max_jobs = self.plugin_args.get("max_jobs", np.inf) def _prerun_check(self, graph): """Stub method to validate/massage graph and nodes before running""" def _postrun_check(self): """Stub method to close any open resources""" def run(self, graph, config, updatehash=False): """ Executes a pre-defined pipeline using distributed approaches """ logger.info("Running in parallel.") self._config = config poll_sleep_secs = float(config["execution"]["poll_sleep_duration"]) self._prerun_check(graph) # Generate appropriate structures for worker-manager model self._generate_dependency_list(graph) self.mapnodes = [] self.mapnodesubids = {} # setup polling - TODO: change to threaded model notrun = [] errors = [] old_progress_stats = None old_presub_stats = None while not np.all(self.proc_done) or np.any(self.proc_pending): loop_start = time() # Check if a job is available (jobs with all dependencies run) # https://github.com/nipy/nipype/pull/2200#discussion_r141605722 jobs_ready = np.nonzero(~self.proc_done & (self.depidx.sum(0) == 0))[1] progress_stats = ( len(self.proc_done), np.sum(self.proc_done ^ self.proc_pending), np.sum(self.proc_done & self.proc_pending), len(jobs_ready), len(self.pending_tasks), np.sum(~self.proc_done & ~self.proc_pending), ) display_stats = progress_stats != old_progress_stats if display_stats: logger.debug( "Progress: %d jobs, %d/%d/%d " "(done/running/ready), %d/%d " "(pending_tasks/waiting).", *progress_stats, ) old_progress_stats = progress_stats toappend = [] # trigger callbacks for any pending results while self.pending_tasks: taskid, jobid = self.pending_tasks.pop() try: result = self._get_result(taskid) except 
Exception as exc: notrun.append(self._clean_queue(jobid, graph)) errors.append(exc) else: if result: if result["traceback"]: notrun.append( self._clean_queue(jobid, graph, result=result) ) errors.append("".join(result["traceback"])) else: self._task_finished_cb(jobid) self._remove_node_dirs() self._clear_task(taskid) else: assert self.proc_done[jobid] and self.proc_pending[jobid] toappend.insert(0, (taskid, jobid)) if toappend: self.pending_tasks.extend(toappend) num_jobs = len(self.pending_tasks) presub_stats = (num_jobs, np.sum(self.proc_done & self.proc_pending)) display_stats = display_stats or presub_stats != old_presub_stats if display_stats: logger.debug("Tasks currently running: %d. Pending: %d.", *presub_stats) old_presub_stats = presub_stats if num_jobs < self.max_jobs: self._send_procs_to_workers(updatehash=updatehash, graph=graph) elif display_stats: logger.debug("Not submitting (max jobs reached)") sleep_til = loop_start + poll_sleep_secs sleep(max(0, sleep_til - time())) self._remove_node_dirs() report_nodes_not_run(notrun) # close any open resources self._postrun_check() if errors: # If one or more nodes failed, re-rise first of them error, cause = errors[0], None if isinstance(error, str): error = RuntimeError(error) if len(errors) > 1: error, cause = ( RuntimeError(f"{len(errors)} raised. 
Re-raising first."), error, ) raise error from cause def _get_result(self, taskid): raise NotImplementedError def _submit_job(self, node, updatehash=False): raise NotImplementedError def _report_crash(self, node, result=None): tb = None if result is not None: node._result = result["result"] tb = result["traceback"] node._traceback = tb return report_crash(node, traceback=tb) def _clear_task(self, taskid): raise NotImplementedError def _clean_queue(self, jobid, graph, result=None): logger.debug("Clearing %d from queue", jobid) if self._status_callback: self._status_callback(self.procs[jobid], "exception") if result is None: result = { "result": None, "traceback": "\n".join(format_exception(*sys.exc_info())), } crashfile = self._report_crash(self.procs[jobid], result=result) if str2bool(self._config["execution"]["stop_on_first_crash"]): raise RuntimeError("".join(result["traceback"])) if jobid in self.mapnodesubids: # remove current jobid self.proc_pending[jobid] = False self.proc_done[jobid] = True # remove parent mapnode jobid = self.mapnodesubids[jobid] self.proc_pending[jobid] = False self.proc_done[jobid] = True # remove dependencies from queue return self._remove_node_deps(jobid, crashfile, graph) def _submit_mapnode(self, jobid): import scipy.sparse as ssp if jobid in self.mapnodes: return True self.mapnodes.append(jobid) mapnodesubids = self.procs[jobid].get_subnodes() numnodes = len(mapnodesubids) logger.debug("Adding %d jobs for mapnode %s", numnodes, self.procs[jobid]) for i in range(numnodes): self.mapnodesubids[self.depidx.shape[0] + i] = jobid self.procs.extend(mapnodesubids) self.depidx = ssp.vstack( (self.depidx, ssp.lil_matrix(np.zeros((numnodes, self.depidx.shape[1])))), "lil", ) self.depidx = ssp.hstack( (self.depidx, ssp.lil_matrix(np.zeros((self.depidx.shape[0], numnodes)))), "lil", ) self.depidx[-numnodes:, jobid] = 1 self.proc_done = np.concatenate( (self.proc_done, np.zeros(numnodes, dtype=bool)) ) self.proc_pending = np.concatenate( 
(self.proc_pending, np.zeros(numnodes, dtype=bool)) ) return False def _send_procs_to_workers(self, updatehash=False, graph=None): """ Sends jobs to workers """ while not np.all(self.proc_done): num_jobs = len(self.pending_tasks) if np.isinf(self.max_jobs): slots = None else: slots = max(0, self.max_jobs - num_jobs) logger.debug("Slots available: %s", slots) if (num_jobs >= self.max_jobs) or (slots == 0): break # Check if a job is available (jobs with all dependencies run) # https://github.com/nipy/nipype/pull/2200#discussion_r141605722 jobids = np.nonzero(~self.proc_done & (self.depidx.sum(0) == 0))[1] if len(jobids) > 0: # send all available jobs logger.info( "Pending[%d] Submitting[%d] jobs Slots[%s]", num_jobs, len(jobids[:slots]), slots or "inf", ) for jobid in jobids[:slots]: if isinstance(self.procs[jobid], MapNode): try: num_subnodes = self.procs[jobid].num_subnodes() except Exception: self._clean_queue(jobid, graph) self.proc_pending[jobid] = False continue if num_subnodes > 1: submit = self._submit_mapnode(jobid) if not submit: continue # change job status in appropriate queues self.proc_done[jobid] = True self.proc_pending[jobid] = True # Send job to task manager and add to pending tasks logger.info("Submitting: %s ID: %d", self.procs[jobid], jobid) if self._status_callback: self._status_callback(self.procs[jobid], "start") if not self._local_hash_check(jobid, graph): if self.procs[jobid].run_without_submitting: logger.debug( "Running node %s on master thread", self.procs[jobid] ) try: self.procs[jobid].run() except Exception: self._clean_queue(jobid, graph) self._task_finished_cb(jobid) self._remove_node_dirs() else: tid = self._submit_job( deepcopy(self.procs[jobid]), updatehash=updatehash ) if tid is None: self.proc_done[jobid] = False self.proc_pending[jobid] = False else: self.pending_tasks.insert(0, (tid, jobid)) logger.info( "Finished submitting: %s ID: %d", self.procs[jobid], jobid ) else: break def _local_hash_check(self, jobid, graph): if not 
str2bool(self.procs[jobid].config["execution"]["local_hash_check"]): return False try: cached, updated = self.procs[jobid].is_cached() except Exception: logger.warning( "Error while checking node hash, forcing re-run. " "Although this error may not prevent the workflow from running, " "it could indicate a major problem. Please report a new issue " "at https://github.com/nipy/nipype/issues adding the following " "information:\n\n\tNode: %s\n\tInterface: %s.%s\n\tTraceback:\n%s", self.procs[jobid], self.procs[jobid].interface.__module__, self.procs[jobid].interface.__class__.__name__, "\n".join(format_exception(*sys.exc_info())), ) return False logger.debug( 'Checking hash "%s" locally: cached=%s, updated=%s.', self.procs[jobid], cached, updated, ) overwrite = self.procs[jobid].overwrite always_run = self.procs[jobid].interface.always_run if ( cached and updated and (overwrite is False or overwrite is None and not always_run) ): logger.debug( "Skipping cached node %s with ID %s.", self.procs[jobid], jobid ) try: self._task_finished_cb(jobid, cached=True) self._remove_node_dirs() except Exception: logger.debug( "Error skipping cached node %s (%s).\n\n%s", self.procs[jobid], jobid, "\n".join(format_exception(*sys.exc_info())), ) self._clean_queue(jobid, graph) self.proc_pending[jobid] = False return True return False def _task_finished_cb(self, jobid, cached=False): """Extract outputs and assign to inputs of dependent tasks This is called when a job is completed. 
""" logger.info( "[Job %d] %s (%s).", jobid, "Cached" if cached else "Completed", self.procs[jobid], ) if self._status_callback: self._status_callback(self.procs[jobid], "end") # Update job and worker queues self.proc_pending[jobid] = False # update the job dependency structure rowview = self.depidx.getrowview(jobid) rowview[rowview.nonzero()] = 0 if jobid not in self.mapnodesubids: self.refidx[self.refidx[:, jobid].nonzero()[0], jobid] = 0 def _generate_dependency_list(self, graph): """Generates a dependency list for a list of graphs.""" import networkx as nx self.procs, _ = topological_sort(graph) self.depidx = nx.to_scipy_sparse_matrix( graph, nodelist=self.procs, format="lil" ) self.refidx = self.depidx.astype(int) self.proc_done = np.zeros(len(self.procs), dtype=bool) self.proc_pending = np.zeros(len(self.procs), dtype=bool) def _remove_node_deps(self, jobid, crashfile, graph): import networkx as nx try: dfs_preorder = nx.dfs_preorder except AttributeError: dfs_preorder = nx.dfs_preorder_nodes subnodes = [s for s in dfs_preorder(graph, self.procs[jobid])] for node in subnodes: idx = self.procs.index(node) self.proc_done[idx] = True self.proc_pending[idx] = False return dict(node=self.procs[jobid], dependents=subnodes, crashfile=crashfile) def _remove_node_dirs(self): """Removes directories whose outputs have already been used up""" if str2bool(self._config["execution"]["remove_node_directories"]): indices = np.nonzero((self.refidx.sum(axis=1) == 0).__array__())[0] for idx in indices: if idx in self.mapnodesubids: continue if self.proc_done[idx] and (not self.proc_pending[idx]): self.refidx[idx, idx] = -1 outdir = self.procs[idx].output_dir() logger.info( ( "[node dependencies finished] " "removing node: %s from directory %s" ) % (self.procs[idx]._id, outdir) ) shutil.rmtree(outdir) class SGELikeBatchManagerBase(DistributedPluginBase): """Execute workflow with SGE/OGE/PBS like batch system""" def __init__(self, template, plugin_args=None): 
super(SGELikeBatchManagerBase, self).__init__(plugin_args=plugin_args) self._template = template self._qsub_args = None if plugin_args: if "template" in plugin_args: self._template = plugin_args["template"] if os.path.isfile(self._template): with open(self._template) as tpl_file: self._template = tpl_file.read() if "qsub_args" in plugin_args: self._qsub_args = plugin_args["qsub_args"] self._pending = {} def _is_pending(self, taskid): """Check if a task is pending in the batch system""" raise NotImplementedError def _submit_batchtask(self, scriptfile, node): """Submit a task to the batch system""" raise NotImplementedError def _get_result(self, taskid): if taskid not in self._pending: raise Exception("Task %d not found" % taskid) if self._is_pending(taskid): return None node_dir = self._pending[taskid] # MIT HACK # on the pbs system at mit the parent node directory needs to be # accessed before internal directories become available. there # is a disconnect when the queueing engine knows a job is # finished to when the directories become statable. t = time() timeout = float(self._config["execution"]["job_finished_timeout"]) timed_out = True while (time() - t) < timeout: try: glob(os.path.join(node_dir, "result_*.pklz")).pop() timed_out = False break except Exception as e: logger.debug(e) sleep(2) if timed_out: result_data = {"hostname": "unknown", "result": None, "traceback": None} results_file = None try: error_message = ( "Job id ({0}) finished or terminated, but " "results file does not exist after ({1}) " "seconds. 
Batch dir contains crashdump file " "if node raised an exception.\n" "Node working directory: ({2}) ".format(taskid, timeout, node_dir) ) raise IOError(error_message) except IOError as e: result_data["traceback"] = "\n".join(format_exception(*sys.exc_info())) else: results_file = glob(os.path.join(node_dir, "result_*.pklz"))[0] result_data = load_resultfile(results_file) result_out = dict(result=None, traceback=None) if isinstance(result_data, dict): result_out["result"] = result_data["result"] result_out["traceback"] = result_data["traceback"] result_out["hostname"] = result_data["hostname"] if results_file: crash_file = os.path.join(node_dir, "crashstore.pklz") os.rename(results_file, crash_file) else: result_out["result"] = result_data return result_out def _submit_job(self, node, updatehash=False): """submit job and return taskid""" pyscript = create_pyscript(node, updatehash=updatehash) batch_dir, name = os.path.split(pyscript) name = ".".join(name.split(".")[:-1]) batchscript = "\n".join( (self._template.rstrip("\n"), "%s %s" % (sys.executable, pyscript)) ) batchscriptfile = os.path.join(batch_dir, "batchscript_%s.sh" % name) with open(batchscriptfile, "wt") as fp: fp.writelines(batchscript) return self._submit_batchtask(batchscriptfile, node) def _clear_task(self, taskid): del self._pending[taskid] class GraphPluginBase(PluginBase): """Base class for plugins that distribute graphs to workflows""" def __init__(self, plugin_args=None): if plugin_args and plugin_args.get("status_callback"): logger.warning( "status_callback not supported for Graph submission" " plugins" ) super(GraphPluginBase, self).__init__(plugin_args=plugin_args) def run(self, graph, config, updatehash=False): import networkx as nx pyfiles = [] dependencies = {} self._config = config nodes = list(nx.topological_sort(graph)) logger.debug("Creating executable python files for each node") for idx, node in enumerate(nodes): pyfiles.append( create_pyscript(node, updatehash=updatehash, 
store_exception=False) ) dependencies[idx] = [ nodes.index(prevnode) for prevnode in list(graph.predecessors(node)) ] self._submit_graph(pyfiles, dependencies, nodes) def _get_args(self, node, keywords): values = () for keyword in keywords: value = getattr(self, "_" + keyword) if keyword == "template" and os.path.isfile(value): with open(value) as f: value = f.read() if ( hasattr(node, "plugin_args") and isinstance(node.plugin_args, dict) and keyword in node.plugin_args ): if keyword == "template" and os.path.isfile(node.plugin_args[keyword]): with open(node.plugin_args[keyword]) as f: tmp_value = f.read() else: tmp_value = node.plugin_args[keyword] if "overwrite" in node.plugin_args and node.plugin_args["overwrite"]: value = tmp_value else: value += tmp_value values += (value,) return values def _submit_graph(self, pyfiles, dependencies, nodes): """ pyfiles: list of files corresponding to a topological sort dependencies: dictionary of dependencies based on the toplogical sort """ raise NotImplementedError def _get_result(self, taskid): if taskid not in self._pending: raise Exception("Task %d not found" % taskid) if self._is_pending(taskid): return None node_dir = self._pending[taskid] glob(os.path.join(node_dir, "result_*.pklz")).pop() results_file = glob(os.path.join(node_dir, "result_*.pklz"))[0] result_data = load_resultfile(results_file) result_out = dict(result=None, traceback=None) if isinstance(result_data, dict): result_out["result"] = result_data["result"] result_out["traceback"] = result_data["traceback"] result_out["hostname"] = result_data["hostname"] if results_file: crash_file = os.path.join(node_dir, "crashstore.pklz") os.rename(results_file, crash_file) else: result_out["result"] = result_data return result_out nipype-1.7.0/nipype/pipeline/plugins/condor.py000066400000000000000000000107061413403311400214510ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Parallel workflow execution via Condor """ import os from time import sleep from 
...interfaces.base import CommandLine from ... import logging from .base import SGELikeBatchManagerBase, logger iflogger = logging.getLogger("nipype.interface") class CondorPlugin(SGELikeBatchManagerBase): """Execute using Condor This plugin doesn't work with a plain stock-Condor installation, but requires a 'qsub' emulation script for Condor, called 'condor_qsub'. This script is shipped with the Condor package from NeuroDebian, or can be downloaded from its Git repository at http://anonscm.debian.org/gitweb/?p=pkg-exppsy/condor.git;a=blob_plain;f=debian/condor_qsub;hb=HEAD The plugin_args input to run can be used to control the Condor execution. Currently supported options are: - template : template to use for batch job submission. This can be an SGE-style script with the (limited) set of options supported by condor_qsub - qsub_args : arguments to be prepended to the job execution script in the qsub call """ def __init__(self, **kwargs): template = """ #$ -V #$ -S /bin/sh """ self._retry_timeout = 2 self._max_tries = 2 if "plugin_args" in kwargs and kwargs["plugin_args"]: if "retry_timeout" in kwargs["plugin_args"]: self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] if "max_tries" in kwargs["plugin_args"]: self._max_tries = kwargs["plugin_args"]["max_tries"] super(CondorPlugin, self).__init__(template, **kwargs) def _is_pending(self, taskid): cmd = CommandLine( "condor_q", resource_monitor=False, terminal_output="allatonce" ) cmd.inputs.args = "%d" % taskid # check condor cluster oldlevel = iflogger.level iflogger.setLevel(logging.getLevelName("CRITICAL")) result = cmd.run(ignore_exception=True) iflogger.setLevel(oldlevel) if result.runtime.stdout.count("\n%d" % taskid): return True return False def _submit_batchtask(self, scriptfile, node): cmd = CommandLine( "condor_qsub", environ=dict(os.environ), resource_monitor=False, terminal_output="allatonce", ) path = os.path.dirname(scriptfile) qsubargs = "" if self._qsub_args: qsubargs = self._qsub_args if 
"qsub_args" in node.plugin_args: if "overwrite" in node.plugin_args and node.plugin_args["overwrite"]: qsubargs = node.plugin_args["qsub_args"] else: qsubargs += " " + node.plugin_args["qsub_args"] if self._qsub_args: qsubargs = self._qsub_args if "-o" not in qsubargs: qsubargs = "%s -o %s" % (qsubargs, path) if "-e" not in qsubargs: qsubargs = "%s -e %s" % (qsubargs, path) if node._hierarchy: jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: jobname = ".".join((dict(os.environ)["LOGNAME"], node._id)) jobnameitems = jobname.split(".") jobnameitems.reverse() jobname = ".".join(jobnameitems) cmd.inputs.args = "%s -N %s %s" % (qsubargs, jobname, scriptfile) oldlevel = iflogger.level iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 while True: try: result = cmd.run() except Exception as e: if tries < self._max_tries: tries += 1 sleep(self._retry_timeout) # sleep 2 seconds and try again else: iflogger.setLevel(oldlevel) raise RuntimeError( "\n".join( ( ("Could not submit condor " "cluster" " for node %s") % node._id, str(e), ) ) ) else: break iflogger.setLevel(oldlevel) # retrieve condor clusterid taskid = int(result.runtime.stdout.split(" ")[2]) self._pending[taskid] = node.output_dir() logger.debug("submitted condor cluster: %d for node %s" % (taskid, node._id)) return taskid nipype-1.7.0/nipype/pipeline/plugins/dagman.py000066400000000000000000000176311413403311400214200ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Parallel workflow execution via Condor DAGMan """ import os import sys import uuid import time from warnings import warn from .base import GraphPluginBase, logger from ...interfaces.base import CommandLine class CondorDAGManPlugin(GraphPluginBase): """Execute using Condor DAGMan The plugin_args input to run can be used to control the DAGMan execution. The value of most arguments can be a literal string or a filename, where in the latter case the content of the file will be used as the argument value. 
Currently supported options are: - submit_template : submit spec template for individual jobs in a DAG (see CondorDAGManPlugin.default_submit_template for the default. - initial_specs : additional submit specs that are prepended to any job's submit file - override_specs : additional submit specs that are appended to any job's submit file - wrapper_cmd : path to an executable that will be started instead of a node script. This is useful for wrapper script that execute certain functionality prior or after a node runs. If this option is given the wrapper command is called with the respective Python executable and the path to the node script as final arguments - wrapper_args : optional additional arguments to a wrapper command - dagman_args : arguments to be prepended to the arguments of the condor_submit_dag call - block : if True the plugin call will block until Condor has finished processing the entire workflow (default: False) """ default_submit_template = """ universe = vanilla notification = Never executable = %(executable)s arguments = %(nodescript)s output = %(basename)s.out error = %(basename)s.err log = %(basename)s.log getenv = True """ def _get_str_or_file(self, arg): if os.path.isfile(arg): with open(arg) as f: content = f.read() else: content = arg return content # XXX feature wishlist # - infer data file dependencies from jobs # - infer CPU requirements from jobs # - infer memory requirements from jobs # - looks like right now all jobs come in here, regardless of whether they # actually have to run. would be good to be able to decide whether they # actually have to be scheduled (i.e. output already exist). 
def __init__(self, **kwargs): for var, id_, val in ( ("_template", "submit_template", self.default_submit_template), ("_initial_specs", "template", ""), ("_initial_specs", "initial_specs", ""), ("_override_specs", "submit_specs", ""), ("_override_specs", "override_specs", ""), ("_wrapper_cmd", "wrapper_cmd", None), ("_wrapper_args", "wrapper_args", ""), ("_block", "block", False), ("_dagman_args", "dagman_args", ""), ): if ( "plugin_args" in kwargs and not kwargs["plugin_args"] is None and id_ in kwargs["plugin_args"] ): if id_ == "wrapper_cmd": val = os.path.abspath(kwargs["plugin_args"][id_]) elif id_ == "block": val = kwargs["plugin_args"][id_] else: val = self._get_str_or_file(kwargs["plugin_args"][id_]) setattr(self, var, val) # TODO remove after some time if "plugin_args" in kwargs and not kwargs["plugin_args"] is None: plugin_args = kwargs["plugin_args"] if "template" in plugin_args: warn( "the 'template' argument is deprecated, use 'initial_specs' instead" ) if "submit_specs" in plugin_args: warn( "the 'submit_specs' argument is deprecated, use 'override_specs' instead" ) super(CondorDAGManPlugin, self).__init__(**kwargs) def _submit_graph(self, pyfiles, dependencies, nodes): # location of all scripts, place dagman output in here too batch_dir, _ = os.path.split(pyfiles[0]) # DAG description filename dagfilename = os.path.join(batch_dir, "workflow-%s.dag" % uuid.uuid4()) with open(dagfilename, "wt") as dagfileptr: # loop over all scripts, create submit files, and define them # as jobs in the DAG for idx, pyscript in enumerate(pyfiles): node = nodes[idx] # XXX redundant with previous value? or could it change between # scripts? 
( template, initial_specs, override_specs, wrapper_cmd, wrapper_args, ) = self._get_args( node, [ "template", "initial_specs", "override_specs", "wrapper_cmd", "wrapper_args", ], ) # add required slots to the template template = "%s\n%s\n%s\nqueue\n" % ( "%(initial_specs)s", template, "%(override_specs)s", ) batch_dir, name = os.path.split(pyscript) name = ".".join(name.split(".")[:-1]) specs = dict( # TODO make parameter for this, initial_specs=initial_specs, executable=sys.executable, nodescript=pyscript, basename=os.path.join(batch_dir, name), override_specs=override_specs, ) if wrapper_cmd is not None: specs["executable"] = wrapper_cmd specs["nodescript"] = "%s %s %s" % ( wrapper_args % specs, # give access to variables sys.executable, pyscript, ) submitspec = template % specs # write submit spec for this job submitfile = os.path.join(batch_dir, "%s.submit" % name) with open(submitfile, "wt") as submitfileprt: submitfileprt.writelines(submitspec) submitfileprt.close() # define job in DAG dagfileptr.write("JOB %i %s\n" % (idx, submitfile)) # define dependencies in DAG for child in dependencies: parents = dependencies[child] if len(parents): dagfileptr.write( "PARENT %s CHILD %i\n" % (" ".join([str(i) for i in parents]), child) ) # hand over DAG to condor_dagman cmd = CommandLine( "condor_submit_dag", environ=dict(os.environ), resource_monitor=False, terminal_output="allatonce", ) # needs -update_submit or re-running a workflow will fail cmd.inputs.args = "%s -update_submit %s" % (self._dagman_args, dagfilename) cmd.run() logger.info("submitted all jobs to Condor DAGMan") if self._block: # wait for DAGMan to settle down, no time wasted it is already running time.sleep(10) if not os.path.exists("%s.condor.sub" % dagfilename): raise EnvironmentError( "DAGMan did not create its submit file, please check the logs" ) # wait for completion logger.info("waiting for DAGMan to finish") lockfilename = "%s.lock" % dagfilename while os.path.exists(lockfilename): 
time.sleep(5) nipype-1.7.0/nipype/pipeline/plugins/debug.py000066400000000000000000000022411413403311400212460ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Debug plugin """ import networkx as nx from .base import PluginBase, logger class DebugPlugin(PluginBase): """Execute workflow in series""" def __init__(self, plugin_args=None): super(DebugPlugin, self).__init__(plugin_args=plugin_args) if ( plugin_args and "callable" in plugin_args and hasattr(plugin_args["callable"], "__call__") ): self._callable = plugin_args["callable"] else: raise ValueError("plugin_args must contain a callable function") def run(self, graph, config, updatehash=False): """Executes a pre-defined pipeline in a serial order. Parameters ---------- graph : networkx digraph defines order of execution """ if not isinstance(graph, nx.DiGraph): raise ValueError("Input must be a networkx digraph object") logger.info("Executing debug plugin") for node in nx.topological_sort(graph): self._callable(node, graph) nipype-1.7.0/nipype/pipeline/plugins/ipython.py000066400000000000000000000105521413403311400216560ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Parallel workflow execution via IPython controller """ from pickle import dumps import sys from .base import DistributedPluginBase, logger, report_crash IPython_not_loaded = False try: from IPython import __version__ as IPyversion from ipyparallel.error import TimeoutError except: IPython_not_loaded = True def execute_task(pckld_task, node_config, updatehash): from socket import gethostname from traceback import format_exc from nipype import config, logging traceback = None result = None import os cwd = os.getcwd() try: config.update_config(node_config) logging.update_logging(config) from pickle import loads 
task = loads(pckld_task) result = task.run(updatehash=updatehash) except: traceback = format_exc() from pickle import loads task = loads(pckld_task) result = task.result os.chdir(cwd) return result, traceback, gethostname() class IPythonPlugin(DistributedPluginBase): """Execute workflow with ipython""" def __init__(self, plugin_args=None): if IPython_not_loaded: raise ImportError("Please install ipyparallel to use this plugin.") super(IPythonPlugin, self).__init__(plugin_args=plugin_args) valid_args = ( "url_file", "profile", "cluster_id", "context", "debug", "timeout", "config", "username", "sshserver", "sshkey", "password", "paramiko", ) self.client_args = { arg: plugin_args[arg] for arg in valid_args if arg in plugin_args } self.iparallel = None self.taskclient = None self.taskmap = {} self._taskid = 0 def run(self, graph, config, updatehash=False): """Executes a pre-defined pipeline is distributed approaches based on IPython's ipyparallel processing interface """ # retrieve clients again try: name = "ipyparallel" __import__(name) self.iparallel = sys.modules[name] except ImportError as e: raise ImportError( "ipyparallel not found. 
Parallel execution " "will be unavailable" ) from e try: self.taskclient = self.iparallel.Client(**self.client_args) except Exception as e: if isinstance(e, TimeoutError): raise Exception("No IPython clients found.") from e if isinstance(e, IOError): raise Exception("ipcluster/ipcontroller has not been started") from e if isinstance(e, ValueError): raise Exception("Ipython kernel not installed") from e else: raise e return super(IPythonPlugin, self).run(graph, config, updatehash=updatehash) def _get_result(self, taskid): if taskid not in self.taskmap: raise ValueError("Task %d not in pending list" % taskid) if self.taskmap[taskid].ready(): result, traceback, hostname = self.taskmap[taskid].get() result_out = dict(result=None, traceback=None) result_out["result"] = result result_out["traceback"] = traceback result_out["hostname"] = hostname return result_out else: return None def _submit_job(self, node, updatehash=False): pckld_node = dumps(node, 2) result_object = self.taskclient.load_balanced_view().apply( execute_task, pckld_node, node.config, updatehash ) self._taskid += 1 self.taskmap[self._taskid] = result_object return self._taskid def _report_crash(self, node, result=None): if result and result["traceback"]: node._result = result["result"] node._traceback = result["traceback"] return report_crash(node, traceback=result["traceback"]) else: return report_crash(node) def _clear_task(self, taskid): if IPyversion >= "0.11": logger.debug("Clearing id: %d" % taskid) self.taskclient.purge_results(self.taskmap[taskid]) del self.taskmap[taskid] nipype-1.7.0/nipype/pipeline/plugins/legacymultiproc.py000066400000000000000000000371441413403311400233750ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Parallel workflow execution via multiprocessing Support for child processes running as non-daemons based on http://stackoverflow.com/a/8963618/1183453 """ 
# Import packages import os import multiprocessing as mp from multiprocessing import Pool, cpu_count, pool from traceback import format_exception import sys from logging import INFO import gc from copy import deepcopy import numpy as np from ... import logging from ...utils.profiler import get_system_total_memory_gb from ..engine import MapNode from .base import DistributedPluginBase try: from textwrap import indent except ImportError: def indent(text, prefix): """A textwrap.indent replacement for Python < 3.3""" if not prefix: return text splittext = text.splitlines(True) return prefix + prefix.join(splittext) # Init logger logger = logging.getLogger("nipype.workflow") # Run node def run_node(node, updatehash, taskid): """Function to execute node.run(), catch and log any errors and return the result dictionary Parameters ---------- node : nipype Node instance the node to run updatehash : boolean flag for updating hash taskid : int an identifier for this task Returns ------- result : dictionary dictionary containing the node runtime results and stats """ # Init variables result = dict(result=None, traceback=None, taskid=taskid) # Try and execute the node via node.run() try: result["result"] = node.run(updatehash=updatehash) except: # noqa: E722, intendedly catch all here result["traceback"] = format_exception(*sys.exc_info()) result["result"] = node.result # Return the result dictionary return result # Pythons 2.7, 3.4-3.7.0, and 3.7.1 have three different implementations of # pool.Pool().Process(), and the type of the result varies based on the default # multiprocessing context, so we need to dynamically patch the daemon property class NonDaemonMixin(object): @property def daemon(self): return False @daemon.setter def daemon(self, val): pass try: from multiprocessing import context # Exists on all platforms class NonDaemonSpawnProcess(NonDaemonMixin, context.SpawnProcess): pass class NonDaemonSpawnContext(context.SpawnContext): Process = NonDaemonSpawnProcess 
_nondaemon_context_mapper = {"spawn": NonDaemonSpawnContext()} # POSIX only try: class NonDaemonForkProcess(NonDaemonMixin, context.ForkProcess): pass class NonDaemonForkContext(context.ForkContext): Process = NonDaemonForkProcess _nondaemon_context_mapper["fork"] = NonDaemonForkContext() except AttributeError: pass # POSIX only try: class NonDaemonForkServerProcess(NonDaemonMixin, context.ForkServerProcess): pass class NonDaemonForkServerContext(context.ForkServerContext): Process = NonDaemonForkServerProcess _nondaemon_context_mapper["forkserver"] = NonDaemonForkServerContext() except AttributeError: pass class NonDaemonPool(pool.Pool): def __init__( self, processes=None, initializer=None, initargs=(), maxtasksperchild=None, context=None, ): if context is None: context = mp.get_context() context = _nondaemon_context_mapper[context._name] super(NonDaemonPool, self).__init__( processes=processes, initializer=initializer, initargs=initargs, maxtasksperchild=maxtasksperchild, context=context, ) except ImportError: class NonDaemonProcess(NonDaemonMixin, mp.Process): pass class NonDaemonPool(pool.Pool): Process = NonDaemonProcess def process_initializer(cwd): """Initializes the environment of the child process""" os.chdir(cwd) os.environ["NIPYPE_NO_ET"] = "1" class LegacyMultiProcPlugin(DistributedPluginBase): """ Execute workflow with multiprocessing, not sending more jobs at once than the system can support. The plugin_args input to run can be used to control the multiprocessing execution and defining the maximum amount of memory and threads that should be used. When those parameters are not specified, the number of threads and memory of the system is used. System consuming nodes should be tagged:: memory_consuming_node.mem_gb = 8 thread_consuming_node.n_procs = 16 The default number of threads and memory are set at node creation, and are 1 and 0.25GB respectively. 
Currently supported options are: - non_daemon : boolean flag to execute as non-daemon processes - n_procs: maximum number of threads to be executed in parallel - memory_gb: maximum memory (in GB) that can be used at once. - raise_insufficient: raise error if the requested resources for a node over the maximum `n_procs` and/or `memory_gb` (default is ``True``). - scheduler: sort jobs topologically (``'tsort'``, default value) or prioritize jobs by, first, memory consumption and, second, number of threads (``'mem_thread'`` option). - maxtasksperchild: number of nodes to run on each process before refreshing the worker (default: 10). """ def __init__(self, plugin_args=None): # Init variables and instance attributes super(LegacyMultiProcPlugin, self).__init__(plugin_args=plugin_args) self._taskresult = {} self._task_obj = {} self._taskid = 0 # Cache current working directory and make sure we # change to it when workers are set up self._cwd = os.getcwd() # Read in options or set defaults. non_daemon = self.plugin_args.get("non_daemon", True) maxtasks = self.plugin_args.get("maxtasksperchild", 10) self.processors = self.plugin_args.get("n_procs", cpu_count()) self.memory_gb = self.plugin_args.get( "memory_gb", # Allocate 90% of system memory get_system_total_memory_gb() * 0.9, ) self.raise_insufficient = self.plugin_args.get("raise_insufficient", True) # Instantiate different thread pools for non-daemon processes logger.debug( '[LegacyMultiProc] Starting in "%sdaemon" mode (n_procs=%d, ' "mem_gb=%0.2f, cwd=%s)", "non" * int(non_daemon), self.processors, self.memory_gb, self._cwd, ) NipypePool = NonDaemonPool if non_daemon else Pool try: self.pool = NipypePool( processes=self.processors, maxtasksperchild=maxtasks, initializer=process_initializer, initargs=(self._cwd,), ) except TypeError: # Python < 3.2 does not have maxtasksperchild # When maxtasksperchild is not set, initializer is not to be # called self.pool = NipypePool(processes=self.processors) self._stats = None 
def _async_callback(self, args): # Make sure runtime is not left at a dubious working directory os.chdir(self._cwd) self._taskresult[args["taskid"]] = args def _get_result(self, taskid): return self._taskresult.get(taskid) def _clear_task(self, taskid): del self._task_obj[taskid] def _submit_job(self, node, updatehash=False): self._taskid += 1 # Don't allow streaming outputs if getattr(node.interface, "terminal_output", "") == "stream": node.interface.terminal_output = "allatonce" self._task_obj[self._taskid] = self.pool.apply_async( run_node, (node, updatehash, self._taskid), callback=self._async_callback ) logger.debug( "[LegacyMultiProc] Submitted task %s (taskid=%d).", node.fullname, self._taskid, ) return self._taskid def _prerun_check(self, graph): """Check if any node exeeds the available resources""" tasks_mem_gb = [] tasks_num_th = [] for node in graph.nodes(): tasks_mem_gb.append(node.mem_gb) tasks_num_th.append(node.n_procs) if np.any(np.array(tasks_mem_gb) > self.memory_gb): logger.warning( "Some nodes exceed the total amount of memory available " "(%0.2fGB).", self.memory_gb, ) if self.raise_insufficient: raise RuntimeError("Insufficient resources available for job") if np.any(np.array(tasks_num_th) > self.processors): logger.warning( "Some nodes demand for more threads than available (%d).", self.processors, ) if self.raise_insufficient: raise RuntimeError("Insufficient resources available for job") def _postrun_check(self): self.pool.close() def _check_resources(self, running_tasks): """ Make sure there are resources available """ free_memory_gb = self.memory_gb free_processors = self.processors for _, jobid in running_tasks: free_memory_gb -= min(self.procs[jobid].mem_gb, free_memory_gb) free_processors -= min(self.procs[jobid].n_procs, free_processors) return free_memory_gb, free_processors def _send_procs_to_workers(self, updatehash=False, graph=None): """ Sends jobs to workers when system resources are available. 
""" # Check to see if a job is available (jobs with all dependencies run) # See https://github.com/nipy/nipype/pull/2200#discussion_r141605722 # See also https://github.com/nipy/nipype/issues/2372 jobids = np.flatnonzero( ~self.proc_done & (self.depidx.sum(axis=0) == 0).__array__() ) # Check available resources by summing all threads and memory used free_memory_gb, free_processors = self._check_resources(self.pending_tasks) stats = ( len(self.pending_tasks), len(jobids), free_memory_gb, self.memory_gb, free_processors, self.processors, ) if self._stats != stats: tasks_list_msg = "" if logger.level <= INFO: running_tasks = [ " * %s" % self.procs[jobid].fullname for _, jobid in self.pending_tasks ] if running_tasks: tasks_list_msg = "\nCurrently running:\n" tasks_list_msg += "\n".join(running_tasks) tasks_list_msg = indent(tasks_list_msg, " " * 21) logger.info( "[LegacyMultiProc] Running %d tasks, and %d jobs ready. Free " "memory (GB): %0.2f/%0.2f, Free processors: %d/%d.%s", len(self.pending_tasks), len(jobids), free_memory_gb, self.memory_gb, free_processors, self.processors, tasks_list_msg, ) self._stats = stats if free_memory_gb < 0.01 or free_processors == 0: logger.debug("No resources available") return if len(jobids) + len(self.pending_tasks) == 0: logger.debug( "No tasks are being run, and no jobs can " "be submitted to the queue. 
Potential deadlock" ) return jobids = self._sort_jobs(jobids, scheduler=self.plugin_args.get("scheduler")) # Run garbage collector before potentially submitting jobs gc.collect() # Submit jobs for jobid in jobids: # First expand mapnodes if isinstance(self.procs[jobid], MapNode): try: num_subnodes = self.procs[jobid].num_subnodes() except Exception: traceback = format_exception(*sys.exc_info()) self._clean_queue( jobid, graph, result={"result": None, "traceback": traceback} ) self.proc_pending[jobid] = False continue if num_subnodes > 1: submit = self._submit_mapnode(jobid) if not submit: continue # Check requirements of this job next_job_gb = min(self.procs[jobid].mem_gb, self.memory_gb) next_job_th = min(self.procs[jobid].n_procs, self.processors) # If node does not fit, skip at this moment if next_job_th > free_processors or next_job_gb > free_memory_gb: logger.debug( "Cannot allocate job %d (%0.2fGB, %d threads).", jobid, next_job_gb, next_job_th, ) continue free_memory_gb -= next_job_gb free_processors -= next_job_th logger.debug( "Allocating %s ID=%d (%0.2fGB, %d threads). 
Free: " "%0.2fGB, %d threads.", self.procs[jobid].fullname, jobid, next_job_gb, next_job_th, free_memory_gb, free_processors, ) # change job status in appropriate queues self.proc_done[jobid] = True self.proc_pending[jobid] = True # If cached and up-to-date just retrieve it, don't run if self._local_hash_check(jobid, graph): continue # updatehash and run_without_submitting are also run locally if updatehash or self.procs[jobid].run_without_submitting: logger.debug("Running node %s on master thread", self.procs[jobid]) try: self.procs[jobid].run(updatehash=updatehash) except Exception: traceback = format_exception(*sys.exc_info()) self._clean_queue( jobid, graph, result={"result": None, "traceback": traceback} ) # Release resources self._task_finished_cb(jobid) self._remove_node_dirs() free_memory_gb += next_job_gb free_processors += next_job_th # Display stats next loop self._stats = None # Clean up any debris from running node in main process gc.collect() continue # Task should be submitted to workers # Send job to task manager and add to pending tasks if self._status_callback: self._status_callback(self.procs[jobid], "start") tid = self._submit_job(deepcopy(self.procs[jobid]), updatehash=updatehash) if tid is None: self.proc_done[jobid] = False self.proc_pending[jobid] = False else: self.pending_tasks.insert(0, (tid, jobid)) # Display stats next loop self._stats = None def _sort_jobs(self, jobids, scheduler="tsort"): if scheduler == "mem_thread": return sorted( jobids, key=lambda item: (self.procs[item].mem_gb, self.procs[item].n_procs), ) return jobids nipype-1.7.0/nipype/pipeline/plugins/linear.py000066400000000000000000000053471413403311400214440ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Local serial workflow execution """ import os from .base import PluginBase, logger, report_crash, report_nodes_not_run, str2bool from ..engine.utils 
import topological_sort class LinearPlugin(PluginBase): """Execute workflow in series""" def run(self, graph, config, updatehash=False): """Executes a pre-defined pipeline in a serial order. Parameters ---------- graph : networkx digraph defines order of execution """ import networkx as nx try: dfs_preorder = nx.dfs_preorder except AttributeError: dfs_preorder = nx.dfs_preorder_nodes if not isinstance(graph, nx.DiGraph): raise ValueError("Input must be a networkx digraph object") logger.info("Running serially.") old_wd = os.getcwd() notrun = [] donotrun = [] stop_on_first_crash = str2bool(config["execution"]["stop_on_first_crash"]) errors = [] nodes, _ = topological_sort(graph) for node in nodes: endstatus = "end" try: if node in donotrun: continue if self._status_callback: self._status_callback(node, "start") node.run(updatehash=updatehash) except Exception as exc: endstatus = "exception" # bare except, but i really don't know where a # node might fail crashfile = report_crash(node) # remove dependencies from queue subnodes = [s for s in dfs_preorder(graph, node)] notrun.append( {"node": node, "dependents": subnodes, "crashfile": crashfile} ) donotrun.extend(subnodes) # Delay raising the crash until we cleaned the house errors.append(exc) if stop_on_first_crash: break finally: if self._status_callback: self._status_callback(node, endstatus) os.chdir(old_wd) # Return wherever we were before report_nodes_not_run(notrun) if errors: # If one or more nodes failed, re-rise first of them error, cause = errors[0], None if isinstance(error, str): error = RuntimeError(error) if len(errors) > 1: error, cause = ( RuntimeError(f"{len(errors)} raised. Re-raising first."), error, ) raise error from cause nipype-1.7.0/nipype/pipeline/plugins/lsf.py000066400000000000000000000113361413403311400207510ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Parallel workflow execution via LSF """ import os import re from time import sleep from ... 
import logging from ...interfaces.base import CommandLine from .base import SGELikeBatchManagerBase, logger iflogger = logging.getLogger("nipype.interface") class LSFPlugin(SGELikeBatchManagerBase): """Execute using LSF Cluster Submission The plugin_args input to run can be used to control the LSF execution. Currently supported options are: - template : template to use for batch job submission - bsub_args : arguments to be prepended to the job execution script in the bsub call """ def __init__(self, **kwargs): template = """ #$ -S /bin/sh """ self._retry_timeout = 2 self._max_tries = 2 self._bsub_args = "" if "plugin_args" in kwargs and kwargs["plugin_args"]: if "retry_timeout" in kwargs["plugin_args"]: self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] if "max_tries" in kwargs["plugin_args"]: self._max_tries = kwargs["plugin_args"]["max_tries"] if "bsub_args" in kwargs["plugin_args"]: self._bsub_args = kwargs["plugin_args"]["bsub_args"] super(LSFPlugin, self).__init__(template, **kwargs) def _is_pending(self, taskid): """LSF lists a status of 'PEND' when a job has been submitted but is waiting to be picked up, and 'RUN' when it is actively being processed. But _is_pending should return True until a job has finished and is ready to be checked for completeness. 
So return True if status is either 'PEND' or 'RUN'""" cmd = CommandLine("bjobs", resource_monitor=False, terminal_output="allatonce") cmd.inputs.args = "%d" % taskid # check lsf task oldlevel = iflogger.level iflogger.setLevel(logging.getLevelName("CRITICAL")) result = cmd.run(ignore_exception=True) iflogger.setLevel(oldlevel) # logger.debug(result.runtime.stdout) if "DONE" in result.runtime.stdout or "EXIT" in result.runtime.stdout: return False else: return True def _submit_batchtask(self, scriptfile, node): cmd = CommandLine( "bsub", environ=dict(os.environ), resource_monitor=False, terminal_output="allatonce", ) bsubargs = "" if self._bsub_args: bsubargs = self._bsub_args if "bsub_args" in node.plugin_args: if "overwrite" in node.plugin_args and node.plugin_args["overwrite"]: bsubargs = node.plugin_args["bsub_args"] else: bsubargs += " " + node.plugin_args["bsub_args"] if "-o" not in bsubargs: # -o outfile bsubargs = "%s -o %s" % (bsubargs, scriptfile + ".log") if "-e" not in bsubargs: # -e error file bsubargs = "%s -e %s" % (bsubargs, scriptfile + ".log") if node._hierarchy: jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: jobname = ".".join((dict(os.environ)["LOGNAME"], node._id)) jobnameitems = jobname.split(".") jobnameitems.reverse() jobname = ".".join(jobnameitems) cmd.inputs.args = "%s -J %s sh %s" % ( bsubargs, jobname, scriptfile, ) # -J job_name_spec logger.debug("bsub " + cmd.inputs.args) oldlevel = iflogger.level iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 while True: try: result = cmd.run() except Exception as e: if tries < self._max_tries: tries += 1 sleep(self._retry_timeout) # sleep 2 seconds and try again. 
else: iflogger.setLevel(oldlevel) raise RuntimeError( "\n".join( ( ("Could not submit lsf task" " for node %s") % node._id, str(e), ) ) ) else: break iflogger.setLevel(oldlevel) # retrieve lsf taskid match = re.search(r"<(\d*)>", result.runtime.stdout) if match: taskid = int(match.groups()[0]) else: raise IOError( "Can't parse submission job output id: %s" % result.runtime.stdout ) self._pending[taskid] = node.output_dir() logger.debug("submitted lsf task: %d for node %s" % (taskid, node._id)) return taskid nipype-1.7.0/nipype/pipeline/plugins/multiproc.py000066400000000000000000000323541413403311400222060ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Parallel workflow execution via multiprocessing Support for child processes running as non-daemons based on http://stackoverflow.com/a/8963618/1183453 """ # Import packages import os import multiprocessing as mp from concurrent.futures import ProcessPoolExecutor, wait from traceback import format_exception import sys from logging import INFO import gc from copy import deepcopy import numpy as np from ... 
import logging from ...utils.profiler import get_system_total_memory_gb from ..engine import MapNode from .base import DistributedPluginBase try: from textwrap import indent except ImportError: def indent(text, prefix): """A textwrap.indent replacement for Python < 3.3""" if not prefix: return text splittext = text.splitlines(True) return prefix + prefix.join(splittext) # Init logger logger = logging.getLogger("nipype.workflow") # Run node def run_node(node, updatehash, taskid): """Function to execute node.run(), catch and log any errors and return the result dictionary Parameters ---------- node : nipype Node instance the node to run updatehash : boolean flag for updating hash taskid : int an identifier for this task Returns ------- result : dictionary dictionary containing the node runtime results and stats """ # Init variables result = dict(result=None, traceback=None, taskid=taskid) # Try and execute the node via node.run() try: result["result"] = node.run(updatehash=updatehash) except: # noqa: E722, intendedly catch all here result["traceback"] = format_exception(*sys.exc_info()) result["result"] = node.result # Return the result dictionary return result def process_initializer(cwd): """Initializes the environment of the child process""" os.chdir(cwd) os.environ["NIPYPE_NO_ET"] = "1" class MultiProcPlugin(DistributedPluginBase): """ Execute workflow with multiprocessing, not sending more jobs at once than the system can support. The plugin_args input to run can be used to control the multiprocessing execution and defining the maximum amount of memory and threads that should be used. When those parameters are not specified, the number of threads and memory of the system is used. System consuming nodes should be tagged:: memory_consuming_node.mem_gb = 8 thread_consuming_node.n_procs = 16 The default number of threads and memory are set at node creation, and are 1 and 0.25GB respectively. 
Currently supported options are: - non_daemon: boolean flag to execute as non-daemon processes - n_procs: maximum number of threads to be executed in parallel - memory_gb: maximum memory (in GB) that can be used at once. - raise_insufficient: raise error if the requested resources for a node over the maximum `n_procs` and/or `memory_gb` (default is ``True``). - scheduler: sort jobs topologically (``'tsort'``, default value) or prioritize jobs by, first, memory consumption and, second, number of threads (``'mem_thread'`` option). - mp_context: name of multiprocessing context to use """ def __init__(self, plugin_args=None): # Init variables and instance attributes super(MultiProcPlugin, self).__init__(plugin_args=plugin_args) self._taskresult = {} self._task_obj = {} self._taskid = 0 # Cache current working directory and make sure we # change to it when workers are set up self._cwd = os.getcwd() # Read in options or set defaults. self.processors = self.plugin_args.get("n_procs", mp.cpu_count()) self.memory_gb = self.plugin_args.get( "memory_gb", # Allocate 90% of system memory get_system_total_memory_gb() * 0.9, ) self.raise_insufficient = self.plugin_args.get("raise_insufficient", True) # Instantiate different thread pools for non-daemon processes logger.debug( "[MultiProc] Starting (n_procs=%d, " "mem_gb=%0.2f, cwd=%s)", self.processors, self.memory_gb, self._cwd, ) try: mp_context = mp.get_context(self.plugin_args.get("mp_context")) self.pool = ProcessPoolExecutor( max_workers=self.processors, initializer=process_initializer, initargs=(self._cwd,), mp_context=mp_context, ) except (AttributeError, TypeError): # Python < 3.7 does not support initialization or contexts self.pool = ProcessPoolExecutor(max_workers=self.processors) result_future = self.pool.submit(process_initializer, self._cwd) wait([result_future], timeout=5) self._stats = None def _async_callback(self, args): result = args.result() self._taskresult[result["taskid"]] = result def _get_result(self, 
taskid): return self._taskresult.get(taskid) def _clear_task(self, taskid): del self._task_obj[taskid] def _submit_job(self, node, updatehash=False): self._taskid += 1 # Don't allow streaming outputs if getattr(node.interface, "terminal_output", "") == "stream": node.interface.terminal_output = "allatonce" result_future = self.pool.submit(run_node, node, updatehash, self._taskid) result_future.add_done_callback(self._async_callback) self._task_obj[self._taskid] = result_future logger.debug( "[MultiProc] Submitted task %s (taskid=%d).", node.fullname, self._taskid ) return self._taskid def _prerun_check(self, graph): """Check if any node exeeds the available resources""" tasks_mem_gb = [] tasks_num_th = [] for node in graph.nodes(): tasks_mem_gb.append(node.mem_gb) tasks_num_th.append(node.n_procs) if np.any(np.array(tasks_mem_gb) > self.memory_gb): logger.warning( "Some nodes exceed the total amount of memory available " "(%0.2fGB).", self.memory_gb, ) if self.raise_insufficient: raise RuntimeError("Insufficient resources available for job") if np.any(np.array(tasks_num_th) > self.processors): logger.warning( "Some nodes demand for more threads than available (%d).", self.processors, ) if self.raise_insufficient: raise RuntimeError("Insufficient resources available for job") def _postrun_check(self): self.pool.shutdown() def _check_resources(self, running_tasks): """ Make sure there are resources available """ free_memory_gb = self.memory_gb free_processors = self.processors for _, jobid in running_tasks: free_memory_gb -= min(self.procs[jobid].mem_gb, free_memory_gb) free_processors -= min(self.procs[jobid].n_procs, free_processors) return free_memory_gb, free_processors def _send_procs_to_workers(self, updatehash=False, graph=None): """ Sends jobs to workers when system resources are available. 
""" # Check to see if a job is available (jobs with all dependencies run) # See https://github.com/nipy/nipype/pull/2200#discussion_r141605722 # See also https://github.com/nipy/nipype/issues/2372 jobids = np.flatnonzero( ~self.proc_done & (self.depidx.sum(axis=0) == 0).__array__() ) # Check available resources by summing all threads and memory used free_memory_gb, free_processors = self._check_resources(self.pending_tasks) stats = ( len(self.pending_tasks), len(jobids), free_memory_gb, self.memory_gb, free_processors, self.processors, ) if self._stats != stats: tasks_list_msg = "" if logger.level <= INFO: running_tasks = [ " * %s" % self.procs[jobid].fullname for _, jobid in self.pending_tasks ] if running_tasks: tasks_list_msg = "\nCurrently running:\n" tasks_list_msg += "\n".join(running_tasks) tasks_list_msg = indent(tasks_list_msg, " " * 21) logger.info( "[MultiProc] Running %d tasks, and %d jobs ready. Free " "memory (GB): %0.2f/%0.2f, Free processors: %d/%d.%s", len(self.pending_tasks), len(jobids), free_memory_gb, self.memory_gb, free_processors, self.processors, tasks_list_msg, ) self._stats = stats if free_memory_gb < 0.01 or free_processors == 0: logger.debug("No resources available") return if len(jobids) + len(self.pending_tasks) == 0: logger.debug( "No tasks are being run, and no jobs can " "be submitted to the queue. 
Potential deadlock" ) return jobids = self._sort_jobs(jobids, scheduler=self.plugin_args.get("scheduler")) # Run garbage collector before potentially submitting jobs gc.collect() # Submit jobs for jobid in jobids: # First expand mapnodes if isinstance(self.procs[jobid], MapNode): try: num_subnodes = self.procs[jobid].num_subnodes() except Exception: traceback = format_exception(*sys.exc_info()) self._clean_queue( jobid, graph, result={"result": None, "traceback": traceback} ) self.proc_pending[jobid] = False continue if num_subnodes > 1: submit = self._submit_mapnode(jobid) if not submit: continue # Check requirements of this job next_job_gb = min(self.procs[jobid].mem_gb, self.memory_gb) next_job_th = min(self.procs[jobid].n_procs, self.processors) # If node does not fit, skip at this moment if next_job_th > free_processors or next_job_gb > free_memory_gb: logger.debug( "Cannot allocate job %d (%0.2fGB, %d threads).", jobid, next_job_gb, next_job_th, ) continue free_memory_gb -= next_job_gb free_processors -= next_job_th logger.debug( "Allocating %s ID=%d (%0.2fGB, %d threads). 
Free: " "%0.2fGB, %d threads.", self.procs[jobid].fullname, jobid, next_job_gb, next_job_th, free_memory_gb, free_processors, ) # change job status in appropriate queues self.proc_done[jobid] = True self.proc_pending[jobid] = True # If cached and up-to-date just retrieve it, don't run if self._local_hash_check(jobid, graph): continue # updatehash and run_without_submitting are also run locally if updatehash or self.procs[jobid].run_without_submitting: logger.debug("Running node %s on master thread", self.procs[jobid]) try: self.procs[jobid].run(updatehash=updatehash) except Exception: traceback = format_exception(*sys.exc_info()) self._clean_queue( jobid, graph, result={"result": None, "traceback": traceback} ) # Release resources self._task_finished_cb(jobid) self._remove_node_dirs() free_memory_gb += next_job_gb free_processors += next_job_th # Display stats next loop self._stats = None # Clean up any debris from running node in main process gc.collect() continue # Task should be submitted to workers # Send job to task manager and add to pending tasks if self._status_callback: self._status_callback(self.procs[jobid], "start") tid = self._submit_job(deepcopy(self.procs[jobid]), updatehash=updatehash) if tid is None: self.proc_done[jobid] = False self.proc_pending[jobid] = False else: self.pending_tasks.insert(0, (tid, jobid)) # Display stats next loop self._stats = None def _sort_jobs(self, jobids, scheduler="tsort"): if scheduler == "mem_thread": return sorted( jobids, key=lambda item: (self.procs[item].mem_gb, self.procs[item].n_procs), ) return jobids nipype-1.7.0/nipype/pipeline/plugins/oar.py000066400000000000000000000120101413403311400207340ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Parallel workflow execution via OAR http://oar.imag.fr """ import os import stat from time import sleep import subprocess import simplejson as json from ... 
import logging from ...interfaces.base import CommandLine from .base import SGELikeBatchManagerBase, logger iflogger = logging.getLogger("nipype.interface") class OARPlugin(SGELikeBatchManagerBase): """Execute using OAR The plugin_args input to run can be used to control the OAR execution. Currently supported options are: - template : template to use for batch job submission - oarsub_args : arguments to be prepended to the job execution script in the oarsub call - max_jobname_len: maximum length of the job name. Default 15. """ # Addtional class variables _max_jobname_len = 15 _oarsub_args = "" def __init__(self, **kwargs): template = """ # oarsub -J """ self._retry_timeout = 2 self._max_tries = 2 self._max_jobname_length = 15 if "plugin_args" in kwargs and kwargs["plugin_args"]: if "oarsub_args" in kwargs["plugin_args"]: self._oarsub_args = kwargs["plugin_args"]["oarsub_args"] if "retry_timeout" in kwargs["plugin_args"]: self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] if "max_tries" in kwargs["plugin_args"]: self._max_tries = kwargs["plugin_args"]["max_tries"] if "max_jobname_len" in kwargs["plugin_args"]: self._max_jobname_len = kwargs["plugin_args"]["max_jobname_len"] super(OARPlugin, self).__init__(template, **kwargs) def _is_pending(self, taskid): # subprocess.Popen requires taskid to be a string proc = subprocess.Popen( ["oarstat", "-J", "-s", "-j", taskid], stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) o, e = proc.communicate() parsed_result = json.loads(o)[taskid].lower() is_pending = ("error" not in parsed_result) and ( "terminated" not in parsed_result ) return is_pending def _submit_batchtask(self, scriptfile, node): cmd = CommandLine( "oarsub", environ=dict(os.environ), resource_monitor=False, terminal_output="allatonce", ) path = os.path.dirname(scriptfile) oarsubargs = "" if self._oarsub_args: oarsubargs = self._oarsub_args if "oarsub_args" in node.plugin_args: if "overwrite" in node.plugin_args and node.plugin_args["overwrite"]: 
oarsubargs = node.plugin_args["oarsub_args"] else: oarsubargs += " " + node.plugin_args["oarsub_args"] if node._hierarchy: jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: jobname = ".".join((dict(os.environ)["LOGNAME"], node._id)) jobnameitems = jobname.split(".") jobnameitems.reverse() jobname = ".".join(jobnameitems) jobname = jobname[0 : self._max_jobname_len] if "-O" not in oarsubargs: oarsubargs = "%s -O %s" % ( oarsubargs, os.path.join(path, jobname + ".stdout"), ) if "-E" not in oarsubargs: oarsubargs = "%s -E %s" % ( oarsubargs, os.path.join(path, jobname + ".stderr"), ) if "-J" not in oarsubargs: oarsubargs = "%s -J" % (oarsubargs) os.chmod(scriptfile, stat.S_IEXEC | stat.S_IREAD | stat.S_IWRITE) cmd.inputs.args = "%s -n %s -S %s" % (oarsubargs, jobname, scriptfile) oldlevel = iflogger.level iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 while True: try: result = cmd.run() except Exception as e: if tries < self._max_tries: tries += 1 sleep(self._retry_timeout) # sleep 2 seconds and try again. else: iflogger.setLevel(oldlevel) raise RuntimeError( "\n".join( ( ("Could not submit OAR task" " for node %s") % node._id, str(e), ) ) ) else: break iflogger.setLevel(oldlevel) # retrieve OAR taskid o = "" add = False for line in result.runtime.stdout.splitlines(): if line.strip().startswith("{"): add = True if add: o += line + "\n" if line.strip().startswith("}"): break taskid = json.loads(o)["job_id"] self._pending[taskid] = node.output_dir() logger.debug("submitted OAR task: %s for node %s" % (taskid, node._id)) return taskid nipype-1.7.0/nipype/pipeline/plugins/pbs.py000066400000000000000000000101431413403311400207440ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Parallel workflow execution via PBS/Torque """ import os from time import sleep from ... 
import logging from ...interfaces.base import CommandLine from .base import SGELikeBatchManagerBase, logger iflogger = logging.getLogger("nipype.interface") class PBSPlugin(SGELikeBatchManagerBase): """Execute using PBS/Torque The plugin_args input to run can be used to control the SGE execution. Currently supported options are: - template : template to use for batch job submission - qsub_args : arguments to be prepended to the job execution script in the qsub call - max_jobname_len: maximum length of the job name. Default 15. """ # Addtional class variables _max_jobname_len = 15 def __init__(self, **kwargs): template = """ #PBS -V """ self._retry_timeout = 2 self._max_tries = 2 self._max_jobname_length = 15 if "plugin_args" in kwargs and kwargs["plugin_args"]: if "retry_timeout" in kwargs["plugin_args"]: self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] if "max_tries" in kwargs["plugin_args"]: self._max_tries = kwargs["plugin_args"]["max_tries"] if "max_jobname_len" in kwargs["plugin_args"]: self._max_jobname_len = kwargs["plugin_args"]["max_jobname_len"] super(PBSPlugin, self).__init__(template, **kwargs) def _is_pending(self, taskid): result = CommandLine( "qstat -f {}".format(taskid), environ=dict(os.environ), terminal_output="file_split", resource_monitor=False, ignore_exception=True, ).run() stdout = result.runtime.stdout stderr = result.runtime.stderr errmsg = "Unknown Job Id" success = "Job has finished" if (success in stderr) or ("job_state = C" in stdout): return False else: return errmsg not in stderr def _submit_batchtask(self, scriptfile, node): cmd = CommandLine( "qsub", environ=dict(os.environ), resource_monitor=False, terminal_output="allatonce", ) path = os.path.dirname(scriptfile) qsubargs = "" if self._qsub_args: qsubargs = self._qsub_args if "qsub_args" in node.plugin_args: if "overwrite" in node.plugin_args and node.plugin_args["overwrite"]: qsubargs = node.plugin_args["qsub_args"] else: qsubargs += " " + 
node.plugin_args["qsub_args"] if "-o" not in qsubargs: qsubargs = "%s -o %s" % (qsubargs, path) if "-e" not in qsubargs: qsubargs = "%s -e %s" % (qsubargs, path) if node._hierarchy: jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: jobname = ".".join((dict(os.environ)["LOGNAME"], node._id)) jobnameitems = jobname.split(".") jobnameitems.reverse() jobname = ".".join(jobnameitems) jobname = jobname[0 : self._max_jobname_len] cmd.inputs.args = "%s -N %s %s" % (qsubargs, jobname, scriptfile) oldlevel = iflogger.level iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 while True: try: result = cmd.run() except Exception as e: if tries < self._max_tries: tries += 1 # sleep 2 seconds and try again. sleep(self._retry_timeout) else: iflogger.setLevel(oldlevel) raise RuntimeError( "Could not submit pbs task for node {}\n{}".format(node._id, e) ) else: break iflogger.setLevel(oldlevel) # retrieve pbs taskid taskid = result.runtime.stdout.split(".")[0] self._pending[taskid] = node.output_dir() logger.debug("submitted pbs task: {} for node {}".format(taskid, node._id)) return taskid nipype-1.7.0/nipype/pipeline/plugins/pbsgraph.py000066400000000000000000000042211413403311400217660ustar00rootroot00000000000000"""Parallel workflow execution via PBS/Torque """ import os import sys from ...interfaces.base import CommandLine from .sgegraph import SGEGraphPlugin from .base import logger class PBSGraphPlugin(SGEGraphPlugin): """Execute using PBS/Torque The plugin_args input to run can be used to control the SGE execution. 
Currently supported options are: - template : template to use for batch job submission - qsub_args : arguments to be prepended to the job execution script in the qsub call """ _template = """ #PBS -V """ def _submit_graph(self, pyfiles, dependencies, nodes): batch_dir, _ = os.path.split(pyfiles[0]) submitjobsfile = os.path.join(batch_dir, "submit_jobs.sh") with open(submitjobsfile, "wt") as fp: fp.writelines("#!/usr/bin/env sh\n") for idx, pyscript in enumerate(pyfiles): node = nodes[idx] template, qsub_args = self._get_args(node, ["template", "qsub_args"]) batch_dir, name = os.path.split(pyscript) name = ".".join(name.split(".")[:-1]) batchscript = "\n".join( (template, "%s %s" % (sys.executable, pyscript)) ) batchscriptfile = os.path.join(batch_dir, "batchscript_%s.sh" % name) with open(batchscriptfile, "wt") as batchfp: batchfp.writelines(batchscript) batchfp.close() deps = "" if idx in dependencies: values = ["$job%05d" % jobid for jobid in dependencies[idx]] if len(values): deps = "-W depend=afterok:%s" % ":".join(values) fp.writelines( "job%05d=`qsub %s %s %s`\n" % (idx, deps, qsub_args, batchscriptfile) ) cmd = CommandLine( "sh", environ=dict(os.environ), resource_monitor=False, terminal_output="allatonce", ) cmd.inputs.args = "%s" % submitjobsfile cmd.run() logger.info("submitted all jobs to queue") nipype-1.7.0/nipype/pipeline/plugins/semaphore_singleton.py000066400000000000000000000001151413403311400242230ustar00rootroot00000000000000# -*- coding: utf-8 -*- import threading semaphore = threading.Semaphore(0) nipype-1.7.0/nipype/pipeline/plugins/sge.py000066400000000000000000000454721413403311400207530ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Parallel workflow execution via SGE """ import os import pwd import re import subprocess import time import xml.dom.minidom import random from ... 
import logging from ...interfaces.base import CommandLine from .base import SGELikeBatchManagerBase, logger iflogger = logging.getLogger("nipype.interface") DEBUGGING_PREFIX = str(int(random.uniform(100, 999))) def sge_debug_print(message): """Needed for debugging on big jobs. Once this is fully vetted, it can be removed.""" logger.debug(DEBUGGING_PREFIX + " " + "=!" * 3 + " " + message) # print DEBUGGING_PREFIX + " " + "=!" * 3 + " " + message class QJobInfo(object): """Information about a single job created by OGE/SGE or similar Each job is responsible for knowing it's own refresh state :author Hans J. Johnson """ def __init__( self, job_num, job_queue_state, job_time, job_queue_name, job_slots, qsub_command_line, ): # self._jobName = None # Ascii text name of job not unique self._job_num = int( job_num ) # The primary unique identifier for this job, must be an integer! # self._jobOwn = None # Who owns this job self._job_queue_state = str(job_queue_state) # ["running","zombie",...??] # self._jobActionState = str(jobActionState) # ['r','qw','S',...??] 
self._job_time = job_time # The job start time self._job_info_creation_time = ( time.time() ) # When this job was created (for comparing against initalization) self._job_queue_name = job_queue_name # Where the job is running self._job_slots = int(job_slots) # How many slots are being used self._qsub_command_line = qsub_command_line def __repr__(self): return "{:<8d}{:12}{:<3d}{:20}{:8}{}".format( self._job_num, self._job_queue_state, self._job_slots, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(self._job_time)), self._job_queue_name, self._qsub_command_line, ) def is_initializing(self): return self._job_queue_state == "initializing" def is_zombie(self): return self._job_queue_state == "zombie" or self._job_queue_state == "finished" def is_running(self): return self._job_queue_state == "running" def is_pending(self): return self._job_queue_state == "pending" def is_job_state_pending(self): """Return True, unless job is in the "zombie" status""" time_diff = time.time() - self._job_info_creation_time if self.is_zombie(): sge_debug_print( "DONE! QJobInfo.IsPending found in 'zombie' list, returning False so claiming done!\n{0}".format( self ) ) is_pending_status = False # Job explicitly found as being completed! elif self.is_initializing() and (time_diff > 600): # if initializing for more than 5 minute, failure due to # initialization and completion before registration sge_debug_print( "FAILURE! QJobInfo.IsPending found long running at {1} seconds" "'initializing' returning False for to break loop!\n{0}".format( self, time_diff ) ) is_pending_status = True # Job initialization took too long, so report! 
else: # self.is_running() || self.is_pending(): is_pending_status = True # Job cache last listed as running return is_pending_status # The job is in one of the hold states def update_info(self, job_queue_state, job_time, job_queue_name, job_slots): self._job_queue_state = job_queue_state self._job_time = job_time self._job_queue_name = job_queue_name self._job_slots = int(job_slots) def set_state(self, new_state): self._job_queue_state = new_state class QstatSubstitute(object): """A wrapper for Qstat to avoid overloading the SGE/OGS server with rapid continuous qstat requests""" def __init__( self, qstat_instant_executable="qstat", qstat_cached_executable="qstat" ): """ :param qstat_instant_executable: :param qstat_cached_executable: """ self._qstat_instant_executable = qstat_instant_executable self._qstat_cached_executable = qstat_cached_executable self._out_of_scope_jobs = list() # Initialize first self._task_dictionary = ( dict() ) # {'taskid': QJobInfo(), .... } The dictionaryObject self._remove_old_jobs() def _remove_old_jobs(self): """This is only called during initialization of the function for the purpose of identifying jobs that are not part of this run of nipype. They are jobs that existed prior to starting a new jobs, so they are irrelevant. 
""" self._run_qstat("QstatInitialization", True) # If qstat does not exist on this system, then quietly # fail during init def add_startup_job(self, taskid, qsub_command_line): """ :param taskid: The job id :param qsub_command_line: When initializing, re-use the job_queue_name :return: NONE """ taskid = int(taskid) # Ensure that it is an integer self._task_dictionary[taskid] = QJobInfo( taskid, "initializing", time.time(), "noQueue", 1, qsub_command_line ) @staticmethod def _qacct_verified_complete(taskid): """request definitive job completion information for the current job from the qacct report """ sge_debug_print( "WARNING: " "CONTACTING qacct for finished jobs, " "{0}: {1}".format(time.time(), "Verifying Completion") ) this_command = "qacct" qacct_retries = 10 is_complete = False while qacct_retries > 0: qacct_retries -= 1 try: proc = subprocess.Popen( [ this_command, "-o", pwd.getpwuid(os.getuid())[0], "-j", str(taskid), ], stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) qacct_result, _ = proc.communicate() if qacct_result.find(str(taskid)): is_complete = True sge_debug_print("NOTE: qacct for jobs\n{0}".format(qacct_result)) break except: sge_debug_print("NOTE: qacct call failed") time.sleep(5) pass return is_complete def _parse_qstat_job_list(self, xml_job_list): current_jobs_parsed = list() for current_job_element in xml_job_list: # jobname = current_job_element.getElementsByTagName('JB_name')[0].childNodes[0].data # jobown = # current_job_element.getElementsByTagName('JB_owner')[0].childNodes[0].data try: job_queue_name = ( current_job_element.getElementsByTagName("queue_name")[0] .childNodes[0] .data ) except: job_queue_name = "unknown" try: job_slots = int( current_job_element.getElementsByTagName("slots")[0] .childNodes[0] .data ) except: job_slots = -1 job_queue_state = current_job_element.getAttribute("state") job_num = int( current_job_element.getElementsByTagName("JB_job_number")[0] .childNodes[0] .data ) try: job_time_text = ( 
current_job_element.getElementsByTagName("JAT_start_time")[0] .childNodes[0] .data ) job_time = float( time.mktime(time.strptime(job_time_text, "%Y-%m-%dT%H:%M:%S")) ) except: job_time = float(0.0) # Make job entry task_id = int(job_num) if task_id in self._task_dictionary: self._task_dictionary[task_id].update_info( job_queue_state, job_time, job_queue_name, job_slots ) sge_debug_print( "Updating job: {0}".format(self._task_dictionary[task_id]) ) current_jobs_parsed.append(task_id) # Changed from job_num as "in" is used to check which does not cast else: # Any Job that was not explicitly added with qsub command is # out of scope self._out_of_scope_jobs.append(task_id) # To ensure that every job is in the dictionary has a state reported # by the SGE environment, it is necessary to explicitly check jobs # that are not reported by the qstat command to determine if they # were started and finished, and pushed out of the window of review # before their state being recorded. The qacct command is slower, but # much more robust for ensuring that a job has completed. for dictionary_job in list(self._task_dictionary.keys()): if dictionary_job not in current_jobs_parsed: is_completed = self._qacct_verified_complete(dictionary_job) if is_completed: self._task_dictionary[dictionary_job].set_state("zombie") else: sge_debug_print( "ERROR: Job not in current parselist, " "and not in done list {0}: {1}".format( dictionary_job, self._task_dictionary[dictionary_job] ) ) pass if self._task_dictionary[dictionary_job].is_initializing(): is_completed = self._qacct_verified_complete(dictionary_job) if is_completed: self._task_dictionary[dictionary_job].set_state("zombie") else: sge_debug_print( "ERROR: Job not in still in intializing mode, " "and not in done list {0}: {1}".format( dictionary_job, self._task_dictionary[dictionary_job] ) ) pass def _run_qstat(self, reason_for_qstat, force_instant=True): """request all job information for the current user in xmlformat. 
See documentation from java documentation: http://arc.liv.ac.uk/SGE/javadocs/jgdi/com/sun/grid/jgdi/monitoring/filter/JobStateFilter.html -s r gives running jobs -s z gives recently completed jobs (**recently** is very ambiguous) -s s suspended jobs """ sge_debug_print( "WARNING: CONTACTING qmaster for jobs, " "{0}: {1}".format(time.time(), reason_for_qstat) ) if force_instant: this_command = self._qstat_instant_executable else: this_command = self._qstat_cached_executable qstat_retries = 10 while qstat_retries > 0: qstat_retries -= 1 try: proc = subprocess.Popen( [ this_command, "-u", pwd.getpwuid(os.getuid())[0], "-xml", "-s", "psrz", ], stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) qstat_xml_result, _ = proc.communicate() dom = xml.dom.minidom.parseString(qstat_xml_result) jobs = dom.getElementsByTagName("job_info") run = jobs[0] runjobs = run.getElementsByTagName("job_list") self._parse_qstat_job_list(runjobs) break except Exception as inst: exception_message = "QstatParsingError:\n\t{0}\n\t{1}\n".format( type(inst), # the exception instance inst, # __str__ allows args to printed directly ) sge_debug_print(exception_message) time.sleep(5) pass def print_dictionary(self): """For debugging""" for vv in list(self._task_dictionary.values()): sge_debug_print(str(vv)) def is_job_pending(self, task_id): task_id = int(task_id) # Ensure that it is an integer # Check if the task is in the dictionary first (before running qstat) if task_id in self._task_dictionary: # Trust the cache, only False if state='zombie' job_is_pending = self._task_dictionary[task_id].is_job_state_pending() # Double check pending jobs in case of change (since we don't check at the beginning) if job_is_pending: self._run_qstat( "checking job pending status {0}".format(task_id), False ) job_is_pending = self._task_dictionary[task_id].is_job_state_pending() else: self._run_qstat("checking job pending status {0}".format(task_id), True) if task_id in self._task_dictionary: # Trust the cache, only 
False if state='zombie' job_is_pending = self._task_dictionary[task_id].is_job_state_pending() else: sge_debug_print( "ERROR: Job {0} not in task list, " "even after forced qstat!".format(task_id) ) job_is_pending = False if not job_is_pending: sge_debug_print("DONE! Returning for {0} claiming done!".format(task_id)) if task_id in self._task_dictionary: sge_debug_print( "NOTE: Adding {0} to OutOfScopeJobs list!".format(task_id) ) self._out_of_scope_jobs.append(int(task_id)) self._task_dictionary.pop(task_id) else: sge_debug_print( "ERROR: Job {0} not in task list, " "but attempted to be removed!".format(task_id) ) return job_is_pending def qsub_sanitize_job_name(testjobname): """Ensure that qsub job names must begin with a letter. Numbers and punctuation are not allowed. >>> qsub_sanitize_job_name('01') 'J01' >>> qsub_sanitize_job_name('a01') 'a01' """ if testjobname[0].isalpha(): return testjobname else: return "J" + testjobname class SGEPlugin(SGELikeBatchManagerBase): """Execute using SGE (OGE not tested) The plugin_args input to run can be used to control the SGE execution. 
Currently supported options are: - template : template to use for batch job submission - qsub_args : arguments to be prepended to the job execution script in the qsub call """ def __init__(self, **kwargs): template = """ #$ -V #$ -S /bin/sh """ self._retry_timeout = 2 self._max_tries = 2 instant_qstat = "qstat" cached_qstat = "qstat" if "plugin_args" in kwargs and kwargs["plugin_args"]: if "retry_timeout" in kwargs["plugin_args"]: self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] if "max_tries" in kwargs["plugin_args"]: self._max_tries = kwargs["plugin_args"]["max_tries"] if "qstatProgramPath" in kwargs["plugin_args"]: instant_qstat = kwargs["plugin_args"]["qstatProgramPath"] if "qstatCachedProgramPath" in kwargs["plugin_args"]: cached_qstat = kwargs["plugin_args"]["qstatCachedProgramPath"] self._refQstatSubstitute = QstatSubstitute(instant_qstat, cached_qstat) super(SGEPlugin, self).__init__(template, **kwargs) def _is_pending(self, taskid): return self._refQstatSubstitute.is_job_pending(int(taskid)) def _submit_batchtask(self, scriptfile, node): cmd = CommandLine( "qsub", environ=dict(os.environ), resource_monitor=False, terminal_output="allatonce", ) path = os.path.dirname(scriptfile) qsubargs = "" if self._qsub_args: qsubargs = self._qsub_args if "qsub_args" in node.plugin_args: if "overwrite" in node.plugin_args and node.plugin_args["overwrite"]: qsubargs = node.plugin_args["qsub_args"] else: qsubargs += " " + node.plugin_args["qsub_args"] if "-o" not in qsubargs: qsubargs = "%s -o %s" % (qsubargs, path) if "-e" not in qsubargs: qsubargs = "%s -e %s" % (qsubargs, path) if node._hierarchy: jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: jobname = ".".join((dict(os.environ)["LOGNAME"], node._id)) jobnameitems = jobname.split(".") jobnameitems.reverse() jobname = ".".join(jobnameitems) jobname = qsub_sanitize_job_name(jobname) cmd.inputs.args = "%s -N %s %s" % (qsubargs, jobname, scriptfile) oldlevel = iflogger.level 
iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 result = list() while True: try: result = cmd.run() except Exception as e: if tries < self._max_tries: tries += 1 time.sleep(self._retry_timeout) # sleep 2 seconds and try again. else: iflogger.setLevel(oldlevel) raise RuntimeError( "\n".join( ( ("Could not submit sge task" " for node %s") % node._id, str(e), ) ) ) else: break iflogger.setLevel(oldlevel) # retrieve sge taskid lines = [line for line in result.runtime.stdout.split("\n") if line] taskid = int( re.match("Your job ([0-9]*) .* has been submitted", lines[-1]).groups()[0] ) self._pending[taskid] = node.output_dir() self._refQstatSubstitute.add_startup_job(taskid, cmd.cmdline) logger.debug( "submitted sge task: %d for node %s with %s" % (taskid, node._id, cmd.cmdline) ) return taskid nipype-1.7.0/nipype/pipeline/plugins/sgegraph.py000066400000000000000000000157731413403311400217760ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Parallel workflow execution via SGE """ import os import sys from ...interfaces.base import CommandLine from .base import GraphPluginBase, logger def node_completed_status(checknode): """ A function to determine if a node has previously completed it's work :param checknode: The node to check the run status :return: boolean value True indicates that the node does not need to be run. """ """ TODO: place this in the base.py file and refactor """ node_state_does_not_require_overwrite = checknode.overwrite is False or ( checknode.overwrite is None and not checknode._interface.always_run ) hash_exists = False try: hash_exists, _, _, _ = checknode.hash_exists() except Exception: hash_exists = False return hash_exists and node_state_does_not_require_overwrite class SGEGraphPlugin(GraphPluginBase): """Execute using SGE The plugin_args input to run can be used to control the SGE execution. 
Currently supported options are: - template : template to use for batch job submission - qsub_args : arguments to be prepended to the job execution script in the qsub call """ _template = """ #!/bin/bash #$ -V #$ -S /bin/bash """ def __init__(self, **kwargs): self._qsub_args = "" self._dont_resubmit_completed_jobs = False if "plugin_args" in kwargs and kwargs["plugin_args"]: plugin_args = kwargs["plugin_args"] if "template" in plugin_args: self._template = plugin_args["template"] if os.path.isfile(self._template): self._template = open(self._template).read() if "qsub_args" in plugin_args: self._qsub_args = plugin_args["qsub_args"] if "dont_resubmit_completed_jobs" in plugin_args: self._dont_resubmit_completed_jobs = plugin_args[ "dont_resubmit_completed_jobs" ] super(SGEGraphPlugin, self).__init__(**kwargs) def _submit_graph(self, pyfiles, dependencies, nodes): def make_job_name(jobnumber, nodeslist): """ - jobnumber: The index number of the job to create - nodeslist: The name of the node being processed - return: A string representing this job to be displayed by SGE """ job_name = "j{0}_{1}".format(jobnumber, nodeslist[jobnumber]._id) # Condition job_name to be a valid bash identifier (i.e. 
- is invalid) job_name = job_name.replace("-", "_").replace(".", "_").replace(":", "_") return job_name batch_dir, _ = os.path.split(pyfiles[0]) submitjobsfile = os.path.join(batch_dir, "submit_jobs.sh") cache_doneness_per_node = dict() if ( self._dont_resubmit_completed_jobs ): # A future parameter for controlling this behavior could be added here for idx, pyscript in enumerate(pyfiles): node = nodes[idx] node_status_done = node_completed_status(node) # if the node itself claims done, then check to ensure all # dependancies are also done if node_status_done and idx in dependencies: for child_idx in dependencies[idx]: if child_idx in cache_doneness_per_node: child_status_done = cache_doneness_per_node[child_idx] else: child_status_done = node_completed_status(nodes[child_idx]) node_status_done = node_status_done and child_status_done cache_doneness_per_node[idx] = node_status_done with open(submitjobsfile, "wt") as fp: fp.writelines("#!/usr/bin/env bash\n") fp.writelines("# Condense format attempted\n") for idx, pyscript in enumerate(pyfiles): node = nodes[idx] if cache_doneness_per_node.get(idx, False): continue else: template, qsub_args = self._get_args( node, ["template", "qsub_args"] ) batch_dir, name = os.path.split(pyscript) name = ".".join(name.split(".")[:-1]) batchscript = "\n".join( (template, "%s %s" % (sys.executable, pyscript)) ) batchscriptfile = os.path.join( batch_dir, "batchscript_%s.sh" % name ) batchscriptoutfile = batchscriptfile + ".o" batchscripterrfile = batchscriptfile + ".e" with open(batchscriptfile, "wt") as batchfp: batchfp.writelines(batchscript) batchfp.close() deps = "" if idx in dependencies: values = " " for jobid in dependencies[idx]: # Avoid dependancies of done jobs if ( not self._dont_resubmit_completed_jobs or not cache_doneness_per_node[jobid] ): values += "${{{0}}},".format( make_job_name(jobid, nodes) ) if ( values != " " ): # i.e. 
if some jobs were added to dependency list values = values.rstrip(",") deps = "-hold_jid%s" % values jobname = make_job_name(idx, nodes) # Do not use default output locations if they are set in self._qsub_args stderrFile = "" if self._qsub_args.count("-e ") == 0: stderrFile = "-e {errFile}".format(errFile=batchscripterrfile) stdoutFile = "" if self._qsub_args.count("-o ") == 0: stdoutFile = "-o {outFile}".format(outFile=batchscriptoutfile) full_line = "{jobNm}=$(qsub {outFileOption} {errFileOption} {extraQSubArgs} {dependantIndex} -N {jobNm} {batchscript} | awk '/^Your job/{{print $3}}')\n".format( jobNm=jobname, outFileOption=stdoutFile, errFileOption=stderrFile, extraQSubArgs=qsub_args, dependantIndex=deps, batchscript=batchscriptfile, ) fp.writelines(full_line) cmd = CommandLine( "bash", environ=dict(os.environ), resource_monitor=False, terminal_output="allatonce", ) cmd.inputs.args = "%s" % submitjobsfile cmd.run() logger.info("submitted all jobs to queue") nipype-1.7.0/nipype/pipeline/plugins/slurm.py000066400000000000000000000125451413403311400213320ustar00rootroot00000000000000""" Created on Aug 2, 2013 @author: chadcumba Parallel workflow execution with SLURM """ import os import re from time import sleep from ... import logging from ...interfaces.base import CommandLine from .base import SGELikeBatchManagerBase, logger iflogger = logging.getLogger("nipype.interface") class SLURMPlugin(SGELikeBatchManagerBase): """ Execute using SLURM The plugin_args input to run can be used to control the SLURM execution. 
Currently supported options are: - template : template to use for batch job submission - sbatch_args: arguments to pass prepend to the sbatch call """ def __init__(self, **kwargs): template = "#!/bin/bash" self._retry_timeout = 2 self._max_tries = 2 self._template = template self._sbatch_args = None self._jobid_re = "Submitted batch job ([0-9]*)" if "plugin_args" in kwargs and kwargs["plugin_args"]: if "retry_timeout" in kwargs["plugin_args"]: self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] if "max_tries" in kwargs["plugin_args"]: self._max_tries = kwargs["plugin_args"]["max_tries"] if "jobid_re" in kwargs["plugin_args"]: self._jobid_re = kwargs["plugin_args"]["jobid_re"] if "template" in kwargs["plugin_args"]: self._template = kwargs["plugin_args"]["template"] if os.path.isfile(self._template): with open(self._template) as f: self._template = f.read() if "sbatch_args" in kwargs["plugin_args"]: self._sbatch_args = kwargs["plugin_args"]["sbatch_args"] self._pending = {} super(SLURMPlugin, self).__init__(self._template, **kwargs) def _is_pending(self, taskid): try: res = CommandLine( "squeue", args=" ".join(["-j", "%s" % taskid]), resource_monitor=False, terminal_output="allatonce", ).run() return res.runtime.stdout.find(str(taskid)) > -1 except RuntimeError as e: if any( ss in str(e) for ss in ["Socket timed out", "not available at the moment"] ): # do not raise error and allow recheck logger.warning( "SLURM timeout encountered while checking job status," " treating job %d as pending", taskid, ) return True if "Invalid job id" not in str(e): raise (e) return False def _submit_batchtask(self, scriptfile, node): """ This is more or less the _submit_batchtask from sge.py with flipped variable names, different command line switches, and different output formatting/processing """ cmd = CommandLine( "sbatch", environ=dict(os.environ), resource_monitor=False, terminal_output="allatonce", ) path = os.path.dirname(scriptfile) sbatch_args = "" if 
self._sbatch_args: sbatch_args = self._sbatch_args if "sbatch_args" in node.plugin_args: if "overwrite" in node.plugin_args and node.plugin_args["overwrite"]: sbatch_args = node.plugin_args["sbatch_args"] else: sbatch_args += " " + node.plugin_args["sbatch_args"] if "-o" not in sbatch_args: sbatch_args = "%s -o %s" % (sbatch_args, os.path.join(path, "slurm-%j.out")) if "-e" not in sbatch_args: sbatch_args = "%s -e %s" % (sbatch_args, os.path.join(path, "slurm-%j.out")) if node._hierarchy: jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: jobname = ".".join((dict(os.environ)["LOGNAME"], node._id)) jobnameitems = jobname.split(".") jobnameitems.reverse() jobname = ".".join(jobnameitems) cmd.inputs.args = "%s -J %s %s" % (sbatch_args, jobname, scriptfile) oldlevel = iflogger.level iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 while True: try: result = cmd.run() except Exception as e: if tries < self._max_tries: tries += 1 # sleep 2 seconds and try again. 
sleep(self._retry_timeout) else: iflogger.setLevel(oldlevel) raise RuntimeError( "\n".join( ( ("Could not submit sbatch task" " for node %s") % node._id, str(e), ) ) ) else: break logger.debug("Ran command ({0})".format(cmd.cmdline)) iflogger.setLevel(oldlevel) # retrieve taskid lines = [line for line in result.runtime.stdout.split("\n") if line] taskid = int(re.match(self._jobid_re, lines[-1]).groups()[0]) self._pending[taskid] = node.output_dir() logger.debug("submitted sbatch task: %d for node %s" % (taskid, node._id)) return taskid nipype-1.7.0/nipype/pipeline/plugins/slurmgraph.py000066400000000000000000000164651413403311400223610ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Parallel workflow execution via SLURM """ import os import sys from ...interfaces.base import CommandLine from .base import GraphPluginBase, logger def node_completed_status(checknode): """ A function to determine if a node has previously completed it's work :param checknode: The node to check the run status :return: boolean value True indicates that the node does not need to be run. """ """ TODO: place this in the base.py file and refactor """ node_state_does_not_require_overwrite = checknode.overwrite is False or ( checknode.overwrite is None and not checknode._interface.always_run ) hash_exists = False try: hash_exists, _, _, _ = checknode.hash_exists() except Exception: hash_exists = False return hash_exists and node_state_does_not_require_overwrite class SLURMGraphPlugin(GraphPluginBase): """Execute using SLURM The plugin_args input to run can be used to control the SGE execution. 
Currently supported options are: - template : template to use for batch job submission - qsub_args : arguments to be prepended to the job execution script in the qsub call """ _template = "#!/bin/bash" def __init__(self, **kwargs): self._sbatch_args = "" if "plugin_args" in kwargs and kwargs["plugin_args"]: if "retry_timeout" in kwargs["plugin_args"]: self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] if "max_tries" in kwargs["plugin_args"]: self._max_tries = kwargs["plugin_args"]["max_tries"] if "template" in kwargs["plugin_args"]: self._template = kwargs["plugin_args"]["template"] if os.path.isfile(self._template): self._template = open(self._template).read() if "sbatch_args" in kwargs["plugin_args"]: self._sbatch_args = kwargs["plugin_args"]["sbatch_args"] if "dont_resubmit_completed_jobs" in kwargs["plugin_args"]: self._dont_resubmit_completed_jobs = kwargs["plugin_args"][ "dont_resubmit_completed_jobs" ] else: self._dont_resubmit_completed_jobs = False super(SLURMGraphPlugin, self).__init__(**kwargs) def _submit_graph(self, pyfiles, dependencies, nodes): def make_job_name(jobnumber, nodeslist): """ - jobnumber: The index number of the job to create - nodeslist: The name of the node being processed - return: A string representing this job to be displayed by SLURM """ job_name = "j{0}_{1}".format(jobnumber, nodeslist[jobnumber]._id) # Condition job_name to be a valid bash identifier (i.e. 
- is invalid) job_name = job_name.replace("-", "_").replace(".", "_").replace(":", "_") return job_name batch_dir, _ = os.path.split(pyfiles[0]) submitjobsfile = os.path.join(batch_dir, "submit_jobs.sh") cache_doneness_per_node = dict() if ( self._dont_resubmit_completed_jobs ): # A future parameter for controlling this behavior could be added here for idx, pyscript in enumerate(pyfiles): node = nodes[idx] node_status_done = node_completed_status(node) # if the node itself claims done, then check to ensure all # dependancies are also done if node_status_done and idx in dependencies: for child_idx in dependencies[idx]: if child_idx in cache_doneness_per_node: child_status_done = cache_doneness_per_node[child_idx] else: child_status_done = node_completed_status(nodes[child_idx]) node_status_done = node_status_done and child_status_done cache_doneness_per_node[idx] = node_status_done with open(submitjobsfile, "wt") as fp: fp.writelines("#!/usr/bin/env bash\n") fp.writelines("# Condense format attempted\n") for idx, pyscript in enumerate(pyfiles): node = nodes[idx] if cache_doneness_per_node.get(idx, False): continue else: template, sbatch_args = self._get_args( node, ["template", "sbatch_args"] ) batch_dir, name = os.path.split(pyscript) name = ".".join(name.split(".")[:-1]) batchscript = "\n".join( (template, "%s %s" % (sys.executable, pyscript)) ) batchscriptfile = os.path.join( batch_dir, "batchscript_%s.sh" % name ) batchscriptoutfile = batchscriptfile + ".o" batchscripterrfile = batchscriptfile + ".e" with open(batchscriptfile, "wt") as batchfp: batchfp.writelines(batchscript) batchfp.close() deps = "" if idx in dependencies: values = "" for jobid in dependencies[idx]: # Avoid dependancies of done jobs if ( not self._dont_resubmit_completed_jobs or not cache_doneness_per_node[jobid] ): values += "${{{0}}}:".format( make_job_name(jobid, nodes) ) if ( values != "" ): # i.e. 
if some jobs were added to dependency list values = values.rstrip(":") deps = "--dependency=afterok:%s" % values jobname = make_job_name(idx, nodes) # Do not use default output locations if they are set in self._sbatch_args stderrFile = "" if self._sbatch_args.count("-e ") == 0: stderrFile = "-e {errFile}".format(errFile=batchscripterrfile) stdoutFile = "" if self._sbatch_args.count("-o ") == 0: stdoutFile = "-o {outFile}".format(outFile=batchscriptoutfile) full_line = "{jobNm}=$(sbatch {outFileOption} {errFileOption} {extraSBatchArgs} {dependantIndex} -J {jobNm} {batchscript} | awk '/^Submitted/ {{print $4}}')\n".format( jobNm=jobname, outFileOption=stdoutFile, errFileOption=stderrFile, extraSBatchArgs=sbatch_args, dependantIndex=deps, batchscript=batchscriptfile, ) fp.writelines(full_line) cmd = CommandLine( "bash", environ=dict(os.environ), resource_monitor=False, terminal_output="allatonce", ) cmd.inputs.args = "%s" % submitjobsfile cmd.run() logger.info("submitted all jobs to queue") nipype-1.7.0/nipype/pipeline/plugins/somaflow.py000066400000000000000000000024231413403311400220110ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Parallel workflow execution via PBS/Torque """ import os import sys from .base import GraphPluginBase, logger soma_not_loaded = False try: from soma.workflow.client import Job, Workflow, WorkflowController, Helper except: soma_not_loaded = True class SomaFlowPlugin(GraphPluginBase): """Execute using Soma workflow""" def __init__(self, plugin_args=None): if soma_not_loaded: raise ImportError("SomaFlow could not be imported") super(SomaFlowPlugin, self).__init__(plugin_args=plugin_args) def _submit_graph(self, pyfiles, dependencies, nodes): jobs = [] soma_deps = [] for idx, fname in enumerate(pyfiles): name = os.path.splitext(os.path.split(fname)[1])[0] jobs.append(Job(command=[sys.executable, fname], name=name)) for key, values in list(dependencies.items()): for val in values: soma_deps.append((jobs[val], jobs[key])) wf = 
Workflow(jobs, soma_deps) logger.info("serializing workflow") Helper.serialize("workflow", wf) controller = WorkflowController() logger.info("submitting workflow") wf_id = controller.submit_workflow(wf) Helper.wait_workflow(wf_id, controller) nipype-1.7.0/nipype/pipeline/plugins/tests/000077500000000000000000000000001413403311400207515ustar00rootroot00000000000000nipype-1.7.0/nipype/pipeline/plugins/tests/__init__.py000066400000000000000000000002121413403311400230550ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: nipype-1.7.0/nipype/pipeline/plugins/tests/test_base.py000066400000000000000000000020221413403311400232700ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for the engine module """ import numpy as np import scipy.sparse as ssp def test_scipy_sparse(): foo = ssp.lil_matrix(np.eye(3, k=1)) goo = foo.getrowview(0) goo[goo.nonzero()] = 0 assert foo[0, 1] == 0 """ Can use the following code to test that a mapnode crash continues successfully Need to put this into a unit-test with a timeout import nipype.interfaces.utility as niu import nipype.pipeline.engine as pe wf = pe.Workflow(name='test') def func(arg1): if arg1 == 2: raise Exception('arg cannot be ' + str(arg1)) return arg1 funkynode = pe.MapNode(niu.Function(function=func, input_names=['arg1'], output_names=['out']), iterfield=['arg1'], name = 'functor') funkynode.inputs.arg1 = [1,2] wf.add_nodes([funkynode]) wf.base_dir = '/tmp' wf.run(plugin='MultiProc') """ nipype-1.7.0/nipype/pipeline/plugins/tests/test_callback.py000066400000000000000000000036611413403311400241240ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for workflow callbacks 
""" from time import sleep import pytest import nipype.interfaces.utility as niu import nipype.pipeline.engine as pe def func(): return def bad_func(): raise Exception class Status(object): def __init__(self): self.statuses = [] def callback(self, node, status, result=None): self.statuses.append((node.name, status)) @pytest.mark.parametrize("plugin", ["Linear", "MultiProc", "LegacyMultiProc"]) def test_callback_normal(tmpdir, plugin): tmpdir.chdir() so = Status() wf = pe.Workflow(name="test", base_dir=tmpdir.strpath) f_node = pe.Node( niu.Function(function=func, input_names=[], output_names=[]), name="f_node" ) wf.add_nodes([f_node]) wf.config["execution"] = {"crashdump_dir": wf.base_dir, "poll_sleep_duration": 2} wf.run(plugin=plugin, plugin_args={"status_callback": so.callback}) assert so.statuses == [("f_node", "start"), ("f_node", "end")] @pytest.mark.parametrize("plugin", ["Linear", "MultiProc", "LegacyMultiProc"]) @pytest.mark.parametrize("stop_on_first_crash", [False, True]) def test_callback_exception(tmpdir, plugin, stop_on_first_crash): tmpdir.chdir() so = Status() wf = pe.Workflow(name="test", base_dir=tmpdir.strpath) f_node = pe.Node( niu.Function(function=bad_func, input_names=[], output_names=[]), name="f_node" ) wf.add_nodes([f_node]) wf.config["execution"] = { "crashdump_dir": wf.base_dir, "stop_on_first_crash": stop_on_first_crash, "poll_sleep_duration": 2, } with pytest.raises(Exception): wf.run(plugin=plugin, plugin_args={"status_callback": so.callback}) sleep(0.5) # Wait for callback to be called (python 2.7) assert so.statuses == [("f_node", "start"), ("f_node", "exception")] nipype-1.7.0/nipype/pipeline/plugins/tests/test_debug.py000066400000000000000000000025151413403311400234530ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os import nipype.interfaces.base as nib import pytest import nipype.pipeline.engine as pe class InputSpec(nib.TraitedSpec): input1 = nib.traits.Int(desc="a random int") input2 = nib.traits.Int(desc="a random 
int") class OutputSpec(nib.TraitedSpec): output1 = nib.traits.List(nib.traits.Int, desc="outputs") class DebugTestInterface(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec def _run_interface(self, runtime): runtime.returncode = 0 return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["output1"] = [1, self.inputs.input1] return outputs def callme(node, graph): pass def test_debug(tmpdir): tmpdir.chdir() pipe = pe.Workflow(name="pipe") mod1 = pe.Node(DebugTestInterface(), name="mod1") mod2 = pe.MapNode(DebugTestInterface(), iterfield=["input1"], name="mod2") pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 run_wf = lambda: pipe.run(plugin="Debug") with pytest.raises(ValueError): run_wf() exc = None try: pipe.run(plugin="Debug", plugin_args={"callable": callme}) except Exception as e: exc = e assert exc is None, "unexpected exception caught" nipype-1.7.0/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py000066400000000000000000000105241413403311400276250ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Testing module for functions and classes from multiproc.py """ # Import packages import os import sys from tempfile import mkdtemp from shutil import rmtree import pytest import nipype.pipeline.engine as pe from nipype.interfaces.utility import Function def mytestFunction(insum=0): """ Run a multiprocessing job and spawn child processes. """ # need to import here since this is executed as an external process import multiprocessing import os import tempfile import time numberOfThreads = 2 # list of processes t = [None] * numberOfThreads # list of alive flags a = [None] * numberOfThreads # list of tempFiles f = [None] * numberOfThreads def dummyFunction(filename): """ This function writes the value 45 to the given filename. 
""" j = 0 for i in range(0, 10): j += i # j is now 45 (0+1+2+3+4+5+6+7+8+9) with open(filename, "w") as f: f.write(str(j)) for n in range(numberOfThreads): # mark thread as alive a[n] = True # create a temp file to use as the data exchange container tmpFile = tempfile.mkstemp(".txt", "test_engine_")[1] f[n] = tmpFile # keep track of the temp file t[n] = multiprocessing.Process(target=dummyFunction, args=(tmpFile,)) # fire up the job t[n].start() # block until all processes are done allDone = False while not allDone: time.sleep(1) for n in range(numberOfThreads): a[n] = t[n].is_alive() if not any(a): # if no thread is alive allDone = True # here, all processes are done # read in all temp files and sum them up total = insum for ff in f: with open(ff) as fd: total += int(fd.read()) os.remove(ff) return total def run_multiproc_nondaemon_with_flag(nondaemon_flag): """ Start a pipe with two nodes using the resource multiproc plugin and passing the nondaemon_flag. """ cur_dir = os.getcwd() temp_dir = mkdtemp(prefix="test_engine_") os.chdir(temp_dir) pipe = pe.Workflow(name="pipe") f1 = pe.Node( interface=Function( function=mytestFunction, input_names=["insum"], output_names=["sum_out"] ), name="f1", ) f2 = pe.Node( interface=Function( function=mytestFunction, input_names=["insum"], output_names=["sum_out"] ), name="f2", ) pipe.connect([(f1, f2, [("sum_out", "insum")])]) pipe.base_dir = os.getcwd() f1.inputs.insum = 0 pipe.config["execution"]["stop_on_first_crash"] = True # execute the pipe using the LegacyMultiProc plugin with 2 processes and the # non_daemon flag to enable child processes which start other # multiprocessing jobs execgraph = pipe.run( plugin="LegacyMultiProc", plugin_args={"n_procs": 2, "non_daemon": nondaemon_flag}, ) names = [".".join((node._hierarchy, node.name)) for node in execgraph.nodes()] node = list(execgraph.nodes())[names.index("pipe.f2")] result = node.get_output("sum_out") os.chdir(cur_dir) rmtree(temp_dir) return result @pytest.mark.skipif( 
sys.version_info >= (3, 8), reason="multiprocessing issues in Python 3.8" ) def test_run_multiproc_nondaemon_false(): """ This is the entry point for the test. Two times a pipe of several multiprocessing jobs gets executed. First, without the nondaemon flag. Second, with the nondaemon flag. Since the processes of the pipe start child processes, the execution only succeeds when the non_daemon flag is on. """ shouldHaveFailed = False try: # with nondaemon_flag = False, the execution should fail run_multiproc_nondaemon_with_flag(False) except: shouldHaveFailed = True assert shouldHaveFailed @pytest.mark.skipif( sys.version_info >= (3, 8), reason="multiprocessing issues in Python 3.8" ) def test_run_multiproc_nondaemon_true(): # with nondaemon_flag = True, the execution should succeed result = run_multiproc_nondaemon_with_flag(True) assert result == 180 # n_procs (2) * numberOfThreads (2) * 45 == 180 nipype-1.7.0/nipype/pipeline/plugins/tests/test_linear.py000066400000000000000000000024431413403311400236370ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os import nipype.interfaces.base as nib import nipype.pipeline.engine as pe class InputSpec(nib.TraitedSpec): input1 = nib.traits.Int(desc="a random int") input2 = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): output1 = nib.traits.List(nib.traits.Int, desc="outputs") class LinearTestInterface(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec def _run_interface(self, runtime): runtime.returncode = 0 return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["output1"] = [1, self.inputs.input1] return outputs def test_run_in_series(tmpdir): tmpdir.chdir() pipe = pe.Workflow(name="pipe") mod1 = pe.Node(interface=LinearTestInterface(), name="mod1") mod2 = pe.MapNode( interface=LinearTestInterface(), iterfield=["input1"], name="mod2" ) pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 execgraph = 
pipe.run(plugin="Linear") names = [".".join((node._hierarchy, node.name)) for node in execgraph.nodes()] node = list(execgraph.nodes())[names.index("pipe.mod1")] result = node.get_output("output1") assert result == [1, 1] nipype-1.7.0/nipype/pipeline/plugins/tests/test_multiproc.py000066400000000000000000000105571413403311400244100ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Test the resource management of MultiProc """ import sys import os import pytest from nipype.pipeline import engine as pe from nipype.interfaces import base as nib class InputSpec(nib.TraitedSpec): input1 = nib.traits.Int(desc="a random int") input2 = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): output1 = nib.traits.List(nib.traits.Int, desc="outputs") class MultiprocTestInterface(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec def _run_interface(self, runtime): runtime.returncode = 0 return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["output1"] = [1, self.inputs.input1] return outputs @pytest.mark.skipif( sys.version_info >= (3, 8), reason="multiprocessing issues in Python 3.8" ) def test_run_multiproc(tmpdir): tmpdir.chdir() pipe = pe.Workflow(name="pipe") mod1 = pe.Node(MultiprocTestInterface(), name="mod1") mod2 = pe.MapNode(MultiprocTestInterface(), iterfield=["input1"], name="mod2") pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 pipe.config["execution"]["poll_sleep_duration"] = 2 execgraph = pipe.run(plugin="MultiProc") names = [node.fullname for node in execgraph.nodes()] node = list(execgraph.nodes())[names.index("pipe.mod1")] result = node.get_output("output1") assert result == [1, 1] class InputSpecSingleNode(nib.TraitedSpec): input1 = nib.traits.Int(desc="a random int") input2 = nib.traits.Int(desc="a random int") class 
OutputSpecSingleNode(nib.TraitedSpec): output1 = nib.traits.Int(desc="a random int") class SingleNodeTestInterface(nib.BaseInterface): input_spec = InputSpecSingleNode output_spec = OutputSpecSingleNode def _run_interface(self, runtime): runtime.returncode = 0 return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["output1"] = self.inputs.input1 return outputs def test_no_more_memory_than_specified(tmpdir): tmpdir.chdir() pipe = pe.Workflow(name="pipe") n1 = pe.Node(SingleNodeTestInterface(), name="n1", mem_gb=1) n2 = pe.Node(SingleNodeTestInterface(), name="n2", mem_gb=1) n3 = pe.Node(SingleNodeTestInterface(), name="n3", mem_gb=1) n4 = pe.Node(SingleNodeTestInterface(), name="n4", mem_gb=1) pipe.connect(n1, "output1", n2, "input1") pipe.connect(n1, "output1", n3, "input1") pipe.connect(n2, "output1", n4, "input1") pipe.connect(n3, "output1", n4, "input2") n1.inputs.input1 = 1 max_memory = 0.5 with pytest.raises(RuntimeError): pipe.run( plugin="MultiProc", plugin_args={"memory_gb": max_memory, "n_procs": 2} ) def test_no_more_threads_than_specified(tmpdir): tmpdir.chdir() pipe = pe.Workflow(name="pipe") n1 = pe.Node(SingleNodeTestInterface(), name="n1", n_procs=2) n2 = pe.Node(SingleNodeTestInterface(), name="n2", n_procs=2) n3 = pe.Node(SingleNodeTestInterface(), name="n3", n_procs=4) n4 = pe.Node(SingleNodeTestInterface(), name="n4", n_procs=2) pipe.connect(n1, "output1", n2, "input1") pipe.connect(n1, "output1", n3, "input1") pipe.connect(n2, "output1", n4, "input1") pipe.connect(n3, "output1", n4, "input2") n1.inputs.input1 = 4 max_threads = 2 with pytest.raises(RuntimeError): pipe.run(plugin="MultiProc", plugin_args={"n_procs": max_threads}) @pytest.mark.skipif( sys.version_info >= (3, 8), reason="multiprocessing issues in Python 3.8" ) def test_hold_job_until_procs_available(tmpdir): tmpdir.chdir() pipe = pe.Workflow(name="pipe") n1 = pe.Node(SingleNodeTestInterface(), name="n1", n_procs=2) n2 = pe.Node(SingleNodeTestInterface(), 
name="n2", n_procs=2) n3 = pe.Node(SingleNodeTestInterface(), name="n3", n_procs=2) n4 = pe.Node(SingleNodeTestInterface(), name="n4", n_procs=2) pipe.connect(n1, "output1", n2, "input1") pipe.connect(n1, "output1", n3, "input1") pipe.connect(n2, "output1", n4, "input1") pipe.connect(n3, "output1", n4, "input2") n1.inputs.input1 = 4 max_threads = 2 pipe.run(plugin="MultiProc", plugin_args={"n_procs": max_threads}) nipype-1.7.0/nipype/pipeline/plugins/tests/test_oar.py000066400000000000000000000026151413403311400231470ustar00rootroot00000000000000# -*- coding: utf-8 -*- from shutil import which import nipype.interfaces.base as nib import pytest import nipype.pipeline.engine as pe class InputSpec(nib.TraitedSpec): input1 = nib.traits.Int(desc="a random int") input2 = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): output1 = nib.traits.List(nib.traits.Int, desc="outputs") class OarTestInterface(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec def _run_interface(self, runtime): runtime.returncode = 0 return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["output1"] = [1, self.inputs.input1] return outputs @pytest.mark.skipif(which("oarsub") is None, reason="OAR not installed") @pytest.mark.timeout(60) def test_run_oargraph(tmp_path): pipe = pe.Workflow(name="pipe", base_dir=str(tmp_path)) mod1 = pe.Node(interface=OarTestInterface(), name="mod1") mod2 = pe.MapNode(interface=OarTestInterface(), iterfield=["input1"], name="mod2") pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 execgraph = pipe.run(plugin="OAR") names = [".".join((node._hierarchy, node.name)) for node in execgraph.nodes()] node = list(execgraph.nodes())[names.index("pipe.mod1")] result = node.get_output("output1") assert result == [1, 1] nipype-1.7.0/nipype/pipeline/plugins/tests/test_pbs.py000066400000000000000000000025601413403311400231510ustar00rootroot00000000000000# -*- coding: 
utf-8 -*- from shutil import which import nipype.interfaces.base as nib import pytest import nipype.pipeline.engine as pe class InputSpec(nib.TraitedSpec): input1 = nib.traits.Int(desc="a random int") input2 = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): output1 = nib.traits.List(nib.traits.Int, desc="outputs") class PbsTestInterface(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec def _run_interface(self, runtime): runtime.returncode = 0 return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["output1"] = [1, self.inputs.input1] return outputs @pytest.mark.skipif(which("qsub") is None, reason="PBS not installed") @pytest.mark.timeout(60) def test_run_pbsgraph(tmp_path): pipe = pe.Workflow(name="pipe", base_dir=str(tmp_path)) mod1 = pe.Node(interface=PbsTestInterface(), name="mod1") mod2 = pe.MapNode(interface=PbsTestInterface(), iterfield=["input1"], name="mod2") pipe.connect([(mod1, mod2, [("output1", "input1")])]) mod1.inputs.input1 = 1 execgraph = pipe.run(plugin="PBSGraph") names = [".".join((node._hierarchy, node.name)) for node in execgraph.nodes()] node = list(execgraph.nodes())[names.index("pipe.mod1")] result = node.get_output("output1") assert result == [1, 1] nipype-1.7.0/nipype/pipeline/plugins/tests/test_sgelike.py000066400000000000000000000021561413403311400240110ustar00rootroot00000000000000from nipype.pipeline.plugins.base import SGELikeBatchManagerBase from nipype.interfaces.utility import Function import nipype.pipeline.engine as pe import pytest from unittest.mock import patch import subprocess def crasher(): raise ValueError() def submit_batchtask(self, scriptfile, node): self._pending[1] = node.output_dir() subprocess.call(["bash", scriptfile]) return 1 def is_pending(self, taskid): return False @patch.object(SGELikeBatchManagerBase, "_submit_batchtask", new=submit_batchtask) @patch.object(SGELikeBatchManagerBase, "_is_pending", new=is_pending) def 
test_crashfile_creation(tmp_path): pipe = pe.Workflow(name="pipe", base_dir=str(tmp_path)) pipe.config["execution"]["crashdump_dir"] = str(tmp_path) pipe.add_nodes([pe.Node(interface=Function(function=crasher), name="crasher")]) sgelike_plugin = SGELikeBatchManagerBase("") with pytest.raises(RuntimeError) as e: assert pipe.run(plugin=sgelike_plugin) crashfiles = list(tmp_path.glob("crash*crasher*.pklz")) + list( tmp_path.glob("crash*crasher*.txt") ) assert len(crashfiles) == 1 nipype-1.7.0/nipype/pipeline/plugins/tests/test_somaflow.py000066400000000000000000000026621413403311400242170ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os from time import sleep import nipype.interfaces.base as nib import pytest import nipype.pipeline.engine as pe from nipype.pipeline.plugins.somaflow import soma_not_loaded class InputSpec(nib.TraitedSpec): input1 = nib.traits.Int(desc="a random int") input2 = nib.traits.Int(desc="a random int") class OutputSpec(nib.TraitedSpec): output1 = nib.traits.List(nib.traits.Int, desc="outputs") class SomaTestInterface(nib.BaseInterface): input_spec = InputSpec output_spec = OutputSpec def _run_interface(self, runtime): runtime.returncode = 0 return runtime def _list_outputs(self): outputs = self._outputs().get() outputs["output1"] = [1, self.inputs.input1] return outputs @pytest.mark.skipif(soma_not_loaded, reason="soma not loaded") def test_run_somaflow(tmpdir): tmpdir.chdir() pipe = pe.Workflow(name="pipe") mod1 = pe.Node(interface=SomaTestInterface(), name="mod1") mod2 = pe.MapNode(interface=SomaTestInterface(), iterfield=["input1"], name="mod2") pipe.connect([(mod1, mod2, [("output1", "input1")])]) pipe.base_dir = os.getcwd() mod1.inputs.input1 = 1 execgraph = pipe.run(plugin="SomaFlow") names = [".".join((node._hierarchy, node.name)) for node in execgraph.nodes()] node = list(execgraph.nodes())[names.index("pipe.mod1")] result = node.get_output("output1") assert result == [1, 1] 
nipype-1.7.0/nipype/pipeline/plugins/tests/test_tools.py000066400000000000000000000034311413403311400235230ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for the engine module """ import numpy as np import scipy.sparse as ssp import re from unittest import mock from nipype.pipeline.plugins.tools import report_crash def test_report_crash(): with mock.patch("pickle.dump", mock.MagicMock()) as mock_pickle_dump: with mock.patch( "nipype.pipeline.plugins.tools.format_exception", mock.MagicMock() ): # see iss 1517 mock_pickle_dump.return_value = True mock_node = mock.MagicMock(name="mock_node") mock_node._id = "an_id" mock_node.config = { "execution": {"crashdump_dir": ".", "crashfile_format": "pklz"} } actual_crashfile = report_crash(mock_node) expected_crashfile = re.compile(r".*/crash-.*-an_id-[0-9a-f\-]*.pklz") assert ( expected_crashfile.match(actual_crashfile).group() == actual_crashfile ) assert mock_pickle_dump.call_count == 1 """ Can use the following code to test that a mapnode crash continues successfully Need to put this into a unit-test with a timeout import nipype.interfaces.utility as niu import nipype.pipeline.engine as pe wf = pe.Workflow(name='test') def func(arg1): if arg1 == 2: raise Exception('arg cannot be ' + str(arg1)) return arg1 funkynode = pe.MapNode(niu.Function(function=func, input_names=['arg1'], output_names=['out']), iterfield=['arg1'], name = 'functor') funkynode.inputs.arg1 = [1,2] wf.add_nodes([funkynode]) wf.base_dir = '/tmp' wf.run(plugin='MultiProc') """ nipype-1.7.0/nipype/pipeline/plugins/tools.py000066400000000000000000000137441413403311400213320ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Common graph operations for execution """ import os import getpass from socket import 
gethostname import sys import uuid from time import strftime from traceback import format_exception from ... import logging from ...utils.filemanip import savepkl, crash2txt logger = logging.getLogger("nipype.workflow") def report_crash(node, traceback=None, hostname=None): """Writes crash related information to a file""" name = node._id host = None traceback = traceback or format_exception(*sys.exc_info()) try: result = node.result except FileNotFoundError: traceback += """ When creating this crashfile, the results file corresponding to the node could not be found.""".splitlines( keepends=True ) except Exception as exc: traceback += """ During the creation of this crashfile triggered by the above exception, another exception occurred:\n\n{}.""".format( exc ).splitlines( keepends=True ) else: if getattr(result, "runtime", None): if isinstance(result.runtime, list): host = result.runtime[0].hostname else: host = result.runtime.hostname # Try everything to fill in the host host = host or hostname or gethostname() logger.error("Node %s failed to run on host %s.", name, host) timeofcrash = strftime("%Y%m%d-%H%M%S") try: login_name = getpass.getuser() except KeyError: login_name = "UID{:d}".format(os.getuid()) crashfile = "crash-%s-%s-%s-%s" % (timeofcrash, login_name, name, str(uuid.uuid4())) crashdir = node.config["execution"].get("crashdump_dir", os.getcwd()) os.makedirs(crashdir, exist_ok=True) crashfile = os.path.join(crashdir, crashfile) if node.config["execution"]["crashfile_format"].lower() in ("text", "txt", ".txt"): crashfile += ".txt" else: crashfile += ".pklz" logger.error("Saving crash info to %s\n%s", crashfile, "".join(traceback)) if crashfile.endswith(".txt"): crash2txt(crashfile, dict(node=node, traceback=traceback)) else: savepkl(crashfile, dict(node=node, traceback=traceback), versioning=True) return crashfile def report_nodes_not_run(notrun): """List nodes that crashed with crashfile info Optionally displays dependent nodes that weren't executed as a 
result of the crash. """ if notrun: logger.info("***********************************") for info in notrun: logger.error( "could not run node: %s" % ".".join((info["node"]._hierarchy, info["node"]._id)) ) logger.info("crashfile: %s" % info["crashfile"]) logger.debug("The following dependent nodes were not run") for subnode in info["dependents"]: logger.debug(subnode._id) logger.info("***********************************") def create_pyscript(node, updatehash=False, store_exception=True): # pickle node timestamp = strftime("%Y%m%d_%H%M%S") if node._hierarchy: suffix = "%s_%s_%s" % (timestamp, node._hierarchy, node._id) batch_dir = os.path.join(node.base_dir, node._hierarchy.split(".")[0], "batch") else: suffix = "%s_%s" % (timestamp, node._id) batch_dir = os.path.join(node.base_dir, "batch") if not os.path.exists(batch_dir): os.makedirs(batch_dir) pkl_file = os.path.join(batch_dir, "node_%s.pklz" % suffix) savepkl(pkl_file, dict(node=node, updatehash=updatehash)) mpl_backend = node.config["execution"]["matplotlib_backend"] # create python script to load and trap exception cmdstr = """import os import sys can_import_matplotlib = True #Silently allow matplotlib to be ignored try: import matplotlib matplotlib.use('%s') except ImportError: can_import_matplotlib = False pass import os value = os.environ.get('NIPYPE_NO_ET', None) if value is None: # disable ET for any submitted job os.environ['NIPYPE_NO_ET'] = "1" from nipype import config, logging from nipype.utils.filemanip import loadpkl, savepkl from socket import gethostname from traceback import format_exception info = None pklfile = '%s' batchdir = '%s' from nipype.utils.filemanip import loadpkl, savepkl try: from collections import OrderedDict config_dict=%s config.update_config(config_dict) ## Only configure matplotlib if it was successfully imported, ## matplotlib is an optional component to nipype if can_import_matplotlib: config.update_matplotlib() logging.update_logging(config) traceback=None cwd = os.getcwd() 
info = loadpkl(pklfile) result = info['node'].run(updatehash=info['updatehash']) except Exception as e: etype, eval, etr = sys.exc_info() traceback = format_exception(etype,eval,etr) if info is None or not os.path.exists(info['node'].output_dir()): result = None resultsfile = os.path.join(batchdir, 'crashdump_%s.pklz') else: result = info['node'].result resultsfile = os.path.join(info['node'].output_dir(), 'result_%%s.pklz'%%info['node'].name) """ if store_exception: cmdstr += """ savepkl(resultsfile, dict(result=result, hostname=gethostname(), traceback=traceback)) """ else: cmdstr += """ if info is None: savepkl(resultsfile, dict(result=result, hostname=gethostname(), traceback=traceback)) else: from nipype.pipeline.plugins.base import report_crash report_crash(info['node'], traceback, gethostname()) raise Exception(e) """ cmdstr = cmdstr % (mpl_backend, pkl_file, batch_dir, node.config, suffix) pyscript = os.path.join(batch_dir, "pyscript_%s.py" % suffix) with open(pyscript, "wt") as fp: fp.writelines(cmdstr) return pyscript nipype-1.7.0/nipype/pkg_info.py000066400000000000000000000060751413403311400164770ustar00rootroot00000000000000# -*- coding: utf-8 -*- import configparser import os import sys import subprocess COMMIT_INFO_FNAME = "COMMIT_INFO.txt" def pkg_commit_hash(pkg_path): """Get short form of commit hash given directory `pkg_path` There should be a file called 'COMMIT_INFO.txt' in `pkg_path`. This is a file in INI file format, with at least one section: ``commit hash`` and two variables ``archive_subst_hash`` and ``install_hash``. The first has a substitution pattern in it which may have been filled by the execution of ``git archive`` if this is an archive generated that way. The second is filled in by the installation, if the installation is from a git archive. 
We get the commit hash from (in order of preference): * A substituted value in ``archive_subst_hash`` * A written commit hash value in ``install_hash` * git's output, if we are in a git repository If all these fail, we return a not-found placeholder tuple Parameters ---------- pkg_path : str directory containing package Returns ------- hash_from : str Where we got the hash from - description hash_str : str short form of hash """ # Try and get commit from written commit text file pth = os.path.join(pkg_path, COMMIT_INFO_FNAME) if not os.path.isfile(pth): raise IOError("Missing commit info file %s" % pth) cfg_parser = configparser.RawConfigParser() with open(pth, encoding="utf-8") as fp: cfg_parser.read_file(fp) archive_subst = cfg_parser.get("commit hash", "archive_subst_hash") if not archive_subst.startswith("$Format"): # it has been substituted return "archive substitution", archive_subst install_subst = cfg_parser.get("commit hash", "install_hash") if install_subst != "": return "installation", install_subst # maybe we are in a repository proc = subprocess.Popen( "git rev-parse --short HEAD", stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=pkg_path, shell=True, ) repo_commit, _ = proc.communicate() if repo_commit: return "repository", repo_commit.decode().strip() return "(none found)", "" def get_pkg_info(pkg_path): """Return dict describing the context of this package Parameters ---------- pkg_path : str path containing __init__.py for package Returns ------- context : dict with named parameters of interest """ src, hsh = pkg_commit_hash(pkg_path) from .info import VERSION import networkx import nibabel import numpy import scipy import traits return dict( pkg_path=pkg_path, commit_source=src, commit_hash=hsh, nipype_version=VERSION, sys_version=sys.version, sys_executable=sys.executable, sys_platform=sys.platform, numpy_version=numpy.__version__, scipy_version=scipy.__version__, networkx_version=networkx.__version__, nibabel_version=nibabel.__version__, 
traits_version=traits.__version__, ) nipype-1.7.0/nipype/pytest.ini000066400000000000000000000003241413403311400163510ustar00rootroot00000000000000[pytest] norecursedirs = .git build dist doc nipype/external tools examples src addopts = --doctest-modules doctest_optionflags = ALLOW_UNICODE NORMALIZE_WHITESPACE env = PYTHONHASHSEED=0 junit_family=xunit2 nipype-1.7.0/nipype/refs.py000066400000000000000000000012521413403311400156320ustar00rootroot00000000000000# Use duecredit (duecredit.org) to provide a citation to relevant work to # be cited. This does nothing, unless the user has duecredit installed, # And calls this with duecredit (as in `python -m duecredit script.py`): from .external.due import due, Doi, BibTeX due.cite( Doi("10.3389/fninf.2011.00013"), description="A flexible, lightweight and extensible neuroimaging data" " processing framework in Python", path="nipype", tags=["implementation"], ) due.cite( Doi("10.5281/zenodo.50186"), description="A flexible, lightweight and extensible neuroimaging data" " processing framework in Python", path="nipype", tags=["implementation"], ) nipype-1.7.0/nipype/scripts/000077500000000000000000000000001413403311400160105ustar00rootroot00000000000000nipype-1.7.0/nipype/scripts/__init__.py000066400000000000000000000000001413403311400201070ustar00rootroot00000000000000nipype-1.7.0/nipype/scripts/cli.py000066400000000000000000000165601413403311400171410ustar00rootroot00000000000000#!python # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import click from .instance import list_interfaces from .utils import ( CONTEXT_SETTINGS, UNKNOWN_OPTIONS, ExistingDirPath, ExistingFilePath, UnexistingFilePath, RegularExpression, PythonModule, check_not_none, ) from .. 
import __version__ # declare the CLI group @click.group(context_settings=CONTEXT_SETTINGS) def cli(): pass @cli.command(context_settings=CONTEXT_SETTINGS) @click.argument("logdir", type=ExistingDirPath, callback=check_not_none) @click.option( "-r", "--regex", type=RegularExpression(), callback=check_not_none, help="Regular expression to be searched in each traceback.", ) def search(logdir, regex): """Search for tracebacks content. Search for traceback inside a folder of nipype crash log files that match a given regular expression. Examples:\n nipypecli search nipype/wd/log -r '.*subject123.*' """ from .crash_files import iter_tracebacks for file, trace in iter_tracebacks(logdir): if regex.search(trace): click.echo("-" * len(file)) click.echo(file) click.echo("-" * len(file)) click.echo(trace) @cli.command(context_settings=CONTEXT_SETTINGS) @click.argument("crashfile", type=ExistingFilePath, callback=check_not_none) @click.option( "-r", "--rerun", is_flag=True, flag_value=True, help="Rerun crashed node." ) @click.option( "-d", "--debug", is_flag=True, flag_value=True, help="Enable Python debugger when re-executing.", ) @click.option( "-i", "--ipydebug", is_flag=True, flag_value=True, help="Enable IPython debugger when re-executing.", ) @click.option( "-w", "--dir", type=ExistingDirPath, help="Directory where to run the node in." ) def crash(crashfile, rerun, debug, ipydebug, dir): """Display Nipype crash files. For certain crash files, one can rerun a failed node in a temp directory. 
Examples:\n nipypecli crash crashfile.pklz\n nipypecli crash crashfile.pklz -r -i\n """ from .crash_files import display_crash_file debug = "ipython" if ipydebug else debug if debug == "ipython": import sys from IPython.core import ultratb sys.excepthook = ultratb.FormattedTB( mode="Verbose", color_scheme="Linux", call_pdb=1 ) display_crash_file(crashfile, rerun, debug, dir) @cli.command(context_settings=CONTEXT_SETTINGS) @click.argument("pklz_file", type=ExistingFilePath, callback=check_not_none) def show(pklz_file): """Print the content of Nipype node .pklz file. Examples:\n nipypecli show node.pklz """ from pprint import pprint from ..utils.filemanip import loadpkl pkl_data = loadpkl(pklz_file) pprint(pkl_data) @cli.command(context_settings=UNKNOWN_OPTIONS) @click.argument("module", type=PythonModule(), required=False, callback=check_not_none) @click.argument("interface", type=str, required=False) @click.option( "--list", is_flag=True, flag_value=True, help="List the available Interfaces inside the given module.", ) @click.option( "-h", "--help", is_flag=True, flag_value=True, help="Show help message and exit." ) @click.pass_context def run(ctx, module, interface, list, help): """Run a Nipype Interface. 
Examples:\n nipypecli run nipype.interfaces.nipy --list\n nipypecli run nipype.interfaces.nipy ComputeMask --help """ import argparse from .utils import add_args_options from ..utils.nipype_cmd import run_instance # print run command help if no arguments are given module_given = bool(module) if not module_given: click.echo(ctx.command.get_help(ctx)) # print the list of available interfaces for the given module elif (module_given and list) or (module_given and not interface): iface_names = list_interfaces(module) click.echo("Available Interfaces:") for if_name in iface_names: click.echo(" {}".format(if_name)) # check the interface elif module_given and interface: # create the argument parser description = "Run {}".format(interface) prog = " ".join([ctx.command_path, module.__name__, interface] + ctx.args) iface_parser = argparse.ArgumentParser(description=description, prog=prog) # instantiate the interface node = getattr(module, interface)() iface_parser = add_args_options(iface_parser, node) if not ctx.args: # print the interface help try: iface_parser.print_help() except: print( "An error ocurred when trying to print the full" "command help, printing usage." 
) finally: iface_parser.print_usage() else: # run the interface args = iface_parser.parse_args(args=ctx.args) run_instance(node, args) @cli.command(context_settings=CONTEXT_SETTINGS) def version(): """Print current version of Nipype.""" click.echo(__version__) @cli.group() def convert(): """Export nipype interfaces to other formats.""" pass @convert.command(context_settings=CONTEXT_SETTINGS) @click.option( "-i", "--interface", type=str, required=True, help="Name of the Nipype interface to export.", ) @click.option( "-m", "--module", type=PythonModule(), required=True, callback=check_not_none, help="Module where the interface is defined.", ) @click.option( "-o", "--output", type=UnexistingFilePath, required=True, callback=check_not_none, help="JSON file name where the Boutiques descriptor will be " "written.", ) @click.option( "-c", "--container-image", required=True, type=str, help="Name of the container image where the tool is installed.", ) @click.option( "-p", "--container-type", required=True, type=str, help="Type of container image (Docker or Singularity).", ) @click.option( "-x", "--container-index", type=str, help="Optional index where the image is available (e.g. " "http://index.docker.io).", ) @click.option( "-g", "--ignore-inputs", type=str, multiple=True, help="List of interface inputs to not include in the descriptor.", ) @click.option( "-v", "--verbose", is_flag=True, flag_value=True, help="Print information messages." ) @click.option( "-a", "--author", type=str, help="Author of the tool (required for publishing)." ) @click.option( "-t", "--tags", type=str, help="JSON string containing tags to include in the descriptor," 'e.g. "{"key1": "value1"}"', ) def boutiques( module, interface, container_image, container_type, output, container_index, verbose, author, ignore_inputs, tags, ): """Nipype to Boutiques exporter. See Boutiques specification at https://github.com/boutiques/schema. 
""" from nipype.utils.nipype2boutiques import generate_boutiques_descriptor # Generates JSON string and saves it to file generate_boutiques_descriptor( module, interface, container_image, container_type, container_index, verbose, True, output, author, ignore_inputs, tags, ) nipype-1.7.0/nipype/scripts/crash_files.py000066400000000000000000000041701413403311400206460ustar00rootroot00000000000000"""Utilities to manipulate and search through .pklz crash files.""" import re import sys import os.path as op from glob import glob from traits.trait_errors import TraitError from nipype.utils.filemanip import loadcrash def load_pklz_traceback(crash_filepath): """Return the traceback message in the given crash file.""" try: data = loadcrash(crash_filepath) except TraitError as te: return str(te) except: raise else: return "\n".join(data["traceback"]) def iter_tracebacks(logdir): """Return an iterator over each file path and traceback field inside `logdir`. Parameters ---------- logdir: str Path to the log folder. field: str Field name to be read from the crash file. Yields ------ path_file: str traceback: str """ crash_files = sorted(glob(op.join(logdir, "*.pkl*"))) for cf in crash_files: yield cf, load_pklz_traceback(cf) def display_crash_file(crashfile, rerun, debug, directory): """display crash file content and rerun if required""" from nipype.utils.filemanip import loadcrash crash_data = loadcrash(crashfile) node = None if "node" in crash_data: node = crash_data["node"] tb = crash_data["traceback"] print("\n") print("File: %s" % crashfile) if node: print("Node: %s" % node) if node.base_dir: print("Working directory: %s" % node.output_dir()) else: print("Node crashed before execution") print("\n") print("Node inputs:") print(node.inputs) print("\n") print("Traceback: ") print("".join(tb)) print("\n") if rerun: if node is None: print("No node in crashfile. 
Cannot rerun") return print("Rerunning node") node.base_dir = directory node.config = {"execution": {"crashdump_dir": "/tmp"}} try: node.run() except: if debug and debug != "ipython": import pdb pdb.post_mortem() else: raise print("\n") nipype-1.7.0/nipype/scripts/instance.py000066400000000000000000000025061413403311400201710ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ Import lib and class meta programming utilities. """ import inspect import importlib from ..interfaces.base import Interface def import_module(module_path): """Import any module to the global Python environment. The module_path argument specifies what module to import in absolute or relative terms (e.g. either pkg.mod or ..mod). If the name is specified in relative terms, then the package argument must be set to the name of the package which is to act as the anchor for resolving the package name (e.g. import_module('..mod', 'pkg.subpkg') will import pkg.mod). Parameters ---------- module_path: str Path to the module to be imported Returns ------- The specified module will be inserted into sys.modules and returned. """ try: mod = importlib.import_module(module_path) except: raise ImportError("Error when importing object {}.".format(module_path)) else: return mod def list_interfaces(module): """Return a list with the names of the Interface subclasses inside the given module. """ iface_names = [] for k, v in sorted(list(module.__dict__.items())): if inspect.isclass(v) and issubclass(v, Interface): iface_names.append(k) return iface_names nipype-1.7.0/nipype/scripts/utils.py000066400000000000000000000105751413403311400175320ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ Utilities for the CLI functions. 
""" import re import click import json from .instance import import_module from ..interfaces.base import InputMultiPath, traits from ..interfaces.base.support import get_trait_desc # different context options CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) UNKNOWN_OPTIONS = dict(allow_extra_args=True, ignore_unknown_options=True) # specification of existing ParamTypes ExistingDirPath = click.Path(exists=True, file_okay=False, resolve_path=True) ExistingFilePath = click.Path(exists=True, dir_okay=False, resolve_path=True) UnexistingFilePath = click.Path(dir_okay=False, resolve_path=True) # validators def check_not_none(ctx, param, value): if value is None: raise click.BadParameter("got {}.".format(value)) return value # declare custom click.ParamType class RegularExpression(click.ParamType): name = "regex" def convert(self, value, param, ctx): try: rex = re.compile(value, re.IGNORECASE) except ValueError: self.fail("%s is not a valid regular expression." % value, param, ctx) else: return rex class PythonModule(click.ParamType): name = "Python module path" def convert(self, value, param, ctx): try: module = import_module(value) except ValueError: self.fail("%s is not a valid Python module." 
% value, param, ctx) else: return module def add_args_options(arg_parser, interface): """Add arguments to `arg_parser` to create a CLI for `interface`.""" inputs = interface.input_spec() for name, spec in sorted(interface.inputs.traits(transient=None).items()): desc = "\n".join(get_trait_desc(inputs, name, spec))[len(name) + 2 :] # Escape any % signs with a % desc = desc.replace("%", "%%") args = {} has_multiple_inner_traits = False if spec.is_trait_type(traits.Bool): args["default"] = getattr(inputs, name) args["action"] = "store_true" # current support is for simple trait types if not spec.inner_traits: if not spec.is_trait_type(traits.TraitCompound): trait_type = type(spec.trait_type.default_value) if trait_type in (bytes, str, int, float): if trait_type == bytes: trait_type = str args["type"] = trait_type elif len(spec.inner_traits) == 1: trait_type = type(spec.inner_traits[0].trait_type.default_value) if trait_type == bytes: trait_type = str if trait_type in (bytes, bool, str, int, float): args["type"] = trait_type else: if len(spec.inner_traits) > 1: if not spec.is_trait_type(traits.Dict): has_multiple_inner_traits = True if getattr(spec, "mandatory", False): if spec.is_trait_type(InputMultiPath): args["nargs"] = "+" elif spec.is_trait_type(traits.List): if ( spec.trait_type.minlen == spec.trait_type.maxlen ) and spec.trait_type.maxlen: args["nargs"] = spec.trait_type.maxlen else: args["nargs"] = "+" elif spec.is_trait_type(traits.Dict): args["type"] = json.loads if has_multiple_inner_traits: raise NotImplementedError( ( "This interface cannot be used. 
via the" " command line as multiple inner traits" " are currently not supported for mandatory" " argument: {}.".format(name) ) ) arg_parser.add_argument(name, help=desc, **args) else: if spec.is_trait_type(InputMultiPath): args["nargs"] = "*" elif spec.is_trait_type(traits.List): if ( spec.trait_type.minlen == spec.trait_type.maxlen ) and spec.trait_type.maxlen: args["nargs"] = spec.trait_type.maxlen else: args["nargs"] = "*" if not has_multiple_inner_traits: arg_parser.add_argument("--%s" % name, dest=name, help=desc, **args) return arg_parser nipype-1.7.0/nipype/sphinxext/000077500000000000000000000000001413403311400163535ustar00rootroot00000000000000nipype-1.7.0/nipype/sphinxext/__init__.py000066400000000000000000000002401413403311400204600ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: nipype-1.7.0/nipype/sphinxext/apidoc/000077500000000000000000000000001413403311400176125ustar00rootroot00000000000000nipype-1.7.0/nipype/sphinxext/apidoc/__init__.py000066400000000000000000000144111413403311400217240ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Settings for sphinxext.interfaces and connection to sphinx-apidoc.""" import re from sphinxcontrib.napoleon import ( Config as NapoleonConfig, _patch_python_domain, _skip_member as _napoleon_skip_member, ) from ... import __version__ from ...interfaces.base import BaseInterface, TraitedSpec from .docstring import NipypeDocstring, InterfaceDocstring class Config(NapoleonConfig): r""" Sphinx-nipype extension settings in ``conf.py``. Listed below are all the settings used by this extension and their default values. These settings can be changed in the Sphinx's ``conf.py`` file. 
Make sure that ``nipype.sphinxext.interfaces`` is enabled in ``conf.py``:: # conf.py # Add this extension to the corresponding list: extensions = ['nipype.sphinxext.interfaces'] # NiPype settings nipype_references = False Attributes ---------- nipype_skip_classes: :obj:`bool` (Defaults to True) True to include referenced publications with the interface (requires duecredit to be installed). """ _config_values = { "nipype_skip_classes": ( ["Tester", "InputSpec", "OutputSpec", "Numpy", "NipypeTester"], "env", ), **NapoleonConfig._config_values, } def setup(app): # type: (Sphinx) -> Dict[unicode, Any] """ Sphinx extension setup function. When the extension is loaded, Sphinx imports this module and executes the ``setup()`` function, which in turn notifies Sphinx of everything the extension offers. Parameters ---------- app : sphinx.application.Sphinx Application object representing the Sphinx process See Also -------- `The Sphinx documentation on Extensions `_ `The Extension Tutorial `_ `The Extension API `_ """ from sphinx.application import Sphinx if not isinstance(app, Sphinx): # probably called by tests return {"version": __version__, "parallel_read_safe": True} _patch_python_domain() app.setup_extension("sphinx.ext.autodoc") app.connect("autodoc-process-docstring", _process_docstring) app.connect("autodoc-skip-member", _skip_member) for name, (default, rebuild) in Config._config_values.items(): app.add_config_value(name, default, rebuild) return {"version": __version__, "parallel_read_safe": True} def _process_docstring(app, what, name, obj, options, lines): # type: (Sphinx, unicode, unicode, Any, Any, List[unicode]) -> None """Process the docstring for a given python object. Called when autodoc has read and processed a docstring. `lines` is a list of docstring lines that `_process_docstring` modifies in place to change what Sphinx outputs. 
The following settings in conf.py control what styles of docstrings will be parsed: * ``napoleon_google_docstring`` -- parse Google style docstrings * ``napoleon_numpy_docstring`` -- parse NumPy style docstrings Parameters ---------- app : sphinx.application.Sphinx Application object representing the Sphinx process. what : str A string specifying the type of the object to which the docstring belongs. Valid values: "module", "class", "exception", "function", "method", "attribute". name : str The fully qualified name of the object. obj : module, class, exception, function, method, or attribute The object to which the docstring belongs. options : sphinx.ext.autodoc.Options The options given to the directive: an object with attributes inherited_members, undoc_members, show_inheritance and noindex that are True if the flag option of same name was given to the auto directive. lines : list of str The lines of the docstring, see above. .. note:: `lines` is modified *in place* """ result_lines = lines # Parse Nipype Interfaces if what == "class" and issubclass(obj, BaseInterface): result_lines[:] = InterfaceDocstring( result_lines, app.config, app, what, name, obj, options ).lines() result_lines = NipypeDocstring( result_lines, app.config, app, what, name, obj, options ).lines() lines[:] = result_lines[:] def _skip_member(app, what, name, obj, skip, options): # type: (Sphinx, unicode, unicode, Any, bool, Any) -> bool """ Determine if private and special class members are included in docs. Parameters ---------- app : sphinx.application.Sphinx Application object representing the Sphinx process what : str A string specifying the type of the object to which the member belongs. Valid values: "module", "class", "exception", "function", "method", "attribute". name : str The name of the member. obj : module, class, exception, function, method, or attribute. For example, if the member is the __init__ method of class A, then `obj` will be `A.__init__`. 
skip : bool A boolean indicating if autodoc will skip this member if `_skip_member` does not override the decision options : sphinx.ext.autodoc.Options The options given to the directive: an object with attributes inherited_members, undoc_members, show_inheritance and noindex that are True if the flag option of same name was given to the auto directive. Returns ------- bool True if the member should be skipped during creation of the docs, False if it should be included in the docs. """ # Parse Nipype Interfaces patterns = [ pat if hasattr(pat, "search") else re.compile(pat) for pat in app.config.nipype_skip_classes ] isbase = False try: isbase = issubclass(obj, BaseInterface) if issubclass(obj, TraitedSpec): return True except TypeError: pass if isbase: for pattern in patterns: if pattern.search(name): return True return _napoleon_skip_member(app, what, name, obj, skip, options) nipype-1.7.0/nipype/sphinxext/apidoc/docstring.py000066400000000000000000000137571413403311400221750ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Reformat interface docstrings.""" import re from sphinxcontrib.napoleon._upstream import _ from sphinxcontrib.napoleon.docstring import NumpyDocstring class NipypeDocstring(NumpyDocstring): """Patch the NumpyDocstring from napoleon to get special section headers.""" def _parse_parameters_section(self, section): # type: (unicode) -> List[unicode] labels = { "args": _("Parameters"), "arguments": _("Parameters"), "parameters": _("Parameters"), } # type: Dict[unicode, unicode] label = labels.get(section.lower(), section) fields = self._consume_fields() if self._config.napoleon_use_param: return self._format_docutils_params(fields) return self._format_fields(label, fields) class InterfaceDocstring(NipypeDocstring): """ Convert docstrings of Nipype Interfaces to reStructuredText. 
Parameters ---------- docstring : :obj:`str` or :obj:`list` of :obj:`str` The docstring to parse, given either as a string or split into individual lines. config: :obj:`sphinxcontrib.napoleon.Config` or :obj:`sphinx.config.Config` The configuration settings to use. If not given, defaults to the config object on `app`; or if `app` is not given defaults to the a new :class:`nipype.sphinxext.apidoc.Config` object. Other Parameters ---------------- app : :class:`sphinx.application.Sphinx`, optional Application object representing the Sphinx process. what : :obj:`str`, optional A string specifying the type of the object to which the docstring belongs. Valid values: "module", "class", "exception", "function", "method", "attribute". name : :obj:`str`, optional The fully qualified name of the object. obj : module, class, exception, function, method, or attribute The object to which the docstring belongs. options : :class:`sphinx.ext.autodoc.Options`, optional The options given to the directive: an object with attributes inherited_members, undoc_members, show_inheritance and noindex that are True if the flag option of same name was given to the auto directive. """ _name_rgx = re.compile( r"^\s*(:(?P\w+):`(?P[a-zA-Z0-9_.-]+)`|" r" (?P[a-zA-Z0-9_.-]+))\s*", re.X, ) def __init__( self, docstring, config=None, app=None, what="", name="", obj=None, options=None ): # type: (Union[unicode, List[unicode]], SphinxConfig, Sphinx, unicode, unicode, Any, Any) -> None # NOQA super().__init__(docstring, config, app, what, name, obj, options) cmd = getattr(obj, "_cmd", "") if cmd and cmd.strip(): self._parsed_lines = [ "Wrapped executable: ``%s``." 
% cmd.strip(), "", ] + self._parsed_lines if obj is not None: self._parsed_lines += _parse_interface(obj) def _parse_interface(obj): """Print description for input parameters.""" parsed = [] if obj.input_spec: inputs = obj.input_spec() mandatory_items = sorted(inputs.traits(mandatory=True).items()) if mandatory_items: parsed += ["", "Mandatory Inputs"] parsed += ["-" * len(parsed[-1])] for name, spec in mandatory_items: parsed += _parse_spec(inputs, name, spec) mandatory_keys = {item[0] for item in mandatory_items} optional_items = sorted( [ (name, val) for name, val in inputs.traits(transient=None).items() if name not in mandatory_keys ] ) if optional_items: parsed += ["", "Optional Inputs"] parsed += ["-" * len(parsed[-1])] for name, spec in optional_items: parsed += _parse_spec(inputs, name, spec) if obj.output_spec: outputs = sorted(obj.output_spec().traits(transient=None).items()) if outputs: parsed += ["", "Outputs"] parsed += ["-" * len(parsed[-1])] for name, spec in outputs: parsed += _parse_spec(inputs, name, spec) return parsed def _indent(lines, n=4): # type: (List[unicode], int) -> List[unicode] return [(" " * n) + line for line in lines] def _parse_spec(inputs, name, spec): """Parse a HasTraits object into a Numpy-style docstring.""" desc_lines = [] if spec.desc: desc = "".join([spec.desc[0].capitalize(), spec.desc[1:]]) if not desc.endswith(".") and not desc.endswith("\n"): desc = "%s." % desc desc_lines += desc.splitlines() argstr = spec.argstr if argstr and argstr.strip(): pos = spec.position if pos is None: desc_lines += [ """Maps to a command-line argument: :code:`{arg}`.""".format( arg=argstr.strip() ) ] else: desc_lines += [ """Maps to a command-line argument: :code:`{arg}` (position: {pos}).""".format( arg=argstr.strip(), pos=pos ) ] xor = spec.xor if xor: desc_lines += [ "Mutually **exclusive** with inputs: %s." % ", ".join(["``%s``" % x for x in xor]) ] requires = spec.requires if requires: desc_lines += [ "**Requires** inputs: %s." 
% ", ".join(["``%s``" % x for x in requires]) ] if spec.usedefault: default = spec.default_value()[1] if isinstance(default, (bytes, str)) and not default: default = '""' desc_lines += ["(Nipype **default** value: ``%s``)" % str(default)] out_rst = [ "{name} : {type}".format(name=name, type=spec.full_info(inputs, name, None)) ] out_rst += _indent(desc_lines, 4) return out_rst nipype-1.7.0/nipype/sphinxext/documenter.py000066400000000000000000000052601413403311400210750ustar00rootroot00000000000000"""sphinx autodoc ext.""" from sphinx.locale import _ from sphinx.ext import autodoc from nipype.interfaces.base import BaseInterface from .gh import get_url _ClassDocumenter = autodoc.ClassDocumenter RST_CLASS_BLOCK = """ .. index:: {name} .. _{module}.{name}: {name} {underline} `Link to code <{code_url}>`__ """ class NipypeClassDocumenter(_ClassDocumenter): # type: ignore priority = 20 def add_directive_header(self, sig: str) -> None: if self.doc_as_attr: self.directivetype = "attribute" # Copied from super domain = getattr(self, "domain", "py") directive = getattr(self, "directivetype", self.objtype) name = self.format_name() sourcename = self.get_sourcename() is_interface = False try: is_interface = issubclass(self.object, BaseInterface) except TypeError: pass if is_interface is True: lines = RST_CLASS_BLOCK.format( code_url=get_url(self.object), module=self.modname, name=name, underline="=" * len(name), ) for line in lines.splitlines(): self.add_line(line, sourcename) else: self.add_line( ".. %s:%s:: %s%s" % (domain, directive, name, sig), sourcename ) if self.options.noindex: self.add_line(" :noindex:", sourcename) if self.objpath: # Be explicit about the module, this is necessary since .. class:: # etc. 
don't support a prepended module name self.add_line(" :module: %s" % self.modname, sourcename) # add inheritance info, if wanted if not self.doc_as_attr and self.options.show_inheritance: sourcename = self.get_sourcename() self.add_line("", sourcename) bases = getattr(self.object, "__bases__", []) bases_links = [] for b in bases: based_interface = False try: based_interface = issubclass(b, BaseInterface) except TypeError: pass if b.__module__ in ("__builtin__", "builtins"): bases_links.append(":class:`%s`" % b.__name__) elif based_interface: bases_links.append(":ref:`%s.%s`" % (b.__module__, b.__name__)) else: bases_links.append(":class:`%s.%s`" % (b.__module__, b.__name__)) self.add_line(" " + _("Bases: %s") % ", ".join(bases_links), sourcename) def setup(app): app.add_autodocumenter(NipypeClassDocumenter) nipype-1.7.0/nipype/sphinxext/gh.py000066400000000000000000000016571413403311400173340ustar00rootroot00000000000000"""Build a file URL.""" import os import inspect import subprocess REVISION_CMD = "git rev-parse --short HEAD" def _get_git_revision(): # Comes from scikit-learn # https://github.com/scikit-learn/scikit-learn/blob/master/doc/sphinxext/github_link.py try: revision = subprocess.check_output(REVISION_CMD.split()).strip() except (subprocess.CalledProcessError, OSError): return None return revision.decode("utf-8") def get_url(obj): """Return local or remote url for an object.""" filename = inspect.getsourcefile(obj) uri = "file://%s" % filename revision = _get_git_revision() if revision is not None: shortfile = os.path.join("nipype", filename.split("nipype/")[-1]) uri = "http://github.com/nipy/nipype/blob/%s/%s" % (revision, shortfile) lines, lstart = inspect.getsourcelines(obj) lend = len(lines) + lstart return "%s#L%d-L%d" % (uri, lstart, lend) nipype-1.7.0/nipype/sphinxext/plot_workflow.py000066400000000000000000000610051413403311400216370ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # emacs: -*- mode: python; 
py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ :mod:`nipype.sphinxext.plot_workflow` -- Workflow plotting extension ==================================================================== A directive for including a nipype workflow graph in a Sphinx document. This code is forked from the plot_figure sphinx extension of matplotlib. By default, in HTML output, `workflow` will include a .png file with a link to a high-res .png. In LaTeX output, it will include a .pdf. The source code for the workflow may be included as **inline content** to the directive `workflow`:: .. workflow :: :graph2use: flat :simple_form: no from niflow.nipype1.workflows.dmri.camino.connectivity_mapping import create_connectivity_pipeline wf = create_connectivity_pipeline() For example, the following graph has been generated inserting the previous code block in this documentation: .. workflow :: :graph2use: flat :simple_form: no from niflow.nipype1.workflows.dmri.camino.connectivity_mapping import create_connectivity_pipeline wf = create_connectivity_pipeline() Options ------- The ``workflow`` directive supports the following options: graph2use : {'hierarchical', 'colored', 'flat', 'orig', 'exec'} Specify the type of graph to be generated. simple_form: bool Whether the graph will be in detailed or simple form. format : {'python', 'doctest'} Specify the format of the input include-source : bool Whether to display the source code. The default can be changed using the `workflow_include_source` variable in conf.py encoding : str If this source file is in a non-UTF8 or non-ASCII encoding, the encoding must be specified using the `:encoding:` option. The encoding will not be inferred using the ``-*- coding -*-`` metacomment. Additionally, this directive supports all of the options of the `image` directive, except for `target` (since workflow will add its own target). These include `alt`, `height`, `width`, `scale`, `align` and `class`. 
Configuration options --------------------- The workflow directive has the following configuration options: graph2use Select a graph type to use simple_form determines if the node name shown in the visualization is either of the form nodename (package) when set to True or nodename.Class.package when set to False. wf_include_source Default value for the include-source option wf_html_show_source_link Whether to show a link to the source in HTML. wf_pre_code Code that should be executed before each workflow. wf_basedir Base directory, to which ``workflow::`` file names are relative to. (If None or empty, file names are relative to the directory where the file containing the directive is.) wf_formats File formats to generate. List of tuples or strings:: [(suffix, dpi), suffix, ...] that determine the file format and the DPI. For entries whose DPI was omitted, sensible defaults are chosen. When passing from the command line through sphinx_build the list should be passed as suffix:dpi,suffix:dpi, .... wf_html_show_formats Whether to show links to the files in HTML. wf_rcparams A dictionary containing any non-standard rcParams that should be applied before each workflow. wf_apply_rcparams By default, rcParams are applied when `context` option is not used in a workflow directive. This configuration option overrides this behavior and applies rcParams before each workflow. wf_working_directory By default, the working directory will be changed to the directory of the example, so the code can get at its data files, if any. Also its path will be added to `sys.path` so it can import any helper modules sitting beside it. This configuration option can be used to specify a central directory (also added to `sys.path`) where data files and helper modules for all code are located. wf_template Provide a customized template for preparing restructured text. 
""" import sys import os import shutil import io import re import textwrap from os.path import relpath from errno import EEXIST import traceback missing_imports = [] try: from docutils.parsers.rst import directives, Directive from docutils.parsers.rst.directives.images import Image align = Image.align except ImportError as e: missing_imports = [str(e)] try: # Sphinx depends on either Jinja or Jinja2 import jinja2 def format_template(template, **kw): return jinja2.Template(template).render(**kw) except ImportError as e: missing_imports.append(str(e)) try: import jinja def format_template(template, **kw): return jinja.from_string(template, **kw) missing_imports.pop() except ImportError as e: missing_imports.append(str(e)) def _option_boolean(arg): if not arg or not arg.strip(): # no argument given, assume used as a flag return True elif arg.strip().lower() in ("no", "0", "false"): return False elif arg.strip().lower() in ("yes", "1", "true"): return True else: raise ValueError('"%s" unknown boolean' % arg) def _option_graph2use(arg): return directives.choice(arg, ("hierarchical", "colored", "flat", "orig", "exec")) def _option_context(arg): if arg in [None, "reset", "close-figs"]: return arg raise ValueError("argument should be None or 'reset' or 'close-figs'") def _option_format(arg): return directives.choice(arg, ("python", "doctest")) def _option_align(arg): return directives.choice( arg, ("top", "middle", "bottom", "left", "center", "right") ) def mark_wf_labels(app, document): """ To make graphs referenceable, we need to move the reference from the "htmlonly" (or "latexonly") node to the actual figure node itself. 
""" for name, explicit in list(document.nametypes.items()): if not explicit: continue labelid = document.nameids[name] if labelid is None: continue node = document.ids[labelid] if node.tagname in ("html_only", "latex_only"): for n in node: if n.tagname == "figure": sectname = name for c in n: if c.tagname == "caption": sectname = c.astext() break node["ids"].remove(labelid) node["names"].remove(name) n["ids"].append(labelid) n["names"].append(name) document.settings.env.labels[name] = ( document.settings.env.docname, labelid, sectname, ) break class WorkflowDirective(Directive): has_content = True required_arguments = 0 optional_arguments = 2 final_argument_whitespace = False option_spec = { "alt": directives.unchanged, "height": directives.length_or_unitless, "width": directives.length_or_percentage_or_unitless, "scale": directives.nonnegative_int, "align": _option_align, "class": directives.class_option, "include-source": _option_boolean, "format": _option_format, "context": _option_context, "nofigs": directives.flag, "encoding": directives.encoding, "graph2use": _option_graph2use, "simple_form": _option_boolean, } def run(self): if missing_imports: raise ImportError("\n".join(missing_imports)) document = self.state_machine.document config = document.settings.env.config nofigs = "nofigs" in self.options formats = get_wf_formats(config) default_fmt = formats[0][0] graph2use = self.options.get("graph2use", "hierarchical") simple_form = self.options.get("simple_form", True) self.options.setdefault("include-source", config.wf_include_source) keep_context = "context" in self.options context_opt = None if not keep_context else self.options["context"] rst_file = document.attributes["source"] rst_dir = os.path.dirname(rst_file) if len(self.arguments): if not config.wf_basedir: source_file_name = os.path.join( setup.app.builder.srcdir, directives.uri(self.arguments[0]) ) else: source_file_name = os.path.join( setup.confdir, config.wf_basedir, 
directives.uri(self.arguments[0]) ) # If there is content, it will be passed as a caption. caption = "\n".join(self.content) # If the optional function name is provided, use it if len(self.arguments) == 2: function_name = self.arguments[1] else: function_name = None with io.open(source_file_name, "r", encoding="utf-8") as fd: code = fd.read() output_base = os.path.basename(source_file_name) else: source_file_name = rst_file code = textwrap.dedent("\n".join([str(c) for c in self.content])) counter = document.attributes.get("_wf_counter", 0) + 1 document.attributes["_wf_counter"] = counter base, _ = os.path.splitext(os.path.basename(source_file_name)) output_base = "%s-%d.py" % (base, counter) function_name = None caption = "" base, source_ext = os.path.splitext(output_base) if source_ext in (".py", ".rst", ".txt"): output_base = base else: source_ext = "" # ensure that LaTeX includegraphics doesn't choke in foo.bar.pdf filenames output_base = output_base.replace(".", "-") # is it in doctest format? is_doctest = contains_doctest(code) if "format" in self.options: if self.options["format"] == "python": is_doctest = False else: is_doctest = True # determine output directory name fragment source_rel_name = relpath(source_file_name, setup.confdir) source_rel_dir = os.path.dirname(source_rel_name) while source_rel_dir.startswith(os.path.sep): source_rel_dir = source_rel_dir[1:] # build_dir: where to place output files (temporarily) build_dir = os.path.join( os.path.dirname(setup.app.doctreedir), "wf_directive", source_rel_dir ) # get rid of .. in paths, also changes pathsep # see note in Python docs for warning about symbolic links on Windows. 
# need to compare source and dest paths at end build_dir = os.path.normpath(build_dir) if not os.path.exists(build_dir): os.makedirs(build_dir) # output_dir: final location in the builder's directory dest_dir = os.path.abspath( os.path.join(setup.app.builder.outdir, source_rel_dir) ) if not os.path.exists(dest_dir): os.makedirs(dest_dir) # no problem here for me, but just use built-ins # how to link to files from the RST file dest_dir_link = os.path.join( relpath(setup.confdir, rst_dir), source_rel_dir ).replace(os.path.sep, "/") try: build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, "/") except ValueError: # on Windows, relpath raises ValueError when path and start are on # different mounts/drives build_dir_link = build_dir source_link = dest_dir_link + "/" + output_base + source_ext # make figures try: results = render_figures( code, source_file_name, build_dir, output_base, keep_context, function_name, config, graph2use, simple_form, context_reset=context_opt == "reset", close_figs=context_opt == "close-figs", ) errors = [] except GraphError as err: reporter = self.state.memo.reporter sm = reporter.system_message( 2, "Exception occurred in plotting %s\n from %s:\n%s" % (output_base, source_file_name, err), line=self.lineno, ) results = [(code, [])] errors = [sm] # Properly indent the caption caption = "\n".join(" " + line.strip() for line in caption.split("\n")) # generate output restructuredtext total_lines = [] for j, (code_piece, images) in enumerate(results): if self.options["include-source"]: if is_doctest: lines = [""] lines += [row.rstrip() for row in code_piece.split("\n")] else: lines = [".. code-block:: python", ""] lines += [" %s" % row.rstrip() for row in code_piece.split("\n")] source_code = "\n".join(lines) else: source_code = "" if nofigs: images = [] opts = [ ":%s: %s" % (key, val) for key, val in list(self.options.items()) if key in ("alt", "height", "width", "scale", "align", "class") ] only_html = ".. 
only:: html" only_latex = ".. only:: latex" only_texinfo = ".. only:: texinfo" # Not-None src_link signals the need for a source link in the generated # html if j == 0 and config.wf_html_show_source_link: src_link = source_link else: src_link = None result = format_template( config.wf_template or TEMPLATE, default_fmt=default_fmt, dest_dir=dest_dir_link, build_dir=build_dir_link, source_link=src_link, multi_image=len(images) > 1, only_html=only_html, only_latex=only_latex, only_texinfo=only_texinfo, options=opts, images=images, source_code=source_code, html_show_formats=config.wf_html_show_formats and len(images), caption=caption, ) total_lines.extend(result.split("\n")) total_lines.extend("\n") if total_lines: self.state_machine.insert_input(total_lines, source=source_file_name) # copy image files to builder's output directory, if necessary os.makedirs(dest_dir, exist_ok=True) for code_piece, images in results: for img in images: for fn in img.filenames(): destimg = os.path.join(dest_dir, os.path.basename(fn)) if fn != destimg: shutil.copyfile(fn, destimg) # copy script (if necessary) target_name = os.path.join(dest_dir, output_base + source_ext) with io.open(target_name, "w", encoding="utf-8") as f: if source_file_name == rst_file: code_escaped = unescape_doctest(code) else: code_escaped = code f.write(code_escaped) return errors def setup(app): setup.app = app setup.config = app.config setup.confdir = app.confdir app.add_directive("workflow", WorkflowDirective) app.add_config_value("graph2use", "hierarchical", "html") app.add_config_value("simple_form", True, "html") app.add_config_value("wf_pre_code", None, True) app.add_config_value("wf_include_source", False, True) app.add_config_value("wf_html_show_source_link", True, True) app.add_config_value("wf_formats", ["png", "svg", "pdf"], True) app.add_config_value("wf_basedir", None, True) app.add_config_value("wf_html_show_formats", True, True) app.add_config_value("wf_rcparams", {}, True) 
app.add_config_value("wf_apply_rcparams", False, True) app.add_config_value("wf_working_directory", None, True) app.add_config_value("wf_template", None, True) app.connect("doctree-read", mark_wf_labels) metadata = {"parallel_read_safe": True, "parallel_write_safe": True} return metadata # ------------------------------------------------------------------------------ # Doctest handling # ------------------------------------------------------------------------------ def contains_doctest(text): try: # check if it's valid Python as-is compile(text, "", "exec") return False except SyntaxError: pass r = re.compile(r"^\s*>>>", re.M) m = r.search(text) return bool(m) def unescape_doctest(text): """ Extract code from a piece of text, which contains either Python code or doctests. """ if not contains_doctest(text): return text code = "" for line in text.split("\n"): m = re.match(r"^\s*(>>>|\.\.\.) (.*)$", line) if m: code += m.group(2) + "\n" elif line.strip(): code += "# " + line.strip() + "\n" else: code += "\n" return code def remove_coding(text): """ Remove the coding comment, which exec doesn't like. """ sub_re = re.compile(r"^#\s*-\*-\s*coding:\s*.*-\*-$", flags=re.MULTILINE) return sub_re.sub("", text) # ------------------------------------------------------------------------------ # Template # ------------------------------------------------------------------------------ TEMPLATE = """ {{ source_code }} {{ only_html }} {% for img in images %} .. 
figure:: {{ build_dir }}/{{ img.basename }}.{{ default_fmt }} {% for option in options -%} {{ option }} {% endfor %} {% if html_show_formats and multi_image -%} ( {%- for fmt in img.formats -%} {%- if not loop.first -%}, {% endif -%} `{{ fmt }} <{{ dest_dir }}/{{ img.basename }}.{{ fmt }}>`__ {%- endfor -%} ) {%- endif -%} {{ caption }} {% endfor %} {% if source_link or (html_show_formats and not multi_image) %} ( {%- if source_link -%} `Source code <{{ source_link }}>`__ {%- endif -%} {%- if html_show_formats and not multi_image -%} {%- for img in images -%} {%- for fmt in img.formats -%} {%- if source_link or not loop.first -%}, {% endif -%} `{{ fmt }} <{{ dest_dir }}/{{ img.basename }}.{{ fmt }}>`__ {%- endfor -%} {%- endfor -%} {%- endif -%} ) {% endif %} {{ only_latex }} {% for img in images %} {% if 'pdf' in img.formats -%} .. figure:: {{ build_dir }}/{{ img.basename }}.pdf {% for option in options -%} {{ option }} {% endfor %} {{ caption }} {% endif -%} {% endfor %} {{ only_texinfo }} {% for img in images %} .. image:: {{ build_dir }}/{{ img.basename }}.png {% for option in options -%} {{ option }} {% endfor %} {% endfor %} """ exception_template = """ .. htmlonly:: [`source code <%(linkdir)s/%(basename)s.py>`__] Exception occurred rendering plot. """ # the context of the plot for all directives specified with the # :context: option wf_context = dict() class ImageFile(object): def __init__(self, basename, dirname): self.basename = basename self.dirname = dirname self.formats = [] def filename(self, fmt): return os.path.join(self.dirname, "%s.%s" % (self.basename, fmt)) def filenames(self): return [self.filename(fmt) for fmt in self.formats] def out_of_date(original, derived): """ Returns True if derivative is out-of-date wrt original, both of which are full file paths. 
""" return not os.path.exists(derived) or ( os.path.exists(original) and os.stat(derived).st_mtime < os.stat(original).st_mtime ) class GraphError(RuntimeError): pass def run_code(code, code_path, ns=None, function_name=None): """ Import a Python module from a path, and run the function given by name, if function_name is not None. """ # Change the working directory to the directory of the example, so # it can get at its data files, if any. Add its path to sys.path # so it can import any helper modules sitting beside it. pwd = str(os.getcwd()) old_sys_path = list(sys.path) if setup.config.wf_working_directory is not None: try: os.chdir(setup.config.wf_working_directory) except OSError as err: raise OSError( str(err) + "\n`wf_working_directory` option in" "Sphinx configuration file must be a valid " "directory path" ) except TypeError as err: raise TypeError( str(err) + "\n`wf_working_directory` option in " "Sphinx configuration file must be a string or " "None" ) sys.path.insert(0, setup.config.wf_working_directory) elif code_path is not None: dirname = os.path.abspath(os.path.dirname(code_path)) os.chdir(dirname) sys.path.insert(0, dirname) # Reset sys.argv old_sys_argv = sys.argv sys.argv = [code_path] # Redirect stdout stdout = sys.stdout sys.stdout = io.StringIO() # Assign a do-nothing print function to the namespace. There # doesn't seem to be any other way to provide a way to (not) print # that works correctly across Python 2 and 3. 
def _dummy_print(*arg, **kwarg): pass try: try: code = unescape_doctest(code) if ns is None: ns = {} if not ns: if setup.config.wf_pre_code is not None: exec(str(setup.config.wf_pre_code), ns) ns["print"] = _dummy_print if "__main__" in code: exec("__name__ = '__main__'", ns) code = remove_coding(code) exec(code, ns) if function_name is not None: exec(function_name + "()", ns) except (Exception, SystemExit) as err: raise GraphError(traceback.format_exc()) finally: os.chdir(pwd) sys.argv = old_sys_argv sys.path[:] = old_sys_path sys.stdout = stdout return ns def get_wf_formats(config): default_dpi = {"png": 80, "hires.png": 200, "pdf": 200} formats = [] wf_formats = config.wf_formats if isinstance(wf_formats, (str, bytes)): # String Sphinx < 1.3, Split on , to mimic # Sphinx 1.3 and later. Sphinx 1.3 always # returns a list. wf_formats = wf_formats.split(",") for fmt in wf_formats: if isinstance(fmt, (str, bytes)): if ":" in fmt: suffix, dpi = fmt.split(":") formats.append((str(suffix), int(dpi))) else: formats.append((fmt, default_dpi.get(fmt, 80))) elif isinstance(fmt, (tuple, list)) and len(fmt) == 2: formats.append((str(fmt[0]), int(fmt[1]))) else: raise GraphError('invalid image format "%r" in wf_formats' % fmt) return formats def render_figures( code, code_path, output_dir, output_base, context, function_name, config, graph2use, simple_form, context_reset=False, close_figs=False, ): """ Run a nipype workflow creation script and save the graph in *output_dir*. 
Save the images under *output_dir* with file names derived from *output_base* """ formats = get_wf_formats(config) ns = wf_context if context else {} if context_reset: wf_context.clear() run_code(code, code_path, ns, function_name) img = ImageFile(output_base, output_dir) for fmt, dpi in formats: try: img_path = img.filename(fmt) imgname, ext = os.path.splitext(os.path.basename(img_path)) ns["wf"].base_dir = output_dir src = ns["wf"].write_graph( imgname, format=ext[1:], graph2use=graph2use, simple_form=simple_form ) shutil.move(src, img_path) except Exception: raise GraphError(traceback.format_exc()) img.formats.append(fmt) return [(code, [img])] nipype-1.7.0/nipype/testing/000077500000000000000000000000001413403311400157765ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/__init__.py000066400000000000000000000020221413403311400201030ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The testing directory contains a small set of imaging files to be used for doctests only. """ import os # Discover directory path filepath = os.path.abspath(__file__) basedir = os.path.dirname(filepath) funcfile = os.path.join(basedir, "data", "functional.nii") anatfile = os.path.join(basedir, "data", "structural.nii") template = funcfile transfm = funcfile from . 
import decorators from .utils import package_check, TempFATFS skipif = decorators.dec.skipif def example_data(infile="functional.nii"): """returns path to empty example data files for doc tests it will raise an exception if filename is not in the directory""" filepath = os.path.abspath(__file__) basedir = os.path.dirname(filepath) outfile = os.path.join(basedir, "data", infile) if not os.path.exists(outfile): raise IOError("%s empty data file does NOT exist" % outfile) return outfile nipype-1.7.0/nipype/testing/data/000077500000000000000000000000001413403311400167075ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/4d_dwi.nii000066400000000000000000000000001413403311400205500ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/A.scheme000066400000000000000000000000001413403311400202430ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/A_qmat.Bdouble000066400000000000000000000000001413403311400213750ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/A_recon_params.Bdouble000066400000000000000000000000001413403311400231040ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/AffineTransform.mat000066400000000000000000000000001413403311400224640ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/BrainSegmentationPrior01.nii.gz000066400000000000000000000000001413403311400246030ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/BrainSegmentationPrior02.nii.gz000066400000000000000000000000001413403311400246040ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/BrainSegmentationPrior03.nii.gz000066400000000000000000000000001413403311400246050ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/BrainSegmentationPrior04.nii.gz000066400000000000000000000000001413403311400246060ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/DisplacementFieldTransform.nii.gz000066400000000000000000000000001413403311400252650ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/FLASH
1.mgz000066400000000000000000000000001413403311400203320ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/FLASH2.mgz000066400000000000000000000000001413403311400203330ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/FLASH3.mgz000066400000000000000000000000001413403311400203340ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/Fred+orig000066400000000000000000000000001413403311400204340ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/FreeSurferColorLUT.txt000066400000000000000000000000011413403311400230730ustar00rootroot00000000000000 nipype-1.7.0/nipype/testing/data/FreeSurferColorLUT_adapted_aparc+aseg_out.pck000066400000000000000000000000001413403311400274620ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/MASK_average_thal_right.nii000066400000000000000000000000001413403311400240300ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/NWARP000066400000000000000000000000001413403311400175070ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/PD.mgz000066400000000000000000000000001413403311400177170ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/ProbabilityMaskOfStudyTemplate.nii.gz000066400000000000000000000000001413403311400261230ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/Q25_warp+tlrc.HEAD000066400000000000000000000000001413403311400216600ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/QSH_peaks.Bdouble000066400000000000000000000000001413403311400220110ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/README000066400000000000000000000006221413403311400175670ustar00rootroot00000000000000This directory contains empty, dummy files which are meant to be used in the doctests of nipype. For verion 0.3 of nipype, we're using Traits and for input files, the code checks to confirm the assigned files actually exist. It doesn't matter what the files are, or even if they contain "real data", only that they exist. 
Again, these files are only meant to serve as documentation in the doctests. nipype-1.7.0/nipype/testing/data/ROI_scale500.nii.gz000066400000000000000000000000001413403311400220420ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/SPM.mat000066400000000000000000000000001413403311400200370ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/SubjectA.Bfloat000066400000000000000000000000001413403311400215260ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/T1.mgz000066400000000000000000000000001413403311400177000ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/T1.nii000066400000000000000000000000001413403311400176620ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/T1.nii.gz000066400000000000000000000000001413403311400203010ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/T1_brain.nii000066400000000000000000000000001413403311400210350ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/T1map.nii.gz000066400000000000000000000000001413403311400207770ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/T2.nii000066400000000000000000000000001413403311400176630ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/TI4D.nii.gz000066400000000000000000000000001413403311400205210ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/TPM.nii000066400000000000000000000000001413403311400200360ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/Template_1_IXI550_MNI152.nii000066400000000000000000000000001413403311400232670ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/Template_6.nii000066400000000000000000000000001413403311400213760ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/TransformParameters.0.txt000066400000000000000000000000001413403311400235730ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/afni_output.3D000066400000000000000000000000001413403311400214220ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/allF
A.nii000066400000000000000000000000001413403311400203550ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/all_FA.nii.gz000066400000000000000000000000001413403311400211330ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/anat_coreg.mif000066400000000000000000000000001413403311400214740ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/anatomical.nii000066400000000000000000000000001413403311400215060ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/ants_Affine.txt000066400000000000000000000000001413403311400216530ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/ants_Warp.nii.gz000066400000000000000000000000001413403311400217530ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/ants_deformed.nii.gz000066400000000000000000000000001413403311400226270ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/aparc+aseg.nii000066400000000000000000000000001413403311400213770ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/aseg.mgz000066400000000000000000000000001413403311400203330ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/asl.nii.gz000066400000000000000000000000001413403311400205740ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/atlas.nii.gz000066400000000000000000000000001413403311400211210ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/b0.nii000066400000000000000000000000001413403311400176770ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/b0.nii.gz000066400000000000000000000000001413403311400203160ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/b0_b0rev.nii000066400000000000000000000000001413403311400207750ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/ballstickfit_data.Bfloat000066400000000000000000000000001413403311400234720ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/bedpostxout/000077500000000000000000000000001413403311400212675ustar00rootroot00000000000000nipype-1.7.0/nipype/t
esting/data/bedpostxout/do_not_delete.txt000066400000000000000000000000741413403311400246350ustar00rootroot00000000000000This file has to be here because git ignores empty folders. nipype-1.7.0/nipype/testing/data/brain_mask.nii000066400000000000000000000000001413403311400215040ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/brain_study_template.nii.gz000066400000000000000000000000001413403311400242330ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/brain_track.Bdouble000066400000000000000000000000001413403311400224520ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/brukerdir/000077500000000000000000000000001413403311400207005ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/brukerdir/fid000066400000000000000000000000001413403311400213530ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/brukerdir/pdata/000077500000000000000000000000001413403311400217715ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/brukerdir/pdata/1/000077500000000000000000000000001413403311400221315ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/brukerdir/pdata/1/2dseq000066400000000000000000000000001413403311400230600ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/bvals000066400000000000000000000000001413403311400177270ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/bvals.scheme000066400000000000000000000000001413403311400211720ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/bvecs000066400000000000000000000000001413403311400177220ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/bvecs.scheme000066400000000000000000000000001413403311400211650ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/c1s1.nii000066400000000000000000000000001413403311400201450ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/c1s3.nii000066400000000000000000000000001413403311400201470ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/clustering.mat
000066400000000000000000000000001413403311400215570ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/cmatrix.mat000066400000000000000000000000001413403311400210470ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/complex.nii000066400000000000000000000000001413403311400210450ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/config.ini000066400000000000000000000000311413403311400206470ustar00rootroot00000000000000[BOOL] ManualNIfTIConv=0 nipype-1.7.0/nipype/testing/data/cont1.nii000066400000000000000000000000001413403311400204220ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/cont1a.nii000066400000000000000000000000001413403311400205630ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/cont2.nii000066400000000000000000000000001413403311400204230ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/cont2a.nii000066400000000000000000000000001413403311400205640ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/converted.trk000066400000000000000000000000001413403311400214100ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/cope.nii.gz000066400000000000000000000000001413403311400207430ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/cope1.nii.gz000066400000000000000000000000001413403311400210240ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/cope1run1.nii.gz000066400000000000000000000000001413403311400216320ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/cope1run2.nii.gz000066400000000000000000000000001413403311400216330ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/cope2run1.nii.gz000066400000000000000000000000001413403311400216330ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/cope2run2.nii.gz000066400000000000000000000000001413403311400216340ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/cortex.label000066400000000000000000000000001413403311400212020ustar00rootroot00000000000000nipype-1.7.0/nipyp
e/testing/data/cov_split.mat000066400000000000000000000000001413403311400214020ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/csd.mif000066400000000000000000000000001413403311400201430ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/data.Bfloat000066400000000000000000000000001413403311400207370ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/db.xml000066400000000000000000000000001413403311400200040ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/degree.csv000066400000000000000000000000001413403311400206450ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/degree.mat000066400000000000000000000000001413403311400206330ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/design.con000066400000000000000000000000001413403311400206470ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/design.mat000066400000000000000000000000001413403311400206510ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dicomdir/000077500000000000000000000000001413403311400205015ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dicomdir/123456-1-1.dcm000066400000000000000000000000001413403311400222140ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/diffusion.nii000066400000000000000000000000001413403311400213640ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/diffusion_weighted.nii000066400000000000000000000000001413403311400232440ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dilated_wm_mask.nii000066400000000000000000000000001413403311400225220ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dirs.txt000066400000000000000000000000001413403311400203770ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dofrun1000066400000000000000000000000001413403311400201760ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dofrun2000066400000000000000000000000001413403311400201770ustar00rootroot00000000000000nipype-1.7.0/nipype/testi
ng/data/ds003_sub-01_mc.DVARS000066400000000000000000000007341413403311400221130ustar00rootroot000000000000002.02915 5.2016 1.74221 1.54871 3.97002 1.18108 0.921419 2.362 0.784497 1.26058 3.23142 0.734119 1.00079 2.56548 0.787452 0.929074 2.38163 0.828835 0.741207 1.90004 0.746263 1.07913 2.7663 0.779829 1.2969 3.32452 0.73856 0.767387 1.96715 0.772047 0.847059 2.17138 0.774103 0.984061 2.52258 0.88097 0.852897 2.18635 0.794655 0.927778 2.3783 0.756786 0.857544 2.19826 0.796125 0.780098 1.99973 0.731265 1.05496 2.70434 0.788584 1.32099 3.38628 0.831803 0.691529 1.77269 0.738788 nipype-1.7.0/nipype/testing/data/ds003_sub-01_mc.nii.gz000066400000000000000000005102261413403311400224340ustar00rootroot00000000000000̷uTT_Mw 3CwXuyDE,,lBDDDJB1(T @.Tw]޿wu֙=s|ij:2aa?u4SC_U.mxe4Wf'wD5ׇRn{ `^+`F>&YhU07eڥbJdހ%ېi pGL/.%ji#sIB^TXE8Oߧ*EN11Cqk^7>OF\{1-zӌt,2o}Te? \(eF%Q w'x`EthΠTYMQcOeT]o2 vba0 p}W+/7!MdHZ\&C#Īq'~| w6AY}/9>k(}xN A.%őJq@-G!gh ] K7KJ (6ss[-(Kr)3bVz8"kP>^UK heJ<@;=bhvۊ^Wn%\ӠaN#{π[ܷ&"qMl@-ҫ4!=8V-a{4sNW{Q3:.w^#NJuE]MvL*VloJCOΡK&M]to}jv/MĄиM#lmx3;`BI(+_;pZ>=B1`A]\(c9-HZPyo H kC'gT}͖Ey'c?,QC{/^6ZYz FS,^_JKQjF XTxZ\P?Hݎ17ԃ9Ҋv/Arh8^HGzV;!"H:XWvwg{b C~5]Jl0WiZ89~O4e@-އ X}8u2<ܒ.E7UH+F_;()xJ߸]MLw\mb 6o$Giy3{abmݔդUtq[muMtoc/_6"e dIAMql6~uiҖ v)Kp(rIQґeԹm~M4vG=&P6^W:=,xHVtrRLRODᮈ-] xR'Vt^N/OTN JY}/nP0r(h\-ϗf_ Bm6ttiZ/ZG: }J18I">oƋx]sś&s*z;4̠tT|yͫ0}5:%GQݮP񟨎G:Q|?˫oӯyTcH)y'J+[btnbk6¬/?vWwTѪ/Uup@[tLo#Nwʥe#Ƒ}w(t#s *2}:j?ˆpo[tuL^Q5+cMxY$z,ݳ[)'޺O>cutc$ G{- 'Ej޻ QE?i6)/񴦸5a>҅=VeXq: ln#7iw_ ~Asb)u@ȥ0|A|O^E?42%j6"羣ɵ%Y<<gIs7AhXL*1{0Kq'WЛ>1x\ 7;Y}.?E_]o(9~No#%^:|e.S1lQ!_4ufv5EEOȸ18MI7bp+D{E(~ŐbY(߾F %=aXAdN gcb4P"&B;VBm釐, ?1!CSt~S%((?C [$dޝb?O|xz\Fw; '=ͧ#v (N\Ą7y|*~°ːz(\ S*eKaϞQ2~fb,x4Fj(r_E&g[0W 6I*v6,Ϝ{4H)8t,1384ʑ-p'_Y+ڍywS/ٵ’Y@M=V Jz66lC ֝op޶IgF)2紥%gX{-wd M9.̘{ڇ\(4TzU.:ֿ]x:%Gǩkڃc-l7"\dŴW 11icO/)٤'/;̒ҊǚT8AJ~eó"M 2)Q(pRu%›)[!E'lwd9kSf;;p{ %{__r9jMkuMy̠p!7JS5X|qgUR?~pѕee4rWUX!M9:-7X UBғ$=w^:ǑuUn/v!gǮDWp.K4jp&> 㙶+['{|'y/-ݐLIoqآ=_瀃y)8U=?Mw-yy*ox)9guH\D3F_ ]ߓSF(Y/p,zSg+M,y*S_m& 
<I?[#^E}.-W*FIO&)x0 EcfJNŸʅk%Ǔ~1xC%G,q_$ ,Ĭ! 7E.d8Cz"1{Č03cc#\'LRF GEIs1x&oL%7u^CN܍q¿)RCq=anXq5|J 4a2⡫^Ұmqe([9_/S/gK"txڈkj4mfYɭTJ|^GEĤ`TtEQ4`܄C/4݇z <`϶? Ӛ'>Ur)' aRjˆ?/R?Ӕ2ug3Rޖպs[.P6'Ɗ #. lbX(O9ݣfm\5/э 8q-J6|`=: ) ,Xt/8.`۫6‰/)cj?h}>7D\ vC˚7U+KQBGVL295՚D~ŵvBZ*HɁ/U\ʕ-yA%_"c4IZ{|p:/EZ c"aw+MS lxb3k̙/Q 6Ll-2q3$O{;ܛqqڌf9A,yT-Ħ<{&QMŢP/v5U_7?ԬHγ®xy)y/B+jh,*Of۱h`+wjKIr+hwoΟùS/ wWc+m4THZ8?`CY@%G񬦸>4W`mG/1ָ:؅ar35s G(wI]:E aV<]ÉAlo= ̗T7$؝>YV~U85^q1f-F0y(ݠnWe =+~0(1^E"׻Hkq2e>Az2A4u_dc:;l?fzp$yU4KEԼ,DO-EK;l/s|LYO 3fvb Kа_#ƷtcO*r' tdz5ʘSޣ+DX7GS|cOpI]#.oPIx&nCË*E>c\ ;eq ?wո$Dw1q7PTu9~=Vs(x&ǐm)Ҝ { 0wducSF"xOt/w寮hYv썋U*$&LC,Uf+ 胙n}1x{Ў oAij;Xp(>zL:$& җEh CwuXFypԵ.mƄXf4EIm>7StvJ }fs\ !\Ƈt$DYNh6SV#3Yg OGy:Ѵt9Ž|Aw\9HJ'hz!S@ )pgffPcQ ߡndk{xFi})2"[(1@/4IK,fmfcoT~.,XB0/zR5ǿ`eux2>Edt]^E/S*ė i渦p;\31Nn0DCǤXP6dڟMZ$jgI >[c/fHm vpNql{sw; pQTSZ1~=q^VH}c鳥]QJ}92\oMfXde72BicpcHF1qt&OEL? ^Ya^z4z2*zEfdfX`W e%ѧ- V7`N +fAb|Tg'߿r-)eu҃zt!<.)oщ_ vbcwz\'g A|@;*W96-Ff2–֨Rg2zBR,,%ڜB=ԛpH)M:Xa# >I(j fݝ.6lX+ºI|r= fLC9f8ὙF G$Rbf.Zh9vBFΐCͰnƨѺj5.yis܋vBI 3O5uZ[&]q1ģ52*~9 ]P^o{tj` ;o\MQsI%x B86^_zlfTu*M\Z$)w9O÷`ͣ0V!zwTSߑ45:~{A!Q_h5z} :\)`tk?e O:֜?myJ>-4qW8~<=. g&M/=13Kr: 'FM:&r-׉MvYnTʓ ]x[±h4䂟wRy|Б5i1iFZʇb[{ǔ:a2)'cd)m,HxmRF&4e8ݖTKD3BQ=r7"U؂s@ //Ÿ+DݤV4-z!h'\vsZ[t 3tXin\<#Skйl9"vG6hhꅂhn4 YkUz3R ²0G:T@_@q4AhDc+7^^aqa!/}U+\DcQ7;%>(6 cc6b|y9zIQBpi΄׮䢖TwY32B2>4ġ{Jr;RN\m6}gSG#r֣O<5c<ɨpL'de,soK\UO+kד/=dn͑h[`oZQɑ;"Lz=ap~;MbLz]QK7C=Y>c&FALB2+T7GD铉+Җ.DV8y6]  E_S͈? "OyC(3[߳GbMjSF$kM_“=ԽkCl?Rdž*X^hĿۘfJ.4ر̔MmQbqQzc:ޔ/3+ӽTCo.7␓(Ga Q}!ܳBI v+9Ȏ@ՆK#d7L5a_,X뗉[ kDrx<č#:m/?ghkZnȋZ#DkF/za{mԚ 8 '4-C xvñ'}*qMsJt{{Ggb9ޫ󎼼Ɨq1dl^{p>z}}p:`΃R61vXu (>;EW4`#`9Q}az[{&_rÌ>ܰg{]3-Lל)k&Z4f4 '*# (73B1k~ cbx=C>EbYM,7mC}ojOu&U\ӅV\?kmsxn.NX^a=,^? 
EE8|*z)̖m0w<|ċbQ#-9ӅuΥ/?RP&-qGעK@kO]'|4@4ūj/9)W] JA?3)ߢ(>םQ-ku WZ`ǿmOz1ߨzM=)V Vx:pi(+t!_O]m*" ^ނ#UG}Vo8$eww@RR"^HW[ #yv'3ᄮVÐkrZ+ƻuMք>V=|.ywW}_/i+~*3GNo&m@9cٳvۆk(qHŐmw]]xGߠT5'+ނӭJS^+""H7C~ m[)=\xL_NPG .l}L=p8wsRwm>Μٝq>nϛW7=i)uY  d)ܦ5F~MԞ=V;.\N^°r-^4ĬmiWKm}}$_fuշr~͚:N뺊f(w4m@k%Pho"ђvv|ĞS8q(5 Sr~k^,CGQ#6bV ,w/Ccz֩.5統~>lؑm+kk4DK^ҵX=|8rakgŇ`[_v)g-9cYבu0LGa⪮bbUūռ9ܒ8&ЎuP\:@Α(9+=S0/Fj(WD+V~+yZRkG|KqoLW#`Z* 3ۚQn=l*MU"#-d"m }9]XplX6<=NSMLOcq5^@]Ftc2YS'Eܧ1ғBi=D;9-t,&(vU A3`[,;ӬYt+=&O $Nǜ* X섹OsdH^?]{ęKؐ->ؼ@I(y_& df6C*[]f'B!g ͘CA{dko׍8k6 (9\J1lThC 13&vClI:L_GV@^ <^ЪԇٖY0M*oGkY'8VytR-wöhl_u 1()>.` 5Dǵ=?ֺ@ [s2ZٽDڌBrBeZLP$W秫~k>n:rJYIN.Pg:,ם&T5 w3cso}rqS:A=M/6ZɊYݘtWzb.eTceiXb3Y)o qfav|5/9Td8m.tk3h;7ykv4s_umbG%_PgV:Ô_moJ?G@hͰA͎7: 7-z`f^l/`ˍѣܟZeL|i9]Ej9vmeUbKqB7GG"bQ竈/}V+nՠnݾ.g #L4J*D^ 2;x[SUxc&o" G(=qb-N<;x5I k:3̷@'Y&NdB4j!M2L^,k r|Slha%C[3DՒ{^|25&b,o!lp@X;?ZJ4EuѺfz4dެc/]uDPIf;tvgzeC*rm磎Qɳ6-Bq- נJQ+_҃B9''3:H_TԒ3dy!U?HKñ8BSˣUW1ώcUhX>, 4"aO[\z!6Dp|PmXVA?59"@4LJ+LeU0l0OsBc:ܞaKqE3CJueT'| Vc vMq-ӧ1{ r]q!SG·" z&BBsŰ?G74F +Gޕr$L zc{ExF#K_=KW=t-X }3&AO]%Lx"O0i)cT*u?X^ՖlΒ'Tq2!z"+" ;Z(ݮ&J5בb2֎H<K*H{aJ n pwfN+(Oi,چmc['`#Wc.e??bԻRptm+ZbY Žl\ߜ6Mgi2Yrad$:`d}|x& >"ΚMbƉJ~jؗ]k&upq?*9N 1lk?,eXDiJ&.4}ugӅ܋zuz-#t& ӇEB|Dh2HFM?'5ߔĽ t-d UN`4U2q6ސeK_iF&g߆+Kדol[vS 7@_N8 pD*vϿE/О%^bi{yA n9K,,طYC1p!ke`GeiqƔ9z$ˣi4`Eͤ9+y/u[ۥ3^K0ޘ-2M?3{fC5{~'yE7Y~9r]xJRfJ3P dOF!>cC*w,{3S}ӗb^/,Y'YQqǟ`ɊXIwY@t?? BQ[.hs*d}OxbϜ²X5/eKE(o| &ɻ%{:z*c{][b{b5{+2B.{!n9LjRq,MY mi(o=vhs-W՝PDfs4#í{QNGli}FawpfrN  'DW!ߘVS̻J;dg=b7wOLAx]S|mLf֑C՞Ҫɗ8]WV|!a=$ mCP@TǍXԨRTMK幞cuyNI]LȬo&~+fW.b/-AlWUzmر${-O%YʻєUDWdܩ>*gcf~P ?Wɷhb'Z_ɴӾ;94QjT& Nbov '$s[~)4 K+})^L/ Tӽ(u#8= qBJdA33n7*k WK}C#ߧYIcl$"*#1nxfV4.>n<+Y+>N_)%)q -2l3ë0vA9gʟN_EOCW-h'[$bnҊELùDr̋`})MD{Q:di9h/ݧBRoM7>‚juTbk,xVOwhEgއL| *<=}&/OLԬKaKoMZ&;Rf -ZW1bV_}w/DFV\KQcrCAScT9ybg^:§@ >1X_eƋH1[*lDofh;Zd+=lJ:RRwK@}15,'7z#F)E̻*Yxcz,Fّ|eOﮆcw} t/_2Vm;% PM|vƌ)Mo/,dvbH,#F}sgd!d+A%a_zAMr|rY>+5]2hr֯rYMjs6j!jPJ?<"IniNafB:\gQ;M۠ċ|!Lb^Grtt9B3Y N9ZYA6 R$~.XM-=y&\1џk?6:F ̌;FqS\J*.dphRbr d1B&6)/ 1|X2o۝Fٮa. 
T6Sv:/SLi6_ y= Ɉi9V|ᓿf7c w} yoi6$i`Cl3 nX.NOTrZua;ՁC0M!U1@KQ$ &;XАnĭFq\FlW~g-PJ/mGtaW/쯟7p^ԅNo#;|ӊfo0UXbu|:O 88APȺ7` 88)~?F_V`}{7ZQ ,բш wǕYv[KF#Ǿ}_u&Ug77Ń {fn-Z %o℆F>fgXh+E/H5gOy$E\6ZgV6,41N=Sr4shϫ$||A>7oР=(}KROY+Qݜqo~Ěd 1 Lig=˗J/jMCwVNIղoNXs1?ItYY7^AUb썘^f\?%gJ3kǚ=%Bٌ] ֔v[ܪFwV&lcE!@tp! '/|X ?ݚ{o}93#zjOcuk.ɲ%Sa2jV=Șp- _S%/hC!7$N!h`[im>ߊ9lΑpڼ_^D7,.XkX]遰ҀO7R|:Fk5ΑF+'aۊpPMcȭL7f ['9-Ⱦ扣7'# M]\"$ƆAXlf>le]h*.A hU鋵&(L@@÷:V=K\˴76FPI s6+Aa| 1ހdj'@'FoJjb,7V~ ]43^鐝 V7.@MֈQQ 黋m2:J^"$VM |jTe2cZI[s7a{lCg<ݍiZ͟Չyl=猔k q&?Dxqf@eV ^KXֽt*W K Pc{>҈OiVuNQx`fj< e -KQFƤddzNHs'ߓ?Wds$}Q 7eq]tQhȁ#_->ԜRҌ 2ൻpL+~$ȳ%ٌ6[5~p1 uy'c]ϔi-ɓ</{yacggj &k5]R0*s 7xe3Jx/-TZ꼱%fFB%f%Ѝ}ܾښ߫2㳤8cBZ}%4n.ɂ~3> uC +p75k.iCZr&P'P؎ؿ>^2a;֩viEZM 5&4Ռ-J]YRn~I=pBfK*ҵ 8v8ۺI/}cQt.{L_ 0"emGR{j`Vȴ>f* 2GA&1x\ ]^jh,-Q,1Cn3Gw1Ā^FL._pOPݙ,-B{ςƸإNd*:$Z==,aaXfbZ!4iG mމ^lFBuQ`65_+ 7{#Zg/J~^Cq6S~6UJx+Β,eЀhu9،8x13I0a8"luOywSgU)SWt{3^^0?&=Cgg͛W-_܊/.KwV|]9?gy.Y>MB^qKgg4_Rb(+Do)45 /­q{@bLSA_) nt i\-AyٗɆVw]C`'Av__^ķYR2勖)NLO Φ!hc-IB>~Y O;FԠ].]#>]ƌϫdAY*@3(3yx3^_fsW H秶 ヤEE@[#,yb+D֞CtԎ2tu 8F{_ӫ8>:iɂr#|:5±}<gIe3l  ' Rj4%YxoN"]̰ i=boȹE´i?jA}8EXAv>4DCe<"x9i!㺓ܿ<|Iqy|A8d7 ogbfk/byFUG  Ƃ1:SCƲ ̀(*}pkQCUJuj8 U׎| tCeiHjSo6w06c(!uopmf Y׌ ѥ%F]oks WjV+ |Ɯ˛~La\"P >@z)>ɀ~DVkm =TA'o88,y!Z\)_?KuMLQw}$@4n8=c\jǽ#-=^R=Y'Y/zDXg  J:X>D5)f(pevb^&p57CKiG r-6]@mc_k3 Qu?J v0L]O=ߣ u0ᆟ*cqX}:V`uՖ)B6>za7\XW#^:N&}0;`EEb^0v6&$):H9Q(w7{#+h>"1KkB1Ϊ{(Kk3B j>!퐁um\{i7.U"̧s`,)`cJ.&S"W±8k VB`Csīl\ڽQnZ_)F/5.BOpZVO^!Q;[ Y;L xBL=`{3@-RY+0FxV7 VC̏v0gXhEֱy]ۼ9L+5oAUF(}jyL;Pa4ETs3eWVL&Bf84=J))ݹrѿ(6ӄnfKEuN5R?1Lާ Q QR .`7zdo hA7[(W-E=+g?c1׉ee=9؋{wt]Hv/IaJJs>yxoOv \1BLWBe,Q9sV^;kqJN7d;5Nm[ g/E"Y{ؖŃɭ1ڧRrR͊V%fMpI tЩS! JГϬt:VLD&Y#8mF*& X"8-nu^t%#>"‚82=YE֔]uŘ@5D>:2Uc찿un;o!-8GhR4HKD]jf hc*]n,^@4k cW۴QXiY'UA)%?w _b{X@B-Xꢩڠk(}uPFX˛v-_k^[G G/7@StRbԾΛ:n]SřީQ0஢2m9XN}keCZ+vGrBK[4DߴOeeMDdӿa :и Uꦉx=R=#PKa \ )&pG)\_S1@Y Rp9Rq.FaqHlKL@jz|,2 rp(QWL\!Q0 Bx݅Zu&4Emmv}mTØh_yՂR\uE = _kM,xTWx3aH $aSJ*DZMLS"‘u\~ =\Nd=kYIU": mwV`UHV (g^=ed.A-9e>ڝϦ5c3/FbFn,XkSV z1Uc\r X¾l:޼eAyXkwrЪz1$ C6*g j=S%JXX]Ub;=D. 
sPw Eh2ZR@L`'uBIb(ċq'.xNs&L,Ě(`5Yb|߭Vc a9ןQ3r'!`TӞ  Մ n<ւh` Lx65 Od5vB~;>(~2Z+k:,e<*DqW :5B"^ # ԑfC6$1tTE/O V iZpH;XUl7̟,mNRG?A_{it/裴\ f}/ZihwM<(ݍ٦ݬ8s^xH,_=Th=c֔y.沁X4~Fs/63eS|j##5PpF_C[vQZN>3mtgK#*߶xt|(0 I_LYmG+S(I5t[M 7қhXDlCȟ8< F<7k *Ͱrc3ϰfrIKn2ϫGgP@6iY 9>=& ׍aPvPJ-r!:|Yךy9J4ev~{9\wP_'xu*|Xr_^Nyzl|_rͨopv/May*il 59w.sXϦPoR2G>Q94oz=#{HpHs~fEۉؘ'LMm§ \E3xeΧ`wUZR5nkShZ_\DHi0%pjFP9foi)i5m N?lWScRMzaعT8'^hnOc c鼭cjIwlWm ?J^iQ=:݊'0JkV.մ,~0}V 'S$?r͂;#,|KDLh#8Rlq+)m{&eĜcUl b[$U RbNJlSi:ۿ+McQӡUo9cbň& hs4Z/x5/êa0=Qk1&݆-z&wؚL<˫{kܽբTAbՆؾ ߅h`|'*H}^~:#^b| 2˺NRs͠6 qOݢT o~۶KM[of yiKtz6Ekj,}}{wQi+׳ _ٯdrk!͍U١>f=X@mҨL-=(QjzRA*+6hV~M'י"X; /!?7I1)tA#y=6zb.5Q9j؜iӅ'Gwd6QSҙ|k=dtLyT>DX:E0NX-zlڻSO]<Z|:3MmP`1oɄOZw,l+?\Nؒwkݭƣu˪/kz|Ǟ4O딒kZ*7fh,aaRlkvd~|?չu_TA 7-=3d|**Orpw76VۼZ#7I_q(=>W2=3YqϭP8}ryq=u[@_Rė~M  ThEϫB|DfP5'͔uX|#cث6Yziyy,cOke5H1Da~I7A!6 VӸ{61712_sppDOðS/Q:Q'Ma >xDWB-n}%?KcXdѾe GOйN8ib 69f#FM` }\nҜ2r>=JR˱O1C{M‰pȮl| ٖ~Yu9^~ C*8ɤmA יaQ\=mAWwjuQ"MtC̓[܍+/_YG3ocvJM^NּzpfMOj0^[wK6YPa(? 1B_БV w16ɐc_|1EZ_2vCWҺ|-LMQq8fҖZ|J!6Ҝlsr跘{NLנ`f^۰mEt75qFy%A6)sg) ](#sp2 (\sz4c )nM *}Tzn}4~uQwi ".DeK nĞ̗c[v~,bі/6_,_*ݚhr_-,ㆧ'6Cc;S`kNZp]nFkvo$|?K2(۳=|_v>] 64[:)>䬤 &{z D3,Tb|nW)u :*WK/TՓ2rڨHE'0k -Br`|Sd?4yKRCM=;ij(r?h7tdT)58RSWD~$|03w&c/#H~T-E\;G_9syV?jjW"axl/(JSty>˜ .&Oefk*<3HMh\8vSrӁMz\\aʧ?o,:|E5~ K>":Blm 4 ] U7=#z' Q.Ǎ;">U+/M ]`atGA[+uGev 9זK]z4m!4ߕ{qY53¤qPF_$+ۜz`ɯͶb߰VZ/}?y2KaQIH"*=lGO=l;ޘ6I9D'q X>as>+hfnzє׽0׬>1#UydyB?Giw% Ջ1G{SO^]5Z;6N^~Z=ܽ@E#pcmt-Tŏ1Wx-~uu$.]ήFъ8%2^qKy=IТEŎ/J0 LnJhxaIsM@tgKybI~]ưGsmiVv.Œm햾Xe6I#&TPZc.O\OuT:!myt+k0\)^j kO1YMLrx\e\)Ff yN£`7xrR#CAjۖ~R%|k{x܃2џZ%#٠''ڈ! Ì1kbD .WmŘK>,E4o.Z5XϚ~kU["IFu%p0EG& `0}X3]>4jm<3C;)Cۀ.c(HFsbxYP #Hy[fu>D?^fkj=ND]g\No=H I{_7eH*iIeUde&Y*2"dl"/΋s>>zu0E.yE[,CriM| Œ7Su~0`Z$L f1Ѱ_K?Ƅ_7E|1 PZJۄ?FA[ {8W-WI-يw5r _ת͹E)Rza"&߱dv 9Bm)gG)1i+邜n̸D7x| wdo%zf}PJqeo*{G1C& |ĄhU^;;hv'\lxRX>3LK҇($EMve 2v8e0a%c\и  ȣܜoϊ~U U s,r9aB2`m+p3O҉G(s4wsc1nv.Yzh:%8Wav<s& }4.^Y  fk?>E"Ae&gz!Y3;7)Hr }=e 7b] ,jkkw`xca<'έhđ]ܝ,̐#&^tBIE0o8 ,/:C%8\ 0_ͬufm,mJ뺯E0gz0bA:LWFJPt tulcgYk gē fq%1|^bf. 
vs-P/sFBXl_nfQl|=@yOi:Knlzi0$c$<ý;~l645iR^([ԉƼߖt>C]`;=N, ;gO.f{i_g#ort݂lʳzy;VlH)GB/,Ɩl{7tCdzy~9omf|O;BL&ё69z!|a+p=SvVf"~]e %KgNLsYu@RR)sElj9

쏊b9.=!q;my:z1.e+,XMYTjٲ:܈Ճ"CE1O7C 4wźCQ0XE>rd谴W)M5Ee~ .C,uAJRlr}. g4T} ;j׌)T1^x =E|!B[{83He~ p$e eY&F9.c\\:V?XuS>H:l  |2D WhbE&0?@z gL /BN#@Kê<0B82UɧQzPfzCJ~-[Pop kjZmQ*,[)L `5η@D81!LJ|רaYu 'p=+L3Ŀ"\.HdzYB_ (:::LbAP=Ӫ un_cCHp0TDwfapR*/:$o Kaɚ!) Z}iGgvfOX@WĨ518_|]UI퇘o8Q9Is{1jJ^LxRdpʥ0'>mxd;X ٝ-j,. :@"蚭϶JbUwc =_˕%H*o嘏t_L46|=/!v"Z.ǮRt:/s> 5"Hô3E=L ",y%fM(O}f2]\ S}}RŮ!TzUTpv:!{!#uX *?*.O~jB&^4FnX h]4XQ5P5W> ?X!M8^x#sMaWa>ۍ01Dcs$ Pg`7R +S=Q/9;ڋ͔;,m 㨲[Yr,|T"=XLk=O0 g;7ҭ9xlӫBɱn=+7fŒ,d*KߓJZ7[Y30䏷JiTe/ tuUI駞cj(^ݭJSh׃tXLc&% fh,R}AjVqGC̰N8Q3!H6deEz[.̭a!t:SsPXO=fk!l&dhHqg×\;&@eo&(kΕBBR&a?᷾n0G^bZ9qUF!ҹp9)bPal؂OzYi!;?e{tˮj,&XoAŽ^"x{9 .1g(c-%bc/`)[ 2gbMbif84wbܫw zf9'ݐ^ N{(; :`Ɠcncg5fFx¹f V0st2XKk:T1 u)c1 Uk h>H^A?MI?{7zZ|四4oU1l$a6w M.We7aW\k"3^KI'̫6.ѹKuJތ8c?`8jS\9We,cY-՞bA|5dv2cWBo>VygY玡WR#/nΣo۷ҕ[iKd`hܶ/>RAo-֭hPt@b/)pW*vk Kmg a换q={T SX]k"[k.bv_*{"*3|L㮑zca0L9˶23h֦xCnY4; Oeӑw ,wZ~uƛڹָ&OZ奈f|3&9%aקv!FDX:Uz*g1nw]"vu)9{gw='a@g+،,%;-RümلX7P3 M2^ޠ[x ¨}9 O(֙)O!~bYEӆAe?s- v+uP.p+@Rznh dq7Šz쎬8:H4/ܩdb=lآJ߮y\XJ9_h%GP?\ W~,q>VfH^U~QָƉz}7S9(i [i0G&+5Չ} _~s?-J~?WR̜5zeN42Cvj=;jOCd?!&';xC*x,^Qɸ{vy#\CY= Z.x)(B3;b_WGFA5R~ϙ6InU`Z) cSNԶ*5&Ȉ-3Mos"u>r%6B7+cu7:=(cZ!ycQYe(ϟXs*\3ڿ1d|jGS9\eQlJ(.l>jYjZ<o~$e^na7m4&Sr㦒?:Pݔ4'⛲~Z|ND]_|/cra@P䣼zv4v-RifxG5UϸM={oCʬuX$|~;b|HFAKk/o^SƋpSocNb$ZmLAhsUbg +2`M Kd|JYlw}{hzd8P9I@s  +px߅i<`9v2U1tm+T~2IX YxwH2fc붰;|u/6t6C6^~OBb+ZK%iPfs+܀UP+:B9ѐ~ڴ2Zy=>'aق u32 ;fx_0ETӨnI0Gyci9*u1DiRsW<8DYm|r>nȿnRM|A\ -FV,zu>{ rBjǝ%.t5-h\#% wu[Kh I}[Q$EBM[ R>b:'ѪfZiƯ̱lӖ=Qj|]:6-.b#J"еYR 8Y_~MzE|HeE|H c};e^}\ 6|vۛ2Nq`r`s+<@'qyGCuez.%,{t1m>t;X{]FHOs{2aG2uWVcB]8Ht2WeZ4-qG6`u#f+җgt(8IS$o=L2-Ӭ7[hkqK,">K.keٵ*E&vMhe^Μ1ݼ+vV&ԏucsn𠗆=D›\%\!RͭRanNٰUp ,L?Xg M|,sWo_[qe&lKw4RI)Wƞi[Hg+h m%RyW2^rؔ' xF˺ LD#aV>ΐqV\eous}S?a'c}[^yGjYN"̬Z;j"Þky6|pG1o‹Fp[92Ո6 qs韍Yj:L~f\%Y:*K]m\vΜ݂tyWT/R\gp=녮y+`2z:Nn_\ sKnuuN|S1@?>KhB'{> ӻ`eBr) !ew;Lʧ+^zޜc{{Kt295O"|B!2 /GôgZK#n >,RZ$6X Vb' ?Atv7au+zp"܈3V&DC6Gv5( 7DCD1X >J"o$LUelnd'>}ەyeBFU://^],k7Aȸ|Mֺ7Ϣ)a)j_r2Xcwa~qBɹ ֘e8V:,yETx6]2<'Sâ.+ rF&DxJỲ͍i4fvuJHy\!e[y\qYCx# 
()W(u!z|!]"bfa؞8Pʆ|HV*?΄|aoqWJZC\6jSaxlA ^ˑ9b)ss*1[ba oxF}Q)+:ҐR aϟm1Uz+N<;ğRW$Hk؃A}1{zލ#+; EJqOָ@b~Yn}LІ߃<y}Z#u!xgZrf_:݂sf=fM9_Iu B8U{#otMFs~z/a|SJ/w+WecaQPr6[Ѧ|fMі9(ǖx@).Y`NTόF~ Ӽ}ORFo9mٚ߻轌otV?Z?!b*+c`SozK՛ƼG1ת=M?_{EkM)Vd-&_[p=ǎCÆݤ&/=xq^fɇ5z(v8˙V.|E0j-.pǏϟ0bYFu9g=o)S%-L!4;30}} ;U-wdŗr\M Mf=mcl6.ꍷ@ #׸@k*(`qwrUϦo5e5x;KLlxdV.;.fX,LEA ^ҁՈzo]:A6 u EXӼDhaeTL|eGU\6a@T]VQxB<-0G['BCjSx6`A<=8]ϣzz \Ж(+ Z+06,~ : qJ#a 7Tإ.,t|BD*Ttmg^T?-~fXއYSW}Li]y f޵X4Іd!fK#lYd)/+#cSZx+I45A|5f.?P/Գ]?E! ͙R|bct. Wi 2<kv,F`xMqGwψ|ԂaoY8S{-#Yjh/$Ղ|.M, oVG\uV^C@eϷqۧUJQ,FvY' ]oXqJX.F'0[%|F“?H&-^Z~Mߗc<3dn(Pm>#7EG-4/m x<#шmO@'dteDZuA E&} =r^O+HR޹IE|bե5,@[h 2aS,qǾko4;1_=4k->Wݣ(Wz1[¢ 7X%YX^fd F]6sHSȭxe+J1lޙ00Y:q,ODcیXTCH|a rTaDnBVm\D1&'KW]硥xv{zK`~/cRa'Vu„X>1Qw=!ô.T_rEbgm`_oD#XKP# ۲,kBкޖ`SU}E^#L@p4t`u-BL~J$,> ,ͭ5-8^U1Yp "Y! ?7K4]{g}8.'oȹΏL]%x6Y 6Q1NL>eM8BVҰ3'iޤ'$kNY]XΥif|֣eF;RqeE 3QmhEi22;xcN⺙`|G+?ވؽXDN&CY9_%SܗtV ` NڣXrXa_vW݁)C t=_Eؓ8< V֢vw8az>Xw:EeՎv}̆LMEM|w2yG1ܯ2'HċV+(aqˁt-c%ɗtZJ2H"fp^aK.8c -Y3lGBZ.c;W,FY=4LW&& KPyt8w^ݢb|xPrSa+1Y`Ƹ5;A~!v_pH/qT HŞl<퓄|a3a?EW+'^h2DKxyc S<gLdh';"pzɰZॉBt 5-R, ]3330xv/nIOcRp&7&l[ˑ8 9-ʐ g3}YhcNA2c1~K`~VST8)9$.fͅ}X`#ridO@GG0m]tbrG#3i~w6s.f.0X*ےͧǣ)õaRځB6}x]EuMYgXfkEYyYAL%"Z{#)Dcհ[WuF0=?U(u̘ 谔>/&랁&'{<诋qH/'QCec讴g!8%l+vp%E`u9dMp(J+q@6h+])2LW‚hLUywqݪk+lP#" 4<^p Tq}.2֙C nq' bwj G&F|B9m}%ntpL؎kK;w)Td8RE!_a1~)|ꬃS6Q qui2DSkMWt1WEc-}MS` $CeZ2"gk"Dcatd(sa6ةb|`FXϔ>%6[₩4>c.a1W9fIW§(h#>t\1֖d/KX^w/b'a XE5KtX#FXepd fء܈g7&ǗrY]Je_omX}\ZnCkB!}tS1r {x [Dcog\`"iB+:kO]cwqϖ 7tD, ʙߤ4[N+ϣ='hBT:ǢG쇽(̜lɄ@L8͢Je){fݣC)_X'{ߤFP̡qoZ)>HX뇃R{9v |N\d2iaVU wsX2VSu8wptaǺ Wzi0OYѬQlS 6?' p,z5kz4}=2&΢ 49t'toOJ.ag7BFw#:g $*|v4k#yG֣i*7B`OF4qhKކ]#+[:7^Fgx?t ) pʮBhvfYvͻ]Z׸Gd2A%zB+)޾菘-7ŏ|\P*mqDy4 1ՌIݭÃV 3V9dXwdskRum:5InȱW8vv~C`Z-8Zz42.TdF-^їwiIQjK6K$bvB_LaP8 ׳G(XOQ[KV >g2>t\bex:6pM5M(~+əkt֧23Β:[*fY%O2D,k+cԱ=]ոLOiP@^v\_)?Aek8OKsm˩ܢ^{KYPB}r>QnF]<Ҳgٔ;]SE$a%KS#-g _ˆOBQL^g| xzݙ=8 }\b\fx%N?#\rbҝ͛MxƟTm@[F2Pv(B"B:̙OSKҀY ɹmJC6/mxՍzym|MUagQȉn/M4X<. 
mcq=iVuV00 B:C˂zϿw{^7sɡ*~tnnVQ^880G(> k<׆g䴑< lWyEspzG彶=3 {zݳC0g N{\Yx});(,GC2l:[Я9& X;K,AgӃ8vcA]p/%c}K#.) uzp2kƸi3u~w%rv )5ݡaC'68{?-χH-XUu` XށROSF6ہ^2 qt&ؽ,~)-ݿv]oahc>zd_m)%=Ue3Fq d |Qvܝ=KDVUJKIKPk`I وya5<u;+:б%zEא~)᛺r-^7^$ҘY n4m=NlJ(B>Wmc_n&rK[" ;e=}͆GPNTR!(f2qּ)jȝ{kpQOzαkf !$P5Lsyz]XfF*cm>*64y uIag!{G|cG>)[uI;s|mm'˔{OQlvKc"[%sI8=[C/E6uign2ݒg+_v;fpkŽ|fdN]%̔aӑlPߨyѠN|\ҳ]VӔ <6VE^])˜y~ۢ_qq̜q Qg/9}RG;ioC;F[kn쐒dD']!DZANJ q~M+̧G'NҜ0˔=:ƥa̩DtUSֈ5zkp&35 V-])b'Ye'L C7;_Y"?W;WZuXo+,~ 0he֘ڑmn0^+Hj2ǐ° vjrٿJSL=nN][03h 3X=A#Oinim}?z2ڨ&,hfv2`y%hEBƴQg'Hm~?I(snKBCϪ4*a?{:7*QWu1$~(V7v+/S~ƈQbWS4]$Kw*oTjOkw-6Wۘ6cyߣFL>x<%bwG`*!(:ﰚ79өs:Y>~EVFF%"TTT=C(BvdDv_#Y \s_s߯}]>p&W]S%?chݭ![޻݅߈3b$1׮%G>)kC  *D6<1ҁg~pGkQj.r|yFǓȉc#Cm.྿vVxc)n=rDqh]ˊ}HnjX_#vo56/aԷ 7NlUH'FA!Д`6zJr2@%Y*4A`^8DɯEBJ #1g QK87Tx)->x:z̦Rf1Ub2#: ^Av|V]oˇ/sC-mC/`> }BQ0:כ]J/F|#~9Se9*gD3xZ6yBk fG;me)?uY'W*R93:JJy=;ICРC$LfZs^If.=Gr+R6Jd.L;| nxH9/j| Ί7Ƙ3>M?-e?$VA>(>B3J8>CMD cUs 1/~LbKM؂-Dld uTO6aya[ 3QW xU/͡cŵ±Cꌄu Hf|rGZqܹ<{Mn&ub:V,̆qZ# yDuurǎGr[4]p>_]B|6֡ƒ8gtP屚4{iߌh;%|u~GXma P8;ƒV: Z}8۵;/i֚|@']xo K:[,d(ӪRikE2KfY/~GL"Ec~9YCe\&;7ƥ#~HD\q~ac=w_sۄOS>R_{d&Ga޹Ef59BYw afx >ׂI9XaK͔gyU d"+Y]n!lHtd7=G~-W(qLCA̚_ J _|%tF%un r;Mɝrdgųl, _֛N|_TӊY+"!(a4^-2f/$ 姼,hsLHs*\Àh !"pT }.M~~eV35O)ELyV /o5*JB.2w%†I6k7N7iȀe\̔n3SG')<'>)JaNSXbcY%Cv穄 %/6LU˸40h1}*±5z_8YcCבVE@'љQv m'GFѯ7֓%94&,wPzad;ҍCgyl`5ᕞ!_GIi11Fl|kGVS܍0Ǘ# ]M/jU3Sh4oza(*tg~e5AU& P]*w>$_Juੵ 7mF{w<;4v9z7Ęu+&ct9-EFw)0OXN'30V{!6en7Ǡe$Lc 9|2\Ġe*|wprՇ~qn_3 qWJof,Nl ʆEugt1҃W0TRnLqcr$rʽ^XW`ΕC)Q\66I4Mo.!ͬBٷŝ;|fYݨjlKnb'3eBY<3E&W,Q,__qE/6,c[;DuS'$2Bib<5c{{uA]XĞ'i:[I%Y0҂כ3[TaM1$j'Ö61YKN%z-!T5^ H`߁sWL O3켓Ȋ],#c/)|?U~yESFJτ9>c bRhXeIey< *70H'T[PTSG[e}&bބ{< BmcPiEH;e!nyj=*vԱƝͪYC-.ffyI6OwOy-sFvg36Nx);AXq Z 5Ħ1~ zoaZ0kгd9laZ6vCp=/RǘUJu4A)?A#wøYl+ \o5Hr`6ְ74rxk#1 E& &z w0<Ơ$Xm!>T#/^Ն!}Ѓ7Kd5wǩ}Ӑ ypp oYA _)BŠM)·n _>2!VihM0\?uĹ25 S$~W x_`.>cE?a%ꈚ^=QjcB3DHc 4FǝbCUpT1]hnꫬ1׃KK'sJ6P4_r tq:u3@YN*좊k1Fɨ 9w< iC-O;E壇Y(Ak -8ZRHcSP2k$:}4k‖ce{w"fmjDMX0`Ʋq}|s<:eP%AycD %XX }|1gbV8*]:(0 ,4Ō clYTQq,d>+1?ߤ%z8ߙ:8`U;YJfÊèV}f 
ot-23S~63oltdڧ0'ۘņ86wX,Y/GtY/~t^Uiz%8Kgҷ}UQ5: b,C.fkX`ғGTB!RKGߥAh,6 >v 3fyc8ً-vҠuc-_]CR'&fN!b=GO&]nrf99J#dv|]r;I߰i vh,LS2BJƧ0^O~)0:.JΐH$X;t;uӃM=/6agL>^ X7`\e)&o[ey;Jv8 JhROM2u7u.Z~ ٽGJ.5ѧ.|݃4]lL3qK SK !Oi 5!yH ;fxK8kFK(~U1Emv3]aN?3IT*5u J8c7 KFxa"mcmbzE}C܄KDSMhN#=϶٧l p񈍂Y$&1wyrRZ~Xb$C[>O+ciq45W5FAG?S/f>-Nf oXN wVf蓾Hp;Ԭ,i kY>`˦][7P][_p\De ׷i2?0B/ jGj ObMP?o!Á):sp:%=c:-װռξ?FaO^GғG(2tk{H*Äz/%_^U3AfipYX eCvkӾcfMI0{'sBAݯ|u>!/gmrk]`nũ[KI>TDC?|>.n^4PմF&SN glXς/#QJnLަɟH~B\sPFߡ ^ZɼmADD{`S+K|mTƫS5xۂq 9u_طxŅMiw 3F$65L3-|F|0>젯k2>O҅`&<S녷Q:Ɲ7m$:jw]ST]JJh->_~wGg^ [,L܋ xLa{(omEO_dޏSBMʫs#Ab4z? C".`_OoZ›_-5|4i&?O7͆L+} `D;8}5El)#nTXZbG[R].*gL3P$^ĴEՇ֧ "ُwyl闂^#p!Q0^P`T o.ib7$l_#O'6 lQ6Ȇ0|CVѼyZv̗g k'9 cd*dh)l{|Ms@'(r:UAO>`fϸ(KJLq l"!K jsxco^Wimn 1YTf˜L7cY<]Ek~/[)"KOiS} p-,`[2 w o[uD:3_[JyfnԴ(ZZ} d>]9,Hاퟛs +^ [q$c5T(2lMc az[x_s)G9)z|Vshoh˝E\7Fiie槒ʼnׄ'o)L%_Snc]f<>RQzs?`RM7V;cp3^Rh%ߜHcj'F6ף#z|d01PS at8Kk/oKn j= Y5[ }j‚)7 An>EJ+ցjɳ&XF+BCt(͙=AW^Y*4do:1/iP2pĖP(9\6/z?Kv,=~ y;!c^,e;iCdಓN' `̿:rC';b39݀\jš)ܫZ΍{tp9|ݜͷ:|S䷤-^́PջXCp}}+#puMN#,k+IL<K ~VAa@^ap #Xq^.ߵ"68vaMԾpOAt!2™E 4W ۞؄Vkj:7R f wf#& E"avvHv{_ԕ)k5XeˬGyzŰYB ascF\" hԆ3JAARZA&yu@UBy.Nͽ/QҚt 5l1ߕɿh|#k pTgt`'<_(h S؁q3OyçoF|5#x_YŗyD5S1\X~lEg Mq2ᇾ;^ ~ˬxw9[p"]^Y6>υ^^9m0,y7Csn̖[NzpOn6!O/^=eXat>'|z_Fs>dͿZyR>gҗYw> Cc7[e)un%՟> A>:g|ό?'rӔ_0暗䦈_EJX4 >*9]T6t,yZ;>-:ǬSwk}|W1* Ȗk ‰ܩ>2%;pCO~b  ?s|iUųǝvFȷvTgw=_yh%wh9 ~W/U^KFԱ9}EyhfO$|`=ܖ׍qs7,K3ݙw8?"Pe +tn3`6KaS##^]7*amSP\y ̎6MݎfYˈHm^RYƦ7Qy_XM&Vvٟi tVNg]{a{vAa~t$Xхr{\ U21w(w 1p'xp'#B{bMh{;vICt\>fN 1a==黗0èyUGΓ >Q$y0 oZwz` `uK4UwZ:s7m{^K.!=ٙ^!0Y}#.`Zl?_?WQ}~]E.ܫ~lcr># cѽ{9oTzʽ_grF oUΫ~b+.#xbcWњk|&SnLU1RޜÍ|%jQ_XT 3ćH3#X4yV:nf\Ef 2~JdOĈGE~B.Ζ2Q7d3ՑOkdˇ[۷A^ +Z)ش$n̷Ž騽:C7 #0 uf`WW,l)sg=Èq~#IzHlWv 2suts W^aշP=N?;0oXCZ1K"bj{D$ c=ƬatBg*ǐ[˽,կ]CQy./m"q[G d訳Vй"mhz>9QYR6I9d+e@7[ O.ܱj/ܫa9n)Ҝ/q53*CWhOxÿO4<{{۴N8ð.EC, n0Pʷ5>ٜs5㇣_ӄWLH}%B[P2eP6Z9|J_o3x5?eʧݐi|,FEXqW " e0/)]ۊsK|^"^Y]=,^SoJni(Kv:s =k)cW2,S~11SʯgcsaGwr*ʈ rj?;.."~tr&K:7K8)6.S_Oϳ-!3$KeRW CӍlݔ{H̵ ݂ygJL_Q@L٨> zn2|_0m.=Gŝ%A2U~hk;1ٞQ`5ZOcZ#8v4x~mGxikc?-fm%-tB =g 
0Q6BkBLm=cXl5EWhh4; 0ȶze05A8讁 ]57 }0vf.q߇esv0~ f[_6ł&/ٙ0wt.^΃͞#/HMXKCM0}pgU{=4=vuH9"4ƍB/XV[bzC|#ܾmAFxq+Pa L djlY2+׉t:[1PnnQؘy %Q~7ֳɃƅSqhmQUz^?_ fI?A7% &)HmÌ$0my*}L _.T%{|*͑/ԅ8!7n7AjE'n"^T+9nh/ǛuAQ2qh0"#l>H_Jl}I9%\":t97p0(M)xuJ KB?/Q-WhJ?T7 1Z fXd ea(r}<Գ擣xn w)6?vM/CΗY2tMH\0;DUǟ+|m75~;X{Q4*uo]бrz Z@t6z0i[l矷PLVҍ,FD٢\\pjA;Q/4š_Q!}UllIv?O6nl ѫN(,4XC[+|6@ )<;"Oot]M_O" n^Z&o5 Q~ >`q x3?@~;\g h< ߕ=yZ]a]0nBpJDB̋Ʊcqq1F9ųnX,d<7Fc(q َP#fr EL? dZ6/%얡Hk70܉jkf1^u@]Tl Clv]VydEͭFTl FUP%~Š ,%( RCC铌1/]@t$[i]kŧXUvWV?daT+)NTC\}f+[Wk4$a=ŵ"EPY׍.H9b/=;Ny#%rޞsf:г;'h;y9RLGRYnj5+l0GY"%~-.`˲%ZM/ӧ4#RY=>O e+0SKʱjv6X,[YiPW#6%M_)rΚR&o[9ʻ"a))m;oVH׷lP 2+#L*\U?NXjGؠ?_H?- # W.eM)"n )ٶTƮHA(+dBT~!`G`U/lKҷH֑Y&~-رZ5EmÅb6CuDtE*flBmX 4vf} Rc,ևzⶖ~5ĭ2x鈞b5|2+o@ S|$w6O8k~ ;2k'/”7YWR?}#L?)Lg;Pmpci>C41qu!vAS9{A{MM30BFg/M[s}8_]_`}(5V#}M~!mtt53†oָLhP/~ ?m_8HD F++?2F5G ;>:^^pP˅}W@oCѶU #S-F#qbobJ9r1:q̲S2d4_YhM^n %FlօB| D3Cq zIl~P6D,74@S'-4G_*<p2AŸ=@.>lk0w;ˊzSs1^G=L|-Fh5Z7:7 Y5̥1 j>bYG\;_K! Es|-M[.<)*Bf0!{k=׳H?Vs<~1*; [9jV;/z~4BLNIwA%v f.^Lt8 ecORY §"\ak Nj @ıP'fotwE]Y0to6S&+1 $Be5, G%5б?Dn/1^0}b:صT "+$X iyX fBd}_}F+Cںt=k>ʙ4]0z0]%UdpP]%Q+0 f+Y kg !0\Mn3IZ)Ԧ.<8bX!.d(oC!~L)yj7bEײ;mmuD|4Ǫ  YeeO̾W AbK? 5O-VcVMc3Yݵ,vGu{cr6/_I:/2[Y!K+|޳#UlˌtՒγ,:<ʃaS-؄>wưV9[M|$ӣuIgj*e.Ь(Y ;4&f; n ۅi[Gu{t%d:+hj#R@Vf#SF;~$l;{ >Qcs!Բ/Ot a7xNGO,&[ cgGO:쁏 B;ToY {̽VV+:NQe7*m.$ӃYտQl{n? F6.JSТ.Oô.qfyI`~Em7,+bx#6BMt@t[fSݍk[IQM3 NMz:\]F M41ϐ?vyAJE S*ڴEt\|Z&ㅭ+/|{h G(UqԲ3bٚN#V&4. ׫OJ7atNz\2\J[/xQIѰi?5#6ުb0uA[T'u_SԙM\d2φ5_dov1 ɹ"RS{9XNŧPI#D\qx|]vvAuŪs0Ϯ&c~чp %G1>”)3i֢rTgea}Wg1=c*ƐqY`5OKMaU{ZcԻh߈jhg!g+l؄};1M%Y7pNqadok{~2չҲN.exLVŚB`M}\jW#Ї!7.X^_~E8 vʉLAeMwdsSR[|4Y'$b~g&դsR֛&0&K9-_CzGѐ+qGUtKb[(\. =knmGg(ݗNC~̊G)dsIc]+}~S? </ ¿넂dxw`Rl/wskI?BG侥m9Ќ!8շeSibF|/:×{hhu&ռ7E19UQ`;a>Ke ސDՄkԫwN~4)oe{Gݻj4d>yRQ+IH!4lBٔ{{q㎱V0]'3 ÜFU y˥:Ym'׳{6%uѪxzw)>5IH6;{=>TАD_e͑52 61*l`6[ؕ"u: 1eY64?_KK4C8w-y0Ly:O7lϿn]CRSDŽ[L`QtLpVfctvf{^e|ֵ,<}h3WBܪ:˭ܲyէwWB ::\#®G9fF<7s^ߐ[GyA;Jr7~E<ԔZ iw>82׎2a4;˦oJs?'tGsz{Њ+)#Қ.] 
]vw0/s֖E|5Mp0_p-}\RdKO=赳 ]Ѣ5/tKϰ/r1n ,Ʒ]Uh,= oҥ2߈$E^S 0@;?(cޣG]qLY1}ӑ͌\]ܜFgcS$hSP7fI1u`Gjb+*O[CZӀs(7}b␕BӁfuRGךsPAzԻ:g#Ges\lf?҈:Y}*)ނ79thV1Z(+T ǰbO;<.&䛜ڒ7rZdZzl=ʿ n< =׽;XgO7{H]Q(~nXvhX|LXó^Qy]^m,yP%!Ў l"x6ˆEWʚgdt| i^tٙf4ax/|ep$OͦxFN^hcꝘ56m#r0Z5a+]BXl< e{bovC<|#OjrszqG`R=f xvf#_5\7\୧j/|H~)6x1={¶g-6&w-#6 gZ1'*mM!{*:aZA2o,'OѡOuijWcJmm;S224N:߷wנx+ C"Ux'+pʗ1R)i,,ik4'fdKʫ7R9?wfvr`0ˌ [sqBI2hw}*֕ҳz~2&hHuُXHtLmwWcЄ'C fĆ~&2]tւ PhL7z29Z?n8rRNi47ϔ=MY5S<[GC6#Fui,ߊQ:v 7Zj1*MԈkµ>HhHmilV@ӏPgkѓf-g@wm ̫cDwr=0{9OYCEVV5#vLݙV"π.Jh[/lyޗ/]x^TbigSJ[w])4ILK_#{XVq&hԗLaJ;~ywr})\̕S2YlAF T&?I|jyfa<77j$ˬ1xzeԃ(] Tَf)V8/56n!wPbjB:\ՏbSDP/9!"^=hC5ga'BcwQp-'"dXxltSc?[5軱8 )f(K<5=}rvl[̏䥉E|Гrj=MeGXw=>;G,o;ʙIiK\=2U.d9qd0QJuTmcI$f̜ZSm4FTx3ddRxt;X;+#*)mbI% Rо&41};OˋWP6!<s#qWRՙeOѱ4?%Lqt1YlѨ|j mƸNHAlJ)*n#'b2Q'ZQ8Tcge'|1?7"!a Sݿ]shэP=Jt|ͷ熱ٷR8ar .6FS{π+ wPiaiӻбLsh5%j4zD^5PçfZQF[j)'%2ڤ'Ybuv \Ogf Z|l2Ƃwyŝ Jjf1|csC邪a% uuzRȸ2z+M{e|?/&䰂[ ?k:_ATcaN}[ӪtKzdaF[TLwNs]{8TPf3 9yMҜ)bڿUBNJiSʂ%g¡8.S(M8Bx9ϓSKf^ԖUs-qR1yZ> ֡`%: !liD{Hj'Zњ'2Zcy\#em]e[g =3X+ybKi5yx :MՋ6lWyšwVU_a]ъ؜+lmZ ~č*ݙc9gҽEBp+l k "c^1ɷnbKOpgag aךpk 9#bjPgn%y޺K·AgF0\7C;Yl/0'k0X !weE6v 3qa#0=L-g|=K hkNũlpbjovzn.C"">=)S)-ؖ]NkZ: G&s F5;$(Qg{<jKb )pcKOF[wI̥e͗/yYy @uVr'^b<:*hMcIm &$mHQI ͑ZQJr_K[,L,D9%ɕ6P%\4KVS*?1J{V͒xa/; X󛟲qI 5+XfKgI Ji"AQ.FH&K̢}5ӨZJwfC JT a#XBs( `}gVGTJ_:j2>, R~ >E c) M%N;?Z-C_h AQ ևz(u6T Ezf]W .!`'⠰F!7vkeek50ef "诪tw=E @6/=5>ڔ ]!2/x\َۏ<'Nt^-jRc[Fu7Ba_cT%BVٓaTk2q!%)fgéLe$ *1jV ق}dOj3<.ޘ{ŭ'|oB6{;cZ "(cy;[`$g8KA~6o\W#uuSZ)gC"]$F, %;0rUeF1|A+~a%^>争!҇!)MQ W^A/`uw8to"ÇEHu6#24Sa{f`2]b*ʦ⿊|8+lw9@#a#0]hj&AapIFS0D96gЖpQ5Jii~l ӻQ-p m%ehV)BO#]D{i韵-)d56Fg_4 07A/H :`a?Vߍq9 =?8K_GmcgSs#\Qh'xpPf7*v]2ujZcO v~ JwxaW8cu^'MHڸۨ'A9:_g6ÙWMo6J`;KJpk9fF?bF9pA݆Z7;_%mmAra)+b{+"'lm`ݗ]P<"bdldftfQ]qM `'0ԎSGY`cLMl(lڂɓخŬ!YSX6<F>퍯 M!1ʫ ZEH^d.鱠v#1]L>ﶀ?Bn1EBBU#~{#ǰ}E0f[mX4&0oypZ;8n§wD"\ &'ZCca1 xS8#"Bl=u`?4m.l a{WKF/7jqn7q g HuBd8,FJ1R",ꀟהDX@G<sMQoUC߿Y8c .KQDc ZWU|ny.3ii*ug콧A8iQw R8]ћtayC՚M7DF[f*28c{X1v߄~Dx6[ǎv `%F!P&b\gQGa:sꊪൿ+5%ind0C9, @;Z-@Nj6]0J?0vE dlL/a٬i"*cc'clh0(0w 
25c,jr_f]a+z[-X۹7ݖb܅Xg5Z&΢A&/}7f͵?t?mbxe~c/Lo 8ϾH!Ŧq k9@7sI-?\TXiY`7;V/jΟyo4hG76=Kt3@['f Gu_?  Xelֶ1\\IKXcdogd Tb-x@d$?f) Ȼ$)F ^Н/Z9ż›f+xnhݧǼ__DmZM4ۯQJyP>vQ"w~E^G pQ:5aBoI}d,.xƧu%|8= w{ww[ߝզDq=:kqv3J׷MIԾ[>BnHlAw2v{,I"?q՘ov)m^E;np=z\vs2_pXz T%̫?d'yY)<5~gjLQ37z )uhRvFXE^f/W-g#V5/=Ӳb_$nFwh@cL cKf훉lB$s`!yGmOfxgP}Z:C2ȟ&1-%7u⢉( @&hay#SeH/#H GmC;?wD ̚+JpOq9,؀>Y@4Ǿ_``wAc{g s>C*/Ʀ;k>~0׫[,l@l{w~u-cŹ=,w\`w #3;L_\ʟ.WFq|=<͈] ')~>!r6oJޠ7xI}n3{ŤK1ZMtZ߶Gf6ItՖ#k|׼^C[hyw>SvbWc "rԚ~ ~4{OPerz95$oP_ڿ} wv۟q~gɨ9 75{'mkNLp?~Qx,1Uȍ|m<}R6ͷtҡ%~K&Myyo 0au$c߱/\ydFWhR1^yHY5>xa3`~,p.ɍ\?:C7qعUN@z`*,z?GcF\v;^C"VvHi#}3{}mv!"fa]$e4K==dg̓0?&nxǘgyLLjdjk=c.9bo҄OP&~e=Y\KCދb_xo؝K[|=w;YwmK,_ɰ`:x)[~ibxD#qDK"r=[7p| .Yc2 }0V/ "_7=px^ƛ~vqektC2Q~7 yOR7^p:[,ŞC@fO%   ?“埞ǽB J5s1 ҤxO: #mM"%yldig8IYЖm:K1_-TG\GI4XQnDnIϖ'BFpl)W 612ZÞvcM 5uRP3= gフ]~z?!]V"5(h -=Ca۸G ߽ޚm? ' wcո5!. r$; ثGLa]Sxߛ"$EH셸3{ɾ-HÜNӼRz]g7cƺ_o0-8|--;7_|ܹn`_0&ChD3{5s1x/ c`61 ,*Gֵ"r.VpvB>  /suaMIbMӮ^r-3kl+ЩsmfWࣗӬ<[s~$V|fgǙCD\'6#.;ݹliVm+>&͕۬Cg4i{9¨n&̸DG|b75*'} [ȷZ}#oG@{XhvtZ;h[cՔ7%[&֪CV=)YfGo<njKKr^eE'"lh}9JQ]dGo=jMd+ 'J6av)/R̜69"ߐ >s>`~D \37҆v!=U9S41MaA÷а 6&{\zu =ґfP1%muirvQA}k)>fJ *q3"{Yʥ, W Vd._ϼثv$!!֞Q-I{"#jh?s%GcRjL6YӗҺ=֞LO;q oLS鸖!7-`OBpR!?[5cKVg mW1ؒ6=*]yxZչՙeی ސDyP#6(6E[uӡ2v5?3C{q护xQ*ڛݼwczO42Ә,*OTyC]m[:5mZ,( cǷL.P)uhJNJig3*u7t[cH2$S' o!~ -ODng>@ώI)=YfXPSICBv}؂ߑ8% yGK{8箖4Wa]b] ?klyIr:%OlHa{Ǣ˳=G:trW (f=nRE[2hEiMkwIFY"ZbcFM1n4naC+ǀXR dQ=iEỎEd3ǀ9D%QgSlqfG$W/Gtn m0:ƴ3#OMkVɳ^ÙS-9ޥ ZϮڐY*]=:O2Yw}b6(ŭ(=֏Ў$?’(ƾ#2_G,}`?8p-`k !<3-e/<"6;茜ק rzkxY<|y9ggcgS*tj[aaeٹWb%|\1φ` ps/-3(?D2fԡV/Ps:PJFcD 6#xfiʎٯ0+<Ŝ':ж?J⡠cOМRj ,l۱o7 e)#SAJh9)YK8ČfI/1}ɬ?l H) fϚyYJ_lj)]9)끘|/Jk:8ԗ{ UjDOעrn)#YFҜ 9mrZZYep}=yݵI{+r7e洢VN? 
C}W?7l_'Hrm>-[?SYpMz:)T,#ؔc!2_0"NrYAئl|iwzmex 2[oa} :x |fC}pV-w|(ٯ'l%msF°?BFsOȑN{8x+VN\Ѥ`̿GaŚ GF8 AT.B tݵ+2a!D|.Ɔ`CbaUc7Zq ~lP3ۦ>.>u 8^z8%Bx{~cȎ9XWy§,\==9j㆑#U{~Qw+e%Nnc"{ybW|/S;1 1q**-5*KXVgn(y ~gN|]C*ZiFjR y_ywo6p#B\EP]a4.`:EP,M- jI]GjrYXd`aNPS΃Շ,;*;luHC;w6Rʈ2&oSfn;\8ߏؠ~h!ESOiJwFn &mn&Hw%lnY2]מKt)InJ(SJJCW9`9XvZrT$ MVp W>5sMgFjҒ&}:=睐~^ !h&<g!˕蹭/|?KrY(۸W2>@wIJa4^x'(B ^'"P}0AI:9\6Cy];+{#ۇpd՝ 0 ѮWO)0E(Ls)vS0TV$MGʼn0~F~ ēCPg<.qu ^_\᲎ehNY=3q9x{5_ u0cUCDs\ ]38{=02|],D! pp .9vAɘRT< =>NOvj[{$kLv$[>AO=P-1Kk`E&86Z$("K"F[K<qE,{ASGXXvZ+h2ko$HrFqb>kت^ܾ~>m!a5ËW.bvmh> # hЇ+PR@D?ίvfqo|ћn })z_-yrY>%=lRnG2 XRge-Ww.Cjq#n82 0OJp)BTXcוZJR;%xLN^ 6keYG%4}p/OoU,ίMvM+6pڃ\? 'H`Xe×2RXboۻ/^w-vl{RgiRtVm ]ȗŀ5N_˾]1s:cA23r ,Vi?~fkD8,Ư-&xFzy{>zP%K,g;Y ߇6X,E[1IM1bfu)VX"SÕ̲ /FuXݲ>`)St7)X)`Q&-ƖS48:kBG'jo2,^/Ssl+\=uX$ujwtq3 ur2ý"ݲh`p$;` (a{ h<\$:o;lb!J?ClW_&DKjB{FzX`N?Ls`}w!?ᩕ _TkA?C s!zA.Zi!k+a{ ?ǖ2$<آjzn8Y#,{@HJ- #faa8QJCdou4AC1ʞ'~,Y9<"qƼɂk ڧaV?Еc3h1*L^%/m%dMVh qư1` eÀș8juhkAn r L'QO;θ? akD0g`D2Lѻ7U[7CHe3btjUYm&Z}gCYpPfxd Y_#\|LaSWd 4VjmjJW(bA̾"jᠯҽ^:H`arO~7| ׳`̥J*bɋG'ػ1CJ5u62 ȹCHWNgg| 1`Г4oNR?ɄYDǿ=?ARUi^=qЀx),-@3.9{a%qۃ Q@ W-STf 롨\PPc ,G;AF/վhS c1C`/AӅDṴ Y`h+H31w5TR DͰr:`L XA"cnRt.c~#v:MUj=8!q%rb_hf&pJӀ/@HiӥXRG`ܛE8;8Goс 1QrXh_fؼG5e&+楾FE?auh0_}g oU%Xf2|?;{N{P!et_׋EvJjFeDVVRdQ^!8s8ߏ^8'a$hcJh*=UFO(bp*3׫~FP g X5}O-NHl@wxVϮ|/;^Q-|)FQ(KZ]D@2wjq[=S&OjR<:|+VP*z"nKiB$5P,Gb-aC,p\",97 2cs\`Qb J<4N~8>BqxPl:##㥐~mWC*mrFX>Mų/?>3'[`Ģ(䍂'%̉x,ٜ3Ρ{/J."/-&5=H8*GS؆q0|0Iؼ-ioOr"gNe"0wMeh(r9Sq !:%рy*l|U mWDdnjy~@nҪOtKk~lP6 BCҩϊ>|+|oU20Ύ͔p% QQ\aDKŒptwDFGbx(㷚J?kɌ㬟Ӎqfi\q2Qi ma4],GZ?f͔jiCTУTb v|uӄ–+pI\!_iK:O7?EoOpwۂOqeSUB"lC&˛H+F*)7M*q C\}6iq@Q;my*c~rdz ω5cdaW/9ɘ7Zk.;p K//5NSDsN@s<#Tkoyyz_×fkpn* 4qd\JϋWQ;zgmT7@D,wS̥nnfY9mQ1 Ǧ"c k49k\[N[gQxyHmoM }foس %{2鉕}Mb遆,>F==ﺊ*) s+^S70q 4NJJY'tQqzѸ*j+Jͳo\+`@£6UZ.sNjy(*Qܤ3hlH.;*}!Q\ff KaSeҥC{uCvW9KI?' 
}ЎF55OMӲ%1&#{nnRgӁQO>2|t>Q?4eo58%@쑎\Gq͢hlӇqKޒup 7yS|nѦAP9yV'{A؄[=,W;Nn•eڼєU.eOh:&{I+/7ŞT~ɘKlÆVMa/͸|>ߜXCV Nگ^INtw:\uWe 26z*c, H-+`ɳ BD\=Ry3DZ?KiZ1'SM 9t(6_,–0dFAf4e) A"@n(z/įy1"S8)?hXFᬒڙbӅV1r'ڋ#1DE<-z"iF Z<:vg]- 0naω%{ ʯ$\Z]Ek)uy%!Pq_-'8`G`(Bv,%صտxB&-$#4ꢷh6ǿDz$v>.ńqz|F-x|᧊>[ν|ORWB'ۇ{i3-J>c8̀Me1o&㕭2@^p4<)my~1@G*Swcrg2-t{c6 4GLxvP9[}1妜t_'.̕Dd_(R3~׆;rsOhCO )z# в5с}8 ʹ☋F\l,36fM돴z:~"$Y\#Nh*K=݄׭5fr.c==rˀg*8o\QE-n>ũ쉈wZ .ޙɰgl^yX e=٣wB}3E<mWYN>0c?9ozwukP54s?!6"o16߁`< u_-DΓD'xÇeV-Qtz=t_ѿwsz#~:b'H~X<kmDP(`LkNd<*#B13tF pջcd뵹ԈMOtW5ޅs89 [%eX0+e@+\iWO|&͌ϊזa_G*)W?%2ץ-r^sI}MLxbf=G~1gF8GTRF,\ BHw+m(#՞^VVf%&dooJb0z& GB3jgi;dj hY%s ?XHkkl [Q y¯:)ZsN60#)#9<̈7.3L%0=/JBz"#j>؆J25;f<KmMGsbʃ}$_?%EF=RRws,q;)-q:sNJ0tmS>^ \%]kv}#壵hSݐ+a`ƃ)1N$g`4`zi:kSl ӴEDV/HEy?8;%"Y&d6tl<=il[Z)}g&wt C{": ES slBI 6_ orU"k-Ft <,l6W?Mq.l?r,PW53/^K5A15C;{Dc&zW(M ? tc+œ]Ge; RfJs+$ポI,.0%l-\%^AaFaQ$w[3^HKU2_ :3L*EI}\n0Q );ARqDvLdV!cz-"PK$ulܶ'^h{4.+b +%jk(ÑO`%D66d_Ga}I=j^O0 -Wo蕒 ʻP'gQwPuY.v\KKPY(>4-UQ{Iw5$qDV 0MlhG.pKe\2F83Fk4z;ZzBͦx> _R`e؄}v"IW.ۉA(P`+4Pb屒9Mp.t>Zᙼ;V{Y:Ϻp::j%]Jzq'>h`=LlQk5/fIo;MJ7Ui NGkmW 7IHGh!at^c"[࿨ ^OvMd\taD ^k^ѩ6[c<:bMlPSCJ_=p~\'F<9"w J-1zPkwCû9V\u@5'Jr7'/Fv -L5~ gC }iJf(OVI"#N&XsQc@]eNcj#w:B~9/dZfx>zhqƌ} PQ/># 9av"@2Rl~"T,2 &)-͟UZߺ-UD"DJ{ZzsN-Ҋ)Z(GhiugwO‘"M as9\n]_DJlК*I,wDZE^ -C:!3 vC#4 exS1u pbp[PA]W^:u/@ VJ+DV#d2tEI3y;U/ hv֛![qdVxI8A ezh4i Rpv'as#BQ3wnTxޮMRz ޤ"5Ms42BaQƸ0=|{A1R &%^#yk:(:5Oz1[Oiq2^4&֊@L)1 Qռs kwYOtSpkb02\t? {@˨G{*R%5"§DrvwC=WRgQFCC}|sr"E\G(ٞ9'p::KϯB$V|gx+nLlx@W3$I/Q(tqFyD2v f/6d Ąe Jp|mgOGEQjiBrwLOxL`s/N}>yo0v!ڞ쉕zV1$e R5c}8Į(X2?UC'hpvij-YmejrܰT[ax%_Kta=̱f"Mсv_GH mS9GIɯ,QqjXn,No!tv-"Jn0L` S3Ě)ɶ&b+4N#|u? I'|B,3uܚ{;#>GudW+)UZǨWTWŒ,,9ϖnsw=zIc>c`ಣRF땹\cjoj3ݔwkƝtIu8yejX%nS}kGEIvъo+6rhip#t]ϐ_-켙' U`W =\bk* k]yu+owuߧ9J7زAXK-6EGBn?r4&]# Kcj+6tͻ&Sq|e!leݪw|dΖ\;͈h4ӡ@to7zbЬ|ſޟ矡$|n{\֑2M;8M!.˽L{"{Ԉŗ䯛4ͣIh2R9x2Ꙓ-OE\, ]//q"3@-v|&$N(Ԗï62?wx]Vtx`W ړ4 5{\EtgѰI䕶${W#;άGC]5>?A~W?l98eEL骙#u\Ibʍ"TI_xCuN)Ƭe) >K_4̈SAў54A:SlDz";e2p. 
o%Ծ`?4ᵭӎot}#}^B=,%2>y;pK0^K7~ 49}Ǭ4 q>Tg[T>_gS/ND6dsHߊZ n0!-!Μl>v?(Kã>{S⼩ guH`3WDM *55DrYBY|:.h3tH> cCG3L.zZys6N9C{*֑c_%Mlډn;~ ƻ. mnGVV&uor(ZuvJ[HYVKg[w1}-ᚕ.յPY^LYziWj=L#Si+OR-hndN./^|ԦPO ^WjSg>ZpQ:3v /9#[i5ŢeGnߚr[ S~Mfe=ׂr4[<= ~wKfKJhmƵ N<멜-ٰ6#6^̦If| r>s@ƋکqL~l λOu%2díSj1Ϟaã)x^1/>RWmKw445[QOM]dg&AnJla{>PɮTFA\aD)A_#GH7{GgkaPDȎat=/vvg%^)%H2?8f;O%j&Ø^+'>rʚqED0LTqG ⥅Rv Rq‘{ I[&Uk#_G›}}9: y_t9Xr^m+\#Ok"VѠb B/Nv)mHQ p/*{ufOh45;!G^P)Lr Lsq?ct*U":1C;ĩt!5Upx6FӔMVx:F U~QcӤW =pm]s ~1ӄXpI+vֳ2`sSM?->?I^66_&%o# ǒ["^BΓ;qy94p ܄窍գFHѣA(z'eN\;t=VЁsx?F@k#Ӝ28;4hoSΨwr9)Pf+;W~nW+M7wŮKPK=CטmE/gV3>^Q)4EЦ ^[ej;I.m̙í(sQ1ߔbIՆS8Q޾no5hǏS]ψTx5L37Ǖ8Qūq``[twL^6msw4B7:.5QJtۥ>Q3Lpqt顡|pE1GuiШFI ӰgVh?u9 hjDKG#Fs ˈŔw0AXUy:. F t«X;IjO= C$?-/ac/צ#duHv@[<4NBGg8MޕbެF)ƋQ_ DXҬf#JJm1j%`y BU4EE|gczNoG"m.!tB|W=b'[濉{{;vKhXr4ƬȄCլ׌{:jyL sj%I&YR(/"^.c =K>/i4Fϳ !.-&(*bdIK{9#k1ħw|o 7{g8^)vF#PIiG HzSb/{:=܀FrˆR,7 ޼bӱDl?D&q{ehN!z\[*fVJ1PNLL9"BڠorIu3znGҎ.;M hp!7 |hN )Hm+#[. ƒPْY3^yDvjl½|y\+O_ޙ."mgJqga1<_%$ {TVZ-i&s˅Ta<:llк6*Dmm1uSz=]~+;8.mB rG6&0Y(u}4F@K[PnJ4X#B2FG ?8 逛~ Ѹ_R6v%vW3 Jr# !EԂ6~k"?NŏƍuK /C*gjSҵ$̍: Ǵz9:Z]߯K?~t@ ?*8r']EA|Cn!^Ady.6>7C*4? 2,t3C %"*bp 1DEI [n?*mgf=o/G_؅t c!?UX,"Wā[XF!^ vX6=޹¶]I+%9ܢf'!C[4sBԜ/VWkbC!`e=W ͢/Z|h{7*.!i*3$fD+B$v$ h+jѯ!o:@^! ;>}'l8ktLebc- 4MxYNsUdJ5XmeP6k#o%g6ia^͔&렕51w#_F$K#, `5ZAwldJ,0F3$m 4(7M(B Af$"xE0.T_Pr:\n Pk卉RO3;4j`LfM0Q6 oZ;L+aPc\?`K,1"YaB~wpo)8 p$&W`bm$Ÿ@C!6iaS_#,?xCad?gv 鉻$Ō0j#1HWS=+7AދumkL c.;#=Ql ߄q>\>Zls^|Ě#ģo~"N5ǩfX:h9PRH=EE"򶯈 Q"Z/ ' h'vzՅaCs&I13aݬQ r}1v 1g0H̚qZ.E o4.0.4Cⲑ}" {UE)"a(l btY5Q] oK@C%+RRA#E!6h"3͂Gp)2WW%nv2@!z5S,e]Oݔ5XUۆI c9)Ƹq|l*ζ??&6+0GG<^ _eS%) t cRfm/!Ѯ3=ߙbt)}6A맺T5ZZcR/ T C̱&ت^35D l~Φ9]K5tQ[bwMj#~:`N(IGҾ;*p qj8wպUэΨ- [ኑP/Y/jI-eaziR3ΰa9jP-}dPu͟এ!͒(pwsx|pœ^yoŬ¬nЮ'ƋE38W:eCz2).^HD{*ε[%>&5iJD=Luf2!)RdBSJTS9DBYG|Gk-)6Dx`mH 󝡞L?q>q-d%iԳyfS@*r&\#vbUwk$yWe(wBx O\[֋ m}ԔLOg Afd+ƪR1p #݃(5mC<4!VA-hg3y(ZO2xDi6k? 
ΓSC])O%KŬ1Nh%AU4#n7F5u ~PXCsHe'2x+*0ל{_ N~wӱ%q&darjT5q t=6Y c- qIQzcpzͪEQe.?/l~ۅe\QKL?~Ӱ+=vFْ3s:Fĭrχt-T77.Tٍ;K|^_˳SҦ$FSUJ*R6I'WbC~'t@ ҧk4"+5iri^43I[f.aP<&;l!Y͙jt́wip?Hu+u vQQ=/3$}*U3(iOсlmMˡa7a:<9Ԃ35PATk9H QT{{ S^i|+Jk^C-wTjLšN̓nD?Ӗ?tL+6C!l:FV(3VKzZm.[oh=٬J(c)GUa%ǩ커rJ߾eeRl U<🿆/_;nIڜl-B阇!\">xFIg`y$$!y\Y,jvrX֕RwPB?,B-GXc۝bv D nbbو"pKa5 cH:_'4QXh/z&q:n1\ 4k }ް[{Ey#|?'[B.¼$0M]&|ntٝzwա zڽV\oI1nfWxVɡe0GzUj3ѵIdc#w^$Oe޾LxWBzw3ƏPٖ u_坎"V&@q'Q_<.VV#nwtR ߲;.iKgW @4 Vin͘[j;?H EeYҞzewۥfK8STwM$/EFXv:É׻Ƭ_SܧS iʞtOHs5 yo(ZTZqGknwOc2xDK&ˮzӢ, yRgwhluvl7+6=JZ{PMQ? y?DIU"V%H??`f9~7KmI׃9Ҋr.ܶz|og ASa8cx.<-Ί'INС5.vln+k92ԁKr:uvX) ۼ|y+ϲ!\ܷ%;/pXtg3Ɋ9H^AYOq\ڟEOɋĉFdž+.Zp4G>jlm{|F:In5 AA!uȝh{9[d\xދ?{8/Ƭ?7K0+%ݰ)>C<=ܡּPݚSy^.Ju[ װ@,rDYZtevcPWpS\U/C3OwTcbf'cVjE>I< 770;uF1aZaGTj_1!Pꍜ2,WvTK$=G:"I.d/"T#1?_"?;Kg͑1kҜ6[0{H9(7*ӷ8s+ +?Ċ,2SM8ˊ K4aTuѥҠKU9;9̙e3⑹ a̾q26pTgY)d2,7pbo| ow/y,+֔&4Sd2o'.t Ũ*'yA$aXB6($|džMذ#aHӵ a2 rB~m41/ x At>TFNБp{+ 0a8 6m}m ʑiߎ?DbQP8<,Gy/on McX)N@lnӶ3!&T>15F0k)cJ>f!l'*Vy4 Gjp[c#f/GSB:մU ۬9^T+6543Lxt PNb2}N"G6tqvSmX{=_M79j|ל:sf=j)6|lCtD=VFgvԵⲏvw-//Yvv9{$S3Y䊈 V,m(X~Ҁߞm6a+r -"wf íqhyϑ:"3T#EZ*ZyCOxHOd3s'Jg=3PjZbWH8!xjrNv OG4;}\p[ ߫F6UДxJ9d&rX7J,\ OYnKsIۻ (6`ce~W·:at&`)UGiKjgo~#}ΈI8 +"k*Zjod3њ\ZkVW3V\i{kO0F3K.Bc)ލ#8qMذF3gU9di{)[LWJ±JacǝYx?3%xp_{mخ)W5V꺮ٜ-y1zhRN᪖<#˖pkqR4F3pSz]?`qӥ50ƽ!ȥ[N/̹ Ⱦ~Ĝ?rtI߳N2M;U5ӱeϓB+ϒ]_c-)xz_*qG?D]EHZaҵ3~P#դs֘ Y$5{^͜Er47L[ٮ8[oȅ҄#10} oE,Z.h"w)zx:X1OH-luZV߶vdib%=p)q0g,Fi%vQ53#;r- &_ə.a{֨\"UXc;?xbۙY:y]4Sc0nmS%[9rF~2B !;4!04VCe8TS?ި4p RZUD]»r <7 kQ9p|?Eh,[71a=>T4A#D&B m}3 ;r+IAT*@ocN:V_c y2Ν-gNm0uڎ ,y1%|GFV5ElDܕ"/AtA]4!~(,ʼnԳG}ZD;g[aǵHuƟ>N~5Ujll29>Ŵ3G6`5I]>*;D2 ms/KxO:ran>_8a**0Z%^{(%ָ"I2W=Gmxr3rEHs=nP2Vpn!8&yڐLhԉjϩ#3~rڈGpSKc`FksJI?HE+ [&(wKS. {F4+@z#:~8Ui/W#3c,U`PLOѮq:g!frǘn'kEhR 6P-IGm OsxJSKM.tP 7eM}uBx=‡1DF]J--[V ߏ?(% s o;xk FV1üwKC- ;CU~ 'nws QD@Q{8v yW{!JU8 q? 
ͱ 8Duw=q7Q,ߡ+O|C2\̱AO0 VWBp ʊڣdUW B01sKQTGɑe"šzdf0Չ^aK>6|nj27gd3aqе f=@Pc ԏ8doa> ǁK  9/]iS̭l $d<ߌ#W`o0d^U p.ec3H~ja4qϘۓˠ o8CCp!r"?VI)KI5I?6KC?{USz?8WL+,>"o&欻> s㟚ϗJ |JgN#( Gb>BƉWzzHiZDK5: }`?<|RoQFD ]pTaǺ/ƚ^hY9qh-Vj=Y9( bo2+t%n?HAhs0}6F_/YҧuPִI4TTw%ZF7^S㓤?5As19._"bAtKď>Ĥ eܷt@aC4gɧɮO0{^kl[fEƸZn0,vх@mywc[qd*l+maMa%BzzN/d0wsx[-@\3oeu; 6NcH:ERs&4KEZt7pho'fm.ep_r{+=V.'28%Ee[`|[,8>Q`+n]OL7Cᥜu%{xh_ a;-Zo\d09*sZٶY,fDJn=P6 CGv0xE@M3|Hc0-NJ6PR&Լ ѭ-ң'$; A:cq'jLV6ˆCd+P樭k 0Dw`V/R_~5D#LTEO$!n1I?H2hZ8G*nb;\7Tjc[BsM3I:M]l4U[+~tVw}ES޵n ,\..g(OCZwBc3f&}ݍ4#ڷK~@;Oɑ|vCoCq*'@m>}$a'5 HdmY^i5smnH:|eFD9xrNޤ9f 8kotrԨc~ _ͪa}|_ԸƀK ^OܖJ"\)weVDqJӆ06k/ 6qc4"Lm03IV;z[qUcճ8NnCJ)PRk}o9UUx?ڋ;Ğ$W!(. {m&xx>I f Ht#ѯcsY(;ȋTu1e|dž롈SmFg+SE?,Z u-ԝE[=0T"^_.m:cO4v@$=MGL7/`?'Lu0$wdNt:C;bvH Lp:/N@C"dcȳZt?`lCSҎѤq/9FEHxzA;GK"l:BY,mkªZ5 |V伡P#wrq(, J2ąy4dm溫tS|KS65L} +E1 N`dz`D.\^N^I臮GWGSB>5*1+=뼠zf!%S1DX"# iC$O#N7nDW-lY%҉@W9[1z7>np-T<F5Qy }Ɔ,/(A(K-n :PWZ7TX ։ݭsIBQ+<=y:MeS])gI.Sb,iФD24-z]0jèl#%o"~#Ă_$o+T50z?ˮ+T<^4ghޡxeYҬ>r8bb]UbD]_I.:fͭ*c'nXa£پ+/"7"8nKU(ФΌy }͸?#FjVI);Or`pB W{hu622ۣ<>Ste974;u-3Z|pe2]%^nĶҥog"ş-ژm9ܚ:oKȤwyw2"m=Njh3Pka˞rvGNZckBlƑg?1䬠rߵ^FRɢޘ*_N&Xl~T֒kT8`dC/s5cNEc$]m6D j' TnqՂE;acD̻ %IgMg#cLӗ L8=P$)T֕ Dk_e/E_1I;<)2FR%)FG.bb9'M5Dٔ5c6D_qRۣ!^#$Jѽя%[MNbC~T^wE h*q4=l݇N Lש5tekYs^=B2jk5@!%-Yg ̩wE3d@;p@MxŊѩmt<(޷B8K=B1)u-~#lR9f::Xuc"i{;$Ajt?6 '\$fតl4{ +ˌKukO=v[K=oS05Sp5vlΊ=;qkSI2*ܳd !uنl9XޕO$_1!D;N{V7ct+N 6sz6}4綾FVqn{ioJ8 ]Ƹ՞љ_`|s%nŎc ~_DgSq}vK)*H7qgv|cSM\xEGrZnWl'e0t/NMy#hu\ΕWzɷy2Xk^"D@}/7/{Me>o,1g=8ֆ矧=TАO}[hj'J;#[t4 ӌo p(?'؂gLy[  _ 3Ε-P]1OJȹ / ih lyic>L :OGG }ORdpulv-W^*ɳKZ{ߺz-޾ A@<+t?ƇN8jS&~/&rň6]f9^鄁 %N=T_΁ ;?k(; Wػr)"' =q&J  !4h_ ,OQicUG8CFhQkő ]gts/D0,.hGոh˰&)G .bƠ<vҏƖ8 |/g+<1D߶09rRu᎖'d2-ȅf_%-<1W_Elyݘ'VN_d}6 Ch+:j%{`Qv􅪕3BځO6F?Qߔg0yr:t ϐÒYxnfK 27~ÚGH _Թw9:LЂ˕_ &4NÒRZԾOƶ|r3k}9-L3Sg9/27/7>OZ $? 
j}< y-[>7_mM I~{ǝE-gϓV[B-<=W _'VVdќ5\WI?Ѥ=G}:{{10[NUÞ֎qzDYZ@'Lz]lx(.@/Quw>uļ%WP jR </Hh!~l>[Or|%I+&%?MD1^+x3No,uAv'OTԳ%ғ K8'8~?bbڧVDggR$j"FoGw;cv -}bC>P -%f0^_^Seb4])FBؔAZhP Ev[+%-6r!1S aŏ4DWW+L[.,uŕ;0)T̙ ffͳqdG֒ǖZto;%N:;/5JCYp݆ly@+u,qHnk/\pόO?T@d{,ڕ)2 MYi;OJp|s ;-pS9g#\Ji^auE˥R.|Ea{yRߘq+\`:mmY`љC1c}%Z}IQ6*B+Vi DQiSЪU}՞Us{z?_s3d\DʗNps3(%ќq+\CU]f,-:#S.Qj Uxc\Y7H?;_M>f̿n25+ǘ[387TM&ϰ˲0i ݴl{ϺC "> 2[%z'{k!v)ܐm v ,LpџM+ yk-Z~rj*L&FS0@L\S vŰigŔl;u๽ЙuF+ܕa, [?Gn fn;WbEdyj3fXV q.z*8¢۱+M:L+vnBa5-"ESr2^.=1vv%C;p^9q03zD 9h¨T`a>ND@ݮSpng_]r)u^̱Z tv0A=DX/#i|R.>lygI mHO %`oGC|!LFw %0 ?YCchWؑ6`@Jɰ4EF9enSބ3[}/m}>܉"h CX{t7ww2;G~\1Rɹw9vۘ}o!žK 1T?Kzr(X&ʽ4ZyfIx>M⋙] XۼPPYi޶Iؐ[~O)f:![k'^^5[)lHKfmT@t{cKsP^6.ሖ񱓍x }wЁS]Żx5vrYt;l͐Inm|Dvjjy./w&Q& U[ilYCNvf{Ip(9NBsշ AY"/]ݪ>.آm*-wRɐ4 w?X[XtVnY1J.賈m Kcc?Q6HƚRGf} נ[ <4:]Ko&!}x8 Awaqj8?|^pCb_[.,[" ^uC]ExSͳQ=As~$R0lioKv"^[ u 19qqvQS1ەyC=`w]Lۯ -j"4nᢧ!ئW]@IISp!Scze`2MV`Pim@Se^ޫKpl?ύs[{ LGiӌQuR_HEP heBpsYCXo>;5{28SJf_AȾ-GM!^̕`/|\WS*0`weΗGH;K;{Ԣd#lY,awV Lp&^ffPMӓ~t1K_ޥIk)rўf 祂I7rc|tEUo|}dY:=K#Q~L:bB GuㅀQ$`/-`9RRM.Q4I͢] 1̎#Pg {¸ E1S_$}M4 7mI3t<mJ}cCId" 9 ~GRXz.QsdD۳YܼXQZ,F4JX믃e2-ȗָό~5ÒYfԷ,:: (V:rX A½zxF'}Qd\}sd#w*FT= `X^غ/' =rCq =40EsB},Lyk N]Pv6MŷS's@0%-$ BL!ʔ3|%M1eI/\$|(?h? Mkv\ct1XiLл ?MtQGӇDt c*cDiq+87k{}5If6Tfb5CaSb)ۉ $ʰBr ʰ"O ;ULc=iM$O}2ΏPHM C09NH "1uD ?%i NX?  rBWeg}T[42K4Rjqb6^3t!g$;PU2…^gݷeښJ6ŗii1[2ɵYI`!+Y7M_[8.FCDmji̮f-sRbװѩƊGX;OqDC)dc!h'j*zh!|Qgzİѹl%? ť e՘ 3<^{O5v:B9zEJ]ԆE> SB\/8w7¥27"so[K\kx̰3tU"TgnESϚf'%{~GzUes#FqJ~1/Uyp+Y1)2 r_dP|`r,u%h"ozMZ:9{|ekqEĮpiLGc tz-ue/WSKh.=غNJp6\BmBz8 ?y]6R`3ktK 4uZY@{Q/ 4+gϠbG`.ei;#hI7R#mX5[H+;DǍ5I8>^?KgG_ï]+( if,fN;NӕGd&I17$r'G~BKhPܽ{!1;l"ޚ6"˩9CJM@豭Tn؂YI z^'}|mHzҵe"˒q%'{{eFvρnkmF9`":ՄZҝWA;O.ן1W7anUٴѱ8+By+,:9".+WX<*`)d}+ q`C}Ahiw15_$8,Eh ZEe*d< Ź\]ǪFb]jdO@ `pprl4O_6gW _sA*{Z̞͝81VuQ1z7g]̰VMhYP'fF\Өc/c6¶ q[S X)L;,^Cqv|+.n?J:&}{3PxO7X! 
Cį|YO<"tm ;/Թݨ/lx-1t˾x ~njqsB r^+)|O -x'1RGQO3:7V7d.0X['0 t;ЭӁT|NtR6w*#Ȯ}=JA ؘz":sZ*ӏ)i9]'OW3y9eҊ,8.C^:Lۿi Ɋޯo \kRfz7z!uLqcoB{&b4JبnAE' 0}|35o)؝Zܳ1ϳC_ӄA}Un̵n'[ڟPk954RIa:iqMޚF:tW|'x !^DZ?ty&.MhrWdگ&oB?!GX{QU*~M 򂍪ͯe3 ."62x G i CƓhT՗[!FMM.o}EUu v(a[+N `:~vhaDM*ǖoɬd~"ΐނ&GPۼx64CaۉL9*}qtOvxEiTfg}GcJ=6#&jb MuFީao&(fsk8 }6-z}?p|L,MDZк* VEE1[J x'gAgetZTqEFn s1X"ԳCUСjB-'wНdnq?`; elkH pP~&^<lЬ|Js˔ޮtTyoE_Kf%Y>y•s1>+>JMTA^Ix|CY_wNQ OB@\geWSuG_ ’]36UwiHD>-Ŷ{,Jsթ9~xM|>Sojk&XR5̀CFVޤ/33;ڌf~@s~T"+f?MFD<1PVE:zipk=~~%2*tȜo y\Uϔc=' E]1YXY%k]tߝlgw}5oxJ/mX(,^< ylo]4<_qfW>"ցUg'KV|.S^JߍϠnNj/D.3~Dٿn3nmW2LS޸׀:'U+? *a,-`lh$N&l7Ou Q:ܲQifb7Ix1y D'cn+ r&rh0| 6=*JR_C ;N]O^[0/Ljdlz>+NAy%Nx7Cl)t:{%-teG;XVeCVڎ!'cuvwWۭ#}7suYf#>#k/ 7r`]2}3LF'd0I| jv`_FqG{Љ^ XeKО^ӁLmqplXl~fegf p2Uz%u4 :]O3חԚHg2Ж lds%G\~)Yá{rmyy2;c?4|&uM{%/Cz u@tڑ~_D%Z|-IW!p/m;kV_ӻ$Kw"qQn:~cevHw-穡Ԝ[享MxS.kktKMaɨs[sv ǯʾ0Mmx2 2Ϩ,`42o ?G#~!ڊoɵY^{@ۇbmmg Rv^fjVށ,y-sρ&|*SME y ?vBxϼ^⸢VxU꺴ƨ#}eGuOZǚ\X΃XvC~JGċ䴍)͚cɪ ƁoG/u;sߖ\poS͚#Me\}&=k }ÛXYb+v-%f|N]~.[ħ0^//.w@ɜ)h۹vm#;e$;~džĵR)ﴩ_Ak߸춱(:e{Kq tB1 l횙Ow yAf<̜kHh_]]q.Lw%ʨcP͊Wwp4F0SHȕ`QX{gv!tÌRAkd&(q?x g ھ' ޚ" `B)zi? =vӍ~Mz<] -4IМo cF|=`*؀ qL|Z2K:Ru;[r_:ļor|oIvs~vgT:\ÇhThsR9ʇ(|xg8.z%41Y]9|v]2[k0)WZ|K;I9Lzq2/!>7`Aq xE2?|%/{#{1L\WY۹X%6 oŶ}̲#/)r>+2ό;S5cSh( j<-*`ۜ~=G~Œoֱ y1UZ$wnh.U'Oh);<.1(}L<-v69SƷʹ|^%϶l_,^; 7ia"9cė_S^WmʗX;MYbOtsϟgɤj$ I%1fvf)xA?`ΏŪ&8f/S]ܲ`l)/%srĄ?'[lxe e)_>[Z26Zjnɀȴ-}@rs㾛l"U)壃TT ]r؉;p AGc٫}S4hSTc̃y=+n5@s,*P~ 9 w[*h ќʆebjI#ʀmK<9eߺVMzX=g&'bUL\q{S^b̓\[9NSnIFix0XzNu>J1gwܼ\Uu]D d k iyNxX^$ J8cm4{)߲L5e|D ֯1+"4# H#~hwyoƒ~7e` _hG]VoҪzaCɇٚ݌7|(>d16ooj)>Önق1~:Shh5?ۊ&xQO^ 5R R.c} o[ DWu;tKװ9ƐVΗ\W TWn/ʼ(V a+.5F@ .EB4R=/^԰si/OjZ+cCwk$GӄSMx>oI>Coiaf7V e}c'v8=U`k;˚u꽸? ݃Q8 Q\Ywjaz3E?wcd# ,n? 
غ㜫?4E g{]L$3 VSUE$cqc{-|L)7#0P4(꧰ k`Y5`lβ[]0S(3M !v=.@`v${dRiK >7ME /Zwzycs7 zMٜ:_h;?iBZaOaY0F }8WM>4D!^Q_a}q!0d6r` (N;P{K.|14c>FhAg\ \X)Q2i 5l=l,T&- j.QRahD~ TbAE\>AYx=Z#!^U:(ql|?K_VPkҏe-!PfHpx !!4LmkȀwL$C5Z_lj̣d7crH{$4XKZ[__zp6д 2UMM-R"gݾ~9zÅt1ඒLɦ~ѽNW); j'wo(|@*G54 '\{iۄqy:HӌA{)Otta1ܸ!BǶLd"k,[X޺^v'(>ɢ'L3ox1Ol l{f涗9~\SaTavV4+{@ }uAUK/þTgVo2Ylys^ZƜسO3f`8a fa\tӝ ![^զdK+:d5-$O鬀4IXxng?q6]8KoyR݀8֏޳8fk+AXv ),d9z|AM;KZ|V~GӠ~}5 \}q{aٌڑۿ,fC-һ&<_A`(/Z 5[^"zpkj4sd#bUjϜf>ie %*c޴C7XXlIk'PY4)ꖟNCz֋n;~o{TgZz8I :%ѹo/h֯T|X^tCw$G&4S[F?eZDOhbMz=ȈL7; :!ST׮Α=V֩Mg {RѬS %-;& Nzb}A Yg<={GsP 1#cPzGߵx [1EVؾ v,wc+=N ;䘿]W-icf & q1f|.Mlw_J>V8(,0a~ 3xO (o_=oQjv-,J-^/qvTvahT0{oxm/^?NU ;ha[xe["k@tJnⓇ7+ң̓Thc܉RR{VKVwHs51Y }S*.@GTU_ȫ6?eb6cr.Abw,kFyj(Y( Q['{Nj)yfcDcZ,T |tQGHf_ *=uG/;H##hXjZVI6_W"c$FuXK4x5?(+IohNhZn~~L㇢vk&&i[K]M/QWie&qt;B9G;B. {UOv?$u_M0iffL$?}7G[9R J+Ч3o^Fg21M w*e>\,5{+c(g}ؽ.OU4dw39iQe63VlFܫ,!M1L 5dVhq}T$-!?;Xµc.\`1QВ_//cTݵ&e5BD[9eW 31Zy0 DUd X/1,Лo/\{wUA J~Z=Nr~i%[YlRcw;IJ;}آt9E0u=1%6D GȱK{gIkŁ;lϡpڅ4jѯTGa}2;#>I\q@'t]5"kܶ~Hn\;v_1GDB]EY6v ]WO5(OrUK6';6Lfc)oXdOm=NeZo)rH+F fȻ[kc!?)S:{bBO|FH[#SWILO:cOZ܆ R)x ]{>cl<<%>}4h֏*ʱ2]1ɌO1;?#ۄ)ec—/l/\r6qzӀ0CD:7Wċ4JV=>͜OAGBĪUL)ZiT,rof׉)D|rM6]\D_2cpB4-ض[H-_w䝺Z=sneǞ | .bo*nOM!{9ԍHw{_YxW+Ί{m|LB^8>ZYܑH`:MiGU; Rf<(Lula)KK6v_JM~'V {_hd -WB.*R+5ŝ7z:(iu(b{ʃhtDt^2T[# j:w)Ζ{măvk󹞧(UtmIǵR'e ?g3MftG=T?}nR׌=%v/ý8 OoQa;Xg@6ž)szOUV8;!p#>MخXrf;۱#;ݿ&疣بT kpy$mb[E!fHPm]"u@^f, 'm?Px:j00u-I2%N'֒ʂd,+h!lxLNEۉtmf{@;,y,P.^jw0Mx]K.ǵP Od;b[D6}zļr!TdVIF?OO?n͵fCh_ #Ri2fK͹v߻Д7q_].^˺uVU~=T,?ef͆w4=?C7c>&τd+ӀyYci2l,|ClPzoćO#i03/53Nwlka*F0P6w״eiD)tmGׂ?br;ʍ̴ARo7}*]2pWo^$/KSb{ȫ:s虈">z @MCh1٦Xw-y[nE.<%Œo/'HGq׽;&[$i=iP&JҔ4~4>}s|7 }OퟙG 6Ip'$@_B˹G:מ td*%#ikOw*;S#qbf1iZ7!,ya_:ݔGosY<|7by+{8#cUl? 
σDv|e= a= _h 86 HyB2\(GmfIcy͡,LVc!c >)f}G n*Lfǚ-Nάk 80(Ie2LkY,*k_bvew(gcʐRxЛ0܋Cӻᴕ].tze}@ݳюl-Oˇ}Gc^6Ϗ  ˜K%.x&ٖ{3wsQ>pǽ綮 BǞ)G IKŭ47Ϝ_Ǻޏ#p)Ae'D{A?ė[knsӌ\&\RŤw^!ܮPF j=uP櫺ZZwmgãO(^$͟i"l>/EnmS),j!3UӉ7xPfsb)jm|u$$wFR2w/X7Gk w䆫y ~vM..P% V&qBۏ{Ws0S)wÜavI?if>eJ72G{~G5EuCP6}S v,,]VH?4|G9~?4 { z<—SH 888͂\ǜšCyڼbR؄2܏ yVrR  J9sF!%-סawW0N臕n3 =D(x4ic%Rv٥oPc11 u!/ '5s̿6-*&kٞl̠mP v"{a9XC5Uh)UޅHI^ؘ>t3&qHC.]%P#1|tF#gHg 킬mUT,o;UHWլk҄w#yܾ@ɏ5#i]6J_Uv|wo;bmÄZ&s~L*^G#ƪCk~ɞܡyl\#Y=9V$C5Z?<ݶ0)c#t9`gs @u"~1|msIx߇~İS`8h;~;eRDqG ߾"3bbsGaBfi'ǣo*<ˆy8fCrwG``q77iJ#b拭BYU:8y|q4'.?٢Tk4'QKt$J׏$}DkTAbK݈.׳!y0JNwv0^iy)h#Fe8an#8u3oC_cblEǘĚ d5 ,Py;5 a3JFL| \BA}34쾇Cֱȣ,nv4/OkXoEtj JA_< !]U$K5_m0dJq9X÷ڶ}n|Nᙯipji `8U`#q#*|8uN?r+;mROi,s$牔=JX#U/C(fHc:<1+-7J=L oV15)tCD( ,aVIkfkIhbmN^)WѺ?ݐ#Mqwlp3Qܐj~#kn^webt4:b{K(M`jKw/\-%f+t ϳGĀ07P'2*GHAqY/>60=l\d6/gExt!^~P7E_bv5A'-1&s|Ss^{,BK(:=M@\$!_??I1z yR285yu*x!s%aʔB#4 .ŷZ  Fd׋XAz8'߂%F‚zV+0 3e#[Mn,h9"d;>蕣BQS|#6xgX衫L'֫ȕm5VP6`EtNvD[#YI[z/vi,Eژ9`@Wg6Ư'TPbشTes`nB7#ghZ?a,?f ө \ڶթ|>+WԹzqHNi +ۙ#?Epmt޷@ZaO::xl ~| ;9O1/#쾫8\of`I Rb޽Έ;qJ7Rn+.1ֹ Dpn sW͹գE#n2Amχ`oڙTc3D+Qpm6u&X-kһ(q:KLb'ӕ#qC ,QQ$jFF}'3Bx06&wì}19Ξ{+1],װlbF,6p_ӎ-:~5!LJ5ļgj/1?؁TĆ~/΂WOC˄7`* 6R%URj:KүT\xvN yu/ξv5 b 37w/f iKh7LGUwxH:;ƚ{Eqp p}\(naT >Ys'ɏƓ*&B~ˡөX?u !"λu䲺{\~a2G0ӫin-ǿE3v|b2/Vy /Ζjh4+Q[R'Ii ',r ?Oc hC5yet-=>v CkZ+~JNQqUWC QgQ iWT`-+2Щ )륰Mq'Xd~B?n m9(zZA ؝CZ4%w?եryYm{ l[{{NG43jn@J”B oD@p +f3|O'3'>I6ݝݝ2Xt]W캓 :Vv#g]cG|(9}B)),zεK\W7 %lnW]׉}Ş<}_#}fT~qKc vά/z 䝋95/z"=N4ƙ"$wQz Xp4 ݒ0ZXYV^VPln]2*}KCt-c+wc~V5#X\jX(7XŶ ns:5: JO7-u_ʪH'CС0\-<<l~޾I }xܣwѼXG}~L)C9$6Z1¦ }non]w"E]P']x=(*6u#m0 mzHfWPV4 2keZ\t.k&6k! 
S( G'2 tzr,ɢ$=rQySIZ ph:@H0!@jڦ¯E,"--%}fy,Mvd_gjξv(uM]@qLG./WɆMvcFǟ]= Zo'Z)%ެfn fֱWZ͞hQ[n9ͭAZs,?MكlY-D+]z.4XLKֈ ҍ;M1_ s~Tҿ[hubН)BUK[͟X)D#+̉T 8~Ԗ]ר[7:;'UhsIONߡ+= J< ]SeOEe8}]Lx1Ms T;&)YGfLmBmK&1͗kx#37~H6i_Qt**,qsӂ=|yKȗR>HΛt"?O xnySvP~,L~$n/֋3EUN3!S m (jbDo:Vόۮ2TՋXG Vj0vTXHɞY'K>gϷtIjw^\ r1{BH>E"MkOFxp={:w5%ݻAϬF7DвӍ9Bqv57PTnKas羁bUma_|quI#t H\Rgv(ɇ{DW-ݘ%ò3;&qek4iAat%fK`\Pp\lbw5qg&nޒ8RY4| s1 dHlmyUCټv4"-y*ջT~~Gtק˿-pĤyBUNXR1eE Bp-t_+^߯*h}_ ߭Hfc>MrgfY"m\JWi8Ŏ=ʇ]xS4R>W[7-yG[odD>a5LzݬU5 Bi/%FrYnC|o=wo/DPs~璒?"^1|&KDZGaX)E-/ZbsSK-S6[]%9?-ꨦ {>Wiu-Y߅ry9ʕ'&*Vh38J.xܖԲk/?\. nP24\ˁ۴p~|uOwW5ÕDZXQd^_"M{%ZĞhj \Vƭ%⋈/Lxi<6|c ϻ#6OݶgmolɛqC~|[~HBbio7 *pe[H%x.{TJd#̞7EN*IKOj g\؞*XXkKR2|njToZ&aQEZ:#S">GoMT.׭xhש`?|ؔw?dEg< ͺ.z _k٨ڇ2+`^ Ô54Ǎyrjj[.W&LYoXp߾m<{{;zH%YRL7퉷ހ]7K ]qL_eؽt vf|^s?<)x 68{L"hHC c 93x*ҏjb?\Mc[E+sh!fq 0gpNBZIǂaӎC :j",Fim- /OFKi?U.bt ׸qh=rFθR( Ui&·X݃e\|椷Ggb mѾ@7b?bm x|Sf/!ma~ vȮt%mf[+#~JPbrP?ZnE!lkdɷ|j~̚׏*ys9sO1 (tFѫBwDThbEoZ9~/K-UDb{7 ~m,霄$y!uZzyױGZqÛ-Rkr^:Kn.n-SG{K_"֜ ;my %*w̐luZ^O2>&EOmJ:%]0-* u.n;Juk- >TrǚgQrC?9YU5 6tJLu"V+mc˚|w-8ǺHYU"GTJW%]lϻ-߻/%J*c<ۜ_kÝ~SߓhXd֚1>|]nV}^/fڛi>in %%*l_6M qW]q%dQ:_7m#*)]d)BGP2Z`.p=3eDbaA$m˦wg-Lm*L~P;NA Ĉ.m2?ҝwn KyƐ(5 ZoP&S. F${#rT.K0{.m!=+eU[l!L˙ pFRd1ZLELcocL}9_{M L.V`b7ė.ЉVOdfEk&m+!nH ̱zX07#ld3u*EnoՖ_t,M@ e٣1Va Vɉn%y<_,ox,'nVz+xL5ߕa/=%:4Y4lm7+x݉wґMbYGS~⌒Xeɗ{fc{,+CQt=a+>S59I;7rޯ=b^lr`l &.SQ_LZ1?Z5IxKﱉC0qH2>pį™6}>l7HIE|b1<Ό_%7GQ.ͱfx,W \_ ZbhH%zdO>p ,I";g;=\ϾZCjgS-G.W\l,fcAV=%J]c<فB,㥕C2ˍV;qYP"cGnjN%NGmZ[ |6MJKceƍ50b#B^"!68Z5R`a!V*3Y&Q6o<q&B^ RT~N2eM;̛쏣Gۣ鈾˪#m? n(77:Wmˤ[9_.p^f9J@_ʆN8l YA}) !e;•ݥ»j1 Pk7Z^bDf-Ch'"-(t Z=x#Xad#٪!1&mV3$O/`FAU+ "Mq)1BrS#N#m" ^ dg3ֲQG-'SY؋3`@]J "C0*E`~"({۩5'. _"⽊SV>v'Fy)W|M)X1 {hO()gQ輋ԸOK~"+j4gaA z*Qȵas ƦaYZvz_{NlQ{_BN=myDEكs13)c- (a3\?eimg V 8neܠUe )2m;=bkb8 $ 9LfhG*Ձ`I. 
/,o;/e3Mӌ4GszhWX3UJ̯@Z]H VG !cJo]3QQ#; ^XPhcZD_kYS`I{S4b !FWwIšpS.&?xĸg-Mq 0M8sG67(0< Ax?}l }&#u0:Sabo vY#f1mt&+PfX^09}#;>EXwHad V0nhP= :Gz$bѬCE^y?BJ1IS]"7K8_s0ak%WO`IJfS*W3'/65Cn3ORz sS3TJJ`_ۀ^r_Ű1tEEfX|EB]15E8 ñ?bMYm6+wdq3p$m6")aKN}D4zbHZhת܂4C%8hI0Ș/LTEG"lAK7W!_ 2EQ>ά2Ao9 #WW%:R9Ck?b|K6&26phΛ 2?T?%=YY1]bl]l1_~ׇnDZrtbvڪ0 #dlwǥtSz>v[F"r:Cl&՞gwTᆺmۄsQ`Jb:hӾ79?Gl#t5j*eqb{!x?j[i/GdC-~/K5xYP݀%Dr):%_O|x5OCaV֜Ai,a ;MV 8;Bqώ1,-p>4rʹ.dZC.IdiVFB=(&z;wAj0ΏU(I53Wr!8ӑo$pRQjA?7D-z}(<r;15e,O(D~%脨bNaϥ7\Btrr8Hfcqv+;-=idFm-/k]|`f>sfRش$hOӮKTQDi}މڝ?x 4 k1^xYKZ"Z͖dnMaёX;) ZSz/Q\IXMFx+Ҷh^m#1U5"w;t%e;|؈ݯفXR4[$1ZbjAGX'@v g_;[<aۘr\H\.ER7ԐuKf^~ՂH 0*'#z?Ea*ʉ 6.XU[V!o<&leLgO۳w.HDdZ1  x;|énlU9cB5JF.tn/v=xk:$#p+ҺYaMIB +LτY;ӜCkdu{iΔ8"]-fcw j9@y:[0tE͏ H$ڲ$;}4С9oIOo= =v] ZBqT׺4C좞JM?M^]%:uk3VR2UX];E,4uM.mn);V /UHCOXс1XF̥t0Y!OmlP mӖ/ ki1%6A]7-KicH%ZŖ-a>ckOx>BJBV7am&J=}ݜscu ]2 _i36jsk>{9Smvm5[7j89EKq{v;@s_=_A}Ҥ}>࣊'ԓ#3ؖXh /jzqѻjKg_#_dvKe oRk|j.u%}>?!)x;:-/ 'ȣ9l91cU> }G,4A:3l ijݧt.IངmdUH8 _~#@径`WR>OLhE9춡=ʸSa'}[,?EWq}V:=UddlHhi2+I$Hٲ7{e$s?\zs?g/>?cҮq qغ/m -l~'ʈZ[ jc@_ِ1ա6rrK n].:@c/=>';ǿM(IM۬ia{J!bӣ'YZvt:9}Z9~K Sǵ ΄(ix#lK(Lǀ +/q!լ8N?Z񑿗P(7Z/~JE)ȿΈQӍE&TʭdCp&ϰm=jy%;E;3l eH|)uEYQ~{& 's׈|67hR ޥ~=F9\L-jK*li/5<6VC74:;m ;:Og>2 'ɒ^ jr.Ӌ:C:5@ vӮ0Ō08$W}qjl (c_3.ަRZ֒TUv4ҋ82:ۘ'b晙cjle] o\y\j:D3t+k6c?6\ꆸC B&J%ol 6x|q<Hjoak0(NFcLeH\k Eоq OםvCOz޲Ƣ e{)ġb^|fwK"q-=aH:gIq}5p#-& .ٯ~lht[>~^V\ wrd-W, vsAV| /Yeu Rg>~>8#̉Bϡ-ʆ̽?[Ǐ_Պn3$znLyG4-[* WiK X5j6d?w .AAG(_- Чc47uBal ؏qWؿx5ه)uq$+,ho'~wIf4*cq1/4FN}|5w&jrov4~㇊1: b6 NS!Lm|_xB&84iF?h[))~F]PЍZDyP^J%_?l騣Ư<%1Vtt5mͩnVnrVhP3_zJ_߳#t5-?6KNniQX9qꏸx36=ۣǑ:dn~{cO[,kDAwkXlt5voI0x,gcyfvq~P&?*ڭ"dВ*ifM/əkh ÿE]g(s>Zj42Ev͂׵)+8 Fk DxWMN myoUf+R<|7 Eck~"i (ԋpԷcnjVAx̟v[s76fkQw*%:6A@W|$=sg`\ =ʎ`CLX Gl; ,L ,|#Q(< "N-hNhX_+t yB^̐\1.{W£UֹjoQ@~0d&I,G6;90ͮ^nB/f!c~`s%f"Mp{ba] $ӧ7#á|INVgk5n4q#ॖcTXX4)?sl3sxCi? 
ݥqS:3!.rlh NOa3S9".Κc [ҐxM]5!8aA;|| ,t??QBvڭҊUhFgʲCSg !wkc"DI[k$ dUTDNw^?M|*GHXN-VF597͖N U3M"P`vOӧ x6fǹ`otadlve7GgPS:q]JJP= 'Y@]wheZcx"ISv[0r1'FJJNf̊-b*[!N*CEsc}d X4~6_Mӧz4o =xǷb^!{U'd81.- .}j6xi%J҅òG+W̻$E|j rdoĀx};*{`  [ٕd֏:bH@3dQr0I=d'Q$Tl6'6! { {v-uIV>Q;OI3=^X,):8 gaXTU5(svU@u)ڋv"&5ĝBԾ,tN˞qSGqdA'+QN.2R qq%eb^+GN^6>Ihn\VGV L! [-qe,xt &<}q^'*Üm*Sxz!K-,J3k]+.L:MT=U6+p^4OE# 1YvtL>6yȭ N^I0OgpԱ]~9قR/<r4 A F %0J6B5EŬD"pZEWB<1*] i8+\hú4O |OX}3nG"Mo4oQt b*tKG|5g|:0H3]{T~D۟R̴2Fhz%>I{1Z)+N|\:#0~i$a3WĒs(XOFkcn16S]-1ZA*[1lз-}G ޘNFWyX̔xr^ "Gc7caX?b@V9f#l펌7`߮Q)0Yw˖|*0Q#9FS'1rperVBH4g3U ^-)h׬"^4 ¹~&J0_o9mK5ݾI6})?F=A4guV32dMdp,ůRXvB"nl ` Pʶc'ע340@h妏eWP I#lfWo[hXEU]S9Fӿ{ P&I-'/Azh/ƿ F0/GXS1 B&͍3Sf om gZ!Rt|?QoMmF\1^va0[nj2mXb;+YK\m|ܞ,mv33XY!CP[61o-C8m G/:㻟+ A la"܉CY ܆5O)p}po%]P=?2ܾ(A<ߗ#n&xa-2Gъ:ABM;ZFːWg5Vbԅ9 ?tGvAm0:2|#Pa.d훰AOƅm6ŋ1N.ذa,voJvr;m+SOdEM*M Ȑra\hk%˴9Y6?"~w&qcv\H*U)=P^4ѸN>M<#2oy s16/cΒT~;!k!? `E ڈ>]pcp ab4gK>>s@MB+{a%)豐cxv!##2U/ ˕mlb|x[Z+/Dv;k4y [ۀ ¸1xtbPlj#5|:n<} knFg8y}A(]cζgx }rOUyon_ 䧻mVt_]cx7MT an%)m_t#.+t6"myu憔+]À>X\] ףfTÌ h.tu׶ |t>z5 KcxIwsx;E!'w*>syry`/|떒7.nu?ֲg>0|6#`Xp;Li#Nj'c.||'sB՝#5S-ul2M?27K8ǻ4tXƥ--v}Pln< 6/馇9"pڠX~"CCQ6Yjg-R77ڍ=Qј>Mq0C ݄I33m;tܪneHz&:ڠƓ$)f vvRlîbE?ֻm0RЋel^gߙ'X^Sza-A~P<՞zw^/wkHD+nelτhsV XPۍҙ9 ?Mctң{νb_c|_t<*Hm'bLa1|Œ <.I蛖%!1hh͋R6a*z=$ƝItÙ՟w?4/0<;GsQxk6;,} ~V`Cd/r2MĿ0G~5lS;.?y5~o8/dNqɂɾLoK~Րںuo<"nt,`3Ϭϭ$%6Ga6kxmz!}L$_p`U1B3xr[Of)q/LnAsZjpL;̧UC )ZeOp*~ epnb|=?az 4߿_\ne_ԅo;c >YT _`eӳy& w/)2'@UTsa=~b!?^Țps0t( O.ǻ#xq-hP=tq|zSFߟ>P 4Ơ,K@z5Hbu %L=֬͜u߸9%3/ k}2^df-(UB wRLs*К'[{V=ǶlAnĶ:;WDnj{bhMٯٌǛnV/ `{&>]X$tj 'jdHQ8sϜ/ijN^ s@1ڍ7Ixx&jeV<as$w["&>UψA;7W-+f=R+r;f(KK@Y >AE,.X#xd.CMU-R[IJ֡:qwV ,k^dzMp;f-2|EF ճ&UQ?Z>/ 1ɘsn5q8_ĥԊ}ܭd`K_ddW< Ws5+Y~~*ިPwYɯ\};P2594ZܣϜBds]~/\㇒NLuh-O)~c5OD+tMա܌蓛I=y45b~#˚oe4+FL{֙QFTE[iJ|$ #L[Ci Y:PL'Qtit{AMfOʄA^#{"~r(t^s2kM!t TM!MJwp%xz퓅mP?ٚ8OWWQV^c)fVM[.>lE3Bb[d F0l\ώxlwZRJ8{ҬVFw5BveS:kn|\/|;C ~Pܷʊɭґ͔aD'XK(gVJvI 3,[ɝox!Ⰹro? 
!LY slRQ7 jI% _E5ǟȽ ld)/4N@@+o 'pzX0:-% 2}#6!lsR)2pfNژ۩Xw"Lߝ |Xb~W6,d(8+Ȇ땶N=CZvA;ԣ( ۊae-Fc+c-r=5;s˕LVO%}驠#6y;PXA^"j)QR~ŔIi䢒jZ<~}*<: ({ΰ) Vbl_ $ ~IV/a3k+0)dUTXй4w+Xo% BXqcyw+ʭ(G϶>ʑ=w-y=珔MGxq6;7R%#3;ǽoSNgrD\."zόu^'e,-/|Je7>ҳF`Ct#v# V41Ԅ7׼\/joT q^$چc/-:Qځ$}h vsZmEFxiI 6)'_EPk95qWH +zQoMJ)['XD>.ߞ,L*sAe+!k~+ڟG0=&Jb ;!p[ lqe k>:|4| +{K2nxo=hBi.{^]7gr}pRla |LBGj)yjKT~4 j~w;KY֚s۾F{t.a"PALV&GFɩJ#o5HLgkMiif>8sc9cklvJR&ZҾNVd3QI"It@F'xOʒcٍJaGxogsAߜLѥů4-=?2K)_^,O R!Pns#SѤc5oN4QY_k)iGi󖏼䵬8ܼSxV]ot&kKLj2#mNKGYd1yQϼ#?2պ#i*ᡤol؍\塦NցN|wwԜiӃ)a6{LӅ$ȿ ]ADoQ-hܚ*kIoNٯt_j~l'5'<N)xoiOKq-wݨE3s=ߓ_BǛ#qu~V( xA(p=-wnWV uf6L+z+v Fsa?1&IflE#D42;/v5<يmCR0)O9Ak( ! ;cښaV߁3-kn{d#lv**1P[…yr/WC8&ˎbȃ$f&{ꡳ2vp@mq?AXPhEp+.|X~,/lӢ#R#6h6I&G,0qp 2ahL`=xS;n\'EnTRJyׯ6dN0-~j-8]9ݠ +Դ;܆8*9˂zֲ2y*&bLՂ×bkU5jo@5kLUUj:pڂrƵxD @Al]k }~@E,a+ZvGvfV\QT g_wZa&rFnƖF.S^_PA瘓9U6.hÄ1zt ^ϒV]VRF+jiVsʝǴIeؓG|G0mp^y V>i!iw%-FAkw:VGLt_ -@ pݑYŤ{WE^YKt -ǿx0t&O5w m+` {)e4QM͙vnlզG)%g"|r$T9v ޅ`r("6W~dt-s߷7'xkQٷG~D.6Y܅~(jNIʰƭ5,U\|T1l^E>L,ۖ~2o)dž?w yJ8:Z`Um%kx=t0 Ř}فYӅgl`x@焾PH80KP aWLzc!De?o0^ea` ] 5L!',f"pjAxWX;+P˿1ơfTf!oPg W}kYiv,x䫱^GnCcp~?)%0IBJfؗUa{}~o"n6yKkk,msStN[@TƇYYbͯ<0KmSK.߂x+/%c3\:I7cp~;KDhuPқ\ 񶤦{|n\xZCKw1e4̷1 Mo)m=eVtƁ[XӥJ_oFz Ʋ~c4ZS;NF#mr 磻y{EE3+(ALܪJXtt! &UdGLJ#E R]fJm]JGHt)󍶳Oc^醣AB+BRbum>{j\Mգ[ET”ݖ)=NR^'n9y{M]YLj8ҿG &$S6,]3(mcۤ>[XQ 1ofFGAOo}4N܇Pf[כ~{SnA1~ mlQt+DK0~5k' 6uˋǰ1BbXcm Bc~N>]NaX: FkUb½")wĖaA eOXn+)ZM|qJg^BX3xLj&2}4ft\w@x~lGuXs$!$\0=N` -0*F^ Ďz Rlo2FG$xE&3鏁ie_k_~ݬ`-x[5h4E"Bw7hu3;|q)xKp_i -2 %BM6zt#@D=> فMc͝{HA!ǯ w,6ǟ - Q[[NՀФNLd60=y ڇ=ut6b 1Za~G88}'b+`ggQ#$ӳ,e575 rD\*lި )]S`Gh7d[#<ȁe'سEH˓`ЁVYmp!TnS~8SrNh$ˆazX@ch!C - V6Fz1 mg; OuW-li9ıXZPI.V俩(\ɱ Z3~ hj?W -{0,f*;dog^r}a:Ɖdk* ^TKLаHVqlNhC3bpIØE F8Ў*:-Aa(QAzNZ Ԟ'* qqa%60E aByA !4, ۢ5.GP?!MSӂ|_%K"hيq>T<0 ?^[as ha3SUKr^M@kmEy +e=0_,2v>vl>'c4lہ5& Ϙn(d̙bY,bG 2Y,XޗȆy[ 'y|rpgך,S[9 ' v쨄|?]ΓX,g5>;M1$i^ݴݿE~}@%dDy_d%d-DFHɗ"{d ʖM;u9z.N_;C>? 
nCX`0 wXB],D^rsO SfwhÑvf/+\<^MOSx'Zۃ'T㝇2(HHQ+]&=)sO`v9Eex>Eu?*p֛#쫃żNcEPstN w%|l( 24CO,82Dl o:a prItĠZPZiVMk镍6vH4iWzri"pwMҵpm\NSqQ/Hu<4\!/hT֔}WICy5(LT)Q;M\yQ5b.љGɤȄfG˵!3\%[0$P5M)SpAQhC4֖j&,g^T)d^973$ˮh;n)>ƫ>7 lNIo%zfzʜv]9'bD80 +z'BԜ/Ɠ㠚##4M"-AW?=o; e'#qZcGC'_{Ր 8!Z"pd~ora4 IXqa03$\ f>ǜSGY;ɕIXi㯪p":YQ}s.'oxPgJX V?cptQ (K*NO}|;=g`NpkD陥RuK;Ej}nC他ۯ+Ž%϶p~ 80ikċ[B2H­|gQArї}v j=4 UஸѷC?-/%KCs8^(\STvzzH!7Jޣzf,"`6(-?Q-nڭ3֟i*?J2c~_nby'i0gt}If>S5&H43c>3Жg=2ɣn3!o(N-7}Ӓӱ w>_kʣXZ50LYܾF5%#u2 RɂZɡ֔x<~ %"H_u,{;>QJ_H+m~XBk)uQb>#3DTdgR[#Q6k<#޹x|fEF7Q~T>8P$tO76R 7Fw)[w8+H{_!* 3bCd!aYي,Kъ15d-JT=u}Xwv`~MRZߐ'iU:≠o_oMesiD*3&u?c;ѤYrTM.+䘵9 4dmPLU^a:|=,W)oh1}%,TEݡ6/%K0c6#fLg{~#:/nT.;r/.Tk\-G[?FNn45|O2?0'MxS>?eKz}f폥#J*G#$V=†tw1 o1Uty.-(l| Zct~kDYw14Q8Н&woPa]]LWTh{VtG ;u\" ?H.>Y ۑjI%[POg$r@_ϳw­dCE8=J=a?B],sKq 1e 60d~]M7y =z.\TmX#j}7~dgEdژrt!m;HkŚ{`atL1Δgh}+eSlK!S1< ym*Z%&IxOmao$f䗪vz#GĻ +nAtUd΄4^aVw,b\lZ$#ēۭLGw}*v2sbE" "&_ۋ^~u0 #q3zفO὆xkn" ]: v ė:wVa%HRVl*yVtNV|Uc Nk1֟<<}6YM)Ҭ+lvCGB>o!d2>=vN=]=2qI,8m)y}d(x3mʗYroEL|_ha<)f%wU`WxAsg{`aLHmNu- ҆ %X-k"+lًN~@)x7s~EJښۯgB7Xr^{j_x^xoz ˣ94Y+[2[[%k޷F0g]c޶֑CQg^KO!\9VZjG^Ė--jdl==#/M\ǥs zlK<^lllˡkt3~}ڜ ؜-"o/ktl ] ˹~BQ[=ׂzY$㨃6<ŒOw_O pZ*Ŷ =hM^UήYi!yK[d&oi!FF`7<*u9b*%4!z`7r>c 0ʂ&U@z(3x&Ram{G|\mHNӇvy?ƂTߒB/ = | c1|/}z;#'?BJIBbӇT/ #Xq|V_8vB"LnuTR( bzi>D;Bґq(vt9(Hԏ ka\'EŨE8^j!k +]3b̒{hb`qa5]xOwR rG[t3ETUG+1VO0Q6Z*)Ǣ[t IV)޿N~IS^SᾟԔwn+Dxd$*W@Tr>Zt]_~):'2繭;׆Ӿ[".K*C"}8rFG=N,XQ>x'^33ܘg~Ey66~2BXTdyqH䄽C9q>lu[}emċ[  i+JĒYbT<|N.,Ƕ&5O|a>Br(=SJCPN{3Eejx>c72:/lFb[[x?R]'>Rm0,sE7`2~t45C:gGUާHڷn̐1QҡHc]zq+Ď_d۱,R"Jf ^ol@F80OuL4ο:$lUQD?#~7zաgn >>Qk joS[n23G G|INNv6E?Q8''TTBlYl9sTiuF?%|Bkƨ210j|)B'L*+&p(AڎQtNOVkiEFO:AMޅ _|.:̓CR~2IAzi[L%(T3Ni<ל{cx ZGHiw1xhpYIz=/#Msp }!WubJ/E]KuS}4Wۅ~^$>GfAi[-24*Gǭ,a)O芻&;cR?oB&ީ*/r7B(+aڢf07DPo gl/j{"P&̖\uq睺˞<4 ݩ^N(N7@6 E'ەƉŎ l !H| i\Ly3=QE mocd>_ad3,wj[0z&ya4ȇOEDfGEg/)~6? CwcU},DU롸#^Wb1LTjZa>F(fr߷V 7mѻNrOI'p[+r;`oVb^+4>oծ LEkXmTS)/;LڤǚȚeQXzKB;FG? 
غ+[YgJh4NO;t~p^q {Fgdz_슴G_挕Brձ0SM4Vm tq/]׻xw,)u?M)t̑QZs?z]ĞD\b[e%Dq,|55R fcPy465h7h,k m[*e"xZ$uqvث:ת#&Eʘ`&E`(/'Q3։bnzq/}64^E0?B$鿥@KSlg%ᘿu+,ծx¿B(}*wsVm37R711F|}21sW$HGCifG$>ץu;s2kLҾEu2mN<=I_ŒKO|K.oC9E? MdItz{LjMBeңan(tWjS%QunXPSΦ|2UIo"8_=5Ƌ_ t_8$wqD`hjoH "+X|^-BeiDW!O ޚ,C c?'ζktd 368ac ak[5GpM0zG8Vojmd+V^.g}S%X7 ib܎8+KB7GT0­a6o>/K>)t*UMHdWzxSV*D0f-n`vt+?MG\z=ZFVWl0MrmQ&Zp[- 킠}$LXLjWӌw=.}^S{^*}zRrXY: y W?b~FbuX)P2{3̹)tb8iCk$ \5ÖE4:"\Uy-4(3;7ELi瓎y024|%?(h:[Q!ڷg95_(!,#£ @zGyx!4GLe6[m)H7FA}Ý~ؽR Ǩ)@|k7Ixs?[d-yn{V\I({}S?:ϪtV(9O2ՈiM4ux ҫ'hEKnq~Gsv\%cM=[gu_djח[qj=2Ӏ!̚A+)Ӷ r3׉_uc|g~o1Aі[)yG}VUdQɑqNr'Jâ(D|;CNn*r>ǎ+C#>EܻpK Q#]砸=3_|r5ϔt_9m␽k$nc4.?[4˶*yJ#˸]ćEGx 6CjsEK^42*_4ímgT6[&E i-'A|Ɍm6T[vyZ3rutAg5j})4ό{Y5w{/ѐ;Dޡ.E%FcM^`!Rn9˦ ^yƁ;Z˸m*x[#eslњ:6/\J(2%vޖv88!Ίw3fі}1N[h14A\Ѣ7f褫%?lysCO/jlxYX04iiI;*aʈe8)ۯԕ-XR3wߑ"꩓':?ÝpvEp|+Ta*uQX8n!E+mWJܮq}CԎ088; 9fmsE~q,> R&tg6{`nt R|i?&\c}1=BEd0V_1\1ډ@mהf5WH2_ YFEaz=U'ΦE2&fVc+D6FVt8bS ?u =ѭF.tyθW4i|-RL| rD =IV|"&Te2Ce,fg&(~ 6~>czxaз܎n_.줇d~2h+g`xgJ?;V!ש`qAsW?Gy0mkih%-+B)}$N<ۀO o֋҂wt{,tt-Ҹ Yc=ubu[0eE]Fa6)x-+v;mKe֜_=g}6Čmk-׵Dj"Zz$8'a_w`S+xO>JcJEnKE(ol/İUo;ߜY ڈ&ф?jBgYn3tKajG6 ^6s6$nѼ9L{h;*ǽhxGfn 2kߒyZrg>|C^?AOFmGGh6CݮYRs\Qq.FK׍sB-5Ocu0=Vw3EYbK; CGBM QL }iBogy]—HCث=IKMpZ3@ !h 3"IQ|K<>wKn Jщ騉^wn 7 C۽P/lust'{m+Yh9RdoRNXе-2| Ss5ڄ|lCjvxgd&(9_Tsqd4ܷO(^2o?"M"v^Q~s#GG֭],yfbGƸәC>pCY4P+Tpc;3`]R\#wyMG4-(SNYG)/+:iRJ~Uњm滒r$_lQNlaҝ 7*¸͑wڭxB˒Z/R 5ڰ=E,qn?XG9b䓉]b dgIJ'6W3hj]I{y2Ed$r`"X5z(nnvֳ{kezfpwН(/Te6=]q9;׋cǣ >i[󃗖i*5R^/j$j8X9džQd+xCɃ If[:bT7ug <0H2m~ީ~w WkٽcTy=)|TynK!)n?tT;Y=~'E*# tG#SצIoPډ~i NRҳ\gžCN$>TO3i9b3^gYz )^ċQze8* Sf+.4Dr:WOd8=""$/V +,13 Cqr S)]ϑRUZwct")@}xioHmm>Q;t Yj<HzOj\d 3.\k/L-ŷ>gRhY%<5l3ɒUv=V9jGX Dpi] tz4Ѫmv `yPF׆m\.uoC)tHu8Q:xKxx\?SHCeD_~o./m*2e$:R1ǗhoLL5Ygòwnt}{\:$g8=sfvd+ص`ܚA}n%6aV,]hvvcԔ`S;<)x$n<`[ J88"͝o?Ig\ QV?{nlcrC٥d$8Vq4%]~#lļp"w5&xMw+zI)boj:'`nipype-1.7.0/nipype/testing/data/ds003_sub-01_mc_brainmask.nii.gz000066400000000000000000000004051413403311400244540ustar00rootroot00000000000000ad 103!'# x8Bpۣ$ƅ)n>zĸk v®GVA(|k3~ ?|?ÇP<=ڇŸ>zjG_TS 
>HO{y/#+_oą/% Sߗ`nipype-1.7.0/nipype/testing/data/ds005/000077500000000000000000000000001413403311400175425ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/ds005/filler.txt000066400000000000000000000000001413403311400215460ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dteig.Bdouble000066400000000000000000000000001413403311400212670ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dti.mif000066400000000000000000000000001413403311400201520ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dwi.mif000066400000000000000000000000001413403311400201550ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dwi.nii.gz000066400000000000000000000000001413403311400206000ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dwi2anat_InverseWarp.nii.gz000066400000000000000000000000001413403311400240530ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dwi2anat_Warp.nii.gz000066400000000000000000000000001413403311400225170ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dwi2anat_coreg_Affine.txt000066400000000000000000000000001413403311400235760ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dwi_CSD_tracked.tck000066400000000000000000000000001413403311400223510ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dwi_FA.mif000066400000000000000000000000001413403311400205230ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dwi_WMProb.mif000066400000000000000000000000001413403311400214030ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dwi_evals.nii000066400000000000000000000000001413403311400213530ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/dwi_tensor.mif000066400000000000000000000000001413403311400215470ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/elastix.txt000066400000000000000000000000001413403311400211070ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/encoding.txt0000664000000000000000000000000
01413403311400212240ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/epi.nii000066400000000000000000000000001413403311400201530ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/epi_acqp.txt000066400000000000000000000000001413403311400212170ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/epi_index.txt000066400000000000000000000000001413403311400214020ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/epi_mask.nii000066400000000000000000000000001413403311400211660ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/epi_param.txt000066400000000000000000000003151413403311400214040ustar00rootroot00000000000000{ "enc_dir": "y-", "echospacing": 7.800117313764398e-4, "delta_te": 2.46e-3, "epi_factor": 128, "epi_lines": 57, "acc_factor": 2, "field_strength": 3.0, "field_axis": "z" } nipype-1.7.0/nipype/testing/data/epi_phasediff.nii000066400000000000000000000000001413403311400221640ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/epi_rev.nii000066400000000000000000000000001413403311400210270ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/epi_slspec.txt000066400000000000000000000000001413403311400215640ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/events.tsv000066400000000000000000000004111413403311400207450ustar00rootroot00000000000000onset duration frequency pulse_width amplitude 183.75 20.0 20.0 0.005 1.0 313.75 20.0 20.0 0.005 1.0 483.75 20.0 20.0 0.005 1.0 633.75 20.0 20.0 0.005 1.0 783.75 20.0 20.0 0.005 1.0 933.75 20.0 20.0 0.005 1.0 1083.75 20.0 20.0 0.005 1.0 1233.75 20.0 20.0 0.005 1.0 
nipype-1.7.0/nipype/testing/data/f1.1D000066400000000000000000000000001413403311400173710ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/f2.1D000066400000000000000000000000001413403311400173720ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/fa.nii.gz000066400000000000000000000000001413403311400204030ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/fdir00.nii000066400000000000000000000000001413403311400204620ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/fdir01.nii000066400000000000000000000000001413403311400204630ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/ffra00.nii000066400000000000000000000000001413403311400204540ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/ffra01.nii000066400000000000000000000000001413403311400204550ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/fieldmap_mag.nii000066400000000000000000000000001413403311400220030ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/fieldmap_mag_brain.nii000066400000000000000000000000001413403311400231560ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/fieldmap_phase_fslprepared.nii000066400000000000000000000000001413403311400247260ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/first_merged.nii.gz000066400000000000000000000000001413403311400224670ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/fitted_data1.Bfloat000066400000000000000000000000001413403311400223570ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/fitted_data2.Bfloat000066400000000000000000000000001413403311400223600ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/fixed1.nii000066400000000000000000000000001413403311400205560ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/fixed2.nii000066400000000000000000000000001413403311400205570ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/flash_05.mgz000066400000000000000000000000001413403311400210150ustar
00rootroot00000000000000nipype-1.7.0/nipype/testing/data/flash_30.mgz000066400000000000000000000000001413403311400210130ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/flirt.mat000066400000000000000000000000001413403311400205200ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/fmri_timeseries.csv000066400000000000000000002026341413403311400226210ustar00rootroot00000000000000"WM","Vent","Brain","LCau","LPut","LThal","LFpol","LAng","LSupraM","LMTG","LHip","LPostPHG","APHG","LAmy","LParaCing","LPCC","LPrec","RCau","RPut","RThal","RFpol","RAng","RSupraM","RMTG","RHip","RPostPHG","RAntPHG","RAmy","RParaCing","RPCC","RPrec" 10125.9,10112.8,9219.5,-7.39443,-8.74936,7.28395,13.7953,32.2328,32.4809,18.958,-12.2383,-6.86466,-23.0912,-16.425,-5.70842,11.2467,-1.58574,-4.53717,-17.3842,0.912601,13.0428,2.44622,2.08875,-8.74373,-9.47217,-6.87574,-8.11158,-14.54,0.414787,6.04424,0.540389 10136.8,10115.1,9222.54,-0.120582,-1.94906,6.92247,4.75197,11.0735,0.972766,10.2285,0.717545,-1.04488,-7.64424,-2.10875,-2.44368,1.52535,-1.14131,-1.72589,-1.1247,-0.993354,2.98318,1.29855,2.0688,1.00297,0.135373,-3.25325,-3.12065,0.913296,-1.7868,1.58829,-0.735248 10148,10122.2,9228.62,4.24336,-0.689111,5.12782,0.132862,-6.64526,-14.7952,5.19361,3.68198,2.77598,-0.691866,1.07559,1.71444,-1.30287,-2.75746,1.74208,4.75944,1.80799,-0.064464,2.37174,1.09905,3.5756,2.98064,-0.238711,0.822007,5.07188,-0.864496,-0.208741,-1.31367 10156.6,10132.2,9236.11,-0.047434,-1.79438,-0.767925,-3.78683,-2.46365,-12.9433,2.00586,-0.48292,1.16216,0.113706,-0.639879,-0.0445654,-2.82995,-2.22008,1.46544,3.70217,2.84476,-3.32792,6.701,0.982599,0.145487,0.0501163,-1.16747,-0.630382,-0.0550437,-0.0563951,0.0449386,-0.715988 10162.9,10141.8,9243.46,-0.3687,0.640608,-2.93969,-0.37466,-5.42813,-8.55527,-4.70566,-3.62351,-3.94857,0.847112,0.357187,1.39279,-3.07124,0.779726,5.12671,3.62277,2.86265,3.44378,5.49842,0.895482,-2.1777,0.14728,-0.491475,-0.0257423,-0.32504,2.28464,-0.610659,2.01955 
10168.7,10149.5,9249.62,-0.272231,3.00751,-2.20783,-5.50238,-1.65733,-2.39574,-6.82249,-1.5591,-5.38806,-0.315138,2.41171,-0.227563,-0.306796,1.26618,4.45885,3.55662,3.14737,-0.0497907,2.76691,1.04757,-2.50276,3.25334,1.90194,3.54754,3.2308,0.393197,0.115407,1.88919 10175.3,10155.8,9253.09,0.271133,3.11725,-1.24188,-5.32432,6.94595,5.40219,2.63329,1.77742,-0.434798,3.20784,3.1926,-2.12653,1.4207,-0.162939,1.57116,1.20026,2.14004,-4.36978,-0.074248,0.344989,-2.79157,3.57441,2.795,6.81971,4.61981,-3.15395,-0.556388,-0.951462 10181,10160.9,9253.62,-1.52186,-1.02665,-1.31765,-8.89055,1.45638,-6.40533,-8.20284,3.42071,6.34151,7.32703,2.81444,-5.56924,-2.07761,-2.82472,1.75969,1.56549,2.59032,-4.99642,-0.861721,0.661704,1.27294,4.24609,5.72265,7.93181,6.46356,-4.54558,-2.93302,-2.55741 10182,10163.1,9253.53,-4.12759,-5.01517,-1.383,-11.7032,7.03273,-0.354258,-4.14846,2.56836,5.49077,2.70724,-0.00938943,-7.91268,-3.33257,-3.77932,-2.70035,-1.95288,1.51899,-10.5021,0.604386,1.13765,2.8031,0.719838,5.10986,5.4321,3.01561,-5.05514,-2.51591,-2.29453 10178.9,10161.7,9255.33,-2.09727,-3.23639,-0.971464,-6.47564,-1.86208,1.47429,-8.69004,2.23012,2.64935,4.20852,-0.00802028,-4.11236,-1.54808,-1.73414,-2.21966,-2.31888,0.521142,-4.49634,-1.66003,1.37105,1.47741,-1.17943,3.52554,2.31201,0.381259,-1.24137,-0.930002,-0.860505 10176.3,10158.2,9258.8,-2.87976,-1.16821,-1.15587,-7.36873,-2.70663,3.69409,-6.23946,3.17083,3.67683,5.95472,2.6739,-2.5798,1.61294,2.31642,-4.31408,-1.6647,-0.422612,-6.13843,-0.39141,1.92345,-2.82275,-0.742784,1.68164,-0.706688,-1.87652,0.172975,1.51911,1.04727 10176.2,10155.4,9261.93,-1.79655,0.511159,-2.91648,-1.19976,-6.01265,2.43062,-4.91165,1.64787,2.485,6.04132,2.79139,1.36683,2.36631,4.70105,-3.09068,-0.875835,-2.73203,-1.04036,0.0279962,0.57264,-4.70596,0.399049,0.109101,0.540718,-2.52779,1.90878,1.47212,2.48712 
10177,10154.3,9263.36,-2.06935,1.47151,-1.59814,1.1621,-8.21806,2.74994,-4.8666,1.6535,2.86737,3.56179,1.87379,3.98852,2.20191,7.00018,-2.12026,-0.322149,-0.459427,1.99009,-0.386875,-1.65524,-2.88602,2.5405,3.09752,5.52644,1.72241,3.28467,2.06659,4.48929 10176.7,10153.6,9262.97,-2.47996,0.0736981,-1.18826,-1.40068,-2.38119,-1.33094,-3.87199,0.498621,1.31667,-0.952908,0.481976,0.0885501,1.11339,4.67043,-2.37383,-2.32579,0.991108,-0.25346,2.41941,-1.44295,0.0394728,1.67752,2.73018,4.10445,2.29859,0.993454,2.7469,3.39394 10174.9,10153,9261.77,-0.957748,-0.455644,0.885525,1.7746,0.0437147,0.878291,0.0855234,-0.572903,1.39546,0.00119098,1.69176,-1.96049,0.156938,2.84845,-1.18488,-2.65197,1.35428,1.98606,1.65427,-0.643756,-1.03602,-0.0406435,-0.236011,-0.961959,1.28125,-0.464305,1.75539,1.84618 10173.4,10153.5,9261.3,-0.583682,-0.792331,1.36077,0.644185,-3.55594,-0.618864,-4.88099,-0.136266,1.51362,2.73872,3.65897,-2.63062,0.416981,0.735765,0.533665,-0.326252,1.0146,2.83848,2.16063,2.30307,-2.01136,0.638055,-0.22921,-3.19692,0.947596,-0.379132,0.678065,0.747812 10174.5,10155.7,9262.24,-0.685336,0.856591,-2.63545,-0.959601,3.25442,0.791955,-2.20612,0.263046,-1.34292,4.47114,2.99912,-2.56858,-0.21931,-1.56389,-0.808263,0.311028,-2.34261,-0.965718,1.98615,3.50723,-1.41951,-0.258476,-1.16227,-1.73014,0.372641,-0.118946,-0.422557,-1.3986 10179.6,10157.8,9264.01,2.59538,3.68921,-1.9033,3.99249,0.109215,-1.86778,-4.51336,0.591929,-1.29086,1.52475,1.01934,0.773735,0.0652847,-3.00075,1.79923,2.1369,-2.11635,3.17035,-1.87907,2.19309,0.880052,-0.480886,-1.94369,-0.204693,1.63785,1.43004,-2.081,-3.24652 10186.9,10157.6,9265.4,2.10402,4.02633,0.884264,0.1708,-3.27208,-4.9215,-1.0364,1.60796,1.70888,-1.43476,1.10519,1.26841,0.0627916,-2.97727,1.13683,2.82663,-0.301705,-0.592683,-3.81587,-0.70989,1.60855,0.103857,-2.48043,-1.22737,-0.312858,1.31617,-1.91269,-3.98886 
10192.2,10155.4,9265.29,1.6824,4.26755,1.57687,1.43194,-5.98808,-2.25097,0.153789,0.168572,0.879003,1.68604,0.75956,3.65922,-0.869793,-2.49312,0.497574,2.41553,-1.34226,-0.127659,-3.59295,-1.56547,0.88849,-0.785242,-4.24845,-5.15572,-4.81836,2.77035,-1.44493,-3.44434 10193.6,10153.7,9263.38,1.6491,4.80854,1.08823,5.10222,-5.26833,5.52263,-0.997094,-0.959485,-1.52356,6.15147,0.897033,7.60472,-1.50848,-0.576994,0.845199,3.25263,-2.21353,2.36454,-2.11918,-0.480371,1.405,-1.24949,-1.88424,-5.50221,-4.39822,4.6832,-0.575266,-0.350337 10193.7,10153.5,9260.14,0.371243,3.4575,-0.922956,2.86612,3.70316,4.4652,-2.35097,-2.08567,-4.55866,2.05406,0.20181,5.48777,-0.851734,-0.932792,0.852325,2.66059,-2.76402,-0.836483,3.32512,2.58318,3.54953,-1.82575,1.03107,-3.58566,-4.1055,2.71087,0.64122,1.16036 10193.4,10154.1,9256.45,0.655998,2.95689,-0.961572,2.95967,6.90968,-0.0847335,-1.13659,-2.64581,-3.78971,-2.43015,-0.722449,3.08777,-0.234356,-0.603156,1.30068,1.14368,-2.23215,0.241084,3.91588,3.38796,4.07024,-1.08082,1.15617,-0.375163,-2.54369,1.29418,0.795869,1.31402 10190.3,10152.8,9253.2,2.59279,1.93007,1.93861,4.82647,-1.84288,-5.84018,-7.03235,-2.16958,-0.8999,-4.4747,-1.99497,2.40008,0.0349671,-0.825783,2.00993,-0.184404,-0.576706,6.30193,1.43455,3.63536,2.34484,0.148851,-1.22127,-0.718508,-0.716753,1.50537,0.412978,0.73252 10185.2,10148.2,9250.73,1.88291,-0.127643,2.41457,0.38457,3.28565,2.40364,1.07674,-0.352091,-0.192694,-2.80281,-2.45121,-0.746935,0.454781,-0.345492,-2.38393,-2.35152,-0.468918,-0.28004,0.207449,2.6636,-1.39254,-2.09536,-4.44811,-4.48824,-2.93117,-0.770421,1.19,0.219788 10183,10142.2,9248.93,3.78484,0.701338,-0.71552,3.48407,0.454755,4.3743,3.68099,-0.668556,-3.42636,5.52772,-1.23863,-0.405148,0.665698,1.06479,-0.0251586,-0.48849,-0.847741,1.4814,-5.36764,-0.405219,-1.51485,-3.88226,-5.12764,-5.33767,-4.3365,-1.173,0.417418,0.415356 
10185.4,10138.4,9247.93,3.11727,0.196163,-2.018,0.721283,-2.5075,-1.06349,0.331823,-1.2182,-4.01712,4.78444,0.452166,-2.16432,0.55673,1.61447,1.16718,1.44415,0.569846,-0.812131,-8.14324,-2.91296,2.43154,-1.45218,-0.730675,-1.0947,-2.25658,-3.52675,-0.361214,1.09266 10188,10139,9248.05,1.52249,-1.16117,-2.4591,-2.41492,-0.35832,-7.48161,-0.0490082,-2.1421,-3.52013,0.903896,-0.958215,-5.8036,-2.36788,-0.368615,-1.88998,-1.40466,-1.28791,-4.79995,-5.58563,-3.57656,4.13739,-0.274441,1.53352,2.93946,-1.96753,-6.76034,-1.87752,-0.324793 10186.8,10142.9,9249.23,2.29541,-0.414867,0.263844,-2.42527,-9.23597,-12.7958,-5.40665,-1.3296,-0.255947,1.05195,-3.09731,-3.83996,-4.40177,-0.0123634,-1.79533,-2.22933,-1.59891,-1.58539,-4.29444,-3.24283,2.73497,0.939395,2.25632,3.98042,0.672842,-4.87272,-3.0871,0.140664 10183.8,10146.3,9250.93,1.04007,-0.107056,-0.719832,-5.17314,-6.41206,-13.4527,-3.51115,-1.82372,-1.0661,0.164654,-4.87432,-3.16371,-3.16216,0.547311,-2.31938,-3.32366,-2.59406,-3.07878,1.07584,0.135595,-0.15385,-0.198986,-1.76614,-0.364142,-1.44816,-3.17832,-0.666637,0.539005 10182.5,10148.1,9252.57,1.58315,0.552138,-2.38854,1.84879,-2.25441,-6.8381,0.208721,-2.73312,-3.19332,-2.49192,-4.21087,0.445019,0.0651566,2.67403,-0.780414,-2.43461,-3.10543,1.48742,-0.123359,0.0321366,-2.00728,-1.30717,-5.02137,-5.05394,-3.39985,-0.233706,2.10556,1.51466 10182.7,10149.6,9253.33,0.671616,-1.8801,-5.19861,1.6691,-0.386439,-6.73637,0.390118,-1.36276,-2.8229,-3.74619,-1.53148,0.15594,0.934737,1.96014,-1.35363,-0.924511,-3.00858,0.653744,-1.84706,-3.59509,-0.247233,0.962108,-1.40552,-3.28119,-2.22432,0.0626129,2.48273,0.969888 10182.9,10150.9,9252.01,0.0166707,-2.52456,-5.48285,2.26653,-2.03587,-6.50283,-1.00325,0.264499,-1.46362,-0.822672,-1.11829,0.403605,-0.734484,-0.382999,-0.186567,1.24812,-2.13095,1.80897,-2.82131,-6.15356,2.54337,2.39696,2.51379,2.41699,0.307725,-0.195503,-0.252349,-0.890546 
10182.1,10151,9248.33,-1.21698,-1.52567,-2.334,0.102378,3.74418,-1.36756,3.51501,1.50357,-1.80774,-0.855037,-2.71284,0.0746735,-1.2904,-2.37263,-0.326812,1.37779,0.0811662,-2.04277,0.452769,-4.37491,4.60025,0.785458,0.944597,2.57121,-0.443829,-1.9031,-1.78376,-2.25217 10180.2,10149.4,9243.85,-0.498632,0.815261,-1.05027,1.32586,2.65892,-5.17029,-0.588453,1.63481,-3.33979,4.4087,-1.26981,2.01576,-3.03953,-3.66687,1.33091,1.62961,0.568999,0.53543,0.477935,-1.78405,3.91722,-1.12653,-3.07327,-2.27103,-2.21119,-0.0469714,-3.05949,-3.83303 10176.1,10146.3,9240.54,-0.464849,1.25223,-1.14736,-0.645201,4.96922,-0.805424,1.85313,1.43677,-1.45072,6.22509,1.54511,2.89442,-3.56094,-4.35854,-0.476689,0.39343,-0.929162,-1.07774,0.941846,-0.57756,0.363373,-1.13491,-1.30865,-3.06369,-1.8739,2.47973,-3.19611,-5.38414 10169.3,10142.4,9238.91,2.28739,1.91951,-0.759834,1.17008,-1.10807,0.137649,-1.76481,-0.427729,-0.592675,2.50623,0.607717,4.10404,-2.20382,-5.11375,1.80008,0.383348,-3.40396,4.33491,0.605228,-0.0871236,0.185566,0.480246,2.74078,1.48145,2.07534,4.96863,-2.65852,-5.78272 10162.1,10139,9238.14,2.03262,2.32633,0.46709,-2.26524,5.80967,5.85587,5.67759,0.185696,-0.246666,-0.787877,-0.201738,0.61348,-0.542043,-3.51173,0.345287,-0.426571,-4.01566,0.315299,2.10005,-0.391753,2.39343,1.28396,3,4.99164,5.3145,2.31592,0.0224444,-4.14279 10158.4,10136.9,9237.31,2.77556,2.83113,1.37245,1.19159,2.19923,-2.0116,3.1913,1.03754,-0.929092,0.870894,1.00256,-0.624392,-0.561338,-2.99529,2.23674,0.823539,-1.63024,3.75817,0.298891,-1.18515,4.54738,1.25951,1.91277,3.57793,5.44217,0.785618,0.025315,-3.27161 10158.5,10135.5,9236.37,0.0672571,0.761886,2.35427,-0.889999,6.73976,-1.98269,8.45302,1.1398,0.0604089,-1.15193,1.32222,-2.47069,0.131408,-3.48238,-0.669944,0.753279,3.07189,-2.04262,0.174304,-2.32107,2.83224,0.708328,3.23848,0.984911,2.384,-1.28385,-0.548071,-3.32946 
10160.6,10134.8,9236.46,-0.783525,0.239203,0.00548465,1.88108,6.83171,-2.89703,7.27976,-2.71585,-1.47417,2.12383,-1.04536,-1.14095,0.145875,-4.3962,-0.139564,0.781551,3.40043,-0.28834,-0.343608,-2.36391,0.0938093,-0.36295,1.0276,-0.578692,-0.619797,-0.489157,-1.92106,-4.163 10166.1,10135,9239.02,0.124276,1.29463,-1.44975,3.21172,2.53479,-3.38317,-0.20102,-4.72755,-2.14129,5.53743,-1.24849,0.994366,0.436372,-3.09635,2.19121,1.13794,1.52365,3.0586,0.622146,-0.699363,0.103461,0.316277,-1.73095,-0.195395,0.490618,1.44514,-2.50878,-3.62472 10175.6,10136.9,9243.9,1.67228,1.70099,-0.125799,2.04051,6.74509,2.05118,7.82124,-3.08565,-1.70842,3.37127,-0.160655,1.32998,0.57087,-1.46351,1.80831,-0.585194,-0.267853,0.719624,2.12333,-0.931791,2.61407,0.519467,-1.78038,1.70819,2.66646,1.47407,-2.48388,-2.6294 10184.4,10140.5,9249.09,4.05746,1.49391,3.1491,4.74869,1.42089,-7.65297,4.6083,-1.50292,-0.681543,0.792377,-1.54194,2.19467,-1.449,-2.54459,5.38937,-0.0662613,0.683022,6.46847,-1.151,-2.09676,5.40097,0.0884146,-0.584039,0.411805,2.87021,2.70096,-3.69024,-2.72328 10185.2,10143.8,9252.71,2.20708,-1.9117,6.2705,-1.38994,9.88462,0.984595,14.8745,1.09177,3.01497,-6.59006,-3.06879,0.864155,-0.352553,-2.42934,1.6214,-0.899998,2.90809,-2.62154,-0.748965,-1.78716,3.1828,-0.76616,1.51574,-1.80336,0.759499,1.08543,-1.48814,-0.830864 10176.5,10145.2,9254.8,3.08758,-1.24415,2.30133,1.5123,4.9996,-2.25743,5.71269,0.326257,0.862459,-5.32366,-2.15784,1.98295,-0.769376,-3.24456,1.73394,-1.18022,0.303592,1.19388,-1.18318,1.1848,-0.484859,-3.12715,-2.31674,-4.16244,-1.41399,2.32149,-1.0187,-1.70219 10164.6,10145.4,9256.92,1.59078,-1.06701,-0.557541,-2.88977,3.22953,-0.245042,-0.474481,0.0498212,-1.16809,-8.33134,-0.306573,0.38113,0.242976,-2.39828,-1.29092,-1.68013,-0.127576,-1.94114,1.03024,1.7825,-1.44807,-2.86352,-4.13379,-1.78466,1.5241,1.16147,-0.513496,-2.30027 
10156.4,10145.9,9260.21,0.0333157,-1.40254,-1.63643,-2.63202,2.15792,2.8366,-1.32406,-2.25364,-4.61227,-7.74587,-1.005,0.107792,-0.131513,-2.0428,-1.28031,-1.65736,-0.0589992,-0.767749,0.0451012,-1.23948,0.334266,-2.05544,-5.74107,1.40617,2.47259,0.129519,-1.22605,-3.50154 10152.5,10145.2,9264.25,-2.23854,-3.34598,0.871046,-4.48776,-5.12246,-0.367558,-7.49548,-3.04105,-2.99035,-3.84367,-2.67766,1.19195,0.695189,-1.99211,2.38266,0.800284,2.92667,1.82052,-0.796218,-1.82753,3.43662,1.60186,-2.49788,2.02216,2.59346,0.975508,-0.397427,-2.78437 10148.6,10141.1,9267.56,-4.64613,-5.4569,3.80281,-6.22039,0.554038,5.00519,-0.395733,-3.04225,0.570141,-6.95862,-4.49105,-0.00732036,3.78285,-2.09066,1.46914,-0.873643,3.95228,-2.08532,2.8568,0.749314,1.78963,1.02579,-0.808831,-1.60113,-1.17483,0.544949,1.95805,-1.27827 10142.4,10134.6,9268.73,-4.02228,-5.3818,4.39201,-6.57399,-2.68308,-0.146626,-0.297909,-1.28233,3.72363,-10.5635,-3.46562,-0.498293,3.92457,-1.10422,0.725311,-0.888612,3.1725,-1.82837,4.64182,1.32637,-0.56378,0.781271,3.29557,-0.557202,-0.712584,0.587691,2.76212,1.05325 10137.8,10128,9266.83,-2.98689,-3.62614,2.49614,-3.78405,5.33483,-3.24499,-1.4797,-1.49474,0.75769,-13.0722,-3.57543,-1.73535,1.13307,-2.81826,-2.67056,-2.75063,-0.407379,-1.38965,7.67619,2.2374,-2.93415,-2.1994,0.956463,-2.25511,-4.42128,-0.889014,2.30781,-0.144069 10139.6,10121.2,9261.84,-1.19244,-2.09691,-1.17019,-2.92359,1.84257,-9.64131,-8.2266,-2.48032,-2.29368,-7.41116,-3.60172,0.404837,-2.31741,-3.52505,-1.14341,-1.1367,-2.22469,2.93998,5.91064,0.841518,-1.68308,-1.06298,-0.398387,-1.68239,-3.53445,0.38234,1.02165,-0.403129 10146.2,10113.8,9255.3,-3.35595,-3.34535,-1.74811,-10.4556,3.60927,-0.776329,-3.08604,-1.29687,0.835023,-5.76979,-1.7646,-2.22816,-1.31439,-0.382083,-1.73312,-0.792276,0.206848,-4.1992,4.29806,-0.830575,-1.71405,1.40452,2.00247,0.106559,-0.768805,-1.08451,1.11784,1.22578 
10152.4,10107.8,9249.87,-2.49869,-3.87311,-1.98238,-6.90342,-1.23671,2.90852,2.97754,-0.581043,2.81778,-2.71728,-1.21684,-5.07044,0.497485,2.01224,-0.365556,-1.64542,1.17956,-3.76085,-0.573467,-2.58111,-2.12663,0.378165,4.18795,1.24581,-1.36196,-2.87649,0.482267,1.63454 10154.8,10107.2,9247.27,-4.01788,-5.39388,-1.72161,-10.3153,-0.251037,-1.57831,1.61553,1.18147,5.7765,-0.599766,-1.22598,-10.0294,0.895145,2.02015,-4.45992,-2.58818,2.98391,-9.45103,-1.41902,-1.29446,-0.55725,-0.180421,6.94249,-0.594659,-3.53394,-6.50742,1.38112,1.51458 10153,10112.2,9246.76,-3.24249,-5.01072,-2.02956,-7.46567,0.0264794,-1.5224,-3.31193,1.53111,5.32332,2.5335,0.40251,-7.05633,-0.711568,2.89381,-5.39998,-1.36446,2.04786,-7.02942,-4.53297,-0.88262,-0.357391,0.595822,6.5409,-2.84395,-2.64994,-5.7378,1.39939,2.97985 10148.7,10119,9246.16,-3.96002,-4.42756,-3.26432,-8.69557,4.03628,0.616301,-3.92147,2.76458,1.652,2.17356,4.22927,-4.5247,-2.33417,3.89508,-5.29918,-0.309883,-0.288513,-8.36711,-3.09529,-0.126421,-1.8539,2.38545,3.61409,-1.26649,0.429596,-4.19612,1.45711,3.95651 10145,10125.2,9244.17,-1.75695,-0.511195,-1.73883,-3.34742,-1.26592,5.24499,-3.03549,2.78645,-2.1334,0.220919,5.88292,0.160927,-1.7455,5.37331,-1.59599,1.91312,-0.631146,-3.16886,-2.94994,0.34822,-3.01289,2.84951,0.356135,3.47859,4.18276,-0.12287,0.984563,3.64398 10143.1,10130.2,9241.27,-1.71615,1.12867,1.04805,-6.57347,2.41341,16.2593,7.00371,0.924589,-2.71609,-6.2656,3.57183,0.37743,1.96421,5.66573,-2.3041,2.26799,0.668846,-8.32571,2.30148,2.66333,-1.75615,2.71555,1.44408,6.00224,4.85886,0.685304,3.03234,2.82015 10140.7,10134.4,9239.05,-1.25992,2.46902,-0.556969,-2.76672,5.45596,12.4649,8.36959,-2.49709,-3.8708,-1.40646,1.38854,1.37064,2.12007,3.84209,0.459629,2.15086,-1.24194,-4.15365,4.52043,5.4809,0.876317,0.656659,-1.01116,2.09458,1.65028,2.77599,3.21635,0.381243 
10133.6,10137.8,9238.32,-2.22442,1.37094,-0.787327,-1.05469,3.55443,5.14715,-0.0509983,-0.0905216,0.72894,3.96149,2.38061,1.75467,3.09083,4.18358,2.79613,3.29833,0.325666,-0.671704,6.07566,7.72379,3.13564,0.655668,-2.59152,-1.76199,1.58102,4.45884,3.34631,0.480564 10121.1,10140.7,9238.2,-2.17367,-0.866588,-2.79273,0.692199,10.1863,9.97874,6.04483,2.66482,1.76948,2.61332,1.9281,-1.1243,5.03132,3.85731,-0.443337,0.284932,-0.868815,-3.31091,8.51065,6.49177,2.23459,-1.67042,-3.77735,-2.781,-0.902713,1.50205,4.04064,0.197185 10110.8,10144,9237.47,0.303664,0.966366,-2.65365,4.69141,3.98147,5.09796,4.57488,3.26927,0.562439,5.41174,1.92471,-1.15766,3.6349,2.42314,-0.0874924,-0.0560302,-1.22366,1.9914,3.44357,1.69106,1.98031,-1.32375,-0.576816,-1.03349,0.269332,-0.300454,3.28264,-0.458562 10110.3,10147.7,9235.48,1.28867,0.940385,2.1165,-0.581377,-0.643187,-2.16313,1.69237,2.47912,1.37859,3.32286,1.26412,-0.720553,2.36863,-1.25903,0.0706914,0.944374,2.2859,0.229574,1.5842,-0.12766,4.43122,1.34327,3.34673,-0.404948,2.87655,-1.67866,3.04869,-0.25307 10116.7,10150.7,9232.33,0.394714,-0.833445,4.94793,-6.11826,9.22151,2.99358,11.1041,1.15853,2.93899,0.397365,0.0221406,-0.0976144,-1.13452,-3.42557,-3.72862,0.476803,3.69054,-8.12164,2.48493,0.363106,3.87676,0.504363,0.972674,-1.44388,2.15926,-0.828986,1.75931,-0.549928 10121.4,10152.8,9229.14,1.29508,-0.757006,3.12597,-1.6729,7.62364,-0.936804,6.48918,-1.03742,1.86227,-0.262351,-0.75051,2.31301,-4.8422,-4.5034,-2.66476,0.578808,1.27532,-2.04282,3.45288,3.01897,0.564668,-1.21876,-3.06331,-2.70583,0.257935,3.52846,-1.56111,-1.5308 10121.6,10152.4,9226.86,0.677648,0.378414,1.31475,-2.61018,4.91454,0.37514,2.86121,-0.193973,1.93324,-4.63591,1.10695,3.14457,-2.96694,-2.19304,-2.99025,0.50097,0.165722,-0.200595,6.85438,4.63234,-2.47705,0.342532,-1.30419,-0.141339,1.63084,4.32707,-1.19328,0.76139 
10120.5,10149.2,9225.49,0.499478,1.88224,-2.14427,-2.77288,10.6927,1.71766,6.49787,0.43981,0.0705592,-5.13201,2.57263,1.48076,-1.20267,-0.591255,-4.74193,-1.79266,-1.46188,-3.42451,8.04316,3.54243,-2.30088,0.0710442,-2.83238,0.653942,0.240506,0.904871,0.430945,1.6283 10121.2,10144.8,9224.89,1.35965,2.80608,-1.94166,1.75583,0.26227,-8.26437,0.567312,1.6259,1.60009,0.0627174,2.62631,2.65738,-1.31444,1.36503,-0.138702,-0.303116,1.07964,0.805711,0.6712,-0.0379901,0.596301,1.49046,-2.9437,-0.0854658,1.7116,1.14138,0.19577,2.11315 10121.7,10140,9224.64,-0.625981,1.46152,0.571473,-0.708952,-3.97306,-7.60183,3.54876,2.52756,3.43643,-3.37318,1.25185,1.95327,-0.430742,1.99167,1.38528,0.439469,3.35733,-3.21518,-3.33649,-3.33716,1.63613,2.87364,0.216347,-1.19264,2.34646,1.38095,0.250252,2.26893 10117.5,10135.7,9223.59,-0.644241,3.50756,1.18011,1.32346,-4.09529,-1.15572,8.91836,0.864807,0.810206,-4.21922,0.85698,1.54667,-0.984211,1.49262,0.424346,0.272079,0.55043,-3.11065,-4.92549,-5.21789,0.616593,0.933381,0.453042,-0.907799,0.816878,0.888407,-1.07882,0.897744 10109,10134,9221.44,1.24811,3.97674,3.11247,-1.16572,-9.20759,1.26864,10.07,0.861166,0.629341,-5.07074,1.84156,0.554677,0.501606,2.3508,-1.99158,1.42546,-0.0624237,-4.75601,-4.11731,-5.27973,3.12042,0.927954,2.01431,1.91643,2.26937,-2.42322,-1.85499,2.11246 10103,10135.6,9219.87,2.2046,4.10281,1.87105,-2.44462,-1.81059,2.73657,16.517,1.49188,0.862687,-1.50652,2.91423,-2.27191,-0.311967,3.16828,-6.05317,-0.647296,-0.600809,-9.86797,-3.317,-4.05579,3.51099,-1.77799,-1.17227,0.17711,-2.12588,-5.86398,-2.08211,1.43944 10103.9,10138.7,9220.3,3.77174,5.49059,1.2637,1.03751,-12.6254,-6.24364,0.90728,3.65224,3.71822,2.59825,4.31988,1.86088,-2.62582,4.43061,-1.00461,2.10803,1.47555,-3.28777,-8.18549,-4.31695,2.95113,-1.34785,0.676274,-1.38936,-3.04336,-1.37001,-2.35773,2.00922 
10108.6,10140.8,9221.82,-0.70593,3.90046,-1.14247,-3.0764,-1.47295,-1.10809,-0.510284,3.79285,2.60078,-1.28697,3.77566,2.32766,-3.54475,2.99719,-1.20306,1.33262,-0.719923,-9.06449,-7.33119,-4.80493,-0.721145,-2.4024,1.79362,-1.97223,-5.04385,0.0875954,-1.73778,0.950888 10113.1,10142.1,9223.55,-1.06377,0.843971,-1.44889,-5.32939,2.69029,-3.83385,-5.63119,0.535717,-1.61039,-5.59267,1.26514,2.05707,-3.31026,-0.958826,1.33732,1.46551,-3.13585,-9.66605,-6.00234,-4.35532,-0.26599,-0.831562,2.98878,0.128679,-2.54674,-0.278737,-3.58409,-1.324 10120.7,10142.9,9227.01,3.56995,1.04759,3.75113,-1.7421,5.12807,3.1454,2.38504,-1.62768,-2.93793,-5.71266,-0.530001,2.84448,-2.04436,-1.31251,2.17243,2.11298,-0.867238,-7.66197,-6.87331,-3.32769,-0.373459,-0.116178,2.03689,0.379397,-0.00605166,-0.182103,-4.1657,-1.22794 10135.1,10142.1,9232.63,4.13322,3.14571,5.42112,-9.50857,6.61076,-1.5265,-1.3563,-0.229734,-0.953633,-2.39287,0.0907423,-2.25912,-2.95494,-0.622513,-0.878638,3.11006,2.20909,-12.7591,-4.65267,-0.652931,-0.508727,-0.484787,-1.43884,-3.89903,-1.68783,-1.20607,-1.47415,-0.30987 10150.6,10139.9,9237.26,7.08686,7.1115,3.05908,-7.31514,-2.75139,-6.15754,-6.75994,1.34201,0.583247,1.72791,0.0586144,-1.05549,-2.23348,1.35232,0.957745,3.9225,0.27845,-7.28043,-8.71747,-3.21629,1.12263,-1.08286,-3.72117,-4.10901,-0.817087,-0.319549,-0.171801,1.86899 10161.3,10137.9,9238.2,5.45348,5.872,0.0360833,-8.71486,1.68904,-1.57501,-9.84544,2.70784,2.39605,-1.45535,-0.548901,-2.93743,2.31592,2.21738,-0.0678836,1.75621,-1.90485,-7.83172,-5.34721,-0.902631,2.89369,0.938874,1.08004,0.946796,3.39736,-3.2386,1.23533,3.43628 10168.7,10135,9236.89,1.9988,3.16081,-0.959961,-1.65775,15.8147,12.2058,-6.43511,1.69639,2.59198,-2.06327,-0.47323,-4.35241,3.77438,3.79233,-2.16153,-2.08622,-2.56136,-3.89096,-0.736348,5.49778,-0.475583,0.770127,3.05002,3.17719,3.81221,-4.99556,1.59718,3.01185 
10178.3,10131.2,9237.28,0.818385,-0.233269,1.46873,6.63122,10.9706,17.5879,-3.54675,0.677416,3.72244,0.655626,-0.201865,-1.16835,1.57109,5.42876,-0.444523,-1.12764,-0.256929,5.62565,-1.99386,6.4084,-2.47406,1.18593,3.2834,3.0293,3.51573,-2.53776,0.959038,3.23253 10193.3,10130.2,9242.16,-2.48525,-2.35837,2.98987,5.98816,11.4719,15.9039,-4.84232,-0.825315,2.54659,1.43064,-0.659643,-2.96556,0.571285,2.41784,-2.00371,-0.757574,1.41844,6.37057,1.42823,7.71148,-4.93994,-1.54988,-0.232174,-1.34349,-1.26249,-2.05601,1.26179,0.464125 10210.2,10133.3,9250.5,-0.302459,-1.69801,0.843368,2.30597,6.15326,11.0157,-5.9274,-1.05244,-1.68469,-0.278629,-0.694935,-0.891837,1.23651,-0.21345,-0.305015,-0.0987808,0.160233,4.91775,0.166271,3.92353,-3.88399,-2.55526,0.198425,-0.923912,-1.86728,-0.552523,1.22445,1.15572 10221,10137.3,9258.6,-1.56339,-0.256664,0.840544,-1.61826,11.0061,14.4706,-2.59098,0.449882,-1.65171,-1.89163,-1.35949,-1.40198,3.60618,0.270121,-1.02351,-1.1912,0.778059,-0.110922,0.867721,2.27546,-5.20223,-2.14642,1.17716,-1.36266,-2.51971,-1.10085,2.42789,2.32548 10222.9,10141.6,9264.61,-4.74868,-0.212232,1.05283,-1.29221,10.744,4.75459,-2.81401,0.644295,0.850172,0.179994,-3.01777,-4.30435,2.71079,-1.12735,-1.29174,-2.07496,1.34575,1.0376,2.5823,1.95702,-4.5778,-1.28586,-0.494008,-4.39926,-5.46478,-2.40477,1.70545,-0.546783 10222.5,10148.7,9269.02,-3.49502,-0.678579,-0.213247,8.06515,8.4472,0.736921,12.8231,-0.680516,1.09355,1.44143,-3.62765,-2.08929,0.194595,-2.35671,-0.392866,-2.86869,-0.655593,6.76095,0.52286,-1.94996,-0.69629,-1.94695,-3.05311,-3.36287,-5.8798,-2.04553,-0.962602,-2.08692 10226.3,10155.2,9271.48,-1.96969,-0.131236,-7.34816,10.3469,1.43629,-18.1274,6.28789,-1.94889,-4.21799,9.10578,-0.96868,-0.513386,-5.07894,-4.75252,3.07715,-1.21549,-4.62974,12.6049,-2.11208,-4.5134,4.07597,-2.26695,-5.31607,-0.080814,-4.75562,0.0499323,-2.60796,-2.05158 
10230.1,10151.7,9270.27,-0.441668,1.99564,-2.24149,10.4542,-4.09391,-6.45561,-1.77752,0.712394,-1.02642,8.25875,2.54249,4.31177,-1.67116,1.28898,3.90167,2.27301,-0.292013,13.1856,-3.31394,-4.23242,0.509949,-0.582218,-1.55254,1.54596,0.383257,3.15094,0.659781,3.83919 10224.9,10138.7,9266.49,4.67287,5.1299,-1.26323,13.4301,-10.2745,-9.49416,-12.2719,-1.18436,-2.87586,6.16837,2.83569,6.07774,-2.8315,2.00898,6.40272,2.01559,-1.86315,15.8694,-4.72684,-3.25468,-2.65905,-3.311,-6.24296,-4.21139,-3.70695,4.80612,0.395122,1.76566 10212.8,10131.4,9265.67,3.01888,4.86272,2.80549,9.41976,5.08199,16.7307,3.01517,-1.39232,-0.901598,-3.17761,2.70511,2.89126,0.206015,2.09237,1.79821,0.427067,-0.286912,4.97158,1.88506,1.52106,-4.78901,-3.10639,-5.19696,-1.88352,-1.17405,1.76068,1.66502,-0.462334 10205.3,10137.3,9271.29,5.0191,6.44861,-1.029,10.2232,1.46143,6.79866,-7.1328,-3.52906,-8.32347,-3.93806,2.03961,4.301,-3.73195,-3.92217,6.44854,2.90593,-2.49697,11.4551,-0.562561,1.57056,0.711111,-0.350636,-4.25263,3.76126,3.75639,3.70316,-1.79131,-3.47622 10205.7,10147.7,9278.59,5.83546,6.36501,-0.202118,7.16455,-12.9828,-12.4607,-27.3389,-3.33415,-9.60681,-6.26496,-0.539386,6.78879,-3.91681,-6.10831,9.8609,6.12423,0.502419,17.71,-2.72276,0.90307,5.89102,4.35576,1.47131,6.87862,9.08531,6.44279,-3.45175,-1.92878 10205.4,10153.7,9279.43,2.61204,3.79426,2.8599,4.2373,-6.30104,-6.55433,-17.9117,-2.30217,-4.33352,-8.56342,-2.54108,4.06241,-0.221565,-2.25183,3.87958,2.42384,1.7425,10.0636,-0.274803,1.38918,2.9688,2.49859,1.85002,3.57782,5.56749,4.25356,-1.57246,0.769565 10198.3,10155.2,9271.53,1.79363,-0.436721,3.46418,1.17919,-6.21503,-12.0337,-14.7144,-0.753172,-0.422946,-10.0673,-1.05729,0.16841,0.00393219,0.329848,3.06417,0.641188,1.13987,4.50086,-1.96838,-0.158451,2.22687,1.01485,-0.617827,-1.82684,0.837829,1.35672,-0.969077,2.83866 
10187,10154.7,9258.9,0.357944,-3.85399,-0.403587,-0.905802,-6.94279,-16.6984,-17.7781,-0.22625,-1.87358,-4.80273,-0.208291,-3.41762,-1.38116,-0.435891,4.56144,1.47257,0.881539,4.31043,-2.35524,-0.63135,2.49929,2.73787,-0.3439,-0.967951,0.479767,-1.25236,-0.198644,2.70849 10175.5,10150.8,9245.55,-2.22289,-4.64417,-1.57873,-3.37822,-3.35046,-9.88201,-14.3071,0.168661,-0.756661,-2.69992,-1.57269,-4.61371,-0.741804,-0.794809,1.95045,1.34471,1.90438,0.670421,-1.36383,-0.0207592,1.95603,4.44548,1.70081,0.896225,1.96219,-2.68814,1.37985,1.21966 10163.9,10144.5,9233.39,-1.0609,-3.6573,-1.22008,-1.66234,-8.72059,-9.8591,-9.71449,-0.237702,2.4907,-0.383432,-2.45784,-2.52105,-0.451308,-0.95008,0.101755,0.998499,0.0147502,0.763548,-2.08901,-0.286814,2.08671,3.24587,1.98374,-1.03823,1.41551,-1.64013,0.866956,-0.452541 10152.5,10140.9,9224.11,1.58528,-1.3177,-2.21666,-0.770113,-12.1162,-14.2306,-0.877621,-0.372338,1.62768,2.76293,-0.69447,0.389726,-2.24466,-0.492948,-1.07534,1.2119,-2.84085,1.62365,-4.58137,-3.47859,2.38127,-0.58689,-1.20067,-5.12188,-1.38938,0.191315,-1.00868,-0.231626 10144.9,10141,9218.45,2.9188,-0.174985,-4.58083,-6.94645,-12.0718,-23.1781,-6.27315,-0.364715,-3.24703,1.70145,0.993811,-0.598274,-3.56103,-0.759525,0.496704,2.46032,-1.89983,0.597576,-2.01394,-2.93857,4.73883,-0.682548,-1.34504,-3.70636,-1.23983,0.0550942,-2.01066,1.58053 10141.8,10139.7,9215.32,1.06474,0.421951,-5.29652,-9.2234,8.36446,-5.7284,0.960531,-0.909556,-4.90704,0.770291,1.54135,-5.62095,-2.20122,-1.09503,-2.35206,-0.974175,-1.0101,-7.23319,3.01594,0.768168,2.39478,-1.32615,-1.6404,1.53725,-1.51813,-3.97654,-1.7665,0.833795 10141.4,10134.3,9214.23,0.86273,1.35397,-0.657898,-4.72598,2.71892,1.93911,-8.71178,0.127278,0.812447,5.14689,3.34014,-5.47575,-0.124804,-2.70815,-0.541837,-0.600256,1.53834,-3.53843,0.0605411,2.43643,0.689316,0.936364,1.45495,3.58725,0.917646,-4.12549,-2.16127,-1.91164 
10145.6,10128.8,9217.09,0.035273,1.26692,3.11502,-4.96307,-6.78084,1.02172,-8.79811,2.69846,4.94751,11.3598,6.51275,-2.0705,0.657905,-2.59061,-0.35795,1.18908,3.42851,-3.05799,-3.41004,0.806424,0.399374,2.92706,4.4301,0.273598,0.553543,-1.76552,-0.755718,-3.46001 10157.5,10128.8,9225.31,0.248702,0.312336,2.57768,-4.36878,-7.1619,-0.049009,-3.2758,2.7151,1.99544,11.1247,7.80862,3.2311,1.05086,1.13953,0.117826,1.5885,2.6575,-2.74279,-2.82058,-0.206648,1.25493,1.71967,2.81266,-4.13773,-2.45207,2.50385,0.789243,-0.268176 10170.7,10133.1,9236.11,-2.23675,-0.885477,2.34602,-6.30375,3.19378,12.3402,5.26964,2.51006,1.86666,4.33237,6.63528,4.85198,3.48519,8.46812,-2.52066,-0.634166,3.57125,-6.40349,1.46869,0.818123,-1.68738,1.2743,1.91738,-0.951766,-0.403311,4.63843,3.18061,7.04436 10176.7,10136.2,9243.78,0.782244,0.338989,-0.179665,0.677035,-11.8864,-9.98092,-16.6014,-0.0876104,-1.39338,0.511794,2.05749,5.37285,2.64871,7.7119,4.8232,-1.23349,2.56586,8.98335,0.643413,1.73431,-0.63479,2.49537,-0.600719,2.26345,1.69812,6.71431,2.31721,8.10433 10176.8,10136.6,9245.84,-3.20567,1.13405,3.92668,-1.78597,-0.236073,-2.19382,-11.4115,3.08973,1.33702,-3.27145,0.727769,-0.100717,5.38921,8.19297,0.492232,-2.20151,5.25989,3.6589,4.08819,2.21554,-1.32513,3.54291,0.119275,3.23854,3.862,2.19948,5.28701,6.25834 10178.4,10137.4,9245.74,-5.53585,0.420645,5.85295,-4.47724,14.54,12.4497,8.36972,4.99424,2.57479,-4.3639,0.677018,-2.6813,6.67898,7.5884,-5.54187,-1.3688,4.05586,-6.15054,4.2909,-0.899213,-1.24567,1.90686,-0.469126,1.72139,5.00978,-1.65339,6.96518,3.71489 10184.8,10141.1,9247.89,-4.95644,-1.91401,3.7243,-7.95873,7.49028,6.40526,5.31843,3.53676,4.4376,-3.95261,0.746514,-2.92295,5.17495,5.09822,-5.56387,2.13589,1.74219,-7.51099,1.13636,-2.24892,-0.712168,1.40767,0.401594,-0.663717,6.22808,-1.51586,5.59537,1.86444 
10195.1,10147.9,9253.27,-3.98,-3.06823,-2.05534,-6.10099,3.83685,4.55708,3.92119,0.928846,2.49159,0.0763172,1.14792,-2.88509,3.3624,3.14131,-4.76678,1.53759,-2.49281,-5.00974,0.3227,-1.57677,-2.36177,0.558465,1.76223,-0.153596,3.21585,-0.248642,3.44061,1.09292 10206.6,10155.3,9259.98,-4.64998,-1.64546,-4.6585,-6.92405,-1.23826,-1.4651,-7.80907,2.03872,0.322905,5.35637,2.9557,-1.90346,0.941137,2.90995,-2.25745,1.6362,-2.73525,-3.06893,0.361893,-0.410406,-1.95298,3.18373,4.96997,3.18307,2.09522,2.29277,1.29516,1.46329 10215.1,10159.8,9265.65,-5.64262,-2.22323,-2.32616,-8.62966,1.24852,3.53986,-7.11813,2.5704,-0.221435,0.41167,0.765415,-1.44792,2.10023,1.14341,-1.90736,0.761342,-0.0657556,-6.90094,4.60419,2.00852,-1.1143,4.44335,7.23913,4.6059,2.18355,1.92624,1.0442,1.06642 10218.9,10161,9269.98,-5.54728,-2.69742,0.623383,-4.54971,5.62832,12.115,1.60837,0.527375,0.225195,-4.35554,-1.09064,-1.69716,2.68584,-2.42078,-3.28377,-0.48855,1.46337,-7.59929,7.41232,3.78152,-1.52786,1.12019,5.14455,0.902689,0.791392,0.171231,1.01653,-2.1951 10225.1,10161.4,9274.87,-4.18459,-1.40959,4.0543,-3.78563,4.56469,13.1486,7.4468,1.32559,4.01602,-4.26528,2.47676,-0.706977,1.49841,-2.44619,-4.48237,0.314642,3.21848,-7.78537,6.45365,2.67192,-0.518631,-0.579868,3.1551,-3.30298,0.42352,0.385421,1.09082,-3.38628 10238.6,10163.7,9281.72,0.163978,0.29531,1.39945,-1.88245,0.770367,3.01996,6.47156,0.843119,3.05229,-2.89342,3.69162,1.01002,0.156961,-1.63668,-1.88068,0.459627,0.572044,-3.8789,6.07964,1.73877,1.04155,-0.952277,-0.352698,-3.89818,-1.13337,1.63306,0.655322,-3.05775 10252.3,10168.8,9289.58,1.69242,0.803041,0.969081,-1.57571,10.1963,10.1486,9.01137,-0.23779,2.45598,-11.8335,0.764195,0.347471,0.63322,0.818036,-2.67947,-0.48707,-0.0121974,-5.92175,4.75178,1.31186,-0.59319,-0.865273,-2.13114,-0.629395,-0.22624,0.187864,0.687159,-1.38416 
10258.4,10175.1,9296.44,0.693656,-1.47018,1.57507,-4.07861,13.9151,7.913,3.87705,-2.41045,1.40643,-18.8401,-3.38044,-3.78137,0.444306,-0.142111,-3.19856,-0.633983,1.26609,-6.96487,4.03731,1.86282,-0.255938,0.885239,0.576534,4.16798,1.48633,-2.91027,0.44246,-1.26861 10259.2,10179.7,9301.13,-1.11281,-2.9356,3.48279,-4.07376,14.5961,4.75668,2.95063,-2.50321,1.99968,-15.2573,-3.94817,-6.19421,0.994523,-0.409685,-3.36826,-1.30752,2.89435,-7.11783,2.3961,1.75016,-0.287404,0.839505,2.32354,3.16514,0.431073,-4.23834,0.224613,-1.13459 10258.9,10180.8,9303.2,-3.70956,-2.93593,3.76222,-6.98265,14.1006,4.36509,3.13521,0.524873,3.4745,-8.19672,-0.812591,-7.54285,2.87285,0.165482,-4.34303,-3.00502,3.10194,-11.8146,3.48326,1.87454,-2.39007,-1.71717,-0.0308325,-3.00344,-3.10099,-5.07511,0.999296,-0.291248 10259.7,10178.9,9302.61,-2.50722,-0.863499,1.6361,-7.29671,5.65875,7.35687,6.74534,2.86707,2.5541,-4.10002,1.92641,-4.21325,3.79643,1.11564,-2.85299,-3.384,0.718232,-13.5344,2.15514,-0.378278,-3.09826,-4.48668,-4.09564,-6.07121,-4.62941,-4.63714,1.35609,1.33932 10264.3,10176.2,9300.58,-1.50986,-0.476834,0.153861,-9.03392,2.34462,9.76008,11.2624,0.958254,-0.70443,-6.3101,0.886002,-3.04957,4.20237,0.687347,-2.59931,-4.30057,-0.344332,-15.3463,3.30618,0.212706,-1.83037,-5.39362,-6.37009,-5.79293,-5.6463,-5.17005,1.45394,1.2199 10270.2,10175.5,9299.06,-1.8193,-1.62584,1.49621,-15.2891,-0.19176,0.694336,7.97111,-0.906134,-1.88497,-6.47048,-0.900237,-3.70282,1.23614,0.322582,-3.93212,-3.45866,1.71962,-16.8955,0.58688,-0.409914,-0.259588,-2.68512,-3.64588,-3.35838,-4.51583,-4.19392,0.240148,0.159851 10270.2,10179.6,9298.63,-1.90388,-3.42457,3.36972,-15.5947,6.83754,-2.72512,7.96959,-1.26132,-2.35887,-7.13988,-3.00989,-4.84946,-1.32472,-2.90407,-7.21556,-3.99747,1.63284,-18.121,1.49353,-0.486008,-0.289734,-2.44221,-2.61409,-4.74746,-6.81336,-4.22186,-0.397997,-3.01155 
10263.1,10186.3,9296.94,0.1046,-2.95923,0.55802,-3.53552,11.956,6.06043,20.0157,-0.175478,-1.81809,-1.77528,-2.10279,-0.283075,-3.48288,-4.09089,-6.41457,-3.4926,-1.98205,-11.2644,1.51324,-2.56718,2.01317,-3.17178,-3.03644,-4.28621,-6.82533,-2.57386,-0.732198,-4.52782 10250.3,10186.7,9289.82,0.787893,-2.63004,-4.83671,4.59987,9.90165,5.11396,20.1712,-1.49013,-0.900383,3.2704,-1.38302,1.01612,-3.51797,-3.65748,-2.01906,-2.31487,-4.58178,-0.663723,4.99631,0.0846666,6.20019,-1.32911,-0.366123,-0.708005,-3.05462,-1.4169,-1.33549,-4.03837 10229.6,10174.2,9276.51,2.92922,1.43172,-8.45959,7.92191,9.82817,0.906035,15.1761,-5.66535,-4.80598,8.92318,-1.50732,0.863702,-4.19618,-1.72605,1.43049,-1.60336,-7.78679,7.9456,2.20311,0.976306,4.6808,-2.0774,-1.41618,1.52784,-1.00485,0.251303,-2.51818,-3.24837 10203.9,10154.8,9263.01,1.97737,4.88419,1.86761,-1.89071,16.8831,21.8027,18.6752,-2.85592,-0.407409,1.1857,1.57668,2.90834,1.42619,5.01683,-2.88862,1.13125,-1.02838,-3.77013,-1.83294,-0.874118,-1.82318,-1.06152,0.617181,1.34269,3.38069,1.15764,1.12216,1.38647 10184.5,10141.2,9256.68,5.24597,7.64832,2.18557,1.58328,4.92602,9.28816,-0.0172234,-2.70209,-2.36954,2.63625,2.45988,6.65341,1.30855,2.45772,0.884071,4.15289,-0.306199,0.501745,-3.91598,-0.843063,-3.78083,-0.751671,-0.908618,-0.353576,1.46737,4.59599,1.10914,-1.05414 10178.9,10140.4,9258.57,8.5511,8.38576,-0.704081,10.0442,3.87995,9.53107,4.06474,-2.33977,-3.33414,3.45052,0.769206,8.44243,0.151836,-0.110094,2.50423,3.89258,-1.86971,4.86933,-2.34618,0.208276,-3.54318,-0.382483,-0.444637,3.17545,1.86638,6.31308,-0.0788599,-2.11239 10182.7,10148,9263.52,7.664,6.75263,-0.540997,5.42972,-5.04193,-7.98425,-8.29464,-0.166299,-0.588527,3.31557,0.500806,4.72146,-2.51571,-1.43305,5.52369,5.671,1.03703,8.03067,0.0463032,4.16527,0.993743,2.27,2.01907,5.48701,6.28587,6.50446,-0.915646,-0.555951 
10185.6,10156.6,9266.64,4.26252,2.60407,3.65205,1.35764,1.93964,-1.71464,3.62386,0.664968,2.07164,-1.84774,-1.41728,2.03742,-1.93901,-0.955849,2.55509,2.24827,3.4143,2.08534,1.52467,4.36357,2.40504,-0.149419,1.87333,2.56701,3.76988,3.58853,-0.290298,1.53656 10182.8,10164.1,9266.99,3.44774,1.00051,3.58435,5.06036,-3.20427,-1.32409,2.16178,-1.24869,0.986594,2.68824,-3.10496,3.75494,-3.03899,-1.36189,2.85639,-0.797041,2.25309,6.84226,-1.01807,1.45026,1.64915,-1.77668,1.47461,1.32051,0.0174875,3.15498,-1.91103,0.915561 10177.6,10169.5,9265.47,2.97062,0.742454,2.19308,3.39405,-10.2555,-6.11354,-8.35604,-2.29312,-0.492631,4.2024,-2.46282,2.85236,-2.05854,-1.07623,3.34902,-1.67951,1.43015,9.72371,1.0556,1.2093,0.0329592,0.933345,2.62882,4.14907,1.43657,2.25242,-2.21302,0.424466 10175.1,10171.1,9262.53,2.78573,0.66686,2.0545,2.76769,-2.38316,1.38611,1.33538,-1.98843,-1.22362,0.719734,-1.48276,0.571928,-0.303568,1.13172,0.533248,-2.57485,0.218063,4.75694,4.12677,1.25451,-2.29974,1.77459,2.18864,5.66448,2.31972,-0.197648,-0.423422,1.24127 10176.1,10170.7,9258.49,5.31438,0.737423,2.23937,7.15555,-6.03862,-6.93885,2.59027,-2.08985,-1.82474,1.76361,-1.51506,2.40133,-2.94977,1.13326,2.34185,-1.4691,-0.319475,6.55378,0.151184,-0.820336,-1.03183,0.737373,1.0173,1.60097,0.120988,0.706961,-1.06361,1.61191 10177.1,10171.1,9253.43,5.27989,0.124242,0.594136,6.40228,-14.4792,-17.9873,-7.83873,-2.70593,-2.84279,6.19952,-1.02819,4.22035,-3.89328,-0.655654,4.6427,-0.543649,-0.312946,7.67303,-3.34568,-2.99026,0.892734,0.193866,0.437901,-1.37172,-2.06494,3.10779,-2.09072,0.969194 10175,10171.9,9247.28,2.27598,-1.11333,-0.371999,2.70022,-5.44405,-1.24932,2.95574,-2.54561,-3.07604,2.81372,-0.48024,4.11824,2.04907,-0.370621,1.24343,-2.71039,-1.27809,-0.906837,-1.29061,-4.80376,-0.177684,-0.68347,-0.0356975,0.976652,-2.58184,2.60538,-0.53245,1.0079 
10170.6,10171.1,9240.98,0.484599,0.0646839,-1.51326,2.89899,-3.4319,-0.213982,2.47953,-0.834731,-2.00581,5.72898,0.227883,2.67222,2.27602,0.0505934,1.31844,-2.26552,-2.6972,-0.975391,-0.869576,-3.70984,-1.26158,-0.292123,-0.590846,2.58737,-1.84822,1.62378,-0.526111,-0.491878 10166.9,10167.6,9236.09,0.964725,-0.0392702,-0.079079,4.19696,-8.77705,-7.3393,-5.33084,1.7816,1.00552,6.00308,-0.645333,1.80016,-0.345783,0.537513,3.29513,-0.258503,-1.94323,3.02276,-2.07851,-0.708951,-0.985472,0.42465,-0.0047685,-0.0149723,-1.37113,0.550535,-0.779034,-0.484969 10166.1,10161.5,9233.6,-0.598547,-1.76595,-1.06041,-0.952044,-3.22733,-6.25839,-1.71002,3.5389,3.14678,2.52469,-0.94774,-0.697306,-1.82073,1.8162,-0.398189,-0.0962201,-1.17773,-3.11075,-1.86249,-0.148137,-0.912351,0.0729367,0.372787,-1.52491,-1.99794,-1.67208,0.753712,1.02245 10167.9,10154.5,9233.85,1.32924,-0.579085,-4.09528,3.27081,-6.78357,-9.38603,-3.06915,1.95927,0.70163,2.46784,-0.635142,0.854662,-1.03664,2.44479,0.381434,0.976493,-2.1874,1.35415,-3.25712,-1.85514,0.202589,0.286026,0.720155,0.627719,-0.687001,-0.872865,1.21871,2.25385 10170.4,10147.3,9236.23,1.55419,0.655793,-3.90119,3.65032,-6.92144,-3.81534,-0.829364,1.59907,-0.150104,0.588015,0.212751,1.04803,3.09472,3.79829,-0.218751,1.11779,-1.55055,0.933332,-1.25266,-2.59487,0.647035,1.39731,2.58953,2.8589,1.80309,-1.43261,2.52993,2.79953 10171.9,10139.7,9239.22,2.16966,0.513128,-2.93705,2.73804,-10.8601,-4.50483,3.76187,1.03924,-0.676839,-1.4866,-1.19577,1.6866,5.98311,3.12642,0.0885709,0.9896,-0.594518,0.533618,0.379411,-3.82145,2.32664,2.22298,3.60721,3.05218,2.2889,-1.98702,2.79897,1.35025 10172.4,10133.5,9242.05,0.627291,0.905709,1.39363,2.99372,-15.425,-9.09382,2.11414,1.04226,2.10526,-4.39506,-2.77953,2.15891,6.66724,1.70369,-0.372333,1.40462,2.59187,2.26874,-0.378224,-3.69675,3.0335,2.25396,3.10192,0.0429504,0.10951,-0.799702,2.66794,-0.282681 
10173.8,10130.2,9245.36,-1.33644,1.42161,3.11004,3.93858,-17.0646,-12.116,1.67239,1.94826,5.54306,-3.85205,-1.5475,2.52019,4.33814,1.15019,-0.541069,1.99129,3.05378,4.25369,-2.76731,-2.80645,1.85733,0.988299,2.88783,-1.97077,-2.83768,1.85125,2.84766,0.389147 10176.4,10130.9,9250,-3.53503,0.391503,-0.270572,1.95882,-15.1875,-18.5758,-1.42497,2.28845,5.40786,-2.12974,1.20821,0.911564,0.2788,0.0689856,-0.00271805,2.01928,-0.20812,3.23848,-1.98612,0.0245125,0.488358,-1.18054,1.47019,-3.47437,-4.6287,2.11498,2.20934,0.993318 10178.8,10135.9,9255.56,-3.20255,-0.268054,-3.48033,2.47099,-11.3536,-16.9308,2.01776,1.40976,1.56328,0.853625,1.89586,1.47109,-1.50849,0.167668,0.627511,1.41809,-4.21425,2.05546,-2.39209,-0.416193,0.276633,-1.50971,-0.820011,-1.25927,-1.76,0.153711,0.431209,1.48315 10181.2,10144.1,9260.31,-2.49125,-0.613263,-3.86482,0.287362,-9.17309,-14.1157,3.48478,0.196793,-1.25386,2.83848,0.198147,-0.0165582,0.471677,-0.139327,-0.216901,-0.966032,-5.2193,-1.40546,-0.977273,-1.2574,1.78779,0.134179,-1.72164,0.653388,0.313432,-3.37716,-0.587605,0.861387 10186.6,10151.1,9263.12,-0.0358474,0.714951,-5.47328,-0.875177,-17.5089,-13.8361,0.471247,0.643912,-2.41975,9.9458,0.993041,0.803296,-0.226386,0.0668295,2.19176,-1.16819,-4.40868,0.69383,-3.38706,-3.58218,3.07732,2.10253,1.79789,2.06744,1.83904,-2.15516,-1.67344,0.661882 10193.4,10152.2,9264.85,-2.78688,1.85556,-1.96216,-7.27433,-5.61022,0.625161,3.91544,2.78407,0.13042,8.01854,3.573,-2.43853,-1.07905,0.148792,-1.48277,-2.3792,0.378784,-7.05144,-1.06108,-1.76148,0.135824,1.71393,3.80312,-1.43656,0.702495,-1.95731,-0.703674,-0.33177 10196.9,10148.7,9267.46,1.41437,4.41491,0.0330121,-0.96198,-19.7539,-11.561,-5.49424,1.03618,-0.588315,13.1158,4.11913,1.82776,-4.02743,-1.24038,4.49417,2.16391,1.61464,5.33203,-6.2827,-3.22771,2.42673,4.53812,5.27571,1.95384,4.83592,2.15944,-2.23414,-0.0179182 
10195.1,10146.6,9271.67,-0.599083,4.08109,5.56207,-0.651956,-1.899,4.41751,8.64946,-0.00765143,1.65381,7.40697,3.13743,0.528221,-1.17274,-0.333192,-1.34405,0.810869,3.04978,-1.96585,-3.00608,-1.02587,-0.427114,2.63482,2.33223,1.44749,2.70602,-0.508442,-0.782524,0.838544 10190.6,10149.1,9275.95,0.560997,3.32623,0.00253245,1.6273,-9.62681,-9.32197,-7.13248,-1.74244,-2.26773,10.279,2.01853,1.79006,-2.32577,-1.861,2.70102,2.63733,-0.668516,4.89049,-2.56801,1.67809,-0.682542,1.07859,-0.730879,1.04436,0.219305,1.04839,-1.30085,-0.204558 10188,10153.1,9277.72,-1.05102,1.4439,-1.2902,0.37219,3.61058,7.8905,-0.13638,-0.797121,-3.203,3.7144,-0.467361,1.43319,1.01941,-0.964803,1.27849,1.32106,-0.71757,-0.281666,1.82319,4.43107,-2.93419,-0.102775,-2.79816,1.60946,-0.350934,0.837113,0.975085,-0.206216 10189.3,10155.8,9275.17,1.71247,1.79065,-0.806826,4.2591,-1.07113,5.08033,-3.80833,-1.05846,-3.93516,4.86697,-2.48519,4.41458,1.0147,-2.04319,5.76698,3.04901,0.621182,6.18537,-0.471514,3.74338,0.0954557,1.78055,-2.23478,4.29533,3.28968,4.08665,-0.45381,-1.12752 10190.8,10155.9,9267.91,0.0885688,1.62773,3.97676,0.475719,6.50171,12.0036,4.17355,0.0800788,0.877184,4.13283,-1.66529,2.3731,1.22312,-1.52431,1.32333,1.30085,4.02821,0.00402446,-0.278254,3.83144,-0.00616006,1.70507,0.14686,2.05675,3.75234,3.42709,-1.13997,-2.28219 10186.5,10152.6,9257.34,-0.152071,1.1051,2.98089,-3.26014,-3.23874,0.545145,-3.74253,0.650653,4.32612,4.55661,-0.349067,0.443991,-1.54712,-2.37082,1.08068,1.11666,3.19332,0.114235,-4.77887,1.03262,0.526047,1.57427,1.96416,-1.21359,2.2522,2.81775,-2.19914,-3.20958 10175.9,10146,9246.33,-2.37365,-0.801223,1.8448,-4.49245,2.73452,3.45587,0.665856,0.804743,7.15539,-1.25789,-1.25952,-2.70716,-1.07845,-2.04441,-1.93328,-1.35806,1.5978,-5.1161,-5.79834,-0.925826,-2.80177,-1.15512,-1.39234,-4.88988,-2.71874,-0.727928,-1.17586,-2.55528 
10163.6,10137.3,9237.87,-0.803469,-2.78044,-0.895544,-1.96323,-0.541223,-3.95959,-1.23923,0.0489646,5.82687,-0.842944,-2.20839,-1.37161,-0.868195,-0.366623,-0.326653,-0.542204,-0.442138,-3.06811,-5.05951,-1.77693,-2.56412,-2.0747,-5.18551,-5.90628,-3.59607,-1.51359,-1.0358,-0.0442413 10154.4,10129.1,9233.99,1.23915,-3.76005,-2.64612,0.723829,-3.148,-4.96491,0.57486,-0.202117,2.21428,-0.386009,-2.61213,0.591537,-0.420445,2.51457,0.848114,0.0155665,-2.8099,-0.688955,-1.65728,-1.68576,-0.314736,-2.37588,-7.30164,-5.93878,-1.09582,-1.08092,-1.23666,3.04974 10147.7,10124.3,9234.84,0.130569,-3.33534,-5.30783,0.228073,-1.79103,-2.90284,1.72325,0.336059,-1.67646,0.805152,-2.51359,-1.68843,-1.08056,2.79024,0.667811,-0.918425,-5.25023,-0.613583,-1.21144,-3.86108,1.12026,-2.87087,-6.96217,-3.74878,-0.871173,-1.99148,-1.4983,3.13726 10141.9,10125,9238.34,-2.3342,-3.74514,-6.28736,0.247636,2.71253,3.12847,7.57994,-0.0401623,-2.07147,0.481455,-3.97685,-4.46362,-0.415913,1.42821,-0.575486,-2.68041,-4.57327,-2.24353,-2.60028,-5.84863,0.625916,-3.42977,-3.6369,-0.844099,-3.5874,-4.64335,-0.985747,1.2717 10139.9,10130.2,9242.19,-1.31024,-4.72475,-7.14762,0.73153,1.45053,-5.53508,5.90136,-2.31863,0.194991,0.488804,-6.97821,-4.41928,-2.29074,-1.35009,0.919216,-2.89533,-3.25509,-0.799203,-1.99553,-4.14064,2.04707,-1.98553,-0.137078,-0.0166083,-4.9352,-5.40326,-1.67739,-1.42035 10146.2,10135.6,9246.04,1.48702,-3.36982,-6.22071,1.74719,2.56435,-13.0074,1.99705,-3.21561,2.91416,0.844878,-6.7988,-2.16439,-5.4962,-1.85975,2.13575,-1.59383,-2.91884,1.52462,-1.3314,-1.85117,3.6544,-0.430522,0.692754,-0.840642,-3.31251,-2.33908,-3.05762,-2.1983 10158.1,10136.1,9250.8,0.841737,-2.49661,-1.39476,-1.47649,15.6927,0.965199,10.869,-0.546861,4.02682,-3.15137,-2.65822,-1.05518,-4.77058,0.229656,-2.58261,-1.60934,-0.689737,-5.44364,-0.234473,-1.95479,2.60062,-0.769404,0.484685,-2.21476,-2.21659,-0.527818,-2.3356,-0.631119 
10167.2,10131.4,9256.17,1.43756,-1.64599,0.0828565,1.10643,1.09851,-8.71597,-1.14743,1.16785,1.24835,1.69522,0.678389,1.91657,-5.73395,-1.26925,0.618759,0.671225,0.99422,2.5392,-3.14056,-3.00047,3.39733,-0.267724,0.865602,-1.72338,-1.28093,1.59131,-3.58079,-1.60917 10168.5,10125.9,9259.95,0.111755,-1.49369,1.18289,-0.284048,-1.52165,-7.82514,1.91577,2.83987,1.30957,4.34859,2.31828,0.547347,-5.35341,-2.95714,0.120479,-0.07344,1.25038,0.863374,-1.97606,-2.63292,2.99367,-1.51317,-0.192761,-1.94301,-2.34527,-0.816782,-4.15688,-3.69083 10164.7,10123.5,9260.03,2.54631,0.123647,1.85441,0.291179,-2.26534,-5.622,0.403256,2.75151,1.92159,5.45502,4.02912,0.277333,-3.49437,-2.59529,1.68451,1.03176,0.611114,1.05444,-1.37086,-0.762577,2.09659,-3.15435,-1.66892,-4.18628,-2.03484,-0.59484,-4.5361,-4.06338 10160.7,10123.9,9256.02,4.16394,1.15842,1.00215,-1.41089,3.00077,3.69915,2.12147,1.50602,1.11373,3.7783,5.12886,1.27055,-1.0735,0.163066,0.715848,1.75274,0.248762,-1.87449,-2.70607,-0.0821427,-0.982237,-3.91753,-0.603176,-5.15131,-1.55797,1.9122,-2.63806,-2.45448 10157.6,10124.8,9249.1,1.13904,0.752742,1.28292,-3.44794,5.87463,13.5955,-3.90547,0.053564,0.392376,-2.17549,4.02652,0.800942,2.14933,0.991305,-1.00534,1.93346,1.74799,-4.3887,-2.62983,2.12002,-3.97726,-2.37985,1.92724,-3.91126,-1.80145,3.29901,0.515867,-2.07875 10155.9,10125.9,9241.01,-1.21278,1.24353,0.0902419,-1.38693,3.90257,17.0687,-1.7671,-0.621263,-0.743581,-3.56603,3.19768,0.515647,2.83626,-0.394058,-0.965446,2.53295,1.02968,-3.73706,-0.646373,4.19926,-3.90665,0.100245,2.07717,0.65145,-0.4389,3.45695,1.30478,-2.26372 10156.9,10129,9233.19,-0.519545,3.45514,-0.128203,0.470911,-4.34917,11.6069,-5.37302,-0.249794,0.0908138,-1.64961,3.7305,0.887725,1.28233,-0.50548,0.651175,4.68216,0.481759,0.131141,2.83721,7.4517,-1.51906,2.02591,0.478488,2.8447,3.96564,4.21205,0.0189546,-1.26083 
10160.2,10134.9,9226.61,0.334619,3.63902,-1.33005,0.500933,-0.0390483,15.3466,3.49804,-1.22599,-0.443012,-1.29729,1.85728,0.83413,0.663791,1.08815,-1.61332,2.35978,-1.91003,-1.54128,7.06018,8.52392,-0.0931056,-0.631766,-1.8937,1.21041,3.92464,3.0125,0.582016,-0.0552563 10165.1,10142,9222.12,-0.0501124,2.72845,-2.35233,0.461804,-3.24106,3.89637,-4.4752,-1.7395,-0.658087,1.46568,0.74815,1.9358,-1.37579,1.26993,0.248403,2.1501,-1.97865,2.84403,4.93078,6.34449,2.55208,-1.66616,-1.28941,-0.85475,2.44335,3.28626,0.575625,0.0867697 10169,10147.2,9219.92,-2.57524,1.55278,1.64717,-0.408592,2.78686,3.93608,-3.35557,-1.05071,0.358949,-1.71793,1.23509,0.730307,-0.807758,0.469476,-0.799756,2.26666,1.42763,2.57756,3.31921,4.24278,2.32673,-1.92157,-0.625841,-1.7385,0.55312,2.469,0.416022,0.102824 10167.7,10149.8,9219.39,-2.61236,0.265041,4.14099,-1.10443,5.68968,5.75872,0.437178,-1.27371,-1.44794,-5.50529,0.962099,-1.7594,-0.014506,-1.47838,-2.10998,2.88166,2.32266,2.31558,3.04189,2.76494,1.13588,-2.76241,-2.5749,-1.37983,-0.132212,1.62609,0.00182996,-0.567092 10161.2,10151.5,9219.88,-1.00231,0.225002,2.94421,2.03312,-0.355979,4.16591,-0.636307,-0.980578,-3.17075,-4.4683,-0.0413473,-0.96548,-0.194949,-0.798368,-1.08568,3.94015,1.20872,6.21739,0.493017,0.663456,-1.20346,-2.76074,-4.99576,-0.484664,1.27829,1.87168,-0.0347963,-0.649195 10155.5,10153.9,9220.83,-0.939771,0.647249,0.0634509,3.2582,-1.62031,4.0693,-0.997477,-0.169163,-4.01209,-4.20755,-1.14083,-0.040949,0.676499,1.0769,-0.637069,2.85891,0.53402,4.18699,0.666861,0.369829,-2.63692,-0.336214,-3.73798,1.47577,2.81105,-0.292838,0.0270106,-0.151526 10154.1,10157.5,9221.67,-1.65802,1.59847,-3.57612,1.52401,6.37221,4.48866,-1.46299,-0.915699,-6.98915,-0.340048,-0.952717,-2.18866,-0.811792,-0.642645,-0.622625,-0.300884,-1.00057,-1.15759,2.44751,2.6773,-1.823,1.29837,-1.91591,2.49204,1.93197,-3.59974,-1.91245,-2.4109 
10154.4,10160.7,9221.98,-0.583463,-0.108757,-4.6507,-0.0693877,5.35637,4.425,-6.56889,-1.82597,-8.57191,2.85503,-1.05825,-2.33955,-3.22781,-4.76081,2.05753,-0.861931,-1.83229,-0.124382,0.503483,2.18131,1.30665,2.42826,0.824233,3.84653,2.09007,-3.3925,-4.31649,-3.96112 10153.4,10159.2,9221.68,-2.76485,-4.09131,-2.87698,-1.10712,12.5336,12.9839,-4.34652,-1.87041,-6.50663,-1.43881,-2.78497,-4.09349,-3.27711,-7.58611,-0.918956,-2.43732,-1.68029,-2.93885,1.37614,1.00354,-0.202025,0.252735,-1.35224,2.14941,-1.22668,-3.85694,-3.91196,-5.39514 10153.1,10150.6,9221.82,-3.95579,-6.11602,-1.95691,-0.571033,7.36799,2.23424,-8.23593,-1.15065,-2.89936,-3.34966,-3.42278,-4.92737,-4.22729,-7.57776,-1.53936,-2.4826,-0.485854,-2.05301,1.35048,0.235875,-0.851581,0.299046,-3.65228,0.452501,-2.53126,-4.14097,-3.0318,-6.032 10156.5,10138.1,9224.22,-1.72219,-4.81284,-2.04034,3.64429,-3.40667,-8.21149,-2.06758,-0.247629,0.240041,0.844032,-2.55693,-2.29071,-5.62686,-4.10255,0.955484,-2.58578,-0.573095,1.96046,-2.89531,-2.47853,1.00662,1.59082,-2.31097,1.60096,-0.355857,-3.59741,-2.54995,-3.16362 10162.5,10126.5,9229.66,-1.48624,-2.31864,-1.19917,5.07688,-2.15075,-4.48733,6.81643,1.19375,3.4529,3.66948,-1.49639,-1.71619,-5.51437,-1.29231,-0.407537,-4.604,-2.54282,0.0824236,-5.27449,-4.81883,0.767691,-1.39492,-2.55861,-0.325428,-1.75464,-3.59903,-1.89829,-0.732932 10167.7,10118.7,9237.56,-1.06333,-0.880843,-0.709075,2.8371,-10.0447,-10.4348,-2.5904,3.18465,5.97115,6.33779,-0.55058,-1.01646,-4.14332,-1.6247,-0.0193591,-4.01402,-3.73144,0.38443,-5.50468,-6.41294,-0.295721,-3.62009,-2.70822,-3.1355,-4.45086,-2.10376,-1.79258,-1.22716 10172.5,10116.9,9247.18,1.551,0.130326,-0.490568,5.87654,-14.5436,-8.35183,-0.790109,3.39107,4.7174,8.28156,-0.0057788,2.6686,-1.84943,-1.48071,1.03911,-4.0934,-3.48936,2.7605,-6.22541,-8.72046,-2.487,-3.9855,-3.15508,-4.85806,-6.30628,-0.1826,-2.22861,-1.91313 
10179.7,10122.6,9257.78,1.5355,1.00586,-2.46594,5.55739,-10.6179,-9.89219,1.01847,2.02002,1.55047,10.3651,1.59035,2.3257,-3.02423,-0.681756,0.379055,-4.13859,-2.86252,2.65539,-7.09955,-8.4785,-1.80811,-2.44766,-3.84586,-6.08215,-4.18234,0.309597,-3.66089,-1.78168 10188.9,10134.4,9267.84,0.423127,-1.44673,-6.16369,2.54558,-3.2605,-10.2788,1.93481,-0.460125,-1.55478,7.53447,1.04311,-2.037,-5.33297,-0.715827,-0.912315,-4.00679,-5.27357,1.32517,-7.02947,-5.6844,2.49,-1.1701,-4.14164,-4.46692,0.160721,-1.23591,-5.46575,-0.678645 10196.3,10145.5,9275.21,0.204833,-4.851,-9.24744,3.38063,-3.90706,-1.89916,-0.318999,-3.05687,-4.83175,3.88926,-1.68472,-4.52857,-6.76493,0.053409,0.356074,-2.44354,-9.25902,3.95243,-8.99635,-3.68403,4.07743,-1.41439,-4.06526,0.784286,2.50666,-1.59161,-6.31937,0.0761621 10200.4,10148.5,9278.92,-3.06966,-5.752,-6.27773,-0.452092,4.18213,13.2473,-12.0757,-4.47092,-6.49884,-5.96616,-4.08975,-9.08064,-3.65565,-1.03612,-1.9757,-2.79369,-8.22081,-3.13926,-2.68074,1.98539,-1.47914,-4.27865,-6.82097,-0.0420558,-2.72616,-3.80964,-3.69263,-2.81706 10202.3,10144.3,9279.66,1.7621,-1.2767,-1.87182,1.61337,-6.80859,14.4514,-16.815,-2.07514,-4.63562,0.0307544,-1.49074,-2.29138,-1.18636,-1.08621,1.86862,0.689509,-4.2555,-0.913166,-4.04706,-1.13903,-2.95495,-1.4359,-3.45987,4.36607,0.619825,-1.53464,-2.06409,-2.58631 10201.6,10141.5,9277.89,2.73427,2.11183,3.79277,1.71546,-5.8859,13.3557,-11.3022,2.79327,2.37116,13.2011,3.98285,0.966107,0.039656,-0.715821,2.85166,2.34242,2.77476,-0.0888099,-4.98538,-3.4432,-1.83877,3.57211,2.68075,7.05565,6.45616,-1.54302,-1.24469,-1.49869 10196,10143.8,9273.55,-2.52737,0.202188,7.08167,-4.89952,6.71679,10.6699,0.756855,5.54471,7.25909,13.9583,6.39787,-2.37566,0.745793,-1.45474,-1.09404,0.910205,7.21143,-6.92492,-3.24203,-2.89701,-0.543452,6.07649,7.33376,6.57894,6.15484,-4.40884,0.0587056,-1.11052 
10186.2,10147.8,9267.63,-4.31786,0.145523,8.74123,-1.12372,3.61382,5.90919,-2.20636,4.87121,7.93339,10.8223,5.77747,-1.02016,1.70524,-1.23974,-1.99873,1.22043,7.18349,-2.02393,-4.52471,-1.19367,-1.87015,5.60664,6.92162,5.30532,3.03549,-3.16865,1.33872,-1.3693 10178.3,10151.3,9262.07,-1.01371,-0.36759,7.07326,3.03463,-3.67644,6.41668,1.01659,3.32806,5.69645,6.11989,4.17302,3.13986,4.40199,0.31144,-2.58094,-0.0539033,4.16067,1.49299,-3.2753,-1.39228,-2.172,3.33149,4.19598,3.46064,0.616277,-0.818505,3.98959,0.698301 10177.2,10154.3,9257.94,2.09186,0.0766925,2.17884,5.08344,-13.9717,-0.882929,-3.84368,2.86526,4.57806,7.77504,4.75117,6.29349,4.58116,4.04706,1.06485,0.914494,1.84175,7.12093,-3.92066,-3.04038,-1.76589,1.29071,2.74094,1.46176,1.98937,3.12251,5.09485,3.84087 10179.4,10155.4,9254.74,0.187596,-0.882072,-0.665652,4.15319,-3.56212,6.25634,3.46947,2.99756,3.30879,0.859046,5.1349,3.91232,5.90056,6.60019,0.839946,-0.162343,-0.484405,2.65509,-1.8674,-3.50916,-5.10299,-1.60522,1.28388,-0.0295086,1.05,2.81748,5.21994,5.53563 10178.8,10153.1,9251.26,-1.91139,-0.154839,-0.832651,7.32065,-8.14661,3.20829,-4.61065,3.9011,1.20806,1.29028,6.11631,4.24084,4.66918,7.38927,3.1094,1.72009,-0.436683,6.06925,-3.83738,-3.64103,-8.35166,-0.222316,1.74303,3.43329,2.82215,3.91599,3.2218,6.05878 10175,10149.2,9246.46,-3.00223,-0.829219,2.18951,8.12634,-8.29635,3.98254,-2.55022,3.58933,0.0476173,2.00734,2.85452,5.13863,4.39434,5.86178,1.57419,0.321093,2.11151,4.62819,-0.677836,-1.98205,-7.44972,1.36379,2.52895,5.12261,2.10196,3.15929,2.77152,6.16477 10170.8,10147.7,9240.32,-2.09934,-1.33891,3.77143,6.49402,-6.43302,-0.0826344,0.87837,1.12061,0.421557,1.06025,-1.52903,5.64507,3.68263,3.49536,1.25096,-1.4957,2.92854,4.60413,2.40658,-0.645265,-3.32217,0.987715,2.60908,1.94117,-0.424246,2.85508,2.71473,4.88469 
10167.3,10148.7,9234.04,-1.71112,-2.89318,3.67043,1.66277,3.35424,4.57631,10.1924,-0.35173,1.35064,-5.80931,-1.82085,3.64176,4.57117,2.2882,0.924739,-2.41648,2.22467,2.19365,5.80375,-0.426137,-2.32705,-0.919332,2.09081,-2.34116,-2.25007,1.71251,3.40172,3.5108 10165.7,10149.1,9229.23,-1.45001,-3.05548,2.45599,-0.349391,3.71978,4.53119,5.144,-0.0754888,2.20722,-6.90377,0.948441,2.13514,3.08117,1.83942,2.86791,-0.010419,2.66035,5.23219,5.6626,-0.804354,-2.37724,-1.67323,0.673861,-3.53649,-1.59081,1.76997,2.75549,2.29186 10167.4,10147.1,9226.8,-1.49928,-2.70714,1.88393,-0.842721,-0.225431,3.25531,1.41947,0.140255,3.21042,-3.88608,1.41104,1.86088,-0.091131,0.642157,1.94581,0.307133,3.18746,6.22574,4.30938,-1.01513,-1.1936,-1.8575,-0.588364,-1.42784,-2.08205,1.85519,1.46316,1.06047 10171.1,10143.9,9226.48,-2.01672,-2.40053,3.06391,-0.0599903,-8.34303,2.94718,-5.04409,-0.199276,4.0892,-3.68083,-0.226057,2.75547,-0.686676,-0.843757,0.670264,-0.458086,3.08212,7.11729,2.84836,0.933537,-1.50789,-1.59001,0.179663,0.0589795,-2.55704,3.42709,0.775783,0.360096 10175,10140.6,9227.89,-1.34782,-2.60865,2.14445,1.39294,-10.3608,4.5868,-8.2559,-1.78039,0.356678,-10.0047,-3.28868,2.87133,1.85333,-3.67234,1.53223,-1.27653,0.113475,6.97877,4.49731,3.38158,-3.24882,-2.09817,-0.213742,-0.816136,-3.92766,4.36792,1.46638,-0.25462 10179,10139.5,9231.01,-0.683001,-1.14693,0.835389,1.45465,-4.93888,6.92044,-3.2459,-1.76518,-2.11784,-11.5638,-3.99539,3.25477,2.97649,-3.54233,2.62301,-0.286071,-1.99677,5.44349,5.35012,2.55683,-3.04093,-1.82791,-1.42661,0.583625,-2.6178,3.43693,2.29735,-0.308687 10185.5,10142.2,9235.77,-0.0852919,0.0218383,0.522022,1.091,-4.00515,-0.71681,-2.72016,-1.24891,-1.4593,-5.53454,-2.81228,2.98724,1.40275,-1.35994,4.37674,1.00841,-2.02092,6.34309,4.01241,0.223476,0.719167,-0.617158,-1.79277,2.19906,-0.00915837,1.60933,1.1106,-0.276707 
10194.7,10147.7,9242.28,-0.507821,-1.45713,1.82236,1.06383,0.990703,1.16431,3.40878,-1.35424,0.436421,-3.7364,-2.82733,0.844561,2.18188,1.42103,2.14788,-1.48658,-0.956157,3.31294,2.03859,-1.09837,2.11718,-0.147919,0.113767,0.665977,1.0134,-0.758268,0.662046,1.48327 10202.3,10153,9250.68,-0.953894,-1.28733,1.09826,0.183582,-2.63676,-4.1377,-2.89907,-0.851983,3.07691,-0.452803,-2.18838,0.00930997,2.87142,4.0314,0.911046,-1.55443,1.18147,4.24956,-2.48362,-1.23019,1.72571,2.11001,5.29268,-0.281886,3.31927,-0.100871,1.85826,4.09941 10205.4,10156.4,9259.89,-1.27754,0.134823,0.181405,0.430733,3.94306,1.54036,2.99815,-1.16285,4.70226,-4.24342,-1.81256,1.00154,4.93307,6.24027,-1.59843,-1.48742,2.34844,2.10305,-2.00905,-0.662325,0.626241,1.17997,6.74123,-1.67701,1.35772,0.491316,4.32271,6.53414 10204.9,10157.9,9267.94,0.0906612,2.16352,-0.379486,5.42194,2.73054,2.84047,-1.4914,-1.83181,4.02307,-5.15449,-0.262248,3.79351,5.21678,7.80905,0.384689,1.27337,2.9796,6.90988,1.28339,2.20996,-0.91791,-0.163496,3.78903,-1.75168,-0.655347,2.9127,4.88667,7.66747 10203.5,10159,9273.39,2.81598,1.22437,-0.368556,7.79675,3.42922,7.94279,4.57077,-0.708312,0.0968463,-6.10539,0.906129,5.55489,5.11842,8.21484,-0.0671665,1.22889,2.37144,6.24544,4.97372,3.9233,-2.49967,0.267274,-0.310124,1.09266,-0.410233,4.04567,4.74621,8.0612 10203.2,10162.2,9275.77,5.91857,0.355765,0.897437,11.4606,-3.5509,6.21936,2.57301,-0.0103725,-3.12789,-4.93913,0.601331,6.94209,5.77388,6.93334,1.15761,0.716978,2.28439,10.4648,4.58557,4.39511,-2.76356,2.73426,-1.51427,4.03252,2.99548,5.47757,3.66414,6.66569 10203.5,10167.2,9275.21,3.60261,-0.370029,0.212296,6.53742,-1.17501,1.39057,4.60494,-1.59955,-3.36286,-6.83681,-0.619753,2.05525,7.21718,4.0699,-0.311278,-1.80144,1.07578,6.02142,4.81799,3.05296,-1.94492,1.84126,-1.66326,1.40391,1.77364,2.95825,3.1993,3.61198 
10203.2,10169.7,9272.52,1.94895,1.27875,-0.411546,7.45768,-3.75161,0.551798,7.13428,-3.82068,-2.61405,-4.51085,-0.839975,-0.654388,7.59238,3.63367,1.11679,-0.895324,0.0589114,6.72608,0.605615,-0.28023,-1.84675,-0.134175,-0.468956,-1.06577,2.10307,1.19208,2.14254,2.35948 10201,10166,9269.14,-0.454618,0.774031,2.06017,2.8462,-0.622985,0.18548,5.53147,-2.50822,-2.46147,-4.96779,0.0109421,-5.95039,4.88549,1.45711,-1.36876,0.21175,1.58667,0.959389,-1.72767,-0.999701,-1.91612,-0.271218,-0.271307,-3.60937,2.2528,-2.81471,1.29832,0.342989 10196.9,10158.5,9266.51,1.16537,-1.9421,4.60098,6.66208,-8.91079,-4.05041,0.977918,-0.375912,-2.52562,-2.44083,-1.83608,-5.04574,0.870179,-2.88837,0.903319,2.45464,2.77487,7.13809,-7.32993,-2.29902,0.410437,1.61472,1.76486,-2.68616,2.88565,-3.79142,-0.830458,-1.20118 10194.1,10152.5,9265.18,-4.11534,-5.864,4.81522,5.05616,0.145339,-4.93641,2.59855,0.656712,1.10696,-4.83177,-6.68192,-7.2593,-1.01756,-6.50992,-0.623669,0.165413,3.83811,5.84041,-5.84841,-0.103661,1.98729,0.416145,1.34348,-6.16515,-2.67871,-5.57128,-1.65554,-3.26762 10194.1,10148.4,9264.07,-6.59722,-4.92656,-2.01588,3.7417,0.726794,-18.2936,5.15057,-0.276157,1.50739,-0.538248,-8.52874,-4.00362,-4.55022,-5.27015,0.604573,-0.930054,-0.109161,8.19838,-8.17669,-2.1092,4.17484,-1.56197,-1.02102,-5.8341,-5.50376,-1.7134,-2.50895,-3.06608 10193.9,10142,9261.25,-7.62788,-2.98611,1.9356,-1.40885,17.3716,4.06957,22.1809,1.39972,5.64224,-7.94302,-5.59134,-1.45901,0.439725,1.11211,-6.73411,-3.11746,1.4598,-4.78344,-2.09513,-0.404037,0.473396,-4.22587,-2.43839,-5.70551,-5.26427,-0.515338,1.20082,0.113119 10190.4,10132.9,9256.55,-0.061965,0.47587,-3.01478,1.28661,-2.15014,-14.2047,7.89898,0.463674,0.911903,2.0883,-1.64338,3.11185,-2.21723,0.781415,-1.37312,0.396228,-1.38267,3.09944,-1.8496,-1.29836,2.6087,-3.15966,-2.03297,-3.33185,-3.23065,2.92606,0.328003,-0.0324179 
10185,10126,9252.36,-0.460313,1.71643,-3.7396,-2.47922,-1.49725,-15.3645,-1.80975,0.715758,-0.981069,-0.691494,-0.794101,-0.106849,-2.08179,-0.30971,-1.53311,0.428815,-0.320026,-0.221114,2.28648,0.175576,3.04606,-1.33911,-0.290353,-5.37868,-3.63253,0.919151,0.306196,-0.421839 10178.6,10124.8,9251.04,-1.00256,1.33259,-4.2472,-1.03971,2.95821,-4.55752,1.84476,0.117356,-4.36831,-4.27268,-1.02576,-0.886254,0.661063,-0.0446314,-0.718596,-0.508343,-2.00182,-0.337999,2.57329,-0.613947,2.18595,0.685998,2.2221,-1.4549,-2.89677,-0.0111036,1.2411,0.83044 10170.8,10127.6,9252.97,-1.71108,0.0714348,-2.91875,-0.0818013,10.0027,5.28964,4.84662,0.115636,-5.97389,-2.97492,0.466922,-1.16018,3.14319,-0.484977,-0.73996,-1.40938,-2.86898,-1.18229,2.85098,1.59393,-0.709864,0.769892,0.0526875,0.667581,-4.09633,-0.130706,2.87503,0.28772 10163.4,10130.8,9256.69,-0.0482655,-0.561906,-4.41924,-1.93638,1.00001,-3.80859,-6.74655,-0.693966,-6.90741,3.83606,-0.443929,0.133173,1.32042,-4.12952,2.21239,-0.401666,-2.83084,1.48444,3.60821,4.7162,0.0479322,1.57325,-2.9423,0.781086,-3.57562,1.01359,1.5974,-1.03302 10159.1,10132.9,9259.9,0.830676,1.38376,-3.59798,1.88876,1.90766,6.33722,1.16568,-1.88109,-5.49532,7.56995,-3.97276,2.47056,-1.10217,-4.02745,0.530141,-1.80729,-2.44923,1.11112,6.04583,5.79514,-1.61378,0.146823,-4.31812,1.65679,-0.82556,0.385538,-1.6035,-0.921055 10159.8,10135.2,9260.63,-0.16576,1.00018,-5.12473,0.442361,0.505831,-5.64864,-2.63413,-2.52592,-5.46478,4.95174,-4.3147,0.782684,-5.73615,-4.82371,0.266276,-1.86669,-4.0481,-1.31822,9.03428,5.18538,0.835431,-1.04748,-4.21294,1.0615,-0.105573,-1.22812,-5.24566,-3.63422 10165.2,10138.1,9258.46,0.205477,-0.680098,-4.46762,5.26891,1.18115,-1.68502,7.13137,-1.22722,-4.01706,-1.7858,-0.511666,3.55446,-3.85553,-2.43205,1.3525,-0.694302,-4.16672,-0.729833,7.26617,2.38627,0.742375,-2.04911,-3.24066,2.72775,2.10783,0.115275,-4.78462,-4.34396 
10171.6,10139.6,9254.61,-1.51268,-2.23477,-5.13237,-3.29461,-0.317239,-10.5071,-7.94002,1.87205,-2.15615,-2.57627,4.52526,1.46446,-2.39092,-3.68309,1.44927,1.27351,-2.10555,-3.67494,7.0263,3.64847,0.370668,0.612656,-2.452,4.76347,5.31087,1.21101,-2.18927,-4.86589 10174.6,10139.6,9250.85,-0.380976,0.430706,-4.77251,1.24603,3.57465,-3.14504,-10.8805,1.4131,-3.82203,6.1265,4.05681,1.86576,-2.69539,-3.84931,0.571097,0.0445532,-3.61574,1.0929,5.45496,4.67637,-2.69117,0.376736,-3.44843,8.26613,5.44059,2.39248,-1.35143,-3.43895 10173.2,10141.8,9247.9,-0.967231,0.660605,-0.333774,0.682442,10.1733,9.80472,-4.02844,0.296976,-2.0856,1.70749,0.105393,-0.302007,-2.02762,-1.68176,-2.57321,-1.85542,-2.20576,-3.56605,7.81712,4.57148,-0.717533,0.00661063,0.070936,7.88567,3.00205,-0.188925,-1.30646,-0.417109 10169.8,10147.8,9245.05,1.57911,1.89614,-1.23894,5.44327,1.1255,2.7455,0.888702,-2.69789,-2.29535,1.37374,-2.16695,0.277041,-2.61632,-0.168021,1.19527,-0.966804,-1.39634,2.02717,6.13068,1.74285,2.61838,-0.673957,2.42798,5.71141,1.0237,-0.190537,-2.48355,-0.424022 10166.9,10152.4,9241.4,1.48812,1.56883,0.00439658,-1.99079,-5.3945,-7.45076,-2.79497,-1.09824,0.438405,1.08335,0.567998,-2.12211,0.537132,0.235065,2.13962,0.850241,2.33283,0.11668,5.71046,0.316621,2.37782,1.5783,4.38674,4.44102,2.85837,-0.867284,0.197126,-0.632035 10166,10149.9,9237.21,3.10346,3.20745,-0.0787972,3.26164,-1.99167,1.15174,7.73898,0.388067,-1.3872,7.93093,2.89628,-0.846609,2.95243,1.10786,0.0356645,-0.191303,-1.48335,3.06518,0.833731,-2.48298,-2.62814,-0.329278,-0.0454046,4.84244,1.50962,-0.571214,2.28968,0.0896905 10169.4,10141.9,9233.72,1.54047,2.79665,0.872984,0.435893,0.341067,4.50191,6.31086,2.24353,0.0763229,5.33021,2.30696,-1.94916,2.28551,1.6759,-3.55737,-0.57595,-3.31446,-1.28349,0.109544,-0.911539,-3.08755,0.149125,-2.57658,2.65457,-0.759677,-1.72314,1.73795,1.22082 
10175.5,10134.5,9231.85,3.08721,1.31195,-0.463831,-2.78365,-16.0641,-12.4959,-7.90321,1.44639,2.2521,2.09953,-0.628689,0.674957,-0.991746,0.999703,0.501374,1.08647,-1.9555,-0.457535,-1.969,0.140249,0.679574,4.05153,-1.26929,2.9472,1.23177,0.0460567,-1.18548,1.19414 10178.5,10132.3,9231.94,4.8578,-0.156201,-1.83619,3.45539,-10.5983,-4.40534,-3.25278,-1.48511,1.7839,1.07398,-3.79721,3.44697,-0.661031,-0.19397,1.51898,-2.78611,-1.58924,-1.02247,-4.03291,-0.779814,-2.72459,1.42865,-4.44874,1.96164,0.024013,0.769821,-1.68183,-1.09525 10176,10135.5,9234.24,3.98434,-2.9881,-1.82932,-3.45496,-4.37718,-1.32479,-6.81161,0.242295,3.63988,0.773917,-2.92089,1.50769,1.03257,-1.29175,0.607123,-3.32519,0.794345,-7.2134,-4.18473,-2.11878,-3.48641,2.04926,-1.83971,2.5711,1.8547,-0.444122,0.204744,-0.633906 10170.3,10141.1,9238.24,4.5574,-1.21766,-1.92884,-3.3891,-4.53289,-3.61119,-11.1428,0.87067,2.52674,6.28098,-0.916225,0.833349,-0.285056,-2.02874,2.83162,-0.822357,0.836116,-2.02452,-4.36166,-2.46534,-2.40599,3.53798,0.439996,2.8824,2.66576,-0.190266,-0.411649,-0.335746 10164.8,10146.9,9241.73,1.14271,0.21175,2.54403,-5.97996,8.86795,9.92082,0.583279,0.92891,3.1377,1.52082,0.653327,-2.04189,-0.909795,-1.88382,-1.45444,-1.72465,2.94817,-6.9659,0.661566,-0.779148,-2.33549,3.61435,1.90115,-0.709103,0.572663,-2.44443,-1.61985,-1.24632 10161.8,10151.9,9242.42,0.429305,-0.24402,1.54324,-0.758714,1.99988,2.30697,-0.150645,-1.67843,-0.372931,2.68223,0.974669,-2.18675,-3.69726,-3.84373,0.315076,-1.61503,2.02219,-0.439987,1.5067,0.347441,-0.468043,1.85512,2.51346,-3.61534,-1.61311,-1.68631,-4.32277,-3.31289 10160.6,10154.5,9240.5,-1.6783,-2.7916,3.79283,-1.46484,1.8842,7.0456,3.61276,-2.08564,-1.14902,-3.90469,1.00738,-2.71903,-1.12392,-2.56102,-0.564502,-1.26929,2.87817,-3.80446,2.16188,1.69189,-0.17359,-0.806729,4.45158,-4.99401,-1.9224,-2.1335,-3.41399,-1.5215 
10158.8,10152.9,9238.94,-1.26294,-1.55708,2.47997,-0.37092,-5.35681,-1.99801,-4.61673,-3.19995,-3.63982,-3.59422,0.268397,-1.15304,1.21312,-1.94008,2.37467,0.463918,1.03699,-0.249188,1.94821,3.1095,0.656428,-1.26258,5.17342,-2.5293,-0.911564,-0.727538,-1.60047,-0.657086 10157.1,10148.4,9241.47,-0.729297,1.90628,1.50273,8.02209,4.5029,7.25435,-0.943104,-3.87229,-5.15977,-0.605295,-0.786266,-0.00624273,3.2036,-0.99694,1.83674,-0.424322,-0.759934,4.69506,3.12589,4.93905,-1.14094,-2.37706,0.896838,-1.15642,-2.07425,-0.341439,0.651623,-1.90525 10159.3,10145.1,9249.53,-3.61489,-0.368775,4.8318,0.654323,13.8953,20.2332,9.01061,0.740005,1.06482,-1.98312,1.43178,-2.39481,5.44965,2.23927,-2.07082,1.84445,3.36316,-2.3874,5.82791,5.13504,0.331121,1.17574,4.11636,2.46863,2.53744,-2.31289,3.73605,1.261 10166.4,10146.2,9260.39,-0.690065,-0.196533,2.57149,3.28245,1.26863,3.07282,2.3288,0.343504,0.7493,7.7189,2.47287,-2.19401,1.83016,1.49389,2.04941,5.57015,1.68587,7.37325,4.33035,3.86901,3.21355,1.31074,4.30838,4.34097,4.14204,-0.792683,1.91579,1.4487 10174.6,10153.3,9268.63,0.973864,0.288282,4.67663,-0.604468,1.35396,1.77193,6.1612,0.928573,3.56181,0.301872,1.61496,-1.94891,1.37811,1.784,-0.829802,4.5252,2.98522,2.05165,3.03006,0.33278,4.9167,0.692046,4.78248,3.89965,4.1223,-1.28055,0.902128,2.44014 10179.4,10165.9,9270.91,0.383028,0.372248,2.91142,5.26445,-4.52355,-0.481389,-1.47582,-0.0802922,4.09074,-3.4789,-1.84054,-0.641665,1.60157,2.15213,-0.406849,1.24052,1.05589,7.69175,-4.79723,-3.42058,1.48542,-2.69221,-0.604027,-2.8823,-1.41943,-0.386671,1.59434,1.71786 10180.9,10180.3,9268.76,-7.39108,-4.07938,1.96913,5.84801,-1.99672,13.1344,-8.45676,2.45664,8.74322,0.00440195,-3.70354,-4.02376,5.09873,7.07674,-2.94009,-6.27334,-2.18896,9.06615,-15.5002,-6.518,-12.659,-9.2251,-8.78964,-16.0646,-15.2285,-1.36974,7.28841,2.96689 
nipype-1.7.0/nipype/testing/data/fmri_timeseries_nolabels.csv000066400000000000000000002022431413403311400244740ustar00rootroot0000000000000010125.9,10112.8,9219.5,-7.39443,-8.74936,7.28395,13.7953,32.2328,32.4809,18.958,-12.2383,-6.86466,-23.0912,-16.425,-5.70842,11.2467,-1.58574,-4.53717,-17.3842,0.912601,13.0428,2.44622,2.08875,-8.74373,-9.47217,-6.87574,-8.11158,-14.54,0.414787,6.04424,0.540389 10136.8,10115.1,9222.54,-0.120582,-1.94906,6.92247,4.75197,11.0735,0.972766,10.2285,0.717545,-1.04488,-7.64424,-2.10875,-2.44368,1.52535,-1.14131,-1.72589,-1.1247,-0.993354,2.98318,1.29855,2.0688,1.00297,0.135373,-3.25325,-3.12065,0.913296,-1.7868,1.58829,-0.735248 10148,10122.2,9228.62,4.24336,-0.689111,5.12782,0.132862,-6.64526,-14.7952,5.19361,3.68198,2.77598,-0.691866,1.07559,1.71444,-1.30287,-2.75746,1.74208,4.75944,1.80799,-0.064464,2.37174,1.09905,3.5756,2.98064,-0.238711,0.822007,5.07188,-0.864496,-0.208741,-1.31367 10156.6,10132.2,9236.11,-0.047434,-1.79438,-0.767925,-3.78683,-2.46365,-12.9433,2.00586,-0.48292,1.16216,0.113706,-0.639879,-0.0445654,-2.82995,-2.22008,1.46544,3.70217,2.84476,-3.32792,6.701,0.982599,0.145487,0.0501163,-1.16747,-0.630382,-0.0550437,-0.0563951,0.0449386,-0.715988 10162.9,10141.8,9243.46,-0.3687,0.640608,-2.93969,-0.37466,-5.42813,-8.55527,-4.70566,-3.62351,-3.94857,0.847112,0.357187,1.39279,-3.07124,0.779726,5.12671,3.62277,2.86265,3.44378,5.49842,0.895482,-2.1777,0.14728,-0.491475,-0.0257423,-0.32504,2.28464,-0.610659,2.01955 10168.7,10149.5,9249.62,-0.272231,3.00751,-2.20783,-5.50238,-1.65733,-2.39574,-6.82249,-1.5591,-5.38806,-0.315138,2.41171,-0.227563,-0.306796,1.26618,4.45885,3.55662,3.14737,-0.0497907,2.76691,1.04757,-2.50276,3.25334,1.90194,3.54754,3.2308,0.393197,0.115407,1.88919 
10175.3,10155.8,9253.09,0.271133,3.11725,-1.24188,-5.32432,6.94595,5.40219,2.63329,1.77742,-0.434798,3.20784,3.1926,-2.12653,1.4207,-0.162939,1.57116,1.20026,2.14004,-4.36978,-0.074248,0.344989,-2.79157,3.57441,2.795,6.81971,4.61981,-3.15395,-0.556388,-0.951462 10181,10160.9,9253.62,-1.52186,-1.02665,-1.31765,-8.89055,1.45638,-6.40533,-8.20284,3.42071,6.34151,7.32703,2.81444,-5.56924,-2.07761,-2.82472,1.75969,1.56549,2.59032,-4.99642,-0.861721,0.661704,1.27294,4.24609,5.72265,7.93181,6.46356,-4.54558,-2.93302,-2.55741 10182,10163.1,9253.53,-4.12759,-5.01517,-1.383,-11.7032,7.03273,-0.354258,-4.14846,2.56836,5.49077,2.70724,-0.00938943,-7.91268,-3.33257,-3.77932,-2.70035,-1.95288,1.51899,-10.5021,0.604386,1.13765,2.8031,0.719838,5.10986,5.4321,3.01561,-5.05514,-2.51591,-2.29453 10178.9,10161.7,9255.33,-2.09727,-3.23639,-0.971464,-6.47564,-1.86208,1.47429,-8.69004,2.23012,2.64935,4.20852,-0.00802028,-4.11236,-1.54808,-1.73414,-2.21966,-2.31888,0.521142,-4.49634,-1.66003,1.37105,1.47741,-1.17943,3.52554,2.31201,0.381259,-1.24137,-0.930002,-0.860505 10176.3,10158.2,9258.8,-2.87976,-1.16821,-1.15587,-7.36873,-2.70663,3.69409,-6.23946,3.17083,3.67683,5.95472,2.6739,-2.5798,1.61294,2.31642,-4.31408,-1.6647,-0.422612,-6.13843,-0.39141,1.92345,-2.82275,-0.742784,1.68164,-0.706688,-1.87652,0.172975,1.51911,1.04727 10176.2,10155.4,9261.93,-1.79655,0.511159,-2.91648,-1.19976,-6.01265,2.43062,-4.91165,1.64787,2.485,6.04132,2.79139,1.36683,2.36631,4.70105,-3.09068,-0.875835,-2.73203,-1.04036,0.0279962,0.57264,-4.70596,0.399049,0.109101,0.540718,-2.52779,1.90878,1.47212,2.48712 10177,10154.3,9263.36,-2.06935,1.47151,-1.59814,1.1621,-8.21806,2.74994,-4.8666,1.6535,2.86737,3.56179,1.87379,3.98852,2.20191,7.00018,-2.12026,-0.322149,-0.459427,1.99009,-0.386875,-1.65524,-2.88602,2.5405,3.09752,5.52644,1.72241,3.28467,2.06659,4.48929 
10176.7,10153.6,9262.97,-2.47996,0.0736981,-1.18826,-1.40068,-2.38119,-1.33094,-3.87199,0.498621,1.31667,-0.952908,0.481976,0.0885501,1.11339,4.67043,-2.37383,-2.32579,0.991108,-0.25346,2.41941,-1.44295,0.0394728,1.67752,2.73018,4.10445,2.29859,0.993454,2.7469,3.39394 10174.9,10153,9261.77,-0.957748,-0.455644,0.885525,1.7746,0.0437147,0.878291,0.0855234,-0.572903,1.39546,0.00119098,1.69176,-1.96049,0.156938,2.84845,-1.18488,-2.65197,1.35428,1.98606,1.65427,-0.643756,-1.03602,-0.0406435,-0.236011,-0.961959,1.28125,-0.464305,1.75539,1.84618 10173.4,10153.5,9261.3,-0.583682,-0.792331,1.36077,0.644185,-3.55594,-0.618864,-4.88099,-0.136266,1.51362,2.73872,3.65897,-2.63062,0.416981,0.735765,0.533665,-0.326252,1.0146,2.83848,2.16063,2.30307,-2.01136,0.638055,-0.22921,-3.19692,0.947596,-0.379132,0.678065,0.747812 10174.5,10155.7,9262.24,-0.685336,0.856591,-2.63545,-0.959601,3.25442,0.791955,-2.20612,0.263046,-1.34292,4.47114,2.99912,-2.56858,-0.21931,-1.56389,-0.808263,0.311028,-2.34261,-0.965718,1.98615,3.50723,-1.41951,-0.258476,-1.16227,-1.73014,0.372641,-0.118946,-0.422557,-1.3986 10179.6,10157.8,9264.01,2.59538,3.68921,-1.9033,3.99249,0.109215,-1.86778,-4.51336,0.591929,-1.29086,1.52475,1.01934,0.773735,0.0652847,-3.00075,1.79923,2.1369,-2.11635,3.17035,-1.87907,2.19309,0.880052,-0.480886,-1.94369,-0.204693,1.63785,1.43004,-2.081,-3.24652 10186.9,10157.6,9265.4,2.10402,4.02633,0.884264,0.1708,-3.27208,-4.9215,-1.0364,1.60796,1.70888,-1.43476,1.10519,1.26841,0.0627916,-2.97727,1.13683,2.82663,-0.301705,-0.592683,-3.81587,-0.70989,1.60855,0.103857,-2.48043,-1.22737,-0.312858,1.31617,-1.91269,-3.98886 10192.2,10155.4,9265.29,1.6824,4.26755,1.57687,1.43194,-5.98808,-2.25097,0.153789,0.168572,0.879003,1.68604,0.75956,3.65922,-0.869793,-2.49312,0.497574,2.41553,-1.34226,-0.127659,-3.59295,-1.56547,0.88849,-0.785242,-4.24845,-5.15572,-4.81836,2.77035,-1.44493,-3.44434 
10193.6,10153.7,9263.38,1.6491,4.80854,1.08823,5.10222,-5.26833,5.52263,-0.997094,-0.959485,-1.52356,6.15147,0.897033,7.60472,-1.50848,-0.576994,0.845199,3.25263,-2.21353,2.36454,-2.11918,-0.480371,1.405,-1.24949,-1.88424,-5.50221,-4.39822,4.6832,-0.575266,-0.350337 10193.7,10153.5,9260.14,0.371243,3.4575,-0.922956,2.86612,3.70316,4.4652,-2.35097,-2.08567,-4.55866,2.05406,0.20181,5.48777,-0.851734,-0.932792,0.852325,2.66059,-2.76402,-0.836483,3.32512,2.58318,3.54953,-1.82575,1.03107,-3.58566,-4.1055,2.71087,0.64122,1.16036 10193.4,10154.1,9256.45,0.655998,2.95689,-0.961572,2.95967,6.90968,-0.0847335,-1.13659,-2.64581,-3.78971,-2.43015,-0.722449,3.08777,-0.234356,-0.603156,1.30068,1.14368,-2.23215,0.241084,3.91588,3.38796,4.07024,-1.08082,1.15617,-0.375163,-2.54369,1.29418,0.795869,1.31402 10190.3,10152.8,9253.2,2.59279,1.93007,1.93861,4.82647,-1.84288,-5.84018,-7.03235,-2.16958,-0.8999,-4.4747,-1.99497,2.40008,0.0349671,-0.825783,2.00993,-0.184404,-0.576706,6.30193,1.43455,3.63536,2.34484,0.148851,-1.22127,-0.718508,-0.716753,1.50537,0.412978,0.73252 10185.2,10148.2,9250.73,1.88291,-0.127643,2.41457,0.38457,3.28565,2.40364,1.07674,-0.352091,-0.192694,-2.80281,-2.45121,-0.746935,0.454781,-0.345492,-2.38393,-2.35152,-0.468918,-0.28004,0.207449,2.6636,-1.39254,-2.09536,-4.44811,-4.48824,-2.93117,-0.770421,1.19,0.219788 10183,10142.2,9248.93,3.78484,0.701338,-0.71552,3.48407,0.454755,4.3743,3.68099,-0.668556,-3.42636,5.52772,-1.23863,-0.405148,0.665698,1.06479,-0.0251586,-0.48849,-0.847741,1.4814,-5.36764,-0.405219,-1.51485,-3.88226,-5.12764,-5.33767,-4.3365,-1.173,0.417418,0.415356 10185.4,10138.4,9247.93,3.11727,0.196163,-2.018,0.721283,-2.5075,-1.06349,0.331823,-1.2182,-4.01712,4.78444,0.452166,-2.16432,0.55673,1.61447,1.16718,1.44415,0.569846,-0.812131,-8.14324,-2.91296,2.43154,-1.45218,-0.730675,-1.0947,-2.25658,-3.52675,-0.361214,1.09266 
10188,10139,9248.05,1.52249,-1.16117,-2.4591,-2.41492,-0.35832,-7.48161,-0.0490082,-2.1421,-3.52013,0.903896,-0.958215,-5.8036,-2.36788,-0.368615,-1.88998,-1.40466,-1.28791,-4.79995,-5.58563,-3.57656,4.13739,-0.274441,1.53352,2.93946,-1.96753,-6.76034,-1.87752,-0.324793 10186.8,10142.9,9249.23,2.29541,-0.414867,0.263844,-2.42527,-9.23597,-12.7958,-5.40665,-1.3296,-0.255947,1.05195,-3.09731,-3.83996,-4.40177,-0.0123634,-1.79533,-2.22933,-1.59891,-1.58539,-4.29444,-3.24283,2.73497,0.939395,2.25632,3.98042,0.672842,-4.87272,-3.0871,0.140664 10183.8,10146.3,9250.93,1.04007,-0.107056,-0.719832,-5.17314,-6.41206,-13.4527,-3.51115,-1.82372,-1.0661,0.164654,-4.87432,-3.16371,-3.16216,0.547311,-2.31938,-3.32366,-2.59406,-3.07878,1.07584,0.135595,-0.15385,-0.198986,-1.76614,-0.364142,-1.44816,-3.17832,-0.666637,0.539005 10182.5,10148.1,9252.57,1.58315,0.552138,-2.38854,1.84879,-2.25441,-6.8381,0.208721,-2.73312,-3.19332,-2.49192,-4.21087,0.445019,0.0651566,2.67403,-0.780414,-2.43461,-3.10543,1.48742,-0.123359,0.0321366,-2.00728,-1.30717,-5.02137,-5.05394,-3.39985,-0.233706,2.10556,1.51466 10182.7,10149.6,9253.33,0.671616,-1.8801,-5.19861,1.6691,-0.386439,-6.73637,0.390118,-1.36276,-2.8229,-3.74619,-1.53148,0.15594,0.934737,1.96014,-1.35363,-0.924511,-3.00858,0.653744,-1.84706,-3.59509,-0.247233,0.962108,-1.40552,-3.28119,-2.22432,0.0626129,2.48273,0.969888 10182.9,10150.9,9252.01,0.0166707,-2.52456,-5.48285,2.26653,-2.03587,-6.50283,-1.00325,0.264499,-1.46362,-0.822672,-1.11829,0.403605,-0.734484,-0.382999,-0.186567,1.24812,-2.13095,1.80897,-2.82131,-6.15356,2.54337,2.39696,2.51379,2.41699,0.307725,-0.195503,-0.252349,-0.890546 10182.1,10151,9248.33,-1.21698,-1.52567,-2.334,0.102378,3.74418,-1.36756,3.51501,1.50357,-1.80774,-0.855037,-2.71284,0.0746735,-1.2904,-2.37263,-0.326812,1.37779,0.0811662,-2.04277,0.452769,-4.37491,4.60025,0.785458,0.944597,2.57121,-0.443829,-1.9031,-1.78376,-2.25217 
10180.2,10149.4,9243.85,-0.498632,0.815261,-1.05027,1.32586,2.65892,-5.17029,-0.588453,1.63481,-3.33979,4.4087,-1.26981,2.01576,-3.03953,-3.66687,1.33091,1.62961,0.568999,0.53543,0.477935,-1.78405,3.91722,-1.12653,-3.07327,-2.27103,-2.21119,-0.0469714,-3.05949,-3.83303 10176.1,10146.3,9240.54,-0.464849,1.25223,-1.14736,-0.645201,4.96922,-0.805424,1.85313,1.43677,-1.45072,6.22509,1.54511,2.89442,-3.56094,-4.35854,-0.476689,0.39343,-0.929162,-1.07774,0.941846,-0.57756,0.363373,-1.13491,-1.30865,-3.06369,-1.8739,2.47973,-3.19611,-5.38414 10169.3,10142.4,9238.91,2.28739,1.91951,-0.759834,1.17008,-1.10807,0.137649,-1.76481,-0.427729,-0.592675,2.50623,0.607717,4.10404,-2.20382,-5.11375,1.80008,0.383348,-3.40396,4.33491,0.605228,-0.0871236,0.185566,0.480246,2.74078,1.48145,2.07534,4.96863,-2.65852,-5.78272 10162.1,10139,9238.14,2.03262,2.32633,0.46709,-2.26524,5.80967,5.85587,5.67759,0.185696,-0.246666,-0.787877,-0.201738,0.61348,-0.542043,-3.51173,0.345287,-0.426571,-4.01566,0.315299,2.10005,-0.391753,2.39343,1.28396,3,4.99164,5.3145,2.31592,0.0224444,-4.14279 10158.4,10136.9,9237.31,2.77556,2.83113,1.37245,1.19159,2.19923,-2.0116,3.1913,1.03754,-0.929092,0.870894,1.00256,-0.624392,-0.561338,-2.99529,2.23674,0.823539,-1.63024,3.75817,0.298891,-1.18515,4.54738,1.25951,1.91277,3.57793,5.44217,0.785618,0.025315,-3.27161 10158.5,10135.5,9236.37,0.0672571,0.761886,2.35427,-0.889999,6.73976,-1.98269,8.45302,1.1398,0.0604089,-1.15193,1.32222,-2.47069,0.131408,-3.48238,-0.669944,0.753279,3.07189,-2.04262,0.174304,-2.32107,2.83224,0.708328,3.23848,0.984911,2.384,-1.28385,-0.548071,-3.32946 10160.6,10134.8,9236.46,-0.783525,0.239203,0.00548465,1.88108,6.83171,-2.89703,7.27976,-2.71585,-1.47417,2.12383,-1.04536,-1.14095,0.145875,-4.3962,-0.139564,0.781551,3.40043,-0.28834,-0.343608,-2.36391,0.0938093,-0.36295,1.0276,-0.578692,-0.619797,-0.489157,-1.92106,-4.163 
10166.1,10135,9239.02,0.124276,1.29463,-1.44975,3.21172,2.53479,-3.38317,-0.20102,-4.72755,-2.14129,5.53743,-1.24849,0.994366,0.436372,-3.09635,2.19121,1.13794,1.52365,3.0586,0.622146,-0.699363,0.103461,0.316277,-1.73095,-0.195395,0.490618,1.44514,-2.50878,-3.62472 10175.6,10136.9,9243.9,1.67228,1.70099,-0.125799,2.04051,6.74509,2.05118,7.82124,-3.08565,-1.70842,3.37127,-0.160655,1.32998,0.57087,-1.46351,1.80831,-0.585194,-0.267853,0.719624,2.12333,-0.931791,2.61407,0.519467,-1.78038,1.70819,2.66646,1.47407,-2.48388,-2.6294 10184.4,10140.5,9249.09,4.05746,1.49391,3.1491,4.74869,1.42089,-7.65297,4.6083,-1.50292,-0.681543,0.792377,-1.54194,2.19467,-1.449,-2.54459,5.38937,-0.0662613,0.683022,6.46847,-1.151,-2.09676,5.40097,0.0884146,-0.584039,0.411805,2.87021,2.70096,-3.69024,-2.72328 10185.2,10143.8,9252.71,2.20708,-1.9117,6.2705,-1.38994,9.88462,0.984595,14.8745,1.09177,3.01497,-6.59006,-3.06879,0.864155,-0.352553,-2.42934,1.6214,-0.899998,2.90809,-2.62154,-0.748965,-1.78716,3.1828,-0.76616,1.51574,-1.80336,0.759499,1.08543,-1.48814,-0.830864 10176.5,10145.2,9254.8,3.08758,-1.24415,2.30133,1.5123,4.9996,-2.25743,5.71269,0.326257,0.862459,-5.32366,-2.15784,1.98295,-0.769376,-3.24456,1.73394,-1.18022,0.303592,1.19388,-1.18318,1.1848,-0.484859,-3.12715,-2.31674,-4.16244,-1.41399,2.32149,-1.0187,-1.70219 10164.6,10145.4,9256.92,1.59078,-1.06701,-0.557541,-2.88977,3.22953,-0.245042,-0.474481,0.0498212,-1.16809,-8.33134,-0.306573,0.38113,0.242976,-2.39828,-1.29092,-1.68013,-0.127576,-1.94114,1.03024,1.7825,-1.44807,-2.86352,-4.13379,-1.78466,1.5241,1.16147,-0.513496,-2.30027 10156.4,10145.9,9260.21,0.0333157,-1.40254,-1.63643,-2.63202,2.15792,2.8366,-1.32406,-2.25364,-4.61227,-7.74587,-1.005,0.107792,-0.131513,-2.0428,-1.28031,-1.65736,-0.0589992,-0.767749,0.0451012,-1.23948,0.334266,-2.05544,-5.74107,1.40617,2.47259,0.129519,-1.22605,-3.50154 
10152.5,10145.2,9264.25,-2.23854,-3.34598,0.871046,-4.48776,-5.12246,-0.367558,-7.49548,-3.04105,-2.99035,-3.84367,-2.67766,1.19195,0.695189,-1.99211,2.38266,0.800284,2.92667,1.82052,-0.796218,-1.82753,3.43662,1.60186,-2.49788,2.02216,2.59346,0.975508,-0.397427,-2.78437 10148.6,10141.1,9267.56,-4.64613,-5.4569,3.80281,-6.22039,0.554038,5.00519,-0.395733,-3.04225,0.570141,-6.95862,-4.49105,-0.00732036,3.78285,-2.09066,1.46914,-0.873643,3.95228,-2.08532,2.8568,0.749314,1.78963,1.02579,-0.808831,-1.60113,-1.17483,0.544949,1.95805,-1.27827 10142.4,10134.6,9268.73,-4.02228,-5.3818,4.39201,-6.57399,-2.68308,-0.146626,-0.297909,-1.28233,3.72363,-10.5635,-3.46562,-0.498293,3.92457,-1.10422,0.725311,-0.888612,3.1725,-1.82837,4.64182,1.32637,-0.56378,0.781271,3.29557,-0.557202,-0.712584,0.587691,2.76212,1.05325 10137.8,10128,9266.83,-2.98689,-3.62614,2.49614,-3.78405,5.33483,-3.24499,-1.4797,-1.49474,0.75769,-13.0722,-3.57543,-1.73535,1.13307,-2.81826,-2.67056,-2.75063,-0.407379,-1.38965,7.67619,2.2374,-2.93415,-2.1994,0.956463,-2.25511,-4.42128,-0.889014,2.30781,-0.144069 10139.6,10121.2,9261.84,-1.19244,-2.09691,-1.17019,-2.92359,1.84257,-9.64131,-8.2266,-2.48032,-2.29368,-7.41116,-3.60172,0.404837,-2.31741,-3.52505,-1.14341,-1.1367,-2.22469,2.93998,5.91064,0.841518,-1.68308,-1.06298,-0.398387,-1.68239,-3.53445,0.38234,1.02165,-0.403129 10146.2,10113.8,9255.3,-3.35595,-3.34535,-1.74811,-10.4556,3.60927,-0.776329,-3.08604,-1.29687,0.835023,-5.76979,-1.7646,-2.22816,-1.31439,-0.382083,-1.73312,-0.792276,0.206848,-4.1992,4.29806,-0.830575,-1.71405,1.40452,2.00247,0.106559,-0.768805,-1.08451,1.11784,1.22578 10152.4,10107.8,9249.87,-2.49869,-3.87311,-1.98238,-6.90342,-1.23671,2.90852,2.97754,-0.581043,2.81778,-2.71728,-1.21684,-5.07044,0.497485,2.01224,-0.365556,-1.64542,1.17956,-3.76085,-0.573467,-2.58111,-2.12663,0.378165,4.18795,1.24581,-1.36196,-2.87649,0.482267,1.63454 
10154.8,10107.2,9247.27,-4.01788,-5.39388,-1.72161,-10.3153,-0.251037,-1.57831,1.61553,1.18147,5.7765,-0.599766,-1.22598,-10.0294,0.895145,2.02015,-4.45992,-2.58818,2.98391,-9.45103,-1.41902,-1.29446,-0.55725,-0.180421,6.94249,-0.594659,-3.53394,-6.50742,1.38112,1.51458 10153,10112.2,9246.76,-3.24249,-5.01072,-2.02956,-7.46567,0.0264794,-1.5224,-3.31193,1.53111,5.32332,2.5335,0.40251,-7.05633,-0.711568,2.89381,-5.39998,-1.36446,2.04786,-7.02942,-4.53297,-0.88262,-0.357391,0.595822,6.5409,-2.84395,-2.64994,-5.7378,1.39939,2.97985 10148.7,10119,9246.16,-3.96002,-4.42756,-3.26432,-8.69557,4.03628,0.616301,-3.92147,2.76458,1.652,2.17356,4.22927,-4.5247,-2.33417,3.89508,-5.29918,-0.309883,-0.288513,-8.36711,-3.09529,-0.126421,-1.8539,2.38545,3.61409,-1.26649,0.429596,-4.19612,1.45711,3.95651 10145,10125.2,9244.17,-1.75695,-0.511195,-1.73883,-3.34742,-1.26592,5.24499,-3.03549,2.78645,-2.1334,0.220919,5.88292,0.160927,-1.7455,5.37331,-1.59599,1.91312,-0.631146,-3.16886,-2.94994,0.34822,-3.01289,2.84951,0.356135,3.47859,4.18276,-0.12287,0.984563,3.64398 10143.1,10130.2,9241.27,-1.71615,1.12867,1.04805,-6.57347,2.41341,16.2593,7.00371,0.924589,-2.71609,-6.2656,3.57183,0.37743,1.96421,5.66573,-2.3041,2.26799,0.668846,-8.32571,2.30148,2.66333,-1.75615,2.71555,1.44408,6.00224,4.85886,0.685304,3.03234,2.82015 10140.7,10134.4,9239.05,-1.25992,2.46902,-0.556969,-2.76672,5.45596,12.4649,8.36959,-2.49709,-3.8708,-1.40646,1.38854,1.37064,2.12007,3.84209,0.459629,2.15086,-1.24194,-4.15365,4.52043,5.4809,0.876317,0.656659,-1.01116,2.09458,1.65028,2.77599,3.21635,0.381243 10133.6,10137.8,9238.32,-2.22442,1.37094,-0.787327,-1.05469,3.55443,5.14715,-0.0509983,-0.0905216,0.72894,3.96149,2.38061,1.75467,3.09083,4.18358,2.79613,3.29833,0.325666,-0.671704,6.07566,7.72379,3.13564,0.655668,-2.59152,-1.76199,1.58102,4.45884,3.34631,0.480564 
10121.1,10140.7,9238.2,-2.17367,-0.866588,-2.79273,0.692199,10.1863,9.97874,6.04483,2.66482,1.76948,2.61332,1.9281,-1.1243,5.03132,3.85731,-0.443337,0.284932,-0.868815,-3.31091,8.51065,6.49177,2.23459,-1.67042,-3.77735,-2.781,-0.902713,1.50205,4.04064,0.197185 10110.8,10144,9237.47,0.303664,0.966366,-2.65365,4.69141,3.98147,5.09796,4.57488,3.26927,0.562439,5.41174,1.92471,-1.15766,3.6349,2.42314,-0.0874924,-0.0560302,-1.22366,1.9914,3.44357,1.69106,1.98031,-1.32375,-0.576816,-1.03349,0.269332,-0.300454,3.28264,-0.458562 10110.3,10147.7,9235.48,1.28867,0.940385,2.1165,-0.581377,-0.643187,-2.16313,1.69237,2.47912,1.37859,3.32286,1.26412,-0.720553,2.36863,-1.25903,0.0706914,0.944374,2.2859,0.229574,1.5842,-0.12766,4.43122,1.34327,3.34673,-0.404948,2.87655,-1.67866,3.04869,-0.25307 10116.7,10150.7,9232.33,0.394714,-0.833445,4.94793,-6.11826,9.22151,2.99358,11.1041,1.15853,2.93899,0.397365,0.0221406,-0.0976144,-1.13452,-3.42557,-3.72862,0.476803,3.69054,-8.12164,2.48493,0.363106,3.87676,0.504363,0.972674,-1.44388,2.15926,-0.828986,1.75931,-0.549928 10121.4,10152.8,9229.14,1.29508,-0.757006,3.12597,-1.6729,7.62364,-0.936804,6.48918,-1.03742,1.86227,-0.262351,-0.75051,2.31301,-4.8422,-4.5034,-2.66476,0.578808,1.27532,-2.04282,3.45288,3.01897,0.564668,-1.21876,-3.06331,-2.70583,0.257935,3.52846,-1.56111,-1.5308 10121.6,10152.4,9226.86,0.677648,0.378414,1.31475,-2.61018,4.91454,0.37514,2.86121,-0.193973,1.93324,-4.63591,1.10695,3.14457,-2.96694,-2.19304,-2.99025,0.50097,0.165722,-0.200595,6.85438,4.63234,-2.47705,0.342532,-1.30419,-0.141339,1.63084,4.32707,-1.19328,0.76139 10120.5,10149.2,9225.49,0.499478,1.88224,-2.14427,-2.77288,10.6927,1.71766,6.49787,0.43981,0.0705592,-5.13201,2.57263,1.48076,-1.20267,-0.591255,-4.74193,-1.79266,-1.46188,-3.42451,8.04316,3.54243,-2.30088,0.0710442,-2.83238,0.653942,0.240506,0.904871,0.430945,1.6283 
10121.2,10144.8,9224.89,1.35965,2.80608,-1.94166,1.75583,0.26227,-8.26437,0.567312,1.6259,1.60009,0.0627174,2.62631,2.65738,-1.31444,1.36503,-0.138702,-0.303116,1.07964,0.805711,0.6712,-0.0379901,0.596301,1.49046,-2.9437,-0.0854658,1.7116,1.14138,0.19577,2.11315 10121.7,10140,9224.64,-0.625981,1.46152,0.571473,-0.708952,-3.97306,-7.60183,3.54876,2.52756,3.43643,-3.37318,1.25185,1.95327,-0.430742,1.99167,1.38528,0.439469,3.35733,-3.21518,-3.33649,-3.33716,1.63613,2.87364,0.216347,-1.19264,2.34646,1.38095,0.250252,2.26893 10117.5,10135.7,9223.59,-0.644241,3.50756,1.18011,1.32346,-4.09529,-1.15572,8.91836,0.864807,0.810206,-4.21922,0.85698,1.54667,-0.984211,1.49262,0.424346,0.272079,0.55043,-3.11065,-4.92549,-5.21789,0.616593,0.933381,0.453042,-0.907799,0.816878,0.888407,-1.07882,0.897744 10109,10134,9221.44,1.24811,3.97674,3.11247,-1.16572,-9.20759,1.26864,10.07,0.861166,0.629341,-5.07074,1.84156,0.554677,0.501606,2.3508,-1.99158,1.42546,-0.0624237,-4.75601,-4.11731,-5.27973,3.12042,0.927954,2.01431,1.91643,2.26937,-2.42322,-1.85499,2.11246 10103,10135.6,9219.87,2.2046,4.10281,1.87105,-2.44462,-1.81059,2.73657,16.517,1.49188,0.862687,-1.50652,2.91423,-2.27191,-0.311967,3.16828,-6.05317,-0.647296,-0.600809,-9.86797,-3.317,-4.05579,3.51099,-1.77799,-1.17227,0.17711,-2.12588,-5.86398,-2.08211,1.43944 10103.9,10138.7,9220.3,3.77174,5.49059,1.2637,1.03751,-12.6254,-6.24364,0.90728,3.65224,3.71822,2.59825,4.31988,1.86088,-2.62582,4.43061,-1.00461,2.10803,1.47555,-3.28777,-8.18549,-4.31695,2.95113,-1.34785,0.676274,-1.38936,-3.04336,-1.37001,-2.35773,2.00922 10108.6,10140.8,9221.82,-0.70593,3.90046,-1.14247,-3.0764,-1.47295,-1.10809,-0.510284,3.79285,2.60078,-1.28697,3.77566,2.32766,-3.54475,2.99719,-1.20306,1.33262,-0.719923,-9.06449,-7.33119,-4.80493,-0.721145,-2.4024,1.79362,-1.97223,-5.04385,0.0875954,-1.73778,0.950888 
10113.1,10142.1,9223.55,-1.06377,0.843971,-1.44889,-5.32939,2.69029,-3.83385,-5.63119,0.535717,-1.61039,-5.59267,1.26514,2.05707,-3.31026,-0.958826,1.33732,1.46551,-3.13585,-9.66605,-6.00234,-4.35532,-0.26599,-0.831562,2.98878,0.128679,-2.54674,-0.278737,-3.58409,-1.324 10120.7,10142.9,9227.01,3.56995,1.04759,3.75113,-1.7421,5.12807,3.1454,2.38504,-1.62768,-2.93793,-5.71266,-0.530001,2.84448,-2.04436,-1.31251,2.17243,2.11298,-0.867238,-7.66197,-6.87331,-3.32769,-0.373459,-0.116178,2.03689,0.379397,-0.00605166,-0.182103,-4.1657,-1.22794 10135.1,10142.1,9232.63,4.13322,3.14571,5.42112,-9.50857,6.61076,-1.5265,-1.3563,-0.229734,-0.953633,-2.39287,0.0907423,-2.25912,-2.95494,-0.622513,-0.878638,3.11006,2.20909,-12.7591,-4.65267,-0.652931,-0.508727,-0.484787,-1.43884,-3.89903,-1.68783,-1.20607,-1.47415,-0.30987 10150.6,10139.9,9237.26,7.08686,7.1115,3.05908,-7.31514,-2.75139,-6.15754,-6.75994,1.34201,0.583247,1.72791,0.0586144,-1.05549,-2.23348,1.35232,0.957745,3.9225,0.27845,-7.28043,-8.71747,-3.21629,1.12263,-1.08286,-3.72117,-4.10901,-0.817087,-0.319549,-0.171801,1.86899 10161.3,10137.9,9238.2,5.45348,5.872,0.0360833,-8.71486,1.68904,-1.57501,-9.84544,2.70784,2.39605,-1.45535,-0.548901,-2.93743,2.31592,2.21738,-0.0678836,1.75621,-1.90485,-7.83172,-5.34721,-0.902631,2.89369,0.938874,1.08004,0.946796,3.39736,-3.2386,1.23533,3.43628 10168.7,10135,9236.89,1.9988,3.16081,-0.959961,-1.65775,15.8147,12.2058,-6.43511,1.69639,2.59198,-2.06327,-0.47323,-4.35241,3.77438,3.79233,-2.16153,-2.08622,-2.56136,-3.89096,-0.736348,5.49778,-0.475583,0.770127,3.05002,3.17719,3.81221,-4.99556,1.59718,3.01185 10178.3,10131.2,9237.28,0.818385,-0.233269,1.46873,6.63122,10.9706,17.5879,-3.54675,0.677416,3.72244,0.655626,-0.201865,-1.16835,1.57109,5.42876,-0.444523,-1.12764,-0.256929,5.62565,-1.99386,6.4084,-2.47406,1.18593,3.2834,3.0293,3.51573,-2.53776,0.959038,3.23253 
10193.3,10130.2,9242.16,-2.48525,-2.35837,2.98987,5.98816,11.4719,15.9039,-4.84232,-0.825315,2.54659,1.43064,-0.659643,-2.96556,0.571285,2.41784,-2.00371,-0.757574,1.41844,6.37057,1.42823,7.71148,-4.93994,-1.54988,-0.232174,-1.34349,-1.26249,-2.05601,1.26179,0.464125 10210.2,10133.3,9250.5,-0.302459,-1.69801,0.843368,2.30597,6.15326,11.0157,-5.9274,-1.05244,-1.68469,-0.278629,-0.694935,-0.891837,1.23651,-0.21345,-0.305015,-0.0987808,0.160233,4.91775,0.166271,3.92353,-3.88399,-2.55526,0.198425,-0.923912,-1.86728,-0.552523,1.22445,1.15572 10221,10137.3,9258.6,-1.56339,-0.256664,0.840544,-1.61826,11.0061,14.4706,-2.59098,0.449882,-1.65171,-1.89163,-1.35949,-1.40198,3.60618,0.270121,-1.02351,-1.1912,0.778059,-0.110922,0.867721,2.27546,-5.20223,-2.14642,1.17716,-1.36266,-2.51971,-1.10085,2.42789,2.32548 10222.9,10141.6,9264.61,-4.74868,-0.212232,1.05283,-1.29221,10.744,4.75459,-2.81401,0.644295,0.850172,0.179994,-3.01777,-4.30435,2.71079,-1.12735,-1.29174,-2.07496,1.34575,1.0376,2.5823,1.95702,-4.5778,-1.28586,-0.494008,-4.39926,-5.46478,-2.40477,1.70545,-0.546783 10222.5,10148.7,9269.02,-3.49502,-0.678579,-0.213247,8.06515,8.4472,0.736921,12.8231,-0.680516,1.09355,1.44143,-3.62765,-2.08929,0.194595,-2.35671,-0.392866,-2.86869,-0.655593,6.76095,0.52286,-1.94996,-0.69629,-1.94695,-3.05311,-3.36287,-5.8798,-2.04553,-0.962602,-2.08692 10226.3,10155.2,9271.48,-1.96969,-0.131236,-7.34816,10.3469,1.43629,-18.1274,6.28789,-1.94889,-4.21799,9.10578,-0.96868,-0.513386,-5.07894,-4.75252,3.07715,-1.21549,-4.62974,12.6049,-2.11208,-4.5134,4.07597,-2.26695,-5.31607,-0.080814,-4.75562,0.0499323,-2.60796,-2.05158 10230.1,10151.7,9270.27,-0.441668,1.99564,-2.24149,10.4542,-4.09391,-6.45561,-1.77752,0.712394,-1.02642,8.25875,2.54249,4.31177,-1.67116,1.28898,3.90167,2.27301,-0.292013,13.1856,-3.31394,-4.23242,0.509949,-0.582218,-1.55254,1.54596,0.383257,3.15094,0.659781,3.83919 
10224.9,10138.7,9266.49,4.67287,5.1299,-1.26323,13.4301,-10.2745,-9.49416,-12.2719,-1.18436,-2.87586,6.16837,2.83569,6.07774,-2.8315,2.00898,6.40272,2.01559,-1.86315,15.8694,-4.72684,-3.25468,-2.65905,-3.311,-6.24296,-4.21139,-3.70695,4.80612,0.395122,1.76566 10212.8,10131.4,9265.67,3.01888,4.86272,2.80549,9.41976,5.08199,16.7307,3.01517,-1.39232,-0.901598,-3.17761,2.70511,2.89126,0.206015,2.09237,1.79821,0.427067,-0.286912,4.97158,1.88506,1.52106,-4.78901,-3.10639,-5.19696,-1.88352,-1.17405,1.76068,1.66502,-0.462334 10205.3,10137.3,9271.29,5.0191,6.44861,-1.029,10.2232,1.46143,6.79866,-7.1328,-3.52906,-8.32347,-3.93806,2.03961,4.301,-3.73195,-3.92217,6.44854,2.90593,-2.49697,11.4551,-0.562561,1.57056,0.711111,-0.350636,-4.25263,3.76126,3.75639,3.70316,-1.79131,-3.47622 10205.7,10147.7,9278.59,5.83546,6.36501,-0.202118,7.16455,-12.9828,-12.4607,-27.3389,-3.33415,-9.60681,-6.26496,-0.539386,6.78879,-3.91681,-6.10831,9.8609,6.12423,0.502419,17.71,-2.72276,0.90307,5.89102,4.35576,1.47131,6.87862,9.08531,6.44279,-3.45175,-1.92878 10205.4,10153.7,9279.43,2.61204,3.79426,2.8599,4.2373,-6.30104,-6.55433,-17.9117,-2.30217,-4.33352,-8.56342,-2.54108,4.06241,-0.221565,-2.25183,3.87958,2.42384,1.7425,10.0636,-0.274803,1.38918,2.9688,2.49859,1.85002,3.57782,5.56749,4.25356,-1.57246,0.769565 10198.3,10155.2,9271.53,1.79363,-0.436721,3.46418,1.17919,-6.21503,-12.0337,-14.7144,-0.753172,-0.422946,-10.0673,-1.05729,0.16841,0.00393219,0.329848,3.06417,0.641188,1.13987,4.50086,-1.96838,-0.158451,2.22687,1.01485,-0.617827,-1.82684,0.837829,1.35672,-0.969077,2.83866 10187,10154.7,9258.9,0.357944,-3.85399,-0.403587,-0.905802,-6.94279,-16.6984,-17.7781,-0.22625,-1.87358,-4.80273,-0.208291,-3.41762,-1.38116,-0.435891,4.56144,1.47257,0.881539,4.31043,-2.35524,-0.63135,2.49929,2.73787,-0.3439,-0.967951,0.479767,-1.25236,-0.198644,2.70849 
10175.5,10150.8,9245.55,-2.22289,-4.64417,-1.57873,-3.37822,-3.35046,-9.88201,-14.3071,0.168661,-0.756661,-2.69992,-1.57269,-4.61371,-0.741804,-0.794809,1.95045,1.34471,1.90438,0.670421,-1.36383,-0.0207592,1.95603,4.44548,1.70081,0.896225,1.96219,-2.68814,1.37985,1.21966 10163.9,10144.5,9233.39,-1.0609,-3.6573,-1.22008,-1.66234,-8.72059,-9.8591,-9.71449,-0.237702,2.4907,-0.383432,-2.45784,-2.52105,-0.451308,-0.95008,0.101755,0.998499,0.0147502,0.763548,-2.08901,-0.286814,2.08671,3.24587,1.98374,-1.03823,1.41551,-1.64013,0.866956,-0.452541 10152.5,10140.9,9224.11,1.58528,-1.3177,-2.21666,-0.770113,-12.1162,-14.2306,-0.877621,-0.372338,1.62768,2.76293,-0.69447,0.389726,-2.24466,-0.492948,-1.07534,1.2119,-2.84085,1.62365,-4.58137,-3.47859,2.38127,-0.58689,-1.20067,-5.12188,-1.38938,0.191315,-1.00868,-0.231626 10144.9,10141,9218.45,2.9188,-0.174985,-4.58083,-6.94645,-12.0718,-23.1781,-6.27315,-0.364715,-3.24703,1.70145,0.993811,-0.598274,-3.56103,-0.759525,0.496704,2.46032,-1.89983,0.597576,-2.01394,-2.93857,4.73883,-0.682548,-1.34504,-3.70636,-1.23983,0.0550942,-2.01066,1.58053 10141.8,10139.7,9215.32,1.06474,0.421951,-5.29652,-9.2234,8.36446,-5.7284,0.960531,-0.909556,-4.90704,0.770291,1.54135,-5.62095,-2.20122,-1.09503,-2.35206,-0.974175,-1.0101,-7.23319,3.01594,0.768168,2.39478,-1.32615,-1.6404,1.53725,-1.51813,-3.97654,-1.7665,0.833795 10141.4,10134.3,9214.23,0.86273,1.35397,-0.657898,-4.72598,2.71892,1.93911,-8.71178,0.127278,0.812447,5.14689,3.34014,-5.47575,-0.124804,-2.70815,-0.541837,-0.600256,1.53834,-3.53843,0.0605411,2.43643,0.689316,0.936364,1.45495,3.58725,0.917646,-4.12549,-2.16127,-1.91164 10145.6,10128.8,9217.09,0.035273,1.26692,3.11502,-4.96307,-6.78084,1.02172,-8.79811,2.69846,4.94751,11.3598,6.51275,-2.0705,0.657905,-2.59061,-0.35795,1.18908,3.42851,-3.05799,-3.41004,0.806424,0.399374,2.92706,4.4301,0.273598,0.553543,-1.76552,-0.755718,-3.46001 
10157.5,10128.8,9225.31,0.248702,0.312336,2.57768,-4.36878,-7.1619,-0.049009,-3.2758,2.7151,1.99544,11.1247,7.80862,3.2311,1.05086,1.13953,0.117826,1.5885,2.6575,-2.74279,-2.82058,-0.206648,1.25493,1.71967,2.81266,-4.13773,-2.45207,2.50385,0.789243,-0.268176 10170.7,10133.1,9236.11,-2.23675,-0.885477,2.34602,-6.30375,3.19378,12.3402,5.26964,2.51006,1.86666,4.33237,6.63528,4.85198,3.48519,8.46812,-2.52066,-0.634166,3.57125,-6.40349,1.46869,0.818123,-1.68738,1.2743,1.91738,-0.951766,-0.403311,4.63843,3.18061,7.04436 10176.7,10136.2,9243.78,0.782244,0.338989,-0.179665,0.677035,-11.8864,-9.98092,-16.6014,-0.0876104,-1.39338,0.511794,2.05749,5.37285,2.64871,7.7119,4.8232,-1.23349,2.56586,8.98335,0.643413,1.73431,-0.63479,2.49537,-0.600719,2.26345,1.69812,6.71431,2.31721,8.10433 10176.8,10136.6,9245.84,-3.20567,1.13405,3.92668,-1.78597,-0.236073,-2.19382,-11.4115,3.08973,1.33702,-3.27145,0.727769,-0.100717,5.38921,8.19297,0.492232,-2.20151,5.25989,3.6589,4.08819,2.21554,-1.32513,3.54291,0.119275,3.23854,3.862,2.19948,5.28701,6.25834 10178.4,10137.4,9245.74,-5.53585,0.420645,5.85295,-4.47724,14.54,12.4497,8.36972,4.99424,2.57479,-4.3639,0.677018,-2.6813,6.67898,7.5884,-5.54187,-1.3688,4.05586,-6.15054,4.2909,-0.899213,-1.24567,1.90686,-0.469126,1.72139,5.00978,-1.65339,6.96518,3.71489 10184.8,10141.1,9247.89,-4.95644,-1.91401,3.7243,-7.95873,7.49028,6.40526,5.31843,3.53676,4.4376,-3.95261,0.746514,-2.92295,5.17495,5.09822,-5.56387,2.13589,1.74219,-7.51099,1.13636,-2.24892,-0.712168,1.40767,0.401594,-0.663717,6.22808,-1.51586,5.59537,1.86444 10195.1,10147.9,9253.27,-3.98,-3.06823,-2.05534,-6.10099,3.83685,4.55708,3.92119,0.928846,2.49159,0.0763172,1.14792,-2.88509,3.3624,3.14131,-4.76678,1.53759,-2.49281,-5.00974,0.3227,-1.57677,-2.36177,0.558465,1.76223,-0.153596,3.21585,-0.248642,3.44061,1.09292 
10206.6,10155.3,9259.98,-4.64998,-1.64546,-4.6585,-6.92405,-1.23826,-1.4651,-7.80907,2.03872,0.322905,5.35637,2.9557,-1.90346,0.941137,2.90995,-2.25745,1.6362,-2.73525,-3.06893,0.361893,-0.410406,-1.95298,3.18373,4.96997,3.18307,2.09522,2.29277,1.29516,1.46329 10215.1,10159.8,9265.65,-5.64262,-2.22323,-2.32616,-8.62966,1.24852,3.53986,-7.11813,2.5704,-0.221435,0.41167,0.765415,-1.44792,2.10023,1.14341,-1.90736,0.761342,-0.0657556,-6.90094,4.60419,2.00852,-1.1143,4.44335,7.23913,4.6059,2.18355,1.92624,1.0442,1.06642 10218.9,10161,9269.98,-5.54728,-2.69742,0.623383,-4.54971,5.62832,12.115,1.60837,0.527375,0.225195,-4.35554,-1.09064,-1.69716,2.68584,-2.42078,-3.28377,-0.48855,1.46337,-7.59929,7.41232,3.78152,-1.52786,1.12019,5.14455,0.902689,0.791392,0.171231,1.01653,-2.1951 10225.1,10161.4,9274.87,-4.18459,-1.40959,4.0543,-3.78563,4.56469,13.1486,7.4468,1.32559,4.01602,-4.26528,2.47676,-0.706977,1.49841,-2.44619,-4.48237,0.314642,3.21848,-7.78537,6.45365,2.67192,-0.518631,-0.579868,3.1551,-3.30298,0.42352,0.385421,1.09082,-3.38628 10238.6,10163.7,9281.72,0.163978,0.29531,1.39945,-1.88245,0.770367,3.01996,6.47156,0.843119,3.05229,-2.89342,3.69162,1.01002,0.156961,-1.63668,-1.88068,0.459627,0.572044,-3.8789,6.07964,1.73877,1.04155,-0.952277,-0.352698,-3.89818,-1.13337,1.63306,0.655322,-3.05775 10252.3,10168.8,9289.58,1.69242,0.803041,0.969081,-1.57571,10.1963,10.1486,9.01137,-0.23779,2.45598,-11.8335,0.764195,0.347471,0.63322,0.818036,-2.67947,-0.48707,-0.0121974,-5.92175,4.75178,1.31186,-0.59319,-0.865273,-2.13114,-0.629395,-0.22624,0.187864,0.687159,-1.38416 10258.4,10175.1,9296.44,0.693656,-1.47018,1.57507,-4.07861,13.9151,7.913,3.87705,-2.41045,1.40643,-18.8401,-3.38044,-3.78137,0.444306,-0.142111,-3.19856,-0.633983,1.26609,-6.96487,4.03731,1.86282,-0.255938,0.885239,0.576534,4.16798,1.48633,-2.91027,0.44246,-1.26861 
10259.2,10179.7,9301.13,-1.11281,-2.9356,3.48279,-4.07376,14.5961,4.75668,2.95063,-2.50321,1.99968,-15.2573,-3.94817,-6.19421,0.994523,-0.409685,-3.36826,-1.30752,2.89435,-7.11783,2.3961,1.75016,-0.287404,0.839505,2.32354,3.16514,0.431073,-4.23834,0.224613,-1.13459 10258.9,10180.8,9303.2,-3.70956,-2.93593,3.76222,-6.98265,14.1006,4.36509,3.13521,0.524873,3.4745,-8.19672,-0.812591,-7.54285,2.87285,0.165482,-4.34303,-3.00502,3.10194,-11.8146,3.48326,1.87454,-2.39007,-1.71717,-0.0308325,-3.00344,-3.10099,-5.07511,0.999296,-0.291248 10259.7,10178.9,9302.61,-2.50722,-0.863499,1.6361,-7.29671,5.65875,7.35687,6.74534,2.86707,2.5541,-4.10002,1.92641,-4.21325,3.79643,1.11564,-2.85299,-3.384,0.718232,-13.5344,2.15514,-0.378278,-3.09826,-4.48668,-4.09564,-6.07121,-4.62941,-4.63714,1.35609,1.33932 10264.3,10176.2,9300.58,-1.50986,-0.476834,0.153861,-9.03392,2.34462,9.76008,11.2624,0.958254,-0.70443,-6.3101,0.886002,-3.04957,4.20237,0.687347,-2.59931,-4.30057,-0.344332,-15.3463,3.30618,0.212706,-1.83037,-5.39362,-6.37009,-5.79293,-5.6463,-5.17005,1.45394,1.2199 10270.2,10175.5,9299.06,-1.8193,-1.62584,1.49621,-15.2891,-0.19176,0.694336,7.97111,-0.906134,-1.88497,-6.47048,-0.900237,-3.70282,1.23614,0.322582,-3.93212,-3.45866,1.71962,-16.8955,0.58688,-0.409914,-0.259588,-2.68512,-3.64588,-3.35838,-4.51583,-4.19392,0.240148,0.159851 10270.2,10179.6,9298.63,-1.90388,-3.42457,3.36972,-15.5947,6.83754,-2.72512,7.96959,-1.26132,-2.35887,-7.13988,-3.00989,-4.84946,-1.32472,-2.90407,-7.21556,-3.99747,1.63284,-18.121,1.49353,-0.486008,-0.289734,-2.44221,-2.61409,-4.74746,-6.81336,-4.22186,-0.397997,-3.01155 10263.1,10186.3,9296.94,0.1046,-2.95923,0.55802,-3.53552,11.956,6.06043,20.0157,-0.175478,-1.81809,-1.77528,-2.10279,-0.283075,-3.48288,-4.09089,-6.41457,-3.4926,-1.98205,-11.2644,1.51324,-2.56718,2.01317,-3.17178,-3.03644,-4.28621,-6.82533,-2.57386,-0.732198,-4.52782 
10250.3,10186.7,9289.82,0.787893,-2.63004,-4.83671,4.59987,9.90165,5.11396,20.1712,-1.49013,-0.900383,3.2704,-1.38302,1.01612,-3.51797,-3.65748,-2.01906,-2.31487,-4.58178,-0.663723,4.99631,0.0846666,6.20019,-1.32911,-0.366123,-0.708005,-3.05462,-1.4169,-1.33549,-4.03837 10229.6,10174.2,9276.51,2.92922,1.43172,-8.45959,7.92191,9.82817,0.906035,15.1761,-5.66535,-4.80598,8.92318,-1.50732,0.863702,-4.19618,-1.72605,1.43049,-1.60336,-7.78679,7.9456,2.20311,0.976306,4.6808,-2.0774,-1.41618,1.52784,-1.00485,0.251303,-2.51818,-3.24837 10203.9,10154.8,9263.01,1.97737,4.88419,1.86761,-1.89071,16.8831,21.8027,18.6752,-2.85592,-0.407409,1.1857,1.57668,2.90834,1.42619,5.01683,-2.88862,1.13125,-1.02838,-3.77013,-1.83294,-0.874118,-1.82318,-1.06152,0.617181,1.34269,3.38069,1.15764,1.12216,1.38647 10184.5,10141.2,9256.68,5.24597,7.64832,2.18557,1.58328,4.92602,9.28816,-0.0172234,-2.70209,-2.36954,2.63625,2.45988,6.65341,1.30855,2.45772,0.884071,4.15289,-0.306199,0.501745,-3.91598,-0.843063,-3.78083,-0.751671,-0.908618,-0.353576,1.46737,4.59599,1.10914,-1.05414 10178.9,10140.4,9258.57,8.5511,8.38576,-0.704081,10.0442,3.87995,9.53107,4.06474,-2.33977,-3.33414,3.45052,0.769206,8.44243,0.151836,-0.110094,2.50423,3.89258,-1.86971,4.86933,-2.34618,0.208276,-3.54318,-0.382483,-0.444637,3.17545,1.86638,6.31308,-0.0788599,-2.11239 10182.7,10148,9263.52,7.664,6.75263,-0.540997,5.42972,-5.04193,-7.98425,-8.29464,-0.166299,-0.588527,3.31557,0.500806,4.72146,-2.51571,-1.43305,5.52369,5.671,1.03703,8.03067,0.0463032,4.16527,0.993743,2.27,2.01907,5.48701,6.28587,6.50446,-0.915646,-0.555951 10185.6,10156.6,9266.64,4.26252,2.60407,3.65205,1.35764,1.93964,-1.71464,3.62386,0.664968,2.07164,-1.84774,-1.41728,2.03742,-1.93901,-0.955849,2.55509,2.24827,3.4143,2.08534,1.52467,4.36357,2.40504,-0.149419,1.87333,2.56701,3.76988,3.58853,-0.290298,1.53656 
10182.8,10164.1,9266.99,3.44774,1.00051,3.58435,5.06036,-3.20427,-1.32409,2.16178,-1.24869,0.986594,2.68824,-3.10496,3.75494,-3.03899,-1.36189,2.85639,-0.797041,2.25309,6.84226,-1.01807,1.45026,1.64915,-1.77668,1.47461,1.32051,0.0174875,3.15498,-1.91103,0.915561 10177.6,10169.5,9265.47,2.97062,0.742454,2.19308,3.39405,-10.2555,-6.11354,-8.35604,-2.29312,-0.492631,4.2024,-2.46282,2.85236,-2.05854,-1.07623,3.34902,-1.67951,1.43015,9.72371,1.0556,1.2093,0.0329592,0.933345,2.62882,4.14907,1.43657,2.25242,-2.21302,0.424466 10175.1,10171.1,9262.53,2.78573,0.66686,2.0545,2.76769,-2.38316,1.38611,1.33538,-1.98843,-1.22362,0.719734,-1.48276,0.571928,-0.303568,1.13172,0.533248,-2.57485,0.218063,4.75694,4.12677,1.25451,-2.29974,1.77459,2.18864,5.66448,2.31972,-0.197648,-0.423422,1.24127 10176.1,10170.7,9258.49,5.31438,0.737423,2.23937,7.15555,-6.03862,-6.93885,2.59027,-2.08985,-1.82474,1.76361,-1.51506,2.40133,-2.94977,1.13326,2.34185,-1.4691,-0.319475,6.55378,0.151184,-0.820336,-1.03183,0.737373,1.0173,1.60097,0.120988,0.706961,-1.06361,1.61191 10177.1,10171.1,9253.43,5.27989,0.124242,0.594136,6.40228,-14.4792,-17.9873,-7.83873,-2.70593,-2.84279,6.19952,-1.02819,4.22035,-3.89328,-0.655654,4.6427,-0.543649,-0.312946,7.67303,-3.34568,-2.99026,0.892734,0.193866,0.437901,-1.37172,-2.06494,3.10779,-2.09072,0.969194 10175,10171.9,9247.28,2.27598,-1.11333,-0.371999,2.70022,-5.44405,-1.24932,2.95574,-2.54561,-3.07604,2.81372,-0.48024,4.11824,2.04907,-0.370621,1.24343,-2.71039,-1.27809,-0.906837,-1.29061,-4.80376,-0.177684,-0.68347,-0.0356975,0.976652,-2.58184,2.60538,-0.53245,1.0079 10170.6,10171.1,9240.98,0.484599,0.0646839,-1.51326,2.89899,-3.4319,-0.213982,2.47953,-0.834731,-2.00581,5.72898,0.227883,2.67222,2.27602,0.0505934,1.31844,-2.26552,-2.6972,-0.975391,-0.869576,-3.70984,-1.26158,-0.292123,-0.590846,2.58737,-1.84822,1.62378,-0.526111,-0.491878 
10166.9,10167.6,9236.09,0.964725,-0.0392702,-0.079079,4.19696,-8.77705,-7.3393,-5.33084,1.7816,1.00552,6.00308,-0.645333,1.80016,-0.345783,0.537513,3.29513,-0.258503,-1.94323,3.02276,-2.07851,-0.708951,-0.985472,0.42465,-0.0047685,-0.0149723,-1.37113,0.550535,-0.779034,-0.484969 10166.1,10161.5,9233.6,-0.598547,-1.76595,-1.06041,-0.952044,-3.22733,-6.25839,-1.71002,3.5389,3.14678,2.52469,-0.94774,-0.697306,-1.82073,1.8162,-0.398189,-0.0962201,-1.17773,-3.11075,-1.86249,-0.148137,-0.912351,0.0729367,0.372787,-1.52491,-1.99794,-1.67208,0.753712,1.02245 10167.9,10154.5,9233.85,1.32924,-0.579085,-4.09528,3.27081,-6.78357,-9.38603,-3.06915,1.95927,0.70163,2.46784,-0.635142,0.854662,-1.03664,2.44479,0.381434,0.976493,-2.1874,1.35415,-3.25712,-1.85514,0.202589,0.286026,0.720155,0.627719,-0.687001,-0.872865,1.21871,2.25385 10170.4,10147.3,9236.23,1.55419,0.655793,-3.90119,3.65032,-6.92144,-3.81534,-0.829364,1.59907,-0.150104,0.588015,0.212751,1.04803,3.09472,3.79829,-0.218751,1.11779,-1.55055,0.933332,-1.25266,-2.59487,0.647035,1.39731,2.58953,2.8589,1.80309,-1.43261,2.52993,2.79953 10171.9,10139.7,9239.22,2.16966,0.513128,-2.93705,2.73804,-10.8601,-4.50483,3.76187,1.03924,-0.676839,-1.4866,-1.19577,1.6866,5.98311,3.12642,0.0885709,0.9896,-0.594518,0.533618,0.379411,-3.82145,2.32664,2.22298,3.60721,3.05218,2.2889,-1.98702,2.79897,1.35025 10172.4,10133.5,9242.05,0.627291,0.905709,1.39363,2.99372,-15.425,-9.09382,2.11414,1.04226,2.10526,-4.39506,-2.77953,2.15891,6.66724,1.70369,-0.372333,1.40462,2.59187,2.26874,-0.378224,-3.69675,3.0335,2.25396,3.10192,0.0429504,0.10951,-0.799702,2.66794,-0.282681 10173.8,10130.2,9245.36,-1.33644,1.42161,3.11004,3.93858,-17.0646,-12.116,1.67239,1.94826,5.54306,-3.85205,-1.5475,2.52019,4.33814,1.15019,-0.541069,1.99129,3.05378,4.25369,-2.76731,-2.80645,1.85733,0.988299,2.88783,-1.97077,-2.83768,1.85125,2.84766,0.389147 
10176.4,10130.9,9250,-3.53503,0.391503,-0.270572,1.95882,-15.1875,-18.5758,-1.42497,2.28845,5.40786,-2.12974,1.20821,0.911564,0.2788,0.0689856,-0.00271805,2.01928,-0.20812,3.23848,-1.98612,0.0245125,0.488358,-1.18054,1.47019,-3.47437,-4.6287,2.11498,2.20934,0.993318 10178.8,10135.9,9255.56,-3.20255,-0.268054,-3.48033,2.47099,-11.3536,-16.9308,2.01776,1.40976,1.56328,0.853625,1.89586,1.47109,-1.50849,0.167668,0.627511,1.41809,-4.21425,2.05546,-2.39209,-0.416193,0.276633,-1.50971,-0.820011,-1.25927,-1.76,0.153711,0.431209,1.48315 10181.2,10144.1,9260.31,-2.49125,-0.613263,-3.86482,0.287362,-9.17309,-14.1157,3.48478,0.196793,-1.25386,2.83848,0.198147,-0.0165582,0.471677,-0.139327,-0.216901,-0.966032,-5.2193,-1.40546,-0.977273,-1.2574,1.78779,0.134179,-1.72164,0.653388,0.313432,-3.37716,-0.587605,0.861387 10186.6,10151.1,9263.12,-0.0358474,0.714951,-5.47328,-0.875177,-17.5089,-13.8361,0.471247,0.643912,-2.41975,9.9458,0.993041,0.803296,-0.226386,0.0668295,2.19176,-1.16819,-4.40868,0.69383,-3.38706,-3.58218,3.07732,2.10253,1.79789,2.06744,1.83904,-2.15516,-1.67344,0.661882 10193.4,10152.2,9264.85,-2.78688,1.85556,-1.96216,-7.27433,-5.61022,0.625161,3.91544,2.78407,0.13042,8.01854,3.573,-2.43853,-1.07905,0.148792,-1.48277,-2.3792,0.378784,-7.05144,-1.06108,-1.76148,0.135824,1.71393,3.80312,-1.43656,0.702495,-1.95731,-0.703674,-0.33177 10196.9,10148.7,9267.46,1.41437,4.41491,0.0330121,-0.96198,-19.7539,-11.561,-5.49424,1.03618,-0.588315,13.1158,4.11913,1.82776,-4.02743,-1.24038,4.49417,2.16391,1.61464,5.33203,-6.2827,-3.22771,2.42673,4.53812,5.27571,1.95384,4.83592,2.15944,-2.23414,-0.0179182 10195.1,10146.6,9271.67,-0.599083,4.08109,5.56207,-0.651956,-1.899,4.41751,8.64946,-0.00765143,1.65381,7.40697,3.13743,0.528221,-1.17274,-0.333192,-1.34405,0.810869,3.04978,-1.96585,-3.00608,-1.02587,-0.427114,2.63482,2.33223,1.44749,2.70602,-0.508442,-0.782524,0.838544 
10190.6,10149.1,9275.95,0.560997,3.32623,0.00253245,1.6273,-9.62681,-9.32197,-7.13248,-1.74244,-2.26773,10.279,2.01853,1.79006,-2.32577,-1.861,2.70102,2.63733,-0.668516,4.89049,-2.56801,1.67809,-0.682542,1.07859,-0.730879,1.04436,0.219305,1.04839,-1.30085,-0.204558 10188,10153.1,9277.72,-1.05102,1.4439,-1.2902,0.37219,3.61058,7.8905,-0.13638,-0.797121,-3.203,3.7144,-0.467361,1.43319,1.01941,-0.964803,1.27849,1.32106,-0.71757,-0.281666,1.82319,4.43107,-2.93419,-0.102775,-2.79816,1.60946,-0.350934,0.837113,0.975085,-0.206216 10189.3,10155.8,9275.17,1.71247,1.79065,-0.806826,4.2591,-1.07113,5.08033,-3.80833,-1.05846,-3.93516,4.86697,-2.48519,4.41458,1.0147,-2.04319,5.76698,3.04901,0.621182,6.18537,-0.471514,3.74338,0.0954557,1.78055,-2.23478,4.29533,3.28968,4.08665,-0.45381,-1.12752 10190.8,10155.9,9267.91,0.0885688,1.62773,3.97676,0.475719,6.50171,12.0036,4.17355,0.0800788,0.877184,4.13283,-1.66529,2.3731,1.22312,-1.52431,1.32333,1.30085,4.02821,0.00402446,-0.278254,3.83144,-0.00616006,1.70507,0.14686,2.05675,3.75234,3.42709,-1.13997,-2.28219 10186.5,10152.6,9257.34,-0.152071,1.1051,2.98089,-3.26014,-3.23874,0.545145,-3.74253,0.650653,4.32612,4.55661,-0.349067,0.443991,-1.54712,-2.37082,1.08068,1.11666,3.19332,0.114235,-4.77887,1.03262,0.526047,1.57427,1.96416,-1.21359,2.2522,2.81775,-2.19914,-3.20958 10175.9,10146,9246.33,-2.37365,-0.801223,1.8448,-4.49245,2.73452,3.45587,0.665856,0.804743,7.15539,-1.25789,-1.25952,-2.70716,-1.07845,-2.04441,-1.93328,-1.35806,1.5978,-5.1161,-5.79834,-0.925826,-2.80177,-1.15512,-1.39234,-4.88988,-2.71874,-0.727928,-1.17586,-2.55528 10163.6,10137.3,9237.87,-0.803469,-2.78044,-0.895544,-1.96323,-0.541223,-3.95959,-1.23923,0.0489646,5.82687,-0.842944,-2.20839,-1.37161,-0.868195,-0.366623,-0.326653,-0.542204,-0.442138,-3.06811,-5.05951,-1.77693,-2.56412,-2.0747,-5.18551,-5.90628,-3.59607,-1.51359,-1.0358,-0.0442413 
10154.4,10129.1,9233.99,1.23915,-3.76005,-2.64612,0.723829,-3.148,-4.96491,0.57486,-0.202117,2.21428,-0.386009,-2.61213,0.591537,-0.420445,2.51457,0.848114,0.0155665,-2.8099,-0.688955,-1.65728,-1.68576,-0.314736,-2.37588,-7.30164,-5.93878,-1.09582,-1.08092,-1.23666,3.04974 10147.7,10124.3,9234.84,0.130569,-3.33534,-5.30783,0.228073,-1.79103,-2.90284,1.72325,0.336059,-1.67646,0.805152,-2.51359,-1.68843,-1.08056,2.79024,0.667811,-0.918425,-5.25023,-0.613583,-1.21144,-3.86108,1.12026,-2.87087,-6.96217,-3.74878,-0.871173,-1.99148,-1.4983,3.13726 10141.9,10125,9238.34,-2.3342,-3.74514,-6.28736,0.247636,2.71253,3.12847,7.57994,-0.0401623,-2.07147,0.481455,-3.97685,-4.46362,-0.415913,1.42821,-0.575486,-2.68041,-4.57327,-2.24353,-2.60028,-5.84863,0.625916,-3.42977,-3.6369,-0.844099,-3.5874,-4.64335,-0.985747,1.2717 10139.9,10130.2,9242.19,-1.31024,-4.72475,-7.14762,0.73153,1.45053,-5.53508,5.90136,-2.31863,0.194991,0.488804,-6.97821,-4.41928,-2.29074,-1.35009,0.919216,-2.89533,-3.25509,-0.799203,-1.99553,-4.14064,2.04707,-1.98553,-0.137078,-0.0166083,-4.9352,-5.40326,-1.67739,-1.42035 10146.2,10135.6,9246.04,1.48702,-3.36982,-6.22071,1.74719,2.56435,-13.0074,1.99705,-3.21561,2.91416,0.844878,-6.7988,-2.16439,-5.4962,-1.85975,2.13575,-1.59383,-2.91884,1.52462,-1.3314,-1.85117,3.6544,-0.430522,0.692754,-0.840642,-3.31251,-2.33908,-3.05762,-2.1983 10158.1,10136.1,9250.8,0.841737,-2.49661,-1.39476,-1.47649,15.6927,0.965199,10.869,-0.546861,4.02682,-3.15137,-2.65822,-1.05518,-4.77058,0.229656,-2.58261,-1.60934,-0.689737,-5.44364,-0.234473,-1.95479,2.60062,-0.769404,0.484685,-2.21476,-2.21659,-0.527818,-2.3356,-0.631119 10167.2,10131.4,9256.17,1.43756,-1.64599,0.0828565,1.10643,1.09851,-8.71597,-1.14743,1.16785,1.24835,1.69522,0.678389,1.91657,-5.73395,-1.26925,0.618759,0.671225,0.99422,2.5392,-3.14056,-3.00047,3.39733,-0.267724,0.865602,-1.72338,-1.28093,1.59131,-3.58079,-1.60917 
10168.5,10125.9,9259.95,0.111755,-1.49369,1.18289,-0.284048,-1.52165,-7.82514,1.91577,2.83987,1.30957,4.34859,2.31828,0.547347,-5.35341,-2.95714,0.120479,-0.07344,1.25038,0.863374,-1.97606,-2.63292,2.99367,-1.51317,-0.192761,-1.94301,-2.34527,-0.816782,-4.15688,-3.69083 10164.7,10123.5,9260.03,2.54631,0.123647,1.85441,0.291179,-2.26534,-5.622,0.403256,2.75151,1.92159,5.45502,4.02912,0.277333,-3.49437,-2.59529,1.68451,1.03176,0.611114,1.05444,-1.37086,-0.762577,2.09659,-3.15435,-1.66892,-4.18628,-2.03484,-0.59484,-4.5361,-4.06338 10160.7,10123.9,9256.02,4.16394,1.15842,1.00215,-1.41089,3.00077,3.69915,2.12147,1.50602,1.11373,3.7783,5.12886,1.27055,-1.0735,0.163066,0.715848,1.75274,0.248762,-1.87449,-2.70607,-0.0821427,-0.982237,-3.91753,-0.603176,-5.15131,-1.55797,1.9122,-2.63806,-2.45448 10157.6,10124.8,9249.1,1.13904,0.752742,1.28292,-3.44794,5.87463,13.5955,-3.90547,0.053564,0.392376,-2.17549,4.02652,0.800942,2.14933,0.991305,-1.00534,1.93346,1.74799,-4.3887,-2.62983,2.12002,-3.97726,-2.37985,1.92724,-3.91126,-1.80145,3.29901,0.515867,-2.07875 10155.9,10125.9,9241.01,-1.21278,1.24353,0.0902419,-1.38693,3.90257,17.0687,-1.7671,-0.621263,-0.743581,-3.56603,3.19768,0.515647,2.83626,-0.394058,-0.965446,2.53295,1.02968,-3.73706,-0.646373,4.19926,-3.90665,0.100245,2.07717,0.65145,-0.4389,3.45695,1.30478,-2.26372 10156.9,10129,9233.19,-0.519545,3.45514,-0.128203,0.470911,-4.34917,11.6069,-5.37302,-0.249794,0.0908138,-1.64961,3.7305,0.887725,1.28233,-0.50548,0.651175,4.68216,0.481759,0.131141,2.83721,7.4517,-1.51906,2.02591,0.478488,2.8447,3.96564,4.21205,0.0189546,-1.26083 10160.2,10134.9,9226.61,0.334619,3.63902,-1.33005,0.500933,-0.0390483,15.3466,3.49804,-1.22599,-0.443012,-1.29729,1.85728,0.83413,0.663791,1.08815,-1.61332,2.35978,-1.91003,-1.54128,7.06018,8.52392,-0.0931056,-0.631766,-1.8937,1.21041,3.92464,3.0125,0.582016,-0.0552563 
10165.1,10142,9222.12,-0.0501124,2.72845,-2.35233,0.461804,-3.24106,3.89637,-4.4752,-1.7395,-0.658087,1.46568,0.74815,1.9358,-1.37579,1.26993,0.248403,2.1501,-1.97865,2.84403,4.93078,6.34449,2.55208,-1.66616,-1.28941,-0.85475,2.44335,3.28626,0.575625,0.0867697 10169,10147.2,9219.92,-2.57524,1.55278,1.64717,-0.408592,2.78686,3.93608,-3.35557,-1.05071,0.358949,-1.71793,1.23509,0.730307,-0.807758,0.469476,-0.799756,2.26666,1.42763,2.57756,3.31921,4.24278,2.32673,-1.92157,-0.625841,-1.7385,0.55312,2.469,0.416022,0.102824 10167.7,10149.8,9219.39,-2.61236,0.265041,4.14099,-1.10443,5.68968,5.75872,0.437178,-1.27371,-1.44794,-5.50529,0.962099,-1.7594,-0.014506,-1.47838,-2.10998,2.88166,2.32266,2.31558,3.04189,2.76494,1.13588,-2.76241,-2.5749,-1.37983,-0.132212,1.62609,0.00182996,-0.567092 10161.2,10151.5,9219.88,-1.00231,0.225002,2.94421,2.03312,-0.355979,4.16591,-0.636307,-0.980578,-3.17075,-4.4683,-0.0413473,-0.96548,-0.194949,-0.798368,-1.08568,3.94015,1.20872,6.21739,0.493017,0.663456,-1.20346,-2.76074,-4.99576,-0.484664,1.27829,1.87168,-0.0347963,-0.649195 10155.5,10153.9,9220.83,-0.939771,0.647249,0.0634509,3.2582,-1.62031,4.0693,-0.997477,-0.169163,-4.01209,-4.20755,-1.14083,-0.040949,0.676499,1.0769,-0.637069,2.85891,0.53402,4.18699,0.666861,0.369829,-2.63692,-0.336214,-3.73798,1.47577,2.81105,-0.292838,0.0270106,-0.151526 10154.1,10157.5,9221.67,-1.65802,1.59847,-3.57612,1.52401,6.37221,4.48866,-1.46299,-0.915699,-6.98915,-0.340048,-0.952717,-2.18866,-0.811792,-0.642645,-0.622625,-0.300884,-1.00057,-1.15759,2.44751,2.6773,-1.823,1.29837,-1.91591,2.49204,1.93197,-3.59974,-1.91245,-2.4109 10154.4,10160.7,9221.98,-0.583463,-0.108757,-4.6507,-0.0693877,5.35637,4.425,-6.56889,-1.82597,-8.57191,2.85503,-1.05825,-2.33955,-3.22781,-4.76081,2.05753,-0.861931,-1.83229,-0.124382,0.503483,2.18131,1.30665,2.42826,0.824233,3.84653,2.09007,-3.3925,-4.31649,-3.96112 
10153.4,10159.2,9221.68,-2.76485,-4.09131,-2.87698,-1.10712,12.5336,12.9839,-4.34652,-1.87041,-6.50663,-1.43881,-2.78497,-4.09349,-3.27711,-7.58611,-0.918956,-2.43732,-1.68029,-2.93885,1.37614,1.00354,-0.202025,0.252735,-1.35224,2.14941,-1.22668,-3.85694,-3.91196,-5.39514 10153.1,10150.6,9221.82,-3.95579,-6.11602,-1.95691,-0.571033,7.36799,2.23424,-8.23593,-1.15065,-2.89936,-3.34966,-3.42278,-4.92737,-4.22729,-7.57776,-1.53936,-2.4826,-0.485854,-2.05301,1.35048,0.235875,-0.851581,0.299046,-3.65228,0.452501,-2.53126,-4.14097,-3.0318,-6.032 10156.5,10138.1,9224.22,-1.72219,-4.81284,-2.04034,3.64429,-3.40667,-8.21149,-2.06758,-0.247629,0.240041,0.844032,-2.55693,-2.29071,-5.62686,-4.10255,0.955484,-2.58578,-0.573095,1.96046,-2.89531,-2.47853,1.00662,1.59082,-2.31097,1.60096,-0.355857,-3.59741,-2.54995,-3.16362 10162.5,10126.5,9229.66,-1.48624,-2.31864,-1.19917,5.07688,-2.15075,-4.48733,6.81643,1.19375,3.4529,3.66948,-1.49639,-1.71619,-5.51437,-1.29231,-0.407537,-4.604,-2.54282,0.0824236,-5.27449,-4.81883,0.767691,-1.39492,-2.55861,-0.325428,-1.75464,-3.59903,-1.89829,-0.732932 10167.7,10118.7,9237.56,-1.06333,-0.880843,-0.709075,2.8371,-10.0447,-10.4348,-2.5904,3.18465,5.97115,6.33779,-0.55058,-1.01646,-4.14332,-1.6247,-0.0193591,-4.01402,-3.73144,0.38443,-5.50468,-6.41294,-0.295721,-3.62009,-2.70822,-3.1355,-4.45086,-2.10376,-1.79258,-1.22716 10172.5,10116.9,9247.18,1.551,0.130326,-0.490568,5.87654,-14.5436,-8.35183,-0.790109,3.39107,4.7174,8.28156,-0.0057788,2.6686,-1.84943,-1.48071,1.03911,-4.0934,-3.48936,2.7605,-6.22541,-8.72046,-2.487,-3.9855,-3.15508,-4.85806,-6.30628,-0.1826,-2.22861,-1.91313 10179.7,10122.6,9257.78,1.5355,1.00586,-2.46594,5.55739,-10.6179,-9.89219,1.01847,2.02002,1.55047,10.3651,1.59035,2.3257,-3.02423,-0.681756,0.379055,-4.13859,-2.86252,2.65539,-7.09955,-8.4785,-1.80811,-2.44766,-3.84586,-6.08215,-4.18234,0.309597,-3.66089,-1.78168 
10188.9,10134.4,9267.84,0.423127,-1.44673,-6.16369,2.54558,-3.2605,-10.2788,1.93481,-0.460125,-1.55478,7.53447,1.04311,-2.037,-5.33297,-0.715827,-0.912315,-4.00679,-5.27357,1.32517,-7.02947,-5.6844,2.49,-1.1701,-4.14164,-4.46692,0.160721,-1.23591,-5.46575,-0.678645 10196.3,10145.5,9275.21,0.204833,-4.851,-9.24744,3.38063,-3.90706,-1.89916,-0.318999,-3.05687,-4.83175,3.88926,-1.68472,-4.52857,-6.76493,0.053409,0.356074,-2.44354,-9.25902,3.95243,-8.99635,-3.68403,4.07743,-1.41439,-4.06526,0.784286,2.50666,-1.59161,-6.31937,0.0761621 10200.4,10148.5,9278.92,-3.06966,-5.752,-6.27773,-0.452092,4.18213,13.2473,-12.0757,-4.47092,-6.49884,-5.96616,-4.08975,-9.08064,-3.65565,-1.03612,-1.9757,-2.79369,-8.22081,-3.13926,-2.68074,1.98539,-1.47914,-4.27865,-6.82097,-0.0420558,-2.72616,-3.80964,-3.69263,-2.81706 10202.3,10144.3,9279.66,1.7621,-1.2767,-1.87182,1.61337,-6.80859,14.4514,-16.815,-2.07514,-4.63562,0.0307544,-1.49074,-2.29138,-1.18636,-1.08621,1.86862,0.689509,-4.2555,-0.913166,-4.04706,-1.13903,-2.95495,-1.4359,-3.45987,4.36607,0.619825,-1.53464,-2.06409,-2.58631 10201.6,10141.5,9277.89,2.73427,2.11183,3.79277,1.71546,-5.8859,13.3557,-11.3022,2.79327,2.37116,13.2011,3.98285,0.966107,0.039656,-0.715821,2.85166,2.34242,2.77476,-0.0888099,-4.98538,-3.4432,-1.83877,3.57211,2.68075,7.05565,6.45616,-1.54302,-1.24469,-1.49869 10196,10143.8,9273.55,-2.52737,0.202188,7.08167,-4.89952,6.71679,10.6699,0.756855,5.54471,7.25909,13.9583,6.39787,-2.37566,0.745793,-1.45474,-1.09404,0.910205,7.21143,-6.92492,-3.24203,-2.89701,-0.543452,6.07649,7.33376,6.57894,6.15484,-4.40884,0.0587056,-1.11052 10186.2,10147.8,9267.63,-4.31786,0.145523,8.74123,-1.12372,3.61382,5.90919,-2.20636,4.87121,7.93339,10.8223,5.77747,-1.02016,1.70524,-1.23974,-1.99873,1.22043,7.18349,-2.02393,-4.52471,-1.19367,-1.87015,5.60664,6.92162,5.30532,3.03549,-3.16865,1.33872,-1.3693 
10178.3,10151.3,9262.07,-1.01371,-0.36759,7.07326,3.03463,-3.67644,6.41668,1.01659,3.32806,5.69645,6.11989,4.17302,3.13986,4.40199,0.31144,-2.58094,-0.0539033,4.16067,1.49299,-3.2753,-1.39228,-2.172,3.33149,4.19598,3.46064,0.616277,-0.818505,3.98959,0.698301 10177.2,10154.3,9257.94,2.09186,0.0766925,2.17884,5.08344,-13.9717,-0.882929,-3.84368,2.86526,4.57806,7.77504,4.75117,6.29349,4.58116,4.04706,1.06485,0.914494,1.84175,7.12093,-3.92066,-3.04038,-1.76589,1.29071,2.74094,1.46176,1.98937,3.12251,5.09485,3.84087 10179.4,10155.4,9254.74,0.187596,-0.882072,-0.665652,4.15319,-3.56212,6.25634,3.46947,2.99756,3.30879,0.859046,5.1349,3.91232,5.90056,6.60019,0.839946,-0.162343,-0.484405,2.65509,-1.8674,-3.50916,-5.10299,-1.60522,1.28388,-0.0295086,1.05,2.81748,5.21994,5.53563 10178.8,10153.1,9251.26,-1.91139,-0.154839,-0.832651,7.32065,-8.14661,3.20829,-4.61065,3.9011,1.20806,1.29028,6.11631,4.24084,4.66918,7.38927,3.1094,1.72009,-0.436683,6.06925,-3.83738,-3.64103,-8.35166,-0.222316,1.74303,3.43329,2.82215,3.91599,3.2218,6.05878 10175,10149.2,9246.46,-3.00223,-0.829219,2.18951,8.12634,-8.29635,3.98254,-2.55022,3.58933,0.0476173,2.00734,2.85452,5.13863,4.39434,5.86178,1.57419,0.321093,2.11151,4.62819,-0.677836,-1.98205,-7.44972,1.36379,2.52895,5.12261,2.10196,3.15929,2.77152,6.16477 10170.8,10147.7,9240.32,-2.09934,-1.33891,3.77143,6.49402,-6.43302,-0.0826344,0.87837,1.12061,0.421557,1.06025,-1.52903,5.64507,3.68263,3.49536,1.25096,-1.4957,2.92854,4.60413,2.40658,-0.645265,-3.32217,0.987715,2.60908,1.94117,-0.424246,2.85508,2.71473,4.88469 10167.3,10148.7,9234.04,-1.71112,-2.89318,3.67043,1.66277,3.35424,4.57631,10.1924,-0.35173,1.35064,-5.80931,-1.82085,3.64176,4.57117,2.2882,0.924739,-2.41648,2.22467,2.19365,5.80375,-0.426137,-2.32705,-0.919332,2.09081,-2.34116,-2.25007,1.71251,3.40172,3.5108 
10165.7,10149.1,9229.23,-1.45001,-3.05548,2.45599,-0.349391,3.71978,4.53119,5.144,-0.0754888,2.20722,-6.90377,0.948441,2.13514,3.08117,1.83942,2.86791,-0.010419,2.66035,5.23219,5.6626,-0.804354,-2.37724,-1.67323,0.673861,-3.53649,-1.59081,1.76997,2.75549,2.29186 10167.4,10147.1,9226.8,-1.49928,-2.70714,1.88393,-0.842721,-0.225431,3.25531,1.41947,0.140255,3.21042,-3.88608,1.41104,1.86088,-0.091131,0.642157,1.94581,0.307133,3.18746,6.22574,4.30938,-1.01513,-1.1936,-1.8575,-0.588364,-1.42784,-2.08205,1.85519,1.46316,1.06047 10171.1,10143.9,9226.48,-2.01672,-2.40053,3.06391,-0.0599903,-8.34303,2.94718,-5.04409,-0.199276,4.0892,-3.68083,-0.226057,2.75547,-0.686676,-0.843757,0.670264,-0.458086,3.08212,7.11729,2.84836,0.933537,-1.50789,-1.59001,0.179663,0.0589795,-2.55704,3.42709,0.775783,0.360096 10175,10140.6,9227.89,-1.34782,-2.60865,2.14445,1.39294,-10.3608,4.5868,-8.2559,-1.78039,0.356678,-10.0047,-3.28868,2.87133,1.85333,-3.67234,1.53223,-1.27653,0.113475,6.97877,4.49731,3.38158,-3.24882,-2.09817,-0.213742,-0.816136,-3.92766,4.36792,1.46638,-0.25462 10179,10139.5,9231.01,-0.683001,-1.14693,0.835389,1.45465,-4.93888,6.92044,-3.2459,-1.76518,-2.11784,-11.5638,-3.99539,3.25477,2.97649,-3.54233,2.62301,-0.286071,-1.99677,5.44349,5.35012,2.55683,-3.04093,-1.82791,-1.42661,0.583625,-2.6178,3.43693,2.29735,-0.308687 10185.5,10142.2,9235.77,-0.0852919,0.0218383,0.522022,1.091,-4.00515,-0.71681,-2.72016,-1.24891,-1.4593,-5.53454,-2.81228,2.98724,1.40275,-1.35994,4.37674,1.00841,-2.02092,6.34309,4.01241,0.223476,0.719167,-0.617158,-1.79277,2.19906,-0.00915837,1.60933,1.1106,-0.276707 10194.7,10147.7,9242.28,-0.507821,-1.45713,1.82236,1.06383,0.990703,1.16431,3.40878,-1.35424,0.436421,-3.7364,-2.82733,0.844561,2.18188,1.42103,2.14788,-1.48658,-0.956157,3.31294,2.03859,-1.09837,2.11718,-0.147919,0.113767,0.665977,1.0134,-0.758268,0.662046,1.48327 
10202.3,10153,9250.68,-0.953894,-1.28733,1.09826,0.183582,-2.63676,-4.1377,-2.89907,-0.851983,3.07691,-0.452803,-2.18838,0.00930997,2.87142,4.0314,0.911046,-1.55443,1.18147,4.24956,-2.48362,-1.23019,1.72571,2.11001,5.29268,-0.281886,3.31927,-0.100871,1.85826,4.09941 10205.4,10156.4,9259.89,-1.27754,0.134823,0.181405,0.430733,3.94306,1.54036,2.99815,-1.16285,4.70226,-4.24342,-1.81256,1.00154,4.93307,6.24027,-1.59843,-1.48742,2.34844,2.10305,-2.00905,-0.662325,0.626241,1.17997,6.74123,-1.67701,1.35772,0.491316,4.32271,6.53414 10204.9,10157.9,9267.94,0.0906612,2.16352,-0.379486,5.42194,2.73054,2.84047,-1.4914,-1.83181,4.02307,-5.15449,-0.262248,3.79351,5.21678,7.80905,0.384689,1.27337,2.9796,6.90988,1.28339,2.20996,-0.91791,-0.163496,3.78903,-1.75168,-0.655347,2.9127,4.88667,7.66747 10203.5,10159,9273.39,2.81598,1.22437,-0.368556,7.79675,3.42922,7.94279,4.57077,-0.708312,0.0968463,-6.10539,0.906129,5.55489,5.11842,8.21484,-0.0671665,1.22889,2.37144,6.24544,4.97372,3.9233,-2.49967,0.267274,-0.310124,1.09266,-0.410233,4.04567,4.74621,8.0612 10203.2,10162.2,9275.77,5.91857,0.355765,0.897437,11.4606,-3.5509,6.21936,2.57301,-0.0103725,-3.12789,-4.93913,0.601331,6.94209,5.77388,6.93334,1.15761,0.716978,2.28439,10.4648,4.58557,4.39511,-2.76356,2.73426,-1.51427,4.03252,2.99548,5.47757,3.66414,6.66569 10203.5,10167.2,9275.21,3.60261,-0.370029,0.212296,6.53742,-1.17501,1.39057,4.60494,-1.59955,-3.36286,-6.83681,-0.619753,2.05525,7.21718,4.0699,-0.311278,-1.80144,1.07578,6.02142,4.81799,3.05296,-1.94492,1.84126,-1.66326,1.40391,1.77364,2.95825,3.1993,3.61198 10203.2,10169.7,9272.52,1.94895,1.27875,-0.411546,7.45768,-3.75161,0.551798,7.13428,-3.82068,-2.61405,-4.51085,-0.839975,-0.654388,7.59238,3.63367,1.11679,-0.895324,0.0589114,6.72608,0.605615,-0.28023,-1.84675,-0.134175,-0.468956,-1.06577,2.10307,1.19208,2.14254,2.35948 
10201,10166,9269.14,-0.454618,0.774031,2.06017,2.8462,-0.622985,0.18548,5.53147,-2.50822,-2.46147,-4.96779,0.0109421,-5.95039,4.88549,1.45711,-1.36876,0.21175,1.58667,0.959389,-1.72767,-0.999701,-1.91612,-0.271218,-0.271307,-3.60937,2.2528,-2.81471,1.29832,0.342989 10196.9,10158.5,9266.51,1.16537,-1.9421,4.60098,6.66208,-8.91079,-4.05041,0.977918,-0.375912,-2.52562,-2.44083,-1.83608,-5.04574,0.870179,-2.88837,0.903319,2.45464,2.77487,7.13809,-7.32993,-2.29902,0.410437,1.61472,1.76486,-2.68616,2.88565,-3.79142,-0.830458,-1.20118 10194.1,10152.5,9265.18,-4.11534,-5.864,4.81522,5.05616,0.145339,-4.93641,2.59855,0.656712,1.10696,-4.83177,-6.68192,-7.2593,-1.01756,-6.50992,-0.623669,0.165413,3.83811,5.84041,-5.84841,-0.103661,1.98729,0.416145,1.34348,-6.16515,-2.67871,-5.57128,-1.65554,-3.26762 10194.1,10148.4,9264.07,-6.59722,-4.92656,-2.01588,3.7417,0.726794,-18.2936,5.15057,-0.276157,1.50739,-0.538248,-8.52874,-4.00362,-4.55022,-5.27015,0.604573,-0.930054,-0.109161,8.19838,-8.17669,-2.1092,4.17484,-1.56197,-1.02102,-5.8341,-5.50376,-1.7134,-2.50895,-3.06608 10193.9,10142,9261.25,-7.62788,-2.98611,1.9356,-1.40885,17.3716,4.06957,22.1809,1.39972,5.64224,-7.94302,-5.59134,-1.45901,0.439725,1.11211,-6.73411,-3.11746,1.4598,-4.78344,-2.09513,-0.404037,0.473396,-4.22587,-2.43839,-5.70551,-5.26427,-0.515338,1.20082,0.113119 10190.4,10132.9,9256.55,-0.061965,0.47587,-3.01478,1.28661,-2.15014,-14.2047,7.89898,0.463674,0.911903,2.0883,-1.64338,3.11185,-2.21723,0.781415,-1.37312,0.396228,-1.38267,3.09944,-1.8496,-1.29836,2.6087,-3.15966,-2.03297,-3.33185,-3.23065,2.92606,0.328003,-0.0324179 10185,10126,9252.36,-0.460313,1.71643,-3.7396,-2.47922,-1.49725,-15.3645,-1.80975,0.715758,-0.981069,-0.691494,-0.794101,-0.106849,-2.08179,-0.30971,-1.53311,0.428815,-0.320026,-0.221114,2.28648,0.175576,3.04606,-1.33911,-0.290353,-5.37868,-3.63253,0.919151,0.306196,-0.421839 
10178.6,10124.8,9251.04,-1.00256,1.33259,-4.2472,-1.03971,2.95821,-4.55752,1.84476,0.117356,-4.36831,-4.27268,-1.02576,-0.886254,0.661063,-0.0446314,-0.718596,-0.508343,-2.00182,-0.337999,2.57329,-0.613947,2.18595,0.685998,2.2221,-1.4549,-2.89677,-0.0111036,1.2411,0.83044 10170.8,10127.6,9252.97,-1.71108,0.0714348,-2.91875,-0.0818013,10.0027,5.28964,4.84662,0.115636,-5.97389,-2.97492,0.466922,-1.16018,3.14319,-0.484977,-0.73996,-1.40938,-2.86898,-1.18229,2.85098,1.59393,-0.709864,0.769892,0.0526875,0.667581,-4.09633,-0.130706,2.87503,0.28772 10163.4,10130.8,9256.69,-0.0482655,-0.561906,-4.41924,-1.93638,1.00001,-3.80859,-6.74655,-0.693966,-6.90741,3.83606,-0.443929,0.133173,1.32042,-4.12952,2.21239,-0.401666,-2.83084,1.48444,3.60821,4.7162,0.0479322,1.57325,-2.9423,0.781086,-3.57562,1.01359,1.5974,-1.03302 10159.1,10132.9,9259.9,0.830676,1.38376,-3.59798,1.88876,1.90766,6.33722,1.16568,-1.88109,-5.49532,7.56995,-3.97276,2.47056,-1.10217,-4.02745,0.530141,-1.80729,-2.44923,1.11112,6.04583,5.79514,-1.61378,0.146823,-4.31812,1.65679,-0.82556,0.385538,-1.6035,-0.921055 10159.8,10135.2,9260.63,-0.16576,1.00018,-5.12473,0.442361,0.505831,-5.64864,-2.63413,-2.52592,-5.46478,4.95174,-4.3147,0.782684,-5.73615,-4.82371,0.266276,-1.86669,-4.0481,-1.31822,9.03428,5.18538,0.835431,-1.04748,-4.21294,1.0615,-0.105573,-1.22812,-5.24566,-3.63422 10165.2,10138.1,9258.46,0.205477,-0.680098,-4.46762,5.26891,1.18115,-1.68502,7.13137,-1.22722,-4.01706,-1.7858,-0.511666,3.55446,-3.85553,-2.43205,1.3525,-0.694302,-4.16672,-0.729833,7.26617,2.38627,0.742375,-2.04911,-3.24066,2.72775,2.10783,0.115275,-4.78462,-4.34396 10171.6,10139.6,9254.61,-1.51268,-2.23477,-5.13237,-3.29461,-0.317239,-10.5071,-7.94002,1.87205,-2.15615,-2.57627,4.52526,1.46446,-2.39092,-3.68309,1.44927,1.27351,-2.10555,-3.67494,7.0263,3.64847,0.370668,0.612656,-2.452,4.76347,5.31087,1.21101,-2.18927,-4.86589 
10174.6,10139.6,9250.85,-0.380976,0.430706,-4.77251,1.24603,3.57465,-3.14504,-10.8805,1.4131,-3.82203,6.1265,4.05681,1.86576,-2.69539,-3.84931,0.571097,0.0445532,-3.61574,1.0929,5.45496,4.67637,-2.69117,0.376736,-3.44843,8.26613,5.44059,2.39248,-1.35143,-3.43895 10173.2,10141.8,9247.9,-0.967231,0.660605,-0.333774,0.682442,10.1733,9.80472,-4.02844,0.296976,-2.0856,1.70749,0.105393,-0.302007,-2.02762,-1.68176,-2.57321,-1.85542,-2.20576,-3.56605,7.81712,4.57148,-0.717533,0.00661063,0.070936,7.88567,3.00205,-0.188925,-1.30646,-0.417109 10169.8,10147.8,9245.05,1.57911,1.89614,-1.23894,5.44327,1.1255,2.7455,0.888702,-2.69789,-2.29535,1.37374,-2.16695,0.277041,-2.61632,-0.168021,1.19527,-0.966804,-1.39634,2.02717,6.13068,1.74285,2.61838,-0.673957,2.42798,5.71141,1.0237,-0.190537,-2.48355,-0.424022 10166.9,10152.4,9241.4,1.48812,1.56883,0.00439658,-1.99079,-5.3945,-7.45076,-2.79497,-1.09824,0.438405,1.08335,0.567998,-2.12211,0.537132,0.235065,2.13962,0.850241,2.33283,0.11668,5.71046,0.316621,2.37782,1.5783,4.38674,4.44102,2.85837,-0.867284,0.197126,-0.632035 10166,10149.9,9237.21,3.10346,3.20745,-0.0787972,3.26164,-1.99167,1.15174,7.73898,0.388067,-1.3872,7.93093,2.89628,-0.846609,2.95243,1.10786,0.0356645,-0.191303,-1.48335,3.06518,0.833731,-2.48298,-2.62814,-0.329278,-0.0454046,4.84244,1.50962,-0.571214,2.28968,0.0896905 10169.4,10141.9,9233.72,1.54047,2.79665,0.872984,0.435893,0.341067,4.50191,6.31086,2.24353,0.0763229,5.33021,2.30696,-1.94916,2.28551,1.6759,-3.55737,-0.57595,-3.31446,-1.28349,0.109544,-0.911539,-3.08755,0.149125,-2.57658,2.65457,-0.759677,-1.72314,1.73795,1.22082 10175.5,10134.5,9231.85,3.08721,1.31195,-0.463831,-2.78365,-16.0641,-12.4959,-7.90321,1.44639,2.2521,2.09953,-0.628689,0.674957,-0.991746,0.999703,0.501374,1.08647,-1.9555,-0.457535,-1.969,0.140249,0.679574,4.05153,-1.26929,2.9472,1.23177,0.0460567,-1.18548,1.19414 
10178.5,10132.3,9231.94,4.8578,-0.156201,-1.83619,3.45539,-10.5983,-4.40534,-3.25278,-1.48511,1.7839,1.07398,-3.79721,3.44697,-0.661031,-0.19397,1.51898,-2.78611,-1.58924,-1.02247,-4.03291,-0.779814,-2.72459,1.42865,-4.44874,1.96164,0.024013,0.769821,-1.68183,-1.09525 10176,10135.5,9234.24,3.98434,-2.9881,-1.82932,-3.45496,-4.37718,-1.32479,-6.81161,0.242295,3.63988,0.773917,-2.92089,1.50769,1.03257,-1.29175,0.607123,-3.32519,0.794345,-7.2134,-4.18473,-2.11878,-3.48641,2.04926,-1.83971,2.5711,1.8547,-0.444122,0.204744,-0.633906 10170.3,10141.1,9238.24,4.5574,-1.21766,-1.92884,-3.3891,-4.53289,-3.61119,-11.1428,0.87067,2.52674,6.28098,-0.916225,0.833349,-0.285056,-2.02874,2.83162,-0.822357,0.836116,-2.02452,-4.36166,-2.46534,-2.40599,3.53798,0.439996,2.8824,2.66576,-0.190266,-0.411649,-0.335746 10164.8,10146.9,9241.73,1.14271,0.21175,2.54403,-5.97996,8.86795,9.92082,0.583279,0.92891,3.1377,1.52082,0.653327,-2.04189,-0.909795,-1.88382,-1.45444,-1.72465,2.94817,-6.9659,0.661566,-0.779148,-2.33549,3.61435,1.90115,-0.709103,0.572663,-2.44443,-1.61985,-1.24632 10161.8,10151.9,9242.42,0.429305,-0.24402,1.54324,-0.758714,1.99988,2.30697,-0.150645,-1.67843,-0.372931,2.68223,0.974669,-2.18675,-3.69726,-3.84373,0.315076,-1.61503,2.02219,-0.439987,1.5067,0.347441,-0.468043,1.85512,2.51346,-3.61534,-1.61311,-1.68631,-4.32277,-3.31289 10160.6,10154.5,9240.5,-1.6783,-2.7916,3.79283,-1.46484,1.8842,7.0456,3.61276,-2.08564,-1.14902,-3.90469,1.00738,-2.71903,-1.12392,-2.56102,-0.564502,-1.26929,2.87817,-3.80446,2.16188,1.69189,-0.17359,-0.806729,4.45158,-4.99401,-1.9224,-2.1335,-3.41399,-1.5215 10158.8,10152.9,9238.94,-1.26294,-1.55708,2.47997,-0.37092,-5.35681,-1.99801,-4.61673,-3.19995,-3.63982,-3.59422,0.268397,-1.15304,1.21312,-1.94008,2.37467,0.463918,1.03699,-0.249188,1.94821,3.1095,0.656428,-1.26258,5.17342,-2.5293,-0.911564,-0.727538,-1.60047,-0.657086 
10157.1,10148.4,9241.47,-0.729297,1.90628,1.50273,8.02209,4.5029,7.25435,-0.943104,-3.87229,-5.15977,-0.605295,-0.786266,-0.00624273,3.2036,-0.99694,1.83674,-0.424322,-0.759934,4.69506,3.12589,4.93905,-1.14094,-2.37706,0.896838,-1.15642,-2.07425,-0.341439,0.651623,-1.90525 10159.3,10145.1,9249.53,-3.61489,-0.368775,4.8318,0.654323,13.8953,20.2332,9.01061,0.740005,1.06482,-1.98312,1.43178,-2.39481,5.44965,2.23927,-2.07082,1.84445,3.36316,-2.3874,5.82791,5.13504,0.331121,1.17574,4.11636,2.46863,2.53744,-2.31289,3.73605,1.261 10166.4,10146.2,9260.39,-0.690065,-0.196533,2.57149,3.28245,1.26863,3.07282,2.3288,0.343504,0.7493,7.7189,2.47287,-2.19401,1.83016,1.49389,2.04941,5.57015,1.68587,7.37325,4.33035,3.86901,3.21355,1.31074,4.30838,4.34097,4.14204,-0.792683,1.91579,1.4487 10174.6,10153.3,9268.63,0.973864,0.288282,4.67663,-0.604468,1.35396,1.77193,6.1612,0.928573,3.56181,0.301872,1.61496,-1.94891,1.37811,1.784,-0.829802,4.5252,2.98522,2.05165,3.03006,0.33278,4.9167,0.692046,4.78248,3.89965,4.1223,-1.28055,0.902128,2.44014 10179.4,10165.9,9270.91,0.383028,0.372248,2.91142,5.26445,-4.52355,-0.481389,-1.47582,-0.0802922,4.09074,-3.4789,-1.84054,-0.641665,1.60157,2.15213,-0.406849,1.24052,1.05589,7.69175,-4.79723,-3.42058,1.48542,-2.69221,-0.604027,-2.8823,-1.41943,-0.386671,1.59434,1.71786 10180.9,10180.3,9268.76,-7.39108,-4.07938,1.96913,5.84801,-1.99672,13.1344,-8.45676,2.45664,8.74322,0.00440195,-3.70354,-4.02376,5.09873,7.07674,-2.94009,-6.27334,-2.18896,9.06615,-15.5002,-6.518,-12.659,-9.2251,-8.78964,-16.0646,-15.2285,-1.36974,7.28841,2.96689 
nipype-1.7.0/nipype/testing/data/fods.mif000066400000000000000000000000001413403311400203250ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/fsLUT_aparc+aseg.pck000066400000000000000000000000001413403311400224520ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/fs_LR-deformed_to-fsaverage.L.sphere.32k_fs_LR.surf.gii000066400000000000000000000000001413403311400307420ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/fs_LR.L.midthickness_va_avg.32k_fs_LR.shape.gii000066400000000000000000000000001413403311400273570ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/fsaverage5.L.midthickness_va_avg.10k_fsavg_L.shape.gii000066400000000000000000000000001413403311400307520ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/fsaverage5_std_sphere.L.10k_fsavg_L.surf.gii000066400000000000000000000000001413403311400270420ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/fsl_mcflirt_movpar.txt000066400000000000000000000606171413403311400233520ustar00rootroot00000000000000-0.00848102 0.00369798 0.003424 0.31043 -0.751705 0.619666 -0.00786305 0.00338866 0.0031168 0.305984 -0.736865 0.60846 -0.0078758 0.00327434 0.00305205 0.310853 -0.712291 0.60703 -0.00697324 0.00321863 0.00290525 0.312876 -0.690708 0.574235 -0.00493536 0.00253503 0.00278449 0.306737 -0.640998 0.497959 -0.0050205 0.00287942 0.00278449 0.302929 -0.611536 0.504744 -0.00515396 0.0028174 0.00278449 0.306624 -0.626585 0.495511 -0.00502531 0.00303599 0.00238171 0.304389 -0.615857 0.476798 -0.0051203 0.00275721 0.00238171 0.293279 -0.609103 0.479662 -0.0051486 0.00302039 0.00238171 0.3024 -0.590623 0.504029 -0.00480493 0.00280246 0.00254305 0.299592 -0.607553 0.496145 -0.00483143 0.0029749 0.00238171 0.296309 -0.610005 0.49058 -0.00500805 0.00294036 0.00238171 0.289551 -0.586523 0.475725 -0.00507794 0.00290279 0.00251728 0.291829 -0.581586 0.4659 -0.00505649 0.00287046 0.00238171 0.285892 -0.558162 0.486484 -0.00486141 0.0029014 0.00238171 0.281708 
-0.576438 0.500581 -0.00451471 0.00325153 0.00238171 0.260234 -0.581576 0.48697 -0.00435536 0.00291181 0.00205411 0.243392 -0.57584 0.482712 -0.00372446 0.00344849 0.00215792 0.243957 -0.532488 0.454305 -0.00426827 0.00306549 0.00211817 0.247016 -0.524634 0.443058 -0.00430498 0.00292419 0.00204627 0.250965 -0.520665 0.447152 -0.00416968 0.00284674 0.0015551 0.227943 -0.517509 0.427806 -0.00412455 0.00257037 0.0015551 0.220707 -0.541823 0.446838 -0.00375444 0.00283961 0.0015551 0.240189 -0.523312 0.445508 -0.004049 0.00295457 0.00171111 0.247864 -0.542133 0.441171 -0.00341088 0.00353936 0.00196785 0.241637 -0.530257 0.458271 -0.00385554 0.00307726 0.00192797 0.235657 -0.549361 0.462623 -0.00381038 0.00290875 0.0019887 0.227608 -0.520289 0.446221 -0.00404867 0.00272854 0.0015551 0.228016 -0.504383 0.428189 -0.00407163 0.0026387 0.0015551 0.228074 -0.504025 0.412914 -0.00364706 0.00253998 0.0015551 0.228137 -0.475896 0.384941 -0.0037216 0.00258769 0.00129908 0.219247 -0.475888 0.39801 -0.00354442 0.0026195 0.00161976 0.223251 -0.494004 0.413261 -0.00365106 0.00266975 0.00140795 0.214972 -0.475915 0.395145 -0.00345051 0.0026654 0.00143458 0.217735 -0.499591 0.395244 -0.00386278 0.0032511 0.00148858 0.227885 -0.4916 0.410386 -0.00370985 0.0028687 0.00155317 0.22808 -0.454425 0.401618 -0.00361409 0.00281021 0.00114015 0.210271 -0.474531 0.395154 -0.00336899 0.0028391 0.00122588 0.206562 -0.48207 0.395278 -0.00322115 0.0025016 0.000971286 0.19819 -0.488312 0.401101 -0.00332647 0.00214777 0.000951943 0.19833 -0.441523 0.362933 -0.00379846 0.0025001 0.0011168 0.200561 -0.41288 0.375803 -0.00385488 0.00269027 0.00159237 0.197731 -0.456883 0.408944 -0.00316266 0.00254457 0.00153409 0.23235 -0.46996 0.41613 -0.00287148 0.00279855 0.00140178 0.221113 -0.452827 0.399155 -0.00330114 0.00308189 0.00119473 0.217267 -0.437719 0.400279 -0.00272255 0.00296461 0.00121548 0.205994 -0.422254 0.38321 -0.00225855 0.0027351 0.001041 0.210033 -0.427521 0.374364 -0.00217991 0.00264376 
0.00113029 0.214503 -0.411771 0.383445 -0.00214648 0.00214777 0.00105354 0.212427 -0.426881 0.370372 -0.00161787 0.00260412 0.000998276 0.207648 -0.430173 0.362644 -0.00180066 0.00214777 0.0010713 0.20834 -0.444568 0.365418 -0.00180454 0.0024648 0.000798108 0.202802 -0.403147 0.356207 -0.0020506 0.00214777 0.000798108 0.197762 -0.399779 0.348588 -0.00190313 0.0024758 0.000883538 0.194956 -0.400015 0.358683 -0.00146259 0.00214777 0.000798108 0.197762 -0.40225 0.370598 -0.00174289 0.00245124 0.00116596 0.197472 -0.39386 0.376956 -0.00155825 0.00276825 0.00122167 0.197315 -0.412931 0.383866 -0.00185648 0.00259941 0.00105382 0.197469 -0.395061 0.383594 -0.00182118 0.00267729 0.00107721 0.197449 -0.391875 0.365938 -0.000973134 0.00301533 0.00115758 0.197249 -0.394952 0.350869 -0.00167533 0.00247387 0.000798107 0.187618 -0.392903 0.365637 -0.00138586 0.00266239 0.00114166 0.197418 -0.434129 0.378076 -0.00150404 0.00262249 0.000978026 0.197533 -0.381574 0.37689 -0.00159637 0.00238195 0.000798108 0.197691 -0.399326 0.371102 -0.0015072 0.00243395 0.000798107 0.203798 -0.40491 0.383813 -0.00148289 0.00240714 0.00113101 0.199821 -0.37287 0.375321 -0.00125133 0.00251404 0.00106359 0.194161 -0.376139 0.375408 -0.0012191 0.00263648 0.00122436 0.188593 -0.377702 0.366024 -0.00113731 0.00240082 0.0010032 0.178868 -0.378858 0.358996 -0.00148008 0.00232081 0.00101022 0.180199 -0.372738 0.383817 -0.0012126 0.00250957 0.000798107 0.175212 -0.34567 0.37173 -0.00111795 0.00241347 0.000798107 0.170299 -0.353606 0.358821 -0.00095758 0.00213035 0.000397963 0.161141 -0.357446 0.365647 -0.00109816 0.00210888 0.000620097 0.16592 -0.367498 0.356733 -0.0025239 0.00241204 0.000875664 0.174841 -0.318649 0.365096 -0.00194576 0.00235136 0.000791062 0.173719 -0.34921 0.365346 -0.00136463 0.00219537 0.000472743 0.183497 -0.349016 0.347253 -0.00105549 0.00221794 0.000524981 0.172247 -0.356581 0.336894 -0.000934616 0.00225553 0.000472743 0.19016 -0.348219 0.341451 -0.000892303 0.00210585 0.000472743 
0.190212 -0.354825 0.346641 -0.000674097 0.00207306 0.000472743 0.187654 -0.347504 0.36574 -0.000731368 0.00224711 0.000472743 0.162845 -0.311492 0.347469 -0.000851133 0.00219081 0.00031125 0.166713 -0.330998 0.327355 -0.000823373 0.00271374 0.000472743 0.161522 -0.300522 0.336238 -0.00186156 0.0026479 0.000527825 0.174187 -0.311056 0.384604 -0.00181456 0.00293178 0.000472743 0.17862 -0.31363 0.365878 -0.00145165 0.00292302 0.000472743 0.174693 -0.323792 0.366044 -0.000280547 0.00302176 0.000613008 0.171158 -0.307377 0.354648 -0.00102857 0.00241798 0.000472743 0.169783 -0.33105 0.366085 -0.000910833 0.00233255 0.000472743 0.172468 -0.326596 0.370393 -0.00259817 0.00305771 0.000318212 0.172369 -0.233163 0.328424 -0.00274121 0.002114 0.000472743 0.15706 -0.378932 0.346467 -0.000925877 0.002114 0.000472743 0.16657 -0.331152 0.323551 -0.000908101 0.00225225 0.000472744 0.157014 -0.303804 0.331662 -0.000989428 0.00211401 0.000209262 0.161837 -0.277138 0.335176 -0.000944042 0.00230984 0.000119085 0.176478 -0.291953 0.337145 -0.000735466 0.00232536 0.000472743 0.176324 -0.326139 0.320604 -0.000891236 0.00211401 0.000472744 0.175609 -0.304903 0.337474 -0.000458968 0.00211401 0.000472744 0.164809 -0.30954 0.312743 -0.000668192 0.00211401 0.000363271 0.160076 -0.29724 0.336205 -0.00148407 0.00223152 0.000472744 0.171184 -0.275324 0.366291 -0.00201892 0.00250118 0.000472744 0.168691 -0.286058 0.348595 -0.00146059 0.00236547 0.000472744 0.161138 -0.278151 0.326023 -0.00135461 0.00222972 0.000472744 0.161689 -0.260247 0.318822 -0.00092949 0.00222308 0.000643927 0.161278 -0.272452 0.316306 -0.000847415 0.00234605 0.000618961 0.162992 -0.256975 0.315654 -0.00101742 0.00232242 0.000472744 0.153625 -0.227373 0.297289 -0.000361782 0.00189599 0.000252999 0.142833 -0.261867 0.277084 -0.000479091 0.0015063 0.000472744 0.150675 -0.239074 0.281411 -0.000360777 0.00165293 0.000472744 0.159345 -0.239806 0.294284 -3.19434e-05 0.00168715 0.000406089 0.159362 -0.232677 0.319091 -0.000533827 
0.00173204 0.00036781 0.159385 -0.233485 0.308725 -0.000668182 0.00188107 0.000369061 0.159356 -0.207783 0.275831 -0.000355443 0.00176457 0.000365783 0.159394 -0.220577 0.276127 -0.00060883 0.00189395 0.000472744 0.15932 -0.22128 0.287449 -0.000746941 0.0018624 0.000616126 0.15924 -0.225162 0.291474 -0.000564671 0.0017201 0.000472744 0.153814 -0.226566 0.287388 -0.00145663 0.00258813 0.00116296 0.185202 -0.244621 0.335017 -0.00219006 0.00245987 0.000787457 0.169001 -0.208462 0.354626 -0.00191619 0.00231926 0.000472743 0.166009 -0.216173 0.338666 -0.00172903 0.00216372 0.000690806 0.170092 -0.219892 0.310842 -0.00123709 0.00180974 0.000419105 0.173846 -0.20502 0.317748 -0.00137921 0.00199518 0.000472743 0.159294 -0.202677 0.329726 -0.00144674 0.00173962 0.000472743 0.15938 -0.217726 0.325648 -0.00118526 0.00170087 0.000472743 0.159396 -0.209523 0.310477 -0.00118249 0.00167393 0.000555762 0.176098 -0.218246 0.310515 -0.00151082 0.00179082 0.000472743 0.17037 -0.196022 0.316673 -0.00162813 0.00180919 0.000472743 0.153439 -0.182404 0.310498 -0.00139222 0.00185995 0.000472743 0.149298 -0.185631 0.30168 -0.00139271 0.0017295 0.000472743 0.147365 -0.176281 0.299172 -0.00180321 0.00144362 0.000322926 0.143595 -0.183191 0.298361 -0.00214857 0.00144269 0.000472743 0.142475 -0.192361 0.293597 -0.00154472 0.00146742 0.000472743 0.141103 -0.192287 0.281476 -0.000568332 0.00143017 0.000472743 0.130095 -0.188297 0.269273 -0.00113892 0.00137752 4.43659e-07 0.130224 -0.175748 0.269201 -0.00103425 0.00142273 0.000185835 0.13561 -0.183987 0.269193 -0.000774376 0.0015795 0.000472743 0.138689 -0.165624 0.257333 -0.00173929 0.00179112 0.000254479 0.130085 -0.184029 0.274216 -0.000135947 0.00195957 2.76676e-05 0.130163 -0.144255 0.240247 -0.00103911 0.0016387 0.000115201 0.130179 -0.167486 0.246782 -0.00110093 0.00171344 0.000115603 0.123061 -0.149876 0.239903 -0.00109477 0.00165112 0.000265259 0.123906 -0.165456 0.247424 -0.000802759 0.00159631 0.000288558 0.130126 -0.17102 0.262394 
-0.000862495 0.00135147 -6.8268e-05 0.130255 -0.144222 0.259091 -0.00297174 0.00174883 0.000260285 0.167241 -0.0379135 0.188088 -0.00105132 0.00108635 -0.000173276 0.0878331 -0.204579 0.207703 -0.0013336 -0.000126747 -0.000484435 0.0254196 -0.11665 0.195031 -0.0010814 -0.000294802 -0.000484435 0.0140919 -0.0978324 0.126513 -0.000603341 0.000302712 2.64698e-23 0.0405334 -0.104296 0.0975108 -0.000825199 0.000493607 -8.17347e-05 0.0263388 -0.0460845 0.0975304 -0.000750846 0.00028972 -0.000410709 0.0187101 -0.0634676 0.0974913 -0.00077172 0.000374374 0 0.0186823 -0.06348 0.097471 -0.000700404 0.000438789 -5.29396e-23 0.0186822 -0.0825618 0.0773451 -0.000380307 0.00023343 0.00029862 0.0326818 -0.0864233 0.0530209 -0.000104716 0.00023343 0.00011753 0.0297457 -0.0664206 0.0363457 -0.000368444 0.00023343 1.32349e-23 0.0245133 -0.0499621 0.0113951 0.000291951 0.00023343 0 0.0186943 -0.0499781 0.0114422 -2.62807e-05 0.00023343 0 0.0186943 -0.0546394 0.0114204 0.000255729 0.00023343 -0.000164307 0.0186982 -0.0395877 0.0078155 0.000194914 0.00023343 0 0.0213834 -0.0626574 0.020654 0.000193759 0.00023343 0 0.0186943 -0.0329949 0.0114375 -3.70427e-05 0.00023343 0 0.0186943 -0.0279062 0.0114589 -0.000323262 0.00023343 0 0.0186943 -0.0282253 0.0321371 0.00084391 0.000638024 2.11758e-22 0.0186785 -0.0278405 0.0113712 -7.69024e-06 0.000109617 -0.000147268 0.0220757 -0.032972 0.0230633 -0.000129427 0.000109617 0 0.0321617 -0.0329855 0.0230997 0.000226123 0.000305439 -6.66191e-05 0.0186917 -0.032967 0.0420957 0.000226123 0.000109617 0 0.0187093 -0.0329701 0.0444471 0.00042491 0.000109617 0 0.0187093 -0.0351983 0.0208685 0.000143792 0.000539639 0 0.0186927 0.00303243 0.0145763 -4.10923e-05 0.000539639 -8.43282e-05 0.0278386 -0.00870932 0.0453143 -0.000184071 0.000539639 -0.000101156 0.0255131 -0.0137627 0.0395709 0.00132307 -5.3134e-05 -0.000449938 0.0206134 0.00314807 0.0205795 0.000250579 0.000391543 0 0.018478 -0.0139536 0.0283306 0.000377446 -0 0 0.0151655 0.00295563 0.0203817 
0.000586776 -0 0 0.0175544 -0.00204988 0.0203686 0.000276676 -0 -0.000255649 -2.83769e-07 -0.00704635 0.0387269 -0.000371102 0.000301187 0 -1.3095e-05 0.0271561 0.0227428 0.000115231 0.000164171 -0.000236649 -1.08038e-05 0.000926837 0.00844783 -0.000636739 -0.00048414 -0.000178152 6.10722e-05 -0.0295802 0.0422578 0.000574861 -1.32349e-23 -0.00017029 4.76745e-06 -0.0220311 0.0160948 0 -0 0 0 0 0 5.49884e-05 -0 0 0 9.64137e-08 -2.30886e-06 -0.00122678 -0.00040948 -0.000406403 9.19199e-05 -0.000129593 0.0283654 0.00105998 0.000520764 0 -6.85096e-06 0.0200838 -0.0147987 0.000226882 -4.6423e-05 0 0.00537123 0.0200853 -0.00548566 0.000513422 -4.6423e-05 -5.29275e-05 -2.36415e-06 0.00468766 -0.0254726 6.55836e-05 -0.000154116 0.000252928 -0.00237985 0.02008 -0.039783 -0.000235384 -4.6423e-05 0 -0.0238541 0.0316494 -0.0486997 -0.000576924 -0.000140285 -0.000262576 -0.0151304 0.0112579 -0.015439 -1.99358e-06 -4.6423e-05 -0.000205989 -0.023605 0.0200991 -0.0367281 -0.000119211 4.71597e-05 0 -0.020794 0.0375814 -0.0414349 -6.89438e-05 0.00032748 -7.78778e-05 -0.020814 0.0372644 -0.0311736 -0.000180973 0.000239954 -0.000271366 -0.0276414 0.0372721 -0.0413698 -0.000289225 0.00017937 -0.000261918 -0.0208393 0.03727 -0.0612577 0.000378139 9.3084e-06 -0.000162182 -0.0207426 0.0372873 -0.0528533 0.000690299 6.51783e-05 0 -0.0210435 0.0233806 -0.029622 0.000410376 9.3084e-06 0 -0.0043979 0.0549634 -0.0295984 -0.0001865 9.3084e-06 0 -0.0043979 0.0549764 -0.0502441 0.00154853 0.000360969 -0.00027623 -0.0204531 0.0701428 -0.0709676 9.6498e-05 0.00015804 0 -0.0253298 0.0581827 -0.0539739 0.000215858 -1.9591e-05 0 -0.0253359 0.06282 -0.0816875 0.000574688 -0.000126265 -0.000429956 -0.0253925 0.0587834 -0.0900856 -0.000105844 0.000136093 -0.000501494 -0.0225436 0.0815906 -0.0952095 -0.00197246 0.000393695 -0.000502585 -0.0253649 0.073506 -0.0625592 8.61726e-05 0.000146047 -4.33949e-05 -0.00716072 0.0272798 -0.08714 0.000434765 -1.9591e-05 -0.00031543 -0.00729227 0.081828 -0.101178 
0.000762957 -0.000460443 -0.000365245 -0.0213107 0.0818694 -0.102253 0.000665589 -1.9591e-05 -2.92773e-05 -0.029813 0.0818495 -0.0820078 0.000376118 -1.9591e-05 -0.000212159 -0.0212457 0.081845 -0.0819125 -0.000979862 -1.9591e-05 -0.000365245 -0.0372373 0.108408 -0.0817308 0.000264963 -1.9591e-05 -0.000266924 -0.032113 0.051199 -0.0988675 0.00146606 0.000195412 -0.00029391 -0.0321146 0.0800139 -0.109011 0.000291661 -0.000127204 -0.000365245 -0.0321555 0.0522606 -0.103778 0.000291661 -0.000183757 -0.00030397 -0.0321532 0.0614865 -0.0985986 0.000195473 -0.000127204 -0.000365245 -0.019861 0.0614905 -0.0805213 -2.14393e-05 -0.000127204 -0.000365245 -0.0181936 0.0692439 -0.0870888 0.0003353 -0.000203575 -0.000365245 -0.0211638 0.081888 -0.103809 0.000776189 -0.000127204 -0.000365245 -0.0125477 0.0614844 -0.0771749 0.000176355 -0.000127204 -0.000365245 -0.0233034 0.0878089 -0.0878032 0.000176355 -0.000127204 -0.000365245 -0.0231802 0.0791931 -0.110647 -3.83874e-06 0.00038859 -2.73452e-05 0.0125002 0.125464 -0.142273 0.000868848 -0.000393939 -9.96098e-05 -0.0155239 0.0397744 -0.155987 0.00091589 -0.000562014 -0.00028095 -0.0190541 0.0824586 -0.135075 0.00087502 -0.000450016 -0.000365245 -0.0193506 0.104995 -0.133548 0.000800525 -0.000377505 -0.00031291 -0.0303528 0.114086 -0.136783 0.00146988 -2.37356e-05 -0.000436639 -0.0323861 0.114029 -0.146069 0.000719755 -0.000299638 -0.000365245 -0.023389 0.114132 -0.142513 0.00110753 -0.000377505 -0.000365244 -0.0303642 0.0892388 -0.155885 0.00136252 -0.000377505 -0.000365244 -0.0303642 0.113575 -0.147853 0.00231361 -0.000377505 -0.000365244 -0.0303642 0.139888 -0.193263 0.00110108 -0.000607891 -0.000365244 -0.0304006 0.121659 -0.18148 0.000927566 -0.000377505 -0.000691199 -0.0281333 0.105962 -0.17862 0.000751022 -0.000476087 -0.000789899 -0.0334775 0.121691 -0.194406 0.00112335 -0.000695008 -0.000973046 -0.030542 0.151978 -0.237643 0.000893459 -0.000377505 -0.000973046 -0.03048 0.137543 -0.242902 0.00114371 -0.000843107 
-0.000973046 -0.0305719 0.121522 -0.237287 0.00134174 -0.000660276 -0.000973046 -0.0358186 0.128653 -0.219912 0.00155944 -0.000377505 -0.000793386 -0.0304513 0.145991 -0.222683 0.00108988 -0.000767429 -0.000720323 -0.0433613 0.122593 -0.215994 0.00146938 -0.000721923 -0.000762238 -0.0304939 0.122184 -0.198866 0.00110931 -0.000377505 -0.000688982 -0.0483037 0.143174 -0.19271 0.0010997 -0.000377505 -0.0007618 -0.0479196 0.143158 -0.208667 0.000985933 -0.000377505 -0.000789877 -0.0479144 0.136532 -0.198784 0.000511305 -0.000897433 -0.000789877 -0.0479746 0.125719 -0.183577 0.00194047 -0.00077943 -0.000895627 -0.0479619 0.122494 -0.212341 0.00145614 -0.000390931 -0.000794261 -0.0477741 0.138857 -0.194832 0.00108945 -0.000302473 -0.000413053 -0.0476644 0.168143 -0.20273 0.000620222 7.65737e-05 -0.000712959 -0.0432873 0.165084 -0.16488 0.000986368 -0.000118732 -0.000724724 -0.0729611 0.129411 -0.189903 0.00102258 7.65737e-05 -0.000608053 -0.0812101 0.123499 -0.189655 0.00127968 -0.000109986 -0.000824954 -0.0882434 0.141524 -0.187677 0.00151888 -0.000301247 -0.000807071 -0.0795715 0.139571 -0.18773 0.000598165 -0.000123989 -0.00121758 -0.073005 0.154065 -0.198756 0.000996279 -0.000331064 -0.000828674 -0.072995 0.134982 -0.201029 0.000650365 -0.000325216 -0.000828674 -0.0729717 0.126871 -0.187524 0.00163804 -0.000443304 -0.000657716 -0.0729441 0.137021 -0.187792 0.00133349 -0.000648157 -0.000828674 -0.0850728 0.153436 -0.209151 0.00123139 -0.000325216 -0.000828674 -0.0893936 0.16363 -0.220149 0.00104134 -0.000386943 -0.00102111 -0.0915095 0.173769 -0.220081 0.000990241 -0.000381413 -0.000828674 -0.0894022 0.173802 -0.220013 0.00113917 -0.000325216 -0.000828674 -0.0627174 0.128688 -0.24724 0.00201342 -0.00112228 -0.00123176 -0.0828882 0.191247 -0.245811 0.00192589 -0.000582862 -0.000983975 -0.111296 0.151928 -0.236204 0.00180842 -0.000715002 -0.00106754 -0.10782 0.178834 -0.224875 0.00131595 -0.00040685 -0.000743985 -0.104328 0.181422 -0.220757 0.0013881 -0.00082289 
-0.00115783 -0.104466 0.181384 -0.227374 0.00151408 -0.000675727 -0.00115783 -0.109676 0.165188 -0.22427 0.00165711 -0.000666016 -0.00103012 -0.104391 0.146302 -0.212609 0.00163797 -0.000451953 -0.000952173 -0.107087 0.193623 -0.217631 0.00109362 -0.000201175 -0.00082053 -0.104261 0.189356 -0.220543 0.00197396 -0.000229021 -0.000838134 -0.120553 0.17773 -0.235394 0.0012392 -0.000660943 -0.000944107 -0.109186 0.16886 -0.213007 0.00053133 0.0001527 -0.00115783 -0.115681 0.175217 -0.207241 0.00152932 -0.000606794 -0.000995529 -0.11225 0.185208 -0.238669 0.00123335 -0.000606794 -0.00115783 -0.117824 0.195997 -0.238557 0.000908555 -0.000606794 -0.00101416 -0.121231 0.190725 -0.238396 0.00162981 -0.000606794 -0.000844137 -0.121191 0.205108 -0.243566 0.0015124 -0.000606794 -0.000872531 -0.128413 0.18357 -0.225067 0.00190692 -0.000606794 -0.000794863 -0.121178 0.188853 -0.225206 0.00183851 -0.000606794 -0.000854444 -0.130781 0.1979 -0.225175 0.00164302 -0.000606794 -0.00127114 -0.128815 0.19232 -0.249187 0.00163785 -0.000606794 -0.00119892 -0.130764 0.193826 -0.240729 0.00131593 -0.000606794 -0.000906967 -0.131006 0.205435 -0.249097 0.00200212 -0.000771996 -0.000854444 -0.137492 0.190545 -0.240773 0.00117287 -0.000698498 -0.000733931 -0.118064 0.201684 -0.225944 0.00182313 -0.000752464 -0.000854444 -0.134366 0.19632 -0.237962 0.00180647 -0.000402503 -0.000854444 -0.119718 0.260103 -0.27449 0.00144831 -0.000793027 -0.000854444 -0.127216 0.190046 -0.2569 0.00217583 -0.000827675 -0.00129585 -0.12717 0.225556 -0.259739 0.00201476 -0.000780337 -0.00129585 -0.143892 0.185635 -0.249578 0.00159089 -0.000794244 -0.00129585 -0.147577 0.1836 -0.241166 0.00160264 -0.000794244 -0.00129585 -0.149757 0.21402 -0.252527 0.00233683 -0.000770757 -0.00156229 -0.147641 0.208383 -0.262793 0.00195913 -0.000720721 -0.00170284 -0.15704 0.199682 -0.285747 0.00226092 -0.000720721 -0.00160804 -0.156816 0.199696 -0.271042 0.00212442 -0.000921885 -0.00157298 -0.156921 0.199745 -0.25961 0.00305442 
-0.000720721 -0.00129585 -0.163956 0.212381 -0.245986 0.00330534 -0.000373221 -0.00179279 -0.186692 0.194904 -0.309214 0.00346353 -0.000720721 -0.00195238 -0.194758 0.241267 -0.320427 0.00380009 -0.000720721 -0.00174551 -0.191207 0.222927 -0.322773 0.0039433 -0.00100031 -0.00175103 -0.195371 0.226483 -0.320548 0.00446154 -0.00124138 -0.00195885 -0.205667 0.243396 -0.311808 0.00482548 -0.0009442 -0.00186616 -0.205525 0.222744 -0.309492 0.00440585 -0.00117045 -0.00196728 -0.205623 0.237897 -0.32044 0.00414956 -0.000458838 -0.00205432 -0.21865 0.204871 -0.485582 0.00508424 -0.000610207 -0.00243206 -0.224415 0.162328 -0.430429 0.00363123 -0.000668638 -0.00243206 -0.252762 0.237689 -0.348448 0.00387692 -0.00103499 -0.00243206 -0.253835 0.232035 -0.355247 0.00375814 -0.00104958 -0.00263932 -0.255201 0.242367 -0.37341 0.00391373 -0.000941601 -0.00243206 -0.263516 0.216384 -0.366453 0.00373123 -0.00110162 -0.00243206 -0.258176 0.228623 -0.375837 0.00350662 -0.00104958 -0.00243206 -0.26355 0.262808 -0.366282 0.00384451 -0.00111889 -0.00263034 -0.263635 0.241215 -0.346647 0.00381224 -0.00104958 -0.00243206 -0.26355 0.240246 -0.359769 0.00433029 -0.00104958 -0.00243206 -0.26355 0.240051 -0.373859 0.00414349 -0.00104958 -0.00243206 -0.26355 0.257642 -0.373794 0.00346039 -0.00123575 -0.00243206 -0.263612 0.257872 -0.39108 0.00367153 -0.00104958 -0.0026819 -0.263654 0.249964 -0.396576 0.0037025 -0.00104958 -0.00292171 -0.26377 0.260542 -0.385052 0.00361749 -0.00146812 -0.00277011 -0.263837 0.268398 -0.391177 0.00377836 -0.00145107 -0.00260942 -0.273741 0.277082 -0.391254 0.00379373 -0.00150051 -0.00243206 -0.255325 0.277121 -0.413741 0.00280526 -0.00104958 -0.00193346 -0.202101 0.245354 -0.390949 0.00371669 -0.00104958 -0.00234173 -0.237025 0.260223 -0.391367 0.00365351 -0.00121915 -0.00267775 -0.260016 0.274336 -0.416525 0.00356198 -0.00126632 -0.00292571 -0.272055 0.300707 -0.407601 0.00432012 -0.00120794 -0.00284687 -0.278776 0.245382 -0.390087 0.00415636 -0.00106094 
-0.00267775 -0.273668 0.270464 -0.402974 0.0047789 -0.00105474 -0.00312553 -0.282745 0.27839 -0.426055 0.00445082 -0.00120537 -0.00312553 -0.295592 0.3062 -0.425905 0.00526023 -0.00117712 -0.00312553 -0.278908 0.323336 -0.4457 0.00516529 -0.00105474 -0.00312553 -0.298321 0.249594 -0.40922 0.00369585 -0.00105474 -0.00292349 -0.278809 0.250177 -0.411018 0.00393004 -0.000963189 -0.00292447 -0.278774 0.279156 -0.423023 0.00394811 -0.00105474 -0.00312553 -0.293691 0.27907 -0.423031 0.00401 -0.00122407 -0.00312553 -0.292801 0.279061 -0.437143 0.0042117 -0.00125451 -0.00306221 -0.291453 0.271675 -0.422949 0.00433882 -0.00122886 -0.00312553 -0.307194 0.266258 -0.427415 0.0043577 -0.000958762 -0.00312553 -0.310699 0.273065 -0.436539 0.00445181 -0.000877902 -0.00312553 -0.298478 0.278914 -0.437571 0.00438267 -0.000890411 -0.00312553 -0.298475 0.278951 -0.448015 0.0036134 -0.00101293 -0.00344615 -0.298568 0.326858 -0.447108 0.00458554 -0.00118916 -0.0032966 -0.298609 0.267921 -0.437643 0.00499018 -0.0011511 -0.00346207 -0.317683 0.276572 -0.437796 0.00423056 -0.000918149 -0.00343336 -0.317588 0.276881 -0.437575 0.00418439 -0.000981184 -0.00331384 -0.317534 0.327044 -0.437738 0.00490767 -0.00111059 -0.00356384 -0.31462 0.298181 -0.454389 0.0048099 -0.00112715 -0.00356384 -0.317706 0.309028 -0.454346 0.00550176 -0.0012964 -0.0032231 -0.31762 0.30011 -0.454599 0.00443756 -0.00118411 -0.00356384 -0.322466 0.350907 -0.448268 0.00481129 -0.00103471 -0.00356384 -0.32692 0.288698 -0.451768 0.00548097 -0.00108202 -0.00356384 -0.317694 0.292463 -0.439131 0.00532322 -0.0013465 -0.00320294 -0.317634 0.299715 -0.4304 0.00522822 -0.0012716 -0.00324059 -0.332011 0.316303 -0.447072 0.00508726 -0.00124569 -0.00338776 -0.325842 0.311571 -0.455335 0.00515591 -0.00113512 -0.00356384 -0.317712 0.319284 -0.445405 0.00519494 -0.00119238 -0.00356384 -0.321919 0.319276 -0.455369 0.00480415 -0.00113038 -0.00356384 -0.324561 0.317617 -0.455227 0.00591115 -0.00134397 -0.00356384 -0.336967 0.307161 
-0.455684 0.00540187 -0.00134084 -0.00356384 -0.336978 0.321724 -0.462179 0.00495632 -0.00122923 -0.00356384 -0.336943 0.338448 -0.465876 0.00498272 -0.0012813 -0.00356384 -0.319189 0.338466 -0.465136 0.00548257 -0.00133825 -0.00385146 -0.335071 0.296729 -0.468606 nipype-1.7.0/nipype/testing/data/fsl_motion_outliers_fd.txt000066400000000000000000000066521413403311400242310ustar00rootroot000000000000000.0922165 0.040464 0.111654 0.274237 0.0615315 0.037751 0.069177 0.0394165 0.066542 0.0637689 0.029314 0.055653 0.0291915 0.0594125 0.0478581 0.0750645 0.0681695 0.135893 0.070488 0.0245075 0.08072 0.0666571 0.0712906 0.0591095 0.109185 0.076768 0.067243 0.076951 0.021331 0.0823295 0.0408805 0.0638545 0.062919 0.0381145 0.0858815 0.076134 0.0727425 0.029358 0.0574337 0.109022 0.0932028 0.116082 0.099692 0.0792185 0.0660805 0.079638 0.0615515 0.0422645 0.0605675 0.0678102 0.0534692 0.0858751 0.0441815 0.0411835 0.059656 0.0626191 0.054006 0.050042 0.0276905 0.0816688 0.106604 0.104542 0.0699417 0.0493374 0.0314606 0.0637101 0.02931 0.032287 0.0448395 0.053762 0.0775596 0.0352955 0.0620057 0.0429542 0.165356 0.0681041 0.080837 0.0483714 0.0413671 0.0214476 0.0415278 0.090658 0.0603659 0.0801592 0.12952 0.0450311 0.0328385 0.101851 0.111088 0.0216054 0.263852 0.241185 0.170973 0.0528163 0.0591555 0.0479947 0.0797687 0.057177 0.0617814 0.0564299 0.115253 0.0711485 0.072734 0.0377425 0.0452792 0.0293435 0.0743266 0.130582 0.0712992 0.0355222 0.0534384 0.0404496 0.0728568 0.0347538 0.0365854 0.0236392 0.0343136 0.219582 0.133829 0.0631227 0.0636641 0.0814131 0.0479329 0.0353675 0.0384015 0.0310994 0.060522 0.043508 0.0305195 0.020338 0.0538009 0.0398593 0.044996 0.0778829 0.0675269 0.0303966 0.0684796 0.113632 0.173751 0.0953603 0.0384551 0.0348528 0.04526 0.0633001 0.356055 0.416511 0.253341 0.119676 0.139908 0.0971501 0.0554116 0.0258724 0.0459943 0.0833891 0.0624481 0.0657044 0.0389019 0.0205947 0.0409763 0.0498495 0.0416258 0.0166502 0.0353083 0.0997548 0.0965846 
0.0235861 0.0633841 0.0154942 0.0357461 0.0800965 0.0650863 0.0211126 0.163237 0.125343 0.0540911 0.017874 0.0691969 0.11043 0.0835264 0.137328 0.118949 0.0753882 0.00275183 0.133472 0.244647 0.0847069 0.0577316 0.0751496 0.0750398 0.0972748 0.0748739 0.0458396 0.0310216 0.0366835 0.0356063 0.0553765 0.0639495 0.0650416 0.0505025 0.170091 0.13039 0.0472066 0.0572643 0.0815014 0.149822 0.227285 0.108031 0.0560778 0.0724768 0.0322847 0.11819 0.146627 0.111114 0.111445 0.020299 0.0410743 0.0268339 0.05399 0.0815168 0.0777002 0.0315828 0.165272 0.213802 0.0869493 0.036218 0.0332953 0.068719 0.0675272 0.0685225 0.0451177 0.119277 0.102194 0.0573171 0.0555505 0.115179 0.0471257 0.0575205 0.0487958 0.0594829 0.0896243 0.0537505 0.083843 0.0204785 0.0236064 0.075808 0.114648 0.0827696 0.0791115 0.102695 0.119031 0.0318184 0.0600644 0.0330951 0.107511 0.0710708 0.0392274 0.0742816 0.0839207 0.0467648 0.0345335 0.0146616 0.109282 0.187879 0.12107 0.0583698 0.0664069 0.0518948 0.0381672 0.0498546 0.0705965 0.0563436 0.0890585 0.106257 0.10538 0.140839 0.0393886 0.0322632 0.0641569 0.0545492 0.0362664 0.0250806 0.0621673 0.0157825 0.0509126 0.0748957 0.0965591 0.0749209 0.13329 0.132579 0.0985737 0.0772244 0.0360209 0.0445485 0.0692248 0.0694683 0.0347725 0.0302222 0.10371 0.158209 0.098906 0.0514085 0.0313609 0.0843055 0.0608005 0.063549 0.263942 0.17665 0.261261 0.0441281 0.0468925 0.0647964 0.044089 0.0629465 0.071587 0.029169 0.0401875 0.026996 0.0610415 0.0458035 0.035757 0.0468055 0.0355955 0.0530505 0.204683 0.116196 0.0907005 0.066667 0.124328 0.067071 0.09391 0.0647426 0.095498 0.140501 0.105467 0.0573551 0.0305451 0.026572 0.037701 0.0364285 0.0338849 0.0278505 0.0145665 0.109527 0.133339 0.0582865 0.0516891 0.0618162 0.103562 0.0196925 0.0693495 0.137835 0.0963195 0.0614775 0.0551995 0.0580145 0.034866 0.0435379 0.0189935 0.0270825 0.0893485 0.0466895 0.048314 0.0224355 0.10331 
nipype-1.7.0/nipype/testing/data/func2anat_InverseWarp.nii.gz000066400000000000000000000000001413403311400242230ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/func2anat_coreg_Affine.txt000066400000000000000000000000001413403311400237460ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/func2anat_coreg_InverseWarp.nii.gz000066400000000000000000000000001413403311400254020ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/func_epi_1_1.nii000066400000000000000000000000001413403311400216260ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/func_to_struct.mat000066400000000000000000000000001413403311400224410ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/functional.HEAD000066400000000000000000000000001413403311400214620ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/functional.nii000066400000000000000000000000001413403311400215400ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/functional.par000066400000000000000000000000001413403311400215430ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/functional.rms000066400000000000000000000000001413403311400215620ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/functional2.nii000066400000000000000000000000001413403311400216220ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/functional3.nii000066400000000000000000000000001413403311400216230ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/functional_1.dcm000066400000000000000000000000001413403311400217440ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/functional_2.dcm000066400000000000000000000000001413403311400217450ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/grad.b000066400000000000000000000000001413403311400177550ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/grads.txt000066400000000000000000000000001413403311400205360ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/im1.nii000066400
000000000000000000000001413403311400200640ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/im2.nii000066400000000000000000000000001413403311400200650ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/im3.nii000066400000000000000000000000001413403311400200660ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/im_affine.aff000066400000000000000000000000001413403311400212700ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/im_warp.df.nii000066400000000000000000000000001413403311400214240ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/image.nii000066400000000000000000000000001413403311400204600ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/image.v000066400000000000000000000000001413403311400201460ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/indices-labels.txt000066400000000000000000000000001413403311400223140ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/indices.txt000066400000000000000000000000001413403311400210540ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/input1.xfm000066400000000000000000000000001413403311400206310ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/jsongrabber.txt000066400000000000000000000000461413403311400217460ustar00rootroot00000000000000{"param2": 4, "param1": "exampleStr"} 
nipype-1.7.0/nipype/testing/data/label.mgz000066400000000000000000000000001413403311400204730ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/lh-pial.stl000066400000000000000000000000001413403311400207470ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/lh.aparc_a2009s.freesurfer.annot000066400000000000000000000000001413403311400245730ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/lh.area.structural000066400000000000000000000000001413403311400223410ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/lh.central.structural.gii000066400000000000000000000000001413403311400236300ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/lh.cope1.mgz000066400000000000000000000000001413403311400210250ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/lh.cope1.nii.gz000066400000000000000000000000001413403311400214260ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/lh.hippocampus.stl000066400000000000000000000000001413403311400223530ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/lh.pbt.structural000066400000000000000000000000001413403311400222160ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/lh.pial000066400000000000000000000000001413403311400201470ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/lh.pial_converted.gii000066400000000000000000000000001413403311400227670ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/lh.sphere.reg.structural.gii000066400000000000000000000000001413403311400242420ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/lh.sphere.structural.gii000066400000000000000000000000001413403311400234660ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/lh.white000066400000000000000000000000001413403311400203420ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/lta1.lta000066400000000000000000000000001413403311400202400ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/lta2.lta00006640000
0000000000000000000001413403311400202410ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/lut_file000066400000000000000000000000001413403311400204230ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/magnitude.nii000066400000000000000000000000001413403311400213530ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/maps.nii000066400000000000000000000000001413403311400203360ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/mask.1D000066400000000000000000000000001413403311400200160ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/mask.mif000066400000000000000000000000001413403311400203250ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/mask.nii000066400000000000000000000000001413403311400203310ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/mask.nii.gz000066400000000000000000000000001413403311400207500ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/mean_func.nii.gz000066400000000000000000000000001413403311400217500ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/merged_f1samples.nii.gz000066400000000000000000000000001413403311400232330ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/merged_fsamples.nii000066400000000000000000000000001413403311400225330ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/merged_ph1samples.nii.gz000066400000000000000000000000001413403311400234150ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/merged_phsamples.nii000066400000000000000000000000001413403311400227150ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/merged_th1samples.nii.gz000066400000000000000000000000001413403311400234210ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/merged_thsamples.nii000066400000000000000000000000001413403311400227210ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_initial.xfm000066400000000000000000000000001413403311400220500ustar00rootroot00000000000000nipype-1.7.
0/nipype/testing/data/minc_nlp.conf000066400000000000000000000000001413403311400213430ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_2D_00.mnc000066400000000000000000000000001413403311400222050ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_2D_01.mnc000066400000000000000000000000001413403311400222060ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_2D_02.mnc000066400000000000000000000000001413403311400222070ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_2D_03.mnc000066400000000000000000000000001413403311400222100ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_2D_04.mnc000066400000000000000000000000001413403311400222110ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_2D_05.mnc000066400000000000000000000000001413403311400222120ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_2D_06.mnc000066400000000000000000000000001413403311400222130ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_2D_07.mnc000066400000000000000000000000001413403311400222140ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_2D_08.mnc000066400000000000000000000000001413403311400222150ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_2D_09.mnc000066400000000000000000000000001413403311400222160ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_3D_00.mnc000066400000000000000000000000001413403311400222060ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_3D_01.mnc000066400000000000000000000000001413403311400222070ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_3D_02.mnc000066400000000000000000000000001413403311400222100ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_3D_03.mnc000066400000000000000000000000001413403311400222110ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc
_test_3D_04.mnc000066400000000000000000000000001413403311400222120ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_3D_05.mnc000066400000000000000000000000001413403311400222130ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_3D_06.mnc000066400000000000000000000000001413403311400222140ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_3D_07.mnc000066400000000000000000000000001413403311400222150ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_3D_08.mnc000066400000000000000000000000001413403311400222160ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/minc_test_3D_09.mnc000066400000000000000000000000001413403311400222170ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/mni.nii000066400000000000000000000000001413403311400201610ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/mni2t1.nii000066400000000000000000000000001413403311400205100ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/model.pklz000066400000000000000000000000001413403311400206770ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/moving.csv000066400000000000000000000000001413403311400207110ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/moving1.nii000066400000000000000000000000001413403311400207560ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/moving2.nii000066400000000000000000000000001413403311400207570ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/mrtrix3_labelconfig.txt000066400000000000000000000000001413403311400233730ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/my_database.db000066400000000000000000000000001413403311400214550ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/network0.aparc+aseg.nii000066400000000000000000000000001413403311400231470ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/network0.gpickle000066400000000000000000000000001413403311400220060ustar00ro
otroot00000000000000nipype-1.7.0/nipype/testing/data/nipype2boutiques_example.json000066400000000000000000000455511413403311400246560ustar00rootroot00000000000000{ "name": "FLIRT", "command-line": "FLIRT [IN_FILE] [REFERENCE] [OUT_FILE] [OUT_MATRIX_FILE] [ANGLE_REP] [APPLY_ISOXFM] [APPLY_XFM] [BBRSLOPE] [BBRTYPE] [BGVALUE] [BINS] [COARSE_SEARCH] [COST] [COST_FUNC] [DATATYPE] [DISPLAY_INIT] [DOF] [ECHOSPACING] [FIELDMAP] [FIELDMAPMASK] [FINE_SEARCH] [FORCE_SCALING] [IN_MATRIX_FILE] [IN_WEIGHT] [INTERP] [MIN_SAMPLING] [NO_CLAMP] [NO_RESAMPLE] [NO_RESAMPLE_BLUR] [NO_SEARCH] [OUT_LOG] [PADDING_SIZE] [PEDIR] [REF_WEIGHT] [RIGID2D] [SAVE_LOG] [SCHEDULE] [SEARCHR_X] [SEARCHR_Y] [SEARCHR_Z] [SINC_WIDTH] [SINC_WINDOW] [USES_QFORM] [VERBOSE] [WM_SEG] [WMCOORDS] [WMNORMS]", "author": "Nipype (interface), Oxford Centre for Functional MRI of the Brain (FMRIB) (tool)", "description": "FLIRT, as implemented in Nipype (module: nipype.interfaces.fsl, interface: FLIRT).", "inputs": [ { "id": "angle_rep", "name": "Angle rep", "type": "String", "value-key": "[ANGLE_REP]", "command-line-flag": "-anglerep", "description": "'quaternion' or 'euler'. Representation of rotation angles.", "optional": true, "value-choices": [ "quaternion", "euler" ] }, { "id": "apply_isoxfm", "name": "Apply isoxfm", "type": "Number", "value-key": "[APPLY_ISOXFM]", "command-line-flag": "-applyisoxfm", "description": "A float. As applyxfm but forces isotropic resampling.", "optional": true }, { "id": "apply_xfm", "name": "Apply xfm", "type": "Flag", "value-key": "[APPLY_XFM]", "command-line-flag": "-applyxfm", "description": "A boolean. Apply transformation supplied by in_matrix_file or uses_qform to use the affine matrix stored in the reference header.", "optional": true }, { "id": "bbrslope", "name": "Bbrslope", "type": "Number", "value-key": "[BBRSLOPE]", "command-line-flag": "-bbrslope", "description": "A float. 
Value of bbr slope.", "optional": true }, { "id": "bbrtype", "name": "Bbrtype", "type": "String", "value-key": "[BBRTYPE]", "command-line-flag": "-bbrtype", "description": "'signed' or 'global_abs' or 'local_abs'. Type of bbr cost function: signed [default], global_abs, local_abs.", "optional": true, "value-choices": [ "signed", "global_abs", "local_abs" ] }, { "id": "bgvalue", "name": "Bgvalue", "type": "Number", "value-key": "[BGVALUE]", "command-line-flag": "-setbackground", "description": "A float. Use specified background value for points outside fov.", "optional": true }, { "id": "bins", "name": "Bins", "type": "Number", "integer": true, "value-key": "[BINS]", "command-line-flag": "-bins", "description": "An integer (int or long). Number of histogram bins.", "optional": true }, { "id": "coarse_search", "name": "Coarse search", "type": "Number", "integer": true, "value-key": "[COARSE_SEARCH]", "command-line-flag": "-coarsesearch", "description": "An integer (int or long). Coarse search delta angle.", "optional": true }, { "id": "cost", "name": "Cost", "type": "String", "value-key": "[COST]", "command-line-flag": "-cost", "description": "'mutualinfo' or 'corratio' or 'normcorr' or 'normmi' or 'leastsq' or 'labeldiff' or 'bbr'. Cost function.", "optional": true, "value-choices": [ "mutualinfo", "corratio", "normcorr", "normmi", "leastsq", "labeldiff", "bbr" ] }, { "id": "cost_func", "name": "Cost func", "type": "String", "value-key": "[COST_FUNC]", "command-line-flag": "-searchcost", "description": "'mutualinfo' or 'corratio' or 'normcorr' or 'normmi' or 'leastsq' or 'labeldiff' or 'bbr'. Cost function.", "optional": true, "value-choices": [ "mutualinfo", "corratio", "normcorr", "normmi", "leastsq", "labeldiff", "bbr" ] }, { "id": "datatype", "name": "Datatype", "type": "String", "value-key": "[DATATYPE]", "command-line-flag": "-datatype", "description": "'char' or 'short' or 'int' or 'float' or 'double'. 
Force output data type.", "optional": true, "value-choices": [ "char", "short", "int", "float", "double" ] }, { "id": "display_init", "name": "Display init", "type": "Flag", "value-key": "[DISPLAY_INIT]", "command-line-flag": "-displayinit", "description": "A boolean. Display initial matrix.", "optional": true }, { "id": "dof", "name": "Dof", "type": "Number", "integer": true, "value-key": "[DOF]", "command-line-flag": "-dof", "description": "An integer (int or long). Number of transform degrees of freedom.", "optional": true }, { "id": "echospacing", "name": "Echospacing", "type": "Number", "value-key": "[ECHOSPACING]", "command-line-flag": "-echospacing", "description": "A float. Value of epi echo spacing - units of seconds.", "optional": true }, { "id": "fieldmap", "name": "Fieldmap", "type": "File", "value-key": "[FIELDMAP]", "command-line-flag": "-fieldmap", "description": "A file name. Fieldmap image in rads/s - must be already registered to the reference image.", "optional": true }, { "id": "fieldmapmask", "name": "Fieldmapmask", "type": "File", "value-key": "[FIELDMAPMASK]", "command-line-flag": "-fieldmapmask", "description": "A file name. Mask for fieldmap image.", "optional": true }, { "id": "fine_search", "name": "Fine search", "type": "Number", "integer": true, "value-key": "[FINE_SEARCH]", "command-line-flag": "-finesearch", "description": "An integer (int or long). Fine search delta angle.", "optional": true }, { "id": "force_scaling", "name": "Force scaling", "type": "Flag", "value-key": "[FORCE_SCALING]", "command-line-flag": "-forcescaling", "description": "A boolean. Force rescaling even for low-res images.", "optional": true }, { "id": "in_file", "name": "In file", "type": "File", "value-key": "[IN_FILE]", "command-line-flag": "-in", "description": "An existing file name. 
Input file.", "optional": false }, { "id": "in_matrix_file", "name": "In matrix file", "type": "File", "value-key": "[IN_MATRIX_FILE]", "command-line-flag": "-init", "description": "A file name. Input 4x4 affine matrix.", "optional": true }, { "id": "in_weight", "name": "In weight", "type": "File", "value-key": "[IN_WEIGHT]", "command-line-flag": "-inweight", "description": "An existing file name. File for input weighting volume.", "optional": true }, { "id": "interp", "name": "Interp", "type": "String", "value-key": "[INTERP]", "command-line-flag": "-interp", "description": "'trilinear' or 'nearestneighbour' or 'sinc' or 'spline'. Final interpolation method used in reslicing.", "optional": true, "value-choices": [ "trilinear", "nearestneighbour", "sinc", "spline" ] }, { "id": "min_sampling", "name": "Min sampling", "type": "Number", "value-key": "[MIN_SAMPLING]", "command-line-flag": "-minsampling", "description": "A float. Set minimum voxel dimension for sampling.", "optional": true }, { "id": "no_clamp", "name": "No clamp", "type": "Flag", "value-key": "[NO_CLAMP]", "command-line-flag": "-noclamp", "description": "A boolean. Do not use intensity clamping.", "optional": true }, { "id": "no_resample", "name": "No resample", "type": "Flag", "value-key": "[NO_RESAMPLE]", "command-line-flag": "-noresample", "description": "A boolean. Do not change input sampling.", "optional": true }, { "id": "no_resample_blur", "name": "No resample blur", "type": "Flag", "value-key": "[NO_RESAMPLE_BLUR]", "command-line-flag": "-noresampblur", "description": "A boolean. Do not use blurring on downsampling.", "optional": true }, { "id": "no_search", "name": "No search", "type": "Flag", "value-key": "[NO_SEARCH]", "command-line-flag": "-nosearch", "description": "A boolean. 
Set all angular searches to ranges 0 to 0.", "optional": true }, { "id": "padding_size", "name": "Padding size", "type": "Number", "integer": true, "value-key": "[PADDING_SIZE]", "command-line-flag": "-paddingsize", "description": "An integer (int or long). For applyxfm: interpolates outside image by size.", "optional": true }, { "id": "pedir", "name": "Pedir", "type": "Number", "integer": true, "value-key": "[PEDIR]", "command-line-flag": "-pedir", "description": "An integer (int or long). Phase encode direction of epi - 1/2/3=x/y/z & -1/-2/-3=-x/-y/-z.", "optional": true }, { "id": "ref_weight", "name": "Ref weight", "type": "File", "value-key": "[REF_WEIGHT]", "command-line-flag": "-refweight", "description": "An existing file name. File for reference weighting volume.", "optional": true }, { "id": "reference", "name": "Reference", "type": "File", "value-key": "[REFERENCE]", "command-line-flag": "-ref", "description": "An existing file name. Reference file.", "optional": false }, { "id": "rigid2D", "name": "Rigid2d", "type": "Flag", "value-key": "[RIGID2D]", "command-line-flag": "-2D", "description": "A boolean. Use 2d rigid body mode - ignores dof.", "optional": true }, { "id": "save_log", "name": "Save log", "type": "Flag", "value-key": "[SAVE_LOG]", "command-line-flag": "--save_log", "description": "A boolean. Save to log file.", "optional": true }, { "id": "schedule", "name": "Schedule", "type": "File", "value-key": "[SCHEDULE]", "command-line-flag": "-schedule", "description": "An existing file name. Replaces default schedule.", "optional": true }, { "id": "searchr_x", "name": "Searchr x", "type": "Number", "list": true, "integer": true, "min-list-entries": 2, "max-list-entries": 2, "value-key": "[SEARCHR_X]", "command-line-flag": "-searchrx", "description": "A list of from 2 to 2 items which are an integer (int or long). 
Search angles along x-axis, in degrees.", "optional": true }, { "id": "searchr_y", "name": "Searchr y", "type": "Number", "list": true, "integer": true, "min-list-entries": 2, "max-list-entries": 2, "value-key": "[SEARCHR_Y]", "command-line-flag": "-searchry", "description": "A list of from 2 to 2 items which are an integer (int or long). Search angles along y-axis, in degrees.", "optional": true }, { "id": "searchr_z", "name": "Searchr z", "type": "Number", "list": true, "integer": true, "min-list-entries": 2, "max-list-entries": 2, "value-key": "[SEARCHR_Z]", "command-line-flag": "-searchrz", "description": "A list of from 2 to 2 items which are an integer (int or long). Search angles along z-axis, in degrees.", "optional": true }, { "id": "sinc_width", "name": "Sinc width", "type": "Number", "integer": true, "value-key": "[SINC_WIDTH]", "command-line-flag": "-sincwidth", "description": "An integer (int or long). Full-width in voxels.", "optional": true }, { "id": "sinc_window", "name": "Sinc window", "type": "String", "value-key": "[SINC_WINDOW]", "command-line-flag": "-sincwindow", "description": "'rectangular' or 'hanning' or 'blackman'. Sinc window.", "optional": true, "value-choices": [ "rectangular", "hanning", "blackman" ] }, { "id": "uses_qform", "name": "Uses qform", "type": "Flag", "value-key": "[USES_QFORM]", "command-line-flag": "-usesqform", "description": "A boolean. Initialize using sform or qform.", "optional": true }, { "id": "verbose", "name": "Verbose", "type": "Number", "integer": true, "value-key": "[VERBOSE]", "command-line-flag": "-verbose", "description": "An integer (int or long). Verbose mode, 0 is least.", "optional": true }, { "id": "wm_seg", "name": "Wm seg", "type": "File", "value-key": "[WM_SEG]", "command-line-flag": "-wmseg", "description": "A file name. 
White matter segmentation volume needed by bbr cost function.", "optional": true }, { "id": "wmcoords", "name": "Wmcoords", "type": "File", "value-key": "[WMCOORDS]", "command-line-flag": "-wmcoords", "description": "A file name. White matter boundary coordinates for bbr cost function.", "optional": true }, { "id": "wmnorms", "name": "Wmnorms", "type": "File", "value-key": "[WMNORMS]", "command-line-flag": "-wmnorms", "description": "A file name. White matter boundary normals for bbr cost function.", "optional": true } ], "output-files": [ { "name": "Out file", "id": "out_file", "optional": true, "description": "A file name. Registered output file.", "path-template": "[IN_FILE]_flirt", "value-key": "[OUT_FILE]", "command-line-flag": "-out" }, { "name": "Out log", "id": "out_log", "optional": true, "description": "A file name. Output log.", "path-template": "[IN_FILE]_flirt.log", "value-key": "[OUT_LOG]" }, { "name": "Out matrix file", "id": "out_matrix_file", "optional": true, "description": "A file name. Output affine matrix in 4x4 asciii format.", "path-template": "[IN_FILE]_flirt.mat", "value-key": "[OUT_MATRIX_FILE]", "command-line-flag": "-omat" }, { "name": "Out file", "id": "out_file", "path-template": "out_file", "optional": true, "description": "An existing file name. Path/name of registered file (if generated)." }, { "name": "Out log", "id": "out_log", "path-template": "out_log", "optional": true, "description": "A file name. Path/name of output log (if generated)." }, { "name": "Out matrix file", "id": "out_matrix_file", "path-template": "out_matrix_file", "optional": true, "description": "An existing file name. Path/name of calculated affine transform (if generated)." 
} ], "groups": [ { "id": "all_or_none_group", "name": "All or none group", "members": [ "save_log", "out_log" ], "all-or-none": true }, { "id": "mutex_group", "name": "Mutex group", "members": [ "apply_isoxfm", "apply_xfm" ], "mutually-exclusive": true } ], "tool-version": "1.0.0", "schema-version": "0.5", "container-image": { "image": "mcin/docker-fsl:latest", "type": "docker", "index": "index.docker.io" }, "tags": { "domain": "neuroinformatics", "source": "nipype-interface" } }nipype-1.7.0/nipype/testing/data/nodif_brain_mask.nii.gz000066400000000000000000000000001413403311400233020ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/norm.mgz000066400000000000000000000000001413403311400203670ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/output.csv000066400000000000000000000000001413403311400207520ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/output_Composite.h5000066400000000000000000000000001413403311400225150ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/pdfs.Bfloat000066400000000000000000000000001413403311400207620ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/peak_directions.mif000066400000000000000000000000001413403311400225350ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/pet.nii.gz000066400000000000000000000000001413403311400206050ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/pet_resliced.nii000066400000000000000000000000001413403311400220400ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/phase.nii000066400000000000000000000000001413403311400204760ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/rc1s1.nii000066400000000000000000000000001413403311400203270ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/rc1s2.nii000066400000000000000000000000001413403311400203300ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/rc2s1.nii000066400000000000000000000000001413403311400203300ustar00rootroot00000000000000nipype-1
.7.0/nipype/testing/data/rc2s2.nii000066400000000000000000000000001413403311400203310ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/realign_json.json000066400000000000000000000016441413403311400222610ustar00rootroot00000000000000{ "cwd": "/home/cburns/data/nipype-tutorial/workingdir/_subject_id_s1/Realign.spm", "flags": null, "fwhm": null, "infile": [ [ "/home/cburns/data/nipype-tutorial/data/s1/f3.nii", "a3c80eb0260e7501b1458c462f51c77f" ], [ "/home/cburns/data/nipype-tutorial/data/s1/f5.nii", "9d6931fbd1b295fef475a2fe1eba5b5d" ], [ "/home/cburns/data/nipype-tutorial/data/s1/f7.nii", "bddcecd01af1cd16bcda369e685c8c89" ], [ "/home/cburns/data/nipype-tutorial/data/s1/f10.nii", "d75253b6ec33489adb72daa7b5b3bf31" ] ], "interp": null, "quality": null, "register_to_mean": true, "separation": null, "weight_img": null, "wrap": null, "write": true, "write_interp": null, "write_mask": null, "write_which": null, "write_wrap": null } nipype-1.7.0/nipype/testing/data/ref_class0.nii000066400000000000000000000000001413403311400214170ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/ref_class1.nii000066400000000000000000000000001413403311400214200ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/register.dat000066400000000000000000000000001413403311400212130ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/register.mat000066400000000000000000000000001413403311400212240ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/resp.1D000066400000000000000000000000001413403311400200340ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/response.txt000066400000000000000000000000001413403311400212740ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/resting.nii000066400000000000000000000000001413403311400210510ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/resting2anat_Warp.nii.gz000066400000000000000000000000001413403311400234070ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/resti
ng2anat_coreg_Affine.txt000066400000000000000000000000001413403311400244660ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/rgb.nii.gz000066400000000000000000000000001413403311400205670ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/rh-pial.stl000066400000000000000000000000001413403311400207550ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/rh.aparc_a2009s.freesurfer.annot000066400000000000000000000000001413403311400246010ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/rh.central.structural.gii000066400000000000000000000000001413403311400236360ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/rh.pbt.structural000066400000000000000000000000001413403311400222240ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/rh.pial000066400000000000000000000000001413403311400201550ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/rh.pial_converted.gii000066400000000000000000000000001413403311400227750ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/rh.sphere.reg.structural.gii000066400000000000000000000000001413403311400242500ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/rh.sphere.structural.gii000066400000000000000000000000001413403311400234740ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/roi01.nii000066400000000000000000000000001413403311400203300ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/roi01_idx.npz000066400000000000000000000000001413403311400212240ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/roi02.nii000066400000000000000000000000001413403311400203310ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/roi02_idx.npz000066400000000000000000000000001413403311400212250ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/roi03.nii000066400000000000000000000000001413403311400203320ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/roi03_idx.npz0000664000000000000000000000000014134033
11400212260ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/roi04.nii000066400000000000000000000000001413403311400203330ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/roi04_idx.npz000066400000000000000000000000001413403311400212270ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/roi05.nii000066400000000000000000000000001413403311400203340ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/roi05_idx.npz000066400000000000000000000000001413403311400212300ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/run1+orig000066400000000000000000000000001413403311400204410ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/run1+orig_model000066400000000000000000000000001413403311400216210ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/run1_categories.1D000066400000000000000000000000001413403311400221550ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/run2+orig000066400000000000000000000000001413403311400204420ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/run2_categories.1D000066400000000000000000000000001413403311400221560ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/seed.1D000066400000000000000000000000001413403311400200030ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/seed_mask.nii000066400000000000000000000000001413403311400213310ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/seed_source.nii.gz000066400000000000000000000000001413403311400223150ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/seeds_to_M1.nii000066400000000000000000000000001413403311400215400ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/seeds_to_M2.nii000066400000000000000000000000001413403311400215410ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/segmentation0.nii.gz000066400000000000000000000675421413403311400226220ustar00rootroot00000000000000휱:=EF#LN@L` 
l7Gcr{,>^-Y-ſoOmu_?ǿۿ]:j;s??ߍ6W!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!7Ri8+B.k.sRl+RζG߮iyqZi~}',7no6Զj~X뿳s5Qa4VN[~C?F6B!B!B!B!B!B!hooVgoL[ dgSB7w++lm?cEKlfe,=vǶ>qa|Rm71F:?]B!B!B!B!B!Bz}}.c{f+1{> 5?cAgcr@x;"Ϳ/4jk[be<})RLL{39iio]N'xFܧkclϳ杞"uOiM1MZB!B!B!B!B!X߲wwomBe;le?4lH[~_bV[-zO~[h~ xc}w2R3+_2Dw.VLj:no~*.]D8>pXf}q{fK2HG+̖r)$x\|+%Ũob3|/qAO{yxJ;=}.oYo Mxѽ2(w-̪~=Q[9nB!B!B!B!Bx{A߾g=g9Ϳ7(.gsVoK$ksokXI5Mx+6^[칻JAמ4bb.aux@]1V{IdqlnЮgOij#8qŗj~fKu0{1AZ// amosESij-Ӽj)q56fdi]BںY[ݦ5w{4 94VLϑv}۟L#S_?LC[W~f9lc1gbĽn?5ޠ]TZ?!B!B!B!B!sE3Wk/s:S/?;ꧽ~QNytڔ?i\m2܎sw&+W!B!B!B!BOy}~dl~?h^A(}\7E3i}R7QpssD]w[$b'5|Lۗ<|G~Q3<kb{6uݥjs6̤^صDv;ј=b|'U79XXS;z; U(祌Z3:ҔH+ׂ("#h _(IW}b#ZwQx|xE 胵[]dbrG'mg5O2sٚftGug߭9m_Cu}vnfuvY̕{rJ_;nW4fu{O74a>$ʆh@9>uVm#s ]3Od{tɎ#4GVꘉnds߻>>%{׻̤ޫ7N;3/7N;GiF<{Ɍf,廻 GzNK U쉹Wټ}4Q_6VW7Ve<+3"3&[u~|md3_yeL|lO7Z!8Azmh-zB 3ڭZt>{߉4vVьl<{O/ߥi?[ͭu{mPA1(9ͧ&e{X=6gU}??y'w^a?c;vE6M[qylB!B!B!B!oΔe嗣G{s.j?,_lM:B=.TdnĶu7iyEG o3 fl:; K;{5o߷i:棕5{1߭ٶ}'r;kseqf]#췑Y_~6ons뵷֣Q\PC,j*>Z#~dco[݃yO{1?׿FOigO1ǜGY_fq[سRvl#(|г}]uޓ"nor3mlx|l1Un!WL[if/k.H P^<:'A9xc#}y2 d5ߦwsՊ]6[.4RЕ6.4R/;1+c0 m<)B,[ַh74~f!B!B!˔{ڷ줇sW/v}JS&')#_^{?z?sGt'j[ᚻ횃ٷFzP#rt:fBdq='5[>y+r?EՠRe5DvN5{&vPD<#ͥyxg[l3Z̞~=ZGkfaʳ{jVD[&O?ߟdq)~v8#^.e"맡[8d5̳ZRf91њQl](H{Vwznϗߣcc2[FkFcH7:T3m< ]qǔxgSL33@[>_D'5{γ}n4[Is̥;(p{S73R\؆7>ڢFϪww^[~/cV3ỉx6gb4{yre=PHٌfk0h\z2cגke1Hc<6{Ѱ{(϶q(ͬFp-': "sƞ(,ƑN4r;[̿ޑr5{9]鵚Gkfy}hf'Q\zb4xψS8{P{uOӼ2g4Oع-6!;W΂鵚5lޕ?-w`iv|6^j6o#tq.PQ 훡\F{hg521y"nB1x[[5:?GB!B!BVྠdhXfK>]^{H=5{W湻9MxD5N_EwƮۯ6݁ށTGe 8wl40-컌{S6 {uǶ׌G[̾CsYxebi]-zk'ŷW3E3ٱnэt8Ywjtgܢ /mFq9~ iٟiN 9~ݲx\xwݧY-LQԌ?f`c6 М7Ѣ׋7'ROk8"ѳbٖG:i=@߳q)V˷d}iFz@ݭ[51C3;kG3kgh>mnvq%OP4#68wtG;Bmoh^z97< bCެ/"5dio`'Oh {9{oeq|Vcm(s{߭qh_}xi^'"|:!kzw~Cl-_Z\-}X!B!B!Lok蝛9S}A=wrnZ$[G?Z4??Di?P=.;nܮ;i_&w;5g㒱r @Ӭ}4Gfbj85ŵeǏ-`Zu iehoܦm4cCOKoیR(Q̸9*m,{bV_3֖Yj)Ɩahi|=qr%mW5~\WzyOhLQ1-xkQԼo]ol9j=}E͑m{)p ˴#]t ͬbױc\Ll{^\kڌ#ë#0ֆ>Kڀzws͙zޯظyײXOٜё)y2-] U_8ܤ}3/@}t{chXF_{r4eު OEjOE9`Mk\t_ >*ߋB!B!9&ʾmMЮgeH=EVtX ٽ)٦=zOǓ|1fsϪ]x?ێt#ݟ7SEGFS8 
t5Oҝh7~DiSۦ^$W|Q{kGCl%kGW[[CsV_xeb<:[{5:GƸo4iW7fv\ܣWg4`};@ڬ}y]?meb}~m1!ZijUg͙˫7Q|Fy`5{ٍ6={fv5#˙+4{1>'eY4v<9|>{f1b#q@~Dv_՛FjjV^|i ld9gbތn1Ysn0l42qŪőŚnv,-tÔM^߳c}Y=寵;!뗞rڂVssOQd-_N#'-@(ejigtZ;b4t\ln~{}MzkX}47+a4>+~q$ߦW!B!'fuoi]ߔlxzٍc9eXIkcB-hxrQ2yRTz4Gys͠U;5&Xhqt 1mϊefN)[߅?bG1FYqڳGlm8= j;Tq#׹Fыۼ|;*ֵ\rUbV-~1fv\ t!Ĺ}u [i73sWsW?ы im#xgkZP(]NxUݱlgfsF3gic52k{/;?,lTv3*چw̮ߵz2sЎ+vXyՅ4ܳgĸ֜ͱHw"s?;S?Yfwd,wfN5[ߟCozw(Lka cԚ3$[>쫠1&m[pu В/'b+u3n1Ӂ|Fs}]lYۑP?ea[wRcqg=ڼ` 3}oh7ZٳҌg~'}4'ܶ܍gKuX(ow<d/W=,ld֋w9OT&*|dYՇ9NV= (lb6D\}jwǼxrp h?Ҍn]b~AH4'Ɗ7ߑYaٹ,ΐO6w붚i,G|zk͵[ǥH;+>#;fkn(yl*sY߻=Gqd6vYLf^w޸ZۍxuaƬf[닻n%^β22VgCuxqby`+*{N&~^9vN_xs1Fq&Z#2͑KoC=s\GKa5=w'\d an+גuVzw2k;^?} e:Z)/B!B@ə2 `5oYEkAŢTht՘ܴ~ek|QW֭p8H/Ӎ|@snvGm}|ȘF`ƩwO%a_kQ~Ohb2@pi;8}Gs~dG1fr"/ܔ<D66H3tjNsd^m^|Q:wCb lW]4BުyJA-~hpYcs:YYC=#.vjδŲ{mZ_fCs:2[bkV~o{6x`[[qmw-X^Oson}DJ9G6ZD`Emzsf/_!h+5(Q>tkp[QE㽵ӛyްysEh5g ٜm}1f ͡2/}^Q2L=?zMŃEq7_5z3kdH38iXy\@]޵S5oyyv|jwU ,(G=6!mߋ!ogC}82CA`w^2k,W5v ;@!qQ=dr–Z)̹Twvbye~-["_4A9ߩ 6ڟ~sf s ^u?w:}|rVd,(W[g1o^7r-s&l~}^9=ؘ횠B!Bz566xZgf^f5?bd3Z h,z׵J['{{E_cbP=nvT#;eqnߍA>hC̭+{v =9GNOfvom1.>:4y6 b׏Y>WL۱'ZӚ>&AKFe|{JkfE]ujFĚh`}%#hO҈l@cVmũ3w4b(,i_Y&/Wr;v2o|[60ݙX} [ܫ3ߝ1kEst;CsKn{mkL37+͙67w^c稟eiEQ;v}|h/ksK[c|Yif-]f7OCu6.3'##=(͙udw [e:صAVס YH/<:wмț Hwh6g5g=?Gh3zl0hY6=1O7z%ͅ\gzzɸ `3=/zzPEFsTgSe XDټH9`gVGt^8^.GZhۛMžn6MkSߝmḾ%/Md޷D6X]9~7eSʹo`1nu~ .բ2|$e;ir45Wouom굺EA~,Kak*(/V(~N!B!=/~*ޚ}~?Q^B˾?dž׎ސ~/:^:ZÍuuXtrh]p L)W,o[^_e\363_kn"z璱;ẉ9nrlϤqӞG"_޵"?/g{6eCw大7z4F ڀ1 {L󳜯46GlG}9 f 29sXGj]fG4>GqWOg_7rio_;o1^ܚAKn6-|'ΎK3jӌbbw]pnffK*/. 
=Qԟ=cԽ'칫~]w'^KTxϸ٢[yzZ o³[^wG`և[lZzՓɁ1{)QayY{LݬѴًٲ뽜B6Lδl7+h>|%^[4q ó i~L>mbm5ad/&uRc1|9=Y,6HKcMc9U4fxZrfl^gt벹򪇡Śj4Gy>2՜ɱ+h7jcV=DrfVu]-ڭgjzZblAhC1<;lQ]6lPn6Xnޡ!WUvü mBھC#?BˑlnzCccMLlc4<;|7cqՇbsm3gyt053eXО>{4{s/ތnw~i\n=#{cW!oӹI9l?HV3qf;Zr4{9hܶkUͰY ꜈w?=cs5g-Zw}ז3m3*8Q74Z>o*B!HzxUf-ۮF{f^V~v_ցʱt L|41Zo5OjFcW'~/#O쑶]Rw{-~5-1G޵Nsǖѹ=15Ҍ-z9(wխ3-Ė1ϵjFNڑq׺/Vͤ~4L?5Ps=ڪ:FCu#6&ZՏj> 95KQi44HsK^<=6]jGfE3w -|ͪ^m<39Cs3VoNЌ^QLudlE6]ɝH{͵3sK+l~S9E(lm+m*I٣z 7#5mk;ugso:٪=wzyFyxFVՐit= (>3{y66iņJmofEm檧9냡g=^GҒ3`EKLYuY2:N6z>k9gwv1'[&6GzmY[m?+@kMg1F4gEw;x%{k4:ZLDsd#^]ǐ>g%ngOf4*{q&H/?D9Íszk߲B|ujex":۴ZZRMg+U!B!ZoAٷ>kxvP-Z[7Ӯwo-yJz,hMil;ro v67uNyқvXۜuuٚlEf>ɭ$KbQڜߗ6qLm3u9^9`w-|`|Qc4kΝږh MGvdd޹Gm1ru4gM"{qOsӽ,k͵&kc43}j4dXؼh(=&O.idvtalF~b)j=Loߨٚ#];϶zնqEsٸGz{siFn9z=FjnљH_ҜY sQ/+7R3z>mZ[s:Cx`ylaѐiozizgs::Q({UHWִޣgټGoft>O݃4{y=#n.34V͎`Fka^(nvZOf.5%[|=W8o~F#,b쭵 rWs}}n\>g[3ʿfh>}?^6ޚmmsv\ϱ%|y-;0@W x+[,sk޼4|Es}fwvd;GN(&qܖ^=GGzo7ϗ::<>wc(mƬ y)˽w_IotflXQk ugo Z-L׮66v6?IB!b<=K#Z8EFo%t;yzzl'[kӛx߳kuޙ-!v[jL5r!㳌Y&ݪg3tty`Ǒu9O艹z3mYfA}x[i}{fQ=Y)q3;qf[(V~ uV_5-109}uѠmfk1eVVvM#=3eͽzY۴ޫ8zy߳v!Ξkړ?hņ ^~>r$C,'Y[AڭfdH,3ci} {rj'w%z~4> cW&3FwX7Ņָ^֞UMeCdu$yJX'1vi>h &ݓ_2ahCvv-H+OhNKu?ȬdFV7YG:Uٚw݀{rdj^tZCnVIdسet_סлA"YH7U7_boPl=>[_޺YS=C42Q?i#B!8[Ӵk<+w/5iUVZBd@z בVlT[ܵx랧5Eԟٺ6{'ʗ`5ᓔd KmkO#ΝV\Y[n{c2-{"22lܑ{0zk_V7;ikZM,= Y͇ky1;߽6fcl46(6m(~Cu<͑noM6#4gVU̅گQ_jmm.f[r idzwQ=W4> x1fq/=.jnق|3}KXcY#D{5grk31Yhf-{kfjoN7v^Lfk{OlМŶn9J?Gjf93һ?:RwT_|4ZosuNf;ҁɔGxQi>e 94gkmdz޳o:L[E]^n3XzwX7l/o0@{^~hf~rd(>vgY#h y-hٜ]'|?eIln]'|l%~ }-cxxϋYZ[i;Α}+4[U.5gOkshnWsFwL66-63>WNNGlyA4wby]/X^nj>fsl7[ct?wir zw37d 6yڳS'ޚ y® En,]]寣_z^FdWċ)ӽkF@Yr>fXE]?\\& @  +Ύ$τehoC$8Pd4oݚ烌vQ֪iFefkfͬk3H5+2(_b}Xk%)zzOu"`m̧ /ߧĚ؃>LpjbFYΰ3?oyԅ:~nPgkƣcEuѵ,龪9;z=e<_\(-1M_͔Etk5W5zGQ#4{GklZ`|FYɮmA_^3:*OϪ5ܫ9~>_ ^-zQf-[s$ۇr43zբ8UvXk9e&7<ْ>oܷ<`:z=g,w֧-fOt㜍A}Ϝ}yi3﫚#Dh6=͞gi>/R`mfcY1d]ɻb{8?A3s_urΑ8y?6k~(ΊN4hnVukV۳עw֘׌0挝fzbs-{JsL(s{Aq<_e;08Zcmzwt_ck/mn2m迨f9`uy׶8jn>gjbKfʌ\ן]R.厭g~ Zk?G^LЮkfH=GLh;c3]Y[=rPVѵQM4G<ht|@S-+Oqvܲ5l,͑Wqga`(͵͵)kcf-{-m\%VT7R3ʽQ3>96Zs1o-U-g-QnX6G<_ԴmBzﳚ$4"2k=^4&.: 
W[\dHzKk~u}OoңZ69%ߴe5gmOjG̮5F"}W_w"zyZOגoܸme ؟!fSzw3f7ϮwNjͧY:՜)ޝngKt{]~3o)B!byѮkt4u..{:^bie1tGplujO3ZE}t݃d+,/죺_GvoP&ƑOT^~{ZRѾ]GL_mb]_0 v5ExwiXZꚢU;{zݏA礬n/5Xv0<}Q"lgn'9:8bS amn8XrdEٜFhM}ͭ`t2WT׬8{/Wvlmagݣ'imaοJ9|k#m(lZ=cf8zYٺGgl樭صkolZkbmǏ4{mf4e^͌3'66Q1̴uQs&~9=3Oe_vyi=ʿ4{vebtQl^ЛWD~9ӏFiOs8]mS6[ {Hw׷l}ьy WSn(f?f}طģTpCj,L;7dÔ;o^\.;u[Sxk?߳ߠw'ZbުqB!B!~ly34Z9mnǫ`]sG6'VƕŷAWȮ-E٨NoޮۿӔ-굹G>Fe3{-4=o.6)O6PSyޫ9Ssؒ)?͵H[ٔѢ>]$l\i^Vz{/'Y9mA]݉Fŧ>Oĩk{t.Q7p z݋c|drڋZe2H3ʭll#ͽ>9yV3=ƚ]Y6s;7khVo]ELv]O1As澒]ӥufiG{òz붼gpDt챫my c~Q3__TjԱ {K=u]IK>֌fGZGv@3u&31[}&ƙ=cUͽmu!B! hSA]Pw!kI/zZN(^LkhmӞ 0 hwd)eƟl}h}sȁ(#3ӑ)wʑ|DY.g9nu HcDfN+ydߙ|Ss>d'-񞦛עbѠ;=1"ߤ~#ctVsw!h/(=''jѽ٫qծ+zGw˜m%~~]5:O)48{|7ҼWE]٘Ex5vƉ,}9wXn'rYсƾHoGiLJh̏|qFqQ6^llif9pYE#-{|׵.j:/ߏ[Ͻ>/+i1J՚ՎzFu KRSϼaHu[Yrylg~Fso2pas$<*Vc+g}WN0oW^}Oxw6gr'b| 'ZG`}25}+PUxE9s˫\}|g_Qk w^9 i Y~c\{ B!Bqf_qߛXֽz6tzltEl=Tڲ',[׶ذa} io5uGbf~ez2>'/F/,o\kl]v}[b]afsw3&=+S5(jeꝙbT)LL\c`X{{cZ([k4u+s7EGߠv^э9;HsbX3?}靡9چk20 x{FkcՓ_ jՐѓ4W(h%[.{{4l>d1ZsK z4xTn_wiگޫZ'o{5tbڸLKnyڣ;+Wofњ֜)ۃm8_ ]e[ffNt1Fv8 {/㞜Gu }v41zbuG i'N-ln cC<7mӿGj{=,͙~0SY=yb@M$;ٝl6exz磹7vzvxb+M{ʙ'(#Qn~k4o,^u~yhf͍'3}`\,%NJzw!}6(h 0=e2n揷ke ]Q!B!2o:oiӮ{.{uɾxZBq;,7~ē-Njd1p'sϰ}Bw>9[*{nH)ף־ cD}eoˍXdF;jk^hr\k`WV?^i66jfe3zgݭqp-{zu\ ϏjWo;\spq|f~/>7:GwU3g^+com^xxk??~b?@3;2Q<2g|Oqt-*(HwfO(-vdfwĽj8VpU6X"zDn3O>v2b(_Ǩg %϶7C3;ĥG3c?4ݢO̾^]}8g׆{ʺcGOѾne:Ftd~c4ތVdsft)(qod2_3;v329 jcbkf4ߺ؅l 'wblbv1ݙ1?Bqxk;6xZO/(YLin,wQnO/ʁC(5Q]R7l@0{`2{l2V]2 ]r1b{DzH=/ng+Ձ4v"i=;r2~$˃`Am;Q.D} ҌXg!C[oiI{?ݡ;;EsFP@#lm=4GlWYo"-tm{Ϟy#r}fFm^rs[]PWƮ6zk$=u|﬍#46i5>}7@3{Gjv#r.m=6Rs834Wkڷ+t9:&rP(.4hrfzk[[&ʍ)z=9ۗ]{w9#5vZ5ּFsЛ^.Ao1~oSʮr]otͭqf92r g]'.t~X/Ĩ$l{ޛ4>JݚQwk?Nm<'ņlG~K3}8pƪ'G`j畺"_(ۯ˱zk6~h:>YYGjGf}(+v>9/3ml Oa헙w-s:0u[̇Ak)V4GoE5Y޸92سFS>YB!B!|e;5P~m˗&oŞ;X xڼwf\a7!^wt(7k7|f:{:33wrds9k5Gsc꽓u1(mfo ?]7iqjg޸r>NOrmiy}Y͎uNS\g'c=%Eݩl G+Q]?1BoFϣ{.yQǝ5oWO~u}l^{q -?vnjlGiF{jή1q۶q|>R}1ᚨ;4|c|@{{#U@:=ϦlњG5go1?ݩj|{s 5cݯZlG[Ǯ?6.\9h{6X7c>lfe=!hc=b#'ƙ'{Fjq;s iz܄Ěit߽͇&=;ӌ|d|-ޚQ*V{ֻ֧)|t^)5hbřojiYSZO+B!B!<2빟뽿9h*ص{cV$z3FnkV]rۣ4=)7w,7\xP̿ff=_eI]:јoc U 
Ν{Y|<=Q|AypW{H3X=",=ό5یݗ9f1femowqVytVO|oYDm3/ϑi8a{9l;ܜkqڰu1w-Ӛ'M=a:FkFumLsP}p?XsO)bs/3rmdgwfSGAf&9a$Į\?`⚖5dK^12tj>yz|v\<4k|1C}6uݚaW!oNZ5}LKmx/ݳ8۷7FecTwt,?4{냧vo"(>Քg/Q^F}o$˹L׏'r`8Ge`,@~x~/n(9@φHOKm}+i}Gͳl֠y9q7ևQ.ߑOjq !B!B! ;+m߸;i*];"]kFWdܽw۫V-,1|}8޾lٹcltnn4Egc{&Gw)Tf"Gym˶B:ms7[@X\c;^dg壸eCNG`1rG:ڷw&yr.-:͙uhw7pwq̛n=GqF1#l'ObwcZGs?3O݉սmִU:Fϋf5{W[wgW۱oѻcm7j yzB!B!Bz'THeJf5=_-9پF{JОb=;ջ^d+q?xLwL;l_u}po{c@wfkvm9hƏ4ꮹӵ-^/?5onqiz1Afbn6i}WGsH3sF/Ҏr{JZs\4e5C$VO6Ox|5[3;мs=eUyG푿2lkv*;;r~sKz26Z0acr&({YZe&5Y)̀\i|leyޒӳPl~GZOfjư={ kQfSdF|ó-Ztݝk-r3IwmaʖS3=\ X8GmΝ9]xkEQa<7byUwCOi1}DfuLF1fϭOkZUhug=ͨ̊)ze=Ւ)ؽG-Zkw5VU+z>Ao ŗqVW_!B!B!B{ޯw4lyh{'T=W޻G{h |Z~?7[+`P~lb}*ב6^޵0nef7~"Ͻ_--QXN\||G}f!kn]vo''G(fQ=,QgkFيS8_iMhsOQ.󾟺&bseg{'iFfWS?h7O&ݣGj4Z$[[n"QyeuGcĮGϳ5zPdZtz>rLKnfiwG1l}|r纾;G1a8l.!dHW߭/Es8L?^FYAh<ƠܶsU4Ѻ4Ek j^eʂ4Xf46_AMzk&WAd?AoMXmW!B!B!B<|y޻h͛tV3gh2$ .ioe{h$A`C6OdYsqZQn{;G{Lmn8xjnZIͽv7@}iZ[V{C9}foѵͨ/(HwYI-Qn#}L%+VY݁a;7(c?y#Y{==]{cml1k˕4Gڝw{.eV/0{e6}*5)w,oYSUXz2zwv?3FE}m)[hOq{s=ˣ;n/S/'8{z*6G[c5,~lHqE-g|szsF9?=zl=wͷQuO!h~Ac9Q_ުj5ޚOһ6B!B!B!B!.R~͔{hWBoW dyK>/\Qwfc;}^h*h@Z wqиb9ӟY~2v/'^lGbinw݃m2D{ 8tZ=,X)}3f2ns#ͨ~[Ih b>n݋~43ݛnu~(O4goonGu깁>kfOa}z9 sf| 6ZbݢֽfO=ͨ\>d[O|">fsf]D?u( ʋz;ec4?:vUD3}iD城~b ,-c+Ž5QnˬЇXOV1j&8n&_Doj1C'h$5H'aB!B!B!Bz=2']Up?( T@̿y{BWd`H~"3hEsno+fYmlw]hFz4v{UO'n61?D>AeW]O7O@m;{c8װe/,OaXo=f(lUƱh>ݟY=vẕMryN3X܎r7ʛ48fuY2lm{܎޹x~,/x{bjȖcq~2ÛEq_aS+/άÏzt?KP/>f\E3+J}6m+ʠ ֯k3Êzw3e?O? 
R3no,Fc;syOƄQci.Q68y1HOt|It;HB!B!B!B!o#O|h_?ýe }ʭs޿{)^b{m|u#k]o~{2QiŘKk~<c&(V )q׵_r!(7y}|6XJ3덿h\C`>9nwgk{'we4NF֛=k_9JRqoQr?cyXA2ͬ/gu6${bVF,_?<i@9=gH3QcDb-+bLWn5;^~SwP1ϋGoU`5yoުwAQ;O[c} !B!B!B!B{o7{ h'ݏ]tV~A^n]rt6a>j#-fUu3p/z5+a5E)c6#ڋ39v1EK ~{R n'搫+G W!B!B!B!B!x1ӞB,ܧ}QCxnٲf>a;ZUoϾ˛bmϧ~ QZK3cELg}XmEyP_f3Oυ\Zێ'h̶96_~َ]^?_oBzy6έ$to&h cꚣfjYb|ߋ/{ꤹQ.#3 55+_5Q~jf7G/"V;7:vz/иy yrMZkQB!B!B!B!B!B!¡lݿ sT߻~ǎ4Y=CZw/v{Q 6GucM5)x_rXr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁ ׏ ]Uz H47/]"S,W;ߵ4wحי w c1f4զ*A?x/u0!%痁rUvr`9X,ˁr^v%%il^^L<qY#a:lކoA=1x:w+wk0kNI_; ?z?14旊f>Ğ'=؎cf,:v\ZkH8pαO›.r:>ʮ4foC>~ٰ_1>1iX,ˁr`9X)}=kQno!by/Ge^;of+BV׊-R04~>XI|OxJk/ fw=1F"Xq`5=E:ߡDB7! U\nCrх>_?{"=P*h8CB1ϰ#?P ˁr`9X,ˁ[hgAW@_yBo/+_N_(a?7˄x"KD=*XP'OE;+c;^UR%Kb+zPpG>_ 谷i_ |xlZ? ?T/cu 3ɗ3/h_ZALklN4+w淕.ʃ.vagOoIr`9X,ˁk }V6؍Wr`9X,ˁ:'z-WboxN# Nm׫4m7߆/-EslThcr7բ_kJ &.I'qt0vBzNp>䇸:Bˁr`9X:~S?<9v:Q۾<[E>D~Oa^azBx5PWw =u"@u[T*7m |_UV1C5D{SȖsU˽lEWqa #U={dTMmt~^D$F3YJwlņxkrI/L K~e2=%"/%܂W9q.ٽ`FCө+y 7]0{}RAw`' 8h6R7̄lq[c X;Hʢ9n|KOYn#ﰕSsL'm ci8E ׸{au7k10ȔFvh2٠5  5: Iz LP[N|&-=l` N;IB]w@Np8v_#~!VOkɮ`uoqx)Cl~uUV'5Ye^`Bm`g-wqO{bEh@=,?YOǮ&Vc瞳~/V?$_/DKr`9v@/z g=`}B RX_>u7x@}> :aG=1} q?jl8h.3y:=#D# $2 } ӇfZo̕؂enE݆)v{ɫ-&-D 7ni-sؼ\)?xsjV{^qFmAiCֹSKHG_Bpgur`,&~w9X\XCT 45zo-s]=]|A]~C<? 5yyAyG]G # ׹[lo &\ǡEz oE-ˁ@;;w]>0Vpv̩wh&%2dDT7az2'/g/+,3pf훛N3N\#\,Yuq,S~ 5:AcK:p.ᥥӹwID4z (1Y,T(QRLe0X5`Ȫ %Ȁ8%K-0} r p+yF'KEnLO\~hu7d`^Ҁcf <vnY2Tb/nj8ԖԳ#x kj~!#Heƫ}&GOvO\JD))?z5UrNF@av/?L|ꀶ翫& A$=Mˁrs[7Ay?:?|i PnUɤ3b؁4` +M*tʴ':w41xo,#'~HwK #M z)ءm2:#7y:ek4zCxx@;4p'uh6XБ똫p3\*2ْ9Ҕ^Cl=*wʘgh#>ʬT 2Z! 
y4y&ؤ|.ؿ)hOhyzsY߿XW[`L,2418\ T,ŀtT/`&IEl RyְM%B{ /XNV?RtR}LmţZ$G7$uq`Q{M,LFxUׯn^%Es+dQx.oe29 Dث"#Z.*D՞Xۅkp"gg;NToy;I YZ̎buvL h pu3C687_ľSx҈ZN?c .;"ߗ#TC?(yXW89r`98Zv 7?s&< t.4~=]_!ЁBwpōFz 6}^=C'p!v-pyg~>'HLъ=$ffh(u`]do`R9)'_q ]$6zWE:􋑮0btEhum{ĉԲ+ҿ Vw螁y,&)c"\1]BR?\±4vm׭Zˁr'jү?a*7lݽ4q [nҤ@c$KF&IpDbD-,lǖSqY)!Xs1JUst)>0m4ǢgC nI]s<@40W#qH?vr)4D3xfCCrpaZ;W?S L3\U[ ~pA˞Sۆ12U)\XB asӟ/2V ʘ(kؓ)uhC>zk#1 FIټșq0ZWܲ&15' * E<;1EW*z$jГ* HlilD buʮ;pQoܺywަ, Xq*  \]MnyxfrFggpo*#5wFN#hYx&mH143J}eLHww 8HLbD;x%bBlp~d0VŮnN.0pt! cSy~()Wk;O= 8n`3v'(dV8.,n LXcN`7 V$Խ&P d1Qdp#if$9v:]IF1r[ /xSm>';}3򦹕*lQ,jm*Zv#X"7qOu7F|)rӍ(_ߪ(l_Sd|PRvm}|sRs+z 7HZoZ{QJbRxHΣS}mena[8 qNqvmN W\8}vĎVatenmAOkSjS3ؤP#ѡdwNΤa2kH*x.p ;.f6w]Lҝk@|8Qؚ(Fsd7HoX " Mש85䰾! 팝α掬0%uO-x<:oA0pdqrFXs Xk ##p:H!U8\BTIS'zw(SSn y*7 Ք>Z>bhكhHA΀hubJ76ڜf.d~(rDNY4}}cD K]_D…=;zݚsFe龝ͱ^k,ZDxxtesnp#w dkFik='\u0V!70.۝^%{fV?iy88y/+G %[4V'B*k !{)ZX:x-C/#6V6Tܯ^9MhyA@ߓrGS|m엛AA}*oQ_jY3f;{)V {8\km;/ϼFc^r||X '=^cLkj4Fx,SWsh)>a4s7܉ .E wŴj>,|&aw2+ZvJgCSwW0LKΓa/!bhdf+P'kY 4X< utݡvMm8.1kRmŸЮctd֘i7]y5r!'5܏2 ^\9f;0{>sj`aJtweIms+Hâgf+ _@6YȚ㴹]b kg!jwfGpX'H)Lt9e?AC3j$s]K 6`/%e.[3'FKUu+[hqTIX||N @+HLT.F<?YͱȤI6wؼ]w;h T}y v[8T: nt;# Gud6=~0h nǯRC*k+/ze@RG+wޡ-wkG{l؁ïdO`Vfl',·]fpʨ ]&M ?߮'2)*k%&!,t0$0)evCr`90u XE<=,_n Cϣ I"?w2*zKx5CMo:t1)s yu_,'ӄE\հ2AJ^(#]s_p!0)_EM[#2V-{ܠ䢳# >E'U+>2򜫥 3do.Y祚+ޅœJ(:]VM Q'fLrl؅E# ƹqC'4c1$ L_bjZ-sZJz v]{心"}tϒGTh:=t.먋Z$NʌTF~.er N$T9̚Anץ".mNA֙"GPolǒrE!ÆI!;/lIsvN{dgѽMAsɀ!h1.+XD<0Z(aaEå&3WXv#0c & ,Hrf6rZe߃פ6qL؞Vѫf5Hu֞Y޲FEGcWr# `=78| 64=xPciݝ܋:6{ugK:5l xI< 6±%Df*pqy&a̧ * 'Nk}5ɫ A9`p:>B0wGU#*^K=CW9:bTq̷x{t &#}K'?BĞV]Uws/P~_щopvp:DD|)/߁T:O:Ć6.8\vpsXl j=z%M~ &[tK- Vo <UD ;sq%Swر0hYܲ㖱$夺q^}.QaD䰡03$NOhMP^^}vٲ\eeK4~Z#'HD;x&6u,͋-'O'XkQ(;Mma9N'CXT\xp3z?e7ՎBCi1<k#]/S\X ;lE PzDdނLeV Q$Evj_n#=bf{T/Wԙ.8j_(Μ3\VMBֺv,#q~xֻpBGs[8J`F['=VkZ:|'0e+}8U.0}ԍCCL[ɦ TG>y56 E*4̻!qmHxtFԜll'.9eSChqsxiX] cb[j C ]nC}t V7td$/TaB'*t"$r:頂.Srs|OzpԝDJn)y 8ڷM)KhY!pѭHnh_mb3+Cr)ل0D2ط^a䯋+K I6]p0­4 sW.e~߬Cms=XCsӘ;fawܝvV83~Fݢl=,Ԃ+Nx(ВyceV)\=ɚYd`e&Oht89QÓ> ߠU`5NaM ܢllFdR8vaeX6kp9=3x0w 
nZ_}A0k#o[0d,@_ Jt096q{$gǷ M}̲,tIera8JI)ڡOB[ي]0V/.qmG:}b .إd@>1|p^dF\ԏnVk샵? Ӗh);g:rO0# On0gyV6̻[m\>>Ra;xDC>C7/߲vwwBOK~uoRF״+ h>7%{]P[ҽ )ۍj`tLm>@l!./#Vh@aUw| + "χ:U/ tĂoz7":ˁ8g>?~4.ĝ4w~dɭG WZ%JXگ)r|Q @`+/ԈXf!sPQ=XWlU~2m/Ao5n>SXڝfOƮw`rH#nO²Z|a'ԐVX1MIHtEgؑBD%Ss.f_8@M>LLL '43X6]3Mj`j2۶Ռ%}K&Q V>)E S>UރcqGu$`GU$hqdޑ&SJ<)Q8XN`E -smg2x'֝ 0 [O B3Y #עv{ ]6_F@13qf6.G;le%UBJtn>O#OCϢTa5Ie8.Ā,A_y"3?UMy2BD 1^q܃Vxg_Պ°PWឝ#[:8alhMpSK2H{Z#$tRkYXyL23fо| |ciMUvѨmRLKYfہY;0k*b倞T}aH :q;{1%=?0?BB6EHziܛ5?& .E 0#סT~3jx]cˁOGF>Ydm@79wn^Rͣغxb}κ͛Ϸ#c7k []g6ԷMȓ v&4+d4_W#phֆog7pFJwuC-/w`'yLkh19pd ̑@iՄ" ?TQ|k l2.U-X੠fREx3 Fm_9efdbFL7@@TA{ -ZM4>` e|5GQJRGjAJ `o q(9i L'6bhpGHE;1]X$n&һCpc=λ??'.ɦyBw׌۳ީVVv5jg`v%qaB@d.Y_eS铙s:(2ɝ`wҍ 1q~ف]?SNw:^J63JF߃f/fѥn`)cLAìCp EZ~ljW0(yݟa]i= ƙژAtxgaKdc~1tĊqu7 g#B+SfڞK;NL jd KddWp4n>A̧O_Hkp΀ɢIy^ɉdn0\~ڵ.\eܣW}Zc=z'Y}a=r$0&\?@8U&74*dóxz7`h.++ E߉f1Mj.i<js+Xa8JF7L:U`A'2Xf藃n G}?B㛰s> =+r@u,{3-$7}D y|GiƔsE @$U]KsEXgC"nZdbJzvc zŞgWh/aUK*¹Ht+mF4a1A | 쬬(@M13y[v6zQ]fnV4V`a ։lbdy’ f~b Gpc4D}v S2CKYymV ~ϺKz7silclY bղlW<1$hq{N)o-34ThNd(r(?'G5כ8Tq85d=np$4q+.ׯ.|.n:Fgdj|S#=odb&c2u/`3ЂyF'@\#S&Dz!33jU׭wel_z*.'yt1[Rt^RaaNvfgkq\$ FdnAD\ a&J`!|8.XW\ہ ;- YPJWoy 6۶}ӜY";Ř=6v[:';<:Ic!u$Ww;k=)=h.{b^hGݽ!,"@RMhٯs{^ӕlbpTߞdW(^Vz+ 9sM~hϿdẎr>'_D%?&⛘๋}7^$T,ݷf.ty\F}d|m$Q*,ez:K\\{ C 6uj}WSk08,vv;Oc,;4q'L71*5b.lU"ư=ChX=j+֥HLISU#95pRl35}xU(MLeHŲGN0Tv (=setj HY5a Ƽ8*ա"X9Ccb*ה/jtڒ9=h\:M‰>U奮E #0͜CD[ n坙QK PtleWpdD>70*NG M諰祪NNwK#AqVp:1aEf3S>ZHU ۀ'#K ҈wq}+: Moq"UtT(xnֱqӽ!VkP'zLUzl$;d#|Z1Z[I8=XȰqahXv/I†}S%Qp]#[z`VuL/ViBq>h `&o߱gD+|9@>_"1@?}Qs -[[_o%| 훥q=М!\wZ6d =]Z]nC \I6dz]&phS̃pj jc70]dofV劰C(L3-0(el&G '@LL5ܜ@ixYW⬴wp9kxh&r+si)u|Ţ|=9Oh MM2)B~3,+7\qY-JH޹:> έJUӐdkPǣMF,FiLqq;eֱ:A|ic&<L`tSFN`'U!Xԍx:RLnn*(&9V銹 M/ث2(煢ᢾà݁5VpŮj}r}6+U^UF*f`rT{5_{D݊F t&cwn5L |[1ptXX`DYZ%6 KFpi^,Hn>hgKmac?ZSuoW{9OwEob?s#N"JpJ'5.Ϫ7R`%7~}Q|<}ܬ }oW!1l .*<9qP!#yyn%}Ň,1i6 8v,y&Uйx,- pphuEi7Գٓ; R'DЛEYՖT\P0Y{ctHκ{9{'t[?hM/hfSfi^zz[a9n* fgJ~SpBoHzxMƊ(/e1ic9o#̯,"]Gо g7 OˈsTR.@wM<+]ׄD;fT^Z4\ (chkݘ|&Yёc7,M/B x+A[a{S_ZmDZ0'N :?^:/ˁ?pLE~g$| ~ͯo 
x>W_|i}j>e]+)/Pvx }z·zX`Z*DRPI%ŲO|a+$hnAdiyja9Oe7 Qd`uDxGچ"kfEg_v8~wa r ǹ} mgBw9$LQ)+`ٓDk{OsSE07Aدv>;&^F5`{/vy`vՠw`/u6.gK0?b=w v}RS y4o{5&ɝY;9wԼ-4иC6o6w5[!t;yɐo3ͷUv^żG$ѳ#?OlRN^ 8_{^>.۾[ QwZ R&.]>0oP-u %M=b+vB 0y LNG0ږlwI9kڳ"lgg5 59?ƣu׋hlo4 E;v\:nC&~'}˸ˁB}f&y 7NΟn݂!=^+)h&= ׸Rx|uZ`R聚H:K#mә1]n6Q tttU~+'q7ԣBQ Rp~f&4]gLEZ瀪aN=Mn )q# Ov@H .Wֹ Wc3f.9a8=r$W׀*ȱ ݖ}0̗T'+y+zˁ iN0b. goǬeWi!A y4r^~dqQMt ABqy-e{LA_?ooh aw#4 )#-C+qCPњfL@^0ؕ3܈}ΰ~yN-~'ļ\poEB1Bѽ=ETHRM+/=0o0+[|}FĎsW fg{U0F ACkE[:snffQXf`٦vd#V%]5|8Δ/ ֑:]r`9/nIN?wo'n<|Zws n _Ʒ/ʉ@H/<_ H\0k '9?a] DI 6=W  ܭV=r-Pt`&PܴvBkt&|W 딺*[%qЄZhj26c=|+#]-S䁶 P'2iw):` [4c[-݂Wlxubc;'S7WP 3z8vǞ9Ro|S_y|~aռda4ɓ)]=23{}0S>VR2KB٦G7|{͙nU¤z\43y6b'"lCd7zYIznB*-)1 چ:hFb5:t26&>6Z,F;X?~e.=8chiEOC6B˕ЪAST& zy8fv3 ߱. `y¶X5kUsKBnL,mkN=[X,vS㻮`~E绶j&v__s)u/FX( @P/P j\t;wG)ˆi%瞺}J*RAp AKep!w`]nt;HGCtxN 1TqG0>ԃ&`(UmkRCPG-gqI<A{&=ou p)hDw{w8`];ІhӞ쓢9^Ɇf$/=&j=}#dJ)qYh>}µX&u|L'G \'?u5.ͳ_ȪB 9kNNʀ;F +*ÑϗޜَTe6qqìlNSp+dV6T-v;Iw;)+=--xgaA|`"8 }OFm#AhX% `y5Bp~6+^Q8͜߉ -e:Q$mKj=UwɶHrS{>I J}i_xVr`9q`sGsm};vnŊʷ'tj#?7ݢc+Rq_V8 xx #Hd:!\nCF|K ]{a^ir :V=SRbj oNSBp^@{pJ]vEmzt"G"u޺PcWG= ڊsFV/~&V'Wr~Tx-׈ljbBP٢;G?$jx@/`_]cV?UT ) MA;pi%(Lf2t\T[ﰕ.5#N 7"v}pq{.H-xSNv,}ɠWqC|Wl:5, M"30VDצk ;nj/f^Dﰬ`azBӷ:l<ӽM5`| >dUDZBiD{69rBgRlj-ˁ.e-OnQK2|绽ewx p_Hds@^*yP~(hy\cX`>ËoO`r) P4RZx3{ZXH066ܿ@"Z,mgrݹixGT-u:6ɾl^> [2+t4QKEܰpc"$q54_5yX"R ,t,-ZtaMyr50DrL])l,"wpR13Y-NV)n:FpNj~CQ6p vx*2DF*ͬd7]ArwoWT@_ 쁤υ vd&]%RT a{ʌuDw߁c$Prv0g/W]\>a-t "F.73쒭`i(New\N-fM2x8mRdy*nJbrhcִ` MC w~.Ƞ/]}| ƘOxr`9X;0'F+!_EK"^|۹ _jP^g֡ɬF(OAV4/pjs<Ha0U?ND+9ԸWmPs' =[pVJ7NL7ufn^ʁ"钔ZDZ)v $mA!ۑ9xҲ|VE)现>;n8`[黗sI* xk]u'FIYnujK9JGd$K7KK|om ]MijP4 ._wfVG5+Yo11F# 5]Xg]d ~J[̸̷D[KU~,PO ݨ:7&FR)eW$C/ƯÊH0G V$L{yE^=ZdTωx<Vr h )peb ~7kZvDHw^2d$\U=z{CW֙mVywrt\¬r`9/3)8?T_߫| c|?LZ|Gg GӐqfu`ˣo_)>s T8a#:p1*@ݵBpSm`%#}8QeBE[ pM=u5W)tPFcMq_5UpCw2sfE.ΐ15Zks)L$|9ͨ<1x=D/@_:“GȬ,Sp>xXP)X+8}iW?8idiơSLfIѻduo+W#qGMiJ}o/ wHabl<Ѧn-1wv=}FT.SdB8g)p%  WHxB\A`09kRTvlv1kk^2f,Ncjs,o:W'$o$ךSS=X @q n`b3H;,5sG7 .X<[gp,&F_lh 
6ԧkLkݩ2Uv6^43#LQ[=jkc1K.\aH6At0n+tBs_%w^ַAj]?ݥXr`9X9G=P|Je6J QNCkaf1S!ǀ~ѡd_g@֝R%d+m8uiy=`viqdN5T^/0^nSJ/^w|zF3 k74^j37ɒrJ( iOyNn 9U źfkD'*{8\lYVΆLcv$@Iuuo`%eruv_?.~3T4VӷրvG37+j9X,ˁ`,G>?E\/ux O5y@ӏ~?t [< ^cr`9X,~@@=EǢOώHOs񗧹(:)x?d<~§ <~|s͂U UV/Pa=uv8'^ f|A3$ m18'$ 3{&DRtQb#եT*WU5?QwprT4&#^猰sywX9+C嶚g0Cv5Ȝ_+¹1W:y 2#~\6OFC s}`O&RW'ld!bwXVpUbNSha뽝BmXR>"7ԖbS-juou6gx|:cj0ctحXЁXixPY>.xH20%wjG~?$f/ˁr`9g(Ob< G<,HN Y #cs^_ǫu'uz?sWפeWyw 7f K4 ^Xy gx"S~mn;m_=L]Z0m xsǦ\ `N58NV\gĂm*@Cg%Fg8%?jFE=TEu|+VHC\/G31ܟaEZ @Vr`9X,ˁB~I '?>F kLaH0"Nj^6#Xlr8݊#?1 XaI:dxcf;DxF2tņEB^q96̲Gpevl!6+R(_706/D W'z"VqOu]j@B~G'NNB ۯAj_>,ٗXS.O%VL|>2V~%3)'xOmަ> C 8o(eytˁr`9X,3%aY^_ GP?|~ckb}D Z3ix 3s>e't~j~6@_h 㚕>E%?ȴo`#(23Nvnc㯎PolMY>UsNhh-3njclr6Gp/`[Ϣ.fM?CA#sJs'F_!891Hˁr`9X,ˁ8yWy]tZo>ECXޮ 60U"74+В#uj_?f PSϫ%V9hwLjX5=V9` ,ke t;Ee#ԟ}џkF6|ܟτJ3(BQšj,D {NWR?ԻGgN[,ˁr`9X,s:w {ŏ|''_4^ĻhkJ7 Bߎ+j1Uw`n?v~#UoRѼŃufk?Ăj*5WfޚeR0ῃƟ$JˏeHǏ`S_Z0_z M#qsJH},ˁr`9X,ˁ׿tZ^&y#?o, m7`03 fH5ˏ ~UvNy_al0XU+gbl_WA~SvoapGП`b_~.nW'_-Zˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,ˁr`9X,|ͮ@q&nipype-1.7.0/nipype/testing/data/session_info.npz000066400000000000000000000000001413403311400221240ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/sh.mif000066400000000000000000000000001413403311400200040ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/skeleton_mask.nii.gz000066400000000000000000000000001413403311400226540ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/smri_ants_registration_settings.json000066400000000000000000000054441413403311400263220ustar00rootroot00000000000000{ "convergence_window_size": [ 20, 20, 20, 5 ], "winsorize_lower_quantile": 0.0, "output_transform_prefix": "output_", "transforms": [ "Translation", "Rigid", "Affine", "SyN" ], "initial_moving_transform_com": true, "metric_weight": [ 1.0, 1.0, 1.0, [ 0.5, 0.5 ] ], "sigma_units": [ "vox", "vox", "vox", "vox" ], "convergence_threshold": [ 1e-08, 1e-08, 1e-08, -0.01 ], 
"sampling_strategy": [ "Regular", "Regular", "Regular", [ null, null ] ], "shrink_factors": [ [ 6, 4, 2 ], [ 3, 2, 1 ], [ 3, 2, 1 ], [ 4, 2, 1 ] ], "winsorize_upper_quantile": 1.0, "metric": [ "Mattes", "Mattes", "Mattes", [ "Mattes", "CC" ] ], "interpolation": "Linear", "use_estimate_learning_rate_once": [ true, true, true, true ], "write_composite_transform": true, "initialize_transforms_per_stage": false, "num_threads": 1, "output_warped_image": "INTERNAL_WARPED.nii.gz", "sampling_percentage": [ 0.3, 0.3, 0.3, [ null, null ] ], "number_of_iterations": [ [ 10000, 111110, 11110 ], [ 10000, 111110, 11110 ], [ 10000, 111110, 11110 ], [ 100, 50, 30 ] ], "radius_or_number_of_bins": [ 32, 32, 32, [ 32, 4 ] ], "environ": { "NSLOTS": "1" }, "smoothing_sigmas": [ [ 4.0, 2.0, 1.0 ], [ 4.0, 2.0, 1.0 ], [ 4.0, 2.0, 1.0 ], [ 1.0, 0.5, 0.0 ] ], "use_histogram_matching": [ false, false, false, true ], "transform_parameters": [ [ 0.1 ], [ 0.1 ], [ 0.1 ], [ 0.2, 3.0, 0.0 ] ], "dimension": 3, "collapse_output_transforms": false } nipype-1.7.0/nipype/testing/data/spmT_0001.img000066400000000000000000000000001413403311400207560ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/spminfo000066400000000000000000000007331413403311400203100ustar00rootroot00000000000000fprintf(1,'Executing %s at %s:\n',mfilename,datestr(now)); ver, try, if isempty(which('spm')), throw(MException('SPMCheck:NotFound','SPM not in matlab path')); end; spm_path = spm('dir'); fprintf(1, 'NIPYPE %s', spm_path); ,catch ME, fprintf(2,'MATLAB code threw an exception:\n'); fprintf(2,'%s\n',ME.message); if length(ME.stack) ~= 0, fprintf(2,'File:%s\nName:%s\nLine:%d\n',ME.stack.file,ME.stack.name,ME.stack.line);, end; end; 
nipype-1.7.0/nipype/testing/data/streamlines.trk000066400000000000000000000000001413403311400217450ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/struct2mni.nii000066400000000000000000000000001413403311400215100ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/struct_to_func.mat000066400000000000000000000000001413403311400224410ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/struct_to_template.mat000066400000000000000000000000001413403311400233210ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/structural.nii000066400000000000000000000000001413403311400216060ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/study_template.nii.gz000066400000000000000000000000001413403311400230600ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/sub-01.L.midthickness.32k_fs_LR.surf.gii000066400000000000000000000000001413403311400257150ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/sub-01.R.midthickness.32k_fs_LR.surf.gii000066400000000000000000000000001413403311400257230ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/sub-01_dir-LR_epi.nii.gz000066400000000000000000000000001413403311400230320ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/sub-01_dir-RL_epi.nii.gz000066400000000000000000000000001413403311400230320ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/sub-01_task-rest.dtseries.nii000066400000000000000000000000001413403311400242230ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/sub-01_task-rest_bold_space-fsaverage5.L.func.gii000066400000000000000000000000001413403311400277200ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/subj1.cff000066400000000000000000000000001413403311400204010ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/subj1.pck000066400000000000000000000000001413403311400204200ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/subj2.cff000066400000000000000000000000001413403311400204
020ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/subj2.pck000066400000000000000000000000001413403311400204210ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/subjectDesign.con000066400000000000000000000000001413403311400221670ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/subjectDesign.mat000066400000000000000000000000001413403311400221710ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/surf.txt000066400000000000000000000000001413403311400204150ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/surf01.vtk000066400000000000000000115366221413403311400205750ustar00rootroot00000000000000# vtk DataFile Version 1.0 vtk output ASCII DATASET POLYDATA POINTS 31034 float -32.035343170 -33.840930939 -35.797355652 -31.336599350 -33.903381348 -35.879737854 -30.442840576 -33.946811676 -35.937088013 -29.481933594 -33.968864441 -35.965759277 -28.495168686 -33.977611542 -35.976860046 -27.498950958 -33.980327606 -35.980201721 -26.499826431 -33.980976105 -35.980960846 -25.499979019 -33.981090546 -35.981090546 -24.499998093 -33.981101990 -35.981101990 -23.500000000 -33.981101990 -35.981101990 -22.500000000 -33.981101990 -35.981101990 -21.500000000 -33.981101990 -35.981101990 -20.500000000 -33.981101990 -35.981101990 -19.500000000 -33.981101990 -35.981101990 -18.500000000 -33.981101990 -35.981101990 -17.500000000 -33.981101990 -35.981101990 -16.500000000 -33.981101990 -35.981101990 -15.500000000 -33.981101990 -35.981101990 -14.500000000 -33.981101990 -35.981101990 -13.500000000 -33.981101990 -35.981101990 -12.500000000 -33.981101990 -35.981101990 -11.500000000 -33.981101990 -35.981101990 -10.500000000 -33.981101990 -35.981101990 -9.500000000 -33.981101990 -35.981101990 -8.500000000 -33.981101990 -35.981101990 -7.500000000 -33.981101990 -35.981101990 -6.500000000 -33.981101990 -35.981101990 -5.500000000 -33.981101990 -35.981101990 -4.500000000 -33.981101990 -35.981101990 -3.500000000 -33.981101990 -35.981101990 
-2.500000000 -33.981101990 -35.981101990 -1.500000000 -33.981101990 -35.981101990 -0.500000000 -33.981101990 -35.981101990 0.500000000 -33.981101990 -35.981101990 1.500000000 -33.981101990 -35.981101990 2.500000000 -33.981101990 -35.981101990 3.500000000 -33.981101990 -35.981101990 4.500000000 -33.981101990 -35.981101990 5.500000000 -33.981101990 -35.981101990 6.500000000 -33.981101990 -35.981101990 7.500000000 -33.981101990 -35.981101990 8.500000000 -33.981101990 -35.981101990 9.500000000 -33.981101990 -35.981101990 10.500000000 -33.981101990 -35.981101990 11.500000000 -33.981101990 -35.981101990 12.500000000 -33.981101990 -35.981101990 13.500000000 -33.981101990 -35.981101990 14.500000000 -33.981101990 -35.981101990 15.500000000 -33.981101990 -35.981101990 16.500000000 -33.981101990 -35.981101990 17.500000000 -33.981101990 -35.981101990 18.500000000 -33.981101990 -35.981101990 19.500000000 -33.981101990 -35.981101990 20.500000000 -33.981101990 -35.981101990 21.500000000 -33.981101990 -35.981101990 22.500000000 -33.981101990 -35.981101990 23.499998093 -33.981101990 -35.981101990 24.499979019 -33.981090546 -35.981090546 25.499824524 -33.980976105 -35.980960846 26.498950958 -33.980335236 -35.980201721 27.495168686 -33.977619171 -35.976860046 28.481933594 -33.968864441 -35.965766907 29.442840576 -33.946807861 -35.937091827 30.336599350 -33.903377533 -35.879737854 31.035345078 -33.840930939 -35.797355652 -33.030693054 -33.030693054 -35.846313477 -32.334243774 -33.254276276 -35.954723358 -31.405673981 -33.216838837 -36.112342834 -30.460596085 -33.220722198 -36.185966492 -29.486719131 -33.227870941 -36.217514038 -28.496377945 -33.231601715 -36.228958130 -27.499221802 -33.232868195 -36.232299805 -26.499872208 -33.233169556 -36.233074188 -25.499984741 -33.233219147 -36.233207703 -24.500000000 -33.233222961 -36.233222961 -23.500000000 -33.233222961 -36.233222961 -22.500000000 -33.233222961 -36.233222961 -21.500000000 -33.233222961 -36.233222961 -20.500000000 -33.233222961 
-36.233222961 -19.500000000 -33.233222961 -36.233222961 -18.500000000 -33.233222961 -36.233222961 -17.500000000 -33.233222961 -36.233222961 -16.500000000 -33.233222961 -36.233222961 -15.500000000 -33.233222961 -36.233222961 -14.500000000 -33.233222961 -36.233222961 -13.500000000 -33.233222961 -36.233222961 -12.500000000 -33.233222961 -36.233222961 -11.500000000 -33.233222961 -36.233222961 -10.500000000 -33.233222961 -36.233222961 -9.500000000 -33.233222961 -36.233222961 -8.500000000 -33.233222961 -36.233222961 -7.500000000 -33.233222961 -36.233222961 -6.500000000 -33.233222961 -36.233222961 -5.500000000 -33.233222961 -36.233222961 -4.500000000 -33.233222961 -36.233222961 -3.500000000 -33.233222961 -36.233222961 -2.500000000 -33.233222961 -36.233222961 -1.500000000 -33.233222961 -36.233222961 -0.500000000 -33.233222961 -36.233222961 0.500000000 -33.233222961 -36.233222961 1.500000000 -33.233222961 -36.233222961 2.500000000 -33.233222961 -36.233222961 3.500000000 -33.233222961 -36.233222961 4.500000000 -33.233222961 -36.233222961 5.500000000 -33.233222961 -36.233222961 6.500000000 -33.233222961 -36.233222961 7.500000000 -33.233222961 -36.233222961 8.500000000 -33.233222961 -36.233222961 9.500000000 -33.233222961 -36.233222961 10.500000000 -33.233222961 -36.233222961 11.500000000 -33.233222961 -36.233222961 12.500000000 -33.233222961 -36.233222961 13.500000000 -33.233222961 -36.233222961 14.500000000 -33.233222961 -36.233222961 15.500000000 -33.233222961 -36.233222961 16.500000000 -33.233222961 -36.233222961 17.500000000 -33.233222961 -36.233222961 18.500000000 -33.233222961 -36.233222961 19.500000000 -33.233222961 -36.233222961 20.500000000 -33.233222961 -36.233222961 21.500000000 -33.233222961 -36.233222961 22.500000000 -33.233222961 -36.233222961 23.500000000 -33.233222961 -36.233222961 24.499984741 -33.233222961 -36.233207703 25.499872208 -33.233173370 -36.233074188 26.499225616 -33.232868195 -36.232299805 27.496377945 -33.231605530 -36.228958130 28.486719131 
-33.227874756 -36.217514038 29.460596085 -33.220710754 -36.185966492 30.405673981 -33.216838837 -36.112346649 31.334243774 -33.254272461 -35.954723358 32.030693054 -33.030693054 -35.846317291 -33.840927124 -32.035346985 -35.797351837 -33.254272461 -32.334243774 -35.954719543 -32.371025085 -32.371025085 -36.154674530 -31.438953400 -32.375358582 -36.280395508 -30.476003647 -32.379226685 -36.344284058 -29.492319107 -32.382312775 -36.371490479 -28.498050690 -32.383808136 -36.381023407 -27.499622345 -32.384300232 -36.383724213 -26.499948502 -32.384407043 -36.384311676 -25.499996185 -32.384422302 -36.384407043 -24.500000000 -32.384422302 -36.384422302 -23.500000000 -32.384422302 -36.384422302 -22.500000000 -32.384422302 -36.384422302 -21.500000000 -32.384422302 -36.384422302 -20.500000000 -32.384422302 -36.384422302 -19.500000000 -32.384422302 -36.384422302 -18.500000000 -32.384422302 -36.384422302 -17.500000000 -32.384422302 -36.384422302 -16.500000000 -32.384422302 -36.384422302 -15.500000000 -32.384422302 -36.384422302 -14.500000000 -32.384422302 -36.384422302 -13.500000000 -32.384422302 -36.384422302 -12.500000000 -32.384422302 -36.384422302 -11.500000000 -32.384422302 -36.384422302 -10.500000000 -32.384422302 -36.384422302 -9.500000000 -32.384422302 -36.384422302 -8.500000000 -32.384422302 -36.384422302 -7.500000000 -32.384422302 -36.384422302 -6.500000000 -32.384422302 -36.384422302 -5.500000000 -32.384422302 -36.384422302 -4.500000000 -32.384422302 -36.384422302 -3.500000000 -32.384422302 -36.384422302 -2.500000000 -32.384422302 -36.384422302 -1.500000000 -32.384422302 -36.384422302 -0.500000000 -32.384422302 -36.384422302 0.500000000 -32.384422302 -36.384422302 1.500000000 -32.384422302 -36.384422302 2.500000000 -32.384422302 -36.384422302 3.500000000 -32.384422302 -36.384422302 4.500000000 -32.384422302 -36.384422302 5.500000000 -32.384422302 -36.384422302 6.500000000 -32.384422302 -36.384422302 7.500000000 -32.384422302 -36.384422302 8.500000000 -32.384422302 
-36.384422302 9.500000000 -32.384422302 -36.384422302 10.500000000 -32.384422302 -36.384422302 11.500000000 -32.384422302 -36.384422302 12.500000000 -32.384422302 -36.384422302 13.500000000 -32.384422302 -36.384422302 14.500000000 -32.384422302 -36.384422302 15.500000000 -32.384422302 -36.384422302 16.500000000 -32.384422302 -36.384422302 17.500000000 -32.384422302 -36.384422302 18.500000000 -32.384422302 -36.384422302 19.500000000 -32.384422302 -36.384422302 20.500000000 -32.384422302 -36.384422302 21.500000000 -32.384422302 -36.384422302 22.500000000 -32.384422302 -36.384422302 23.500000000 -32.384422302 -36.384422302 24.499996185 -32.384422302 -36.384410858 25.499948502 -32.384407043 -36.384315491 26.499622345 -32.384300232 -36.383724213 27.498050690 -32.383811951 -36.381023407 28.492321014 -32.382312775 -36.371490479 29.476007462 -32.379222870 -36.344287872 30.438953400 -32.375358582 -36.280395508 31.371026993 -32.371025085 -36.154674530 32.254276276 -32.334243774 -35.954723358 32.840930939 -32.035343170 -35.797355652 -33.903373718 -31.336603165 -35.879737854 -33.216838837 -31.405673981 -36.112342834 -32.375358582 -31.438953400 -36.280395508 -31.451213837 -31.451221466 -36.380989075 -30.483785629 -31.456089020 -36.430477142 -29.495611191 -31.458080292 -36.450424194 -28.499073029 -31.458770752 -36.456943512 -27.499858856 -31.458948135 -36.458606720 -26.499988556 -31.458978653 -36.458934784 -25.500000000 -31.458980560 -36.458976746 -24.500000000 -31.458980560 -36.458984375 -23.500000000 -31.458980560 -36.458984375 -22.500000000 -31.458980560 -36.458984375 -21.500000000 -31.458980560 -36.458984375 -20.500000000 -31.458980560 -36.458984375 -19.500000000 -31.458980560 -36.458984375 -18.500000000 -31.458980560 -36.458984375 -17.500000000 -31.458980560 -36.458984375 -16.500000000 -31.458980560 -36.458984375 -15.500000000 -31.458980560 -36.458984375 -14.500000000 -31.458980560 -36.458984375 -13.500000000 -31.458980560 -36.458984375 -12.500000000 -31.458980560 
-36.458984375 -11.500000000 -31.458980560 -36.458984375 -10.500000000 -31.458980560 -36.458984375 -9.500000000 -31.458980560 -36.458984375 -8.500000000 -31.458980560 -36.458984375 -7.500000000 -31.458980560 -36.458984375 -6.500000000 -31.458980560 -36.458984375 -5.500000000 -31.458980560 -36.458984375 -4.500000000 -31.458980560 -36.458984375 -3.500000000 -31.458980560 -36.458984375 -2.500000000 -31.458980560 -36.458984375 -1.500000000 -31.458980560 -36.458984375 -0.500000000 -31.458980560 -36.458984375 0.500000000 -31.458980560 -36.458984375 1.500000000 -31.458980560 -36.458984375 2.500000000 -31.458980560 -36.458984375 3.500000000 -31.458980560 -36.458984375 4.500000000 -31.458980560 -36.458984375 5.500000000 -31.458980560 -36.458984375 6.500000000 -31.458980560 -36.458984375 7.500000000 -31.458980560 -36.458984375 8.500000000 -31.458980560 -36.458984375 9.500000000 -31.458980560 -36.458984375 10.500000000 -31.458980560 -36.458984375 11.500000000 -31.458980560 -36.458984375 12.500000000 -31.458980560 -36.458984375 13.500000000 -31.458980560 -36.458984375 14.500000000 -31.458980560 -36.458984375 15.500000000 -31.458980560 -36.458984375 16.500000000 -31.458980560 -36.458984375 17.500000000 -31.458980560 -36.458984375 18.500000000 -31.458980560 -36.458984375 19.500000000 -31.458980560 -36.458984375 20.500000000 -31.458980560 -36.458984375 21.500000000 -31.458980560 -36.458984375 22.500000000 -31.458980560 -36.458984375 23.500000000 -31.458980560 -36.458984375 24.500000000 -31.458980560 -36.458976746 25.499988556 -31.458978653 -36.458934784 26.499858856 -31.458948135 -36.458606720 27.499073029 -31.458770752 -36.456943512 28.495611191 -31.458080292 -36.450424194 29.483785629 -31.456085205 -36.430473328 30.451217651 -31.451217651 -36.380989075 31.375360489 -31.438953400 -36.280395508 32.216842651 -31.405675888 -36.112342834 32.903381348 -31.336603165 -35.879737854 -33.946807861 -30.442840576 -35.937091827 -33.220714569 -30.460592270 -36.185966492 -32.379222870 
-30.476007462 -36.344284058 -31.456085205 -30.483789444 -36.430477142 -30.486968994 -30.486968994 -36.469234467 -29.496957779 -30.488048553 -36.483337402 -28.499475479 -30.488342285 -36.487373352 -27.499938965 -30.488397598 -36.488258362 -26.499996185 -30.488403320 -36.488391876 -25.500000000 -30.488403320 -36.488410950 -24.500000000 -30.488403320 -36.488410950 -23.500000000 -30.488403320 -36.488410950 -22.500000000 -30.488403320 -36.488410950 -21.500000000 -30.488403320 -36.488410950 -20.500000000 -30.488403320 -36.488410950 -19.500000000 -30.488403320 -36.488410950 -18.500000000 -30.488403320 -36.488410950 -17.500000000 -30.488403320 -36.488410950 -16.500000000 -30.488403320 -36.488410950 -15.500000000 -30.488403320 -36.488410950 -14.500000000 -30.488403320 -36.488410950 -13.500000000 -30.488403320 -36.488410950 -12.500000000 -30.488403320 -36.488410950 -11.500000000 -30.488403320 -36.488410950 -10.500000000 -30.488403320 -36.488410950 -9.500000000 -30.488403320 -36.488410950 -8.500000000 -30.488403320 -36.488410950 -7.500000000 -30.488403320 -36.488410950 -6.500000000 -30.488403320 -36.488410950 -5.500000000 -30.488403320 -36.488410950 -4.500000000 -30.488403320 -36.488410950 -3.500000000 -30.488403320 -36.488410950 -2.500000000 -30.488403320 -36.488410950 -1.500000000 -30.488403320 -36.488410950 -0.500000000 -30.488403320 -36.488410950 0.500000000 -30.488403320 -36.488410950 1.500000000 -30.488403320 -36.488410950 2.500000000 -30.488403320 -36.488410950 3.500000000 -30.488403320 -36.488410950 4.500000000 -30.488403320 -36.488410950 5.500000000 -30.488403320 -36.488410950 6.500000000 -30.488403320 -36.488410950 7.500000000 -30.488403320 -36.488410950 8.500000000 -30.488403320 -36.488410950 9.500000000 -30.488403320 -36.488410950 10.500000000 -30.488403320 -36.488410950 11.500000000 -30.488403320 -36.488410950 12.500000000 -30.488403320 -36.488410950 13.500000000 -30.488403320 -36.488410950 14.500000000 -30.488403320 -36.488410950 15.500000000 -30.488403320 
-36.488410950 16.500000000 -30.488403320 -36.488410950 17.500000000 -30.488403320 -36.488410950 18.500000000 -30.488403320 -36.488410950 19.500000000 -30.488403320 -36.488410950 20.500000000 -30.488403320 -36.488410950 21.500000000 -30.488403320 -36.488410950 22.500000000 -30.488403320 -36.488410950 23.500000000 -30.488403320 -36.488410950 24.500000000 -30.488403320 -36.488410950 25.499996185 -30.488403320 -36.488395691 26.499938965 -30.488399506 -36.488258362 27.499477386 -30.488346100 -36.487373352 28.496959686 -30.488052368 -36.483337402 29.486968994 -30.486968994 -36.469234467 30.456085205 -30.483785629 -36.430477142 31.379222870 -30.476007462 -36.344284058 32.220714569 -30.460596085 -36.185966492 32.946811676 -30.442840576 -35.937088013 -33.968864441 -29.481933594 -35.965759277 -33.227867126 -29.486715317 -36.217510223 -32.382312775 -29.492319107 -36.371490479 -31.458074570 -29.495611191 -36.450428009 -30.488048553 -29.496957779 -36.483337402 -29.497375488 -29.497371674 -36.494171143 -28.499578476 -29.497457504 -36.496910095 -27.499954224 -29.497470856 -36.497406006 -26.499996185 -29.497470856 -36.497467041 -25.500000000 -29.497470856 -36.497470856 -24.500000000 -29.497470856 -36.497470856 -23.500000000 -29.497470856 -36.497470856 -22.500000000 -29.497470856 -36.497470856 -21.500000000 -29.497470856 -36.497470856 -20.500000000 -29.497470856 -36.497470856 -19.500000000 -29.497470856 -36.497470856 -18.500000000 -29.497470856 -36.497470856 -17.500000000 -29.497470856 -36.497470856 -16.500000000 -29.497470856 -36.497470856 -15.500000000 -29.497470856 -36.497470856 -14.500000000 -29.497470856 -36.497470856 -13.500000000 -29.497470856 -36.497470856 -12.500000000 -29.497470856 -36.497470856 -11.500000000 -29.497470856 -36.497470856 -10.500000000 -29.497470856 -36.497470856 -9.500000000 -29.497470856 -36.497470856 -8.500000000 -29.497470856 -36.497470856 -7.500000000 -29.497470856 -36.497470856 -6.500000000 -29.497470856 -36.497470856 -5.500000000 -29.497470856 
-36.497470856 -4.500000000 -29.497470856 -36.497470856 -3.500000000 -29.497470856 -36.497470856 -2.500000000 -29.497470856 -36.497470856 -1.500000000 -29.497470856 -36.497470856 -0.500000000 -29.497470856 -36.497470856 0.500000000 -29.497470856 -36.497470856 1.500000000 -29.497470856 -36.497470856 2.500000000 -29.497470856 -36.497470856 3.500000000 -29.497470856 -36.497470856 4.500000000 -29.497470856 -36.497470856 5.500000000 -29.497470856 -36.497470856 6.500000000 -29.497470856 -36.497470856 7.500000000 -29.497470856 -36.497470856 8.500000000 -29.497470856 -36.497470856 9.500000000 -29.497470856 -36.497470856 10.500000000 -29.497470856 -36.497470856 11.500000000 -29.497470856 -36.497470856 12.500000000 -29.497470856 -36.497470856 13.500000000 -29.497470856 -36.497470856 14.500000000 -29.497470856 -36.497470856 15.500000000 -29.497470856 -36.497470856 16.500000000 -29.497470856 -36.497470856 17.500000000 -29.497470856 -36.497470856 18.500000000 -29.497470856 -36.497470856 19.500000000 -29.497470856 -36.497470856 20.500000000 -29.497470856 -36.497470856 21.500000000 -29.497470856 -36.497470856 22.500000000 -29.497470856 -36.497470856 23.500000000 -29.497470856 -36.497470856 24.500000000 -29.497470856 -36.497470856 25.499996185 -29.497470856 -36.497467041 26.499954224 -29.497470856 -36.497409821 27.499576569 -29.497461319 -36.496910095 28.497371674 -29.497371674 -36.494174957 29.488048553 -29.496959686 -36.483337402 30.458074570 -29.495611191 -36.450428009 31.382312775 -29.492319107 -36.371490479 32.227874756 -29.486715317 -36.217514038 32.968864441 -29.481933594 -35.965759277 -33.977611542 -28.495168686 -35.976860046 -33.231597900 -28.496377945 -36.228954315 -32.383808136 -28.498052597 -36.381023407 -31.458766937 -28.499073029 -36.456939697 -30.488346100 -28.499475479 -36.487373352 -29.497461319 -28.499576569 -36.496910095 -28.499593735 -28.499591827 -36.499160767 -27.499954224 -28.499591827 -36.499546051 -26.499996185 -28.499591827 -36.499588013 -25.500000000 
-28.499591827 -36.499595642 -24.500000000 -28.499591827 -36.499595642 -23.500000000 -28.499591827 -36.499595642 -22.500000000 -28.499591827 -36.499595642 -21.500000000 -28.499591827 -36.499595642 -20.500000000 -28.499591827 -36.499595642 -19.500000000 -28.499591827 -36.499595642 -18.500000000 -28.499591827 -36.499595642 -17.500000000 -28.499591827 -36.499595642 -16.500000000 -28.499591827 -36.499595642 -15.500000000 -28.499591827 -36.499595642 -14.500000000 -28.499591827 -36.499595642 -13.500000000 -28.499591827 -36.499595642 -12.500000000 -28.499591827 -36.499595642 -11.500000000 -28.499591827 -36.499595642 -10.500000000 -28.499591827 -36.499595642 -9.500000000 -28.499591827 -36.499595642 -8.500000000 -28.499591827 -36.499595642 -7.500000000 -28.499591827 -36.499595642 -6.500000000 -28.499591827 -36.499595642 -5.500000000 -28.499591827 -36.499595642 -4.500000000 -28.499591827 -36.499595642 -3.500000000 -28.499591827 -36.499595642 -2.500000000 -28.499591827 -36.499595642 -1.500000000 -28.499591827 -36.499595642 -0.500000000 -28.499591827 -36.499595642 0.500000000 -28.499591827 -36.499595642 1.500000000 -28.499591827 -36.499595642 2.500000000 -28.499591827 -36.499595642 3.500000000 -28.499591827 -36.499595642 4.500000000 -28.499591827 -36.499595642 5.500000000 -28.499591827 -36.499595642 6.500000000 -28.499591827 -36.499595642 7.500000000 -28.499591827 -36.499595642 8.500000000 -28.499591827 -36.499595642 9.500000000 -28.499591827 -36.499595642 10.500000000 -28.499591827 -36.499595642 11.500000000 -28.499591827 -36.499595642 12.500000000 -28.499591827 -36.499595642 13.500000000 -28.499591827 -36.499595642 14.500000000 -28.499591827 -36.499595642 15.500000000 -28.499591827 -36.499595642 16.500000000 -28.499591827 -36.499595642 17.500000000 -28.499591827 -36.499595642 18.500000000 -28.499591827 -36.499595642 19.500000000 -28.499591827 -36.499595642 20.500000000 -28.499591827 -36.499595642 21.500000000 -28.499591827 -36.499595642 22.500000000 -28.499591827 
-36.499595642 23.500000000 -28.499591827 -36.499595642 24.500000000 -28.499591827 -36.499595642 25.499996185 -28.499591827 -36.499588013 26.499954224 -28.499591827 -36.499546051 27.499591827 -28.499591827 -36.499160767 28.497461319 -28.499576569 -36.496910095 29.488346100 -28.499475479 -36.487373352 30.458766937 -28.499073029 -36.456939697 31.383810043 -28.498050690 -36.381031036 32.231597900 -28.496377945 -36.228958130 32.977611542 -28.495168686 -35.976860046 -33.980331421 -27.498950958 -35.980201721 -33.232864380 -27.499221802 -36.232307434 -32.384300232 -27.499622345 -36.383720398 -31.458948135 -27.499858856 -36.458606720 -30.488399506 -27.499938965 -36.488258362 -29.497472763 -27.499954224 -36.497406006 -28.499593735 -27.499954224 -36.499546051 -27.499954224 -27.499954224 -36.499908447 -26.499996185 -27.499954224 -36.499950409 -25.500000000 -27.499954224 -36.499954224 -24.500000000 -27.499954224 -36.499954224 -23.500000000 -27.499954224 -36.499954224 -22.500000000 -27.499954224 -36.499954224 -21.500000000 -27.499954224 -36.499954224 -20.500000000 -27.499954224 -36.499954224 -19.500000000 -27.499954224 -36.499954224 -18.500000000 -27.499954224 -36.499954224 -17.500000000 -27.499954224 -36.499954224 -16.500000000 -27.499954224 -36.499954224 -15.500000000 -27.499954224 -36.499954224 -14.500000000 -27.499954224 -36.499954224 -13.500000000 -27.499954224 -36.499954224 -12.500000000 -27.499954224 -36.499954224 -11.500000000 -27.499954224 -36.499954224 -10.500000000 -27.499954224 -36.499954224 -9.500000000 -27.499954224 -36.499954224 -8.500000000 -27.499954224 -36.499954224 -7.500000000 -27.499954224 -36.499954224 -6.500000000 -27.499954224 -36.499954224 -5.500000000 -27.499954224 -36.499954224 -4.500000000 -27.499954224 -36.499954224 -3.500000000 -27.499954224 -36.499954224 -2.500000000 -27.499954224 -36.499954224 -1.500000000 -27.499954224 -36.499954224 -0.500000000 -27.499954224 -36.499954224 0.500000000 -27.499954224 -36.499954224 1.500000000 -27.499954224 
-36.499954224 2.500000000 -27.499954224 -36.499954224 3.500000000 -27.499954224 -36.499954224 4.500000000 -27.499954224 -36.499954224 5.500000000 -27.499954224 -36.499954224 6.500000000 -27.499954224 -36.499954224 7.500000000 -27.499954224 -36.499954224 8.500000000 -27.499954224 -36.499954224 9.500000000 -27.499954224 -36.499954224 10.500000000 -27.499954224 -36.499954224 11.500000000 -27.499954224 -36.499954224 12.500000000 -27.499954224 -36.499954224 13.500000000 -27.499954224 -36.499954224 14.500000000 -27.499954224 -36.499954224 15.500000000 -27.499954224 -36.499954224 16.500000000 -27.499954224 -36.499954224 17.500000000 -27.499954224 -36.499954224 18.500000000 -27.499954224 -36.499954224 19.500000000 -27.499954224 -36.499954224 20.500000000 -27.499954224 -36.499954224 21.500000000 -27.499954224 -36.499954224 22.500000000 -27.499954224 -36.499954224 23.500000000 -27.499954224 -36.499954224 24.500000000 -27.499954224 -36.499954224 25.499996185 -27.499954224 -36.499950409 26.499954224 -27.499954224 -36.499908447 27.499591827 -27.499954224 -36.499542236 28.497470856 -27.499954224 -36.497406006 29.488399506 -27.499938965 -36.488258362 30.458948135 -27.499858856 -36.458606720 31.384298325 -27.499618530 -36.383720398 32.232864380 -27.499225616 -36.232307434 32.980327606 -27.498950958 -35.980201721 -33.980972290 -26.499826431 -35.980957031 -33.233165741 -26.499874115 -36.233070374 -32.384403229 -26.499948502 -36.384307861 -31.458978653 -26.499988556 -36.458930969 -30.488407135 -26.499996185 -36.488391876 -29.497472763 -26.499996185 -36.497467041 -28.499593735 -26.499996185 -36.499588013 -27.499954224 -26.499996185 -36.499950409 -26.499996185 -26.499996185 -36.499992371 -25.500000000 -26.499996185 -36.499996185 -24.500000000 -26.499996185 -36.499996185 -23.500000000 -26.499996185 -36.499996185 -22.500000000 -26.499996185 -36.499996185 -21.500000000 -26.499996185 -36.499996185 -20.500000000 -26.499996185 -36.499996185 -19.500000000 -26.499996185 -36.499996185 
-18.500000000 -26.499996185 -36.499996185 -17.500000000 -26.499996185 -36.499996185 -16.500000000 -26.499996185 -36.499996185 -15.500000000 -26.499996185 -36.499996185 -14.500000000 -26.499996185 -36.499996185 -13.500000000 -26.499996185 -36.499996185 -12.500000000 -26.499996185 -36.499996185 -11.500000000 -26.499996185 -36.499996185 -10.500000000 -26.499996185 -36.499996185 -9.500000000 -26.499996185 -36.499996185 -8.500000000 -26.499996185 -36.499996185 -7.500000000 -26.499996185 -36.499996185 -6.500000000 -26.499996185 -36.499996185 -5.500000000 -26.499996185 -36.499996185 -4.500000000 -26.499996185 -36.499996185 -3.500000000 -26.499996185 -36.499996185 -2.500000000 -26.499996185 -36.499996185 -1.500000000 -26.499996185 -36.499996185 -0.500000000 -26.499996185 -36.499996185 0.500000000 -26.499996185 -36.499996185 1.500000000 -26.499996185 -36.499996185 2.500000000 -26.499996185 -36.499996185 3.500000000 -26.499996185 -36.499996185 4.500000000 -26.499996185 -36.499996185 5.500000000 -26.499996185 -36.499996185 6.500000000 -26.499996185 -36.499996185 7.500000000 -26.499996185 -36.499996185 8.500000000 -26.499996185 -36.499996185 9.500000000 -26.499996185 -36.499996185 10.500000000 -26.499996185 -36.499996185 11.500000000 -26.499996185 -36.499996185 12.500000000 -26.499996185 -36.499996185 13.500000000 -26.499996185 -36.499996185 14.500000000 -26.499996185 -36.499996185 15.500000000 -26.499996185 -36.499996185 16.500000000 -26.499996185 -36.499996185 17.500000000 -26.499996185 -36.499996185 18.500000000 -26.499996185 -36.499996185 19.500000000 -26.499996185 -36.499996185 20.500000000 -26.499996185 -36.499996185 21.500000000 -26.499996185 -36.499996185 22.500000000 -26.499996185 -36.499996185 23.500000000 -26.499996185 -36.499996185 24.500000000 -26.499996185 -36.499996185 25.499996185 -26.499996185 -36.499992371 26.499954224 -26.499996185 -36.499950409 27.499591827 -26.499996185 -36.499588013 28.497470856 -26.499996185 -36.497467041 29.488407135 -26.499996185 
-36.488391876 30.458978653 -26.499988556 -36.458930969 31.384403229 -26.499948502 -36.384311676 32.233165741 -26.499874115 -36.233074188 32.980976105 -26.499826431 -35.980960846 -33.981086731 -25.499979019 -35.981086731 -33.233215332 -25.499986649 -36.233203888 -32.384422302 -25.499996185 -36.384407043 -31.458978653 -25.500000000 -36.458972931 -30.488407135 -25.500000000 -36.488403320 -29.497472763 -25.500000000 -36.497467041 -28.499593735 -25.500000000 -36.499591827 -27.499954224 -25.500000000 -36.499954224 -26.499996185 -25.500000000 -36.499996185 -25.500000000 -25.500000000 -36.500000000 -24.500000000 -25.500000000 -36.500000000 -23.500000000 -25.500000000 -36.500000000 -22.500000000 -25.500000000 -36.500000000 -21.500000000 -25.500000000 -36.500000000 -20.500000000 -25.500000000 -36.500000000 -19.500000000 -25.500000000 -36.500000000 -18.500000000 -25.500000000 -36.500000000 -17.500000000 -25.500000000 -36.500000000 -16.500000000 -25.500000000 -36.500000000 -15.500000000 -25.500000000 -36.500000000 -14.500000000 -25.500000000 -36.500000000 -13.500000000 -25.500000000 -36.500000000 -12.500000000 -25.500000000 -36.500000000 -11.500000000 -25.500000000 -36.500000000 -10.500000000 -25.500000000 -36.500000000 -9.500000000 -25.500000000 -36.500000000 -8.500000000 -25.500000000 -36.500000000 -7.500000000 -25.500000000 -36.500000000 -6.500000000 -25.500000000 -36.500000000 -5.500000000 -25.500000000 -36.500000000 -4.500000000 -25.500000000 -36.500000000 -3.500000000 -25.500000000 -36.500000000 -2.500000000 -25.500000000 -36.500000000 -1.500000000 -25.500000000 -36.500000000 -0.500000000 -25.500000000 -36.500000000 0.500000000 -25.500000000 -36.500000000 1.500000000 -25.500000000 -36.500000000 2.500000000 -25.500000000 -36.500000000 3.500000000 -25.500000000 -36.500000000 4.500000000 -25.500000000 -36.500000000 5.500000000 -25.500000000 -36.500000000 6.500000000 -25.500000000 -36.500000000 7.500000000 -25.500000000 -36.500000000 8.500000000 -25.500000000 -36.500000000 
9.500000000 -25.500000000 -36.500000000 10.500000000 -25.500000000 -36.500000000 11.500000000 -25.500000000 -36.500000000 12.500000000 -25.500000000 -36.500000000 13.500000000 -25.500000000 -36.500000000 14.500000000 -25.500000000 -36.500000000 15.500000000 -25.500000000 -36.500000000 16.500000000 -25.500000000 -36.500000000 17.500000000 -25.500000000 -36.500000000 18.500000000 -25.500000000 -36.500000000 19.500000000 -25.500000000 -36.500000000 20.500000000 -25.500000000 -36.500000000 21.500000000 -25.500000000 -36.500000000 22.500000000 -25.500000000 -36.500000000 23.500000000 -25.500000000 -36.500000000 24.500000000 -25.500000000 -36.500000000 25.499996185 -25.500000000 -36.499996185 26.499954224 -25.500000000 -36.499954224 27.499591827 -25.500000000 -36.499591827 28.497470856 -25.500000000 -36.497474670 29.488407135 -25.500000000 -36.488403320 30.458978653 -25.500000000 -36.458976746 31.384418488 -25.499996185 -36.384407043 32.233215332 -25.499986649 -36.233207703 32.981090546 -25.499975204 -35.981090546 -33.981101990 -24.499998093 -35.981101990 -33.233222961 -24.499998093 -36.233222961 -32.384422302 -24.500000000 -36.384418488 -31.458978653 -24.500000000 -36.458976746 -30.488407135 -24.500000000 -36.488403320 -29.497472763 -24.500000000 -36.497467041 -28.499593735 -24.500000000 -36.499591827 -27.499954224 -24.500000000 -36.499954224 -26.499996185 -24.500000000 -36.499996185 -25.500000000 -24.500000000 -36.500000000 -24.500000000 -24.500000000 -36.500000000 -23.500000000 -24.500000000 -36.500000000 -22.500000000 -24.500000000 -36.500000000 -21.500000000 -24.500000000 -36.500000000 -20.500000000 -24.500000000 -36.500000000 -19.500000000 -24.500000000 -36.500000000 -18.500000000 -24.500000000 -36.500000000 -17.500000000 -24.500000000 -36.500000000 -16.500000000 -24.500000000 -36.500000000 -15.500000000 -24.500000000 -36.500000000 -14.500000000 -24.500000000 -36.500000000 -13.500000000 -24.500000000 -36.500000000 -12.500000000 -24.500000000 -36.500000000 
-11.500000000 -24.500000000 -36.500000000 -10.500000000 -24.500000000 -36.500000000 -9.500000000 -24.500000000 -36.500000000 -8.500000000 -24.500000000 -36.500000000 -7.500000000 -24.500000000 -36.500000000 -6.500000000 -24.500000000 -36.500000000 -5.500000000 -24.500000000 -36.500000000 -4.500000000 -24.500000000 -36.500000000 -3.500000000 -24.500000000 -36.500000000 -2.500000000 -24.500000000 -36.500000000 -1.500000000 -24.500000000 -36.500000000 -0.500000000 -24.500000000 -36.500000000 0.500000000 -24.500000000 -36.500000000 1.500000000 -24.500000000 -36.500000000 2.500000000 -24.500000000 -36.500000000 3.500000000 -24.500000000 -36.500000000 4.500000000 -24.500000000 -36.500000000 5.500000000 -24.500000000 -36.500000000 6.500000000 -24.500000000 -36.500000000 7.500000000 -24.500000000 -36.500000000 8.500000000 -24.500000000 -36.500000000 9.500000000 -24.500000000 -36.500000000 10.500000000 -24.500000000 -36.500000000 11.500000000 -24.500000000 -36.500000000 12.500000000 -24.500000000 -36.500000000 13.500000000 -24.500000000 -36.500000000 14.500000000 -24.500000000 -36.500000000 15.500000000 -24.500000000 -36.500000000 16.500000000 -24.500000000 -36.500000000 17.500000000 -24.500000000 -36.500000000 18.500000000 -24.500000000 -36.500000000 19.500000000 -24.500000000 -36.500000000 20.500000000 -24.500000000 -36.500000000 21.500000000 -24.500000000 -36.500000000 22.500000000 -24.500000000 -36.500000000 23.500000000 -24.500000000 -36.500000000 24.500000000 -24.500000000 -36.500000000 25.499996185 -24.500000000 -36.499996185 26.499954224 -24.500000000 -36.499954224 27.499591827 -24.500000000 -36.499591827 28.497470856 -24.500000000 -36.497474670 29.488407135 -24.500000000 -36.488403320 30.458978653 -24.500000000 -36.458980560 31.384418488 -24.500000000 -36.384422302 32.233222961 -24.499998093 -36.233222961 32.981101990 -24.499998093 -35.981101990 -33.981101990 -23.500000000 -35.981101990 -33.233222961 -23.500000000 -36.233222961 -32.384422302 -23.500000000 
-36.384418488 -31.458978653 -23.500000000 -36.458976746 -30.488407135 -23.500000000 -36.488403320 -29.497472763 -23.500000000 -36.497467041 -28.499593735 -23.500000000 -36.499591827 -27.499954224 -23.500000000 -36.499954224 -26.499996185 -23.500000000 -36.499996185 -25.500000000 -23.500000000 -36.500000000 -24.500000000 -23.500000000 -36.500000000 -23.500000000 -23.500000000 -36.500000000 -22.500000000 -23.500000000 -36.500000000 -21.500000000 -23.500000000 -36.500000000 -20.500000000 -23.500000000 -36.500000000 -19.500000000 -23.500000000 -36.500000000 -18.500000000 -23.500000000 -36.500000000 -17.500000000 -23.500000000 -36.500000000 -16.500000000 -23.500000000 -36.500000000 -15.500000000 -23.500000000 -36.500000000 -14.500000000 -23.500000000 -36.500000000 -13.500000000 -23.500000000 -36.500000000 -12.500000000 -23.500000000 -36.500000000 -11.500000000 -23.500000000 -36.500000000 -10.500000000 -23.500000000 -36.500000000 -9.500000000 -23.500000000 -36.500000000 -8.500000000 -23.500000000 -36.500000000 -7.500000000 -23.500000000 -36.500000000 -6.500000000 -23.500000000 -36.500000000 -5.500000000 -23.500000000 -36.500000000 -4.500000000 -23.500000000 -36.500000000 -3.500000000 -23.500000000 -36.500000000 -2.500000000 -23.500000000 -36.500000000 -1.500000000 -23.500000000 -36.500000000 -0.500000000 -23.500000000 -36.500000000 0.500000000 -23.500000000 -36.500000000 1.500000000 -23.500000000 -36.500000000 2.500000000 -23.500000000 -36.500000000 3.500000000 -23.500000000 -36.500000000 4.500000000 -23.500000000 -36.500000000 5.500000000 -23.500000000 -36.500000000 6.500000000 -23.500000000 -36.500000000 7.500000000 -23.500000000 -36.500000000 8.500000000 -23.500000000 -36.500000000 9.500000000 -23.500000000 -36.500000000 10.500000000 -23.500000000 -36.500000000 11.500000000 -23.500000000 -36.500000000 12.500000000 -23.500000000 -36.500000000 13.500000000 -23.500000000 -36.500000000 14.500000000 -23.500000000 -36.500000000 15.500000000 -23.500000000 -36.500000000 
16.500000000 -23.500000000 -36.500000000 17.500000000 -23.500000000 -36.500000000 18.500000000 -23.500000000 -36.500000000 19.500000000 -23.500000000 -36.500000000 20.500000000 -23.500000000 -36.500000000 21.500000000 -23.500000000 -36.500000000 22.500000000 -23.500000000 -36.500000000 23.500000000 -23.500000000 -36.500000000 24.500000000 -23.500000000 -36.500000000 25.499996185 -23.500000000 -36.499996185 26.499954224 -23.500000000 -36.499954224 27.499591827 -23.500000000 -36.499591827 28.497470856 -23.500000000 -36.497474670 29.488407135 -23.500000000 -36.488403320 30.458978653 -23.500000000 -36.458980560 31.384418488 -23.500000000 -36.384422302 32.233222961 -23.500000000 -36.233226776 32.981101990 -23.500000000 -35.981109619 -33.981101990 -22.500000000 -35.981101990 -33.233222961 -22.500000000 -36.233222961 -32.384422302 -22.500000000 -36.384418488 -31.458978653 -22.500000000 -36.458976746 -30.488407135 -22.500000000 -36.488403320 -29.497472763 -22.500000000 -36.497467041 -28.499593735 -22.500000000 -36.499591827 -27.499954224 -22.500000000 -36.499954224 -26.499996185 -22.500000000 -36.499996185 -25.500000000 -22.500000000 -36.500000000 -24.500000000 -22.500000000 -36.500000000 -23.500000000 -22.500000000 -36.500000000 -22.500000000 -22.500000000 -36.500000000 -21.500000000 -22.500000000 -36.500000000 -20.500000000 -22.500000000 -36.500000000 -19.500000000 -22.500000000 -36.500000000 -18.500000000 -22.500000000 -36.500000000 -17.500000000 -22.500000000 -36.500000000 -16.500000000 -22.500000000 -36.500000000 -15.500000000 -22.500000000 -36.500000000 -14.500000000 -22.500000000 -36.500000000 -13.500000000 -22.500000000 -36.500000000 -12.500000000 -22.500000000 -36.500000000 -11.500000000 -22.500000000 -36.500000000 -10.500000000 -22.500000000 -36.500000000 -9.500000000 -22.500000000 -36.500000000 -8.500000000 -22.500000000 -36.500000000 -7.500000000 -22.500000000 -36.500000000 -6.500000000 -22.500000000 -36.500000000 -5.500000000 -22.500000000 -36.500000000 
-4.500000000 -22.500000000 -36.500000000 -3.500000000 -22.500000000 -36.500000000 -2.500000000 -22.500000000 -36.500000000 -1.500000000 -22.500000000 -36.500000000 -0.500000000 -22.500000000 -36.500000000 0.500000000 -22.500000000 -36.500000000 1.500000000 -22.500000000 -36.500000000 2.500000000 -22.500000000 -36.500000000 3.500000000 -22.500000000 -36.500000000 4.500000000 -22.500000000 -36.500000000 5.500000000 -22.500000000 -36.500000000 6.500000000 -22.500000000 -36.500000000 7.500000000 -22.500000000 -36.500000000 8.500000000 -22.500000000 -36.500000000 9.500000000 -22.500000000 -36.500000000 10.500000000 -22.500000000 -36.500000000 11.500000000 -22.500000000 -36.500000000 12.500000000 -22.500000000 -36.500000000 13.500000000 -22.500000000 -36.500000000 14.500000000 -22.500000000 -36.500000000 15.500000000 -22.500000000 -36.500000000 16.500000000 -22.500000000 -36.500000000 17.500000000 -22.500000000 -36.500000000 18.500000000 -22.500000000 -36.500000000 19.500000000 -22.500000000 -36.500000000 20.500000000 -22.500000000 -36.500000000 21.500000000 -22.500000000 -36.500000000 22.500000000 -22.500000000 -36.500000000 23.500000000 -22.500000000 -36.500000000 24.500000000 -22.500000000 -36.500000000 25.499996185 -22.500000000 -36.499996185 26.499954224 -22.500000000 -36.499954224 27.499591827 -22.500000000 -36.499591827 28.497470856 -22.500000000 -36.497474670 29.488407135 -22.500000000 -36.488403320 30.458978653 -22.500000000 -36.458980560 31.384418488 -22.500000000 -36.384422302 32.233222961 -22.500000000 -36.233226776 32.981101990 -22.500000000 -35.981109619 -33.981101990 -21.500000000 -35.981101990 -33.233222961 -21.500000000 -36.233222961 -32.384422302 -21.500000000 -36.384418488 -31.458978653 -21.500000000 -36.458976746 -30.488407135 -21.500000000 -36.488403320 -29.497472763 -21.500000000 -36.497467041 -28.499593735 -21.500000000 -36.499591827 -27.499954224 -21.500000000 -36.499954224 -26.499996185 -21.500000000 -36.499996185 -25.500000000 -21.500000000 
-36.500000000 -24.500000000 -21.500000000 -36.500000000 -23.500000000 -21.500000000 -36.500000000 -22.500000000 -21.500000000 -36.500000000 -21.500000000 -21.500000000 -36.500000000 -20.500000000 -21.500000000 -36.500000000 -19.500000000 -21.500000000 -36.500000000 -18.500000000 -21.500000000 -36.500000000 -17.500000000 -21.500000000 -36.500000000 -16.500000000 -21.500000000 -36.500000000 -15.500000000 -21.500000000 -36.500000000 -14.500000000 -21.500000000 -36.500000000 -13.500000000 -21.500000000 -36.500000000 -12.500000000 -21.500000000 -36.500000000 -11.500000000 -21.500000000 -36.500000000 -10.500000000 -21.500000000 -36.500000000 -9.500000000 -21.500000000 -36.500000000 -8.500000000 -21.500000000 -36.500000000 -7.500000000 -21.500000000 -36.500000000 -6.500000000 -21.500000000 -36.500000000 -5.500000000 -21.500000000 -36.500000000 -4.500000000 -21.500000000 -36.500000000 -3.500000000 -21.500000000 -36.500000000 -2.500000000 -21.500000000 -36.500000000 -1.500000000 -21.500000000 -36.500000000 -0.500000000 -21.500000000 -36.500000000 0.500000000 -21.500000000 -36.500000000 1.500000000 -21.500000000 -36.500000000 2.500000000 -21.500000000 -36.500000000 3.500000000 -21.500000000 -36.500000000 4.500000000 -21.500000000 -36.500000000 5.500000000 -21.500000000 -36.500000000 6.500000000 -21.500000000 -36.500000000 7.500000000 -21.500000000 -36.500000000 8.500000000 -21.500000000 -36.500000000 9.500000000 -21.500000000 -36.500000000 10.500000000 -21.500000000 -36.500000000 11.500000000 -21.500000000 -36.500000000 12.500000000 -21.500000000 -36.500000000 13.500000000 -21.500000000 -36.500000000 14.500000000 -21.500000000 -36.500000000 15.500000000 -21.500000000 -36.500000000 16.500000000 -21.500000000 -36.500000000 17.500000000 -21.500000000 -36.500000000 18.500000000 -21.500000000 -36.500000000 19.500000000 -21.500000000 -36.500000000 20.500000000 -21.500000000 -36.500000000 21.500000000 -21.500000000 -36.500000000 22.500000000 -21.500000000 -36.500000000 23.500000000 
-21.500000000 -36.500000000 24.500000000 -21.500000000 -36.500000000 25.499996185 -21.500000000 -36.499996185 26.499954224 -21.500000000 -36.499954224 27.499591827 -21.500000000 -36.499591827 28.497470856 -21.500000000 -36.497474670 29.488407135 -21.500000000 -36.488403320 30.458978653 -21.500000000 -36.458980560 31.384418488 -21.500000000 -36.384422302 32.233222961 -21.500000000 -36.233226776 32.981101990 -21.500000000 -35.981109619 -33.981101990 -20.500000000 -35.981101990 -33.233222961 -20.500000000 -36.233222961 -32.384422302 -20.500000000 -36.384418488 -31.458978653 -20.500000000 -36.458976746 -30.488407135 -20.500000000 -36.488403320 -29.497472763 -20.500000000 -36.497467041 -28.499593735 -20.500000000 -36.499591827 -27.499954224 -20.500000000 -36.499954224 -26.499996185 -20.500000000 -36.499996185 -25.500000000 -20.500000000 -36.500000000 -24.500000000 -20.500000000 -36.500000000 -23.500000000 -20.500000000 -36.500000000 -22.500000000 -20.500000000 -36.500000000 -21.500000000 -20.500000000 -36.500000000 -20.500000000 -20.500000000 -36.500000000 -19.500000000 -20.500000000 -36.500000000 -18.500000000 -20.500000000 -36.500000000 -17.500000000 -20.500000000 -36.500000000 -16.500000000 -20.500000000 -36.500000000 -15.500000000 -20.500000000 -36.500000000 -14.500000000 -20.500000000 -36.500000000 -13.500000000 -20.500000000 -36.500000000 -12.500000000 -20.500000000 -36.500000000 -11.500000000 -20.500000000 -36.500000000 -10.500000000 -20.500000000 -36.500000000 -9.500000000 -20.500000000 -36.500000000 -8.500000000 -20.500000000 -36.500000000 -7.500000000 -20.500000000 -36.500000000 -6.500000000 -20.500000000 -36.500000000 -5.500000000 -20.500000000 -36.500000000 -4.500000000 -20.500000000 -36.500000000 -3.500000000 -20.500000000 -36.500000000 -2.500000000 -20.500000000 -36.500000000 -1.500000000 -20.500000000 -36.500000000 -0.500000000 -20.500000000 -36.500000000 0.500000000 -20.500000000 -36.500000000 1.500000000 -20.500000000 -36.500000000 2.500000000 
-20.500000000 -36.500000000 3.500000000 -20.500000000 -36.500000000 4.500000000 -20.500000000 -36.500000000 5.500000000 -20.500000000 -36.500000000 6.500000000 -20.500000000 -36.500000000 7.500000000 -20.500000000 -36.500000000 8.500000000 -20.500000000 -36.500000000 9.500000000 -20.500000000 -36.500000000 10.500000000 -20.500000000 -36.500000000 11.500000000 -20.500000000 -36.500000000 12.500000000 -20.500000000 -36.500000000 13.500000000 -20.500000000 -36.500000000 14.500000000 -20.500000000 -36.500000000 15.500000000 -20.500000000 -36.500000000 16.500000000 -20.500000000 -36.500000000 17.500000000 -20.500000000 -36.500000000 18.500000000 -20.500000000 -36.500000000 19.500000000 -20.500000000 -36.500000000 20.500000000 -20.500000000 -36.500000000 21.500000000 -20.500000000 -36.500000000 22.500000000 -20.500000000 -36.500000000 23.500000000 -20.500000000 -36.500000000 24.500000000 -20.500000000 -36.500000000 25.499996185 -20.500000000 -36.499996185 26.499954224 -20.500000000 -36.499954224 27.499591827 -20.500000000 -36.499591827 28.497470856 -20.500000000 -36.497474670 29.488407135 -20.500000000 -36.488403320 30.458978653 -20.500000000 -36.458980560 31.384418488 -20.500000000 -36.384422302 32.233222961 -20.500000000 -36.233226776 32.981101990 -20.500000000 -35.981109619 -33.981101990 -19.500000000 -35.981101990 -33.233222961 -19.500000000 -36.233222961 -32.384422302 -19.500000000 -36.384418488 -31.458978653 -19.500000000 -36.458976746 -30.488407135 -19.500000000 -36.488403320 -29.497472763 -19.500000000 -36.497467041 -28.499593735 -19.500000000 -36.499591827 -27.499954224 -19.500000000 -36.499954224 -26.499996185 -19.500000000 -36.499996185 -25.500000000 -19.500000000 -36.500000000 -24.500000000 -19.500000000 -36.500000000 -23.500000000 -19.500000000 -36.500000000 -22.500000000 -19.500000000 -36.500000000 -21.500000000 -19.500000000 -36.500000000 -20.500000000 -19.500000000 -36.500000000 -19.500000000 -19.500000000 -36.500000000 -18.500000000 -19.500000000 
-36.500000000 -17.500000000 -19.500000000 -36.500000000 -16.500000000 -19.500000000 -36.500000000 -15.500000000 -19.500000000 -36.500000000 -14.500000000 -19.500000000 -36.500000000 -13.500000000 -19.500000000 -36.500000000 -12.500000000 -19.500000000 -36.500000000 -11.500000000 -19.500000000 -36.500000000 -10.500000000 -19.500000000 -36.500000000 -9.500000000 -19.500000000 -36.500000000 -8.500000000 -19.500000000 -36.500000000 -7.500000000 -19.500000000 -36.500000000 -6.500000000 -19.500000000 -36.500000000 -5.500000000 -19.500000000 -36.500000000 -4.500000000 -19.500000000 -36.500000000 -3.500000000 -19.500000000 -36.500000000 -2.500000000 -19.500000000 -36.500000000 -1.500000000 -19.500000000 -36.500000000 -0.500000000 -19.500000000 -36.500000000 0.500000000 -19.500000000 -36.500000000 1.500000000 -19.500000000 -36.500000000 2.500000000 -19.500000000 -36.500000000 3.500000000 -19.500000000 -36.500000000 4.500000000 -19.500000000 -36.500000000 5.500000000 -19.500000000 -36.500000000 6.500000000 -19.500000000 -36.500000000 7.500000000 -19.500000000 -36.500000000 8.500000000 -19.500000000 -36.500000000 9.500000000 -19.500000000 -36.500000000 10.500000000 -19.500000000 -36.500000000 11.500000000 -19.500000000 -36.500000000 12.500000000 -19.500000000 -36.500000000 13.500000000 -19.500000000 -36.500000000 14.500000000 -19.500000000 -36.500000000 15.500000000 -19.500000000 -36.500000000 16.500000000 -19.500000000 -36.500000000 17.500000000 -19.500000000 -36.500000000 18.500000000 -19.500000000 -36.500000000 19.500000000 -19.500000000 -36.500000000 20.500000000 -19.500000000 -36.500000000 21.500000000 -19.500000000 -36.500000000 22.500000000 -19.500000000 -36.500000000 23.500000000 -19.500000000 -36.500000000 24.500000000 -19.500000000 -36.500000000 25.499996185 -19.500000000 -36.499996185 26.499954224 -19.500000000 -36.499954224 27.499591827 -19.500000000 -36.499591827 28.497470856 -19.500000000 -36.497474670 29.488407135 -19.500000000 -36.488403320 30.458978653 
-19.500000000 -36.458980560 31.384418488 -19.500000000 -36.384422302 32.233222961 -19.500000000 -36.233226776 32.981101990 -19.500000000 -35.981109619 -33.981101990 -18.500000000 -35.981101990 -33.233222961 -18.500000000 -36.233222961 -32.384422302 -18.500000000 -36.384418488 -31.458978653 -18.500000000 -36.458976746 -30.488407135 -18.500000000 -36.488403320 -29.497472763 -18.500000000 -36.497467041 -28.499593735 -18.500000000 -36.499591827 -27.499954224 -18.500000000 -36.499954224 -26.499996185 -18.500000000 -36.499996185 -25.500000000 -18.500000000 -36.500000000 -24.500000000 -18.500000000 -36.500000000 -23.500000000 -18.500000000 -36.500000000 -22.500000000 -18.500000000 -36.500000000 -21.500000000 -18.500000000 -36.500000000 -20.500000000 -18.500000000 -36.500000000 -19.500000000 -18.500000000 -36.500000000 -18.500000000 -18.500000000 -36.500000000 -17.500000000 -18.500000000 -36.500000000 -16.500000000 -18.500000000 -36.500000000 -15.500000000 -18.500000000 -36.500000000 -14.500000000 -18.500000000 -36.500000000 -13.500000000 -18.500000000 -36.500000000 -12.500000000 -18.500000000 -36.500000000 -11.500000000 -18.500000000 -36.500000000 -10.500000000 -18.500000000 -36.500000000 -9.500000000 -18.500000000 -36.500000000 -8.500000000 -18.500000000 -36.500000000 -7.500000000 -18.500000000 -36.500000000 -6.500000000 -18.500000000 -36.500000000 -5.500000000 -18.500000000 -36.500000000 -4.500000000 -18.500000000 -36.500000000 -3.500000000 -18.500000000 -36.500000000 -2.500000000 -18.500000000 -36.500000000 -1.500000000 -18.500000000 -36.500000000 -0.500000000 -18.500000000 -36.500000000 0.500000000 -18.500000000 -36.500000000 1.500000000 -18.500000000 -36.500000000 2.500000000 -18.500000000 -36.500000000 3.500000000 -18.500000000 -36.500000000 4.500000000 -18.500000000 -36.500000000 5.500000000 -18.500000000 -36.500000000 6.500000000 -18.500000000 -36.500000000 7.500000000 -18.500000000 -36.500000000 8.500000000 -18.500000000 -36.500000000 9.500000000 -18.500000000 
-36.500000000 10.500000000 -18.500000000 -36.500000000 11.500000000 -18.500000000 -36.500000000 12.500000000 -18.500000000 -36.500000000 13.500000000 -18.500000000 -36.500000000 14.500000000 -18.500000000 -36.500000000 15.500000000 -18.500000000 -36.500000000 16.500000000 -18.500000000 -36.500000000 17.500000000 -18.500000000 -36.500000000 18.500000000 -18.500000000 -36.500000000 19.500000000 -18.500000000 -36.500000000 20.500000000 -18.500000000 -36.500000000 21.500000000 -18.500000000 -36.500000000 22.500000000 -18.500000000 -36.500000000 23.500000000 -18.500000000 -36.500000000 24.500000000 -18.500000000 -36.500000000 25.499996185 -18.500000000 -36.499996185 26.499954224 -18.500000000 -36.499954224 27.499591827 -18.500000000 -36.499591827 28.497470856 -18.500000000 -36.497474670 29.488407135 -18.500000000 -36.488403320 30.458978653 -18.500000000 -36.458980560 31.384418488 -18.500000000 -36.384422302 32.233222961 -18.500000000 -36.233226776 32.981101990 -18.500000000 -35.981109619 -33.981101990 -17.500000000 -35.981101990 -33.233222961 -17.500000000 -36.233222961 -32.384422302 -17.500000000 -36.384418488 -31.458978653 -17.500000000 -36.458976746 -30.488407135 -17.500000000 -36.488403320 -29.497472763 -17.500000000 -36.497467041 -28.499593735 -17.500000000 -36.499591827 -27.499954224 -17.500000000 -36.499954224 -26.499996185 -17.500000000 -36.499996185 -25.500000000 -17.500000000 -36.500000000 -24.500000000 -17.500000000 -36.500000000 -23.500000000 -17.500000000 -36.500000000 -22.500000000 -17.500000000 -36.500000000 -21.500000000 -17.500000000 -36.500000000 -20.500000000 -17.500000000 -36.500000000 -19.500000000 -17.500000000 -36.500000000 -18.500000000 -17.500000000 -36.500000000 -17.500000000 -17.500000000 -36.500000000 -16.500000000 -17.500000000 -36.500000000 -15.500000000 -17.500000000 -36.500000000 -14.500000000 -17.500000000 -36.500000000 -13.500000000 -17.500000000 -36.500000000 -12.500000000 -17.500000000 -36.500000000 -11.500000000 -17.500000000 
-36.500000000 -10.500000000 -17.500000000 -36.500000000 -9.500000000 -17.500000000 -36.500000000 -8.500000000 -17.500000000 -36.500000000 -7.500000000 -17.500000000 -36.500000000 -6.500000000 -17.500000000 -36.500000000 -5.500000000 -17.500000000 -36.500000000 -4.500000000 -17.500000000 -36.500000000 -3.500000000 -17.500000000 -36.500000000 -2.500000000 -17.500000000 -36.500000000 -1.500000000 -17.500000000 -36.500000000 -0.500000000 -17.500000000 -36.500000000 0.500000000 -17.500000000 -36.500000000 1.500000000 -17.500000000 -36.500000000 2.500000000 -17.500000000 -36.500000000 3.500000000 -17.500000000 -36.500000000 4.500000000 -17.500000000 -36.500000000 5.500000000 -17.500000000 -36.500000000 6.500000000 -17.500000000 -36.500000000 7.500000000 -17.500000000 -36.500000000 8.500000000 -17.500000000 -36.500000000 9.500000000 -17.500000000 -36.500000000 10.500000000 -17.500000000 -36.500000000 11.500000000 -17.500000000 -36.500000000 12.500000000 -17.500000000 -36.500000000 13.500000000 -17.500000000 -36.500000000 14.500000000 -17.500000000 -36.500000000 15.500000000 -17.500000000 -36.500000000 16.500000000 -17.500000000 -36.500000000 17.500000000 -17.500000000 -36.500000000 18.500000000 -17.500000000 -36.500000000 19.500000000 -17.500000000 -36.500000000 20.500000000 -17.500000000 -36.500000000 21.500000000 -17.500000000 -36.500000000 22.500000000 -17.500000000 -36.500000000 23.500000000 -17.500000000 -36.500000000 24.500000000 -17.500000000 -36.500000000 25.499996185 -17.500000000 -36.499996185 26.499954224 -17.500000000 -36.499954224 27.499591827 -17.500000000 -36.499591827 28.497470856 -17.500000000 -36.497474670 29.488407135 -17.500000000 -36.488403320 30.458978653 -17.500000000 -36.458980560 31.384418488 -17.500000000 -36.384422302 32.233222961 -17.500000000 -36.233226776 32.981101990 -17.500000000 -35.981109619 -33.981101990 -16.500000000 -35.981101990 -33.233222961 -16.500000000 -36.233222961 -32.384422302 -16.500000000 -36.384418488 -31.458978653 
-16.500000000 -36.458976746 -30.488407135 -16.500000000 -36.488403320 -29.497472763 -16.500000000 -36.497467041 -28.499593735 -16.500000000 -36.499591827 -27.499954224 -16.500000000 -36.499954224 -26.499996185 -16.500000000 -36.499996185 -25.500000000 -16.500000000 -36.500000000 -24.500000000 -16.500000000 -36.500000000 -23.500000000 -16.500000000 -36.500000000 -22.500000000 -16.500000000 -36.500000000 -21.500000000 -16.500000000 -36.500000000 -20.500000000 -16.500000000 -36.500000000 -19.500000000 -16.500000000 -36.500000000 -18.500000000 -16.500000000 -36.500000000 -17.500000000 -16.500000000 -36.500000000 -16.500000000 -16.500000000 -36.500000000 -15.500000000 -16.500000000 -36.500000000 -14.500000000 -16.500000000 -36.500000000 -13.500000000 -16.500000000 -36.500000000 -12.500000000 -16.500000000 -36.500000000 -11.500000000 -16.500000000 -36.500000000 -10.500000000 -16.500000000 -36.500000000 -9.500000000 -16.500000000 -36.500000000 -8.500000000 -16.500000000 -36.500000000 -7.500000000 -16.500000000 -36.500000000 -6.500000000 -16.500000000 -36.500000000 -5.500000000 -16.500000000 -36.500000000 -4.500000000 -16.500000000 -36.500000000 -3.500000000 -16.500000000 -36.500000000 -2.500000000 -16.500000000 -36.500000000 -1.500000000 -16.500000000 -36.500000000 -0.500000000 -16.500000000 -36.500000000 0.500000000 -16.500000000 -36.500000000 1.500000000 -16.500000000 -36.500000000 2.500000000 -16.500000000 -36.500000000 3.500000000 -16.500000000 -36.500000000 4.500000000 -16.500000000 -36.500000000 5.500000000 -16.500000000 -36.500000000 6.500000000 -16.500000000 -36.500000000 7.500000000 -16.500000000 -36.500000000 8.500000000 -16.500000000 -36.500000000 9.500000000 -16.500000000 -36.500000000 10.500000000 -16.500000000 -36.500000000 11.500000000 -16.500000000 -36.500000000 12.500000000 -16.500000000 -36.500000000 13.500000000 -16.500000000 -36.500000000 14.500000000 -16.500000000 -36.500000000 15.500000000 -16.500000000 -36.500000000 16.500000000 -16.500000000 
-36.500000000 17.500000000 -16.500000000 -36.500000000 18.500000000 -16.500000000 -36.500000000 19.500000000 -16.500000000 -36.500000000 20.500000000 -16.500000000 -36.500000000 21.500000000 -16.500000000 -36.500000000 22.500000000 -16.500000000 -36.500000000 23.500000000 -16.500000000 -36.500000000 24.500000000 -16.500000000 -36.500000000 25.499996185 -16.500000000 -36.499996185 26.499954224 -16.500000000 -36.499954224 27.499591827 -16.500000000 -36.499591827 28.497470856 -16.500000000 -36.497474670 29.488407135 -16.500000000 -36.488403320 30.458978653 -16.500000000 -36.458980560 31.384418488 -16.500000000 -36.384422302 32.233222961 -16.500000000 -36.233226776 32.981101990 -16.500000000 -35.981109619 -33.981101990 -15.500000000 -35.981101990 -33.233222961 -15.500000000 -36.233222961 -32.384422302 -15.500000000 -36.384418488 -31.458978653 -15.500000000 -36.458976746 -30.488407135 -15.500000000 -36.488403320 -29.497472763 -15.500000000 -36.497467041 -28.499593735 -15.500000000 -36.499591827 -27.499954224 -15.500000000 -36.499954224 -26.499996185 -15.500000000 -36.499996185 -25.500000000 -15.500000000 -36.500000000 -24.500000000 -15.500000000 -36.500000000 -23.500000000 -15.500000000 -36.500000000 -22.500000000 -15.500000000 -36.500000000 -21.500000000 -15.500000000 -36.500000000 -20.500000000 -15.500000000 -36.500000000 -19.500000000 -15.500000000 -36.500000000 -18.500000000 -15.500000000 -36.500000000 -17.500000000 -15.500000000 -36.500000000 -16.500000000 -15.500000000 -36.500000000 -15.500000000 -15.500000000 -36.500000000 -14.500000000 -15.500000000 -36.500000000 -13.500000000 -15.500000000 -36.500000000 -12.500000000 -15.500000000 -36.500000000 -11.500000000 -15.500000000 -36.500000000 -10.500000000 -15.500000000 -36.500000000 -9.500000000 -15.500000000 -36.500000000 -8.500000000 -15.500000000 -36.500000000 -7.500000000 -15.500000000 -36.500000000 -6.500000000 -15.500000000 -36.500000000 -5.500000000 -15.500000000 -36.500000000 -4.500000000 -15.500000000 
-36.500000000 -3.500000000 -15.500000000 -36.500000000 -2.500000000 -15.500000000 -36.500000000 -1.500000000 -15.500000000 -36.500000000 -0.500000000 -15.500000000 -36.500000000 0.500000000 -15.500000000 -36.500000000 1.500000000 -15.500000000 -36.500000000 2.500000000 -15.500000000 -36.500000000 3.500000000 -15.500000000 -36.500000000 4.500000000 -15.500000000 -36.500000000 5.500000000 -15.500000000 -36.500000000 6.500000000 -15.500000000 -36.500000000 7.500000000 -15.500000000 -36.500000000 8.500000000 -15.500000000 -36.500000000 9.500000000 -15.500000000 -36.500000000 10.500000000 -15.500000000 -36.500000000 11.500000000 -15.500000000 -36.500000000 12.500000000 -15.500000000 -36.500000000 13.500000000 -15.500000000 -36.500000000 14.500000000 -15.500000000 -36.500000000 15.500000000 -15.500000000 -36.500000000 16.500000000 -15.500000000 -36.500000000 17.500000000 -15.500000000 -36.500000000 18.500000000 -15.500000000 -36.500000000 19.500000000 -15.500000000 -36.500000000 20.500000000 -15.500000000 -36.500000000 21.500000000 -15.500000000 -36.500000000 22.500000000 -15.500000000 -36.500000000 23.500000000 -15.500000000 -36.500000000 24.500000000 -15.500000000 -36.500000000 25.499996185 -15.500000000 -36.499996185 26.499954224 -15.500000000 -36.499954224 27.499591827 -15.500000000 -36.499591827 28.497470856 -15.500000000 -36.497474670 29.488407135 -15.500000000 -36.488403320 30.458978653 -15.500000000 -36.458980560 31.384418488 -15.500000000 -36.384422302 32.233222961 -15.500000000 -36.233226776 32.981101990 -15.500000000 -35.981109619 -33.981101990 -14.500000000 -35.981101990 -33.233222961 -14.500000000 -36.233222961 -32.384422302 -14.500000000 -36.384418488 -31.458978653 -14.500000000 -36.458976746 -30.488407135 -14.500000000 -36.488403320 -29.497472763 -14.500000000 -36.497467041 -28.499593735 -14.500000000 -36.499591827 -27.499954224 -14.500000000 -36.499954224 -26.499996185 -14.500000000 -36.499996185 -25.500000000 -14.500000000 -36.500000000 -24.500000000 
-14.500000000 -36.500000000 -23.500000000 -14.500000000 -36.500000000 -22.500000000 -14.500000000 -36.500000000 -21.500000000 -14.500000000 -36.500000000 -20.500000000 -14.500000000 -36.500000000 -19.500000000 -14.500000000 -36.500000000 -18.500000000 -14.500000000 -36.500000000 -17.500000000 -14.500000000 -36.500000000 -16.500000000 -14.500000000 -36.500000000 -15.500000000 -14.500000000 -36.500000000 -14.500000000 -14.500000000 -36.500000000 -13.500000000 -14.500000000 -36.500000000 -12.500000000 -14.500000000 -36.500000000 -11.500000000 -14.500000000 -36.500000000 -10.500000000 -14.500000000 -36.500000000 -9.500000000 -14.500000000 -36.500000000 -8.500000000 -14.500000000 -36.500000000 -7.500000000 -14.500000000 -36.500000000 -6.500000000 -14.500000000 -36.500000000 -5.500000000 -14.500000000 -36.500000000 -4.500000000 -14.500000000 -36.500000000 -3.500000000 -14.500000000 -36.500000000 -2.500000000 -14.500000000 -36.500000000 -1.500000000 -14.500000000 -36.500000000 -0.500000000 -14.500000000 -36.500000000 0.500000000 -14.500000000 -36.500000000 1.500000000 -14.500000000 -36.500000000 2.500000000 -14.500000000 -36.500000000 3.500000000 -14.500000000 -36.500000000 4.500000000 -14.500000000 -36.500000000 5.500000000 -14.500000000 -36.500000000 6.500000000 -14.500000000 -36.500000000 7.500000000 -14.500000000 -36.500000000 8.500000000 -14.500000000 -36.500000000 9.500000000 -14.500000000 -36.500000000 10.500000000 -14.500000000 -36.500000000 11.500000000 -14.500000000 -36.500000000 12.500000000 -14.500000000 -36.500000000 13.500000000 -14.500000000 -36.500000000 14.500000000 -14.500000000 -36.500000000 15.500000000 -14.500000000 -36.500000000 16.500000000 -14.500000000 -36.500000000 17.500000000 -14.500000000 -36.500000000 18.500000000 -14.500000000 -36.500000000 19.500000000 -14.500000000 -36.500000000 20.500000000 -14.500000000 -36.500000000 21.500000000 -14.500000000 -36.500000000 22.500000000 -14.500000000 -36.500000000 23.500000000 -14.500000000 -36.500000000 
24.500000000 -14.500000000 -36.500000000 25.499996185 -14.500000000 -36.499996185 26.499954224 -14.500000000 -36.499954224 27.499591827 -14.500000000 -36.499591827 28.497470856 -14.500000000 -36.497474670 29.488407135 -14.500000000 -36.488403320 30.458978653 -14.500000000 -36.458980560 31.384418488 -14.500000000 -36.384422302 32.233222961 -14.500000000 -36.233226776 32.981101990 -14.500000000 -35.981109619 -33.981101990 -13.500000000 -35.981101990 -33.233222961 -13.500000000 -36.233222961 -32.384422302 -13.500000000 -36.384418488 -31.458978653 -13.500000000 -36.458976746 -30.488407135 -13.500000000 -36.488403320 -29.497472763 -13.500000000 -36.497467041 -28.499593735 -13.500000000 -36.499591827 -27.499954224 -13.500000000 -36.499954224 -26.499996185 -13.500000000 -36.499996185 -25.500000000 -13.500000000 -36.500000000 -24.500000000 -13.500000000 -36.500000000 -23.500000000 -13.500000000 -36.500000000 -22.500000000 -13.500000000 -36.500000000 -21.500000000 -13.500000000 -36.500000000 -20.500000000 -13.500000000 -36.500000000 -19.500000000 -13.500000000 -36.500000000 -18.500000000 -13.500000000 -36.500000000 -17.500000000 -13.500000000 -36.500000000 -16.500000000 -13.500000000 -36.500000000 -15.500000000 -13.500000000 -36.500000000 -14.500000000 -13.500000000 -36.500000000 -13.500000000 -13.500000000 -36.500000000 -12.500000000 -13.500000000 -36.500000000 -11.500000000 -13.500000000 -36.500000000 -10.500000000 -13.500000000 -36.500000000 -9.500000000 -13.500000000 -36.500000000 -8.500000000 -13.500000000 -36.500000000 -7.500000000 -13.500000000 -36.500000000 -6.500000000 -13.500000000 -36.500000000 -5.500000000 -13.500000000 -36.500000000 -4.500000000 -13.500000000 -36.500000000 -3.500000000 -13.500000000 -36.500000000 -2.500000000 -13.500000000 -36.500000000 -1.500000000 -13.500000000 -36.500000000 -0.500000000 -13.500000000 -36.500000000 0.500000000 -13.500000000 -36.500000000 1.500000000 -13.500000000 -36.500000000 2.500000000 -13.500000000 -36.500000000 
3.500000000 -13.500000000 -36.500000000 4.500000000 -13.500000000 -36.500000000 5.500000000 -13.500000000 -36.500000000 6.500000000 -13.500000000 -36.500000000 7.500000000 -13.500000000 -36.500000000 8.500000000 -13.500000000 -36.500000000 9.500000000 -13.500000000 -36.500000000 10.500000000 -13.500000000 -36.500000000 11.500000000 -13.500000000 -36.500000000 12.500000000 -13.500000000 -36.500000000 13.500000000 -13.500000000 -36.500000000 14.500000000 -13.500000000 -36.500000000 15.500000000 -13.500000000 -36.500000000 16.500000000 -13.500000000 -36.500000000 17.500000000 -13.500000000 -36.500000000 18.500000000 -13.500000000 -36.500000000 19.500000000 -13.500000000 -36.500000000 20.500000000 -13.500000000 -36.500000000 21.500000000 -13.500000000 -36.500000000 22.500000000 -13.500000000 -36.500000000 23.500000000 -13.500000000 -36.500000000 24.500000000 -13.500000000 -36.500000000 25.499996185 -13.500000000 -36.499996185 26.499954224 -13.500000000 -36.499954224 27.499591827 -13.500000000 -36.499591827 28.497470856 -13.500000000 -36.497474670 29.488407135 -13.500000000 -36.488403320 30.458978653 -13.500000000 -36.458980560 31.384418488 -13.500000000 -36.384422302 32.233222961 -13.500000000 -36.233226776 32.981101990 -13.500000000 -35.981109619 -33.981101990 -12.500000000 -35.981101990 -33.233222961 -12.500000000 -36.233222961 -32.384422302 -12.500000000 -36.384418488 -31.458978653 -12.500000000 -36.458976746 -30.488407135 -12.500000000 -36.488403320 -29.497472763 -12.500000000 -36.497467041 -28.499593735 -12.500000000 -36.499591827 -27.499954224 -12.500000000 -36.499954224 -26.499996185 -12.500000000 -36.499996185 -25.500000000 -12.500000000 -36.500000000 -24.500000000 -12.500000000 -36.500000000 -23.500000000 -12.500000000 -36.500000000 -22.500000000 -12.500000000 -36.500000000 -21.500000000 -12.500000000 -36.500000000 -20.500000000 -12.500000000 -36.500000000 -19.500000000 -12.500000000 -36.500000000 -18.500000000 -12.500000000 -36.500000000 -17.500000000 
-12.500000000 -36.500000000 -16.500000000 -12.500000000 -36.500000000 -15.500000000 -12.500000000 -36.500000000 -14.500000000 -12.500000000 -36.500000000 -13.500000000 -12.500000000 -36.500000000 -12.500000000 -12.500000000 -36.500000000 -11.500000000 -12.500000000 -36.500000000 -10.500000000 -12.500000000 -36.500000000 -9.500000000 -12.500000000 -36.500000000 -8.500000000 -12.500000000 -36.500000000 -7.500000000 -12.500000000 -36.500000000 -6.500000000 -12.500000000 -36.500000000 -5.500000000 -12.500000000 -36.500000000 -4.500000000 -12.500000000 -36.500000000 -3.500000000 -12.500000000 -36.500000000 -2.500000000 -12.500000000 -36.500000000 -1.500000000 -12.500000000 -36.500000000 -0.500000000 -12.500000000 -36.500000000 0.500000000 -12.500000000 -36.500000000 1.500000000 -12.500000000 -36.500000000 2.500000000 -12.500000000 -36.500000000 3.500000000 -12.500000000 -36.500000000 4.500000000 -12.500000000 -36.500000000 5.500000000 -12.500000000 -36.500000000 6.500000000 -12.500000000 -36.500000000 7.500000000 -12.500000000 -36.500000000 8.500000000 -12.500000000 -36.500000000 9.500000000 -12.500000000 -36.500000000 10.500000000 -12.500000000 -36.500000000 11.500000000 -12.500000000 -36.500000000 12.500000000 -12.500000000 -36.500000000 13.500000000 -12.500000000 -36.500000000 14.500000000 -12.500000000 -36.500000000 15.500000000 -12.500000000 -36.500000000 16.500000000 -12.500000000 -36.500000000 17.500000000 -12.500000000 -36.500000000 18.500000000 -12.500000000 -36.500000000 19.500000000 -12.500000000 -36.500000000 20.500000000 -12.500000000 -36.500000000 21.500000000 -12.500000000 -36.500000000 22.500000000 -12.500000000 -36.500000000 23.500000000 -12.500000000 -36.500000000 24.500000000 -12.500000000 -36.500000000 25.499996185 -12.500000000 -36.499996185 26.499954224 -12.500000000 -36.499954224 27.499591827 -12.500000000 -36.499591827 28.497470856 -12.500000000 -36.497474670 29.488407135 -12.500000000 -36.488403320 30.458978653 -12.500000000 -36.458980560 
31.384418488 -12.500000000 -36.384422302 32.233222961 -12.500000000 -36.233226776 32.981101990 -12.500000000 -35.981109619 -33.981101990 -11.500000000 -35.981101990 -33.233222961 -11.500000000 -36.233222961 -32.384422302 -11.500000000 -36.384418488 -31.458978653 -11.500000000 -36.458976746 -30.488407135 -11.500000000 -36.488403320 -29.497472763 -11.500000000 -36.497467041 -28.499593735 -11.500000000 -36.499591827 -27.499954224 -11.500000000 -36.499954224 -26.499996185 -11.500000000 -36.499996185 -25.500000000 -11.500000000 -36.500000000 -24.500000000 -11.500000000 -36.500000000 -23.500000000 -11.500000000 -36.500000000 -22.500000000 -11.500000000 -36.500000000 -21.500000000 -11.500000000 -36.500000000 -20.500000000 -11.500000000 -36.500000000 -19.500000000 -11.500000000 -36.500000000 -18.500000000 -11.500000000 -36.500000000 -17.500000000 -11.500000000 -36.500000000 -16.500000000 -11.500000000 -36.500000000 -15.500000000 -11.500000000 -36.500000000 -14.500000000 -11.500000000 -36.500000000 -13.500000000 -11.500000000 -36.500000000 -12.500000000 -11.500000000 -36.500000000 -11.500000000 -11.500000000 -36.500000000 -10.500000000 -11.500000000 -36.500000000 -9.500000000 -11.500000000 -36.500000000 -8.500000000 -11.500000000 -36.500000000 -7.500000000 -11.500000000 -36.500000000 -6.500000000 -11.500000000 -36.500000000 -5.500000000 -11.500000000 -36.500000000 -4.500000000 -11.500000000 -36.500000000 -3.500000000 -11.500000000 -36.500000000 -2.500000000 -11.500000000 -36.500000000 -1.500000000 -11.500000000 -36.500000000 -0.500000000 -11.500000000 -36.500000000 0.500000000 -11.500000000 -36.500000000 1.500000000 -11.500000000 -36.500000000 2.500000000 -11.500000000 -36.500000000 3.500000000 -11.500000000 -36.500000000 4.500000000 -11.500000000 -36.500000000 5.500000000 -11.500000000 -36.500000000 6.500000000 -11.500000000 -36.500000000 7.500000000 -11.500000000 -36.500000000 8.500000000 -11.500000000 -36.500000000 9.500000000 -11.500000000 -36.500000000 10.500000000 
-11.500000000 -36.500000000 11.500000000 -11.500000000 -36.500000000 12.500000000 -11.500000000 -36.500000000 13.500000000 -11.500000000 -36.500000000 14.500000000 -11.500000000 -36.500000000 15.500000000 -11.500000000 -36.500000000 16.500000000 -11.500000000 -36.500000000 17.500000000 -11.500000000 -36.500000000 18.500000000 -11.500000000 -36.500000000 19.500000000 -11.500000000 -36.500000000 20.500000000 -11.500000000 -36.500000000 21.500000000 -11.500000000 -36.500000000 22.500000000 -11.500000000 -36.500000000 23.500000000 -11.500000000 -36.500000000 24.500000000 -11.500000000 -36.500000000 25.499996185 -11.500000000 -36.499996185 26.499954224 -11.500000000 -36.499954224 27.499591827 -11.500000000 -36.499591827 28.497470856 -11.500000000 -36.497474670 29.488407135 -11.500000000 -36.488403320 30.458978653 -11.500000000 -36.458980560 31.384418488 -11.500000000 -36.384422302 32.233222961 -11.500000000 -36.233226776 32.981101990 -11.500000000 -35.981109619 -33.981101990 -10.500000000 -35.981101990 -33.233222961 -10.500000000 -36.233222961 -32.384422302 -10.500000000 -36.384418488 -31.458978653 -10.500000000 -36.458976746 -30.488407135 -10.500000000 -36.488403320 -29.497472763 -10.500000000 -36.497467041 -28.499593735 -10.500000000 -36.499591827 -27.499954224 -10.500000000 -36.499954224 -26.499996185 -10.500000000 -36.499996185 -25.500000000 -10.500000000 -36.500000000 -24.500000000 -10.500000000 -36.500000000 -23.500000000 -10.500000000 -36.500000000 -22.500000000 -10.500000000 -36.500000000 -21.500000000 -10.500000000 -36.500000000 -20.500000000 -10.500000000 -36.500000000 -19.500000000 -10.500000000 -36.500000000 -18.500000000 -10.500000000 -36.500000000 -17.500000000 -10.500000000 -36.500000000 -16.500000000 -10.500000000 -36.500000000 -15.500000000 -10.500000000 -36.500000000 -14.500000000 -10.500000000 -36.500000000 -13.500000000 -10.500000000 -36.500000000 -12.500000000 -10.500000000 -36.500000000 -11.500000000 -10.500000000 -36.500000000 -10.500000000 
-10.500000000 -36.500000000 -9.500000000 -10.500000000 -36.500000000 -8.500000000 -10.500000000 -36.500000000 -7.500000000 -10.500000000 -36.500000000 -6.500000000 -10.500000000 -36.500000000 -5.500000000 -10.500000000 -36.500000000 -4.500000000 -10.500000000 -36.500000000 -3.500000000 -10.500000000 -36.500000000 -2.500000000 -10.500000000 -36.500000000 -1.500000000 -10.500000000 -36.500000000 -0.500000000 -10.500000000 -36.500000000 0.500000000 -10.500000000 -36.500000000 1.500000000 -10.500000000 -36.500000000 2.500000000 -10.500000000 -36.500000000 3.500000000 -10.500000000 -36.500000000 4.500000000 -10.500000000 -36.500000000 5.500000000 -10.500000000 -36.500000000 6.500000000 -10.500000000 -36.500000000 7.500000000 -10.500000000 -36.500000000 8.500000000 -10.500000000 -36.500000000 9.500000000 -10.500000000 -36.500000000 10.500000000 -10.500000000 -36.500000000 11.500000000 -10.500000000 -36.500000000 12.500000000 -10.500000000 -36.500000000 13.500000000 -10.500000000 -36.500000000 14.500000000 -10.500000000 -36.500000000 15.500000000 -10.500000000 -36.500000000 16.500000000 -10.500000000 -36.500000000 17.500000000 -10.500000000 -36.500000000 18.500000000 -10.500000000 -36.500000000 19.500000000 -10.500000000 -36.500000000 20.500000000 -10.500000000 -36.500000000 21.500000000 -10.500000000 -36.500000000 22.500000000 -10.500000000 -36.500000000 23.500000000 -10.500000000 -36.500000000 24.500000000 -10.500000000 -36.500000000 25.499996185 -10.500000000 -36.499996185 26.499954224 -10.500000000 -36.499954224 27.499591827 -10.500000000 -36.499591827 28.497470856 -10.500000000 -36.497474670 29.488407135 -10.500000000 -36.488403320 30.458978653 -10.500000000 -36.458980560 31.384418488 -10.500000000 -36.384422302 32.233222961 -10.500000000 -36.233226776 32.981101990 -10.500000000 -35.981109619 -33.981101990 -9.500000000 -35.981101990 -33.233222961 -9.500000000 -36.233222961 -32.384422302 -9.500000000 -36.384418488 -31.458978653 -9.500000000 -36.458976746 -30.488407135 
-9.500000000 -36.488403320 -29.497472763 -9.500000000 -36.497467041 -28.499593735 -9.500000000 -36.499591827 -27.499954224 -9.500000000 -36.499954224 -26.499996185 -9.500000000 -36.499996185 -25.500000000 -9.500000000 -36.500000000 -24.500000000 -9.500000000 -36.500000000 -23.500000000 -9.500000000 -36.500000000 -22.500000000 -9.500000000 -36.500000000 -21.500000000 -9.500000000 -36.500000000 -20.500000000 -9.500000000 -36.500000000 -19.500000000 -9.500000000 -36.500000000 -18.500000000 -9.500000000 -36.500000000 -17.500000000 -9.500000000 -36.500000000 -16.500000000 -9.500000000 -36.500000000 -15.500000000 -9.500000000 -36.500000000 -14.500000000 -9.500000000 -36.500000000 -13.500000000 -9.500000000 -36.500000000 -12.500000000 -9.500000000 -36.500000000 -11.500000000 -9.500000000 -36.500000000 -10.500000000 -9.500000000 -36.500000000 -9.500000000 -9.500000000 -36.500000000 -8.500000000 -9.500000000 -36.500000000 -7.500000000 -9.500000000 -36.500000000 -6.500000000 -9.500000000 -36.500000000 -5.500000000 -9.500000000 -36.500000000 -4.500000000 -9.500000000 -36.500000000 -3.500000000 -9.500000000 -36.500000000 -2.500000000 -9.500000000 -36.500000000 -1.500000000 -9.500000000 -36.500000000 -0.500000000 -9.500000000 -36.500000000 0.500000000 -9.500000000 -36.500000000 1.500000000 -9.500000000 -36.500000000 2.500000000 -9.500000000 -36.500000000 3.500000000 -9.500000000 -36.500000000 4.500000000 -9.500000000 -36.500000000 5.500000000 -9.500000000 -36.500000000 6.500000000 -9.500000000 -36.500000000 7.500000000 -9.500000000 -36.500000000 8.500000000 -9.500000000 -36.500000000 9.500000000 -9.500000000 -36.500000000 10.500000000 -9.500000000 -36.500000000 11.500000000 -9.500000000 -36.500000000 12.500000000 -9.500000000 -36.500000000 13.500000000 -9.500000000 -36.500000000 14.500000000 -9.500000000 -36.500000000 15.500000000 -9.500000000 -36.500000000 16.500000000 -9.500000000 -36.500000000 17.500000000 -9.500000000 -36.500000000 18.500000000 -9.500000000 -36.500000000 
19.500000000 -9.500000000 -36.500000000 20.500000000 -9.500000000 -36.500000000 21.500000000 -9.500000000 -36.500000000 22.500000000 -9.500000000 -36.500000000 23.500000000 -9.500000000 -36.500000000 24.500000000 -9.500000000 -36.500000000 25.499996185 -9.500000000 -36.499996185 26.499954224 -9.500000000 -36.499954224 27.499591827 -9.500000000 -36.499591827 28.497470856 -9.500000000 -36.497474670 29.488407135 -9.500000000 -36.488403320 30.458978653 -9.500000000 -36.458980560 31.384418488 -9.500000000 -36.384422302 32.233222961 -9.500000000 -36.233226776 32.981101990 -9.500000000 -35.981109619 -33.981101990 -8.500000000 -35.981101990 -33.233222961 -8.500000000 -36.233222961 -32.384422302 -8.500000000 -36.384418488 -31.458978653 -8.500000000 -36.458976746 -30.488407135 -8.500000000 -36.488403320 -29.497472763 -8.500000000 -36.497467041 -28.499593735 -8.500000000 -36.499591827 -27.499954224 -8.500000000 -36.499954224 -26.499996185 -8.500000000 -36.499996185 -25.500000000 -8.500000000 -36.500000000 -24.500000000 -8.500000000 -36.500000000 -23.500000000 -8.500000000 -36.500000000 -22.500000000 -8.500000000 -36.500000000 -21.500000000 -8.500000000 -36.500000000 -20.500000000 -8.500000000 -36.500000000 -19.500000000 -8.500000000 -36.500000000 -18.500000000 -8.500000000 -36.500000000 -17.500000000 -8.500000000 -36.500000000 -16.500000000 -8.500000000 -36.500000000 -15.500000000 -8.500000000 -36.500000000 -14.500000000 -8.500000000 -36.500000000 -13.500000000 -8.500000000 -36.500000000 -12.500000000 -8.500000000 -36.500000000 -11.500000000 -8.500000000 -36.500000000 -10.500000000 -8.500000000 -36.500000000 -9.500000000 -8.500000000 -36.500000000 -8.500000000 -8.500000000 -36.500000000 -7.500000000 -8.500000000 -36.500000000 -6.500000000 -8.500000000 -36.500000000 -5.500000000 -8.500000000 -36.500000000 -4.500000000 -8.500000000 -36.500000000 -3.500000000 -8.500000000 -36.500000000 -2.500000000 -8.500000000 -36.500000000 -1.500000000 -8.500000000 -36.500000000 -0.500000000 
-8.500000000 -36.500000000 0.500000000 -8.500000000 -36.500000000 1.500000000 -8.500000000 -36.500000000 2.500000000 -8.500000000 -36.500000000 3.500000000 -8.500000000 -36.500000000 4.500000000 -8.500000000 -36.500000000 5.500000000 -8.500000000 -36.500000000 6.500000000 -8.500000000 -36.500000000 7.500000000 -8.500000000 -36.500000000 8.500000000 -8.500000000 -36.500000000 9.500000000 -8.500000000 -36.500000000 10.500000000 -8.500000000 -36.500000000 11.500000000 -8.500000000 -36.500000000 12.500000000 -8.500000000 -36.500000000 13.500000000 -8.500000000 -36.500000000 14.500000000 -8.500000000 -36.500000000 15.500000000 -8.500000000 -36.500000000 16.500000000 -8.500000000 -36.500000000 17.500000000 -8.500000000 -36.500000000 18.500000000 -8.500000000 -36.500000000 19.500000000 -8.500000000 -36.500000000 20.500000000 -8.500000000 -36.500000000 21.500000000 -8.500000000 -36.500000000 22.500000000 -8.500000000 -36.500000000 23.500000000 -8.500000000 -36.500000000 24.500000000 -8.500000000 -36.500000000 25.499996185 -8.500000000 -36.499996185 26.499954224 -8.500000000 -36.499954224 27.499591827 -8.500000000 -36.499591827 28.497470856 -8.500000000 -36.497474670 29.488407135 -8.500000000 -36.488403320 30.458978653 -8.500000000 -36.458980560 31.384418488 -8.500000000 -36.384422302 32.233222961 -8.500000000 -36.233226776 32.981101990 -8.500000000 -35.981109619 -33.981101990 -7.500000000 -35.981101990 -33.233222961 -7.500000000 -36.233222961 -32.384422302 -7.500000000 -36.384418488 -31.458978653 -7.500000000 -36.458976746 -30.488407135 -7.500000000 -36.488403320 -29.497472763 -7.500000000 -36.497467041 -28.499593735 -7.500000000 -36.499591827 -27.499954224 -7.500000000 -36.499954224 -26.499996185 -7.500000000 -36.499996185 -25.500000000 -7.500000000 -36.500000000 -24.500000000 -7.500000000 -36.500000000 -23.500000000 -7.500000000 -36.500000000 -22.500000000 -7.500000000 -36.500000000 -21.500000000 -7.500000000 -36.500000000 -20.500000000 -7.500000000 -36.500000000 
-19.500000000 -7.500000000 -36.500000000 -18.500000000 -7.500000000 -36.500000000 -17.500000000 -7.500000000 -36.500000000 -16.500000000 -7.500000000 -36.500000000 -15.500000000 -7.500000000 -36.500000000 -14.500000000 -7.500000000 -36.500000000 -13.500000000 -7.500000000 -36.500000000 -12.500000000 -7.500000000 -36.500000000 -11.500000000 -7.500000000 -36.500000000 -10.500000000 -7.500000000 -36.500000000 -9.500000000 -7.500000000 -36.500000000 -8.500000000 -7.500000000 -36.500000000 -7.500000000 -7.500000000 -36.500000000 -6.500000000 -7.500000000 -36.500000000 -5.500000000 -7.500000000 -36.500000000 -4.500000000 -7.500000000 -36.500000000 -3.500000000 -7.500000000 -36.500000000 -2.500000000 -7.500000000 -36.500000000 -1.500000000 -7.500000000 -36.500000000 -0.500000000 -7.500000000 -36.500000000 0.500000000 -7.500000000 -36.500000000 1.500000000 -7.500000000 -36.500000000 2.500000000 -7.500000000 -36.500000000 3.500000000 -7.500000000 -36.500000000 4.500000000 -7.500000000 -36.500000000 5.500000000 -7.500000000 -36.500000000 6.500000000 -7.500000000 -36.500000000 7.500000000 -7.500000000 -36.500000000 8.500000000 -7.500000000 -36.500000000 9.500000000 -7.500000000 -36.500000000 10.500000000 -7.500000000 -36.500000000 11.500000000 -7.500000000 -36.500000000 12.500000000 -7.500000000 -36.500000000 13.500000000 -7.500000000 -36.500000000 14.500000000 -7.500000000 -36.500000000 15.500000000 -7.500000000 -36.500000000 16.500000000 -7.500000000 -36.500000000 17.500000000 -7.500000000 -36.500000000 18.500000000 -7.500000000 -36.500000000 19.500000000 -7.500000000 -36.500000000 20.500000000 -7.500000000 -36.500000000 21.500000000 -7.500000000 -36.500000000 22.500000000 -7.500000000 -36.500000000 23.500000000 -7.500000000 -36.500000000 24.500000000 -7.500000000 -36.500000000 25.499996185 -7.500000000 -36.499996185 26.499954224 -7.500000000 -36.499954224 27.499591827 -7.500000000 -36.499591827 28.497470856 -7.500000000 -36.497474670 29.488407135 -7.500000000 -36.488403320 
30.458978653 -7.500000000 -36.458980560 31.384418488 -7.500000000 -36.384422302 32.233222961 -7.500000000 -36.233226776 32.981101990 -7.500000000 -35.981109619 -33.981101990 -6.500000000 -35.981101990 -33.233222961 -6.500000000 -36.233222961 -32.384422302 -6.500000000 -36.384418488 -31.458978653 -6.500000000 -36.458976746 -30.488407135 -6.500000000 -36.488403320 -29.497472763 -6.500000000 -36.497467041 -28.499593735 -6.500000000 -36.499591827 -27.499954224 -6.500000000 -36.499954224 -26.499996185 -6.500000000 -36.499996185 -25.500000000 -6.500000000 -36.500000000 -24.500000000 -6.500000000 -36.500000000 -23.500000000 -6.500000000 -36.500000000 -22.500000000 -6.500000000 -36.500000000 -21.500000000 -6.500000000 -36.500000000 -20.500000000 -6.500000000 -36.500000000 -19.500000000 -6.500000000 -36.500000000 -18.500000000 -6.500000000 -36.500000000 -17.500000000 -6.500000000 -36.500000000 -16.500000000 -6.500000000 -36.500000000 -15.500000000 -6.500000000 -36.500000000 -14.500000000 -6.500000000 -36.500000000 -13.500000000 -6.500000000 -36.500000000 -12.500000000 -6.500000000 -36.500000000 -11.500000000 -6.500000000 -36.500000000 -10.500000000 -6.500000000 -36.500000000 -9.500000000 -6.500000000 -36.500000000 -8.500000000 -6.500000000 -36.500000000 -7.500000000 -6.500000000 -36.500000000 -6.500000000 -6.500000000 -36.500000000 -5.500000000 -6.500000000 -36.500000000 -4.500000000 -6.500000000 -36.500000000 -3.500000000 -6.500000000 -36.500000000 -2.500000000 -6.500000000 -36.500000000 -1.500000000 -6.500000000 -36.500000000 -0.500000000 -6.500000000 -36.500000000 0.500000000 -6.500000000 -36.500000000 1.500000000 -6.500000000 -36.500000000 2.500000000 -6.500000000 -36.500000000 3.500000000 -6.500000000 -36.500000000 4.500000000 -6.500000000 -36.500000000 5.500000000 -6.500000000 -36.500000000 6.500000000 -6.500000000 -36.500000000 7.500000000 -6.500000000 -36.500000000 8.500000000 -6.500000000 -36.500000000 9.500000000 -6.500000000 -36.500000000 10.500000000 
-6.500000000 -36.500000000 11.500000000 -6.500000000 -36.500000000 12.500000000 -6.500000000 -36.500000000 13.500000000 -6.500000000 -36.500000000 14.500000000 -6.500000000 -36.500000000 15.500000000 -6.500000000 -36.500000000 16.500000000 -6.500000000 -36.500000000 17.500000000 -6.500000000 -36.500000000 18.500000000 -6.500000000 -36.500000000 19.500000000 -6.500000000 -36.500000000 20.500000000 -6.500000000 -36.500000000 21.500000000 -6.500000000 -36.500000000 22.500000000 -6.500000000 -36.500000000 23.500000000 -6.500000000 -36.500000000 24.500000000 -6.500000000 -36.500000000 25.499996185 -6.500000000 -36.499996185 26.499954224 -6.500000000 -36.499954224 27.499591827 -6.500000000 -36.499591827 28.497470856 -6.500000000 -36.497474670 29.488407135 -6.500000000 -36.488403320 30.458978653 -6.500000000 -36.458980560 31.384418488 -6.500000000 -36.384422302 32.233222961 -6.500000000 -36.233226776 32.981101990 -6.500000000 -35.981109619 -33.981101990 -5.500000000 -35.981101990 -33.233222961 -5.500000000 -36.233222961 -32.384422302 -5.500000000 -36.384418488 -31.458978653 -5.500000000 -36.458976746 -30.488407135 -5.500000000 -36.488403320 -29.497472763 -5.500000000 -36.497467041 -28.499593735 -5.500000000 -36.499591827 -27.499954224 -5.500000000 -36.499954224 -26.499996185 -5.500000000 -36.499996185 -25.500000000 -5.500000000 -36.500000000 -24.500000000 -5.500000000 -36.500000000 -23.500000000 -5.500000000 -36.500000000 -22.500000000 -5.500000000 -36.500000000 -21.500000000 -5.500000000 -36.500000000 -20.500000000 -5.500000000 -36.500000000 -19.500000000 -5.500000000 -36.500000000 -18.500000000 -5.500000000 -36.500000000 -17.500000000 -5.500000000 -36.500000000 -16.500000000 -5.500000000 -36.500000000 -15.500000000 -5.500000000 -36.500000000 -14.500000000 -5.500000000 -36.500000000 -13.500000000 -5.500000000 -36.500000000 -12.500000000 -5.500000000 -36.500000000 -11.500000000 -5.500000000 -36.500000000 -10.500000000 -5.500000000 -36.500000000 -9.500000000 -5.500000000 
-36.500000000 -8.500000000 -5.500000000 -36.500000000 -7.500000000 -5.500000000 -36.500000000 -6.500000000 -5.500000000 -36.500000000 -5.500000000 -5.500000000 -36.500000000 -4.500000000 -5.500000000 -36.500000000 -3.500000000 -5.500000000 -36.500000000 -2.500000000 -5.500000000 -36.500000000 -1.500000000 -5.500000000 -36.500000000 -0.500000000 -5.500000000 -36.500000000 0.500000000 -5.500000000 -36.500000000 1.500000000 -5.500000000 -36.500000000 2.500000000 -5.500000000 -36.500000000 3.500000000 -5.500000000 -36.500000000 4.500000000 -5.500000000 -36.500000000 5.500000000 -5.500000000 -36.500000000 6.500000000 -5.500000000 -36.500000000 7.500000000 -5.500000000 -36.500000000 8.500000000 -5.500000000 -36.500000000 9.500000000 -5.500000000 -36.500000000 10.500000000 -5.500000000 -36.500000000 11.500000000 -5.500000000 -36.500000000 12.500000000 -5.500000000 -36.500000000 13.500000000 -5.500000000 -36.500000000 14.500000000 -5.500000000 -36.500000000 15.500000000 -5.500000000 -36.500000000 16.500000000 -5.500000000 -36.500000000 17.500000000 -5.500000000 -36.500000000 18.500000000 -5.500000000 -36.500000000 19.500000000 -5.500000000 -36.500000000 20.500000000 -5.500000000 -36.500000000 21.500000000 -5.500000000 -36.500000000 22.500000000 -5.500000000 -36.500000000 23.500000000 -5.500000000 -36.500000000 24.500000000 -5.500000000 -36.500000000 25.499996185 -5.500000000 -36.499996185 26.499954224 -5.500000000 -36.499954224 27.499591827 -5.500000000 -36.499591827 28.497470856 -5.500000000 -36.497474670 29.488407135 -5.500000000 -36.488403320 30.458978653 -5.500000000 -36.458980560 31.384418488 -5.500000000 -36.384422302 32.233222961 -5.500000000 -36.233226776 32.981101990 -5.500000000 -35.981109619 -33.981101990 -4.500000000 -35.981101990 -33.233222961 -4.500000000 -36.233222961 -32.384422302 -4.500000000 -36.384418488 -31.458978653 -4.500000000 -36.458976746 -30.488407135 -4.500000000 -36.488403320 -29.497472763 -4.500000000 -36.497467041 -28.499593735 -4.500000000 
-36.499591827 -27.499954224 -4.500000000 -36.499954224 -26.499996185 -4.500000000 -36.499996185 -25.500000000 -4.500000000 -36.500000000 -24.500000000 -4.500000000 -36.500000000 -23.500000000 -4.500000000 -36.500000000 -22.500000000 -4.500000000 -36.500000000 -21.500000000 -4.500000000 -36.500000000 -20.500000000 -4.500000000 -36.500000000 -19.500000000 -4.500000000 -36.500000000 -18.500000000 -4.500000000 -36.500000000 -17.500000000 -4.500000000 -36.500000000 -16.500000000 -4.500000000 -36.500000000 -15.500000000 -4.500000000 -36.500000000 -14.500000000 -4.500000000 -36.500000000 -13.500000000 -4.500000000 -36.500000000 -12.500000000 -4.500000000 -36.500000000 -11.500000000 -4.500000000 -36.500000000 -10.500000000 -4.500000000 -36.500000000 -9.500000000 -4.500000000 -36.500000000 -8.500000000 -4.500000000 -36.500000000 -7.500000000 -4.500000000 -36.500000000 -6.500000000 -4.500000000 -36.500000000 -5.500000000 -4.500000000 -36.500000000 -4.500000000 -4.500000000 -36.500000000 -3.500000000 -4.500000000 -36.500000000 -2.500000000 -4.500000000 -36.500000000 -1.500000000 -4.500000000 -36.500000000 -0.500000000 -4.500000000 -36.500000000 0.500000000 -4.500000000 -36.500000000 1.500000000 -4.500000000 -36.500000000 2.500000000 -4.500000000 -36.500000000 3.500000000 -4.500000000 -36.500000000 4.500000000 -4.500000000 -36.500000000 5.500000000 -4.500000000 -36.500000000 6.500000000 -4.500000000 -36.500000000 7.500000000 -4.500000000 -36.500000000 8.500000000 -4.500000000 -36.500000000 9.500000000 -4.500000000 -36.500000000 10.500000000 -4.500000000 -36.500000000 11.500000000 -4.500000000 -36.500000000 12.500000000 -4.500000000 -36.500000000 13.500000000 -4.500000000 -36.500000000 14.500000000 -4.500000000 -36.500000000 15.500000000 -4.500000000 -36.500000000 16.500000000 -4.500000000 -36.500000000 17.500000000 -4.500000000 -36.500000000 18.500000000 -4.500000000 -36.500000000 19.500000000 -4.500000000 -36.500000000 20.500000000 -4.500000000 -36.500000000 21.500000000 
-4.500000000 -36.500000000 22.500000000 -4.500000000 -36.500000000 23.500000000 -4.500000000 -36.500000000 24.500000000 -4.500000000 -36.500000000 25.499996185 -4.500000000 -36.499996185 26.499954224 -4.500000000 -36.499954224 27.499591827 -4.500000000 -36.499591827 28.497470856 -4.500000000 -36.497474670 29.488407135 -4.500000000 -36.488403320 30.458978653 -4.500000000 -36.458980560 31.384418488 -4.500000000 -36.384422302 32.233222961 -4.500000000 -36.233226776 32.981101990 -4.500000000 -35.981109619 -33.981101990 -3.500000000 -35.981101990 -33.233222961 -3.500000000 -36.233222961 -32.384422302 -3.500000000 -36.384418488 -31.458978653 -3.500000000 -36.458976746 -30.488407135 -3.500000000 -36.488403320 -29.497472763 -3.500000000 -36.497467041 -28.499593735 -3.500000000 -36.499591827 -27.499954224 -3.500000000 -36.499954224 -26.499996185 -3.500000000 -36.499996185 -25.500000000 -3.500000000 -36.500000000 -24.500000000 -3.500000000 -36.500000000 -23.500000000 -3.500000000 -36.500000000 -22.500000000 -3.500000000 -36.500000000 -21.500000000 -3.500000000 -36.500000000 -20.500000000 -3.500000000 -36.500000000 -19.500000000 -3.500000000 -36.500000000 -18.500000000 -3.500000000 -36.500000000 -17.500000000 -3.500000000 -36.500000000 -16.500000000 -3.500000000 -36.500000000 -15.500000000 -3.500000000 -36.500000000 -14.500000000 -3.500000000 -36.500000000 -13.500000000 -3.500000000 -36.500000000 -12.500000000 -3.500000000 -36.500000000 -11.500000000 -3.500000000 -36.500000000 -10.500000000 -3.500000000 -36.500000000 -9.500000000 -3.500000000 -36.500000000 -8.500000000 -3.500000000 -36.500000000 -7.500000000 -3.500000000 -36.500000000 -6.500000000 -3.500000000 -36.500000000 -5.500000000 -3.500000000 -36.500000000 -4.500000000 -3.500000000 -36.500000000 -3.500000000 -3.500000000 -36.500000000 -2.500000000 -3.500000000 -36.500000000 -1.500000000 -3.500000000 -36.500000000 -0.500000000 -3.500000000 -36.500000000 0.500000000 -3.500000000 -36.500000000 1.500000000 -3.500000000 
-36.500000000 2.500000000 -3.500000000 -36.500000000 3.500000000 -3.500000000 -36.500000000 4.500000000 -3.500000000 -36.500000000 5.500000000 -3.500000000 -36.500000000 6.500000000 -3.500000000 -36.500000000 7.500000000 -3.500000000 -36.500000000 8.500000000 -3.500000000 -36.500000000 9.500000000 -3.500000000 -36.500000000 10.500000000 -3.500000000 -36.500000000 11.500000000 -3.500000000 -36.500000000 12.500000000 -3.500000000 -36.500000000 13.500000000 -3.500000000 -36.500000000 14.500000000 -3.500000000 -36.500000000 15.500000000 -3.500000000 -36.500000000 16.500000000 -3.500000000 -36.500000000 17.500000000 -3.500000000 -36.500000000 18.500000000 -3.500000000 -36.500000000 19.500000000 -3.500000000 -36.500000000 20.500000000 -3.500000000 -36.500000000 21.500000000 -3.500000000 -36.500000000 22.500000000 -3.500000000 -36.500000000 23.500000000 -3.500000000 -36.500000000 24.500000000 -3.500000000 -36.500000000 25.499996185 -3.500000000 -36.499996185 26.499954224 -3.500000000 -36.499954224 27.499591827 -3.500000000 -36.499591827 28.497470856 -3.500000000 -36.497474670 29.488407135 -3.500000000 -36.488403320 30.458978653 -3.500000000 -36.458980560 31.384418488 -3.500000000 -36.384422302 32.233222961 -3.500000000 -36.233226776 32.981101990 -3.500000000 -35.981109619 -33.981101990 -2.500000000 -35.981101990 -33.233222961 -2.500000000 -36.233222961 -32.384422302 -2.500000000 -36.384418488 -31.458978653 -2.500000000 -36.458976746 -30.488407135 -2.500000000 -36.488403320 -29.497472763 -2.500000000 -36.497467041 -28.499593735 -2.500000000 -36.499591827 -27.499954224 -2.500000000 -36.499954224 -26.499996185 -2.500000000 -36.499996185 -25.500000000 -2.500000000 -36.500000000 -24.500000000 -2.500000000 -36.500000000 -23.500000000 -2.500000000 -36.500000000 -22.500000000 -2.500000000 -36.500000000 -21.500000000 -2.500000000 -36.500000000 -20.500000000 -2.500000000 -36.500000000 -19.500000000 -2.500000000 -36.500000000 -18.500000000 -2.500000000 -36.500000000 -17.500000000 
-2.500000000 -36.500000000 -16.500000000 -2.500000000 -36.500000000 -15.500000000 -2.500000000 -36.500000000 -14.500000000 -2.500000000 -36.500000000 -13.500000000 -2.500000000 -36.500000000 -12.500000000 -2.500000000 -36.500000000 -11.500000000 -2.500000000 -36.500000000 -10.500000000 -2.500000000 -36.500000000 -9.500000000 -2.500000000 -36.500000000 -8.500000000 -2.500000000 -36.500000000 -7.500000000 -2.500000000 -36.500000000 -6.500000000 -2.500000000 -36.500000000 -5.500000000 -2.500000000 -36.500000000 -4.500000000 -2.500000000 -36.500000000 -3.500000000 -2.500000000 -36.500000000 -2.500000000 -2.500000000 -36.500000000 -1.500000000 -2.500000000 -36.500000000 -0.500000000 -2.500000000 -36.500000000 0.500000000 -2.500000000 -36.500000000 1.500000000 -2.500000000 -36.500000000 2.500000000 -2.500000000 -36.500000000 3.500000000 -2.500000000 -36.500000000 4.500000000 -2.500000000 -36.500000000 5.500000000 -2.500000000 -36.500000000 6.500000000 -2.500000000 -36.500000000 7.500000000 -2.500000000 -36.500000000 8.500000000 -2.500000000 -36.500000000 9.500000000 -2.500000000 -36.500000000 10.500000000 -2.500000000 -36.500000000 11.500000000 -2.500000000 -36.500000000 12.500000000 -2.500000000 -36.500000000 13.500000000 -2.500000000 -36.500000000 14.500000000 -2.500000000 -36.500000000 15.500000000 -2.500000000 -36.500000000 16.500000000 -2.500000000 -36.500000000 17.500000000 -2.500000000 -36.500000000 18.500000000 -2.500000000 -36.500000000 19.500000000 -2.500000000 -36.500000000 20.500000000 -2.500000000 -36.500000000 21.500000000 -2.500000000 -36.500000000 22.500000000 -2.500000000 -36.500000000 23.500000000 -2.500000000 -36.500000000 24.500000000 -2.500000000 -36.500000000 25.499996185 -2.500000000 -36.499996185 26.499954224 -2.500000000 -36.499954224 27.499591827 -2.500000000 -36.499591827 28.497470856 -2.500000000 -36.497474670 29.488407135 -2.500000000 -36.488403320 30.458978653 -2.500000000 -36.458980560 31.384418488 -2.500000000 -36.384422302 32.233222961 
-2.500000000 -36.233226776 32.981101990 -2.500000000 -35.981109619 -33.981101990 -1.500000000 -35.981101990 -33.233222961 -1.500000000 -36.233222961 -32.384422302 -1.500000000 -36.384418488 -31.458978653 -1.500000000 -36.458976746 -30.488407135 -1.500000000 -36.488403320 -29.497472763 -1.500000000 -36.497467041 -28.499593735 -1.500000000 -36.499591827 -27.499954224 -1.500000000 -36.499954224 -26.499996185 -1.500000000 -36.499996185 -25.500000000 -1.500000000 -36.500000000 -24.500000000 -1.500000000 -36.500000000 -23.500000000 -1.500000000 -36.500000000 -22.500000000 -1.500000000 -36.500000000 -21.500000000 -1.500000000 -36.500000000 -20.500000000 -1.500000000 -36.500000000 -19.500000000 -1.500000000 -36.500000000 -18.500000000 -1.500000000 -36.500000000 -17.500000000 -1.500000000 -36.500000000 -16.500000000 -1.500000000 -36.500000000 -15.500000000 -1.500000000 -36.500000000 -14.500000000 -1.500000000 -36.500000000 -13.500000000 -1.500000000 -36.500000000 -12.500000000 -1.500000000 -36.500000000 -11.500000000 -1.500000000 -36.500000000 -10.500000000 -1.500000000 -36.500000000 -9.500000000 -1.500000000 -36.500000000 -8.500000000 -1.500000000 -36.500000000 -7.500000000 -1.500000000 -36.500000000 -6.500000000 -1.500000000 -36.500000000 -5.500000000 -1.500000000 -36.500000000 -4.500000000 -1.500000000 -36.500000000 -3.500000000 -1.500000000 -36.500000000 -2.500000000 -1.500000000 -36.500000000 -1.500000000 -1.500000000 -36.500000000 -0.500000000 -1.500000000 -36.500000000 0.500000000 -1.500000000 -36.500000000 1.500000000 -1.500000000 -36.500000000 2.500000000 -1.500000000 -36.500000000 3.500000000 -1.500000000 -36.500000000 4.500000000 -1.500000000 -36.500000000 5.500000000 -1.500000000 -36.500000000 6.500000000 -1.500000000 -36.500000000 7.500000000 -1.500000000 -36.500000000 8.500000000 -1.500000000 -36.500000000 9.500000000 -1.500000000 -36.500000000 10.500000000 -1.500000000 -36.500000000 11.500000000 -1.500000000 -36.500000000 12.500000000 -1.500000000 
-36.500000000 13.500000000 -1.500000000 -36.500000000 14.500000000 -1.500000000 -36.500000000 15.500000000 -1.500000000 -36.500000000 16.500000000 -1.500000000 -36.500000000 17.500000000 -1.500000000 -36.500000000 18.500000000 -1.500000000 -36.500000000 19.500000000 -1.500000000 -36.500000000 20.500000000 -1.500000000 -36.500000000 21.500000000 -1.500000000 -36.500000000 22.500000000 -1.500000000 -36.500000000 23.500000000 -1.500000000 -36.500000000 24.500000000 -1.500000000 -36.500000000 25.499996185 -1.500000000 -36.499996185 26.499954224 -1.500000000 -36.499954224 27.499591827 -1.500000000 -36.499591827 28.497470856 -1.500000000 -36.497474670 29.488407135 -1.500000000 -36.488403320 30.458978653 -1.500000000 -36.458980560 31.384418488 -1.500000000 -36.384422302 32.233222961 -1.500000000 -36.233226776 32.981101990 -1.500000000 -35.981109619 -33.981101990 -0.500000000 -35.981101990 -33.233222961 -0.500000000 -36.233222961 -32.384422302 -0.500000000 -36.384418488 -31.458978653 -0.500000000 -36.458976746 -30.488407135 -0.500000000 -36.488403320 -29.497472763 -0.500000000 -36.497467041 -28.499593735 -0.500000000 -36.499591827 -27.499954224 -0.500000000 -36.499954224 -26.499996185 -0.500000000 -36.499996185 -25.500000000 -0.500000000 -36.500000000 -24.500000000 -0.500000000 -36.500000000 -23.500000000 -0.500000000 -36.500000000 -22.500000000 -0.500000000 -36.500000000 -21.500000000 -0.500000000 -36.500000000 -20.500000000 -0.500000000 -36.500000000 -19.500000000 -0.500000000 -36.500000000 -18.500000000 -0.500000000 -36.500000000 -17.500000000 -0.500000000 -36.500000000 -16.500000000 -0.500000000 -36.500000000 -15.500000000 -0.500000000 -36.500000000 -14.500000000 -0.500000000 -36.500000000 -13.500000000 -0.500000000 -36.500000000 -12.500000000 -0.500000000 -36.500000000 -11.500000000 -0.500000000 -36.500000000 -10.500000000 -0.500000000 -36.500000000 -9.500000000 -0.500000000 -36.500000000 -8.500000000 -0.500000000 -36.500000000 -7.500000000 -0.500000000 -36.500000000 
-6.500000000 -0.500000000 -36.500000000 -5.500000000 -0.500000000 -36.500000000 -4.500000000 -0.500000000 -36.500000000 -3.500000000 -0.500000000 -36.500000000 -2.500000000 -0.500000000 -36.500000000 -1.500000000 -0.500000000 -36.500000000 -0.500000000 -0.500000000 -36.500000000 0.500000000 -0.500000000 -36.500000000 1.500000000 -0.500000000 -36.500000000 2.500000000 -0.500000000 -36.500000000 3.500000000 -0.500000000 -36.500000000 4.500000000 -0.500000000 -36.500000000 5.500000000 -0.500000000 -36.500000000 6.500000000 -0.500000000 -36.500000000 7.500000000 -0.500000000 -36.500000000 8.500000000 -0.500000000 -36.500000000 9.500000000 -0.500000000 -36.500000000 10.500000000 -0.500000000 -36.500000000 11.500000000 -0.500000000 -36.500000000 12.500000000 -0.500000000 -36.500000000 13.500000000 -0.500000000 -36.500000000 14.500000000 -0.500000000 -36.500000000 15.500000000 -0.500000000 -36.500000000 16.500000000 -0.500000000 -36.500000000 17.500000000 -0.500000000 -36.500000000 18.500000000 -0.500000000 -36.500000000 19.500000000 -0.500000000 -36.500000000 20.500000000 -0.500000000 -36.500000000 21.500000000 -0.500000000 -36.500000000 22.500000000 -0.500000000 -36.500000000 23.500000000 -0.500000000 -36.500000000 24.500000000 -0.500000000 -36.500000000 25.499996185 -0.500000000 -36.499996185 26.499954224 -0.500000000 -36.499954224 27.499591827 -0.500000000 -36.499591827 28.497470856 -0.500000000 -36.497474670 29.488407135 -0.500000000 -36.488403320 30.458978653 -0.500000000 -36.458980560 31.384418488 -0.500000000 -36.384422302 32.233222961 -0.500000000 -36.233226776 32.981101990 -0.500000000 -35.981109619 -33.981101990 0.500000000 -35.981101990 -33.233222961 0.500000000 -36.233222961 -32.384422302 0.500000000 -36.384418488 -31.458978653 0.500000000 -36.458976746 -30.488407135 0.500000000 -36.488403320 -29.497472763 0.500000000 -36.497467041 -28.499593735 0.500000000 -36.499591827 -27.499954224 0.500000000 -36.499954224 -26.499996185 0.500000000 -36.499996185 
-25.500000000 0.500000000 -36.500000000 -24.500000000 0.500000000 -36.500000000 -23.500000000 0.500000000 -36.500000000 -22.500000000 0.500000000 -36.500000000 -21.500000000 0.500000000 -36.500000000 -20.500000000 0.500000000 -36.500000000 -19.500000000 0.500000000 -36.500000000 -18.500000000 0.500000000 -36.500000000 -17.500000000 0.500000000 -36.500000000 -16.500000000 0.500000000 -36.500000000 -15.500000000 0.500000000 -36.500000000 -14.500000000 0.500000000 -36.500000000 -13.500000000 0.500000000 -36.500000000 -12.500000000 0.500000000 -36.500000000 -11.500000000 0.500000000 -36.500000000 -10.500000000 0.500000000 -36.500000000 -9.500000000 0.500000000 -36.500000000 -8.500000000 0.500000000 -36.500000000 -7.500000000 0.500000000 -36.500000000 -6.500000000 0.500000000 -36.500000000 -5.500000000 0.500000000 -36.500000000 -4.500000000 0.500000000 -36.500000000 -3.500000000 0.500000000 -36.500000000 -2.500000000 0.500000000 -36.500000000 -1.500000000 0.500000000 -36.500000000 -0.500000000 0.500000000 -36.500000000 0.500000000 0.500000000 -36.500000000 1.500000000 0.500000000 -36.500000000 2.500000000 0.500000000 -36.500000000 3.500000000 0.500000000 -36.500000000 4.500000000 0.500000000 -36.500000000 5.500000000 0.500000000 -36.500000000 6.500000000 0.500000000 -36.500000000 7.500000000 0.500000000 -36.500000000 8.500000000 0.500000000 -36.500000000 9.500000000 0.500000000 -36.500000000 10.500000000 0.500000000 -36.500000000 11.500000000 0.500000000 -36.500000000 12.500000000 0.500000000 -36.500000000 13.500000000 0.500000000 -36.500000000 14.500000000 0.500000000 -36.500000000 15.500000000 0.500000000 -36.500000000 16.500000000 0.500000000 -36.500000000 17.500000000 0.500000000 -36.500000000 18.500000000 0.500000000 -36.500000000 19.500000000 0.500000000 -36.500000000 20.500000000 0.500000000 -36.500000000 21.500000000 0.500000000 -36.500000000 22.500000000 0.500000000 -36.500000000 23.500000000 0.500000000 -36.500000000 24.500000000 0.500000000 -36.500000000 
25.499996185 0.500000000 -36.499996185 26.499954224 0.500000000 -36.499954224 27.499591827 0.500000000 -36.499591827 28.497470856 0.500000000 -36.497474670 29.488407135 0.500000000 -36.488403320 30.458978653 0.500000000 -36.458980560 31.384418488 0.500000000 -36.384422302 32.233222961 0.500000000 -36.233226776 32.981101990 0.500000000 -35.981109619 -33.981101990 1.500000000 -35.981101990 -33.233222961 1.500000000 -36.233222961 -32.384422302 1.500000000 -36.384418488 -31.458978653 1.500000000 -36.458976746 -30.488407135 1.500000000 -36.488403320 -29.497472763 1.500000000 -36.497467041 -28.499593735 1.500000000 -36.499591827 -27.499954224 1.500000000 -36.499954224 -26.499996185 1.500000000 -36.499996185 -25.500000000 1.500000000 -36.500000000 -24.500000000 1.500000000 -36.500000000 -23.500000000 1.500000000 -36.500000000 -22.500000000 1.500000000 -36.500000000 -21.500000000 1.500000000 -36.500000000 -20.500000000 1.500000000 -36.500000000 -19.500000000 1.500000000 -36.500000000 -18.500000000 1.500000000 -36.500000000 -17.500000000 1.500000000 -36.500000000 -16.500000000 1.500000000 -36.500000000 -15.500000000 1.500000000 -36.500000000 -14.500000000 1.500000000 -36.500000000 -13.500000000 1.500000000 -36.500000000 -12.500000000 1.500000000 -36.500000000 -11.500000000 1.500000000 -36.500000000 -10.500000000 1.500000000 -36.500000000 -9.500000000 1.500000000 -36.500000000 -8.500000000 1.500000000 -36.500000000 -7.500000000 1.500000000 -36.500000000 -6.500000000 1.500000000 -36.500000000 -5.500000000 1.500000000 -36.500000000 -4.500000000 1.500000000 -36.500000000 -3.500000000 1.500000000 -36.500000000 -2.500000000 1.500000000 -36.500000000 -1.500000000 1.500000000 -36.500000000 -0.500000000 1.500000000 -36.500000000 0.500000000 1.500000000 -36.500000000 1.500000000 1.500000000 -36.500000000 2.500000000 1.500000000 -36.500000000 3.500000000 1.500000000 -36.500000000 4.500000000 1.500000000 -36.500000000 5.500000000 1.500000000 -36.500000000 6.500000000 1.500000000 
-36.500000000 7.500000000 1.500000000 -36.500000000 8.500000000 1.500000000 -36.500000000 9.500000000 1.500000000 -36.500000000 10.500000000 1.500000000 -36.500000000 11.500000000 1.500000000 -36.500000000 12.500000000 1.500000000 -36.500000000 13.500000000 1.500000000 -36.500000000 14.500000000 1.500000000 -36.500000000 15.500000000 1.500000000 -36.500000000 16.500000000 1.500000000 -36.500000000 17.500000000 1.500000000 -36.500000000 18.500000000 1.500000000 -36.500000000 19.500000000 1.500000000 -36.500000000 20.500000000 1.500000000 -36.500000000 21.500000000 1.500000000 -36.500000000 22.500000000 1.500000000 -36.500000000 23.500000000 1.500000000 -36.500000000 24.500000000 1.500000000 -36.500000000 25.499996185 1.500000000 -36.499996185 26.499954224 1.500000000 -36.499954224 27.499591827 1.500000000 -36.499591827 28.497470856 1.500000000 -36.497474670 29.488407135 1.500000000 -36.488403320 30.458978653 1.500000000 -36.458980560 31.384418488 1.500000000 -36.384422302 32.233222961 1.500000000 -36.233226776 32.981101990 1.500000000 -35.981109619 -33.981101990 2.500000000 -35.981101990 -33.233222961 2.500000000 -36.233222961 -32.384422302 2.500000000 -36.384418488 -31.458978653 2.500000000 -36.458976746 -30.488407135 2.500000000 -36.488403320 -29.497472763 2.500000000 -36.497467041 -28.499593735 2.500000000 -36.499591827 -27.499954224 2.500000000 -36.499954224 -26.499996185 2.500000000 -36.499996185 -25.500000000 2.500000000 -36.500000000 -24.500000000 2.500000000 -36.500000000 -23.500000000 2.500000000 -36.500000000 -22.500000000 2.500000000 -36.500000000 -21.500000000 2.500000000 -36.500000000 -20.500000000 2.500000000 -36.500000000 -19.500000000 2.500000000 -36.500000000 -18.500000000 2.500000000 -36.500000000 -17.500000000 2.500000000 -36.500000000 -16.500000000 2.500000000 -36.500000000 -15.500000000 2.500000000 -36.500000000 -14.500000000 2.500000000 -36.500000000 -13.500000000 2.500000000 -36.500000000 -12.500000000 2.500000000 -36.500000000 -11.500000000 
2.500000000 -36.500000000 -10.500000000 2.500000000 -36.500000000 -9.500000000 2.500000000 -36.500000000 -8.500000000 2.500000000 -36.500000000 -7.500000000 2.500000000 -36.500000000 -6.500000000 2.500000000 -36.500000000 -5.500000000 2.500000000 -36.500000000 -4.500000000 2.500000000 -36.500000000 -3.500000000 2.500000000 -36.500000000 -2.500000000 2.500000000 -36.500000000 -1.500000000 2.500000000 -36.500000000 -0.500000000 2.500000000 -36.500000000 0.500000000 2.500000000 -36.500000000 1.500000000 2.500000000 -36.500000000 2.500000000 2.500000000 -36.500000000 3.500000000 2.500000000 -36.500000000 4.500000000 2.500000000 -36.500000000 5.500000000 2.500000000 -36.500000000 6.500000000 2.500000000 -36.500000000 7.500000000 2.500000000 -36.500000000 8.500000000 2.500000000 -36.500000000 9.500000000 2.500000000 -36.500000000 10.500000000 2.500000000 -36.500000000 11.500000000 2.500000000 -36.500000000 12.500000000 2.500000000 -36.500000000 13.500000000 2.500000000 -36.500000000 14.500000000 2.500000000 -36.500000000 15.500000000 2.500000000 -36.500000000 16.500000000 2.500000000 -36.500000000 17.500000000 2.500000000 -36.500000000 18.500000000 2.500000000 -36.500000000 19.500000000 2.500000000 -36.500000000 20.500000000 2.500000000 -36.500000000 21.500000000 2.500000000 -36.500000000 22.500000000 2.500000000 -36.500000000 23.500000000 2.500000000 -36.500000000 24.500000000 2.500000000 -36.500000000 25.499996185 2.500000000 -36.499996185 26.499954224 2.500000000 -36.499954224 27.499591827 2.500000000 -36.499591827 28.497470856 2.500000000 -36.497474670 29.488407135 2.500000000 -36.488403320 30.458978653 2.500000000 -36.458980560 31.384418488 2.500000000 -36.384422302 32.233222961 2.500000000 -36.233226776 32.981101990 2.500000000 -35.981109619 -33.981101990 3.500000000 -35.981101990 -33.233222961 3.500000000 -36.233222961 -32.384422302 3.500000000 -36.384418488 -31.458978653 3.500000000 -36.458976746 -30.488407135 3.500000000 -36.488403320 -29.497472763 3.500000000 
-36.497467041 -28.499593735 3.500000000 -36.499591827 -27.499954224 3.500000000 -36.499954224 -26.499996185 3.500000000 -36.499996185 -25.500000000 3.500000000 -36.500000000 -24.500000000 3.500000000 -36.500000000 -23.500000000 3.500000000 -36.500000000 -22.500000000 3.500000000 -36.500000000 -21.500000000 3.500000000 -36.500000000 -20.500000000 3.500000000 -36.500000000 -19.500000000 3.500000000 -36.500000000 -18.500000000 3.500000000 -36.500000000 -17.500000000 3.500000000 -36.500000000 -16.500000000 3.500000000 -36.500000000 -15.500000000 3.500000000 -36.500000000 -14.500000000 3.500000000 -36.500000000 -13.500000000 3.500000000 -36.500000000 -12.500000000 3.500000000 -36.500000000 -11.500000000 3.500000000 -36.500000000 -10.500000000 3.500000000 -36.500000000 -9.500000000 3.500000000 -36.500000000 -8.500000000 3.500000000 -36.500000000 -7.500000000 3.500000000 -36.500000000 -6.500000000 3.500000000 -36.500000000 -5.500000000 3.500000000 -36.500000000 -4.500000000 3.500000000 -36.500000000 -3.500000000 3.500000000 -36.500000000 -2.500000000 3.500000000 -36.500000000 -1.500000000 3.500000000 -36.500000000 -0.500000000 3.500000000 -36.500000000 0.500000000 3.500000000 -36.500000000 1.500000000 3.500000000 -36.500000000 2.500000000 3.500000000 -36.500000000 3.500000000 3.500000000 -36.500000000 4.500000000 3.500000000 -36.500000000 5.500000000 3.500000000 -36.500000000 6.500000000 3.500000000 -36.500000000 7.500000000 3.500000000 -36.500000000 8.500000000 3.500000000 -36.500000000 9.500000000 3.500000000 -36.500000000 10.500000000 3.500000000 -36.500000000 11.500000000 3.500000000 -36.500000000 12.500000000 3.500000000 -36.500000000 13.500000000 3.500000000 -36.500000000 14.500000000 3.500000000 -36.500000000 15.500000000 3.500000000 -36.500000000 16.500000000 3.500000000 -36.500000000 17.500000000 3.500000000 -36.500000000 18.500000000 3.500000000 -36.500000000 19.500000000 3.500000000 -36.500000000 20.500000000 3.500000000 -36.500000000 21.500000000 3.500000000 
-36.500000000 22.500000000 3.500000000 -36.500000000 23.500000000 3.500000000 -36.500000000 24.500000000 3.500000000 -36.500000000 25.499996185 3.500000000 -36.499996185 26.499954224 3.500000000 -36.499954224 27.499591827 3.500000000 -36.499591827 28.497470856 3.500000000 -36.497474670 29.488407135 3.500000000 -36.488403320 30.458978653 3.500000000 -36.458980560 31.384418488 3.500000000 -36.384422302 32.233222961 3.500000000 -36.233226776 32.981101990 3.500000000 -35.981109619 -33.981101990 4.500000000 -35.981101990 -33.233222961 4.500000000 -36.233222961 -32.384422302 4.500000000 -36.384418488 -31.458978653 4.500000000 -36.458976746 -30.488407135 4.500000000 -36.488403320 -29.497472763 4.500000000 -36.497467041 -28.499593735 4.500000000 -36.499591827 -27.499954224 4.500000000 -36.499954224 -26.499996185 4.500000000 -36.499996185 -25.500000000 4.500000000 -36.500000000 -24.500000000 4.500000000 -36.500000000 -23.500000000 4.500000000 -36.500000000 -22.500000000 4.500000000 -36.500000000 -21.500000000 4.500000000 -36.500000000 -20.500000000 4.500000000 -36.500000000 -19.500000000 4.500000000 -36.500000000 -18.500000000 4.500000000 -36.500000000 -17.500000000 4.500000000 -36.500000000 -16.500000000 4.500000000 -36.500000000 -15.500000000 4.500000000 -36.500000000 -14.500000000 4.500000000 -36.500000000 -13.500000000 4.500000000 -36.500000000 -12.500000000 4.500000000 -36.500000000 -11.500000000 4.500000000 -36.500000000 -10.500000000 4.500000000 -36.500000000 -9.500000000 4.500000000 -36.500000000 -8.500000000 4.500000000 -36.500000000 -7.500000000 4.500000000 -36.500000000 -6.500000000 4.500000000 -36.500000000 -5.500000000 4.500000000 -36.500000000 -4.500000000 4.500000000 -36.500000000 -3.500000000 4.500000000 -36.500000000 -2.500000000 4.500000000 -36.500000000 -1.500000000 4.500000000 -36.500000000 -0.500000000 4.500000000 -36.500000000 0.500000000 4.500000000 -36.500000000 1.500000000 4.500000000 -36.500000000 2.500000000 4.500000000 -36.500000000 3.500000000 
4.500000000 -36.500000000 4.500000000 4.500000000 -36.500000000 5.500000000 4.500000000 -36.500000000 6.500000000 4.500000000 -36.500000000 7.500000000 4.500000000 -36.500000000 8.500000000 4.500000000 -36.500000000 9.500000000 4.500000000 -36.500000000 10.500000000 4.500000000 -36.500000000 11.500000000 4.500000000 -36.500000000 12.500000000 4.500000000 -36.500000000 13.500000000 4.500000000 -36.500000000 14.500000000 4.500000000 -36.500000000 15.500000000 4.500000000 -36.500000000 16.500000000 4.500000000 -36.500000000 17.500000000 4.500000000 -36.500000000 18.500000000 4.500000000 -36.500000000 19.500000000 4.500000000 -36.500000000 20.500000000 4.500000000 -36.500000000 21.500000000 4.500000000 -36.500000000 22.500000000 4.500000000 -36.500000000 23.500000000 4.500000000 -36.500000000 24.500000000 4.500000000 -36.500000000 25.499996185 4.500000000 -36.499996185 26.499954224 4.500000000 -36.499954224 27.499591827 4.500000000 -36.499591827 28.497470856 4.500000000 -36.497474670 29.488407135 4.500000000 -36.488403320 30.458978653 4.500000000 -36.458980560 31.384418488 4.500000000 -36.384422302 32.233222961 4.500000000 -36.233226776 32.981101990 4.500000000 -35.981109619 -33.981101990 5.500000000 -35.981101990 -33.233222961 5.500000000 -36.233222961 -32.384422302 5.500000000 -36.384418488 -31.458978653 5.500000000 -36.458976746 -30.488407135 5.500000000 -36.488403320 -29.497472763 5.500000000 -36.497467041 -28.499593735 5.500000000 -36.499591827 -27.499954224 5.500000000 -36.499954224 -26.499996185 5.500000000 -36.499996185 -25.500000000 5.500000000 -36.500000000 -24.500000000 5.500000000 -36.500000000 -23.500000000 5.500000000 -36.500000000 -22.500000000 5.500000000 -36.500000000 -21.500000000 5.500000000 -36.500000000 -20.500000000 5.500000000 -36.500000000 -19.500000000 5.500000000 -36.500000000 -18.500000000 5.500000000 -36.500000000 -17.500000000 5.500000000 -36.500000000 -16.500000000 5.500000000 -36.500000000 -15.500000000 5.500000000 -36.500000000 
-14.500000000 5.500000000 -36.500000000 -13.500000000 5.500000000 -36.500000000 -12.500000000 5.500000000 -36.500000000 -11.500000000 5.500000000 -36.500000000 -10.500000000 5.500000000 -36.500000000 -9.500000000 5.500000000 -36.500000000 -8.500000000 5.500000000 -36.500000000 -7.500000000 5.500000000 -36.500000000 -6.500000000 5.500000000 -36.500000000 -5.500000000 5.500000000 -36.500000000 -4.500000000 5.500000000 -36.500000000 -3.500000000 5.500000000 -36.500000000 -2.500000000 5.500000000 -36.500000000 -1.500000000 5.500000000 -36.500000000 -0.500000000 5.500000000 -36.500000000 0.500000000 5.500000000 -36.500000000 1.500000000 5.500000000 -36.500000000 2.500000000 5.500000000 -36.500000000 3.500000000 5.500000000 -36.500000000 4.500000000 5.500000000 -36.500000000 5.500000000 5.500000000 -36.500000000 6.500000000 5.500000000 -36.500000000 7.500000000 5.500000000 -36.500000000 8.500000000 5.500000000 -36.500000000 9.500000000 5.500000000 -36.500000000 10.500000000 5.500000000 -36.500000000 11.500000000 5.500000000 -36.500000000 12.500000000 5.500000000 -36.500000000 13.500000000 5.500000000 -36.500000000 14.500000000 5.500000000 -36.500000000 15.500000000 5.500000000 -36.500000000 16.500000000 5.500000000 -36.500000000 17.500000000 5.500000000 -36.500000000 18.500000000 5.500000000 -36.500000000 19.500000000 5.500000000 -36.500000000 20.500000000 5.500000000 -36.500000000 21.500000000 5.500000000 -36.500000000 22.500000000 5.500000000 -36.500000000 23.500000000 5.500000000 -36.500000000 24.500000000 5.500000000 -36.500000000 25.499996185 5.500000000 -36.499996185 26.499954224 5.500000000 -36.499954224 27.499591827 5.500000000 -36.499591827 28.497470856 5.500000000 -36.497474670 29.488407135 5.500000000 -36.488403320 30.458978653 5.500000000 -36.458980560 31.384418488 5.500000000 -36.384422302 32.233222961 5.500000000 -36.233226776 32.981101990 5.500000000 -35.981109619 -33.981101990 6.500000000 -35.981101990 -33.233222961 6.500000000 -36.233222961 -32.384422302 
6.500000000 -36.384418488 -31.458978653 6.500000000 -36.458976746 -30.488407135 6.500000000 -36.488403320 -29.497472763 6.500000000 -36.497467041 -28.499593735 6.500000000 -36.499591827 -27.499954224 6.500000000 -36.499954224 -26.499996185 6.500000000 -36.499996185 -25.500000000 6.500000000 -36.500000000 -24.500000000 6.500000000 -36.500000000 -23.500000000 6.500000000 -36.500000000 -22.500000000 6.500000000 -36.500000000 -21.500000000 6.500000000 -36.500000000 -20.500000000 6.500000000 -36.500000000 -19.500000000 6.500000000 -36.500000000 -18.500000000 6.500000000 -36.500000000 -17.500000000 6.500000000 -36.500000000 -16.500000000 6.500000000 -36.500000000 -15.500000000 6.500000000 -36.500000000 -14.500000000 6.500000000 -36.500000000 -13.500000000 6.500000000 -36.500000000 -12.500000000 6.500000000 -36.500000000 -11.500000000 6.500000000 -36.500000000 -10.500000000 6.500000000 -36.500000000 -9.500000000 6.500000000 -36.500000000 -8.500000000 6.500000000 -36.500000000 -7.500000000 6.500000000 -36.500000000 -6.500000000 6.500000000 -36.500000000 -5.500000000 6.500000000 -36.500000000 -4.500000000 6.500000000 -36.500000000 -3.500000000 6.500000000 -36.500000000 -2.500000000 6.500000000 -36.500000000 -1.500000000 6.500000000 -36.500000000 -0.500000000 6.500000000 -36.500000000 0.500000000 6.500000000 -36.500000000 1.500000000 6.500000000 -36.500000000 2.500000000 6.500000000 -36.500000000 3.500000000 6.500000000 -36.500000000 4.500000000 6.500000000 -36.500000000 5.500000000 6.500000000 -36.500000000 6.500000000 6.500000000 -36.500000000 7.500000000 6.500000000 -36.500000000 8.500000000 6.500000000 -36.500000000 9.500000000 6.500000000 -36.500000000 10.500000000 6.500000000 -36.500000000 11.500000000 6.500000000 -36.500000000 12.500000000 6.500000000 -36.500000000 13.500000000 6.500000000 -36.500000000 14.500000000 6.500000000 -36.500000000 15.500000000 6.500000000 -36.500000000 16.500000000 6.500000000 -36.500000000 17.500000000 6.500000000 -36.500000000 
18.500000000 6.500000000 -36.500000000 19.500000000 6.500000000 -36.500000000 20.500000000 6.500000000 -36.500000000 21.500000000 6.500000000 -36.500000000 22.500000000 6.500000000 -36.500000000 23.500000000 6.500000000 -36.500000000 24.500000000 6.500000000 -36.500000000 25.499996185 6.500000000 -36.499996185 26.499954224 6.500000000 -36.499954224 27.499591827 6.500000000 -36.499591827 28.497470856 6.500000000 -36.497474670 29.488407135 6.500000000 -36.488403320 30.458978653 6.500000000 -36.458980560 31.384418488 6.500000000 -36.384422302 32.233222961 6.500000000 -36.233226776 32.981101990 6.500000000 -35.981109619 -33.981101990 7.500000000 -35.981101990 -33.233222961 7.500000000 -36.233222961 -32.384422302 7.500000000 -36.384418488 -31.458978653 7.500000000 -36.458976746 -30.488407135 7.500000000 -36.488403320 -29.497472763 7.500000000 -36.497467041 -28.499593735 7.500000000 -36.499591827 -27.499954224 7.500000000 -36.499954224 -26.499996185 7.500000000 -36.499996185 -25.500000000 7.500000000 -36.500000000 -24.500000000 7.500000000 -36.500000000 -23.500000000 7.500000000 -36.500000000 -22.500000000 7.500000000 -36.500000000 -21.500000000 7.500000000 -36.500000000 -20.500000000 7.500000000 -36.500000000 -19.500000000 7.500000000 -36.500000000 -18.500000000 7.500000000 -36.500000000 -17.500000000 7.500000000 -36.500000000 -16.500000000 7.500000000 -36.500000000 -15.500000000 7.500000000 -36.500000000 -14.500000000 7.500000000 -36.500000000 -13.500000000 7.500000000 -36.500000000 -12.500000000 7.500000000 -36.500000000 -11.500000000 7.500000000 -36.500000000 -10.500000000 7.500000000 -36.500000000 -9.500000000 7.500000000 -36.500000000 -8.500000000 7.500000000 -36.500000000 -7.500000000 7.500000000 -36.500000000 -6.500000000 7.500000000 -36.500000000 -5.500000000 7.500000000 -36.500000000 -4.500000000 7.500000000 -36.500000000 -3.500000000 7.500000000 -36.500000000 -2.500000000 7.500000000 -36.500000000 -1.500000000 7.500000000 -36.500000000 -0.500000000 7.500000000 
-36.500000000 0.500000000 7.500000000 -36.500000000 1.500000000 7.500000000 -36.500000000 2.500000000 7.500000000 -36.500000000 3.500000000 7.500000000 -36.500000000 4.500000000 7.500000000 -36.500000000 5.500000000 7.500000000 -36.500000000 6.500000000 7.500000000 -36.500000000 7.500000000 7.500000000 -36.500000000 8.500000000 7.500000000 -36.500000000 9.500000000 7.500000000 -36.500000000 10.500000000 7.500000000 -36.500000000 11.500000000 7.500000000 -36.500000000 12.500000000 7.500000000 -36.500000000 13.500000000 7.500000000 -36.500000000 14.500000000 7.500000000 -36.500000000 15.500000000 7.500000000 -36.500000000 16.500000000 7.500000000 -36.500000000 17.500000000 7.500000000 -36.500000000 18.500000000 7.500000000 -36.500000000 19.500000000 7.500000000 -36.500000000 20.500000000 7.500000000 -36.500000000 21.500000000 7.500000000 -36.500000000 22.500000000 7.500000000 -36.500000000 23.500000000 7.500000000 -36.500000000 24.500000000 7.500000000 -36.500000000 25.499996185 7.500000000 -36.499996185 26.499954224 7.500000000 -36.499954224 27.499591827 7.500000000 -36.499591827 28.497470856 7.500000000 -36.497474670 29.488407135 7.500000000 -36.488403320 30.458978653 7.500000000 -36.458980560 31.384418488 7.500000000 -36.384422302 32.233222961 7.500000000 -36.233226776 32.981101990 7.500000000 -35.981109619 -33.981101990 8.500000000 -35.981101990 -33.233222961 8.500000000 -36.233222961 -32.384422302 8.500000000 -36.384418488 -31.458978653 8.500000000 -36.458976746 -30.488407135 8.500000000 -36.488403320 -29.497472763 8.500000000 -36.497467041 -28.499593735 8.500000000 -36.499591827 -27.499954224 8.500000000 -36.499954224 -26.499996185 8.500000000 -36.499996185 -25.500000000 8.500000000 -36.500000000 -24.500000000 8.500000000 -36.500000000 -23.500000000 8.500000000 -36.500000000 -22.500000000 8.500000000 -36.500000000 -21.500000000 8.500000000 -36.500000000 -20.500000000 8.500000000 -36.500000000 -19.500000000 8.500000000 -36.500000000 -18.500000000 8.500000000 
-36.500000000 -17.500000000 8.500000000 -36.500000000 -16.500000000 8.500000000 -36.500000000 -15.500000000 8.500000000 -36.500000000 -14.500000000 8.500000000 -36.500000000 -13.500000000 8.500000000 -36.500000000 -12.500000000 8.500000000 -36.500000000 -11.500000000 8.500000000 -36.500000000 -10.500000000 8.500000000 -36.500000000 -9.500000000 8.500000000 -36.500000000 -8.500000000 8.500000000 -36.500000000 -7.500000000 8.500000000 -36.500000000 -6.500000000 8.500000000 -36.500000000 -5.500000000 8.500000000 -36.500000000 -4.500000000 8.500000000 -36.500000000 -3.500000000 8.500000000 -36.500000000 -2.500000000 8.500000000 -36.500000000 -1.500000000 8.500000000 -36.500000000 -0.500000000 8.500000000 -36.500000000 0.500000000 8.500000000 -36.500000000 1.500000000 8.500000000 -36.500000000 2.500000000 8.500000000 -36.500000000 3.500000000 8.500000000 -36.500000000 4.500000000 8.500000000 -36.500000000 5.500000000 8.500000000 -36.500000000 6.500000000 8.500000000 -36.500000000 7.500000000 8.500000000 -36.500000000 8.500000000 8.500000000 -36.500000000 9.500000000 8.500000000 -36.500000000 10.500000000 8.500000000 -36.500000000 11.500000000 8.500000000 -36.500000000 12.500000000 8.500000000 -36.500000000 13.500000000 8.500000000 -36.500000000 14.500000000 8.500000000 -36.500000000 15.500000000 8.500000000 -36.500000000 16.500000000 8.500000000 -36.500000000 17.500000000 8.500000000 -36.500000000 18.500000000 8.500000000 -36.500000000 19.500000000 8.500000000 -36.500000000 20.500000000 8.500000000 -36.500000000 21.500000000 8.500000000 -36.500000000 22.500000000 8.500000000 -36.500000000 23.500000000 8.500000000 -36.500000000 24.500000000 8.500000000 -36.500000000 25.499996185 8.500000000 -36.499996185 26.499954224 8.500000000 -36.499954224 27.499591827 8.500000000 -36.499591827 28.497470856 8.500000000 -36.497474670 29.488407135 8.500000000 -36.488403320 30.458978653 8.500000000 -36.458980560 31.384418488 8.500000000 -36.384422302 32.233222961 8.500000000 
-36.233226776 32.981101990 8.500000000 -35.981109619 -33.981101990 9.500000000 -35.981101990 -33.233222961 9.500000000 -36.233222961 -32.384422302 9.500000000 -36.384418488 -31.458978653 9.500000000 -36.458976746 -30.488407135 9.500000000 -36.488403320 -29.497472763 9.500000000 -36.497467041 -28.499593735 9.500000000 -36.499591827 -27.499954224 9.500000000 -36.499954224 -26.499996185 9.500000000 -36.499996185 -25.500000000 9.500000000 -36.500000000 -24.500000000 9.500000000 -36.500000000 -23.500000000 9.500000000 -36.500000000 -22.500000000 9.500000000 -36.500000000 -21.500000000 9.500000000 -36.500000000 -20.500000000 9.500000000 -36.500000000 -19.500000000 9.500000000 -36.500000000 -18.500000000 9.500000000 -36.500000000 -17.500000000 9.500000000 -36.500000000 -16.500000000 9.500000000 -36.500000000 -15.500000000 9.500000000 -36.500000000 -14.500000000 9.500000000 -36.500000000 -13.500000000 9.500000000 -36.500000000 -12.500000000 9.500000000 -36.500000000 -11.500000000 9.500000000 -36.500000000 -10.500000000 9.500000000 -36.500000000 -9.500000000 9.500000000 -36.500000000 -8.500000000 9.500000000 -36.500000000 -7.500000000 9.500000000 -36.500000000 -6.500000000 9.500000000 -36.500000000 -5.500000000 9.500000000 -36.500000000 -4.500000000 9.500000000 -36.500000000 -3.500000000 9.500000000 -36.500000000 -2.500000000 9.500000000 -36.500000000 -1.500000000 9.500000000 -36.500000000 -0.500000000 9.500000000 -36.500000000 0.500000000 9.500000000 -36.500000000 1.500000000 9.500000000 -36.500000000 2.500000000 9.500000000 -36.500000000 3.500000000 9.500000000 -36.500000000 4.500000000 9.500000000 -36.500000000 5.500000000 9.500000000 -36.500000000 6.500000000 9.500000000 -36.500000000 7.500000000 9.500000000 -36.500000000 8.500000000 9.500000000 -36.500000000 9.500000000 9.500000000 -36.500000000 10.500000000 9.500000000 -36.500000000 11.500000000 9.500000000 -36.500000000 12.500000000 9.500000000 -36.500000000 13.500000000 9.500000000 -36.500000000 14.500000000 
9.500000000 -36.500000000 15.500000000 9.500000000 -36.500000000 16.500000000 9.500000000 -36.500000000 17.500000000 9.500000000 -36.500000000 18.500000000 9.500000000 -36.500000000 19.500000000 9.500000000 -36.500000000 20.500000000 9.500000000 -36.500000000 21.500000000 9.500000000 -36.500000000 22.500000000 9.500000000 -36.500000000 23.500000000 9.500000000 -36.500000000 24.500000000 9.500000000 -36.500000000 25.499996185 9.500000000 -36.499996185 26.499954224 9.500000000 -36.499954224 27.499591827 9.500000000 -36.499591827 28.497470856 9.500000000 -36.497474670 29.488407135 9.500000000 -36.488403320 30.458978653 9.500000000 -36.458980560 31.384418488 9.500000000 -36.384422302 32.233222961 9.500000000 -36.233226776 32.981101990 9.500000000 -35.981109619 -33.981101990 10.500000000 -35.981101990 -33.233222961 10.500000000 -36.233222961 -32.384422302 10.500000000 -36.384418488 -31.458978653 10.500000000 -36.458976746 -30.488407135 10.500000000 -36.488403320 -29.497472763 10.500000000 -36.497467041 -28.499593735 10.500000000 -36.499591827 -27.499954224 10.500000000 -36.499954224 -26.499996185 10.500000000 -36.499996185 -25.500000000 10.500000000 -36.500000000 -24.500000000 10.500000000 -36.500000000 -23.500000000 10.500000000 -36.500000000 -22.500000000 10.500000000 -36.500000000 -21.500000000 10.500000000 -36.500000000 -20.500000000 10.500000000 -36.500000000 -19.500000000 10.500000000 -36.500000000 -18.500000000 10.500000000 -36.500000000 -17.500000000 10.500000000 -36.500000000 -16.500000000 10.500000000 -36.500000000 -15.500000000 10.500000000 -36.500000000 -14.500000000 10.500000000 -36.500000000 -13.500000000 10.500000000 -36.500000000 -12.500000000 10.500000000 -36.500000000 -11.500000000 10.500000000 -36.500000000 -10.500000000 10.500000000 -36.500000000 -9.500000000 10.500000000 -36.500000000 -8.500000000 10.500000000 -36.500000000 -7.500000000 10.500000000 -36.500000000 -6.500000000 10.500000000 -36.500000000 -5.500000000 10.500000000 -36.500000000 
-4.500000000 10.500000000 -36.500000000 -3.500000000 10.500000000 -36.500000000 -2.500000000 10.500000000 -36.500000000 -1.500000000 10.500000000 -36.500000000 -0.500000000 10.500000000 -36.500000000 0.500000000 10.500000000 -36.500000000 1.500000000 10.500000000 -36.500000000 2.500000000 10.500000000 -36.500000000 3.500000000 10.500000000 -36.500000000 4.500000000 10.500000000 -36.500000000 5.500000000 10.500000000 -36.500000000 6.500000000 10.500000000 -36.500000000 7.500000000 10.500000000 -36.500000000 8.500000000 10.500000000 -36.500000000 9.500000000 10.500000000 -36.500000000 10.500000000 10.500000000 -36.500000000 11.500000000 10.500000000 -36.500000000 12.500000000 10.500000000 -36.500000000 13.500000000 10.500000000 -36.500000000 14.500000000 10.500000000 -36.500000000 15.500000000 10.500000000 -36.500000000 16.500000000 10.500000000 -36.500000000 17.500000000 10.500000000 -36.500000000 18.500000000 10.500000000 -36.500000000 19.500000000 10.500000000 -36.500000000 20.500000000 10.500000000 -36.500000000 21.500000000 10.500000000 -36.500000000 22.500000000 10.500000000 -36.500000000 23.500000000 10.500000000 -36.500000000 24.500000000 10.500000000 -36.500000000 25.499996185 10.500000000 -36.499996185 26.499954224 10.500000000 -36.499954224 27.499591827 10.500000000 -36.499591827 28.497470856 10.500000000 -36.497474670 29.488407135 10.500000000 -36.488403320 30.458978653 10.500000000 -36.458980560 31.384418488 10.500000000 -36.384422302 32.233222961 10.500000000 -36.233226776 32.981101990 10.500000000 -35.981109619 -33.981101990 11.500000000 -35.981101990 -33.233222961 11.500000000 -36.233222961 -32.384422302 11.500000000 -36.384418488 -31.458978653 11.500000000 -36.458976746 -30.488407135 11.500000000 -36.488403320 -29.497472763 11.500000000 -36.497467041 -28.499593735 11.500000000 -36.499591827 -27.499954224 11.500000000 -36.499954224 -26.499996185 11.500000000 -36.499996185 -25.500000000 11.500000000 -36.500000000 -24.500000000 11.500000000 
-36.500000000 -23.500000000 11.500000000 -36.500000000 -22.500000000 11.500000000 -36.500000000 -21.500000000 11.500000000 -36.500000000 -20.500000000 11.500000000 -36.500000000 -19.500000000 11.500000000 -36.500000000 -18.500000000 11.500000000 -36.500000000 -17.500000000 11.500000000 -36.500000000 -16.500000000 11.500000000 -36.500000000 -15.500000000 11.500000000 -36.500000000 -14.500000000 11.500000000 -36.500000000 -13.500000000 11.500000000 -36.500000000 -12.500000000 11.500000000 -36.500000000 -11.500000000 11.500000000 -36.500000000 -10.500000000 11.500000000 -36.500000000 -9.500000000 11.500000000 -36.500000000 -8.500000000 11.500000000 -36.500000000 -7.500000000 11.500000000 -36.500000000 -6.500000000 11.500000000 -36.500000000 -5.500000000 11.500000000 -36.500000000 -4.500000000 11.500000000 -36.500000000 -3.500000000 11.500000000 -36.500000000 -2.500000000 11.500000000 -36.500000000 -1.500000000 11.500000000 -36.500000000 -0.500000000 11.500000000 -36.500000000 0.500000000 11.500000000 -36.500000000 1.500000000 11.500000000 -36.500000000 2.500000000 11.500000000 -36.500000000 3.500000000 11.500000000 -36.500000000 4.500000000 11.500000000 -36.500000000 5.500000000 11.500000000 -36.500000000 6.500000000 11.500000000 -36.500000000 7.500000000 11.500000000 -36.500000000 8.500000000 11.500000000 -36.500000000 9.500000000 11.500000000 -36.500000000 10.500000000 11.500000000 -36.500000000 11.500000000 11.500000000 -36.500000000 12.500000000 11.500000000 -36.500000000 13.500000000 11.500000000 -36.500000000 14.500000000 11.500000000 -36.500000000 15.500000000 11.500000000 -36.500000000 16.500000000 11.500000000 -36.500000000 17.500000000 11.500000000 -36.500000000 18.500000000 11.500000000 -36.500000000 19.500000000 11.500000000 -36.500000000 20.500000000 11.500000000 -36.500000000 21.500000000 11.500000000 -36.500000000 22.500000000 11.500000000 -36.500000000 23.500000000 11.500000000 -36.500000000 24.500000000 11.500000000 -36.500000000 25.499996185 
11.500000000 -36.499996185 26.499954224 11.500000000 -36.499954224 27.499591827 11.500000000 -36.499591827 28.497470856 11.500000000 -36.497474670 29.488407135 11.500000000 -36.488403320 30.458978653 11.500000000 -36.458980560 31.384418488 11.500000000 -36.384422302 32.233222961 11.500000000 -36.233226776 32.981101990 11.500000000 -35.981109619 -33.981101990 12.500000000 -35.981101990 -33.233222961 12.500000000 -36.233222961 -32.384422302 12.500000000 -36.384418488 -31.458978653 12.500000000 -36.458976746 -30.488407135 12.500000000 -36.488403320 -29.497472763 12.500000000 -36.497467041 -28.499593735 12.500000000 -36.499591827 -27.499954224 12.500000000 -36.499954224 -26.499996185 12.500000000 -36.499996185 -25.500000000 12.500000000 -36.500000000 -24.500000000 12.500000000 -36.500000000 -23.500000000 12.500000000 -36.500000000 -22.500000000 12.500000000 -36.500000000 -21.500000000 12.500000000 -36.500000000 -20.500000000 12.500000000 -36.500000000 -19.500000000 12.500000000 -36.500000000 -18.500000000 12.500000000 -36.500000000 -17.500000000 12.500000000 -36.500000000 -16.500000000 12.500000000 -36.500000000 -15.500000000 12.500000000 -36.500000000 -14.500000000 12.500000000 -36.500000000 -13.500000000 12.500000000 -36.500000000 -12.500000000 12.500000000 -36.500000000 -11.500000000 12.500000000 -36.500000000 -10.500000000 12.500000000 -36.500000000 -9.500000000 12.500000000 -36.500000000 -8.500000000 12.500000000 -36.500000000 -7.500000000 12.500000000 -36.500000000 -6.500000000 12.500000000 -36.500000000 -5.500000000 12.500000000 -36.500000000 -4.500000000 12.500000000 -36.500000000 -3.500000000 12.500000000 -36.500000000 -2.500000000 12.500000000 -36.500000000 -1.500000000 12.500000000 -36.500000000 -0.500000000 12.500000000 -36.500000000 0.500000000 12.500000000 -36.500000000 1.500000000 12.500000000 -36.500000000 2.500000000 12.500000000 -36.500000000 3.500000000 12.500000000 -36.500000000 4.500000000 12.500000000 -36.500000000 5.500000000 12.500000000 
-36.500000000 6.500000000 12.500000000 -36.500000000 7.500000000 12.500000000 -36.500000000 8.500000000 12.500000000 -36.500000000 9.500000000 12.500000000 -36.500000000 10.500000000 12.500000000 -36.500000000 11.500000000 12.500000000 -36.500000000 12.500000000 12.500000000 -36.500000000 13.500000000 12.500000000 -36.500000000 14.500000000 12.500000000 -36.500000000 15.500000000 12.500000000 -36.500000000 16.500000000 12.500000000 -36.500000000 17.500000000 12.500000000 -36.500000000 18.500000000 12.500000000 -36.500000000 19.500000000 12.500000000 -36.500000000 20.500000000 12.500000000 -36.500000000 21.500000000 12.500000000 -36.500000000 22.500000000 12.500000000 -36.500000000 23.500000000 12.500000000 -36.500000000 24.500000000 12.500000000 -36.500000000 25.499996185 12.500000000 -36.499996185 26.499954224 12.500000000 -36.499954224 27.499591827 12.500000000 -36.499591827 28.497470856 12.500000000 -36.497474670 29.488407135 12.500000000 -36.488403320 30.458978653 12.500000000 -36.458980560 31.384418488 12.500000000 -36.384422302 32.233222961 12.500000000 -36.233226776 32.981101990 12.500000000 -35.981109619 -33.981101990 13.500000000 -35.981101990 -33.233222961 13.500000000 -36.233222961 -32.384422302 13.500000000 -36.384418488 -31.458978653 13.500000000 -36.458976746 -30.488407135 13.500000000 -36.488403320 -29.497472763 13.500000000 -36.497467041 -28.499593735 13.500000000 -36.499591827 -27.499954224 13.500000000 -36.499954224 -26.499996185 13.500000000 -36.499996185 -25.500000000 13.500000000 -36.500000000 -24.500000000 13.500000000 -36.500000000 -23.500000000 13.500000000 -36.500000000 -22.500000000 13.500000000 -36.500000000 -21.500000000 13.500000000 -36.500000000 -20.500000000 13.500000000 -36.500000000 -19.500000000 13.500000000 -36.500000000 -18.500000000 13.500000000 -36.500000000 -17.500000000 13.500000000 -36.500000000 -16.500000000 13.500000000 -36.500000000 -15.500000000 13.500000000 -36.500000000 -14.500000000 13.500000000 -36.500000000 
-13.500000000 13.500000000 -36.500000000 -12.500000000 13.500000000 -36.500000000 -11.500000000 13.500000000 -36.500000000 -10.500000000 13.500000000 -36.500000000 -9.500000000 13.500000000 -36.500000000 -8.500000000 13.500000000 -36.500000000 -7.500000000 13.500000000 -36.500000000 -6.500000000 13.500000000 -36.500000000 -5.500000000 13.500000000 -36.500000000 -4.500000000 13.500000000 -36.500000000 -3.500000000 13.500000000 -36.500000000 -2.500000000 13.500000000 -36.500000000 -1.500000000 13.500000000 -36.500000000 -0.500000000 13.500000000 -36.500000000 0.500000000 13.500000000 -36.500000000 1.500000000 13.500000000 -36.500000000 2.500000000 13.500000000 -36.500000000 3.500000000 13.500000000 -36.500000000 4.500000000 13.500000000 -36.500000000 5.500000000 13.500000000 -36.500000000 6.500000000 13.500000000 -36.500000000 7.500000000 13.500000000 -36.500000000 8.500000000 13.500000000 -36.500000000 9.500000000 13.500000000 -36.500000000 10.500000000 13.500000000 -36.500000000 11.500000000 13.500000000 -36.500000000 12.500000000 13.500000000 -36.500000000 13.500000000 13.500000000 -36.500000000 14.500000000 13.500000000 -36.500000000 15.500000000 13.500000000 -36.500000000 16.500000000 13.500000000 -36.500000000 17.500000000 13.500000000 -36.500000000 18.500000000 13.500000000 -36.500000000 19.500000000 13.500000000 -36.500000000 20.500000000 13.500000000 -36.500000000 21.500000000 13.500000000 -36.500000000 22.500000000 13.500000000 -36.500000000 23.500000000 13.500000000 -36.500000000 24.500000000 13.500000000 -36.500000000 25.499996185 13.500000000 -36.499996185 26.499954224 13.500000000 -36.499954224 27.499591827 13.500000000 -36.499591827 28.497470856 13.500000000 -36.497474670 29.488407135 13.500000000 -36.488403320 30.458978653 13.500000000 -36.458980560 31.384418488 13.500000000 -36.384422302 32.233222961 13.500000000 -36.233226776 32.981101990 13.500000000 -35.981109619 -33.981101990 14.500000000 -35.981101990 -33.233222961 14.500000000 -36.233222961 
-32.384422302 14.500000000 -36.384418488 -31.458978653 14.500000000 -36.458976746 -30.488407135 14.500000000 -36.488403320 -29.497472763 14.500000000 -36.497467041 -28.499593735 14.500000000 -36.499591827 -27.499954224 14.500000000 -36.499954224 -26.499996185 14.500000000 -36.499996185 -25.500000000 14.500000000 -36.500000000 -24.500000000 14.500000000 -36.500000000 -23.500000000 14.500000000 -36.500000000 -22.500000000 14.500000000 -36.500000000 -21.500000000 14.500000000 -36.500000000 -20.500000000 14.500000000 -36.500000000 -19.500000000 14.500000000 -36.500000000 -18.500000000 14.500000000 -36.500000000 -17.500000000 14.500000000 -36.500000000 -16.500000000 14.500000000 -36.500000000 -15.500000000 14.500000000 -36.500000000 -14.500000000 14.500000000 -36.500000000 -13.500000000 14.500000000 -36.500000000 -12.500000000 14.500000000 -36.500000000 -11.500000000 14.500000000 -36.500000000 -10.500000000 14.500000000 -36.500000000 -9.500000000 14.500000000 -36.500000000 -8.500000000 14.500000000 -36.500000000 -7.500000000 14.500000000 -36.500000000 -6.500000000 14.500000000 -36.500000000 -5.500000000 14.500000000 -36.500000000 -4.500000000 14.500000000 -36.500000000 -3.500000000 14.500000000 -36.500000000 -2.500000000 14.500000000 -36.500000000 -1.500000000 14.500000000 -36.500000000 -0.500000000 14.500000000 -36.500000000 0.500000000 14.500000000 -36.500000000 1.500000000 14.500000000 -36.500000000 2.500000000 14.500000000 -36.500000000 3.500000000 14.500000000 -36.500000000 4.500000000 14.500000000 -36.500000000 5.500000000 14.500000000 -36.500000000 6.500000000 14.500000000 -36.500000000 7.500000000 14.500000000 -36.500000000 8.500000000 14.500000000 -36.500000000 9.500000000 14.500000000 -36.500000000 10.500000000 14.500000000 -36.500000000 11.500000000 14.500000000 -36.500000000 12.500000000 14.500000000 -36.500000000 13.500000000 14.500000000 -36.500000000 14.500000000 14.500000000 -36.500000000 15.500000000 14.500000000 -36.500000000 16.500000000 14.500000000 
-36.500000000 17.500000000 14.500000000 -36.500000000 18.500000000 14.500000000 -36.500000000 19.500000000 14.500000000 -36.500000000 20.500000000 14.500000000 -36.500000000 21.500000000 14.500000000 -36.500000000 22.500000000 14.500000000 -36.500000000 23.500000000 14.500000000 -36.500000000 24.500000000 14.500000000 -36.500000000 25.499996185 14.500000000 -36.499996185 26.499954224 14.500000000 -36.499954224 27.499591827 14.500000000 -36.499591827 28.497470856 14.500000000 -36.497474670 29.488407135 14.500000000 -36.488403320 30.458978653 14.500000000 -36.458980560 31.384418488 14.500000000 -36.384422302 32.233222961 14.500000000 -36.233226776 32.981101990 14.500000000 -35.981109619 -33.981101990 15.500000000 -35.981101990 -33.233222961 15.500000000 -36.233222961 -32.384422302 15.500000000 -36.384418488 -31.458978653 15.500000000 -36.458976746 -30.488407135 15.500000000 -36.488403320 -29.497472763 15.500000000 -36.497467041 -28.499593735 15.500000000 -36.499591827 -27.499954224 15.500000000 -36.499954224 -26.499996185 15.500000000 -36.499996185 -25.500000000 15.500000000 -36.500000000 -24.500000000 15.500000000 -36.500000000 -23.500000000 15.500000000 -36.500000000 -22.500000000 15.500000000 -36.500000000 -21.500000000 15.500000000 -36.500000000 -20.500000000 15.500000000 -36.500000000 -19.500000000 15.500000000 -36.500000000 -18.500000000 15.500000000 -36.500000000 -17.500000000 15.500000000 -36.500000000 -16.500000000 15.500000000 -36.500000000 -15.500000000 15.500000000 -36.500000000 -14.500000000 15.500000000 -36.500000000 -13.500000000 15.500000000 -36.500000000 -12.500000000 15.500000000 -36.500000000 -11.500000000 15.500000000 -36.500000000 -10.500000000 15.500000000 -36.500000000 -9.500000000 15.500000000 -36.500000000 -8.500000000 15.500000000 -36.500000000 -7.500000000 15.500000000 -36.500000000 -6.500000000 15.500000000 -36.500000000 -5.500000000 15.500000000 -36.500000000 -4.500000000 15.500000000 -36.500000000 -3.500000000 15.500000000 -36.500000000 
-2.500000000 15.500000000 -36.500000000 -1.500000000 15.500000000 -36.500000000 -0.500000000 15.500000000 -36.500000000 0.500000000 15.500000000 -36.500000000 1.500000000 15.500000000 -36.500000000 2.500000000 15.500000000 -36.500000000 3.500000000 15.500000000 -36.500000000 4.500000000 15.500000000 -36.500000000 5.500000000 15.500000000 -36.500000000 6.500000000 15.500000000 -36.500000000 7.500000000 15.500000000 -36.500000000 8.500000000 15.500000000 -36.500000000 9.500000000 15.500000000 -36.500000000 10.500000000 15.500000000 -36.500000000 11.500000000 15.500000000 -36.500000000 12.500000000 15.500000000 -36.500000000 13.500000000 15.500000000 -36.500000000 14.500000000 15.500000000 -36.500000000 15.500000000 15.500000000 -36.500000000 16.500000000 15.500000000 -36.500000000 17.500000000 15.500000000 -36.500000000 18.500000000 15.500000000 -36.500000000 19.500000000 15.500000000 -36.500000000 20.500000000 15.500000000 -36.500000000 21.500000000 15.500000000 -36.500000000 22.500000000 15.500000000 -36.500000000 23.500000000 15.500000000 -36.500000000 24.500000000 15.500000000 -36.500000000 25.499996185 15.500000000 -36.499996185 26.499954224 15.500000000 -36.499954224 27.499591827 15.500000000 -36.499591827 28.497470856 15.500000000 -36.497474670 29.488407135 15.500000000 -36.488403320 30.458978653 15.500000000 -36.458980560 31.384418488 15.500000000 -36.384422302 32.233222961 15.500000000 -36.233226776 32.981101990 15.500000000 -35.981109619 -33.981101990 16.500000000 -35.981101990 -33.233222961 16.500000000 -36.233222961 -32.384422302 16.500000000 -36.384418488 -31.458978653 16.500000000 -36.458976746 -30.488407135 16.500000000 -36.488403320 -29.497472763 16.500000000 -36.497467041 -28.499593735 16.500000000 -36.499591827 -27.499954224 16.500000000 -36.499954224 -26.499996185 16.500000000 -36.499996185 -25.500000000 16.500000000 -36.500000000 -24.500000000 16.500000000 -36.500000000 -23.500000000 16.500000000 -36.500000000 -22.500000000 16.500000000 
-36.500000000 -21.500000000 16.500000000 -36.500000000 -20.500000000 16.500000000 -36.500000000 -19.500000000 16.500000000 -36.500000000 -18.500000000 16.500000000 -36.500000000 -17.500000000 16.500000000 -36.500000000 -16.500000000 16.500000000 -36.500000000 -15.500000000 16.500000000 -36.500000000 -14.500000000 16.500000000 -36.500000000 -13.500000000 16.500000000 -36.500000000 -12.500000000 16.500000000 -36.500000000 -11.500000000 16.500000000 -36.500000000 -10.500000000 16.500000000 -36.500000000 -9.500000000 16.500000000 -36.500000000 -8.500000000 16.500000000 -36.500000000 -7.500000000 16.500000000 -36.500000000 -6.500000000 16.500000000 -36.500000000 -5.500000000 16.500000000 -36.500000000 -4.500000000 16.500000000 -36.500000000 -3.500000000 16.500000000 -36.500000000 -2.500000000 16.500000000 -36.500000000 -1.500000000 16.500000000 -36.500000000 -0.500000000 16.500000000 -36.500000000 0.500000000 16.500000000 -36.500000000 1.500000000 16.500000000 -36.500000000 2.500000000 16.500000000 -36.500000000 3.500000000 16.500000000 -36.500000000 4.500000000 16.500000000 -36.500000000 5.500000000 16.500000000 -36.500000000 6.500000000 16.500000000 -36.500000000 7.500000000 16.500000000 -36.500000000 8.500000000 16.500000000 -36.500000000 9.500000000 16.500000000 -36.500000000 10.500000000 16.500000000 -36.500000000 11.500000000 16.500000000 -36.500000000 12.500000000 16.500000000 -36.500000000 13.500000000 16.500000000 -36.500000000 14.500000000 16.500000000 -36.500000000 15.500000000 16.500000000 -36.500000000 16.500000000 16.500000000 -36.500000000 17.500000000 16.500000000 -36.500000000 18.500000000 16.500000000 -36.500000000 19.500000000 16.500000000 -36.500000000 20.500000000 16.500000000 -36.500000000 21.500000000 16.500000000 -36.500000000 22.500000000 16.500000000 -36.500000000 23.500000000 16.500000000 -36.500000000 24.500000000 16.500000000 -36.500000000 25.499996185 16.500000000 -36.499996185 26.499954224 16.500000000 -36.499954224 27.499591827 
16.500000000 -36.499591827 28.497470856 16.500000000 -36.497474670 29.488407135 16.500000000 -36.488403320 30.458978653 16.500000000 -36.458980560 31.384418488 16.500000000 -36.384422302 32.233222961 16.500000000 -36.233226776 32.981101990 16.500000000 -35.981109619 -33.981101990 17.500000000 -35.981101990 -33.233222961 17.500000000 -36.233222961 -32.384422302 17.500000000 -36.384418488 -31.458978653 17.500000000 -36.458976746 -30.488407135 17.500000000 -36.488403320 -29.497472763 17.500000000 -36.497467041 -28.499593735 17.500000000 -36.499591827 -27.499954224 17.500000000 -36.499954224 -26.499996185 17.500000000 -36.499996185 -25.500000000 17.500000000 -36.500000000 -24.500000000 17.500000000 -36.500000000 -23.500000000 17.500000000 -36.500000000 -22.500000000 17.500000000 -36.500000000 -21.500000000 17.500000000 -36.500000000 -20.500000000 17.500000000 -36.500000000 -19.500000000 17.500000000 -36.500000000 -18.500000000 17.500000000 -36.500000000 -17.500000000 17.500000000 -36.500000000 -16.500000000 17.500000000 -36.500000000 -15.500000000 17.500000000 -36.500000000 -14.500000000 17.500000000 -36.500000000 -13.500000000 17.500000000 -36.500000000 -12.500000000 17.500000000 -36.500000000 -11.500000000 17.500000000 -36.500000000 -10.500000000 17.500000000 -36.500000000 -9.500000000 17.500000000 -36.500000000 -8.500000000 17.500000000 -36.500000000 -7.500000000 17.500000000 -36.500000000 -6.500000000 17.500000000 -36.500000000 -5.500000000 17.500000000 -36.500000000 -4.500000000 17.500000000 -36.500000000 -3.500000000 17.500000000 -36.500000000 -2.500000000 17.500000000 -36.500000000 -1.500000000 17.500000000 -36.500000000 -0.500000000 17.500000000 -36.500000000 0.500000000 17.500000000 -36.500000000 1.500000000 17.500000000 -36.500000000 2.500000000 17.500000000 -36.500000000 3.500000000 17.500000000 -36.500000000 4.500000000 17.500000000 -36.500000000 5.500000000 17.500000000 -36.500000000 6.500000000 17.500000000 -36.500000000 7.500000000 17.500000000 
-36.500000000 8.500000000 17.500000000 -36.500000000 9.500000000 17.500000000 -36.500000000 10.500000000 17.500000000 -36.500000000 11.500000000 17.500000000 -36.500000000 12.500000000 17.500000000 -36.500000000 13.500000000 17.500000000 -36.500000000 14.500000000 17.500000000 -36.500000000 15.500000000 17.500000000 -36.500000000 16.500000000 17.500000000 -36.500000000 17.500000000 17.500000000 -36.500000000 18.500000000 17.500000000 -36.500000000 19.500000000 17.500000000 -36.500000000 20.500000000 17.500000000 -36.500000000 21.500000000 17.500000000 -36.500000000 22.500000000 17.500000000 -36.500000000 23.500000000 17.500000000 -36.500000000 24.500000000 17.500000000 -36.500000000 25.499996185 17.500000000 -36.499996185 26.499954224 17.500000000 -36.499954224 27.499591827 17.500000000 -36.499591827 28.497470856 17.500000000 -36.497474670 29.488407135 17.500000000 -36.488403320 30.458978653 17.500000000 -36.458980560 31.384418488 17.500000000 -36.384422302 32.233222961 17.500000000 -36.233226776 32.981101990 17.500000000 -35.981109619 -33.981101990 18.500000000 -35.981101990 -33.233222961 18.500000000 -36.233222961 -32.384422302 18.500000000 -36.384418488 -31.458978653 18.500000000 -36.458976746 -30.488407135 18.500000000 -36.488403320 -29.497472763 18.500000000 -36.497467041 -28.499593735 18.500000000 -36.499591827 -27.499954224 18.500000000 -36.499954224 -26.499996185 18.500000000 -36.499996185 -25.500000000 18.500000000 -36.500000000 -24.500000000 18.500000000 -36.500000000 -23.500000000 18.500000000 -36.500000000 -22.500000000 18.500000000 -36.500000000 -21.500000000 18.500000000 -36.500000000 -20.500000000 18.500000000 -36.500000000 -19.500000000 18.500000000 -36.500000000 -18.500000000 18.500000000 -36.500000000 -17.500000000 18.500000000 -36.500000000 -16.500000000 18.500000000 -36.500000000 -15.500000000 18.500000000 -36.500000000 -14.500000000 18.500000000 -36.500000000 -13.500000000 18.500000000 -36.500000000 -12.500000000 18.500000000 -36.500000000 
-11.500000000 18.500000000 -36.500000000 -10.500000000 18.500000000 -36.500000000 -9.500000000 18.500000000 -36.500000000 -8.500000000 18.500000000 -36.500000000 -7.500000000 18.500000000 -36.500000000 -6.500000000 18.500000000 -36.500000000 -5.500000000 18.500000000 -36.500000000 -4.500000000 18.500000000 -36.500000000 -3.500000000 18.500000000 -36.500000000 -2.500000000 18.500000000 -36.500000000 -1.500000000 18.500000000 -36.500000000 -0.500000000 18.500000000 -36.500000000 0.500000000 18.500000000 -36.500000000 1.500000000 18.500000000 -36.500000000 2.500000000 18.500000000 -36.500000000 3.500000000 18.500000000 -36.500000000 4.500000000 18.500000000 -36.500000000 5.500000000 18.500000000 -36.500000000 6.500000000 18.500000000 -36.500000000 7.500000000 18.500000000 -36.500000000 8.500000000 18.500000000 -36.500000000 9.500000000 18.500000000 -36.500000000 10.500000000 18.500000000 -36.500000000 11.500000000 18.500000000 -36.500000000 12.500000000 18.500000000 -36.500000000 13.500000000 18.500000000 -36.500000000 14.500000000 18.500000000 -36.500000000 15.500000000 18.500000000 -36.500000000 16.500000000 18.500000000 -36.500000000 17.500000000 18.500000000 -36.500000000 18.500000000 18.500000000 -36.500000000 19.500000000 18.500000000 -36.500000000 20.500000000 18.500000000 -36.500000000 21.500000000 18.500000000 -36.500000000 22.500000000 18.500000000 -36.500000000 23.500000000 18.500000000 -36.500000000 24.500000000 18.500000000 -36.500000000 25.499996185 18.500000000 -36.499996185 26.499954224 18.500000000 -36.499954224 27.499591827 18.500000000 -36.499591827 28.497470856 18.500000000 -36.497474670 29.488407135 18.500000000 -36.488403320 30.458978653 18.500000000 -36.458980560 31.384418488 18.500000000 -36.384422302 32.233222961 18.500000000 -36.233226776 32.981101990 18.500000000 -35.981109619 -33.981101990 19.500000000 -35.981101990 -33.233222961 19.500000000 -36.233222961 -32.384422302 19.500000000 -36.384418488 -31.458978653 19.500000000 -36.458976746 
-30.488407135 19.500000000 -36.488403320 -29.497472763 19.500000000 -36.497467041 -28.499593735 19.500000000 -36.499591827 -27.499954224 19.500000000 -36.499954224 -26.499996185 19.500000000 -36.499996185 -25.500000000 19.500000000 -36.500000000 -24.500000000 19.500000000 -36.500000000 -23.500000000 19.500000000 -36.500000000 -22.500000000 19.500000000 -36.500000000 -21.500000000 19.500000000 -36.500000000 -20.500000000 19.500000000 -36.500000000 -19.500000000 19.500000000 -36.500000000 -18.500000000 19.500000000 -36.500000000 -17.500000000 19.500000000 -36.500000000 -16.500000000 19.500000000 -36.500000000 -15.500000000 19.500000000 -36.500000000 -14.500000000 19.500000000 -36.500000000 -13.500000000 19.500000000 -36.500000000 -12.500000000 19.500000000 -36.500000000 -11.500000000 19.500000000 -36.500000000 -10.500000000 19.500000000 -36.500000000 -9.500000000 19.500000000 -36.500000000 -8.500000000 19.500000000 -36.500000000 -7.500000000 19.500000000 -36.500000000 -6.500000000 19.500000000 -36.500000000 -5.500000000 19.500000000 -36.500000000 -4.500000000 19.500000000 -36.500000000 -3.500000000 19.500000000 -36.500000000 -2.500000000 19.500000000 -36.500000000 -1.500000000 19.500000000 -36.500000000 -0.500000000 19.500000000 -36.500000000 0.500000000 19.500000000 -36.500000000 1.500000000 19.500000000 -36.500000000 2.500000000 19.500000000 -36.500000000 3.500000000 19.500000000 -36.500000000 4.500000000 19.500000000 -36.500000000 5.500000000 19.500000000 -36.500000000 6.500000000 19.500000000 -36.500000000 7.500000000 19.500000000 -36.500000000 8.500000000 19.500000000 -36.500000000 9.500000000 19.500000000 -36.500000000 10.500000000 19.500000000 -36.500000000 11.500000000 19.500000000 -36.500000000 12.500000000 19.500000000 -36.500000000 13.500000000 19.500000000 -36.500000000 14.500000000 19.500000000 -36.500000000 15.500000000 19.500000000 -36.500000000 16.500000000 19.500000000 -36.500000000 17.500000000 19.500000000 -36.500000000 18.500000000 19.500000000 
-36.500000000 19.500000000 19.500000000 -36.500000000 20.500000000 19.500000000 -36.500000000 21.500000000 19.500000000 -36.500000000 22.500000000 19.500000000 -36.500000000 23.500000000 19.500000000 -36.500000000 24.500000000 19.500000000 -36.500000000 25.499996185 19.500000000 -36.499996185 26.499954224 19.500000000 -36.499954224 27.499591827 19.500000000 -36.499591827 28.497470856 19.500000000 -36.497474670 29.488407135 19.500000000 -36.488403320 30.458978653 19.500000000 -36.458980560 31.384418488 19.500000000 -36.384422302 32.233222961 19.500000000 -36.233226776 32.981101990 19.500000000 -35.981109619 -33.981101990 20.500000000 -35.981101990 -33.233222961 20.500000000 -36.233222961 -32.384422302 20.500000000 -36.384418488 -31.458978653 20.500000000 -36.458976746 -30.488407135 20.500000000 -36.488403320 -29.497472763 20.500000000 -36.497467041 -28.499593735 20.500000000 -36.499591827 -27.499954224 20.500000000 -36.499954224 -26.499996185 20.500000000 -36.499996185 -25.500000000 20.500000000 -36.500000000 -24.500000000 20.500000000 -36.500000000 -23.500000000 20.500000000 -36.500000000 -22.500000000 20.500000000 -36.500000000 -21.500000000 20.500000000 -36.500000000 -20.500000000 20.500000000 -36.500000000 -19.500000000 20.500000000 -36.500000000 -18.500000000 20.500000000 -36.500000000 -17.500000000 20.500000000 -36.500000000 -16.500000000 20.500000000 -36.500000000 -15.500000000 20.500000000 -36.500000000 -14.500000000 20.500000000 -36.500000000 -13.500000000 20.500000000 -36.500000000 -12.500000000 20.500000000 -36.500000000 -11.500000000 20.500000000 -36.500000000 -10.500000000 20.500000000 -36.500000000 -9.500000000 20.500000000 -36.500000000 -8.500000000 20.500000000 -36.500000000 -7.500000000 20.500000000 -36.500000000 -6.500000000 20.500000000 -36.500000000 -5.500000000 20.500000000 -36.500000000 -4.500000000 20.500000000 -36.500000000 -3.500000000 20.500000000 -36.500000000 -2.500000000 20.500000000 -36.500000000 -1.500000000 20.500000000 -36.500000000 
-0.500000000 20.500000000 -36.500000000 0.500000000 20.500000000 -36.500000000 1.500000000 20.500000000 -36.500000000 2.500000000 20.500000000 -36.500000000 3.500000000 20.500000000 -36.500000000 4.500000000 20.500000000 -36.500000000 5.500000000 20.500000000 -36.500000000 6.500000000 20.500000000 -36.500000000 7.500000000 20.500000000 -36.500000000 8.500000000 20.500000000 -36.500000000 9.500000000 20.500000000 -36.500000000 10.500000000 20.500000000 -36.500000000 11.500000000 20.500000000 -36.500000000 12.500000000 20.500000000 -36.500000000 13.500000000 20.500000000 -36.500000000 14.500000000 20.500000000 -36.500000000 15.500000000 20.500000000 -36.500000000 16.500000000 20.500000000 -36.500000000 17.500000000 20.500000000 -36.500000000 18.500000000 20.500000000 -36.500000000 19.500000000 20.500000000 -36.500000000 20.500000000 20.500000000 -36.500000000 21.500000000 20.500000000 -36.500000000 22.500000000 20.500000000 -36.500000000 23.500000000 20.500000000 -36.500000000 24.500000000 20.500000000 -36.500000000 25.499996185 20.500000000 -36.499996185 26.499954224 20.500000000 -36.499954224 27.499591827 20.500000000 -36.499591827 28.497470856 20.500000000 -36.497474670 29.488407135 20.500000000 -36.488403320 30.458978653 20.500000000 -36.458980560 31.384418488 20.500000000 -36.384422302 32.233222961 20.500000000 -36.233226776 32.981101990 20.500000000 -35.981109619 -33.981101990 21.500000000 -35.981101990 -33.233222961 21.500000000 -36.233222961 -32.384422302 21.500000000 -36.384418488 -31.458978653 21.500000000 -36.458976746 -30.488407135 21.500000000 -36.488403320 -29.497472763 21.500000000 -36.497467041 -28.499593735 21.500000000 -36.499591827 -27.499954224 21.500000000 -36.499954224 -26.499996185 21.500000000 -36.499996185 -25.500000000 21.500000000 -36.500000000 -24.500000000 21.500000000 -36.500000000 -23.500000000 21.500000000 -36.500000000 -22.500000000 21.500000000 -36.500000000 -21.500000000 21.500000000 -36.500000000 -20.500000000 21.500000000 
-36.500000000 -19.500000000 21.500000000 -36.500000000 -18.500000000 21.500000000 -36.500000000 -17.500000000 21.500000000 -36.500000000 -16.500000000 21.500000000 -36.500000000 -15.500000000 21.500000000 -36.500000000 -14.500000000 21.500000000 -36.500000000 -13.500000000 21.500000000 -36.500000000 -12.500000000 21.500000000 -36.500000000 -11.500000000 21.500000000 -36.500000000 -10.500000000 21.500000000 -36.500000000 -9.500000000 21.500000000 -36.500000000 -8.500000000 21.500000000 -36.500000000 -7.500000000 21.500000000 -36.500000000 -6.500000000 21.500000000 -36.500000000 -5.500000000 21.500000000 -36.500000000 -4.500000000 21.500000000 -36.500000000 -3.500000000 21.500000000 -36.500000000 -2.500000000 21.500000000 -36.500000000 -1.500000000 21.500000000 -36.500000000 -0.500000000 21.500000000 -36.500000000 0.500000000 21.500000000 -36.500000000 1.500000000 21.500000000 -36.500000000 2.500000000 21.500000000 -36.500000000 3.500000000 21.500000000 -36.500000000 4.500000000 21.500000000 -36.500000000 5.500000000 21.500000000 -36.500000000 6.500000000 21.500000000 -36.500000000 7.500000000 21.500000000 -36.500000000 8.500000000 21.500000000 -36.500000000 9.500000000 21.500000000 -36.500000000 10.500000000 21.500000000 -36.500000000 11.500000000 21.500000000 -36.500000000 12.500000000 21.500000000 -36.500000000 13.500000000 21.500000000 -36.500000000 14.500000000 21.500000000 -36.500000000 15.500000000 21.500000000 -36.500000000 16.500000000 21.500000000 -36.500000000 17.500000000 21.500000000 -36.500000000 18.500000000 21.500000000 -36.500000000 19.500000000 21.500000000 -36.500000000 20.500000000 21.500000000 -36.500000000 21.500000000 21.500000000 -36.500000000 22.500000000 21.500000000 -36.500000000 23.500000000 21.500000000 -36.500000000 24.500000000 21.500000000 -36.500000000 25.499996185 21.500000000 -36.499996185 26.499954224 21.500000000 -36.499954224 27.499591827 21.500000000 -36.499591827 28.497470856 21.500000000 -36.497474670 29.488407135 21.500000000 
-36.488403320 30.458978653 21.500000000 -36.458980560 31.384418488 21.500000000 -36.384422302 32.233222961 21.500000000 -36.233226776 32.981101990 21.500000000 -35.981109619 -33.981101990 22.500000000 -35.981101990 -33.233222961 22.500000000 -36.233222961 -32.384422302 22.500000000 -36.384418488 -31.458978653 22.500000000 -36.458976746 -30.488407135 22.500000000 -36.488403320 -29.497472763 22.500000000 -36.497467041 -28.499593735 22.500000000 -36.499591827 -27.499954224 22.500000000 -36.499954224 -26.499996185 22.500000000 -36.499996185 -25.500000000 22.500000000 -36.500000000 -24.500000000 22.500000000 -36.500000000 -23.500000000 22.500000000 -36.500000000 -22.500000000 22.500000000 -36.500000000 -21.500000000 22.500000000 -36.500000000 -20.500000000 22.500000000 -36.500000000 -19.500000000 22.500000000 -36.500000000 -18.500000000 22.500000000 -36.500000000 -17.500000000 22.500000000 -36.500000000 -16.500000000 22.500000000 -36.500000000 -15.500000000 22.500000000 -36.500000000 -14.500000000 22.500000000 -36.500000000 -13.500000000 22.500000000 -36.500000000 -12.500000000 22.500000000 -36.500000000 -11.500000000 22.500000000 -36.500000000 -10.500000000 22.500000000 -36.500000000 -9.500000000 22.500000000 -36.500000000 -8.500000000 22.500000000 -36.500000000 -7.500000000 22.500000000 -36.500000000 -6.500000000 22.500000000 -36.500000000 -5.500000000 22.500000000 -36.500000000 -4.500000000 22.500000000 -36.500000000 -3.500000000 22.500000000 -36.500000000 -2.500000000 22.500000000 -36.500000000 -1.500000000 22.500000000 -36.500000000 -0.500000000 22.500000000 -36.500000000 0.500000000 22.500000000 -36.500000000 1.500000000 22.500000000 -36.500000000 2.500000000 22.500000000 -36.500000000 3.500000000 22.500000000 -36.500000000 4.500000000 22.500000000 -36.500000000 5.500000000 22.500000000 -36.500000000 6.500000000 22.500000000 -36.500000000 7.500000000 22.500000000 -36.500000000 8.500000000 22.500000000 -36.500000000 9.500000000 22.500000000 -36.500000000 
10.500000000 22.500000000 -36.500000000 11.500000000 22.500000000 -36.500000000 12.500000000 22.500000000 -36.500000000 13.500000000 22.500000000 -36.500000000 14.500000000 22.500000000 -36.500000000 15.500000000 22.500000000 -36.500000000 16.500000000 22.500000000 -36.500000000 17.500000000 22.500000000 -36.500000000 18.500000000 22.500000000 -36.500000000 19.500000000 22.500000000 -36.500000000 20.500000000 22.500000000 -36.500000000 21.500000000 22.500000000 -36.500000000 22.500000000 22.500000000 -36.500000000 23.500000000 22.500000000 -36.500000000 24.500000000 22.500000000 -36.500000000 25.499996185 22.500000000 -36.499996185 26.499954224 22.500000000 -36.499954224 27.499591827 22.500000000 -36.499591827 28.497470856 22.500000000 -36.497474670 29.488407135 22.500000000 -36.488403320 30.458978653 22.500000000 -36.458980560 31.384418488 22.500000000 -36.384422302 32.233222961 22.500000000 -36.233226776 32.981101990 22.500000000 -35.981109619 -33.981101990 23.499998093 -35.981101990 -33.233222961 23.500000000 -36.233222961 -32.384422302 23.500000000 -36.384418488 -31.458978653 23.500000000 -36.458976746 -30.488407135 23.500000000 -36.488403320 -29.497472763 23.500000000 -36.497467041 -28.499593735 23.500000000 -36.499591827 -27.499954224 23.500000000 -36.499954224 -26.499996185 23.500000000 -36.499996185 -25.500000000 23.500000000 -36.500000000 -24.500000000 23.500000000 -36.500000000 -23.500000000 23.500000000 -36.500000000 -22.500000000 23.500000000 -36.500000000 -21.500000000 23.500000000 -36.500000000 -20.500000000 23.500000000 -36.500000000 -19.500000000 23.500000000 -36.500000000 -18.500000000 23.500000000 -36.500000000 -17.500000000 23.500000000 -36.500000000 -16.500000000 23.500000000 -36.500000000 -15.500000000 23.500000000 -36.500000000 -14.500000000 23.500000000 -36.500000000 -13.500000000 23.500000000 -36.500000000 -12.500000000 23.500000000 -36.500000000 -11.500000000 23.500000000 -36.500000000 -10.500000000 23.500000000 -36.500000000 -9.500000000 
23.500000000 -36.500000000 -8.500000000 23.500000000 -36.500000000 -7.500000000 23.500000000 -36.500000000 -6.500000000 23.500000000 -36.500000000 -5.500000000 23.500000000 -36.500000000 -4.500000000 23.500000000 -36.500000000 -3.500000000 23.500000000 -36.500000000 -2.500000000 23.500000000 -36.500000000 -1.500000000 23.500000000 -36.500000000 -0.500000000 23.500000000 -36.500000000 0.500000000 23.500000000 -36.500000000 1.500000000 23.500000000 -36.500000000 2.500000000 23.500000000 -36.500000000 3.500000000 23.500000000 -36.500000000 4.500000000 23.500000000 -36.500000000 5.500000000 23.500000000 -36.500000000 6.500000000 23.500000000 -36.500000000 7.500000000 23.500000000 -36.500000000 8.500000000 23.500000000 -36.500000000 9.500000000 23.500000000 -36.500000000 10.500000000 23.500000000 -36.500000000 11.500000000 23.500000000 -36.500000000 12.500000000 23.500000000 -36.500000000 13.500000000 23.500000000 -36.500000000 14.500000000 23.500000000 -36.500000000 15.500000000 23.500000000 -36.500000000 16.500000000 23.500000000 -36.500000000 17.500000000 23.500000000 -36.500000000 18.500000000 23.500000000 -36.500000000 19.500000000 23.500000000 -36.500000000 20.500000000 23.500000000 -36.500000000 21.500000000 23.500000000 -36.500000000 22.500000000 23.500000000 -36.500000000 23.500000000 23.500000000 -36.500000000 24.500000000 23.500000000 -36.500000000 25.499996185 23.500000000 -36.499996185 26.499954224 23.500000000 -36.499954224 27.499591827 23.500000000 -36.499591827 28.497470856 23.500000000 -36.497474670 29.488407135 23.500000000 -36.488403320 30.458978653 23.500000000 -36.458980560 31.384418488 23.500000000 -36.384422302 32.233222961 23.500000000 -36.233222961 32.981101990 23.499998093 -35.981105804 -33.981086731 24.499979019 -35.981090546 -33.233219147 24.499984741 -36.233207703 -32.384422302 24.499996185 -36.384407043 -31.458978653 24.500000000 -36.458972931 -30.488407135 24.500000000 -36.488403320 -29.497472763 24.500000000 -36.497467041 -28.499593735 
24.500000000 -36.499591827 -27.499954224 24.500000000 -36.499954224 -26.499996185 24.500000000 -36.499996185 -25.500000000 24.500000000 -36.500000000 -24.500000000 24.500000000 -36.500000000 -23.500000000 24.500000000 -36.500000000 -22.500000000 24.500000000 -36.500000000 -21.500000000 24.500000000 -36.500000000 -20.500000000 24.500000000 -36.500000000 -19.500000000 24.500000000 -36.500000000 -18.500000000 24.500000000 -36.500000000 -17.500000000 24.500000000 -36.500000000 -16.500000000 24.500000000 -36.500000000 -15.500000000 24.500000000 -36.500000000 -14.500000000 24.500000000 -36.500000000 -13.500000000 24.500000000 -36.500000000 -12.500000000 24.500000000 -36.500000000 -11.500000000 24.500000000 -36.500000000 -10.500000000 24.500000000 -36.500000000 -9.500000000 24.500000000 -36.500000000 -8.500000000 24.500000000 -36.500000000 -7.500000000 24.500000000 -36.500000000 -6.500000000 24.500000000 -36.500000000 -5.500000000 24.500000000 -36.500000000 -4.500000000 24.500000000 -36.500000000 -3.500000000 24.500000000 -36.500000000 -2.500000000 24.500000000 -36.500000000 -1.500000000 24.500000000 -36.500000000 -0.500000000 24.500000000 -36.500000000 0.500000000 24.500000000 -36.500000000 1.500000000 24.500000000 -36.500000000 2.500000000 24.500000000 -36.500000000 3.500000000 24.500000000 -36.500000000 4.500000000 24.500000000 -36.500000000 5.500000000 24.500000000 -36.500000000 6.500000000 24.500000000 -36.500000000 7.500000000 24.500000000 -36.500000000 8.500000000 24.500000000 -36.500000000 9.500000000 24.500000000 -36.500000000 10.500000000 24.500000000 -36.500000000 11.500000000 24.500000000 -36.500000000 12.500000000 24.500000000 -36.500000000 13.500000000 24.500000000 -36.500000000 14.500000000 24.500000000 -36.500000000 15.500000000 24.500000000 -36.500000000 16.500000000 24.500000000 -36.500000000 17.500000000 24.500000000 -36.500000000 18.500000000 24.500000000 -36.500000000 19.500000000 24.500000000 -36.500000000 20.500000000 24.500000000 -36.500000000 
21.500000000 24.500000000 -36.500000000 22.500000000 24.500000000 -36.500000000 23.500000000 24.500000000 -36.500000000 24.500000000 24.500000000 -36.500000000 25.499996185 24.500000000 -36.499996185 26.499954224 24.500000000 -36.499954224 27.499591827 24.500000000 -36.499591827 28.497470856 24.500000000 -36.497474670 29.488407135 24.500000000 -36.488403320 30.458978653 24.500000000 -36.458976746 31.384418488 24.499996185 -36.384407043 32.233219147 24.499988556 -36.233207703 32.981090546 24.499979019 -35.981090546 -33.980976105 25.499826431 -35.980960846 -33.233169556 25.499874115 -36.233074188 -32.384407043 25.499948502 -36.384307861 -31.458978653 25.499988556 -36.458930969 -30.488407135 25.499996185 -36.488388062 -29.497472763 25.499996185 -36.497467041 -28.499593735 25.499996185 -36.499584198 -27.499954224 25.499996185 -36.499950409 -26.499996185 25.499996185 -36.499992371 -25.500000000 25.499996185 -36.499996185 -24.500000000 25.499996185 -36.499996185 -23.500000000 25.499996185 -36.499996185 -22.500000000 25.499996185 -36.499996185 -21.500000000 25.499996185 -36.499996185 -20.500000000 25.499996185 -36.499996185 -19.500000000 25.499996185 -36.499996185 -18.500000000 25.499996185 -36.499996185 -17.500000000 25.499996185 -36.499996185 -16.500000000 25.499996185 -36.499996185 -15.500000000 25.499996185 -36.499996185 -14.500000000 25.499996185 -36.499996185 -13.500000000 25.499996185 -36.499996185 -12.500000000 25.499996185 -36.499996185 -11.500000000 25.499996185 -36.499996185 -10.500000000 25.499996185 -36.499996185 -9.500000000 25.499996185 -36.499996185 -8.500000000 25.499996185 -36.499996185 -7.500000000 25.499996185 -36.499996185 -6.500000000 25.499996185 -36.499996185 -5.500000000 25.499996185 -36.499996185 -4.500000000 25.499996185 -36.499996185 -3.500000000 25.499996185 -36.499996185 -2.500000000 25.499996185 -36.499996185 -1.500000000 25.499996185 -36.499996185 -0.500000000 25.499996185 -36.499996185 0.500000000 25.499996185 -36.499996185 1.500000000 
25.499996185 -36.499996185 2.500000000 25.499996185 -36.499996185 3.500000000 25.499996185 -36.499996185 4.500000000 25.499996185 -36.499996185 5.500000000 25.499996185 -36.499996185 6.500000000 25.499996185 -36.499996185 7.500000000 25.499996185 -36.499996185 8.500000000 25.499996185 -36.499996185 9.500000000 25.499996185 -36.499996185 10.500000000 25.499996185 -36.499996185 11.500000000 25.499996185 -36.499996185 12.500000000 25.499996185 -36.499996185 13.500000000 25.499996185 -36.499996185 14.500000000 25.499996185 -36.499996185 15.500000000 25.499996185 -36.499996185 16.500000000 25.499996185 -36.499996185 17.500000000 25.499996185 -36.499996185 18.500000000 25.499996185 -36.499996185 19.500000000 25.499996185 -36.499996185 20.500000000 25.499996185 -36.499996185 21.500000000 25.499996185 -36.499996185 22.500000000 25.499996185 -36.499996185 23.500000000 25.499996185 -36.499996185 24.500000000 25.499996185 -36.499996185 25.499996185 25.499996185 -36.499992371 26.499954224 25.499996185 -36.499950409 27.499591827 25.499996185 -36.499588013 28.497470856 25.499996185 -36.497467041 29.488407135 25.499996185 -36.488388062 30.458974838 25.499988556 -36.458934784 31.384405136 25.499948502 -36.384307861 32.233169556 25.499874115 -36.233074188 32.980976105 25.499824524 -35.980957031 -33.980327606 26.498950958 -35.980201721 -33.232860565 26.499225616 -36.232303619 -32.384296417 26.499618530 -36.383720398 -31.458948135 26.499858856 -36.458606720 -30.488397598 26.499938965 -36.488258362 -29.497472763 26.499954224 -36.497406006 -28.499593735 26.499954224 -36.499542236 -27.499954224 26.499954224 -36.499908447 -26.499996185 26.499954224 -36.499950409 -25.500000000 26.499954224 -36.499950409 -24.500000000 26.499954224 -36.499950409 -23.500000000 26.499954224 -36.499950409 -22.500000000 26.499954224 -36.499950409 -21.500000000 26.499954224 -36.499950409 -20.500000000 26.499954224 -36.499950409 -19.500000000 26.499954224 -36.499950409 -18.500000000 26.499954224 -36.499950409 
-17.500000000 26.499954224 -36.499950409 -16.500000000 26.499954224 -36.499950409 -15.500000000 26.499954224 -36.499950409 -14.500000000 26.499954224 -36.499950409 -13.500000000 26.499954224 -36.499950409 -12.500000000 26.499954224 -36.499950409 -11.500000000 26.499954224 -36.499950409 -10.500000000 26.499954224 -36.499950409 -9.500000000 26.499954224 -36.499950409 -8.500000000 26.499954224 -36.499950409 -7.500000000 26.499954224 -36.499950409 -6.500000000 26.499954224 -36.499950409 -5.500000000 26.499954224 -36.499950409 -4.500000000 26.499954224 -36.499950409 -3.500000000 26.499954224 -36.499950409 -2.500000000 26.499954224 -36.499950409 -1.500000000 26.499954224 -36.499950409 -0.500000000 26.499954224 -36.499950409 0.500000000 26.499954224 -36.499950409 1.500000000 26.499954224 -36.499950409 2.500000000 26.499954224 -36.499950409 3.500000000 26.499954224 -36.499950409 4.500000000 26.499954224 -36.499950409 5.500000000 26.499954224 -36.499950409 6.500000000 26.499954224 -36.499950409 7.500000000 26.499954224 -36.499950409 8.500000000 26.499954224 -36.499950409 9.500000000 26.499954224 -36.499950409 10.500000000 26.499954224 -36.499950409 11.500000000 26.499954224 -36.499950409 12.500000000 26.499954224 -36.499950409 13.500000000 26.499954224 -36.499950409 14.500000000 26.499954224 -36.499950409 15.500000000 26.499954224 -36.499950409 16.500000000 26.499954224 -36.499950409 17.500000000 26.499954224 -36.499950409 18.500000000 26.499954224 -36.499950409 19.500000000 26.499954224 -36.499950409 20.500000000 26.499954224 -36.499950409 21.500000000 26.499954224 -36.499950409 22.500000000 26.499954224 -36.499950409 23.500000000 26.499954224 -36.499950409 24.500000000 26.499954224 -36.499950409 25.499996185 26.499954224 -36.499950409 26.499954224 26.499954224 -36.499908447 27.499591827 26.499954224 -36.499542236 28.497470856 26.499954224 -36.497406006 29.488399506 26.499938965 -36.488258362 30.458948135 26.499858856 -36.458606720 31.384298325 26.499622345 -36.383720398 
32.232864380 26.499225616 -36.232303619 32.980335236 26.498950958 -35.980201721 -33.977615356 27.495168686 -35.976860046 -33.231597900 27.496377945 -36.228958130 -32.383811951 27.498052597 -36.381027222 -31.458766937 27.499073029 -36.456939697 -30.488346100 27.499475479 -36.487373352 -29.497461319 27.499576569 -36.496910095 -28.499593735 27.499591827 -36.499160767 -27.499954224 27.499591827 -36.499542236 -26.499996185 27.499591827 -36.499584198 -25.500000000 27.499591827 -36.499591827 -24.500000000 27.499591827 -36.499591827 -23.500000000 27.499591827 -36.499591827 -22.500000000 27.499591827 -36.499591827 -21.500000000 27.499591827 -36.499591827 -20.500000000 27.499591827 -36.499591827 -19.500000000 27.499591827 -36.499591827 -18.500000000 27.499591827 -36.499591827 -17.500000000 27.499591827 -36.499591827 -16.500000000 27.499591827 -36.499591827 -15.500000000 27.499591827 -36.499591827 -14.500000000 27.499591827 -36.499591827 -13.500000000 27.499591827 -36.499591827 -12.500000000 27.499591827 -36.499591827 -11.500000000 27.499591827 -36.499591827 -10.500000000 27.499591827 -36.499591827 -9.500000000 27.499591827 -36.499591827 -8.500000000 27.499591827 -36.499591827 -7.500000000 27.499591827 -36.499591827 -6.500000000 27.499591827 -36.499591827 -5.500000000 27.499591827 -36.499591827 -4.500000000 27.499591827 -36.499591827 -3.500000000 27.499591827 -36.499591827 -2.500000000 27.499591827 -36.499591827 -1.500000000 27.499591827 -36.499591827 -0.500000000 27.499591827 -36.499591827 0.500000000 27.499591827 -36.499591827 1.500000000 27.499591827 -36.499591827 2.500000000 27.499591827 -36.499591827 3.500000000 27.499591827 -36.499591827 4.500000000 27.499591827 -36.499591827 5.500000000 27.499591827 -36.499591827 6.500000000 27.499591827 -36.499591827 7.500000000 27.499591827 -36.499591827 8.500000000 27.499591827 -36.499591827 9.500000000 27.499591827 -36.499591827 10.500000000 27.499591827 -36.499591827 11.500000000 27.499591827 -36.499591827 12.500000000 
27.499591827 -36.499591827 13.500000000 27.499591827 -36.499591827 14.500000000 27.499591827 -36.499591827 15.500000000 27.499591827 -36.499591827 16.500000000 27.499591827 -36.499591827 17.500000000 27.499591827 -36.499591827 18.500000000 27.499591827 -36.499591827 19.500000000 27.499591827 -36.499591827 20.500000000 27.499591827 -36.499591827 21.500000000 27.499591827 -36.499591827 22.500000000 27.499591827 -36.499591827 23.500000000 27.499591827 -36.499591827 24.500000000 27.499591827 -36.499591827 25.499996185 27.499591827 -36.499584198 26.499954224 27.499591827 -36.499542236 27.499591827 27.499591827 -36.499160767 28.497457504 27.499576569 -36.496910095 29.488346100 27.499475479 -36.487373352 30.458766937 27.499073029 -36.456939697 31.383813858 27.498052597 -36.381027222 32.231605530 27.496377945 -36.228958130 32.977619171 27.495168686 -35.976860046 -33.968864441 28.481933594 -35.965766907 -33.227874756 28.486719131 -36.217517853 -32.382312775 28.492321014 -36.371490479 -31.458078384 28.495611191 -36.450428009 -30.488048553 28.496957779 -36.483337402 -29.497375488 28.497371674 -36.494178772 -28.499578476 28.497461319 -36.496910095 -27.499954224 28.497470856 -36.497406006 -26.499996185 28.497470856 -36.497467041 -25.500000000 28.497470856 -36.497467041 -24.500000000 28.497470856 -36.497467041 -23.500000000 28.497470856 -36.497467041 -22.500000000 28.497470856 -36.497467041 -21.500000000 28.497470856 -36.497467041 -20.500000000 28.497470856 -36.497467041 -19.500000000 28.497470856 -36.497467041 -18.500000000 28.497470856 -36.497467041 -17.500000000 28.497470856 -36.497467041 -16.500000000 28.497470856 -36.497467041 -15.500000000 28.497470856 -36.497467041 -14.500000000 28.497470856 -36.497467041 -13.500000000 28.497470856 -36.497467041 -12.500000000 28.497470856 -36.497467041 -11.500000000 28.497470856 -36.497467041 -10.500000000 28.497470856 -36.497467041 -9.500000000 28.497470856 -36.497467041 -8.500000000 28.497470856 -36.497467041 -7.500000000 28.497470856 
-36.497467041 -6.500000000 28.497470856 -36.497467041 -5.500000000 28.497470856 -36.497467041 -4.500000000 28.497470856 -36.497467041 -3.500000000 28.497470856 -36.497467041 -2.500000000 28.497470856 -36.497467041 -1.500000000 28.497470856 -36.497467041 -0.500000000 28.497470856 -36.497467041 0.500000000 28.497470856 -36.497467041 1.500000000 28.497470856 -36.497467041 2.500000000 28.497470856 -36.497467041 3.500000000 28.497470856 -36.497467041 4.500000000 28.497470856 -36.497467041 5.500000000 28.497470856 -36.497467041 6.500000000 28.497470856 -36.497467041 7.500000000 28.497470856 -36.497467041 8.500000000 28.497470856 -36.497467041 9.500000000 28.497470856 -36.497467041 10.500000000 28.497470856 -36.497467041 11.500000000 28.497470856 -36.497467041 12.500000000 28.497470856 -36.497467041 13.500000000 28.497470856 -36.497467041 14.500000000 28.497470856 -36.497467041 15.500000000 28.497470856 -36.497467041 16.500000000 28.497470856 -36.497467041 17.500000000 28.497470856 -36.497467041 18.500000000 28.497470856 -36.497467041 19.500000000 28.497470856 -36.497467041 20.500000000 28.497470856 -36.497467041 21.500000000 28.497470856 -36.497467041 22.500000000 28.497470856 -36.497467041 23.500000000 28.497470856 -36.497467041 24.500000000 28.497470856 -36.497467041 25.499996185 28.497470856 -36.497467041 26.499954224 28.497470856 -36.497406006 27.499576569 28.497457504 -36.496910095 28.497375488 28.497371674 -36.494178772 29.488048553 28.496957779 -36.483337402 30.458078384 28.495609283 -36.450428009 31.382312775 28.492321014 -36.371490479 32.227874756 28.486719131 -36.217510223 32.968864441 28.481933594 -35.965766907 -33.946815491 29.442840576 -35.937091827 -33.220714569 29.460592270 -36.185966492 -32.379222870 29.476003647 -36.344280243 -31.456085205 29.483789444 -36.430480957 -30.486968994 29.486965179 -36.469230652 -29.496959686 29.488052368 -36.483337402 -28.499475479 29.488346100 -36.487373352 -27.499938965 29.488399506 -36.488258362 -26.499996185 29.488407135 
-36.488391876 -25.500000000 29.488407135 -36.488403320 -24.500000000 29.488407135 -36.488403320 -23.500000000 29.488407135 -36.488403320 -22.500000000 29.488407135 -36.488403320 -21.500000000 29.488407135 -36.488403320 -20.500000000 29.488407135 -36.488403320 -19.500000000 29.488407135 -36.488403320 -18.500000000 29.488407135 -36.488403320 -17.500000000 29.488407135 -36.488403320 -16.500000000 29.488407135 -36.488403320 -15.500000000 29.488407135 -36.488403320 -14.500000000 29.488407135 -36.488403320 -13.500000000 29.488407135 -36.488403320 -12.500000000 29.488407135 -36.488403320 -11.500000000 29.488407135 -36.488403320 -10.500000000 29.488407135 -36.488403320 -9.500000000 29.488407135 -36.488403320 -8.500000000 29.488407135 -36.488403320 -7.500000000 29.488407135 -36.488403320 -6.500000000 29.488407135 -36.488403320 -5.500000000 29.488407135 -36.488403320 -4.500000000 29.488407135 -36.488403320 -3.500000000 29.488407135 -36.488403320 -2.500000000 29.488407135 -36.488403320 -1.500000000 29.488407135 -36.488403320 -0.500000000 29.488407135 -36.488403320 0.500000000 29.488407135 -36.488403320 1.500000000 29.488407135 -36.488403320 2.500000000 29.488407135 -36.488403320 3.500000000 29.488407135 -36.488403320 4.500000000 29.488407135 -36.488403320 5.500000000 29.488407135 -36.488403320 6.500000000 29.488407135 -36.488403320 7.500000000 29.488407135 -36.488403320 8.500000000 29.488407135 -36.488403320 9.500000000 29.488407135 -36.488403320 10.500000000 29.488407135 -36.488403320 11.500000000 29.488407135 -36.488403320 12.500000000 29.488407135 -36.488403320 13.500000000 29.488407135 -36.488403320 14.500000000 29.488407135 -36.488403320 15.500000000 29.488407135 -36.488403320 16.500000000 29.488407135 -36.488403320 17.500000000 29.488407135 -36.488403320 18.500000000 29.488407135 -36.488403320 19.500000000 29.488407135 -36.488403320 20.500000000 29.488407135 -36.488403320 21.500000000 29.488407135 -36.488403320 22.500000000 29.488407135 -36.488403320 23.500000000 
29.488407135 -36.488403320 24.500000000 29.488407135 -36.488403320 25.499996185 29.488407135 -36.488391876 26.499938965 29.488399506 -36.488258362 27.499475479 29.488348007 -36.487377167 28.496957779 29.488052368 -36.483337402 29.486968994 29.486968994 -36.469238281 30.456085205 29.483785629 -36.430473328 31.379222870 29.476003647 -36.344280243 32.220718384 29.460596085 -36.185966492 32.946807861 29.442840576 -35.937095642 -33.903377533 30.336599350 -35.879737854 -33.216842651 30.405670166 -36.112342834 -32.375358582 30.438953400 -36.280395508 -31.451217651 30.451217651 -36.380989075 -30.483789444 30.456085205 -36.430473328 -29.495611191 30.458078384 -36.450424194 -28.499073029 30.458770752 -36.456939697 -27.499858856 30.458950043 -36.458606720 -26.499988556 30.458980560 -36.458930969 -25.500000000 30.458980560 -36.458976746 -24.500000000 30.458980560 -36.458980560 -23.500000000 30.458980560 -36.458980560 -22.500000000 30.458980560 -36.458980560 -21.500000000 30.458980560 -36.458980560 -20.500000000 30.458980560 -36.458980560 -19.500000000 30.458980560 -36.458980560 -18.500000000 30.458980560 -36.458980560 -17.500000000 30.458980560 -36.458980560 -16.500000000 30.458980560 -36.458980560 -15.500000000 30.458980560 -36.458980560 -14.500000000 30.458980560 -36.458980560 -13.500000000 30.458980560 -36.458980560 -12.500000000 30.458980560 -36.458980560 -11.500000000 30.458980560 -36.458980560 -10.500000000 30.458980560 -36.458980560 -9.500000000 30.458980560 -36.458980560 -8.500000000 30.458980560 -36.458980560 -7.500000000 30.458980560 -36.458980560 -6.500000000 30.458980560 -36.458980560 -5.500000000 30.458980560 -36.458980560 -4.500000000 30.458980560 -36.458980560 -3.500000000 30.458980560 -36.458980560 -2.500000000 30.458980560 -36.458980560 -1.500000000 30.458980560 -36.458980560 -0.500000000 30.458980560 -36.458980560 0.500000000 30.458980560 -36.458980560 1.500000000 30.458980560 -36.458980560 2.500000000 30.458980560 -36.458980560 3.500000000 30.458980560 
-36.458980560 4.500000000 30.458980560 -36.458980560 5.500000000 30.458980560 -36.458980560 6.500000000 30.458980560 -36.458980560 7.500000000 30.458980560 -36.458980560 8.500000000 30.458980560 -36.458980560 9.500000000 30.458980560 -36.458980560 10.500000000 30.458980560 -36.458980560 11.500000000 30.458980560 -36.458980560 12.500000000 30.458980560 -36.458980560 13.500000000 30.458980560 -36.458980560 14.500000000 30.458980560 -36.458980560 15.500000000 30.458980560 -36.458980560 16.500000000 30.458980560 -36.458980560 17.500000000 30.458980560 -36.458980560 18.500000000 30.458980560 -36.458980560 19.500000000 30.458980560 -36.458980560 20.500000000 30.458980560 -36.458980560 21.500000000 30.458980560 -36.458980560 22.500000000 30.458980560 -36.458980560 23.500000000 30.458980560 -36.458980560 24.500000000 30.458980560 -36.458976746 25.499988556 30.458978653 -36.458934784 26.499858856 30.458948135 -36.458606720 27.499073029 30.458770752 -36.456939697 28.495611191 30.458080292 -36.450428009 29.483789444 30.456085205 -36.430473328 30.451217651 30.451217651 -36.380989075 31.375360489 30.438953400 -36.280395508 32.216842651 30.405673981 -36.112346649 32.903381348 30.336599350 -35.879741669 -33.840934753 31.035345078 -35.797355652 -33.254272461 31.334243774 -35.954723358 -32.371025085 31.371026993 -36.154674530 -31.438953400 31.375360489 -36.280391693 -30.476007462 31.379222870 -36.344280243 -29.492321014 31.382312775 -36.371486664 -28.498050690 31.383813858 -36.381027222 -27.499618530 31.384296417 -36.383720398 -26.499948502 31.384403229 -36.384307861 -25.499996185 31.384418488 -36.384407043 -24.500000000 31.384418488 -36.384418488 -23.500000000 31.384418488 -36.384422302 -22.500000000 31.384418488 -36.384422302 -21.500000000 31.384418488 -36.384422302 -20.500000000 31.384418488 -36.384422302 -19.500000000 31.384418488 -36.384422302 -18.500000000 31.384418488 -36.384422302 -17.500000000 31.384418488 -36.384422302 -16.500000000 31.384418488 -36.384422302 
-15.500000000 31.384418488 -36.384422302 -14.500000000 31.384418488 -36.384422302 -13.500000000 31.384418488 -36.384422302 -12.500000000 31.384418488 -36.384422302 -11.500000000 31.384418488 -36.384422302 -10.500000000 31.384418488 -36.384422302 -9.500000000 31.384418488 -36.384422302 -8.500000000 31.384418488 -36.384422302 -7.500000000 31.384418488 -36.384422302 -6.500000000 31.384418488 -36.384422302 -5.500000000 31.384418488 -36.384422302 -4.500000000 31.384418488 -36.384422302 -3.500000000 31.384418488 -36.384422302 -2.500000000 31.384418488 -36.384422302 -1.500000000 31.384418488 -36.384422302 -0.500000000 31.384418488 -36.384422302 0.500000000 31.384418488 -36.384422302 1.500000000 31.384418488 -36.384422302 2.500000000 31.384418488 -36.384422302 3.500000000 31.384418488 -36.384422302 4.500000000 31.384418488 -36.384422302 5.500000000 31.384418488 -36.384422302 6.500000000 31.384418488 -36.384422302 7.500000000 31.384418488 -36.384422302 8.500000000 31.384418488 -36.384422302 9.500000000 31.384418488 -36.384422302 10.500000000 31.384418488 -36.384422302 11.500000000 31.384418488 -36.384422302 12.500000000 31.384418488 -36.384422302 13.500000000 31.384418488 -36.384422302 14.500000000 31.384418488 -36.384422302 15.500000000 31.384418488 -36.384422302 16.500000000 31.384418488 -36.384422302 17.500000000 31.384418488 -36.384422302 18.500000000 31.384418488 -36.384422302 19.500000000 31.384418488 -36.384422302 20.500000000 31.384418488 -36.384422302 21.500000000 31.384418488 -36.384422302 22.500000000 31.384418488 -36.384422302 23.500000000 31.384418488 -36.384418488 24.499996185 31.384418488 -36.384407043 25.499948502 31.384405136 -36.384315491 26.499622345 31.384296417 -36.383720398 27.498052597 31.383813858 -36.381027222 28.492321014 31.382312775 -36.371486664 29.476007462 31.379222870 -36.344280243 30.438953400 31.375360489 -36.280395508 31.371026993 31.371028900 -36.154674530 32.254276276 31.334243774 -35.954723358 32.840934753 31.035345078 -35.797359467 
-33.030693054 32.030693054 -35.846313477 -32.334243774 32.254276276 -35.954723358 -31.405673981 32.216838837 -36.112346649 -30.460596085 32.220714569 -36.185966492 -29.486719131 32.227874756 -36.217510223 -28.496377945 32.231605530 -36.228954315 -27.499221802 32.232860565 -36.232307434 -26.499872208 32.233165741 -36.233074188 -25.499986649 32.233219147 -36.233207703 -24.499998093 32.233222961 -36.233222961 -23.500000000 32.233222961 -36.233226776 -22.500000000 32.233222961 -36.233226776 -21.500000000 32.233222961 -36.233226776 -20.500000000 32.233222961 -36.233226776 -19.500000000 32.233222961 -36.233226776 -18.500000000 32.233222961 -36.233226776 -17.500000000 32.233222961 -36.233226776 -16.500000000 32.233222961 -36.233226776 -15.500000000 32.233222961 -36.233226776 -14.500000000 32.233222961 -36.233226776 -13.500000000 32.233222961 -36.233226776 -12.500000000 32.233222961 -36.233226776 -11.500000000 32.233222961 -36.233226776 -10.500000000 32.233222961 -36.233226776 -9.500000000 32.233222961 -36.233226776 -8.500000000 32.233222961 -36.233226776 -7.500000000 32.233222961 -36.233226776 -6.500000000 32.233222961 -36.233226776 -5.500000000 32.233222961 -36.233226776 -4.500000000 32.233222961 -36.233226776 -3.500000000 32.233222961 -36.233226776 -2.500000000 32.233222961 -36.233226776 -1.500000000 32.233222961 -36.233226776 -0.500000000 32.233222961 -36.233226776 0.500000000 32.233222961 -36.233226776 1.500000000 32.233222961 -36.233226776 2.500000000 32.233222961 -36.233226776 3.500000000 32.233222961 -36.233226776 4.500000000 32.233222961 -36.233226776 5.500000000 32.233222961 -36.233226776 6.500000000 32.233222961 -36.233226776 7.500000000 32.233222961 -36.233226776 8.500000000 32.233222961 -36.233226776 9.500000000 32.233222961 -36.233226776 10.500000000 32.233222961 -36.233226776 11.500000000 32.233222961 -36.233226776 12.500000000 32.233222961 -36.233226776 13.500000000 32.233222961 -36.233226776 14.500000000 32.233222961 -36.233226776 15.500000000 32.233222961 
-36.233226776 16.500000000 32.233222961 -36.233226776 17.500000000 32.233222961 -36.233226776 18.500000000 32.233222961 -36.233226776 19.500000000 32.233222961 -36.233226776 20.500000000 32.233222961 -36.233226776 21.500000000 32.233222961 -36.233226776 22.500000000 32.233222961 -36.233226776 23.499998093 32.233222961 -36.233222961 24.499986649 32.233219147 -36.233207703 25.499872208 32.233165741 -36.233074188 26.499225616 32.232860565 -36.232303619 27.496377945 32.231597900 -36.228954315 28.486719131 32.227874756 -36.217510223 29.460592270 32.220714569 -36.185966492 30.405673981 32.216838837 -36.112342834 31.334241867 32.254276276 -35.954723358 32.030696869 32.030693054 -35.846317291 -32.035343170 32.840930939 -35.797355652 -31.336603165 32.903373718 -35.879737854 -30.442844391 32.946807861 -35.937095642 -29.481937408 32.968864441 -35.965759277 -28.495168686 32.977615356 -35.976863861 -27.498950958 32.980335236 -35.980205536 -26.499826431 32.980979919 -35.980964661 -25.499979019 32.981090546 -35.981090546 -24.499998093 32.981105804 -35.981105804 -23.500000000 32.981105804 -35.981105804 -22.500000000 32.981105804 -35.981105804 -21.500000000 32.981105804 -35.981105804 -20.500000000 32.981105804 -35.981105804 -19.500000000 32.981105804 -35.981105804 -18.500000000 32.981105804 -35.981105804 -17.500000000 32.981105804 -35.981105804 -16.500000000 32.981105804 -35.981105804 -15.500000000 32.981105804 -35.981105804 -14.500000000 32.981105804 -35.981105804 -13.500000000 32.981105804 -35.981105804 -12.500000000 32.981105804 -35.981105804 -11.500000000 32.981105804 -35.981105804 -10.500000000 32.981105804 -35.981105804 -9.500000000 32.981105804 -35.981105804 -8.500000000 32.981105804 -35.981105804 -7.500000000 32.981105804 -35.981105804 -6.500000000 32.981105804 -35.981105804 -5.500000000 32.981105804 -35.981105804 -4.500000000 32.981105804 -35.981105804 -3.500000000 32.981105804 -35.981105804 -2.500000000 32.981105804 -35.981105804 -1.500000000 32.981105804 -35.981105804 
-0.500000000 32.981105804 -35.981105804 0.500000000 32.981105804 -35.981105804 1.500000000 32.981105804 -35.981105804 2.500000000 32.981105804 -35.981105804 3.500000000 32.981105804 -35.981105804 4.500000000 32.981105804 -35.981105804 5.500000000 32.981105804 -35.981105804 6.500000000 32.981105804 -35.981105804 7.500000000 32.981105804 -35.981105804 8.500000000 32.981105804 -35.981105804 9.500000000 32.981105804 -35.981105804 10.500000000 32.981105804 -35.981105804 11.500000000 32.981105804 -35.981105804 12.500000000 32.981105804 -35.981105804 13.500000000 32.981105804 -35.981105804 14.500000000 32.981105804 -35.981105804 15.500000000 32.981105804 -35.981105804 16.500000000 32.981105804 -35.981105804 17.500000000 32.981105804 -35.981105804 18.500000000 32.981105804 -35.981105804 19.500000000 32.981105804 -35.981105804 20.500000000 32.981105804 -35.981105804 21.500000000 32.981105804 -35.981105804 22.500000000 32.981105804 -35.981105804 23.499998093 32.981105804 -35.981105804 24.499979019 32.981090546 -35.981090546 25.499826431 32.980979919 -35.980960846 26.498950958 32.980335236 -35.980201721 27.495168686 32.977615356 -35.976860046 28.481937408 32.968864441 -35.965766907 29.442840576 32.946807861 -35.937091827 30.336603165 32.903373718 -35.879737854 31.035345078 32.840930939 -35.797355652 -33.006500244 -34.772159576 -34.772163391 -32.313602448 -34.912067413 -34.912067413 -31.413049698 -35.040893555 -35.040897369 -30.463253021 -35.115642548 -35.115646362 -29.487289429 -35.150257111 -35.150253296 -28.496557236 -35.163158417 -35.163158417 -27.499298096 -35.166961670 -35.166954041 -26.499902725 -35.167808533 -35.167808533 -25.499990463 -35.167949677 -35.167949677 -24.500000000 -35.167961121 -35.167961121 -23.500000000 -35.167968750 -35.167968750 -22.500000000 -35.167968750 -35.167968750 -21.500000000 -35.167968750 -35.167968750 -20.500000000 -35.167968750 -35.167968750 -19.500000000 -35.167968750 -35.167968750 -18.500000000 -35.167968750 -35.167968750 -17.500000000 
-35.167968750 -35.167968750 -16.500000000 -35.167968750 -35.167968750 -15.500000000 -35.167968750 -35.167968750 -14.500000000 -35.167968750 -35.167968750 -13.500000000 -35.167968750 -35.167968750 -12.500000000 -35.167968750 -35.167968750 -11.500000000 -35.167968750 -35.167968750 -10.500000000 -35.167968750 -35.167968750 -9.500000000 -35.167968750 -35.167968750 -8.500000000 -35.167968750 -35.167968750 -7.500000000 -35.167968750 -35.167968750 -6.500000000 -35.167968750 -35.167968750 -5.500000000 -35.167968750 -35.167968750 -4.500000000 -35.167968750 -35.167968750 -3.500000000 -35.167968750 -35.167968750 -2.500000000 -35.167968750 -35.167968750 -1.500000000 -35.167968750 -35.167968750 -0.500000000 -35.167968750 -35.167968750 0.500000000 -35.167968750 -35.167968750 1.500000000 -35.167968750 -35.167968750 2.500000000 -35.167968750 -35.167968750 3.500000000 -35.167968750 -35.167968750 4.500000000 -35.167968750 -35.167968750 5.500000000 -35.167968750 -35.167968750 6.500000000 -35.167968750 -35.167968750 7.500000000 -35.167968750 -35.167968750 8.500000000 -35.167968750 -35.167968750 9.500000000 -35.167968750 -35.167968750 10.500000000 -35.167968750 -35.167968750 11.500000000 -35.167968750 -35.167968750 12.500000000 -35.167968750 -35.167968750 13.500000000 -35.167968750 -35.167968750 14.500000000 -35.167968750 -35.167968750 15.500000000 -35.167968750 -35.167968750 16.500000000 -35.167968750 -35.167968750 17.500000000 -35.167968750 -35.167968750 18.500000000 -35.167968750 -35.167968750 19.500000000 -35.167968750 -35.167968750 20.500000000 -35.167968750 -35.167968750 21.500000000 -35.167968750 -35.167968750 22.500000000 -35.167968750 -35.167968750 23.500000000 -35.167968750 -35.167968750 24.499992371 -35.167953491 -35.167949677 25.499898911 -35.167808533 -35.167804718 26.499298096 -35.166961670 -35.166957855 27.496557236 -35.163158417 -35.163158417 28.487289429 -35.150249481 -35.150253296 29.463253021 -35.115642548 -35.115642548 30.413049698 -35.040897369 -35.040897369 
31.313602448 -34.912067413 -34.912059784 32.006500244 -34.772163391 -34.772159576 -33.980762482 -33.980758667 -34.842590332 -33.316951752 -34.231868744 -35.057430267 -32.380741119 -34.260547638 -35.428535461 -31.410833359 -34.477191925 -35.518615723 -30.459178925 -34.567749023 -35.577739716 -29.485630035 -34.604709625 -35.607204437 -28.496026993 -34.617927551 -35.618602753 -27.499156952 -34.621795654 -35.621986389 -26.499868393 -34.622692108 -35.622741699 -25.499986649 -34.622852325 -35.622856140 -24.500000000 -34.622871399 -35.622871399 -23.500000000 -34.622871399 -35.622871399 -22.500000000 -34.622871399 -35.622871399 -21.500000000 -34.622871399 -35.622871399 -20.500000000 -34.622871399 -35.622871399 -19.500000000 -34.622871399 -35.622871399 -18.500000000 -34.622871399 -35.622871399 -17.500000000 -34.622871399 -35.622871399 -16.500000000 -34.622871399 -35.622871399 -15.500000000 -34.622871399 -35.622871399 -14.500000000 -34.622871399 -35.622871399 -13.500000000 -34.622871399 -35.622871399 -12.500000000 -34.622871399 -35.622871399 -11.500000000 -34.622871399 -35.622871399 -10.500000000 -34.622871399 -35.622871399 -9.500000000 -34.622871399 -35.622871399 -8.500000000 -34.622871399 -35.622871399 -7.500000000 -34.622871399 -35.622871399 -6.500000000 -34.622871399 -35.622871399 -5.500000000 -34.622871399 -35.622871399 -4.500000000 -34.622871399 -35.622871399 -3.500000000 -34.622871399 -35.622871399 -2.500000000 -34.622871399 -35.622871399 -1.500000000 -34.622871399 -35.622871399 -0.500000000 -34.622871399 -35.622871399 0.500000000 -34.622871399 -35.622871399 1.500000000 -34.622871399 -35.622871399 2.500000000 -34.622871399 -35.622871399 3.500000000 -34.622871399 -35.622871399 4.500000000 -34.622871399 -35.622871399 5.500000000 -34.622871399 -35.622871399 6.500000000 -34.622871399 -35.622871399 7.500000000 -34.622871399 -35.622871399 8.500000000 -34.622871399 -35.622871399 9.500000000 -34.622871399 -35.622871399 10.500000000 -34.622871399 -35.622871399 11.500000000 
-34.622871399 -35.622871399 12.500000000 -34.622871399 -35.622871399 13.500000000 -34.622871399 -35.622871399 14.500000000 -34.622871399 -35.622871399 15.500000000 -34.622871399 -35.622871399 16.500000000 -34.622871399 -35.622871399 17.500000000 -34.622871399 -35.622871399 18.500000000 -34.622871399 -35.622871399 19.500000000 -34.622871399 -35.622871399 20.500000000 -34.622871399 -35.622871399 21.500000000 -34.622871399 -35.622871399 22.500000000 -34.622871399 -35.622871399 23.500000000 -34.622871399 -35.622871399 24.499986649 -34.622852325 -35.622856140 25.499866486 -34.622695923 -35.622741699 26.499156952 -34.621795654 -35.621978760 27.496026993 -34.617927551 -35.618602753 28.485630035 -34.604709625 -35.607208252 29.459177017 -34.567745209 -35.577743530 30.410833359 -34.477191925 -35.518615723 31.380739212 -34.260547638 -35.428535461 32.316951752 -34.231872559 -35.057430267 32.980762482 -33.980762482 -34.842590332 -34.772163391 -33.006500244 -34.772163391 -34.231872559 -33.316951752 -35.057430267 -33.424442291 -33.424442291 -35.498542786 -32.736049652 -33.678356171 -35.670326233 31.736053467 -33.678352356 -35.670326233 32.424442291 -33.424442291 -35.498546600 33.231872559 -33.316947937 -35.057430267 33.772163391 -33.006500244 -34.772163391 -34.912059784 -32.313602448 -34.912059784 -34.260543823 -32.380741119 -35.428531647 -33.678352356 -32.736053467 -35.670322418 32.678352356 -32.736049652 -35.670326233 33.260547638 -32.380737305 -35.428535461 33.912059784 -32.313598633 -34.912067413 -35.040893555 -31.413049698 -35.040893555 -34.477191925 -31.410833359 -35.518615723 33.477191925 -31.410833359 -35.518615723 34.040893555 -31.413049698 -35.040893555 -35.115642548 -30.463253021 -35.115646362 -34.567745209 -30.459177017 -35.577739716 33.567749023 -30.459178925 -35.577739716 34.115642548 -30.463253021 -35.115646362 -35.150253296 -29.487289429 -35.150253296 -34.604705811 -29.485630035 -35.607208252 33.604709625 -29.485630035 -35.607204437 34.150257111 -29.487289429 
-35.150253296 -35.163162231 -28.496557236 -35.163158417 -34.617927551 -28.496026993 -35.618602753 33.617927551 -28.496026993 -35.618602753 34.163158417 -28.496557236 -35.163158417 -35.166961670 -27.499298096 -35.166954041 -34.621799469 -27.499160767 -35.621978760 33.621795654 -27.499156952 -35.621986389 34.166961670 -27.499298096 -35.166954041 -35.167804718 -26.499902725 -35.167808533 -34.622692108 -26.499868393 -35.622734070 33.622692108 -26.499868393 -35.622741699 34.167808533 -26.499902725 -35.167808533 -35.167949677 -25.499990463 -35.167949677 -34.622844696 -25.499986649 -35.622856140 33.622844696 -25.499986649 -35.622856140 34.167949677 -25.499990463 -35.167949677 -35.167961121 -24.500000000 -35.167961121 -34.622871399 -24.500000000 -35.622871399 33.622871399 -24.500000000 -35.622871399 34.167961121 -24.500000000 -35.167961121 -35.167964935 -23.500000000 -35.167964935 -34.622871399 -23.500000000 -35.622871399 33.622871399 -23.500000000 -35.622871399 34.167968750 -23.500000000 -35.167968750 -35.167964935 -22.500000000 -35.167964935 -34.622871399 -22.500000000 -35.622871399 33.622871399 -22.500000000 -35.622871399 34.167968750 -22.500000000 -35.167968750 -35.167964935 -21.500000000 -35.167964935 -34.622871399 -21.500000000 -35.622871399 33.622871399 -21.500000000 -35.622871399 34.167968750 -21.500000000 -35.167968750 -35.167964935 -20.500000000 -35.167964935 -34.622871399 -20.500000000 -35.622871399 33.622871399 -20.500000000 -35.622871399 34.167968750 -20.500000000 -35.167968750 -35.167964935 -19.500000000 -35.167964935 -34.622871399 -19.500000000 -35.622871399 33.622871399 -19.500000000 -35.622871399 34.167968750 -19.500000000 -35.167968750 -35.167968750 -18.500000000 -35.167968750 -34.622871399 -18.500000000 -35.622871399 33.622871399 -18.500000000 -35.622871399 34.167968750 -18.500000000 -35.167968750 -35.167968750 -17.500000000 -35.167968750 -34.622871399 -17.500000000 -35.622871399 33.622871399 -17.500000000 -35.622871399 34.167968750 -17.500000000 
-35.167968750 -35.167968750 -16.500000000 -35.167968750 -34.622871399 -16.500000000 -35.622871399 33.622871399 -16.500000000 -35.622871399 34.167968750 -16.500000000 -35.167968750 -35.167964935 -15.500000000 -35.167964935 -34.622871399 -15.500000000 -35.622871399 33.622871399 -15.500000000 -35.622871399 34.167968750 -15.500000000 -35.167968750 -35.167964935 -14.500000000 -35.167964935 -34.622871399 -14.500000000 -35.622871399 33.622871399 -14.500000000 -35.622871399 34.167968750 -14.500000000 -35.167968750 -35.167964935 -13.500000000 -35.167964935 -34.622871399 -13.500000000 -35.622871399 33.622871399 -13.500000000 -35.622871399 34.167968750 -13.500000000 -35.167968750 -35.167964935 -12.500000000 -35.167964935 -34.622871399 -12.500000000 -35.622871399 33.622871399 -12.500000000 -35.622871399 34.167968750 -12.500000000 -35.167968750 -35.167964935 -11.500000000 -35.167964935 -34.622871399 -11.500000000 -35.622871399 33.622871399 -11.500000000 -35.622871399 34.167968750 -11.500000000 -35.167968750 -35.167964935 -10.500000000 -35.167964935 -34.622871399 -10.500000000 -35.622871399 33.622871399 -10.500000000 -35.622871399 34.167968750 -10.500000000 -35.167968750 -35.167964935 -9.500000000 -35.167964935 -34.622871399 -9.500000000 -35.622871399 33.622871399 -9.500000000 -35.622871399 34.167968750 -9.500000000 -35.167968750 -35.167964935 -8.500000000 -35.167964935 -34.622871399 -8.500000000 -35.622871399 33.622871399 -8.500000000 -35.622871399 34.167968750 -8.500000000 -35.167968750 -35.167964935 -7.500000000 -35.167964935 -34.622871399 -7.500000000 -35.622871399 33.622871399 -7.500000000 -35.622871399 34.167968750 -7.500000000 -35.167968750 -35.167964935 -6.500000000 -35.167964935 -34.622871399 -6.500000000 -35.622871399 33.622871399 -6.500000000 -35.622871399 34.167968750 -6.500000000 -35.167968750 -35.167964935 -5.500000000 -35.167964935 -34.622871399 -5.500000000 -35.622871399 33.622871399 -5.500000000 -35.622871399 34.167968750 -5.500000000 -35.167968750 -35.167964935 
-4.500000000 -35.167964935 -34.622871399 -4.500000000 -35.622871399 33.622871399 -4.500000000 -35.622871399 34.167968750 -4.500000000 -35.167968750 -35.167968750 -3.500000000 -35.167968750 -34.622871399 -3.500000000 -35.622871399 33.622871399 -3.500000000 -35.622871399 34.167968750 -3.500000000 -35.167968750 -35.167968750 -2.500000000 -35.167968750 -34.622871399 -2.500000000 -35.622871399 33.622871399 -2.500000000 -35.622871399 34.167968750 -2.500000000 -35.167968750 -35.167968750 -1.500000000 -35.167968750 -34.622871399 -1.500000000 -35.622871399 33.622871399 -1.500000000 -35.622871399 34.167968750 -1.500000000 -35.167968750 -35.167964935 -0.500000000 -35.167964935 -34.622871399 -0.500000000 -35.622871399 33.622871399 -0.500000000 -35.622871399 34.167968750 -0.500000000 -35.167968750 -35.167964935 0.500000000 -35.167964935 -34.622871399 0.500000000 -35.622871399 33.622871399 0.500000000 -35.622871399 34.167968750 0.500000000 -35.167968750 -35.167964935 1.500000000 -35.167964935 -34.622871399 1.500000000 -35.622871399 33.622871399 1.500000000 -35.622871399 34.167968750 1.500000000 -35.167968750 -35.167968750 2.500000000 -35.167968750 -34.622871399 2.500000000 -35.622871399 33.622871399 2.500000000 -35.622871399 34.167968750 2.500000000 -35.167968750 -35.167968750 3.500000000 -35.167968750 -34.622871399 3.500000000 -35.622871399 33.622871399 3.500000000 -35.622871399 34.167968750 3.500000000 -35.167968750 -35.167968750 4.500000000 -35.167968750 -34.622871399 4.500000000 -35.622871399 33.622871399 4.500000000 -35.622871399 34.167968750 4.500000000 -35.167968750 -35.167968750 5.500000000 -35.167968750 -34.622871399 5.500000000 -35.622871399 33.622871399 5.500000000 -35.622871399 34.167968750 5.500000000 -35.167968750 -35.167964935 6.500000000 -35.167964935 -34.622871399 6.500000000 -35.622871399 33.622871399 6.500000000 -35.622871399 34.167968750 6.500000000 -35.167968750 -35.167964935 7.500000000 -35.167964935 -34.622871399 7.500000000 -35.622871399 33.622871399 
7.500000000 -35.622871399 34.167968750 7.500000000 -35.167968750 -35.167964935 8.500000000 -35.167964935 -34.622871399 8.500000000 -35.622871399 33.622871399 8.500000000 -35.622871399 34.167968750 8.500000000 -35.167968750 -35.167964935 9.500000000 -35.167964935 -34.622871399 9.500000000 -35.622871399 33.622871399 9.500000000 -35.622871399 34.167968750 9.500000000 -35.167968750 -35.167964935 10.500000000 -35.167964935 -34.622871399 10.500000000 -35.622871399 33.622871399 10.500000000 -35.622871399 34.167968750 10.500000000 -35.167968750 -35.167964935 11.500000000 -35.167964935 -34.622871399 11.500000000 -35.622871399 33.622871399 11.500000000 -35.622871399 34.167968750 11.500000000 -35.167968750 -35.167964935 12.500000000 -35.167964935 -34.622871399 12.500000000 -35.622871399 33.622871399 12.500000000 -35.622871399 34.167968750 12.500000000 -35.167968750 -35.167964935 13.500000000 -35.167964935 -34.622871399 13.500000000 -35.622871399 33.622871399 13.500000000 -35.622871399 34.167968750 13.500000000 -35.167968750 -35.167964935 14.500000000 -35.167964935 -34.622871399 14.500000000 -35.622871399 33.622871399 14.500000000 -35.622871399 34.167968750 14.500000000 -35.167968750 -35.167964935 15.500000000 -35.167964935 -34.622871399 15.500000000 -35.622871399 33.622871399 15.500000000 -35.622871399 34.167968750 15.500000000 -35.167968750 -35.167964935 16.500000000 -35.167964935 -34.622871399 16.500000000 -35.622871399 33.622871399 16.500000000 -35.622871399 34.167968750 16.500000000 -35.167968750 -35.167964935 17.500000000 -35.167964935 -34.622871399 17.500000000 -35.622871399 33.622871399 17.500000000 -35.622871399 34.167968750 17.500000000 -35.167968750 -35.167968750 18.500000000 -35.167968750 -34.622871399 18.500000000 -35.622871399 33.622871399 18.500000000 -35.622871399 34.167968750 18.500000000 -35.167968750 -35.167968750 19.500000000 -35.167968750 -34.622871399 19.500000000 -35.622871399 33.622871399 19.500000000 -35.622871399 34.167968750 19.500000000 
-35.167968750 -35.167968750 20.500000000 -35.167968750 -34.622871399 20.500000000 -35.622871399 33.622871399 20.500000000 -35.622871399 34.167968750 20.500000000 -35.167968750 -35.167968750 21.500000000 -35.167968750 -34.622871399 21.500000000 -35.622871399 33.622871399 21.500000000 -35.622871399 34.167968750 21.500000000 -35.167968750 -35.167968750 22.500000000 -35.167968750 -34.622871399 22.500000000 -35.622871399 33.622871399 22.500000000 -35.622871399 34.167968750 22.500000000 -35.167968750 -35.167968750 23.500000000 -35.167968750 -34.622871399 23.500000000 -35.622871399 33.622871399 23.500000000 -35.622871399 34.167968750 23.500000000 -35.167968750 -35.167949677 24.499992371 -35.167949677 -34.622852325 24.499986649 -35.622859955 33.622852325 24.499986649 -35.622856140 34.167949677 24.499992371 -35.167949677 -35.167808533 25.499898911 -35.167808533 -34.622692108 25.499866486 -35.622734070 33.622695923 25.499866486 -35.622734070 34.167808533 25.499898911 -35.167804718 -35.166957855 26.499294281 -35.166957855 -34.621799469 26.499156952 -35.621986389 33.621795654 26.499156952 -35.621978760 34.166961670 26.499298096 -35.166957855 -35.163162231 27.496557236 -35.163162231 -34.617927551 27.496023178 -35.618602753 33.617927551 27.496026993 -35.618602753 34.163158417 27.496557236 -35.163158417 -35.150257111 28.487289429 -35.150253296 -34.604713440 28.485630035 -35.607208252 33.604709625 28.485630035 -35.607208252 34.150249481 28.487289429 -35.150253296 -35.115642548 29.463253021 -35.115642548 -34.567749023 29.459177017 -35.577743530 33.567745209 29.459177017 -35.577747345 34.115642548 29.463253021 -35.115642548 -35.040893555 30.413049698 -35.040897369 -34.477191925 30.410833359 -35.518615723 33.477191925 30.410833359 -35.518623352 34.040897369 30.413049698 -35.040897369 -34.912063599 31.313602448 -34.912063599 -34.260543823 31.380739212 -35.428535461 -33.678356171 31.736053467 -35.670326233 32.678356171 31.736053467 -35.670326233 33.260547638 31.380739212 -35.428535461 
33.912067413 31.313602448 -34.912059784 -34.772159576 32.006500244 -34.772163391 -34.231868744 32.316951752 -35.057430267 -33.424442291 32.424442291 -35.498542786 -32.736049652 32.678352356 -35.670322418 31.736055374 32.678352356 -35.670326233 32.424442291 32.424442291 -35.498546600 33.231876373 32.316951752 -35.057430267 33.772163391 32.006500244 -34.772159576 -33.980758667 32.980762482 -34.842594147 -33.316947937 33.231868744 -35.057430267 -32.380737305 33.260547638 -35.428535461 -31.410833359 33.477191925 -35.518615723 -30.459177017 33.567745209 -35.577739716 -29.485630035 33.604705811 -35.607208252 -28.496026993 33.617927551 -35.618602753 -27.499160767 33.621803284 -35.621986389 -26.499868393 33.622692108 -35.622741699 -25.499986649 33.622852325 -35.622856140 -24.500000000 33.622871399 -35.622871399 -23.500000000 33.622871399 -35.622871399 -22.500000000 33.622871399 -35.622871399 -21.500000000 33.622871399 -35.622871399 -20.500000000 33.622871399 -35.622871399 -19.500000000 33.622871399 -35.622871399 -18.500000000 33.622871399 -35.622871399 -17.500000000 33.622871399 -35.622871399 -16.500000000 33.622871399 -35.622871399 -15.500000000 33.622871399 -35.622871399 -14.500000000 33.622871399 -35.622871399 -13.500000000 33.622871399 -35.622871399 -12.500000000 33.622871399 -35.622871399 -11.500000000 33.622871399 -35.622871399 -10.500000000 33.622871399 -35.622871399 -9.500000000 33.622871399 -35.622871399 -8.500000000 33.622871399 -35.622871399 -7.500000000 33.622871399 -35.622871399 -6.500000000 33.622871399 -35.622871399 -5.500000000 33.622871399 -35.622871399 -4.500000000 33.622871399 -35.622871399 -3.500000000 33.622871399 -35.622871399 -2.500000000 33.622871399 -35.622871399 -1.500000000 33.622871399 -35.622871399 -0.500000000 33.622871399 -35.622871399 0.500000000 33.622871399 -35.622871399 1.500000000 33.622871399 -35.622871399 2.500000000 33.622871399 -35.622871399 3.500000000 33.622871399 -35.622871399 4.500000000 33.622871399 -35.622871399 5.500000000 
33.622871399 -35.622871399 6.500000000 33.622871399 -35.622871399 7.500000000 33.622871399 -35.622871399 8.500000000 33.622871399 -35.622871399 9.500000000 33.622871399 -35.622871399 10.500000000 33.622871399 -35.622871399 11.500000000 33.622871399 -35.622871399 12.500000000 33.622871399 -35.622871399 13.500000000 33.622871399 -35.622871399 14.500000000 33.622871399 -35.622871399 15.500000000 33.622871399 -35.622871399 16.500000000 33.622871399 -35.622871399 17.500000000 33.622871399 -35.622871399 18.500000000 33.622871399 -35.622871399 19.500000000 33.622871399 -35.622871399 20.500000000 33.622871399 -35.622871399 21.500000000 33.622871399 -35.622871399 22.500000000 33.622871399 -35.622871399 23.500000000 33.622871399 -35.622871399 24.499986649 33.622856140 -35.622856140 25.499868393 33.622695923 -35.622741699 26.499156952 33.621803284 -35.621986389 27.496026993 33.617927551 -35.618602753 28.485630035 33.604709625 -35.607208252 29.459178925 33.567749023 -35.577739716 30.410833359 33.477191925 -35.518615723 31.380737305 33.260547638 -35.428535461 32.316951752 33.231872559 -35.057430267 32.980758667 32.980762482 -34.842590332 -33.006500244 33.772163391 -34.772163391 -32.313598633 33.912063599 -34.912063599 -31.413049698 34.040893555 -35.040893555 -30.463253021 34.115642548 -35.115642548 -29.487289429 34.150249481 -35.150253296 -28.496557236 34.163158417 -35.163162231 -27.499298096 34.166954041 -35.166961670 -26.499898911 34.167808533 -35.167808533 -25.499990463 34.167949677 -35.167949677 -24.500000000 34.167968750 -35.167964935 -23.500000000 34.167968750 -35.167968750 -22.500000000 34.167968750 -35.167968750 -21.500000000 34.167968750 -35.167968750 -20.500000000 34.167968750 -35.167968750 -19.500000000 34.167968750 -35.167968750 -18.500000000 34.167968750 -35.167968750 -17.500000000 34.167968750 -35.167968750 -16.500000000 34.167968750 -35.167968750 -15.500000000 34.167968750 -35.167968750 -14.500000000 34.167968750 -35.167968750 -13.500000000 34.167968750 
-35.167968750 -12.500000000 34.167968750 -35.167968750 -11.500000000 34.167968750 -35.167968750 -10.500000000 34.167968750 -35.167968750 -9.500000000 34.167968750 -35.167968750 -8.500000000 34.167968750 -35.167968750 -7.500000000 34.167968750 -35.167968750 -6.500000000 34.167968750 -35.167968750 -5.500000000 34.167968750 -35.167968750 -4.500000000 34.167968750 -35.167968750 -3.500000000 34.167968750 -35.167968750 -2.500000000 34.167968750 -35.167968750 -1.500000000 34.167968750 -35.167968750 -0.500000000 34.167968750 -35.167968750 0.500000000 34.167968750 -35.167968750 1.500000000 34.167968750 -35.167968750 2.500000000 34.167968750 -35.167968750 3.500000000 34.167968750 -35.167968750 4.500000000 34.167968750 -35.167968750 5.500000000 34.167968750 -35.167968750 6.500000000 34.167968750 -35.167968750 7.500000000 34.167968750 -35.167968750 8.500000000 34.167968750 -35.167968750 9.500000000 34.167968750 -35.167968750 10.500000000 34.167968750 -35.167968750 11.500000000 34.167968750 -35.167968750 12.500000000 34.167968750 -35.167968750 13.500000000 34.167968750 -35.167968750 14.500000000 34.167968750 -35.167968750 15.500000000 34.167968750 -35.167968750 16.500000000 34.167968750 -35.167968750 17.500000000 34.167968750 -35.167968750 18.500000000 34.167968750 -35.167968750 19.500000000 34.167968750 -35.167968750 20.500000000 34.167968750 -35.167968750 21.500000000 34.167968750 -35.167968750 22.500000000 34.167968750 -35.167968750 23.500000000 34.167968750 -35.167961121 24.499994278 34.167949677 -35.167949677 25.499898911 34.167808533 -35.167808533 26.499298096 34.166961670 -35.166961670 27.496557236 34.163162231 -35.163158417 28.487293243 34.150257111 -35.150253296 29.463256836 34.115638733 -35.115642548 30.413049698 34.040893555 -35.040893555 31.313602448 33.912063599 -34.912063599 32.006500244 33.772163391 -34.772163391 -32.035343170 -35.797355652 -33.840934753 -31.336603165 -35.879737854 -33.903385162 -30.442840576 -35.937091827 -33.946811676 -29.481933594 
-35.965766907 -33.968872070 -28.495168686 -35.976860046 -33.977619171 -27.498950958 -35.980201721 -33.980335236 -26.499822617 -35.980957031 -33.980976105 -25.499977112 -35.981090546 -33.981090546 -24.499998093 -35.981101990 -33.981101990 -23.500000000 -35.981101990 -33.981101990 -22.500000000 -35.981101990 -33.981101990 -21.500000000 -35.981101990 -33.981101990 -20.500000000 -35.981101990 -33.981101990 -19.500000000 -35.981101990 -33.981101990 -18.500000000 -35.981101990 -33.981101990 -17.500000000 -35.981101990 -33.981101990 -16.500000000 -35.981101990 -33.981101990 -15.500000000 -35.981101990 -33.981101990 -14.500000000 -35.981101990 -33.981101990 -13.500000000 -35.981101990 -33.981101990 -12.500000000 -35.981101990 -33.981101990 -11.500000000 -35.981101990 -33.981101990 -10.500000000 -35.981101990 -33.981101990 -9.500000000 -35.981101990 -33.981101990 -8.500000000 -35.981101990 -33.981101990 -7.500000000 -35.981101990 -33.981101990 -6.500000000 -35.981101990 -33.981101990 -5.500000000 -35.981101990 -33.981101990 -4.500000000 -35.981101990 -33.981101990 -3.500000000 -35.981101990 -33.981101990 -2.500000000 -35.981101990 -33.981101990 -1.500000000 -35.981101990 -33.981101990 -0.500000000 -35.981101990 -33.981101990 0.500000000 -35.981101990 -33.981101990 1.500000000 -35.981101990 -33.981101990 2.500000000 -35.981101990 -33.981101990 3.500000000 -35.981101990 -33.981101990 4.500000000 -35.981101990 -33.981101990 5.500000000 -35.981101990 -33.981101990 6.500000000 -35.981101990 -33.981101990 7.500000000 -35.981101990 -33.981101990 8.500000000 -35.981101990 -33.981101990 9.500000000 -35.981101990 -33.981101990 10.500000000 -35.981101990 -33.981101990 11.500000000 -35.981101990 -33.981101990 12.500000000 -35.981101990 -33.981101990 13.500000000 -35.981101990 -33.981101990 14.500000000 -35.981101990 -33.981101990 15.500000000 -35.981101990 -33.981101990 16.500000000 -35.981101990 -33.981101990 17.500000000 -35.981101990 -33.981101990 18.500000000 -35.981101990 
-33.981101990 19.500000000 -35.981101990 -33.981101990 20.500000000 -35.981101990 -33.981101990 21.500000000 -35.981101990 -33.981101990 22.500000000 -35.981101990 -33.981101990 23.499998093 -35.981101990 -33.981101990 24.499979019 -35.981086731 -33.981090546 25.499824524 -35.980957031 -33.980976105 26.498950958 -35.980201721 -33.980335236 27.495168686 -35.976860046 -33.977619171 28.481933594 -35.965766907 -33.968872070 29.442840576 -35.937091827 -33.946807861 30.336599350 -35.879737854 -33.903377533 31.035345078 -35.797355652 -33.840930939 -33.980762482 -34.842590332 -33.980762482 -33.316951752 -35.057430267 -34.231872559 -32.380741119 -35.428535461 -34.260547638 -31.410831451 -35.518615723 -34.477195740 -30.459177017 -35.577739716 -34.567749023 -29.485630035 -35.607204437 -34.604709625 -28.496023178 -35.618602753 -34.617927551 -27.499160767 -35.621982574 -34.621795654 -26.499868393 -35.622734070 -34.622692108 -25.499986649 -35.622856140 -34.622848511 -24.500000000 -35.622871399 -34.622871399 -23.500000000 -35.622871399 -34.622871399 -22.500000000 -35.622871399 -34.622871399 -21.500000000 -35.622871399 -34.622871399 -20.500000000 -35.622871399 -34.622871399 -19.500000000 -35.622871399 -34.622871399 -18.500000000 -35.622871399 -34.622871399 -17.500000000 -35.622871399 -34.622871399 -16.500000000 -35.622871399 -34.622871399 -15.500000000 -35.622871399 -34.622871399 -14.500000000 -35.622871399 -34.622871399 -13.500000000 -35.622871399 -34.622871399 -12.500000000 -35.622871399 -34.622871399 -11.500000000 -35.622871399 -34.622871399 -10.500000000 -35.622871399 -34.622871399 -9.500000000 -35.622871399 -34.622871399 -8.500000000 -35.622871399 -34.622871399 -7.500000000 -35.622871399 -34.622871399 -6.500000000 -35.622871399 -34.622871399 -5.500000000 -35.622871399 -34.622871399 -4.500000000 -35.622871399 -34.622871399 -3.500000000 -35.622871399 -34.622871399 -2.500000000 -35.622871399 -34.622871399 -1.500000000 -35.622871399 -34.622871399 -0.500000000 -35.622871399 
-34.622871399 0.500000000 -35.622871399 -34.622871399 1.500000000 -35.622871399 -34.622871399 2.500000000 -35.622871399 -34.622871399 3.500000000 -35.622871399 -34.622871399 4.500000000 -35.622871399 -34.622871399 5.500000000 -35.622871399 -34.622871399 6.500000000 -35.622871399 -34.622871399 7.500000000 -35.622871399 -34.622871399 8.500000000 -35.622871399 -34.622871399 9.500000000 -35.622871399 -34.622871399 10.500000000 -35.622871399 -34.622871399 11.500000000 -35.622871399 -34.622871399 12.500000000 -35.622871399 -34.622871399 13.500000000 -35.622871399 -34.622871399 14.500000000 -35.622871399 -34.622871399 15.500000000 -35.622871399 -34.622871399 16.500000000 -35.622871399 -34.622871399 17.500000000 -35.622871399 -34.622871399 18.500000000 -35.622871399 -34.622871399 19.500000000 -35.622871399 -34.622871399 20.500000000 -35.622871399 -34.622871399 21.500000000 -35.622871399 -34.622871399 22.500000000 -35.622871399 -34.622871399 23.500000000 -35.622871399 -34.622871399 24.499986649 -35.622856140 -34.622844696 25.499866486 -35.622734070 -34.622692108 26.499156952 -35.621978760 -34.621803284 27.496023178 -35.618598938 -34.617927551 28.485630035 -35.607208252 -34.604709625 29.459178925 -35.577739716 -34.567745209 30.410833359 -35.518615723 -34.477191925 31.380739212 -35.428535461 -34.260547638 32.316951752 -35.057430267 -34.231872559 32.980762482 -34.842590332 -33.980758667 -34.842590332 -33.980758667 -33.980762482 -34.297924042 -34.297924042 -34.297924042 -33.672851562 -34.565631866 -34.565631866 32.672851562 -34.565631866 -34.565628052 33.297924042 -34.297924042 -34.297927856 33.842590332 -33.980758667 -33.980758667 -35.057430267 -33.316947937 -34.231876373 -34.565631866 -33.672851562 -34.565631866 33.565631866 -33.672851562 -34.565628052 34.057430267 -33.316947937 -34.231868744 -35.797355652 -32.035343170 -33.840934753 -35.428535461 -32.380741119 -34.260547638 34.428535461 -32.380741119 -34.260547638 34.797355652 -32.035343170 -33.840934753 -35.879734039 
-31.336599350 -33.903385162 -35.518615723 -31.410831451 -34.477195740 34.518615723 -31.410831451 -34.477195740 34.879737854 -31.336599350 -33.903385162 -35.937091827 -30.442840576 -33.946811676 -35.577735901 -30.459177017 -34.567752838 34.577739716 -30.459177017 -34.567749023 34.937091827 -30.442840576 -33.946811676 -35.965759277 -29.481933594 -33.968864441 -35.607204437 -29.485630035 -34.604709625 34.607204437 -29.485630035 -34.604709625 34.965766907 -29.481933594 -33.968872070 -35.976860046 -28.495168686 -33.977619171 -35.618598938 -28.496023178 -34.617927551 34.618602753 -28.496023178 -34.617927551 34.976860046 -28.495168686 -33.977619171 -35.980201721 -27.498950958 -33.980335236 -35.621982574 -27.499156952 -34.621803284 34.621982574 -27.499160767 -34.621795654 34.980201721 -27.498950958 -33.980335236 -35.980957031 -26.499822617 -33.980976105 -35.622734070 -26.499868393 -34.622692108 34.622734070 -26.499868393 -34.622692108 34.980957031 -26.499822617 -33.980976105 -35.981090546 -25.499979019 -33.981090546 -35.622856140 -25.499986649 -34.622844696 34.622856140 -25.499986649 -34.622848511 34.981090546 -25.499977112 -33.981090546 -35.981101990 -24.499998093 -33.981101990 -35.622871399 -24.500000000 -34.622871399 34.622871399 -24.500000000 -34.622871399 34.981101990 -24.499998093 -33.981101990 -35.981101990 -23.500000000 -33.981101990 -35.622871399 -23.500000000 -34.622871399 34.622871399 -23.500000000 -34.622871399 34.981101990 -23.500000000 -33.981101990 -35.981101990 -22.500000000 -33.981101990 -35.622871399 -22.500000000 -34.622871399 34.622871399 -22.500000000 -34.622871399 34.981101990 -22.500000000 -33.981101990 -35.981101990 -21.500000000 -33.981101990 -35.622871399 -21.500000000 -34.622871399 34.622871399 -21.500000000 -34.622871399 34.981101990 -21.500000000 -33.981101990 -35.981101990 -20.500000000 -33.981101990 -35.622871399 -20.500000000 -34.622871399 34.622871399 -20.500000000 -34.622871399 34.981101990 -20.500000000 -33.981101990 -35.981101990 
-19.500000000 -33.981101990 -35.622871399 -19.500000000 -34.622871399 34.622871399 -19.500000000 -34.622871399 34.981101990 -19.500000000 -33.981101990 -35.981101990 -18.500000000 -33.981101990 -35.622871399 -18.500000000 -34.622871399 34.622871399 -18.500000000 -34.622871399 34.981101990 -18.500000000 -33.981101990 -35.981101990 -17.500000000 -33.981101990 -35.622871399 -17.500000000 -34.622871399 34.622871399 -17.500000000 -34.622871399 34.981101990 -17.500000000 -33.981101990 -35.981101990 -16.500000000 -33.981101990 -35.622871399 -16.500000000 -34.622871399 34.622871399 -16.500000000 -34.622871399 34.981101990 -16.500000000 -33.981101990 -35.981101990 -15.500000000 -33.981101990 -35.622871399 -15.500000000 -34.622871399 34.622871399 -15.500000000 -34.622871399 34.981101990 -15.500000000 -33.981101990 -35.981101990 -14.500000000 -33.981101990 -35.622871399 -14.500000000 -34.622871399 34.622871399 -14.500000000 -34.622871399 34.981101990 -14.500000000 -33.981101990 -35.981101990 -13.500000000 -33.981101990 -35.622871399 -13.500000000 -34.622871399 34.622871399 -13.500000000 -34.622871399 34.981101990 -13.500000000 -33.981101990 -35.981101990 -12.500000000 -33.981101990 -35.622871399 -12.500000000 -34.622871399 34.622871399 -12.500000000 -34.622871399 34.981101990 -12.500000000 -33.981101990 -35.981101990 -11.500000000 -33.981101990 -35.622871399 -11.500000000 -34.622871399 34.622871399 -11.500000000 -34.622871399 34.981101990 -11.500000000 -33.981101990 -35.981101990 -10.500000000 -33.981101990 -35.622871399 -10.500000000 -34.622871399 34.622871399 -10.500000000 -34.622871399 34.981101990 -10.500000000 -33.981101990 -35.981101990 -9.500000000 -33.981101990 -35.622871399 -9.500000000 -34.622871399 34.622871399 -9.500000000 -34.622871399 34.981101990 -9.500000000 -33.981101990 -35.981101990 -8.500000000 -33.981101990 -35.622871399 -8.500000000 -34.622871399 34.622871399 -8.500000000 -34.622871399 34.981101990 -8.500000000 -33.981101990 -35.981101990 -7.500000000 
-33.981101990 -35.622871399 -7.500000000 -34.622871399 34.622871399 -7.500000000 -34.622871399 34.981101990 -7.500000000 -33.981101990 -35.981101990 -6.500000000 -33.981101990 -35.622871399 -6.500000000 -34.622871399 34.622871399 -6.500000000 -34.622871399 34.981101990 -6.500000000 -33.981101990 -35.981101990 -5.500000000 -33.981101990 -35.622871399 -5.500000000 -34.622871399 34.622871399 -5.500000000 -34.622871399 34.981101990 -5.500000000 -33.981101990 -35.981101990 -4.500000000 -33.981101990 -35.622871399 -4.500000000 -34.622871399 34.622871399 -4.500000000 -34.622871399 34.981101990 -4.500000000 -33.981101990 -35.981101990 -3.500000000 -33.981101990 -35.622871399 -3.500000000 -34.622871399 34.622871399 -3.500000000 -34.622871399 34.981101990 -3.500000000 -33.981101990 -35.981101990 -2.500000000 -33.981101990 -35.622871399 -2.500000000 -34.622871399 34.622871399 -2.500000000 -34.622871399 34.981101990 -2.500000000 -33.981101990 -35.981101990 -1.500000000 -33.981101990 -35.622871399 -1.500000000 -34.622871399 34.622871399 -1.500000000 -34.622871399 34.981101990 -1.500000000 -33.981101990 -35.981101990 -0.500000000 -33.981101990 -35.622871399 -0.500000000 -34.622871399 34.622871399 -0.500000000 -34.622871399 34.981101990 -0.500000000 -33.981101990 -35.981101990 0.500000000 -33.981101990 -35.622871399 0.500000000 -34.622871399 34.622871399 0.500000000 -34.622871399 34.981101990 0.500000000 -33.981101990 -35.981101990 1.500000000 -33.981101990 -35.622871399 1.500000000 -34.622871399 34.622871399 1.500000000 -34.622871399 34.981101990 1.500000000 -33.981101990 -35.981101990 2.500000000 -33.981101990 -35.622871399 2.500000000 -34.622871399 34.622871399 2.500000000 -34.622871399 34.981101990 2.500000000 -33.981101990 -35.981101990 3.500000000 -33.981101990 -35.622871399 3.500000000 -34.622871399 34.622871399 3.500000000 -34.622871399 34.981101990 3.500000000 -33.981101990 -35.981101990 4.500000000 -33.981101990 -35.622871399 4.500000000 -34.622871399 34.622871399 
4.500000000 -34.622871399 34.981101990 4.500000000 -33.981101990 -35.981101990 5.500000000 -33.981101990 -35.622871399 5.500000000 -34.622871399 34.622871399 5.500000000 -34.622871399 34.981101990 5.500000000 -33.981101990 -35.981101990 6.500000000 -33.981101990 -35.622871399 6.500000000 -34.622871399 34.622871399 6.500000000 -34.622871399 34.981101990 6.500000000 -33.981101990 -35.981101990 7.500000000 -33.981101990 -35.622871399 7.500000000 -34.622871399 34.622871399 7.500000000 -34.622871399 34.981101990 7.500000000 -33.981101990 -35.981101990 8.500000000 -33.981101990 -35.622871399 8.500000000 -34.622871399 34.622871399 8.500000000 -34.622871399 34.981101990 8.500000000 -33.981101990 -35.981101990 9.500000000 -33.981101990 -35.622871399 9.500000000 -34.622871399 34.622871399 9.500000000 -34.622871399 34.981101990 9.500000000 -33.981101990 -35.981101990 10.500000000 -33.981101990 -35.622871399 10.500000000 -34.622871399 34.622871399 10.500000000 -34.622871399 34.981101990 10.500000000 -33.981101990 -35.981101990 11.500000000 -33.981101990 -35.622871399 11.500000000 -34.622871399 34.622871399 11.500000000 -34.622871399 34.981101990 11.500000000 -33.981101990 -35.981101990 12.500000000 -33.981101990 -35.622871399 12.500000000 -34.622871399 34.622871399 12.500000000 -34.622871399 34.981101990 12.500000000 -33.981101990 -35.981101990 13.500000000 -33.981101990 -35.622871399 13.500000000 -34.622871399 34.622871399 13.500000000 -34.622871399 34.981101990 13.500000000 -33.981101990 -35.981101990 14.500000000 -33.981101990 -35.622871399 14.500000000 -34.622871399 34.622871399 14.500000000 -34.622871399 34.981101990 14.500000000 -33.981101990 -35.981101990 15.500000000 -33.981101990 -35.622871399 15.500000000 -34.622871399 34.622871399 15.500000000 -34.622871399 34.981101990 15.500000000 -33.981101990 -35.981101990 16.500000000 -33.981101990 -35.622871399 16.500000000 -34.622871399 34.622871399 16.500000000 -34.622871399 34.981101990 16.500000000 -33.981101990 
-35.981101990 17.500000000 -33.981101990 -35.622871399 17.500000000 -34.622871399 34.622871399 17.500000000 -34.622871399 34.981101990 17.500000000 -33.981101990 -35.981101990 18.500000000 -33.981101990 -35.622871399 18.500000000 -34.622871399 34.622871399 18.500000000 -34.622871399 34.981101990 18.500000000 -33.981101990 -35.981101990 19.500000000 -33.981101990 -35.622871399 19.500000000 -34.622871399 34.622871399 19.500000000 -34.622871399 34.981101990 19.500000000 -33.981101990 -35.981101990 20.500000000 -33.981101990 -35.622871399 20.500000000 -34.622871399 34.622871399 20.500000000 -34.622871399 34.981101990 20.500000000 -33.981101990 -35.981101990 21.500000000 -33.981101990 -35.622871399 21.500000000 -34.622871399 34.622871399 21.500000000 -34.622871399 34.981101990 21.500000000 -33.981101990 -35.981101990 22.500000000 -33.981101990 -35.622871399 22.500000000 -34.622871399 34.622871399 22.500000000 -34.622871399 34.981101990 22.500000000 -33.981101990 -35.981101990 23.499998093 -33.981101990 -35.622871399 23.500000000 -34.622871399 34.622871399 23.500000000 -34.622871399 34.981101990 23.499998093 -33.981101990 -35.981086731 24.499979019 -33.981090546 -35.622856140 24.499986649 -34.622848511 34.622856140 24.499986649 -34.622844696 34.981086731 24.499979019 -33.981090546 -35.980957031 25.499824524 -33.980976105 -35.622734070 25.499868393 -34.622692108 34.622734070 25.499866486 -34.622692108 34.980957031 25.499824524 -33.980976105 -35.980201721 26.498950958 -33.980335236 -35.621982574 26.499156952 -34.621803284 34.621978760 26.499156952 -34.621803284 34.980201721 26.498950958 -33.980335236 -35.976860046 27.495168686 -33.977619171 -35.618602753 27.496026993 -34.617927551 34.618598938 27.496023178 -34.617927551 34.976860046 27.495168686 -33.977619171 -35.965766907 28.481937408 -33.968868256 -35.607208252 28.485630035 -34.604709625 34.607208252 28.485630035 -34.604709625 34.965766907 28.481933594 -33.968872070 -35.937091827 29.442840576 -33.946811676 -35.577739716 
29.459177017 -34.567745209 34.577739716 29.459178925 -34.567745209 34.937091827 29.442840576 -33.946807861 -35.879737854 30.336603165 -33.903377533 -35.518615723 30.410833359 -34.477191925 34.518615723 30.410833359 -34.477191925 34.879737854 30.336599350 -33.903377533 -35.797355652 31.035345078 -33.840934753 -35.428531647 31.380737305 -34.260543823 34.428535461 31.380739212 -34.260547638 34.797355652 31.035345078 -33.840930939 -35.057430267 32.316951752 -34.231872559 -34.565628052 32.672851562 -34.565631866 33.565628052 32.672851562 -34.565628052 34.057430267 32.316951752 -34.231868744 -34.842590332 32.980762482 -33.980762482 -34.297924042 33.297924042 -34.297931671 -33.672851562 33.565631866 -34.565631866 32.672851562 33.565631866 -34.565628052 33.297924042 33.297924042 -34.297924042 33.842590332 32.980762482 -33.980758667 -33.980758667 33.842590332 -33.980762482 -33.316947937 34.057430267 -34.231876373 -32.380737305 34.428535461 -34.260547638 -31.410831451 34.518615723 -34.477191925 -30.459177017 34.577739716 -34.567749023 -29.485630035 34.607208252 -34.604709625 -28.496028900 34.618598938 -34.617931366 -27.499160767 34.621982574 -34.621803284 -26.499868393 34.622734070 -34.622692108 -25.499986649 34.622856140 -34.622844696 -24.500000000 34.622871399 -34.622863770 -23.500000000 34.622871399 -34.622871399 -22.500000000 34.622871399 -34.622871399 -21.500000000 34.622871399 -34.622871399 -20.500000000 34.622871399 -34.622871399 -19.500000000 34.622871399 -34.622871399 -18.500000000 34.622871399 -34.622871399 -17.500000000 34.622871399 -34.622871399 -16.500000000 34.622871399 -34.622871399 -15.500000000 34.622871399 -34.622871399 -14.500000000 34.622871399 -34.622871399 -13.500000000 34.622871399 -34.622871399 -12.500000000 34.622871399 -34.622871399 -11.500000000 34.622871399 -34.622871399 -10.500000000 34.622871399 -34.622871399 -9.500000000 34.622871399 -34.622871399 -8.500000000 34.622871399 -34.622871399 -7.500000000 34.622871399 -34.622871399 -6.500000000 
34.622871399 -34.622871399 -5.500000000 34.622871399 -34.622871399 -4.500000000 34.622871399 -34.622871399 -3.500000000 34.622871399 -34.622871399 -2.500000000 34.622871399 -34.622871399 -1.500000000 34.622871399 -34.622871399 -0.500000000 34.622871399 -34.622871399 0.500000000 34.622871399 -34.622871399 1.500000000 34.622871399 -34.622871399 2.500000000 34.622871399 -34.622871399 3.500000000 34.622871399 -34.622871399 4.500000000 34.622871399 -34.622871399 5.500000000 34.622871399 -34.622871399 6.500000000 34.622871399 -34.622871399 7.500000000 34.622871399 -34.622871399 8.500000000 34.622871399 -34.622871399 9.500000000 34.622871399 -34.622871399 10.500000000 34.622871399 -34.622871399 11.500000000 34.622871399 -34.622871399 12.500000000 34.622871399 -34.622871399 13.500000000 34.622871399 -34.622871399 14.500000000 34.622871399 -34.622871399 15.500000000 34.622871399 -34.622871399 16.500000000 34.622871399 -34.622871399 17.500000000 34.622871399 -34.622871399 18.500000000 34.622871399 -34.622871399 19.500000000 34.622871399 -34.622871399 20.500000000 34.622871399 -34.622871399 21.500000000 34.622871399 -34.622871399 22.500000000 34.622871399 -34.622871399 23.500000000 34.622871399 -34.622871399 24.499986649 34.622856140 -34.622844696 25.499868393 34.622734070 -34.622692108 26.499156952 34.621982574 -34.621803284 27.496026993 34.618602753 -34.617927551 28.485631943 34.607208252 -34.604709625 29.459178925 34.577739716 -34.567752838 30.410833359 34.518615723 -34.477191925 31.380737305 34.428535461 -34.260547638 32.316947937 34.057430267 -34.231868744 32.980758667 33.842590332 -33.980758667 -32.035343170 34.797355652 -33.840934753 -31.336603165 34.879734039 -33.903381348 -30.442840576 34.937091827 -33.946807861 -29.481937408 34.965763092 -33.968864441 -28.495172501 34.976860046 -33.977611542 -27.498952866 34.980201721 -33.980331421 -26.499826431 34.980957031 -33.980972290 -25.499979019 34.981086731 -33.981086731 -24.499998093 34.981101990 -33.981098175 -23.500000000 
34.981101990 -33.981101990 -22.500000000 34.981101990 -33.981101990 -21.500000000 34.981101990 -33.981101990 -20.500000000 34.981101990 -33.981101990 -19.500000000 34.981101990 -33.981101990 -18.500000000 34.981101990 -33.981101990 -17.500000000 34.981101990 -33.981101990 -16.500000000 34.981101990 -33.981101990 -15.500000000 34.981101990 -33.981101990 -14.500000000 34.981101990 -33.981101990 -13.500000000 34.981101990 -33.981101990 -12.500000000 34.981101990 -33.981101990 -11.500000000 34.981101990 -33.981101990 -10.500000000 34.981101990 -33.981101990 -9.500000000 34.981101990 -33.981101990 -8.500000000 34.981101990 -33.981101990 -7.500000000 34.981101990 -33.981101990 -6.500000000 34.981101990 -33.981101990 -5.500000000 34.981101990 -33.981101990 -4.500000000 34.981101990 -33.981101990 -3.500000000 34.981101990 -33.981101990 -2.500000000 34.981101990 -33.981101990 -1.500000000 34.981101990 -33.981101990 -0.500000000 34.981101990 -33.981101990 0.500000000 34.981101990 -33.981101990 1.500000000 34.981101990 -33.981101990 2.500000000 34.981101990 -33.981101990 3.500000000 34.981101990 -33.981101990 4.500000000 34.981101990 -33.981101990 5.500000000 34.981101990 -33.981101990 6.500000000 34.981101990 -33.981101990 7.500000000 34.981101990 -33.981101990 8.500000000 34.981101990 -33.981101990 9.500000000 34.981101990 -33.981101990 10.500000000 34.981101990 -33.981101990 11.500000000 34.981101990 -33.981101990 12.500000000 34.981101990 -33.981101990 13.500000000 34.981101990 -33.981101990 14.500000000 34.981101990 -33.981101990 15.500000000 34.981101990 -33.981101990 16.500000000 34.981101990 -33.981101990 17.500000000 34.981101990 -33.981101990 18.500000000 34.981101990 -33.981101990 19.500000000 34.981101990 -33.981101990 20.500000000 34.981101990 -33.981101990 21.500000000 34.981101990 -33.981101990 22.500000000 34.981101990 -33.981101990 23.499998093 34.981101990 -33.981101990 24.499979019 34.981086731 -33.981086731 25.499822617 34.980957031 -33.980972290 
26.498952866 34.980201721 -33.980331421 27.495172501 34.976863861 -33.977619171 28.481937408 34.965763092 -33.968864441 29.442840576 34.937091827 -33.946811676 30.336603165 34.879737854 -33.903381348 31.035345078 34.797355652 -33.840934753 -33.030693054 -35.846317291 -33.030693054 -32.334243774 -35.954723358 -33.254272461 -31.405673981 -36.112342834 -33.216842651 -30.460596085 -36.185966492 -33.220710754 -29.486715317 -36.217514038 -33.227874756 -28.496374130 -36.228954315 -33.231601715 -27.499221802 -36.232299805 -33.232868195 -26.499872208 -36.233074188 -33.233169556 -25.499986649 -36.233207703 -33.233219147 -24.499998093 -36.233222961 -33.233222961 -23.500000000 -36.233222961 -33.233222961 -22.500000000 -36.233222961 -33.233222961 -21.500000000 -36.233222961 -33.233222961 -20.500000000 -36.233222961 -33.233222961 -19.500000000 -36.233222961 -33.233222961 -18.500000000 -36.233222961 -33.233222961 -17.500000000 -36.233222961 -33.233222961 -16.500000000 -36.233222961 -33.233222961 -15.500000000 -36.233222961 -33.233222961 -14.500000000 -36.233222961 -33.233222961 -13.500000000 -36.233222961 -33.233222961 -12.500000000 -36.233222961 -33.233222961 -11.500000000 -36.233222961 -33.233222961 -10.500000000 -36.233222961 -33.233222961 -9.500000000 -36.233222961 -33.233222961 -8.500000000 -36.233222961 -33.233222961 -7.500000000 -36.233222961 -33.233222961 -6.500000000 -36.233222961 -33.233222961 -5.500000000 -36.233222961 -33.233222961 -4.500000000 -36.233222961 -33.233222961 -3.500000000 -36.233222961 -33.233222961 -2.500000000 -36.233222961 -33.233222961 -1.500000000 -36.233222961 -33.233222961 -0.500000000 -36.233222961 -33.233222961 0.500000000 -36.233222961 -33.233222961 1.500000000 -36.233222961 -33.233222961 2.500000000 -36.233222961 -33.233222961 3.500000000 -36.233222961 -33.233222961 4.500000000 -36.233222961 -33.233222961 5.500000000 -36.233222961 -33.233222961 6.500000000 -36.233222961 -33.233222961 7.500000000 -36.233222961 -33.233222961 8.500000000 
-36.233222961 -33.233222961 9.500000000 -36.233222961 -33.233222961 10.500000000 -36.233222961 -33.233222961 11.500000000 -36.233222961 -33.233222961 12.500000000 -36.233222961 -33.233222961 13.500000000 -36.233222961 -33.233222961 14.500000000 -36.233222961 -33.233222961 15.500000000 -36.233222961 -33.233222961 16.500000000 -36.233222961 -33.233222961 17.500000000 -36.233222961 -33.233222961 18.500000000 -36.233222961 -33.233222961 19.500000000 -36.233222961 -33.233222961 20.500000000 -36.233222961 -33.233222961 21.500000000 -36.233222961 -33.233222961 22.500000000 -36.233222961 -33.233222961 23.500000000 -36.233222961 -33.233222961 24.499984741 -36.233207703 -33.233219147 25.499872208 -36.233074188 -33.233173370 26.499225616 -36.232299805 -33.232868195 27.496377945 -36.228958130 -33.231605530 28.486719131 -36.217514038 -33.227878571 29.460596085 -36.185966492 -33.220718384 30.405673981 -36.112342834 -33.216842651 31.334243774 -35.954723358 -33.254276276 32.030693054 -35.846313477 -33.030693054 -34.772163391 -34.772159576 -33.006500244 -34.231872559 -35.057430267 -33.316947937 -33.424438477 -35.498546600 -33.424442291 -32.736049652 -35.670326233 -33.678356171 31.736053467 -35.670326233 -33.678352356 32.424442291 -35.498538971 -33.424442291 33.231872559 -35.057430267 -33.316951752 33.772163391 -34.772159576 -33.006500244 -35.057430267 -34.231872559 -33.316947937 -34.565628052 -34.565628052 -33.672851562 33.565631866 -34.565628052 -33.672847748 34.057430267 -34.231868744 -33.316947937 -35.846313477 -33.030693054 -33.030693054 -35.498542786 -33.424438477 -33.424438477 34.498546600 -33.424438477 -33.424438477 34.846317291 -33.030689240 -33.030693054 -35.954723358 -32.334243774 -33.254276276 -35.670326233 -32.736049652 -33.678352356 34.670322418 -32.736049652 -33.678352356 34.954723358 -32.334243774 -33.254272461 -36.112342834 -31.405673981 -33.216842651 35.112342834 -31.405673981 -33.216842651 -36.185966492 -30.460596085 -33.220722198 35.185966492 -30.460596085 
-33.220710754 -36.217510223 -29.486719131 -33.227870941 35.217514038 -29.486715317 -33.227874756 -36.228954315 -28.496377945 -33.231601715 35.228954315 -28.496374130 -33.231601715 -36.232299805 -27.499221802 -33.232868195 35.232299805 -27.499221802 -33.232868195 -36.233074188 -26.499872208 -33.233165741 35.233074188 -26.499872208 -33.233169556 -36.233207703 -25.499984741 -33.233219147 35.233207703 -25.499986649 -33.233219147 -36.233222961 -24.500000000 -33.233222961 35.233222961 -24.499998093 -33.233222961 -36.233222961 -23.500000000 -33.233222961 35.233222961 -23.500000000 -33.233222961 -36.233222961 -22.500000000 -33.233222961 35.233222961 -22.500000000 -33.233222961 -36.233222961 -21.500000000 -33.233222961 35.233222961 -21.500000000 -33.233222961 -36.233222961 -20.500000000 -33.233222961 35.233222961 -20.500000000 -33.233222961 -36.233222961 -19.500000000 -33.233222961 35.233222961 -19.500000000 -33.233222961 -36.233222961 -18.500000000 -33.233222961 35.233222961 -18.500000000 -33.233222961 -36.233222961 -17.500000000 -33.233222961 35.233222961 -17.500000000 -33.233222961 -36.233222961 -16.500000000 -33.233222961 35.233222961 -16.500000000 -33.233222961 -36.233222961 -15.500000000 -33.233222961 35.233222961 -15.500000000 -33.233222961 -36.233222961 -14.500000000 -33.233222961 35.233222961 -14.500000000 -33.233222961 -36.233222961 -13.500000000 -33.233222961 35.233222961 -13.500000000 -33.233222961 -36.233222961 -12.500000000 -33.233222961 35.233222961 -12.500000000 -33.233222961 -36.233222961 -11.500000000 -33.233222961 35.233222961 -11.500000000 -33.233222961 -36.233222961 -10.500000000 -33.233222961 35.233222961 -10.500000000 -33.233222961 -36.233222961 -9.500000000 -33.233222961 35.233222961 -9.500000000 -33.233222961 -36.233222961 -8.500000000 -33.233222961 35.233222961 -8.500000000 -33.233222961 -36.233222961 -7.500000000 -33.233222961 35.233222961 -7.500000000 -33.233222961 -36.233222961 -6.500000000 -33.233222961 35.233222961 -6.500000000 -33.233222961 
-36.233222961 -5.500000000 -33.233222961 35.233222961 -5.500000000 -33.233222961 -36.233222961 -4.500000000 -33.233222961 35.233222961 -4.500000000 -33.233222961 -36.233222961 -3.500000000 -33.233222961 35.233222961 -3.500000000 -33.233222961 -36.233222961 -2.500000000 -33.233222961 35.233222961 -2.500000000 -33.233222961 -36.233222961 -1.500000000 -33.233222961 35.233222961 -1.500000000 -33.233222961 -36.233222961 -0.500000000 -33.233222961 35.233222961 -0.500000000 -33.233222961 -36.233222961 0.500000000 -33.233222961 35.233222961 0.500000000 -33.233222961 -36.233222961 1.500000000 -33.233222961 35.233222961 1.500000000 -33.233222961 -36.233222961 2.500000000 -33.233222961 35.233222961 2.500000000 -33.233222961 -36.233222961 3.500000000 -33.233222961 35.233222961 3.500000000 -33.233222961 -36.233222961 4.500000000 -33.233222961 35.233222961 4.500000000 -33.233222961 -36.233222961 5.500000000 -33.233222961 35.233222961 5.500000000 -33.233222961 -36.233222961 6.500000000 -33.233222961 35.233222961 6.500000000 -33.233222961 -36.233222961 7.500000000 -33.233222961 35.233222961 7.500000000 -33.233222961 -36.233222961 8.500000000 -33.233222961 35.233222961 8.500000000 -33.233222961 -36.233222961 9.500000000 -33.233222961 35.233222961 9.500000000 -33.233222961 -36.233222961 10.500000000 -33.233222961 35.233222961 10.500000000 -33.233222961 -36.233222961 11.500000000 -33.233222961 35.233222961 11.500000000 -33.233222961 -36.233222961 12.500000000 -33.233222961 35.233222961 12.500000000 -33.233222961 -36.233222961 13.500000000 -33.233222961 35.233222961 13.500000000 -33.233222961 -36.233222961 14.500000000 -33.233222961 35.233222961 14.500000000 -33.233222961 -36.233222961 15.500000000 -33.233222961 35.233222961 15.500000000 -33.233222961 -36.233222961 16.500000000 -33.233222961 35.233222961 16.500000000 -33.233222961 -36.233222961 17.500000000 -33.233222961 35.233222961 17.500000000 -33.233222961 -36.233222961 18.500000000 -33.233222961 35.233222961 18.500000000 
-33.233222961 -36.233222961 19.500000000 -33.233222961 35.233222961 19.500000000 -33.233222961 -36.233222961 20.500000000 -33.233222961 35.233222961 20.500000000 -33.233222961 -36.233222961 21.500000000 -33.233222961 35.233222961 21.500000000 -33.233222961 -36.233222961 22.500000000 -33.233222961 35.233222961 22.500000000 -33.233222961 -36.233222961 23.500000000 -33.233222961 35.233222961 23.500000000 -33.233222961 -36.233207703 24.499984741 -33.233219147 35.233207703 24.499984741 -33.233219147 -36.233074188 25.499872208 -33.233173370 35.233074188 25.499872208 -33.233173370 -36.232299805 26.499225616 -33.232868195 35.232299805 26.499225616 -33.232868195 -36.228958130 27.496377945 -33.231605530 35.228958130 27.496377945 -33.231605530 -36.217517853 28.486719131 -33.227874756 35.217514038 28.486719131 -33.227878571 -36.185966492 29.460596085 -33.220714569 35.185966492 29.460596085 -33.220718384 -36.112346649 30.405673981 -33.216838837 35.112342834 30.405673981 -33.216842651 -35.954723358 31.334243774 -33.254272461 -35.670326233 31.736053467 -33.678352356 34.670326233 31.736053467 -33.678352356 34.954723358 31.334243774 -33.254276276 -35.846317291 32.030693054 -33.030689240 -35.498538971 32.424442291 -33.424442291 34.498538971 32.424442291 -33.424442291 34.846313477 32.030693054 -33.030693054 -35.057430267 33.231872559 -33.316947937 -34.565628052 33.565628052 -33.672851562 33.565624237 33.565631866 -33.672851562 34.057430267 33.231872559 -33.316947937 -34.772159576 33.772163391 -33.006500244 -34.231872559 34.057430267 -33.316947937 -33.424438477 34.498542786 -33.424438477 -32.736049652 34.670326233 -33.678352356 31.736051559 34.670326233 -33.678356171 32.424438477 34.498546600 -33.424442291 33.231868744 34.057430267 -33.316951752 33.772159576 33.772163391 -33.006500244 -33.030693054 34.846313477 -33.030693054 -32.334243774 34.954723358 -33.254276276 -31.405670166 35.112342834 -33.216838837 -30.460596085 35.185966492 -33.220722198 -29.486719131 35.217517853 
-33.227870941 -28.496377945 35.228958130 -33.231597900 -27.499225616 35.232299805 -33.232860565 -26.499874115 35.233074188 -33.233165741 -25.499984741 35.233203888 -33.233219147 -24.500000000 35.233222961 -33.233222961 -23.500000000 35.233222961 -33.233222961 -22.500000000 35.233222961 -33.233222961 -21.500000000 35.233222961 -33.233222961 -20.500000000 35.233222961 -33.233222961 -19.500000000 35.233222961 -33.233222961 -18.500000000 35.233222961 -33.233222961 -17.500000000 35.233222961 -33.233222961 -16.500000000 35.233222961 -33.233222961 -15.500000000 35.233222961 -33.233222961 -14.500000000 35.233222961 -33.233222961 -13.500000000 35.233222961 -33.233222961 -12.500000000 35.233222961 -33.233222961 -11.500000000 35.233222961 -33.233222961 -10.500000000 35.233222961 -33.233222961 -9.500000000 35.233222961 -33.233222961 -8.500000000 35.233222961 -33.233222961 -7.500000000 35.233222961 -33.233222961 -6.500000000 35.233222961 -33.233222961 -5.500000000 35.233222961 -33.233222961 -4.500000000 35.233222961 -33.233222961 -3.500000000 35.233222961 -33.233222961 -2.500000000 35.233222961 -33.233222961 -1.500000000 35.233222961 -33.233222961 -0.500000000 35.233222961 -33.233222961 0.500000000 35.233222961 -33.233222961 1.500000000 35.233222961 -33.233222961 2.500000000 35.233222961 -33.233222961 3.500000000 35.233222961 -33.233222961 4.500000000 35.233222961 -33.233222961 5.500000000 35.233222961 -33.233222961 6.500000000 35.233222961 -33.233222961 7.500000000 35.233222961 -33.233222961 8.500000000 35.233222961 -33.233222961 9.500000000 35.233222961 -33.233222961 10.500000000 35.233222961 -33.233222961 11.500000000 35.233222961 -33.233222961 12.500000000 35.233222961 -33.233222961 13.500000000 35.233222961 -33.233222961 14.500000000 35.233222961 -33.233222961 15.500000000 35.233222961 -33.233222961 16.500000000 35.233222961 -33.233222961 17.500000000 35.233222961 -33.233222961 18.500000000 35.233222961 -33.233222961 19.500000000 35.233222961 -33.233222961 20.500000000 
35.233222961 -33.233222961 21.500000000 35.233222961 -33.233222961 22.500000000 35.233222961 -33.233222961 23.500000000 35.233222961 -33.233222961 24.499984741 35.233203888 -33.233215332 25.499872208 35.233074188 -33.233165741 26.499225616 35.232299805 -33.232860565 27.496377945 35.228958130 -33.231597900 28.486719131 35.217517853 -33.227867126 29.460596085 35.185966492 -33.220714569 30.405673981 35.112346649 -33.216838837 31.334243774 34.954723358 -33.254272461 32.030693054 34.846317291 -33.030693054 -33.840934753 -35.797355652 -32.035343170 -33.254276276 -35.954723358 -32.334243774 -32.371025085 -36.154674530 -32.371025085 -31.438955307 -36.280395508 -32.375358582 -30.476007462 -36.344284058 -32.379222870 -29.492319107 -36.371490479 -32.382308960 -28.498050690 -36.381023407 -32.383811951 -27.499622345 -36.383720398 -32.384300232 -26.499948502 -36.384311676 -32.384403229 -25.499996185 -36.384407043 -32.384422302 -24.500000000 -36.384418488 -32.384422302 -23.500000000 -36.384418488 -32.384422302 -22.500000000 -36.384418488 -32.384422302 -21.500000000 -36.384418488 -32.384422302 -20.500000000 -36.384418488 -32.384422302 -19.500000000 -36.384418488 -32.384422302 -18.500000000 -36.384418488 -32.384422302 -17.500000000 -36.384418488 -32.384422302 -16.500000000 -36.384418488 -32.384422302 -15.500000000 -36.384418488 -32.384422302 -14.500000000 -36.384418488 -32.384422302 -13.500000000 -36.384418488 -32.384422302 -12.500000000 -36.384418488 -32.384422302 -11.500000000 -36.384418488 -32.384422302 -10.500000000 -36.384418488 -32.384422302 -9.500000000 -36.384418488 -32.384422302 -8.500000000 -36.384418488 -32.384422302 -7.500000000 -36.384418488 -32.384422302 -6.500000000 -36.384418488 -32.384422302 -5.500000000 -36.384418488 -32.384422302 -4.500000000 -36.384418488 -32.384422302 -3.500000000 -36.384418488 -32.384422302 -2.500000000 -36.384418488 -32.384422302 -1.500000000 -36.384418488 -32.384422302 -0.500000000 -36.384418488 -32.384422302 0.500000000 -36.384418488 
-32.384422302 1.500000000 -36.384418488 -32.384422302 2.500000000 -36.384418488 -32.384422302 3.500000000 -36.384418488 -32.384422302 4.500000000 -36.384418488 -32.384422302 5.500000000 -36.384418488 -32.384422302 6.500000000 -36.384418488 -32.384422302 7.500000000 -36.384418488 -32.384422302 8.500000000 -36.384418488 -32.384422302 9.500000000 -36.384418488 -32.384422302 10.500000000 -36.384418488 -32.384422302 11.500000000 -36.384418488 -32.384422302 12.500000000 -36.384418488 -32.384422302 13.500000000 -36.384418488 -32.384422302 14.500000000 -36.384418488 -32.384422302 15.500000000 -36.384418488 -32.384422302 16.500000000 -36.384418488 -32.384422302 17.500000000 -36.384418488 -32.384422302 18.500000000 -36.384418488 -32.384422302 19.500000000 -36.384418488 -32.384422302 20.500000000 -36.384418488 -32.384422302 21.500000000 -36.384418488 -32.384422302 22.500000000 -36.384418488 -32.384422302 23.500000000 -36.384418488 -32.384422302 24.499996185 -36.384407043 -32.384422302 25.499948502 -36.384307861 -32.384407043 26.499618530 -36.383720398 -32.384300232 27.498050690 -36.381023407 -32.383811951 28.492321014 -36.371490479 -32.382316589 29.476007462 -36.344280243 -32.379222870 30.438953400 -36.280391693 -32.375358582 31.371026993 -36.154674530 -32.371025085 32.254276276 -35.954723358 -32.334247589 32.840934753 -35.797351837 -32.035343170 -34.912067413 -34.912067413 -32.313598633 -34.260543823 -35.428531647 -32.380737305 -33.678352356 -35.670322418 -32.736049652 32.678356171 -35.670322418 -32.736053467 33.260543823 -35.428531647 -32.380737305 33.912067413 -34.912059784 -32.313602448 -35.797355652 -33.840934753 -32.035343170 -35.428531647 -34.260547638 -32.380737305 34.428535461 -34.260540009 -32.380737305 34.797355652 -33.840934753 -32.035343170 -35.954719543 -33.254276276 -32.334243774 -35.670322418 -33.678352356 -32.736053467 34.670322418 -33.678352356 -32.736053467 34.954723358 -33.254276276 -32.334243774 -36.154674530 -32.371025085 -32.371025085 35.154674530 
-32.371025085 -32.371025085 -36.280395508 -31.438953400 -32.375358582 35.280395508 -31.438953400 -32.375358582 -36.344284058 -30.476003647 -32.379226685 35.344284058 -30.476007462 -32.379222870 -36.371490479 -29.492319107 -32.382312775 35.371490479 -29.492319107 -32.382308960 -36.381023407 -28.498050690 -32.383808136 35.381023407 -28.498050690 -32.383811951 -36.383724213 -27.499622345 -32.384296417 35.383720398 -27.499622345 -32.384300232 -36.384311676 -26.499948502 -32.384403229 35.384311676 -26.499948502 -32.384403229 -36.384407043 -25.499996185 -32.384418488 35.384407043 -25.499996185 -32.384422302 -36.384422302 -24.500000000 -32.384422302 35.384418488 -24.500000000 -32.384422302 -36.384422302 -23.500000000 -32.384422302 35.384418488 -23.500000000 -32.384422302 -36.384422302 -22.500000000 -32.384422302 35.384418488 -22.500000000 -32.384422302 -36.384422302 -21.500000000 -32.384422302 35.384418488 -21.500000000 -32.384422302 -36.384422302 -20.500000000 -32.384422302 35.384418488 -20.500000000 -32.384422302 -36.384422302 -19.500000000 -32.384422302 35.384418488 -19.500000000 -32.384422302 -36.384422302 -18.500000000 -32.384422302 35.384418488 -18.500000000 -32.384422302 -36.384422302 -17.500000000 -32.384422302 35.384418488 -17.500000000 -32.384422302 -36.384422302 -16.500000000 -32.384422302 35.384418488 -16.500000000 -32.384422302 -36.384422302 -15.500000000 -32.384422302 35.384418488 -15.500000000 -32.384422302 -36.384422302 -14.500000000 -32.384422302 35.384418488 -14.500000000 -32.384422302 -36.384422302 -13.500000000 -32.384422302 35.384418488 -13.500000000 -32.384422302 -36.384422302 -12.500000000 -32.384422302 35.384418488 -12.500000000 -32.384422302 -36.384422302 -11.500000000 -32.384422302 35.384418488 -11.500000000 -32.384422302 -36.384422302 -10.500000000 -32.384422302 35.384418488 -10.500000000 -32.384422302 -36.384422302 -9.500000000 -32.384422302 35.384418488 -9.500000000 -32.384422302 -36.384422302 -8.500000000 -32.384422302 35.384418488 
-8.500000000 -32.384422302 -36.384422302 -7.500000000 -32.384422302 35.384418488 -7.500000000 -32.384422302 -36.384422302 -6.500000000 -32.384422302 35.384418488 -6.500000000 -32.384422302 -36.384422302 -5.500000000 -32.384422302 35.384418488 -5.500000000 -32.384422302 -36.384422302 -4.500000000 -32.384422302 35.384418488 -4.500000000 -32.384422302 -36.384422302 -3.500000000 -32.384422302 35.384418488 -3.500000000 -32.384422302 -36.384422302 -2.500000000 -32.384422302 35.384418488 -2.500000000 -32.384422302 -36.384422302 -1.500000000 -32.384422302 35.384418488 -1.500000000 -32.384422302 -36.384422302 -0.500000000 -32.384422302 35.384418488 -0.500000000 -32.384422302 -36.384422302 0.500000000 -32.384422302 35.384418488 0.500000000 -32.384422302 -36.384422302 1.500000000 -32.384422302 35.384418488 1.500000000 -32.384422302 -36.384422302 2.500000000 -32.384422302 35.384418488 2.500000000 -32.384422302 -36.384422302 3.500000000 -32.384422302 35.384418488 3.500000000 -32.384422302 -36.384422302 4.500000000 -32.384422302 35.384418488 4.500000000 -32.384422302 -36.384422302 5.500000000 -32.384422302 35.384418488 5.500000000 -32.384422302 -36.384422302 6.500000000 -32.384422302 35.384418488 6.500000000 -32.384422302 -36.384422302 7.500000000 -32.384422302 35.384418488 7.500000000 -32.384422302 -36.384422302 8.500000000 -32.384422302 35.384418488 8.500000000 -32.384422302 -36.384422302 9.500000000 -32.384422302 35.384418488 9.500000000 -32.384422302 -36.384422302 10.500000000 -32.384422302 35.384418488 10.500000000 -32.384422302 -36.384422302 11.500000000 -32.384422302 35.384418488 11.500000000 -32.384422302 -36.384422302 12.500000000 -32.384422302 35.384418488 12.500000000 -32.384422302 -36.384422302 13.500000000 -32.384422302 35.384418488 13.500000000 -32.384422302 -36.384422302 14.500000000 -32.384422302 35.384418488 14.500000000 -32.384422302 -36.384422302 15.500000000 -32.384422302 35.384418488 15.500000000 -32.384422302 -36.384422302 16.500000000 -32.384422302 
35.384418488 16.500000000 -32.384422302 -36.384422302 17.500000000 -32.384422302 35.384418488 17.500000000 -32.384422302 -36.384422302 18.500000000 -32.384422302 35.384418488 18.500000000 -32.384422302 -36.384422302 19.500000000 -32.384422302 35.384418488 19.500000000 -32.384422302 -36.384422302 20.500000000 -32.384422302 35.384418488 20.500000000 -32.384422302 -36.384422302 21.500000000 -32.384422302 35.384418488 21.500000000 -32.384422302 -36.384422302 22.500000000 -32.384422302 35.384418488 22.500000000 -32.384422302 -36.384422302 23.500000000 -32.384422302 35.384418488 23.500000000 -32.384422302 -36.384407043 24.499996185 -32.384418488 35.384407043 24.499996185 -32.384422302 -36.384307861 25.499948502 -32.384403229 35.384307861 25.499948502 -32.384407043 -36.383720398 26.499622345 -32.384300232 35.383720398 26.499618530 -32.384300232 -36.381023407 27.498050690 -32.383811951 35.381023407 27.498050690 -32.383811951 -36.371490479 28.492321014 -32.382312775 35.371490479 28.492321014 -32.382316589 -36.344287872 29.476007462 -32.379222870 35.344280243 29.476007462 -32.379222870 -36.280395508 30.438953400 -32.375358582 35.280391693 30.438953400 -32.375358582 -36.154674530 31.371026993 -32.371025085 35.154674530 31.371026993 -32.371025085 -35.954723358 32.254276276 -32.334243774 -35.670322418 32.678356171 -32.736049652 34.670322418 32.678356171 -32.736053467 34.954723358 32.254276276 -32.334247589 -35.797355652 32.840934753 -32.035339355 -35.428535461 33.260543823 -32.380737305 34.428531647 33.260543823 -32.380737305 34.797351837 32.840934753 -32.035343170 -34.912067413 33.912067413 -32.313598633 -34.260547638 34.428531647 -32.380737305 -33.678352356 34.670322418 -32.736053467 32.678352356 34.670326233 -32.736053467 33.260543823 34.428535461 -32.380737305 33.912059784 33.912059784 -32.313602448 -33.840934753 34.797355652 -32.035343170 -33.254276276 34.954719543 -32.334243774 -32.371025085 35.154674530 -32.371025085 -31.438953400 35.280395508 -32.375358582 -30.476003647 
35.344284058 -32.379226685 -29.492319107 35.371490479 -32.382312775 -28.498050690 35.381023407 -32.383808136 -27.499622345 35.383724213 -32.384296417 -26.499948502 35.384307861 -32.384403229 -25.499996185 35.384407043 -32.384418488 -24.500000000 35.384422302 -32.384422302 -23.500000000 35.384422302 -32.384422302 -22.500000000 35.384422302 -32.384422302 -21.500000000 35.384422302 -32.384422302 -20.500000000 35.384422302 -32.384422302 -19.500000000 35.384422302 -32.384422302 -18.500000000 35.384422302 -32.384422302 -17.500000000 35.384422302 -32.384422302 -16.500000000 35.384422302 -32.384422302 -15.500000000 35.384422302 -32.384422302 -14.500000000 35.384422302 -32.384422302 -13.500000000 35.384422302 -32.384422302 -12.500000000 35.384422302 -32.384422302 -11.500000000 35.384422302 -32.384422302 -10.500000000 35.384422302 -32.384422302 -9.500000000 35.384422302 -32.384422302 -8.500000000 35.384422302 -32.384422302 -7.500000000 35.384422302 -32.384422302 -6.500000000 35.384422302 -32.384422302 -5.500000000 35.384422302 -32.384422302 -4.500000000 35.384422302 -32.384422302 -3.500000000 35.384422302 -32.384422302 -2.500000000 35.384422302 -32.384422302 -1.500000000 35.384422302 -32.384422302 -0.500000000 35.384422302 -32.384422302 0.500000000 35.384422302 -32.384422302 1.500000000 35.384422302 -32.384422302 2.500000000 35.384422302 -32.384422302 3.500000000 35.384422302 -32.384422302 4.500000000 35.384422302 -32.384422302 5.500000000 35.384422302 -32.384422302 6.500000000 35.384422302 -32.384422302 7.500000000 35.384422302 -32.384422302 8.500000000 35.384422302 -32.384422302 9.500000000 35.384422302 -32.384422302 10.500000000 35.384422302 -32.384422302 11.500000000 35.384422302 -32.384422302 12.500000000 35.384422302 -32.384422302 13.500000000 35.384422302 -32.384422302 14.500000000 35.384422302 -32.384422302 15.500000000 35.384422302 -32.384422302 16.500000000 35.384422302 -32.384422302 17.500000000 35.384422302 -32.384422302 18.500000000 35.384422302 -32.384422302 
19.500000000 35.384422302 -32.384422302 20.500000000 35.384422302 -32.384422302 21.500000000 35.384422302 -32.384422302 22.500000000 35.384422302 -32.384422302 23.500000000 35.384422302 -32.384422302 24.499996185 35.384407043 -32.384418488 25.499948502 35.384307861 -32.384403229 26.499622345 35.383720398 -32.384296417 27.498050690 35.381023407 -32.383808136 28.492321014 35.371490479 -32.382312775 29.476007462 35.344287872 -32.379222870 30.438953400 35.280395508 -32.375358582 31.371026993 35.154674530 -32.371025085 32.254276276 34.954723358 -32.334243774 32.840934753 34.797355652 -32.035346985 -33.903377533 -35.879737854 -31.336599350 -33.216838837 -36.112346649 -31.405673981 -32.375358582 -36.280395508 -31.438953400 -31.451217651 -36.380989075 -31.451221466 -30.483785629 -36.430477142 -31.456085205 -29.495611191 -36.450424194 -31.458078384 -28.499073029 -36.456939697 -31.458770752 -27.499858856 -36.458606720 -31.458948135 -26.499988556 -36.458930969 -31.458978653 -25.500000000 -36.458976746 -31.458980560 -24.500000000 -36.458976746 -31.458980560 -23.500000000 -36.458976746 -31.458980560 -22.500000000 -36.458976746 -31.458980560 -21.500000000 -36.458976746 -31.458980560 -20.500000000 -36.458976746 -31.458980560 -19.500000000 -36.458976746 -31.458980560 -18.500000000 -36.458976746 -31.458980560 -17.500000000 -36.458976746 -31.458980560 -16.500000000 -36.458976746 -31.458980560 -15.500000000 -36.458976746 -31.458980560 -14.500000000 -36.458976746 -31.458980560 -13.500000000 -36.458976746 -31.458980560 -12.500000000 -36.458976746 -31.458980560 -11.500000000 -36.458976746 -31.458980560 -10.500000000 -36.458976746 -31.458980560 -9.500000000 -36.458976746 -31.458980560 -8.500000000 -36.458976746 -31.458980560 -7.500000000 -36.458976746 -31.458980560 -6.500000000 -36.458976746 -31.458980560 -5.500000000 -36.458976746 -31.458980560 -4.500000000 -36.458976746 -31.458980560 -3.500000000 -36.458976746 -31.458980560 -2.500000000 -36.458976746 -31.458980560 -1.500000000 
-36.458976746 -31.458980560 -0.500000000 -36.458976746 -31.458980560 0.500000000 -36.458976746 -31.458980560 1.500000000 -36.458976746 -31.458980560 2.500000000 -36.458976746 -31.458980560 3.500000000 -36.458976746 -31.458980560 4.500000000 -36.458976746 -31.458980560 5.500000000 -36.458976746 -31.458980560 6.500000000 -36.458976746 -31.458980560 7.500000000 -36.458976746 -31.458980560 8.500000000 -36.458976746 -31.458980560 9.500000000 -36.458976746 -31.458980560 10.500000000 -36.458976746 -31.458980560 11.500000000 -36.458976746 -31.458980560 12.500000000 -36.458976746 -31.458980560 13.500000000 -36.458976746 -31.458980560 14.500000000 -36.458976746 -31.458980560 15.500000000 -36.458976746 -31.458980560 16.500000000 -36.458976746 -31.458980560 17.500000000 -36.458976746 -31.458980560 18.500000000 -36.458976746 -31.458980560 19.500000000 -36.458976746 -31.458980560 20.500000000 -36.458976746 -31.458980560 21.500000000 -36.458976746 -31.458980560 22.500000000 -36.458976746 -31.458980560 23.500000000 -36.458976746 -31.458980560 24.500000000 -36.458976746 -31.458980560 25.499988556 -36.458930969 -31.458978653 26.499858856 -36.458606720 -31.458948135 27.499073029 -36.456939697 -31.458770752 28.495611191 -36.450424194 -31.458080292 29.483785629 -36.430473328 -31.456085205 30.451217651 -36.380989075 -31.451217651 31.375360489 -36.280395508 -31.438953400 32.216842651 -36.112346649 -31.405673981 32.903377533 -35.879737854 -31.336599350 -35.040893555 -35.040893555 -31.413045883 -34.477191925 -35.518615723 -31.410831451 33.477191925 -35.518611908 -31.410831451 34.040893555 -35.040893555 -31.413045883 -35.879737854 -33.903373718 -31.336599350 -35.518615723 -34.477191925 -31.410831451 34.518611908 -34.477191925 -31.410831451 34.879737854 -33.903381348 -31.336599350 -36.112342834 -33.216842651 -31.405673981 35.112346649 -33.216842651 -31.405673981 -36.280395508 -32.375358582 -31.438953400 35.280395508 -32.375358582 -31.438955307 -36.380989075 -31.451213837 -31.451221466 
35.380989075 -31.451217651 -31.451221466 -36.430477142 -30.483785629 -31.456089020 35.430477142 -30.483785629 -31.456085205 -36.450424194 -29.495611191 -31.458080292 35.450424194 -29.495611191 -31.458078384 -36.456943512 -28.499073029 -31.458770752 35.456939697 -28.499073029 -31.458770752 -36.458606720 -27.499858856 -31.458948135 35.458606720 -27.499858856 -31.458948135 -36.458934784 -26.499988556 -31.458974838 35.458930969 -26.499988556 -31.458978653 -36.458976746 -25.500000000 -31.458978653 35.458976746 -25.500000000 -31.458980560 -36.458984375 -24.500000000 -31.458978653 35.458976746 -24.500000000 -31.458980560 -36.458984375 -23.500000000 -31.458978653 35.458976746 -23.500000000 -31.458980560 -36.458984375 -22.500000000 -31.458978653 35.458976746 -22.500000000 -31.458980560 -36.458984375 -21.500000000 -31.458978653 35.458976746 -21.500000000 -31.458980560 -36.458984375 -20.500000000 -31.458978653 35.458976746 -20.500000000 -31.458980560 -36.458984375 -19.500000000 -31.458978653 35.458976746 -19.500000000 -31.458980560 -36.458984375 -18.500000000 -31.458978653 35.458976746 -18.500000000 -31.458980560 -36.458984375 -17.500000000 -31.458978653 35.458976746 -17.500000000 -31.458980560 -36.458984375 -16.500000000 -31.458978653 35.458976746 -16.500000000 -31.458980560 -36.458984375 -15.500000000 -31.458978653 35.458976746 -15.500000000 -31.458980560 -36.458984375 -14.500000000 -31.458978653 35.458976746 -14.500000000 -31.458980560 -36.458984375 -13.500000000 -31.458978653 35.458976746 -13.500000000 -31.458980560 -36.458984375 -12.500000000 -31.458978653 35.458976746 -12.500000000 -31.458980560 -36.458984375 -11.500000000 -31.458978653 35.458976746 -11.500000000 -31.458980560 -36.458984375 -10.500000000 -31.458978653 35.458976746 -10.500000000 -31.458980560 -36.458984375 -9.500000000 -31.458978653 35.458976746 -9.500000000 -31.458980560 -36.458984375 -8.500000000 -31.458978653 35.458976746 -8.500000000 -31.458980560 -36.458984375 -7.500000000 -31.458978653 35.458976746 
-7.500000000 -31.458980560 -36.458984375 -6.500000000 -31.458978653 35.458976746 -6.500000000 -31.458980560 -36.458984375 -5.500000000 -31.458978653 35.458976746 -5.500000000 -31.458980560 -36.458984375 -4.500000000 -31.458978653 35.458976746 -4.500000000 -31.458980560 -36.458984375 -3.500000000 -31.458978653 35.458976746 -3.500000000 -31.458980560 -36.458984375 -2.500000000 -31.458978653 35.458976746 -2.500000000 -31.458980560 -36.458984375 -1.500000000 -31.458978653 35.458976746 -1.500000000 -31.458980560 -36.458984375 -0.500000000 -31.458978653 35.458976746 -0.500000000 -31.458980560 -36.458984375 0.500000000 -31.458978653 35.458976746 0.500000000 -31.458980560 -36.458984375 1.500000000 -31.458978653 35.458976746 1.500000000 -31.458980560 -36.458984375 2.500000000 -31.458978653 35.458976746 2.500000000 -31.458980560 -36.458984375 3.500000000 -31.458978653 35.458976746 3.500000000 -31.458980560 -36.458984375 4.500000000 -31.458978653 35.458976746 4.500000000 -31.458980560 -36.458984375 5.500000000 -31.458978653 35.458976746 5.500000000 -31.458980560 -36.458984375 6.500000000 -31.458978653 35.458976746 6.500000000 -31.458980560 -36.458984375 7.500000000 -31.458978653 35.458976746 7.500000000 -31.458980560 -36.458984375 8.500000000 -31.458978653 35.458976746 8.500000000 -31.458980560 -36.458984375 9.500000000 -31.458978653 35.458976746 9.500000000 -31.458980560 -36.458984375 10.500000000 -31.458978653 35.458976746 10.500000000 -31.458980560 -36.458984375 11.500000000 -31.458978653 35.458976746 11.500000000 -31.458980560 -36.458984375 12.500000000 -31.458978653 35.458976746 12.500000000 -31.458980560 -36.458984375 13.500000000 -31.458978653 35.458976746 13.500000000 -31.458980560 -36.458984375 14.500000000 -31.458978653 35.458976746 14.500000000 -31.458980560 -36.458984375 15.500000000 -31.458978653 35.458976746 15.500000000 -31.458980560 -36.458984375 16.500000000 -31.458978653 35.458976746 16.500000000 -31.458980560 -36.458984375 17.500000000 -31.458978653 
35.458976746 17.500000000 -31.458980560 -36.458984375 18.500000000 -31.458978653 35.458976746 18.500000000 -31.458980560 -36.458984375 19.500000000 -31.458978653 35.458976746 19.500000000 -31.458980560 -36.458984375 20.500000000 -31.458978653 35.458976746 20.500000000 -31.458980560 -36.458984375 21.500000000 -31.458978653 35.458976746 21.500000000 -31.458980560 -36.458984375 22.500000000 -31.458978653 35.458976746 22.500000000 -31.458980560 -36.458984375 23.500000000 -31.458978653 35.458976746 23.500000000 -31.458980560 -36.458976746 24.500000000 -31.458978653 35.458976746 24.500000000 -31.458980560 -36.458934784 25.499988556 -31.458974838 35.458930969 25.499988556 -31.458978653 -36.458606720 26.499858856 -31.458948135 35.458606720 26.499858856 -31.458948135 -36.456943512 27.499073029 -31.458770752 35.456939697 27.499073029 -31.458770752 -36.450424194 28.495611191 -31.458080292 35.450424194 28.495611191 -31.458080292 -36.430473328 29.483785629 -31.456085205 35.430473328 29.483785629 -31.456085205 -36.380989075 30.451217651 -31.451217651 35.380989075 30.451217651 -31.451217651 -36.280395508 31.375360489 -31.438953400 35.280395508 31.375360489 -31.438953400 -36.112342834 32.216842651 -31.405673981 35.112346649 32.216842651 -31.405673981 -35.879737854 32.903377533 -31.336603165 -35.518615723 33.477191925 -31.410833359 34.518611908 33.477191925 -31.410831451 34.879737854 32.903377533 -31.336599350 -35.040893555 34.040893555 -31.413049698 -34.477191925 34.518615723 -31.410831451 33.477191925 34.518611908 -31.410831451 34.040893555 34.040893555 -31.413045883 -33.903373718 34.879737854 -31.336599350 -33.216842651 35.112342834 -31.405673981 -32.375358582 35.280395508 -31.438953400 -31.451213837 35.380989075 -31.451221466 -30.483785629 35.430477142 -31.456089020 -29.495611191 35.450424194 -31.458080292 -28.499073029 35.456943512 -31.458766937 -27.499858856 35.458606720 -31.458948135 -26.499988556 35.458934784 -31.458974838 -25.500000000 35.458976746 -31.458978653 
-24.500000000 35.458984375 -31.458978653 -23.500000000 35.458984375 -31.458978653 -22.500000000 35.458984375 -31.458978653 -21.500000000 35.458984375 -31.458978653 -20.500000000 35.458984375 -31.458978653 -19.500000000 35.458984375 -31.458978653 -18.500000000 35.458984375 -31.458978653 -17.500000000 35.458984375 -31.458978653 -16.500000000 35.458984375 -31.458978653 -15.500000000 35.458984375 -31.458978653 -14.500000000 35.458984375 -31.458978653 -13.500000000 35.458984375 -31.458978653 -12.500000000 35.458984375 -31.458978653 -11.500000000 35.458984375 -31.458978653 -10.500000000 35.458984375 -31.458978653 -9.500000000 35.458984375 -31.458978653 -8.500000000 35.458984375 -31.458978653 -7.500000000 35.458984375 -31.458978653 -6.500000000 35.458984375 -31.458978653 -5.500000000 35.458984375 -31.458978653 -4.500000000 35.458984375 -31.458978653 -3.500000000 35.458984375 -31.458978653 -2.500000000 35.458984375 -31.458978653 -1.500000000 35.458984375 -31.458978653 -0.500000000 35.458984375 -31.458978653 0.500000000 35.458984375 -31.458978653 1.500000000 35.458984375 -31.458978653 2.500000000 35.458984375 -31.458978653 3.500000000 35.458984375 -31.458978653 4.500000000 35.458984375 -31.458978653 5.500000000 35.458984375 -31.458978653 6.500000000 35.458984375 -31.458978653 7.500000000 35.458984375 -31.458978653 8.500000000 35.458984375 -31.458978653 9.500000000 35.458984375 -31.458978653 10.500000000 35.458984375 -31.458978653 11.500000000 35.458984375 -31.458978653 12.500000000 35.458984375 -31.458978653 13.500000000 35.458984375 -31.458978653 14.500000000 35.458984375 -31.458978653 15.500000000 35.458984375 -31.458978653 16.500000000 35.458984375 -31.458978653 17.500000000 35.458984375 -31.458978653 18.500000000 35.458984375 -31.458978653 19.500000000 35.458984375 -31.458978653 20.500000000 35.458984375 -31.458978653 21.500000000 35.458984375 -31.458978653 22.500000000 35.458984375 -31.458978653 23.500000000 35.458984375 -31.458978653 24.500000000 35.458976746 
-31.458978653 25.499988556 35.458934784 -31.458974838 26.499858856 35.458606720 -31.458948135 27.499073029 35.456943512 -31.458770752 28.495611191 35.450424194 -31.458078384 29.483785629 35.430473328 -31.456085205 30.451217651 35.380989075 -31.451217651 31.375356674 35.280395508 -31.438953400 32.216842651 35.112342834 -31.405673981 32.903377533 34.879737854 -31.336599350 -33.946811676 -35.937088013 -30.442840576 -33.220714569 -36.185966492 -30.460596085 -32.379222870 -36.344284058 -30.476007462 -31.456081390 -36.430477142 -30.483789444 -30.486965179 -36.469234467 -30.486968994 -29.496957779 -36.483337402 -30.488048553 -28.499475479 -36.487373352 -30.488346100 -27.499938965 -36.488258362 -30.488399506 -26.499996185 -36.488391876 -30.488403320 -25.500000000 -36.488403320 -30.488403320 -24.500000000 -36.488403320 -30.488403320 -23.500000000 -36.488403320 -30.488403320 -22.500000000 -36.488403320 -30.488403320 -21.500000000 -36.488403320 -30.488403320 -20.500000000 -36.488403320 -30.488403320 -19.500000000 -36.488403320 -30.488403320 -18.500000000 -36.488403320 -30.488403320 -17.500000000 -36.488403320 -30.488403320 -16.500000000 -36.488403320 -30.488403320 -15.500000000 -36.488403320 -30.488403320 -14.500000000 -36.488403320 -30.488403320 -13.500000000 -36.488403320 -30.488403320 -12.500000000 -36.488403320 -30.488403320 -11.500000000 -36.488403320 -30.488403320 -10.500000000 -36.488403320 -30.488403320 -9.500000000 -36.488403320 -30.488403320 -8.500000000 -36.488403320 -30.488403320 -7.500000000 -36.488403320 -30.488403320 -6.500000000 -36.488403320 -30.488403320 -5.500000000 -36.488403320 -30.488403320 -4.500000000 -36.488403320 -30.488403320 -3.500000000 -36.488403320 -30.488403320 -2.500000000 -36.488403320 -30.488403320 -1.500000000 -36.488403320 -30.488403320 -0.500000000 -36.488403320 -30.488403320 0.500000000 -36.488403320 -30.488403320 1.500000000 -36.488403320 -30.488403320 2.500000000 -36.488403320 -30.488403320 3.500000000 -36.488403320 -30.488403320 
4.500000000 -36.488403320 -30.488403320 5.500000000 -36.488403320 -30.488403320 6.500000000 -36.488403320 -30.488403320 7.500000000 -36.488403320 -30.488403320 8.500000000 -36.488403320 -30.488403320 9.500000000 -36.488403320 -30.488403320 10.500000000 -36.488403320 -30.488403320 11.500000000 -36.488403320 -30.488403320 12.500000000 -36.488403320 -30.488403320 13.500000000 -36.488403320 -30.488403320 14.500000000 -36.488403320 -30.488403320 15.500000000 -36.488403320 -30.488403320 16.500000000 -36.488403320 -30.488403320 17.500000000 -36.488403320 -30.488403320 18.500000000 -36.488403320 -30.488403320 19.500000000 -36.488403320 -30.488403320 20.500000000 -36.488403320 -30.488403320 21.500000000 -36.488403320 -30.488403320 22.500000000 -36.488403320 -30.488403320 23.500000000 -36.488403320 -30.488403320 24.500000000 -36.488403320 -30.488403320 25.499996185 -36.488391876 -30.488403320 26.499938965 -36.488258362 -30.488399506 27.499475479 -36.487373352 -30.488346100 28.496959686 -36.483337402 -30.488048553 29.486968994 -36.469234467 -30.486968994 30.456085205 -36.430473328 -30.483785629 31.379222870 -36.344284058 -30.476003647 32.220714569 -36.185966492 -30.460596085 32.946811676 -35.937088013 -30.442840576 -35.115646362 -35.115638733 -30.463253021 -34.567749023 -35.577739716 -30.459178925 33.567749023 -35.577739716 -30.459178925 34.115646362 -35.115642548 -30.463253021 -35.937091827 -33.946807861 -30.442840576 -35.577739716 -34.567741394 -30.459177017 34.577739716 -34.567749023 -30.459177017 34.937091827 -33.946811676 -30.442840576 -36.185966492 -33.220714569 -30.460592270 35.185966492 -33.220714569 -30.460596085 -36.344284058 -32.379222870 -30.476007462 35.344284058 -32.379222870 -30.476007462 -36.430477142 -31.456085205 -30.483789444 35.430477142 -31.456081390 -30.483789444 -36.469234467 -30.486968994 -30.486968994 35.469234467 -30.486965179 -30.486968994 -36.483337402 -29.496957779 -30.488048553 35.483337402 -29.496957779 -30.488048553 -36.487373352 -28.499475479 
-30.488342285 35.487373352 -28.499475479 -30.488346100 -36.488258362 -27.499938965 -30.488397598 35.488258362 -27.499938965 -30.488399506 -36.488391876 -26.499996185 -30.488403320 35.488391876 -26.499996185 -30.488403320 -36.488410950 -25.500000000 -30.488403320 35.488403320 -25.500000000 -30.488403320 -36.488410950 -24.500000000 -30.488403320 35.488403320 -24.500000000 -30.488403320 -36.488410950 -23.500000000 -30.488403320 35.488403320 -23.500000000 -30.488403320 -36.488410950 -22.500000000 -30.488403320 35.488403320 -22.500000000 -30.488403320 -36.488410950 -21.500000000 -30.488403320 35.488403320 -21.500000000 -30.488403320 -36.488410950 -20.500000000 -30.488403320 35.488403320 -20.500000000 -30.488403320 -36.488410950 -19.500000000 -30.488403320 35.488403320 -19.500000000 -30.488403320 -36.488410950 -18.500000000 -30.488403320 35.488403320 -18.500000000 -30.488403320 -36.488410950 -17.500000000 -30.488403320 35.488403320 -17.500000000 -30.488403320 -36.488410950 -16.500000000 -30.488403320 35.488403320 -16.500000000 -30.488403320 -36.488410950 -15.500000000 -30.488403320 35.488403320 -15.500000000 -30.488403320 -36.488410950 -14.500000000 -30.488403320 35.488403320 -14.500000000 -30.488403320 -36.488410950 -13.500000000 -30.488403320 35.488403320 -13.500000000 -30.488403320 -36.488410950 -12.500000000 -30.488403320 35.488403320 -12.500000000 -30.488403320 -36.488410950 -11.500000000 -30.488403320 35.488403320 -11.500000000 -30.488403320 -36.488410950 -10.500000000 -30.488403320 35.488403320 -10.500000000 -30.488403320 -36.488410950 -9.500000000 -30.488403320 35.488403320 -9.500000000 -30.488403320 -36.488410950 -8.500000000 -30.488403320 35.488403320 -8.500000000 -30.488403320 -36.488410950 -7.500000000 -30.488403320 35.488403320 -7.500000000 -30.488403320 -36.488410950 -6.500000000 -30.488403320 35.488403320 -6.500000000 -30.488403320 -36.488410950 -5.500000000 -30.488403320 35.488403320 -5.500000000 -30.488403320 -36.488410950 -4.500000000 -30.488403320 
35.488403320 -4.500000000 -30.488403320 -36.488410950 -3.500000000 -30.488403320 35.488403320 -3.500000000 -30.488403320 -36.488410950 -2.500000000 -30.488403320 35.488403320 -2.500000000 -30.488403320 -36.488410950 -1.500000000 -30.488403320 35.488403320 -1.500000000 -30.488403320 -36.488410950 -0.500000000 -30.488403320 35.488403320 -0.500000000 -30.488403320 -36.488410950 0.500000000 -30.488403320 35.488403320 0.500000000 -30.488403320 -36.488410950 1.500000000 -30.488403320 35.488403320 1.500000000 -30.488403320 -36.488410950 2.500000000 -30.488403320 35.488403320 2.500000000 -30.488403320 -36.488410950 3.500000000 -30.488403320 35.488403320 3.500000000 -30.488403320 -36.488410950 4.500000000 -30.488403320 35.488403320 4.500000000 -30.488403320 -36.488410950 5.500000000 -30.488403320 35.488403320 5.500000000 -30.488403320 -36.488410950 6.500000000 -30.488403320 35.488403320 6.500000000 -30.488403320 -36.488410950 7.500000000 -30.488403320 35.488403320 7.500000000 -30.488403320 -36.488410950 8.500000000 -30.488403320 35.488403320 8.500000000 -30.488403320 -36.488410950 9.500000000 -30.488403320 35.488403320 9.500000000 -30.488403320 -36.488410950 10.500000000 -30.488403320 35.488403320 10.500000000 -30.488403320 -36.488410950 11.500000000 -30.488403320 35.488403320 11.500000000 -30.488403320 -36.488410950 12.500000000 -30.488403320 35.488403320 12.500000000 -30.488403320 -36.488410950 13.500000000 -30.488403320 35.488403320 13.500000000 -30.488403320 -36.488410950 14.500000000 -30.488403320 35.488403320 14.500000000 -30.488403320 -36.488410950 15.500000000 -30.488403320 35.488403320 15.500000000 -30.488403320 -36.488410950 16.500000000 -30.488403320 35.488403320 16.500000000 -30.488403320 -36.488410950 17.500000000 -30.488403320 35.488403320 17.500000000 -30.488403320 -36.488410950 18.500000000 -30.488403320 35.488403320 18.500000000 -30.488403320 -36.488410950 19.500000000 -30.488403320 35.488403320 19.500000000 -30.488403320 -36.488410950 20.500000000 
-30.488403320 35.488403320 20.500000000 -30.488403320 -36.488410950 21.500000000 -30.488403320 35.488403320 21.500000000 -30.488403320 -36.488410950 22.500000000 -30.488403320 35.488403320 22.500000000 -30.488403320 -36.488410950 23.500000000 -30.488403320 35.488403320 23.500000000 -30.488403320 -36.488410950 24.500000000 -30.488403320 35.488403320 24.500000000 -30.488403320 -36.488395691 25.499996185 -30.488403320 35.488391876 25.499996185 -30.488403320 -36.488258362 26.499938965 -30.488399506 35.488258362 26.499938965 -30.488399506 -36.487373352 27.499477386 -30.488346100 35.487373352 27.499475479 -30.488346100 -36.483337402 28.496959686 -30.488052368 35.483337402 28.496959686 -30.488048553 -36.469234467 29.486968994 -30.486968994 35.469234467 29.486968994 -30.486968994 -36.430477142 30.456085205 -30.483785629 35.430473328 30.456085205 -30.483785629 -36.344284058 31.379222870 -30.476007462 35.344284058 31.379222870 -30.476003647 -36.185966492 32.220714569 -30.460596085 35.185966492 32.220714569 -30.460596085 -35.937088013 32.946811676 -30.442840576 -35.577739716 33.567749023 -30.459178925 34.577739716 33.567749023 -30.459178925 34.937088013 32.946811676 -30.442840576 -35.115638733 34.115646362 -30.463253021 -34.567741394 34.577739716 -30.459177017 33.567749023 34.577739716 -30.459177017 34.115642548 34.115646362 -30.463253021 -33.946807861 34.937091827 -30.442840576 -33.220714569 35.185966492 -30.460592270 -32.379222870 35.344284058 -30.476007462 -31.456085205 35.430477142 -30.483789444 -30.486968994 35.469234467 -30.486968994 -29.496957779 35.483337402 -30.488048553 -28.499475479 35.487373352 -30.488342285 -27.499938965 35.488258362 -30.488397598 -26.499996185 35.488391876 -30.488403320 -25.500000000 35.488410950 -30.488403320 -24.500000000 35.488410950 -30.488403320 -23.500000000 35.488410950 -30.488403320 -22.500000000 35.488410950 -30.488403320 -21.500000000 35.488410950 -30.488403320 -20.500000000 35.488410950 -30.488403320 -19.500000000 35.488410950 
-30.488403320 -18.500000000 35.488410950 -30.488403320 -17.500000000 35.488410950 -30.488403320 -16.500000000 35.488410950 -30.488403320 -15.500000000 35.488410950 -30.488403320 -14.500000000 35.488410950 -30.488403320 -13.500000000 35.488410950 -30.488403320 -12.500000000 35.488410950 -30.488403320 -11.500000000 35.488410950 -30.488403320 -10.500000000 35.488410950 -30.488403320 -9.500000000 35.488410950 -30.488403320 -8.500000000 35.488410950 -30.488403320 -7.500000000 35.488410950 -30.488403320 -6.500000000 35.488410950 -30.488403320 -5.500000000 35.488410950 -30.488403320 -4.500000000 35.488410950 -30.488403320 -3.500000000 35.488410950 -30.488403320 -2.500000000 35.488410950 -30.488403320 -1.500000000 35.488410950 -30.488403320 -0.500000000 35.488410950 -30.488403320 0.500000000 35.488410950 -30.488403320 1.500000000 35.488410950 -30.488403320 2.500000000 35.488410950 -30.488403320 3.500000000 35.488410950 -30.488403320 4.500000000 35.488410950 -30.488403320 5.500000000 35.488410950 -30.488403320 6.500000000 35.488410950 -30.488403320 7.500000000 35.488410950 -30.488403320 8.500000000 35.488410950 -30.488403320 9.500000000 35.488410950 -30.488403320 10.500000000 35.488410950 -30.488403320 11.500000000 35.488410950 -30.488403320 12.500000000 35.488410950 -30.488403320 13.500000000 35.488410950 -30.488403320 14.500000000 35.488410950 -30.488403320 15.500000000 35.488410950 -30.488403320 16.500000000 35.488410950 -30.488403320 17.500000000 35.488410950 -30.488403320 18.500000000 35.488410950 -30.488403320 19.500000000 35.488410950 -30.488403320 20.500000000 35.488410950 -30.488403320 21.500000000 35.488410950 -30.488403320 22.500000000 35.488410950 -30.488403320 23.500000000 35.488410950 -30.488403320 24.500000000 35.488410950 -30.488403320 25.499996185 35.488395691 -30.488403320 26.499938965 35.488258362 -30.488399506 27.499477386 35.487373352 -30.488346100 28.496959686 35.483337402 -30.488052368 29.486968994 35.469234467 -30.486968994 30.456081390 35.430477142 
-30.483785629 31.379222870 35.344284058 -30.476007462 32.220714569 35.185966492 -30.460596085 32.946811676 34.937091827 -30.442840576 -33.968864441 -35.965759277 -29.481937408 -33.227874756 -36.217510223 -29.486719131 -32.382308960 -36.371490479 -29.492319107 -31.458078384 -36.450428009 -29.495611191 -30.488048553 -36.483337402 -29.496957779 -29.497375488 -36.494174957 -29.497371674 -28.499578476 -36.496910095 -29.497461319 -27.499954224 -36.497409821 -29.497470856 -26.499996185 -36.497467041 -29.497470856 -25.500000000 -36.497474670 -29.497470856 -24.500000000 -36.497474670 -29.497470856 -23.500000000 -36.497474670 -29.497470856 -22.500000000 -36.497474670 -29.497470856 -21.500000000 -36.497474670 -29.497470856 -20.500000000 -36.497474670 -29.497470856 -19.500000000 -36.497474670 -29.497470856 -18.500000000 -36.497474670 -29.497470856 -17.500000000 -36.497474670 -29.497470856 -16.500000000 -36.497474670 -29.497470856 -15.500000000 -36.497474670 -29.497470856 -14.500000000 -36.497474670 -29.497470856 -13.500000000 -36.497474670 -29.497470856 -12.500000000 -36.497474670 -29.497470856 -11.500000000 -36.497474670 -29.497470856 -10.500000000 -36.497474670 -29.497470856 -9.500000000 -36.497474670 -29.497470856 -8.500000000 -36.497474670 -29.497470856 -7.500000000 -36.497474670 -29.497470856 -6.500000000 -36.497474670 -29.497470856 -5.500000000 -36.497474670 -29.497470856 -4.500000000 -36.497474670 -29.497470856 -3.500000000 -36.497474670 -29.497470856 -2.500000000 -36.497474670 -29.497470856 -1.500000000 -36.497474670 -29.497470856 -0.500000000 -36.497474670 -29.497470856 0.500000000 -36.497474670 -29.497470856 1.500000000 -36.497474670 -29.497470856 2.500000000 -36.497474670 -29.497470856 3.500000000 -36.497474670 -29.497470856 4.500000000 -36.497474670 -29.497470856 5.500000000 -36.497474670 -29.497470856 6.500000000 -36.497474670 -29.497470856 7.500000000 -36.497474670 -29.497470856 8.500000000 -36.497474670 -29.497470856 9.500000000 -36.497474670 -29.497470856 
10.500000000 -36.497474670 -29.497470856 11.500000000 -36.497474670 -29.497470856 12.500000000 -36.497474670 -29.497470856 13.500000000 -36.497474670 -29.497470856 14.500000000 -36.497474670 -29.497470856 15.500000000 -36.497474670 -29.497470856 16.500000000 -36.497474670 -29.497470856 17.500000000 -36.497474670 -29.497470856 18.500000000 -36.497474670 -29.497470856 19.500000000 -36.497474670 -29.497470856 20.500000000 -36.497474670 -29.497470856 21.500000000 -36.497474670 -29.497470856 22.500000000 -36.497474670 -29.497470856 23.500000000 -36.497474670 -29.497470856 24.500000000 -36.497474670 -29.497470856 25.499996185 -36.497470856 -29.497470856 26.499954224 -36.497406006 -29.497470856 27.499576569 -36.496910095 -29.497461319 28.497375488 -36.494174957 -29.497375488 29.488048553 -36.483337402 -29.496957779 30.458078384 -36.450428009 -29.495611191 31.382314682 -36.371490479 -29.492319107 32.227874756 -36.217510223 -29.486715317 32.968864441 -35.965759277 -29.481933594 -35.150257111 -35.150249481 -29.487293243 -34.604709625 -35.607204437 -29.485630035 33.604705811 -35.607208252 -29.485630035 34.150257111 -35.150253296 -29.487289429 -35.965759277 -33.968864441 -29.481937408 -35.607208252 -34.604705811 -29.485630035 34.607208252 -34.604709625 -29.485630035 34.965766907 -33.968864441 -29.481937408 -36.217514038 -33.227867126 -29.486715317 35.217514038 -33.227874756 -29.486719131 -36.371490479 -32.382312775 -29.492319107 35.371490479 -32.382308960 -29.492319107 -36.450428009 -31.458074570 -29.495611191 35.450428009 -31.458078384 -29.495611191 -36.483337402 -30.488048553 -29.496957779 35.483337402 -30.488048553 -29.496957779 -36.494171143 -29.497375488 -29.497371674 35.494174957 -29.497375488 -29.497371674 -36.496910095 -28.499578476 -29.497457504 35.496910095 -28.499578476 -29.497461319 -36.497406006 -27.499954224 -29.497470856 35.497409821 -27.499954224 -29.497470856 -36.497467041 -26.499996185 -29.497470856 35.497467041 -26.499996185 -29.497470856 -36.497470856 
-25.500000000 -29.497470856 35.497474670 -25.500000000 -29.497470856 -36.497470856 -24.500000000 -29.497470856 35.497474670 -24.500000000 -29.497470856 -36.497470856 -23.500000000 -29.497470856 35.497474670 -23.500000000 -29.497470856 -36.497470856 -22.500000000 -29.497470856 35.497474670 -22.500000000 -29.497470856 -36.497470856 -21.500000000 -29.497470856 35.497474670 -21.500000000 -29.497470856 -36.497470856 -20.500000000 -29.497470856 35.497474670 -20.500000000 -29.497470856 -36.497470856 -19.500000000 -29.497470856 35.497474670 -19.500000000 -29.497470856 -36.497470856 -18.500000000 -29.497470856 35.497474670 -18.500000000 -29.497470856 -36.497470856 -17.500000000 -29.497470856 35.497474670 -17.500000000 -29.497470856 -36.497470856 -16.500000000 -29.497470856 35.497474670 -16.500000000 -29.497470856 -36.497470856 -15.500000000 -29.497470856 35.497474670 -15.500000000 -29.497470856 -36.497470856 -14.500000000 -29.497470856 35.497474670 -14.500000000 -29.497470856 -36.497470856 -13.500000000 -29.497470856 35.497474670 -13.500000000 -29.497470856 -36.497470856 -12.500000000 -29.497470856 35.497474670 -12.500000000 -29.497470856 -36.497470856 -11.500000000 -29.497470856 35.497474670 -11.500000000 -29.497470856 -36.497470856 -10.500000000 -29.497470856 35.497474670 -10.500000000 -29.497470856 -36.497470856 -9.500000000 -29.497470856 35.497474670 -9.500000000 -29.497470856 -36.497470856 -8.500000000 -29.497470856 35.497474670 -8.500000000 -29.497470856 -36.497470856 -7.500000000 -29.497470856 35.497474670 -7.500000000 -29.497470856 -36.497470856 -6.500000000 -29.497470856 35.497474670 -6.500000000 -29.497470856 -36.497470856 -5.500000000 -29.497470856 35.497474670 -5.500000000 -29.497470856 -36.497470856 -4.500000000 -29.497470856 35.497474670 -4.500000000 -29.497470856 -36.497470856 -3.500000000 -29.497470856 35.497474670 -3.500000000 -29.497470856 -36.497470856 -2.500000000 -29.497470856 35.497474670 -2.500000000 -29.497470856 -36.497470856 -1.500000000 
-29.497470856 35.497474670 -1.500000000 -29.497470856 -36.497470856 -0.500000000 -29.497470856 35.497474670 -0.500000000 -29.497470856 -36.497470856 0.500000000 -29.497470856 35.497474670 0.500000000 -29.497470856 -36.497470856 1.500000000 -29.497470856 35.497474670 1.500000000 -29.497470856 -36.497470856 2.500000000 -29.497470856 35.497474670 2.500000000 -29.497470856 -36.497470856 3.500000000 -29.497470856 35.497474670 3.500000000 -29.497470856 -36.497470856 4.500000000 -29.497470856 35.497474670 4.500000000 -29.497470856 -36.497470856 5.500000000 -29.497470856 35.497474670 5.500000000 -29.497470856 -36.497470856 6.500000000 -29.497470856 35.497474670 6.500000000 -29.497470856 -36.497470856 7.500000000 -29.497470856 35.497474670 7.500000000 -29.497470856 -36.497470856 8.500000000 -29.497470856 35.497474670 8.500000000 -29.497470856 -36.497470856 9.500000000 -29.497470856 35.497474670 9.500000000 -29.497470856 -36.497470856 10.500000000 -29.497470856 35.497474670 10.500000000 -29.497470856 -36.497470856 11.500000000 -29.497470856 35.497474670 11.500000000 -29.497470856 -36.497470856 12.500000000 -29.497470856 35.497474670 12.500000000 -29.497470856 -36.497470856 13.500000000 -29.497470856 35.497474670 13.500000000 -29.497470856 -36.497470856 14.500000000 -29.497470856 35.497474670 14.500000000 -29.497470856 -36.497470856 15.500000000 -29.497470856 35.497474670 15.500000000 -29.497470856 -36.497470856 16.500000000 -29.497470856 35.497474670 16.500000000 -29.497470856 -36.497470856 17.500000000 -29.497470856 35.497474670 17.500000000 -29.497470856 -36.497470856 18.500000000 -29.497470856 35.497474670 18.500000000 -29.497470856 -36.497470856 19.500000000 -29.497470856 35.497474670 19.500000000 -29.497470856 -36.497470856 20.500000000 -29.497470856 35.497474670 20.500000000 -29.497470856 -36.497470856 21.500000000 -29.497470856 35.497474670 21.500000000 -29.497470856 -36.497470856 22.500000000 -29.497470856 35.497474670 22.500000000 -29.497470856 -36.497470856 
23.500000000 -29.497470856 35.497474670 23.500000000 -29.497470856 -36.497470856 24.500000000 -29.497470856 35.497474670 24.500000000 -29.497470856 -36.497467041 25.499996185 -29.497470856 35.497470856 25.499996185 -29.497470856 -36.497409821 26.499954224 -29.497470856 35.497406006 26.499954224 -29.497470856 -36.496910095 27.499576569 -29.497461319 35.496910095 27.499576569 -29.497461319 -36.494174957 28.497371674 -29.497371674 35.494174957 28.497375488 -29.497375488 -36.483337402 29.488048553 -29.496959686 35.483337402 29.488048553 -29.496957779 -36.450428009 30.458074570 -29.495611191 35.450428009 30.458078384 -29.495611191 -36.371490479 31.382312775 -29.492319107 35.371490479 31.382314682 -29.492319107 -36.217514038 32.227874756 -29.486715317 35.217510223 32.227874756 -29.486715317 -35.965759277 32.968864441 -29.481937408 -35.607204437 33.604709625 -29.485630035 34.607208252 33.604709625 -29.485630035 34.965759277 32.968864441 -29.481933594 -35.150249481 34.150257111 -29.487293243 -34.604705811 34.607208252 -29.485630035 33.604709625 34.607208252 -29.485630035 34.150253296 34.150257111 -29.487289429 -33.968864441 34.965759277 -29.481937408 -33.227867126 35.217514038 -29.486715317 -32.382312775 35.371490479 -29.492319107 -31.458074570 35.450428009 -29.495611191 -30.488048553 35.483337402 -29.496957779 -29.497375488 35.494171143 -29.497371674 -28.499578476 35.496910095 -29.497457504 -27.499954224 35.497406006 -29.497470856 -26.499996185 35.497467041 -29.497470856 -25.500000000 35.497470856 -29.497470856 -24.500000000 35.497470856 -29.497470856 -23.500000000 35.497470856 -29.497470856 -22.500000000 35.497470856 -29.497470856 -21.500000000 35.497470856 -29.497470856 -20.500000000 35.497470856 -29.497470856 -19.500000000 35.497470856 -29.497470856 -18.500000000 35.497470856 -29.497470856 -17.500000000 35.497470856 -29.497470856 -16.500000000 35.497470856 -29.497470856 -15.500000000 35.497470856 -29.497470856 -14.500000000 35.497470856 -29.497470856 -13.500000000 
35.497470856 -29.497470856 -12.500000000 35.497470856 -29.497470856 -11.500000000 35.497470856 -29.497470856 -10.500000000 35.497470856 -29.497470856 -9.500000000 35.497470856 -29.497470856 -8.500000000 35.497470856 -29.497470856 -7.500000000 35.497470856 -29.497470856 -6.500000000 35.497470856 -29.497470856 -5.500000000 35.497470856 -29.497470856 -4.500000000 35.497470856 -29.497470856 -3.500000000 35.497470856 -29.497470856 -2.500000000 35.497470856 -29.497470856 -1.500000000 35.497470856 -29.497470856 -0.500000000 35.497470856 -29.497470856 0.500000000 35.497470856 -29.497470856 1.500000000 35.497470856 -29.497470856 2.500000000 35.497470856 -29.497470856 3.500000000 35.497470856 -29.497470856 4.500000000 35.497470856 -29.497470856 5.500000000 35.497470856 -29.497470856 6.500000000 35.497470856 -29.497470856 7.500000000 35.497470856 -29.497470856 8.500000000 35.497470856 -29.497470856 9.500000000 35.497470856 -29.497470856 10.500000000 35.497470856 -29.497470856 11.500000000 35.497470856 -29.497470856 12.500000000 35.497470856 -29.497470856 13.500000000 35.497470856 -29.497470856 14.500000000 35.497470856 -29.497470856 15.500000000 35.497470856 -29.497470856 16.500000000 35.497470856 -29.497470856 17.500000000 35.497470856 -29.497470856 18.500000000 35.497470856 -29.497470856 19.500000000 35.497470856 -29.497470856 20.500000000 35.497470856 -29.497470856 21.500000000 35.497470856 -29.497470856 22.500000000 35.497470856 -29.497470856 23.500000000 35.497470856 -29.497470856 24.500000000 35.497470856 -29.497470856 25.499996185 35.497467041 -29.497470856 26.499954224 35.497409821 -29.497470856 27.499576569 35.496910095 -29.497461319 28.497371674 35.494174957 -29.497371674 29.488048553 35.483337402 -29.496959686 30.458074570 35.450428009 -29.495611191 31.382312775 35.371490479 -29.492319107 32.227874756 35.217514038 -29.486719131 32.968864441 34.965766907 -29.481937408 -33.977611542 -35.976860046 -28.495168686 -33.231597900 -36.228954315 -28.496379852 -32.383808136 
-36.381023407 -28.498052597 -31.458766937 -36.456939697 -28.499073029 -30.488346100 -36.487373352 -28.499475479 -29.497461319 -36.496910095 -28.499576569 -28.499593735 -36.499164581 -28.499591827 -27.499954224 -36.499546051 -28.499591827 -26.499996185 -36.499588013 -28.499591827 -25.500000000 -36.499591827 -28.499591827 -24.500000000 -36.499591827 -28.499591827 -23.500000000 -36.499591827 -28.499591827 -22.500000000 -36.499591827 -28.499591827 -21.500000000 -36.499591827 -28.499591827 -20.500000000 -36.499591827 -28.499591827 -19.500000000 -36.499591827 -28.499591827 -18.500000000 -36.499591827 -28.499591827 -17.500000000 -36.499591827 -28.499591827 -16.500000000 -36.499591827 -28.499591827 -15.500000000 -36.499591827 -28.499591827 -14.500000000 -36.499591827 -28.499591827 -13.500000000 -36.499591827 -28.499591827 -12.500000000 -36.499591827 -28.499591827 -11.500000000 -36.499591827 -28.499591827 -10.500000000 -36.499591827 -28.499591827 -9.500000000 -36.499591827 -28.499591827 -8.500000000 -36.499591827 -28.499591827 -7.500000000 -36.499591827 -28.499591827 -6.500000000 -36.499591827 -28.499591827 -5.500000000 -36.499591827 -28.499591827 -4.500000000 -36.499591827 -28.499591827 -3.500000000 -36.499591827 -28.499591827 -2.500000000 -36.499591827 -28.499591827 -1.500000000 -36.499591827 -28.499591827 -0.500000000 -36.499591827 -28.499591827 0.500000000 -36.499591827 -28.499591827 1.500000000 -36.499591827 -28.499591827 2.500000000 -36.499591827 -28.499591827 3.500000000 -36.499591827 -28.499591827 4.500000000 -36.499591827 -28.499591827 5.500000000 -36.499591827 -28.499591827 6.500000000 -36.499591827 -28.499591827 7.500000000 -36.499591827 -28.499591827 8.500000000 -36.499591827 -28.499591827 9.500000000 -36.499591827 -28.499591827 10.500000000 -36.499591827 -28.499591827 11.500000000 -36.499591827 -28.499591827 12.500000000 -36.499591827 -28.499591827 13.500000000 -36.499591827 -28.499591827 14.500000000 -36.499591827 -28.499591827 15.500000000 -36.499591827 
-28.499591827 16.500000000 -36.499591827 -28.499591827 17.500000000 -36.499591827 -28.499591827 18.500000000 -36.499591827 -28.499591827 19.500000000 -36.499591827 -28.499591827 20.500000000 -36.499591827 -28.499591827 21.500000000 -36.499591827 -28.499591827 22.500000000 -36.499591827 -28.499591827 23.500000000 -36.499591827 -28.499591827 24.500000000 -36.499591827 -28.499591827 25.499996185 -36.499588013 -28.499591827 26.499954224 -36.499542236 -28.499591827 27.499591827 -36.499160767 -28.499591827 28.497461319 -36.496910095 -28.499576569 29.488346100 -36.487377167 -28.499475479 30.458766937 -36.456939697 -28.499073029 31.383810043 -36.381023407 -28.498050690 32.231601715 -36.228954315 -28.496377945 32.977619171 -35.976860046 -28.495168686 -35.163158417 -35.163154602 -28.496557236 -34.617927551 -35.618598938 -28.496026993 33.617927551 -35.618602753 -28.496026993 34.163158417 -35.163158417 -28.496557236 -35.976860046 -33.977611542 -28.495172501 -35.618602753 -34.617927551 -28.496026993 34.618602753 -34.617931366 -28.496026993 34.976860046 -33.977619171 -28.495172501 -36.228954315 -33.231597900 -28.496377945 35.228958130 -33.231601715 -28.496379852 -36.381023407 -32.383808136 -28.498052597 35.381023407 -32.383808136 -28.498052597 -36.456939697 -31.458766937 -28.499073029 35.456939697 -31.458766937 -28.499073029 -36.487373352 -30.488346100 -28.499475479 35.487373352 -30.488346100 -28.499475479 -36.496910095 -29.497461319 -28.499576569 35.496910095 -29.497461319 -28.499576569 -36.499160767 -28.499593735 -28.499591827 35.499164581 -28.499593735 -28.499591827 -36.499546051 -27.499954224 -28.499591827 35.499546051 -27.499954224 -28.499591827 -36.499588013 -26.499996185 -28.499591827 35.499588013 -26.499996185 -28.499591827 -36.499595642 -25.500000000 -28.499591827 35.499591827 -25.500000000 -28.499591827 -36.499595642 -24.500000000 -28.499591827 35.499591827 -24.500000000 -28.499591827 -36.499595642 -23.500000000 -28.499591827 35.499591827 -23.500000000 -28.499591827 
-36.499595642 -22.500000000 -28.499591827 35.499591827 -22.500000000 -28.499591827 -36.499595642 -21.500000000 -28.499591827 35.499591827 -21.500000000 -28.499591827 -36.499595642 -20.500000000 -28.499591827 35.499591827 -20.500000000 -28.499591827 -36.499595642 -19.500000000 -28.499591827 35.499591827 -19.500000000 -28.499591827 -36.499595642 -18.500000000 -28.499591827 35.499591827 -18.500000000 -28.499591827 -36.499595642 -17.500000000 -28.499591827 35.499591827 -17.500000000 -28.499591827 -36.499595642 -16.500000000 -28.499591827 35.499591827 -16.500000000 -28.499591827 -36.499595642 -15.500000000 -28.499591827 35.499591827 -15.500000000 -28.499591827 -36.499595642 -14.500000000 -28.499591827 35.499591827 -14.500000000 -28.499591827 -36.499595642 -13.500000000 -28.499591827 35.499591827 -13.500000000 -28.499591827 -36.499595642 -12.500000000 -28.499591827 35.499591827 -12.500000000 -28.499591827 -36.499595642 -11.500000000 -28.499591827 35.499591827 -11.500000000 -28.499591827 -36.499595642 -10.500000000 -28.499591827 35.499591827 -10.500000000 -28.499591827 -36.499595642 -9.500000000 -28.499591827 35.499591827 -9.500000000 -28.499591827 -36.499595642 -8.500000000 -28.499591827 35.499591827 -8.500000000 -28.499591827 -36.499595642 -7.500000000 -28.499591827 35.499591827 -7.500000000 -28.499591827 -36.499595642 -6.500000000 -28.499591827 35.499591827 -6.500000000 -28.499591827 -36.499595642 -5.500000000 -28.499591827 35.499591827 -5.500000000 -28.499591827 -36.499595642 -4.500000000 -28.499591827 35.499591827 -4.500000000 -28.499591827 -36.499595642 -3.500000000 -28.499591827 35.499591827 -3.500000000 -28.499591827 -36.499595642 -2.500000000 -28.499591827 35.499591827 -2.500000000 -28.499591827 -36.499595642 -1.500000000 -28.499591827 35.499591827 -1.500000000 -28.499591827 -36.499595642 -0.500000000 -28.499591827 35.499591827 -0.500000000 -28.499591827 -36.499595642 0.500000000 -28.499591827 35.499591827 0.500000000 -28.499591827 -36.499595642 1.500000000 
-28.499591827 35.499591827 1.500000000 -28.499591827 -36.499595642 2.500000000 -28.499591827 35.499591827 2.500000000 -28.499591827 -36.499595642 3.500000000 -28.499591827 35.499591827 3.500000000 -28.499591827 -36.499595642 4.500000000 -28.499591827 35.499591827 4.500000000 -28.499591827 -36.499595642 5.500000000 -28.499591827 35.499591827 5.500000000 -28.499591827 -36.499595642 6.500000000 -28.499591827 35.499591827 6.500000000 -28.499591827 -36.499595642 7.500000000 -28.499591827 35.499591827 7.500000000 -28.499591827 -36.499595642 8.500000000 -28.499591827 35.499591827 8.500000000 -28.499591827 -36.499595642 9.500000000 -28.499591827 35.499591827 9.500000000 -28.499591827 -36.499595642 10.500000000 -28.499591827 35.499591827 10.500000000 -28.499591827 -36.499595642 11.500000000 -28.499591827 35.499591827 11.500000000 -28.499591827 -36.499595642 12.500000000 -28.499591827 35.499591827 12.500000000 -28.499591827 -36.499595642 13.500000000 -28.499591827 35.499591827 13.500000000 -28.499591827 -36.499595642 14.500000000 -28.499591827 35.499591827 14.500000000 -28.499591827 -36.499595642 15.500000000 -28.499591827 35.499591827 15.500000000 -28.499591827 -36.499595642 16.500000000 -28.499591827 35.499591827 16.500000000 -28.499591827 -36.499595642 17.500000000 -28.499591827 35.499591827 17.500000000 -28.499591827 -36.499595642 18.500000000 -28.499591827 35.499591827 18.500000000 -28.499591827 -36.499595642 19.500000000 -28.499591827 35.499591827 19.500000000 -28.499591827 -36.499595642 20.500000000 -28.499591827 35.499591827 20.500000000 -28.499591827 -36.499595642 21.500000000 -28.499591827 35.499591827 21.500000000 -28.499591827 -36.499595642 22.500000000 -28.499591827 35.499591827 22.500000000 -28.499591827 -36.499595642 23.500000000 -28.499591827 35.499591827 23.500000000 -28.499591827 -36.499595642 24.500000000 -28.499591827 35.499591827 24.500000000 -28.499591827 -36.499588013 25.499996185 -28.499591827 35.499588013 25.499996185 -28.499591827 -36.499546051 
26.499954224 -28.499591827 35.499542236 26.499954224 -28.499591827 -36.499160767 27.499591827 -28.499591827 35.499160767 27.499591827 -28.499591827 -36.496910095 28.497461319 -28.499576569 35.496910095 28.497461319 -28.499576569 -36.487373352 29.488346100 -28.499475479 35.487377167 29.488346100 -28.499475479 -36.456939697 30.458766937 -28.499073029 35.456939697 30.458766937 -28.499073029 -36.381031036 31.383810043 -28.498050690 35.381023407 31.383810043 -28.498050690 -36.228958130 32.231597900 -28.496377945 35.228954315 32.231601715 -28.496377945 -35.976860046 32.977611542 -28.495168686 -35.618598938 33.617927551 -28.496026993 34.618602753 33.617927551 -28.496026993 34.976860046 32.977619171 -28.495168686 -35.163154602 34.163158417 -28.496557236 -34.617927551 34.618602753 -28.496026993 33.617927551 34.618602753 -28.496026993 34.163158417 34.163158417 -28.496557236 -33.977611542 34.976860046 -28.495172501 -33.231597900 35.228954315 -28.496377945 -32.383808136 35.381023407 -28.498052597 -31.458766937 35.456939697 -28.499073029 -30.488346100 35.487373352 -28.499475479 -29.497461319 35.496910095 -28.499576569 -28.499593735 35.499160767 -28.499591827 -27.499954224 35.499546051 -28.499591827 -26.499996185 35.499588013 -28.499591827 -25.500000000 35.499595642 -28.499591827 -24.500000000 35.499595642 -28.499591827 -23.500000000 35.499595642 -28.499591827 -22.500000000 35.499595642 -28.499591827 -21.500000000 35.499595642 -28.499591827 -20.500000000 35.499595642 -28.499591827 -19.500000000 35.499595642 -28.499591827 -18.500000000 35.499595642 -28.499591827 -17.500000000 35.499595642 -28.499591827 -16.500000000 35.499595642 -28.499591827 -15.500000000 35.499595642 -28.499591827 -14.500000000 35.499595642 -28.499591827 -13.500000000 35.499595642 -28.499591827 -12.500000000 35.499595642 -28.499591827 -11.500000000 35.499595642 -28.499591827 -10.500000000 35.499595642 -28.499591827 -9.500000000 35.499595642 -28.499591827 -8.500000000 35.499595642 -28.499591827 -7.500000000 
35.499595642 -28.499591827 -6.500000000 35.499595642 -28.499591827 -5.500000000 35.499595642 -28.499591827 -4.500000000 35.499595642 -28.499591827 -3.500000000 35.499595642 -28.499591827 -2.500000000 35.499595642 -28.499591827 -1.500000000 35.499595642 -28.499591827 -0.500000000 35.499595642 -28.499591827 0.500000000 35.499595642 -28.499591827 1.500000000 35.499595642 -28.499591827 2.500000000 35.499595642 -28.499591827 3.500000000 35.499595642 -28.499591827 4.500000000 35.499595642 -28.499591827 5.500000000 35.499595642 -28.499591827 6.500000000 35.499595642 -28.499591827 7.500000000 35.499595642 -28.499591827 8.500000000 35.499595642 -28.499591827 9.500000000 35.499595642 -28.499591827 10.500000000 35.499595642 -28.499591827 11.500000000 35.499595642 -28.499591827 12.500000000 35.499595642 -28.499591827 13.500000000 35.499595642 -28.499591827 14.500000000 35.499595642 -28.499591827 15.500000000 35.499595642 -28.499591827 16.500000000 35.499595642 -28.499591827 17.500000000 35.499595642 -28.499591827 18.500000000 35.499595642 -28.499591827 19.500000000 35.499595642 -28.499591827 20.500000000 35.499595642 -28.499591827 21.500000000 35.499595642 -28.499591827 22.500000000 35.499595642 -28.499591827 23.500000000 35.499595642 -28.499591827 24.500000000 35.499595642 -28.499591827 25.499996185 35.499588013 -28.499591827 26.499954224 35.499546051 -28.499591827 27.499591827 35.499160767 -28.499591827 28.497461319 35.496910095 -28.499576569 29.488346100 35.487373352 -28.499475479 30.458766937 35.456939697 -28.499073029 31.383810043 35.381031036 -28.498050690 32.231597900 35.228958130 -28.496377945 32.977619171 34.976860046 -28.495172501 -33.980331421 -35.980201721 -27.498950958 -33.232864380 -36.232307434 -27.499221802 -32.384296417 -36.383720398 -27.499622345 -31.458948135 -36.458606720 -27.499858856 -30.488399506 -36.488258362 -27.499938965 -29.497472763 -36.497406006 -27.499954224 -28.499593735 -36.499549866 -27.499954224 -27.499954224 -36.499908447 -27.499954224 
-26.499996185 -36.499950409 -27.499954224 -25.500000000 -36.499954224 -27.499954224 -24.500000000 -36.499954224 -27.499954224 -23.500000000 -36.499954224 -27.499954224 -22.500000000 -36.499954224 -27.499954224 -21.500000000 -36.499954224 -27.499954224 -20.500000000 -36.499954224 -27.499954224 -19.500000000 -36.499954224 -27.499954224 -18.500000000 -36.499954224 -27.499954224 -17.500000000 -36.499954224 -27.499954224 -16.500000000 -36.499954224 -27.499954224 -15.500000000 -36.499954224 -27.499954224 -14.500000000 -36.499954224 -27.499954224 -13.500000000 -36.499954224 -27.499954224 -12.500000000 -36.499954224 -27.499954224 -11.500000000 -36.499954224 -27.499954224 -10.500000000 -36.499954224 -27.499954224 -9.500000000 -36.499954224 -27.499954224 -8.500000000 -36.499954224 -27.499954224 -7.500000000 -36.499954224 -27.499954224 -6.500000000 -36.499954224 -27.499954224 -5.500000000 -36.499954224 -27.499954224 -4.500000000 -36.499954224 -27.499954224 -3.500000000 -36.499954224 -27.499954224 -2.500000000 -36.499954224 -27.499954224 -1.500000000 -36.499954224 -27.499954224 -0.500000000 -36.499954224 -27.499954224 0.500000000 -36.499954224 -27.499954224 1.500000000 -36.499954224 -27.499954224 2.500000000 -36.499954224 -27.499954224 3.500000000 -36.499954224 -27.499954224 4.500000000 -36.499954224 -27.499954224 5.500000000 -36.499954224 -27.499954224 6.500000000 -36.499954224 -27.499954224 7.500000000 -36.499954224 -27.499954224 8.500000000 -36.499954224 -27.499954224 9.500000000 -36.499954224 -27.499954224 10.500000000 -36.499954224 -27.499954224 11.500000000 -36.499954224 -27.499954224 12.500000000 -36.499954224 -27.499954224 13.500000000 -36.499954224 -27.499954224 14.500000000 -36.499954224 -27.499954224 15.500000000 -36.499954224 -27.499954224 16.500000000 -36.499954224 -27.499954224 17.500000000 -36.499954224 -27.499954224 18.500000000 -36.499954224 -27.499954224 19.500000000 -36.499954224 -27.499954224 20.500000000 -36.499954224 -27.499954224 21.500000000 
-36.499954224 -27.499954224 22.500000000 -36.499954224 -27.499954224 23.500000000 -36.499954224 -27.499954224 24.500000000 -36.499954224 -27.499954224 25.499996185 -36.499950409 -27.499954224 26.499954224 -36.499908447 -27.499954224 27.499591827 -36.499546051 -27.499954224 28.497470856 -36.497413635 -27.499954224 29.488399506 -36.488258362 -27.499938965 30.458948135 -36.458606720 -27.499858856 31.384298325 -36.383720398 -27.499618530 32.232864380 -36.232307434 -27.499225616 32.980331421 -35.980201721 -27.498950958 -35.166961670 -35.166954041 -27.499298096 -34.621795654 -35.621982574 -27.499156952 33.621795654 -35.621982574 -27.499156952 34.166957855 -35.166961670 -27.499298096 -35.980201721 -33.980335236 -27.498950958 -35.621982574 -34.621795654 -27.499156952 34.621982574 -34.621795654 -27.499156952 34.980201721 -33.980331421 -27.498950958 -36.232307434 -33.232864380 -27.499221802 35.232307434 -33.232864380 -27.499221802 -36.383720398 -32.384300232 -27.499622345 35.383720398 -32.384296417 -27.499622345 -36.458606720 -31.458948135 -27.499858856 35.458606720 -31.458948135 -27.499858856 -36.488258362 -30.488399506 -27.499938965 35.488258362 -30.488399506 -27.499938965 -36.497406006 -29.497472763 -27.499954224 35.497406006 -29.497472763 -27.499954224 -36.499546051 -28.499593735 -27.499954224 35.499549866 -28.499593735 -27.499954224 -36.499908447 -27.499954224 -27.499954224 35.499908447 -27.499954224 -27.499954224 -36.499950409 -26.499996185 -27.499954224 35.499950409 -26.499996185 -27.499954224 -36.499954224 -25.500000000 -27.499954224 35.499954224 -25.500000000 -27.499954224 -36.499954224 -24.500000000 -27.499954224 35.499954224 -24.500000000 -27.499954224 -36.499954224 -23.500000000 -27.499954224 35.499954224 -23.500000000 -27.499954224 -36.499954224 -22.500000000 -27.499954224 35.499954224 -22.500000000 -27.499954224 -36.499954224 -21.500000000 -27.499954224 35.499954224 -21.500000000 -27.499954224 -36.499954224 -20.500000000 -27.499954224 35.499954224 -20.500000000 
-27.499954224 -36.499954224 -19.500000000 -27.499954224 35.499954224 -19.500000000 -27.499954224 -36.499954224 -18.500000000 -27.499954224 35.499954224 -18.500000000 -27.499954224 -36.499954224 -17.500000000 -27.499954224 35.499954224 -17.500000000 -27.499954224 -36.499954224 -16.500000000 -27.499954224 35.499954224 -16.500000000 -27.499954224 -36.499954224 -15.500000000 -27.499954224 35.499954224 -15.500000000 -27.499954224 -36.499954224 -14.500000000 -27.499954224 35.499954224 -14.500000000 -27.499954224 -36.499954224 -13.500000000 -27.499954224 35.499954224 -13.500000000 -27.499954224 -36.499954224 -12.500000000 -27.499954224 35.499954224 -12.500000000 -27.499954224 -36.499954224 -11.500000000 -27.499954224 35.499954224 -11.500000000 -27.499954224 -36.499954224 -10.500000000 -27.499954224 35.499954224 -10.500000000 -27.499954224 -36.499954224 -9.500000000 -27.499954224 35.499954224 -9.500000000 -27.499954224 -36.499954224 -8.500000000 -27.499954224 35.499954224 -8.500000000 -27.499954224 -36.499954224 -7.500000000 -27.499954224 35.499954224 -7.500000000 -27.499954224 -36.499954224 -6.500000000 -27.499954224 35.499954224 -6.500000000 -27.499954224 -36.499954224 -5.500000000 -27.499954224 35.499954224 -5.500000000 -27.499954224 -36.499954224 -4.500000000 -27.499954224 35.499954224 -4.500000000 -27.499954224 -36.499954224 -3.500000000 -27.499954224 35.499954224 -3.500000000 -27.499954224 -36.499954224 -2.500000000 -27.499954224 35.499954224 -2.500000000 -27.499954224 -36.499954224 -1.500000000 -27.499954224 35.499954224 -1.500000000 -27.499954224 -36.499954224 -0.500000000 -27.499954224 35.499954224 -0.500000000 -27.499954224 -36.499954224 0.500000000 -27.499954224 35.499954224 0.500000000 -27.499954224 -36.499954224 1.500000000 -27.499954224 35.499954224 1.500000000 -27.499954224 -36.499954224 2.500000000 -27.499954224 35.499954224 2.500000000 -27.499954224 -36.499954224 3.500000000 -27.499954224 35.499954224 3.500000000 -27.499954224 -36.499954224 4.500000000 
-27.499954224 35.499954224 4.500000000 -27.499954224 -36.499954224 5.500000000 -27.499954224 35.499954224 5.500000000 -27.499954224 -36.499954224 6.500000000 -27.499954224 35.499954224 6.500000000 -27.499954224 -36.499954224 7.500000000 -27.499954224 35.499954224 7.500000000 -27.499954224 -36.499954224 8.500000000 -27.499954224 35.499954224 8.500000000 -27.499954224 -36.499954224 9.500000000 -27.499954224 35.499954224 9.500000000 -27.499954224 -36.499954224 10.500000000 -27.499954224 35.499954224 10.500000000 -27.499954224 -36.499954224 11.500000000 -27.499954224 35.499954224 11.500000000 -27.499954224 -36.499954224 12.500000000 -27.499954224 35.499954224 12.500000000 -27.499954224 -36.499954224 13.500000000 -27.499954224 35.499954224 13.500000000 -27.499954224 -36.499954224 14.500000000 -27.499954224 35.499954224 14.500000000 -27.499954224 -36.499954224 15.500000000 -27.499954224 35.499954224 15.500000000 -27.499954224 -36.499954224 16.500000000 -27.499954224 35.499954224 16.500000000 -27.499954224 -36.499954224 17.500000000 -27.499954224 35.499954224 17.500000000 -27.499954224 -36.499954224 18.500000000 -27.499954224 35.499954224 18.500000000 -27.499954224 -36.499954224 19.500000000 -27.499954224 35.499954224 19.500000000 -27.499954224 -36.499954224 20.500000000 -27.499954224 35.499954224 20.500000000 -27.499954224 -36.499954224 21.500000000 -27.499954224 35.499954224 21.500000000 -27.499954224 -36.499954224 22.500000000 -27.499954224 35.499954224 22.500000000 -27.499954224 -36.499954224 23.500000000 -27.499954224 35.499954224 23.500000000 -27.499954224 -36.499954224 24.500000000 -27.499954224 35.499954224 24.500000000 -27.499954224 -36.499950409 25.499996185 -27.499954224 35.499950409 25.499996185 -27.499954224 -36.499908447 26.499954224 -27.499954224 35.499908447 26.499954224 -27.499954224 -36.499542236 27.499591827 -27.499954224 35.499546051 27.499591827 -27.499954224 -36.497406006 28.497470856 -27.499954224 35.497413635 28.497470856 -27.499954224 
-36.488258362 29.488399506 -27.499938965 35.488258362 29.488399506 -27.499938965 -36.458606720 30.458948135 -27.499858856 35.458606720 30.458948135 -27.499858856 -36.383720398 31.384298325 -27.499618530 35.383720398 31.384298325 -27.499618530 -36.232307434 32.232864380 -27.499225616 35.232307434 32.232864380 -27.499225616 -35.980201721 32.980331421 -27.498950958 -35.621982574 33.621795654 -27.499156952 34.621982574 33.621795654 -27.499156952 34.980201721 32.980331421 -27.498950958 -35.166954041 34.166961670 -27.499298096 -34.621795654 34.621982574 -27.499156952 33.621795654 34.621982574 -27.499156952 34.166961670 34.166957855 -27.499298096 -33.980335236 34.980201721 -27.498950958 -33.232864380 35.232307434 -27.499221802 -32.384300232 35.383720398 -27.499622345 -31.458948135 35.458606720 -27.499858856 -30.488399506 35.488258362 -27.499938965 -29.497472763 35.497406006 -27.499954224 -28.499593735 35.499546051 -27.499954224 -27.499954224 35.499908447 -27.499954224 -26.499996185 35.499950409 -27.499954224 -25.500000000 35.499954224 -27.499954224 -24.500000000 35.499954224 -27.499954224 -23.500000000 35.499954224 -27.499954224 -22.500000000 35.499954224 -27.499954224 -21.500000000 35.499954224 -27.499954224 -20.500000000 35.499954224 -27.499954224 -19.500000000 35.499954224 -27.499954224 -18.500000000 35.499954224 -27.499954224 -17.500000000 35.499954224 -27.499954224 -16.500000000 35.499954224 -27.499954224 -15.500000000 35.499954224 -27.499954224 -14.500000000 35.499954224 -27.499954224 -13.500000000 35.499954224 -27.499954224 -12.500000000 35.499954224 -27.499954224 -11.500000000 35.499954224 -27.499954224 -10.500000000 35.499954224 -27.499954224 -9.500000000 35.499954224 -27.499954224 -8.500000000 35.499954224 -27.499954224 -7.500000000 35.499954224 -27.499954224 -6.500000000 35.499954224 -27.499954224 -5.500000000 35.499954224 -27.499954224 -4.500000000 35.499954224 -27.499954224 -3.500000000 35.499954224 -27.499954224 -2.500000000 35.499954224 -27.499954224 
-1.500000000 35.499954224 -27.499954224 -0.500000000 35.499954224 -27.499954224 0.500000000 35.499954224 -27.499954224 1.500000000 35.499954224 -27.499954224 2.500000000 35.499954224 -27.499954224 3.500000000 35.499954224 -27.499954224 4.500000000 35.499954224 -27.499954224 5.500000000 35.499954224 -27.499954224 6.500000000 35.499954224 -27.499954224 7.500000000 35.499954224 -27.499954224 8.500000000 35.499954224 -27.499954224 9.500000000 35.499954224 -27.499954224 10.500000000 35.499954224 -27.499954224 11.500000000 35.499954224 -27.499954224 12.500000000 35.499954224 -27.499954224 13.500000000 35.499954224 -27.499954224 14.500000000 35.499954224 -27.499954224 15.500000000 35.499954224 -27.499954224 16.500000000 35.499954224 -27.499954224 17.500000000 35.499954224 -27.499954224 18.500000000 35.499954224 -27.499954224 19.500000000 35.499954224 -27.499954224 20.500000000 35.499954224 -27.499954224 21.500000000 35.499954224 -27.499954224 22.500000000 35.499954224 -27.499954224 23.500000000 35.499954224 -27.499954224 24.500000000 35.499954224 -27.499954224 25.499996185 35.499950409 -27.499954224 26.499954224 35.499908447 -27.499954224 27.499591827 35.499542236 -27.499954224 28.497470856 35.497406006 -27.499954224 29.488399506 35.488258362 -27.499938965 30.458948135 35.458606720 -27.499858856 31.384298325 35.383720398 -27.499618530 32.232864380 35.232307434 -27.499225616 32.980327606 34.980201721 -27.498950958 -33.980976105 -35.980957031 -26.499826431 -33.233165741 -36.233070374 -26.499872208 -32.384403229 -36.384307861 -26.499948502 -31.458978653 -36.458930969 -26.499988556 -30.488407135 -36.488384247 -26.499996185 -29.497472763 -36.497467041 -26.499996185 -28.499593735 -36.499591827 -26.499996185 -27.499954224 -36.499950409 -26.499996185 -26.499996185 -36.499992371 -26.499996185 -25.500000000 -36.499996185 -26.499996185 -24.500000000 -36.499996185 -26.499996185 -23.500000000 -36.499996185 -26.499996185 -22.500000000 -36.499996185 -26.499996185 -21.500000000 
-36.499996185 -26.499996185 -20.500000000 -36.499996185 -26.499996185 -19.500000000 -36.499996185 -26.499996185 -18.500000000 -36.499996185 -26.499996185 -17.500000000 -36.499996185 -26.499996185 -16.500000000 -36.499996185 -26.499996185 -15.500000000 -36.499996185 -26.499996185 -14.500000000 -36.499996185 -26.499996185 -13.500000000 -36.499996185 -26.499996185 -12.500000000 -36.499996185 -26.499996185 -11.500000000 -36.499996185 -26.499996185 -10.500000000 -36.499996185 -26.499996185 -9.500000000 -36.499996185 -26.499996185 -8.500000000 -36.499996185 -26.499996185 -7.500000000 -36.499996185 -26.499996185 -6.500000000 -36.499996185 -26.499996185 -5.500000000 -36.499996185 -26.499996185 -4.500000000 -36.499996185 -26.499996185 -3.500000000 -36.499996185 -26.499996185 -2.500000000 -36.499996185 -26.499996185 -1.500000000 -36.499996185 -26.499996185 -0.500000000 -36.499996185 -26.499996185 0.500000000 -36.499996185 -26.499996185 1.500000000 -36.499996185 -26.499996185 2.500000000 -36.499996185 -26.499996185 3.500000000 -36.499996185 -26.499996185 4.500000000 -36.499996185 -26.499996185 5.500000000 -36.499996185 -26.499996185 6.500000000 -36.499996185 -26.499996185 7.500000000 -36.499996185 -26.499996185 8.500000000 -36.499996185 -26.499996185 9.500000000 -36.499996185 -26.499996185 10.500000000 -36.499996185 -26.499996185 11.500000000 -36.499996185 -26.499996185 12.500000000 -36.499996185 -26.499996185 13.500000000 -36.499996185 -26.499996185 14.500000000 -36.499996185 -26.499996185 15.500000000 -36.499996185 -26.499996185 16.500000000 -36.499996185 -26.499996185 17.500000000 -36.499996185 -26.499996185 18.500000000 -36.499996185 -26.499996185 19.500000000 -36.499996185 -26.499996185 20.500000000 -36.499996185 -26.499996185 21.500000000 -36.499996185 -26.499996185 22.500000000 -36.499996185 -26.499996185 23.500000000 -36.499996185 -26.499996185 24.500000000 -36.499996185 -26.499996185 25.499996185 -36.499992371 -26.499996185 26.499954224 -36.499950409 -26.499996185 
27.499591827 -36.499591827 -26.499996185 28.497470856 -36.497467041 -26.499996185 29.488407135 -36.488391876 -26.499996185 30.458978653 -36.458934784 -26.499988556 31.384403229 -36.384307861 -26.499948502 32.233165741 -36.233070374 -26.499872208 32.980972290 -35.980960846 -26.499826431 -35.167808533 -35.167808533 -26.499902725 -34.622692108 -35.622734070 -26.499868393 33.622695923 -35.622734070 -26.499868393 34.167808533 -35.167808533 -26.499902725 -35.980957031 -33.980976105 -26.499822617 -35.622734070 -34.622692108 -26.499868393 34.622734070 -34.622692108 -26.499868393 34.980957031 -33.980976105 -26.499822617 -36.233070374 -33.233165741 -26.499874115 35.233070374 -33.233165741 -26.499872208 -36.384307861 -32.384403229 -26.499948502 35.384307861 -32.384403229 -26.499948502 -36.458930969 -31.458978653 -26.499988556 35.458930969 -31.458978653 -26.499988556 -36.488391876 -30.488407135 -26.499996185 35.488384247 -30.488407135 -26.499996185 -36.497467041 -29.497472763 -26.499996185 35.497467041 -29.497472763 -26.499996185 -36.499588013 -28.499593735 -26.499996185 35.499591827 -28.499593735 -26.499996185 -36.499950409 -27.499954224 -26.499996185 35.499950409 -27.499954224 -26.499996185 -36.499992371 -26.499996185 -26.499996185 35.499992371 -26.499996185 -26.499996185 -36.499996185 -25.500000000 -26.499996185 35.499996185 -25.500000000 -26.499996185 -36.499996185 -24.500000000 -26.499996185 35.499996185 -24.500000000 -26.499996185 -36.499996185 -23.500000000 -26.499996185 35.499996185 -23.500000000 -26.499996185 -36.499996185 -22.500000000 -26.499996185 35.499996185 -22.500000000 -26.499996185 -36.499996185 -21.500000000 -26.499996185 35.499996185 -21.500000000 -26.499996185 -36.499996185 -20.500000000 -26.499996185 35.499996185 -20.500000000 -26.499996185 -36.499996185 -19.500000000 -26.499996185 35.499996185 -19.500000000 -26.499996185 -36.499996185 -18.500000000 -26.499996185 35.499996185 -18.500000000 -26.499996185 -36.499996185 -17.500000000 -26.499996185 
35.499996185 -17.500000000 -26.499996185 -36.499996185 -16.500000000 -26.499996185 35.499996185 -16.500000000 -26.499996185 -36.499996185 -15.500000000 -26.499996185 35.499996185 -15.500000000 -26.499996185 -36.499996185 -14.500000000 -26.499996185 35.499996185 -14.500000000 -26.499996185 -36.499996185 -13.500000000 -26.499996185 35.499996185 -13.500000000 -26.499996185 -36.499996185 -12.500000000 -26.499996185 35.499996185 -12.500000000 -26.499996185 -36.499996185 -11.500000000 -26.499996185 35.499996185 -11.500000000 -26.499996185 -36.499996185 -10.500000000 -26.499996185 35.499996185 -10.500000000 -26.499996185 -36.499996185 -9.500000000 -26.499996185 35.499996185 -9.500000000 -26.499996185 -36.499996185 -8.500000000 -26.499996185 35.499996185 -8.500000000 -26.499996185 -36.499996185 -7.500000000 -26.499996185 35.499996185 -7.500000000 -26.499996185 -36.499996185 -6.500000000 -26.499996185 35.499996185 -6.500000000 -26.499996185 -36.499996185 -5.500000000 -26.499996185 35.499996185 -5.500000000 -26.499996185 -36.499996185 -4.500000000 -26.499996185 35.499996185 -4.500000000 -26.499996185 -36.499996185 -3.500000000 -26.499996185 35.499996185 -3.500000000 -26.499996185 -36.499996185 -2.500000000 -26.499996185 35.499996185 -2.500000000 -26.499996185 -36.499996185 -1.500000000 -26.499996185 35.499996185 -1.500000000 -26.499996185 -36.499996185 -0.500000000 -26.499996185 35.499996185 -0.500000000 -26.499996185 -36.499996185 0.500000000 -26.499996185 35.499996185 0.500000000 -26.499996185 -36.499996185 1.500000000 -26.499996185 35.499996185 1.500000000 -26.499996185 -36.499996185 2.500000000 -26.499996185 35.499996185 2.500000000 -26.499996185 -36.499996185 3.500000000 -26.499996185 35.499996185 3.500000000 -26.499996185 -36.499996185 4.500000000 -26.499996185 35.499996185 4.500000000 -26.499996185 -36.499996185 5.500000000 -26.499996185 35.499996185 5.500000000 -26.499996185 -36.499996185 6.500000000 -26.499996185 35.499996185 6.500000000 -26.499996185 -36.499996185 
7.500000000 -26.499996185 35.499996185 7.500000000 -26.499996185 -36.499996185 8.500000000 -26.499996185 35.499996185 8.500000000 -26.499996185 -36.499996185 9.500000000 -26.499996185 35.499996185 9.500000000 -26.499996185 -36.499996185 10.500000000 -26.499996185 35.499996185 10.500000000 -26.499996185 -36.499996185 11.500000000 -26.499996185 35.499996185 11.500000000 -26.499996185 -36.499996185 12.500000000 -26.499996185 35.499996185 12.500000000 -26.499996185 -36.499996185 13.500000000 -26.499996185 35.499996185 13.500000000 -26.499996185 -36.499996185 14.500000000 -26.499996185 35.499996185 14.500000000 -26.499996185 -36.499996185 15.500000000 -26.499996185 35.499996185 15.500000000 -26.499996185 -36.499996185 16.500000000 -26.499996185 35.499996185 16.500000000 -26.499996185 -36.499996185 17.500000000 -26.499996185 35.499996185 17.500000000 -26.499996185 -36.499996185 18.500000000 -26.499996185 35.499996185 18.500000000 -26.499996185 -36.499996185 19.500000000 -26.499996185 35.499996185 19.500000000 -26.499996185 -36.499996185 20.500000000 -26.499996185 35.499996185 20.500000000 -26.499996185 -36.499996185 21.500000000 -26.499996185 35.499996185 21.500000000 -26.499996185 -36.499996185 22.500000000 -26.499996185 35.499996185 22.500000000 -26.499996185 -36.499996185 23.500000000 -26.499996185 35.499996185 23.500000000 -26.499996185 -36.499996185 24.500000000 -26.499996185 35.499996185 24.500000000 -26.499996185 -36.499992371 25.499996185 -26.499996185 35.499992371 25.499996185 -26.499996185 -36.499950409 26.499954224 -26.499996185 35.499950409 26.499954224 -26.499996185 -36.499588013 27.499591827 -26.499996185 35.499591827 27.499591827 -26.499996185 -36.497467041 28.497470856 -26.499996185 35.497467041 28.497470856 -26.499996185 -36.488391876 29.488407135 -26.499996185 35.488391876 29.488407135 -26.499996185 -36.458930969 30.458978653 -26.499988556 35.458934784 30.458978653 -26.499988556 -36.384307861 31.384403229 -26.499948502 35.384307861 31.384403229 
-26.499948502 -36.233070374 32.233165741 -26.499874115 35.233070374 32.233165741 -26.499872208 -35.980957031 32.980976105 -26.499826431 -35.622734070 33.622692108 -26.499868393 34.622734070 33.622695923 -26.499868393 34.980960846 32.980972290 -26.499826431 -35.167808533 34.167808533 -26.499902725 -34.622692108 34.622734070 -26.499868393 33.622692108 34.622734070 -26.499868393 34.167808533 34.167808533 -26.499902725 -33.980976105 34.980957031 -26.499822617 -33.233165741 35.233070374 -26.499874115 -32.384403229 35.384307861 -26.499948502 -31.458978653 35.458930969 -26.499988556 -30.488407135 35.488391876 -26.499996185 -29.497472763 35.497467041 -26.499996185 -28.499593735 35.499588013 -26.499996185 -27.499954224 35.499950409 -26.499996185 -26.499996185 35.499992371 -26.499996185 -25.500000000 35.499996185 -26.499996185 -24.500000000 35.499996185 -26.499996185 -23.500000000 35.499996185 -26.499996185 -22.500000000 35.499996185 -26.499996185 -21.500000000 35.499996185 -26.499996185 -20.500000000 35.499996185 -26.499996185 -19.500000000 35.499996185 -26.499996185 -18.500000000 35.499996185 -26.499996185 -17.500000000 35.499996185 -26.499996185 -16.500000000 35.499996185 -26.499996185 -15.500000000 35.499996185 -26.499996185 -14.500000000 35.499996185 -26.499996185 -13.500000000 35.499996185 -26.499996185 -12.500000000 35.499996185 -26.499996185 -11.500000000 35.499996185 -26.499996185 -10.500000000 35.499996185 -26.499996185 -9.500000000 35.499996185 -26.499996185 -8.500000000 35.499996185 -26.499996185 -7.500000000 35.499996185 -26.499996185 -6.500000000 35.499996185 -26.499996185 -5.500000000 35.499996185 -26.499996185 -4.500000000 35.499996185 -26.499996185 -3.500000000 35.499996185 -26.499996185 -2.500000000 35.499996185 -26.499996185 -1.500000000 35.499996185 -26.499996185 -0.500000000 35.499996185 -26.499996185 0.500000000 35.499996185 -26.499996185 1.500000000 35.499996185 -26.499996185 2.500000000 35.499996185 -26.499996185 3.500000000 35.499996185 -26.499996185 
4.500000000 35.499996185 -26.499996185 5.500000000 35.499996185 -26.499996185 6.500000000 35.499996185 -26.499996185 7.500000000 35.499996185 -26.499996185 8.500000000 35.499996185 -26.499996185 9.500000000 35.499996185 -26.499996185 10.500000000 35.499996185 -26.499996185 11.500000000 35.499996185 -26.499996185 12.500000000 35.499996185 -26.499996185 13.500000000 35.499996185 -26.499996185 14.500000000 35.499996185 -26.499996185 15.500000000 35.499996185 -26.499996185 16.500000000 35.499996185 -26.499996185 17.500000000 35.499996185 -26.499996185 18.500000000 35.499996185 -26.499996185 19.500000000 35.499996185 -26.499996185 20.500000000 35.499996185 -26.499996185 21.500000000 35.499996185 -26.499996185 22.500000000 35.499996185 -26.499996185 23.500000000 35.499996185 -26.499996185 24.500000000 35.499996185 -26.499996185 25.499996185 35.499992371 -26.499996185 26.499954224 35.499950409 -26.499996185 27.499591827 35.499588013 -26.499996185 28.497470856 35.497467041 -26.499996185 29.488407135 35.488391876 -26.499996185 30.458978653 35.458930969 -26.499988556 31.384403229 35.384307861 -26.499948502 32.233165741 35.233070374 -26.499874115 32.980976105 34.980957031 -26.499822617 -33.981086731 -35.981086731 -25.499979019 -33.233215332 -36.233203888 -25.499984741 -32.384422302 -36.384407043 -25.499996185 -31.458978653 -36.458976746 -25.500000000 -30.488407135 -36.488403320 -25.500000000 -29.497472763 -36.497474670 -25.500000000 -28.499593735 -36.499591827 -25.500000000 -27.499954224 -36.499954224 -25.500000000 -26.499996185 -36.499996185 -25.500000000 -25.500000000 -36.500000000 -25.500000000 -24.500000000 -36.500000000 -25.500000000 -23.500000000 -36.500000000 -25.500000000 -22.500000000 -36.500000000 -25.500000000 -21.500000000 -36.500000000 -25.500000000 -20.500000000 -36.500000000 -25.500000000 -19.500000000 -36.500000000 -25.500000000 -18.500000000 -36.500000000 -25.500000000 -17.500000000 -36.500000000 -25.500000000 -16.500000000 -36.500000000 -25.500000000 
-15.500000000 -36.500000000 -25.500000000 -14.500000000 -36.500000000 -25.500000000 -13.500000000 -36.500000000 -25.500000000 -12.500000000 -36.500000000 -25.500000000 -11.500000000 -36.500000000 -25.500000000 -10.500000000 -36.500000000 -25.500000000 -9.500000000 -36.500000000 -25.500000000 -8.500000000 -36.500000000 -25.500000000 -7.500000000 -36.500000000 -25.500000000 -6.500000000 -36.500000000 -25.500000000 -5.500000000 -36.500000000 -25.500000000 -4.500000000 -36.500000000 -25.500000000 -3.500000000 -36.500000000 -25.500000000 -2.500000000 -36.500000000 -25.500000000 -1.500000000 -36.500000000 -25.500000000 -0.500000000 -36.500000000 -25.500000000 0.500000000 -36.500000000 -25.500000000 1.500000000 -36.500000000 -25.500000000 2.500000000 -36.500000000 -25.500000000 3.500000000 -36.500000000 -25.500000000 4.500000000 -36.500000000 -25.500000000 5.500000000 -36.500000000 -25.500000000 6.500000000 -36.500000000 -25.500000000 7.500000000 -36.500000000 -25.500000000 8.500000000 -36.500000000 -25.500000000 9.500000000 -36.500000000 -25.500000000 10.500000000 -36.500000000 -25.500000000 11.500000000 -36.500000000 -25.500000000 12.500000000 -36.500000000 -25.500000000 13.500000000 -36.500000000 -25.500000000 14.500000000 -36.500000000 -25.500000000 15.500000000 -36.500000000 -25.500000000 16.500000000 -36.500000000 -25.500000000 17.500000000 -36.500000000 -25.500000000 18.500000000 -36.500000000 -25.500000000 19.500000000 -36.500000000 -25.500000000 20.500000000 -36.500000000 -25.500000000 21.500000000 -36.500000000 -25.500000000 22.500000000 -36.500000000 -25.500000000 23.500000000 -36.500000000 -25.500000000 24.500000000 -36.500000000 -25.500000000 25.499996185 -36.499996185 -25.500000000 26.499954224 -36.499954224 -25.500000000 27.499591827 -36.499591827 -25.500000000 28.497470856 -36.497467041 -25.500000000 29.488407135 -36.488403320 -25.500000000 30.458978653 -36.458976746 -25.500000000 31.384418488 -36.384407043 -25.499996185 32.233215332 -36.233203888 
-25.499986649 32.981086731 -35.981086731 -25.499979019 -35.167949677 -35.167945862 -25.499990463 -34.622844696 -35.622852325 -25.499986649 33.622844696 -35.622852325 -25.499986649 34.167949677 -35.167949677 -25.499990463 -35.981086731 -33.981086731 -25.499977112 -35.622852325 -34.622844696 -25.499986649 34.622856140 -34.622844696 -25.499986649 34.981086731 -33.981086731 -25.499979019 -36.233203888 -33.233215332 -25.499986649 35.233203888 -33.233222961 -25.499984741 -36.384407043 -32.384422302 -25.499996185 35.384407043 -32.384422302 -25.499996185 -36.458972931 -31.458978653 -25.500000000 35.458976746 -31.458978653 -25.500000000 -36.488403320 -30.488407135 -25.500000000 35.488403320 -30.488407135 -25.500000000 -36.497467041 -29.497472763 -25.500000000 35.497474670 -29.497472763 -25.500000000 -36.499591827 -28.499593735 -25.500000000 35.499591827 -28.499593735 -25.500000000 -36.499954224 -27.499954224 -25.500000000 35.499954224 -27.499954224 -25.500000000 -36.499996185 -26.499996185 -25.500000000 35.499996185 -26.499996185 -25.500000000 -36.500000000 -25.500000000 -25.500000000 35.500000000 -25.500000000 -25.500000000 -36.500000000 -24.500000000 -25.500000000 35.500000000 -24.500000000 -25.500000000 -36.500000000 -23.500000000 -25.500000000 35.500000000 -23.500000000 -25.500000000 -36.500000000 -22.500000000 -25.500000000 35.500000000 -22.500000000 -25.500000000 -36.500000000 -21.500000000 -25.500000000 35.500000000 -21.500000000 -25.500000000 -36.500000000 -20.500000000 -25.500000000 35.500000000 -20.500000000 -25.500000000 -36.500000000 -19.500000000 -25.500000000 35.500000000 -19.500000000 -25.500000000 -36.500000000 -18.500000000 -25.500000000 35.500000000 -18.500000000 -25.500000000 -36.500000000 -17.500000000 -25.500000000 35.500000000 -17.500000000 -25.500000000 -36.500000000 -16.500000000 -25.500000000 35.500000000 -16.500000000 -25.500000000 -36.500000000 -15.500000000 -25.500000000 35.500000000 -15.500000000 -25.500000000 -36.500000000 -14.500000000 
-25.500000000 35.500000000 -14.500000000 -25.500000000 -36.500000000 -13.500000000 -25.500000000 35.500000000 -13.500000000 -25.500000000 -36.500000000 -12.500000000 -25.500000000 35.500000000 -12.500000000 -25.500000000 -36.500000000 -11.500000000 -25.500000000 35.500000000 -11.500000000 -25.500000000 -36.500000000 -10.500000000 -25.500000000 35.500000000 -10.500000000 -25.500000000 -36.500000000 -9.500000000 -25.500000000 35.500000000 -9.500000000 -25.500000000 -36.500000000 -8.500000000 -25.500000000 35.500000000 -8.500000000 -25.500000000 -36.500000000 -7.500000000 -25.500000000 35.500000000 -7.500000000 -25.500000000 -36.500000000 -6.500000000 -25.500000000 35.500000000 -6.500000000 -25.500000000 -36.500000000 -5.500000000 -25.500000000 35.500000000 -5.500000000 -25.500000000 -36.500000000 -4.500000000 -25.500000000 35.500000000 -4.500000000 -25.500000000 -36.500000000 -3.500000000 -25.500000000 35.500000000 -3.500000000 -25.500000000 -36.500000000 -2.500000000 -25.500000000 35.500000000 -2.500000000 -25.500000000 -36.500000000 -1.500000000 -25.500000000 35.500000000 -1.500000000 -25.500000000 -36.500000000 -0.500000000 -25.500000000 35.500000000 -0.500000000 -25.500000000 -36.500000000 0.500000000 -25.500000000 35.500000000 0.500000000 -25.500000000 -36.500000000 1.500000000 -25.500000000 35.500000000 1.500000000 -25.500000000 -36.500000000 2.500000000 -25.500000000 35.500000000 2.500000000 -25.500000000 -36.500000000 3.500000000 -25.500000000 35.500000000 3.500000000 -25.500000000 -36.500000000 4.500000000 -25.500000000 35.500000000 4.500000000 -25.500000000 -36.500000000 5.500000000 -25.500000000 35.500000000 5.500000000 -25.500000000 -36.500000000 6.500000000 -25.500000000 35.500000000 6.500000000 -25.500000000 -36.500000000 7.500000000 -25.500000000 35.500000000 7.500000000 -25.500000000 -36.500000000 8.500000000 -25.500000000 35.500000000 8.500000000 -25.500000000 -36.500000000 9.500000000 -25.500000000 35.500000000 9.500000000 -25.500000000 
-36.500000000 10.500000000 -25.500000000 35.500000000 10.500000000 -25.500000000 -36.500000000 11.500000000 -25.500000000 35.500000000 11.500000000 -25.500000000 -36.500000000 12.500000000 -25.500000000 35.500000000 12.500000000 -25.500000000 -36.500000000 13.500000000 -25.500000000 35.500000000 13.500000000 -25.500000000 -36.500000000 14.500000000 -25.500000000 35.500000000 14.500000000 -25.500000000 -36.500000000 15.500000000 -25.500000000 35.500000000 15.500000000 -25.500000000 -36.500000000 16.500000000 -25.500000000 35.500000000 16.500000000 -25.500000000 -36.500000000 17.500000000 -25.500000000 35.500000000 17.500000000 -25.500000000 -36.500000000 18.500000000 -25.500000000 35.500000000 18.500000000 -25.500000000 -36.500000000 19.500000000 -25.500000000 35.500000000 19.500000000 -25.500000000 -36.500000000 20.500000000 -25.500000000 35.500000000 20.500000000 -25.500000000 -36.500000000 21.500000000 -25.500000000 35.500000000 21.500000000 -25.500000000 -36.500000000 22.500000000 -25.500000000 35.500000000 22.500000000 -25.500000000 -36.500000000 23.500000000 -25.500000000 35.500000000 23.500000000 -25.500000000 -36.500000000 24.500000000 -25.500000000 35.500000000 24.500000000 -25.500000000 -36.499996185 25.499996185 -25.500000000 35.499996185 25.499996185 -25.500000000 -36.499954224 26.499954224 -25.500000000 35.499954224 26.499954224 -25.500000000 -36.499591827 27.499591827 -25.500000000 35.499591827 27.499591827 -25.500000000 -36.497474670 28.497470856 -25.500000000 35.497467041 28.497470856 -25.500000000 -36.488403320 29.488407135 -25.500000000 35.488403320 29.488407135 -25.500000000 -36.458976746 30.458978653 -25.500000000 35.458976746 30.458978653 -25.500000000 -36.384407043 31.384418488 -25.499996185 35.384407043 31.384418488 -25.499996185 -36.233203888 32.233215332 -25.499986649 35.233203888 32.233215332 -25.499986649 -35.981086731 32.981086731 -25.499975204 -35.622852325 33.622844696 -25.499986649 34.622852325 33.622844696 -25.499986649 34.981086731 
32.981086731 -25.499979019 -35.167945862 34.167949677 -25.499990463 -34.622844696 34.622852325 -25.499986649 33.622844696 34.622852325 -25.499986649 34.167949677 34.167949677 -25.499990463 -33.981086731 34.981086731 -25.499977112 -33.233215332 35.233203888 -25.499986649 -32.384422302 35.384407043 -25.499996185 -31.458978653 35.458972931 -25.500000000 -30.488407135 35.488403320 -25.500000000 -29.497472763 35.497467041 -25.500000000 -28.499593735 35.499591827 -25.500000000 -27.499954224 35.499954224 -25.500000000 -26.499996185 35.499996185 -25.500000000 -25.500000000 35.500000000 -25.500000000 -24.500000000 35.500000000 -25.500000000 -23.500000000 35.500000000 -25.500000000 -22.500000000 35.500000000 -25.500000000 -21.500000000 35.500000000 -25.500000000 -20.500000000 35.500000000 -25.500000000 -19.500000000 35.500000000 -25.500000000 -18.500000000 35.500000000 -25.500000000 -17.500000000 35.500000000 -25.500000000 -16.500000000 35.500000000 -25.500000000 -15.500000000 35.500000000 -25.500000000 -14.500000000 35.500000000 -25.500000000 -13.500000000 35.500000000 -25.500000000 -12.500000000 35.500000000 -25.500000000 -11.500000000 35.500000000 -25.500000000 -10.500000000 35.500000000 -25.500000000 -9.500000000 35.500000000 -25.500000000 -8.500000000 35.500000000 -25.500000000 -7.500000000 35.500000000 -25.500000000 -6.500000000 35.500000000 -25.500000000 -5.500000000 35.500000000 -25.500000000 -4.500000000 35.500000000 -25.500000000 -3.500000000 35.500000000 -25.500000000 -2.500000000 35.500000000 -25.500000000 -1.500000000 35.500000000 -25.500000000 -0.500000000 35.500000000 -25.500000000 0.500000000 35.500000000 -25.500000000 1.500000000 35.500000000 -25.500000000 2.500000000 35.500000000 -25.500000000 3.500000000 35.500000000 -25.500000000 4.500000000 35.500000000 -25.500000000 5.500000000 35.500000000 -25.500000000 6.500000000 35.500000000 -25.500000000 7.500000000 35.500000000 -25.500000000 8.500000000 35.500000000 -25.500000000 9.500000000 35.500000000 
-25.500000000 10.500000000 35.500000000 -25.500000000 11.500000000 35.500000000 -25.500000000 12.500000000 35.500000000 -25.500000000 13.500000000 35.500000000 -25.500000000 14.500000000 35.500000000 -25.500000000 15.500000000 35.500000000 -25.500000000 16.500000000 35.500000000 -25.500000000 17.500000000 35.500000000 -25.500000000 18.500000000 35.500000000 -25.500000000 19.500000000 35.500000000 -25.500000000 20.500000000 35.500000000 -25.500000000 21.500000000 35.500000000 -25.500000000 22.500000000 35.500000000 -25.500000000 23.500000000 35.500000000 -25.500000000 24.500000000 35.500000000 -25.500000000 25.499996185 35.499996185 -25.500000000 26.499954224 35.499954224 -25.500000000 27.499591827 35.499591827 -25.500000000 28.497470856 35.497474670 -25.500000000 29.488407135 35.488403320 -25.500000000 30.458978653 35.458976746 -25.500000000 31.384418488 35.384407043 -25.499996185 32.233215332 35.233203888 -25.499986649 32.981086731 34.981086731 -25.499977112 -33.981101990 -35.981101990 -24.499998093 -33.233226776 -36.233222961 -24.500000000 -32.384422302 -36.384418488 -24.500000000 -31.458978653 -36.458980560 -24.500000000 -30.488407135 -36.488403320 -24.500000000 -29.497472763 -36.497474670 -24.500000000 -28.499593735 -36.499591827 -24.500000000 -27.499954224 -36.499954224 -24.500000000 -26.499996185 -36.499996185 -24.500000000 -25.500000000 -36.500000000 -24.500000000 -24.500000000 -36.500000000 -24.500000000 -23.500000000 -36.500000000 -24.500000000 -22.500000000 -36.500000000 -24.500000000 -21.500000000 -36.500000000 -24.500000000 -20.500000000 -36.500000000 -24.500000000 -19.500000000 -36.500000000 -24.500000000 -18.500000000 -36.500000000 -24.500000000 -17.500000000 -36.500000000 -24.500000000 -16.500000000 -36.500000000 -24.500000000 -15.500000000 -36.500000000 -24.500000000 -14.500000000 -36.500000000 -24.500000000 -13.500000000 -36.500000000 -24.500000000 -12.500000000 -36.500000000 -24.500000000 -11.500000000 -36.500000000 -24.500000000 -10.500000000 
-36.500000000 -24.500000000 -9.500000000 -36.500000000 -24.500000000 -8.500000000 -36.500000000 -24.500000000 -7.500000000 -36.500000000 -24.500000000 -6.500000000 -36.500000000 -24.500000000 -5.500000000 -36.500000000 -24.500000000 -4.500000000 -36.500000000 -24.500000000 -3.500000000 -36.500000000 -24.500000000 -2.500000000 -36.500000000 -24.500000000 -1.500000000 -36.500000000 -24.500000000 -0.500000000 -36.500000000 -24.500000000 0.500000000 -36.500000000 -24.500000000 1.500000000 -36.500000000 -24.500000000 2.500000000 -36.500000000 -24.500000000 3.500000000 -36.500000000 -24.500000000 4.500000000 -36.500000000 -24.500000000 5.500000000 -36.500000000 -24.500000000 6.500000000 -36.500000000 -24.500000000 7.500000000 -36.500000000 -24.500000000 8.500000000 -36.500000000 -24.500000000 9.500000000 -36.500000000 -24.500000000 10.500000000 -36.500000000 -24.500000000 11.500000000 -36.500000000 -24.500000000 12.500000000 -36.500000000 -24.500000000 13.500000000 -36.500000000 -24.500000000 14.500000000 -36.500000000 -24.500000000 15.500000000 -36.500000000 -24.500000000 16.500000000 -36.500000000 -24.500000000 17.500000000 -36.500000000 -24.500000000 18.500000000 -36.500000000 -24.500000000 19.500000000 -36.500000000 -24.500000000 20.500000000 -36.500000000 -24.500000000 21.500000000 -36.500000000 -24.500000000 22.500000000 -36.500000000 -24.500000000 23.500000000 -36.500000000 -24.500000000 24.500000000 -36.500000000 -24.500000000 25.499996185 -36.499996185 -24.500000000 26.499954224 -36.499954224 -24.500000000 27.499591827 -36.499591827 -24.500000000 28.497470856 -36.497467041 -24.500000000 29.488407135 -36.488403320 -24.500000000 30.458978653 -36.458980560 -24.500000000 31.384418488 -36.384418488 -24.500000000 32.233222961 -36.233222961 -24.499998093 32.981101990 -35.981101990 -24.499998093 -35.167961121 -35.167961121 -24.500000000 -34.622871399 -35.622871399 -24.500000000 33.622871399 -35.622871399 -24.500000000 34.167961121 -35.167961121 -24.500000000 
-35.981101990 -33.981101990 -24.499998093 -35.622871399 -34.622871399 -24.500000000 34.622871399 -34.622863770 -24.500000000 34.981101990 -33.981101990 -24.499998093 -36.233222961 -33.233222961 -24.499998093 35.233222961 -33.233226776 -24.500000000 -36.384418488 -32.384422302 -24.500000000 35.384418488 -32.384422302 -24.500000000 -36.458976746 -31.458978653 -24.500000000 35.458980560 -31.458978653 -24.500000000 -36.488403320 -30.488407135 -24.500000000 35.488403320 -30.488407135 -24.500000000 -36.497467041 -29.497472763 -24.500000000 35.497474670 -29.497472763 -24.500000000 -36.499591827 -28.499593735 -24.500000000 35.499591827 -28.499593735 -24.500000000 -36.499954224 -27.499954224 -24.500000000 35.499954224 -27.499954224 -24.500000000 -36.499996185 -26.499996185 -24.500000000 35.499996185 -26.499996185 -24.500000000 -36.500000000 -25.500000000 -24.500000000 35.500000000 -25.500000000 -24.500000000 -36.500000000 -24.500000000 -24.500000000 35.500000000 -24.500000000 -24.500000000 -36.500000000 -23.500000000 -24.500000000 35.500000000 -23.500000000 -24.500000000 -36.500000000 -22.500000000 -24.500000000 35.500000000 -22.500000000 -24.500000000 -36.500000000 -21.500000000 -24.500000000 35.500000000 -21.500000000 -24.500000000 -36.500000000 -20.500000000 -24.500000000 35.500000000 -20.500000000 -24.500000000 -36.500000000 -19.500000000 -24.500000000 35.500000000 -19.500000000 -24.500000000 -36.500000000 -18.500000000 -24.500000000 35.500000000 -18.500000000 -24.500000000 -36.500000000 -17.500000000 -24.500000000 35.500000000 -17.500000000 -24.500000000 -36.500000000 -16.500000000 -24.500000000 35.500000000 -16.500000000 -24.500000000 -36.500000000 -15.500000000 -24.500000000 35.500000000 -15.500000000 -24.500000000 -36.500000000 -14.500000000 -24.500000000 35.500000000 -14.500000000 -24.500000000 -36.500000000 -13.500000000 -24.500000000 35.500000000 -13.500000000 -24.500000000 -36.500000000 -12.500000000 -24.500000000 35.500000000 -12.500000000 -24.500000000 
-36.500000000 -11.500000000 -24.500000000 35.500000000 -11.500000000 -24.500000000 -36.500000000 -10.500000000 -24.500000000 35.500000000 -10.500000000 -24.500000000 -36.500000000 -9.500000000 -24.500000000 35.500000000 -9.500000000 -24.500000000 -36.500000000 -8.500000000 -24.500000000 35.500000000 -8.500000000 -24.500000000 -36.500000000 -7.500000000 -24.500000000 35.500000000 -7.500000000 -24.500000000 -36.500000000 -6.500000000 -24.500000000 35.500000000 -6.500000000 -24.500000000 -36.500000000 -5.500000000 -24.500000000 35.500000000 -5.500000000 -24.500000000 -36.500000000 -4.500000000 -24.500000000 35.500000000 -4.500000000 -24.500000000 -36.500000000 -3.500000000 -24.500000000 35.500000000 -3.500000000 -24.500000000 -36.500000000 -2.500000000 -24.500000000 35.500000000 -2.500000000 -24.500000000 -36.500000000 -1.500000000 -24.500000000 35.500000000 -1.500000000 -24.500000000 -36.500000000 -0.500000000 -24.500000000 35.500000000 -0.500000000 -24.500000000 -36.500000000 0.500000000 -24.500000000 35.500000000 0.500000000 -24.500000000 -36.500000000 1.500000000 -24.500000000 35.500000000 1.500000000 -24.500000000 -36.500000000 2.500000000 -24.500000000 35.500000000 2.500000000 -24.500000000 -36.500000000 3.500000000 -24.500000000 35.500000000 3.500000000 -24.500000000 -36.500000000 4.500000000 -24.500000000 35.500000000 4.500000000 -24.500000000 -36.500000000 5.500000000 -24.500000000 35.500000000 5.500000000 -24.500000000 -36.500000000 6.500000000 -24.500000000 35.500000000 6.500000000 -24.500000000 -36.500000000 7.500000000 -24.500000000 35.500000000 7.500000000 -24.500000000 -36.500000000 8.500000000 -24.500000000 35.500000000 8.500000000 -24.500000000 -36.500000000 9.500000000 -24.500000000 35.500000000 9.500000000 -24.500000000 -36.500000000 10.500000000 -24.500000000 35.500000000 10.500000000 -24.500000000 -36.500000000 11.500000000 -24.500000000 35.500000000 11.500000000 -24.500000000 -36.500000000 12.500000000 -24.500000000 35.500000000 12.500000000 
-24.500000000 -36.500000000 13.500000000 -24.500000000 35.500000000 13.500000000 -24.500000000 -36.500000000 14.500000000 -24.500000000 35.500000000 14.500000000 -24.500000000 -36.500000000 15.500000000 -24.500000000 35.500000000 15.500000000 -24.500000000 -36.500000000 16.500000000 -24.500000000 35.500000000 16.500000000 -24.500000000 -36.500000000 17.500000000 -24.500000000 35.500000000 17.500000000 -24.500000000 -36.500000000 18.500000000 -24.500000000 35.500000000 18.500000000 -24.500000000 -36.500000000 19.500000000 -24.500000000 35.500000000 19.500000000 -24.500000000 -36.500000000 20.500000000 -24.500000000 35.500000000 20.500000000 -24.500000000 -36.500000000 21.500000000 -24.500000000 35.500000000 21.500000000 -24.500000000 -36.500000000 22.500000000 -24.500000000 35.500000000 22.500000000 -24.500000000 -36.500000000 23.500000000 -24.500000000 35.500000000 23.500000000 -24.500000000 -36.500000000 24.500000000 -24.500000000 35.500000000 24.500000000 -24.500000000 -36.499996185 25.499996185 -24.500000000 35.499996185 25.499996185 -24.500000000 -36.499954224 26.499954224 -24.500000000 35.499954224 26.499954224 -24.500000000 -36.499591827 27.499591827 -24.500000000 35.499591827 27.499591827 -24.500000000 -36.497474670 28.497470856 -24.500000000 35.497467041 28.497470856 -24.500000000 -36.488403320 29.488407135 -24.500000000 35.488403320 29.488407135 -24.500000000 -36.458980560 30.458978653 -24.500000000 35.458980560 30.458978653 -24.500000000 -36.384418488 31.384418488 -24.500000000 35.384418488 31.384418488 -24.500000000 -36.233222961 32.233222961 -24.499998093 35.233222961 32.233222961 -24.499998093 -35.981101990 32.981101990 -24.499998093 -35.622871399 33.622871399 -24.500000000 34.622871399 33.622871399 -24.500000000 34.981101990 32.981101990 -24.499998093 -35.167961121 34.167961121 -24.500000000 -34.622871399 34.622871399 -24.500000000 33.622863770 34.622871399 -24.500000000 34.167961121 34.167961121 -24.500000000 -33.981101990 34.981101990 -24.499998093 
-33.233222961 35.233222961 -24.499998093 -32.384422302 35.384418488 -24.500000000 -31.458978653 35.458976746 -24.500000000 -30.488407135 35.488403320 -24.500000000 -29.497472763 35.497467041 -24.500000000 -28.499593735 35.499591827 -24.500000000 -27.499954224 35.499954224 -24.500000000 -26.499996185 35.499996185 -24.500000000 -25.500000000 35.500000000 -24.500000000 -24.500000000 35.500000000 -24.500000000 -23.500000000 35.500000000 -24.500000000 -22.500000000 35.500000000 -24.500000000 -21.500000000 35.500000000 -24.500000000 -20.500000000 35.500000000 -24.500000000 -19.500000000 35.500000000 -24.500000000 -18.500000000 35.500000000 -24.500000000 -17.500000000 35.500000000 -24.500000000 -16.500000000 35.500000000 -24.500000000 -15.500000000 35.500000000 -24.500000000 -14.500000000 35.500000000 -24.500000000 -13.500000000 35.500000000 -24.500000000 -12.500000000 35.500000000 -24.500000000 -11.500000000 35.500000000 -24.500000000 -10.500000000 35.500000000 -24.500000000 -9.500000000 35.500000000 -24.500000000 -8.500000000 35.500000000 -24.500000000 -7.500000000 35.500000000 -24.500000000 -6.500000000 35.500000000 -24.500000000 -5.500000000 35.500000000 -24.500000000 -4.500000000 35.500000000 -24.500000000 -3.500000000 35.500000000 -24.500000000 -2.500000000 35.500000000 -24.500000000 -1.500000000 35.500000000 -24.500000000 -0.500000000 35.500000000 -24.500000000 0.500000000 35.500000000 -24.500000000 1.500000000 35.500000000 -24.500000000 2.500000000 35.500000000 -24.500000000 3.500000000 35.500000000 -24.500000000 4.500000000 35.500000000 -24.500000000 5.500000000 35.500000000 -24.500000000 6.500000000 35.500000000 -24.500000000 7.500000000 35.500000000 -24.500000000 8.500000000 35.500000000 -24.500000000 9.500000000 35.500000000 -24.500000000 10.500000000 35.500000000 -24.500000000 11.500000000 35.500000000 -24.500000000 12.500000000 35.500000000 -24.500000000 13.500000000 35.500000000 -24.500000000 14.500000000 35.500000000 -24.500000000 15.500000000 35.500000000 
-24.500000000 16.500000000 35.500000000 -24.500000000 17.500000000 35.500000000 -24.500000000 18.500000000 35.500000000 -24.500000000 19.500000000 35.500000000 -24.500000000 20.500000000 35.500000000 -24.500000000 21.500000000 35.500000000 -24.500000000 22.500000000 35.500000000 -24.500000000 23.500000000 35.500000000 -24.500000000 24.500000000 35.500000000 -24.500000000 25.499996185 35.499996185 -24.500000000 26.499954224 35.499954224 -24.500000000 27.499591827 35.499591827 -24.500000000 28.497470856 35.497474670 -24.500000000 29.488407135 35.488403320 -24.500000000 30.458978653 35.458980560 -24.500000000 31.384418488 35.384418488 -24.500000000 32.233222961 35.233222961 -24.499998093 32.981101990 34.981101990 -24.499998093 -33.981101990 -35.981101990 -23.500000000 -33.233226776 -36.233222961 -23.500000000 -32.384422302 -36.384418488 -23.500000000 -31.458978653 -36.458980560 -23.500000000 -30.488407135 -36.488403320 -23.500000000 -29.497472763 -36.497474670 -23.500000000 -28.499593735 -36.499591827 -23.500000000 -27.499954224 -36.499954224 -23.500000000 -26.499996185 -36.499996185 -23.500000000 -25.500000000 -36.500000000 -23.500000000 -24.500000000 -36.500000000 -23.500000000 -23.500000000 -36.500000000 -23.500000000 -22.500000000 -36.500000000 -23.500000000 -21.500000000 -36.500000000 -23.500000000 -20.500000000 -36.500000000 -23.500000000 -19.500000000 -36.500000000 -23.500000000 -18.500000000 -36.500000000 -23.500000000 -17.500000000 -36.500000000 -23.500000000 -16.500000000 -36.500000000 -23.500000000 -15.500000000 -36.500000000 -23.500000000 -14.500000000 -36.500000000 -23.500000000 -13.500000000 -36.500000000 -23.500000000 -12.500000000 -36.500000000 -23.500000000 -11.500000000 -36.500000000 -23.500000000 -10.500000000 -36.500000000 -23.500000000 -9.500000000 -36.500000000 -23.500000000 -8.500000000 -36.500000000 -23.500000000 -7.500000000 -36.500000000 -23.500000000 -6.500000000 -36.500000000 -23.500000000 -5.500000000 -36.500000000 -23.500000000 
-4.500000000 -36.500000000 -23.500000000 -3.500000000 -36.500000000 -23.500000000 -2.500000000 -36.500000000 -23.500000000 -1.500000000 -36.500000000 -23.500000000 -0.500000000 -36.500000000 -23.500000000 0.500000000 -36.500000000 -23.500000000 1.500000000 -36.500000000 -23.500000000 2.500000000 -36.500000000 -23.500000000 3.500000000 -36.500000000 -23.500000000 4.500000000 -36.500000000 -23.500000000 5.500000000 -36.500000000 -23.500000000 6.500000000 -36.500000000 -23.500000000 7.500000000 -36.500000000 -23.500000000 8.500000000 -36.500000000 -23.500000000 9.500000000 -36.500000000 -23.500000000 10.500000000 -36.500000000 -23.500000000 11.500000000 -36.500000000 -23.500000000 12.500000000 -36.500000000 -23.500000000 13.500000000 -36.500000000 -23.500000000 14.500000000 -36.500000000 -23.500000000 15.500000000 -36.500000000 -23.500000000 16.500000000 -36.500000000 -23.500000000 17.500000000 -36.500000000 -23.500000000 18.500000000 -36.500000000 -23.500000000 19.500000000 -36.500000000 -23.500000000 20.500000000 -36.500000000 -23.500000000 21.500000000 -36.500000000 -23.500000000 22.500000000 -36.500000000 -23.500000000 23.500000000 -36.500000000 -23.500000000 24.500000000 -36.500000000 -23.500000000 25.499996185 -36.499996185 -23.500000000 26.499954224 -36.499954224 -23.500000000 27.499591827 -36.499591827 -23.500000000 28.497470856 -36.497467041 -23.500000000 29.488407135 -36.488403320 -23.500000000 30.458978653 -36.458980560 -23.500000000 31.384418488 -36.384422302 -23.500000000 32.233222961 -36.233222961 -23.500000000 32.981101990 -35.981101990 -23.500000000 -35.167964935 -35.167964935 -23.500000000 -34.622871399 -35.622871399 -23.500000000 33.622871399 -35.622871399 -23.500000000 34.167964935 -35.167964935 -23.500000000 -35.981101990 -33.981101990 -23.500000000 -35.622871399 -34.622871399 -23.500000000 34.622871399 -34.622871399 -23.500000000 34.981101990 -33.981101990 -23.500000000 -36.233222961 -33.233222961 -23.500000000 35.233222961 -33.233226776 
-23.500000000 -36.384418488 -32.384422302 -23.500000000 35.384418488 -32.384422302 -23.500000000 -36.458976746 -31.458978653 -23.500000000 35.458980560 -31.458978653 -23.500000000 -36.488403320 -30.488407135 -23.500000000 35.488403320 -30.488407135 -23.500000000 -36.497467041 -29.497472763 -23.500000000 35.497474670 -29.497472763 -23.500000000 -36.499591827 -28.499593735 -23.500000000 35.499591827 -28.499593735 -23.500000000 -36.499954224 -27.499954224 -23.500000000 35.499954224 -27.499954224 -23.500000000 -36.499996185 -26.499996185 -23.500000000 35.499996185 -26.499996185 -23.500000000 -36.500000000 -25.500000000 -23.500000000 35.500000000 -25.500000000 -23.500000000 -36.500000000 -24.500000000 -23.500000000 35.500000000 -24.500000000 -23.500000000 -36.500000000 -23.500000000 -23.500000000 35.500000000 -23.500000000 -23.500000000 -36.500000000 -22.500000000 -23.500000000 35.500000000 -22.500000000 -23.500000000 -36.500000000 -21.500000000 -23.500000000 35.500000000 -21.500000000 -23.500000000 -36.500000000 -20.500000000 -23.500000000 35.500000000 -20.500000000 -23.500000000 -36.500000000 -19.500000000 -23.500000000 35.500000000 -19.500000000 -23.500000000 -36.500000000 -18.500000000 -23.500000000 35.500000000 -18.500000000 -23.500000000 -36.500000000 -17.500000000 -23.500000000 35.500000000 -17.500000000 -23.500000000 -36.500000000 -16.500000000 -23.500000000 35.500000000 -16.500000000 -23.500000000 -36.500000000 -15.500000000 -23.500000000 35.500000000 -15.500000000 -23.500000000 -36.500000000 -14.500000000 -23.500000000 35.500000000 -14.500000000 -23.500000000 -36.500000000 -13.500000000 -23.500000000 35.500000000 -13.500000000 -23.500000000 -36.500000000 -12.500000000 -23.500000000 35.500000000 -12.500000000 -23.500000000 -36.500000000 -11.500000000 -23.500000000 35.500000000 -11.500000000 -23.500000000 -36.500000000 -10.500000000 -23.500000000 35.500000000 -10.500000000 -23.500000000 -36.500000000 -9.500000000 -23.500000000 35.500000000 -9.500000000 
-23.500000000 -36.500000000 -8.500000000 -23.500000000 35.500000000 -8.500000000 -23.500000000 -36.500000000 -7.500000000 -23.500000000 35.500000000 -7.500000000 -23.500000000 -36.500000000 -6.500000000 -23.500000000 35.500000000 -6.500000000 -23.500000000 -36.500000000 -5.500000000 -23.500000000 35.500000000 -5.500000000 -23.500000000 -36.500000000 -4.500000000 -23.500000000 35.500000000 -4.500000000 -23.500000000 -36.500000000 -3.500000000 -23.500000000 35.500000000 -3.500000000 -23.500000000 -36.500000000 -2.500000000 -23.500000000 35.500000000 -2.500000000 -23.500000000 -36.500000000 -1.500000000 -23.500000000 35.500000000 -1.500000000 -23.500000000 -36.500000000 -0.500000000 -23.500000000 35.500000000 -0.500000000 -23.500000000 -36.500000000 0.500000000 -23.500000000 35.500000000 0.500000000 -23.500000000 -36.500000000 1.500000000 -23.500000000 35.500000000 1.500000000 -23.500000000 -36.500000000 2.500000000 -23.500000000 35.500000000 2.500000000 -23.500000000 -36.500000000 3.500000000 -23.500000000 35.500000000 3.500000000 -23.500000000 -36.500000000 4.500000000 -23.500000000 35.500000000 4.500000000 -23.500000000 -36.500000000 5.500000000 -23.500000000 35.500000000 5.500000000 -23.500000000 -36.500000000 6.500000000 -23.500000000 35.500000000 6.500000000 -23.500000000 -36.500000000 7.500000000 -23.500000000 35.500000000 7.500000000 -23.500000000 -36.500000000 8.500000000 -23.500000000 35.500000000 8.500000000 -23.500000000 -36.500000000 9.500000000 -23.500000000 35.500000000 9.500000000 -23.500000000 -36.500000000 10.500000000 -23.500000000 35.500000000 10.500000000 -23.500000000 -36.500000000 11.500000000 -23.500000000 35.500000000 11.500000000 -23.500000000 -36.500000000 12.500000000 -23.500000000 35.500000000 12.500000000 -23.500000000 -36.500000000 13.500000000 -23.500000000 35.500000000 13.500000000 -23.500000000 -36.500000000 14.500000000 -23.500000000 35.500000000 14.500000000 -23.500000000 -36.500000000 15.500000000 -23.500000000 35.500000000 
15.500000000 -23.500000000 -36.500000000 16.500000000 -23.500000000 35.500000000 16.500000000 -23.500000000 -36.500000000 17.500000000 -23.500000000 35.500000000 17.500000000 -23.500000000 -36.500000000 18.500000000 -23.500000000 35.500000000 18.500000000 -23.500000000 -36.500000000 19.500000000 -23.500000000 35.500000000 19.500000000 -23.500000000 -36.500000000 20.500000000 -23.500000000 35.500000000 20.500000000 -23.500000000 -36.500000000 21.500000000 -23.500000000 35.500000000 21.500000000 -23.500000000 -36.500000000 22.500000000 -23.500000000 35.500000000 22.500000000 -23.500000000 -36.500000000 23.500000000 -23.500000000 35.500000000 23.500000000 -23.500000000 -36.500000000 24.500000000 -23.500000000 35.500000000 24.500000000 -23.500000000 -36.499996185 25.499996185 -23.500000000 35.499996185 25.499996185 -23.500000000 -36.499954224 26.499954224 -23.500000000 35.499954224 26.499954224 -23.500000000 -36.499591827 27.499591827 -23.500000000 35.499591827 27.499591827 -23.500000000 -36.497474670 28.497470856 -23.500000000 35.497467041 28.497470856 -23.500000000 -36.488403320 29.488407135 -23.500000000 35.488403320 29.488407135 -23.500000000 -36.458980560 30.458978653 -23.500000000 35.458980560 30.458978653 -23.500000000 -36.384422302 31.384418488 -23.500000000 35.384422302 31.384418488 -23.500000000 -36.233222961 32.233222961 -23.500000000 35.233222961 32.233222961 -23.500000000 -35.981101990 32.981101990 -23.500000000 -35.622871399 33.622871399 -23.500000000 34.622871399 33.622871399 -23.500000000 34.981101990 32.981101990 -23.500000000 -35.167964935 34.167964935 -23.500000000 -34.622871399 34.622871399 -23.500000000 33.622871399 34.622871399 -23.500000000 34.167964935 34.167964935 -23.500000000 -33.981101990 34.981101990 -23.500000000 -33.233222961 35.233222961 -23.500000000 -32.384422302 35.384418488 -23.500000000 -31.458978653 35.458976746 -23.500000000 -30.488407135 35.488403320 -23.500000000 -29.497472763 35.497467041 -23.500000000 -28.499593735 
35.499591827 -23.500000000 -27.499954224 35.499954224 -23.500000000 -26.499996185 35.499996185 -23.500000000 -25.500000000 35.500000000 -23.500000000 -24.500000000 35.500000000 -23.500000000 -23.500000000 35.500000000 -23.500000000 -22.500000000 35.500000000 -23.500000000 -21.500000000 35.500000000 -23.500000000 -20.500000000 35.500000000 -23.500000000 -19.500000000 35.500000000 -23.500000000 -18.500000000 35.500000000 -23.500000000 -17.500000000 35.500000000 -23.500000000 -16.500000000 35.500000000 -23.500000000 -15.500000000 35.500000000 -23.500000000 -14.500000000 35.500000000 -23.500000000 -13.500000000 35.500000000 -23.500000000 -12.500000000 35.500000000 -23.500000000 -11.500000000 35.500000000 -23.500000000 -10.500000000 35.500000000 -23.500000000 -9.500000000 35.500000000 -23.500000000 -8.500000000 35.500000000 -23.500000000 -7.500000000 35.500000000 -23.500000000 -6.500000000 35.500000000 -23.500000000 -5.500000000 35.500000000 -23.500000000 -4.500000000 35.500000000 -23.500000000 -3.500000000 35.500000000 -23.500000000 -2.500000000 35.500000000 -23.500000000 -1.500000000 35.500000000 -23.500000000 -0.500000000 35.500000000 -23.500000000 0.500000000 35.500000000 -23.500000000 1.500000000 35.500000000 -23.500000000 2.500000000 35.500000000 -23.500000000 3.500000000 35.500000000 -23.500000000 4.500000000 35.500000000 -23.500000000 5.500000000 35.500000000 -23.500000000 6.500000000 35.500000000 -23.500000000 7.500000000 35.500000000 -23.500000000 8.500000000 35.500000000 -23.500000000 9.500000000 35.500000000 -23.500000000 10.500000000 35.500000000 -23.500000000 11.500000000 35.500000000 -23.500000000 12.500000000 35.500000000 -23.500000000 13.500000000 35.500000000 -23.500000000 14.500000000 35.500000000 -23.500000000 15.500000000 35.500000000 -23.500000000 16.500000000 35.500000000 -23.500000000 17.500000000 35.500000000 -23.500000000 18.500000000 35.500000000 -23.500000000 19.500000000 35.500000000 -23.500000000 20.500000000 35.500000000 -23.500000000 
21.500000000 35.500000000 -23.500000000 22.500000000 35.500000000 -23.500000000 23.500000000 35.500000000 -23.500000000 24.500000000 35.500000000 -23.500000000 25.499996185 35.499996185 -23.500000000 26.499954224 35.499954224 -23.500000000 27.499591827 35.499591827 -23.500000000 28.497470856 35.497474670 -23.500000000 29.488407135 35.488403320 -23.500000000 30.458978653 35.458980560 -23.500000000 31.384418488 35.384422302 -23.500000000 32.233222961 35.233222961 -23.500000000 32.981101990 34.981101990 -23.500000000 -33.981101990 -35.981101990 -22.500000000 -33.233226776 -36.233222961 -22.500000000 -32.384422302 -36.384418488 -22.500000000 -31.458978653 -36.458980560 -22.500000000 -30.488407135 -36.488403320 -22.500000000 -29.497472763 -36.497474670 -22.500000000 -28.499593735 -36.499591827 -22.500000000 -27.499954224 -36.499954224 -22.500000000 -26.499996185 -36.499996185 -22.500000000 -25.500000000 -36.500000000 -22.500000000 -24.500000000 -36.500000000 -22.500000000 -23.500000000 -36.500000000 -22.500000000 -22.500000000 -36.500000000 -22.500000000 -21.500000000 -36.500000000 -22.500000000 -20.500000000 -36.500000000 -22.500000000 -19.500000000 -36.500000000 -22.500000000 -18.500000000 -36.500000000 -22.500000000 -17.500000000 -36.500000000 -22.500000000 -16.500000000 -36.500000000 -22.500000000 -15.500000000 -36.500000000 -22.500000000 -14.500000000 -36.500000000 -22.500000000 -13.500000000 -36.500000000 -22.500000000 -12.500000000 -36.500000000 -22.500000000 -11.500000000 -36.500000000 -22.500000000 -10.500000000 -36.500000000 -22.500000000 -9.500000000 -36.500000000 -22.500000000 -8.500000000 -36.500000000 -22.500000000 -7.500000000 -36.500000000 -22.500000000 -6.500000000 -36.500000000 -22.500000000 -5.500000000 -36.500000000 -22.500000000 -4.500000000 -36.500000000 -22.500000000 -3.500000000 -36.500000000 -22.500000000 -2.500000000 -36.500000000 -22.500000000 -1.500000000 -36.500000000 -22.500000000 -0.500000000 -36.500000000 -22.500000000 0.500000000 
-36.500000000 -22.500000000 1.500000000 -36.500000000 -22.500000000 2.500000000 -36.500000000 -22.500000000 3.500000000 -36.500000000 -22.500000000 4.500000000 -36.500000000 -22.500000000 5.500000000 -36.500000000 -22.500000000 6.500000000 -36.500000000 -22.500000000 7.500000000 -36.500000000 -22.500000000 8.500000000 -36.500000000 -22.500000000 9.500000000 -36.500000000 -22.500000000 10.500000000 -36.500000000 -22.500000000 11.500000000 -36.500000000 -22.500000000 12.500000000 -36.500000000 -22.500000000 13.500000000 -36.500000000 -22.500000000 14.500000000 -36.500000000 -22.500000000 15.500000000 -36.500000000 -22.500000000 16.500000000 -36.500000000 -22.500000000 17.500000000 -36.500000000 -22.500000000 18.500000000 -36.500000000 -22.500000000 19.500000000 -36.500000000 -22.500000000 20.500000000 -36.500000000 -22.500000000 21.500000000 -36.500000000 -22.500000000 22.500000000 -36.500000000 -22.500000000 23.500000000 -36.500000000 -22.500000000 24.500000000 -36.500000000 -22.500000000 25.499996185 -36.499996185 -22.500000000 26.499954224 -36.499954224 -22.500000000 27.499591827 -36.499591827 -22.500000000 28.497470856 -36.497467041 -22.500000000 29.488407135 -36.488403320 -22.500000000 30.458978653 -36.458980560 -22.500000000 31.384418488 -36.384422302 -22.500000000 32.233222961 -36.233222961 -22.500000000 32.981101990 -35.981101990 -22.500000000 -35.167964935 -35.167964935 -22.500000000 -34.622871399 -35.622871399 -22.500000000 33.622871399 -35.622871399 -22.500000000 34.167964935 -35.167964935 -22.500000000 -35.981101990 -33.981101990 -22.500000000 -35.622871399 -34.622871399 -22.500000000 34.622871399 -34.622871399 -22.500000000 34.981101990 -33.981101990 -22.500000000 -36.233222961 -33.233222961 -22.500000000 35.233222961 -33.233226776 -22.500000000 -36.384418488 -32.384422302 -22.500000000 35.384418488 -32.384422302 -22.500000000 -36.458976746 -31.458978653 -22.500000000 35.458980560 -31.458978653 -22.500000000 -36.488403320 -30.488407135 -22.500000000 
35.488403320 -30.488407135 -22.500000000 -36.497467041 -29.497472763 -22.500000000 35.497474670 -29.497472763 -22.500000000 -36.499591827 -28.499593735 -22.500000000 35.499591827 -28.499593735 -22.500000000 -36.499954224 -27.499954224 -22.500000000 35.499954224 -27.499954224 -22.500000000 -36.499996185 -26.499996185 -22.500000000 35.499996185 -26.499996185 -22.500000000 -36.500000000 -25.500000000 -22.500000000 35.500000000 -25.500000000 -22.500000000 -36.500000000 -24.500000000 -22.500000000 35.500000000 -24.500000000 -22.500000000 -36.500000000 -23.500000000 -22.500000000 35.500000000 -23.500000000 -22.500000000 -36.500000000 -22.500000000 -22.500000000 35.500000000 -22.500000000 -22.500000000 -36.500000000 -21.500000000 -22.500000000 35.500000000 -21.500000000 -22.500000000 -36.500000000 -20.500000000 -22.500000000 35.500000000 -20.500000000 -22.500000000 -36.500000000 -19.500000000 -22.500000000 35.500000000 -19.500000000 -22.500000000 -36.500000000 -18.500000000 -22.500000000 35.500000000 -18.500000000 -22.500000000 -36.500000000 -17.500000000 -22.500000000 35.500000000 -17.500000000 -22.500000000 -36.500000000 -16.500000000 -22.500000000 35.500000000 -16.500000000 -22.500000000 -36.500000000 -15.500000000 -22.500000000 35.500000000 -15.500000000 -22.500000000 -36.500000000 -14.500000000 -22.500000000 35.500000000 -14.500000000 -22.500000000 -36.500000000 -13.500000000 -22.500000000 35.500000000 -13.500000000 -22.500000000 -36.500000000 -12.500000000 -22.500000000 35.500000000 -12.500000000 -22.500000000 -36.500000000 -11.500000000 -22.500000000 35.500000000 -11.500000000 -22.500000000 -36.500000000 -10.500000000 -22.500000000 35.500000000 -10.500000000 -22.500000000 -36.500000000 -9.500000000 -22.500000000 35.500000000 -9.500000000 -22.500000000 -36.500000000 -8.500000000 -22.500000000 35.500000000 -8.500000000 -22.500000000 -36.500000000 -7.500000000 -22.500000000 35.500000000 -7.500000000 -22.500000000 -36.500000000 -6.500000000 -22.500000000 35.500000000 
-6.500000000 -22.500000000 -36.500000000 -5.500000000 -22.500000000 35.500000000 -5.500000000 -22.500000000 -36.500000000 -4.500000000 -22.500000000 35.500000000 -4.500000000 -22.500000000 -36.500000000 -3.500000000 -22.500000000 35.500000000 -3.500000000 -22.500000000 -36.500000000 -2.500000000 -22.500000000 35.500000000 -2.500000000 -22.500000000 -36.500000000 -1.500000000 -22.500000000 35.500000000 -1.500000000 -22.500000000 -36.500000000 -0.500000000 -22.500000000 35.500000000 -0.500000000 -22.500000000 -36.500000000 0.500000000 -22.500000000 35.500000000 0.500000000 -22.500000000 -36.500000000 1.500000000 -22.500000000 35.500000000 1.500000000 -22.500000000 -36.500000000 2.500000000 -22.500000000 35.500000000 2.500000000 -22.500000000 -36.500000000 3.500000000 -22.500000000 35.500000000 3.500000000 -22.500000000 -36.500000000 4.500000000 -22.500000000 35.500000000 4.500000000 -22.500000000 -36.500000000 5.500000000 -22.500000000 35.500000000 5.500000000 -22.500000000 -36.500000000 6.500000000 -22.500000000 35.500000000 6.500000000 -22.500000000 -36.500000000 7.500000000 -22.500000000 35.500000000 7.500000000 -22.500000000 -36.500000000 8.500000000 -22.500000000 35.500000000 8.500000000 -22.500000000 -36.500000000 9.500000000 -22.500000000 35.500000000 9.500000000 -22.500000000 -36.500000000 10.500000000 -22.500000000 35.500000000 10.500000000 -22.500000000 -36.500000000 11.500000000 -22.500000000 35.500000000 11.500000000 -22.500000000 -36.500000000 12.500000000 -22.500000000 35.500000000 12.500000000 -22.500000000 -36.500000000 13.500000000 -22.500000000 35.500000000 13.500000000 -22.500000000 -36.500000000 14.500000000 -22.500000000 35.500000000 14.500000000 -22.500000000 -36.500000000 15.500000000 -22.500000000 35.500000000 15.500000000 -22.500000000 -36.500000000 16.500000000 -22.500000000 35.500000000 16.500000000 -22.500000000 -36.500000000 17.500000000 -22.500000000 35.500000000 17.500000000 -22.500000000 -36.500000000 18.500000000 -22.500000000 
35.500000000 18.500000000 -22.500000000 -36.500000000 19.500000000 -22.500000000 35.500000000 19.500000000 -22.500000000 -36.500000000 20.500000000 -22.500000000 35.500000000 20.500000000 -22.500000000 -36.500000000 21.500000000 -22.500000000 35.500000000 21.500000000 -22.500000000 -36.500000000 22.500000000 -22.500000000 35.500000000 22.500000000 -22.500000000 -36.500000000 23.500000000 -22.500000000 35.500000000 23.500000000 -22.500000000 -36.500000000 24.500000000 -22.500000000 35.500000000 24.500000000 -22.500000000 -36.499996185 25.499996185 -22.500000000 35.499996185 25.499996185 -22.500000000 -36.499954224 26.499954224 -22.500000000 35.499954224 26.499954224 -22.500000000 -36.499591827 27.499591827 -22.500000000 35.499591827 27.499591827 -22.500000000 -36.497474670 28.497470856 -22.500000000 35.497467041 28.497470856 -22.500000000 -36.488403320 29.488407135 -22.500000000 35.488403320 29.488407135 -22.500000000 -36.458980560 30.458978653 -22.500000000 35.458980560 30.458978653 -22.500000000 -36.384422302 31.384418488 -22.500000000 35.384422302 31.384418488 -22.500000000 -36.233222961 32.233222961 -22.500000000 35.233222961 32.233222961 -22.500000000 -35.981101990 32.981101990 -22.500000000 -35.622871399 33.622871399 -22.500000000 34.622871399 33.622871399 -22.500000000 34.981101990 32.981101990 -22.500000000 -35.167964935 34.167964935 -22.500000000 -34.622871399 34.622871399 -22.500000000 33.622871399 34.622871399 -22.500000000 34.167964935 34.167964935 -22.500000000 -33.981101990 34.981101990 -22.500000000 -33.233222961 35.233222961 -22.500000000 -32.384422302 35.384418488 -22.500000000 -31.458978653 35.458976746 -22.500000000 -30.488407135 35.488403320 -22.500000000 -29.497472763 35.497467041 -22.500000000 -28.499593735 35.499591827 -22.500000000 -27.499954224 35.499954224 -22.500000000 -26.499996185 35.499996185 -22.500000000 -25.500000000 35.500000000 -22.500000000 -24.500000000 35.500000000 -22.500000000 -23.500000000 35.500000000 -22.500000000 
-22.500000000 35.500000000 -22.500000000 -21.500000000 35.500000000 -22.500000000 -20.500000000 35.500000000 -22.500000000 -19.500000000 35.500000000 -22.500000000 -18.500000000 35.500000000 -22.500000000 -17.500000000 35.500000000 -22.500000000 -16.500000000 35.500000000 -22.500000000 -15.500000000 35.500000000 -22.500000000 -14.500000000 35.500000000 -22.500000000 -13.500000000 35.500000000 -22.500000000 -12.500000000 35.500000000 -22.500000000 -11.500000000 35.500000000 -22.500000000 -10.500000000 35.500000000 -22.500000000 -9.500000000 35.500000000 -22.500000000 -8.500000000 35.500000000 -22.500000000 -7.500000000 35.500000000 -22.500000000 -6.500000000 35.500000000 -22.500000000 -5.500000000 35.500000000 -22.500000000 -4.500000000 35.500000000 -22.500000000 -3.500000000 35.500000000 -22.500000000 -2.500000000 35.500000000 -22.500000000 -1.500000000 35.500000000 -22.500000000 -0.500000000 35.500000000 -22.500000000 0.500000000 35.500000000 -22.500000000 1.500000000 35.500000000 -22.500000000 2.500000000 35.500000000 -22.500000000 3.500000000 35.500000000 -22.500000000 4.500000000 35.500000000 -22.500000000 5.500000000 35.500000000 -22.500000000 6.500000000 35.500000000 -22.500000000 7.500000000 35.500000000 -22.500000000 8.500000000 35.500000000 -22.500000000 9.500000000 35.500000000 -22.500000000 10.500000000 35.500000000 -22.500000000 11.500000000 35.500000000 -22.500000000 12.500000000 35.500000000 -22.500000000 13.500000000 35.500000000 -22.500000000 14.500000000 35.500000000 -22.500000000 15.500000000 35.500000000 -22.500000000 16.500000000 35.500000000 -22.500000000 17.500000000 35.500000000 -22.500000000 18.500000000 35.500000000 -22.500000000 19.500000000 35.500000000 -22.500000000 20.500000000 35.500000000 -22.500000000 21.500000000 35.500000000 -22.500000000 22.500000000 35.500000000 -22.500000000 23.500000000 35.500000000 -22.500000000 24.500000000 35.500000000 -22.500000000 25.499996185 35.499996185 -22.500000000 26.499954224 35.499954224 
-22.500000000 27.499591827 35.499591827 -22.500000000 28.497470856 35.497474670 -22.500000000 29.488407135 35.488403320 -22.500000000 30.458978653 35.458980560 -22.500000000 31.384418488 35.384422302 -22.500000000 32.233222961 35.233222961 -22.500000000 32.981101990 34.981101990 -22.500000000 -33.981101990 -35.981101990 -21.500000000 -33.233226776 -36.233222961 -21.500000000 -32.384422302 -36.384418488 -21.500000000 -31.458978653 -36.458980560 -21.500000000 -30.488407135 -36.488403320 -21.500000000 -29.497472763 -36.497474670 -21.500000000 -28.499593735 -36.499591827 -21.500000000 -27.499954224 -36.499954224 -21.500000000 -26.499996185 -36.499996185 -21.500000000 -25.500000000 -36.500000000 -21.500000000 -24.500000000 -36.500000000 -21.500000000 -23.500000000 -36.500000000 -21.500000000 -22.500000000 -36.500000000 -21.500000000 -21.500000000 -36.500000000 -21.500000000 -20.500000000 -36.500000000 -21.500000000 -19.500000000 -36.500000000 -21.500000000 -18.500000000 -36.500000000 -21.500000000 -17.500000000 -36.500000000 -21.500000000 -16.500000000 -36.500000000 -21.500000000 -15.500000000 -36.500000000 -21.500000000 -14.500000000 -36.500000000 -21.500000000 -13.500000000 -36.500000000 -21.500000000 -12.500000000 -36.500000000 -21.500000000 -11.500000000 -36.500000000 -21.500000000 -10.500000000 -36.500000000 -21.500000000 -9.500000000 -36.500000000 -21.500000000 -8.500000000 -36.500000000 -21.500000000 -7.500000000 -36.500000000 -21.500000000 -6.500000000 -36.500000000 -21.500000000 -5.500000000 -36.500000000 -21.500000000 -4.500000000 -36.500000000 -21.500000000 -3.500000000 -36.500000000 -21.500000000 -2.500000000 -36.500000000 -21.500000000 -1.500000000 -36.500000000 -21.500000000 -0.500000000 -36.500000000 -21.500000000 0.500000000 -36.500000000 -21.500000000 1.500000000 -36.500000000 -21.500000000 2.500000000 -36.500000000 -21.500000000 3.500000000 -36.500000000 -21.500000000 4.500000000 -36.500000000 -21.500000000 5.500000000 -36.500000000 -21.500000000 
6.500000000 -36.500000000 -21.500000000 7.500000000 -36.500000000 -21.500000000 8.500000000 -36.500000000 -21.500000000 9.500000000 -36.500000000 -21.500000000 10.500000000 -36.500000000 -21.500000000 11.500000000 -36.500000000 -21.500000000 12.500000000 -36.500000000 -21.500000000 13.500000000 -36.500000000 -21.500000000 14.500000000 -36.500000000 -21.500000000 15.500000000 -36.500000000 -21.500000000 16.500000000 -36.500000000 -21.500000000 17.500000000 -36.500000000 -21.500000000 18.500000000 -36.500000000 -21.500000000 19.500000000 -36.500000000 -21.500000000 20.500000000 -36.500000000 -21.500000000 21.500000000 -36.500000000 -21.500000000 22.500000000 -36.500000000 -21.500000000 23.500000000 -36.500000000 -21.500000000 24.500000000 -36.500000000 -21.500000000 25.499996185 -36.499996185 -21.500000000 26.499954224 -36.499954224 -21.500000000 27.499591827 -36.499591827 -21.500000000 28.497470856 -36.497467041 -21.500000000 29.488407135 -36.488403320 -21.500000000 30.458978653 -36.458980560 -21.500000000 31.384418488 -36.384422302 -21.500000000 32.233222961 -36.233222961 -21.500000000 32.981101990 -35.981101990 -21.500000000 -35.167964935 -35.167964935 -21.500000000 -34.622871399 -35.622871399 -21.500000000 33.622871399 -35.622871399 -21.500000000 34.167964935 -35.167964935 -21.500000000 -35.981101990 -33.981101990 -21.500000000 -35.622871399 -34.622871399 -21.500000000 34.622871399 -34.622871399 -21.500000000 34.981101990 -33.981101990 -21.500000000 -36.233222961 -33.233222961 -21.500000000 35.233222961 -33.233226776 -21.500000000 -36.384418488 -32.384422302 -21.500000000 35.384418488 -32.384422302 -21.500000000 -36.458976746 -31.458978653 -21.500000000 35.458980560 -31.458978653 -21.500000000 -36.488403320 -30.488407135 -21.500000000 35.488403320 -30.488407135 -21.500000000 -36.497467041 -29.497472763 -21.500000000 35.497474670 -29.497472763 -21.500000000 -36.499591827 -28.499593735 -21.500000000 35.499591827 -28.499593735 -21.500000000 -36.499954224 
-27.499954224 -21.500000000 35.499954224 -27.499954224 -21.500000000 -36.499996185 -26.499996185 -21.500000000 35.499996185 -26.499996185 -21.500000000 -36.500000000 -25.500000000 -21.500000000 35.500000000 -25.500000000 -21.500000000 -36.500000000 -24.500000000 -21.500000000 35.500000000 -24.500000000 -21.500000000 -36.500000000 -23.500000000 -21.500000000 35.500000000 -23.500000000 -21.500000000 -36.500000000 -22.500000000 -21.500000000 35.500000000 -22.500000000 -21.500000000 -36.500000000 -21.500000000 -21.500000000 35.500000000 -21.500000000 -21.500000000 -36.500000000 -20.500000000 -21.500000000 35.500000000 -20.500000000 -21.500000000 -36.500000000 -19.500000000 -21.500000000 35.500000000 -19.500000000 -21.500000000 -36.500000000 -18.500000000 -21.500000000 35.500000000 -18.500000000 -21.500000000 -36.500000000 -17.500000000 -21.500000000 35.500000000 -17.500000000 -21.500000000 -36.500000000 -16.500000000 -21.500000000 35.500000000 -16.500000000 -21.500000000 -36.500000000 -15.500000000 -21.500000000 35.500000000 -15.500000000 -21.500000000 -36.500000000 -14.500000000 -21.500000000 35.500000000 -14.500000000 -21.500000000 -36.500000000 -13.500000000 -21.500000000 35.500000000 -13.500000000 -21.500000000 -36.500000000 -12.500000000 -21.500000000 35.500000000 -12.500000000 -21.500000000 -36.500000000 -11.500000000 -21.500000000 35.500000000 -11.500000000 -21.500000000 -36.500000000 -10.500000000 -21.500000000 35.500000000 -10.500000000 -21.500000000 -36.500000000 -9.500000000 -21.500000000 35.500000000 -9.500000000 -21.500000000 -36.500000000 -8.500000000 -21.500000000 35.500000000 -8.500000000 -21.500000000 -36.500000000 -7.500000000 -21.500000000 35.500000000 -7.500000000 -21.500000000 -36.500000000 -6.500000000 -21.500000000 35.500000000 -6.500000000 -21.500000000 -36.500000000 -5.500000000 -21.500000000 35.500000000 -5.500000000 -21.500000000 -36.500000000 -4.500000000 -21.500000000 35.500000000 -4.500000000 -21.500000000 -36.500000000 -3.500000000 
-21.500000000 35.500000000 -3.500000000 -21.500000000 -36.500000000 -2.500000000 -21.500000000 35.500000000 -2.500000000 -21.500000000 -36.500000000 -1.500000000 -21.500000000 35.500000000 -1.500000000 -21.500000000 -36.500000000 -0.500000000 -21.500000000 35.500000000 -0.500000000 -21.500000000 -36.500000000 0.500000000 -21.500000000 35.500000000 0.500000000 -21.500000000 -36.500000000 1.500000000 -21.500000000 35.500000000 1.500000000 -21.500000000 -36.500000000 2.500000000 -21.500000000 35.500000000 2.500000000 -21.500000000 -36.500000000 3.500000000 -21.500000000 35.500000000 3.500000000 -21.500000000 -36.500000000 4.500000000 -21.500000000 35.500000000 4.500000000 -21.500000000 -36.500000000 5.500000000 -21.500000000 35.500000000 5.500000000 -21.500000000 -36.500000000 6.500000000 -21.500000000 35.500000000 6.500000000 -21.500000000 -36.500000000 7.500000000 -21.500000000 35.500000000 7.500000000 -21.500000000 -36.500000000 8.500000000 -21.500000000 35.500000000 8.500000000 -21.500000000 -36.500000000 9.500000000 -21.500000000 35.500000000 9.500000000 -21.500000000 -36.500000000 10.500000000 -21.500000000 35.500000000 10.500000000 -21.500000000 -36.500000000 11.500000000 -21.500000000 35.500000000 11.500000000 -21.500000000 -36.500000000 12.500000000 -21.500000000 35.500000000 12.500000000 -21.500000000 -36.500000000 13.500000000 -21.500000000 35.500000000 13.500000000 -21.500000000 -36.500000000 14.500000000 -21.500000000 35.500000000 14.500000000 -21.500000000 -36.500000000 15.500000000 -21.500000000 35.500000000 15.500000000 -21.500000000 -36.500000000 16.500000000 -21.500000000 35.500000000 16.500000000 -21.500000000 -36.500000000 17.500000000 -21.500000000 35.500000000 17.500000000 -21.500000000 -36.500000000 18.500000000 -21.500000000 35.500000000 18.500000000 -21.500000000 -36.500000000 19.500000000 -21.500000000 35.500000000 19.500000000 -21.500000000 -36.500000000 20.500000000 -21.500000000 35.500000000 20.500000000 -21.500000000 -36.500000000 
21.500000000 -21.500000000 35.500000000 21.500000000 -21.500000000 -36.500000000 22.500000000 -21.500000000 35.500000000 22.500000000 -21.500000000 -36.500000000 23.500000000 -21.500000000 35.500000000 23.500000000 -21.500000000 -36.500000000 24.500000000 -21.500000000 35.500000000 24.500000000 -21.500000000 -36.499996185 25.499996185 -21.500000000 35.499996185 25.499996185 -21.500000000 -36.499954224 26.499954224 -21.500000000 35.499954224 26.499954224 -21.500000000 -36.499591827 27.499591827 -21.500000000 35.499591827 27.499591827 -21.500000000 -36.497474670 28.497470856 -21.500000000 35.497467041 28.497470856 -21.500000000 -36.488403320 29.488407135 -21.500000000 35.488403320 29.488407135 -21.500000000 -36.458980560 30.458978653 -21.500000000 35.458980560 30.458978653 -21.500000000 -36.384422302 31.384418488 -21.500000000 35.384422302 31.384418488 -21.500000000 -36.233222961 32.233222961 -21.500000000 35.233222961 32.233222961 -21.500000000 -35.981101990 32.981101990 -21.500000000 -35.622871399 33.622871399 -21.500000000 34.622871399 33.622871399 -21.500000000 34.981101990 32.981101990 -21.500000000 -35.167964935 34.167964935 -21.500000000 -34.622871399 34.622871399 -21.500000000 33.622871399 34.622871399 -21.500000000 34.167964935 34.167964935 -21.500000000 -33.981101990 34.981101990 -21.500000000 -33.233222961 35.233222961 -21.500000000 -32.384422302 35.384418488 -21.500000000 -31.458978653 35.458976746 -21.500000000 -30.488407135 35.488403320 -21.500000000 -29.497472763 35.497467041 -21.500000000 -28.499593735 35.499591827 -21.500000000 -27.499954224 35.499954224 -21.500000000 -26.499996185 35.499996185 -21.500000000 -25.500000000 35.500000000 -21.500000000 -24.500000000 35.500000000 -21.500000000 -23.500000000 35.500000000 -21.500000000 -22.500000000 35.500000000 -21.500000000 -21.500000000 35.500000000 -21.500000000 -20.500000000 35.500000000 -21.500000000 -19.500000000 35.500000000 -21.500000000 -18.500000000 35.500000000 -21.500000000 -17.500000000 
35.500000000 -21.500000000 -16.500000000 35.500000000 -21.500000000 -15.500000000 35.500000000 -21.500000000 -14.500000000 35.500000000 -21.500000000 -13.500000000 35.500000000 -21.500000000 -12.500000000 35.500000000 -21.500000000 -11.500000000 35.500000000 -21.500000000 -10.500000000 35.500000000 -21.500000000 -9.500000000 35.500000000 -21.500000000 -8.500000000 35.500000000 -21.500000000 -7.500000000 35.500000000 -21.500000000 -6.500000000 35.500000000 -21.500000000 -5.500000000 35.500000000 -21.500000000 -4.500000000 35.500000000 -21.500000000 -3.500000000 35.500000000 -21.500000000 -2.500000000 35.500000000 -21.500000000 -1.500000000 35.500000000 -21.500000000 -0.500000000 35.500000000 -21.500000000 0.500000000 35.500000000 -21.500000000 1.500000000 35.500000000 -21.500000000 2.500000000 35.500000000 -21.500000000 3.500000000 35.500000000 -21.500000000 4.500000000 35.500000000 -21.500000000 5.500000000 35.500000000 -21.500000000 6.500000000 35.500000000 -21.500000000 7.500000000 35.500000000 -21.500000000 8.500000000 35.500000000 -21.500000000 9.500000000 35.500000000 -21.500000000 10.500000000 35.500000000 -21.500000000 11.500000000 35.500000000 -21.500000000 12.500000000 35.500000000 -21.500000000 13.500000000 35.500000000 -21.500000000 14.500000000 35.500000000 -21.500000000 15.500000000 35.500000000 -21.500000000 16.500000000 35.500000000 -21.500000000 17.500000000 35.500000000 -21.500000000 18.500000000 35.500000000 -21.500000000 19.500000000 35.500000000 -21.500000000 20.500000000 35.500000000 -21.500000000 21.500000000 35.500000000 -21.500000000 22.500000000 35.500000000 -21.500000000 23.500000000 35.500000000 -21.500000000 24.500000000 35.500000000 -21.500000000 25.499996185 35.499996185 -21.500000000 26.499954224 35.499954224 -21.500000000 27.499591827 35.499591827 -21.500000000 28.497470856 35.497474670 -21.500000000 29.488407135 35.488403320 -21.500000000 30.458978653 35.458980560 -21.500000000 31.384418488 35.384422302 -21.500000000 32.233222961 
35.233222961 -21.500000000 32.981101990 34.981101990 -21.500000000 -33.981101990 -35.981101990 -20.500000000 -33.233226776 -36.233222961 -20.500000000 -32.384422302 -36.384418488 -20.500000000 -31.458978653 -36.458980560 -20.500000000 -30.488407135 -36.488403320 -20.500000000 -29.497472763 -36.497474670 -20.500000000 -28.499593735 -36.499591827 -20.500000000 -27.499954224 -36.499954224 -20.500000000 -26.499996185 -36.499996185 -20.500000000 -25.500000000 -36.500000000 -20.500000000 -24.500000000 -36.500000000 -20.500000000 -23.500000000 -36.500000000 -20.500000000 -22.500000000 -36.500000000 -20.500000000 -21.500000000 -36.500000000 -20.500000000 -20.500000000 -36.500000000 -20.500000000 -19.500000000 -36.500000000 -20.500000000 -18.500000000 -36.500000000 -20.500000000 -17.500000000 -36.500000000 -20.500000000 -16.500000000 -36.500000000 -20.500000000 -15.500000000 -36.500000000 -20.500000000 -14.500000000 -36.500000000 -20.500000000 -13.500000000 -36.500000000 -20.500000000 -12.500000000 -36.500000000 -20.500000000 -11.500000000 -36.500000000 -20.500000000 -10.500000000 -36.500000000 -20.500000000 -9.500000000 -36.500000000 -20.500000000 -8.500000000 -36.500000000 -20.500000000 -7.500000000 -36.500000000 -20.500000000 -6.500000000 -36.500000000 -20.500000000 -5.500000000 -36.500000000 -20.500000000 -4.500000000 -36.500000000 -20.500000000 -3.500000000 -36.500000000 -20.500000000 -2.500000000 -36.500000000 -20.500000000 -1.500000000 -36.500000000 -20.500000000 -0.500000000 -36.500000000 -20.500000000 0.500000000 -36.500000000 -20.500000000 1.500000000 -36.500000000 -20.500000000 2.500000000 -36.500000000 -20.500000000 3.500000000 -36.500000000 -20.500000000 4.500000000 -36.500000000 -20.500000000 5.500000000 -36.500000000 -20.500000000 6.500000000 -36.500000000 -20.500000000 7.500000000 -36.500000000 -20.500000000 8.500000000 -36.500000000 -20.500000000 9.500000000 -36.500000000 -20.500000000 10.500000000 -36.500000000 -20.500000000 11.500000000 -36.500000000 
-20.500000000 12.500000000 -36.500000000 -20.500000000 13.500000000 -36.500000000 -20.500000000 14.500000000 -36.500000000 -20.500000000 15.500000000 -36.500000000 -20.500000000 16.500000000 -36.500000000 -20.500000000 17.500000000 -36.500000000 -20.500000000 18.500000000 -36.500000000 -20.500000000 19.500000000 -36.500000000 -20.500000000 20.500000000 -36.500000000 -20.500000000 21.500000000 -36.500000000 -20.500000000 22.500000000 -36.500000000 -20.500000000 23.500000000 -36.500000000 -20.500000000 24.500000000 -36.500000000 -20.500000000 25.499996185 -36.499996185 -20.500000000 26.499954224 -36.499954224 -20.500000000 27.499591827 -36.499591827 -20.500000000 28.497470856 -36.497467041 -20.500000000 29.488407135 -36.488403320 -20.500000000 30.458978653 -36.458980560 -20.500000000 31.384418488 -36.384422302 -20.500000000 32.233222961 -36.233222961 -20.500000000 32.981101990 -35.981101990 -20.500000000 -35.167964935 -35.167964935 -20.500000000 -34.622871399 -35.622871399 -20.500000000 33.622871399 -35.622871399 -20.500000000 34.167964935 -35.167964935 -20.500000000 -35.981101990 -33.981101990 -20.500000000 -35.622871399 -34.622871399 -20.500000000 34.622871399 -34.622871399 -20.500000000 34.981101990 -33.981101990 -20.500000000 -36.233222961 -33.233222961 -20.500000000 35.233222961 -33.233226776 -20.500000000 -36.384418488 -32.384422302 -20.500000000 35.384418488 -32.384422302 -20.500000000 -36.458976746 -31.458978653 -20.500000000 35.458980560 -31.458978653 -20.500000000 -36.488403320 -30.488407135 -20.500000000 35.488403320 -30.488407135 -20.500000000 -36.497467041 -29.497472763 -20.500000000 35.497474670 -29.497472763 -20.500000000 -36.499591827 -28.499593735 -20.500000000 35.499591827 -28.499593735 -20.500000000 -36.499954224 -27.499954224 -20.500000000 35.499954224 -27.499954224 -20.500000000 -36.499996185 -26.499996185 -20.500000000 35.499996185 -26.499996185 -20.500000000 -36.500000000 -25.500000000 -20.500000000 35.500000000 -25.500000000 -20.500000000 
-36.500000000 -24.500000000 -20.500000000 35.500000000 -24.500000000 -20.500000000 -36.500000000 -23.500000000 -20.500000000 35.500000000 -23.500000000 -20.500000000 -36.500000000 -22.500000000 -20.500000000 35.500000000 -22.500000000 -20.500000000 -36.500000000 -21.500000000 -20.500000000 35.500000000 -21.500000000 -20.500000000 -36.500000000 -20.500000000 -20.500000000 35.500000000 -20.500000000 -20.500000000 -36.500000000 -19.500000000 -20.500000000 35.500000000 -19.500000000 -20.500000000 -36.500000000 -18.500000000 -20.500000000 35.500000000 -18.500000000 -20.500000000 -36.500000000 -17.500000000 -20.500000000 35.500000000 -17.500000000 -20.500000000 -36.500000000 -16.500000000 -20.500000000 35.500000000 -16.500000000 -20.500000000 -36.500000000 -15.500000000 -20.500000000 35.500000000 -15.500000000 -20.500000000 -36.500000000 -14.500000000 -20.500000000 35.500000000 -14.500000000 -20.500000000 -36.500000000 -13.500000000 -20.500000000 35.500000000 -13.500000000 -20.500000000 -36.500000000 -12.500000000 -20.500000000 35.500000000 -12.500000000 -20.500000000 -36.500000000 -11.500000000 -20.500000000 35.500000000 -11.500000000 -20.500000000 -36.500000000 -10.500000000 -20.500000000 35.500000000 -10.500000000 -20.500000000 -36.500000000 -9.500000000 -20.500000000 35.500000000 -9.500000000 -20.500000000 -36.500000000 -8.500000000 -20.500000000 35.500000000 -8.500000000 -20.500000000 -36.500000000 -7.500000000 -20.500000000 35.500000000 -7.500000000 -20.500000000 -36.500000000 -6.500000000 -20.500000000 35.500000000 -6.500000000 -20.500000000 -36.500000000 -5.500000000 -20.500000000 35.500000000 -5.500000000 -20.500000000 -36.500000000 -4.500000000 -20.500000000 35.500000000 -4.500000000 -20.500000000 -36.500000000 -3.500000000 -20.500000000 35.500000000 -3.500000000 -20.500000000 -36.500000000 -2.500000000 -20.500000000 35.500000000 -2.500000000 -20.500000000 -36.500000000 -1.500000000 -20.500000000 35.500000000 -1.500000000 -20.500000000 -36.500000000 
-0.500000000 -20.500000000 35.500000000 -0.500000000 -20.500000000 -36.500000000 0.500000000 -20.500000000 35.500000000 0.500000000 -20.500000000 -36.500000000 1.500000000 -20.500000000 35.500000000 1.500000000 -20.500000000 -36.500000000 2.500000000 -20.500000000 35.500000000 2.500000000 -20.500000000 -36.500000000 3.500000000 -20.500000000 35.500000000 3.500000000 -20.500000000 -36.500000000 4.500000000 -20.500000000 35.500000000 4.500000000 -20.500000000 -36.500000000 5.500000000 -20.500000000 35.500000000 5.500000000 -20.500000000 -36.500000000 6.500000000 -20.500000000 35.500000000 6.500000000 -20.500000000 -36.500000000 7.500000000 -20.500000000 35.500000000 7.500000000 -20.500000000 -36.500000000 8.500000000 -20.500000000 35.500000000 8.500000000 -20.500000000 -36.500000000 9.500000000 -20.500000000 35.500000000 9.500000000 -20.500000000 -36.500000000 10.500000000 -20.500000000 35.500000000 10.500000000 -20.500000000 -36.500000000 11.500000000 -20.500000000 35.500000000 11.500000000 -20.500000000 -36.500000000 12.500000000 -20.500000000 35.500000000 12.500000000 -20.500000000 -36.500000000 13.500000000 -20.500000000 35.500000000 13.500000000 -20.500000000 -36.500000000 14.500000000 -20.500000000 35.500000000 14.500000000 -20.500000000 -36.500000000 15.500000000 -20.500000000 35.500000000 15.500000000 -20.500000000 -36.500000000 16.500000000 -20.500000000 35.500000000 16.500000000 -20.500000000 -36.500000000 17.500000000 -20.500000000 35.500000000 17.500000000 -20.500000000 -36.500000000 18.500000000 -20.500000000 35.500000000 18.500000000 -20.500000000 -36.500000000 19.500000000 -20.500000000 35.500000000 19.500000000 -20.500000000 -36.500000000 20.500000000 -20.500000000 35.500000000 20.500000000 -20.500000000 -36.500000000 21.500000000 -20.500000000 35.500000000 21.500000000 -20.500000000 -36.500000000 22.500000000 -20.500000000 35.500000000 22.500000000 -20.500000000 -36.500000000 23.500000000 -20.500000000 35.500000000 23.500000000 -20.500000000 
-36.500000000 24.500000000 -20.500000000 35.500000000 24.500000000 -20.500000000 -36.499996185 25.499996185 -20.500000000 35.499996185 25.499996185 -20.500000000 -36.499954224 26.499954224 -20.500000000 35.499954224 26.499954224 -20.500000000 -36.499591827 27.499591827 -20.500000000 35.499591827 27.499591827 -20.500000000 -36.497474670 28.497470856 -20.500000000 35.497467041 28.497470856 -20.500000000 -36.488403320 29.488407135 -20.500000000 35.488403320 29.488407135 -20.500000000 -36.458980560 30.458978653 -20.500000000 35.458980560 30.458978653 -20.500000000 -36.384422302 31.384418488 -20.500000000 35.384422302 31.384418488 -20.500000000 -36.233222961 32.233222961 -20.500000000 35.233222961 32.233222961 -20.500000000 -35.981101990 32.981101990 -20.500000000 -35.622871399 33.622871399 -20.500000000 34.622871399 33.622871399 -20.500000000 34.981101990 32.981101990 -20.500000000 -35.167964935 34.167964935 -20.500000000 -34.622871399 34.622871399 -20.500000000 33.622871399 34.622871399 -20.500000000 34.167964935 34.167964935 -20.500000000 -33.981101990 34.981101990 -20.500000000 -33.233222961 35.233222961 -20.500000000 -32.384422302 35.384418488 -20.500000000 -31.458978653 35.458976746 -20.500000000 -30.488407135 35.488403320 -20.500000000 -29.497472763 35.497467041 -20.500000000 -28.499593735 35.499591827 -20.500000000 -27.499954224 35.499954224 -20.500000000 -26.499996185 35.499996185 -20.500000000 -25.500000000 35.500000000 -20.500000000 -24.500000000 35.500000000 -20.500000000 -23.500000000 35.500000000 -20.500000000 -22.500000000 35.500000000 -20.500000000 -21.500000000 35.500000000 -20.500000000 -20.500000000 35.500000000 -20.500000000 -19.500000000 35.500000000 -20.500000000 -18.500000000 35.500000000 -20.500000000 -17.500000000 35.500000000 -20.500000000 -16.500000000 35.500000000 -20.500000000 -15.500000000 35.500000000 -20.500000000 -14.500000000 35.500000000 -20.500000000 -13.500000000 35.500000000 -20.500000000 -12.500000000 35.500000000 -20.500000000 
-11.500000000 35.500000000 -20.500000000 -10.500000000 35.500000000 -20.500000000 -9.500000000 35.500000000 -20.500000000 -8.500000000 35.500000000 -20.500000000 -7.500000000 35.500000000 -20.500000000 -6.500000000 35.500000000 -20.500000000 -5.500000000 35.500000000 -20.500000000 -4.500000000 35.500000000 -20.500000000 -3.500000000 35.500000000 -20.500000000 -2.500000000 35.500000000 -20.500000000 -1.500000000 35.500000000 -20.500000000 -0.500000000 35.500000000 -20.500000000 0.500000000 35.500000000 -20.500000000 1.500000000 35.500000000 -20.500000000 2.500000000 35.500000000 -20.500000000 3.500000000 35.500000000 -20.500000000 4.500000000 35.500000000 -20.500000000 5.500000000 35.500000000 -20.500000000 6.500000000 35.500000000 -20.500000000 7.500000000 35.500000000 -20.500000000 8.500000000 35.500000000 -20.500000000 9.500000000 35.500000000 -20.500000000 10.500000000 35.500000000 -20.500000000 11.500000000 35.500000000 -20.500000000 12.500000000 35.500000000 -20.500000000 13.500000000 35.500000000 -20.500000000 14.500000000 35.500000000 -20.500000000 15.500000000 35.500000000 -20.500000000 16.500000000 35.500000000 -20.500000000 17.500000000 35.500000000 -20.500000000 18.500000000 35.500000000 -20.500000000 19.500000000 35.500000000 -20.500000000 20.500000000 35.500000000 -20.500000000 21.500000000 35.500000000 -20.500000000 22.500000000 35.500000000 -20.500000000 23.500000000 35.500000000 -20.500000000 24.500000000 35.500000000 -20.500000000 25.499996185 35.499996185 -20.500000000 26.499954224 35.499954224 -20.500000000 27.499591827 35.499591827 -20.500000000 28.497470856 35.497474670 -20.500000000 29.488407135 35.488403320 -20.500000000 30.458978653 35.458980560 -20.500000000 31.384418488 35.384422302 -20.500000000 32.233222961 35.233222961 -20.500000000 32.981101990 34.981101990 -20.500000000 -33.981101990 -35.981101990 -19.500000000 -33.233226776 -36.233222961 -19.500000000 -32.384422302 -36.384418488 -19.500000000 -31.458978653 -36.458980560 -19.500000000 
-30.488407135 -36.488403320 -19.500000000 -29.497472763 -36.497474670 -19.500000000 -28.499593735 -36.499591827 -19.500000000 -27.499954224 -36.499954224 -19.500000000 -26.499996185 -36.499996185 -19.500000000 -25.500000000 -36.500000000 -19.500000000 -24.500000000 -36.500000000 -19.500000000 -23.500000000 -36.500000000 -19.500000000 -22.500000000 -36.500000000 -19.500000000 -21.500000000 -36.500000000 -19.500000000 -20.500000000 -36.500000000 -19.500000000 -19.500000000 -36.500000000 -19.500000000 -18.500000000 -36.500000000 -19.500000000 -17.500000000 -36.500000000 -19.500000000 -16.500000000 -36.500000000 -19.500000000 -15.500000000 -36.500000000 -19.500000000 -14.500000000 -36.500000000 -19.500000000 -13.500000000 -36.500000000 -19.500000000 -12.500000000 -36.500000000 -19.500000000 -11.500000000 -36.500000000 -19.500000000 -10.500000000 -36.500000000 -19.500000000 -9.500000000 -36.500000000 -19.500000000 -8.500000000 -36.500000000 -19.500000000 -7.500000000 -36.500000000 -19.500000000 -6.500000000 -36.500000000 -19.500000000 -5.500000000 -36.500000000 -19.500000000 -4.500000000 -36.500000000 -19.500000000 -3.500000000 -36.500000000 -19.500000000 -2.500000000 -36.500000000 -19.500000000 -1.500000000 -36.500000000 -19.500000000 -0.500000000 -36.500000000 -19.500000000 0.500000000 -36.500000000 -19.500000000 1.500000000 -36.500000000 -19.500000000 2.500000000 -36.500000000 -19.500000000 3.500000000 -36.500000000 -19.500000000 4.500000000 -36.500000000 -19.500000000 5.500000000 -36.500000000 -19.500000000 6.500000000 -36.500000000 -19.500000000 7.500000000 -36.500000000 -19.500000000 8.500000000 -36.500000000 -19.500000000 9.500000000 -36.500000000 -19.500000000 10.500000000 -36.500000000 -19.500000000 11.500000000 -36.500000000 -19.500000000 12.500000000 -36.500000000 -19.500000000 13.500000000 -36.500000000 -19.500000000 14.500000000 -36.500000000 -19.500000000 15.500000000 -36.500000000 -19.500000000 16.500000000 -36.500000000 -19.500000000 17.500000000 
-36.500000000 -19.500000000 18.500000000 -36.500000000 -19.500000000 19.500000000 -36.500000000 -19.500000000 20.500000000 -36.500000000 -19.500000000 21.500000000 -36.500000000 -19.500000000 22.500000000 -36.500000000 -19.500000000 23.500000000 -36.500000000 -19.500000000 24.500000000 -36.500000000 -19.500000000 25.499996185 -36.499996185 -19.500000000 26.499954224 -36.499954224 -19.500000000 27.499591827 -36.499591827 -19.500000000 28.497470856 -36.497467041 -19.500000000 29.488407135 -36.488403320 -19.500000000 30.458978653 -36.458980560 -19.500000000 31.384418488 -36.384422302 -19.500000000 32.233222961 -36.233222961 -19.500000000 32.981101990 -35.981101990 -19.500000000 -35.167964935 -35.167964935 -19.500000000 -34.622871399 -35.622871399 -19.500000000 33.622871399 -35.622871399 -19.500000000 34.167964935 -35.167964935 -19.500000000 -35.981101990 -33.981101990 -19.500000000 -35.622871399 -34.622871399 -19.500000000 34.622871399 -34.622871399 -19.500000000 34.981101990 -33.981101990 -19.500000000 -36.233222961 -33.233222961 -19.500000000 35.233222961 -33.233226776 -19.500000000 -36.384418488 -32.384422302 -19.500000000 35.384418488 -32.384422302 -19.500000000 -36.458976746 -31.458978653 -19.500000000 35.458980560 -31.458978653 -19.500000000 -36.488403320 -30.488407135 -19.500000000 35.488403320 -30.488407135 -19.500000000 -36.497467041 -29.497472763 -19.500000000 35.497474670 -29.497472763 -19.500000000 -36.499591827 -28.499593735 -19.500000000 35.499591827 -28.499593735 -19.500000000 -36.499954224 -27.499954224 -19.500000000 35.499954224 -27.499954224 -19.500000000 -36.499996185 -26.499996185 -19.500000000 35.499996185 -26.499996185 -19.500000000 -36.500000000 -25.500000000 -19.500000000 35.500000000 -25.500000000 -19.500000000 -36.500000000 -24.500000000 -19.500000000 35.500000000 -24.500000000 -19.500000000 -36.500000000 -23.500000000 -19.500000000 35.500000000 -23.500000000 -19.500000000 -36.500000000 -22.500000000 -19.500000000 35.500000000 -22.500000000 
-19.500000000 -36.500000000 -21.500000000 -19.500000000 35.500000000 -21.500000000 -19.500000000 -36.500000000 -20.500000000 -19.500000000 35.500000000 -20.500000000 -19.500000000 -36.500000000 -19.500000000 -19.500000000 35.500000000 -19.500000000 -19.500000000 -36.500000000 -18.500000000 -19.500000000 35.500000000 -18.500000000 -19.500000000 -36.500000000 -17.500000000 -19.500000000 35.500000000 -17.500000000 -19.500000000 -36.500000000 -16.500000000 -19.500000000 35.500000000 -16.500000000 -19.500000000 -36.500000000 -15.500000000 -19.500000000 35.500000000 -15.500000000 -19.500000000 -36.500000000 -14.500000000 -19.500000000 35.500000000 -14.500000000 -19.500000000 -36.500000000 -13.500000000 -19.500000000 35.500000000 -13.500000000 -19.500000000 -36.500000000 -12.500000000 -19.500000000 35.500000000 -12.500000000 -19.500000000 -36.500000000 -11.500000000 -19.500000000 35.500000000 -11.500000000 -19.500000000 -36.500000000 -10.500000000 -19.500000000 35.500000000 -10.500000000 -19.500000000 -36.500000000 -9.500000000 -19.500000000 35.500000000 -9.500000000 -19.500000000 -36.500000000 -8.500000000 -19.500000000 35.500000000 -8.500000000 -19.500000000 -36.500000000 -7.500000000 -19.500000000 35.500000000 -7.500000000 -19.500000000 -36.500000000 -6.500000000 -19.500000000 35.500000000 -6.500000000 -19.500000000 -36.500000000 -5.500000000 -19.500000000 35.500000000 -5.500000000 -19.500000000 -36.500000000 -4.500000000 -19.500000000 35.500000000 -4.500000000 -19.500000000 -36.500000000 -3.500000000 -19.500000000 35.500000000 -3.500000000 -19.500000000 -36.500000000 -2.500000000 -19.500000000 35.500000000 -2.500000000 -19.500000000 -36.500000000 -1.500000000 -19.500000000 35.500000000 -1.500000000 -19.500000000 -36.500000000 -0.500000000 -19.500000000 35.500000000 -0.500000000 -19.500000000 -36.500000000 0.500000000 -19.500000000 35.500000000 0.500000000 -19.500000000 -36.500000000 1.500000000 -19.500000000 35.500000000 1.500000000 -19.500000000 -36.500000000 
2.500000000 -19.500000000 35.500000000 2.500000000 -19.500000000 -36.500000000 3.500000000 -19.500000000 35.500000000 3.500000000 -19.500000000 -36.500000000 4.500000000 -19.500000000 35.500000000 4.500000000 -19.500000000 -36.500000000 5.500000000 -19.500000000 35.500000000 5.500000000 -19.500000000 -36.500000000 6.500000000 -19.500000000 35.500000000 6.500000000 -19.500000000 -36.500000000 7.500000000 -19.500000000 35.500000000 7.500000000 -19.500000000 -36.500000000 8.500000000 -19.500000000 35.500000000 8.500000000 -19.500000000 -36.500000000 9.500000000 -19.500000000 35.500000000 9.500000000 -19.500000000 -36.500000000 10.500000000 -19.500000000 35.500000000 10.500000000 -19.500000000 -36.500000000 11.500000000 -19.500000000 35.500000000 11.500000000 -19.500000000 -36.500000000 12.500000000 -19.500000000 35.500000000 12.500000000 -19.500000000 -36.500000000 13.500000000 -19.500000000 35.500000000 13.500000000 -19.500000000 -36.500000000 14.500000000 -19.500000000 35.500000000 14.500000000 -19.500000000 -36.500000000 15.500000000 -19.500000000 35.500000000 15.500000000 -19.500000000 -36.500000000 16.500000000 -19.500000000 35.500000000 16.500000000 -19.500000000 -36.500000000 17.500000000 -19.500000000 35.500000000 17.500000000 -19.500000000 -36.500000000 18.500000000 -19.500000000 35.500000000 18.500000000 -19.500000000 -36.500000000 19.500000000 -19.500000000 35.500000000 19.500000000 -19.500000000 -36.500000000 20.500000000 -19.500000000 35.500000000 20.500000000 -19.500000000 -36.500000000 21.500000000 -19.500000000 35.500000000 21.500000000 -19.500000000 -36.500000000 22.500000000 -19.500000000 35.500000000 22.500000000 -19.500000000 -36.500000000 23.500000000 -19.500000000 35.500000000 23.500000000 -19.500000000 -36.500000000 24.500000000 -19.500000000 35.500000000 24.500000000 -19.500000000 -36.499996185 25.499996185 -19.500000000 35.499996185 25.499996185 -19.500000000 -36.499954224 26.499954224 -19.500000000 35.499954224 26.499954224 -19.500000000 
-36.499591827 27.499591827 -19.500000000 35.499591827 27.499591827 -19.500000000 -36.497474670 28.497470856 -19.500000000 35.497467041 28.497470856 -19.500000000 -36.488403320 29.488407135 -19.500000000 35.488403320 29.488407135 -19.500000000 -36.458980560 30.458978653 -19.500000000 35.458980560 30.458978653 -19.500000000 -36.384422302 31.384418488 -19.500000000 35.384422302 31.384418488 -19.500000000 -36.233222961 32.233222961 -19.500000000 35.233222961 32.233222961 -19.500000000 -35.981101990 32.981101990 -19.500000000 -35.622871399 33.622871399 -19.500000000 34.622871399 33.622871399 -19.500000000 34.981101990 32.981101990 -19.500000000 -35.167964935 34.167964935 -19.500000000 -34.622871399 34.622871399 -19.500000000 33.622871399 34.622871399 -19.500000000 34.167964935 34.167964935 -19.500000000 -33.981101990 34.981101990 -19.500000000 -33.233222961 35.233222961 -19.500000000 -32.384422302 35.384418488 -19.500000000 -31.458978653 35.458976746 -19.500000000 -30.488407135 35.488403320 -19.500000000 -29.497472763 35.497467041 -19.500000000 -28.499593735 35.499591827 -19.500000000 -27.499954224 35.499954224 -19.500000000 -26.499996185 35.499996185 -19.500000000 -25.500000000 35.500000000 -19.500000000 -24.500000000 35.500000000 -19.500000000 -23.500000000 35.500000000 -19.500000000 -22.500000000 35.500000000 -19.500000000 -21.500000000 35.500000000 -19.500000000 -20.500000000 35.500000000 -19.500000000 -19.500000000 35.500000000 -19.500000000 -18.500000000 35.500000000 -19.500000000 -17.500000000 35.500000000 -19.500000000 -16.500000000 35.500000000 -19.500000000 -15.500000000 35.500000000 -19.500000000 -14.500000000 35.500000000 -19.500000000 -13.500000000 35.500000000 -19.500000000 -12.500000000 35.500000000 -19.500000000 -11.500000000 35.500000000 -19.500000000 -10.500000000 35.500000000 -19.500000000 -9.500000000 35.500000000 -19.500000000 -8.500000000 35.500000000 -19.500000000 -7.500000000 35.500000000 -19.500000000 -6.500000000 35.500000000 -19.500000000 
-5.500000000 35.500000000 -19.500000000 -4.500000000 35.500000000 -19.500000000 -3.500000000 35.500000000 -19.500000000 -2.500000000 35.500000000 -19.500000000 -1.500000000 35.500000000 -19.500000000 -0.500000000 35.500000000 -19.500000000 0.500000000 35.500000000 -19.500000000 1.500000000 35.500000000 -19.500000000 2.500000000 35.500000000 -19.500000000 3.500000000 35.500000000 -19.500000000 4.500000000 35.500000000 -19.500000000 5.500000000 35.500000000 -19.500000000 6.500000000 35.500000000 -19.500000000 7.500000000 35.500000000 -19.500000000 8.500000000 35.500000000 -19.500000000 9.500000000 35.500000000 -19.500000000 10.500000000 35.500000000 -19.500000000 11.500000000 35.500000000 -19.500000000 12.500000000 35.500000000 -19.500000000 13.500000000 35.500000000 -19.500000000 14.500000000 35.500000000 -19.500000000 15.500000000 35.500000000 -19.500000000 16.500000000 35.500000000 -19.500000000 17.500000000 35.500000000 -19.500000000 18.500000000 35.500000000 -19.500000000 19.500000000 35.500000000 -19.500000000 20.500000000 35.500000000 -19.500000000 21.500000000 35.500000000 -19.500000000 22.500000000 35.500000000 -19.500000000 23.500000000 35.500000000 -19.500000000 24.500000000 35.500000000 -19.500000000 25.499996185 35.499996185 -19.500000000 26.499954224 35.499954224 -19.500000000 27.499591827 35.499591827 -19.500000000 28.497470856 35.497474670 -19.500000000 29.488407135 35.488403320 -19.500000000 30.458978653 35.458980560 -19.500000000 31.384418488 35.384422302 -19.500000000 32.233222961 35.233222961 -19.500000000 32.981101990 34.981101990 -19.500000000 -33.981101990 -35.981101990 -18.500000000 -33.233226776 -36.233222961 -18.500000000 -32.384422302 -36.384418488 -18.500000000 -31.458978653 -36.458980560 -18.500000000 -30.488407135 -36.488403320 -18.500000000 -29.497472763 -36.497474670 -18.500000000 -28.499593735 -36.499591827 -18.500000000 -27.499954224 -36.499954224 -18.500000000 -26.499996185 -36.499996185 -18.500000000 -25.500000000 -36.500000000 
-18.500000000 -24.500000000 -36.500000000 -18.500000000 -23.500000000 -36.500000000 -18.500000000 -22.500000000 -36.500000000 -18.500000000 -21.500000000 -36.500000000 -18.500000000 -20.500000000 -36.500000000 -18.500000000 -19.500000000 -36.500000000 -18.500000000 -18.500000000 -36.500000000 -18.500000000 -17.500000000 -36.500000000 -18.500000000 -16.500000000 -36.500000000 -18.500000000 -15.500000000 -36.500000000 -18.500000000 -14.500000000 -36.500000000 -18.500000000 -13.500000000 -36.500000000 -18.500000000 -12.500000000 -36.500000000 -18.500000000 -11.500000000 -36.500000000 -18.500000000 -10.500000000 -36.500000000 -18.500000000 -9.500000000 -36.500000000 -18.500000000 -8.500000000 -36.500000000 -18.500000000 -7.500000000 -36.500000000 -18.500000000 -6.500000000 -36.500000000 -18.500000000 -5.500000000 -36.500000000 -18.500000000 -4.500000000 -36.500000000 -18.500000000 -3.500000000 -36.500000000 -18.500000000 -2.500000000 -36.500000000 -18.500000000 -1.500000000 -36.500000000 -18.500000000 -0.500000000 -36.500000000 -18.500000000 0.500000000 -36.500000000 -18.500000000 1.500000000 -36.500000000 -18.500000000 2.500000000 -36.500000000 -18.500000000 3.500000000 -36.500000000 -18.500000000 4.500000000 -36.500000000 -18.500000000 5.500000000 -36.500000000 -18.500000000 6.500000000 -36.500000000 -18.500000000 7.500000000 -36.500000000 -18.500000000 8.500000000 -36.500000000 -18.500000000 9.500000000 -36.500000000 -18.500000000 10.500000000 -36.500000000 -18.500000000 11.500000000 -36.500000000 -18.500000000 12.500000000 -36.500000000 -18.500000000 13.500000000 -36.500000000 -18.500000000 14.500000000 -36.500000000 -18.500000000 15.500000000 -36.500000000 -18.500000000 16.500000000 -36.500000000 -18.500000000 17.500000000 -36.500000000 -18.500000000 18.500000000 -36.500000000 -18.500000000 19.500000000 -36.500000000 -18.500000000 20.500000000 -36.500000000 -18.500000000 21.500000000 -36.500000000 -18.500000000 22.500000000 -36.500000000 -18.500000000 23.500000000 
-36.500000000 -18.500000000 24.500000000 -36.500000000 -18.500000000 25.499996185 -36.499996185 -18.500000000 26.499954224 -36.499954224 -18.500000000 27.499591827 -36.499591827 -18.500000000 28.497470856 -36.497467041 -18.500000000 29.488407135 -36.488403320 -18.500000000 30.458978653 -36.458980560 -18.500000000 31.384418488 -36.384422302 -18.500000000 32.233222961 -36.233222961 -18.500000000 32.981101990 -35.981101990 -18.500000000 -35.167964935 -35.167964935 -18.500000000 -34.622871399 -35.622871399 -18.500000000 33.622871399 -35.622871399 -18.500000000 34.167964935 -35.167964935 -18.500000000 -35.981101990 -33.981101990 -18.500000000 -35.622871399 -34.622871399 -18.500000000 34.622871399 -34.622871399 -18.500000000 34.981101990 -33.981101990 -18.500000000 -36.233222961 -33.233222961 -18.500000000 35.233222961 -33.233226776 -18.500000000 -36.384418488 -32.384422302 -18.500000000 35.384418488 -32.384422302 -18.500000000 -36.458976746 -31.458978653 -18.500000000 35.458980560 -31.458978653 -18.500000000 -36.488403320 -30.488407135 -18.500000000 35.488403320 -30.488407135 -18.500000000 -36.497467041 -29.497472763 -18.500000000 35.497474670 -29.497472763 -18.500000000 -36.499591827 -28.499593735 -18.500000000 35.499591827 -28.499593735 -18.500000000 -36.499954224 -27.499954224 -18.500000000 35.499954224 -27.499954224 -18.500000000 -36.499996185 -26.499996185 -18.500000000 35.499996185 -26.499996185 -18.500000000 -36.500000000 -25.500000000 -18.500000000 35.500000000 -25.500000000 -18.500000000 -36.500000000 -24.500000000 -18.500000000 35.500000000 -24.500000000 -18.500000000 -36.500000000 -23.500000000 -18.500000000 35.500000000 -23.500000000 -18.500000000 -36.500000000 -22.500000000 -18.500000000 35.500000000 -22.500000000 -18.500000000 -36.500000000 -21.500000000 -18.500000000 35.500000000 -21.500000000 -18.500000000 -36.500000000 -20.500000000 -18.500000000 35.500000000 -20.500000000 -18.500000000 -36.500000000 -19.500000000 -18.500000000 35.500000000 
-19.500000000 -18.500000000 -36.500000000 -18.500000000 -18.500000000 35.500000000 -18.500000000 -18.500000000 -36.500000000 -17.500000000 -18.500000000 35.500000000 -17.500000000 -18.500000000 -36.500000000 -16.500000000 -18.500000000 35.500000000 -16.500000000 -18.500000000 -36.500000000 -15.500000000 -18.500000000 35.500000000 -15.500000000 -18.500000000 -36.500000000 -14.500000000 -18.500000000 35.500000000 -14.500000000 -18.500000000 -36.500000000 -13.500000000 -18.500000000 35.500000000 -13.500000000 -18.500000000 -36.500000000 -12.500000000 -18.500000000 35.500000000 -12.500000000 -18.500000000 -36.500000000 -11.500000000 -18.500000000 35.500000000 -11.500000000 -18.500000000 -36.500000000 -10.500000000 -18.500000000 35.500000000 -10.500000000 -18.500000000 -36.500000000 -9.500000000 -18.500000000 35.500000000 -9.500000000 -18.500000000 -36.500000000 -8.500000000 -18.500000000 35.500000000 -8.500000000 -18.500000000 -36.500000000 -7.500000000 -18.500000000 35.500000000 -7.500000000 -18.500000000 -36.500000000 -6.500000000 -18.500000000 35.500000000 -6.500000000 -18.500000000 -36.500000000 -5.500000000 -18.500000000 35.500000000 -5.500000000 -18.500000000 -36.500000000 -4.500000000 -18.500000000 35.500000000 -4.500000000 -18.500000000 -36.500000000 -3.500000000 -18.500000000 35.500000000 -3.500000000 -18.500000000 -36.500000000 -2.500000000 -18.500000000 35.500000000 -2.500000000 -18.500000000 -36.500000000 -1.500000000 -18.500000000 35.500000000 -1.500000000 -18.500000000 -36.500000000 -0.500000000 -18.500000000 35.500000000 -0.500000000 -18.500000000 -36.500000000 0.500000000 -18.500000000 35.500000000 0.500000000 -18.500000000 -36.500000000 1.500000000 -18.500000000 35.500000000 1.500000000 -18.500000000 -36.500000000 2.500000000 -18.500000000 35.500000000 2.500000000 -18.500000000 -36.500000000 3.500000000 -18.500000000 35.500000000 3.500000000 -18.500000000 -36.500000000 4.500000000 -18.500000000 35.500000000 4.500000000 -18.500000000 -36.500000000 
5.500000000 -18.500000000 35.500000000 5.500000000 -18.500000000 -36.500000000 6.500000000 -18.500000000 35.500000000 6.500000000 -18.500000000 -36.500000000 7.500000000 -18.500000000 35.500000000 7.500000000 -18.500000000 -36.500000000 8.500000000 -18.500000000 35.500000000 8.500000000 -18.500000000 -36.500000000 9.500000000 -18.500000000 35.500000000 9.500000000 -18.500000000 -36.500000000 10.500000000 -18.500000000 35.500000000 10.500000000 -18.500000000 -36.500000000 11.500000000 -18.500000000 35.500000000 11.500000000 -18.500000000 -36.500000000 12.500000000 -18.500000000 35.500000000 12.500000000 -18.500000000 -36.500000000 13.500000000 -18.500000000 35.500000000 13.500000000 -18.500000000 -36.500000000 14.500000000 -18.500000000 35.500000000 14.500000000 -18.500000000 -36.500000000 15.500000000 -18.500000000 35.500000000 15.500000000 -18.500000000 -36.500000000 16.500000000 -18.500000000 35.500000000 16.500000000 -18.500000000 -36.500000000 17.500000000 -18.500000000 35.500000000 17.500000000 -18.500000000 -36.500000000 18.500000000 -18.500000000 35.500000000 18.500000000 -18.500000000 -36.500000000 19.500000000 -18.500000000 35.500000000 19.500000000 -18.500000000 -36.500000000 20.500000000 -18.500000000 35.500000000 20.500000000 -18.500000000 -36.500000000 21.500000000 -18.500000000 35.500000000 21.500000000 -18.500000000 -36.500000000 22.500000000 -18.500000000 35.500000000 22.500000000 -18.500000000 -36.500000000 23.500000000 -18.500000000 35.500000000 23.500000000 -18.500000000 -36.500000000 24.500000000 -18.500000000 35.500000000 24.500000000 -18.500000000 -36.499996185 25.499996185 -18.500000000 35.499996185 25.499996185 -18.500000000 -36.499954224 26.499954224 -18.500000000 35.499954224 26.499954224 -18.500000000 -36.499591827 27.499591827 -18.500000000 35.499591827 27.499591827 -18.500000000 -36.497474670 28.497470856 -18.500000000 35.497467041 28.497470856 -18.500000000 -36.488403320 29.488407135 -18.500000000 35.488403320 29.488407135 
-18.500000000 -36.458980560 30.458978653 -18.500000000 35.458980560 30.458978653 -18.500000000 -36.384422302 31.384418488 -18.500000000 35.384422302 31.384418488 -18.500000000 -36.233222961 32.233222961 -18.500000000 35.233222961 32.233222961 -18.500000000 -35.981101990 32.981101990 -18.500000000 -35.622871399 33.622871399 -18.500000000 34.622871399 33.622871399 -18.500000000 34.981101990 32.981101990 -18.500000000 -35.167964935 34.167964935 -18.500000000 -34.622871399 34.622871399 -18.500000000 33.622871399 34.622871399 -18.500000000 34.167964935 34.167964935 -18.500000000 -33.981101990 34.981101990 -18.500000000 -33.233222961 35.233222961 -18.500000000 -32.384422302 35.384418488 -18.500000000 -31.458978653 35.458976746 -18.500000000 -30.488407135 35.488403320 -18.500000000 -29.497472763 35.497467041 -18.500000000 -28.499593735 35.499591827 -18.500000000 -27.499954224 35.499954224 -18.500000000 -26.499996185 35.499996185 -18.500000000 -25.500000000 35.500000000 -18.500000000 -24.500000000 35.500000000 -18.500000000 -23.500000000 35.500000000 -18.500000000 -22.500000000 35.500000000 -18.500000000 -21.500000000 35.500000000 -18.500000000 -20.500000000 35.500000000 -18.500000000 -19.500000000 35.500000000 -18.500000000 -18.500000000 35.500000000 -18.500000000 -17.500000000 35.500000000 -18.500000000 -16.500000000 35.500000000 -18.500000000 -15.500000000 35.500000000 -18.500000000 -14.500000000 35.500000000 -18.500000000 -13.500000000 35.500000000 -18.500000000 -12.500000000 35.500000000 -18.500000000 -11.500000000 35.500000000 -18.500000000 -10.500000000 35.500000000 -18.500000000 -9.500000000 35.500000000 -18.500000000 -8.500000000 35.500000000 -18.500000000 -7.500000000 35.500000000 -18.500000000 -6.500000000 35.500000000 -18.500000000 -5.500000000 35.500000000 -18.500000000 -4.500000000 35.500000000 -18.500000000 -3.500000000 35.500000000 -18.500000000 -2.500000000 35.500000000 -18.500000000 -1.500000000 35.500000000 -18.500000000 -0.500000000 35.500000000 
-18.500000000 0.500000000 35.500000000 -18.500000000 1.500000000 35.500000000 -18.500000000 2.500000000 35.500000000 -18.500000000 3.500000000 35.500000000 -18.500000000 4.500000000 35.500000000 -18.500000000 5.500000000 35.500000000 -18.500000000 6.500000000 35.500000000 -18.500000000 7.500000000 35.500000000 -18.500000000 8.500000000 35.500000000 -18.500000000 9.500000000 35.500000000 -18.500000000 10.500000000 35.500000000 -18.500000000 11.500000000 35.500000000 -18.500000000 12.500000000 35.500000000 -18.500000000 13.500000000 35.500000000 -18.500000000 14.500000000 35.500000000 -18.500000000 15.500000000 35.500000000 -18.500000000 16.500000000 35.500000000 -18.500000000 17.500000000 35.500000000 -18.500000000 18.500000000 35.500000000 -18.500000000 19.500000000 35.500000000 -18.500000000 20.500000000 35.500000000 -18.500000000 21.500000000 35.500000000 -18.500000000 22.500000000 35.500000000 -18.500000000 23.500000000 35.500000000 -18.500000000 24.500000000 35.500000000 -18.500000000 25.499996185 35.499996185 -18.500000000 26.499954224 35.499954224 -18.500000000 27.499591827 35.499591827 -18.500000000 28.497470856 35.497474670 -18.500000000 29.488407135 35.488403320 -18.500000000 30.458978653 35.458980560 -18.500000000 31.384418488 35.384422302 -18.500000000 32.233222961 35.233222961 -18.500000000 32.981101990 34.981101990 -18.500000000 -33.981101990 -35.981101990 -17.500000000 -33.233226776 -36.233222961 -17.500000000 -32.384422302 -36.384418488 -17.500000000 -31.458978653 -36.458980560 -17.500000000 -30.488407135 -36.488403320 -17.500000000 -29.497472763 -36.497474670 -17.500000000 -28.499593735 -36.499591827 -17.500000000 -27.499954224 -36.499954224 -17.500000000 -26.499996185 -36.499996185 -17.500000000 -25.500000000 -36.500000000 -17.500000000 -24.500000000 -36.500000000 -17.500000000 -23.500000000 -36.500000000 -17.500000000 -22.500000000 -36.500000000 -17.500000000 -21.500000000 -36.500000000 -17.500000000 -20.500000000 -36.500000000 -17.500000000 
-19.500000000 -36.500000000 -17.500000000 -18.500000000 -36.500000000 -17.500000000 -17.500000000 -36.500000000 -17.500000000 -16.500000000 -36.500000000 -17.500000000 -15.500000000 -36.500000000 -17.500000000 -14.500000000 -36.500000000 -17.500000000 -13.500000000 -36.500000000 -17.500000000 -12.500000000 -36.500000000 -17.500000000 -11.500000000 -36.500000000 -17.500000000 -10.500000000 -36.500000000 -17.500000000 -9.500000000 -36.500000000 -17.500000000 -8.500000000 -36.500000000 -17.500000000 -7.500000000 -36.500000000 -17.500000000 -6.500000000 -36.500000000 -17.500000000 -5.500000000 -36.500000000 -17.500000000 -4.500000000 -36.500000000 -17.500000000 -3.500000000 -36.500000000 -17.500000000 -2.500000000 -36.500000000 -17.500000000 -1.500000000 -36.500000000 -17.500000000 -0.500000000 -36.500000000 -17.500000000 0.500000000 -36.500000000 -17.500000000 1.500000000 -36.500000000 -17.500000000 2.500000000 -36.500000000 -17.500000000 3.500000000 -36.500000000 -17.500000000 4.500000000 -36.500000000 -17.500000000 5.500000000 -36.500000000 -17.500000000 6.500000000 -36.500000000 -17.500000000 7.500000000 -36.500000000 -17.500000000 8.500000000 -36.500000000 -17.500000000 9.500000000 -36.500000000 -17.500000000 10.500000000 -36.500000000 -17.500000000 11.500000000 -36.500000000 -17.500000000 12.500000000 -36.500000000 -17.500000000 13.500000000 -36.500000000 -17.500000000 14.500000000 -36.500000000 -17.500000000 15.500000000 -36.500000000 -17.500000000 16.500000000 -36.500000000 -17.500000000 17.500000000 -36.500000000 -17.500000000 18.500000000 -36.500000000 -17.500000000 19.500000000 -36.500000000 -17.500000000 20.500000000 -36.500000000 -17.500000000 21.500000000 -36.500000000 -17.500000000 22.500000000 -36.500000000 -17.500000000 23.500000000 -36.500000000 -17.500000000 24.500000000 -36.500000000 -17.500000000 25.499996185 -36.499996185 -17.500000000 26.499954224 -36.499954224 -17.500000000 27.499591827 -36.499591827 -17.500000000 28.497470856 -36.497467041 
-17.500000000 29.488407135 -36.488403320 -17.500000000 30.458978653 -36.458980560 -17.500000000 31.384418488 -36.384422302 -17.500000000 32.233222961 -36.233222961 -17.500000000 32.981101990 -35.981101990 -17.500000000 -35.167964935 -35.167964935 -17.500000000 -34.622871399 -35.622871399 -17.500000000 33.622871399 -35.622871399 -17.500000000 34.167964935 -35.167964935 -17.500000000 -35.981101990 -33.981101990 -17.500000000 -35.622871399 -34.622871399 -17.500000000 34.622871399 -34.622871399 -17.500000000 34.981101990 -33.981101990 -17.500000000 -36.233222961 -33.233222961 -17.500000000 35.233222961 -33.233226776 -17.500000000 -36.384418488 -32.384422302 -17.500000000 35.384418488 -32.384422302 -17.500000000 -36.458976746 -31.458978653 -17.500000000 35.458980560 -31.458978653 -17.500000000 -36.488403320 -30.488407135 -17.500000000 35.488403320 -30.488407135 -17.500000000 -36.497467041 -29.497472763 -17.500000000 35.497474670 -29.497472763 -17.500000000 -36.499591827 -28.499593735 -17.500000000 35.499591827 -28.499593735 -17.500000000 -36.499954224 -27.499954224 -17.500000000 35.499954224 -27.499954224 -17.500000000 -36.499996185 -26.499996185 -17.500000000 35.499996185 -26.499996185 -17.500000000 -36.500000000 -25.500000000 -17.500000000 35.500000000 -25.500000000 -17.500000000 -36.500000000 -24.500000000 -17.500000000 35.500000000 -24.500000000 -17.500000000 -36.500000000 -23.500000000 -17.500000000 35.500000000 -23.500000000 -17.500000000 -36.500000000 -22.500000000 -17.500000000 35.500000000 -22.500000000 -17.500000000 -36.500000000 -21.500000000 -17.500000000 35.500000000 -21.500000000 -17.500000000 -36.500000000 -20.500000000 -17.500000000 35.500000000 -20.500000000 -17.500000000 -36.500000000 -19.500000000 -17.500000000 35.500000000 -19.500000000 -17.500000000 -36.500000000 -18.500000000 -17.500000000 35.500000000 -18.500000000 -17.500000000 -36.500000000 -17.500000000 -17.500000000 35.500000000 -17.500000000 -17.500000000 -36.500000000 -16.500000000 
-17.500000000 35.500000000 -16.500000000 -17.500000000 -36.500000000 -15.500000000 -17.500000000 35.500000000 -15.500000000 -17.500000000 -36.500000000 -14.500000000 -17.500000000 35.500000000 -14.500000000 -17.500000000 -36.500000000 -13.500000000 -17.500000000 35.500000000 -13.500000000 -17.500000000 -36.500000000 -12.500000000 -17.500000000 35.500000000 -12.500000000 -17.500000000 -36.500000000 -11.500000000 -17.500000000 35.500000000 -11.500000000 -17.500000000 -36.500000000 -10.500000000 -17.500000000 35.500000000 -10.500000000 -17.500000000 -36.500000000 -9.500000000 -17.500000000 35.500000000 -9.500000000 -17.500000000 -36.500000000 -8.500000000 -17.500000000 35.500000000 -8.500000000 -17.500000000 -36.500000000 -7.500000000 -17.500000000 35.500000000 -7.500000000 -17.500000000 -36.500000000 -6.500000000 -17.500000000 35.500000000 -6.500000000 -17.500000000 -36.500000000 -5.500000000 -17.500000000 35.500000000 -5.500000000 -17.500000000 -36.500000000 -4.500000000 -17.500000000 35.500000000 -4.500000000 -17.500000000 -36.500000000 -3.500000000 -17.500000000 35.500000000 -3.500000000 -17.500000000 -36.500000000 -2.500000000 -17.500000000 35.500000000 -2.500000000 -17.500000000 -36.500000000 -1.500000000 -17.500000000 35.500000000 -1.500000000 -17.500000000 -36.500000000 -0.500000000 -17.500000000 35.500000000 -0.500000000 -17.500000000 -36.500000000 0.500000000 -17.500000000 35.500000000 0.500000000 -17.500000000 -36.500000000 1.500000000 -17.500000000 35.500000000 1.500000000 -17.500000000 -36.500000000 2.500000000 -17.500000000 35.500000000 2.500000000 -17.500000000 -36.500000000 3.500000000 -17.500000000 35.500000000 3.500000000 -17.500000000 -36.500000000 4.500000000 -17.500000000 35.500000000 4.500000000 -17.500000000 -36.500000000 5.500000000 -17.500000000 35.500000000 5.500000000 -17.500000000 -36.500000000 6.500000000 -17.500000000 35.500000000 6.500000000 -17.500000000 -36.500000000 7.500000000 -17.500000000 35.500000000 7.500000000 -17.500000000 
-36.500000000 8.500000000 -17.500000000 35.500000000 8.500000000 -17.500000000 -36.500000000 9.500000000 -17.500000000 35.500000000 9.500000000 -17.500000000 -36.500000000 10.500000000 -17.500000000 35.500000000 10.500000000 -17.500000000 -36.500000000 11.500000000 -17.500000000 35.500000000 11.500000000 -17.500000000 -36.500000000 12.500000000 -17.500000000 35.500000000 12.500000000 -17.500000000 -36.500000000 13.500000000 -17.500000000 35.500000000 13.500000000 -17.500000000 -36.500000000 14.500000000 -17.500000000 35.500000000 14.500000000 -17.500000000 -36.500000000 15.500000000 -17.500000000 35.500000000 15.500000000 -17.500000000 -36.500000000 16.500000000 -17.500000000 35.500000000 16.500000000 -17.500000000 -36.500000000 17.500000000 -17.500000000 35.500000000 17.500000000 -17.500000000 -36.500000000 18.500000000 -17.500000000 35.500000000 18.500000000 -17.500000000 -36.500000000 19.500000000 -17.500000000 35.500000000 19.500000000 -17.500000000 -36.500000000 20.500000000 -17.500000000 35.500000000 20.500000000 -17.500000000 -36.500000000 21.500000000 -17.500000000 35.500000000 21.500000000 -17.500000000 -36.500000000 22.500000000 -17.500000000 35.500000000 22.500000000 -17.500000000 -36.500000000 23.500000000 -17.500000000 35.500000000 23.500000000 -17.500000000 -36.500000000 24.500000000 -17.500000000 35.500000000 24.500000000 -17.500000000 -36.499996185 25.499996185 -17.500000000 35.499996185 25.499996185 -17.500000000 -36.499954224 26.499954224 -17.500000000 35.499954224 26.499954224 -17.500000000 -36.499591827 27.499591827 -17.500000000 35.499591827 27.499591827 -17.500000000 -36.497474670 28.497470856 -17.500000000 35.497467041 28.497470856 -17.500000000 -36.488403320 29.488407135 -17.500000000 35.488403320 29.488407135 -17.500000000 -36.458980560 30.458978653 -17.500000000 35.458980560 30.458978653 -17.500000000 -36.384422302 31.384418488 -17.500000000 35.384422302 31.384418488 -17.500000000 -36.233222961 32.233222961 -17.500000000 35.233222961 
32.233222961 -17.500000000 -35.981101990 32.981101990 -17.500000000 -35.622871399 33.622871399 -17.500000000 34.622871399 33.622871399 -17.500000000 34.981101990 32.981101990 -17.500000000 -35.167964935 34.167964935 -17.500000000 -34.622871399 34.622871399 -17.500000000 33.622871399 34.622871399 -17.500000000 34.167964935 34.167964935 -17.500000000 -33.981101990 34.981101990 -17.500000000 -33.233222961 35.233222961 -17.500000000 -32.384422302 35.384418488 -17.500000000 -31.458978653 35.458976746 -17.500000000 -30.488407135 35.488403320 -17.500000000 -29.497472763 35.497467041 -17.500000000 -28.499593735 35.499591827 -17.500000000 -27.499954224 35.499954224 -17.500000000 -26.499996185 35.499996185 -17.500000000 -25.500000000 35.500000000 -17.500000000 -24.500000000 35.500000000 -17.500000000 -23.500000000 35.500000000 -17.500000000 -22.500000000 35.500000000 -17.500000000 -21.500000000 35.500000000 -17.500000000 -20.500000000 35.500000000 -17.500000000 -19.500000000 35.500000000 -17.500000000 -18.500000000 35.500000000 -17.500000000 -17.500000000 35.500000000 -17.500000000 -16.500000000 35.500000000 -17.500000000 -15.500000000 35.500000000 -17.500000000 -14.500000000 35.500000000 -17.500000000 -13.500000000 35.500000000 -17.500000000 -12.500000000 35.500000000 -17.500000000 -11.500000000 35.500000000 -17.500000000 -10.500000000 35.500000000 -17.500000000 -9.500000000 35.500000000 -17.500000000 -8.500000000 35.500000000 -17.500000000 -7.500000000 35.500000000 -17.500000000 -6.500000000 35.500000000 -17.500000000 -5.500000000 35.500000000 -17.500000000 -4.500000000 35.500000000 -17.500000000 -3.500000000 35.500000000 -17.500000000 -2.500000000 35.500000000 -17.500000000 -1.500000000 35.500000000 -17.500000000 -0.500000000 35.500000000 -17.500000000 0.500000000 35.500000000 -17.500000000 1.500000000 35.500000000 -17.500000000 2.500000000 35.500000000 -17.500000000 3.500000000 35.500000000 -17.500000000 4.500000000 35.500000000 -17.500000000 5.500000000 35.500000000 
-17.500000000 6.500000000 35.500000000 -17.500000000 7.500000000 35.500000000 -17.500000000 8.500000000 35.500000000 -17.500000000 9.500000000 35.500000000 -17.500000000 10.500000000 35.500000000 -17.500000000 11.500000000 35.500000000 -17.500000000 12.500000000 35.500000000 -17.500000000 13.500000000 35.500000000 -17.500000000 14.500000000 35.500000000 -17.500000000 15.500000000 35.500000000 -17.500000000 16.500000000 35.500000000 -17.500000000 17.500000000 35.500000000 -17.500000000 18.500000000 35.500000000 -17.500000000 19.500000000 35.500000000 -17.500000000 20.500000000 35.500000000 -17.500000000 21.500000000 35.500000000 -17.500000000 22.500000000 35.500000000 -17.500000000 23.500000000 35.500000000 -17.500000000 24.500000000 35.500000000 -17.500000000 25.499996185 35.499996185 -17.500000000 26.499954224 35.499954224 -17.500000000 27.499591827 35.499591827 -17.500000000 28.497470856 35.497474670 -17.500000000 29.488407135 35.488403320 -17.500000000 30.458978653 35.458980560 -17.500000000 31.384418488 35.384422302 -17.500000000 32.233222961 35.233222961 -17.500000000 32.981101990 34.981101990 -17.500000000 -33.981101990 -35.981101990 -16.500000000 -33.233226776 -36.233222961 -16.500000000 -32.384422302 -36.384418488 -16.500000000 -31.458978653 -36.458980560 -16.500000000 -30.488407135 -36.488403320 -16.500000000 -29.497472763 -36.497474670 -16.500000000 -28.499593735 -36.499591827 -16.500000000 -27.499954224 -36.499954224 -16.500000000 -26.499996185 -36.499996185 -16.500000000 -25.500000000 -36.500000000 -16.500000000 -24.500000000 -36.500000000 -16.500000000 -23.500000000 -36.500000000 -16.500000000 -22.500000000 -36.500000000 -16.500000000 -21.500000000 -36.500000000 -16.500000000 -20.500000000 -36.500000000 -16.500000000 -19.500000000 -36.500000000 -16.500000000 -18.500000000 -36.500000000 -16.500000000 -17.500000000 -36.500000000 -16.500000000 -16.500000000 -36.500000000 -16.500000000 -15.500000000 -36.500000000 -16.500000000 -14.500000000 -36.500000000 
-16.500000000 -13.500000000 -36.500000000 -16.500000000 -12.500000000 -36.500000000 -16.500000000 -11.500000000 -36.500000000 -16.500000000 -10.500000000 -36.500000000 -16.500000000 -9.500000000 -36.500000000 -16.500000000 -8.500000000 -36.500000000 -16.500000000 -7.500000000 -36.500000000 -16.500000000 -6.500000000 -36.500000000 -16.500000000 -5.500000000 -36.500000000 -16.500000000 -4.500000000 -36.500000000 -16.500000000 -3.500000000 -36.500000000 -16.500000000 -2.500000000 -36.500000000 -16.500000000 -1.500000000 -36.500000000 -16.500000000 -0.500000000 -36.500000000 -16.500000000 0.500000000 -36.500000000 -16.500000000 1.500000000 -36.500000000 -16.500000000 2.500000000 -36.500000000 -16.500000000 3.500000000 -36.500000000 -16.500000000 4.500000000 -36.500000000 -16.500000000 5.500000000 -36.500000000 -16.500000000 6.500000000 -36.500000000 -16.500000000 7.500000000 -36.500000000 -16.500000000 8.500000000 -36.500000000 -16.500000000 9.500000000 -36.500000000 -16.500000000 10.500000000 -36.500000000 -16.500000000 11.500000000 -36.500000000 -16.500000000 12.500000000 -36.500000000 -16.500000000 13.500000000 -36.500000000 -16.500000000 14.500000000 -36.500000000 -16.500000000 15.500000000 -36.500000000 -16.500000000 16.500000000 -36.500000000 -16.500000000 17.500000000 -36.500000000 -16.500000000 18.500000000 -36.500000000 -16.500000000 19.500000000 -36.500000000 -16.500000000 20.500000000 -36.500000000 -16.500000000 21.500000000 -36.500000000 -16.500000000 22.500000000 -36.500000000 -16.500000000 23.500000000 -36.500000000 -16.500000000 24.500000000 -36.500000000 -16.500000000 25.499996185 -36.499996185 -16.500000000 26.499954224 -36.499954224 -16.500000000 27.499591827 -36.499591827 -16.500000000 28.497470856 -36.497467041 -16.500000000 29.488407135 -36.488403320 -16.500000000 30.458978653 -36.458980560 -16.500000000 31.384418488 -36.384422302 -16.500000000 32.233222961 -36.233222961 -16.500000000 32.981101990 -35.981101990 -16.500000000 -35.167964935 
-35.167964935 -16.500000000 -34.622871399 -35.622871399 -16.500000000 33.622871399 -35.622871399 -16.500000000 34.167964935 -35.167964935 -16.500000000 -35.981101990 -33.981101990 -16.500000000 -35.622871399 -34.622871399 -16.500000000 34.622871399 -34.622871399 -16.500000000 34.981101990 -33.981101990 -16.500000000 -36.233222961 -33.233222961 -16.500000000 35.233222961 -33.233226776 -16.500000000 -36.384418488 -32.384422302 -16.500000000 35.384418488 -32.384422302 -16.500000000 -36.458976746 -31.458978653 -16.500000000 35.458980560 -31.458978653 -16.500000000 -36.488403320 -30.488407135 -16.500000000 35.488403320 -30.488407135 -16.500000000 -36.497467041 -29.497472763 -16.500000000 35.497474670 -29.497472763 -16.500000000 -36.499591827 -28.499593735 -16.500000000 35.499591827 -28.499593735 -16.500000000 -36.499954224 -27.499954224 -16.500000000 35.499954224 -27.499954224 -16.500000000 -36.499996185 -26.499996185 -16.500000000 35.499996185 -26.499996185 -16.500000000 -36.500000000 -25.500000000 -16.500000000 35.500000000 -25.500000000 -16.500000000 -36.500000000 -24.500000000 -16.500000000 35.500000000 -24.500000000 -16.500000000 -36.500000000 -23.500000000 -16.500000000 35.500000000 -23.500000000 -16.500000000 -36.500000000 -22.500000000 -16.500000000 35.500000000 -22.500000000 -16.500000000 -36.500000000 -21.500000000 -16.500000000 35.500000000 -21.500000000 -16.500000000 -36.500000000 -20.500000000 -16.500000000 35.500000000 -20.500000000 -16.500000000 -36.500000000 -19.500000000 -16.500000000 35.500000000 -19.500000000 -16.500000000 -36.500000000 -18.500000000 -16.500000000 35.500000000 -18.500000000 -16.500000000 -36.500000000 -17.500000000 -16.500000000 35.500000000 -17.500000000 -16.500000000 -36.500000000 -16.500000000 -16.500000000 35.500000000 -16.500000000 -16.500000000 -36.500000000 -15.500000000 -16.500000000 35.500000000 -15.500000000 -16.500000000 -36.500000000 -14.500000000 -16.500000000 35.500000000 -14.500000000 -16.500000000 -36.500000000 
-13.500000000 -16.500000000 35.500000000 -13.500000000 -16.500000000 -36.500000000 -12.500000000 -16.500000000 35.500000000 -12.500000000 -16.500000000 -36.500000000 -11.500000000 -16.500000000 35.500000000 -11.500000000 -16.500000000 -36.500000000 -10.500000000 -16.500000000 35.500000000 -10.500000000 -16.500000000 -36.500000000 -9.500000000 -16.500000000 35.500000000 -9.500000000 -16.500000000 -36.500000000 -8.500000000 -16.500000000 35.500000000 -8.500000000 -16.500000000 -36.500000000 -7.500000000 -16.500000000 35.500000000 -7.500000000 -16.500000000 -36.500000000 -6.500000000 -16.500000000 35.500000000 -6.500000000 -16.500000000 -36.500000000 -5.500000000 -16.500000000 35.500000000 -5.500000000 -16.500000000 -36.500000000 -4.500000000 -16.500000000 35.500000000 -4.500000000 -16.500000000 -36.500000000 -3.500000000 -16.500000000 35.500000000 -3.500000000 -16.500000000 -36.500000000 -2.500000000 -16.500000000 35.500000000 -2.500000000 -16.500000000 -36.500000000 -1.500000000 -16.500000000 35.500000000 -1.500000000 -16.500000000 -36.500000000 -0.500000000 -16.500000000 35.500000000 -0.500000000 -16.500000000 -36.500000000 0.500000000 -16.500000000 35.500000000 0.500000000 -16.500000000 -36.500000000 1.500000000 -16.500000000 35.500000000 1.500000000 -16.500000000 -36.500000000 2.500000000 -16.500000000 35.500000000 2.500000000 -16.500000000 -36.500000000 3.500000000 -16.500000000 35.500000000 3.500000000 -16.500000000 -36.500000000 4.500000000 -16.500000000 35.500000000 4.500000000 -16.500000000 -36.500000000 5.500000000 -16.500000000 35.500000000 5.500000000 -16.500000000 -36.500000000 6.500000000 -16.500000000 35.500000000 6.500000000 -16.500000000 -36.500000000 7.500000000 -16.500000000 35.500000000 7.500000000 -16.500000000 -36.500000000 8.500000000 -16.500000000 35.500000000 8.500000000 -16.500000000 -36.500000000 9.500000000 -16.500000000 35.500000000 9.500000000 -16.500000000 -36.500000000 10.500000000 -16.500000000 35.500000000 10.500000000 -16.500000000 
-36.500000000 11.500000000 -16.500000000 35.500000000 11.500000000 -16.500000000 -36.500000000 12.500000000 -16.500000000 35.500000000 12.500000000 -16.500000000 -36.500000000 13.500000000 -16.500000000 35.500000000 13.500000000 -16.500000000 -36.500000000 14.500000000 -16.500000000 35.500000000 14.500000000 -16.500000000 -36.500000000 15.500000000 -16.500000000 35.500000000 15.500000000 -16.500000000 -36.500000000 16.500000000 -16.500000000 35.500000000 16.500000000 -16.500000000 -36.500000000 17.500000000 -16.500000000 35.500000000 17.500000000 -16.500000000 -36.500000000 18.500000000 -16.500000000 35.500000000 18.500000000 -16.500000000 -36.500000000 19.500000000 -16.500000000 35.500000000 19.500000000 -16.500000000 -36.500000000 20.500000000 -16.500000000 35.500000000 20.500000000 -16.500000000 -36.500000000 21.500000000 -16.500000000 35.500000000 21.500000000 -16.500000000 -36.500000000 22.500000000 -16.500000000 35.500000000 22.500000000 -16.500000000 -36.500000000 23.500000000 -16.500000000 35.500000000 23.500000000 -16.500000000 -36.500000000 24.500000000 -16.500000000 35.500000000 24.500000000 -16.500000000 -36.499996185 25.499996185 -16.500000000 35.499996185 25.499996185 -16.500000000 -36.499954224 26.499954224 -16.500000000 35.499954224 26.499954224 -16.500000000 -36.499591827 27.499591827 -16.500000000 35.499591827 27.499591827 -16.500000000 -36.497474670 28.497470856 -16.500000000 35.497467041 28.497470856 -16.500000000 -36.488403320 29.488407135 -16.500000000 35.488403320 29.488407135 -16.500000000 -36.458980560 30.458978653 -16.500000000 35.458980560 30.458978653 -16.500000000 -36.384422302 31.384418488 -16.500000000 35.384422302 31.384418488 -16.500000000 -36.233222961 32.233222961 -16.500000000 35.233222961 32.233222961 -16.500000000 -35.981101990 32.981101990 -16.500000000 -35.622871399 33.622871399 -16.500000000 34.622871399 33.622871399 -16.500000000 34.981101990 32.981101990 -16.500000000 -35.167964935 34.167964935 -16.500000000 -34.622871399 
34.622871399 -16.500000000 33.622871399 34.622871399 -16.500000000 34.167964935 34.167964935 -16.500000000 -33.981101990 34.981101990 -16.500000000 -33.233222961 35.233222961 -16.500000000 -32.384422302 35.384418488 -16.500000000 -31.458978653 35.458976746 -16.500000000 -30.488407135 35.488403320 -16.500000000 -29.497472763 35.497467041 -16.500000000 -28.499593735 35.499591827 -16.500000000 -27.499954224 35.499954224 -16.500000000 -26.499996185 35.499996185 -16.500000000 -25.500000000 35.500000000 -16.500000000 -24.500000000 35.500000000 -16.500000000 -23.500000000 35.500000000 -16.500000000 -22.500000000 35.500000000 -16.500000000 -21.500000000 35.500000000 -16.500000000 -20.500000000 35.500000000 -16.500000000 -19.500000000 35.500000000 -16.500000000 -18.500000000 35.500000000 -16.500000000 -17.500000000 35.500000000 -16.500000000 -16.500000000 35.500000000 -16.500000000 -15.500000000 35.500000000 -16.500000000 -14.500000000 35.500000000 -16.500000000 -13.500000000 35.500000000 -16.500000000 -12.500000000 35.500000000 -16.500000000 -11.500000000 35.500000000 -16.500000000 -10.500000000 35.500000000 -16.500000000 -9.500000000 35.500000000 -16.500000000 -8.500000000 35.500000000 -16.500000000 -7.500000000 35.500000000 -16.500000000 -6.500000000 35.500000000 -16.500000000 -5.500000000 35.500000000 -16.500000000 -4.500000000 35.500000000 -16.500000000 -3.500000000 35.500000000 -16.500000000 -2.500000000 35.500000000 -16.500000000 -1.500000000 35.500000000 -16.500000000 -0.500000000 35.500000000 -16.500000000 0.500000000 35.500000000 -16.500000000 1.500000000 35.500000000 -16.500000000 2.500000000 35.500000000 -16.500000000 3.500000000 35.500000000 -16.500000000 4.500000000 35.500000000 -16.500000000 5.500000000 35.500000000 -16.500000000 6.500000000 35.500000000 -16.500000000 7.500000000 35.500000000 -16.500000000 8.500000000 35.500000000 -16.500000000 9.500000000 35.500000000 -16.500000000 10.500000000 35.500000000 -16.500000000 11.500000000 35.500000000 
-16.500000000 12.500000000 35.500000000 -16.500000000 13.500000000 35.500000000 -16.500000000 14.500000000 35.500000000 -16.500000000 15.500000000 35.500000000 -16.500000000 16.500000000 35.500000000 -16.500000000 17.500000000 35.500000000 -16.500000000 18.500000000 35.500000000 -16.500000000 19.500000000 35.500000000 -16.500000000 20.500000000 35.500000000 -16.500000000 21.500000000 35.500000000 -16.500000000 22.500000000 35.500000000 -16.500000000 23.500000000 35.500000000 -16.500000000 24.500000000 35.500000000 -16.500000000 25.499996185 35.499996185 -16.500000000 26.499954224 35.499954224 -16.500000000 27.499591827 35.499591827 -16.500000000 28.497470856 35.497474670 -16.500000000 29.488407135 35.488403320 -16.500000000 30.458978653 35.458980560 -16.500000000 31.384418488 35.384422302 -16.500000000 32.233222961 35.233222961 -16.500000000 32.981101990 34.981101990 -16.500000000 -33.981101990 -35.981101990 -15.500000000 -33.233226776 -36.233222961 -15.500000000 -32.384422302 -36.384418488 -15.500000000 -31.458978653 -36.458980560 -15.500000000 -30.488407135 -36.488403320 -15.500000000 -29.497472763 -36.497474670 -15.500000000 -28.499593735 -36.499591827 -15.500000000 -27.499954224 -36.499954224 -15.500000000 -26.499996185 -36.499996185 -15.500000000 -25.500000000 -36.500000000 -15.500000000 -24.500000000 -36.500000000 -15.500000000 -23.500000000 -36.500000000 -15.500000000 -22.500000000 -36.500000000 -15.500000000 -21.500000000 -36.500000000 -15.500000000 -20.500000000 -36.500000000 -15.500000000 -19.500000000 -36.500000000 -15.500000000 -18.500000000 -36.500000000 -15.500000000 -17.500000000 -36.500000000 -15.500000000 -16.500000000 -36.500000000 -15.500000000 -15.500000000 -36.500000000 -15.500000000 -14.500000000 -36.500000000 -15.500000000 -13.500000000 -36.500000000 -15.500000000 -12.500000000 -36.500000000 -15.500000000 -11.500000000 -36.500000000 -15.500000000 -10.500000000 -36.500000000 -15.500000000 -9.500000000 -36.500000000 -15.500000000 -8.500000000 
-36.500000000 -15.500000000 -7.500000000 -36.500000000 -15.500000000 -6.500000000 -36.500000000 -15.500000000 -5.500000000 -36.500000000 -15.500000000 -4.500000000 -36.500000000 -15.500000000 -3.500000000 -36.500000000 -15.500000000 -2.500000000 -36.500000000 -15.500000000 -1.500000000 -36.500000000 -15.500000000 -0.500000000 -36.500000000 -15.500000000 0.500000000 -36.500000000 -15.500000000 1.500000000 -36.500000000 -15.500000000 2.500000000 -36.500000000 -15.500000000 3.500000000 -36.500000000 -15.500000000 4.500000000 -36.500000000 -15.500000000 5.500000000 -36.500000000 -15.500000000 6.500000000 -36.500000000 -15.500000000 7.500000000 -36.500000000 -15.500000000 8.500000000 -36.500000000 -15.500000000 9.500000000 -36.500000000 -15.500000000 10.500000000 -36.500000000 -15.500000000 11.500000000 -36.500000000 -15.500000000 12.500000000 -36.500000000 -15.500000000 13.500000000 -36.500000000 -15.500000000 14.500000000 -36.500000000 -15.500000000 15.500000000 -36.500000000 -15.500000000 16.500000000 -36.500000000 -15.500000000 17.500000000 -36.500000000 -15.500000000 18.500000000 -36.500000000 -15.500000000 19.500000000 -36.500000000 -15.500000000 20.500000000 -36.500000000 -15.500000000 21.500000000 -36.500000000 -15.500000000 22.500000000 -36.500000000 -15.500000000 23.500000000 -36.500000000 -15.500000000 24.500000000 -36.500000000 -15.500000000 25.499996185 -36.499996185 -15.500000000 26.499954224 -36.499954224 -15.500000000 27.499591827 -36.499591827 -15.500000000 28.497470856 -36.497467041 -15.500000000 29.488407135 -36.488403320 -15.500000000 30.458978653 -36.458980560 -15.500000000 31.384418488 -36.384422302 -15.500000000 32.233222961 -36.233222961 -15.500000000 32.981101990 -35.981101990 -15.500000000 -35.167964935 -35.167964935 -15.500000000 -34.622871399 -35.622871399 -15.500000000 33.622871399 -35.622871399 -15.500000000 34.167964935 -35.167964935 -15.500000000 -35.981101990 -33.981101990 -15.500000000 -35.622871399 -34.622871399 -15.500000000 
34.622871399 -34.622871399 -15.500000000 34.981101990 -33.981101990 -15.500000000 -36.233222961 -33.233222961 -15.500000000 35.233222961 -33.233226776 -15.500000000 -36.384418488 -32.384422302 -15.500000000 35.384418488 -32.384422302 -15.500000000 -36.458976746 -31.458978653 -15.500000000 35.458980560 -31.458978653 -15.500000000 -36.488403320 -30.488407135 -15.500000000 35.488403320 -30.488407135 -15.500000000 -36.497467041 -29.497472763 -15.500000000 35.497474670 -29.497472763 -15.500000000 -36.499591827 -28.499593735 -15.500000000 35.499591827 -28.499593735 -15.500000000 -36.499954224 -27.499954224 -15.500000000 35.499954224 -27.499954224 -15.500000000 -36.499996185 -26.499996185 -15.500000000 35.499996185 -26.499996185 -15.500000000 -36.500000000 -25.500000000 -15.500000000 35.500000000 -25.500000000 -15.500000000 -36.500000000 -24.500000000 -15.500000000 35.500000000 -24.500000000 -15.500000000 -36.500000000 -23.500000000 -15.500000000 35.500000000 -23.500000000 -15.500000000 -36.500000000 -22.500000000 -15.500000000 35.500000000 -22.500000000 -15.500000000 -36.500000000 -21.500000000 -15.500000000 35.500000000 -21.500000000 -15.500000000 -36.500000000 -20.500000000 -15.500000000 35.500000000 -20.500000000 -15.500000000 -36.500000000 -19.500000000 -15.500000000 35.500000000 -19.500000000 -15.500000000 -36.500000000 -18.500000000 -15.500000000 35.500000000 -18.500000000 -15.500000000 -36.500000000 -17.500000000 -15.500000000 35.500000000 -17.500000000 -15.500000000 -36.500000000 -16.500000000 -15.500000000 35.500000000 -16.500000000 -15.500000000 -36.500000000 -15.500000000 -15.500000000 35.500000000 -15.500000000 -15.500000000 -36.500000000 -14.500000000 -15.500000000 35.500000000 -14.500000000 -15.500000000 -36.500000000 -13.500000000 -15.500000000 35.500000000 -13.500000000 -15.500000000 -36.500000000 -12.500000000 -15.500000000 35.500000000 -12.500000000 -15.500000000 -36.500000000 -11.500000000 -15.500000000 35.500000000 -11.500000000 -15.500000000 
-36.500000000 -10.500000000 -15.500000000 35.500000000 -10.500000000 -15.500000000 -36.500000000 -9.500000000 -15.500000000 35.500000000 -9.500000000 -15.500000000 -36.500000000 -8.500000000 -15.500000000 35.500000000 -8.500000000 -15.500000000 -36.500000000 -7.500000000 -15.500000000 35.500000000 -7.500000000 -15.500000000 -36.500000000 -6.500000000 -15.500000000 35.500000000 -6.500000000 -15.500000000 -36.500000000 -5.500000000 -15.500000000 35.500000000 -5.500000000 -15.500000000 -36.500000000 -4.500000000 -15.500000000 35.500000000 -4.500000000 -15.500000000 -36.500000000 -3.500000000 -15.500000000 35.500000000 -3.500000000 -15.500000000 -36.500000000 -2.500000000 -15.500000000 35.500000000 -2.500000000 -15.500000000 -36.500000000 -1.500000000 -15.500000000 35.500000000 -1.500000000 -15.500000000 -36.500000000 -0.500000000 -15.500000000 35.500000000 -0.500000000 -15.500000000 -36.500000000 0.500000000 -15.500000000 35.500000000 0.500000000 -15.500000000 -36.500000000 1.500000000 -15.500000000 35.500000000 1.500000000 -15.500000000 -36.500000000 2.500000000 -15.500000000 35.500000000 2.500000000 -15.500000000 -36.500000000 3.500000000 -15.500000000 35.500000000 3.500000000 -15.500000000 -36.500000000 4.500000000 -15.500000000 35.500000000 4.500000000 -15.500000000 -36.500000000 5.500000000 -15.500000000 35.500000000 5.500000000 -15.500000000 -36.500000000 6.500000000 -15.500000000 35.500000000 6.500000000 -15.500000000 -36.500000000 7.500000000 -15.500000000 35.500000000 7.500000000 -15.500000000 -36.500000000 8.500000000 -15.500000000 35.500000000 8.500000000 -15.500000000 -36.500000000 9.500000000 -15.500000000 35.500000000 9.500000000 -15.500000000 -36.500000000 10.500000000 -15.500000000 35.500000000 10.500000000 -15.500000000 -36.500000000 11.500000000 -15.500000000 35.500000000 11.500000000 -15.500000000 -36.500000000 12.500000000 -15.500000000 35.500000000 12.500000000 -15.500000000 -36.500000000 13.500000000 -15.500000000 35.500000000 13.500000000 
-15.500000000 -36.500000000 14.500000000 -15.500000000 35.500000000 14.500000000 -15.500000000 -36.500000000 15.500000000 -15.500000000 35.500000000 15.500000000 -15.500000000 -36.500000000 16.500000000 -15.500000000 35.500000000 16.500000000 -15.500000000 -36.500000000 17.500000000 -15.500000000 35.500000000 17.500000000 -15.500000000 -36.500000000 18.500000000 -15.500000000 35.500000000 18.500000000 -15.500000000 -36.500000000 19.500000000 -15.500000000 35.500000000 19.500000000 -15.500000000 -36.500000000 20.500000000 -15.500000000 35.500000000 20.500000000 -15.500000000 -36.500000000 21.500000000 -15.500000000 35.500000000 21.500000000 -15.500000000 -36.500000000 22.500000000 -15.500000000 35.500000000 22.500000000 -15.500000000 -36.500000000 23.500000000 -15.500000000 35.500000000 23.500000000 -15.500000000 -36.500000000 24.500000000 -15.500000000 35.500000000 24.500000000 -15.500000000 -36.499996185 25.499996185 -15.500000000 35.499996185 25.499996185 -15.500000000 -36.499954224 26.499954224 -15.500000000 35.499954224 26.499954224 -15.500000000 -36.499591827 27.499591827 -15.500000000 35.499591827 27.499591827 -15.500000000 -36.497474670 28.497470856 -15.500000000 35.497467041 28.497470856 -15.500000000 -36.488403320 29.488407135 -15.500000000 35.488403320 29.488407135 -15.500000000 -36.458980560 30.458978653 -15.500000000 35.458980560 30.458978653 -15.500000000 -36.384422302 31.384418488 -15.500000000 35.384422302 31.384418488 -15.500000000 -36.233222961 32.233222961 -15.500000000 35.233222961 32.233222961 -15.500000000 -35.981101990 32.981101990 -15.500000000 -35.622871399 33.622871399 -15.500000000 34.622871399 33.622871399 -15.500000000 34.981101990 32.981101990 -15.500000000 -35.167964935 34.167964935 -15.500000000 -34.622871399 34.622871399 -15.500000000 33.622871399 34.622871399 -15.500000000 34.167964935 34.167964935 -15.500000000 -33.981101990 34.981101990 -15.500000000 -33.233222961 35.233222961 -15.500000000 -32.384422302 35.384418488 -15.500000000 
-31.458978653 35.458976746 -15.500000000 -30.488407135 35.488403320 -15.500000000 -29.497472763 35.497467041 -15.500000000 -28.499593735 35.499591827 -15.500000000 -27.499954224 35.499954224 -15.500000000 -26.499996185 35.499996185 -15.500000000 -25.500000000 35.500000000 -15.500000000 -24.500000000 35.500000000 -15.500000000 -23.500000000 35.500000000 -15.500000000 -22.500000000 35.500000000 -15.500000000 -21.500000000 35.500000000 -15.500000000 -20.500000000 35.500000000 -15.500000000 -19.500000000 35.500000000 -15.500000000 -18.500000000 35.500000000 -15.500000000 -17.500000000 35.500000000 -15.500000000 -16.500000000 35.500000000 -15.500000000 -15.500000000 35.500000000 -15.500000000 -14.500000000 35.500000000 -15.500000000 -13.500000000 35.500000000 -15.500000000 -12.500000000 35.500000000 -15.500000000 -11.500000000 35.500000000 -15.500000000 -10.500000000 35.500000000 -15.500000000 -9.500000000 35.500000000 -15.500000000 -8.500000000 35.500000000 -15.500000000 -7.500000000 35.500000000 -15.500000000 -6.500000000 35.500000000 -15.500000000 -5.500000000 35.500000000 -15.500000000 -4.500000000 35.500000000 -15.500000000 -3.500000000 35.500000000 -15.500000000 -2.500000000 35.500000000 -15.500000000 -1.500000000 35.500000000 -15.500000000 -0.500000000 35.500000000 -15.500000000 0.500000000 35.500000000 -15.500000000 1.500000000 35.500000000 -15.500000000 2.500000000 35.500000000 -15.500000000 3.500000000 35.500000000 -15.500000000 4.500000000 35.500000000 -15.500000000 5.500000000 35.500000000 -15.500000000 6.500000000 35.500000000 -15.500000000 7.500000000 35.500000000 -15.500000000 8.500000000 35.500000000 -15.500000000 9.500000000 35.500000000 -15.500000000 10.500000000 35.500000000 -15.500000000 11.500000000 35.500000000 -15.500000000 12.500000000 35.500000000 -15.500000000 13.500000000 35.500000000 -15.500000000 14.500000000 35.500000000 -15.500000000 15.500000000 35.500000000 -15.500000000 16.500000000 35.500000000 -15.500000000 17.500000000 35.500000000 
-15.500000000 18.500000000 35.500000000 -15.500000000 19.500000000 35.500000000 -15.500000000 20.500000000 35.500000000 -15.500000000 21.500000000 35.500000000 -15.500000000 22.500000000 35.500000000 -15.500000000 23.500000000 35.500000000 -15.500000000 24.500000000 35.500000000 -15.500000000 25.499996185 35.499996185 -15.500000000 26.499954224 35.499954224 -15.500000000 27.499591827 35.499591827 -15.500000000 28.497470856 35.497474670 -15.500000000 29.488407135 35.488403320 -15.500000000 30.458978653 35.458980560 -15.500000000 31.384418488 35.384422302 -15.500000000 32.233222961 35.233222961 -15.500000000 32.981101990 34.981101990 -15.500000000 -33.981101990 -35.981101990 -14.500000000 -33.233226776 -36.233222961 -14.500000000 -32.384422302 -36.384418488 -14.500000000 -31.458978653 -36.458980560 -14.500000000 -30.488407135 -36.488403320 -14.500000000 -29.497472763 -36.497474670 -14.500000000 -28.499593735 -36.499591827 -14.500000000 -27.499954224 -36.499954224 -14.500000000 -26.499996185 -36.499996185 -14.500000000 -25.500000000 -36.500000000 -14.500000000 -24.500000000 -36.500000000 -14.500000000 -23.500000000 -36.500000000 -14.500000000 -22.500000000 -36.500000000 -14.500000000 -21.500000000 -36.500000000 -14.500000000 -20.500000000 -36.500000000 -14.500000000 -19.500000000 -36.500000000 -14.500000000 -18.500000000 -36.500000000 -14.500000000 -17.500000000 -36.500000000 -14.500000000 -16.500000000 -36.500000000 -14.500000000 -15.500000000 -36.500000000 -14.500000000 -14.500000000 -36.500000000 -14.500000000 -13.500000000 -36.500000000 -14.500000000 -12.500000000 -36.500000000 -14.500000000 -11.500000000 -36.500000000 -14.500000000 -10.500000000 -36.500000000 -14.500000000 -9.500000000 -36.500000000 -14.500000000 -8.500000000 -36.500000000 -14.500000000 -7.500000000 -36.500000000 -14.500000000 -6.500000000 -36.500000000 -14.500000000 -5.500000000 -36.500000000 -14.500000000 -4.500000000 -36.500000000 -14.500000000 -3.500000000 -36.500000000 -14.500000000 
-2.500000000 -36.500000000 -14.500000000 -1.500000000 -36.500000000 -14.500000000 -0.500000000 -36.500000000 -14.500000000 0.500000000 -36.500000000 -14.500000000 1.500000000 -36.500000000 -14.500000000 2.500000000 -36.500000000 -14.500000000 3.500000000 -36.500000000 -14.500000000 4.500000000 -36.500000000 -14.500000000 5.500000000 -36.500000000 -14.500000000 6.500000000 -36.500000000 -14.500000000 7.500000000 -36.500000000 -14.500000000 8.500000000 -36.500000000 -14.500000000 9.500000000 -36.500000000 -14.500000000 10.500000000 -36.500000000 -14.500000000 11.500000000 -36.500000000 -14.500000000 12.500000000 -36.500000000 -14.500000000 13.500000000 -36.500000000 -14.500000000 14.500000000 -36.500000000 -14.500000000 15.500000000 -36.500000000 -14.500000000 16.500000000 -36.500000000 -14.500000000 17.500000000 -36.500000000 -14.500000000 18.500000000 -36.500000000 -14.500000000 19.500000000 -36.500000000 -14.500000000 20.500000000 -36.500000000 -14.500000000 21.500000000 -36.500000000 -14.500000000 22.500000000 -36.500000000 -14.500000000 23.500000000 -36.500000000 -14.500000000 24.500000000 -36.500000000 -14.500000000 25.499996185 -36.499996185 -14.500000000 26.499954224 -36.499954224 -14.500000000 27.499591827 -36.499591827 -14.500000000 28.497470856 -36.497467041 -14.500000000 29.488407135 -36.488403320 -14.500000000 30.458978653 -36.458980560 -14.500000000 31.384418488 -36.384422302 -14.500000000 32.233222961 -36.233222961 -14.500000000 32.981101990 -35.981101990 -14.500000000 -35.167964935 -35.167964935 -14.500000000 -34.622871399 -35.622871399 -14.500000000 33.622871399 -35.622871399 -14.500000000 34.167964935 -35.167964935 -14.500000000 -35.981101990 -33.981101990 -14.500000000 -35.622871399 -34.622871399 -14.500000000 34.622871399 -34.622871399 -14.500000000 34.981101990 -33.981101990 -14.500000000 -36.233222961 -33.233222961 -14.500000000 35.233222961 -33.233226776 -14.500000000 -36.384418488 -32.384422302 -14.500000000 35.384418488 -32.384422302 
-14.500000000 -36.458976746 -31.458978653 -14.500000000 35.458980560 -31.458978653 -14.500000000 -36.488403320 -30.488407135 -14.500000000 35.488403320 -30.488407135 -14.500000000 -36.497467041 -29.497472763 -14.500000000 35.497474670 -29.497472763 -14.500000000 -36.499591827 -28.499593735 -14.500000000 35.499591827 -28.499593735 -14.500000000 -36.499954224 -27.499954224 -14.500000000 35.499954224 -27.499954224 -14.500000000 -36.499996185 -26.499996185 -14.500000000 35.499996185 -26.499996185 -14.500000000 -36.500000000 -25.500000000 -14.500000000 35.500000000 -25.500000000 -14.500000000 -36.500000000 -24.500000000 -14.500000000 35.500000000 -24.500000000 -14.500000000 -36.500000000 -23.500000000 -14.500000000 35.500000000 -23.500000000 -14.500000000 -36.500000000 -22.500000000 -14.500000000 35.500000000 -22.500000000 -14.500000000 -36.500000000 -21.500000000 -14.500000000 35.500000000 -21.500000000 -14.500000000 -36.500000000 -20.500000000 -14.500000000 35.500000000 -20.500000000 -14.500000000 -36.500000000 -19.500000000 -14.500000000 35.500000000 -19.500000000 -14.500000000 -36.500000000 -18.500000000 -14.500000000 35.500000000 -18.500000000 -14.500000000 -36.500000000 -17.500000000 -14.500000000 35.500000000 -17.500000000 -14.500000000 -36.500000000 -16.500000000 -14.500000000 35.500000000 -16.500000000 -14.500000000 -36.500000000 -15.500000000 -14.500000000 35.500000000 -15.500000000 -14.500000000 -36.500000000 -14.500000000 -14.500000000 35.500000000 -14.500000000 -14.500000000 -36.500000000 -13.500000000 -14.500000000 35.500000000 -13.500000000 -14.500000000 -36.500000000 -12.500000000 -14.500000000 35.500000000 -12.500000000 -14.500000000 -36.500000000 -11.500000000 -14.500000000 35.500000000 -11.500000000 -14.500000000 -36.500000000 -10.500000000 -14.500000000 35.500000000 -10.500000000 -14.500000000 -36.500000000 -9.500000000 -14.500000000 35.500000000 -9.500000000 -14.500000000 -36.500000000 -8.500000000 -14.500000000 35.500000000 -8.500000000 
-14.500000000 -36.500000000 -7.500000000 -14.500000000 35.500000000 -7.500000000 -14.500000000 -36.500000000 -6.500000000 -14.500000000 35.500000000 -6.500000000 -14.500000000 -36.500000000 -5.500000000 -14.500000000 35.500000000 -5.500000000 -14.500000000 -36.500000000 -4.500000000 -14.500000000 35.500000000 -4.500000000 -14.500000000 -36.500000000 -3.500000000 -14.500000000 35.500000000 -3.500000000 -14.500000000 -36.500000000 -2.500000000 -14.500000000 35.500000000 -2.500000000 -14.500000000 -36.500000000 -1.500000000 -14.500000000 35.500000000 -1.500000000 -14.500000000 -36.500000000 -0.500000000 -14.500000000 35.500000000 -0.500000000 -14.500000000 -36.500000000 0.500000000 -14.500000000 35.500000000 0.500000000 -14.500000000 -36.500000000 1.500000000 -14.500000000 35.500000000 1.500000000 -14.500000000 -36.500000000 2.500000000 -14.500000000 35.500000000 2.500000000 -14.500000000 -36.500000000 3.500000000 -14.500000000 35.500000000 3.500000000 -14.500000000 -36.500000000 4.500000000 -14.500000000 35.500000000 4.500000000 -14.500000000 -36.500000000 5.500000000 -14.500000000 35.500000000 5.500000000 -14.500000000 -36.500000000 6.500000000 -14.500000000 35.500000000 6.500000000 -14.500000000 -36.500000000 7.500000000 -14.500000000 35.500000000 7.500000000 -14.500000000 -36.500000000 8.500000000 -14.500000000 35.500000000 8.500000000 -14.500000000 -36.500000000 9.500000000 -14.500000000 35.500000000 9.500000000 -14.500000000 -36.500000000 10.500000000 -14.500000000 35.500000000 10.500000000 -14.500000000 -36.500000000 11.500000000 -14.500000000 35.500000000 11.500000000 -14.500000000 -36.500000000 12.500000000 -14.500000000 35.500000000 12.500000000 -14.500000000 -36.500000000 13.500000000 -14.500000000 35.500000000 13.500000000 -14.500000000 -36.500000000 14.500000000 -14.500000000 35.500000000 14.500000000 -14.500000000 -36.500000000 15.500000000 -14.500000000 35.500000000 15.500000000 -14.500000000 -36.500000000 16.500000000 -14.500000000 35.500000000 
16.500000000 -14.500000000 -36.500000000 17.500000000 -14.500000000 35.500000000 17.500000000 -14.500000000 -36.500000000 18.500000000 -14.500000000 35.500000000 18.500000000 -14.500000000 -36.500000000 19.500000000 -14.500000000 35.500000000 19.500000000 -14.500000000 -36.500000000 20.500000000 -14.500000000 35.500000000 20.500000000 -14.500000000 -36.500000000 21.500000000 -14.500000000 35.500000000 21.500000000 -14.500000000 -36.500000000 22.500000000 -14.500000000 35.500000000 22.500000000 -14.500000000 -36.500000000 23.500000000 -14.500000000 35.500000000 23.500000000 -14.500000000 -36.500000000 24.500000000 -14.500000000 35.500000000 24.500000000 -14.500000000 -36.499996185 25.499996185 -14.500000000 35.499996185 25.499996185 -14.500000000 -36.499954224 26.499954224 -14.500000000 35.499954224 26.499954224 -14.500000000 -36.499591827 27.499591827 -14.500000000 35.499591827 27.499591827 -14.500000000 -36.497474670 28.497470856 -14.500000000 35.497467041 28.497470856 -14.500000000 -36.488403320 29.488407135 -14.500000000 35.488403320 29.488407135 -14.500000000 -36.458980560 30.458978653 -14.500000000 35.458980560 30.458978653 -14.500000000 -36.384422302 31.384418488 -14.500000000 35.384422302 31.384418488 -14.500000000 -36.233222961 32.233222961 -14.500000000 35.233222961 32.233222961 -14.500000000 -35.981101990 32.981101990 -14.500000000 -35.622871399 33.622871399 -14.500000000 34.622871399 33.622871399 -14.500000000 34.981101990 32.981101990 -14.500000000 -35.167964935 34.167964935 -14.500000000 -34.622871399 34.622871399 -14.500000000 33.622871399 34.622871399 -14.500000000 34.167964935 34.167964935 -14.500000000 -33.981101990 34.981101990 -14.500000000 -33.233222961 35.233222961 -14.500000000 -32.384422302 35.384418488 -14.500000000 -31.458978653 35.458976746 -14.500000000 -30.488407135 35.488403320 -14.500000000 -29.497472763 35.497467041 -14.500000000 -28.499593735 35.499591827 -14.500000000 -27.499954224 35.499954224 -14.500000000 -26.499996185 
35.499996185 -14.500000000 -25.500000000 35.500000000 -14.500000000 -24.500000000 35.500000000 -14.500000000 -23.500000000 35.500000000 -14.500000000 -22.500000000 35.500000000 -14.500000000 -21.500000000 35.500000000 -14.500000000 -20.500000000 35.500000000 -14.500000000 -19.500000000 35.500000000 -14.500000000 -18.500000000 35.500000000 -14.500000000 -17.500000000 35.500000000 -14.500000000 -16.500000000 35.500000000 -14.500000000 -15.500000000 35.500000000 -14.500000000 -14.500000000 35.500000000 -14.500000000 -13.500000000 35.500000000 -14.500000000 -12.500000000 35.500000000 -14.500000000 -11.500000000 35.500000000 -14.500000000 -10.500000000 35.500000000 -14.500000000 -9.500000000 35.500000000 -14.500000000 -8.500000000 35.500000000 -14.500000000 -7.500000000 35.500000000 -14.500000000 -6.500000000 35.500000000 -14.500000000 -5.500000000 35.500000000 -14.500000000 -4.500000000 35.500000000 -14.500000000 -3.500000000 35.500000000 -14.500000000 -2.500000000 35.500000000 -14.500000000 -1.500000000 35.500000000 -14.500000000 -0.500000000 35.500000000 -14.500000000 0.500000000 35.500000000 -14.500000000 1.500000000 35.500000000 -14.500000000 2.500000000 35.500000000 -14.500000000 3.500000000 35.500000000 -14.500000000 4.500000000 35.500000000 -14.500000000 5.500000000 35.500000000 -14.500000000 6.500000000 35.500000000 -14.500000000 7.500000000 35.500000000 -14.500000000 8.500000000 35.500000000 -14.500000000 9.500000000 35.500000000 -14.500000000 10.500000000 35.500000000 -14.500000000 11.500000000 35.500000000 -14.500000000 12.500000000 35.500000000 -14.500000000 13.500000000 35.500000000 -14.500000000 14.500000000 35.500000000 -14.500000000 15.500000000 35.500000000 -14.500000000 16.500000000 35.500000000 -14.500000000 17.500000000 35.500000000 -14.500000000 18.500000000 35.500000000 -14.500000000 19.500000000 35.500000000 -14.500000000 20.500000000 35.500000000 -14.500000000 21.500000000 35.500000000 -14.500000000 22.500000000 35.500000000 -14.500000000 
23.500000000 35.500000000 -14.500000000 24.500000000 35.500000000 -14.500000000 25.499996185 35.499996185 -14.500000000 26.499954224 35.499954224 -14.500000000 27.499591827 35.499591827 -14.500000000 28.497470856 35.497474670 -14.500000000 29.488407135 35.488403320 -14.500000000 30.458978653 35.458980560 -14.500000000 31.384418488 35.384422302 -14.500000000 32.233222961 35.233222961 -14.500000000 32.981101990 34.981101990 -14.500000000 -33.981101990 -35.981101990 -13.500000000 -33.233226776 -36.233222961 -13.500000000 -32.384422302 -36.384418488 -13.500000000 -31.458978653 -36.458980560 -13.500000000 -30.488407135 -36.488403320 -13.500000000 -29.497472763 -36.497474670 -13.500000000 -28.499593735 -36.499591827 -13.500000000 -27.499954224 -36.499954224 -13.500000000 -26.499996185 -36.499996185 -13.500000000 -25.500000000 -36.500000000 -13.500000000 -24.500000000 -36.500000000 -13.500000000 -23.500000000 -36.500000000 -13.500000000 -22.500000000 -36.500000000 -13.500000000 -21.500000000 -36.500000000 -13.500000000 -20.500000000 -36.500000000 -13.500000000 -19.500000000 -36.500000000 -13.500000000 -18.500000000 -36.500000000 -13.500000000 -17.500000000 -36.500000000 -13.500000000 -16.500000000 -36.500000000 -13.500000000 -15.500000000 -36.500000000 -13.500000000 -14.500000000 -36.500000000 -13.500000000 -13.500000000 -36.500000000 -13.500000000 -12.500000000 -36.500000000 -13.500000000 -11.500000000 -36.500000000 -13.500000000 -10.500000000 -36.500000000 -13.500000000 -9.500000000 -36.500000000 -13.500000000 -8.500000000 -36.500000000 -13.500000000 -7.500000000 -36.500000000 -13.500000000 -6.500000000 -36.500000000 -13.500000000 -5.500000000 -36.500000000 -13.500000000 -4.500000000 -36.500000000 -13.500000000 -3.500000000 -36.500000000 -13.500000000 -2.500000000 -36.500000000 -13.500000000 -1.500000000 -36.500000000 -13.500000000 -0.500000000 -36.500000000 -13.500000000 0.500000000 -36.500000000 -13.500000000 1.500000000 -36.500000000 -13.500000000 2.500000000 
-36.500000000 -13.500000000 3.500000000 -36.500000000 -13.500000000 4.500000000 -36.500000000 -13.500000000 5.500000000 -36.500000000 -13.500000000 6.500000000 -36.500000000 -13.500000000 7.500000000 -36.500000000 -13.500000000 8.500000000 -36.500000000 -13.500000000 9.500000000 -36.500000000 -13.500000000 10.500000000 -36.500000000 -13.500000000 11.500000000 -36.500000000 -13.500000000 12.500000000 -36.500000000 -13.500000000 13.500000000 -36.500000000 -13.500000000 14.500000000 -36.500000000 -13.500000000 15.500000000 -36.500000000 -13.500000000 16.500000000 -36.500000000 -13.500000000 17.500000000 -36.500000000 -13.500000000 18.500000000 -36.500000000 -13.500000000 19.500000000 -36.500000000 -13.500000000 20.500000000 -36.500000000 -13.500000000 21.500000000 -36.500000000 -13.500000000 22.500000000 -36.500000000 -13.500000000 23.500000000 -36.500000000 -13.500000000 24.500000000 -36.500000000 -13.500000000 25.499996185 -36.499996185 -13.500000000 26.499954224 -36.499954224 -13.500000000 27.499591827 -36.499591827 -13.500000000 28.497470856 -36.497467041 -13.500000000 29.488407135 -36.488403320 -13.500000000 30.458978653 -36.458980560 -13.500000000 31.384418488 -36.384422302 -13.500000000 32.233222961 -36.233222961 -13.500000000 32.981101990 -35.981101990 -13.500000000 -35.167964935 -35.167964935 -13.500000000 -34.622871399 -35.622871399 -13.500000000 33.622871399 -35.622871399 -13.500000000 34.167964935 -35.167964935 -13.500000000 -35.981101990 -33.981101990 -13.500000000 -35.622871399 -34.622871399 -13.500000000 34.622871399 -34.622871399 -13.500000000 34.981101990 -33.981101990 -13.500000000 -36.233222961 -33.233222961 -13.500000000 35.233222961 -33.233226776 -13.500000000 -36.384418488 -32.384422302 -13.500000000 35.384418488 -32.384422302 -13.500000000 -36.458976746 -31.458978653 -13.500000000 35.458980560 -31.458978653 -13.500000000 -36.488403320 -30.488407135 -13.500000000 35.488403320 -30.488407135 -13.500000000 -36.497467041 -29.497472763 -13.500000000 
35.497474670 -29.497472763 -13.500000000 -36.499591827 -28.499593735 -13.500000000 35.499591827 -28.499593735 -13.500000000 -36.499954224 -27.499954224 -13.500000000 35.499954224 -27.499954224 -13.500000000 -36.499996185 -26.499996185 -13.500000000 35.499996185 -26.499996185 -13.500000000 -36.500000000 -25.500000000 -13.500000000 35.500000000 -25.500000000 -13.500000000 -36.500000000 -24.500000000 -13.500000000 35.500000000 -24.500000000 -13.500000000 -36.500000000 -23.500000000 -13.500000000 35.500000000 -23.500000000 -13.500000000 -36.500000000 -22.500000000 -13.500000000 35.500000000 -22.500000000 -13.500000000 -36.500000000 -21.500000000 -13.500000000 35.500000000 -21.500000000 -13.500000000 -36.500000000 -20.500000000 -13.500000000 35.500000000 -20.500000000 -13.500000000 -36.500000000 -19.500000000 -13.500000000 35.500000000 -19.500000000 -13.500000000 -36.500000000 -18.500000000 -13.500000000 35.500000000 -18.500000000 -13.500000000 -36.500000000 -17.500000000 -13.500000000 35.500000000 -17.500000000 -13.500000000 -36.500000000 -16.500000000 -13.500000000 35.500000000 -16.500000000 -13.500000000 -36.500000000 -15.500000000 -13.500000000 35.500000000 -15.500000000 -13.500000000 -36.500000000 -14.500000000 -13.500000000 35.500000000 -14.500000000 -13.500000000 -36.500000000 -13.500000000 -13.500000000 35.500000000 -13.500000000 -13.500000000 -36.500000000 -12.500000000 -13.500000000 35.500000000 -12.500000000 -13.500000000 -36.500000000 -11.500000000 -13.500000000 35.500000000 -11.500000000 -13.500000000 -36.500000000 -10.500000000 -13.500000000 35.500000000 -10.500000000 -13.500000000 -36.500000000 -9.500000000 -13.500000000 35.500000000 -9.500000000 -13.500000000 -36.500000000 -8.500000000 -13.500000000 35.500000000 -8.500000000 -13.500000000 -36.500000000 -7.500000000 -13.500000000 35.500000000 -7.500000000 -13.500000000 -36.500000000 -6.500000000 -13.500000000 35.500000000 -6.500000000 -13.500000000 -36.500000000 -5.500000000 -13.500000000 35.500000000 
-5.500000000 -13.500000000 -36.500000000 -4.500000000 -13.500000000 35.500000000 -4.500000000 -13.500000000 -36.500000000 -3.500000000 -13.500000000 35.500000000 -3.500000000 -13.500000000 -36.500000000 -2.500000000 -13.500000000 35.500000000 -2.500000000 -13.500000000 -36.500000000 -1.500000000 -13.500000000 35.500000000 -1.500000000 -13.500000000 -36.500000000 -0.500000000 -13.500000000 35.500000000 -0.500000000 -13.500000000 -36.500000000 0.500000000 -13.500000000 35.500000000 0.500000000 -13.500000000 -36.500000000 1.500000000 -13.500000000 35.500000000 1.500000000 -13.500000000 -36.500000000 2.500000000 -13.500000000 35.500000000 2.500000000 -13.500000000 -36.500000000 3.500000000 -13.500000000 35.500000000 3.500000000 -13.500000000 -36.500000000 4.500000000 -13.500000000 35.500000000 4.500000000 -13.500000000 -36.500000000 5.500000000 -13.500000000 35.500000000 5.500000000 -13.500000000 -36.500000000 6.500000000 -13.500000000 35.500000000 6.500000000 -13.500000000 -36.500000000 7.500000000 -13.500000000 35.500000000 7.500000000 -13.500000000 -36.500000000 8.500000000 -13.500000000 35.500000000 8.500000000 -13.500000000 -36.500000000 9.500000000 -13.500000000 35.500000000 9.500000000 -13.500000000 -36.500000000 10.500000000 -13.500000000 35.500000000 10.500000000 -13.500000000 -36.500000000 11.500000000 -13.500000000 35.500000000 11.500000000 -13.500000000 -36.500000000 12.500000000 -13.500000000 35.500000000 12.500000000 -13.500000000 -36.500000000 13.500000000 -13.500000000 35.500000000 13.500000000 -13.500000000 -36.500000000 14.500000000 -13.500000000 35.500000000 14.500000000 -13.500000000 -36.500000000 15.500000000 -13.500000000 35.500000000 15.500000000 -13.500000000 -36.500000000 16.500000000 -13.500000000 35.500000000 16.500000000 -13.500000000 -36.500000000 17.500000000 -13.500000000 35.500000000 17.500000000 -13.500000000 -36.500000000 18.500000000 -13.500000000 35.500000000 18.500000000 -13.500000000 -36.500000000 19.500000000 -13.500000000 
35.500000000 19.500000000 -13.500000000 -36.500000000 20.500000000 -13.500000000 35.500000000 20.500000000 -13.500000000 -36.500000000 21.500000000 -13.500000000 35.500000000 21.500000000 -13.500000000 -36.500000000 22.500000000 -13.500000000 35.500000000 22.500000000 -13.500000000 -36.500000000 23.500000000 -13.500000000 35.500000000 23.500000000 -13.500000000 -36.500000000 24.500000000 -13.500000000 35.500000000 24.500000000 -13.500000000 -36.499996185 25.499996185 -13.500000000 35.499996185 25.499996185 -13.500000000 -36.499954224 26.499954224 -13.500000000 35.499954224 26.499954224 -13.500000000 -36.499591827 27.499591827 -13.500000000 35.499591827 27.499591827 -13.500000000 -36.497474670 28.497470856 -13.500000000 35.497467041 28.497470856 -13.500000000 -36.488403320 29.488407135 -13.500000000 35.488403320 29.488407135 -13.500000000 -36.458980560 30.458978653 -13.500000000 35.458980560 30.458978653 -13.500000000 -36.384422302 31.384418488 -13.500000000 35.384422302 31.384418488 -13.500000000 -36.233222961 32.233222961 -13.500000000 35.233222961 32.233222961 -13.500000000 -35.981101990 32.981101990 -13.500000000 -35.622871399 33.622871399 -13.500000000 34.622871399 33.622871399 -13.500000000 34.981101990 32.981101990 -13.500000000 -35.167964935 34.167964935 -13.500000000 -34.622871399 34.622871399 -13.500000000 33.622871399 34.622871399 -13.500000000 34.167964935 34.167964935 -13.500000000 -33.981101990 34.981101990 -13.500000000 -33.233222961 35.233222961 -13.500000000 -32.384422302 35.384418488 -13.500000000 -31.458978653 35.458976746 -13.500000000 -30.488407135 35.488403320 -13.500000000 -29.497472763 35.497467041 -13.500000000 -28.499593735 35.499591827 -13.500000000 -27.499954224 35.499954224 -13.500000000 -26.499996185 35.499996185 -13.500000000 -25.500000000 35.500000000 -13.500000000 -24.500000000 35.500000000 -13.500000000 -23.500000000 35.500000000 -13.500000000 -22.500000000 35.500000000 -13.500000000 -21.500000000 35.500000000 -13.500000000 
-20.500000000 35.500000000 -13.500000000 -19.500000000 35.500000000 -13.500000000 -18.500000000 35.500000000 -13.500000000 -17.500000000 35.500000000 -13.500000000 -16.500000000 35.500000000 -13.500000000 -15.500000000 35.500000000 -13.500000000 -14.500000000 35.500000000 -13.500000000 -13.500000000 35.500000000 -13.500000000 -12.500000000 35.500000000 -13.500000000 -11.500000000 35.500000000 -13.500000000 -10.500000000 35.500000000 -13.500000000 -9.500000000 35.500000000 -13.500000000 -8.500000000 35.500000000 -13.500000000 -7.500000000 35.500000000 -13.500000000 -6.500000000 35.500000000 -13.500000000 -5.500000000 35.500000000 -13.500000000 -4.500000000 35.500000000 -13.500000000 -3.500000000 35.500000000 -13.500000000 -2.500000000 35.500000000 -13.500000000 -1.500000000 35.500000000 -13.500000000 -0.500000000 35.500000000 -13.500000000 0.500000000 35.500000000 -13.500000000 1.500000000 35.500000000 -13.500000000 2.500000000 35.500000000 -13.500000000 3.500000000 35.500000000 -13.500000000 4.500000000 35.500000000 -13.500000000 5.500000000 35.500000000 -13.500000000 6.500000000 35.500000000 -13.500000000 7.500000000 35.500000000 -13.500000000 8.500000000 35.500000000 -13.500000000 9.500000000 35.500000000 -13.500000000 10.500000000 35.500000000 -13.500000000 11.500000000 35.500000000 -13.500000000 12.500000000 35.500000000 -13.500000000 13.500000000 35.500000000 -13.500000000 14.500000000 35.500000000 -13.500000000 15.500000000 35.500000000 -13.500000000 16.500000000 35.500000000 -13.500000000 17.500000000 35.500000000 -13.500000000 18.500000000 35.500000000 -13.500000000 19.500000000 35.500000000 -13.500000000 20.500000000 35.500000000 -13.500000000 21.500000000 35.500000000 -13.500000000 22.500000000 35.500000000 -13.500000000 23.500000000 35.500000000 -13.500000000 24.500000000 35.500000000 -13.500000000 25.499996185 35.499996185 -13.500000000 26.499954224 35.499954224 -13.500000000 27.499591827 35.499591827 -13.500000000 28.497470856 35.497474670 
-13.500000000 29.488407135 35.488403320 -13.500000000 30.458978653 35.458980560 -13.500000000 31.384418488 35.384422302 -13.500000000 32.233222961 35.233222961 -13.500000000 32.981101990 34.981101990 -13.500000000 -33.981101990 -35.981101990 -12.500000000 -33.233226776 -36.233222961 -12.500000000 -32.384422302 -36.384418488 -12.500000000 -31.458978653 -36.458980560 -12.500000000 -30.488407135 -36.488403320 -12.500000000 -29.497472763 -36.497474670 -12.500000000 -28.499593735 -36.499591827 -12.500000000 -27.499954224 -36.499954224 -12.500000000 -26.499996185 -36.499996185 -12.500000000 -25.500000000 -36.500000000 -12.500000000 -24.500000000 -36.500000000 -12.500000000 -23.500000000 -36.500000000 -12.500000000 -22.500000000 -36.500000000 -12.500000000 -21.500000000 -36.500000000 -12.500000000 -20.500000000 -36.500000000 -12.500000000 -19.500000000 -36.500000000 -12.500000000 -18.500000000 -36.500000000 -12.500000000 -17.500000000 -36.500000000 -12.500000000 -16.500000000 -36.500000000 -12.500000000 -15.500000000 -36.500000000 -12.500000000 -14.500000000 -36.500000000 -12.500000000 -13.500000000 -36.500000000 -12.500000000 -12.500000000 -36.500000000 -12.500000000 -11.500000000 -36.500000000 -12.500000000 -10.500000000 -36.500000000 -12.500000000 -9.500000000 -36.500000000 -12.500000000 -8.500000000 -36.500000000 -12.500000000 -7.500000000 -36.500000000 -12.500000000 -6.500000000 -36.500000000 -12.500000000 -5.500000000 -36.500000000 -12.500000000 -4.500000000 -36.500000000 -12.500000000 -3.500000000 -36.500000000 -12.500000000 -2.500000000 -36.500000000 -12.500000000 -1.500000000 -36.500000000 -12.500000000 -0.500000000 -36.500000000 -12.500000000 0.500000000 -36.500000000 -12.500000000 1.500000000 -36.500000000 -12.500000000 2.500000000 -36.500000000 -12.500000000 3.500000000 -36.500000000 -12.500000000 4.500000000 -36.500000000 -12.500000000 5.500000000 -36.500000000 -12.500000000 6.500000000 -36.500000000 -12.500000000 7.500000000 -36.500000000 -12.500000000 
8.500000000 -36.500000000 -12.500000000 9.500000000 -36.500000000 -12.500000000 10.500000000 -36.500000000 -12.500000000 11.500000000 -36.500000000 -12.500000000 12.500000000 -36.500000000 -12.500000000 13.500000000 -36.500000000 -12.500000000 14.500000000 -36.500000000 -12.500000000 15.500000000 -36.500000000 -12.500000000 16.500000000 -36.500000000 -12.500000000 17.500000000 -36.500000000 -12.500000000 18.500000000 -36.500000000 -12.500000000 19.500000000 -36.500000000 -12.500000000 20.500000000 -36.500000000 -12.500000000 21.500000000 -36.500000000 -12.500000000 22.500000000 -36.500000000 -12.500000000 23.500000000 -36.500000000 -12.500000000 24.500000000 -36.500000000 -12.500000000 25.499996185 -36.499996185 -12.500000000 26.499954224 -36.499954224 -12.500000000 27.499591827 -36.499591827 -12.500000000 28.497470856 -36.497467041 -12.500000000 29.488407135 -36.488403320 -12.500000000 30.458978653 -36.458980560 -12.500000000 31.384418488 -36.384422302 -12.500000000 32.233222961 -36.233222961 -12.500000000 32.981101990 -35.981101990 -12.500000000 -35.167964935 -35.167964935 -12.500000000 -34.622871399 -35.622871399 -12.500000000 33.622871399 -35.622871399 -12.500000000 34.167964935 -35.167964935 -12.500000000 -35.981101990 -33.981101990 -12.500000000 -35.622871399 -34.622871399 -12.500000000 34.622871399 -34.622871399 -12.500000000 34.981101990 -33.981101990 -12.500000000 -36.233222961 -33.233222961 -12.500000000 35.233222961 -33.233226776 -12.500000000 -36.384418488 -32.384422302 -12.500000000 35.384418488 -32.384422302 -12.500000000 -36.458976746 -31.458978653 -12.500000000 35.458980560 -31.458978653 -12.500000000 -36.488403320 -30.488407135 -12.500000000 35.488403320 -30.488407135 -12.500000000 -36.497467041 -29.497472763 -12.500000000 35.497474670 -29.497472763 -12.500000000 -36.499591827 -28.499593735 -12.500000000 35.499591827 -28.499593735 -12.500000000 -36.499954224 -27.499954224 -12.500000000 35.499954224 -27.499954224 -12.500000000 -36.499996185 
-26.499996185 -12.500000000 35.499996185 -26.499996185 -12.500000000 -36.500000000 -25.500000000 -12.500000000 35.500000000 -25.500000000 -12.500000000 -36.500000000 -24.500000000 -12.500000000 35.500000000 -24.500000000 -12.500000000 -36.500000000 -23.500000000 -12.500000000 35.500000000 -23.500000000 -12.500000000 -36.500000000 -22.500000000 -12.500000000 35.500000000 -22.500000000 -12.500000000 -36.500000000 -21.500000000 -12.500000000 35.500000000 -21.500000000 -12.500000000 -36.500000000 -20.500000000 -12.500000000 35.500000000 -20.500000000 -12.500000000 -36.500000000 -19.500000000 -12.500000000 35.500000000 -19.500000000 -12.500000000 -36.500000000 -18.500000000 -12.500000000 35.500000000 -18.500000000 -12.500000000 -36.500000000 -17.500000000 -12.500000000 35.500000000 -17.500000000 -12.500000000 -36.500000000 -16.500000000 -12.500000000 35.500000000 -16.500000000 -12.500000000 -36.500000000 -15.500000000 -12.500000000 35.500000000 -15.500000000 -12.500000000 -36.500000000 -14.500000000 -12.500000000 35.500000000 -14.500000000 -12.500000000 -36.500000000 -13.500000000 -12.500000000 35.500000000 -13.500000000 -12.500000000 -36.500000000 -12.500000000 -12.500000000 35.500000000 -12.500000000 -12.500000000 -36.500000000 -11.500000000 -12.500000000 35.500000000 -11.500000000 -12.500000000 -36.500000000 -10.500000000 -12.500000000 35.500000000 -10.500000000 -12.500000000 -36.500000000 -9.500000000 -12.500000000 35.500000000 -9.500000000 -12.500000000 -36.500000000 -8.500000000 -12.500000000 35.500000000 -8.500000000 -12.500000000 -36.500000000 -7.500000000 -12.500000000 35.500000000 -7.500000000 -12.500000000 -36.500000000 -6.500000000 -12.500000000 35.500000000 -6.500000000 -12.500000000 -36.500000000 -5.500000000 -12.500000000 35.500000000 -5.500000000 -12.500000000 -36.500000000 -4.500000000 -12.500000000 35.500000000 -4.500000000 -12.500000000 -36.500000000 -3.500000000 -12.500000000 35.500000000 -3.500000000 -12.500000000 -36.500000000 -2.500000000 
-12.500000000 35.500000000 -2.500000000 -12.500000000 -36.500000000 -1.500000000 -12.500000000 35.500000000 -1.500000000 -12.500000000 -36.500000000 -0.500000000 -12.500000000 35.500000000 -0.500000000 -12.500000000 -36.500000000 0.500000000 -12.500000000 35.500000000 0.500000000 -12.500000000 -36.500000000 1.500000000 -12.500000000 35.500000000 1.500000000 -12.500000000 -36.500000000 2.500000000 -12.500000000 35.500000000 2.500000000 -12.500000000 -36.500000000 3.500000000 -12.500000000 35.500000000 3.500000000 -12.500000000 -36.500000000 4.500000000 -12.500000000 35.500000000 4.500000000 -12.500000000 -36.500000000 5.500000000 -12.500000000 35.500000000 5.500000000 -12.500000000 -36.500000000 6.500000000 -12.500000000 35.500000000 6.500000000 -12.500000000 -36.500000000 7.500000000 -12.500000000 35.500000000 7.500000000 -12.500000000 -36.500000000 8.500000000 -12.500000000 35.500000000 8.500000000 -12.500000000 -36.500000000 9.500000000 -12.500000000 35.500000000 9.500000000 -12.500000000 -36.500000000 10.500000000 -12.500000000 35.500000000 10.500000000 -12.500000000 -36.500000000 11.500000000 -12.500000000 35.500000000 11.500000000 -12.500000000 -36.500000000 12.500000000 -12.500000000 35.500000000 12.500000000 -12.500000000 -36.500000000 13.500000000 -12.500000000 35.500000000 13.500000000 -12.500000000 -36.500000000 14.500000000 -12.500000000 35.500000000 14.500000000 -12.500000000 -36.500000000 15.500000000 -12.500000000 35.500000000 15.500000000 -12.500000000 -36.500000000 16.500000000 -12.500000000 35.500000000 16.500000000 -12.500000000 -36.500000000 17.500000000 -12.500000000 35.500000000 17.500000000 -12.500000000 -36.500000000 18.500000000 -12.500000000 35.500000000 18.500000000 -12.500000000 -36.500000000 19.500000000 -12.500000000 35.500000000 19.500000000 -12.500000000 -36.500000000 20.500000000 -12.500000000 35.500000000 20.500000000 -12.500000000 -36.500000000 21.500000000 -12.500000000 35.500000000 21.500000000 -12.500000000 -36.500000000 
22.500000000 -12.500000000 35.500000000 22.500000000 -12.500000000 -36.500000000 23.500000000 -12.500000000 35.500000000 23.500000000 -12.500000000 -36.500000000 24.500000000 -12.500000000 35.500000000 24.500000000 -12.500000000 -36.499996185 25.499996185 -12.500000000 35.499996185 25.499996185 -12.500000000 -36.499954224 26.499954224 -12.500000000 35.499954224 26.499954224 -12.500000000 -36.499591827 27.499591827 -12.500000000 35.499591827 27.499591827 -12.500000000 -36.497474670 28.497470856 -12.500000000 35.497467041 28.497470856 -12.500000000 -36.488403320 29.488407135 -12.500000000 35.488403320 29.488407135 -12.500000000 -36.458980560 30.458978653 -12.500000000 35.458980560 30.458978653 -12.500000000 -36.384422302 31.384418488 -12.500000000 35.384422302 31.384418488 -12.500000000 -36.233222961 32.233222961 -12.500000000 35.233222961 32.233222961 -12.500000000 -35.981101990 32.981101990 -12.500000000 -35.622871399 33.622871399 -12.500000000 34.622871399 33.622871399 -12.500000000 34.981101990 32.981101990 -12.500000000 -35.167964935 34.167964935 -12.500000000 -34.622871399 34.622871399 -12.500000000 33.622871399 34.622871399 -12.500000000 34.167964935 34.167964935 -12.500000000 -33.981101990 34.981101990 -12.500000000 -33.233222961 35.233222961 -12.500000000 -32.384422302 35.384418488 -12.500000000 -31.458978653 35.458976746 -12.500000000 -30.488407135 35.488403320 -12.500000000 -29.497472763 35.497467041 -12.500000000 -28.499593735 35.499591827 -12.500000000 -27.499954224 35.499954224 -12.500000000 -26.499996185 35.499996185 -12.500000000 -25.500000000 35.500000000 -12.500000000 -24.500000000 35.500000000 -12.500000000 -23.500000000 35.500000000 -12.500000000 -22.500000000 35.500000000 -12.500000000 -21.500000000 35.500000000 -12.500000000 -20.500000000 35.500000000 -12.500000000 -19.500000000 35.500000000 -12.500000000 -18.500000000 35.500000000 -12.500000000 -17.500000000 35.500000000 -12.500000000 -16.500000000 35.500000000 -12.500000000 -15.500000000 
35.500000000 -12.500000000 -14.500000000 35.500000000 -12.500000000 -13.500000000 35.500000000 -12.500000000 -12.500000000 35.500000000 -12.500000000 -11.500000000 35.500000000 -12.500000000 -10.500000000 35.500000000 -12.500000000 -9.500000000 35.500000000 -12.500000000 -8.500000000 35.500000000 -12.500000000 -7.500000000 35.500000000 -12.500000000 -6.500000000 35.500000000 -12.500000000 -5.500000000 35.500000000 -12.500000000 -4.500000000 35.500000000 -12.500000000 -3.500000000 35.500000000 -12.500000000 -2.500000000 35.500000000 -12.500000000 -1.500000000 35.500000000 -12.500000000 -0.500000000 35.500000000 -12.500000000 0.500000000 35.500000000 -12.500000000 1.500000000 35.500000000 -12.500000000 2.500000000 35.500000000 -12.500000000 3.500000000 35.500000000 -12.500000000 4.500000000 35.500000000 -12.500000000 5.500000000 35.500000000 -12.500000000 6.500000000 35.500000000 -12.500000000 7.500000000 35.500000000 -12.500000000 8.500000000 35.500000000 -12.500000000 9.500000000 35.500000000 -12.500000000 10.500000000 35.500000000 -12.500000000 11.500000000 35.500000000 -12.500000000 12.500000000 35.500000000 -12.500000000 13.500000000 35.500000000 -12.500000000 14.500000000 35.500000000 -12.500000000 15.500000000 35.500000000 -12.500000000 16.500000000 35.500000000 -12.500000000 17.500000000 35.500000000 -12.500000000 18.500000000 35.500000000 -12.500000000 19.500000000 35.500000000 -12.500000000 20.500000000 35.500000000 -12.500000000 21.500000000 35.500000000 -12.500000000 22.500000000 35.500000000 -12.500000000 23.500000000 35.500000000 -12.500000000 24.500000000 35.500000000 -12.500000000 25.499996185 35.499996185 -12.500000000 26.499954224 35.499954224 -12.500000000 27.499591827 35.499591827 -12.500000000 28.497470856 35.497474670 -12.500000000 29.488407135 35.488403320 -12.500000000 30.458978653 35.458980560 -12.500000000 31.384418488 35.384422302 -12.500000000 32.233222961 35.233222961 -12.500000000 32.981101990 34.981101990 -12.500000000 -33.981101990 
-35.981101990 -11.500000000 -33.233226776 -36.233222961 -11.500000000 -32.384422302 -36.384418488 -11.500000000 -31.458978653 -36.458980560 -11.500000000 -30.488407135 -36.488403320 -11.500000000 -29.497472763 -36.497474670 -11.500000000 -28.499593735 -36.499591827 -11.500000000 -27.499954224 -36.499954224 -11.500000000 -26.499996185 -36.499996185 -11.500000000 -25.500000000 -36.500000000 -11.500000000 -24.500000000 -36.500000000 -11.500000000 -23.500000000 -36.500000000 -11.500000000 -22.500000000 -36.500000000 -11.500000000 -21.500000000 -36.500000000 -11.500000000 -20.500000000 -36.500000000 -11.500000000 -19.500000000 -36.500000000 -11.500000000 -18.500000000 -36.500000000 -11.500000000 -17.500000000 -36.500000000 -11.500000000 -16.500000000 -36.500000000 -11.500000000 -15.500000000 -36.500000000 -11.500000000 -14.500000000 -36.500000000 -11.500000000 -13.500000000 -36.500000000 -11.500000000 -12.500000000 -36.500000000 -11.500000000 -11.500000000 -36.500000000 -11.500000000 -10.500000000 -36.500000000 -11.500000000 -9.500000000 -36.500000000 -11.500000000 -8.500000000 -36.500000000 -11.500000000 -7.500000000 -36.500000000 -11.500000000 -6.500000000 -36.500000000 -11.500000000 -5.500000000 -36.500000000 -11.500000000 -4.500000000 -36.500000000 -11.500000000 -3.500000000 -36.500000000 -11.500000000 -2.500000000 -36.500000000 -11.500000000 -1.500000000 -36.500000000 -11.500000000 -0.500000000 -36.500000000 -11.500000000 0.500000000 -36.500000000 -11.500000000 1.500000000 -36.500000000 -11.500000000 2.500000000 -36.500000000 -11.500000000 3.500000000 -36.500000000 -11.500000000 4.500000000 -36.500000000 -11.500000000 5.500000000 -36.500000000 -11.500000000 6.500000000 -36.500000000 -11.500000000 7.500000000 -36.500000000 -11.500000000 8.500000000 -36.500000000 -11.500000000 9.500000000 -36.500000000 -11.500000000 10.500000000 -36.500000000 -11.500000000 11.500000000 -36.500000000 -11.500000000 12.500000000 -36.500000000 -11.500000000 13.500000000 -36.500000000 
-11.500000000 14.500000000 -36.500000000 -11.500000000 15.500000000 -36.500000000 -11.500000000 16.500000000 -36.500000000 -11.500000000 17.500000000 -36.500000000 -11.500000000 18.500000000 -36.500000000 -11.500000000 19.500000000 -36.500000000 -11.500000000 20.500000000 -36.500000000 -11.500000000 21.500000000 -36.500000000 -11.500000000 22.500000000 -36.500000000 -11.500000000 23.500000000 -36.500000000 -11.500000000 24.500000000 -36.500000000 -11.500000000 25.499996185 -36.499996185 -11.500000000 26.499954224 -36.499954224 -11.500000000 27.499591827 -36.499591827 -11.500000000 28.497470856 -36.497467041 -11.500000000 29.488407135 -36.488403320 -11.500000000 30.458978653 -36.458980560 -11.500000000 31.384418488 -36.384422302 -11.500000000 32.233222961 -36.233222961 -11.500000000 32.981101990 -35.981101990 -11.500000000 -35.167964935 -35.167964935 -11.500000000 -34.622871399 -35.622871399 -11.500000000 33.622871399 -35.622871399 -11.500000000 34.167964935 -35.167964935 -11.500000000 -35.981101990 -33.981101990 -11.500000000 -35.622871399 -34.622871399 -11.500000000 34.622871399 -34.622871399 -11.500000000 34.981101990 -33.981101990 -11.500000000 -36.233222961 -33.233222961 -11.500000000 35.233222961 -33.233226776 -11.500000000 -36.384418488 -32.384422302 -11.500000000 35.384418488 -32.384422302 -11.500000000 -36.458976746 -31.458978653 -11.500000000 35.458980560 -31.458978653 -11.500000000 -36.488403320 -30.488407135 -11.500000000 35.488403320 -30.488407135 -11.500000000 -36.497467041 -29.497472763 -11.500000000 35.497474670 -29.497472763 -11.500000000 -36.499591827 -28.499593735 -11.500000000 35.499591827 -28.499593735 -11.500000000 -36.499954224 -27.499954224 -11.500000000 35.499954224 -27.499954224 -11.500000000 -36.499996185 -26.499996185 -11.500000000 35.499996185 -26.499996185 -11.500000000 -36.500000000 -25.500000000 -11.500000000 35.500000000 -25.500000000 -11.500000000 -36.500000000 -24.500000000 -11.500000000 35.500000000 -24.500000000 -11.500000000 
-36.500000000 -23.500000000 -11.500000000 35.500000000 -23.500000000 -11.500000000 -36.500000000 -22.500000000 -11.500000000 35.500000000 -22.500000000 -11.500000000 -36.500000000 -21.500000000 -11.500000000 35.500000000 -21.500000000 -11.500000000 -36.500000000 -20.500000000 -11.500000000 35.500000000 -20.500000000 -11.500000000 -36.500000000 -19.500000000 -11.500000000 35.500000000 -19.500000000 -11.500000000 -36.500000000 -18.500000000 -11.500000000 35.500000000 -18.500000000 -11.500000000 -36.500000000 -17.500000000 -11.500000000 35.500000000 -17.500000000 -11.500000000 -36.500000000 -16.500000000 -11.500000000 35.500000000 -16.500000000 -11.500000000 -36.500000000 -15.500000000 -11.500000000 35.500000000 -15.500000000 -11.500000000 -36.500000000 -14.500000000 -11.500000000 35.500000000 -14.500000000 -11.500000000 -36.500000000 -13.500000000 -11.500000000 35.500000000 -13.500000000 -11.500000000 -36.500000000 -12.500000000 -11.500000000 35.500000000 -12.500000000 -11.500000000 -36.500000000 -11.500000000 -11.500000000 35.500000000 -11.500000000 -11.500000000 -36.500000000 -10.500000000 -11.500000000 35.500000000 -10.500000000 -11.500000000 -36.500000000 -9.500000000 -11.500000000 35.500000000 -9.500000000 -11.500000000 -36.500000000 -8.500000000 -11.500000000 35.500000000 -8.500000000 -11.500000000 -36.500000000 -7.500000000 -11.500000000 35.500000000 -7.500000000 -11.500000000 -36.500000000 -6.500000000 -11.500000000 35.500000000 -6.500000000 -11.500000000 -36.500000000 -5.500000000 -11.500000000 35.500000000 -5.500000000 -11.500000000 -36.500000000 -4.500000000 -11.500000000 35.500000000 -4.500000000 -11.500000000 -36.500000000 -3.500000000 -11.500000000 35.500000000 -3.500000000 -11.500000000 -36.500000000 -2.500000000 -11.500000000 35.500000000 -2.500000000 -11.500000000 -36.500000000 -1.500000000 -11.500000000 35.500000000 -1.500000000 -11.500000000 -36.500000000 -0.500000000 -11.500000000 35.500000000 -0.500000000 -11.500000000 -36.500000000 0.500000000 
-11.500000000 35.500000000 0.500000000 -11.500000000 -36.500000000 1.500000000 -11.500000000 35.500000000 1.500000000 -11.500000000 -36.500000000 2.500000000 -11.500000000 35.500000000 2.500000000 -11.500000000 -36.500000000 3.500000000 -11.500000000 35.500000000 3.500000000 -11.500000000 -36.500000000 4.500000000 -11.500000000 35.500000000 4.500000000 -11.500000000 -36.500000000 5.500000000 -11.500000000 35.500000000 5.500000000 -11.500000000 -36.500000000 6.500000000 -11.500000000 35.500000000 6.500000000 -11.500000000 -36.500000000 7.500000000 -11.500000000 35.500000000 7.500000000 -11.500000000 -36.500000000 8.500000000 -11.500000000 35.500000000 8.500000000 -11.500000000 -36.500000000 9.500000000 -11.500000000 35.500000000 9.500000000 -11.500000000 -36.500000000 10.500000000 -11.500000000 35.500000000 10.500000000 -11.500000000 -36.500000000 11.500000000 -11.500000000 35.500000000 11.500000000 -11.500000000 -36.500000000 12.500000000 -11.500000000 35.500000000 12.500000000 -11.500000000 -36.500000000 13.500000000 -11.500000000 35.500000000 13.500000000 -11.500000000 -36.500000000 14.500000000 -11.500000000 35.500000000 14.500000000 -11.500000000 -36.500000000 15.500000000 -11.500000000 35.500000000 15.500000000 -11.500000000 -36.500000000 16.500000000 -11.500000000 35.500000000 16.500000000 -11.500000000 -36.500000000 17.500000000 -11.500000000 35.500000000 17.500000000 -11.500000000 -36.500000000 18.500000000 -11.500000000 35.500000000 18.500000000 -11.500000000 -36.500000000 19.500000000 -11.500000000 35.500000000 19.500000000 -11.500000000 -36.500000000 20.500000000 -11.500000000 35.500000000 20.500000000 -11.500000000 -36.500000000 21.500000000 -11.500000000 35.500000000 21.500000000 -11.500000000 -36.500000000 22.500000000 -11.500000000 35.500000000 22.500000000 -11.500000000 -36.500000000 23.500000000 -11.500000000 35.500000000 23.500000000 -11.500000000 -36.500000000 24.500000000 -11.500000000 35.500000000 24.500000000 -11.500000000 -36.499996185 
25.499996185 -11.500000000 35.499996185 25.499996185 -11.500000000 -36.499954224 26.499954224 -11.500000000 35.499954224 26.499954224 -11.500000000 -36.499591827 27.499591827 -11.500000000 35.499591827 27.499591827 -11.500000000 -36.497474670 28.497470856 -11.500000000 35.497467041 28.497470856 -11.500000000 -36.488403320 29.488407135 -11.500000000 35.488403320 29.488407135 -11.500000000 -36.458980560 30.458978653 -11.500000000 35.458980560 30.458978653 -11.500000000 -36.384422302 31.384418488 -11.500000000 35.384422302 31.384418488 -11.500000000 -36.233222961 32.233222961 -11.500000000 35.233222961 32.233222961 -11.500000000 -35.981101990 32.981101990 -11.500000000 -35.622871399 33.622871399 -11.500000000 34.622871399 33.622871399 -11.500000000 34.981101990 32.981101990 -11.500000000 -35.167964935 34.167964935 -11.500000000 -34.622871399 34.622871399 -11.500000000 33.622871399 34.622871399 -11.500000000 34.167964935 34.167964935 -11.500000000 -33.981101990 34.981101990 -11.500000000 -33.233222961 35.233222961 -11.500000000 -32.384422302 35.384418488 -11.500000000 -31.458978653 35.458976746 -11.500000000 -30.488407135 35.488403320 -11.500000000 -29.497472763 35.497467041 -11.500000000 -28.499593735 35.499591827 -11.500000000 -27.499954224 35.499954224 -11.500000000 -26.499996185 35.499996185 -11.500000000 -25.500000000 35.500000000 -11.500000000 -24.500000000 35.500000000 -11.500000000 -23.500000000 35.500000000 -11.500000000 -22.500000000 35.500000000 -11.500000000 -21.500000000 35.500000000 -11.500000000 -20.500000000 35.500000000 -11.500000000 -19.500000000 35.500000000 -11.500000000 -18.500000000 35.500000000 -11.500000000 -17.500000000 35.500000000 -11.500000000 -16.500000000 35.500000000 -11.500000000 -15.500000000 35.500000000 -11.500000000 -14.500000000 35.500000000 -11.500000000 -13.500000000 35.500000000 -11.500000000 -12.500000000 35.500000000 -11.500000000 -11.500000000 35.500000000 -11.500000000 -10.500000000 35.500000000 -11.500000000 -9.500000000 
35.500000000 -11.500000000 -8.500000000 35.500000000 -11.500000000 -7.500000000 35.500000000 -11.500000000 -6.500000000 35.500000000 -11.500000000 -5.500000000 35.500000000 -11.500000000 -4.500000000 35.500000000 -11.500000000 -3.500000000 35.500000000 -11.500000000 -2.500000000 35.500000000 -11.500000000 -1.500000000 35.500000000 -11.500000000 -0.500000000 35.500000000 -11.500000000 0.500000000 35.500000000 -11.500000000 1.500000000 35.500000000 -11.500000000 2.500000000 35.500000000 -11.500000000 3.500000000 35.500000000 -11.500000000 4.500000000 35.500000000 -11.500000000 5.500000000 35.500000000 -11.500000000 6.500000000 35.500000000 -11.500000000 7.500000000 35.500000000 -11.500000000 8.500000000 35.500000000 -11.500000000 9.500000000 35.500000000 -11.500000000 10.500000000 35.500000000 -11.500000000 11.500000000 35.500000000 -11.500000000 12.500000000 35.500000000 -11.500000000 13.500000000 35.500000000 -11.500000000 14.500000000 35.500000000 -11.500000000 15.500000000 35.500000000 -11.500000000 16.500000000 35.500000000 -11.500000000 17.500000000 35.500000000 -11.500000000 18.500000000 35.500000000 -11.500000000 19.500000000 35.500000000 -11.500000000 20.500000000 35.500000000 -11.500000000 21.500000000 35.500000000 -11.500000000 22.500000000 35.500000000 -11.500000000 23.500000000 35.500000000 -11.500000000 24.500000000 35.500000000 -11.500000000 25.499996185 35.499996185 -11.500000000 26.499954224 35.499954224 -11.500000000 27.499591827 35.499591827 -11.500000000 28.497470856 35.497474670 -11.500000000 29.488407135 35.488403320 -11.500000000 30.458978653 35.458980560 -11.500000000 31.384418488 35.384422302 -11.500000000 32.233222961 35.233222961 -11.500000000 32.981101990 34.981101990 -11.500000000 -33.981101990 -35.981101990 -10.500000000 -33.233226776 -36.233222961 -10.500000000 -32.384422302 -36.384418488 -10.500000000 -31.458978653 -36.458980560 -10.500000000 -30.488407135 -36.488403320 -10.500000000 -29.497472763 -36.497474670 -10.500000000 
-28.499593735 -36.499591827 -10.500000000 -27.499954224 -36.499954224 -10.500000000 -26.499996185 -36.499996185 -10.500000000 -25.500000000 -36.500000000 -10.500000000 -24.500000000 -36.500000000 -10.500000000 -23.500000000 -36.500000000 -10.500000000 -22.500000000 -36.500000000 -10.500000000 -21.500000000 -36.500000000 -10.500000000 -20.500000000 -36.500000000 -10.500000000 -19.500000000 -36.500000000 -10.500000000 -18.500000000 -36.500000000 -10.500000000 -17.500000000 -36.500000000 -10.500000000 -16.500000000 -36.500000000 -10.500000000 -15.500000000 -36.500000000 -10.500000000 -14.500000000 -36.500000000 -10.500000000 -13.500000000 -36.500000000 -10.500000000 -12.500000000 -36.500000000 -10.500000000 -11.500000000 -36.500000000 -10.500000000 -10.500000000 -36.500000000 -10.500000000 -9.500000000 -36.500000000 -10.500000000 -8.500000000 -36.500000000 -10.500000000 -7.500000000 -36.500000000 -10.500000000 -6.500000000 -36.500000000 -10.500000000 -5.500000000 -36.500000000 -10.500000000 -4.500000000 -36.500000000 -10.500000000 -3.500000000 -36.500000000 -10.500000000 -2.500000000 -36.500000000 -10.500000000 -1.500000000 -36.500000000 -10.500000000 -0.500000000 -36.500000000 -10.500000000 0.500000000 -36.500000000 -10.500000000 1.500000000 -36.500000000 -10.500000000 2.500000000 -36.500000000 -10.500000000 3.500000000 -36.500000000 -10.500000000 4.500000000 -36.500000000 -10.500000000 5.500000000 -36.500000000 -10.500000000 6.500000000 -36.500000000 -10.500000000 7.500000000 -36.500000000 -10.500000000 8.500000000 -36.500000000 -10.500000000 9.500000000 -36.500000000 -10.500000000 10.500000000 -36.500000000 -10.500000000 11.500000000 -36.500000000 -10.500000000 12.500000000 -36.500000000 -10.500000000 13.500000000 -36.500000000 -10.500000000 14.500000000 -36.500000000 -10.500000000 15.500000000 -36.500000000 -10.500000000 16.500000000 -36.500000000 -10.500000000 17.500000000 -36.500000000 -10.500000000 18.500000000 -36.500000000 -10.500000000 19.500000000 
-36.500000000 -10.500000000 20.500000000 -36.500000000 -10.500000000 21.500000000 -36.500000000 -10.500000000 22.500000000 -36.500000000 -10.500000000 23.500000000 -36.500000000 -10.500000000 24.500000000 -36.500000000 -10.500000000 25.499996185 -36.499996185 -10.500000000 26.499954224 -36.499954224 -10.500000000 27.499591827 -36.499591827 -10.500000000 28.497470856 -36.497467041 -10.500000000 29.488407135 -36.488403320 -10.500000000 30.458978653 -36.458980560 -10.500000000 31.384418488 -36.384422302 -10.500000000 32.233222961 -36.233222961 -10.500000000 32.981101990 -35.981101990 -10.500000000 -35.167964935 -35.167964935 -10.500000000 -34.622871399 -35.622871399 -10.500000000 33.622871399 -35.622871399 -10.500000000 34.167964935 -35.167964935 -10.500000000 -35.981101990 -33.981101990 -10.500000000 -35.622871399 -34.622871399 -10.500000000 34.622871399 -34.622871399 -10.500000000 34.981101990 -33.981101990 -10.500000000 -36.233222961 -33.233222961 -10.500000000 35.233222961 -33.233226776 -10.500000000 -36.384418488 -32.384422302 -10.500000000 35.384418488 -32.384422302 -10.500000000 -36.458976746 -31.458978653 -10.500000000 35.458980560 -31.458978653 -10.500000000 -36.488403320 -30.488407135 -10.500000000 35.488403320 -30.488407135 -10.500000000 -36.497467041 -29.497472763 -10.500000000 35.497474670 -29.497472763 -10.500000000 -36.499591827 -28.499593735 -10.500000000 35.499591827 -28.499593735 -10.500000000 -36.499954224 -27.499954224 -10.500000000 35.499954224 -27.499954224 -10.500000000 -36.499996185 -26.499996185 -10.500000000 35.499996185 -26.499996185 -10.500000000 -36.500000000 -25.500000000 -10.500000000 35.500000000 -25.500000000 -10.500000000 -36.500000000 -24.500000000 -10.500000000 35.500000000 -24.500000000 -10.500000000 -36.500000000 -23.500000000 -10.500000000 35.500000000 -23.500000000 -10.500000000 -36.500000000 -22.500000000 -10.500000000 35.500000000 -22.500000000 -10.500000000 -36.500000000 -21.500000000 -10.500000000 35.500000000 -21.500000000 
-10.500000000 -36.500000000 -20.500000000 -10.500000000 35.500000000 -20.500000000 -10.500000000 -36.500000000 -19.500000000 -10.500000000 35.500000000 -19.500000000 -10.500000000 -36.500000000 -18.500000000 -10.500000000 35.500000000 -18.500000000 -10.500000000 -36.500000000 -17.500000000 -10.500000000 35.500000000 -17.500000000 -10.500000000 -36.500000000 -16.500000000 -10.500000000 35.500000000 -16.500000000 -10.500000000 -36.500000000 -15.500000000 -10.500000000 35.500000000 -15.500000000 -10.500000000 -36.500000000 -14.500000000 -10.500000000 35.500000000 -14.500000000 -10.500000000 -36.500000000 -13.500000000 -10.500000000 35.500000000 -13.500000000 -10.500000000 -36.500000000 -12.500000000 -10.500000000 35.500000000 -12.500000000 -10.500000000 -36.500000000 -11.500000000 -10.500000000 35.500000000 -11.500000000 -10.500000000 -36.500000000 -10.500000000 -10.500000000 35.500000000 -10.500000000 -10.500000000 -36.500000000 -9.500000000 -10.500000000 35.500000000 -9.500000000 -10.500000000 -36.500000000 -8.500000000 -10.500000000 35.500000000 -8.500000000 -10.500000000 -36.500000000 -7.500000000 -10.500000000 35.500000000 -7.500000000 -10.500000000 -36.500000000 -6.500000000 -10.500000000 35.500000000 -6.500000000 -10.500000000 -36.500000000 -5.500000000 -10.500000000 35.500000000 -5.500000000 -10.500000000 -36.500000000 -4.500000000 -10.500000000 35.500000000 -4.500000000 -10.500000000 -36.500000000 -3.500000000 -10.500000000 35.500000000 -3.500000000 -10.500000000 -36.500000000 -2.500000000 -10.500000000 35.500000000 -2.500000000 -10.500000000 -36.500000000 -1.500000000 -10.500000000 35.500000000 -1.500000000 -10.500000000 -36.500000000 -0.500000000 -10.500000000 35.500000000 -0.500000000 -10.500000000 -36.500000000 0.500000000 -10.500000000 35.500000000 0.500000000 -10.500000000 -36.500000000 1.500000000 -10.500000000 35.500000000 1.500000000 -10.500000000 -36.500000000 2.500000000 -10.500000000 35.500000000 2.500000000 -10.500000000 -36.500000000 3.500000000 
-10.500000000 35.500000000 3.500000000 -10.500000000 -36.500000000 4.500000000 -10.500000000 35.500000000 4.500000000 -10.500000000 -36.500000000 5.500000000 -10.500000000 35.500000000 5.500000000 -10.500000000 -36.500000000 6.500000000 -10.500000000 35.500000000 6.500000000 -10.500000000 -36.500000000 7.500000000 -10.500000000 35.500000000 7.500000000 -10.500000000 -36.500000000 8.500000000 -10.500000000 35.500000000 8.500000000 -10.500000000 -36.500000000 9.500000000 -10.500000000 35.500000000 9.500000000 -10.500000000 -36.500000000 10.500000000 -10.500000000 35.500000000 10.500000000 -10.500000000 -36.500000000 11.500000000 -10.500000000 35.500000000 11.500000000 -10.500000000 -36.500000000 12.500000000 -10.500000000 35.500000000 12.500000000 -10.500000000 -36.500000000 13.500000000 -10.500000000 35.500000000 13.500000000 -10.500000000 -36.500000000 14.500000000 -10.500000000 35.500000000 14.500000000 -10.500000000 -36.500000000 15.500000000 -10.500000000 35.500000000 15.500000000 -10.500000000 -36.500000000 16.500000000 -10.500000000 35.500000000 16.500000000 -10.500000000 -36.500000000 17.500000000 -10.500000000 35.500000000 17.500000000 -10.500000000 -36.500000000 18.500000000 -10.500000000 35.500000000 18.500000000 -10.500000000 -36.500000000 19.500000000 -10.500000000 35.500000000 19.500000000 -10.500000000 -36.500000000 20.500000000 -10.500000000 35.500000000 20.500000000 -10.500000000 -36.500000000 21.500000000 -10.500000000 35.500000000 21.500000000 -10.500000000 -36.500000000 22.500000000 -10.500000000 35.500000000 22.500000000 -10.500000000 -36.500000000 23.500000000 -10.500000000 35.500000000 23.500000000 -10.500000000 -36.500000000 24.500000000 -10.500000000 35.500000000 24.500000000 -10.500000000 -36.499996185 25.499996185 -10.500000000 35.499996185 25.499996185 -10.500000000 -36.499954224 26.499954224 -10.500000000 35.499954224 26.499954224 -10.500000000 -36.499591827 27.499591827 -10.500000000 35.499591827 27.499591827 -10.500000000 -36.497474670 
28.497470856 -10.500000000 35.497467041 28.497470856 -10.500000000 -36.488403320 29.488407135 -10.500000000 35.488403320 29.488407135 -10.500000000 -36.458980560 30.458978653 -10.500000000 35.458980560 30.458978653 -10.500000000 -36.384422302 31.384418488 -10.500000000 35.384422302 31.384418488 -10.500000000 -36.233222961 32.233222961 -10.500000000 35.233222961 32.233222961 -10.500000000 -35.981101990 32.981101990 -10.500000000 -35.622871399 33.622871399 -10.500000000 34.622871399 33.622871399 -10.500000000 34.981101990 32.981101990 -10.500000000 -35.167964935 34.167964935 -10.500000000 -34.622871399 34.622871399 -10.500000000 33.622871399 34.622871399 -10.500000000 34.167964935 34.167964935 -10.500000000 -33.981101990 34.981101990 -10.500000000 -33.233222961 35.233222961 -10.500000000 -32.384422302 35.384418488 -10.500000000 -31.458978653 35.458976746 -10.500000000 -30.488407135 35.488403320 -10.500000000 -29.497472763 35.497467041 -10.500000000 -28.499593735 35.499591827 -10.500000000 -27.499954224 35.499954224 -10.500000000 -26.499996185 35.499996185 -10.500000000 -25.500000000 35.500000000 -10.500000000 -24.500000000 35.500000000 -10.500000000 -23.500000000 35.500000000 -10.500000000 -22.500000000 35.500000000 -10.500000000 -21.500000000 35.500000000 -10.500000000 -20.500000000 35.500000000 -10.500000000 -19.500000000 35.500000000 -10.500000000 -18.500000000 35.500000000 -10.500000000 -17.500000000 35.500000000 -10.500000000 -16.500000000 35.500000000 -10.500000000 -15.500000000 35.500000000 -10.500000000 -14.500000000 35.500000000 -10.500000000 -13.500000000 35.500000000 -10.500000000 -12.500000000 35.500000000 -10.500000000 -11.500000000 35.500000000 -10.500000000 -10.500000000 35.500000000 -10.500000000 -9.500000000 35.500000000 -10.500000000 -8.500000000 35.500000000 -10.500000000 -7.500000000 35.500000000 -10.500000000 -6.500000000 35.500000000 -10.500000000 -5.500000000 35.500000000 -10.500000000 -4.500000000 35.500000000 -10.500000000 -3.500000000 
35.500000000 -10.500000000 -2.500000000 35.500000000 -10.500000000 -1.500000000 35.500000000 -10.500000000 -0.500000000 35.500000000 -10.500000000 0.500000000 35.500000000 -10.500000000 1.500000000 35.500000000 -10.500000000 2.500000000 35.500000000 -10.500000000 3.500000000 35.500000000 -10.500000000 4.500000000 35.500000000 -10.500000000 5.500000000 35.500000000 -10.500000000 6.500000000 35.500000000 -10.500000000 7.500000000 35.500000000 -10.500000000 8.500000000 35.500000000 -10.500000000 9.500000000 35.500000000 -10.500000000 10.500000000 35.500000000 -10.500000000 11.500000000 35.500000000 -10.500000000 12.500000000 35.500000000 -10.500000000 13.500000000 35.500000000 -10.500000000 14.500000000 35.500000000 -10.500000000 15.500000000 35.500000000 -10.500000000 16.500000000 35.500000000 -10.500000000 17.500000000 35.500000000 -10.500000000 18.500000000 35.500000000 -10.500000000 19.500000000 35.500000000 -10.500000000 20.500000000 35.500000000 -10.500000000 21.500000000 35.500000000 -10.500000000 22.500000000 35.500000000 -10.500000000 23.500000000 35.500000000 -10.500000000 24.500000000 35.500000000 -10.500000000 25.499996185 35.499996185 -10.500000000 26.499954224 35.499954224 -10.500000000 27.499591827 35.499591827 -10.500000000 28.497470856 35.497474670 -10.500000000 29.488407135 35.488403320 -10.500000000 30.458978653 35.458980560 -10.500000000 31.384418488 35.384422302 -10.500000000 32.233222961 35.233222961 -10.500000000 32.981101990 34.981101990 -10.500000000 -33.981101990 -35.981101990 -9.500000000 -33.233226776 -36.233222961 -9.500000000 -32.384422302 -36.384418488 -9.500000000 -31.458978653 -36.458980560 -9.500000000 -30.488407135 -36.488403320 -9.500000000 -29.497472763 -36.497474670 -9.500000000 -28.499593735 -36.499591827 -9.500000000 -27.499954224 -36.499954224 -9.500000000 -26.499996185 -36.499996185 -9.500000000 -25.500000000 -36.500000000 -9.500000000 -24.500000000 -36.500000000 -9.500000000 -23.500000000 -36.500000000 -9.500000000 
-22.500000000 -36.500000000 -9.500000000 -21.500000000 -36.500000000 -9.500000000 -20.500000000 -36.500000000 -9.500000000 -19.500000000 -36.500000000 -9.500000000 -18.500000000 -36.500000000 -9.500000000 -17.500000000 -36.500000000 -9.500000000 -16.500000000 -36.500000000 -9.500000000 -15.500000000 -36.500000000 -9.500000000 -14.500000000 -36.500000000 -9.500000000 -13.500000000 -36.500000000 -9.500000000 -12.500000000 -36.500000000 -9.500000000 -11.500000000 -36.500000000 -9.500000000 -10.500000000 -36.500000000 -9.500000000 -9.500000000 -36.500000000 -9.500000000 -8.500000000 -36.500000000 -9.500000000 -7.500000000 -36.500000000 -9.500000000 -6.500000000 -36.500000000 -9.500000000 -5.500000000 -36.500000000 -9.500000000 -4.500000000 -36.500000000 -9.500000000 -3.500000000 -36.500000000 -9.500000000 -2.500000000 -36.500000000 -9.500000000 -1.500000000 -36.500000000 -9.500000000 -0.500000000 -36.500000000 -9.500000000 0.500000000 -36.500000000 -9.500000000 1.500000000 -36.500000000 -9.500000000 2.500000000 -36.500000000 -9.500000000 3.500000000 -36.500000000 -9.500000000 4.500000000 -36.500000000 -9.500000000 5.500000000 -36.500000000 -9.500000000 6.500000000 -36.500000000 -9.500000000 7.500000000 -36.500000000 -9.500000000 8.500000000 -36.500000000 -9.500000000 9.500000000 -36.500000000 -9.500000000 10.500000000 -36.500000000 -9.500000000 11.500000000 -36.500000000 -9.500000000 12.500000000 -36.500000000 -9.500000000 13.500000000 -36.500000000 -9.500000000 14.500000000 -36.500000000 -9.500000000 15.500000000 -36.500000000 -9.500000000 16.500000000 -36.500000000 -9.500000000 17.500000000 -36.500000000 -9.500000000 18.500000000 -36.500000000 -9.500000000 19.500000000 -36.500000000 -9.500000000 20.500000000 -36.500000000 -9.500000000 21.500000000 -36.500000000 -9.500000000 22.500000000 -36.500000000 -9.500000000 23.500000000 -36.500000000 -9.500000000 24.500000000 -36.500000000 -9.500000000 25.499996185 -36.499996185 -9.500000000 26.499954224 -36.499954224 
-9.500000000 27.499591827 -36.499591827 -9.500000000 28.497470856 -36.497467041 -9.500000000 29.488407135 -36.488403320 -9.500000000 30.458978653 -36.458980560 -9.500000000 31.384418488 -36.384422302 -9.500000000 32.233222961 -36.233222961 -9.500000000 32.981101990 -35.981101990 -9.500000000 -35.167964935 -35.167964935 -9.500000000 -34.622871399 -35.622871399 -9.500000000 33.622871399 -35.622871399 -9.500000000 34.167964935 -35.167964935 -9.500000000 -35.981101990 -33.981101990 -9.500000000 -35.622871399 -34.622871399 -9.500000000 34.622871399 -34.622871399 -9.500000000 34.981101990 -33.981101990 -9.500000000 -36.233222961 -33.233222961 -9.500000000 35.233222961 -33.233226776 -9.500000000 -36.384418488 -32.384422302 -9.500000000 35.384418488 -32.384422302 -9.500000000 -36.458976746 -31.458978653 -9.500000000 35.458980560 -31.458978653 -9.500000000 -36.488403320 -30.488407135 -9.500000000 35.488403320 -30.488407135 -9.500000000 -36.497467041 -29.497472763 -9.500000000 35.497474670 -29.497472763 -9.500000000 -36.499591827 -28.499593735 -9.500000000 35.499591827 -28.499593735 -9.500000000 -36.499954224 -27.499954224 -9.500000000 35.499954224 -27.499954224 -9.500000000 -36.499996185 -26.499996185 -9.500000000 35.499996185 -26.499996185 -9.500000000 -36.500000000 -25.500000000 -9.500000000 35.500000000 -25.500000000 -9.500000000 -36.500000000 -24.500000000 -9.500000000 35.500000000 -24.500000000 -9.500000000 -36.500000000 -23.500000000 -9.500000000 35.500000000 -23.500000000 -9.500000000 -36.500000000 -22.500000000 -9.500000000 35.500000000 -22.500000000 -9.500000000 -36.500000000 -21.500000000 -9.500000000 35.500000000 -21.500000000 -9.500000000 -36.500000000 -20.500000000 -9.500000000 35.500000000 -20.500000000 -9.500000000 -36.500000000 -19.500000000 -9.500000000 35.500000000 -19.500000000 -9.500000000 -36.500000000 -18.500000000 -9.500000000 35.500000000 -18.500000000 -9.500000000 -36.500000000 -17.500000000 -9.500000000 35.500000000 -17.500000000 -9.500000000 
-36.500000000 -16.500000000 -9.500000000 35.500000000 -16.500000000 -9.500000000 -36.500000000 -15.500000000 -9.500000000 35.500000000 -15.500000000 -9.500000000 -36.500000000 -14.500000000 -9.500000000 35.500000000 -14.500000000 -9.500000000 -36.500000000 -13.500000000 -9.500000000 35.500000000 -13.500000000 -9.500000000 -36.500000000 -12.500000000 -9.500000000 35.500000000 -12.500000000 -9.500000000 -36.500000000 -11.500000000 -9.500000000 35.500000000 -11.500000000 -9.500000000 -36.500000000 -10.500000000 -9.500000000 35.500000000 -10.500000000 -9.500000000 -36.500000000 -9.500000000 -9.500000000 35.500000000 -9.500000000 -9.500000000 -36.500000000 -8.500000000 -9.500000000 35.500000000 -8.500000000 -9.500000000 -36.500000000 -7.500000000 -9.500000000 35.500000000 -7.500000000 -9.500000000 -36.500000000 -6.500000000 -9.500000000 35.500000000 -6.500000000 -9.500000000 -36.500000000 -5.500000000 -9.500000000 35.500000000 -5.500000000 -9.500000000 -36.500000000 -4.500000000 -9.500000000 35.500000000 -4.500000000 -9.500000000 -36.500000000 -3.500000000 -9.500000000 35.500000000 -3.500000000 -9.500000000 -36.500000000 -2.500000000 -9.500000000 35.500000000 -2.500000000 -9.500000000 -36.500000000 -1.500000000 -9.500000000 35.500000000 -1.500000000 -9.500000000 -36.500000000 -0.500000000 -9.500000000 35.500000000 -0.500000000 -9.500000000 -36.500000000 0.500000000 -9.500000000 35.500000000 0.500000000 -9.500000000 -36.500000000 1.500000000 -9.500000000 35.500000000 1.500000000 -9.500000000 -36.500000000 2.500000000 -9.500000000 35.500000000 2.500000000 -9.500000000 -36.500000000 3.500000000 -9.500000000 35.500000000 3.500000000 -9.500000000 -36.500000000 4.500000000 -9.500000000 35.500000000 4.500000000 -9.500000000 -36.500000000 5.500000000 -9.500000000 35.500000000 5.500000000 -9.500000000 -36.500000000 6.500000000 -9.500000000 35.500000000 6.500000000 -9.500000000 -36.500000000 7.500000000 -9.500000000 35.500000000 7.500000000 -9.500000000 -36.500000000 8.500000000 
-9.500000000 35.500000000 8.500000000 -9.500000000 -36.500000000 9.500000000 -9.500000000 35.500000000 9.500000000 -9.500000000 -36.500000000 10.500000000 -9.500000000 35.500000000 10.500000000 -9.500000000 -36.500000000 11.500000000 -9.500000000 35.500000000 11.500000000 -9.500000000 -36.500000000 12.500000000 -9.500000000 35.500000000 12.500000000 -9.500000000 -36.500000000 13.500000000 -9.500000000 35.500000000 13.500000000 -9.500000000 -36.500000000 14.500000000 -9.500000000 35.500000000 14.500000000 -9.500000000 -36.500000000 15.500000000 -9.500000000 35.500000000 15.500000000 -9.500000000 -36.500000000 16.500000000 -9.500000000 35.500000000 16.500000000 -9.500000000 -36.500000000 17.500000000 -9.500000000 35.500000000 17.500000000 -9.500000000 -36.500000000 18.500000000 -9.500000000 35.500000000 18.500000000 -9.500000000 -36.500000000 19.500000000 -9.500000000 35.500000000 19.500000000 -9.500000000 -36.500000000 20.500000000 -9.500000000 35.500000000 20.500000000 -9.500000000 -36.500000000 21.500000000 -9.500000000 35.500000000 21.500000000 -9.500000000 -36.500000000 22.500000000 -9.500000000 35.500000000 22.500000000 -9.500000000 -36.500000000 23.500000000 -9.500000000 35.500000000 23.500000000 -9.500000000 -36.500000000 24.500000000 -9.500000000 35.500000000 24.500000000 -9.500000000 -36.499996185 25.499996185 -9.500000000 35.499996185 25.499996185 -9.500000000 -36.499954224 26.499954224 -9.500000000 35.499954224 26.499954224 -9.500000000 -36.499591827 27.499591827 -9.500000000 35.499591827 27.499591827 -9.500000000 -36.497474670 28.497470856 -9.500000000 35.497467041 28.497470856 -9.500000000 -36.488403320 29.488407135 -9.500000000 35.488403320 29.488407135 -9.500000000 -36.458980560 30.458978653 -9.500000000 35.458980560 30.458978653 -9.500000000 -36.384422302 31.384418488 -9.500000000 35.384422302 31.384418488 -9.500000000 -36.233222961 32.233222961 -9.500000000 35.233222961 32.233222961 -9.500000000 -35.981101990 32.981101990 -9.500000000 -35.622871399 
33.622871399 -9.500000000 34.622871399 33.622871399 -9.500000000 34.981101990 32.981101990 -9.500000000 -35.167964935 34.167964935 -9.500000000 -34.622871399 34.622871399 -9.500000000 33.622871399 34.622871399 -9.500000000 34.167964935 34.167964935 -9.500000000 -33.981101990 34.981101990 -9.500000000 -33.233222961 35.233222961 -9.500000000 -32.384422302 35.384418488 -9.500000000 -31.458978653 35.458976746 -9.500000000 -30.488407135 35.488403320 -9.500000000 -29.497472763 35.497467041 -9.500000000 -28.499593735 35.499591827 -9.500000000 -27.499954224 35.499954224 -9.500000000 -26.499996185 35.499996185 -9.500000000 -25.500000000 35.500000000 -9.500000000 -24.500000000 35.500000000 -9.500000000 -23.500000000 35.500000000 -9.500000000 -22.500000000 35.500000000 -9.500000000 -21.500000000 35.500000000 -9.500000000 -20.500000000 35.500000000 -9.500000000 -19.500000000 35.500000000 -9.500000000 -18.500000000 35.500000000 -9.500000000 -17.500000000 35.500000000 -9.500000000 -16.500000000 35.500000000 -9.500000000 -15.500000000 35.500000000 -9.500000000 -14.500000000 35.500000000 -9.500000000 -13.500000000 35.500000000 -9.500000000 -12.500000000 35.500000000 -9.500000000 -11.500000000 35.500000000 -9.500000000 -10.500000000 35.500000000 -9.500000000 -9.500000000 35.500000000 -9.500000000 -8.500000000 35.500000000 -9.500000000 -7.500000000 35.500000000 -9.500000000 -6.500000000 35.500000000 -9.500000000 -5.500000000 35.500000000 -9.500000000 -4.500000000 35.500000000 -9.500000000 -3.500000000 35.500000000 -9.500000000 -2.500000000 35.500000000 -9.500000000 -1.500000000 35.500000000 -9.500000000 -0.500000000 35.500000000 -9.500000000 0.500000000 35.500000000 -9.500000000 1.500000000 35.500000000 -9.500000000 2.500000000 35.500000000 -9.500000000 3.500000000 35.500000000 -9.500000000 4.500000000 35.500000000 -9.500000000 5.500000000 35.500000000 -9.500000000 6.500000000 35.500000000 -9.500000000 7.500000000 35.500000000 -9.500000000 8.500000000 35.500000000 -9.500000000 
9.500000000 35.500000000 -9.500000000 10.500000000 35.500000000 -9.500000000 11.500000000 35.500000000 -9.500000000 12.500000000 35.500000000 -9.500000000 13.500000000 35.500000000 -9.500000000 14.500000000 35.500000000 -9.500000000 15.500000000 35.500000000 -9.500000000 16.500000000 35.500000000 -9.500000000 17.500000000 35.500000000 -9.500000000 18.500000000 35.500000000 -9.500000000 19.500000000 35.500000000 -9.500000000 20.500000000 35.500000000 -9.500000000 21.500000000 35.500000000 -9.500000000 22.500000000 35.500000000 -9.500000000 23.500000000 35.500000000 -9.500000000 24.500000000 35.500000000 -9.500000000 25.499996185 35.499996185 -9.500000000 26.499954224 35.499954224 -9.500000000 27.499591827 35.499591827 -9.500000000 28.497470856 35.497474670 -9.500000000 29.488407135 35.488403320 -9.500000000 30.458978653 35.458980560 -9.500000000 31.384418488 35.384422302 -9.500000000 32.233222961 35.233222961 -9.500000000 32.981101990 34.981101990 -9.500000000 -33.981101990 -35.981101990 -8.500000000 -33.233226776 -36.233222961 -8.500000000 -32.384422302 -36.384418488 -8.500000000 -31.458978653 -36.458980560 -8.500000000 -30.488407135 -36.488403320 -8.500000000 -29.497472763 -36.497474670 -8.500000000 -28.499593735 -36.499591827 -8.500000000 -27.499954224 -36.499954224 -8.500000000 -26.499996185 -36.499996185 -8.500000000 -25.500000000 -36.500000000 -8.500000000 -24.500000000 -36.500000000 -8.500000000 -23.500000000 -36.500000000 -8.500000000 -22.500000000 -36.500000000 -8.500000000 -21.500000000 -36.500000000 -8.500000000 -20.500000000 -36.500000000 -8.500000000 -19.500000000 -36.500000000 -8.500000000 -18.500000000 -36.500000000 -8.500000000 -17.500000000 -36.500000000 -8.500000000 -16.500000000 -36.500000000 -8.500000000 -15.500000000 -36.500000000 -8.500000000 -14.500000000 -36.500000000 -8.500000000 -13.500000000 -36.500000000 -8.500000000 -12.500000000 -36.500000000 -8.500000000 -11.500000000 -36.500000000 -8.500000000 -10.500000000 -36.500000000 -8.500000000 
-9.500000000 -36.500000000 -8.500000000 -8.500000000 -36.500000000 -8.500000000 -7.500000000 -36.500000000 -8.500000000 -6.500000000 -36.500000000 -8.500000000 -5.500000000 -36.500000000 -8.500000000 -4.500000000 -36.500000000 -8.500000000 -3.500000000 -36.500000000 -8.500000000 -2.500000000 -36.500000000 -8.500000000 -1.500000000 -36.500000000 -8.500000000 -0.500000000 -36.500000000 -8.500000000 0.500000000 -36.500000000 -8.500000000 1.500000000 -36.500000000 -8.500000000 2.500000000 -36.500000000 -8.500000000 3.500000000 -36.500000000 -8.500000000 4.500000000 -36.500000000 -8.500000000 5.500000000 -36.500000000 -8.500000000 6.500000000 -36.500000000 -8.500000000 7.500000000 -36.500000000 -8.500000000 8.500000000 -36.500000000 -8.500000000 9.500000000 -36.500000000 -8.500000000 10.500000000 -36.500000000 -8.500000000 11.500000000 -36.500000000 -8.500000000 12.500000000 -36.500000000 -8.500000000 13.500000000 -36.500000000 -8.500000000 14.500000000 -36.500000000 -8.500000000 15.500000000 -36.500000000 -8.500000000 16.500000000 -36.500000000 -8.500000000 17.500000000 -36.500000000 -8.500000000 18.500000000 -36.500000000 -8.500000000 19.500000000 -36.500000000 -8.500000000 20.500000000 -36.500000000 -8.500000000 21.500000000 -36.500000000 -8.500000000 22.500000000 -36.500000000 -8.500000000 23.500000000 -36.500000000 -8.500000000 24.500000000 -36.500000000 -8.500000000 25.499996185 -36.499996185 -8.500000000 26.499954224 -36.499954224 -8.500000000 27.499591827 -36.499591827 -8.500000000 28.497470856 -36.497467041 -8.500000000 29.488407135 -36.488403320 -8.500000000 30.458978653 -36.458980560 -8.500000000 31.384418488 -36.384422302 -8.500000000 32.233222961 -36.233222961 -8.500000000 32.981101990 -35.981101990 -8.500000000 -35.167964935 -35.167964935 -8.500000000 -34.622871399 -35.622871399 -8.500000000 33.622871399 -35.622871399 -8.500000000 34.167964935 -35.167964935 -8.500000000 -35.981101990 -33.981101990 -8.500000000 -35.622871399 -34.622871399 -8.500000000 
34.622871399 -34.622871399 -8.500000000 34.981101990 -33.981101990 -8.500000000 -36.233222961 -33.233222961 -8.500000000 35.233222961 -33.233226776 -8.500000000 -36.384418488 -32.384422302 -8.500000000 35.384418488 -32.384422302 -8.500000000 -36.458976746 -31.458978653 -8.500000000 35.458980560 -31.458978653 -8.500000000 -36.488403320 -30.488407135 -8.500000000 35.488403320 -30.488407135 -8.500000000 -36.497467041 -29.497472763 -8.500000000 35.497474670 -29.497472763 -8.500000000 -36.499591827 -28.499593735 -8.500000000 35.499591827 -28.499593735 -8.500000000 -36.499954224 -27.499954224 -8.500000000 35.499954224 -27.499954224 -8.500000000 -36.499996185 -26.499996185 -8.500000000 35.499996185 -26.499996185 -8.500000000 -36.500000000 -25.500000000 -8.500000000 35.500000000 -25.500000000 -8.500000000 -36.500000000 -24.500000000 -8.500000000 35.500000000 -24.500000000 -8.500000000 -36.500000000 -23.500000000 -8.500000000 35.500000000 -23.500000000 -8.500000000 -36.500000000 -22.500000000 -8.500000000 35.500000000 -22.500000000 -8.500000000 -36.500000000 -21.500000000 -8.500000000 35.500000000 -21.500000000 -8.500000000 -36.500000000 -20.500000000 -8.500000000 35.500000000 -20.500000000 -8.500000000 -36.500000000 -19.500000000 -8.500000000 35.500000000 -19.500000000 -8.500000000 -36.500000000 -18.500000000 -8.500000000 35.500000000 -18.500000000 -8.500000000 -36.500000000 -17.500000000 -8.500000000 35.500000000 -17.500000000 -8.500000000 -36.500000000 -16.500000000 -8.500000000 35.500000000 -16.500000000 -8.500000000 -36.500000000 -15.500000000 -8.500000000 35.500000000 -15.500000000 -8.500000000 -36.500000000 -14.500000000 -8.500000000 35.500000000 -14.500000000 -8.500000000 -36.500000000 -13.500000000 -8.500000000 35.500000000 -13.500000000 -8.500000000 -36.500000000 -12.500000000 -8.500000000 35.500000000 -12.500000000 -8.500000000 -36.500000000 -11.500000000 -8.500000000 35.500000000 -11.500000000 -8.500000000 -36.500000000 -10.500000000 -8.500000000 35.500000000 
-10.500000000 -8.500000000 -36.500000000 -9.500000000 -8.500000000 35.500000000 -9.500000000 -8.500000000 -36.500000000 -8.500000000 -8.500000000 35.500000000 -8.500000000 -8.500000000 -36.500000000 -7.500000000 -8.500000000 35.500000000 -7.500000000 -8.500000000 -36.500000000 -6.500000000 -8.500000000 35.500000000 -6.500000000 -8.500000000 -36.500000000 -5.500000000 -8.500000000 35.500000000 -5.500000000 -8.500000000 -36.500000000 -4.500000000 -8.500000000 35.500000000 -4.500000000 -8.500000000 -36.500000000 -3.500000000 -8.500000000 35.500000000 -3.500000000 -8.500000000 -36.500000000 -2.500000000 -8.500000000 35.500000000 -2.500000000 -8.500000000 -36.500000000 -1.500000000 -8.500000000 35.500000000 -1.500000000 -8.500000000 -36.500000000 -0.500000000 -8.500000000 35.500000000 -0.500000000 -8.500000000 -36.500000000 0.500000000 -8.500000000 35.500000000 0.500000000 -8.500000000 -36.500000000 1.500000000 -8.500000000 35.500000000 1.500000000 -8.500000000 -36.500000000 2.500000000 -8.500000000 35.500000000 2.500000000 -8.500000000 -36.500000000 3.500000000 -8.500000000 35.500000000 3.500000000 -8.500000000 -36.500000000 4.500000000 -8.500000000 35.500000000 4.500000000 -8.500000000 -36.500000000 5.500000000 -8.500000000 35.500000000 5.500000000 -8.500000000 -36.500000000 6.500000000 -8.500000000 35.500000000 6.500000000 -8.500000000 -36.500000000 7.500000000 -8.500000000 35.500000000 7.500000000 -8.500000000 -36.500000000 8.500000000 -8.500000000 35.500000000 8.500000000 -8.500000000 -36.500000000 9.500000000 -8.500000000 35.500000000 9.500000000 -8.500000000 -36.500000000 10.500000000 -8.500000000 35.500000000 10.500000000 -8.500000000 -36.500000000 11.500000000 -8.500000000 35.500000000 11.500000000 -8.500000000 -36.500000000 12.500000000 -8.500000000 35.500000000 12.500000000 -8.500000000 -36.500000000 13.500000000 -8.500000000 35.500000000 13.500000000 -8.500000000 -36.500000000 14.500000000 -8.500000000 35.500000000 14.500000000 -8.500000000 -36.500000000 
15.500000000 -8.500000000 35.500000000 15.500000000 -8.500000000 -36.500000000 16.500000000 -8.500000000 35.500000000 16.500000000 -8.500000000 -36.500000000 17.500000000 -8.500000000 35.500000000 17.500000000 -8.500000000 -36.500000000 18.500000000 -8.500000000 35.500000000 18.500000000 -8.500000000 -36.500000000 19.500000000 -8.500000000 35.500000000 19.500000000 -8.500000000 -36.500000000 20.500000000 -8.500000000 35.500000000 20.500000000 -8.500000000 -36.500000000 21.500000000 -8.500000000 35.500000000 21.500000000 -8.500000000 -36.500000000 22.500000000 -8.500000000 35.500000000 22.500000000 -8.500000000 -36.500000000 23.500000000 -8.500000000 35.500000000 23.500000000 -8.500000000 -36.500000000 24.500000000 -8.500000000 35.500000000 24.500000000 -8.500000000 -36.499996185 25.499996185 -8.500000000 35.499996185 25.499996185 -8.500000000 -36.499954224 26.499954224 -8.500000000 35.499954224 26.499954224 -8.500000000 -36.499591827 27.499591827 -8.500000000 35.499591827 27.499591827 -8.500000000 -36.497474670 28.497470856 -8.500000000 35.497467041 28.497470856 -8.500000000 -36.488403320 29.488407135 -8.500000000 35.488403320 29.488407135 -8.500000000 -36.458980560 30.458978653 -8.500000000 35.458980560 30.458978653 -8.500000000 -36.384422302 31.384418488 -8.500000000 35.384422302 31.384418488 -8.500000000 -36.233222961 32.233222961 -8.500000000 35.233222961 32.233222961 -8.500000000 -35.981101990 32.981101990 -8.500000000 -35.622871399 33.622871399 -8.500000000 34.622871399 33.622871399 -8.500000000 34.981101990 32.981101990 -8.500000000 -35.167964935 34.167964935 -8.500000000 -34.622871399 34.622871399 -8.500000000 33.622871399 34.622871399 -8.500000000 34.167964935 34.167964935 -8.500000000 -33.981101990 34.981101990 -8.500000000 -33.233222961 35.233222961 -8.500000000 -32.384422302 35.384418488 -8.500000000 -31.458978653 35.458976746 -8.500000000 -30.488407135 35.488403320 -8.500000000 -29.497472763 35.497467041 -8.500000000 -28.499593735 35.499591827 
-8.500000000 -27.499954224 35.499954224 -8.500000000 -26.499996185 35.499996185 -8.500000000 -25.500000000 35.500000000 -8.500000000 -24.500000000 35.500000000 -8.500000000 -23.500000000 35.500000000 -8.500000000 -22.500000000 35.500000000 -8.500000000 -21.500000000 35.500000000 -8.500000000 -20.500000000 35.500000000 -8.500000000 -19.500000000 35.500000000 -8.500000000 -18.500000000 35.500000000 -8.500000000 -17.500000000 35.500000000 -8.500000000 -16.500000000 35.500000000 -8.500000000 -15.500000000 35.500000000 -8.500000000 -14.500000000 35.500000000 -8.500000000 -13.500000000 35.500000000 -8.500000000 -12.500000000 35.500000000 -8.500000000 -11.500000000 35.500000000 -8.500000000 -10.500000000 35.500000000 -8.500000000 -9.500000000 35.500000000 -8.500000000 -8.500000000 35.500000000 -8.500000000 -7.500000000 35.500000000 -8.500000000 -6.500000000 35.500000000 -8.500000000 -5.500000000 35.500000000 -8.500000000 -4.500000000 35.500000000 -8.500000000 -3.500000000 35.500000000 -8.500000000 -2.500000000 35.500000000 -8.500000000 -1.500000000 35.500000000 -8.500000000 -0.500000000 35.500000000 -8.500000000 0.500000000 35.500000000 -8.500000000 1.500000000 35.500000000 -8.500000000 2.500000000 35.500000000 -8.500000000 3.500000000 35.500000000 -8.500000000 4.500000000 35.500000000 -8.500000000 5.500000000 35.500000000 -8.500000000 6.500000000 35.500000000 -8.500000000 7.500000000 35.500000000 -8.500000000 8.500000000 35.500000000 -8.500000000 9.500000000 35.500000000 -8.500000000 10.500000000 35.500000000 -8.500000000 11.500000000 35.500000000 -8.500000000 12.500000000 35.500000000 -8.500000000 13.500000000 35.500000000 -8.500000000 14.500000000 35.500000000 -8.500000000 15.500000000 35.500000000 -8.500000000 16.500000000 35.500000000 -8.500000000 17.500000000 35.500000000 -8.500000000 18.500000000 35.500000000 -8.500000000 19.500000000 35.500000000 -8.500000000 20.500000000 35.500000000 -8.500000000 21.500000000 35.500000000 -8.500000000 22.500000000 35.500000000 
-8.500000000 23.500000000 35.500000000 -8.500000000 24.500000000 35.500000000 -8.500000000 25.499996185 35.499996185 -8.500000000 26.499954224 35.499954224 -8.500000000 27.499591827 35.499591827 -8.500000000 28.497470856 35.497474670 -8.500000000 29.488407135 35.488403320 -8.500000000 30.458978653 35.458980560 -8.500000000 31.384418488 35.384422302 -8.500000000 32.233222961 35.233222961 -8.500000000 32.981101990 34.981101990 -8.500000000 -33.981101990 -35.981101990 -7.500000000 -33.233226776 -36.233222961 -7.500000000 -32.384422302 -36.384418488 -7.500000000 -31.458978653 -36.458980560 -7.500000000 -30.488407135 -36.488403320 -7.500000000 -29.497472763 -36.497474670 -7.500000000 -28.499593735 -36.499591827 -7.500000000 -27.499954224 -36.499954224 -7.500000000 -26.499996185 -36.499996185 -7.500000000 -25.500000000 -36.500000000 -7.500000000 -24.500000000 -36.500000000 -7.500000000 -23.500000000 -36.500000000 -7.500000000 -22.500000000 -36.500000000 -7.500000000 -21.500000000 -36.500000000 -7.500000000 -20.500000000 -36.500000000 -7.500000000 -19.500000000 -36.500000000 -7.500000000 -18.500000000 -36.500000000 -7.500000000 -17.500000000 -36.500000000 -7.500000000 -16.500000000 -36.500000000 -7.500000000 -15.500000000 -36.500000000 -7.500000000 -14.500000000 -36.500000000 -7.500000000 -13.500000000 -36.500000000 -7.500000000 -12.500000000 -36.500000000 -7.500000000 -11.500000000 -36.500000000 -7.500000000 -10.500000000 -36.500000000 -7.500000000 -9.500000000 -36.500000000 -7.500000000 -8.500000000 -36.500000000 -7.500000000 -7.500000000 -36.500000000 -7.500000000 -6.500000000 -36.500000000 -7.500000000 -5.500000000 -36.500000000 -7.500000000 -4.500000000 -36.500000000 -7.500000000 -3.500000000 -36.500000000 -7.500000000 -2.500000000 -36.500000000 -7.500000000 -1.500000000 -36.500000000 -7.500000000 -0.500000000 -36.500000000 -7.500000000 0.500000000 -36.500000000 -7.500000000 1.500000000 -36.500000000 -7.500000000 2.500000000 -36.500000000 -7.500000000 3.500000000 
-36.500000000 -7.500000000 4.500000000 -36.500000000 -7.500000000 5.500000000 -36.500000000 -7.500000000 6.500000000 -36.500000000 -7.500000000 7.500000000 -36.500000000 -7.500000000 8.500000000 -36.500000000 -7.500000000 9.500000000 -36.500000000 -7.500000000 10.500000000 -36.500000000 -7.500000000 11.500000000 -36.500000000 -7.500000000 12.500000000 -36.500000000 -7.500000000 13.500000000 -36.500000000 -7.500000000 14.500000000 -36.500000000 -7.500000000 15.500000000 -36.500000000 -7.500000000 16.500000000 -36.500000000 -7.500000000 17.500000000 -36.500000000 -7.500000000 18.500000000 -36.500000000 -7.500000000 19.500000000 -36.500000000 -7.500000000 20.500000000 -36.500000000 -7.500000000 21.500000000 -36.500000000 -7.500000000 22.500000000 -36.500000000 -7.500000000 23.500000000 -36.500000000 -7.500000000 24.500000000 -36.500000000 -7.500000000 25.499996185 -36.499996185 -7.500000000 26.499954224 -36.499954224 -7.500000000 27.499591827 -36.499591827 -7.500000000 28.497470856 -36.497467041 -7.500000000 29.488407135 -36.488403320 -7.500000000 30.458978653 -36.458980560 -7.500000000 31.384418488 -36.384422302 -7.500000000 32.233222961 -36.233222961 -7.500000000 32.981101990 -35.981101990 -7.500000000 -35.167964935 -35.167964935 -7.500000000 -34.622871399 -35.622871399 -7.500000000 33.622871399 -35.622871399 -7.500000000 34.167964935 -35.167964935 -7.500000000 -35.981101990 -33.981101990 -7.500000000 -35.622871399 -34.622871399 -7.500000000 34.622871399 -34.622871399 -7.500000000 34.981101990 -33.981101990 -7.500000000 -36.233222961 -33.233222961 -7.500000000 35.233222961 -33.233226776 -7.500000000 -36.384418488 -32.384422302 -7.500000000 35.384418488 -32.384422302 -7.500000000 -36.458976746 -31.458978653 -7.500000000 35.458980560 -31.458978653 -7.500000000 -36.488403320 -30.488407135 -7.500000000 35.488403320 -30.488407135 -7.500000000 -36.497467041 -29.497472763 -7.500000000 35.497474670 -29.497472763 -7.500000000 -36.499591827 -28.499593735 -7.500000000 
35.499591827 -28.499593735 -7.500000000 -36.499954224 -27.499954224 -7.500000000 35.499954224 -27.499954224 -7.500000000 -36.499996185 -26.499996185 -7.500000000 35.499996185 -26.499996185 -7.500000000 -36.500000000 -25.500000000 -7.500000000 35.500000000 -25.500000000 -7.500000000 -36.500000000 -24.500000000 -7.500000000 35.500000000 -24.500000000 -7.500000000 -36.500000000 -23.500000000 -7.500000000 35.500000000 -23.500000000 -7.500000000 -36.500000000 -22.500000000 -7.500000000 35.500000000 -22.500000000 -7.500000000 -36.500000000 -21.500000000 -7.500000000 35.500000000 -21.500000000 -7.500000000 -36.500000000 -20.500000000 -7.500000000 35.500000000 -20.500000000 -7.500000000 -36.500000000 -19.500000000 -7.500000000 35.500000000 -19.500000000 -7.500000000 -36.500000000 -18.500000000 -7.500000000 35.500000000 -18.500000000 -7.500000000 -36.500000000 -17.500000000 -7.500000000 35.500000000 -17.500000000 -7.500000000 -36.500000000 -16.500000000 -7.500000000 35.500000000 -16.500000000 -7.500000000 -36.500000000 -15.500000000 -7.500000000 35.500000000 -15.500000000 -7.500000000 -36.500000000 -14.500000000 -7.500000000 35.500000000 -14.500000000 -7.500000000 -36.500000000 -13.500000000 -7.500000000 35.500000000 -13.500000000 -7.500000000 -36.500000000 -12.500000000 -7.500000000 35.500000000 -12.500000000 -7.500000000 -36.500000000 -11.500000000 -7.500000000 35.500000000 -11.500000000 -7.500000000 -36.500000000 -10.500000000 -7.500000000 35.500000000 -10.500000000 -7.500000000 -36.500000000 -9.500000000 -7.500000000 35.500000000 -9.500000000 -7.500000000 -36.500000000 -8.500000000 -7.500000000 35.500000000 -8.500000000 -7.500000000 -36.500000000 -7.500000000 -7.500000000 35.500000000 -7.500000000 -7.500000000 -36.500000000 -6.500000000 -7.500000000 35.500000000 -6.500000000 -7.500000000 -36.500000000 -5.500000000 -7.500000000 35.500000000 -5.500000000 -7.500000000 -36.500000000 -4.500000000 -7.500000000 35.500000000 -4.500000000 -7.500000000 -36.500000000 -3.500000000 
-7.500000000 35.500000000 -3.500000000 -7.500000000 -36.500000000 -2.500000000 -7.500000000 35.500000000 -2.500000000 -7.500000000 -36.500000000 -1.500000000 -7.500000000 35.500000000 -1.500000000 -7.500000000 -36.500000000 -0.500000000 -7.500000000 35.500000000 -0.500000000 -7.500000000 -36.500000000 0.500000000 -7.500000000 35.500000000 0.500000000 -7.500000000 -36.500000000 1.500000000 -7.500000000 35.500000000 1.500000000 -7.500000000 -36.500000000 2.500000000 -7.500000000 35.500000000 2.500000000 -7.500000000 -36.500000000 3.500000000 -7.500000000 35.500000000 3.500000000 -7.500000000 -36.500000000 4.500000000 -7.500000000 35.500000000 4.500000000 -7.500000000 -36.500000000 5.500000000 -7.500000000 35.500000000 5.500000000 -7.500000000 -36.500000000 6.500000000 -7.500000000 35.500000000 6.500000000 -7.500000000 -36.500000000 7.500000000 -7.500000000 35.500000000 7.500000000 -7.500000000 -36.500000000 8.500000000 -7.500000000 35.500000000 8.500000000 -7.500000000 -36.500000000 9.500000000 -7.500000000 35.500000000 9.500000000 -7.500000000 -36.500000000 10.500000000 -7.500000000 35.500000000 10.500000000 -7.500000000 -36.500000000 11.500000000 -7.500000000 35.500000000 11.500000000 -7.500000000 -36.500000000 12.500000000 -7.500000000 35.500000000 12.500000000 -7.500000000 -36.500000000 13.500000000 -7.500000000 35.500000000 13.500000000 -7.500000000 -36.500000000 14.500000000 -7.500000000 35.500000000 14.500000000 -7.500000000 -36.500000000 15.500000000 -7.500000000 35.500000000 15.500000000 -7.500000000 -36.500000000 16.500000000 -7.500000000 35.500000000 16.500000000 -7.500000000 -36.500000000 17.500000000 -7.500000000 35.500000000 17.500000000 -7.500000000 -36.500000000 18.500000000 -7.500000000 35.500000000 18.500000000 -7.500000000 -36.500000000 19.500000000 -7.500000000 35.500000000 19.500000000 -7.500000000 -36.500000000 20.500000000 -7.500000000 35.500000000 20.500000000 -7.500000000 -36.500000000 21.500000000 -7.500000000 35.500000000 21.500000000 
-7.500000000 -36.500000000 22.500000000 -7.500000000 35.500000000 22.500000000 -7.500000000 -36.500000000 23.500000000 -7.500000000 35.500000000 23.500000000 -7.500000000 -36.500000000 24.500000000 -7.500000000 35.500000000 24.500000000 -7.500000000 -36.499996185 25.499996185 -7.500000000 35.499996185 25.499996185 -7.500000000 -36.499954224 26.499954224 -7.500000000 35.499954224 26.499954224 -7.500000000 -36.499591827 27.499591827 -7.500000000 35.499591827 27.499591827 -7.500000000 -36.497474670 28.497470856 -7.500000000 35.497467041 28.497470856 -7.500000000 -36.488403320 29.488407135 -7.500000000 35.488403320 29.488407135 -7.500000000 -36.458980560 30.458978653 -7.500000000 35.458980560 30.458978653 -7.500000000 -36.384422302 31.384418488 -7.500000000 35.384422302 31.384418488 -7.500000000 -36.233222961 32.233222961 -7.500000000 35.233222961 32.233222961 -7.500000000 -35.981101990 32.981101990 -7.500000000 -35.622871399 33.622871399 -7.500000000 34.622871399 33.622871399 -7.500000000 34.981101990 32.981101990 -7.500000000 -35.167964935 34.167964935 -7.500000000 -34.622871399 34.622871399 -7.500000000 33.622871399 34.622871399 -7.500000000 34.167964935 34.167964935 -7.500000000 -33.981101990 34.981101990 -7.500000000 -33.233222961 35.233222961 -7.500000000 -32.384422302 35.384418488 -7.500000000 -31.458978653 35.458976746 -7.500000000 -30.488407135 35.488403320 -7.500000000 -29.497472763 35.497467041 -7.500000000 -28.499593735 35.499591827 -7.500000000 -27.499954224 35.499954224 -7.500000000 -26.499996185 35.499996185 -7.500000000 -25.500000000 35.500000000 -7.500000000 -24.500000000 35.500000000 -7.500000000 -23.500000000 35.500000000 -7.500000000 -22.500000000 35.500000000 -7.500000000 -21.500000000 35.500000000 -7.500000000 -20.500000000 35.500000000 -7.500000000 -19.500000000 35.500000000 -7.500000000 -18.500000000 35.500000000 -7.500000000 -17.500000000 35.500000000 -7.500000000 -16.500000000 35.500000000 -7.500000000 -15.500000000 35.500000000 -7.500000000 
-14.500000000 35.500000000 -7.500000000 -13.500000000 35.500000000 -7.500000000 -12.500000000 35.500000000 -7.500000000 -11.500000000 35.500000000 -7.500000000 -10.500000000 35.500000000 -7.500000000 -9.500000000 35.500000000 -7.500000000 -8.500000000 35.500000000 -7.500000000 -7.500000000 35.500000000 -7.500000000 -6.500000000 35.500000000 -7.500000000 -5.500000000 35.500000000 -7.500000000 -4.500000000 35.500000000 -7.500000000 -3.500000000 35.500000000 -7.500000000 -2.500000000 35.500000000 -7.500000000 -1.500000000 35.500000000 -7.500000000 -0.500000000 35.500000000 -7.500000000 0.500000000 35.500000000 -7.500000000 1.500000000 35.500000000 -7.500000000 2.500000000 35.500000000 -7.500000000 3.500000000 35.500000000 -7.500000000 4.500000000 35.500000000 -7.500000000 5.500000000 35.500000000 -7.500000000 6.500000000 35.500000000 -7.500000000 7.500000000 35.500000000 -7.500000000 8.500000000 35.500000000 -7.500000000 9.500000000 35.500000000 -7.500000000 10.500000000 35.500000000 -7.500000000 11.500000000 35.500000000 -7.500000000 12.500000000 35.500000000 -7.500000000 13.500000000 35.500000000 -7.500000000 14.500000000 35.500000000 -7.500000000 15.500000000 35.500000000 -7.500000000 16.500000000 35.500000000 -7.500000000 17.500000000 35.500000000 -7.500000000 18.500000000 35.500000000 -7.500000000 19.500000000 35.500000000 -7.500000000 20.500000000 35.500000000 -7.500000000 21.500000000 35.500000000 -7.500000000 22.500000000 35.500000000 -7.500000000 23.500000000 35.500000000 -7.500000000 24.500000000 35.500000000 -7.500000000 25.499996185 35.499996185 -7.500000000 26.499954224 35.499954224 -7.500000000 27.499591827 35.499591827 -7.500000000 28.497470856 35.497474670 -7.500000000 29.488407135 35.488403320 -7.500000000 30.458978653 35.458980560 -7.500000000 31.384418488 35.384422302 -7.500000000 32.233222961 35.233222961 -7.500000000 32.981101990 34.981101990 -7.500000000 -33.981101990 -35.981101990 -6.500000000 -33.233226776 -36.233222961 -6.500000000 
-32.384422302 -36.384418488 -6.500000000 -31.458978653 -36.458980560 -6.500000000 -30.488407135 -36.488403320 -6.500000000 -29.497472763 -36.497474670 -6.500000000 -28.499593735 -36.499591827 -6.500000000 -27.499954224 -36.499954224 -6.500000000 -26.499996185 -36.499996185 -6.500000000 -25.500000000 -36.500000000 -6.500000000 -24.500000000 -36.500000000 -6.500000000 -23.500000000 -36.500000000 -6.500000000 -22.500000000 -36.500000000 -6.500000000 -21.500000000 -36.500000000 -6.500000000 -20.500000000 -36.500000000 -6.500000000 -19.500000000 -36.500000000 -6.500000000 -18.500000000 -36.500000000 -6.500000000 -17.500000000 -36.500000000 -6.500000000 -16.500000000 -36.500000000 -6.500000000 -15.500000000 -36.500000000 -6.500000000 -14.500000000 -36.500000000 -6.500000000 -13.500000000 -36.500000000 -6.500000000 -12.500000000 -36.500000000 -6.500000000 -11.500000000 -36.500000000 -6.500000000 -10.500000000 -36.500000000 -6.500000000 -9.500000000 -36.500000000 -6.500000000 -8.500000000 -36.500000000 -6.500000000 -7.500000000 -36.500000000 -6.500000000 -6.500000000 -36.500000000 -6.500000000 -5.500000000 -36.500000000 -6.500000000 -4.500000000 -36.500000000 -6.500000000 -3.500000000 -36.500000000 -6.500000000 -2.500000000 -36.500000000 -6.500000000 -1.500000000 -36.500000000 -6.500000000 -0.500000000 -36.500000000 -6.500000000 0.500000000 -36.500000000 -6.500000000 1.500000000 -36.500000000 -6.500000000 2.500000000 -36.500000000 -6.500000000 3.500000000 -36.500000000 -6.500000000 4.500000000 -36.500000000 -6.500000000 5.500000000 -36.500000000 -6.500000000 6.500000000 -36.500000000 -6.500000000 7.500000000 -36.500000000 -6.500000000 8.500000000 -36.500000000 -6.500000000 9.500000000 -36.500000000 -6.500000000 10.500000000 -36.500000000 -6.500000000 11.500000000 -36.500000000 -6.500000000 12.500000000 -36.500000000 -6.500000000 13.500000000 -36.500000000 -6.500000000 14.500000000 -36.500000000 -6.500000000 15.500000000 -36.500000000 -6.500000000 16.500000000 -36.500000000 
-6.500000000 17.500000000 -36.500000000 -6.500000000 18.500000000 -36.500000000 -6.500000000 19.500000000 -36.500000000 -6.500000000 20.500000000 -36.500000000 -6.500000000 21.500000000 -36.500000000 -6.500000000 22.500000000 -36.500000000 -6.500000000 23.500000000 -36.500000000 -6.500000000 24.500000000 -36.500000000 -6.500000000 25.499996185 -36.499996185 -6.500000000 26.499954224 -36.499954224 -6.500000000 27.499591827 -36.499591827 -6.500000000 28.497470856 -36.497467041 -6.500000000 29.488407135 -36.488403320 -6.500000000 30.458978653 -36.458980560 -6.500000000 31.384418488 -36.384422302 -6.500000000 32.233222961 -36.233222961 -6.500000000 32.981101990 -35.981101990 -6.500000000 -35.167964935 -35.167964935 -6.500000000 -34.622871399 -35.622871399 -6.500000000 33.622871399 -35.622871399 -6.500000000 34.167964935 -35.167964935 -6.500000000 -35.981101990 -33.981101990 -6.500000000 -35.622871399 -34.622871399 -6.500000000 34.622871399 -34.622871399 -6.500000000 34.981101990 -33.981101990 -6.500000000 -36.233222961 -33.233222961 -6.500000000 35.233222961 -33.233226776 -6.500000000 -36.384418488 -32.384422302 -6.500000000 35.384418488 -32.384422302 -6.500000000 -36.458976746 -31.458978653 -6.500000000 35.458980560 -31.458978653 -6.500000000 -36.488403320 -30.488407135 -6.500000000 35.488403320 -30.488407135 -6.500000000 -36.497467041 -29.497472763 -6.500000000 35.497474670 -29.497472763 -6.500000000 -36.499591827 -28.499593735 -6.500000000 35.499591827 -28.499593735 -6.500000000 -36.499954224 -27.499954224 -6.500000000 35.499954224 -27.499954224 -6.500000000 -36.499996185 -26.499996185 -6.500000000 35.499996185 -26.499996185 -6.500000000 -36.500000000 -25.500000000 -6.500000000 35.500000000 -25.500000000 -6.500000000 -36.500000000 -24.500000000 -6.500000000 35.500000000 -24.500000000 -6.500000000 -36.500000000 -23.500000000 -6.500000000 35.500000000 -23.500000000 -6.500000000 -36.500000000 -22.500000000 -6.500000000 35.500000000 -22.500000000 -6.500000000 
-36.500000000 -21.500000000 -6.500000000 35.500000000 -21.500000000 -6.500000000 -36.500000000 -20.500000000 -6.500000000 35.500000000 -20.500000000 -6.500000000 -36.500000000 -19.500000000 -6.500000000 35.500000000 -19.500000000 -6.500000000 -36.500000000 -18.500000000 -6.500000000 35.500000000 -18.500000000 -6.500000000 -36.500000000 -17.500000000 -6.500000000 35.500000000 -17.500000000 -6.500000000 -36.500000000 -16.500000000 -6.500000000 35.500000000 -16.500000000 -6.500000000 -36.500000000 -15.500000000 -6.500000000 35.500000000 -15.500000000 -6.500000000 -36.500000000 -14.500000000 -6.500000000 35.500000000 -14.500000000 -6.500000000 -36.500000000 -13.500000000 -6.500000000 35.500000000 -13.500000000 -6.500000000 -36.500000000 -12.500000000 -6.500000000 35.500000000 -12.500000000 -6.500000000 -36.500000000 -11.500000000 -6.500000000 35.500000000 -11.500000000 -6.500000000 -36.500000000 -10.500000000 -6.500000000 35.500000000 -10.500000000 -6.500000000 -36.500000000 -9.500000000 -6.500000000 35.500000000 -9.500000000 -6.500000000 -36.500000000 -8.500000000 -6.500000000 35.500000000 -8.500000000 -6.500000000 -36.500000000 -7.500000000 -6.500000000 35.500000000 -7.500000000 -6.500000000 -36.500000000 -6.500000000 -6.500000000 35.500000000 -6.500000000 -6.500000000 -36.500000000 -5.500000000 -6.500000000 35.500000000 -5.500000000 -6.500000000 -36.500000000 -4.500000000 -6.500000000 35.500000000 -4.500000000 -6.500000000 -36.500000000 -3.500000000 -6.500000000 35.500000000 -3.500000000 -6.500000000 -36.500000000 -2.500000000 -6.500000000 35.500000000 -2.500000000 -6.500000000 -36.500000000 -1.500000000 -6.500000000 35.500000000 -1.500000000 -6.500000000 -36.500000000 -0.500000000 -6.500000000 35.500000000 -0.500000000 -6.500000000 -36.500000000 0.500000000 -6.500000000 35.500000000 0.500000000 -6.500000000 -36.500000000 1.500000000 -6.500000000 35.500000000 1.500000000 -6.500000000 -36.500000000 2.500000000 -6.500000000 35.500000000 2.500000000 -6.500000000 
-36.500000000 3.500000000 -6.500000000 35.500000000 3.500000000 -6.500000000 -36.500000000 4.500000000 -6.500000000 35.500000000 4.500000000 -6.500000000 -36.500000000 5.500000000 -6.500000000 35.500000000 5.500000000 -6.500000000 -36.500000000 6.500000000 -6.500000000 35.500000000 6.500000000 -6.500000000 -36.500000000 7.500000000 -6.500000000 35.500000000 7.500000000 -6.500000000 -36.500000000 8.500000000 -6.500000000 35.500000000 8.500000000 -6.500000000 -36.500000000 9.500000000 -6.500000000 35.500000000 9.500000000 -6.500000000 -36.500000000 10.500000000 -6.500000000 35.500000000 10.500000000 -6.500000000 -36.500000000 11.500000000 -6.500000000 35.500000000 11.500000000 -6.500000000 -36.500000000 12.500000000 -6.500000000 35.500000000 12.500000000 -6.500000000 -36.500000000 13.500000000 -6.500000000 35.500000000 13.500000000 -6.500000000 -36.500000000 14.500000000 -6.500000000 35.500000000 14.500000000 -6.500000000 -36.500000000 15.500000000 -6.500000000 35.500000000 15.500000000 -6.500000000 -36.500000000 16.500000000 -6.500000000 35.500000000 16.500000000 -6.500000000 -36.500000000 17.500000000 -6.500000000 35.500000000 17.500000000 -6.500000000 -36.500000000 18.500000000 -6.500000000 35.500000000 18.500000000 -6.500000000 -36.500000000 19.500000000 -6.500000000 35.500000000 19.500000000 -6.500000000 -36.500000000 20.500000000 -6.500000000 35.500000000 20.500000000 -6.500000000 -36.500000000 21.500000000 -6.500000000 35.500000000 21.500000000 -6.500000000 -36.500000000 22.500000000 -6.500000000 35.500000000 22.500000000 -6.500000000 -36.500000000 23.500000000 -6.500000000 35.500000000 23.500000000 -6.500000000 -36.500000000 24.500000000 -6.500000000 35.500000000 24.500000000 -6.500000000 -36.499996185 25.499996185 -6.500000000 35.499996185 25.499996185 -6.500000000 -36.499954224 26.499954224 -6.500000000 35.499954224 26.499954224 -6.500000000 -36.499591827 27.499591827 -6.500000000 35.499591827 27.499591827 -6.500000000 -36.497474670 28.497470856 
-6.500000000 35.497467041 28.497470856 -6.500000000 -36.488403320 29.488407135 -6.500000000 35.488403320 29.488407135 -6.500000000 -36.458980560 30.458978653 -6.500000000 35.458980560 30.458978653 -6.500000000 -36.384422302 31.384418488 -6.500000000 35.384422302 31.384418488 -6.500000000 -36.233222961 32.233222961 -6.500000000 35.233222961 32.233222961 -6.500000000 -35.981101990 32.981101990 -6.500000000 -35.622871399 33.622871399 -6.500000000 34.622871399 33.622871399 -6.500000000 34.981101990 32.981101990 -6.500000000 -35.167964935 34.167964935 -6.500000000 -34.622871399 34.622871399 -6.500000000 33.622871399 34.622871399 -6.500000000 34.167964935 34.167964935 -6.500000000 -33.981101990 34.981101990 -6.500000000 -33.233222961 35.233222961 -6.500000000 -32.384422302 35.384418488 -6.500000000 -31.458978653 35.458976746 -6.500000000 -30.488407135 35.488403320 -6.500000000 -29.497472763 35.497467041 -6.500000000 -28.499593735 35.499591827 -6.500000000 -27.499954224 35.499954224 -6.500000000 -26.499996185 35.499996185 -6.500000000 -25.500000000 35.500000000 -6.500000000 -24.500000000 35.500000000 -6.500000000 -23.500000000 35.500000000 -6.500000000 -22.500000000 35.500000000 -6.500000000 -21.500000000 35.500000000 -6.500000000 -20.500000000 35.500000000 -6.500000000 -19.500000000 35.500000000 -6.500000000 -18.500000000 35.500000000 -6.500000000 -17.500000000 35.500000000 -6.500000000 -16.500000000 35.500000000 -6.500000000 -15.500000000 35.500000000 -6.500000000 -14.500000000 35.500000000 -6.500000000 -13.500000000 35.500000000 -6.500000000 -12.500000000 35.500000000 -6.500000000 -11.500000000 35.500000000 -6.500000000 -10.500000000 35.500000000 -6.500000000 -9.500000000 35.500000000 -6.500000000 -8.500000000 35.500000000 -6.500000000 -7.500000000 35.500000000 -6.500000000 -6.500000000 35.500000000 -6.500000000 -5.500000000 35.500000000 -6.500000000 -4.500000000 35.500000000 -6.500000000 -3.500000000 35.500000000 -6.500000000 -2.500000000 35.500000000 -6.500000000 
-1.500000000 35.500000000 -6.500000000 -0.500000000 35.500000000 -6.500000000 0.500000000 35.500000000 -6.500000000 1.500000000 35.500000000 -6.500000000 2.500000000 35.500000000 -6.500000000 3.500000000 35.500000000 -6.500000000 4.500000000 35.500000000 -6.500000000 5.500000000 35.500000000 -6.500000000 6.500000000 35.500000000 -6.500000000 7.500000000 35.500000000 -6.500000000 8.500000000 35.500000000 -6.500000000 9.500000000 35.500000000 -6.500000000 10.500000000 35.500000000 -6.500000000 11.500000000 35.500000000 -6.500000000 12.500000000 35.500000000 -6.500000000 13.500000000 35.500000000 -6.500000000 14.500000000 35.500000000 -6.500000000 15.500000000 35.500000000 -6.500000000 16.500000000 35.500000000 -6.500000000 17.500000000 35.500000000 -6.500000000 18.500000000 35.500000000 -6.500000000 19.500000000 35.500000000 -6.500000000 20.500000000 35.500000000 -6.500000000 21.500000000 35.500000000 -6.500000000 22.500000000 35.500000000 -6.500000000 23.500000000 35.500000000 -6.500000000 24.500000000 35.500000000 -6.500000000 25.499996185 35.499996185 -6.500000000 26.499954224 35.499954224 -6.500000000 27.499591827 35.499591827 -6.500000000 28.497470856 35.497474670 -6.500000000 29.488407135 35.488403320 -6.500000000 30.458978653 35.458980560 -6.500000000 31.384418488 35.384422302 -6.500000000 32.233222961 35.233222961 -6.500000000 32.981101990 34.981101990 -6.500000000 -33.981101990 -35.981101990 -5.500000000 -33.233226776 -36.233222961 -5.500000000 -32.384422302 -36.384418488 -5.500000000 -31.458978653 -36.458980560 -5.500000000 -30.488407135 -36.488403320 -5.500000000 -29.497472763 -36.497474670 -5.500000000 -28.499593735 -36.499591827 -5.500000000 -27.499954224 -36.499954224 -5.500000000 -26.499996185 -36.499996185 -5.500000000 -25.500000000 -36.500000000 -5.500000000 -24.500000000 -36.500000000 -5.500000000 -23.500000000 -36.500000000 -5.500000000 -22.500000000 -36.500000000 -5.500000000 -21.500000000 -36.500000000 -5.500000000 -20.500000000 -36.500000000 
-5.500000000 -19.500000000 -36.500000000 -5.500000000 -18.500000000 -36.500000000 -5.500000000 -17.500000000 -36.500000000 -5.500000000 -16.500000000 -36.500000000 -5.500000000 -15.500000000 -36.500000000 -5.500000000 -14.500000000 -36.500000000 -5.500000000 -13.500000000 -36.500000000 -5.500000000 -12.500000000 -36.500000000 -5.500000000 -11.500000000 -36.500000000 -5.500000000 -10.500000000 -36.500000000 -5.500000000 -9.500000000 -36.500000000 -5.500000000 -8.500000000 -36.500000000 -5.500000000 -7.500000000 -36.500000000 -5.500000000 -6.500000000 -36.500000000 -5.500000000 -5.500000000 -36.500000000 -5.500000000 -4.500000000 -36.500000000 -5.500000000 -3.500000000 -36.500000000 -5.500000000 -2.500000000 -36.500000000 -5.500000000 -1.500000000 -36.500000000 -5.500000000 -0.500000000 -36.500000000 -5.500000000 0.500000000 -36.500000000 -5.500000000 1.500000000 -36.500000000 -5.500000000 2.500000000 -36.500000000 -5.500000000 3.500000000 -36.500000000 -5.500000000 4.500000000 -36.500000000 -5.500000000 5.500000000 -36.500000000 -5.500000000 6.500000000 -36.500000000 -5.500000000 7.500000000 -36.500000000 -5.500000000 8.500000000 -36.500000000 -5.500000000 9.500000000 -36.500000000 -5.500000000 10.500000000 -36.500000000 -5.500000000 11.500000000 -36.500000000 -5.500000000 12.500000000 -36.500000000 -5.500000000 13.500000000 -36.500000000 -5.500000000 14.500000000 -36.500000000 -5.500000000 15.500000000 -36.500000000 -5.500000000 16.500000000 -36.500000000 -5.500000000 17.500000000 -36.500000000 -5.500000000 18.500000000 -36.500000000 -5.500000000 19.500000000 -36.500000000 -5.500000000 20.500000000 -36.500000000 -5.500000000 21.500000000 -36.500000000 -5.500000000 22.500000000 -36.500000000 -5.500000000 23.500000000 -36.500000000 -5.500000000 24.500000000 -36.500000000 -5.500000000 25.499996185 -36.499996185 -5.500000000 26.499954224 -36.499954224 -5.500000000 27.499591827 -36.499591827 -5.500000000 28.497470856 -36.497467041 -5.500000000 29.488407135 -36.488403320 
-5.500000000 30.458978653 -36.458980560 -5.500000000 31.384418488 -36.384422302 -5.500000000 32.233222961 -36.233222961 -5.500000000 32.981101990 -35.981101990 -5.500000000 -35.167964935 -35.167964935 -5.500000000 -34.622871399 -35.622871399 -5.500000000 33.622871399 -35.622871399 -5.500000000 34.167964935 -35.167964935 -5.500000000 -35.981101990 -33.981101990 -5.500000000 -35.622871399 -34.622871399 -5.500000000 34.622871399 -34.622871399 -5.500000000 34.981101990 -33.981101990 -5.500000000 -36.233222961 -33.233222961 -5.500000000 35.233222961 -33.233226776 -5.500000000 -36.384418488 -32.384422302 -5.500000000 35.384418488 -32.384422302 -5.500000000 -36.458976746 -31.458978653 -5.500000000 35.458980560 -31.458978653 -5.500000000 -36.488403320 -30.488407135 -5.500000000 35.488403320 -30.488407135 -5.500000000 -36.497467041 -29.497472763 -5.500000000 35.497474670 -29.497472763 -5.500000000 -36.499591827 -28.499593735 -5.500000000 35.499591827 -28.499593735 -5.500000000 -36.499954224 -27.499954224 -5.500000000 35.499954224 -27.499954224 -5.500000000 -36.499996185 -26.499996185 -5.500000000 35.499996185 -26.499996185 -5.500000000 -36.500000000 -25.500000000 -5.500000000 35.500000000 -25.500000000 -5.500000000 -36.500000000 -24.500000000 -5.500000000 35.500000000 -24.500000000 -5.500000000 -36.500000000 -23.500000000 -5.500000000 35.500000000 -23.500000000 -5.500000000 -36.500000000 -22.500000000 -5.500000000 35.500000000 -22.500000000 -5.500000000 -36.500000000 -21.500000000 -5.500000000 35.500000000 -21.500000000 -5.500000000 -36.500000000 -20.500000000 -5.500000000 35.500000000 -20.500000000 -5.500000000 -36.500000000 -19.500000000 -5.500000000 35.500000000 -19.500000000 -5.500000000 -36.500000000 -18.500000000 -5.500000000 35.500000000 -18.500000000 -5.500000000 -36.500000000 -17.500000000 -5.500000000 35.500000000 -17.500000000 -5.500000000 -36.500000000 -16.500000000 -5.500000000 35.500000000 -16.500000000 -5.500000000 -36.500000000 -15.500000000 -5.500000000 
35.500000000 -15.500000000 -5.500000000 -36.500000000 -14.500000000 -5.500000000 35.500000000 -14.500000000 -5.500000000 -36.500000000 -13.500000000 -5.500000000 35.500000000 -13.500000000 -5.500000000 -36.500000000 -12.500000000 -5.500000000 35.500000000 -12.500000000 -5.500000000 -36.500000000 -11.500000000 -5.500000000 35.500000000 -11.500000000 -5.500000000 -36.500000000 -10.500000000 -5.500000000 35.500000000 -10.500000000 -5.500000000 -36.500000000 -9.500000000 -5.500000000 35.500000000 -9.500000000 -5.500000000 -36.500000000 -8.500000000 -5.500000000 35.500000000 -8.500000000 -5.500000000 -36.500000000 -7.500000000 -5.500000000 35.500000000 -7.500000000 -5.500000000 -36.500000000 -6.500000000 -5.500000000 35.500000000 -6.500000000 -5.500000000 -36.500000000 -5.500000000 -5.500000000 35.500000000 -5.500000000 -5.500000000 -36.500000000 -4.500000000 -5.500000000 35.500000000 -4.500000000 -5.500000000 -36.500000000 -3.500000000 -5.500000000 35.500000000 -3.500000000 -5.500000000 -36.500000000 -2.500000000 -5.500000000 35.500000000 -2.500000000 -5.500000000 -36.500000000 -1.500000000 -5.500000000 35.500000000 -1.500000000 -5.500000000 -36.500000000 -0.500000000 -5.500000000 35.500000000 -0.500000000 -5.500000000 -36.500000000 0.500000000 -5.500000000 35.500000000 0.500000000 -5.500000000 -36.500000000 1.500000000 -5.500000000 35.500000000 1.500000000 -5.500000000 -36.500000000 2.500000000 -5.500000000 35.500000000 2.500000000 -5.500000000 -36.500000000 3.500000000 -5.500000000 35.500000000 3.500000000 -5.500000000 -36.500000000 4.500000000 -5.500000000 35.500000000 4.500000000 -5.500000000 -36.500000000 5.500000000 -5.500000000 35.500000000 5.500000000 -5.500000000 -36.500000000 6.500000000 -5.500000000 35.500000000 6.500000000 -5.500000000 -36.500000000 7.500000000 -5.500000000 35.500000000 7.500000000 -5.500000000 -36.500000000 8.500000000 -5.500000000 35.500000000 8.500000000 -5.500000000 -36.500000000 9.500000000 -5.500000000 35.500000000 9.500000000 
-5.500000000 -36.500000000 10.500000000 -5.500000000 35.500000000 10.500000000 -5.500000000 -36.500000000 11.500000000 -5.500000000 35.500000000 11.500000000 -5.500000000 -36.500000000 12.500000000 -5.500000000 35.500000000 12.500000000 -5.500000000 -36.500000000 13.500000000 -5.500000000 35.500000000 13.500000000 -5.500000000 -36.500000000 14.500000000 -5.500000000 35.500000000 14.500000000 -5.500000000 -36.500000000 15.500000000 -5.500000000 35.500000000 15.500000000 -5.500000000 -36.500000000 16.500000000 -5.500000000 35.500000000 16.500000000 -5.500000000 -36.500000000 17.500000000 -5.500000000 35.500000000 17.500000000 -5.500000000 -36.500000000 18.500000000 -5.500000000 35.500000000 18.500000000 -5.500000000 -36.500000000 19.500000000 -5.500000000 35.500000000 19.500000000 -5.500000000 -36.500000000 20.500000000 -5.500000000 35.500000000 20.500000000 -5.500000000 -36.500000000 21.500000000 -5.500000000 35.500000000 21.500000000 -5.500000000 -36.500000000 22.500000000 -5.500000000 35.500000000 22.500000000 -5.500000000 -36.500000000 23.500000000 -5.500000000 35.500000000 23.500000000 -5.500000000 -36.500000000 24.500000000 -5.500000000 35.500000000 24.500000000 -5.500000000 -36.499996185 25.499996185 -5.500000000 35.499996185 25.499996185 -5.500000000 -36.499954224 26.499954224 -5.500000000 35.499954224 26.499954224 -5.500000000 -36.499591827 27.499591827 -5.500000000 35.499591827 27.499591827 -5.500000000 -36.497474670 28.497470856 -5.500000000 35.497467041 28.497470856 -5.500000000 -36.488403320 29.488407135 -5.500000000 35.488403320 29.488407135 -5.500000000 -36.458980560 30.458978653 -5.500000000 35.458980560 30.458978653 -5.500000000 -36.384422302 31.384418488 -5.500000000 35.384422302 31.384418488 -5.500000000 -36.233222961 32.233222961 -5.500000000 35.233222961 32.233222961 -5.500000000 -35.981101990 32.981101990 -5.500000000 -35.622871399 33.622871399 -5.500000000 34.622871399 33.622871399 -5.500000000 34.981101990 32.981101990 -5.500000000 
-35.167964935 34.167964935 -5.500000000 -34.622871399 34.622871399 -5.500000000 33.622871399 34.622871399 -5.500000000 34.167964935 34.167964935 -5.500000000 -33.981101990 34.981101990 -5.500000000 -33.233222961 35.233222961 -5.500000000 -32.384422302 35.384418488 -5.500000000 -31.458978653 35.458976746 -5.500000000 -30.488407135 35.488403320 -5.500000000 -29.497472763 35.497467041 -5.500000000 -28.499593735 35.499591827 -5.500000000 -27.499954224 35.499954224 -5.500000000 -26.499996185 35.499996185 -5.500000000 -25.500000000 35.500000000 -5.500000000 -24.500000000 35.500000000 -5.500000000 -23.500000000 35.500000000 -5.500000000 -22.500000000 35.500000000 -5.500000000 -21.500000000 35.500000000 -5.500000000 -20.500000000 35.500000000 -5.500000000 -19.500000000 35.500000000 -5.500000000 -18.500000000 35.500000000 -5.500000000 -17.500000000 35.500000000 -5.500000000 -16.500000000 35.500000000 -5.500000000 -15.500000000 35.500000000 -5.500000000 -14.500000000 35.500000000 -5.500000000 -13.500000000 35.500000000 -5.500000000 -12.500000000 35.500000000 -5.500000000 -11.500000000 35.500000000 -5.500000000 -10.500000000 35.500000000 -5.500000000 -9.500000000 35.500000000 -5.500000000 -8.500000000 35.500000000 -5.500000000 -7.500000000 35.500000000 -5.500000000 -6.500000000 35.500000000 -5.500000000 -5.500000000 35.500000000 -5.500000000 -4.500000000 35.500000000 -5.500000000 -3.500000000 35.500000000 -5.500000000 -2.500000000 35.500000000 -5.500000000 -1.500000000 35.500000000 -5.500000000 -0.500000000 35.500000000 -5.500000000 0.500000000 35.500000000 -5.500000000 1.500000000 35.500000000 -5.500000000 2.500000000 35.500000000 -5.500000000 3.500000000 35.500000000 -5.500000000 4.500000000 35.500000000 -5.500000000 5.500000000 35.500000000 -5.500000000 6.500000000 35.500000000 -5.500000000 7.500000000 35.500000000 -5.500000000 8.500000000 35.500000000 -5.500000000 9.500000000 35.500000000 -5.500000000 10.500000000 35.500000000 -5.500000000 11.500000000 35.500000000 
-5.500000000 12.500000000 35.500000000 -5.500000000 13.500000000 35.500000000 -5.500000000 14.500000000 35.500000000 -5.500000000 15.500000000 35.500000000 -5.500000000 16.500000000 35.500000000 -5.500000000 17.500000000 35.500000000 -5.500000000 18.500000000 35.500000000 -5.500000000 19.500000000 35.500000000 -5.500000000 20.500000000 35.500000000 -5.500000000 21.500000000 35.500000000 -5.500000000 22.500000000 35.500000000 -5.500000000 23.500000000 35.500000000 -5.500000000 24.500000000 35.500000000 -5.500000000 25.499996185 35.499996185 -5.500000000 26.499954224 35.499954224 -5.500000000 27.499591827 35.499591827 -5.500000000 28.497470856 35.497474670 -5.500000000 29.488407135 35.488403320 -5.500000000 30.458978653 35.458980560 -5.500000000 31.384418488 35.384422302 -5.500000000 32.233222961 35.233222961 -5.500000000 32.981101990 34.981101990 -5.500000000 -33.981101990 -35.981101990 -4.500000000 -33.233226776 -36.233222961 -4.500000000 -32.384422302 -36.384418488 -4.500000000 -31.458978653 -36.458980560 -4.500000000 -30.488407135 -36.488403320 -4.500000000 -29.497472763 -36.497474670 -4.500000000 -28.499593735 -36.499591827 -4.500000000 -27.499954224 -36.499954224 -4.500000000 -26.499996185 -36.499996185 -4.500000000 -25.500000000 -36.500000000 -4.500000000 -24.500000000 -36.500000000 -4.500000000 -23.500000000 -36.500000000 -4.500000000 -22.500000000 -36.500000000 -4.500000000 -21.500000000 -36.500000000 -4.500000000 -20.500000000 -36.500000000 -4.500000000 -19.500000000 -36.500000000 -4.500000000 -18.500000000 -36.500000000 -4.500000000 -17.500000000 -36.500000000 -4.500000000 -16.500000000 -36.500000000 -4.500000000 -15.500000000 -36.500000000 -4.500000000 -14.500000000 -36.500000000 -4.500000000 -13.500000000 -36.500000000 -4.500000000 -12.500000000 -36.500000000 -4.500000000 -11.500000000 -36.500000000 -4.500000000 -10.500000000 -36.500000000 -4.500000000 -9.500000000 -36.500000000 -4.500000000 -8.500000000 -36.500000000 -4.500000000 -7.500000000 
-36.500000000 -4.500000000 -6.500000000 -36.500000000 -4.500000000 -5.500000000 -36.500000000 -4.500000000 -4.500000000 -36.500000000 -4.500000000 -3.500000000 -36.500000000 -4.500000000 -2.500000000 -36.500000000 -4.500000000 -1.500000000 -36.500000000 -4.500000000 -0.500000000 -36.500000000 -4.500000000 0.500000000 -36.500000000 -4.500000000 1.500000000 -36.500000000 -4.500000000 2.500000000 -36.500000000 -4.500000000 3.500000000 -36.500000000 -4.500000000 4.500000000 -36.500000000 -4.500000000 5.500000000 -36.500000000 -4.500000000 6.500000000 -36.500000000 -4.500000000 7.500000000 -36.500000000 -4.500000000 8.500000000 -36.500000000 -4.500000000 9.500000000 -36.500000000 -4.500000000 10.500000000 -36.500000000 -4.500000000 11.500000000 -36.500000000 -4.500000000 12.500000000 -36.500000000 -4.500000000 13.500000000 -36.500000000 -4.500000000 14.500000000 -36.500000000 -4.500000000 15.500000000 -36.500000000 -4.500000000 16.500000000 -36.500000000 -4.500000000 17.500000000 -36.500000000 -4.500000000 18.500000000 -36.500000000 -4.500000000 19.500000000 -36.500000000 -4.500000000 20.500000000 -36.500000000 -4.500000000 21.500000000 -36.500000000 -4.500000000 22.500000000 -36.500000000 -4.500000000 23.500000000 -36.500000000 -4.500000000 24.500000000 -36.500000000 -4.500000000 25.499996185 -36.499996185 -4.500000000 26.499954224 -36.499954224 -4.500000000 27.499591827 -36.499591827 -4.500000000 28.497470856 -36.497467041 -4.500000000 29.488407135 -36.488403320 -4.500000000 30.458978653 -36.458980560 -4.500000000 31.384418488 -36.384422302 -4.500000000 32.233222961 -36.233222961 -4.500000000 32.981101990 -35.981101990 -4.500000000 -35.167964935 -35.167964935 -4.500000000 -34.622871399 -35.622871399 -4.500000000 33.622871399 -35.622871399 -4.500000000 34.167964935 -35.167964935 -4.500000000 -35.981101990 -33.981101990 -4.500000000 -35.622871399 -34.622871399 -4.500000000 34.622871399 -34.622871399 -4.500000000 34.981101990 -33.981101990 -4.500000000 -36.233222961 
-33.233222961 -4.500000000 35.233222961 -33.233226776 -4.500000000 -36.384418488 -32.384422302 -4.500000000 35.384418488 -32.384422302 -4.500000000 -36.458976746 -31.458978653 -4.500000000 35.458980560 -31.458978653 -4.500000000 -36.488403320 -30.488407135 -4.500000000 35.488403320 -30.488407135 -4.500000000 -36.497467041 -29.497472763 -4.500000000 35.497474670 -29.497472763 -4.500000000 -36.499591827 -28.499593735 -4.500000000 35.499591827 -28.499593735 -4.500000000 -36.499954224 -27.499954224 -4.500000000 35.499954224 -27.499954224 -4.500000000 -36.499996185 -26.499996185 -4.500000000 35.499996185 -26.499996185 -4.500000000 -36.500000000 -25.500000000 -4.500000000 35.500000000 -25.500000000 -4.500000000 -36.500000000 -24.500000000 -4.500000000 35.500000000 -24.500000000 -4.500000000 -36.500000000 -23.500000000 -4.500000000 35.500000000 -23.500000000 -4.500000000 -36.500000000 -22.500000000 -4.500000000 35.500000000 -22.500000000 -4.500000000 -36.500000000 -21.500000000 -4.500000000 35.500000000 -21.500000000 -4.500000000 -36.500000000 -20.500000000 -4.500000000 35.500000000 -20.500000000 -4.500000000 -36.500000000 -19.500000000 -4.500000000 35.500000000 -19.500000000 -4.500000000 -36.500000000 -18.500000000 -4.500000000 35.500000000 -18.500000000 -4.500000000 -36.500000000 -17.500000000 -4.500000000 35.500000000 -17.500000000 -4.500000000 -36.500000000 -16.500000000 -4.500000000 35.500000000 -16.500000000 -4.500000000 -36.500000000 -15.500000000 -4.500000000 35.500000000 -15.500000000 -4.500000000 -36.500000000 -14.500000000 -4.500000000 35.500000000 -14.500000000 -4.500000000 -36.500000000 -13.500000000 -4.500000000 35.500000000 -13.500000000 -4.500000000 -36.500000000 -12.500000000 -4.500000000 35.500000000 -12.500000000 -4.500000000 -36.500000000 -11.500000000 -4.500000000 35.500000000 -11.500000000 -4.500000000 -36.500000000 -10.500000000 -4.500000000 35.500000000 -10.500000000 -4.500000000 -36.500000000 -9.500000000 -4.500000000 35.500000000 -9.500000000 
-4.500000000 -36.500000000 -8.500000000 -4.500000000 35.500000000 -8.500000000 -4.500000000 -36.500000000 -7.500000000 -4.500000000 35.500000000 -7.500000000 -4.500000000 -36.500000000 -6.500000000 -4.500000000 35.500000000 -6.500000000 -4.500000000 -36.500000000 -5.500000000 -4.500000000 35.500000000 -5.500000000 -4.500000000 -36.500000000 -4.500000000 -4.500000000 35.500000000 -4.500000000 -4.500000000 -36.500000000 -3.500000000 -4.500000000 35.500000000 -3.500000000 -4.500000000 -36.500000000 -2.500000000 -4.500000000 35.500000000 -2.500000000 -4.500000000 -36.500000000 -1.500000000 -4.500000000 35.500000000 -1.500000000 -4.500000000 -36.500000000 -0.500000000 -4.500000000 35.500000000 -0.500000000 -4.500000000 -36.500000000 0.500000000 -4.500000000 35.500000000 0.500000000 -4.500000000 -36.500000000 1.500000000 -4.500000000 35.500000000 1.500000000 -4.500000000 -36.500000000 2.500000000 -4.500000000 35.500000000 2.500000000 -4.500000000 -36.500000000 3.500000000 -4.500000000 35.500000000 3.500000000 -4.500000000 -36.500000000 4.500000000 -4.500000000 35.500000000 4.500000000 -4.500000000 -36.500000000 5.500000000 -4.500000000 35.500000000 5.500000000 -4.500000000 -36.500000000 6.500000000 -4.500000000 35.500000000 6.500000000 -4.500000000 -36.500000000 7.500000000 -4.500000000 35.500000000 7.500000000 -4.500000000 -36.500000000 8.500000000 -4.500000000 35.500000000 8.500000000 -4.500000000 -36.500000000 9.500000000 -4.500000000 35.500000000 9.500000000 -4.500000000 -36.500000000 10.500000000 -4.500000000 35.500000000 10.500000000 -4.500000000 -36.500000000 11.500000000 -4.500000000 35.500000000 11.500000000 -4.500000000 -36.500000000 12.500000000 -4.500000000 35.500000000 12.500000000 -4.500000000 -36.500000000 13.500000000 -4.500000000 35.500000000 13.500000000 -4.500000000 -36.500000000 14.500000000 -4.500000000 35.500000000 14.500000000 -4.500000000 -36.500000000 15.500000000 -4.500000000 35.500000000 15.500000000 -4.500000000 -36.500000000 16.500000000 
-4.500000000 35.500000000 16.500000000 -4.500000000 -36.500000000 17.500000000 -4.500000000 35.500000000 17.500000000 -4.500000000 -36.500000000 18.500000000 -4.500000000 35.500000000 18.500000000 -4.500000000 -36.500000000 19.500000000 -4.500000000 35.500000000 19.500000000 -4.500000000 -36.500000000 20.500000000 -4.500000000 35.500000000 20.500000000 -4.500000000 -36.500000000 21.500000000 -4.500000000 35.500000000 21.500000000 -4.500000000 -36.500000000 22.500000000 -4.500000000 35.500000000 22.500000000 -4.500000000 -36.500000000 23.500000000 -4.500000000 35.500000000 23.500000000 -4.500000000 -36.500000000 24.500000000 -4.500000000 35.500000000 24.500000000 -4.500000000 -36.499996185 25.499996185 -4.500000000 35.499996185 25.499996185 -4.500000000 -36.499954224 26.499954224 -4.500000000 35.499954224 26.499954224 -4.500000000 -36.499591827 27.499591827 -4.500000000 35.499591827 27.499591827 -4.500000000 -36.497474670 28.497470856 -4.500000000 35.497467041 28.497470856 -4.500000000 -36.488403320 29.488407135 -4.500000000 35.488403320 29.488407135 -4.500000000 -36.458980560 30.458978653 -4.500000000 35.458980560 30.458978653 -4.500000000 -36.384422302 31.384418488 -4.500000000 35.384422302 31.384418488 -4.500000000 -36.233222961 32.233222961 -4.500000000 35.233222961 32.233222961 -4.500000000 -35.981101990 32.981101990 -4.500000000 -35.622871399 33.622871399 -4.500000000 34.622871399 33.622871399 -4.500000000 34.981101990 32.981101990 -4.500000000 -35.167964935 34.167964935 -4.500000000 -34.622871399 34.622871399 -4.500000000 33.622871399 34.622871399 -4.500000000 34.167964935 34.167964935 -4.500000000 -33.981101990 34.981101990 -4.500000000 -33.233222961 35.233222961 -4.500000000 -32.384422302 35.384418488 -4.500000000 -31.458978653 35.458976746 -4.500000000 -30.488407135 35.488403320 -4.500000000 -29.497472763 35.497467041 -4.500000000 -28.499593735 35.499591827 -4.500000000 -27.499954224 35.499954224 -4.500000000 -26.499996185 35.499996185 -4.500000000 
-25.500000000 35.500000000 -4.500000000 -24.500000000 35.500000000 -4.500000000 -23.500000000 35.500000000 -4.500000000 -22.500000000 35.500000000 -4.500000000 -21.500000000 35.500000000 -4.500000000 -20.500000000 35.500000000 -4.500000000 -19.500000000 35.500000000 -4.500000000 -18.500000000 35.500000000 -4.500000000 -17.500000000 35.500000000 -4.500000000 -16.500000000 35.500000000 -4.500000000 -15.500000000 35.500000000 -4.500000000 -14.500000000 35.500000000 -4.500000000 -13.500000000 35.500000000 -4.500000000 -12.500000000 35.500000000 -4.500000000 -11.500000000 35.500000000 -4.500000000 -10.500000000 35.500000000 -4.500000000 -9.500000000 35.500000000 -4.500000000 -8.500000000 35.500000000 -4.500000000 -7.500000000 35.500000000 -4.500000000 -6.500000000 35.500000000 -4.500000000 -5.500000000 35.500000000 -4.500000000 -4.500000000 35.500000000 -4.500000000 -3.500000000 35.500000000 -4.500000000 -2.500000000 35.500000000 -4.500000000 -1.500000000 35.500000000 -4.500000000 -0.500000000 35.500000000 -4.500000000 0.500000000 35.500000000 -4.500000000 1.500000000 35.500000000 -4.500000000 2.500000000 35.500000000 -4.500000000 3.500000000 35.500000000 -4.500000000 4.500000000 35.500000000 -4.500000000 5.500000000 35.500000000 -4.500000000 6.500000000 35.500000000 -4.500000000 7.500000000 35.500000000 -4.500000000 8.500000000 35.500000000 -4.500000000 9.500000000 35.500000000 -4.500000000 10.500000000 35.500000000 -4.500000000 11.500000000 35.500000000 -4.500000000 12.500000000 35.500000000 -4.500000000 13.500000000 35.500000000 -4.500000000 14.500000000 35.500000000 -4.500000000 15.500000000 35.500000000 -4.500000000 16.500000000 35.500000000 -4.500000000 17.500000000 35.500000000 -4.500000000 18.500000000 35.500000000 -4.500000000 19.500000000 35.500000000 -4.500000000 20.500000000 35.500000000 -4.500000000 21.500000000 35.500000000 -4.500000000 22.500000000 35.500000000 -4.500000000 23.500000000 35.500000000 -4.500000000 24.500000000 35.500000000 -4.500000000 
25.499996185 35.499996185 -4.500000000 26.499954224 35.499954224 -4.500000000 27.499591827 35.499591827 -4.500000000 28.497470856 35.497474670 -4.500000000 29.488407135 35.488403320 -4.500000000 30.458978653 35.458980560 -4.500000000 31.384418488 35.384422302 -4.500000000 32.233222961 35.233222961 -4.500000000 32.981101990 34.981101990 -4.500000000 -33.981101990 -35.981101990 -3.500000000 -33.233226776 -36.233222961 -3.500000000 -32.384422302 -36.384418488 -3.500000000 -31.458978653 -36.458980560 -3.500000000 -30.488407135 -36.488403320 -3.500000000 -29.497472763 -36.497474670 -3.500000000 -28.499593735 -36.499591827 -3.500000000 -27.499954224 -36.499954224 -3.500000000 -26.499996185 -36.499996185 -3.500000000 -25.500000000 -36.500000000 -3.500000000 -24.500000000 -36.500000000 -3.500000000 -23.500000000 -36.500000000 -3.500000000 -22.500000000 -36.500000000 -3.500000000 -21.500000000 -36.500000000 -3.500000000 -20.500000000 -36.500000000 -3.500000000 -19.500000000 -36.500000000 -3.500000000 -18.500000000 -36.500000000 -3.500000000 -17.500000000 -36.500000000 -3.500000000 -16.500000000 -36.500000000 -3.500000000 -15.500000000 -36.500000000 -3.500000000 -14.500000000 -36.500000000 -3.500000000 -13.500000000 -36.500000000 -3.500000000 -12.500000000 -36.500000000 -3.500000000 -11.500000000 -36.500000000 -3.500000000 -10.500000000 -36.500000000 -3.500000000 -9.500000000 -36.500000000 -3.500000000 -8.500000000 -36.500000000 -3.500000000 -7.500000000 -36.500000000 -3.500000000 -6.500000000 -36.500000000 -3.500000000 -5.500000000 -36.500000000 -3.500000000 -4.500000000 -36.500000000 -3.500000000 -3.500000000 -36.500000000 -3.500000000 -2.500000000 -36.500000000 -3.500000000 -1.500000000 -36.500000000 -3.500000000 -0.500000000 -36.500000000 -3.500000000 0.500000000 -36.500000000 -3.500000000 1.500000000 -36.500000000 -3.500000000 2.500000000 -36.500000000 -3.500000000 3.500000000 -36.500000000 -3.500000000 4.500000000 -36.500000000 -3.500000000 5.500000000 -36.500000000 
-3.500000000 6.500000000 -36.500000000 -3.500000000 7.500000000 -36.500000000 -3.500000000 8.500000000 -36.500000000 -3.500000000 9.500000000 -36.500000000 -3.500000000 10.500000000 -36.500000000 -3.500000000 11.500000000 -36.500000000 -3.500000000 12.500000000 -36.500000000 -3.500000000 13.500000000 -36.500000000 -3.500000000 14.500000000 -36.500000000 -3.500000000 15.500000000 -36.500000000 -3.500000000 16.500000000 -36.500000000 -3.500000000 17.500000000 -36.500000000 -3.500000000 18.500000000 -36.500000000 -3.500000000 19.500000000 -36.500000000 -3.500000000 20.500000000 -36.500000000 -3.500000000 21.500000000 -36.500000000 -3.500000000 22.500000000 -36.500000000 -3.500000000 23.500000000 -36.500000000 -3.500000000 24.500000000 -36.500000000 -3.500000000 25.499996185 -36.499996185 -3.500000000 26.499954224 -36.499954224 -3.500000000 27.499591827 -36.499591827 -3.500000000 28.497470856 -36.497467041 -3.500000000 29.488407135 -36.488403320 -3.500000000 30.458978653 -36.458980560 -3.500000000 31.384418488 -36.384422302 -3.500000000 32.233222961 -36.233222961 -3.500000000 32.981101990 -35.981101990 -3.500000000 -35.167964935 -35.167964935 -3.500000000 -34.622871399 -35.622871399 -3.500000000 33.622871399 -35.622871399 -3.500000000 34.167964935 -35.167964935 -3.500000000 -35.981101990 -33.981101990 -3.500000000 -35.622871399 -34.622871399 -3.500000000 34.622871399 -34.622871399 -3.500000000 34.981101990 -33.981101990 -3.500000000 -36.233222961 -33.233222961 -3.500000000 35.233222961 -33.233226776 -3.500000000 -36.384418488 -32.384422302 -3.500000000 35.384418488 -32.384422302 -3.500000000 -36.458976746 -31.458978653 -3.500000000 35.458980560 -31.458978653 -3.500000000 -36.488403320 -30.488407135 -3.500000000 35.488403320 -30.488407135 -3.500000000 -36.497467041 -29.497472763 -3.500000000 35.497474670 -29.497472763 -3.500000000 -36.499591827 -28.499593735 -3.500000000 35.499591827 -28.499593735 -3.500000000 -36.499954224 -27.499954224 -3.500000000 35.499954224 
-27.499954224 -3.500000000 -36.499996185 -26.499996185 -3.500000000 35.499996185 -26.499996185 -3.500000000 -36.500000000 -25.500000000 -3.500000000 35.500000000 -25.500000000 -3.500000000 -36.500000000 -24.500000000 -3.500000000 35.500000000 -24.500000000 -3.500000000 -36.500000000 -23.500000000 -3.500000000 35.500000000 -23.500000000 -3.500000000 -36.500000000 -22.500000000 -3.500000000 35.500000000 -22.500000000 -3.500000000 -36.500000000 -21.500000000 -3.500000000 35.500000000 -21.500000000 -3.500000000 -36.500000000 -20.500000000 -3.500000000 35.500000000 -20.500000000 -3.500000000 -36.500000000 -19.500000000 -3.500000000 35.500000000 -19.500000000 -3.500000000 -36.500000000 -18.500000000 -3.500000000 35.500000000 -18.500000000 -3.500000000 -36.500000000 -17.500000000 -3.500000000 35.500000000 -17.500000000 -3.500000000 -36.500000000 -16.500000000 -3.500000000 35.500000000 -16.500000000 -3.500000000 -36.500000000 -15.500000000 -3.500000000 35.500000000 -15.500000000 -3.500000000 -36.500000000 -14.500000000 -3.500000000 35.500000000 -14.500000000 -3.500000000 -36.500000000 -13.500000000 -3.500000000 35.500000000 -13.500000000 -3.500000000 -36.500000000 -12.500000000 -3.500000000 35.500000000 -12.500000000 -3.500000000 -36.500000000 -11.500000000 -3.500000000 35.500000000 -11.500000000 -3.500000000 -36.500000000 -10.500000000 -3.500000000 35.500000000 -10.500000000 -3.500000000 -36.500000000 -9.500000000 -3.500000000 35.500000000 -9.500000000 -3.500000000 -36.500000000 -8.500000000 -3.500000000 35.500000000 -8.500000000 -3.500000000 -36.500000000 -7.500000000 -3.500000000 35.500000000 -7.500000000 -3.500000000 -36.500000000 -6.500000000 -3.500000000 35.500000000 -6.500000000 -3.500000000 -36.500000000 -5.500000000 -3.500000000 35.500000000 -5.500000000 -3.500000000 -36.500000000 -4.500000000 -3.500000000 35.500000000 -4.500000000 -3.500000000 -36.500000000 -3.500000000 -3.500000000 35.500000000 -3.500000000 -3.500000000 -36.500000000 -2.500000000 -3.500000000 
35.500000000 -2.500000000 -3.500000000 -36.500000000 -1.500000000 -3.500000000 35.500000000 -1.500000000 -3.500000000 -36.500000000 -0.500000000 -3.500000000 35.500000000 -0.500000000 -3.500000000 -36.500000000 0.500000000 -3.500000000 35.500000000 0.500000000 -3.500000000 -36.500000000 1.500000000 -3.500000000 35.500000000 1.500000000 -3.500000000 -36.500000000 2.500000000 -3.500000000 35.500000000 2.500000000 -3.500000000 -36.500000000 3.500000000 -3.500000000 35.500000000 3.500000000 -3.500000000 -36.500000000 4.500000000 -3.500000000 35.500000000 4.500000000 -3.500000000 -36.500000000 5.500000000 -3.500000000 35.500000000 5.500000000 -3.500000000 -36.500000000 6.500000000 -3.500000000 35.500000000 6.500000000 -3.500000000 -36.500000000 7.500000000 -3.500000000 35.500000000 7.500000000 -3.500000000 -36.500000000 8.500000000 -3.500000000 35.500000000 8.500000000 -3.500000000 -36.500000000 9.500000000 -3.500000000 35.500000000 9.500000000 -3.500000000 -36.500000000 10.500000000 -3.500000000 35.500000000 10.500000000 -3.500000000 -36.500000000 11.500000000 -3.500000000 35.500000000 11.500000000 -3.500000000 -36.500000000 12.500000000 -3.500000000 35.500000000 12.500000000 -3.500000000 -36.500000000 13.500000000 -3.500000000 35.500000000 13.500000000 -3.500000000 -36.500000000 14.500000000 -3.500000000 35.500000000 14.500000000 -3.500000000 -36.500000000 15.500000000 -3.500000000 35.500000000 15.500000000 -3.500000000 -36.500000000 16.500000000 -3.500000000 35.500000000 16.500000000 -3.500000000 -36.500000000 17.500000000 -3.500000000 35.500000000 17.500000000 -3.500000000 -36.500000000 18.500000000 -3.500000000 35.500000000 18.500000000 -3.500000000 -36.500000000 19.500000000 -3.500000000 35.500000000 19.500000000 -3.500000000 -36.500000000 20.500000000 -3.500000000 35.500000000 20.500000000 -3.500000000 -36.500000000 21.500000000 -3.500000000 35.500000000 21.500000000 -3.500000000 -36.500000000 22.500000000 -3.500000000 35.500000000 22.500000000 -3.500000000 
-36.500000000 23.500000000 -3.500000000 35.500000000 23.500000000 -3.500000000 -36.500000000 24.500000000 -3.500000000 35.500000000 24.500000000 -3.500000000 -36.499996185 25.499996185 -3.500000000 35.499996185 25.499996185 -3.500000000 -36.499954224 26.499954224 -3.500000000 35.499954224 26.499954224 -3.500000000 -36.499591827 27.499591827 -3.500000000 35.499591827 27.499591827 -3.500000000 -36.497474670 28.497470856 -3.500000000 35.497467041 28.497470856 -3.500000000 -36.488403320 29.488407135 -3.500000000 35.488403320 29.488407135 -3.500000000 -36.458980560 30.458978653 -3.500000000 35.458980560 30.458978653 -3.500000000 -36.384422302 31.384418488 -3.500000000 35.384422302 31.384418488 -3.500000000 -36.233222961 32.233222961 -3.500000000 35.233222961 32.233222961 -3.500000000 -35.981101990 32.981101990 -3.500000000 -35.622871399 33.622871399 -3.500000000 34.622871399 33.622871399 -3.500000000 34.981101990 32.981101990 -3.500000000 -35.167964935 34.167964935 -3.500000000 -34.622871399 34.622871399 -3.500000000 33.622871399 34.622871399 -3.500000000 34.167964935 34.167964935 -3.500000000 -33.981101990 34.981101990 -3.500000000 -33.233222961 35.233222961 -3.500000000 -32.384422302 35.384418488 -3.500000000 -31.458978653 35.458976746 -3.500000000 -30.488407135 35.488403320 -3.500000000 -29.497472763 35.497467041 -3.500000000 -28.499593735 35.499591827 -3.500000000 -27.499954224 35.499954224 -3.500000000 -26.499996185 35.499996185 -3.500000000 -25.500000000 35.500000000 -3.500000000 -24.500000000 35.500000000 -3.500000000 -23.500000000 35.500000000 -3.500000000 -22.500000000 35.500000000 -3.500000000 -21.500000000 35.500000000 -3.500000000 -20.500000000 35.500000000 -3.500000000 -19.500000000 35.500000000 -3.500000000 -18.500000000 35.500000000 -3.500000000 -17.500000000 35.500000000 -3.500000000 -16.500000000 35.500000000 -3.500000000 -15.500000000 35.500000000 -3.500000000 -14.500000000 35.500000000 -3.500000000 -13.500000000 35.500000000 -3.500000000 -12.500000000 
35.500000000 -3.500000000 -11.500000000 35.500000000 -3.500000000 -10.500000000 35.500000000 -3.500000000 -9.500000000 35.500000000 -3.500000000 -8.500000000 35.500000000 -3.500000000 -7.500000000 35.500000000 -3.500000000 -6.500000000 35.500000000 -3.500000000 -5.500000000 35.500000000 -3.500000000 -4.500000000 35.500000000 -3.500000000 -3.500000000 35.500000000 -3.500000000 -2.500000000 35.500000000 -3.500000000 -1.500000000 35.500000000 -3.500000000 -0.500000000 35.500000000 -3.500000000 0.500000000 35.500000000 -3.500000000 1.500000000 35.500000000 -3.500000000 2.500000000 35.500000000 -3.500000000 3.500000000 35.500000000 -3.500000000 4.500000000 35.500000000 -3.500000000 5.500000000 35.500000000 -3.500000000 6.500000000 35.500000000 -3.500000000 7.500000000 35.500000000 -3.500000000 8.500000000 35.500000000 -3.500000000 9.500000000 35.500000000 -3.500000000 10.500000000 35.500000000 -3.500000000 11.500000000 35.500000000 -3.500000000 12.500000000 35.500000000 -3.500000000 13.500000000 35.500000000 -3.500000000 14.500000000 35.500000000 -3.500000000 15.500000000 35.500000000 -3.500000000 16.500000000 35.500000000 -3.500000000 17.500000000 35.500000000 -3.500000000 18.500000000 35.500000000 -3.500000000 19.500000000 35.500000000 -3.500000000 20.500000000 35.500000000 -3.500000000 21.500000000 35.500000000 -3.500000000 22.500000000 35.500000000 -3.500000000 23.500000000 35.500000000 -3.500000000 24.500000000 35.500000000 -3.500000000 25.499996185 35.499996185 -3.500000000 26.499954224 35.499954224 -3.500000000 27.499591827 35.499591827 -3.500000000 28.497470856 35.497474670 -3.500000000 29.488407135 35.488403320 -3.500000000 30.458978653 35.458980560 -3.500000000 31.384418488 35.384422302 -3.500000000 32.233222961 35.233222961 -3.500000000 32.981101990 34.981101990 -3.500000000 -33.981101990 -35.981101990 -2.500000000 -33.233226776 -36.233222961 -2.500000000 -32.384422302 -36.384418488 -2.500000000 -31.458978653 -36.458980560 -2.500000000 -30.488407135 
-36.488403320 -2.500000000 -29.497472763 -36.497474670 -2.500000000 -28.499593735 -36.499591827 -2.500000000 -27.499954224 -36.499954224 -2.500000000 -26.499996185 -36.499996185 -2.500000000 -25.500000000 -36.500000000 -2.500000000 -24.500000000 -36.500000000 -2.500000000 -23.500000000 -36.500000000 -2.500000000 -22.500000000 -36.500000000 -2.500000000 -21.500000000 -36.500000000 -2.500000000 -20.500000000 -36.500000000 -2.500000000 -19.500000000 -36.500000000 -2.500000000 -18.500000000 -36.500000000 -2.500000000 -17.500000000 -36.500000000 -2.500000000 -16.500000000 -36.500000000 -2.500000000 -15.500000000 -36.500000000 -2.500000000 -14.500000000 -36.500000000 -2.500000000 -13.500000000 -36.500000000 -2.500000000 -12.500000000 -36.500000000 -2.500000000 -11.500000000 -36.500000000 -2.500000000 -10.500000000 -36.500000000 -2.500000000 -9.500000000 -36.500000000 -2.500000000 -8.500000000 -36.500000000 -2.500000000 -7.500000000 -36.500000000 -2.500000000 -6.500000000 -36.500000000 -2.500000000 -5.500000000 -36.500000000 -2.500000000 -4.500000000 -36.500000000 -2.500000000 -3.500000000 -36.500000000 -2.500000000 -2.500000000 -36.500000000 -2.500000000 -1.500000000 -36.500000000 -2.500000000 -0.500000000 -36.500000000 -2.500000000 0.500000000 -36.500000000 -2.500000000 1.500000000 -36.500000000 -2.500000000 2.500000000 -36.500000000 -2.500000000 3.500000000 -36.500000000 -2.500000000 4.500000000 -36.500000000 -2.500000000 5.500000000 -36.500000000 -2.500000000 6.500000000 -36.500000000 -2.500000000 7.500000000 -36.500000000 -2.500000000 8.500000000 -36.500000000 -2.500000000 9.500000000 -36.500000000 -2.500000000 10.500000000 -36.500000000 -2.500000000 11.500000000 -36.500000000 -2.500000000 12.500000000 -36.500000000 -2.500000000 13.500000000 -36.500000000 -2.500000000 14.500000000 -36.500000000 -2.500000000 15.500000000 -36.500000000 -2.500000000 16.500000000 -36.500000000 -2.500000000 17.500000000 -36.500000000 -2.500000000 18.500000000 -36.500000000 -2.500000000 
19.500000000 -36.500000000 -2.500000000 20.500000000 -36.500000000 -2.500000000 21.500000000 -36.500000000 -2.500000000 22.500000000 -36.500000000 -2.500000000 23.500000000 -36.500000000 -2.500000000 24.500000000 -36.500000000 -2.500000000 25.499996185 -36.499996185 -2.500000000 26.499954224 -36.499954224 -2.500000000 27.499591827 -36.499591827 -2.500000000 28.497470856 -36.497467041 -2.500000000 29.488407135 -36.488403320 -2.500000000 30.458978653 -36.458980560 -2.500000000 31.384418488 -36.384422302 -2.500000000 32.233222961 -36.233222961 -2.500000000 32.981101990 -35.981101990 -2.500000000 -35.167964935 -35.167964935 -2.500000000 -34.622871399 -35.622871399 -2.500000000 33.622871399 -35.622871399 -2.500000000 34.167964935 -35.167964935 -2.500000000 -35.981101990 -33.981101990 -2.500000000 -35.622871399 -34.622871399 -2.500000000 34.622871399 -34.622871399 -2.500000000 34.981101990 -33.981101990 -2.500000000 -36.233222961 -33.233222961 -2.500000000 35.233222961 -33.233226776 -2.500000000 -36.384418488 -32.384422302 -2.500000000 35.384418488 -32.384422302 -2.500000000 -36.458976746 -31.458978653 -2.500000000 35.458980560 -31.458978653 -2.500000000 -36.488403320 -30.488407135 -2.500000000 35.488403320 -30.488407135 -2.500000000 -36.497467041 -29.497472763 -2.500000000 35.497474670 -29.497472763 -2.500000000 -36.499591827 -28.499593735 -2.500000000 35.499591827 -28.499593735 -2.500000000 -36.499954224 -27.499954224 -2.500000000 35.499954224 -27.499954224 -2.500000000 -36.499996185 -26.499996185 -2.500000000 35.499996185 -26.499996185 -2.500000000 -36.500000000 -25.500000000 -2.500000000 35.500000000 -25.500000000 -2.500000000 -36.500000000 -24.500000000 -2.500000000 35.500000000 -24.500000000 -2.500000000 -36.500000000 -23.500000000 -2.500000000 35.500000000 -23.500000000 -2.500000000 -36.500000000 -22.500000000 -2.500000000 35.500000000 -22.500000000 -2.500000000 -36.500000000 -21.500000000 -2.500000000 35.500000000 -21.500000000 -2.500000000 -36.500000000 
-20.500000000 -2.500000000 35.500000000 -20.500000000 -2.500000000 -36.500000000 -19.500000000 -2.500000000 35.500000000 -19.500000000 -2.500000000 -36.500000000 -18.500000000 -2.500000000 35.500000000 -18.500000000 -2.500000000 -36.500000000 -17.500000000 -2.500000000 35.500000000 -17.500000000 -2.500000000 -36.500000000 -16.500000000 -2.500000000 35.500000000 -16.500000000 -2.500000000 -36.500000000 -15.500000000 -2.500000000 35.500000000 -15.500000000 -2.500000000 -36.500000000 -14.500000000 -2.500000000 35.500000000 -14.500000000 -2.500000000 -36.500000000 -13.500000000 -2.500000000 35.500000000 -13.500000000 -2.500000000 -36.500000000 -12.500000000 -2.500000000 35.500000000 -12.500000000 -2.500000000 -36.500000000 -11.500000000 -2.500000000 35.500000000 -11.500000000 -2.500000000 -36.500000000 -10.500000000 -2.500000000 35.500000000 -10.500000000 -2.500000000 -36.500000000 -9.500000000 -2.500000000 35.500000000 -9.500000000 -2.500000000 -36.500000000 -8.500000000 -2.500000000 35.500000000 -8.500000000 -2.500000000 -36.500000000 -7.500000000 -2.500000000 35.500000000 -7.500000000 -2.500000000 -36.500000000 -6.500000000 -2.500000000 35.500000000 -6.500000000 -2.500000000 -36.500000000 -5.500000000 -2.500000000 35.500000000 -5.500000000 -2.500000000 -36.500000000 -4.500000000 -2.500000000 35.500000000 -4.500000000 -2.500000000 -36.500000000 -3.500000000 -2.500000000 35.500000000 -3.500000000 -2.500000000 -36.500000000 -2.500000000 -2.500000000 35.500000000 -2.500000000 -2.500000000 -36.500000000 -1.500000000 -2.500000000 35.500000000 -1.500000000 -2.500000000 -36.500000000 -0.500000000 -2.500000000 35.500000000 -0.500000000 -2.500000000 -36.500000000 0.500000000 -2.500000000 35.500000000 0.500000000 -2.500000000 -36.500000000 1.500000000 -2.500000000 35.500000000 1.500000000 -2.500000000 -36.500000000 2.500000000 -2.500000000 35.500000000 2.500000000 -2.500000000 -36.500000000 3.500000000 -2.500000000 35.500000000 3.500000000 -2.500000000 -36.500000000 
4.500000000 -2.500000000 35.500000000 4.500000000 -2.500000000 -36.500000000 5.500000000 -2.500000000 35.500000000 5.500000000 -2.500000000 -36.500000000 6.500000000 -2.500000000 35.500000000 6.500000000 -2.500000000 -36.500000000 7.500000000 -2.500000000 35.500000000 7.500000000 -2.500000000 -36.500000000 8.500000000 -2.500000000 35.500000000 8.500000000 -2.500000000 -36.500000000 9.500000000 -2.500000000 35.500000000 9.500000000 -2.500000000 -36.500000000 10.500000000 -2.500000000 35.500000000 10.500000000 -2.500000000 -36.500000000 11.500000000 -2.500000000 35.500000000 11.500000000 -2.500000000 -36.500000000 12.500000000 -2.500000000 35.500000000 12.500000000 -2.500000000 -36.500000000 13.500000000 -2.500000000 35.500000000 13.500000000 -2.500000000 -36.500000000 14.500000000 -2.500000000 35.500000000 14.500000000 -2.500000000 -36.500000000 15.500000000 -2.500000000 35.500000000 15.500000000 -2.500000000 -36.500000000 16.500000000 -2.500000000 35.500000000 16.500000000 -2.500000000 -36.500000000 17.500000000 -2.500000000 35.500000000 17.500000000 -2.500000000 -36.500000000 18.500000000 -2.500000000 35.500000000 18.500000000 -2.500000000 -36.500000000 19.500000000 -2.500000000 35.500000000 19.500000000 -2.500000000 -36.500000000 20.500000000 -2.500000000 35.500000000 20.500000000 -2.500000000 -36.500000000 21.500000000 -2.500000000 35.500000000 21.500000000 -2.500000000 -36.500000000 22.500000000 -2.500000000 35.500000000 22.500000000 -2.500000000 -36.500000000 23.500000000 -2.500000000 35.500000000 23.500000000 -2.500000000 -36.500000000 24.500000000 -2.500000000 35.500000000 24.500000000 -2.500000000 -36.499996185 25.499996185 -2.500000000 35.499996185 25.499996185 -2.500000000 -36.499954224 26.499954224 -2.500000000 35.499954224 26.499954224 -2.500000000 -36.499591827 27.499591827 -2.500000000 35.499591827 27.499591827 -2.500000000 -36.497474670 28.497470856 -2.500000000 35.497467041 28.497470856 -2.500000000 -36.488403320 29.488407135 -2.500000000 
35.488403320 29.488407135 -2.500000000 -36.458980560 30.458978653 -2.500000000 35.458980560 30.458978653 -2.500000000 -36.384422302 31.384418488 -2.500000000 35.384422302 31.384418488 -2.500000000 -36.233222961 32.233222961 -2.500000000 35.233222961 32.233222961 -2.500000000 -35.981101990 32.981101990 -2.500000000 -35.622871399 33.622871399 -2.500000000 34.622871399 33.622871399 -2.500000000 34.981101990 32.981101990 -2.500000000 -35.167964935 34.167964935 -2.500000000 -34.622871399 34.622871399 -2.500000000 33.622871399 34.622871399 -2.500000000 34.167964935 34.167964935 -2.500000000 -33.981101990 34.981101990 -2.500000000 -33.233222961 35.233222961 -2.500000000 -32.384422302 35.384418488 -2.500000000 -31.458978653 35.458976746 -2.500000000 -30.488407135 35.488403320 -2.500000000 -29.497472763 35.497467041 -2.500000000 -28.499593735 35.499591827 -2.500000000 -27.499954224 35.499954224 -2.500000000 -26.499996185 35.499996185 -2.500000000 -25.500000000 35.500000000 -2.500000000 -24.500000000 35.500000000 -2.500000000 -23.500000000 35.500000000 -2.500000000 -22.500000000 35.500000000 -2.500000000 -21.500000000 35.500000000 -2.500000000 -20.500000000 35.500000000 -2.500000000 -19.500000000 35.500000000 -2.500000000 -18.500000000 35.500000000 -2.500000000 -17.500000000 35.500000000 -2.500000000 -16.500000000 35.500000000 -2.500000000 -15.500000000 35.500000000 -2.500000000 -14.500000000 35.500000000 -2.500000000 -13.500000000 35.500000000 -2.500000000 -12.500000000 35.500000000 -2.500000000 -11.500000000 35.500000000 -2.500000000 -10.500000000 35.500000000 -2.500000000 -9.500000000 35.500000000 -2.500000000 -8.500000000 35.500000000 -2.500000000 -7.500000000 35.500000000 -2.500000000 -6.500000000 35.500000000 -2.500000000 -5.500000000 35.500000000 -2.500000000 -4.500000000 35.500000000 -2.500000000 -3.500000000 35.500000000 -2.500000000 -2.500000000 35.500000000 -2.500000000 -1.500000000 35.500000000 -2.500000000 -0.500000000 35.500000000 -2.500000000 0.500000000 
35.500000000 -2.500000000 1.500000000 35.500000000 -2.500000000 2.500000000 35.500000000 -2.500000000 3.500000000 35.500000000 -2.500000000 4.500000000 35.500000000 -2.500000000 5.500000000 35.500000000 -2.500000000 6.500000000 35.500000000 -2.500000000 7.500000000 35.500000000 -2.500000000 8.500000000 35.500000000 -2.500000000 9.500000000 35.500000000 -2.500000000 10.500000000 35.500000000 -2.500000000 11.500000000 35.500000000 -2.500000000 12.500000000 35.500000000 -2.500000000 13.500000000 35.500000000 -2.500000000 14.500000000 35.500000000 -2.500000000 15.500000000 35.500000000 -2.500000000 16.500000000 35.500000000 -2.500000000 17.500000000 35.500000000 -2.500000000 18.500000000 35.500000000 -2.500000000 19.500000000 35.500000000 -2.500000000 20.500000000 35.500000000 -2.500000000 21.500000000 35.500000000 -2.500000000 22.500000000 35.500000000 -2.500000000 23.500000000 35.500000000 -2.500000000 24.500000000 35.500000000 -2.500000000 25.499996185 35.499996185 -2.500000000 26.499954224 35.499954224 -2.500000000 27.499591827 35.499591827 -2.500000000 28.497470856 35.497474670 -2.500000000 29.488407135 35.488403320 -2.500000000 30.458978653 35.458980560 -2.500000000 31.384418488 35.384422302 -2.500000000 32.233222961 35.233222961 -2.500000000 32.981101990 34.981101990 -2.500000000 -33.981101990 -35.981101990 -1.500000000 -33.233226776 -36.233222961 -1.500000000 -32.384422302 -36.384418488 -1.500000000 -31.458978653 -36.458980560 -1.500000000 -30.488407135 -36.488403320 -1.500000000 -29.497472763 -36.497474670 -1.500000000 -28.499593735 -36.499591827 -1.500000000 -27.499954224 -36.499954224 -1.500000000 -26.499996185 -36.499996185 -1.500000000 -25.500000000 -36.500000000 -1.500000000 -24.500000000 -36.500000000 -1.500000000 -23.500000000 -36.500000000 -1.500000000 -22.500000000 -36.500000000 -1.500000000 -21.500000000 -36.500000000 -1.500000000 -20.500000000 -36.500000000 -1.500000000 -19.500000000 -36.500000000 -1.500000000 -18.500000000 -36.500000000 
-1.500000000 -17.500000000 -36.500000000 -1.500000000 -16.500000000 -36.500000000 -1.500000000 -15.500000000 -36.500000000 -1.500000000 -14.500000000 -36.500000000 -1.500000000 -13.500000000 -36.500000000 -1.500000000 -12.500000000 -36.500000000 -1.500000000 -11.500000000 -36.500000000 -1.500000000 -10.500000000 -36.500000000 -1.500000000 -9.500000000 -36.500000000 -1.500000000 -8.500000000 -36.500000000 -1.500000000 -7.500000000 -36.500000000 -1.500000000 -6.500000000 -36.500000000 -1.500000000 -5.500000000 -36.500000000 -1.500000000 -4.500000000 -36.500000000 -1.500000000 -3.500000000 -36.500000000 -1.500000000 -2.500000000 -36.500000000 -1.500000000 -1.500000000 -36.500000000 -1.500000000 -0.500000000 -36.500000000 -1.500000000 0.500000000 -36.500000000 -1.500000000 1.500000000 -36.500000000 -1.500000000 2.500000000 -36.500000000 -1.500000000 3.500000000 -36.500000000 -1.500000000 4.500000000 -36.500000000 -1.500000000 5.500000000 -36.500000000 -1.500000000 6.500000000 -36.500000000 -1.500000000 7.500000000 -36.500000000 -1.500000000 8.500000000 -36.500000000 -1.500000000 9.500000000 -36.500000000 -1.500000000 10.500000000 -36.500000000 -1.500000000 11.500000000 -36.500000000 -1.500000000 12.500000000 -36.500000000 -1.500000000 13.500000000 -36.500000000 -1.500000000 14.500000000 -36.500000000 -1.500000000 15.500000000 -36.500000000 -1.500000000 16.500000000 -36.500000000 -1.500000000 17.500000000 -36.500000000 -1.500000000 18.500000000 -36.500000000 -1.500000000 19.500000000 -36.500000000 -1.500000000 20.500000000 -36.500000000 -1.500000000 21.500000000 -36.500000000 -1.500000000 22.500000000 -36.500000000 -1.500000000 23.500000000 -36.500000000 -1.500000000 24.500000000 -36.500000000 -1.500000000 25.499996185 -36.499996185 -1.500000000 26.499954224 -36.499954224 -1.500000000 27.499591827 -36.499591827 -1.500000000 28.497470856 -36.497467041 -1.500000000 29.488407135 -36.488403320 -1.500000000 30.458978653 -36.458980560 -1.500000000 31.384418488 -36.384422302 
-1.500000000 32.233222961 -36.233222961 -1.500000000 32.981101990 -35.981101990 -1.500000000 -35.167964935 -35.167964935 -1.500000000 -34.622871399 -35.622871399 -1.500000000 33.622871399 -35.622871399 -1.500000000 34.167964935 -35.167964935 -1.500000000 -35.981101990 -33.981101990 -1.500000000 -35.622871399 -34.622871399 -1.500000000 34.622871399 -34.622871399 -1.500000000 34.981101990 -33.981101990 -1.500000000 -36.233222961 -33.233222961 -1.500000000 35.233222961 -33.233226776 -1.500000000 -36.384418488 -32.384422302 -1.500000000 35.384418488 -32.384422302 -1.500000000 -36.458976746 -31.458978653 -1.500000000 35.458980560 -31.458978653 -1.500000000 -36.488403320 -30.488407135 -1.500000000 35.488403320 -30.488407135 -1.500000000 -36.497467041 -29.497472763 -1.500000000 35.497474670 -29.497472763 -1.500000000 -36.499591827 -28.499593735 -1.500000000 35.499591827 -28.499593735 -1.500000000 -36.499954224 -27.499954224 -1.500000000 35.499954224 -27.499954224 -1.500000000 -36.499996185 -26.499996185 -1.500000000 35.499996185 -26.499996185 -1.500000000 -36.500000000 -25.500000000 -1.500000000 35.500000000 -25.500000000 -1.500000000 -36.500000000 -24.500000000 -1.500000000 35.500000000 -24.500000000 -1.500000000 -36.500000000 -23.500000000 -1.500000000 35.500000000 -23.500000000 -1.500000000 -36.500000000 -22.500000000 -1.500000000 35.500000000 -22.500000000 -1.500000000 -36.500000000 -21.500000000 -1.500000000 35.500000000 -21.500000000 -1.500000000 -36.500000000 -20.500000000 -1.500000000 35.500000000 -20.500000000 -1.500000000 -36.500000000 -19.500000000 -1.500000000 35.500000000 -19.500000000 -1.500000000 -36.500000000 -18.500000000 -1.500000000 35.500000000 -18.500000000 -1.500000000 -36.500000000 -17.500000000 -1.500000000 35.500000000 -17.500000000 -1.500000000 -36.500000000 -16.500000000 -1.500000000 35.500000000 -16.500000000 -1.500000000 -36.500000000 -15.500000000 -1.500000000 35.500000000 -15.500000000 -1.500000000 -36.500000000 -14.500000000 -1.500000000 
35.500000000 -14.500000000 -1.500000000 -36.500000000 -13.500000000 -1.500000000 35.500000000 -13.500000000 -1.500000000 -36.500000000 -12.500000000 -1.500000000 35.500000000 -12.500000000 -1.500000000 -36.500000000 -11.500000000 -1.500000000 35.500000000 -11.500000000 -1.500000000 -36.500000000 -10.500000000 -1.500000000 35.500000000 -10.500000000 -1.500000000 -36.500000000 -9.500000000 -1.500000000 35.500000000 -9.500000000 -1.500000000 -36.500000000 -8.500000000 -1.500000000 35.500000000 -8.500000000 -1.500000000 -36.500000000 -7.500000000 -1.500000000 35.500000000 -7.500000000 -1.500000000 -36.500000000 -6.500000000 -1.500000000 35.500000000 -6.500000000 -1.500000000 -36.500000000 -5.500000000 -1.500000000 35.500000000 -5.500000000 -1.500000000 -36.500000000 -4.500000000 -1.500000000 35.500000000 -4.500000000 -1.500000000 -36.500000000 -3.500000000 -1.500000000 35.500000000 -3.500000000 -1.500000000 -36.500000000 -2.500000000 -1.500000000 35.500000000 -2.500000000 -1.500000000 -36.500000000 -1.500000000 -1.500000000 35.500000000 -1.500000000 -1.500000000 -36.500000000 -0.500000000 -1.500000000 35.500000000 -0.500000000 -1.500000000 -36.500000000 0.500000000 -1.500000000 35.500000000 0.500000000 -1.500000000 -36.500000000 1.500000000 -1.500000000 35.500000000 1.500000000 -1.500000000 -36.500000000 2.500000000 -1.500000000 35.500000000 2.500000000 -1.500000000 -36.500000000 3.500000000 -1.500000000 35.500000000 3.500000000 -1.500000000 -36.500000000 4.500000000 -1.500000000 35.500000000 4.500000000 -1.500000000 -36.500000000 5.500000000 -1.500000000 35.500000000 5.500000000 -1.500000000 -36.500000000 6.500000000 -1.500000000 35.500000000 6.500000000 -1.500000000 -36.500000000 7.500000000 -1.500000000 35.500000000 7.500000000 -1.500000000 -36.500000000 8.500000000 -1.500000000 35.500000000 8.500000000 -1.500000000 -36.500000000 9.500000000 -1.500000000 35.500000000 9.500000000 -1.500000000 -36.500000000 10.500000000 -1.500000000 35.500000000 10.500000000 
-1.500000000 -36.500000000 11.500000000 -1.500000000 35.500000000 11.500000000 -1.500000000 -36.500000000 12.500000000 -1.500000000 35.500000000 12.500000000 -1.500000000 -36.500000000 13.500000000 -1.500000000 35.500000000 13.500000000 -1.500000000 -36.500000000 14.500000000 -1.500000000 35.500000000 14.500000000 -1.500000000 -36.500000000 15.500000000 -1.500000000 35.500000000 15.500000000 -1.500000000 -36.500000000 16.500000000 -1.500000000 35.500000000 16.500000000 -1.500000000 -36.500000000 17.500000000 -1.500000000 35.500000000 17.500000000 -1.500000000 -36.500000000 18.500000000 -1.500000000 35.500000000 18.500000000 -1.500000000 -36.500000000 19.500000000 -1.500000000 35.500000000 19.500000000 -1.500000000 -36.500000000 20.500000000 -1.500000000 35.500000000 20.500000000 -1.500000000 -36.500000000 21.500000000 -1.500000000 35.500000000 21.500000000 -1.500000000 -36.500000000 22.500000000 -1.500000000 35.500000000 22.500000000 -1.500000000 -36.500000000 23.500000000 -1.500000000 35.500000000 23.500000000 -1.500000000 -36.500000000 24.500000000 -1.500000000 35.500000000 24.500000000 -1.500000000 -36.499996185 25.499996185 -1.500000000 35.499996185 25.499996185 -1.500000000 -36.499954224 26.499954224 -1.500000000 35.499954224 26.499954224 -1.500000000 -36.499591827 27.499591827 -1.500000000 35.499591827 27.499591827 -1.500000000 -36.497474670 28.497470856 -1.500000000 35.497467041 28.497470856 -1.500000000 -36.488403320 29.488407135 -1.500000000 35.488403320 29.488407135 -1.500000000 -36.458980560 30.458978653 -1.500000000 35.458980560 30.458978653 -1.500000000 -36.384422302 31.384418488 -1.500000000 35.384422302 31.384418488 -1.500000000 -36.233222961 32.233222961 -1.500000000 35.233222961 32.233222961 -1.500000000 -35.981101990 32.981101990 -1.500000000 -35.622871399 33.622871399 -1.500000000 34.622871399 33.622871399 -1.500000000 34.981101990 32.981101990 -1.500000000 -35.167964935 34.167964935 -1.500000000 -34.622871399 34.622871399 -1.500000000 
33.622871399 34.622871399 -1.500000000 34.167964935 34.167964935 -1.500000000 -33.981101990 34.981101990 -1.500000000 -33.233222961 35.233222961 -1.500000000 -32.384422302 35.384418488 -1.500000000 -31.458978653 35.458976746 -1.500000000 -30.488407135 35.488403320 -1.500000000 -29.497472763 35.497467041 -1.500000000 -28.499593735 35.499591827 -1.500000000 -27.499954224 35.499954224 -1.500000000 -26.499996185 35.499996185 -1.500000000 -25.500000000 35.500000000 -1.500000000 -24.500000000 35.500000000 -1.500000000 -23.500000000 35.500000000 -1.500000000 -22.500000000 35.500000000 -1.500000000 -21.500000000 35.500000000 -1.500000000 -20.500000000 35.500000000 -1.500000000 -19.500000000 35.500000000 -1.500000000 -18.500000000 35.500000000 -1.500000000 -17.500000000 35.500000000 -1.500000000 -16.500000000 35.500000000 -1.500000000 -15.500000000 35.500000000 -1.500000000 -14.500000000 35.500000000 -1.500000000 -13.500000000 35.500000000 -1.500000000 -12.500000000 35.500000000 -1.500000000 -11.500000000 35.500000000 -1.500000000 -10.500000000 35.500000000 -1.500000000 -9.500000000 35.500000000 -1.500000000 -8.500000000 35.500000000 -1.500000000 -7.500000000 35.500000000 -1.500000000 -6.500000000 35.500000000 -1.500000000 -5.500000000 35.500000000 -1.500000000 -4.500000000 35.500000000 -1.500000000 -3.500000000 35.500000000 -1.500000000 -2.500000000 35.500000000 -1.500000000 -1.500000000 35.500000000 -1.500000000 -0.500000000 35.500000000 -1.500000000 0.500000000 35.500000000 -1.500000000 1.500000000 35.500000000 -1.500000000 2.500000000 35.500000000 -1.500000000 3.500000000 35.500000000 -1.500000000 4.500000000 35.500000000 -1.500000000 5.500000000 35.500000000 -1.500000000 6.500000000 35.500000000 -1.500000000 7.500000000 35.500000000 -1.500000000 8.500000000 35.500000000 -1.500000000 9.500000000 35.500000000 -1.500000000 10.500000000 35.500000000 -1.500000000 11.500000000 35.500000000 -1.500000000 12.500000000 35.500000000 -1.500000000 13.500000000 35.500000000 
-1.500000000 14.500000000 35.500000000 -1.500000000 15.500000000 35.500000000 -1.500000000 16.500000000 35.500000000 -1.500000000 17.500000000 35.500000000 -1.500000000 18.500000000 35.500000000 -1.500000000 19.500000000 35.500000000 -1.500000000 20.500000000 35.500000000 -1.500000000 21.500000000 35.500000000 -1.500000000 22.500000000 35.500000000 -1.500000000 23.500000000 35.500000000 -1.500000000 24.500000000 35.500000000 -1.500000000 25.499996185 35.499996185 -1.500000000 26.499954224 35.499954224 -1.500000000 27.499591827 35.499591827 -1.500000000 28.497470856 35.497474670 -1.500000000 29.488407135 35.488403320 -1.500000000 30.458978653 35.458980560 -1.500000000 31.384418488 35.384422302 -1.500000000 32.233222961 35.233222961 -1.500000000 32.981101990 34.981101990 -1.500000000 -33.981101990 -35.981101990 -0.500000000 -33.233226776 -36.233222961 -0.500000000 -32.384422302 -36.384418488 -0.500000000 -31.458978653 -36.458980560 -0.500000000 -30.488407135 -36.488403320 -0.500000000 -29.497472763 -36.497474670 -0.500000000 -28.499593735 -36.499591827 -0.500000000 -27.499954224 -36.499954224 -0.500000000 -26.499996185 -36.499996185 -0.500000000 -25.500000000 -36.500000000 -0.500000000 -24.500000000 -36.500000000 -0.500000000 -23.500000000 -36.500000000 -0.500000000 -22.500000000 -36.500000000 -0.500000000 -21.500000000 -36.500000000 -0.500000000 -20.500000000 -36.500000000 -0.500000000 -19.500000000 -36.500000000 -0.500000000 -18.500000000 -36.500000000 -0.500000000 -17.500000000 -36.500000000 -0.500000000 -16.500000000 -36.500000000 -0.500000000 -15.500000000 -36.500000000 -0.500000000 -14.500000000 -36.500000000 -0.500000000 -13.500000000 -36.500000000 -0.500000000 -12.500000000 -36.500000000 -0.500000000 -11.500000000 -36.500000000 -0.500000000 -10.500000000 -36.500000000 -0.500000000 -9.500000000 -36.500000000 -0.500000000 -8.500000000 -36.500000000 -0.500000000 -7.500000000 -36.500000000 -0.500000000 -6.500000000 -36.500000000 -0.500000000 -5.500000000 
-36.500000000 -0.500000000 -4.500000000 -36.500000000 -0.500000000 -3.500000000 -36.500000000 -0.500000000 -2.500000000 -36.500000000 -0.500000000 -1.500000000 -36.500000000 -0.500000000 -0.500000000 -36.500000000 -0.500000000 0.500000000 -36.500000000 -0.500000000 1.500000000 -36.500000000 -0.500000000 2.500000000 -36.500000000 -0.500000000 3.500000000 -36.500000000 -0.500000000 4.500000000 -36.500000000 -0.500000000 5.500000000 -36.500000000 -0.500000000 6.500000000 -36.500000000 -0.500000000 7.500000000 -36.500000000 -0.500000000 8.500000000 -36.500000000 -0.500000000 9.500000000 -36.500000000 -0.500000000 10.500000000 -36.500000000 -0.500000000 11.500000000 -36.500000000 -0.500000000 12.500000000 -36.500000000 -0.500000000 13.500000000 -36.500000000 -0.500000000 14.500000000 -36.500000000 -0.500000000 15.500000000 -36.500000000 -0.500000000 16.500000000 -36.500000000 -0.500000000 17.500000000 -36.500000000 -0.500000000 18.500000000 -36.500000000 -0.500000000 19.500000000 -36.500000000 -0.500000000 20.500000000 -36.500000000 -0.500000000 21.500000000 -36.500000000 -0.500000000 22.500000000 -36.500000000 -0.500000000 23.500000000 -36.500000000 -0.500000000 24.500000000 -36.500000000 -0.500000000 25.499996185 -36.499996185 -0.500000000 26.499954224 -36.499954224 -0.500000000 27.499591827 -36.499591827 -0.500000000 28.497470856 -36.497467041 -0.500000000 29.488407135 -36.488403320 -0.500000000 30.458978653 -36.458980560 -0.500000000 31.384418488 -36.384422302 -0.500000000 32.233222961 -36.233222961 -0.500000000 32.981101990 -35.981101990 -0.500000000 -35.167964935 -35.167964935 -0.500000000 -34.622871399 -35.622871399 -0.500000000 33.622871399 -35.622871399 -0.500000000 34.167964935 -35.167964935 -0.500000000 -35.981101990 -33.981101990 -0.500000000 -35.622871399 -34.622871399 -0.500000000 34.622871399 -34.622871399 -0.500000000 34.981101990 -33.981101990 -0.500000000 -36.233222961 -33.233222961 -0.500000000 35.233222961 -33.233226776 -0.500000000 -36.384418488 
-32.384422302 -0.500000000 35.384418488 -32.384422302 -0.500000000 -36.458976746 -31.458978653 -0.500000000 35.458980560 -31.458978653 -0.500000000 -36.488403320 -30.488407135 -0.500000000 35.488403320 -30.488407135 -0.500000000 -36.497467041 -29.497472763 -0.500000000 35.497474670 -29.497472763 -0.500000000 -36.499591827 -28.499593735 -0.500000000 35.499591827 -28.499593735 -0.500000000 -36.499954224 -27.499954224 -0.500000000 35.499954224 -27.499954224 -0.500000000 -36.499996185 -26.499996185 -0.500000000 35.499996185 -26.499996185 -0.500000000 -36.500000000 -25.500000000 -0.500000000 35.500000000 -25.500000000 -0.500000000 -36.500000000 -24.500000000 -0.500000000 35.500000000 -24.500000000 -0.500000000 -36.500000000 -23.500000000 -0.500000000 35.500000000 -23.500000000 -0.500000000 -36.500000000 -22.500000000 -0.500000000 35.500000000 -22.500000000 -0.500000000 -36.500000000 -21.500000000 -0.500000000 35.500000000 -21.500000000 -0.500000000 -36.500000000 -20.500000000 -0.500000000 35.500000000 -20.500000000 -0.500000000 -36.500000000 -19.500000000 -0.500000000 35.500000000 -19.500000000 -0.500000000 -36.500000000 -18.500000000 -0.500000000 35.500000000 -18.500000000 -0.500000000 -36.500000000 -17.500000000 -0.500000000 35.500000000 -17.500000000 -0.500000000 -36.500000000 -16.500000000 -0.500000000 35.500000000 -16.500000000 -0.500000000 -36.500000000 -15.500000000 -0.500000000 35.500000000 -15.500000000 -0.500000000 -36.500000000 -14.500000000 -0.500000000 35.500000000 -14.500000000 -0.500000000 -36.500000000 -13.500000000 -0.500000000 35.500000000 -13.500000000 -0.500000000 -36.500000000 -12.500000000 -0.500000000 35.500000000 -12.500000000 -0.500000000 -36.500000000 -11.500000000 -0.500000000 35.500000000 -11.500000000 -0.500000000 -36.500000000 -10.500000000 -0.500000000 35.500000000 -10.500000000 -0.500000000 -36.500000000 -9.500000000 -0.500000000 35.500000000 -9.500000000 -0.500000000 -36.500000000 -8.500000000 -0.500000000 35.500000000 -8.500000000 
-0.500000000 -36.500000000 -7.500000000 -0.500000000 35.500000000 -7.500000000 -0.500000000 -36.500000000 -6.500000000 -0.500000000 35.500000000 -6.500000000 -0.500000000 -36.500000000 -5.500000000 -0.500000000 35.500000000 -5.500000000 -0.500000000 -36.500000000 -4.500000000 -0.500000000 35.500000000 -4.500000000 -0.500000000 -36.500000000 -3.500000000 -0.500000000 35.500000000 -3.500000000 -0.500000000 -36.500000000 -2.500000000 -0.500000000 35.500000000 -2.500000000 -0.500000000 -36.500000000 -1.500000000 -0.500000000 35.500000000 -1.500000000 -0.500000000 -36.500000000 -0.500000000 -0.500000000 35.500000000 -0.500000000 -0.500000000 -36.500000000 0.500000000 -0.500000000 35.500000000 0.500000000 -0.500000000 -36.500000000 1.500000000 -0.500000000 35.500000000 1.500000000 -0.500000000 -36.500000000 2.500000000 -0.500000000 35.500000000 2.500000000 -0.500000000 -36.500000000 3.500000000 -0.500000000 35.500000000 3.500000000 -0.500000000 -36.500000000 4.500000000 -0.500000000 35.500000000 4.500000000 -0.500000000 -36.500000000 5.500000000 -0.500000000 35.500000000 5.500000000 -0.500000000 -36.500000000 6.500000000 -0.500000000 35.500000000 6.500000000 -0.500000000 -36.500000000 7.500000000 -0.500000000 35.500000000 7.500000000 -0.500000000 -36.500000000 8.500000000 -0.500000000 35.500000000 8.500000000 -0.500000000 -36.500000000 9.500000000 -0.500000000 35.500000000 9.500000000 -0.500000000 -36.500000000 10.500000000 -0.500000000 35.500000000 10.500000000 -0.500000000 -36.500000000 11.500000000 -0.500000000 35.500000000 11.500000000 -0.500000000 -36.500000000 12.500000000 -0.500000000 35.500000000 12.500000000 -0.500000000 -36.500000000 13.500000000 -0.500000000 35.500000000 13.500000000 -0.500000000 -36.500000000 14.500000000 -0.500000000 35.500000000 14.500000000 -0.500000000 -36.500000000 15.500000000 -0.500000000 35.500000000 15.500000000 -0.500000000 -36.500000000 16.500000000 -0.500000000 35.500000000 16.500000000 -0.500000000 -36.500000000 17.500000000 
-0.500000000 35.500000000 17.500000000 -0.500000000 -36.500000000 18.500000000 -0.500000000 35.500000000 18.500000000 -0.500000000 -36.500000000 19.500000000 -0.500000000 35.500000000 19.500000000 -0.500000000 -36.500000000 20.500000000 -0.500000000 35.500000000 20.500000000 -0.500000000 -36.500000000 21.500000000 -0.500000000 35.500000000 21.500000000 -0.500000000 -36.500000000 22.500000000 -0.500000000 35.500000000 22.500000000 -0.500000000 -36.500000000 23.500000000 -0.500000000 35.500000000 23.500000000 -0.500000000 -36.500000000 24.500000000 -0.500000000 35.500000000 24.500000000 -0.500000000 -36.499996185 25.499996185 -0.500000000 35.499996185 25.499996185 -0.500000000 -36.499954224 26.499954224 -0.500000000 35.499954224 26.499954224 -0.500000000 -36.499591827 27.499591827 -0.500000000 35.499591827 27.499591827 -0.500000000 -36.497474670 28.497470856 -0.500000000 35.497467041 28.497470856 -0.500000000 -36.488403320 29.488407135 -0.500000000 35.488403320 29.488407135 -0.500000000 -36.458980560 30.458978653 -0.500000000 35.458980560 30.458978653 -0.500000000 -36.384422302 31.384418488 -0.500000000 35.384422302 31.384418488 -0.500000000 -36.233222961 32.233222961 -0.500000000 35.233222961 32.233222961 -0.500000000 -35.981101990 32.981101990 -0.500000000 -35.622871399 33.622871399 -0.500000000 34.622871399 33.622871399 -0.500000000 34.981101990 32.981101990 -0.500000000 -35.167964935 34.167964935 -0.500000000 -34.622871399 34.622871399 -0.500000000 33.622871399 34.622871399 -0.500000000 34.167964935 34.167964935 -0.500000000 -33.981101990 34.981101990 -0.500000000 -33.233222961 35.233222961 -0.500000000 -32.384422302 35.384418488 -0.500000000 -31.458978653 35.458976746 -0.500000000 -30.488407135 35.488403320 -0.500000000 -29.497472763 35.497467041 -0.500000000 -28.499593735 35.499591827 -0.500000000 -27.499954224 35.499954224 -0.500000000 -26.499996185 35.499996185 -0.500000000 -25.500000000 35.500000000 -0.500000000 -24.500000000 35.500000000 -0.500000000 
-23.500000000 35.500000000 -0.500000000 -22.500000000 35.500000000 -0.500000000 -21.500000000 35.500000000 -0.500000000 -20.500000000 35.500000000 -0.500000000 -19.500000000 35.500000000 -0.500000000 -18.500000000 35.500000000 -0.500000000 -17.500000000 35.500000000 -0.500000000 -16.500000000 35.500000000 -0.500000000 -15.500000000 35.500000000 -0.500000000 -14.500000000 35.500000000 -0.500000000 -13.500000000 35.500000000 -0.500000000 -12.500000000 35.500000000 -0.500000000 -11.500000000 35.500000000 -0.500000000 -10.500000000 35.500000000 -0.500000000 -9.500000000 35.500000000 -0.500000000 -8.500000000 35.500000000 -0.500000000 -7.500000000 35.500000000 -0.500000000 -6.500000000 35.500000000 -0.500000000 -5.500000000 35.500000000 -0.500000000 -4.500000000 35.500000000 -0.500000000 -3.500000000 35.500000000 -0.500000000 -2.500000000 35.500000000 -0.500000000 -1.500000000 35.500000000 -0.500000000 -0.500000000 35.500000000 -0.500000000 0.500000000 35.500000000 -0.500000000 1.500000000 35.500000000 -0.500000000 2.500000000 35.500000000 -0.500000000 3.500000000 35.500000000 -0.500000000 4.500000000 35.500000000 -0.500000000 5.500000000 35.500000000 -0.500000000 6.500000000 35.500000000 -0.500000000 7.500000000 35.500000000 -0.500000000 8.500000000 35.500000000 -0.500000000 9.500000000 35.500000000 -0.500000000 10.500000000 35.500000000 -0.500000000 11.500000000 35.500000000 -0.500000000 12.500000000 35.500000000 -0.500000000 13.500000000 35.500000000 -0.500000000 14.500000000 35.500000000 -0.500000000 15.500000000 35.500000000 -0.500000000 16.500000000 35.500000000 -0.500000000 17.500000000 35.500000000 -0.500000000 18.500000000 35.500000000 -0.500000000 19.500000000 35.500000000 -0.500000000 20.500000000 35.500000000 -0.500000000 21.500000000 35.500000000 -0.500000000 22.500000000 35.500000000 -0.500000000 23.500000000 35.500000000 -0.500000000 24.500000000 35.500000000 -0.500000000 25.499996185 35.499996185 -0.500000000 26.499954224 35.499954224 -0.500000000 
27.499591827 35.499591827 -0.500000000 28.497470856 35.497474670 -0.500000000 29.488407135 35.488403320 -0.500000000 30.458978653 35.458980560 -0.500000000 31.384418488 35.384422302 -0.500000000 32.233222961 35.233222961 -0.500000000 32.981101990 34.981101990 -0.500000000 -33.981101990 -35.981101990 0.500000000 -33.233226776 -36.233222961 0.500000000 -32.384422302 -36.384418488 0.500000000 -31.458978653 -36.458980560 0.500000000 -30.488407135 -36.488403320 0.500000000 -29.497472763 -36.497474670 0.500000000 -28.499593735 -36.499591827 0.500000000 -27.499954224 -36.499954224 0.500000000 -26.499996185 -36.499996185 0.500000000 -25.500000000 -36.500000000 0.500000000 -24.500000000 -36.500000000 0.500000000 -23.500000000 -36.500000000 0.500000000 -22.500000000 -36.500000000 0.500000000 -21.500000000 -36.500000000 0.500000000 -20.500000000 -36.500000000 0.500000000 -19.500000000 -36.500000000 0.500000000 -18.500000000 -36.500000000 0.500000000 -17.500000000 -36.500000000 0.500000000 -16.500000000 -36.500000000 0.500000000 -15.500000000 -36.500000000 0.500000000 -14.500000000 -36.500000000 0.500000000 -13.500000000 -36.500000000 0.500000000 -12.500000000 -36.500000000 0.500000000 -11.500000000 -36.500000000 0.500000000 -10.500000000 -36.500000000 0.500000000 -9.500000000 -36.500000000 0.500000000 -8.500000000 -36.500000000 0.500000000 -7.500000000 -36.500000000 0.500000000 -6.500000000 -36.500000000 0.500000000 -5.500000000 -36.500000000 0.500000000 -4.500000000 -36.500000000 0.500000000 -3.500000000 -36.500000000 0.500000000 -2.500000000 -36.500000000 0.500000000 -1.500000000 -36.500000000 0.500000000 -0.500000000 -36.500000000 0.500000000 0.500000000 -36.500000000 0.500000000 1.500000000 -36.500000000 0.500000000 2.500000000 -36.500000000 0.500000000 3.500000000 -36.500000000 0.500000000 4.500000000 -36.500000000 0.500000000 5.500000000 -36.500000000 0.500000000 6.500000000 -36.500000000 0.500000000 7.500000000 -36.500000000 0.500000000 8.500000000 -36.500000000 
0.500000000 9.500000000 -36.500000000 0.500000000 10.500000000 -36.500000000 0.500000000 11.500000000 -36.500000000 0.500000000 12.500000000 -36.500000000 0.500000000 13.500000000 -36.500000000 0.500000000 14.500000000 -36.500000000 0.500000000 15.500000000 -36.500000000 0.500000000 16.500000000 -36.500000000 0.500000000 17.500000000 -36.500000000 0.500000000 18.500000000 -36.500000000 0.500000000 19.500000000 -36.500000000 0.500000000 20.500000000 -36.500000000 0.500000000 21.500000000 -36.500000000 0.500000000 22.500000000 -36.500000000 0.500000000 23.500000000 -36.500000000 0.500000000 24.500000000 -36.500000000 0.500000000 25.499996185 -36.499996185 0.500000000 26.499954224 -36.499954224 0.500000000 27.499591827 -36.499591827 0.500000000 28.497470856 -36.497467041 0.500000000 29.488407135 -36.488403320 0.500000000 30.458978653 -36.458980560 0.500000000 31.384418488 -36.384422302 0.500000000 32.233222961 -36.233222961 0.500000000 32.981101990 -35.981101990 0.500000000 -35.167964935 -35.167964935 0.500000000 -34.622871399 -35.622871399 0.500000000 33.622871399 -35.622871399 0.500000000 34.167964935 -35.167964935 0.500000000 -35.981101990 -33.981101990 0.500000000 -35.622871399 -34.622871399 0.500000000 34.622871399 -34.622871399 0.500000000 34.981101990 -33.981101990 0.500000000 -36.233222961 -33.233222961 0.500000000 35.233222961 -33.233226776 0.500000000 -36.384418488 -32.384422302 0.500000000 35.384418488 -32.384422302 0.500000000 -36.458976746 -31.458978653 0.500000000 35.458980560 -31.458978653 0.500000000 -36.488403320 -30.488407135 0.500000000 35.488403320 -30.488407135 0.500000000 -36.497467041 -29.497472763 0.500000000 35.497474670 -29.497472763 0.500000000 -36.499591827 -28.499593735 0.500000000 35.499591827 -28.499593735 0.500000000 -36.499954224 -27.499954224 0.500000000 35.499954224 -27.499954224 0.500000000 -36.499996185 -26.499996185 0.500000000 35.499996185 -26.499996185 0.500000000 -36.500000000 -25.500000000 0.500000000 35.500000000 
-25.500000000 0.500000000 -36.500000000 -24.500000000 0.500000000 35.500000000 -24.500000000 0.500000000 -36.500000000 -23.500000000 0.500000000 35.500000000 -23.500000000 0.500000000 -36.500000000 -22.500000000 0.500000000 35.500000000 -22.500000000 0.500000000 -36.500000000 -21.500000000 0.500000000 35.500000000 -21.500000000 0.500000000 -36.500000000 -20.500000000 0.500000000 35.500000000 -20.500000000 0.500000000 -36.500000000 -19.500000000 0.500000000 35.500000000 -19.500000000 0.500000000 -36.500000000 -18.500000000 0.500000000 35.500000000 -18.500000000 0.500000000 -36.500000000 -17.500000000 0.500000000 35.500000000 -17.500000000 0.500000000 -36.500000000 -16.500000000 0.500000000 35.500000000 -16.500000000 0.500000000 -36.500000000 -15.500000000 0.500000000 35.500000000 -15.500000000 0.500000000 -36.500000000 -14.500000000 0.500000000 35.500000000 -14.500000000 0.500000000 -36.500000000 -13.500000000 0.500000000 35.500000000 -13.500000000 0.500000000 -36.500000000 -12.500000000 0.500000000 35.500000000 -12.500000000 0.500000000 -36.500000000 -11.500000000 0.500000000 35.500000000 -11.500000000 0.500000000 -36.500000000 -10.500000000 0.500000000 35.500000000 -10.500000000 0.500000000 -36.500000000 -9.500000000 0.500000000 35.500000000 -9.500000000 0.500000000 -36.500000000 -8.500000000 0.500000000 35.500000000 -8.500000000 0.500000000 -36.500000000 -7.500000000 0.500000000 35.500000000 -7.500000000 0.500000000 -36.500000000 -6.500000000 0.500000000 35.500000000 -6.500000000 0.500000000 -36.500000000 -5.500000000 0.500000000 35.500000000 -5.500000000 0.500000000 -36.500000000 -4.500000000 0.500000000 35.500000000 -4.500000000 0.500000000 -36.500000000 -3.500000000 0.500000000 35.500000000 -3.500000000 0.500000000 -36.500000000 -2.500000000 0.500000000 35.500000000 -2.500000000 0.500000000 -36.500000000 -1.500000000 0.500000000 35.500000000 -1.500000000 0.500000000 -36.500000000 -0.500000000 0.500000000 35.500000000 -0.500000000 0.500000000 -36.500000000 
0.500000000 0.500000000 35.500000000 0.500000000 0.500000000 -36.500000000 1.500000000 0.500000000 35.500000000 1.500000000 0.500000000 -36.500000000 2.500000000 0.500000000 35.500000000 2.500000000 0.500000000 -36.500000000 3.500000000 0.500000000 35.500000000 3.500000000 0.500000000 -36.500000000 4.500000000 0.500000000 35.500000000 4.500000000 0.500000000 -36.500000000 5.500000000 0.500000000 35.500000000 5.500000000 0.500000000 -36.500000000 6.500000000 0.500000000 35.500000000 6.500000000 0.500000000 -36.500000000 7.500000000 0.500000000 35.500000000 7.500000000 0.500000000 -36.500000000 8.500000000 0.500000000 35.500000000 8.500000000 0.500000000 -36.500000000 9.500000000 0.500000000 35.500000000 9.500000000 0.500000000 -36.500000000 10.500000000 0.500000000 35.500000000 10.500000000 0.500000000 -36.500000000 11.500000000 0.500000000 35.500000000 11.500000000 0.500000000 -36.500000000 12.500000000 0.500000000 35.500000000 12.500000000 0.500000000 -36.500000000 13.500000000 0.500000000 35.500000000 13.500000000 0.500000000 -36.500000000 14.500000000 0.500000000 35.500000000 14.500000000 0.500000000 -36.500000000 15.500000000 0.500000000 35.500000000 15.500000000 0.500000000 -36.500000000 16.500000000 0.500000000 35.500000000 16.500000000 0.500000000 -36.500000000 17.500000000 0.500000000 35.500000000 17.500000000 0.500000000 -36.500000000 18.500000000 0.500000000 35.500000000 18.500000000 0.500000000 -36.500000000 19.500000000 0.500000000 35.500000000 19.500000000 0.500000000 -36.500000000 20.500000000 0.500000000 35.500000000 20.500000000 0.500000000 -36.500000000 21.500000000 0.500000000 35.500000000 21.500000000 0.500000000 -36.500000000 22.500000000 0.500000000 35.500000000 22.500000000 0.500000000 -36.500000000 23.500000000 0.500000000 35.500000000 23.500000000 0.500000000 -36.500000000 24.500000000 0.500000000 35.500000000 24.500000000 0.500000000 -36.499996185 25.499996185 0.500000000 35.499996185 25.499996185 0.500000000 -36.499954224 26.499954224 
0.500000000 35.499954224 26.499954224 0.500000000 -36.499591827 27.499591827 0.500000000 35.499591827 27.499591827 0.500000000 -36.497474670 28.497470856 0.500000000 35.497467041 28.497470856 0.500000000 -36.488403320 29.488407135 0.500000000 35.488403320 29.488407135 0.500000000 -36.458980560 30.458978653 0.500000000 35.458980560 30.458978653 0.500000000 -36.384422302 31.384418488 0.500000000 35.384422302 31.384418488 0.500000000 -36.233222961 32.233222961 0.500000000 35.233222961 32.233222961 0.500000000 -35.981101990 32.981101990 0.500000000 -35.622871399 33.622871399 0.500000000 34.622871399 33.622871399 0.500000000 34.981101990 32.981101990 0.500000000 -35.167964935 34.167964935 0.500000000 -34.622871399 34.622871399 0.500000000 33.622871399 34.622871399 0.500000000 34.167964935 34.167964935 0.500000000 -33.981101990 34.981101990 0.500000000 -33.233222961 35.233222961 0.500000000 -32.384422302 35.384418488 0.500000000 -31.458978653 35.458976746 0.500000000 -30.488407135 35.488403320 0.500000000 -29.497472763 35.497467041 0.500000000 -28.499593735 35.499591827 0.500000000 -27.499954224 35.499954224 0.500000000 -26.499996185 35.499996185 0.500000000 -25.500000000 35.500000000 0.500000000 -24.500000000 35.500000000 0.500000000 -23.500000000 35.500000000 0.500000000 -22.500000000 35.500000000 0.500000000 -21.500000000 35.500000000 0.500000000 -20.500000000 35.500000000 0.500000000 -19.500000000 35.500000000 0.500000000 -18.500000000 35.500000000 0.500000000 -17.500000000 35.500000000 0.500000000 -16.500000000 35.500000000 0.500000000 -15.500000000 35.500000000 0.500000000 -14.500000000 35.500000000 0.500000000 -13.500000000 35.500000000 0.500000000 -12.500000000 35.500000000 0.500000000 -11.500000000 35.500000000 0.500000000 -10.500000000 35.500000000 0.500000000 -9.500000000 35.500000000 0.500000000 -8.500000000 35.500000000 0.500000000 -7.500000000 35.500000000 0.500000000 -6.500000000 35.500000000 0.500000000 -5.500000000 35.500000000 0.500000000 -4.500000000 
35.500000000 0.500000000 -3.500000000 35.500000000 0.500000000 -2.500000000 35.500000000 0.500000000 -1.500000000 35.500000000 0.500000000 -0.500000000 35.500000000 0.500000000 0.500000000 35.500000000 0.500000000 1.500000000 35.500000000 0.500000000 2.500000000 35.500000000 0.500000000 3.500000000 35.500000000 0.500000000 4.500000000 35.500000000 0.500000000 5.500000000 35.500000000 0.500000000 6.500000000 35.500000000 0.500000000 7.500000000 35.500000000 0.500000000 8.500000000 35.500000000 0.500000000 9.500000000 35.500000000 0.500000000 10.500000000 35.500000000 0.500000000 11.500000000 35.500000000 0.500000000 12.500000000 35.500000000 0.500000000 13.500000000 35.500000000 0.500000000 14.500000000 35.500000000 0.500000000 15.500000000 35.500000000 0.500000000 16.500000000 35.500000000 0.500000000 17.500000000 35.500000000 0.500000000 18.500000000 35.500000000 0.500000000 19.500000000 35.500000000 0.500000000 20.500000000 35.500000000 0.500000000 21.500000000 35.500000000 0.500000000 22.500000000 35.500000000 0.500000000 23.500000000 35.500000000 0.500000000 24.500000000 35.500000000 0.500000000 25.499996185 35.499996185 0.500000000 26.499954224 35.499954224 0.500000000 27.499591827 35.499591827 0.500000000 28.497470856 35.497474670 0.500000000 29.488407135 35.488403320 0.500000000 30.458978653 35.458980560 0.500000000 31.384418488 35.384422302 0.500000000 32.233222961 35.233222961 0.500000000 32.981101990 34.981101990 0.500000000 -33.981101990 -35.981101990 1.500000000 -33.233226776 -36.233222961 1.500000000 -32.384422302 -36.384418488 1.500000000 -31.458978653 -36.458980560 1.500000000 -30.488407135 -36.488403320 1.500000000 -29.497472763 -36.497474670 1.500000000 -28.499593735 -36.499591827 1.500000000 -27.499954224 -36.499954224 1.500000000 -26.499996185 -36.499996185 1.500000000 -25.500000000 -36.500000000 1.500000000 -24.500000000 -36.500000000 1.500000000 -23.500000000 -36.500000000 1.500000000 -22.500000000 -36.500000000 1.500000000 -21.500000000 
-36.500000000 1.500000000 -20.500000000 -36.500000000 1.500000000 -19.500000000 -36.500000000 1.500000000 -18.500000000 -36.500000000 1.500000000 -17.500000000 -36.500000000 1.500000000 -16.500000000 -36.500000000 1.500000000 -15.500000000 -36.500000000 1.500000000 -14.500000000 -36.500000000 1.500000000 -13.500000000 -36.500000000 1.500000000 -12.500000000 -36.500000000 1.500000000 -11.500000000 -36.500000000 1.500000000 -10.500000000 -36.500000000 1.500000000 -9.500000000 -36.500000000 1.500000000 -8.500000000 -36.500000000 1.500000000 -7.500000000 -36.500000000 1.500000000 -6.500000000 -36.500000000 1.500000000 -5.500000000 -36.500000000 1.500000000 -4.500000000 -36.500000000 1.500000000 -3.500000000 -36.500000000 1.500000000 -2.500000000 -36.500000000 1.500000000 -1.500000000 -36.500000000 1.500000000 -0.500000000 -36.500000000 1.500000000 0.500000000 -36.500000000 1.500000000 1.500000000 -36.500000000 1.500000000 2.500000000 -36.500000000 1.500000000 3.500000000 -36.500000000 1.500000000 4.500000000 -36.500000000 1.500000000 5.500000000 -36.500000000 1.500000000 6.500000000 -36.500000000 1.500000000 7.500000000 -36.500000000 1.500000000 8.500000000 -36.500000000 1.500000000 9.500000000 -36.500000000 1.500000000 10.500000000 -36.500000000 1.500000000 11.500000000 -36.500000000 1.500000000 12.500000000 -36.500000000 1.500000000 13.500000000 -36.500000000 1.500000000 14.500000000 -36.500000000 1.500000000 15.500000000 -36.500000000 1.500000000 16.500000000 -36.500000000 1.500000000 17.500000000 -36.500000000 1.500000000 18.500000000 -36.500000000 1.500000000 19.500000000 -36.500000000 1.500000000 20.500000000 -36.500000000 1.500000000 21.500000000 -36.500000000 1.500000000 22.500000000 -36.500000000 1.500000000 23.500000000 -36.500000000 1.500000000 24.500000000 -36.500000000 1.500000000 25.499996185 -36.499996185 1.500000000 26.499954224 -36.499954224 1.500000000 27.499591827 -36.499591827 1.500000000 28.497470856 -36.497467041 1.500000000 29.488407135 
-36.488403320 1.500000000 30.458978653 -36.458980560 1.500000000 31.384418488 -36.384422302 1.500000000 32.233222961 -36.233222961 1.500000000 32.981101990 -35.981101990 1.500000000 -35.167964935 -35.167964935 1.500000000 -34.622871399 -35.622871399 1.500000000 33.622871399 -35.622871399 1.500000000 34.167964935 -35.167964935 1.500000000 -35.981101990 -33.981101990 1.500000000 -35.622871399 -34.622871399 1.500000000 34.622871399 -34.622871399 1.500000000 34.981101990 -33.981101990 1.500000000 -36.233222961 -33.233222961 1.500000000 35.233222961 -33.233226776 1.500000000 -36.384418488 -32.384422302 1.500000000 35.384418488 -32.384422302 1.500000000 -36.458976746 -31.458978653 1.500000000 35.458980560 -31.458978653 1.500000000 -36.488403320 -30.488407135 1.500000000 35.488403320 -30.488407135 1.500000000 -36.497467041 -29.497472763 1.500000000 35.497474670 -29.497472763 1.500000000 -36.499591827 -28.499593735 1.500000000 35.499591827 -28.499593735 1.500000000 -36.499954224 -27.499954224 1.500000000 35.499954224 -27.499954224 1.500000000 -36.499996185 -26.499996185 1.500000000 35.499996185 -26.499996185 1.500000000 -36.500000000 -25.500000000 1.500000000 35.500000000 -25.500000000 1.500000000 -36.500000000 -24.500000000 1.500000000 35.500000000 -24.500000000 1.500000000 -36.500000000 -23.500000000 1.500000000 35.500000000 -23.500000000 1.500000000 -36.500000000 -22.500000000 1.500000000 35.500000000 -22.500000000 1.500000000 -36.500000000 -21.500000000 1.500000000 35.500000000 -21.500000000 1.500000000 -36.500000000 -20.500000000 1.500000000 35.500000000 -20.500000000 1.500000000 -36.500000000 -19.500000000 1.500000000 35.500000000 -19.500000000 1.500000000 -36.500000000 -18.500000000 1.500000000 35.500000000 -18.500000000 1.500000000 -36.500000000 -17.500000000 1.500000000 35.500000000 -17.500000000 1.500000000 -36.500000000 -16.500000000 1.500000000 35.500000000 -16.500000000 1.500000000 -36.500000000 -15.500000000 1.500000000 35.500000000 -15.500000000 1.500000000 
-36.500000000 -14.500000000 1.500000000 35.500000000 -14.500000000 1.500000000 -36.500000000 -13.500000000 1.500000000 35.500000000 -13.500000000 1.500000000 -36.500000000 -12.500000000 1.500000000 35.500000000 -12.500000000 1.500000000 -36.500000000 -11.500000000 1.500000000 35.500000000 -11.500000000 1.500000000 -36.500000000 -10.500000000 1.500000000 35.500000000 -10.500000000 1.500000000 -36.500000000 -9.500000000 1.500000000 35.500000000 -9.500000000 1.500000000 -36.500000000 -8.500000000 1.500000000 35.500000000 -8.500000000 1.500000000 -36.500000000 -7.500000000 1.500000000 35.500000000 -7.500000000 1.500000000 -36.500000000 -6.500000000 1.500000000 35.500000000 -6.500000000 1.500000000 -36.500000000 -5.500000000 1.500000000 35.500000000 -5.500000000 1.500000000 -36.500000000 -4.500000000 1.500000000 35.500000000 -4.500000000 1.500000000 -36.500000000 -3.500000000 1.500000000 35.500000000 -3.500000000 1.500000000 -36.500000000 -2.500000000 1.500000000 35.500000000 -2.500000000 1.500000000 -36.500000000 -1.500000000 1.500000000 35.500000000 -1.500000000 1.500000000 -36.500000000 -0.500000000 1.500000000 35.500000000 -0.500000000 1.500000000 -36.500000000 0.500000000 1.500000000 35.500000000 0.500000000 1.500000000 -36.500000000 1.500000000 1.500000000 35.500000000 1.500000000 1.500000000 -36.500000000 2.500000000 1.500000000 35.500000000 2.500000000 1.500000000 -36.500000000 3.500000000 1.500000000 35.500000000 3.500000000 1.500000000 -36.500000000 4.500000000 1.500000000 35.500000000 4.500000000 1.500000000 -36.500000000 5.500000000 1.500000000 35.500000000 5.500000000 1.500000000 -36.500000000 6.500000000 1.500000000 35.500000000 6.500000000 1.500000000 -36.500000000 7.500000000 1.500000000 35.500000000 7.500000000 1.500000000 -36.500000000 8.500000000 1.500000000 35.500000000 8.500000000 1.500000000 -36.500000000 9.500000000 1.500000000 35.500000000 9.500000000 1.500000000 -36.500000000 10.500000000 1.500000000 35.500000000 10.500000000 1.500000000 
-36.500000000 11.500000000 1.500000000 35.500000000 11.500000000 1.500000000 -36.500000000 12.500000000 1.500000000 35.500000000 12.500000000 1.500000000 -36.500000000 13.500000000 1.500000000 35.500000000 13.500000000 1.500000000 -36.500000000 14.500000000 1.500000000 35.500000000 14.500000000 1.500000000 -36.500000000 15.500000000 1.500000000 35.500000000 15.500000000 1.500000000 -36.500000000 16.500000000 1.500000000 35.500000000 16.500000000 1.500000000 -36.500000000 17.500000000 1.500000000 35.500000000 17.500000000 1.500000000 -36.500000000 18.500000000 1.500000000 35.500000000 18.500000000 1.500000000 -36.500000000 19.500000000 1.500000000 35.500000000 19.500000000 1.500000000 -36.500000000 20.500000000 1.500000000 35.500000000 20.500000000 1.500000000 -36.500000000 21.500000000 1.500000000 35.500000000 21.500000000 1.500000000 -36.500000000 22.500000000 1.500000000 35.500000000 22.500000000 1.500000000 -36.500000000 23.500000000 1.500000000 35.500000000 23.500000000 1.500000000 -36.500000000 24.500000000 1.500000000 35.500000000 24.500000000 1.500000000 -36.499996185 25.499996185 1.500000000 35.499996185 25.499996185 1.500000000 -36.499954224 26.499954224 1.500000000 35.499954224 26.499954224 1.500000000 -36.499591827 27.499591827 1.500000000 35.499591827 27.499591827 1.500000000 -36.497474670 28.497470856 1.500000000 35.497467041 28.497470856 1.500000000 -36.488403320 29.488407135 1.500000000 35.488403320 29.488407135 1.500000000 -36.458980560 30.458978653 1.500000000 35.458980560 30.458978653 1.500000000 -36.384422302 31.384418488 1.500000000 35.384422302 31.384418488 1.500000000 -36.233222961 32.233222961 1.500000000 35.233222961 32.233222961 1.500000000 -35.981101990 32.981101990 1.500000000 -35.622871399 33.622871399 1.500000000 34.622871399 33.622871399 1.500000000 34.981101990 32.981101990 1.500000000 -35.167964935 34.167964935 1.500000000 -34.622871399 34.622871399 1.500000000 33.622871399 34.622871399 1.500000000 34.167964935 34.167964935 
1.500000000 -33.981101990 34.981101990 1.500000000 -33.233222961 35.233222961 1.500000000 -32.384422302 35.384418488 1.500000000 -31.458978653 35.458976746 1.500000000 -30.488407135 35.488403320 1.500000000 -29.497472763 35.497467041 1.500000000 -28.499593735 35.499591827 1.500000000 -27.499954224 35.499954224 1.500000000 -26.499996185 35.499996185 1.500000000 -25.500000000 35.500000000 1.500000000 -24.500000000 35.500000000 1.500000000 -23.500000000 35.500000000 1.500000000 -22.500000000 35.500000000 1.500000000 -21.500000000 35.500000000 1.500000000 -20.500000000 35.500000000 1.500000000 -19.500000000 35.500000000 1.500000000 -18.500000000 35.500000000 1.500000000 -17.500000000 35.500000000 1.500000000 -16.500000000 35.500000000 1.500000000 -15.500000000 35.500000000 1.500000000 -14.500000000 35.500000000 1.500000000 -13.500000000 35.500000000 1.500000000 -12.500000000 35.500000000 1.500000000 -11.500000000 35.500000000 1.500000000 -10.500000000 35.500000000 1.500000000 -9.500000000 35.500000000 1.500000000 -8.500000000 35.500000000 1.500000000 -7.500000000 35.500000000 1.500000000 -6.500000000 35.500000000 1.500000000 -5.500000000 35.500000000 1.500000000 -4.500000000 35.500000000 1.500000000 -3.500000000 35.500000000 1.500000000 -2.500000000 35.500000000 1.500000000 -1.500000000 35.500000000 1.500000000 -0.500000000 35.500000000 1.500000000 0.500000000 35.500000000 1.500000000 1.500000000 35.500000000 1.500000000 2.500000000 35.500000000 1.500000000 3.500000000 35.500000000 1.500000000 4.500000000 35.500000000 1.500000000 5.500000000 35.500000000 1.500000000 6.500000000 35.500000000 1.500000000 7.500000000 35.500000000 1.500000000 8.500000000 35.500000000 1.500000000 9.500000000 35.500000000 1.500000000 10.500000000 35.500000000 1.500000000 11.500000000 35.500000000 1.500000000 12.500000000 35.500000000 1.500000000 13.500000000 35.500000000 1.500000000 14.500000000 35.500000000 1.500000000 15.500000000 35.500000000 1.500000000 16.500000000 35.500000000 
1.500000000 17.500000000 35.500000000 1.500000000 18.500000000 35.500000000 1.500000000 19.500000000 35.500000000 1.500000000 20.500000000 35.500000000 1.500000000 21.500000000 35.500000000 1.500000000 22.500000000 35.500000000 1.500000000 23.500000000 35.500000000 1.500000000 24.500000000 35.500000000 1.500000000 25.499996185 35.499996185 1.500000000 26.499954224 35.499954224 1.500000000 27.499591827 35.499591827 1.500000000 28.497470856 35.497474670 1.500000000 29.488407135 35.488403320 1.500000000 30.458978653 35.458980560 1.500000000 31.384418488 35.384422302 1.500000000 32.233222961 35.233222961 1.500000000 32.981101990 34.981101990 1.500000000 -33.981101990 -35.981101990 2.500000000 -33.233226776 -36.233222961 2.500000000 -32.384422302 -36.384418488 2.500000000 -31.458978653 -36.458980560 2.500000000 -30.488407135 -36.488403320 2.500000000 -29.497472763 -36.497474670 2.500000000 -28.499593735 -36.499591827 2.500000000 -27.499954224 -36.499954224 2.500000000 -26.499996185 -36.499996185 2.500000000 -25.500000000 -36.500000000 2.500000000 -24.500000000 -36.500000000 2.500000000 -23.500000000 -36.500000000 2.500000000 -22.500000000 -36.500000000 2.500000000 -21.500000000 -36.500000000 2.500000000 -20.500000000 -36.500000000 2.500000000 -19.500000000 -36.500000000 2.500000000 -18.500000000 -36.500000000 2.500000000 -17.500000000 -36.500000000 2.500000000 -16.500000000 -36.500000000 2.500000000 -15.500000000 -36.500000000 2.500000000 -14.500000000 -36.500000000 2.500000000 -13.500000000 -36.500000000 2.500000000 -12.500000000 -36.500000000 2.500000000 -11.500000000 -36.500000000 2.500000000 -10.500000000 -36.500000000 2.500000000 -9.500000000 -36.500000000 2.500000000 -8.500000000 -36.500000000 2.500000000 -7.500000000 -36.500000000 2.500000000 -6.500000000 -36.500000000 2.500000000 -5.500000000 -36.500000000 2.500000000 -4.500000000 -36.500000000 2.500000000 -3.500000000 -36.500000000 2.500000000 -2.500000000 -36.500000000 2.500000000 -1.500000000 -36.500000000 
2.500000000 -0.500000000 -36.500000000 2.500000000 0.500000000 -36.500000000 2.500000000 1.500000000 -36.500000000 2.500000000 2.500000000 -36.500000000 2.500000000 3.500000000 -36.500000000 2.500000000 4.500000000 -36.500000000 2.500000000 5.500000000 -36.500000000 2.500000000 6.500000000 -36.500000000 2.500000000 7.500000000 -36.500000000 2.500000000 8.500000000 -36.500000000 2.500000000 9.500000000 -36.500000000 2.500000000 10.500000000 -36.500000000 2.500000000 11.500000000 -36.500000000 2.500000000 12.500000000 -36.500000000 2.500000000 13.500000000 -36.500000000 2.500000000 14.500000000 -36.500000000 2.500000000 15.500000000 -36.500000000 2.500000000 16.500000000 -36.500000000 2.500000000 17.500000000 -36.500000000 2.500000000 18.500000000 -36.500000000 2.500000000 19.500000000 -36.500000000 2.500000000 20.500000000 -36.500000000 2.500000000 21.500000000 -36.500000000 2.500000000 22.500000000 -36.500000000 2.500000000 23.500000000 -36.500000000 2.500000000 24.500000000 -36.500000000 2.500000000 25.499996185 -36.499996185 2.500000000 26.499954224 -36.499954224 2.500000000 27.499591827 -36.499591827 2.500000000 28.497470856 -36.497467041 2.500000000 29.488407135 -36.488403320 2.500000000 30.458978653 -36.458980560 2.500000000 31.384418488 -36.384422302 2.500000000 32.233222961 -36.233222961 2.500000000 32.981101990 -35.981101990 2.500000000 -35.167964935 -35.167964935 2.500000000 -34.622871399 -35.622871399 2.500000000 33.622871399 -35.622871399 2.500000000 34.167964935 -35.167964935 2.500000000 -35.981101990 -33.981101990 2.500000000 -35.622871399 -34.622871399 2.500000000 34.622871399 -34.622871399 2.500000000 34.981101990 -33.981101990 2.500000000 -36.233222961 -33.233222961 2.500000000 35.233222961 -33.233226776 2.500000000 -36.384418488 -32.384422302 2.500000000 35.384418488 -32.384422302 2.500000000 -36.458976746 -31.458978653 2.500000000 35.458980560 -31.458978653 2.500000000 -36.488403320 -30.488407135 2.500000000 35.488403320 -30.488407135 2.500000000 
-36.497467041 -29.497472763 2.500000000 35.497474670 -29.497472763 2.500000000 -36.499591827 -28.499593735 2.500000000 35.499591827 -28.499593735 2.500000000 -36.499954224 -27.499954224 2.500000000 35.499954224 -27.499954224 2.500000000 -36.499996185 -26.499996185 2.500000000 35.499996185 -26.499996185 2.500000000 -36.500000000 -25.500000000 2.500000000 35.500000000 -25.500000000 2.500000000 -36.500000000 -24.500000000 2.500000000 35.500000000 -24.500000000 2.500000000 -36.500000000 -23.500000000 2.500000000 35.500000000 -23.500000000 2.500000000 -36.500000000 -22.500000000 2.500000000 35.500000000 -22.500000000 2.500000000 -36.500000000 -21.500000000 2.500000000 35.500000000 -21.500000000 2.500000000 -36.500000000 -20.500000000 2.500000000 35.500000000 -20.500000000 2.500000000 -36.500000000 -19.500000000 2.500000000 35.500000000 -19.500000000 2.500000000 -36.500000000 -18.500000000 2.500000000 35.500000000 -18.500000000 2.500000000 -36.500000000 -17.500000000 2.500000000 35.500000000 -17.500000000 2.500000000 -36.500000000 -16.500000000 2.500000000 35.500000000 -16.500000000 2.500000000 -36.500000000 -15.500000000 2.500000000 35.500000000 -15.500000000 2.500000000 -36.500000000 -14.500000000 2.500000000 35.500000000 -14.500000000 2.500000000 -36.500000000 -13.500000000 2.500000000 35.500000000 -13.500000000 2.500000000 -36.500000000 -12.500000000 2.500000000 35.500000000 -12.500000000 2.500000000 -36.500000000 -11.500000000 2.500000000 35.500000000 -11.500000000 2.500000000 -36.500000000 -10.500000000 2.500000000 35.500000000 -10.500000000 2.500000000 -36.500000000 -9.500000000 2.500000000 35.500000000 -9.500000000 2.500000000 -36.500000000 -8.500000000 2.500000000 35.500000000 -8.500000000 2.500000000 -36.500000000 -7.500000000 2.500000000 35.500000000 -7.500000000 2.500000000 -36.500000000 -6.500000000 2.500000000 35.500000000 -6.500000000 2.500000000 -36.500000000 -5.500000000 2.500000000 35.500000000 -5.500000000 2.500000000 -36.500000000 -4.500000000 
2.500000000 35.500000000 -4.500000000 2.500000000 -36.500000000 -3.500000000 2.500000000 35.500000000 -3.500000000 2.500000000 -36.500000000 -2.500000000 2.500000000 35.500000000 -2.500000000 2.500000000 -36.500000000 -1.500000000 2.500000000 35.500000000 -1.500000000 2.500000000 -36.500000000 -0.500000000 2.500000000 35.500000000 -0.500000000 2.500000000 -36.500000000 0.500000000 2.500000000 35.500000000 0.500000000 2.500000000 -36.500000000 1.500000000 2.500000000 35.500000000 1.500000000 2.500000000 -36.500000000 2.500000000 2.500000000 35.500000000 2.500000000 2.500000000 -36.500000000 3.500000000 2.500000000 35.500000000 3.500000000 2.500000000 -36.500000000 4.500000000 2.500000000 35.500000000 4.500000000 2.500000000 -36.500000000 5.500000000 2.500000000 35.500000000 5.500000000 2.500000000 -36.500000000 6.500000000 2.500000000 35.500000000 6.500000000 2.500000000 -36.500000000 7.500000000 2.500000000 35.500000000 7.500000000 2.500000000 -36.500000000 8.500000000 2.500000000 35.500000000 8.500000000 2.500000000 -36.500000000 9.500000000 2.500000000 35.500000000 9.500000000 2.500000000 -36.500000000 10.500000000 2.500000000 35.500000000 10.500000000 2.500000000 -36.500000000 11.500000000 2.500000000 35.500000000 11.500000000 2.500000000 -36.500000000 12.500000000 2.500000000 35.500000000 12.500000000 2.500000000 -36.500000000 13.500000000 2.500000000 35.500000000 13.500000000 2.500000000 -36.500000000 14.500000000 2.500000000 35.500000000 14.500000000 2.500000000 -36.500000000 15.500000000 2.500000000 35.500000000 15.500000000 2.500000000 -36.500000000 16.500000000 2.500000000 35.500000000 16.500000000 2.500000000 -36.500000000 17.500000000 2.500000000 35.500000000 17.500000000 2.500000000 -36.500000000 18.500000000 2.500000000 35.500000000 18.500000000 2.500000000 -36.500000000 19.500000000 2.500000000 35.500000000 19.500000000 2.500000000 -36.500000000 20.500000000 2.500000000 35.500000000 20.500000000 2.500000000 -36.500000000 21.500000000 2.500000000 
35.500000000 21.500000000 2.500000000 -36.500000000 22.500000000 2.500000000 35.500000000 22.500000000 2.500000000 -36.500000000 23.500000000 2.500000000 35.500000000 23.500000000 2.500000000 -36.500000000 24.500000000 2.500000000 35.500000000 24.500000000 2.500000000 -36.499996185 25.499996185 2.500000000 35.499996185 25.499996185 2.500000000 -36.499954224 26.499954224 2.500000000 35.499954224 26.499954224 2.500000000 -36.499591827 27.499591827 2.500000000 35.499591827 27.499591827 2.500000000 -36.497474670 28.497470856 2.500000000 35.497467041 28.497470856 2.500000000 -36.488403320 29.488407135 2.500000000 35.488403320 29.488407135 2.500000000 -36.458980560 30.458978653 2.500000000 35.458980560 30.458978653 2.500000000 -36.384422302 31.384418488 2.500000000 35.384422302 31.384418488 2.500000000 -36.233222961 32.233222961 2.500000000 35.233222961 32.233222961 2.500000000 -35.981101990 32.981101990 2.500000000 -35.622871399 33.622871399 2.500000000 34.622871399 33.622871399 2.500000000 34.981101990 32.981101990 2.500000000 -35.167964935 34.167964935 2.500000000 -34.622871399 34.622871399 2.500000000 33.622871399 34.622871399 2.500000000 34.167964935 34.167964935 2.500000000 -33.981101990 34.981101990 2.500000000 -33.233222961 35.233222961 2.500000000 -32.384422302 35.384418488 2.500000000 -31.458978653 35.458976746 2.500000000 -30.488407135 35.488403320 2.500000000 -29.497472763 35.497467041 2.500000000 -28.499593735 35.499591827 2.500000000 -27.499954224 35.499954224 2.500000000 -26.499996185 35.499996185 2.500000000 -25.500000000 35.500000000 2.500000000 -24.500000000 35.500000000 2.500000000 -23.500000000 35.500000000 2.500000000 -22.500000000 35.500000000 2.500000000 -21.500000000 35.500000000 2.500000000 -20.500000000 35.500000000 2.500000000 -19.500000000 35.500000000 2.500000000 -18.500000000 35.500000000 2.500000000 -17.500000000 35.500000000 2.500000000 -16.500000000 35.500000000 2.500000000 -15.500000000 35.500000000 2.500000000 -14.500000000 35.500000000 
2.500000000 -13.500000000 35.500000000 2.500000000 -12.500000000 35.500000000 2.500000000 -11.500000000 35.500000000 2.500000000 -10.500000000 35.500000000 2.500000000 -9.500000000 35.500000000 2.500000000 -8.500000000 35.500000000 2.500000000 -7.500000000 35.500000000 2.500000000 -6.500000000 35.500000000 2.500000000 -5.500000000 35.500000000 2.500000000 -4.500000000 35.500000000 2.500000000 -3.500000000 35.500000000 2.500000000 -2.500000000 35.500000000 2.500000000 -1.500000000 35.500000000 2.500000000 -0.500000000 35.500000000 2.500000000 0.500000000 35.500000000 2.500000000 1.500000000 35.500000000 2.500000000 2.500000000 35.500000000 2.500000000 3.500000000 35.500000000 2.500000000 4.500000000 35.500000000 2.500000000 5.500000000 35.500000000 2.500000000 6.500000000 35.500000000 2.500000000 7.500000000 35.500000000 2.500000000 8.500000000 35.500000000 2.500000000 9.500000000 35.500000000 2.500000000 10.500000000 35.500000000 2.500000000 11.500000000 35.500000000 2.500000000 12.500000000 35.500000000 2.500000000 13.500000000 35.500000000 2.500000000 14.500000000 35.500000000 2.500000000 15.500000000 35.500000000 2.500000000 16.500000000 35.500000000 2.500000000 17.500000000 35.500000000 2.500000000 18.500000000 35.500000000 2.500000000 19.500000000 35.500000000 2.500000000 20.500000000 35.500000000 2.500000000 21.500000000 35.500000000 2.500000000 22.500000000 35.500000000 2.500000000 23.500000000 35.500000000 2.500000000 24.500000000 35.500000000 2.500000000 25.499996185 35.499996185 2.500000000 26.499954224 35.499954224 2.500000000 27.499591827 35.499591827 2.500000000 28.497470856 35.497474670 2.500000000 29.488407135 35.488403320 2.500000000 30.458978653 35.458980560 2.500000000 31.384418488 35.384422302 2.500000000 32.233222961 35.233222961 2.500000000 32.981101990 34.981101990 2.500000000 -33.981101990 -35.981101990 3.500000000 -33.233226776 -36.233222961 3.500000000 -32.384422302 -36.384418488 3.500000000 -31.458978653 -36.458980560 3.500000000 
-30.488407135 -36.488403320 3.500000000 -29.497472763 -36.497474670 3.500000000 -28.499593735 -36.499591827 3.500000000 -27.499954224 -36.499954224 3.500000000 -26.499996185 -36.499996185 3.500000000 -25.500000000 -36.500000000 3.500000000 -24.500000000 -36.500000000 3.500000000 -23.500000000 -36.500000000 3.500000000 -22.500000000 -36.500000000 3.500000000 -21.500000000 -36.500000000 3.500000000 -20.500000000 -36.500000000 3.500000000 -19.500000000 -36.500000000 3.500000000 -18.500000000 -36.500000000 3.500000000 -17.500000000 -36.500000000 3.500000000 -16.500000000 -36.500000000 3.500000000 -15.500000000 -36.500000000 3.500000000 -14.500000000 -36.500000000 3.500000000 -13.500000000 -36.500000000 3.500000000 -12.500000000 -36.500000000 3.500000000 -11.500000000 -36.500000000 3.500000000 -10.500000000 -36.500000000 3.500000000 -9.500000000 -36.500000000 3.500000000 -8.500000000 -36.500000000 3.500000000 -7.500000000 -36.500000000 3.500000000 -6.500000000 -36.500000000 3.500000000 -5.500000000 -36.500000000 3.500000000 -4.500000000 -36.500000000 3.500000000 -3.500000000 -36.500000000 3.500000000 -2.500000000 -36.500000000 3.500000000 -1.500000000 -36.500000000 3.500000000 -0.500000000 -36.500000000 3.500000000 0.500000000 -36.500000000 3.500000000 1.500000000 -36.500000000 3.500000000 2.500000000 -36.500000000 3.500000000 3.500000000 -36.500000000 3.500000000 4.500000000 -36.500000000 3.500000000 5.500000000 -36.500000000 3.500000000 6.500000000 -36.500000000 3.500000000 7.500000000 -36.500000000 3.500000000 8.500000000 -36.500000000 3.500000000 9.500000000 -36.500000000 3.500000000 10.500000000 -36.500000000 3.500000000 11.500000000 -36.500000000 3.500000000 12.500000000 -36.500000000 3.500000000 13.500000000 -36.500000000 3.500000000 14.500000000 -36.500000000 3.500000000 15.500000000 -36.500000000 3.500000000 16.500000000 -36.500000000 3.500000000 17.500000000 -36.500000000 3.500000000 18.500000000 -36.500000000 3.500000000 19.500000000 -36.500000000 3.500000000 
20.500000000 -36.500000000 3.500000000 21.500000000 -36.500000000 3.500000000 22.500000000 -36.500000000 3.500000000 23.500000000 -36.500000000 3.500000000 24.500000000 -36.500000000 3.500000000 25.499996185 -36.499996185 3.500000000 26.499954224 -36.499954224 3.500000000 27.499591827 -36.499591827 3.500000000 28.497470856 -36.497467041 3.500000000 29.488407135 -36.488403320 3.500000000 30.458978653 -36.458980560 3.500000000 31.384418488 -36.384422302 3.500000000 32.233222961 -36.233222961 3.500000000 32.981101990 -35.981101990 3.500000000 -35.167964935 -35.167964935 3.500000000 -34.622871399 -35.622871399 3.500000000 33.622871399 -35.622871399 3.500000000 34.167964935 -35.167964935 3.500000000 -35.981101990 -33.981101990 3.500000000 -35.622871399 -34.622871399 3.500000000 34.622871399 -34.622871399 3.500000000 34.981101990 -33.981101990 3.500000000 -36.233222961 -33.233222961 3.500000000 35.233222961 -33.233226776 3.500000000 -36.384418488 -32.384422302 3.500000000 35.384418488 -32.384422302 3.500000000 -36.458976746 -31.458978653 3.500000000 35.458980560 -31.458978653 3.500000000 -36.488403320 -30.488407135 3.500000000 35.488403320 -30.488407135 3.500000000 -36.497467041 -29.497472763 3.500000000 35.497474670 -29.497472763 3.500000000 -36.499591827 -28.499593735 3.500000000 35.499591827 -28.499593735 3.500000000 -36.499954224 -27.499954224 3.500000000 35.499954224 -27.499954224 3.500000000 -36.499996185 -26.499996185 3.500000000 35.499996185 -26.499996185 3.500000000 -36.500000000 -25.500000000 3.500000000 35.500000000 -25.500000000 3.500000000 -36.500000000 -24.500000000 3.500000000 35.500000000 -24.500000000 3.500000000 -36.500000000 -23.500000000 3.500000000 35.500000000 -23.500000000 3.500000000 -36.500000000 -22.500000000 3.500000000 35.500000000 -22.500000000 3.500000000 -36.500000000 -21.500000000 3.500000000 35.500000000 -21.500000000 3.500000000 -36.500000000 -20.500000000 3.500000000 35.500000000 -20.500000000 3.500000000 -36.500000000 -19.500000000 
3.500000000 35.500000000 -19.500000000 3.500000000 -36.500000000 -18.500000000 3.500000000 35.500000000 -18.500000000 3.500000000 -36.500000000 -17.500000000 3.500000000 35.500000000 -17.500000000 3.500000000 -36.500000000 -16.500000000 3.500000000 35.500000000 -16.500000000 3.500000000 -36.500000000 -15.500000000 3.500000000 35.500000000 -15.500000000 3.500000000 -36.500000000 -14.500000000 3.500000000 35.500000000 -14.500000000 3.500000000 -36.500000000 -13.500000000 3.500000000 35.500000000 -13.500000000 3.500000000 -36.500000000 -12.500000000 3.500000000 35.500000000 -12.500000000 3.500000000 -36.500000000 -11.500000000 3.500000000 35.500000000 -11.500000000 3.500000000 -36.500000000 -10.500000000 3.500000000 35.500000000 -10.500000000 3.500000000 -36.500000000 -9.500000000 3.500000000 35.500000000 -9.500000000 3.500000000 -36.500000000 -8.500000000 3.500000000 35.500000000 -8.500000000 3.500000000 -36.500000000 -7.500000000 3.500000000 35.500000000 -7.500000000 3.500000000 -36.500000000 -6.500000000 3.500000000 35.500000000 -6.500000000 3.500000000 -36.500000000 -5.500000000 3.500000000 35.500000000 -5.500000000 3.500000000 -36.500000000 -4.500000000 3.500000000 35.500000000 -4.500000000 3.500000000 -36.500000000 -3.500000000 3.500000000 35.500000000 -3.500000000 3.500000000 -36.500000000 -2.500000000 3.500000000 35.500000000 -2.500000000 3.500000000 -36.500000000 -1.500000000 3.500000000 35.500000000 -1.500000000 3.500000000 -36.500000000 -0.500000000 3.500000000 35.500000000 -0.500000000 3.500000000 -36.500000000 0.500000000 3.500000000 35.500000000 0.500000000 3.500000000 -36.500000000 1.500000000 3.500000000 35.500000000 1.500000000 3.500000000 -36.500000000 2.500000000 3.500000000 35.500000000 2.500000000 3.500000000 -36.500000000 3.500000000 3.500000000 35.500000000 3.500000000 3.500000000 -36.500000000 4.500000000 3.500000000 35.500000000 4.500000000 3.500000000 -36.500000000 5.500000000 3.500000000 35.500000000 5.500000000 3.500000000 -36.500000000 
6.500000000 3.500000000 35.500000000 6.500000000 3.500000000 -36.500000000 7.500000000 3.500000000 35.500000000 7.500000000 3.500000000 -36.500000000 8.500000000 3.500000000 35.500000000 8.500000000 3.500000000 -36.500000000 9.500000000 3.500000000 35.500000000 9.500000000 3.500000000 -36.500000000 10.500000000 3.500000000 35.500000000 10.500000000 3.500000000 -36.500000000 11.500000000 3.500000000 35.500000000 11.500000000 3.500000000 -36.500000000 12.500000000 3.500000000 35.500000000 12.500000000 3.500000000 -36.500000000 13.500000000 3.500000000 35.500000000 13.500000000 3.500000000 -36.500000000 14.500000000 3.500000000 35.500000000 14.500000000 3.500000000 -36.500000000 15.500000000 3.500000000 35.500000000 15.500000000 3.500000000 -36.500000000 16.500000000 3.500000000 35.500000000 16.500000000 3.500000000 -36.500000000 17.500000000 3.500000000 35.500000000 17.500000000 3.500000000 -36.500000000 18.500000000 3.500000000 35.500000000 18.500000000 3.500000000 -36.500000000 19.500000000 3.500000000 35.500000000 19.500000000 3.500000000 -36.500000000 20.500000000 3.500000000 35.500000000 20.500000000 3.500000000 -36.500000000 21.500000000 3.500000000 35.500000000 21.500000000 3.500000000 -36.500000000 22.500000000 3.500000000 35.500000000 22.500000000 3.500000000 -36.500000000 23.500000000 3.500000000 35.500000000 23.500000000 3.500000000 -36.500000000 24.500000000 3.500000000 35.500000000 24.500000000 3.500000000 -36.499996185 25.499996185 3.500000000 35.499996185 25.499996185 3.500000000 -36.499954224 26.499954224 3.500000000 35.499954224 26.499954224 3.500000000 -36.499591827 27.499591827 3.500000000 35.499591827 27.499591827 3.500000000 -36.497474670 28.497470856 3.500000000 35.497467041 28.497470856 3.500000000 -36.488403320 29.488407135 3.500000000 35.488403320 29.488407135 3.500000000 -36.458980560 30.458978653 3.500000000 35.458980560 30.458978653 3.500000000 -36.384422302 31.384418488 3.500000000 35.384422302 31.384418488 3.500000000 -36.233222961 
32.233222961 3.500000000 35.233222961 32.233222961 3.500000000 -35.981101990 32.981101990 3.500000000 -35.622871399 33.622871399 3.500000000 34.622871399 33.622871399 3.500000000 34.981101990 32.981101990 3.500000000 -35.167964935 34.167964935 3.500000000 -34.622871399 34.622871399 3.500000000 33.622871399 34.622871399 3.500000000 34.167964935 34.167964935 3.500000000 -33.981101990 34.981101990 3.500000000 -33.233222961 35.233222961 3.500000000 -32.384422302 35.384418488 3.500000000 -31.458978653 35.458976746 3.500000000 -30.488407135 35.488403320 3.500000000 -29.497472763 35.497467041 3.500000000 -28.499593735 35.499591827 3.500000000 -27.499954224 35.499954224 3.500000000 -26.499996185 35.499996185 3.500000000 -25.500000000 35.500000000 3.500000000 -24.500000000 35.500000000 3.500000000 -23.500000000 35.500000000 3.500000000 -22.500000000 35.500000000 3.500000000 -21.500000000 35.500000000 3.500000000 -20.500000000 35.500000000 3.500000000 -19.500000000 35.500000000 3.500000000 -18.500000000 35.500000000 3.500000000 -17.500000000 35.500000000 3.500000000 -16.500000000 35.500000000 3.500000000 -15.500000000 35.500000000 3.500000000 -14.500000000 35.500000000 3.500000000 -13.500000000 35.500000000 3.500000000 -12.500000000 35.500000000 3.500000000 -11.500000000 35.500000000 3.500000000 -10.500000000 35.500000000 3.500000000 -9.500000000 35.500000000 3.500000000 -8.500000000 35.500000000 3.500000000 -7.500000000 35.500000000 3.500000000 -6.500000000 35.500000000 3.500000000 -5.500000000 35.500000000 3.500000000 -4.500000000 35.500000000 3.500000000 -3.500000000 35.500000000 3.500000000 -2.500000000 35.500000000 3.500000000 -1.500000000 35.500000000 3.500000000 -0.500000000 35.500000000 3.500000000 0.500000000 35.500000000 3.500000000 1.500000000 35.500000000 3.500000000 2.500000000 35.500000000 3.500000000 3.500000000 35.500000000 3.500000000 4.500000000 35.500000000 3.500000000 5.500000000 35.500000000 3.500000000 6.500000000 35.500000000 3.500000000 7.500000000 
35.500000000 3.500000000 8.500000000 35.500000000 3.500000000 9.500000000 35.500000000 3.500000000 10.500000000 35.500000000 3.500000000 11.500000000 35.500000000 3.500000000 12.500000000 35.500000000 3.500000000 13.500000000 35.500000000 3.500000000 14.500000000 35.500000000 3.500000000 15.500000000 35.500000000 3.500000000 16.500000000 35.500000000 3.500000000 17.500000000 35.500000000 3.500000000 18.500000000 35.500000000 3.500000000 19.500000000 35.500000000 3.500000000 20.500000000 35.500000000 3.500000000 21.500000000 35.500000000 3.500000000 22.500000000 35.500000000 3.500000000 23.500000000 35.500000000 3.500000000 24.500000000 35.500000000 3.500000000 25.499996185 35.499996185 3.500000000 26.499954224 35.499954224 3.500000000 27.499591827 35.499591827 3.500000000 28.497470856 35.497474670 3.500000000 29.488407135 35.488403320 3.500000000 30.458978653 35.458980560 3.500000000 31.384418488 35.384422302 3.500000000 32.233222961 35.233222961 3.500000000 32.981101990 34.981101990 3.500000000 -33.981101990 -35.981101990 4.500000000 -33.233226776 -36.233222961 4.500000000 -32.384422302 -36.384418488 4.500000000 -31.458978653 -36.458980560 4.500000000 -30.488407135 -36.488403320 4.500000000 -29.497472763 -36.497474670 4.500000000 -28.499593735 -36.499591827 4.500000000 -27.499954224 -36.499954224 4.500000000 -26.499996185 -36.499996185 4.500000000 -25.500000000 -36.500000000 4.500000000 -24.500000000 -36.500000000 4.500000000 -23.500000000 -36.500000000 4.500000000 -22.500000000 -36.500000000 4.500000000 -21.500000000 -36.500000000 4.500000000 -20.500000000 -36.500000000 4.500000000 -19.500000000 -36.500000000 4.500000000 -18.500000000 -36.500000000 4.500000000 -17.500000000 -36.500000000 4.500000000 -16.500000000 -36.500000000 4.500000000 -15.500000000 -36.500000000 4.500000000 -14.500000000 -36.500000000 4.500000000 -13.500000000 -36.500000000 4.500000000 -12.500000000 -36.500000000 4.500000000 -11.500000000 -36.500000000 4.500000000 -10.500000000 -36.500000000 
4.500000000 -9.500000000 -36.500000000 4.500000000 -8.500000000 -36.500000000 4.500000000 -7.500000000 -36.500000000 4.500000000 -6.500000000 -36.500000000 4.500000000 -5.500000000 -36.500000000 4.500000000 -4.500000000 -36.500000000 4.500000000 -3.500000000 -36.500000000 4.500000000 -2.500000000 -36.500000000 4.500000000 -1.500000000 -36.500000000 4.500000000 -0.500000000 -36.500000000 4.500000000 0.500000000 -36.500000000 4.500000000 1.500000000 -36.500000000 4.500000000 2.500000000 -36.500000000 4.500000000 3.500000000 -36.500000000 4.500000000 4.500000000 -36.500000000 4.500000000 5.500000000 -36.500000000 4.500000000 6.500000000 -36.500000000 4.500000000 7.500000000 -36.500000000 4.500000000 8.500000000 -36.500000000 4.500000000 9.500000000 -36.500000000 4.500000000 10.500000000 -36.500000000 4.500000000 11.500000000 -36.500000000 4.500000000 12.500000000 -36.500000000 4.500000000 13.500000000 -36.500000000 4.500000000 14.500000000 -36.500000000 4.500000000 15.500000000 -36.500000000 4.500000000 16.500000000 -36.500000000 4.500000000 17.500000000 -36.500000000 4.500000000 18.500000000 -36.500000000 4.500000000 19.500000000 -36.500000000 4.500000000 20.500000000 -36.500000000 4.500000000 21.500000000 -36.500000000 4.500000000 22.500000000 -36.500000000 4.500000000 23.500000000 -36.500000000 4.500000000 24.500000000 -36.500000000 4.500000000 25.499996185 -36.499996185 4.500000000 26.499954224 -36.499954224 4.500000000 27.499591827 -36.499591827 4.500000000 28.497470856 -36.497467041 4.500000000 29.488407135 -36.488403320 4.500000000 30.458978653 -36.458980560 4.500000000 31.384418488 -36.384422302 4.500000000 32.233222961 -36.233222961 4.500000000 32.981101990 -35.981101990 4.500000000 -35.167964935 -35.167964935 4.500000000 -34.622871399 -35.622871399 4.500000000 33.622871399 -35.622871399 4.500000000 34.167964935 -35.167964935 4.500000000 -35.981101990 -33.981101990 4.500000000 -35.622871399 -34.622871399 4.500000000 34.622871399 -34.622871399 4.500000000 
34.981101990 -33.981101990 4.500000000 -36.233222961 -33.233222961 4.500000000 35.233222961 -33.233226776 4.500000000 -36.384418488 -32.384422302 4.500000000 35.384418488 -32.384422302 4.500000000 -36.458976746 -31.458978653 4.500000000 35.458980560 -31.458978653 4.500000000 -36.488403320 -30.488407135 4.500000000 35.488403320 -30.488407135 4.500000000 -36.497467041 -29.497472763 4.500000000 35.497474670 -29.497472763 4.500000000 -36.499591827 -28.499593735 4.500000000 35.499591827 -28.499593735 4.500000000 -36.499954224 -27.499954224 4.500000000 35.499954224 -27.499954224 4.500000000 -36.499996185 -26.499996185 4.500000000 35.499996185 -26.499996185 4.500000000 -36.500000000 -25.500000000 4.500000000 35.500000000 -25.500000000 4.500000000 -36.500000000 -24.500000000 4.500000000 35.500000000 -24.500000000 4.500000000 -36.500000000 -23.500000000 4.500000000 35.500000000 -23.500000000 4.500000000 -36.500000000 -22.500000000 4.500000000 35.500000000 -22.500000000 4.500000000 -36.500000000 -21.500000000 4.500000000 35.500000000 -21.500000000 4.500000000 -36.500000000 -20.500000000 4.500000000 35.500000000 -20.500000000 4.500000000 -36.500000000 -19.500000000 4.500000000 35.500000000 -19.500000000 4.500000000 -36.500000000 -18.500000000 4.500000000 35.500000000 -18.500000000 4.500000000 -36.500000000 -17.500000000 4.500000000 35.500000000 -17.500000000 4.500000000 -36.500000000 -16.500000000 4.500000000 35.500000000 -16.500000000 4.500000000 -36.500000000 -15.500000000 4.500000000 35.500000000 -15.500000000 4.500000000 -36.500000000 -14.500000000 4.500000000 35.500000000 -14.500000000 4.500000000 -36.500000000 -13.500000000 4.500000000 35.500000000 -13.500000000 4.500000000 -36.500000000 -12.500000000 4.500000000 35.500000000 -12.500000000 4.500000000 -36.500000000 -11.500000000 4.500000000 35.500000000 -11.500000000 4.500000000 -36.500000000 -10.500000000 4.500000000 35.500000000 -10.500000000 4.500000000 -36.500000000 -9.500000000 4.500000000 35.500000000 -9.500000000 
4.500000000 -36.500000000 -8.500000000 4.500000000 35.500000000 -8.500000000 4.500000000 -36.500000000 -7.500000000 4.500000000 35.500000000 -7.500000000 4.500000000 -36.500000000 -6.500000000 4.500000000 35.500000000 -6.500000000 4.500000000 -36.500000000 -5.500000000 4.500000000 35.500000000 -5.500000000 4.500000000 -36.500000000 -4.500000000 4.500000000 35.500000000 -4.500000000 4.500000000 -36.500000000 -3.500000000 4.500000000 35.500000000 -3.500000000 4.500000000 -36.500000000 -2.500000000 4.500000000 35.500000000 -2.500000000 4.500000000 -36.500000000 -1.500000000 4.500000000 35.500000000 -1.500000000 4.500000000 -36.500000000 -0.500000000 4.500000000 35.500000000 -0.500000000 4.500000000 -36.500000000 0.500000000 4.500000000 35.500000000 0.500000000 4.500000000 -36.500000000 1.500000000 4.500000000 35.500000000 1.500000000 4.500000000 -36.500000000 2.500000000 4.500000000 35.500000000 2.500000000 4.500000000 -36.500000000 3.500000000 4.500000000 35.500000000 3.500000000 4.500000000 -36.500000000 4.500000000 4.500000000 35.500000000 4.500000000 4.500000000 -36.500000000 5.500000000 4.500000000 35.500000000 5.500000000 4.500000000 -36.500000000 6.500000000 4.500000000 35.500000000 6.500000000 4.500000000 -36.500000000 7.500000000 4.500000000 35.500000000 7.500000000 4.500000000 -36.500000000 8.500000000 4.500000000 35.500000000 8.500000000 4.500000000 -36.500000000 9.500000000 4.500000000 35.500000000 9.500000000 4.500000000 -36.500000000 10.500000000 4.500000000 35.500000000 10.500000000 4.500000000 -36.500000000 11.500000000 4.500000000 35.500000000 11.500000000 4.500000000 -36.500000000 12.500000000 4.500000000 35.500000000 12.500000000 4.500000000 -36.500000000 13.500000000 4.500000000 35.500000000 13.500000000 4.500000000 -36.500000000 14.500000000 4.500000000 35.500000000 14.500000000 4.500000000 -36.500000000 15.500000000 4.500000000 35.500000000 15.500000000 4.500000000 -36.500000000 16.500000000 4.500000000 35.500000000 16.500000000 4.500000000 
-36.500000000 17.500000000 4.500000000 35.500000000 17.500000000 4.500000000 -36.500000000 18.500000000 4.500000000 35.500000000 18.500000000 4.500000000 -36.500000000 19.500000000 4.500000000 35.500000000 19.500000000 4.500000000 -36.500000000 20.500000000 4.500000000 35.500000000 20.500000000 4.500000000 -36.500000000 21.500000000 4.500000000 35.500000000 21.500000000 4.500000000 -36.500000000 22.500000000 4.500000000 35.500000000 22.500000000 4.500000000 -36.500000000 23.500000000 4.500000000 35.500000000 23.500000000 4.500000000 -36.500000000 24.500000000 4.500000000 35.500000000 24.500000000 4.500000000 -36.499996185 25.499996185 4.500000000 35.499996185 25.499996185 4.500000000 -36.499954224 26.499954224 4.500000000 35.499954224 26.499954224 4.500000000 -36.499591827 27.499591827 4.500000000 35.499591827 27.499591827 4.500000000 -36.497474670 28.497470856 4.500000000 35.497467041 28.497470856 4.500000000 -36.488403320 29.488407135 4.500000000 35.488403320 29.488407135 4.500000000 -36.458980560 30.458978653 4.500000000 35.458980560 30.458978653 4.500000000 -36.384422302 31.384418488 4.500000000 35.384422302 31.384418488 4.500000000 -36.233222961 32.233222961 4.500000000 35.233222961 32.233222961 4.500000000 -35.981101990 32.981101990 4.500000000 -35.622871399 33.622871399 4.500000000 34.622871399 33.622871399 4.500000000 34.981101990 32.981101990 4.500000000 -35.167964935 34.167964935 4.500000000 -34.622871399 34.622871399 4.500000000 33.622871399 34.622871399 4.500000000 34.167964935 34.167964935 4.500000000 -33.981101990 34.981101990 4.500000000 -33.233222961 35.233222961 4.500000000 -32.384422302 35.384418488 4.500000000 -31.458978653 35.458976746 4.500000000 -30.488407135 35.488403320 4.500000000 -29.497472763 35.497467041 4.500000000 -28.499593735 35.499591827 4.500000000 -27.499954224 35.499954224 4.500000000 -26.499996185 35.499996185 4.500000000 -25.500000000 35.500000000 4.500000000 -24.500000000 35.500000000 4.500000000 -23.500000000 35.500000000 
4.500000000 -22.500000000 35.500000000 4.500000000 -21.500000000 35.500000000 4.500000000 -20.500000000 35.500000000 4.500000000 -19.500000000 35.500000000 4.500000000 -18.500000000 35.500000000 4.500000000 -17.500000000 35.500000000 4.500000000 -16.500000000 35.500000000 4.500000000 -15.500000000 35.500000000 4.500000000 -14.500000000 35.500000000 4.500000000 -13.500000000 35.500000000 4.500000000 -12.500000000 35.500000000 4.500000000 -11.500000000 35.500000000 4.500000000 -10.500000000 35.500000000 4.500000000 -9.500000000 35.500000000 4.500000000 -8.500000000 35.500000000 4.500000000 -7.500000000 35.500000000 4.500000000 -6.500000000 35.500000000 4.500000000 -5.500000000 35.500000000 4.500000000 -4.500000000 35.500000000 4.500000000 -3.500000000 35.500000000 4.500000000 -2.500000000 35.500000000 4.500000000 -1.500000000 35.500000000 4.500000000 -0.500000000 35.500000000 4.500000000 0.500000000 35.500000000 4.500000000 1.500000000 35.500000000 4.500000000 2.500000000 35.500000000 4.500000000 3.500000000 35.500000000 4.500000000 4.500000000 35.500000000 4.500000000 5.500000000 35.500000000 4.500000000 6.500000000 35.500000000 4.500000000 7.500000000 35.500000000 4.500000000 8.500000000 35.500000000 4.500000000 9.500000000 35.500000000 4.500000000 10.500000000 35.500000000 4.500000000 11.500000000 35.500000000 4.500000000 12.500000000 35.500000000 4.500000000 13.500000000 35.500000000 4.500000000 14.500000000 35.500000000 4.500000000 15.500000000 35.500000000 4.500000000 16.500000000 35.500000000 4.500000000 17.500000000 35.500000000 4.500000000 18.500000000 35.500000000 4.500000000 19.500000000 35.500000000 4.500000000 20.500000000 35.500000000 4.500000000 21.500000000 35.500000000 4.500000000 22.500000000 35.500000000 4.500000000 23.500000000 35.500000000 4.500000000 24.500000000 35.500000000 4.500000000 25.499996185 35.499996185 4.500000000 26.499954224 35.499954224 4.500000000 27.499591827 35.499591827 4.500000000 28.497470856 35.497474670 4.500000000 
29.488407135 35.488403320 4.500000000 30.458978653 35.458980560 4.500000000 31.384418488 35.384422302 4.500000000 32.233222961 35.233222961 4.500000000 32.981101990 34.981101990 4.500000000 -33.981101990 -35.981101990 5.500000000 -33.233226776 -36.233222961 5.500000000 -32.384422302 -36.384418488 5.500000000 -31.458978653 -36.458980560 5.500000000 -30.488407135 -36.488403320 5.500000000 -29.497472763 -36.497474670 5.500000000 -28.499593735 -36.499591827 5.500000000 -27.499954224 -36.499954224 5.500000000 -26.499996185 -36.499996185 5.500000000 -25.500000000 -36.500000000 5.500000000 -24.500000000 -36.500000000 5.500000000 -23.500000000 -36.500000000 5.500000000 -22.500000000 -36.500000000 5.500000000 -21.500000000 -36.500000000 5.500000000 -20.500000000 -36.500000000 5.500000000 -19.500000000 -36.500000000 5.500000000 -18.500000000 -36.500000000 5.500000000 -17.500000000 -36.500000000 5.500000000 -16.500000000 -36.500000000 5.500000000 -15.500000000 -36.500000000 5.500000000 -14.500000000 -36.500000000 5.500000000 -13.500000000 -36.500000000 5.500000000 -12.500000000 -36.500000000 5.500000000 -11.500000000 -36.500000000 5.500000000 -10.500000000 -36.500000000 5.500000000 -9.500000000 -36.500000000 5.500000000 -8.500000000 -36.500000000 5.500000000 -7.500000000 -36.500000000 5.500000000 -6.500000000 -36.500000000 5.500000000 -5.500000000 -36.500000000 5.500000000 -4.500000000 -36.500000000 5.500000000 -3.500000000 -36.500000000 5.500000000 -2.500000000 -36.500000000 5.500000000 -1.500000000 -36.500000000 5.500000000 -0.500000000 -36.500000000 5.500000000 0.500000000 -36.500000000 5.500000000 1.500000000 -36.500000000 5.500000000 2.500000000 -36.500000000 5.500000000 3.500000000 -36.500000000 5.500000000 4.500000000 -36.500000000 5.500000000 5.500000000 -36.500000000 5.500000000 6.500000000 -36.500000000 5.500000000 7.500000000 -36.500000000 5.500000000 8.500000000 -36.500000000 5.500000000 9.500000000 -36.500000000 5.500000000 10.500000000 -36.500000000 5.500000000 
11.500000000 -36.500000000 5.500000000 12.500000000 -36.500000000 5.500000000 13.500000000 -36.500000000 5.500000000 14.500000000 -36.500000000 5.500000000 15.500000000 -36.500000000 5.500000000 16.500000000 -36.500000000 5.500000000 17.500000000 -36.500000000 5.500000000 18.500000000 -36.500000000 5.500000000 19.500000000 -36.500000000 5.500000000 20.500000000 -36.500000000 5.500000000 21.500000000 -36.500000000 5.500000000 22.500000000 -36.500000000 5.500000000 23.500000000 -36.500000000 5.500000000 24.500000000 -36.500000000 5.500000000 25.499996185 -36.499996185 5.500000000 26.499954224 -36.499954224 5.500000000 27.499591827 -36.499591827 5.500000000 28.497470856 -36.497467041 5.500000000 29.488407135 -36.488403320 5.500000000 30.458978653 -36.458980560 5.500000000 31.384418488 -36.384422302 5.500000000 32.233222961 -36.233222961 5.500000000 32.981101990 -35.981101990 5.500000000 -35.167964935 -35.167964935 5.500000000 -34.622871399 -35.622871399 5.500000000 33.622871399 -35.622871399 5.500000000 34.167964935 -35.167964935 5.500000000 -35.981101990 -33.981101990 5.500000000 -35.622871399 -34.622871399 5.500000000 34.622871399 -34.622871399 5.500000000 34.981101990 -33.981101990 5.500000000 -36.233222961 -33.233222961 5.500000000 35.233222961 -33.233226776 5.500000000 -36.384418488 -32.384422302 5.500000000 35.384418488 -32.384422302 5.500000000 -36.458976746 -31.458978653 5.500000000 35.458980560 -31.458978653 5.500000000 -36.488403320 -30.488407135 5.500000000 35.488403320 -30.488407135 5.500000000 -36.497467041 -29.497472763 5.500000000 35.497474670 -29.497472763 5.500000000 -36.499591827 -28.499593735 5.500000000 35.499591827 -28.499593735 5.500000000 -36.499954224 -27.499954224 5.500000000 35.499954224 -27.499954224 5.500000000 -36.499996185 -26.499996185 5.500000000 35.499996185 -26.499996185 5.500000000 -36.500000000 -25.500000000 5.500000000 35.500000000 -25.500000000 5.500000000 -36.500000000 -24.500000000 5.500000000 35.500000000 -24.500000000 
5.500000000 -36.500000000 -23.500000000 5.500000000 35.500000000 -23.500000000 5.500000000 -36.500000000 -22.500000000 5.500000000 35.500000000 -22.500000000 5.500000000 -36.500000000 -21.500000000 5.500000000 35.500000000 -21.500000000 5.500000000 -36.500000000 -20.500000000 5.500000000 35.500000000 -20.500000000 5.500000000 -36.500000000 -19.500000000 5.500000000 35.500000000 -19.500000000 5.500000000 -36.500000000 -18.500000000 5.500000000 35.500000000 -18.500000000 5.500000000 -36.500000000 -17.500000000 5.500000000 35.500000000 -17.500000000 5.500000000 -36.500000000 -16.500000000 5.500000000 35.500000000 -16.500000000 5.500000000 -36.500000000 -15.500000000 5.500000000 35.500000000 -15.500000000 5.500000000 -36.500000000 -14.500000000 5.500000000 35.500000000 -14.500000000 5.500000000 -36.500000000 -13.500000000 5.500000000 35.500000000 -13.500000000 5.500000000 -36.500000000 -12.500000000 5.500000000 35.500000000 -12.500000000 5.500000000 -36.500000000 -11.500000000 5.500000000 35.500000000 -11.500000000 5.500000000 -36.500000000 -10.500000000 5.500000000 35.500000000 -10.500000000 5.500000000 -36.500000000 -9.500000000 5.500000000 35.500000000 -9.500000000 5.500000000 -36.500000000 -8.500000000 5.500000000 35.500000000 -8.500000000 5.500000000 -36.500000000 -7.500000000 5.500000000 35.500000000 -7.500000000 5.500000000 -36.500000000 -6.500000000 5.500000000 35.500000000 -6.500000000 5.500000000 -36.500000000 -5.500000000 5.500000000 35.500000000 -5.500000000 5.500000000 -36.500000000 -4.500000000 5.500000000 35.500000000 -4.500000000 5.500000000 -36.500000000 -3.500000000 5.500000000 35.500000000 -3.500000000 5.500000000 -36.500000000 -2.500000000 5.500000000 35.500000000 -2.500000000 5.500000000 -36.500000000 -1.500000000 5.500000000 35.500000000 -1.500000000 5.500000000 -36.500000000 -0.500000000 5.500000000 35.500000000 -0.500000000 5.500000000 -36.500000000 0.500000000 5.500000000 35.500000000 0.500000000 5.500000000 -36.500000000 1.500000000 
5.500000000 35.500000000 1.500000000 5.500000000 -36.500000000 2.500000000 5.500000000 35.500000000 2.500000000 5.500000000 -36.500000000 3.500000000 5.500000000 35.500000000 3.500000000 5.500000000 -36.500000000 4.500000000 5.500000000 35.500000000 4.500000000 5.500000000 -36.500000000 5.500000000 5.500000000 35.500000000 5.500000000 5.500000000 -36.500000000 6.500000000 5.500000000 35.500000000 6.500000000 5.500000000 -36.500000000 7.500000000 5.500000000 35.500000000 7.500000000 5.500000000 -36.500000000 8.500000000 5.500000000 35.500000000 8.500000000 5.500000000 -36.500000000 9.500000000 5.500000000 35.500000000 9.500000000 5.500000000 -36.500000000 10.500000000 5.500000000 35.500000000 10.500000000 5.500000000 -36.500000000 11.500000000 5.500000000 35.500000000 11.500000000 5.500000000 -36.500000000 12.500000000 5.500000000 35.500000000 12.500000000 5.500000000 -36.500000000 13.500000000 5.500000000 35.500000000 13.500000000 5.500000000 -36.500000000 14.500000000 5.500000000 35.500000000 14.500000000 5.500000000 -36.500000000 15.500000000 5.500000000 35.500000000 15.500000000 5.500000000 -36.500000000 16.500000000 5.500000000 35.500000000 16.500000000 5.500000000 -36.500000000 17.500000000 5.500000000 35.500000000 17.500000000 5.500000000 -36.500000000 18.500000000 5.500000000 35.500000000 18.500000000 5.500000000 -36.500000000 19.500000000 5.500000000 35.500000000 19.500000000 5.500000000 -36.500000000 20.500000000 5.500000000 35.500000000 20.500000000 5.500000000 -36.500000000 21.500000000 5.500000000 35.500000000 21.500000000 5.500000000 -36.500000000 22.500000000 5.500000000 35.500000000 22.500000000 5.500000000 -36.500000000 23.500000000 5.500000000 35.500000000 23.500000000 5.500000000 -36.500000000 24.500000000 5.500000000 35.500000000 24.500000000 5.500000000 -36.499996185 25.499996185 5.500000000 35.499996185 25.499996185 5.500000000 -36.499954224 26.499954224 5.500000000 35.499954224 26.499954224 5.500000000 -36.499591827 27.499591827 5.500000000 
35.499591827 27.499591827 5.500000000 -36.497474670 28.497470856 5.500000000 35.497467041 28.497470856 5.500000000 -36.488403320 29.488407135 5.500000000 35.488403320 29.488407135 5.500000000 -36.458980560 30.458978653 5.500000000 35.458980560 30.458978653 5.500000000 -36.384422302 31.384418488 5.500000000 35.384422302 31.384418488 5.500000000 -36.233222961 32.233222961 5.500000000 35.233222961 32.233222961 5.500000000 -35.981101990 32.981101990 5.500000000 -35.622871399 33.622871399 5.500000000 34.622871399 33.622871399 5.500000000 34.981101990 32.981101990 5.500000000 -35.167964935 34.167964935 5.500000000 -34.622871399 34.622871399 5.500000000 33.622871399 34.622871399 5.500000000 34.167964935 34.167964935 5.500000000 -33.981101990 34.981101990 5.500000000 -33.233222961 35.233222961 5.500000000 -32.384422302 35.384418488 5.500000000 -31.458978653 35.458976746 5.500000000 -30.488407135 35.488403320 5.500000000 -29.497472763 35.497467041 5.500000000 -28.499593735 35.499591827 5.500000000 -27.499954224 35.499954224 5.500000000 -26.499996185 35.499996185 5.500000000 -25.500000000 35.500000000 5.500000000 -24.500000000 35.500000000 5.500000000 -23.500000000 35.500000000 5.500000000 -22.500000000 35.500000000 5.500000000 -21.500000000 35.500000000 5.500000000 -20.500000000 35.500000000 5.500000000 -19.500000000 35.500000000 5.500000000 -18.500000000 35.500000000 5.500000000 -17.500000000 35.500000000 5.500000000 -16.500000000 35.500000000 5.500000000 -15.500000000 35.500000000 5.500000000 -14.500000000 35.500000000 5.500000000 -13.500000000 35.500000000 5.500000000 -12.500000000 35.500000000 5.500000000 -11.500000000 35.500000000 5.500000000 -10.500000000 35.500000000 5.500000000 -9.500000000 35.500000000 5.500000000 -8.500000000 35.500000000 5.500000000 -7.500000000 35.500000000 5.500000000 -6.500000000 35.500000000 5.500000000 -5.500000000 35.500000000 5.500000000 -4.500000000 35.500000000 5.500000000 -3.500000000 35.500000000 5.500000000 -2.500000000 35.500000000 
5.500000000 -1.500000000 35.500000000 5.500000000 -0.500000000 35.500000000 5.500000000 0.500000000 35.500000000 5.500000000 1.500000000 35.500000000 5.500000000 2.500000000 35.500000000 5.500000000 3.500000000 35.500000000 5.500000000 4.500000000 35.500000000 5.500000000 5.500000000 35.500000000 5.500000000 6.500000000 35.500000000 5.500000000 7.500000000 35.500000000 5.500000000 8.500000000 35.500000000 5.500000000 9.500000000 35.500000000 5.500000000 10.500000000 35.500000000 5.500000000 11.500000000 35.500000000 5.500000000 12.500000000 35.500000000 5.500000000 13.500000000 35.500000000 5.500000000 14.500000000 35.500000000 5.500000000 15.500000000 35.500000000 5.500000000 16.500000000 35.500000000 5.500000000 17.500000000 35.500000000 5.500000000 18.500000000 35.500000000 5.500000000 19.500000000 35.500000000 5.500000000 20.500000000 35.500000000 5.500000000 21.500000000 35.500000000 5.500000000 22.500000000 35.500000000 5.500000000 23.500000000 35.500000000 5.500000000 24.500000000 35.500000000 5.500000000 25.499996185 35.499996185 5.500000000 26.499954224 35.499954224 5.500000000 27.499591827 35.499591827 5.500000000 28.497470856 35.497474670 5.500000000 29.488407135 35.488403320 5.500000000 30.458978653 35.458980560 5.500000000 31.384418488 35.384422302 5.500000000 32.233222961 35.233222961 5.500000000 32.981101990 34.981101990 5.500000000 -33.981101990 -35.981101990 6.500000000 -33.233226776 -36.233222961 6.500000000 -32.384422302 -36.384418488 6.500000000 -31.458978653 -36.458980560 6.500000000 -30.488407135 -36.488403320 6.500000000 -29.497472763 -36.497474670 6.500000000 -28.499593735 -36.499591827 6.500000000 -27.499954224 -36.499954224 6.500000000 -26.499996185 -36.499996185 6.500000000 -25.500000000 -36.500000000 6.500000000 -24.500000000 -36.500000000 6.500000000 -23.500000000 -36.500000000 6.500000000 -22.500000000 -36.500000000 6.500000000 -21.500000000 -36.500000000 6.500000000 -20.500000000 -36.500000000 6.500000000 -19.500000000 -36.500000000 
6.500000000 -18.500000000 -36.500000000 6.500000000 -17.500000000 -36.500000000 6.500000000 -16.500000000 -36.500000000 6.500000000 -15.500000000 -36.500000000 6.500000000 -14.500000000 -36.500000000 6.500000000 -13.500000000 -36.500000000 6.500000000 -12.500000000 -36.500000000 6.500000000 -11.500000000 -36.500000000 6.500000000 -10.500000000 -36.500000000 6.500000000 -9.500000000 -36.500000000 6.500000000 -8.500000000 -36.500000000 6.500000000 -7.500000000 -36.500000000 6.500000000 -6.500000000 -36.500000000 6.500000000 -5.500000000 -36.500000000 6.500000000 -4.500000000 -36.500000000 6.500000000 -3.500000000 -36.500000000 6.500000000 -2.500000000 -36.500000000 6.500000000 -1.500000000 -36.500000000 6.500000000 -0.500000000 -36.500000000 6.500000000 0.500000000 -36.500000000 6.500000000 1.500000000 -36.500000000 6.500000000 2.500000000 -36.500000000 6.500000000 3.500000000 -36.500000000 6.500000000 4.500000000 -36.500000000 6.500000000 5.500000000 -36.500000000 6.500000000 6.500000000 -36.500000000 6.500000000 7.500000000 -36.500000000 6.500000000 8.500000000 -36.500000000 6.500000000 9.500000000 -36.500000000 6.500000000 10.500000000 -36.500000000 6.500000000 11.500000000 -36.500000000 6.500000000 12.500000000 -36.500000000 6.500000000 13.500000000 -36.500000000 6.500000000 14.500000000 -36.500000000 6.500000000 15.500000000 -36.500000000 6.500000000 16.500000000 -36.500000000 6.500000000 17.500000000 -36.500000000 6.500000000 18.500000000 -36.500000000 6.500000000 19.500000000 -36.500000000 6.500000000 20.500000000 -36.500000000 6.500000000 21.500000000 -36.500000000 6.500000000 22.500000000 -36.500000000 6.500000000 23.500000000 -36.500000000 6.500000000 24.500000000 -36.500000000 6.500000000 25.499996185 -36.499996185 6.500000000 26.499954224 -36.499954224 6.500000000 27.499591827 -36.499591827 6.500000000 28.497470856 -36.497467041 6.500000000 29.488407135 -36.488403320 6.500000000 30.458978653 -36.458980560 6.500000000 31.384418488 -36.384422302 6.500000000 
32.233222961 -36.233222961 6.500000000 32.981101990 -35.981101990 6.500000000 -35.167964935 -35.167964935 6.500000000 -34.622871399 -35.622871399 6.500000000 33.622871399 -35.622871399 6.500000000 34.167964935 -35.167964935 6.500000000 -35.981101990 -33.981101990 6.500000000 -35.622871399 -34.622871399 6.500000000 34.622871399 -34.622871399 6.500000000 34.981101990 -33.981101990 6.500000000 -36.233222961 -33.233222961 6.500000000 35.233222961 -33.233226776 6.500000000 -36.384418488 -32.384422302 6.500000000 35.384418488 -32.384422302 6.500000000 -36.458976746 -31.458978653 6.500000000 35.458980560 -31.458978653 6.500000000 -36.488403320 -30.488407135 6.500000000 35.488403320 -30.488407135 6.500000000 -36.497467041 -29.497472763 6.500000000 35.497474670 -29.497472763 6.500000000 -36.499591827 -28.499593735 6.500000000 35.499591827 -28.499593735 6.500000000 -36.499954224 -27.499954224 6.500000000 35.499954224 -27.499954224 6.500000000 -36.499996185 -26.499996185 6.500000000 35.499996185 -26.499996185 6.500000000 -36.500000000 -25.500000000 6.500000000 35.500000000 -25.500000000 6.500000000 -36.500000000 -24.500000000 6.500000000 35.500000000 -24.500000000 6.500000000 -36.500000000 -23.500000000 6.500000000 35.500000000 -23.500000000 6.500000000 -36.500000000 -22.500000000 6.500000000 35.500000000 -22.500000000 6.500000000 -36.500000000 -21.500000000 6.500000000 35.500000000 -21.500000000 6.500000000 -36.500000000 -20.500000000 6.500000000 35.500000000 -20.500000000 6.500000000 -36.500000000 -19.500000000 6.500000000 35.500000000 -19.500000000 6.500000000 -36.500000000 -18.500000000 6.500000000 35.500000000 -18.500000000 6.500000000 -36.500000000 -17.500000000 6.500000000 35.500000000 -17.500000000 6.500000000 -36.500000000 -16.500000000 6.500000000 35.500000000 -16.500000000 6.500000000 -36.500000000 -15.500000000 6.500000000 35.500000000 -15.500000000 6.500000000 -36.500000000 -14.500000000 6.500000000 35.500000000 -14.500000000 6.500000000 -36.500000000 
-13.500000000 6.500000000 35.500000000 -13.500000000 6.500000000 -36.500000000 -12.500000000 6.500000000 35.500000000 -12.500000000 6.500000000 -36.500000000 -11.500000000 6.500000000 35.500000000 -11.500000000 6.500000000 -36.500000000 -10.500000000 6.500000000 35.500000000 -10.500000000 6.500000000 -36.500000000 -9.500000000 6.500000000 35.500000000 -9.500000000 6.500000000 -36.500000000 -8.500000000 6.500000000 35.500000000 -8.500000000 6.500000000 -36.500000000 -7.500000000 6.500000000 35.500000000 -7.500000000 6.500000000 -36.500000000 -6.500000000 6.500000000 35.500000000 -6.500000000 6.500000000 -36.500000000 -5.500000000 6.500000000 35.500000000 -5.500000000 6.500000000 -36.500000000 -4.500000000 6.500000000 35.500000000 -4.500000000 6.500000000 -36.500000000 -3.500000000 6.500000000 35.500000000 -3.500000000 6.500000000 -36.500000000 -2.500000000 6.500000000 35.500000000 -2.500000000 6.500000000 -36.500000000 -1.500000000 6.500000000 35.500000000 -1.500000000 6.500000000 -36.500000000 -0.500000000 6.500000000 35.500000000 -0.500000000 6.500000000 -36.500000000 0.500000000 6.500000000 35.500000000 0.500000000 6.500000000 -36.500000000 1.500000000 6.500000000 35.500000000 1.500000000 6.500000000 -36.500000000 2.500000000 6.500000000 35.500000000 2.500000000 6.500000000 -36.500000000 3.500000000 6.500000000 35.500000000 3.500000000 6.500000000 -36.500000000 4.500000000 6.500000000 35.500000000 4.500000000 6.500000000 -36.500000000 5.500000000 6.500000000 35.500000000 5.500000000 6.500000000 -36.500000000 6.500000000 6.500000000 35.500000000 6.500000000 6.500000000 -36.500000000 7.500000000 6.500000000 35.500000000 7.500000000 6.500000000 -36.500000000 8.500000000 6.500000000 35.500000000 8.500000000 6.500000000 -36.500000000 9.500000000 6.500000000 35.500000000 9.500000000 6.500000000 -36.500000000 10.500000000 6.500000000 35.500000000 10.500000000 6.500000000 -36.500000000 11.500000000 6.500000000 35.500000000 11.500000000 6.500000000 -36.500000000 
12.500000000 6.500000000 35.500000000 12.500000000 6.500000000 -36.500000000 13.500000000 6.500000000 35.500000000 13.500000000 6.500000000 -36.500000000 14.500000000 6.500000000 35.500000000 14.500000000 6.500000000 -36.500000000 15.500000000 6.500000000 35.500000000 15.500000000 6.500000000 -36.500000000 16.500000000 6.500000000 35.500000000 16.500000000 6.500000000 -36.500000000 17.500000000 6.500000000 35.500000000 17.500000000 6.500000000 -36.500000000 18.500000000 6.500000000 35.500000000 18.500000000 6.500000000 -36.500000000 19.500000000 6.500000000 35.500000000 19.500000000 6.500000000 -36.500000000 20.500000000 6.500000000 35.500000000 20.500000000 6.500000000 -36.500000000 21.500000000 6.500000000 35.500000000 21.500000000 6.500000000 -36.500000000 22.500000000 6.500000000 35.500000000 22.500000000 6.500000000 -36.500000000 23.500000000 6.500000000 35.500000000 23.500000000 6.500000000 -36.500000000 24.500000000 6.500000000 35.500000000 24.500000000 6.500000000 -36.499996185 25.499996185 6.500000000 35.499996185 25.499996185 6.500000000 -36.499954224 26.499954224 6.500000000 35.499954224 26.499954224 6.500000000 -36.499591827 27.499591827 6.500000000 35.499591827 27.499591827 6.500000000 -36.497474670 28.497470856 6.500000000 35.497467041 28.497470856 6.500000000 -36.488403320 29.488407135 6.500000000 35.488403320 29.488407135 6.500000000 -36.458980560 30.458978653 6.500000000 35.458980560 30.458978653 6.500000000 -36.384422302 31.384418488 6.500000000 35.384422302 31.384418488 6.500000000 -36.233222961 32.233222961 6.500000000 35.233222961 32.233222961 6.500000000 -35.981101990 32.981101990 6.500000000 -35.622871399 33.622871399 6.500000000 34.622871399 33.622871399 6.500000000 34.981101990 32.981101990 6.500000000 -35.167964935 34.167964935 6.500000000 -34.622871399 34.622871399 6.500000000 33.622871399 34.622871399 6.500000000 34.167964935 34.167964935 6.500000000 -33.981101990 34.981101990 6.500000000 -33.233222961 35.233222961 6.500000000 
-32.384422302 35.384418488 6.500000000 -31.458978653 35.458976746 6.500000000 -30.488407135 35.488403320 6.500000000 -29.497472763 35.497467041 6.500000000 -28.499593735 35.499591827 6.500000000 -27.499954224 35.499954224 6.500000000 -26.499996185 35.499996185 6.500000000 -25.500000000 35.500000000 6.500000000 -24.500000000 35.500000000 6.500000000 -23.500000000 35.500000000 6.500000000 -22.500000000 35.500000000 6.500000000 -21.500000000 35.500000000 6.500000000 -20.500000000 35.500000000 6.500000000 -19.500000000 35.500000000 6.500000000 -18.500000000 35.500000000 6.500000000 -17.500000000 35.500000000 6.500000000 -16.500000000 35.500000000 6.500000000 -15.500000000 35.500000000 6.500000000 -14.500000000 35.500000000 6.500000000 -13.500000000 35.500000000 6.500000000 -12.500000000 35.500000000 6.500000000 -11.500000000 35.500000000 6.500000000 -10.500000000 35.500000000 6.500000000 -9.500000000 35.500000000 6.500000000 -8.500000000 35.500000000 6.500000000 -7.500000000 35.500000000 6.500000000 -6.500000000 35.500000000 6.500000000 -5.500000000 35.500000000 6.500000000 -4.500000000 35.500000000 6.500000000 -3.500000000 35.500000000 6.500000000 -2.500000000 35.500000000 6.500000000 -1.500000000 35.500000000 6.500000000 -0.500000000 35.500000000 6.500000000 0.500000000 35.500000000 6.500000000 1.500000000 35.500000000 6.500000000 2.500000000 35.500000000 6.500000000 3.500000000 35.500000000 6.500000000 4.500000000 35.500000000 6.500000000 5.500000000 35.500000000 6.500000000 6.500000000 35.500000000 6.500000000 7.500000000 35.500000000 6.500000000 8.500000000 35.500000000 6.500000000 9.500000000 35.500000000 6.500000000 10.500000000 35.500000000 6.500000000 11.500000000 35.500000000 6.500000000 12.500000000 35.500000000 6.500000000 13.500000000 35.500000000 6.500000000 14.500000000 35.500000000 6.500000000 15.500000000 35.500000000 6.500000000 16.500000000 35.500000000 6.500000000 17.500000000 35.500000000 6.500000000 18.500000000 35.500000000 6.500000000 
19.500000000 35.500000000 6.500000000 20.500000000 35.500000000 6.500000000 21.500000000 35.500000000 6.500000000 22.500000000 35.500000000 6.500000000 23.500000000 35.500000000 6.500000000 24.500000000 35.500000000 6.500000000 25.499996185 35.499996185 6.500000000 26.499954224 35.499954224 6.500000000 27.499591827 35.499591827 6.500000000 28.497470856 35.497474670 6.500000000 29.488407135 35.488403320 6.500000000 30.458978653 35.458980560 6.500000000 31.384418488 35.384422302 6.500000000 32.233222961 35.233222961 6.500000000 32.981101990 34.981101990 6.500000000 -33.981101990 -35.981101990 7.500000000 -33.233226776 -36.233222961 7.500000000 -32.384422302 -36.384418488 7.500000000 -31.458978653 -36.458980560 7.500000000 -30.488407135 -36.488403320 7.500000000 -29.497472763 -36.497474670 7.500000000 -28.499593735 -36.499591827 7.500000000 -27.499954224 -36.499954224 7.500000000 -26.499996185 -36.499996185 7.500000000 -25.500000000 -36.500000000 7.500000000 -24.500000000 -36.500000000 7.500000000 -23.500000000 -36.500000000 7.500000000 -22.500000000 -36.500000000 7.500000000 -21.500000000 -36.500000000 7.500000000 -20.500000000 -36.500000000 7.500000000 -19.500000000 -36.500000000 7.500000000 -18.500000000 -36.500000000 7.500000000 -17.500000000 -36.500000000 7.500000000 -16.500000000 -36.500000000 7.500000000 -15.500000000 -36.500000000 7.500000000 -14.500000000 -36.500000000 7.500000000 -13.500000000 -36.500000000 7.500000000 -12.500000000 -36.500000000 7.500000000 -11.500000000 -36.500000000 7.500000000 -10.500000000 -36.500000000 7.500000000 -9.500000000 -36.500000000 7.500000000 -8.500000000 -36.500000000 7.500000000 -7.500000000 -36.500000000 7.500000000 -6.500000000 -36.500000000 7.500000000 -5.500000000 -36.500000000 7.500000000 -4.500000000 -36.500000000 7.500000000 -3.500000000 -36.500000000 7.500000000 -2.500000000 -36.500000000 7.500000000 -1.500000000 -36.500000000 7.500000000 -0.500000000 -36.500000000 7.500000000 0.500000000 -36.500000000 7.500000000 
1.500000000 -36.500000000 7.500000000 2.500000000 -36.500000000 7.500000000 3.500000000 -36.500000000 7.500000000 4.500000000 -36.500000000 7.500000000 5.500000000 -36.500000000 7.500000000 6.500000000 -36.500000000 7.500000000 7.500000000 -36.500000000 7.500000000 8.500000000 -36.500000000 7.500000000 9.500000000 -36.500000000 7.500000000 10.500000000 -36.500000000 7.500000000 11.500000000 -36.500000000 7.500000000 12.500000000 -36.500000000 7.500000000 13.500000000 -36.500000000 7.500000000 14.500000000 -36.500000000 7.500000000 15.500000000 -36.500000000 7.500000000 16.500000000 -36.500000000 7.500000000 17.500000000 -36.500000000 7.500000000 18.500000000 -36.500000000 7.500000000 19.500000000 -36.500000000 7.500000000 20.500000000 -36.500000000 7.500000000 21.500000000 -36.500000000 7.500000000 22.500000000 -36.500000000 7.500000000 23.500000000 -36.500000000 7.500000000 24.500000000 -36.500000000 7.500000000 25.499996185 -36.499996185 7.500000000 26.499954224 -36.499954224 7.500000000 27.499591827 -36.499591827 7.500000000 28.497470856 -36.497467041 7.500000000 29.488407135 -36.488403320 7.500000000 30.458978653 -36.458980560 7.500000000 31.384418488 -36.384422302 7.500000000 32.233222961 -36.233222961 7.500000000 32.981101990 -35.981101990 7.500000000 -35.167964935 -35.167964935 7.500000000 -34.622871399 -35.622871399 7.500000000 33.622871399 -35.622871399 7.500000000 34.167964935 -35.167964935 7.500000000 -35.981101990 -33.981101990 7.500000000 -35.622871399 -34.622871399 7.500000000 34.622871399 -34.622871399 7.500000000 34.981101990 -33.981101990 7.500000000 -36.233222961 -33.233222961 7.500000000 35.233222961 -33.233226776 7.500000000 -36.384418488 -32.384422302 7.500000000 35.384418488 -32.384422302 7.500000000 -36.458976746 -31.458978653 7.500000000 35.458980560 -31.458978653 7.500000000 -36.488403320 -30.488407135 7.500000000 35.488403320 -30.488407135 7.500000000 -36.497467041 -29.497472763 7.500000000 35.497474670 -29.497472763 7.500000000 
-36.499591827 -28.499593735 7.500000000 35.499591827 -28.499593735 7.500000000 -36.499954224 -27.499954224 7.500000000 35.499954224 -27.499954224 7.500000000 -36.499996185 -26.499996185 7.500000000 35.499996185 -26.499996185 7.500000000 -36.500000000 -25.500000000 7.500000000 35.500000000 -25.500000000 7.500000000 -36.500000000 -24.500000000 7.500000000 35.500000000 -24.500000000 7.500000000 -36.500000000 -23.500000000 7.500000000 35.500000000 -23.500000000 7.500000000 -36.500000000 -22.500000000 7.500000000 35.500000000 -22.500000000 7.500000000 -36.500000000 -21.500000000 7.500000000 35.500000000 -21.500000000 7.500000000 -36.500000000 -20.500000000 7.500000000 35.500000000 -20.500000000 7.500000000 -36.500000000 -19.500000000 7.500000000 35.500000000 -19.500000000 7.500000000 -36.500000000 -18.500000000 7.500000000 35.500000000 -18.500000000 7.500000000 -36.500000000 -17.500000000 7.500000000 35.500000000 -17.500000000 7.500000000 -36.500000000 -16.500000000 7.500000000 35.500000000 -16.500000000 7.500000000 -36.500000000 -15.500000000 7.500000000 35.500000000 -15.500000000 7.500000000 -36.500000000 -14.500000000 7.500000000 35.500000000 -14.500000000 7.500000000 -36.500000000 -13.500000000 7.500000000 35.500000000 -13.500000000 7.500000000 -36.500000000 -12.500000000 7.500000000 35.500000000 -12.500000000 7.500000000 -36.500000000 -11.500000000 7.500000000 35.500000000 -11.500000000 7.500000000 -36.500000000 -10.500000000 7.500000000 35.500000000 -10.500000000 7.500000000 -36.500000000 -9.500000000 7.500000000 35.500000000 -9.500000000 7.500000000 -36.500000000 -8.500000000 7.500000000 35.500000000 -8.500000000 7.500000000 -36.500000000 -7.500000000 7.500000000 35.500000000 -7.500000000 7.500000000 -36.500000000 -6.500000000 7.500000000 35.500000000 -6.500000000 7.500000000 -36.500000000 -5.500000000 7.500000000 35.500000000 -5.500000000 7.500000000 -36.500000000 -4.500000000 7.500000000 35.500000000 -4.500000000 7.500000000 -36.500000000 -3.500000000 
7.500000000 35.500000000 -3.500000000 7.500000000 -36.500000000 -2.500000000 7.500000000 35.500000000 -2.500000000 7.500000000 -36.500000000 -1.500000000 7.500000000 35.500000000 -1.500000000 7.500000000 -36.500000000 -0.500000000 7.500000000 35.500000000 -0.500000000 7.500000000 -36.500000000 0.500000000 7.500000000 35.500000000 0.500000000 7.500000000 -36.500000000 1.500000000 7.500000000 35.500000000 1.500000000 7.500000000 -36.500000000 2.500000000 7.500000000 35.500000000 2.500000000 7.500000000 -36.500000000 3.500000000 7.500000000 35.500000000 3.500000000 7.500000000 -36.500000000 4.500000000 7.500000000 35.500000000 4.500000000 7.500000000 -36.500000000 5.500000000 7.500000000 35.500000000 5.500000000 7.500000000 -36.500000000 6.500000000 7.500000000 35.500000000 6.500000000 7.500000000 -36.500000000 7.500000000 7.500000000 35.500000000 7.500000000 7.500000000 -36.500000000 8.500000000 7.500000000 35.500000000 8.500000000 7.500000000 -36.500000000 9.500000000 7.500000000 35.500000000 9.500000000 7.500000000 -36.500000000 10.500000000 7.500000000 35.500000000 10.500000000 7.500000000 -36.500000000 11.500000000 7.500000000 35.500000000 11.500000000 7.500000000 -36.500000000 12.500000000 7.500000000 35.500000000 12.500000000 7.500000000 -36.500000000 13.500000000 7.500000000 35.500000000 13.500000000 7.500000000 -36.500000000 14.500000000 7.500000000 35.500000000 14.500000000 7.500000000 -36.500000000 15.500000000 7.500000000 35.500000000 15.500000000 7.500000000 -36.500000000 16.500000000 7.500000000 35.500000000 16.500000000 7.500000000 -36.500000000 17.500000000 7.500000000 35.500000000 17.500000000 7.500000000 -36.500000000 18.500000000 7.500000000 35.500000000 18.500000000 7.500000000 -36.500000000 19.500000000 7.500000000 35.500000000 19.500000000 7.500000000 -36.500000000 20.500000000 7.500000000 35.500000000 20.500000000 7.500000000 -36.500000000 21.500000000 7.500000000 35.500000000 21.500000000 7.500000000 -36.500000000 22.500000000 7.500000000 
35.500000000 22.500000000 7.500000000 -36.500000000 23.500000000 7.500000000 35.500000000 23.500000000 7.500000000 -36.500000000 24.500000000 7.500000000 35.500000000 24.500000000 7.500000000 -36.499996185 25.499996185 7.500000000 35.499996185 25.499996185 7.500000000 -36.499954224 26.499954224 7.500000000 35.499954224 26.499954224 7.500000000 -36.499591827 27.499591827 7.500000000 35.499591827 27.499591827 7.500000000 -36.497474670 28.497470856 7.500000000 35.497467041 28.497470856 7.500000000 -36.488403320 29.488407135 7.500000000 35.488403320 29.488407135 7.500000000 -36.458980560 30.458978653 7.500000000 35.458980560 30.458978653 7.500000000 -36.384422302 31.384418488 7.500000000 35.384422302 31.384418488 7.500000000 -36.233222961 32.233222961 7.500000000 35.233222961 32.233222961 7.500000000 -35.981101990 32.981101990 7.500000000 -35.622871399 33.622871399 7.500000000 34.622871399 33.622871399 7.500000000 34.981101990 32.981101990 7.500000000 -35.167964935 34.167964935 7.500000000 -34.622871399 34.622871399 7.500000000 33.622871399 34.622871399 7.500000000 34.167964935 34.167964935 7.500000000 -33.981101990 34.981101990 7.500000000 -33.233222961 35.233222961 7.500000000 -32.384422302 35.384418488 7.500000000 -31.458978653 35.458976746 7.500000000 -30.488407135 35.488403320 7.500000000 -29.497472763 35.497467041 7.500000000 -28.499593735 35.499591827 7.500000000 -27.499954224 35.499954224 7.500000000 -26.499996185 35.499996185 7.500000000 -25.500000000 35.500000000 7.500000000 -24.500000000 35.500000000 7.500000000 -23.500000000 35.500000000 7.500000000 -22.500000000 35.500000000 7.500000000 -21.500000000 35.500000000 7.500000000 -20.500000000 35.500000000 7.500000000 -19.500000000 35.500000000 7.500000000 -18.500000000 35.500000000 7.500000000 -17.500000000 35.500000000 7.500000000 -16.500000000 35.500000000 7.500000000 -15.500000000 35.500000000 7.500000000 -14.500000000 35.500000000 7.500000000 -13.500000000 35.500000000 7.500000000 -12.500000000 
35.500000000 7.500000000 -11.500000000 35.500000000 7.500000000 -10.500000000 35.500000000 7.500000000 -9.500000000 35.500000000 7.500000000 -8.500000000 35.500000000 7.500000000 -7.500000000 35.500000000 7.500000000 -6.500000000 35.500000000 7.500000000 -5.500000000 35.500000000 7.500000000 -4.500000000 35.500000000 7.500000000 -3.500000000 35.500000000 7.500000000 -2.500000000 35.500000000 7.500000000 -1.500000000 35.500000000 7.500000000 -0.500000000 35.500000000 7.500000000 0.500000000 35.500000000 7.500000000 1.500000000 35.500000000 7.500000000 2.500000000 35.500000000 7.500000000 3.500000000 35.500000000 7.500000000 4.500000000 35.500000000 7.500000000 5.500000000 35.500000000 7.500000000 6.500000000 35.500000000 7.500000000 7.500000000 35.500000000 7.500000000 8.500000000 35.500000000 7.500000000 9.500000000 35.500000000 7.500000000 10.500000000 35.500000000 7.500000000 11.500000000 35.500000000 7.500000000 12.500000000 35.500000000 7.500000000 13.500000000 35.500000000 7.500000000 14.500000000 35.500000000 7.500000000 15.500000000 35.500000000 7.500000000 16.500000000 35.500000000 7.500000000 17.500000000 35.500000000 7.500000000 18.500000000 35.500000000 7.500000000 19.500000000 35.500000000 7.500000000 20.500000000 35.500000000 7.500000000 21.500000000 35.500000000 7.500000000 22.500000000 35.500000000 7.500000000 23.500000000 35.500000000 7.500000000 24.500000000 35.500000000 7.500000000 25.499996185 35.499996185 7.500000000 26.499954224 35.499954224 7.500000000 27.499591827 35.499591827 7.500000000 28.497470856 35.497474670 7.500000000 29.488407135 35.488403320 7.500000000 30.458978653 35.458980560 7.500000000 31.384418488 35.384422302 7.500000000 32.233222961 35.233222961 7.500000000 32.981101990 34.981101990 7.500000000 -33.981101990 -35.981101990 8.500000000 -33.233226776 -36.233222961 8.500000000 -32.384422302 -36.384418488 8.500000000 -31.458978653 -36.458980560 8.500000000 -30.488407135 -36.488403320 8.500000000 -29.497472763 -36.497474670 
8.500000000 -28.499593735 -36.499591827 8.500000000 -27.499954224 -36.499954224 8.500000000 -26.499996185 -36.499996185 8.500000000 -25.500000000 -36.500000000 8.500000000 -24.500000000 -36.500000000 8.500000000 -23.500000000 -36.500000000 8.500000000 -22.500000000 -36.500000000 8.500000000 -21.500000000 -36.500000000 8.500000000 -20.500000000 -36.500000000 8.500000000 -19.500000000 -36.500000000 8.500000000 -18.500000000 -36.500000000 8.500000000 -17.500000000 -36.500000000 8.500000000 -16.500000000 -36.500000000 8.500000000 -15.500000000 -36.500000000 8.500000000 -14.500000000 -36.500000000 8.500000000 -13.500000000 -36.500000000 8.500000000 -12.500000000 -36.500000000 8.500000000 -11.500000000 -36.500000000 8.500000000 -10.500000000 -36.500000000 8.500000000 -9.500000000 -36.500000000 8.500000000 -8.500000000 -36.500000000 8.500000000 -7.500000000 -36.500000000 8.500000000 -6.500000000 -36.500000000 8.500000000 -5.500000000 -36.500000000 8.500000000 -4.500000000 -36.500000000 8.500000000 -3.500000000 -36.500000000 8.500000000 -2.500000000 -36.500000000 8.500000000 -1.500000000 -36.500000000 8.500000000 -0.500000000 -36.500000000 8.500000000 0.500000000 -36.500000000 8.500000000 1.500000000 -36.500000000 8.500000000 2.500000000 -36.500000000 8.500000000 3.500000000 -36.500000000 8.500000000 4.500000000 -36.500000000 8.500000000 5.500000000 -36.500000000 8.500000000 6.500000000 -36.500000000 8.500000000 7.500000000 -36.500000000 8.500000000 8.500000000 -36.500000000 8.500000000 9.500000000 -36.500000000 8.500000000 10.500000000 -36.500000000 8.500000000 11.500000000 -36.500000000 8.500000000 12.500000000 -36.500000000 8.500000000 13.500000000 -36.500000000 8.500000000 14.500000000 -36.500000000 8.500000000 15.500000000 -36.500000000 8.500000000 16.500000000 -36.500000000 8.500000000 17.500000000 -36.500000000 8.500000000 18.500000000 -36.500000000 8.500000000 19.500000000 -36.500000000 8.500000000 20.500000000 -36.500000000 8.500000000 21.500000000 -36.500000000 
8.500000000 22.500000000 -36.500000000 8.500000000 23.500000000 -36.500000000 8.500000000 24.500000000 -36.500000000 8.500000000 25.499996185 -36.499996185 8.500000000 26.499954224 -36.499954224 8.500000000 27.499591827 -36.499591827 8.500000000 28.497470856 -36.497467041 8.500000000 29.488407135 -36.488403320 8.500000000 30.458978653 -36.458980560 8.500000000 31.384418488 -36.384422302 8.500000000 32.233222961 -36.233222961 8.500000000 32.981101990 -35.981101990 8.500000000 -35.167964935 -35.167964935 8.500000000 -34.622871399 -35.622871399 8.500000000 33.622871399 -35.622871399 8.500000000 34.167964935 -35.167964935 8.500000000 -35.981101990 -33.981101990 8.500000000 -35.622871399 -34.622871399 8.500000000 34.622871399 -34.622871399 8.500000000 34.981101990 -33.981101990 8.500000000 -36.233222961 -33.233222961 8.500000000 35.233222961 -33.233226776 8.500000000 -36.384418488 -32.384422302 8.500000000 35.384418488 -32.384422302 8.500000000 -36.458976746 -31.458978653 8.500000000 35.458980560 -31.458978653 8.500000000 -36.488403320 -30.488407135 8.500000000 35.488403320 -30.488407135 8.500000000 -36.497467041 -29.497472763 8.500000000 35.497474670 -29.497472763 8.500000000 -36.499591827 -28.499593735 8.500000000 35.499591827 -28.499593735 8.500000000 -36.499954224 -27.499954224 8.500000000 35.499954224 -27.499954224 8.500000000 -36.499996185 -26.499996185 8.500000000 35.499996185 -26.499996185 8.500000000 -36.500000000 -25.500000000 8.500000000 35.500000000 -25.500000000 8.500000000 -36.500000000 -24.500000000 8.500000000 35.500000000 -24.500000000 8.500000000 -36.500000000 -23.500000000 8.500000000 35.500000000 -23.500000000 8.500000000 -36.500000000 -22.500000000 8.500000000 35.500000000 -22.500000000 8.500000000 -36.500000000 -21.500000000 8.500000000 35.500000000 -21.500000000 8.500000000 -36.500000000 -20.500000000 8.500000000 35.500000000 -20.500000000 8.500000000 -36.500000000 -19.500000000 8.500000000 35.500000000 -19.500000000 8.500000000 -36.500000000 
-18.500000000 8.500000000 35.500000000 -18.500000000 8.500000000 -36.500000000 -17.500000000 8.500000000 35.500000000 -17.500000000 8.500000000 -36.500000000 -16.500000000 8.500000000 35.500000000 -16.500000000 8.500000000 -36.500000000 -15.500000000 8.500000000 35.500000000 -15.500000000 8.500000000 -36.500000000 -14.500000000 8.500000000 35.500000000 -14.500000000 8.500000000 -36.500000000 -13.500000000 8.500000000 35.500000000 -13.500000000 8.500000000 -36.500000000 -12.500000000 8.500000000 35.500000000 -12.500000000 8.500000000 -36.500000000 -11.500000000 8.500000000 35.500000000 -11.500000000 8.500000000 -36.500000000 -10.500000000 8.500000000 35.500000000 -10.500000000 8.500000000 -36.500000000 -9.500000000 8.500000000 35.500000000 -9.500000000 8.500000000 -36.500000000 -8.500000000 8.500000000 35.500000000 -8.500000000 8.500000000 -36.500000000 -7.500000000 8.500000000 35.500000000 -7.500000000 8.500000000 -36.500000000 -6.500000000 8.500000000 35.500000000 -6.500000000 8.500000000 -36.500000000 -5.500000000 8.500000000 35.500000000 -5.500000000 8.500000000 -36.500000000 -4.500000000 8.500000000 35.500000000 -4.500000000 8.500000000 -36.500000000 -3.500000000 8.500000000 35.500000000 -3.500000000 8.500000000 -36.500000000 -2.500000000 8.500000000 35.500000000 -2.500000000 8.500000000 -36.500000000 -1.500000000 8.500000000 35.500000000 -1.500000000 8.500000000 -36.500000000 -0.500000000 8.500000000 35.500000000 -0.500000000 8.500000000 -36.500000000 0.500000000 8.500000000 35.500000000 0.500000000 8.500000000 -36.500000000 1.500000000 8.500000000 35.500000000 1.500000000 8.500000000 -36.500000000 2.500000000 8.500000000 35.500000000 2.500000000 8.500000000 -36.500000000 3.500000000 8.500000000 35.500000000 3.500000000 8.500000000 -36.500000000 4.500000000 8.500000000 35.500000000 4.500000000 8.500000000 -36.500000000 5.500000000 8.500000000 35.500000000 5.500000000 8.500000000 -36.500000000 6.500000000 8.500000000 35.500000000 6.500000000 8.500000000 
-36.500000000 7.500000000 8.500000000 35.500000000 7.500000000 8.500000000 -36.500000000 8.500000000 8.500000000 35.500000000 8.500000000 8.500000000 -36.500000000 9.500000000 8.500000000 35.500000000 9.500000000 8.500000000 -36.500000000 10.500000000 8.500000000 35.500000000 10.500000000 8.500000000 -36.500000000 11.500000000 8.500000000 35.500000000 11.500000000 8.500000000 -36.500000000 12.500000000 8.500000000 35.500000000 12.500000000 8.500000000 -36.500000000 13.500000000 8.500000000 35.500000000 13.500000000 8.500000000 -36.500000000 14.500000000 8.500000000 35.500000000 14.500000000 8.500000000 -36.500000000 15.500000000 8.500000000 35.500000000 15.500000000 8.500000000 -36.500000000 16.500000000 8.500000000 35.500000000 16.500000000 8.500000000 -36.500000000 17.500000000 8.500000000 35.500000000 17.500000000 8.500000000 -36.500000000 18.500000000 8.500000000 35.500000000 18.500000000 8.500000000 -36.500000000 19.500000000 8.500000000 35.500000000 19.500000000 8.500000000 -36.500000000 20.500000000 8.500000000 35.500000000 20.500000000 8.500000000 -36.500000000 21.500000000 8.500000000 35.500000000 21.500000000 8.500000000 -36.500000000 22.500000000 8.500000000 35.500000000 22.500000000 8.500000000 -36.500000000 23.500000000 8.500000000 35.500000000 23.500000000 8.500000000 -36.500000000 24.500000000 8.500000000 35.500000000 24.500000000 8.500000000 -36.499996185 25.499996185 8.500000000 35.499996185 25.499996185 8.500000000 -36.499954224 26.499954224 8.500000000 35.499954224 26.499954224 8.500000000 -36.499591827 27.499591827 8.500000000 35.499591827 27.499591827 8.500000000 -36.497474670 28.497470856 8.500000000 35.497467041 28.497470856 8.500000000 -36.488403320 29.488407135 8.500000000 35.488403320 29.488407135 8.500000000 -36.458980560 30.458978653 8.500000000 35.458980560 30.458978653 8.500000000 -36.384422302 31.384418488 8.500000000 35.384422302 31.384418488 8.500000000 -36.233222961 32.233222961 8.500000000 35.233222961 32.233222961 8.500000000 
-35.981101990 32.981101990 8.500000000 -35.622871399 33.622871399 8.500000000 34.622871399 33.622871399 8.500000000 34.981101990 32.981101990 8.500000000 -35.167964935 34.167964935 8.500000000 -34.622871399 34.622871399 8.500000000 33.622871399 34.622871399 8.500000000 34.167964935 34.167964935 8.500000000 -33.981101990 34.981101990 8.500000000 -33.233222961 35.233222961 8.500000000 -32.384422302 35.384418488 8.500000000 -31.458978653 35.458976746 8.500000000 -30.488407135 35.488403320 8.500000000 -29.497472763 35.497467041 8.500000000 -28.499593735 35.499591827 8.500000000 -27.499954224 35.499954224 8.500000000 -26.499996185 35.499996185 8.500000000 -25.500000000 35.500000000 8.500000000 -24.500000000 35.500000000 8.500000000 -23.500000000 35.500000000 8.500000000 -22.500000000 35.500000000 8.500000000 -21.500000000 35.500000000 8.500000000 -20.500000000 35.500000000 8.500000000 -19.500000000 35.500000000 8.500000000 -18.500000000 35.500000000 8.500000000 -17.500000000 35.500000000 8.500000000 -16.500000000 35.500000000 8.500000000 -15.500000000 35.500000000 8.500000000 -14.500000000 35.500000000 8.500000000 -13.500000000 35.500000000 8.500000000 -12.500000000 35.500000000 8.500000000 -11.500000000 35.500000000 8.500000000 -10.500000000 35.500000000 8.500000000 -9.500000000 35.500000000 8.500000000 -8.500000000 35.500000000 8.500000000 -7.500000000 35.500000000 8.500000000 -6.500000000 35.500000000 8.500000000 -5.500000000 35.500000000 8.500000000 -4.500000000 35.500000000 8.500000000 -3.500000000 35.500000000 8.500000000 -2.500000000 35.500000000 8.500000000 -1.500000000 35.500000000 8.500000000 -0.500000000 35.500000000 8.500000000 0.500000000 35.500000000 8.500000000 1.500000000 35.500000000 8.500000000 2.500000000 35.500000000 8.500000000 3.500000000 35.500000000 8.500000000 4.500000000 35.500000000 8.500000000 5.500000000 35.500000000 8.500000000 6.500000000 35.500000000 8.500000000 7.500000000 35.500000000 8.500000000 8.500000000 35.500000000 8.500000000 
9.500000000 35.500000000 8.500000000 10.500000000 35.500000000 8.500000000 11.500000000 35.500000000 8.500000000 12.500000000 35.500000000 8.500000000 13.500000000 35.500000000 8.500000000 14.500000000 35.500000000 8.500000000 15.500000000 35.500000000 8.500000000 16.500000000 35.500000000 8.500000000 17.500000000 35.500000000 8.500000000 18.500000000 35.500000000 8.500000000 19.500000000 35.500000000 8.500000000 20.500000000 35.500000000 8.500000000 21.500000000 35.500000000 8.500000000 22.500000000 35.500000000 8.500000000 23.500000000 35.500000000 8.500000000 24.500000000 35.500000000 8.500000000 25.499996185 35.499996185 8.500000000 26.499954224 35.499954224 8.500000000 27.499591827 35.499591827 8.500000000 28.497470856 35.497474670 8.500000000 29.488407135 35.488403320 8.500000000 30.458978653 35.458980560 8.500000000 31.384418488 35.384422302 8.500000000 32.233222961 35.233222961 8.500000000 32.981101990 34.981101990 8.500000000 -33.981101990 -35.981101990 9.500000000 -33.233226776 -36.233222961 9.500000000 -32.384422302 -36.384418488 9.500000000 -31.458978653 -36.458980560 9.500000000 -30.488407135 -36.488403320 9.500000000 -29.497472763 -36.497474670 9.500000000 -28.499593735 -36.499591827 9.500000000 -27.499954224 -36.499954224 9.500000000 -26.499996185 -36.499996185 9.500000000 -25.500000000 -36.500000000 9.500000000 -24.500000000 -36.500000000 9.500000000 -23.500000000 -36.500000000 9.500000000 -22.500000000 -36.500000000 9.500000000 -21.500000000 -36.500000000 9.500000000 -20.500000000 -36.500000000 9.500000000 -19.500000000 -36.500000000 9.500000000 -18.500000000 -36.500000000 9.500000000 -17.500000000 -36.500000000 9.500000000 -16.500000000 -36.500000000 9.500000000 -15.500000000 -36.500000000 9.500000000 -14.500000000 -36.500000000 9.500000000 -13.500000000 -36.500000000 9.500000000 -12.500000000 -36.500000000 9.500000000 -11.500000000 -36.500000000 9.500000000 -10.500000000 -36.500000000 9.500000000 -9.500000000 -36.500000000 9.500000000 
-8.500000000 -36.500000000 9.500000000 -7.500000000 -36.500000000 9.500000000 -6.500000000 -36.500000000 9.500000000 -5.500000000 -36.500000000 9.500000000 -4.500000000 -36.500000000 9.500000000 -3.500000000 -36.500000000 9.500000000 -2.500000000 -36.500000000 9.500000000 -1.500000000 -36.500000000 9.500000000 -0.500000000 -36.500000000 9.500000000 0.500000000 -36.500000000 9.500000000 1.500000000 -36.500000000 9.500000000 2.500000000 -36.500000000 9.500000000 3.500000000 -36.500000000 9.500000000 4.500000000 -36.500000000 9.500000000 5.500000000 -36.500000000 9.500000000 6.500000000 -36.500000000 9.500000000 7.500000000 -36.500000000 9.500000000 8.500000000 -36.500000000 9.500000000 9.500000000 -36.500000000 9.500000000 10.500000000 -36.500000000 9.500000000 11.500000000 -36.500000000 9.500000000 12.500000000 -36.500000000 9.500000000 13.500000000 -36.500000000 9.500000000 14.500000000 -36.500000000 9.500000000 15.500000000 -36.500000000 9.500000000 16.500000000 -36.500000000 9.500000000 17.500000000 -36.500000000 9.500000000 18.500000000 -36.500000000 9.500000000 19.500000000 -36.500000000 9.500000000 20.500000000 -36.500000000 9.500000000 21.500000000 -36.500000000 9.500000000 22.500000000 -36.500000000 9.500000000 23.500000000 -36.500000000 9.500000000 24.500000000 -36.500000000 9.500000000 25.499996185 -36.499996185 9.500000000 26.499954224 -36.499954224 9.500000000 27.499591827 -36.499591827 9.500000000 28.497470856 -36.497467041 9.500000000 29.488407135 -36.488403320 9.500000000 30.458978653 -36.458980560 9.500000000 31.384418488 -36.384422302 9.500000000 32.233222961 -36.233222961 9.500000000 32.981101990 -35.981101990 9.500000000 -35.167964935 -35.167964935 9.500000000 -34.622871399 -35.622871399 9.500000000 33.622871399 -35.622871399 9.500000000 34.167964935 -35.167964935 9.500000000 -35.981101990 -33.981101990 9.500000000 -35.622871399 -34.622871399 9.500000000 34.622871399 -34.622871399 9.500000000 34.981101990 -33.981101990 9.500000000 -36.233222961 
-33.233222961 9.500000000 35.233222961 -33.233226776 9.500000000 -36.384418488 -32.384422302 9.500000000 35.384418488 -32.384422302 9.500000000 -36.458976746 -31.458978653 9.500000000 35.458980560 -31.458978653 9.500000000 -36.488403320 -30.488407135 9.500000000 35.488403320 -30.488407135 9.500000000 -36.497467041 -29.497472763 9.500000000 35.497474670 -29.497472763 9.500000000 -36.499591827 -28.499593735 9.500000000 35.499591827 -28.499593735 9.500000000 -36.499954224 -27.499954224 9.500000000 35.499954224 -27.499954224 9.500000000 -36.499996185 -26.499996185 9.500000000 35.499996185 -26.499996185 9.500000000 -36.500000000 -25.500000000 9.500000000 35.500000000 -25.500000000 9.500000000 -36.500000000 -24.500000000 9.500000000 35.500000000 -24.500000000 9.500000000 -36.500000000 -23.500000000 9.500000000 35.500000000 -23.500000000 9.500000000 -36.500000000 -22.500000000 9.500000000 35.500000000 -22.500000000 9.500000000 -36.500000000 -21.500000000 9.500000000 35.500000000 -21.500000000 9.500000000 -36.500000000 -20.500000000 9.500000000 35.500000000 -20.500000000 9.500000000 -36.500000000 -19.500000000 9.500000000 35.500000000 -19.500000000 9.500000000 -36.500000000 -18.500000000 9.500000000 35.500000000 -18.500000000 9.500000000 -36.500000000 -17.500000000 9.500000000 35.500000000 -17.500000000 9.500000000 -36.500000000 -16.500000000 9.500000000 35.500000000 -16.500000000 9.500000000 -36.500000000 -15.500000000 9.500000000 35.500000000 -15.500000000 9.500000000 -36.500000000 -14.500000000 9.500000000 35.500000000 -14.500000000 9.500000000 -36.500000000 -13.500000000 9.500000000 35.500000000 -13.500000000 9.500000000 -36.500000000 -12.500000000 9.500000000 35.500000000 -12.500000000 9.500000000 -36.500000000 -11.500000000 9.500000000 35.500000000 -11.500000000 9.500000000 -36.500000000 -10.500000000 9.500000000 35.500000000 -10.500000000 9.500000000 -36.500000000 -9.500000000 9.500000000 35.500000000 -9.500000000 9.500000000 -36.500000000 -8.500000000 9.500000000 
35.500000000 -8.500000000 9.500000000 -36.500000000 -7.500000000 9.500000000 35.500000000 -7.500000000 9.500000000 -36.500000000 -6.500000000 9.500000000 35.500000000 -6.500000000 9.500000000 -36.500000000 -5.500000000 9.500000000 35.500000000 -5.500000000 9.500000000 -36.500000000 -4.500000000 9.500000000 35.500000000 -4.500000000 9.500000000 -36.500000000 -3.500000000 9.500000000 35.500000000 -3.500000000 9.500000000 -36.500000000 -2.500000000 9.500000000 35.500000000 -2.500000000 9.500000000 -36.500000000 -1.500000000 9.500000000 35.500000000 -1.500000000 9.500000000 -36.500000000 -0.500000000 9.500000000 35.500000000 -0.500000000 9.500000000 -36.500000000 0.500000000 9.500000000 35.500000000 0.500000000 9.500000000 -36.500000000 1.500000000 9.500000000 35.500000000 1.500000000 9.500000000 -36.500000000 2.500000000 9.500000000 35.500000000 2.500000000 9.500000000 -36.500000000 3.500000000 9.500000000 35.500000000 3.500000000 9.500000000 -36.500000000 4.500000000 9.500000000 35.500000000 4.500000000 9.500000000 -36.500000000 5.500000000 9.500000000 35.500000000 5.500000000 9.500000000 -36.500000000 6.500000000 9.500000000 35.500000000 6.500000000 9.500000000 -36.500000000 7.500000000 9.500000000 35.500000000 7.500000000 9.500000000 -36.500000000 8.500000000 9.500000000 35.500000000 8.500000000 9.500000000 -36.500000000 9.500000000 9.500000000 35.500000000 9.500000000 9.500000000 -36.500000000 10.500000000 9.500000000 35.500000000 10.500000000 9.500000000 -36.500000000 11.500000000 9.500000000 35.500000000 11.500000000 9.500000000 -36.500000000 12.500000000 9.500000000 35.500000000 12.500000000 9.500000000 -36.500000000 13.500000000 9.500000000 35.500000000 13.500000000 9.500000000 -36.500000000 14.500000000 9.500000000 35.500000000 14.500000000 9.500000000 -36.500000000 15.500000000 9.500000000 35.500000000 15.500000000 9.500000000 -36.500000000 16.500000000 9.500000000 35.500000000 16.500000000 9.500000000 -36.500000000 17.500000000 9.500000000 35.500000000 
17.500000000 9.500000000 -36.500000000 18.500000000 9.500000000 35.500000000 18.500000000 9.500000000 -36.500000000 19.500000000 9.500000000 35.500000000 19.500000000 9.500000000 -36.500000000 20.500000000 9.500000000 35.500000000 20.500000000 9.500000000 -36.500000000 21.500000000 9.500000000 35.500000000 21.500000000 9.500000000 -36.500000000 22.500000000 9.500000000 35.500000000 22.500000000 9.500000000 -36.500000000 23.500000000 9.500000000 35.500000000 23.500000000 9.500000000 -36.500000000 24.500000000 9.500000000 35.500000000 24.500000000 9.500000000 -36.499996185 25.499996185 9.500000000 35.499996185 25.499996185 9.500000000 -36.499954224 26.499954224 9.500000000 35.499954224 26.499954224 9.500000000 -36.499591827 27.499591827 9.500000000 35.499591827 27.499591827 9.500000000 -36.497474670 28.497470856 9.500000000 35.497467041 28.497470856 9.500000000 -36.488403320 29.488407135 9.500000000 35.488403320 29.488407135 9.500000000 -36.458980560 30.458978653 9.500000000 35.458980560 30.458978653 9.500000000 -36.384422302 31.384418488 9.500000000 35.384422302 31.384418488 9.500000000 -36.233222961 32.233222961 9.500000000 35.233222961 32.233222961 9.500000000 -35.981101990 32.981101990 9.500000000 -35.622871399 33.622871399 9.500000000 34.622871399 33.622871399 9.500000000 34.981101990 32.981101990 9.500000000 -35.167964935 34.167964935 9.500000000 -34.622871399 34.622871399 9.500000000 33.622871399 34.622871399 9.500000000 34.167964935 34.167964935 9.500000000 -33.981101990 34.981101990 9.500000000 -33.233222961 35.233222961 9.500000000 -32.384422302 35.384418488 9.500000000 -31.458978653 35.458976746 9.500000000 -30.488407135 35.488403320 9.500000000 -29.497472763 35.497467041 9.500000000 -28.499593735 35.499591827 9.500000000 -27.499954224 35.499954224 9.500000000 -26.499996185 35.499996185 9.500000000 -25.500000000 35.500000000 9.500000000 -24.500000000 35.500000000 9.500000000 -23.500000000 35.500000000 9.500000000 -22.500000000 35.500000000 9.500000000 
-21.500000000 35.500000000 9.500000000 -20.500000000 35.500000000 9.500000000 -19.500000000 35.500000000 9.500000000 -18.500000000 35.500000000 9.500000000 -17.500000000 35.500000000 9.500000000 -16.500000000 35.500000000 9.500000000 -15.500000000 35.500000000 9.500000000 -14.500000000 35.500000000 9.500000000 -13.500000000 35.500000000 9.500000000 -12.500000000 35.500000000 9.500000000 -11.500000000 35.500000000 9.500000000 -10.500000000 35.500000000 9.500000000 -9.500000000 35.500000000 9.500000000 -8.500000000 35.500000000 9.500000000 -7.500000000 35.500000000 9.500000000 -6.500000000 35.500000000 9.500000000 -5.500000000 35.500000000 9.500000000 -4.500000000 35.500000000 9.500000000 -3.500000000 35.500000000 9.500000000 -2.500000000 35.500000000 9.500000000 -1.500000000 35.500000000 9.500000000 -0.500000000 35.500000000 9.500000000 0.500000000 35.500000000 9.500000000 1.500000000 35.500000000 9.500000000 2.500000000 35.500000000 9.500000000 3.500000000 35.500000000 9.500000000 4.500000000 35.500000000 9.500000000 5.500000000 35.500000000 9.500000000 6.500000000 35.500000000 9.500000000 7.500000000 35.500000000 9.500000000 8.500000000 35.500000000 9.500000000 9.500000000 35.500000000 9.500000000 10.500000000 35.500000000 9.500000000 11.500000000 35.500000000 9.500000000 12.500000000 35.500000000 9.500000000 13.500000000 35.500000000 9.500000000 14.500000000 35.500000000 9.500000000 15.500000000 35.500000000 9.500000000 16.500000000 35.500000000 9.500000000 17.500000000 35.500000000 9.500000000 18.500000000 35.500000000 9.500000000 19.500000000 35.500000000 9.500000000 20.500000000 35.500000000 9.500000000 21.500000000 35.500000000 9.500000000 22.500000000 35.500000000 9.500000000 23.500000000 35.500000000 9.500000000 24.500000000 35.500000000 9.500000000 25.499996185 35.499996185 9.500000000 26.499954224 35.499954224 9.500000000 27.499591827 35.499591827 9.500000000 28.497470856 35.497474670 9.500000000 29.488407135 35.488403320 9.500000000 30.458978653 
35.458980560 9.500000000 31.384418488 35.384422302 9.500000000 32.233222961 35.233222961 9.500000000 32.981101990 34.981101990 9.500000000 -33.981101990 -35.981101990 10.500000000 -33.233226776 -36.233222961 10.500000000 -32.384422302 -36.384418488 10.500000000 -31.458978653 -36.458980560 10.500000000 -30.488407135 -36.488403320 10.500000000 -29.497472763 -36.497474670 10.500000000 -28.499593735 -36.499591827 10.500000000 -27.499954224 -36.499954224 10.500000000 -26.499996185 -36.499996185 10.500000000 -25.500000000 -36.500000000 10.500000000 -24.500000000 -36.500000000 10.500000000 -23.500000000 -36.500000000 10.500000000 -22.500000000 -36.500000000 10.500000000 -21.500000000 -36.500000000 10.500000000 -20.500000000 -36.500000000 10.500000000 -19.500000000 -36.500000000 10.500000000 -18.500000000 -36.500000000 10.500000000 -17.500000000 -36.500000000 10.500000000 -16.500000000 -36.500000000 10.500000000 -15.500000000 -36.500000000 10.500000000 -14.500000000 -36.500000000 10.500000000 -13.500000000 -36.500000000 10.500000000 -12.500000000 -36.500000000 10.500000000 -11.500000000 -36.500000000 10.500000000 -10.500000000 -36.500000000 10.500000000 -9.500000000 -36.500000000 10.500000000 -8.500000000 -36.500000000 10.500000000 -7.500000000 -36.500000000 10.500000000 -6.500000000 -36.500000000 10.500000000 -5.500000000 -36.500000000 10.500000000 -4.500000000 -36.500000000 10.500000000 -3.500000000 -36.500000000 10.500000000 -2.500000000 -36.500000000 10.500000000 -1.500000000 -36.500000000 10.500000000 -0.500000000 -36.500000000 10.500000000 0.500000000 -36.500000000 10.500000000 1.500000000 -36.500000000 10.500000000 2.500000000 -36.500000000 10.500000000 3.500000000 -36.500000000 10.500000000 4.500000000 -36.500000000 10.500000000 5.500000000 -36.500000000 10.500000000 6.500000000 -36.500000000 10.500000000 7.500000000 -36.500000000 10.500000000 8.500000000 -36.500000000 10.500000000 9.500000000 -36.500000000 10.500000000 10.500000000 -36.500000000 10.500000000 
11.500000000 -36.500000000 10.500000000 12.500000000 -36.500000000 10.500000000 13.500000000 -36.500000000 10.500000000 14.500000000 -36.500000000 10.500000000 15.500000000 -36.500000000 10.500000000 16.500000000 -36.500000000 10.500000000 17.500000000 -36.500000000 10.500000000 18.500000000 -36.500000000 10.500000000 19.500000000 -36.500000000 10.500000000 20.500000000 -36.500000000 10.500000000 21.500000000 -36.500000000 10.500000000 22.500000000 -36.500000000 10.500000000 23.500000000 -36.500000000 10.500000000 24.500000000 -36.500000000 10.500000000 25.499996185 -36.499996185 10.500000000 26.499954224 -36.499954224 10.500000000 27.499591827 -36.499591827 10.500000000 28.497470856 -36.497467041 10.500000000 29.488407135 -36.488403320 10.500000000 30.458978653 -36.458980560 10.500000000 31.384418488 -36.384422302 10.500000000 32.233222961 -36.233222961 10.500000000 32.981101990 -35.981101990 10.500000000 -35.167964935 -35.167964935 10.500000000 -34.622871399 -35.622871399 10.500000000 33.622871399 -35.622871399 10.500000000 34.167964935 -35.167964935 10.500000000 -35.981101990 -33.981101990 10.500000000 -35.622871399 -34.622871399 10.500000000 34.622871399 -34.622871399 10.500000000 34.981101990 -33.981101990 10.500000000 -36.233222961 -33.233222961 10.500000000 35.233222961 -33.233226776 10.500000000 -36.384418488 -32.384422302 10.500000000 35.384418488 -32.384422302 10.500000000 -36.458976746 -31.458978653 10.500000000 35.458980560 -31.458978653 10.500000000 -36.488403320 -30.488407135 10.500000000 35.488403320 -30.488407135 10.500000000 -36.497467041 -29.497472763 10.500000000 35.497474670 -29.497472763 10.500000000 -36.499591827 -28.499593735 10.500000000 35.499591827 -28.499593735 10.500000000 -36.499954224 -27.499954224 10.500000000 35.499954224 -27.499954224 10.500000000 -36.499996185 -26.499996185 10.500000000 35.499996185 -26.499996185 10.500000000 -36.500000000 -25.500000000 10.500000000 35.500000000 -25.500000000 10.500000000 -36.500000000 
-24.500000000 10.500000000 35.500000000 -24.500000000 10.500000000 -36.500000000 -23.500000000 10.500000000 35.500000000 -23.500000000 10.500000000 -36.500000000 -22.500000000 10.500000000 35.500000000 -22.500000000 10.500000000 -36.500000000 -21.500000000 10.500000000 35.500000000 -21.500000000 10.500000000 -36.500000000 -20.500000000 10.500000000 35.500000000 -20.500000000 10.500000000 -36.500000000 -19.500000000 10.500000000 35.500000000 -19.500000000 10.500000000 -36.500000000 -18.500000000 10.500000000 35.500000000 -18.500000000 10.500000000 -36.500000000 -17.500000000 10.500000000 35.500000000 -17.500000000 10.500000000 -36.500000000 -16.500000000 10.500000000 35.500000000 -16.500000000 10.500000000 -36.500000000 -15.500000000 10.500000000 35.500000000 -15.500000000 10.500000000 -36.500000000 -14.500000000 10.500000000 35.500000000 -14.500000000 10.500000000 -36.500000000 -13.500000000 10.500000000 35.500000000 -13.500000000 10.500000000 -36.500000000 -12.500000000 10.500000000 35.500000000 -12.500000000 10.500000000 -36.500000000 -11.500000000 10.500000000 35.500000000 -11.500000000 10.500000000 -36.500000000 -10.500000000 10.500000000 35.500000000 -10.500000000 10.500000000 -36.500000000 -9.500000000 10.500000000 35.500000000 -9.500000000 10.500000000 -36.500000000 -8.500000000 10.500000000 35.500000000 -8.500000000 10.500000000 -36.500000000 -7.500000000 10.500000000 35.500000000 -7.500000000 10.500000000 -36.500000000 -6.500000000 10.500000000 35.500000000 -6.500000000 10.500000000 -36.500000000 -5.500000000 10.500000000 35.500000000 -5.500000000 10.500000000 -36.500000000 -4.500000000 10.500000000 35.500000000 -4.500000000 10.500000000 -36.500000000 -3.500000000 10.500000000 35.500000000 -3.500000000 10.500000000 -36.500000000 -2.500000000 10.500000000 35.500000000 -2.500000000 10.500000000 -36.500000000 -1.500000000 10.500000000 35.500000000 -1.500000000 10.500000000 -36.500000000 -0.500000000 10.500000000 35.500000000 -0.500000000 10.500000000 
-36.500000000 0.500000000 10.500000000 35.500000000 0.500000000 10.500000000 -36.500000000 1.500000000 10.500000000 35.500000000 1.500000000 10.500000000 -36.500000000 2.500000000 10.500000000 35.500000000 2.500000000 10.500000000 -36.500000000 3.500000000 10.500000000 35.500000000 3.500000000 10.500000000 -36.500000000 4.500000000 10.500000000 35.500000000 4.500000000 10.500000000 -36.500000000 5.500000000 10.500000000 35.500000000 5.500000000 10.500000000 -36.500000000 6.500000000 10.500000000 35.500000000 6.500000000 10.500000000 -36.500000000 7.500000000 10.500000000 35.500000000 7.500000000 10.500000000 -36.500000000 8.500000000 10.500000000 35.500000000 8.500000000 10.500000000 -36.500000000 9.500000000 10.500000000 35.500000000 9.500000000 10.500000000 -36.500000000 10.500000000 10.500000000 35.500000000 10.500000000 10.500000000 -36.500000000 11.500000000 10.500000000 35.500000000 11.500000000 10.500000000 -36.500000000 12.500000000 10.500000000 35.500000000 12.500000000 10.500000000 -36.500000000 13.500000000 10.500000000 35.500000000 13.500000000 10.500000000 -36.500000000 14.500000000 10.500000000 35.500000000 14.500000000 10.500000000 -36.500000000 15.500000000 10.500000000 35.500000000 15.500000000 10.500000000 -36.500000000 16.500000000 10.500000000 35.500000000 16.500000000 10.500000000 -36.500000000 17.500000000 10.500000000 35.500000000 17.500000000 10.500000000 -36.500000000 18.500000000 10.500000000 35.500000000 18.500000000 10.500000000 -36.500000000 19.500000000 10.500000000 35.500000000 19.500000000 10.500000000 -36.500000000 20.500000000 10.500000000 35.500000000 20.500000000 10.500000000 -36.500000000 21.500000000 10.500000000 35.500000000 21.500000000 10.500000000 -36.500000000 22.500000000 10.500000000 35.500000000 22.500000000 10.500000000 -36.500000000 23.500000000 10.500000000 35.500000000 23.500000000 10.500000000 -36.500000000 24.500000000 10.500000000 35.500000000 24.500000000 10.500000000 -36.499996185 25.499996185 10.500000000 
35.499996185 25.499996185 10.500000000 -36.499954224 26.499954224 10.500000000 35.499954224 26.499954224 10.500000000 -36.499591827 27.499591827 10.500000000 35.499591827 27.499591827 10.500000000 -36.497474670 28.497470856 10.500000000 35.497467041 28.497470856 10.500000000 -36.488403320 29.488407135 10.500000000 35.488403320 29.488407135 10.500000000 -36.458980560 30.458978653 10.500000000 35.458980560 30.458978653 10.500000000 -36.384422302 31.384418488 10.500000000 35.384422302 31.384418488 10.500000000 -36.233222961 32.233222961 10.500000000 35.233222961 32.233222961 10.500000000 -35.981101990 32.981101990 10.500000000 -35.622871399 33.622871399 10.500000000 34.622871399 33.622871399 10.500000000 34.981101990 32.981101990 10.500000000 -35.167964935 34.167964935 10.500000000 -34.622871399 34.622871399 10.500000000 33.622871399 34.622871399 10.500000000 34.167964935 34.167964935 10.500000000 -33.981101990 34.981101990 10.500000000 -33.233222961 35.233222961 10.500000000 -32.384422302 35.384418488 10.500000000 -31.458978653 35.458976746 10.500000000 -30.488407135 35.488403320 10.500000000 -29.497472763 35.497467041 10.500000000 -28.499593735 35.499591827 10.500000000 -27.499954224 35.499954224 10.500000000 -26.499996185 35.499996185 10.500000000 -25.500000000 35.500000000 10.500000000 -24.500000000 35.500000000 10.500000000 -23.500000000 35.500000000 10.500000000 -22.500000000 35.500000000 10.500000000 -21.500000000 35.500000000 10.500000000 -20.500000000 35.500000000 10.500000000 -19.500000000 35.500000000 10.500000000 -18.500000000 35.500000000 10.500000000 -17.500000000 35.500000000 10.500000000 -16.500000000 35.500000000 10.500000000 -15.500000000 35.500000000 10.500000000 -14.500000000 35.500000000 10.500000000 -13.500000000 35.500000000 10.500000000 -12.500000000 35.500000000 10.500000000 -11.500000000 35.500000000 10.500000000 -10.500000000 35.500000000 10.500000000 -9.500000000 35.500000000 10.500000000 -8.500000000 35.500000000 10.500000000 -7.500000000 
35.500000000 10.500000000 -6.500000000 35.500000000 10.500000000 -5.500000000 35.500000000 10.500000000 -4.500000000 35.500000000 10.500000000 -3.500000000 35.500000000 10.500000000 -2.500000000 35.500000000 10.500000000 -1.500000000 35.500000000 10.500000000 -0.500000000 35.500000000 10.500000000 0.500000000 35.500000000 10.500000000 1.500000000 35.500000000 10.500000000 2.500000000 35.500000000 10.500000000 3.500000000 35.500000000 10.500000000 4.500000000 35.500000000 10.500000000 5.500000000 35.500000000 10.500000000 6.500000000 35.500000000 10.500000000 7.500000000 35.500000000 10.500000000 8.500000000 35.500000000 10.500000000 9.500000000 35.500000000 10.500000000 10.500000000 35.500000000 10.500000000 11.500000000 35.500000000 10.500000000 12.500000000 35.500000000 10.500000000 13.500000000 35.500000000 10.500000000 14.500000000 35.500000000 10.500000000 15.500000000 35.500000000 10.500000000 16.500000000 35.500000000 10.500000000 17.500000000 35.500000000 10.500000000 18.500000000 35.500000000 10.500000000 19.500000000 35.500000000 10.500000000 20.500000000 35.500000000 10.500000000 21.500000000 35.500000000 10.500000000 22.500000000 35.500000000 10.500000000 23.500000000 35.500000000 10.500000000 24.500000000 35.500000000 10.500000000 25.499996185 35.499996185 10.500000000 26.499954224 35.499954224 10.500000000 27.499591827 35.499591827 10.500000000 28.497470856 35.497474670 10.500000000 29.488407135 35.488403320 10.500000000 30.458978653 35.458980560 10.500000000 31.384418488 35.384422302 10.500000000 32.233222961 35.233222961 10.500000000 32.981101990 34.981101990 10.500000000 -33.981101990 -35.981101990 11.500000000 -33.233226776 -36.233222961 11.500000000 -32.384422302 -36.384418488 11.500000000 -31.458978653 -36.458980560 11.500000000 -30.488407135 -36.488403320 11.500000000 -29.497472763 -36.497474670 11.500000000 -28.499593735 -36.499591827 11.500000000 -27.499954224 -36.499954224 11.500000000 -26.499996185 -36.499996185 11.500000000 -25.500000000 
-36.500000000 11.500000000 -24.500000000 -36.500000000 11.500000000 -23.500000000 -36.500000000 11.500000000 -22.500000000 -36.500000000 11.500000000 -21.500000000 -36.500000000 11.500000000 -20.500000000 -36.500000000 11.500000000 -19.500000000 -36.500000000 11.500000000 -18.500000000 -36.500000000 11.500000000 -17.500000000 -36.500000000 11.500000000 -16.500000000 -36.500000000 11.500000000 -15.500000000 -36.500000000 11.500000000 -14.500000000 -36.500000000 11.500000000 -13.500000000 -36.500000000 11.500000000 -12.500000000 -36.500000000 11.500000000 -11.500000000 -36.500000000 11.500000000 -10.500000000 -36.500000000 11.500000000 -9.500000000 -36.500000000 11.500000000 -8.500000000 -36.500000000 11.500000000 -7.500000000 -36.500000000 11.500000000 -6.500000000 -36.500000000 11.500000000 -5.500000000 -36.500000000 11.500000000 -4.500000000 -36.500000000 11.500000000 -3.500000000 -36.500000000 11.500000000 -2.500000000 -36.500000000 11.500000000 -1.500000000 -36.500000000 11.500000000 -0.500000000 -36.500000000 11.500000000 0.500000000 -36.500000000 11.500000000 1.500000000 -36.500000000 11.500000000 2.500000000 -36.500000000 11.500000000 3.500000000 -36.500000000 11.500000000 4.500000000 -36.500000000 11.500000000 5.500000000 -36.500000000 11.500000000 6.500000000 -36.500000000 11.500000000 7.500000000 -36.500000000 11.500000000 8.500000000 -36.500000000 11.500000000 9.500000000 -36.500000000 11.500000000 10.500000000 -36.500000000 11.500000000 11.500000000 -36.500000000 11.500000000 12.500000000 -36.500000000 11.500000000 13.500000000 -36.500000000 11.500000000 14.500000000 -36.500000000 11.500000000 15.500000000 -36.500000000 11.500000000 16.500000000 -36.500000000 11.500000000 17.500000000 -36.500000000 11.500000000 18.500000000 -36.500000000 11.500000000 19.500000000 -36.500000000 11.500000000 20.500000000 -36.500000000 11.500000000 21.500000000 -36.500000000 11.500000000 22.500000000 -36.500000000 11.500000000 23.500000000 -36.500000000 11.500000000 
24.500000000 -36.500000000 11.500000000 25.499996185 -36.499996185 11.500000000 26.499954224 -36.499954224 11.500000000 27.499591827 -36.499591827 11.500000000 28.497470856 -36.497467041 11.500000000 29.488407135 -36.488403320 11.500000000 30.458978653 -36.458980560 11.500000000 31.384418488 -36.384422302 11.500000000 32.233222961 -36.233222961 11.500000000 32.981101990 -35.981101990 11.500000000 -35.167964935 -35.167964935 11.500000000 -34.622871399 -35.622871399 11.500000000 33.622871399 -35.622871399 11.500000000 34.167964935 -35.167964935 11.500000000 -35.981101990 -33.981101990 11.500000000 -35.622871399 -34.622871399 11.500000000 34.622871399 -34.622871399 11.500000000 34.981101990 -33.981101990 11.500000000 -36.233222961 -33.233222961 11.500000000 35.233222961 -33.233226776 11.500000000 -36.384418488 -32.384422302 11.500000000 35.384418488 -32.384422302 11.500000000 -36.458976746 -31.458978653 11.500000000 35.458980560 -31.458978653 11.500000000 -36.488403320 -30.488407135 11.500000000 35.488403320 -30.488407135 11.500000000 -36.497467041 -29.497472763 11.500000000 35.497474670 -29.497472763 11.500000000 -36.499591827 -28.499593735 11.500000000 35.499591827 -28.499593735 11.500000000 -36.499954224 -27.499954224 11.500000000 35.499954224 -27.499954224 11.500000000 -36.499996185 -26.499996185 11.500000000 35.499996185 -26.499996185 11.500000000 -36.500000000 -25.500000000 11.500000000 35.500000000 -25.500000000 11.500000000 -36.500000000 -24.500000000 11.500000000 35.500000000 -24.500000000 11.500000000 -36.500000000 -23.500000000 11.500000000 35.500000000 -23.500000000 11.500000000 -36.500000000 -22.500000000 11.500000000 35.500000000 -22.500000000 11.500000000 -36.500000000 -21.500000000 11.500000000 35.500000000 -21.500000000 11.500000000 -36.500000000 -20.500000000 11.500000000 35.500000000 -20.500000000 11.500000000 -36.500000000 -19.500000000 11.500000000 35.500000000 -19.500000000 11.500000000 -36.500000000 -18.500000000 11.500000000 35.500000000 
-18.500000000 11.500000000 -36.500000000 -17.500000000 11.500000000 35.500000000 -17.500000000 11.500000000 -36.500000000 -16.500000000 11.500000000 35.500000000 -16.500000000 11.500000000 -36.500000000 -15.500000000 11.500000000 35.500000000 -15.500000000 11.500000000 -36.500000000 -14.500000000 11.500000000 35.500000000 -14.500000000 11.500000000 -36.500000000 -13.500000000 11.500000000 35.500000000 -13.500000000 11.500000000 -36.500000000 -12.500000000 11.500000000 35.500000000 -12.500000000 11.500000000 -36.500000000 -11.500000000 11.500000000 35.500000000 -11.500000000 11.500000000 -36.500000000 -10.500000000 11.500000000 35.500000000 -10.500000000 11.500000000 -36.500000000 -9.500000000 11.500000000 35.500000000 -9.500000000 11.500000000 -36.500000000 -8.500000000 11.500000000 35.500000000 -8.500000000 11.500000000 -36.500000000 -7.500000000 11.500000000 35.500000000 -7.500000000 11.500000000 -36.500000000 -6.500000000 11.500000000 35.500000000 -6.500000000 11.500000000 -36.500000000 -5.500000000 11.500000000 35.500000000 -5.500000000 11.500000000 -36.500000000 -4.500000000 11.500000000 35.500000000 -4.500000000 11.500000000 -36.500000000 -3.500000000 11.500000000 35.500000000 -3.500000000 11.500000000 -36.500000000 -2.500000000 11.500000000 35.500000000 -2.500000000 11.500000000 -36.500000000 -1.500000000 11.500000000 35.500000000 -1.500000000 11.500000000 -36.500000000 -0.500000000 11.500000000 35.500000000 -0.500000000 11.500000000 -36.500000000 0.500000000 11.500000000 35.500000000 0.500000000 11.500000000 -36.500000000 1.500000000 11.500000000 35.500000000 1.500000000 11.500000000 -36.500000000 2.500000000 11.500000000 35.500000000 2.500000000 11.500000000 -36.500000000 3.500000000 11.500000000 35.500000000 3.500000000 11.500000000 -36.500000000 4.500000000 11.500000000 35.500000000 4.500000000 11.500000000 -36.500000000 5.500000000 11.500000000 35.500000000 5.500000000 11.500000000 -36.500000000 6.500000000 11.500000000 35.500000000 6.500000000 
11.500000000 -36.500000000 7.500000000 11.500000000 35.500000000 7.500000000 11.500000000 -36.500000000 8.500000000 11.500000000 35.500000000 8.500000000 11.500000000 -36.500000000 9.500000000 11.500000000 35.500000000 9.500000000 11.500000000 -36.500000000 10.500000000 11.500000000 35.500000000 10.500000000 11.500000000 -36.500000000 11.500000000 11.500000000 35.500000000 11.500000000 11.500000000 -36.500000000 12.500000000 11.500000000 35.500000000 12.500000000 11.500000000 -36.500000000 13.500000000 11.500000000 35.500000000 13.500000000 11.500000000 -36.500000000 14.500000000 11.500000000 35.500000000 14.500000000 11.500000000 -36.500000000 15.500000000 11.500000000 35.500000000 15.500000000 11.500000000 -36.500000000 16.500000000 11.500000000 35.500000000 16.500000000 11.500000000 -36.500000000 17.500000000 11.500000000 35.500000000 17.500000000 11.500000000 -36.500000000 18.500000000 11.500000000 35.500000000 18.500000000 11.500000000 -36.500000000 19.500000000 11.500000000 35.500000000 19.500000000 11.500000000 -36.500000000 20.500000000 11.500000000 35.500000000 20.500000000 11.500000000 -36.500000000 21.500000000 11.500000000 35.500000000 21.500000000 11.500000000 -36.500000000 22.500000000 11.500000000 35.500000000 22.500000000 11.500000000 -36.500000000 23.500000000 11.500000000 35.500000000 23.500000000 11.500000000 -36.500000000 24.500000000 11.500000000 35.500000000 24.500000000 11.500000000 -36.499996185 25.499996185 11.500000000 35.499996185 25.499996185 11.500000000 -36.499954224 26.499954224 11.500000000 35.499954224 26.499954224 11.500000000 -36.499591827 27.499591827 11.500000000 35.499591827 27.499591827 11.500000000 -36.497474670 28.497470856 11.500000000 35.497467041 28.497470856 11.500000000 -36.488403320 29.488407135 11.500000000 35.488403320 29.488407135 11.500000000 -36.458980560 30.458978653 11.500000000 35.458980560 30.458978653 11.500000000 -36.384422302 31.384418488 11.500000000 35.384422302 31.384418488 11.500000000 -36.233222961 
32.233222961 11.500000000 35.233222961 32.233222961 11.500000000 -35.981101990 32.981101990 11.500000000 -35.622871399 33.622871399 11.500000000 34.622871399 33.622871399 11.500000000 34.981101990 32.981101990 11.500000000 -35.167964935 34.167964935 11.500000000 -34.622871399 34.622871399 11.500000000 33.622871399 34.622871399 11.500000000 34.167964935 34.167964935 11.500000000 -33.981101990 34.981101990 11.500000000 -33.233222961 35.233222961 11.500000000 -32.384422302 35.384418488 11.500000000 -31.458978653 35.458976746 11.500000000 -30.488407135 35.488403320 11.500000000 -29.497472763 35.497467041 11.500000000 -28.499593735 35.499591827 11.500000000 -27.499954224 35.499954224 11.500000000 -26.499996185 35.499996185 11.500000000 -25.500000000 35.500000000 11.500000000 -24.500000000 35.500000000 11.500000000 -23.500000000 35.500000000 11.500000000 -22.500000000 35.500000000 11.500000000 -21.500000000 35.500000000 11.500000000 -20.500000000 35.500000000 11.500000000 -19.500000000 35.500000000 11.500000000 -18.500000000 35.500000000 11.500000000 -17.500000000 35.500000000 11.500000000 -16.500000000 35.500000000 11.500000000 -15.500000000 35.500000000 11.500000000 -14.500000000 35.500000000 11.500000000 -13.500000000 35.500000000 11.500000000 -12.500000000 35.500000000 11.500000000 -11.500000000 35.500000000 11.500000000 -10.500000000 35.500000000 11.500000000 -9.500000000 35.500000000 11.500000000 -8.500000000 35.500000000 11.500000000 -7.500000000 35.500000000 11.500000000 -6.500000000 35.500000000 11.500000000 -5.500000000 35.500000000 11.500000000 -4.500000000 35.500000000 11.500000000 -3.500000000 35.500000000 11.500000000 -2.500000000 35.500000000 11.500000000 -1.500000000 35.500000000 11.500000000 -0.500000000 35.500000000 11.500000000 0.500000000 35.500000000 11.500000000 1.500000000 35.500000000 11.500000000 2.500000000 35.500000000 11.500000000 3.500000000 35.500000000 11.500000000 4.500000000 35.500000000 11.500000000 5.500000000 35.500000000 11.500000000 
6.500000000 35.500000000 11.500000000 7.500000000 35.500000000 11.500000000 8.500000000 35.500000000 11.500000000 9.500000000 35.500000000 11.500000000 10.500000000 35.500000000 11.500000000 11.500000000 35.500000000 11.500000000 12.500000000 35.500000000 11.500000000 13.500000000 35.500000000 11.500000000 14.500000000 35.500000000 11.500000000 15.500000000 35.500000000 11.500000000 16.500000000 35.500000000 11.500000000 17.500000000 35.500000000 11.500000000 18.500000000 35.500000000 11.500000000 19.500000000 35.500000000 11.500000000 20.500000000 35.500000000 11.500000000 21.500000000 35.500000000 11.500000000 22.500000000 35.500000000 11.500000000 23.500000000 35.500000000 11.500000000 24.500000000 35.500000000 11.500000000 25.499996185 35.499996185 11.500000000 26.499954224 35.499954224 11.500000000 27.499591827 35.499591827 11.500000000 28.497470856 35.497474670 11.500000000 29.488407135 35.488403320 11.500000000 30.458978653 35.458980560 11.500000000 31.384418488 35.384422302 11.500000000 32.233222961 35.233222961 11.500000000 32.981101990 34.981101990 11.500000000 -33.981101990 -35.981101990 12.500000000 -33.233226776 -36.233222961 12.500000000 -32.384422302 -36.384418488 12.500000000 -31.458978653 -36.458980560 12.500000000 -30.488407135 -36.488403320 12.500000000 -29.497472763 -36.497474670 12.500000000 -28.499593735 -36.499591827 12.500000000 -27.499954224 -36.499954224 12.500000000 -26.499996185 -36.499996185 12.500000000 -25.500000000 -36.500000000 12.500000000 -24.500000000 -36.500000000 12.500000000 -23.500000000 -36.500000000 12.500000000 -22.500000000 -36.500000000 12.500000000 -21.500000000 -36.500000000 12.500000000 -20.500000000 -36.500000000 12.500000000 -19.500000000 -36.500000000 12.500000000 -18.500000000 -36.500000000 12.500000000 -17.500000000 -36.500000000 12.500000000 -16.500000000 -36.500000000 12.500000000 -15.500000000 -36.500000000 12.500000000 -14.500000000 -36.500000000 12.500000000 -13.500000000 -36.500000000 12.500000000 
-12.500000000 -36.500000000 12.500000000 -11.500000000 -36.500000000 12.500000000 -10.500000000 -36.500000000 12.500000000 -9.500000000 -36.500000000 12.500000000 -8.500000000 -36.500000000 12.500000000 -7.500000000 -36.500000000 12.500000000 -6.500000000 -36.500000000 12.500000000 -5.500000000 -36.500000000 12.500000000 -4.500000000 -36.500000000 12.500000000 -3.500000000 -36.500000000 12.500000000 -2.500000000 -36.500000000 12.500000000 -1.500000000 -36.500000000 12.500000000 -0.500000000 -36.500000000 12.500000000 0.500000000 -36.500000000 12.500000000 1.500000000 -36.500000000 12.500000000 2.500000000 -36.500000000 12.500000000 3.500000000 -36.500000000 12.500000000 4.500000000 -36.500000000 12.500000000 5.500000000 -36.500000000 12.500000000 6.500000000 -36.500000000 12.500000000 7.500000000 -36.500000000 12.500000000 8.500000000 -36.500000000 12.500000000 9.500000000 -36.500000000 12.500000000 10.500000000 -36.500000000 12.500000000 11.500000000 -36.500000000 12.500000000 12.500000000 -36.500000000 12.500000000 13.500000000 -36.500000000 12.500000000 14.500000000 -36.500000000 12.500000000 15.500000000 -36.500000000 12.500000000 16.500000000 -36.500000000 12.500000000 17.500000000 -36.500000000 12.500000000 18.500000000 -36.500000000 12.500000000 19.500000000 -36.500000000 12.500000000 20.500000000 -36.500000000 12.500000000 21.500000000 -36.500000000 12.500000000 22.500000000 -36.500000000 12.500000000 23.500000000 -36.500000000 12.500000000 24.500000000 -36.500000000 12.500000000 25.499996185 -36.499996185 12.500000000 26.499954224 -36.499954224 12.500000000 27.499591827 -36.499591827 12.500000000 28.497470856 -36.497467041 12.500000000 29.488407135 -36.488403320 12.500000000 30.458978653 -36.458980560 12.500000000 31.384418488 -36.384422302 12.500000000 32.233222961 -36.233222961 12.500000000 32.981101990 -35.981101990 12.500000000 -35.167964935 -35.167964935 12.500000000 -34.622871399 -35.622871399 12.500000000 33.622871399 -35.622871399 12.500000000 
34.167964935 -35.167964935 12.500000000 -35.981101990 -33.981101990 12.500000000 -35.622871399 -34.622871399 12.500000000 34.622871399 -34.622871399 12.500000000 34.981101990 -33.981101990 12.500000000 -36.233222961 -33.233222961 12.500000000 35.233222961 -33.233226776 12.500000000 -36.384418488 -32.384422302 12.500000000 35.384418488 -32.384422302 12.500000000 -36.458976746 -31.458978653 12.500000000 35.458980560 -31.458978653 12.500000000 -36.488403320 -30.488407135 12.500000000 35.488403320 -30.488407135 12.500000000 -36.497467041 -29.497472763 12.500000000 35.497474670 -29.497472763 12.500000000 -36.499591827 -28.499593735 12.500000000 35.499591827 -28.499593735 12.500000000 -36.499954224 -27.499954224 12.500000000 35.499954224 -27.499954224 12.500000000 -36.499996185 -26.499996185 12.500000000 35.499996185 -26.499996185 12.500000000 -36.500000000 -25.500000000 12.500000000 35.500000000 -25.500000000 12.500000000 -36.500000000 -24.500000000 12.500000000 35.500000000 -24.500000000 12.500000000 -36.500000000 -23.500000000 12.500000000 35.500000000 -23.500000000 12.500000000 -36.500000000 -22.500000000 12.500000000 35.500000000 -22.500000000 12.500000000 -36.500000000 -21.500000000 12.500000000 35.500000000 -21.500000000 12.500000000 -36.500000000 -20.500000000 12.500000000 35.500000000 -20.500000000 12.500000000 -36.500000000 -19.500000000 12.500000000 35.500000000 -19.500000000 12.500000000 -36.500000000 -18.500000000 12.500000000 35.500000000 -18.500000000 12.500000000 -36.500000000 -17.500000000 12.500000000 35.500000000 -17.500000000 12.500000000 -36.500000000 -16.500000000 12.500000000 35.500000000 -16.500000000 12.500000000 -36.500000000 -15.500000000 12.500000000 35.500000000 -15.500000000 12.500000000 -36.500000000 -14.500000000 12.500000000 35.500000000 -14.500000000 12.500000000 -36.500000000 -13.500000000 12.500000000 35.500000000 -13.500000000 12.500000000 -36.500000000 -12.500000000 12.500000000 35.500000000 -12.500000000 12.500000000 -36.500000000 
-11.500000000 12.500000000 35.500000000 -11.500000000 12.500000000 -36.500000000 -10.500000000 12.500000000 35.500000000 -10.500000000 12.500000000 -36.500000000 -9.500000000 12.500000000 35.500000000 -9.500000000 12.500000000 -36.500000000 -8.500000000 12.500000000 35.500000000 -8.500000000 12.500000000 -36.500000000 -7.500000000 12.500000000 35.500000000 -7.500000000 12.500000000 -36.500000000 -6.500000000 12.500000000 35.500000000 -6.500000000 12.500000000 -36.500000000 -5.500000000 12.500000000 35.500000000 -5.500000000 12.500000000 -36.500000000 -4.500000000 12.500000000 35.500000000 -4.500000000 12.500000000 -36.500000000 -3.500000000 12.500000000 35.500000000 -3.500000000 12.500000000 -36.500000000 -2.500000000 12.500000000 35.500000000 -2.500000000 12.500000000 -36.500000000 -1.500000000 12.500000000 35.500000000 -1.500000000 12.500000000 -36.500000000 -0.500000000 12.500000000 35.500000000 -0.500000000 12.500000000 -36.500000000 0.500000000 12.500000000 35.500000000 0.500000000 12.500000000 -36.500000000 1.500000000 12.500000000 35.500000000 1.500000000 12.500000000 -36.500000000 2.500000000 12.500000000 35.500000000 2.500000000 12.500000000 -36.500000000 3.500000000 12.500000000 35.500000000 3.500000000 12.500000000 -36.500000000 4.500000000 12.500000000 35.500000000 4.500000000 12.500000000 -36.500000000 5.500000000 12.500000000 35.500000000 5.500000000 12.500000000 -36.500000000 6.500000000 12.500000000 35.500000000 6.500000000 12.500000000 -36.500000000 7.500000000 12.500000000 35.500000000 7.500000000 12.500000000 -36.500000000 8.500000000 12.500000000 35.500000000 8.500000000 12.500000000 -36.500000000 9.500000000 12.500000000 35.500000000 9.500000000 12.500000000 -36.500000000 10.500000000 12.500000000 35.500000000 10.500000000 12.500000000 -36.500000000 11.500000000 12.500000000 35.500000000 11.500000000 12.500000000 -36.500000000 12.500000000 12.500000000 35.500000000 12.500000000 12.500000000 -36.500000000 13.500000000 12.500000000 35.500000000 
13.500000000 12.500000000 -36.500000000 14.500000000 12.500000000 35.500000000 14.500000000 12.500000000 -36.500000000 15.500000000 12.500000000 35.500000000 15.500000000 12.500000000 -36.500000000 16.500000000 12.500000000 35.500000000 16.500000000 12.500000000 -36.500000000 17.500000000 12.500000000 35.500000000 17.500000000 12.500000000 -36.500000000 18.500000000 12.500000000 35.500000000 18.500000000 12.500000000 -36.500000000 19.500000000 12.500000000 35.500000000 19.500000000 12.500000000 -36.500000000 20.500000000 12.500000000 35.500000000 20.500000000 12.500000000 -36.500000000 21.500000000 12.500000000 35.500000000 21.500000000 12.500000000 -36.500000000 22.500000000 12.500000000 35.500000000 22.500000000 12.500000000 -36.500000000 23.500000000 12.500000000 35.500000000 23.500000000 12.500000000 -36.500000000 24.500000000 12.500000000 35.500000000 24.500000000 12.500000000 -36.499996185 25.499996185 12.500000000 35.499996185 25.499996185 12.500000000 -36.499954224 26.499954224 12.500000000 35.499954224 26.499954224 12.500000000 -36.499591827 27.499591827 12.500000000 35.499591827 27.499591827 12.500000000 -36.497474670 28.497470856 12.500000000 35.497467041 28.497470856 12.500000000 -36.488403320 29.488407135 12.500000000 35.488403320 29.488407135 12.500000000 -36.458980560 30.458978653 12.500000000 35.458980560 30.458978653 12.500000000 -36.384422302 31.384418488 12.500000000 35.384422302 31.384418488 12.500000000 -36.233222961 32.233222961 12.500000000 35.233222961 32.233222961 12.500000000 -35.981101990 32.981101990 12.500000000 -35.622871399 33.622871399 12.500000000 34.622871399 33.622871399 12.500000000 34.981101990 32.981101990 12.500000000 -35.167964935 34.167964935 12.500000000 -34.622871399 34.622871399 12.500000000 33.622871399 34.622871399 12.500000000 34.167964935 34.167964935 12.500000000 -33.981101990 34.981101990 12.500000000 -33.233222961 35.233222961 12.500000000 -32.384422302 35.384418488 12.500000000 -31.458978653 35.458976746 
12.500000000 -30.488407135 35.488403320 12.500000000 -29.497472763 35.497467041 12.500000000 -28.499593735 35.499591827 12.500000000 -27.499954224 35.499954224 12.500000000 -26.499996185 35.499996185 12.500000000 -25.500000000 35.500000000 12.500000000 -24.500000000 35.500000000 12.500000000 -23.500000000 35.500000000 12.500000000 -22.500000000 35.500000000 12.500000000 -21.500000000 35.500000000 12.500000000 -20.500000000 35.500000000 12.500000000 -19.500000000 35.500000000 12.500000000 -18.500000000 35.500000000 12.500000000 -17.500000000 35.500000000 12.500000000 -16.500000000 35.500000000 12.500000000 -15.500000000 35.500000000 12.500000000 -14.500000000 35.500000000 12.500000000 -13.500000000 35.500000000 12.500000000 -12.500000000 35.500000000 12.500000000 -11.500000000 35.500000000 12.500000000 -10.500000000 35.500000000 12.500000000 -9.500000000 35.500000000 12.500000000 -8.500000000 35.500000000 12.500000000 -7.500000000 35.500000000 12.500000000 -6.500000000 35.500000000 12.500000000 -5.500000000 35.500000000 12.500000000 -4.500000000 35.500000000 12.500000000 -3.500000000 35.500000000 12.500000000 -2.500000000 35.500000000 12.500000000 -1.500000000 35.500000000 12.500000000 -0.500000000 35.500000000 12.500000000 0.500000000 35.500000000 12.500000000 1.500000000 35.500000000 12.500000000 2.500000000 35.500000000 12.500000000 3.500000000 35.500000000 12.500000000 4.500000000 35.500000000 12.500000000 5.500000000 35.500000000 12.500000000 6.500000000 35.500000000 12.500000000 7.500000000 35.500000000 12.500000000 8.500000000 35.500000000 12.500000000 9.500000000 35.500000000 12.500000000 10.500000000 35.500000000 12.500000000 11.500000000 35.500000000 12.500000000 12.500000000 35.500000000 12.500000000 13.500000000 35.500000000 12.500000000 14.500000000 35.500000000 12.500000000 15.500000000 35.500000000 12.500000000 16.500000000 35.500000000 12.500000000 17.500000000 35.500000000 12.500000000 18.500000000 35.500000000 12.500000000 19.500000000 35.500000000 
12.500000000 20.500000000 35.500000000 12.500000000 21.500000000 35.500000000 12.500000000 22.500000000 35.500000000 12.500000000 23.500000000 35.500000000 12.500000000 24.500000000 35.500000000 12.500000000 25.499996185 35.499996185 12.500000000 26.499954224 35.499954224 12.500000000 27.499591827 35.499591827 12.500000000 28.497470856 35.497474670 12.500000000 29.488407135 35.488403320 12.500000000 30.458978653 35.458980560 12.500000000 31.384418488 35.384422302 12.500000000 32.233222961 35.233222961 12.500000000 32.981101990 34.981101990 12.500000000 -33.981101990 -35.981101990 13.500000000 -33.233226776 -36.233222961 13.500000000 -32.384422302 -36.384418488 13.500000000 -31.458978653 -36.458980560 13.500000000 -30.488407135 -36.488403320 13.500000000 -29.497472763 -36.497474670 13.500000000 -28.499593735 -36.499591827 13.500000000 -27.499954224 -36.499954224 13.500000000 -26.499996185 -36.499996185 13.500000000 -25.500000000 -36.500000000 13.500000000 -24.500000000 -36.500000000 13.500000000 -23.500000000 -36.500000000 13.500000000 -22.500000000 -36.500000000 13.500000000 -21.500000000 -36.500000000 13.500000000 -20.500000000 -36.500000000 13.500000000 -19.500000000 -36.500000000 13.500000000 -18.500000000 -36.500000000 13.500000000 -17.500000000 -36.500000000 13.500000000 -16.500000000 -36.500000000 13.500000000 -15.500000000 -36.500000000 13.500000000 -14.500000000 -36.500000000 13.500000000 -13.500000000 -36.500000000 13.500000000 -12.500000000 -36.500000000 13.500000000 -11.500000000 -36.500000000 13.500000000 -10.500000000 -36.500000000 13.500000000 -9.500000000 -36.500000000 13.500000000 -8.500000000 -36.500000000 13.500000000 -7.500000000 -36.500000000 13.500000000 -6.500000000 -36.500000000 13.500000000 -5.500000000 -36.500000000 13.500000000 -4.500000000 -36.500000000 13.500000000 -3.500000000 -36.500000000 13.500000000 -2.500000000 -36.500000000 13.500000000 -1.500000000 -36.500000000 13.500000000 -0.500000000 -36.500000000 13.500000000 0.500000000 
-36.500000000 13.500000000 1.500000000 -36.500000000 13.500000000 2.500000000 -36.500000000 13.500000000 3.500000000 -36.500000000 13.500000000 4.500000000 -36.500000000 13.500000000 5.500000000 -36.500000000 13.500000000 6.500000000 -36.500000000 13.500000000 7.500000000 -36.500000000 13.500000000 8.500000000 -36.500000000 13.500000000 9.500000000 -36.500000000 13.500000000 10.500000000 -36.500000000 13.500000000 11.500000000 -36.500000000 13.500000000 12.500000000 -36.500000000 13.500000000 13.500000000 -36.500000000 13.500000000 14.500000000 -36.500000000 13.500000000 15.500000000 -36.500000000 13.500000000 16.500000000 -36.500000000 13.500000000 17.500000000 -36.500000000 13.500000000 18.500000000 -36.500000000 13.500000000 19.500000000 -36.500000000 13.500000000 20.500000000 -36.500000000 13.500000000 21.500000000 -36.500000000 13.500000000 22.500000000 -36.500000000 13.500000000 23.500000000 -36.500000000 13.500000000 24.500000000 -36.500000000 13.500000000 25.499996185 -36.499996185 13.500000000 26.499954224 -36.499954224 13.500000000 27.499591827 -36.499591827 13.500000000 28.497470856 -36.497467041 13.500000000 29.488407135 -36.488403320 13.500000000 30.458978653 -36.458980560 13.500000000 31.384418488 -36.384422302 13.500000000 32.233222961 -36.233222961 13.500000000 32.981101990 -35.981101990 13.500000000 -35.167964935 -35.167964935 13.500000000 -34.622871399 -35.622871399 13.500000000 33.622871399 -35.622871399 13.500000000 34.167964935 -35.167964935 13.500000000 -35.981101990 -33.981101990 13.500000000 -35.622871399 -34.622871399 13.500000000 34.622871399 -34.622871399 13.500000000 34.981101990 -33.981101990 13.500000000 -36.233222961 -33.233222961 13.500000000 35.233222961 -33.233226776 13.500000000 -36.384418488 -32.384422302 13.500000000 35.384418488 -32.384422302 13.500000000 -36.458976746 -31.458978653 13.500000000 35.458980560 -31.458978653 13.500000000 -36.488403320 -30.488407135 13.500000000 35.488403320 -30.488407135 13.500000000 -36.497467041 
-29.497472763 13.500000000 35.497474670 -29.497472763 13.500000000 -36.499591827 -28.499593735 13.500000000 35.499591827 -28.499593735 13.500000000 -36.499954224 -27.499954224 13.500000000 35.499954224 -27.499954224 13.500000000 -36.499996185 -26.499996185 13.500000000 35.499996185 -26.499996185 13.500000000 -36.500000000 -25.500000000 13.500000000 35.500000000 -25.500000000 13.500000000 -36.500000000 -24.500000000 13.500000000 35.500000000 -24.500000000 13.500000000 -36.500000000 -23.500000000 13.500000000 35.500000000 -23.500000000 13.500000000 -36.500000000 -22.500000000 13.500000000 35.500000000 -22.500000000 13.500000000 -36.500000000 -21.500000000 13.500000000 35.500000000 -21.500000000 13.500000000 -36.500000000 -20.500000000 13.500000000 35.500000000 -20.500000000 13.500000000 -36.500000000 -19.500000000 13.500000000 35.500000000 -19.500000000 13.500000000 -36.500000000 -18.500000000 13.500000000 35.500000000 -18.500000000 13.500000000 -36.500000000 -17.500000000 13.500000000 35.500000000 -17.500000000 13.500000000 -36.500000000 -16.500000000 13.500000000 35.500000000 -16.500000000 13.500000000 -36.500000000 -15.500000000 13.500000000 35.500000000 -15.500000000 13.500000000 -36.500000000 -14.500000000 13.500000000 35.500000000 -14.500000000 13.500000000 -36.500000000 -13.500000000 13.500000000 35.500000000 -13.500000000 13.500000000 -36.500000000 -12.500000000 13.500000000 35.500000000 -12.500000000 13.500000000 -36.500000000 -11.500000000 13.500000000 35.500000000 -11.500000000 13.500000000 -36.500000000 -10.500000000 13.500000000 35.500000000 -10.500000000 13.500000000 -36.500000000 -9.500000000 13.500000000 35.500000000 -9.500000000 13.500000000 -36.500000000 -8.500000000 13.500000000 35.500000000 -8.500000000 13.500000000 -36.500000000 -7.500000000 13.500000000 35.500000000 -7.500000000 13.500000000 -36.500000000 -6.500000000 13.500000000 35.500000000 -6.500000000 13.500000000 -36.500000000 -5.500000000 13.500000000 35.500000000 -5.500000000 
13.500000000 -36.500000000 -4.500000000 13.500000000 35.500000000 -4.500000000 13.500000000 -36.500000000 -3.500000000 13.500000000 35.500000000 -3.500000000 13.500000000 -36.500000000 -2.500000000 13.500000000 35.500000000 -2.500000000 13.500000000 -36.500000000 -1.500000000 13.500000000 35.500000000 -1.500000000 13.500000000 -36.500000000 -0.500000000 13.500000000 35.500000000 -0.500000000 13.500000000 -36.500000000 0.500000000 13.500000000 35.500000000 0.500000000 13.500000000 -36.500000000 1.500000000 13.500000000 35.500000000 1.500000000 13.500000000 -36.500000000 2.500000000 13.500000000 35.500000000 2.500000000 13.500000000 -36.500000000 3.500000000 13.500000000 35.500000000 3.500000000 13.500000000 -36.500000000 4.500000000 13.500000000 35.500000000 4.500000000 13.500000000 -36.500000000 5.500000000 13.500000000 35.500000000 5.500000000 13.500000000 -36.500000000 6.500000000 13.500000000 35.500000000 6.500000000 13.500000000 -36.500000000 7.500000000 13.500000000 35.500000000 7.500000000 13.500000000 -36.500000000 8.500000000 13.500000000 35.500000000 8.500000000 13.500000000 -36.500000000 9.500000000 13.500000000 35.500000000 9.500000000 13.500000000 -36.500000000 10.500000000 13.500000000 35.500000000 10.500000000 13.500000000 -36.500000000 11.500000000 13.500000000 35.500000000 11.500000000 13.500000000 -36.500000000 12.500000000 13.500000000 35.500000000 12.500000000 13.500000000 -36.500000000 13.500000000 13.500000000 35.500000000 13.500000000 13.500000000 -36.500000000 14.500000000 13.500000000 35.500000000 14.500000000 13.500000000 -36.500000000 15.500000000 13.500000000 35.500000000 15.500000000 13.500000000 -36.500000000 16.500000000 13.500000000 35.500000000 16.500000000 13.500000000 -36.500000000 17.500000000 13.500000000 35.500000000 17.500000000 13.500000000 -36.500000000 18.500000000 13.500000000 35.500000000 18.500000000 13.500000000 -36.500000000 19.500000000 13.500000000 35.500000000 19.500000000 13.500000000 -36.500000000 20.500000000 
13.500000000 35.500000000 20.500000000 13.500000000 -36.500000000 21.500000000 13.500000000 35.500000000 21.500000000 13.500000000 -36.500000000 22.500000000 13.500000000 35.500000000 22.500000000 13.500000000 -36.500000000 23.500000000 13.500000000 35.500000000 23.500000000 13.500000000 -36.500000000 24.500000000 13.500000000 35.500000000 24.500000000 13.500000000 -36.499996185 25.499996185 13.500000000 35.499996185 25.499996185 13.500000000 -36.499954224 26.499954224 13.500000000 35.499954224 26.499954224 13.500000000 -36.499591827 27.499591827 13.500000000 35.499591827 27.499591827 13.500000000 -36.497474670 28.497470856 13.500000000 35.497467041 28.497470856 13.500000000 -36.488403320 29.488407135 13.500000000 35.488403320 29.488407135 13.500000000 -36.458980560 30.458978653 13.500000000 35.458980560 30.458978653 13.500000000 -36.384422302 31.384418488 13.500000000 35.384422302 31.384418488 13.500000000 -36.233222961 32.233222961 13.500000000 35.233222961 32.233222961 13.500000000 -35.981101990 32.981101990 13.500000000 -35.622871399 33.622871399 13.500000000 34.622871399 33.622871399 13.500000000 34.981101990 32.981101990 13.500000000 -35.167964935 34.167964935 13.500000000 -34.622871399 34.622871399 13.500000000 33.622871399 34.622871399 13.500000000 34.167964935 34.167964935 13.500000000 -33.981101990 34.981101990 13.500000000 -33.233222961 35.233222961 13.500000000 -32.384422302 35.384418488 13.500000000 -31.458978653 35.458976746 13.500000000 -30.488407135 35.488403320 13.500000000 -29.497472763 35.497467041 13.500000000 -28.499593735 35.499591827 13.500000000 -27.499954224 35.499954224 13.500000000 -26.499996185 35.499996185 13.500000000 -25.500000000 35.500000000 13.500000000 -24.500000000 35.500000000 13.500000000 -23.500000000 35.500000000 13.500000000 -22.500000000 35.500000000 13.500000000 -21.500000000 35.500000000 13.500000000 -20.500000000 35.500000000 13.500000000 -19.500000000 35.500000000 13.500000000 -18.500000000 35.500000000 13.500000000 
-17.500000000 35.500000000 13.500000000 -16.500000000 35.500000000 13.500000000 -15.500000000 35.500000000 13.500000000 -14.500000000 35.500000000 13.500000000 -13.500000000 35.500000000 13.500000000 -12.500000000 35.500000000 13.500000000 -11.500000000 35.500000000 13.500000000 -10.500000000 35.500000000 13.500000000 -9.500000000 35.500000000 13.500000000 -8.500000000 35.500000000 13.500000000 -7.500000000 35.500000000 13.500000000 -6.500000000 35.500000000 13.500000000 -5.500000000 35.500000000 13.500000000 -4.500000000 35.500000000 13.500000000 -3.500000000 35.500000000 13.500000000 -2.500000000 35.500000000 13.500000000 -1.500000000 35.500000000 13.500000000 -0.500000000 35.500000000 13.500000000 0.500000000 35.500000000 13.500000000 1.500000000 35.500000000 13.500000000 2.500000000 35.500000000 13.500000000 3.500000000 35.500000000 13.500000000 4.500000000 35.500000000 13.500000000 5.500000000 35.500000000 13.500000000 6.500000000 35.500000000 13.500000000 7.500000000 35.500000000 13.500000000 8.500000000 35.500000000 13.500000000 9.500000000 35.500000000 13.500000000 10.500000000 35.500000000 13.500000000 11.500000000 35.500000000 13.500000000 12.500000000 35.500000000 13.500000000 13.500000000 35.500000000 13.500000000 14.500000000 35.500000000 13.500000000 15.500000000 35.500000000 13.500000000 16.500000000 35.500000000 13.500000000 17.500000000 35.500000000 13.500000000 18.500000000 35.500000000 13.500000000 19.500000000 35.500000000 13.500000000 20.500000000 35.500000000 13.500000000 21.500000000 35.500000000 13.500000000 22.500000000 35.500000000 13.500000000 23.500000000 35.500000000 13.500000000 24.500000000 35.500000000 13.500000000 25.499996185 35.499996185 13.500000000 26.499954224 35.499954224 13.500000000 27.499591827 35.499591827 13.500000000 28.497470856 35.497474670 13.500000000 29.488407135 35.488403320 13.500000000 30.458978653 35.458980560 13.500000000 31.384418488 35.384422302 13.500000000 32.233222961 35.233222961 13.500000000 32.981101990 
34.981101990 13.500000000 -33.981101990 -35.981101990 14.500000000 -33.233226776 -36.233222961 14.500000000 -32.384422302 -36.384418488 14.500000000 -31.458978653 -36.458980560 14.500000000 -30.488407135 -36.488403320 14.500000000 -29.497472763 -36.497474670 14.500000000 -28.499593735 -36.499591827 14.500000000 -27.499954224 -36.499954224 14.500000000 -26.499996185 -36.499996185 14.500000000 -25.500000000 -36.500000000 14.500000000 -24.500000000 -36.500000000 14.500000000 -23.500000000 -36.500000000 14.500000000 -22.500000000 -36.500000000 14.500000000 -21.500000000 -36.500000000 14.500000000 -20.500000000 -36.500000000 14.500000000 -19.500000000 -36.500000000 14.500000000 -18.500000000 -36.500000000 14.500000000 -17.500000000 -36.500000000 14.500000000 -16.500000000 -36.500000000 14.500000000 -15.500000000 -36.500000000 14.500000000 -14.500000000 -36.500000000 14.500000000 -13.500000000 -36.500000000 14.500000000 -12.500000000 -36.500000000 14.500000000 -11.500000000 -36.500000000 14.500000000 -10.500000000 -36.500000000 14.500000000 -9.500000000 -36.500000000 14.500000000 -8.500000000 -36.500000000 14.500000000 -7.500000000 -36.500000000 14.500000000 -6.500000000 -36.500000000 14.500000000 -5.500000000 -36.500000000 14.500000000 -4.500000000 -36.500000000 14.500000000 -3.500000000 -36.500000000 14.500000000 -2.500000000 -36.500000000 14.500000000 -1.500000000 -36.500000000 14.500000000 -0.500000000 -36.500000000 14.500000000 0.500000000 -36.500000000 14.500000000 1.500000000 -36.500000000 14.500000000 2.500000000 -36.500000000 14.500000000 3.500000000 -36.500000000 14.500000000 4.500000000 -36.500000000 14.500000000 5.500000000 -36.500000000 14.500000000 6.500000000 -36.500000000 14.500000000 7.500000000 -36.500000000 14.500000000 8.500000000 -36.500000000 14.500000000 9.500000000 -36.500000000 14.500000000 10.500000000 -36.500000000 14.500000000 11.500000000 -36.500000000 14.500000000 12.500000000 -36.500000000 14.500000000 13.500000000 -36.500000000 
14.500000000 14.500000000 -36.500000000 14.500000000 15.500000000 -36.500000000 14.500000000 16.500000000 -36.500000000 14.500000000 17.500000000 -36.500000000 14.500000000 18.500000000 -36.500000000 14.500000000 19.500000000 -36.500000000 14.500000000 20.500000000 -36.500000000 14.500000000 21.500000000 -36.500000000 14.500000000 22.500000000 -36.500000000 14.500000000 23.500000000 -36.500000000 14.500000000 24.500000000 -36.500000000 14.500000000 25.499996185 -36.499996185 14.500000000 26.499954224 -36.499954224 14.500000000 27.499591827 -36.499591827 14.500000000 28.497470856 -36.497467041 14.500000000 29.488407135 -36.488403320 14.500000000 30.458978653 -36.458980560 14.500000000 31.384418488 -36.384422302 14.500000000 32.233222961 -36.233222961 14.500000000 32.981101990 -35.981101990 14.500000000 -35.167964935 -35.167964935 14.500000000 -34.622871399 -35.622871399 14.500000000 33.622871399 -35.622871399 14.500000000 34.167964935 -35.167964935 14.500000000 -35.981101990 -33.981101990 14.500000000 -35.622871399 -34.622871399 14.500000000 34.622871399 -34.622871399 14.500000000 34.981101990 -33.981101990 14.500000000 -36.233222961 -33.233222961 14.500000000 35.233222961 -33.233226776 14.500000000 -36.384418488 -32.384422302 14.500000000 35.384418488 -32.384422302 14.500000000 -36.458976746 -31.458978653 14.500000000 35.458980560 -31.458978653 14.500000000 -36.488403320 -30.488407135 14.500000000 35.488403320 -30.488407135 14.500000000 -36.497467041 -29.497472763 14.500000000 35.497474670 -29.497472763 14.500000000 -36.499591827 -28.499593735 14.500000000 35.499591827 -28.499593735 14.500000000 -36.499954224 -27.499954224 14.500000000 35.499954224 -27.499954224 14.500000000 -36.499996185 -26.499996185 14.500000000 35.499996185 -26.499996185 14.500000000 -36.500000000 -25.500000000 14.500000000 35.500000000 -25.500000000 14.500000000 -36.500000000 -24.500000000 14.500000000 35.500000000 -24.500000000 14.500000000 -36.500000000 -23.500000000 14.500000000 
35.500000000 -23.500000000 14.500000000 -36.500000000 -22.500000000 14.500000000 35.500000000 -22.500000000 14.500000000 -36.500000000 -21.500000000 14.500000000 35.500000000 -21.500000000 14.500000000 -36.500000000 -20.500000000 14.500000000 35.500000000 -20.500000000 14.500000000 -36.500000000 -19.500000000 14.500000000 35.500000000 -19.500000000 14.500000000 -36.500000000 -18.500000000 14.500000000 35.500000000 -18.500000000 14.500000000 -36.500000000 -17.500000000 14.500000000 35.500000000 -17.500000000 14.500000000 -36.500000000 -16.500000000 14.500000000 35.500000000 -16.500000000 14.500000000 -36.500000000 -15.500000000 14.500000000 35.500000000 -15.500000000 14.500000000 -36.500000000 -14.500000000 14.500000000 35.500000000 -14.500000000 14.500000000 -36.500000000 -13.500000000 14.500000000 35.500000000 -13.500000000 14.500000000 -36.500000000 -12.500000000 14.500000000 35.500000000 -12.500000000 14.500000000 -36.500000000 -11.500000000 14.500000000 35.500000000 -11.500000000 14.500000000 -36.500000000 -10.500000000 14.500000000 35.500000000 -10.500000000 14.500000000 -36.500000000 -9.500000000 14.500000000 35.500000000 -9.500000000 14.500000000 -36.500000000 -8.500000000 14.500000000 35.500000000 -8.500000000 14.500000000 -36.500000000 -7.500000000 14.500000000 35.500000000 -7.500000000 14.500000000 -36.500000000 -6.500000000 14.500000000 35.500000000 -6.500000000 14.500000000 -36.500000000 -5.500000000 14.500000000 35.500000000 -5.500000000 14.500000000 -36.500000000 -4.500000000 14.500000000 35.500000000 -4.500000000 14.500000000 -36.500000000 -3.500000000 14.500000000 35.500000000 -3.500000000 14.500000000 -36.500000000 -2.500000000 14.500000000 35.500000000 -2.500000000 14.500000000 -36.500000000 -1.500000000 14.500000000 35.500000000 -1.500000000 14.500000000 -36.500000000 -0.500000000 14.500000000 35.500000000 -0.500000000 14.500000000 -36.500000000 0.500000000 14.500000000 35.500000000 0.500000000 14.500000000 -36.500000000 1.500000000 14.500000000 
35.500000000 1.500000000 14.500000000 -36.500000000 2.500000000 14.500000000 35.500000000 2.500000000 14.500000000 -36.500000000 3.500000000 14.500000000 35.500000000 3.500000000 14.500000000 -36.500000000 4.500000000 14.500000000 35.500000000 4.500000000 14.500000000 -36.500000000 5.500000000 14.500000000 35.500000000 5.500000000 14.500000000 -36.500000000 6.500000000 14.500000000 35.500000000 6.500000000 14.500000000 -36.500000000 7.500000000 14.500000000 35.500000000 7.500000000 14.500000000 -36.500000000 8.500000000 14.500000000 35.500000000 8.500000000 14.500000000 -36.500000000 9.500000000 14.500000000 35.500000000 9.500000000 14.500000000 -36.500000000 10.500000000 14.500000000 35.500000000 10.500000000 14.500000000 -36.500000000 11.500000000 14.500000000 35.500000000 11.500000000 14.500000000 -36.500000000 12.500000000 14.500000000 35.500000000 12.500000000 14.500000000 -36.500000000 13.500000000 14.500000000 35.500000000 13.500000000 14.500000000 -36.500000000 14.500000000 14.500000000 35.500000000 14.500000000 14.500000000 -36.500000000 15.500000000 14.500000000 35.500000000 15.500000000 14.500000000 -36.500000000 16.500000000 14.500000000 35.500000000 16.500000000 14.500000000 -36.500000000 17.500000000 14.500000000 35.500000000 17.500000000 14.500000000 -36.500000000 18.500000000 14.500000000 35.500000000 18.500000000 14.500000000 -36.500000000 19.500000000 14.500000000 35.500000000 19.500000000 14.500000000 -36.500000000 20.500000000 14.500000000 35.500000000 20.500000000 14.500000000 -36.500000000 21.500000000 14.500000000 35.500000000 21.500000000 14.500000000 -36.500000000 22.500000000 14.500000000 35.500000000 22.500000000 14.500000000 -36.500000000 23.500000000 14.500000000 35.500000000 23.500000000 14.500000000 -36.500000000 24.500000000 14.500000000 35.500000000 24.500000000 14.500000000 -36.499996185 25.499996185 14.500000000 35.499996185 25.499996185 14.500000000 -36.499954224 26.499954224 14.500000000 35.499954224 26.499954224 14.500000000 
-36.499591827 27.499591827 14.500000000 35.499591827 27.499591827 14.500000000 -36.497474670 28.497470856 14.500000000 35.497467041 28.497470856 14.500000000 -36.488403320 29.488407135 14.500000000 35.488403320 29.488407135 14.500000000 -36.458980560 30.458978653 14.500000000 35.458980560 30.458978653 14.500000000 -36.384422302 31.384418488 14.500000000 35.384422302 31.384418488 14.500000000 -36.233222961 32.233222961 14.500000000 35.233222961 32.233222961 14.500000000 -35.981101990 32.981101990 14.500000000 -35.622871399 33.622871399 14.500000000 34.622871399 33.622871399 14.500000000 34.981101990 32.981101990 14.500000000 -35.167964935 34.167964935 14.500000000 -34.622871399 34.622871399 14.500000000 33.622871399 34.622871399 14.500000000 34.167964935 34.167964935 14.500000000 -33.981101990 34.981101990 14.500000000 -33.233222961 35.233222961 14.500000000 -32.384422302 35.384418488 14.500000000 -31.458978653 35.458976746 14.500000000 -30.488407135 35.488403320 14.500000000 -29.497472763 35.497467041 14.500000000 -28.499593735 35.499591827 14.500000000 -27.499954224 35.499954224 14.500000000 -26.499996185 35.499996185 14.500000000 -25.500000000 35.500000000 14.500000000 -24.500000000 35.500000000 14.500000000 -23.500000000 35.500000000 14.500000000 -22.500000000 35.500000000 14.500000000 -21.500000000 35.500000000 14.500000000 -20.500000000 35.500000000 14.500000000 -19.500000000 35.500000000 14.500000000 -18.500000000 35.500000000 14.500000000 -17.500000000 35.500000000 14.500000000 -16.500000000 35.500000000 14.500000000 -15.500000000 35.500000000 14.500000000 -14.500000000 35.500000000 14.500000000 -13.500000000 35.500000000 14.500000000 -12.500000000 35.500000000 14.500000000 -11.500000000 35.500000000 14.500000000 -10.500000000 35.500000000 14.500000000 -9.500000000 35.500000000 14.500000000 -8.500000000 35.500000000 14.500000000 -7.500000000 35.500000000 14.500000000 -6.500000000 35.500000000 14.500000000 -5.500000000 35.500000000 14.500000000 -4.500000000 
35.500000000 14.500000000 -3.500000000 35.500000000 14.500000000 -2.500000000 35.500000000 14.500000000 -1.500000000 35.500000000 14.500000000 -0.500000000 35.500000000 14.500000000 0.500000000 35.500000000 14.500000000 1.500000000 35.500000000 14.500000000 2.500000000 35.500000000 14.500000000 3.500000000 35.500000000 14.500000000 4.500000000 35.500000000 14.500000000 5.500000000 35.500000000 14.500000000 6.500000000 35.500000000 14.500000000 7.500000000 35.500000000 14.500000000 8.500000000 35.500000000 14.500000000 9.500000000 35.500000000 14.500000000 10.500000000 35.500000000 14.500000000 11.500000000 35.500000000 14.500000000 12.500000000 35.500000000 14.500000000 13.500000000 35.500000000 14.500000000 14.500000000 35.500000000 14.500000000 15.500000000 35.500000000 14.500000000 16.500000000 35.500000000 14.500000000 17.500000000 35.500000000 14.500000000 18.500000000 35.500000000 14.500000000 19.500000000 35.500000000 14.500000000 20.500000000 35.500000000 14.500000000 21.500000000 35.500000000 14.500000000 22.500000000 35.500000000 14.500000000 23.500000000 35.500000000 14.500000000 24.500000000 35.500000000 14.500000000 25.499996185 35.499996185 14.500000000 26.499954224 35.499954224 14.500000000 27.499591827 35.499591827 14.500000000 28.497470856 35.497474670 14.500000000 29.488407135 35.488403320 14.500000000 30.458978653 35.458980560 14.500000000 31.384418488 35.384422302 14.500000000 32.233222961 35.233222961 14.500000000 32.981101990 34.981101990 14.500000000 -33.981101990 -35.981101990 15.500000000 -33.233226776 -36.233222961 15.500000000 -32.384422302 -36.384418488 15.500000000 -31.458978653 -36.458980560 15.500000000 -30.488407135 -36.488403320 15.500000000 -29.497472763 -36.497474670 15.500000000 -28.499593735 -36.499591827 15.500000000 -27.499954224 -36.499954224 15.500000000 -26.499996185 -36.499996185 15.500000000 -25.500000000 -36.500000000 15.500000000 -24.500000000 -36.500000000 15.500000000 -23.500000000 -36.500000000 15.500000000 
-22.500000000 -36.500000000 15.500000000 -21.500000000 -36.500000000 15.500000000 -20.500000000 -36.500000000 15.500000000 -19.500000000 -36.500000000 15.500000000 -18.500000000 -36.500000000 15.500000000 -17.500000000 -36.500000000 15.500000000 -16.500000000 -36.500000000 15.500000000 -15.500000000 -36.500000000 15.500000000 -14.500000000 -36.500000000 15.500000000 -13.500000000 -36.500000000 15.500000000 -12.500000000 -36.500000000 15.500000000 -11.500000000 -36.500000000 15.500000000 -10.500000000 -36.500000000 15.500000000 -9.500000000 -36.500000000 15.500000000 -8.500000000 -36.500000000 15.500000000 -7.500000000 -36.500000000 15.500000000 -6.500000000 -36.500000000 15.500000000 -5.500000000 -36.500000000 15.500000000 -4.500000000 -36.500000000 15.500000000 -3.500000000 -36.500000000 15.500000000 -2.500000000 -36.500000000 15.500000000 -1.500000000 -36.500000000 15.500000000 -0.500000000 -36.500000000 15.500000000 0.500000000 -36.500000000 15.500000000 1.500000000 -36.500000000 15.500000000 2.500000000 -36.500000000 15.500000000 3.500000000 -36.500000000 15.500000000 4.500000000 -36.500000000 15.500000000 5.500000000 -36.500000000 15.500000000 6.500000000 -36.500000000 15.500000000 7.500000000 -36.500000000 15.500000000 8.500000000 -36.500000000 15.500000000 9.500000000 -36.500000000 15.500000000 10.500000000 -36.500000000 15.500000000 11.500000000 -36.500000000 15.500000000 12.500000000 -36.500000000 15.500000000 13.500000000 -36.500000000 15.500000000 14.500000000 -36.500000000 15.500000000 15.500000000 -36.500000000 15.500000000 16.500000000 -36.500000000 15.500000000 17.500000000 -36.500000000 15.500000000 18.500000000 -36.500000000 15.500000000 19.500000000 -36.500000000 15.500000000 20.500000000 -36.500000000 15.500000000 21.500000000 -36.500000000 15.500000000 22.500000000 -36.500000000 15.500000000 23.500000000 -36.500000000 15.500000000 24.500000000 -36.500000000 15.500000000 25.499996185 -36.499996185 15.500000000 26.499954224 -36.499954224 
15.500000000 27.499591827 -36.499591827 15.500000000 28.497470856 -36.497467041 15.500000000 29.488407135 -36.488403320 15.500000000 30.458978653 -36.458980560 15.500000000 31.384418488 -36.384422302 15.500000000 32.233222961 -36.233222961 15.500000000 32.981101990 -35.981101990 15.500000000 -35.167964935 -35.167964935 15.500000000 -34.622871399 -35.622871399 15.500000000 33.622871399 -35.622871399 15.500000000 34.167964935 -35.167964935 15.500000000 -35.981101990 -33.981101990 15.500000000 -35.622871399 -34.622871399 15.500000000 34.622871399 -34.622871399 15.500000000 34.981101990 -33.981101990 15.500000000 -36.233222961 -33.233222961 15.500000000 35.233222961 -33.233226776 15.500000000 -36.384418488 -32.384422302 15.500000000 35.384418488 -32.384422302 15.500000000 -36.458976746 -31.458978653 15.500000000 35.458980560 -31.458978653 15.500000000 -36.488403320 -30.488407135 15.500000000 35.488403320 -30.488407135 15.500000000 -36.497467041 -29.497472763 15.500000000 35.497474670 -29.497472763 15.500000000 -36.499591827 -28.499593735 15.500000000 35.499591827 -28.499593735 15.500000000 -36.499954224 -27.499954224 15.500000000 35.499954224 -27.499954224 15.500000000 -36.499996185 -26.499996185 15.500000000 35.499996185 -26.499996185 15.500000000 -36.500000000 -25.500000000 15.500000000 35.500000000 -25.500000000 15.500000000 -36.500000000 -24.500000000 15.500000000 35.500000000 -24.500000000 15.500000000 -36.500000000 -23.500000000 15.500000000 35.500000000 -23.500000000 15.500000000 -36.500000000 -22.500000000 15.500000000 35.500000000 -22.500000000 15.500000000 -36.500000000 -21.500000000 15.500000000 35.500000000 -21.500000000 15.500000000 -36.500000000 -20.500000000 15.500000000 35.500000000 -20.500000000 15.500000000 -36.500000000 -19.500000000 15.500000000 35.500000000 -19.500000000 15.500000000 -36.500000000 -18.500000000 15.500000000 35.500000000 -18.500000000 15.500000000 -36.500000000 -17.500000000 15.500000000 35.500000000 -17.500000000 15.500000000 
-36.500000000 -16.500000000 15.500000000 35.500000000 -16.500000000 15.500000000 -36.500000000 -15.500000000 15.500000000 35.500000000 -15.500000000 15.500000000 -36.500000000 -14.500000000 15.500000000 35.500000000 -14.500000000 15.500000000 -36.500000000 -13.500000000 15.500000000 35.500000000 -13.500000000 15.500000000 -36.500000000 -12.500000000 15.500000000 35.500000000 -12.500000000 15.500000000 -36.500000000 -11.500000000 15.500000000 35.500000000 -11.500000000 15.500000000 -36.500000000 -10.500000000 15.500000000 35.500000000 -10.500000000 15.500000000 -36.500000000 -9.500000000 15.500000000 35.500000000 -9.500000000 15.500000000 -36.500000000 -8.500000000 15.500000000 35.500000000 -8.500000000 15.500000000 -36.500000000 -7.500000000 15.500000000 35.500000000 -7.500000000 15.500000000 -36.500000000 -6.500000000 15.500000000 35.500000000 -6.500000000 15.500000000 -36.500000000 -5.500000000 15.500000000 35.500000000 -5.500000000 15.500000000 -36.500000000 -4.500000000 15.500000000 35.500000000 -4.500000000 15.500000000 -36.500000000 -3.500000000 15.500000000 35.500000000 -3.500000000 15.500000000 -36.500000000 -2.500000000 15.500000000 35.500000000 -2.500000000 15.500000000 -36.500000000 -1.500000000 15.500000000 35.500000000 -1.500000000 15.500000000 -36.500000000 -0.500000000 15.500000000 35.500000000 -0.500000000 15.500000000 -36.500000000 0.500000000 15.500000000 35.500000000 0.500000000 15.500000000 -36.500000000 1.500000000 15.500000000 35.500000000 1.500000000 15.500000000 -36.500000000 2.500000000 15.500000000 35.500000000 2.500000000 15.500000000 -36.500000000 3.500000000 15.500000000 35.500000000 3.500000000 15.500000000 -36.500000000 4.500000000 15.500000000 35.500000000 4.500000000 15.500000000 -36.500000000 5.500000000 15.500000000 35.500000000 5.500000000 15.500000000 -36.500000000 6.500000000 15.500000000 35.500000000 6.500000000 15.500000000 -36.500000000 7.500000000 15.500000000 35.500000000 7.500000000 15.500000000 -36.500000000 8.500000000 
15.500000000 35.500000000 8.500000000 15.500000000 -36.500000000 9.500000000 15.500000000 35.500000000 9.500000000 15.500000000 -36.500000000 10.500000000 15.500000000 35.500000000 10.500000000 15.500000000 -36.500000000 11.500000000 15.500000000 35.500000000 11.500000000 15.500000000 -36.500000000 12.500000000 15.500000000 35.500000000 12.500000000 15.500000000 -36.500000000 13.500000000 15.500000000 35.500000000 13.500000000 15.500000000 -36.500000000 14.500000000 15.500000000 35.500000000 14.500000000 15.500000000 -36.500000000 15.500000000 15.500000000 35.500000000 15.500000000 15.500000000 -36.500000000 16.500000000 15.500000000 35.500000000 16.500000000 15.500000000 -36.500000000 17.500000000 15.500000000 35.500000000 17.500000000 15.500000000 -36.500000000 18.500000000 15.500000000 35.500000000 18.500000000 15.500000000 -36.500000000 19.500000000 15.500000000 35.500000000 19.500000000 15.500000000 -36.500000000 20.500000000 15.500000000 35.500000000 20.500000000 15.500000000 -36.500000000 21.500000000 15.500000000 35.500000000 21.500000000 15.500000000 -36.500000000 22.500000000 15.500000000 35.500000000 22.500000000 15.500000000 -36.500000000 23.500000000 15.500000000 35.500000000 23.500000000 15.500000000 -36.500000000 24.500000000 15.500000000 35.500000000 24.500000000 15.500000000 -36.499996185 25.499996185 15.500000000 35.499996185 25.499996185 15.500000000 -36.499954224 26.499954224 15.500000000 35.499954224 26.499954224 15.500000000 -36.499591827 27.499591827 15.500000000 35.499591827 27.499591827 15.500000000 -36.497474670 28.497470856 15.500000000 35.497467041 28.497470856 15.500000000 -36.488403320 29.488407135 15.500000000 35.488403320 29.488407135 15.500000000 -36.458980560 30.458978653 15.500000000 35.458980560 30.458978653 15.500000000 -36.384422302 31.384418488 15.500000000 35.384422302 31.384418488 15.500000000 -36.233222961 32.233222961 15.500000000 35.233222961 32.233222961 15.500000000 -35.981101990 32.981101990 15.500000000 -35.622871399 
33.622871399 15.500000000 34.622871399 33.622871399 15.500000000 34.981101990 32.981101990 15.500000000 -35.167964935 34.167964935 15.500000000 -34.622871399 34.622871399 15.500000000 33.622871399 34.622871399 15.500000000 34.167964935 34.167964935 15.500000000 -33.981101990 34.981101990 15.500000000 -33.233222961 35.233222961 15.500000000 -32.384422302 35.384418488 15.500000000 -31.458978653 35.458976746 15.500000000 -30.488407135 35.488403320 15.500000000 -29.497472763 35.497467041 15.500000000 -28.499593735 35.499591827 15.500000000 -27.499954224 35.499954224 15.500000000 -26.499996185 35.499996185 15.500000000 -25.500000000 35.500000000 15.500000000 -24.500000000 35.500000000 15.500000000 -23.500000000 35.500000000 15.500000000 -22.500000000 35.500000000 15.500000000 -21.500000000 35.500000000 15.500000000 -20.500000000 35.500000000 15.500000000 -19.500000000 35.500000000 15.500000000 -18.500000000 35.500000000 15.500000000 -17.500000000 35.500000000 15.500000000 -16.500000000 35.500000000 15.500000000 -15.500000000 35.500000000 15.500000000 -14.500000000 35.500000000 15.500000000 -13.500000000 35.500000000 15.500000000 -12.500000000 35.500000000 15.500000000 -11.500000000 35.500000000 15.500000000 -10.500000000 35.500000000 15.500000000 -9.500000000 35.500000000 15.500000000 -8.500000000 35.500000000 15.500000000 -7.500000000 35.500000000 15.500000000 -6.500000000 35.500000000 15.500000000 -5.500000000 35.500000000 15.500000000 -4.500000000 35.500000000 15.500000000 -3.500000000 35.500000000 15.500000000 -2.500000000 35.500000000 15.500000000 -1.500000000 35.500000000 15.500000000 -0.500000000 35.500000000 15.500000000 0.500000000 35.500000000 15.500000000 1.500000000 35.500000000 15.500000000 2.500000000 35.500000000 15.500000000 3.500000000 35.500000000 15.500000000 4.500000000 35.500000000 15.500000000 5.500000000 35.500000000 15.500000000 6.500000000 35.500000000 15.500000000 7.500000000 35.500000000 15.500000000 8.500000000 35.500000000 15.500000000 
9.500000000 35.500000000 15.500000000 10.500000000 35.500000000 15.500000000 11.500000000 35.500000000 15.500000000 12.500000000 35.500000000 15.500000000 13.500000000 35.500000000 15.500000000 14.500000000 35.500000000 15.500000000 15.500000000 35.500000000 15.500000000 16.500000000 35.500000000 15.500000000 17.500000000 35.500000000 15.500000000 18.500000000 35.500000000 15.500000000 19.500000000 35.500000000 15.500000000 20.500000000 35.500000000 15.500000000 21.500000000 35.500000000 15.500000000 22.500000000 35.500000000 15.500000000 23.500000000 35.500000000 15.500000000 24.500000000 35.500000000 15.500000000 25.499996185 35.499996185 15.500000000 26.499954224 35.499954224 15.500000000 27.499591827 35.499591827 15.500000000 28.497470856 35.497474670 15.500000000 29.488407135 35.488403320 15.500000000 30.458978653 35.458980560 15.500000000 31.384418488 35.384422302 15.500000000 32.233222961 35.233222961 15.500000000 32.981101990 34.981101990 15.500000000 -33.981101990 -35.981101990 16.500000000 -33.233226776 -36.233222961 16.500000000 -32.384422302 -36.384418488 16.500000000 -31.458978653 -36.458980560 16.500000000 -30.488407135 -36.488403320 16.500000000 -29.497472763 -36.497474670 16.500000000 -28.499593735 -36.499591827 16.500000000 -27.499954224 -36.499954224 16.500000000 -26.499996185 -36.499996185 16.500000000 -25.500000000 -36.500000000 16.500000000 -24.500000000 -36.500000000 16.500000000 -23.500000000 -36.500000000 16.500000000 -22.500000000 -36.500000000 16.500000000 -21.500000000 -36.500000000 16.500000000 -20.500000000 -36.500000000 16.500000000 -19.500000000 -36.500000000 16.500000000 -18.500000000 -36.500000000 16.500000000 -17.500000000 -36.500000000 16.500000000 -16.500000000 -36.500000000 16.500000000 -15.500000000 -36.500000000 16.500000000 -14.500000000 -36.500000000 16.500000000 -13.500000000 -36.500000000 16.500000000 -12.500000000 -36.500000000 16.500000000 -11.500000000 -36.500000000 16.500000000 -10.500000000 -36.500000000 16.500000000 
-9.500000000 -36.500000000 16.500000000 -8.500000000 -36.500000000 16.500000000 -7.500000000 -36.500000000 16.500000000 -6.500000000 -36.500000000 16.500000000 -5.500000000 -36.500000000 16.500000000 -4.500000000 -36.500000000 16.500000000 -3.500000000 -36.500000000 16.500000000 -2.500000000 -36.500000000 16.500000000 -1.500000000 -36.500000000 16.500000000 -0.500000000 -36.500000000 16.500000000 0.500000000 -36.500000000 16.500000000 1.500000000 -36.500000000 16.500000000 2.500000000 -36.500000000 16.500000000 3.500000000 -36.500000000 16.500000000 4.500000000 -36.500000000 16.500000000 5.500000000 -36.500000000 16.500000000 6.500000000 -36.500000000 16.500000000 7.500000000 -36.500000000 16.500000000 8.500000000 -36.500000000 16.500000000 9.500000000 -36.500000000 16.500000000 10.500000000 -36.500000000 16.500000000 11.500000000 -36.500000000 16.500000000 12.500000000 -36.500000000 16.500000000 13.500000000 -36.500000000 16.500000000 14.500000000 -36.500000000 16.500000000 15.500000000 -36.500000000 16.500000000 16.500000000 -36.500000000 16.500000000 17.500000000 -36.500000000 16.500000000 18.500000000 -36.500000000 16.500000000 19.500000000 -36.500000000 16.500000000 20.500000000 -36.500000000 16.500000000 21.500000000 -36.500000000 16.500000000 22.500000000 -36.500000000 16.500000000 23.500000000 -36.500000000 16.500000000 24.500000000 -36.500000000 16.500000000 25.499996185 -36.499996185 16.500000000 26.499954224 -36.499954224 16.500000000 27.499591827 -36.499591827 16.500000000 28.497470856 -36.497467041 16.500000000 29.488407135 -36.488403320 16.500000000 30.458978653 -36.458980560 16.500000000 31.384418488 -36.384422302 16.500000000 32.233222961 -36.233222961 16.500000000 32.981101990 -35.981101990 16.500000000 -35.167964935 -35.167964935 16.500000000 -34.622871399 -35.622871399 16.500000000 33.622871399 -35.622871399 16.500000000 34.167964935 -35.167964935 16.500000000 -35.981101990 -33.981101990 16.500000000 -35.622871399 -34.622871399 16.500000000 
34.622871399 -34.622871399 16.500000000 34.981101990 -33.981101990 16.500000000 -36.233222961 -33.233222961 16.500000000 35.233222961 -33.233226776 16.500000000 -36.384418488 -32.384422302 16.500000000 35.384418488 -32.384422302 16.500000000 -36.458976746 -31.458978653 16.500000000 35.458980560 -31.458978653 16.500000000 -36.488403320 -30.488407135 16.500000000 35.488403320 -30.488407135 16.500000000 -36.497467041 -29.497472763 16.500000000 35.497474670 -29.497472763 16.500000000 -36.499591827 -28.499593735 16.500000000 35.499591827 -28.499593735 16.500000000 -36.499954224 -27.499954224 16.500000000 35.499954224 -27.499954224 16.500000000 -36.499996185 -26.499996185 16.500000000 35.499996185 -26.499996185 16.500000000 -36.500000000 -25.500000000 16.500000000 35.500000000 -25.500000000 16.500000000 -36.500000000 -24.500000000 16.500000000 35.500000000 -24.500000000 16.500000000 -36.500000000 -23.500000000 16.500000000 35.500000000 -23.500000000 16.500000000 -36.500000000 -22.500000000 16.500000000 35.500000000 -22.500000000 16.500000000 -36.500000000 -21.500000000 16.500000000 35.500000000 -21.500000000 16.500000000 -36.500000000 -20.500000000 16.500000000 35.500000000 -20.500000000 16.500000000 -36.500000000 -19.500000000 16.500000000 35.500000000 -19.500000000 16.500000000 -36.500000000 -18.500000000 16.500000000 35.500000000 -18.500000000 16.500000000 -36.500000000 -17.500000000 16.500000000 35.500000000 -17.500000000 16.500000000 -36.500000000 -16.500000000 16.500000000 35.500000000 -16.500000000 16.500000000 -36.500000000 -15.500000000 16.500000000 35.500000000 -15.500000000 16.500000000 -36.500000000 -14.500000000 16.500000000 35.500000000 -14.500000000 16.500000000 -36.500000000 -13.500000000 16.500000000 35.500000000 -13.500000000 16.500000000 -36.500000000 -12.500000000 16.500000000 35.500000000 -12.500000000 16.500000000 -36.500000000 -11.500000000 16.500000000 35.500000000 -11.500000000 16.500000000 -36.500000000 -10.500000000 16.500000000 35.500000000 
-10.500000000 16.500000000 -36.500000000 -9.500000000 16.500000000 35.500000000 -9.500000000 16.500000000 -36.500000000 -8.500000000 16.500000000 35.500000000 -8.500000000 16.500000000 -36.500000000 -7.500000000 16.500000000 35.500000000 -7.500000000 16.500000000 -36.500000000 -6.500000000 16.500000000 35.500000000 -6.500000000 16.500000000 -36.500000000 -5.500000000 16.500000000 35.500000000 -5.500000000 16.500000000 -36.500000000 -4.500000000 16.500000000 35.500000000 -4.500000000 16.500000000 -36.500000000 -3.500000000 16.500000000 35.500000000 -3.500000000 16.500000000 -36.500000000 -2.500000000 16.500000000 35.500000000 -2.500000000 16.500000000 -36.500000000 -1.500000000 16.500000000 35.500000000 -1.500000000 16.500000000 -36.500000000 -0.500000000 16.500000000 35.500000000 -0.500000000 16.500000000 -36.500000000 0.500000000 16.500000000 35.500000000 0.500000000 16.500000000 -36.500000000 1.500000000 16.500000000 35.500000000 1.500000000 16.500000000 -36.500000000 2.500000000 16.500000000 35.500000000 2.500000000 16.500000000 -36.500000000 3.500000000 16.500000000 35.500000000 3.500000000 16.500000000 -36.500000000 4.500000000 16.500000000 35.500000000 4.500000000 16.500000000 -36.500000000 5.500000000 16.500000000 35.500000000 5.500000000 16.500000000 -36.500000000 6.500000000 16.500000000 35.500000000 6.500000000 16.500000000 -36.500000000 7.500000000 16.500000000 35.500000000 7.500000000 16.500000000 -36.500000000 8.500000000 16.500000000 35.500000000 8.500000000 16.500000000 -36.500000000 9.500000000 16.500000000 35.500000000 9.500000000 16.500000000 -36.500000000 10.500000000 16.500000000 35.500000000 10.500000000 16.500000000 -36.500000000 11.500000000 16.500000000 35.500000000 11.500000000 16.500000000 -36.500000000 12.500000000 16.500000000 35.500000000 12.500000000 16.500000000 -36.500000000 13.500000000 16.500000000 35.500000000 13.500000000 16.500000000 -36.500000000 14.500000000 16.500000000 35.500000000 14.500000000 16.500000000 -36.500000000 
15.500000000 16.500000000 35.500000000 15.500000000 16.500000000 -36.500000000 16.500000000 16.500000000 35.500000000 16.500000000 16.500000000 -36.500000000 17.500000000 16.500000000 35.500000000 17.500000000 16.500000000 -36.500000000 18.500000000 16.500000000 35.500000000 18.500000000 16.500000000 -36.500000000 19.500000000 16.500000000 35.500000000 19.500000000 16.500000000 -36.500000000 20.500000000 16.500000000 35.500000000 20.500000000 16.500000000 -36.500000000 21.500000000 16.500000000 35.500000000 21.500000000 16.500000000 -36.500000000 22.500000000 16.500000000 35.500000000 22.500000000 16.500000000 -36.500000000 23.500000000 16.500000000 35.500000000 23.500000000 16.500000000 -36.500000000 24.500000000 16.500000000 35.500000000 24.500000000 16.500000000 -36.499996185 25.499996185 16.500000000 35.499996185 25.499996185 16.500000000 -36.499954224 26.499954224 16.500000000 35.499954224 26.499954224 16.500000000 -36.499591827 27.499591827 16.500000000 35.499591827 27.499591827 16.500000000 -36.497474670 28.497470856 16.500000000 35.497467041 28.497470856 16.500000000 -36.488403320 29.488407135 16.500000000 35.488403320 29.488407135 16.500000000 -36.458980560 30.458978653 16.500000000 35.458980560 30.458978653 16.500000000 -36.384422302 31.384418488 16.500000000 35.384422302 31.384418488 16.500000000 -36.233222961 32.233222961 16.500000000 35.233222961 32.233222961 16.500000000 -35.981101990 32.981101990 16.500000000 -35.622871399 33.622871399 16.500000000 34.622871399 33.622871399 16.500000000 34.981101990 32.981101990 16.500000000 -35.167964935 34.167964935 16.500000000 -34.622871399 34.622871399 16.500000000 33.622871399 34.622871399 16.500000000 34.167964935 34.167964935 16.500000000 -33.981101990 34.981101990 16.500000000 -33.233222961 35.233222961 16.500000000 -32.384422302 35.384418488 16.500000000 -31.458978653 35.458976746 16.500000000 -30.488407135 35.488403320 16.500000000 -29.497472763 35.497467041 16.500000000 -28.499593735 35.499591827 
16.500000000 -27.499954224 35.499954224 16.500000000 -26.499996185 35.499996185 16.500000000 -25.500000000 35.500000000 16.500000000 -24.500000000 35.500000000 16.500000000 -23.500000000 35.500000000 16.500000000 -22.500000000 35.500000000 16.500000000 -21.500000000 35.500000000 16.500000000 -20.500000000 35.500000000 16.500000000 -19.500000000 35.500000000 16.500000000 -18.500000000 35.500000000 16.500000000 -17.500000000 35.500000000 16.500000000 -16.500000000 35.500000000 16.500000000 -15.500000000 35.500000000 16.500000000 -14.500000000 35.500000000 16.500000000 -13.500000000 35.500000000 16.500000000 -12.500000000 35.500000000 16.500000000 -11.500000000 35.500000000 16.500000000 -10.500000000 35.500000000 16.500000000 -9.500000000 35.500000000 16.500000000 -8.500000000 35.500000000 16.500000000 -7.500000000 35.500000000 16.500000000 -6.500000000 35.500000000 16.500000000 -5.500000000 35.500000000 16.500000000 -4.500000000 35.500000000 16.500000000 -3.500000000 35.500000000 16.500000000 -2.500000000 35.500000000 16.500000000 -1.500000000 35.500000000 16.500000000 -0.500000000 35.500000000 16.500000000 0.500000000 35.500000000 16.500000000 1.500000000 35.500000000 16.500000000 2.500000000 35.500000000 16.500000000 3.500000000 35.500000000 16.500000000 4.500000000 35.500000000 16.500000000 5.500000000 35.500000000 16.500000000 6.500000000 35.500000000 16.500000000 7.500000000 35.500000000 16.500000000 8.500000000 35.500000000 16.500000000 9.500000000 35.500000000 16.500000000 10.500000000 35.500000000 16.500000000 11.500000000 35.500000000 16.500000000 12.500000000 35.500000000 16.500000000 13.500000000 35.500000000 16.500000000 14.500000000 35.500000000 16.500000000 15.500000000 35.500000000 16.500000000 16.500000000 35.500000000 16.500000000 17.500000000 35.500000000 16.500000000 18.500000000 35.500000000 16.500000000 19.500000000 35.500000000 16.500000000 20.500000000 35.500000000 16.500000000 21.500000000 35.500000000 16.500000000 22.500000000 35.500000000 
16.500000000 23.500000000 35.500000000 16.500000000 24.500000000 35.500000000 16.500000000 25.499996185 35.499996185 16.500000000 26.499954224 35.499954224 16.500000000 27.499591827 35.499591827 16.500000000 28.497470856 35.497474670 16.500000000 29.488407135 35.488403320 16.500000000 30.458978653 35.458980560 16.500000000 31.384418488 35.384422302 16.500000000 32.233222961 35.233222961 16.500000000 32.981101990 34.981101990 16.500000000 -33.981101990 -35.981101990 17.500000000 -33.233226776 -36.233222961 17.500000000 -32.384422302 -36.384418488 17.500000000 -31.458978653 -36.458980560 17.500000000 -30.488407135 -36.488403320 17.500000000 -29.497472763 -36.497474670 17.500000000 -28.499593735 -36.499591827 17.500000000 -27.499954224 -36.499954224 17.500000000 -26.499996185 -36.499996185 17.500000000 -25.500000000 -36.500000000 17.500000000 -24.500000000 -36.500000000 17.500000000 -23.500000000 -36.500000000 17.500000000 -22.500000000 -36.500000000 17.500000000 -21.500000000 -36.500000000 17.500000000 -20.500000000 -36.500000000 17.500000000 -19.500000000 -36.500000000 17.500000000 -18.500000000 -36.500000000 17.500000000 -17.500000000 -36.500000000 17.500000000 -16.500000000 -36.500000000 17.500000000 -15.500000000 -36.500000000 17.500000000 -14.500000000 -36.500000000 17.500000000 -13.500000000 -36.500000000 17.500000000 -12.500000000 -36.500000000 17.500000000 -11.500000000 -36.500000000 17.500000000 -10.500000000 -36.500000000 17.500000000 -9.500000000 -36.500000000 17.500000000 -8.500000000 -36.500000000 17.500000000 -7.500000000 -36.500000000 17.500000000 -6.500000000 -36.500000000 17.500000000 -5.500000000 -36.500000000 17.500000000 -4.500000000 -36.500000000 17.500000000 -3.500000000 -36.500000000 17.500000000 -2.500000000 -36.500000000 17.500000000 -1.500000000 -36.500000000 17.500000000 -0.500000000 -36.500000000 17.500000000 0.500000000 -36.500000000 17.500000000 1.500000000 -36.500000000 17.500000000 2.500000000 -36.500000000 17.500000000 3.500000000 
-36.500000000 17.500000000 4.500000000 -36.500000000 17.500000000 5.500000000 -36.500000000 17.500000000 6.500000000 -36.500000000 17.500000000 7.500000000 -36.500000000 17.500000000 8.500000000 -36.500000000 17.500000000 9.500000000 -36.500000000 17.500000000 10.500000000 -36.500000000 17.500000000 11.500000000 -36.500000000 17.500000000 12.500000000 -36.500000000 17.500000000 13.500000000 -36.500000000 17.500000000 14.500000000 -36.500000000 17.500000000 15.500000000 -36.500000000 17.500000000 16.500000000 -36.500000000 17.500000000 17.500000000 -36.500000000 17.500000000 18.500000000 -36.500000000 17.500000000 19.500000000 -36.500000000 17.500000000 20.500000000 -36.500000000 17.500000000 21.500000000 -36.500000000 17.500000000 22.500000000 -36.500000000 17.500000000 23.500000000 -36.500000000 17.500000000 24.500000000 -36.500000000 17.500000000 25.499996185 -36.499996185 17.500000000 26.499954224 -36.499954224 17.500000000 27.499591827 -36.499591827 17.500000000 28.497470856 -36.497467041 17.500000000 29.488407135 -36.488403320 17.500000000 30.458978653 -36.458980560 17.500000000 31.384418488 -36.384422302 17.500000000 32.233222961 -36.233222961 17.500000000 32.981101990 -35.981101990 17.500000000 -35.167964935 -35.167964935 17.500000000 -34.622871399 -35.622871399 17.500000000 33.622871399 -35.622871399 17.500000000 34.167964935 -35.167964935 17.500000000 -35.981101990 -33.981101990 17.500000000 -35.622871399 -34.622871399 17.500000000 34.622871399 -34.622871399 17.500000000 34.981101990 -33.981101990 17.500000000 -36.233222961 -33.233222961 17.500000000 35.233222961 -33.233226776 17.500000000 -36.384418488 -32.384422302 17.500000000 35.384418488 -32.384422302 17.500000000 -36.458976746 -31.458978653 17.500000000 35.458980560 -31.458978653 17.500000000 -36.488403320 -30.488407135 17.500000000 35.488403320 -30.488407135 17.500000000 -36.497467041 -29.497472763 17.500000000 35.497474670 -29.497472763 17.500000000 -36.499591827 -28.499593735 17.500000000 
35.499591827 -28.499593735 17.500000000 -36.499954224 -27.499954224 17.500000000 35.499954224 -27.499954224 17.500000000 -36.499996185 -26.499996185 17.500000000 35.499996185 -26.499996185 17.500000000 -36.500000000 -25.500000000 17.500000000 35.500000000 -25.500000000 17.500000000 -36.500000000 -24.500000000 17.500000000 35.500000000 -24.500000000 17.500000000 -36.500000000 -23.500000000 17.500000000 35.500000000 -23.500000000 17.500000000 -36.500000000 -22.500000000 17.500000000 35.500000000 -22.500000000 17.500000000 -36.500000000 -21.500000000 17.500000000 35.500000000 -21.500000000 17.500000000 -36.500000000 -20.500000000 17.500000000 35.500000000 -20.500000000 17.500000000 -36.500000000 -19.500000000 17.500000000 35.500000000 -19.500000000 17.500000000 -36.500000000 -18.500000000 17.500000000 35.500000000 -18.500000000 17.500000000 -36.500000000 -17.500000000 17.500000000 35.500000000 -17.500000000 17.500000000 -36.500000000 -16.500000000 17.500000000 35.500000000 -16.500000000 17.500000000 -36.500000000 -15.500000000 17.500000000 35.500000000 -15.500000000 17.500000000 -36.500000000 -14.500000000 17.500000000 35.500000000 -14.500000000 17.500000000 -36.500000000 -13.500000000 17.500000000 35.500000000 -13.500000000 17.500000000 -36.500000000 -12.500000000 17.500000000 35.500000000 -12.500000000 17.500000000 -36.500000000 -11.500000000 17.500000000 35.500000000 -11.500000000 17.500000000 -36.500000000 -10.500000000 17.500000000 35.500000000 -10.500000000 17.500000000 -36.500000000 -9.500000000 17.500000000 35.500000000 -9.500000000 17.500000000 -36.500000000 -8.500000000 17.500000000 35.500000000 -8.500000000 17.500000000 -36.500000000 -7.500000000 17.500000000 35.500000000 -7.500000000 17.500000000 -36.500000000 -6.500000000 17.500000000 35.500000000 -6.500000000 17.500000000 -36.500000000 -5.500000000 17.500000000 35.500000000 -5.500000000 17.500000000 -36.500000000 -4.500000000 17.500000000 35.500000000 -4.500000000 17.500000000 -36.500000000 -3.500000000 
17.500000000 35.500000000 -3.500000000 17.500000000 -36.500000000 -2.500000000 17.500000000 35.500000000 -2.500000000 17.500000000 -36.500000000 -1.500000000 17.500000000 35.500000000 -1.500000000 17.500000000 -36.500000000 -0.500000000 17.500000000 35.500000000 -0.500000000 17.500000000 -36.500000000 0.500000000 17.500000000 35.500000000 0.500000000 17.500000000 -36.500000000 1.500000000 17.500000000 35.500000000 1.500000000 17.500000000 -36.500000000 2.500000000 17.500000000 35.500000000 2.500000000 17.500000000 -36.500000000 3.500000000 17.500000000 35.500000000 3.500000000 17.500000000 -36.500000000 4.500000000 17.500000000 35.500000000 4.500000000 17.500000000 -36.500000000 5.500000000 17.500000000 35.500000000 5.500000000 17.500000000 -36.500000000 6.500000000 17.500000000 35.500000000 6.500000000 17.500000000 -36.500000000 7.500000000 17.500000000 35.500000000 7.500000000 17.500000000 -36.500000000 8.500000000 17.500000000 35.500000000 8.500000000 17.500000000 -36.500000000 9.500000000 17.500000000 35.500000000 9.500000000 17.500000000 -36.500000000 10.500000000 17.500000000 35.500000000 10.500000000 17.500000000 -36.500000000 11.500000000 17.500000000 35.500000000 11.500000000 17.500000000 -36.500000000 12.500000000 17.500000000 35.500000000 12.500000000 17.500000000 -36.500000000 13.500000000 17.500000000 35.500000000 13.500000000 17.500000000 -36.500000000 14.500000000 17.500000000 35.500000000 14.500000000 17.500000000 -36.500000000 15.500000000 17.500000000 35.500000000 15.500000000 17.500000000 -36.500000000 16.500000000 17.500000000 35.500000000 16.500000000 17.500000000 -36.500000000 17.500000000 17.500000000 35.500000000 17.500000000 17.500000000 -36.500000000 18.500000000 17.500000000 35.500000000 18.500000000 17.500000000 -36.500000000 19.500000000 17.500000000 35.500000000 19.500000000 17.500000000 -36.500000000 20.500000000 17.500000000 35.500000000 20.500000000 17.500000000 -36.500000000 21.500000000 17.500000000 35.500000000 21.500000000 
17.500000000 -36.500000000 22.500000000 17.500000000 35.500000000 22.500000000 17.500000000 -36.500000000 23.500000000 17.500000000 35.500000000 23.500000000 17.500000000 -36.500000000 24.500000000 17.500000000 35.500000000 24.500000000 17.500000000 -36.499996185 25.499996185 17.500000000 35.499996185 25.499996185 17.500000000 -36.499954224 26.499954224 17.500000000 35.499954224 26.499954224 17.500000000 -36.499591827 27.499591827 17.500000000 35.499591827 27.499591827 17.500000000 -36.497474670 28.497470856 17.500000000 35.497467041 28.497470856 17.500000000 -36.488403320 29.488407135 17.500000000 35.488403320 29.488407135 17.500000000 -36.458980560 30.458978653 17.500000000 35.458980560 30.458978653 17.500000000 -36.384422302 31.384418488 17.500000000 35.384422302 31.384418488 17.500000000 -36.233222961 32.233222961 17.500000000 35.233222961 32.233222961 17.500000000 -35.981101990 32.981101990 17.500000000 -35.622871399 33.622871399 17.500000000 34.622871399 33.622871399 17.500000000 34.981101990 32.981101990 17.500000000 -35.167964935 34.167964935 17.500000000 -34.622871399 34.622871399 17.500000000 33.622871399 34.622871399 17.500000000 34.167964935 34.167964935 17.500000000 -33.981101990 34.981101990 17.500000000 -33.233222961 35.233222961 17.500000000 -32.384422302 35.384418488 17.500000000 -31.458978653 35.458976746 17.500000000 -30.488407135 35.488403320 17.500000000 -29.497472763 35.497467041 17.500000000 -28.499593735 35.499591827 17.500000000 -27.499954224 35.499954224 17.500000000 -26.499996185 35.499996185 17.500000000 -25.500000000 35.500000000 17.500000000 -24.500000000 35.500000000 17.500000000 -23.500000000 35.500000000 17.500000000 -22.500000000 35.500000000 17.500000000 -21.500000000 35.500000000 17.500000000 -20.500000000 35.500000000 17.500000000 -19.500000000 35.500000000 17.500000000 -18.500000000 35.500000000 17.500000000 -17.500000000 35.500000000 17.500000000 -16.500000000 35.500000000 17.500000000 -15.500000000 35.500000000 17.500000000 
-14.500000000 35.500000000 17.500000000 -13.500000000 35.500000000 17.500000000 -12.500000000 35.500000000 17.500000000 -11.500000000 35.500000000 17.500000000 -10.500000000 35.500000000 17.500000000 -9.500000000 35.500000000 17.500000000 -8.500000000 35.500000000 17.500000000 -7.500000000 35.500000000 17.500000000 -6.500000000 35.500000000 17.500000000 -5.500000000 35.500000000 17.500000000 -4.500000000 35.500000000 17.500000000 -3.500000000 35.500000000 17.500000000 -2.500000000 35.500000000 17.500000000 -1.500000000 35.500000000 17.500000000 -0.500000000 35.500000000 17.500000000 0.500000000 35.500000000 17.500000000 1.500000000 35.500000000 17.500000000 2.500000000 35.500000000 17.500000000 3.500000000 35.500000000 17.500000000 4.500000000 35.500000000 17.500000000 5.500000000 35.500000000 17.500000000 6.500000000 35.500000000 17.500000000 7.500000000 35.500000000 17.500000000 8.500000000 35.500000000 17.500000000 9.500000000 35.500000000 17.500000000 10.500000000 35.500000000 17.500000000 11.500000000 35.500000000 17.500000000 12.500000000 35.500000000 17.500000000 13.500000000 35.500000000 17.500000000 14.500000000 35.500000000 17.500000000 15.500000000 35.500000000 17.500000000 16.500000000 35.500000000 17.500000000 17.500000000 35.500000000 17.500000000 18.500000000 35.500000000 17.500000000 19.500000000 35.500000000 17.500000000 20.500000000 35.500000000 17.500000000 21.500000000 35.500000000 17.500000000 22.500000000 35.500000000 17.500000000 23.500000000 35.500000000 17.500000000 24.500000000 35.500000000 17.500000000 25.499996185 35.499996185 17.500000000 26.499954224 35.499954224 17.500000000 27.499591827 35.499591827 17.500000000 28.497470856 35.497474670 17.500000000 29.488407135 35.488403320 17.500000000 30.458978653 35.458980560 17.500000000 31.384418488 35.384422302 17.500000000 32.233222961 35.233222961 17.500000000 32.981101990 34.981101990 17.500000000 -33.981101990 -35.981101990 18.500000000 -33.233226776 -36.233222961 18.500000000 
-32.384422302 -36.384418488 18.500000000 -31.458978653 -36.458980560 18.500000000 -30.488407135 -36.488403320 18.500000000 -29.497472763 -36.497474670 18.500000000 -28.499593735 -36.499591827 18.500000000 -27.499954224 -36.499954224 18.500000000 -26.499996185 -36.499996185 18.500000000 -25.500000000 -36.500000000 18.500000000 -24.500000000 -36.500000000 18.500000000 -23.500000000 -36.500000000 18.500000000 -22.500000000 -36.500000000 18.500000000 -21.500000000 -36.500000000 18.500000000 -20.500000000 -36.500000000 18.500000000 -19.500000000 -36.500000000 18.500000000 -18.500000000 -36.500000000 18.500000000 -17.500000000 -36.500000000 18.500000000 -16.500000000 -36.500000000 18.500000000 -15.500000000 -36.500000000 18.500000000 -14.500000000 -36.500000000 18.500000000 -13.500000000 -36.500000000 18.500000000 -12.500000000 -36.500000000 18.500000000 -11.500000000 -36.500000000 18.500000000 -10.500000000 -36.500000000 18.500000000 -9.500000000 -36.500000000 18.500000000 -8.500000000 -36.500000000 18.500000000 -7.500000000 -36.500000000 18.500000000 -6.500000000 -36.500000000 18.500000000 -5.500000000 -36.500000000 18.500000000 -4.500000000 -36.500000000 18.500000000 -3.500000000 -36.500000000 18.500000000 -2.500000000 -36.500000000 18.500000000 -1.500000000 -36.500000000 18.500000000 -0.500000000 -36.500000000 18.500000000 0.500000000 -36.500000000 18.500000000 1.500000000 -36.500000000 18.500000000 2.500000000 -36.500000000 18.500000000 3.500000000 -36.500000000 18.500000000 4.500000000 -36.500000000 18.500000000 5.500000000 -36.500000000 18.500000000 6.500000000 -36.500000000 18.500000000 7.500000000 -36.500000000 18.500000000 8.500000000 -36.500000000 18.500000000 9.500000000 -36.500000000 18.500000000 10.500000000 -36.500000000 18.500000000 11.500000000 -36.500000000 18.500000000 12.500000000 -36.500000000 18.500000000 13.500000000 -36.500000000 18.500000000 14.500000000 -36.500000000 18.500000000 15.500000000 -36.500000000 18.500000000 16.500000000 -36.500000000 
18.500000000 17.500000000 -36.500000000 18.500000000 18.500000000 -36.500000000 18.500000000 19.500000000 -36.500000000 18.500000000 20.500000000 -36.500000000 18.500000000 21.500000000 -36.500000000 18.500000000 22.500000000 -36.500000000 18.500000000 23.500000000 -36.500000000 18.500000000 24.500000000 -36.500000000 18.500000000 25.499996185 -36.499996185 18.500000000 26.499954224 -36.499954224 18.500000000 27.499591827 -36.499591827 18.500000000 28.497470856 -36.497467041 18.500000000 29.488407135 -36.488403320 18.500000000 30.458978653 -36.458980560 18.500000000 31.384418488 -36.384422302 18.500000000 32.233222961 -36.233222961 18.500000000 32.981101990 -35.981101990 18.500000000 -35.167964935 -35.167964935 18.500000000 -34.622871399 -35.622871399 18.500000000 33.622871399 -35.622871399 18.500000000 34.167964935 -35.167964935 18.500000000 -35.981101990 -33.981101990 18.500000000 -35.622871399 -34.622871399 18.500000000 34.622871399 -34.622871399 18.500000000 34.981101990 -33.981101990 18.500000000 -36.233222961 -33.233222961 18.500000000 35.233222961 -33.233226776 18.500000000 -36.384418488 -32.384422302 18.500000000 35.384418488 -32.384422302 18.500000000 -36.458976746 -31.458978653 18.500000000 35.458980560 -31.458978653 18.500000000 -36.488403320 -30.488407135 18.500000000 35.488403320 -30.488407135 18.500000000 -36.497467041 -29.497472763 18.500000000 35.497474670 -29.497472763 18.500000000 -36.499591827 -28.499593735 18.500000000 35.499591827 -28.499593735 18.500000000 -36.499954224 -27.499954224 18.500000000 35.499954224 -27.499954224 18.500000000 -36.499996185 -26.499996185 18.500000000 35.499996185 -26.499996185 18.500000000 -36.500000000 -25.500000000 18.500000000 35.500000000 -25.500000000 18.500000000 -36.500000000 -24.500000000 18.500000000 35.500000000 -24.500000000 18.500000000 -36.500000000 -23.500000000 18.500000000 35.500000000 -23.500000000 18.500000000 -36.500000000 -22.500000000 18.500000000 35.500000000 -22.500000000 18.500000000 
-36.500000000 -21.500000000 18.500000000 35.500000000 -21.500000000 18.500000000 -36.500000000 -20.500000000 18.500000000 35.500000000 -20.500000000 18.500000000 -36.500000000 -19.500000000 18.500000000 35.500000000 -19.500000000 18.500000000 -36.500000000 -18.500000000 18.500000000 35.500000000 -18.500000000 18.500000000 -36.500000000 -17.500000000 18.500000000 35.500000000 -17.500000000 18.500000000 -36.500000000 -16.500000000 18.500000000 35.500000000 -16.500000000 18.500000000 -36.500000000 -15.500000000 18.500000000 35.500000000 -15.500000000 18.500000000 -36.500000000 -14.500000000 18.500000000 35.500000000 -14.500000000 18.500000000 -36.500000000 -13.500000000 18.500000000 35.500000000 -13.500000000 18.500000000 -36.500000000 -12.500000000 18.500000000 35.500000000 -12.500000000 18.500000000 -36.500000000 -11.500000000 18.500000000 35.500000000 -11.500000000 18.500000000 -36.500000000 -10.500000000 18.500000000 35.500000000 -10.500000000 18.500000000 -36.500000000 -9.500000000 18.500000000 35.500000000 -9.500000000 18.500000000 -36.500000000 -8.500000000 18.500000000 35.500000000 -8.500000000 18.500000000 -36.500000000 -7.500000000 18.500000000 35.500000000 -7.500000000 18.500000000 -36.500000000 -6.500000000 18.500000000 35.500000000 -6.500000000 18.500000000 -36.500000000 -5.500000000 18.500000000 35.500000000 -5.500000000 18.500000000 -36.500000000 -4.500000000 18.500000000 35.500000000 -4.500000000 18.500000000 -36.500000000 -3.500000000 18.500000000 35.500000000 -3.500000000 18.500000000 -36.500000000 -2.500000000 18.500000000 35.500000000 -2.500000000 18.500000000 -36.500000000 -1.500000000 18.500000000 35.500000000 -1.500000000 18.500000000 -36.500000000 -0.500000000 18.500000000 35.500000000 -0.500000000 18.500000000 -36.500000000 0.500000000 18.500000000 35.500000000 0.500000000 18.500000000 -36.500000000 1.500000000 18.500000000 35.500000000 1.500000000 18.500000000 -36.500000000 2.500000000 18.500000000 35.500000000 2.500000000 18.500000000 
-36.500000000 3.500000000 18.500000000 35.500000000 3.500000000 18.500000000 -36.500000000 4.500000000 18.500000000 35.500000000 4.500000000 18.500000000 -36.500000000 5.500000000 18.500000000 35.500000000 5.500000000 18.500000000 -36.500000000 6.500000000 18.500000000 35.500000000 6.500000000 18.500000000 -36.500000000 7.500000000 18.500000000 35.500000000 7.500000000 18.500000000 -36.500000000 8.500000000 18.500000000 35.500000000 8.500000000 18.500000000 -36.500000000 9.500000000 18.500000000 35.500000000 9.500000000 18.500000000 -36.500000000 10.500000000 18.500000000 35.500000000 10.500000000 18.500000000 -36.500000000 11.500000000 18.500000000 35.500000000 11.500000000 18.500000000 -36.500000000 12.500000000 18.500000000 35.500000000 12.500000000 18.500000000 -36.500000000 13.500000000 18.500000000 35.500000000 13.500000000 18.500000000 -36.500000000 14.500000000 18.500000000 35.500000000 14.500000000 18.500000000 -36.500000000 15.500000000 18.500000000 35.500000000 15.500000000 18.500000000 -36.500000000 16.500000000 18.500000000 35.500000000 16.500000000 18.500000000 -36.500000000 17.500000000 18.500000000 35.500000000 17.500000000 18.500000000 -36.500000000 18.500000000 18.500000000 35.500000000 18.500000000 18.500000000 -36.500000000 19.500000000 18.500000000 35.500000000 19.500000000 18.500000000 -36.500000000 20.500000000 18.500000000 35.500000000 20.500000000 18.500000000 -36.500000000 21.500000000 18.500000000 35.500000000 21.500000000 18.500000000 -36.500000000 22.500000000 18.500000000 35.500000000 22.500000000 18.500000000 -36.500000000 23.500000000 18.500000000 35.500000000 23.500000000 18.500000000 -36.500000000 24.500000000 18.500000000 35.500000000 24.500000000 18.500000000 -36.499996185 25.499996185 18.500000000 35.499996185 25.499996185 18.500000000 -36.499954224 26.499954224 18.500000000 35.499954224 26.499954224 18.500000000 -36.499591827 27.499591827 18.500000000 35.499591827 27.499591827 18.500000000 -36.497474670 28.497470856 
18.500000000 35.497467041 28.497470856 18.500000000 -36.488403320 29.488407135 18.500000000 35.488403320 29.488407135 18.500000000 -36.458980560 30.458978653 18.500000000 35.458980560 30.458978653 18.500000000 -36.384422302 31.384418488 18.500000000 35.384422302 31.384418488 18.500000000 -36.233222961 32.233222961 18.500000000 35.233222961 32.233222961 18.500000000 -35.981101990 32.981101990 18.500000000 -35.622871399 33.622871399 18.500000000 34.622871399 33.622871399 18.500000000 34.981101990 32.981101990 18.500000000 -35.167964935 34.167964935 18.500000000 -34.622871399 34.622871399 18.500000000 33.622871399 34.622871399 18.500000000 34.167964935 34.167964935 18.500000000 -33.981101990 34.981101990 18.500000000 -33.233222961 35.233222961 18.500000000 -32.384422302 35.384418488 18.500000000 -31.458978653 35.458976746 18.500000000 -30.488407135 35.488403320 18.500000000 -29.497472763 35.497467041 18.500000000 -28.499593735 35.499591827 18.500000000 -27.499954224 35.499954224 18.500000000 -26.499996185 35.499996185 18.500000000 -25.500000000 35.500000000 18.500000000 -24.500000000 35.500000000 18.500000000 -23.500000000 35.500000000 18.500000000 -22.500000000 35.500000000 18.500000000 -21.500000000 35.500000000 18.500000000 -20.500000000 35.500000000 18.500000000 -19.500000000 35.500000000 18.500000000 -18.500000000 35.500000000 18.500000000 -17.500000000 35.500000000 18.500000000 -16.500000000 35.500000000 18.500000000 -15.500000000 35.500000000 18.500000000 -14.500000000 35.500000000 18.500000000 -13.500000000 35.500000000 18.500000000 -12.500000000 35.500000000 18.500000000 -11.500000000 35.500000000 18.500000000 -10.500000000 35.500000000 18.500000000 -9.500000000 35.500000000 18.500000000 -8.500000000 35.500000000 18.500000000 -7.500000000 35.500000000 18.500000000 -6.500000000 35.500000000 18.500000000 -5.500000000 35.500000000 18.500000000 -4.500000000 35.500000000 18.500000000 -3.500000000 35.500000000 18.500000000 -2.500000000 35.500000000 18.500000000 
-1.500000000 35.500000000 18.500000000 -0.500000000 35.500000000 18.500000000 0.500000000 35.500000000 18.500000000 1.500000000 35.500000000 18.500000000 2.500000000 35.500000000 18.500000000 3.500000000 35.500000000 18.500000000 4.500000000 35.500000000 18.500000000 5.500000000 35.500000000 18.500000000 6.500000000 35.500000000 18.500000000 7.500000000 35.500000000 18.500000000 8.500000000 35.500000000 18.500000000 9.500000000 35.500000000 18.500000000 10.500000000 35.500000000 18.500000000 11.500000000 35.500000000 18.500000000 12.500000000 35.500000000 18.500000000 13.500000000 35.500000000 18.500000000 14.500000000 35.500000000 18.500000000 15.500000000 35.500000000 18.500000000 16.500000000 35.500000000 18.500000000 17.500000000 35.500000000 18.500000000 18.500000000 35.500000000 18.500000000 19.500000000 35.500000000 18.500000000 20.500000000 35.500000000 18.500000000 21.500000000 35.500000000 18.500000000 22.500000000 35.500000000 18.500000000 23.500000000 35.500000000 18.500000000 24.500000000 35.500000000 18.500000000 25.499996185 35.499996185 18.500000000 26.499954224 35.499954224 18.500000000 27.499591827 35.499591827 18.500000000 28.497470856 35.497474670 18.500000000 29.488407135 35.488403320 18.500000000 30.458978653 35.458980560 18.500000000 31.384418488 35.384422302 18.500000000 32.233222961 35.233222961 18.500000000 32.981101990 34.981101990 18.500000000 -33.981101990 -35.981101990 19.500000000 -33.233226776 -36.233222961 19.500000000 -32.384422302 -36.384418488 19.500000000 -31.458978653 -36.458980560 19.500000000 -30.488407135 -36.488403320 19.500000000 -29.497472763 -36.497474670 19.500000000 -28.499593735 -36.499591827 19.500000000 -27.499954224 -36.499954224 19.500000000 -26.499996185 -36.499996185 19.500000000 -25.500000000 -36.500000000 19.500000000 -24.500000000 -36.500000000 19.500000000 -23.500000000 -36.500000000 19.500000000 -22.500000000 -36.500000000 19.500000000 -21.500000000 -36.500000000 19.500000000 -20.500000000 -36.500000000 
19.500000000 -19.500000000 -36.500000000 19.500000000 -18.500000000 -36.500000000 19.500000000 -17.500000000 -36.500000000 19.500000000 -16.500000000 -36.500000000 19.500000000 -15.500000000 -36.500000000 19.500000000 -14.500000000 -36.500000000 19.500000000 -13.500000000 -36.500000000 19.500000000 -12.500000000 -36.500000000 19.500000000 -11.500000000 -36.500000000 19.500000000 -10.500000000 -36.500000000 19.500000000 -9.500000000 -36.500000000 19.500000000 -8.500000000 -36.500000000 19.500000000 -7.500000000 -36.500000000 19.500000000 -6.500000000 -36.500000000 19.500000000 -5.500000000 -36.500000000 19.500000000 -4.500000000 -36.500000000 19.500000000 -3.500000000 -36.500000000 19.500000000 -2.500000000 -36.500000000 19.500000000 -1.500000000 -36.500000000 19.500000000 -0.500000000 -36.500000000 19.500000000 0.500000000 -36.500000000 19.500000000 1.500000000 -36.500000000 19.500000000 2.500000000 -36.500000000 19.500000000 3.500000000 -36.500000000 19.500000000 4.500000000 -36.500000000 19.500000000 5.500000000 -36.500000000 19.500000000 6.500000000 -36.500000000 19.500000000 7.500000000 -36.500000000 19.500000000 8.500000000 -36.500000000 19.500000000 9.500000000 -36.500000000 19.500000000 10.500000000 -36.500000000 19.500000000 11.500000000 -36.500000000 19.500000000 12.500000000 -36.500000000 19.500000000 13.500000000 -36.500000000 19.500000000 14.500000000 -36.500000000 19.500000000 15.500000000 -36.500000000 19.500000000 16.500000000 -36.500000000 19.500000000 17.500000000 -36.500000000 19.500000000 18.500000000 -36.500000000 19.500000000 19.500000000 -36.500000000 19.500000000 20.500000000 -36.500000000 19.500000000 21.500000000 -36.500000000 19.500000000 22.500000000 -36.500000000 19.500000000 23.500000000 -36.500000000 19.500000000 24.500000000 -36.500000000 19.500000000 25.499996185 -36.499996185 19.500000000 26.499954224 -36.499954224 19.500000000 27.499591827 -36.499591827 19.500000000 28.497470856 -36.497467041 19.500000000 29.488407135 -36.488403320 
19.500000000 30.458978653 -36.458980560 19.500000000 31.384418488 -36.384422302 19.500000000 32.233222961 -36.233222961 19.500000000 32.981101990 -35.981101990 19.500000000 -35.167964935 -35.167964935 19.500000000 -34.622871399 -35.622871399 19.500000000 33.622871399 -35.622871399 19.500000000 34.167964935 -35.167964935 19.500000000 -35.981101990 -33.981101990 19.500000000 -35.622871399 -34.622871399 19.500000000 34.622871399 -34.622871399 19.500000000 34.981101990 -33.981101990 19.500000000 -36.233222961 -33.233222961 19.500000000 35.233222961 -33.233226776 19.500000000 -36.384418488 -32.384422302 19.500000000 35.384418488 -32.384422302 19.500000000 -36.458976746 -31.458978653 19.500000000 35.458980560 -31.458978653 19.500000000 -36.488403320 -30.488407135 19.500000000 35.488403320 -30.488407135 19.500000000 -36.497467041 -29.497472763 19.500000000 35.497474670 -29.497472763 19.500000000 -36.499591827 -28.499593735 19.500000000 35.499591827 -28.499593735 19.500000000 -36.499954224 -27.499954224 19.500000000 35.499954224 -27.499954224 19.500000000 -36.499996185 -26.499996185 19.500000000 35.499996185 -26.499996185 19.500000000 -36.500000000 -25.500000000 19.500000000 35.500000000 -25.500000000 19.500000000 -36.500000000 -24.500000000 19.500000000 35.500000000 -24.500000000 19.500000000 -36.500000000 -23.500000000 19.500000000 35.500000000 -23.500000000 19.500000000 -36.500000000 -22.500000000 19.500000000 35.500000000 -22.500000000 19.500000000 -36.500000000 -21.500000000 19.500000000 35.500000000 -21.500000000 19.500000000 -36.500000000 -20.500000000 19.500000000 35.500000000 -20.500000000 19.500000000 -36.500000000 -19.500000000 19.500000000 35.500000000 -19.500000000 19.500000000 -36.500000000 -18.500000000 19.500000000 35.500000000 -18.500000000 19.500000000 -36.500000000 -17.500000000 19.500000000 35.500000000 -17.500000000 19.500000000 -36.500000000 -16.500000000 19.500000000 35.500000000 -16.500000000 19.500000000 -36.500000000 -15.500000000 19.500000000 
35.500000000 -15.500000000 19.500000000 -36.500000000 -14.500000000 19.500000000 35.500000000 -14.500000000 19.500000000 -36.500000000 -13.500000000 19.500000000 35.500000000 -13.500000000 19.500000000 -36.500000000 -12.500000000 19.500000000 35.500000000 -12.500000000 19.500000000 -36.500000000 -11.500000000 19.500000000 35.500000000 -11.500000000 19.500000000 -36.500000000 -10.500000000 19.500000000 35.500000000 -10.500000000 19.500000000 -36.500000000 -9.500000000 19.500000000 35.500000000 -9.500000000 19.500000000 -36.500000000 -8.500000000 19.500000000 35.500000000 -8.500000000 19.500000000 -36.500000000 -7.500000000 19.500000000 35.500000000 -7.500000000 19.500000000 -36.500000000 -6.500000000 19.500000000 35.500000000 -6.500000000 19.500000000 -36.500000000 -5.500000000 19.500000000 35.500000000 -5.500000000 19.500000000 -36.500000000 -4.500000000 19.500000000 35.500000000 -4.500000000 19.500000000 -36.500000000 -3.500000000 19.500000000 35.500000000 -3.500000000 19.500000000 -36.500000000 -2.500000000 19.500000000 35.500000000 -2.500000000 19.500000000 -36.500000000 -1.500000000 19.500000000 35.500000000 -1.500000000 19.500000000 -36.500000000 -0.500000000 19.500000000 35.500000000 -0.500000000 19.500000000 -36.500000000 0.500000000 19.500000000 35.500000000 0.500000000 19.500000000 -36.500000000 1.500000000 19.500000000 35.500000000 1.500000000 19.500000000 -36.500000000 2.500000000 19.500000000 35.500000000 2.500000000 19.500000000 -36.500000000 3.500000000 19.500000000 35.500000000 3.500000000 19.500000000 -36.500000000 4.500000000 19.500000000 35.500000000 4.500000000 19.500000000 -36.500000000 5.500000000 19.500000000 35.500000000 5.500000000 19.500000000 -36.500000000 6.500000000 19.500000000 35.500000000 6.500000000 19.500000000 -36.500000000 7.500000000 19.500000000 35.500000000 7.500000000 19.500000000 -36.500000000 8.500000000 19.500000000 35.500000000 8.500000000 19.500000000 -36.500000000 9.500000000 19.500000000 35.500000000 9.500000000 
19.500000000 -36.500000000 10.500000000 19.500000000 35.500000000 10.500000000 19.500000000 -36.500000000 11.500000000 19.500000000 35.500000000 11.500000000 19.500000000 -36.500000000 12.500000000 19.500000000 35.500000000 12.500000000 19.500000000 -36.500000000 13.500000000 19.500000000 35.500000000 13.500000000 19.500000000 -36.500000000 14.500000000 19.500000000 35.500000000 14.500000000 19.500000000 -36.500000000 15.500000000 19.500000000 35.500000000 15.500000000 19.500000000 -36.500000000 16.500000000 19.500000000 35.500000000 16.500000000 19.500000000 -36.500000000 17.500000000 19.500000000 35.500000000 17.500000000 19.500000000 -36.500000000 18.500000000 19.500000000 35.500000000 18.500000000 19.500000000 -36.500000000 19.500000000 19.500000000 35.500000000 19.500000000 19.500000000 -36.500000000 20.500000000 19.500000000 35.500000000 20.500000000 19.500000000 -36.500000000 21.500000000 19.500000000 35.500000000 21.500000000 19.500000000 -36.500000000 22.500000000 19.500000000 35.500000000 22.500000000 19.500000000 -36.500000000 23.500000000 19.500000000 35.500000000 23.500000000 19.500000000 -36.500000000 24.500000000 19.500000000 35.500000000 24.500000000 19.500000000 -36.499996185 25.499996185 19.500000000 35.499996185 25.499996185 19.500000000 -36.499954224 26.499954224 19.500000000 35.499954224 26.499954224 19.500000000 -36.499591827 27.499591827 19.500000000 35.499591827 27.499591827 19.500000000 -36.497474670 28.497470856 19.500000000 35.497467041 28.497470856 19.500000000 -36.488403320 29.488407135 19.500000000 35.488403320 29.488407135 19.500000000 -36.458980560 30.458978653 19.500000000 35.458980560 30.458978653 19.500000000 -36.384422302 31.384418488 19.500000000 35.384422302 31.384418488 19.500000000 -36.233222961 32.233222961 19.500000000 35.233222961 32.233222961 19.500000000 -35.981101990 32.981101990 19.500000000 -35.622871399 33.622871399 19.500000000 34.622871399 33.622871399 19.500000000 34.981101990 32.981101990 19.500000000 
-35.167964935 34.167964935 19.500000000 -34.622871399 34.622871399 19.500000000 33.622871399 34.622871399 19.500000000 34.167964935 34.167964935 19.500000000 -33.981101990 34.981101990 19.500000000 -33.233222961 35.233222961 19.500000000 -32.384422302 35.384418488 19.500000000 -31.458978653 35.458976746 19.500000000 -30.488407135 35.488403320 19.500000000 -29.497472763 35.497467041 19.500000000 -28.499593735 35.499591827 19.500000000 -27.499954224 35.499954224 19.500000000 -26.499996185 35.499996185 19.500000000 -25.500000000 35.500000000 19.500000000 -24.500000000 35.500000000 19.500000000 -23.500000000 35.500000000 19.500000000 -22.500000000 35.500000000 19.500000000 -21.500000000 35.500000000 19.500000000 -20.500000000 35.500000000 19.500000000 -19.500000000 35.500000000 19.500000000 -18.500000000 35.500000000 19.500000000 -17.500000000 35.500000000 19.500000000 -16.500000000 35.500000000 19.500000000 -15.500000000 35.500000000 19.500000000 -14.500000000 35.500000000 19.500000000 -13.500000000 35.500000000 19.500000000 -12.500000000 35.500000000 19.500000000 -11.500000000 35.500000000 19.500000000 -10.500000000 35.500000000 19.500000000 -9.500000000 35.500000000 19.500000000 -8.500000000 35.500000000 19.500000000 -7.500000000 35.500000000 19.500000000 -6.500000000 35.500000000 19.500000000 -5.500000000 35.500000000 19.500000000 -4.500000000 35.500000000 19.500000000 -3.500000000 35.500000000 19.500000000 -2.500000000 35.500000000 19.500000000 -1.500000000 35.500000000 19.500000000 -0.500000000 35.500000000 19.500000000 0.500000000 35.500000000 19.500000000 1.500000000 35.500000000 19.500000000 2.500000000 35.500000000 19.500000000 3.500000000 35.500000000 19.500000000 4.500000000 35.500000000 19.500000000 5.500000000 35.500000000 19.500000000 6.500000000 35.500000000 19.500000000 7.500000000 35.500000000 19.500000000 8.500000000 35.500000000 19.500000000 9.500000000 35.500000000 19.500000000 10.500000000 35.500000000 19.500000000 11.500000000 35.500000000 
19.500000000 12.500000000 35.500000000 19.500000000 13.500000000 35.500000000 19.500000000 14.500000000 35.500000000 19.500000000 15.500000000 35.500000000 19.500000000 16.500000000 35.500000000 19.500000000 17.500000000 35.500000000 19.500000000 18.500000000 35.500000000 19.500000000 19.500000000 35.500000000 19.500000000 20.500000000 35.500000000 19.500000000 21.500000000 35.500000000 19.500000000 22.500000000 35.500000000 19.500000000 23.500000000 35.500000000 19.500000000 24.500000000 35.500000000 19.500000000 25.499996185 35.499996185 19.500000000 26.499954224 35.499954224 19.500000000 27.499591827 35.499591827 19.500000000 28.497470856 35.497474670 19.500000000 29.488407135 35.488403320 19.500000000 30.458978653 35.458980560 19.500000000 31.384418488 35.384422302 19.500000000 32.233222961 35.233222961 19.500000000 32.981101990 34.981101990 19.500000000 -33.981101990 -35.981101990 20.500000000 -33.233226776 -36.233222961 20.500000000 -32.384422302 -36.384418488 20.500000000 -31.458978653 -36.458980560 20.500000000 -30.488407135 -36.488403320 20.500000000 -29.497472763 -36.497474670 20.500000000 -28.499593735 -36.499591827 20.500000000 -27.499954224 -36.499954224 20.500000000 -26.499996185 -36.499996185 20.500000000 -25.500000000 -36.500000000 20.500000000 -24.500000000 -36.500000000 20.500000000 -23.500000000 -36.500000000 20.500000000 -22.500000000 -36.500000000 20.500000000 -21.500000000 -36.500000000 20.500000000 -20.500000000 -36.500000000 20.500000000 -19.500000000 -36.500000000 20.500000000 -18.500000000 -36.500000000 20.500000000 -17.500000000 -36.500000000 20.500000000 -16.500000000 -36.500000000 20.500000000 -15.500000000 -36.500000000 20.500000000 -14.500000000 -36.500000000 20.500000000 -13.500000000 -36.500000000 20.500000000 -12.500000000 -36.500000000 20.500000000 -11.500000000 -36.500000000 20.500000000 -10.500000000 -36.500000000 20.500000000 -9.500000000 -36.500000000 20.500000000 -8.500000000 -36.500000000 20.500000000 -7.500000000 
-36.500000000 20.500000000 -6.500000000 -36.500000000 20.500000000 -5.500000000 -36.500000000 20.500000000 -4.500000000 -36.500000000 20.500000000 -3.500000000 -36.500000000 20.500000000 -2.500000000 -36.500000000 20.500000000 -1.500000000 -36.500000000 20.500000000 -0.500000000 -36.500000000 20.500000000 0.500000000 -36.500000000 20.500000000 1.500000000 -36.500000000 20.500000000 2.500000000 -36.500000000 20.500000000 3.500000000 -36.500000000 20.500000000 4.500000000 -36.500000000 20.500000000 5.500000000 -36.500000000 20.500000000 6.500000000 -36.500000000 20.500000000 7.500000000 -36.500000000 20.500000000 8.500000000 -36.500000000 20.500000000 9.500000000 -36.500000000 20.500000000 10.500000000 -36.500000000 20.500000000 11.500000000 -36.500000000 20.500000000 12.500000000 -36.500000000 20.500000000 13.500000000 -36.500000000 20.500000000 14.500000000 -36.500000000 20.500000000 15.500000000 -36.500000000 20.500000000 16.500000000 -36.500000000 20.500000000 17.500000000 -36.500000000 20.500000000 18.500000000 -36.500000000 20.500000000 19.500000000 -36.500000000 20.500000000 20.500000000 -36.500000000 20.500000000 21.500000000 -36.500000000 20.500000000 22.500000000 -36.500000000 20.500000000 23.500000000 -36.500000000 20.500000000 24.500000000 -36.500000000 20.500000000 25.499996185 -36.499996185 20.500000000 26.499954224 -36.499954224 20.500000000 27.499591827 -36.499591827 20.500000000 28.497470856 -36.497467041 20.500000000 29.488407135 -36.488403320 20.500000000 30.458978653 -36.458980560 20.500000000 31.384418488 -36.384422302 20.500000000 32.233222961 -36.233222961 20.500000000 32.981101990 -35.981101990 20.500000000 -35.167964935 -35.167964935 20.500000000 -34.622871399 -35.622871399 20.500000000 33.622871399 -35.622871399 20.500000000 34.167964935 -35.167964935 20.500000000 -35.981101990 -33.981101990 20.500000000 -35.622871399 -34.622871399 20.500000000 34.622871399 -34.622871399 20.500000000 34.981101990 -33.981101990 20.500000000 -36.233222961 
-33.233222961 20.500000000 35.233222961 -33.233226776 20.500000000 -36.384418488 -32.384422302 20.500000000 35.384418488 -32.384422302 20.500000000 -36.458976746 -31.458978653 20.500000000 35.458980560 -31.458978653 20.500000000 -36.488403320 -30.488407135 20.500000000 35.488403320 -30.488407135 20.500000000 -36.497467041 -29.497472763 20.500000000 35.497474670 -29.497472763 20.500000000 -36.499591827 -28.499593735 20.500000000 35.499591827 -28.499593735 20.500000000 -36.499954224 -27.499954224 20.500000000 35.499954224 -27.499954224 20.500000000 -36.499996185 -26.499996185 20.500000000 35.499996185 -26.499996185 20.500000000 -36.500000000 -25.500000000 20.500000000 35.500000000 -25.500000000 20.500000000 -36.500000000 -24.500000000 20.500000000 35.500000000 -24.500000000 20.500000000 -36.500000000 -23.500000000 20.500000000 35.500000000 -23.500000000 20.500000000 -36.500000000 -22.500000000 20.500000000 35.500000000 -22.500000000 20.500000000 -36.500000000 -21.500000000 20.500000000 35.500000000 -21.500000000 20.500000000 -36.500000000 -20.500000000 20.500000000 35.500000000 -20.500000000 20.500000000 -36.500000000 -19.500000000 20.500000000 35.500000000 -19.500000000 20.500000000 -36.500000000 -18.500000000 20.500000000 35.500000000 -18.500000000 20.500000000 -36.500000000 -17.500000000 20.500000000 35.500000000 -17.500000000 20.500000000 -36.500000000 -16.500000000 20.500000000 35.500000000 -16.500000000 20.500000000 -36.500000000 -15.500000000 20.500000000 35.500000000 -15.500000000 20.500000000 -36.500000000 -14.500000000 20.500000000 35.500000000 -14.500000000 20.500000000 -36.500000000 -13.500000000 20.500000000 35.500000000 -13.500000000 20.500000000 -36.500000000 -12.500000000 20.500000000 35.500000000 -12.500000000 20.500000000 -36.500000000 -11.500000000 20.500000000 35.500000000 -11.500000000 20.500000000 -36.500000000 -10.500000000 20.500000000 35.500000000 -10.500000000 20.500000000 -36.500000000 -9.500000000 20.500000000 35.500000000 -9.500000000 
20.500000000 -36.500000000 -8.500000000 20.500000000 35.500000000 -8.500000000 20.500000000 -36.500000000 -7.500000000 20.500000000 35.500000000 -7.500000000 20.500000000 -36.500000000 -6.500000000 20.500000000 35.500000000 -6.500000000 20.500000000 -36.500000000 -5.500000000 20.500000000 35.500000000 -5.500000000 20.500000000 -36.500000000 -4.500000000 20.500000000 35.500000000 -4.500000000 20.500000000 -36.500000000 -3.500000000 20.500000000 35.500000000 -3.500000000 20.500000000 -36.500000000 -2.500000000 20.500000000 35.500000000 -2.500000000 20.500000000 -36.500000000 -1.500000000 20.500000000 35.500000000 -1.500000000 20.500000000 -36.500000000 -0.500000000 20.500000000 35.500000000 -0.500000000 20.500000000 -36.500000000 0.500000000 20.500000000 35.500000000 0.500000000 20.500000000 -36.500000000 1.500000000 20.500000000 35.500000000 1.500000000 20.500000000 -36.500000000 2.500000000 20.500000000 35.500000000 2.500000000 20.500000000 -36.500000000 3.500000000 20.500000000 35.500000000 3.500000000 20.500000000 -36.500000000 4.500000000 20.500000000 35.500000000 4.500000000 20.500000000 -36.500000000 5.500000000 20.500000000 35.500000000 5.500000000 20.500000000 -36.500000000 6.500000000 20.500000000 35.500000000 6.500000000 20.500000000 -36.500000000 7.500000000 20.500000000 35.500000000 7.500000000 20.500000000 -36.500000000 8.500000000 20.500000000 35.500000000 8.500000000 20.500000000 -36.500000000 9.500000000 20.500000000 35.500000000 9.500000000 20.500000000 -36.500000000 10.500000000 20.500000000 35.500000000 10.500000000 20.500000000 -36.500000000 11.500000000 20.500000000 35.500000000 11.500000000 20.500000000 -36.500000000 12.500000000 20.500000000 35.500000000 12.500000000 20.500000000 -36.500000000 13.500000000 20.500000000 35.500000000 13.500000000 20.500000000 -36.500000000 14.500000000 20.500000000 35.500000000 14.500000000 20.500000000 -36.500000000 15.500000000 20.500000000 35.500000000 15.500000000 20.500000000 -36.500000000 16.500000000 
20.500000000 35.500000000 16.500000000 20.500000000 -36.500000000 17.500000000 20.500000000 35.500000000 17.500000000 20.500000000 -36.500000000 18.500000000 20.500000000 35.500000000 18.500000000 20.500000000 -36.500000000 19.500000000 20.500000000 35.500000000 19.500000000 20.500000000 -36.500000000 20.500000000 20.500000000 35.500000000 20.500000000 20.500000000 -36.500000000 21.500000000 20.500000000 35.500000000 21.500000000 20.500000000 -36.500000000 22.500000000 20.500000000 35.500000000 22.500000000 20.500000000 -36.500000000 23.500000000 20.500000000 35.500000000 23.500000000 20.500000000 -36.500000000 24.500000000 20.500000000 35.500000000 24.500000000 20.500000000 -36.499996185 25.499996185 20.500000000 35.499996185 25.499996185 20.500000000 -36.499954224 26.499954224 20.500000000 35.499954224 26.499954224 20.500000000 -36.499591827 27.499591827 20.500000000 35.499591827 27.499591827 20.500000000 -36.497474670 28.497470856 20.500000000 35.497467041 28.497470856 20.500000000 -36.488403320 29.488407135 20.500000000 35.488403320 29.488407135 20.500000000 -36.458980560 30.458978653 20.500000000 35.458980560 30.458978653 20.500000000 -36.384422302 31.384418488 20.500000000 35.384422302 31.384418488 20.500000000 -36.233222961 32.233222961 20.500000000 35.233222961 32.233222961 20.500000000 -35.981101990 32.981101990 20.500000000 -35.622871399 33.622871399 20.500000000 34.622871399 33.622871399 20.500000000 34.981101990 32.981101990 20.500000000 -35.167964935 34.167964935 20.500000000 -34.622871399 34.622871399 20.500000000 33.622871399 34.622871399 20.500000000 34.167964935 34.167964935 20.500000000 -33.981101990 34.981101990 20.500000000 -33.233222961 35.233222961 20.500000000 -32.384422302 35.384418488 20.500000000 -31.458978653 35.458976746 20.500000000 -30.488407135 35.488403320 20.500000000 -29.497472763 35.497467041 20.500000000 -28.499593735 35.499591827 20.500000000 -27.499954224 35.499954224 20.500000000 -26.499996185 35.499996185 20.500000000 
-25.500000000 35.500000000 20.500000000 -24.500000000 35.500000000 20.500000000 -23.500000000 35.500000000 20.500000000 -22.500000000 35.500000000 20.500000000 -21.500000000 35.500000000 20.500000000 -20.500000000 35.500000000 20.500000000 -19.500000000 35.500000000 20.500000000 -18.500000000 35.500000000 20.500000000 -17.500000000 35.500000000 20.500000000 -16.500000000 35.500000000 20.500000000 -15.500000000 35.500000000 20.500000000 -14.500000000 35.500000000 20.500000000 -13.500000000 35.500000000 20.500000000 -12.500000000 35.500000000 20.500000000 -11.500000000 35.500000000 20.500000000 -10.500000000 35.500000000 20.500000000 -9.500000000 35.500000000 20.500000000 -8.500000000 35.500000000 20.500000000 -7.500000000 35.500000000 20.500000000 -6.500000000 35.500000000 20.500000000 -5.500000000 35.500000000 20.500000000 -4.500000000 35.500000000 20.500000000 -3.500000000 35.500000000 20.500000000 -2.500000000 35.500000000 20.500000000 -1.500000000 35.500000000 20.500000000 -0.500000000 35.500000000 20.500000000 0.500000000 35.500000000 20.500000000 1.500000000 35.500000000 20.500000000 2.500000000 35.500000000 20.500000000 3.500000000 35.500000000 20.500000000 4.500000000 35.500000000 20.500000000 5.500000000 35.500000000 20.500000000 6.500000000 35.500000000 20.500000000 7.500000000 35.500000000 20.500000000 8.500000000 35.500000000 20.500000000 9.500000000 35.500000000 20.500000000 10.500000000 35.500000000 20.500000000 11.500000000 35.500000000 20.500000000 12.500000000 35.500000000 20.500000000 13.500000000 35.500000000 20.500000000 14.500000000 35.500000000 20.500000000 15.500000000 35.500000000 20.500000000 16.500000000 35.500000000 20.500000000 17.500000000 35.500000000 20.500000000 18.500000000 35.500000000 20.500000000 19.500000000 35.500000000 20.500000000 20.500000000 35.500000000 20.500000000 21.500000000 35.500000000 20.500000000 22.500000000 35.500000000 20.500000000 23.500000000 35.500000000 20.500000000 24.500000000 35.500000000 20.500000000 
25.499996185 35.499996185 20.500000000 26.499954224 35.499954224 20.500000000 27.499591827 35.499591827 20.500000000 28.497470856 35.497474670 20.500000000 29.488407135 35.488403320 20.500000000 30.458978653 35.458980560 20.500000000 31.384418488 35.384422302 20.500000000 32.233222961 35.233222961 20.500000000 32.981101990 34.981101990 20.500000000 -33.981101990 -35.981101990 21.500000000 -33.233226776 -36.233222961 21.500000000 -32.384422302 -36.384418488 21.500000000 -31.458978653 -36.458980560 21.500000000 -30.488407135 -36.488403320 21.500000000 -29.497472763 -36.497474670 21.500000000 -28.499593735 -36.499591827 21.500000000 -27.499954224 -36.499954224 21.500000000 -26.499996185 -36.499996185 21.500000000 -25.500000000 -36.500000000 21.500000000 -24.500000000 -36.500000000 21.500000000 -23.500000000 -36.500000000 21.500000000 -22.500000000 -36.500000000 21.500000000 -21.500000000 -36.500000000 21.500000000 -20.500000000 -36.500000000 21.500000000 -19.500000000 -36.500000000 21.500000000 -18.500000000 -36.500000000 21.500000000 -17.500000000 -36.500000000 21.500000000 -16.500000000 -36.500000000 21.500000000 -15.500000000 -36.500000000 21.500000000 -14.500000000 -36.500000000 21.500000000 -13.500000000 -36.500000000 21.500000000 -12.500000000 -36.500000000 21.500000000 -11.500000000 -36.500000000 21.500000000 -10.500000000 -36.500000000 21.500000000 -9.500000000 -36.500000000 21.500000000 -8.500000000 -36.500000000 21.500000000 -7.500000000 -36.500000000 21.500000000 -6.500000000 -36.500000000 21.500000000 -5.500000000 -36.500000000 21.500000000 -4.500000000 -36.500000000 21.500000000 -3.500000000 -36.500000000 21.500000000 -2.500000000 -36.500000000 21.500000000 -1.500000000 -36.500000000 21.500000000 -0.500000000 -36.500000000 21.500000000 0.500000000 -36.500000000 21.500000000 1.500000000 -36.500000000 21.500000000 2.500000000 -36.500000000 21.500000000 3.500000000 -36.500000000 21.500000000 4.500000000 -36.500000000 21.500000000 5.500000000 -36.500000000 
21.500000000 6.500000000 -36.500000000 21.500000000 7.500000000 -36.500000000 21.500000000 8.500000000 -36.500000000 21.500000000 9.500000000 -36.500000000 21.500000000 10.500000000 -36.500000000 21.500000000 11.500000000 -36.500000000 21.500000000 12.500000000 -36.500000000 21.500000000 13.500000000 -36.500000000 21.500000000 14.500000000 -36.500000000 21.500000000 15.500000000 -36.500000000 21.500000000 16.500000000 -36.500000000 21.500000000 17.500000000 -36.500000000 21.500000000 18.500000000 -36.500000000 21.500000000 19.500000000 -36.500000000 21.500000000 20.500000000 -36.500000000 21.500000000 21.500000000 -36.500000000 21.500000000 22.500000000 -36.500000000 21.500000000 23.500000000 -36.500000000 21.500000000 24.500000000 -36.500000000 21.500000000 25.499996185 -36.499996185 21.500000000 26.499954224 -36.499954224 21.500000000 27.499591827 -36.499591827 21.500000000 28.497470856 -36.497467041 21.500000000 29.488407135 -36.488403320 21.500000000 30.458978653 -36.458980560 21.500000000 31.384418488 -36.384422302 21.500000000 32.233222961 -36.233222961 21.500000000 32.981101990 -35.981101990 21.500000000 -35.167964935 -35.167964935 21.500000000 -34.622871399 -35.622871399 21.500000000 33.622871399 -35.622871399 21.500000000 34.167964935 -35.167964935 21.500000000 -35.981101990 -33.981101990 21.500000000 -35.622871399 -34.622871399 21.500000000 34.622871399 -34.622871399 21.500000000 34.981101990 -33.981101990 21.500000000 -36.233222961 -33.233222961 21.500000000 35.233222961 -33.233226776 21.500000000 -36.384418488 -32.384422302 21.500000000 35.384418488 -32.384422302 21.500000000 -36.458976746 -31.458978653 21.500000000 35.458980560 -31.458978653 21.500000000 -36.488403320 -30.488407135 21.500000000 35.488403320 -30.488407135 21.500000000 -36.497467041 -29.497472763 21.500000000 35.497474670 -29.497472763 21.500000000 -36.499591827 -28.499593735 21.500000000 35.499591827 -28.499593735 21.500000000 -36.499954224 -27.499954224 21.500000000 35.499954224 
-27.499954224 21.500000000 -36.499996185 -26.499996185 21.500000000 35.499996185 -26.499996185 21.500000000 -36.500000000 -25.500000000 21.500000000 35.500000000 -25.500000000 21.500000000 -36.500000000 -24.500000000 21.500000000 35.500000000 -24.500000000 21.500000000 -36.500000000 -23.500000000 21.500000000 35.500000000 -23.500000000 21.500000000 -36.500000000 -22.500000000 21.500000000 35.500000000 -22.500000000 21.500000000 -36.500000000 -21.500000000 21.500000000 35.500000000 -21.500000000 21.500000000 -36.500000000 -20.500000000 21.500000000 35.500000000 -20.500000000 21.500000000 -36.500000000 -19.500000000 21.500000000 35.500000000 -19.500000000 21.500000000 -36.500000000 -18.500000000 21.500000000 35.500000000 -18.500000000 21.500000000 -36.500000000 -17.500000000 21.500000000 35.500000000 -17.500000000 21.500000000 -36.500000000 -16.500000000 21.500000000 35.500000000 -16.500000000 21.500000000 -36.500000000 -15.500000000 21.500000000 35.500000000 -15.500000000 21.500000000 -36.500000000 -14.500000000 21.500000000 35.500000000 -14.500000000 21.500000000 -36.500000000 -13.500000000 21.500000000 35.500000000 -13.500000000 21.500000000 -36.500000000 -12.500000000 21.500000000 35.500000000 -12.500000000 21.500000000 -36.500000000 -11.500000000 21.500000000 35.500000000 -11.500000000 21.500000000 -36.500000000 -10.500000000 21.500000000 35.500000000 -10.500000000 21.500000000 -36.500000000 -9.500000000 21.500000000 35.500000000 -9.500000000 21.500000000 -36.500000000 -8.500000000 21.500000000 35.500000000 -8.500000000 21.500000000 -36.500000000 -7.500000000 21.500000000 35.500000000 -7.500000000 21.500000000 -36.500000000 -6.500000000 21.500000000 35.500000000 -6.500000000 21.500000000 -36.500000000 -5.500000000 21.500000000 35.500000000 -5.500000000 21.500000000 -36.500000000 -4.500000000 21.500000000 35.500000000 -4.500000000 21.500000000 -36.500000000 -3.500000000 21.500000000 35.500000000 -3.500000000 21.500000000 -36.500000000 -2.500000000 21.500000000 
35.500000000 -2.500000000 21.500000000 -36.500000000 -1.500000000 21.500000000 35.500000000 -1.500000000 21.500000000 -36.500000000 -0.500000000 21.500000000 35.500000000 -0.500000000 21.500000000 -36.500000000 0.500000000 21.500000000 35.500000000 0.500000000 21.500000000 -36.500000000 1.500000000 21.500000000 35.500000000 1.500000000 21.500000000 -36.500000000 2.500000000 21.500000000 35.500000000 2.500000000 21.500000000 -36.500000000 3.500000000 21.500000000 35.500000000 3.500000000 21.500000000 -36.500000000 4.500000000 21.500000000 35.500000000 4.500000000 21.500000000 -36.500000000 5.500000000 21.500000000 35.500000000 5.500000000 21.500000000 -36.500000000 6.500000000 21.500000000 35.500000000 6.500000000 21.500000000 -36.500000000 7.500000000 21.500000000 35.500000000 7.500000000 21.500000000 -36.500000000 8.500000000 21.500000000 35.500000000 8.500000000 21.500000000 -36.500000000 9.500000000 21.500000000 35.500000000 9.500000000 21.500000000 -36.500000000 10.500000000 21.500000000 35.500000000 10.500000000 21.500000000 -36.500000000 11.500000000 21.500000000 35.500000000 11.500000000 21.500000000 -36.500000000 12.500000000 21.500000000 35.500000000 12.500000000 21.500000000 -36.500000000 13.500000000 21.500000000 35.500000000 13.500000000 21.500000000 -36.500000000 14.500000000 21.500000000 35.500000000 14.500000000 21.500000000 -36.500000000 15.500000000 21.500000000 35.500000000 15.500000000 21.500000000 -36.500000000 16.500000000 21.500000000 35.500000000 16.500000000 21.500000000 -36.500000000 17.500000000 21.500000000 35.500000000 17.500000000 21.500000000 -36.500000000 18.500000000 21.500000000 35.500000000 18.500000000 21.500000000 -36.500000000 19.500000000 21.500000000 35.500000000 19.500000000 21.500000000 -36.500000000 20.500000000 21.500000000 35.500000000 20.500000000 21.500000000 -36.500000000 21.500000000 21.500000000 35.500000000 21.500000000 21.500000000 -36.500000000 22.500000000 21.500000000 35.500000000 22.500000000 21.500000000 
-36.500000000 23.500000000 21.500000000 35.500000000 23.500000000 21.500000000 -36.500000000 24.500000000 21.500000000 35.500000000 24.500000000 21.500000000 -36.499996185 25.499996185 21.500000000 35.499996185 25.499996185 21.500000000 -36.499954224 26.499954224 21.500000000 35.499954224 26.499954224 21.500000000 -36.499591827 27.499591827 21.500000000 35.499591827 27.499591827 21.500000000 -36.497474670 28.497470856 21.500000000 35.497467041 28.497470856 21.500000000 -36.488403320 29.488407135 21.500000000 35.488403320 29.488407135 21.500000000 -36.458980560 30.458978653 21.500000000 35.458980560 30.458978653 21.500000000 -36.384422302 31.384418488 21.500000000 35.384422302 31.384418488 21.500000000 -36.233222961 32.233222961 21.500000000 35.233222961 32.233222961 21.500000000 -35.981101990 32.981101990 21.500000000 -35.622871399 33.622871399 21.500000000 34.622871399 33.622871399 21.500000000 34.981101990 32.981101990 21.500000000 -35.167964935 34.167964935 21.500000000 -34.622871399 34.622871399 21.500000000 33.622871399 34.622871399 21.500000000 34.167964935 34.167964935 21.500000000 -33.981101990 34.981101990 21.500000000 -33.233222961 35.233222961 21.500000000 -32.384422302 35.384418488 21.500000000 -31.458978653 35.458976746 21.500000000 -30.488407135 35.488403320 21.500000000 -29.497472763 35.497467041 21.500000000 -28.499593735 35.499591827 21.500000000 -27.499954224 35.499954224 21.500000000 -26.499996185 35.499996185 21.500000000 -25.500000000 35.500000000 21.500000000 -24.500000000 35.500000000 21.500000000 -23.500000000 35.500000000 21.500000000 -22.500000000 35.500000000 21.500000000 -21.500000000 35.500000000 21.500000000 -20.500000000 35.500000000 21.500000000 -19.500000000 35.500000000 21.500000000 -18.500000000 35.500000000 21.500000000 -17.500000000 35.500000000 21.500000000 -16.500000000 35.500000000 21.500000000 -15.500000000 35.500000000 21.500000000 -14.500000000 35.500000000 21.500000000 -13.500000000 35.500000000 21.500000000 -12.500000000 
35.500000000 21.500000000 -11.500000000 35.500000000 21.500000000 -10.500000000 35.500000000 21.500000000 -9.500000000 35.500000000 21.500000000 -8.500000000 35.500000000 21.500000000 -7.500000000 35.500000000 21.500000000 -6.500000000 35.500000000 21.500000000 -5.500000000 35.500000000 21.500000000 -4.500000000 35.500000000 21.500000000 -3.500000000 35.500000000 21.500000000 -2.500000000 35.500000000 21.500000000 -1.500000000 35.500000000 21.500000000 -0.500000000 35.500000000 21.500000000 0.500000000 35.500000000 21.500000000 1.500000000 35.500000000 21.500000000 2.500000000 35.500000000 21.500000000 3.500000000 35.500000000 21.500000000 4.500000000 35.500000000 21.500000000 5.500000000 35.500000000 21.500000000 6.500000000 35.500000000 21.500000000 7.500000000 35.500000000 21.500000000 8.500000000 35.500000000 21.500000000 9.500000000 35.500000000 21.500000000 10.500000000 35.500000000 21.500000000 11.500000000 35.500000000 21.500000000 12.500000000 35.500000000 21.500000000 13.500000000 35.500000000 21.500000000 14.500000000 35.500000000 21.500000000 15.500000000 35.500000000 21.500000000 16.500000000 35.500000000 21.500000000 17.500000000 35.500000000 21.500000000 18.500000000 35.500000000 21.500000000 19.500000000 35.500000000 21.500000000 20.500000000 35.500000000 21.500000000 21.500000000 35.500000000 21.500000000 22.500000000 35.500000000 21.500000000 23.500000000 35.500000000 21.500000000 24.500000000 35.500000000 21.500000000 25.499996185 35.499996185 21.500000000 26.499954224 35.499954224 21.500000000 27.499591827 35.499591827 21.500000000 28.497470856 35.497474670 21.500000000 29.488407135 35.488403320 21.500000000 30.458978653 35.458980560 21.500000000 31.384418488 35.384422302 21.500000000 32.233222961 35.233222961 21.500000000 32.981101990 34.981101990 21.500000000 -33.981101990 -35.981101990 22.500000000 -33.233226776 -36.233222961 22.500000000 -32.384422302 -36.384418488 22.500000000 -31.458978653 -36.458980560 22.500000000 -30.488407135 
-36.488403320 22.500000000 -29.497472763 -36.497474670 22.500000000 -28.499593735 -36.499591827 22.500000000 -27.499954224 -36.499954224 22.500000000 -26.499996185 -36.499996185 22.500000000 -25.500000000 -36.500000000 22.500000000 -24.500000000 -36.500000000 22.500000000 -23.500000000 -36.500000000 22.500000000 -22.500000000 -36.500000000 22.500000000 -21.500000000 -36.500000000 22.500000000 -20.500000000 -36.500000000 22.500000000 -19.500000000 -36.500000000 22.500000000 -18.500000000 -36.500000000 22.500000000 -17.500000000 -36.500000000 22.500000000 -16.500000000 -36.500000000 22.500000000 -15.500000000 -36.500000000 22.500000000 -14.500000000 -36.500000000 22.500000000 -13.500000000 -36.500000000 22.500000000 -12.500000000 -36.500000000 22.500000000 -11.500000000 -36.500000000 22.500000000 -10.500000000 -36.500000000 22.500000000 -9.500000000 -36.500000000 22.500000000 -8.500000000 -36.500000000 22.500000000 -7.500000000 -36.500000000 22.500000000 -6.500000000 -36.500000000 22.500000000 -5.500000000 -36.500000000 22.500000000 -4.500000000 -36.500000000 22.500000000 -3.500000000 -36.500000000 22.500000000 -2.500000000 -36.500000000 22.500000000 -1.500000000 -36.500000000 22.500000000 -0.500000000 -36.500000000 22.500000000 0.500000000 -36.500000000 22.500000000 1.500000000 -36.500000000 22.500000000 2.500000000 -36.500000000 22.500000000 3.500000000 -36.500000000 22.500000000 4.500000000 -36.500000000 22.500000000 5.500000000 -36.500000000 22.500000000 6.500000000 -36.500000000 22.500000000 7.500000000 -36.500000000 22.500000000 8.500000000 -36.500000000 22.500000000 9.500000000 -36.500000000 22.500000000 10.500000000 -36.500000000 22.500000000 11.500000000 -36.500000000 22.500000000 12.500000000 -36.500000000 22.500000000 13.500000000 -36.500000000 22.500000000 14.500000000 -36.500000000 22.500000000 15.500000000 -36.500000000 22.500000000 16.500000000 -36.500000000 22.500000000 17.500000000 -36.500000000 22.500000000 18.500000000 -36.500000000 22.500000000 
19.500000000 -36.500000000 22.500000000 20.500000000 -36.500000000 22.500000000 21.500000000 -36.500000000 22.500000000 22.500000000 -36.500000000 22.500000000 23.500000000 -36.500000000 22.500000000 24.500000000 -36.500000000 22.500000000 25.499996185 -36.499996185 22.500000000 26.499954224 -36.499954224 22.500000000 27.499591827 -36.499591827 22.500000000 28.497470856 -36.497467041 22.500000000 29.488407135 -36.488403320 22.500000000 30.458978653 -36.458980560 22.500000000 31.384418488 -36.384422302 22.500000000 32.233222961 -36.233222961 22.500000000 32.981101990 -35.981101990 22.500000000 -35.167964935 -35.167964935 22.500000000 -34.622871399 -35.622871399 22.500000000 33.622871399 -35.622871399 22.500000000 34.167964935 -35.167964935 22.500000000 -35.981101990 -33.981101990 22.500000000 -35.622871399 -34.622871399 22.500000000 34.622871399 -34.622871399 22.500000000 34.981101990 -33.981101990 22.500000000 -36.233222961 -33.233222961 22.500000000 35.233222961 -33.233226776 22.500000000 -36.384418488 -32.384422302 22.500000000 35.384418488 -32.384422302 22.500000000 -36.458976746 -31.458978653 22.500000000 35.458980560 -31.458978653 22.500000000 -36.488403320 -30.488407135 22.500000000 35.488403320 -30.488407135 22.500000000 -36.497467041 -29.497472763 22.500000000 35.497474670 -29.497472763 22.500000000 -36.499591827 -28.499593735 22.500000000 35.499591827 -28.499593735 22.500000000 -36.499954224 -27.499954224 22.500000000 35.499954224 -27.499954224 22.500000000 -36.499996185 -26.499996185 22.500000000 35.499996185 -26.499996185 22.500000000 -36.500000000 -25.500000000 22.500000000 35.500000000 -25.500000000 22.500000000 -36.500000000 -24.500000000 22.500000000 35.500000000 -24.500000000 22.500000000 -36.500000000 -23.500000000 22.500000000 35.500000000 -23.500000000 22.500000000 -36.500000000 -22.500000000 22.500000000 35.500000000 -22.500000000 22.500000000 -36.500000000 -21.500000000 22.500000000 35.500000000 -21.500000000 22.500000000 -36.500000000 
-20.500000000 22.500000000 35.500000000 -20.500000000 22.500000000 -36.500000000 -19.500000000 22.500000000 35.500000000 -19.500000000 22.500000000 -36.500000000 -18.500000000 22.500000000 35.500000000 -18.500000000 22.500000000 -36.500000000 -17.500000000 22.500000000 35.500000000 -17.500000000 22.500000000 -36.500000000 -16.500000000 22.500000000 35.500000000 -16.500000000 22.500000000 -36.500000000 -15.500000000 22.500000000 35.500000000 -15.500000000 22.500000000 -36.500000000 -14.500000000 22.500000000 35.500000000 -14.500000000 22.500000000 -36.500000000 -13.500000000 22.500000000 35.500000000 -13.500000000 22.500000000 -36.500000000 -12.500000000 22.500000000 35.500000000 -12.500000000 22.500000000 -36.500000000 -11.500000000 22.500000000 35.500000000 -11.500000000 22.500000000 -36.500000000 -10.500000000 22.500000000 35.500000000 -10.500000000 22.500000000 -36.500000000 -9.500000000 22.500000000 35.500000000 -9.500000000 22.500000000 -36.500000000 -8.500000000 22.500000000 35.500000000 -8.500000000 22.500000000 -36.500000000 -7.500000000 22.500000000 35.500000000 -7.500000000 22.500000000 -36.500000000 -6.500000000 22.500000000 35.500000000 -6.500000000 22.500000000 -36.500000000 -5.500000000 22.500000000 35.500000000 -5.500000000 22.500000000 -36.500000000 -4.500000000 22.500000000 35.500000000 -4.500000000 22.500000000 -36.500000000 -3.500000000 22.500000000 35.500000000 -3.500000000 22.500000000 -36.500000000 -2.500000000 22.500000000 35.500000000 -2.500000000 22.500000000 -36.500000000 -1.500000000 22.500000000 35.500000000 -1.500000000 22.500000000 -36.500000000 -0.500000000 22.500000000 35.500000000 -0.500000000 22.500000000 -36.500000000 0.500000000 22.500000000 35.500000000 0.500000000 22.500000000 -36.500000000 1.500000000 22.500000000 35.500000000 1.500000000 22.500000000 -36.500000000 2.500000000 22.500000000 35.500000000 2.500000000 22.500000000 -36.500000000 3.500000000 22.500000000 35.500000000 3.500000000 22.500000000 -36.500000000 
4.500000000 22.500000000 35.500000000 4.500000000 22.500000000 -36.500000000 5.500000000 22.500000000 35.500000000 5.500000000 22.500000000 -36.500000000 6.500000000 22.500000000 35.500000000 6.500000000 22.500000000 -36.500000000 7.500000000 22.500000000 35.500000000 7.500000000 22.500000000 -36.500000000 8.500000000 22.500000000 35.500000000 8.500000000 22.500000000 -36.500000000 9.500000000 22.500000000 35.500000000 9.500000000 22.500000000 -36.500000000 10.500000000 22.500000000 35.500000000 10.500000000 22.500000000 -36.500000000 11.500000000 22.500000000 35.500000000 11.500000000 22.500000000 -36.500000000 12.500000000 22.500000000 35.500000000 12.500000000 22.500000000 -36.500000000 13.500000000 22.500000000 35.500000000 13.500000000 22.500000000 -36.500000000 14.500000000 22.500000000 35.500000000 14.500000000 22.500000000 -36.500000000 15.500000000 22.500000000 35.500000000 15.500000000 22.500000000 -36.500000000 16.500000000 22.500000000 35.500000000 16.500000000 22.500000000 -36.500000000 17.500000000 22.500000000 35.500000000 17.500000000 22.500000000 -36.500000000 18.500000000 22.500000000 35.500000000 18.500000000 22.500000000 -36.500000000 19.500000000 22.500000000 35.500000000 19.500000000 22.500000000 -36.500000000 20.500000000 22.500000000 35.500000000 20.500000000 22.500000000 -36.500000000 21.500000000 22.500000000 35.500000000 21.500000000 22.500000000 -36.500000000 22.500000000 22.500000000 35.500000000 22.500000000 22.500000000 -36.500000000 23.500000000 22.500000000 35.500000000 23.500000000 22.500000000 -36.500000000 24.500000000 22.500000000 35.500000000 24.500000000 22.500000000 -36.499996185 25.499996185 22.500000000 35.499996185 25.499996185 22.500000000 -36.499954224 26.499954224 22.500000000 35.499954224 26.499954224 22.500000000 -36.499591827 27.499591827 22.500000000 35.499591827 27.499591827 22.500000000 -36.497474670 28.497470856 22.500000000 35.497467041 28.497470856 22.500000000 -36.488403320 29.488407135 22.500000000 
35.488403320 29.488407135 22.500000000 -36.458980560 30.458978653 22.500000000 35.458980560 30.458978653 22.500000000 -36.384422302 31.384418488 22.500000000 35.384422302 31.384418488 22.500000000 -36.233222961 32.233222961 22.500000000 35.233222961 32.233222961 22.500000000 -35.981101990 32.981101990 22.500000000 -35.622871399 33.622871399 22.500000000 34.622871399 33.622871399 22.500000000 34.981101990 32.981101990 22.500000000 -35.167964935 34.167964935 22.500000000 -34.622871399 34.622871399 22.500000000 33.622871399 34.622871399 22.500000000 34.167964935 34.167964935 22.500000000 -33.981101990 34.981101990 22.500000000 -33.233222961 35.233222961 22.500000000 -32.384422302 35.384418488 22.500000000 -31.458978653 35.458976746 22.500000000 -30.488407135 35.488403320 22.500000000 -29.497472763 35.497467041 22.500000000 -28.499593735 35.499591827 22.500000000 -27.499954224 35.499954224 22.500000000 -26.499996185 35.499996185 22.500000000 -25.500000000 35.500000000 22.500000000 -24.500000000 35.500000000 22.500000000 -23.500000000 35.500000000 22.500000000 -22.500000000 35.500000000 22.500000000 -21.500000000 35.500000000 22.500000000 -20.500000000 35.500000000 22.500000000 -19.500000000 35.500000000 22.500000000 -18.500000000 35.500000000 22.500000000 -17.500000000 35.500000000 22.500000000 -16.500000000 35.500000000 22.500000000 -15.500000000 35.500000000 22.500000000 -14.500000000 35.500000000 22.500000000 -13.500000000 35.500000000 22.500000000 -12.500000000 35.500000000 22.500000000 -11.500000000 35.500000000 22.500000000 -10.500000000 35.500000000 22.500000000 -9.500000000 35.500000000 22.500000000 -8.500000000 35.500000000 22.500000000 -7.500000000 35.500000000 22.500000000 -6.500000000 35.500000000 22.500000000 -5.500000000 35.500000000 22.500000000 -4.500000000 35.500000000 22.500000000 -3.500000000 35.500000000 22.500000000 -2.500000000 35.500000000 22.500000000 -1.500000000 35.500000000 22.500000000 -0.500000000 35.500000000 22.500000000 0.500000000 
35.500000000 22.500000000 1.500000000 35.500000000 22.500000000 2.500000000 35.500000000 22.500000000 3.500000000 35.500000000 22.500000000 4.500000000 35.500000000 22.500000000 5.500000000 35.500000000 22.500000000 6.500000000 35.500000000 22.500000000 7.500000000 35.500000000 22.500000000 8.500000000 35.500000000 22.500000000 9.500000000 35.500000000 22.500000000 10.500000000 35.500000000 22.500000000 11.500000000 35.500000000 22.500000000 12.500000000 35.500000000 22.500000000 13.500000000 35.500000000 22.500000000 14.500000000 35.500000000 22.500000000 15.500000000 35.500000000 22.500000000 16.500000000 35.500000000 22.500000000 17.500000000 35.500000000 22.500000000 18.500000000 35.500000000 22.500000000 19.500000000 35.500000000 22.500000000 20.500000000 35.500000000 22.500000000 21.500000000 35.500000000 22.500000000 22.500000000 35.500000000 22.500000000 23.500000000 35.500000000 22.500000000 24.500000000 35.500000000 22.500000000 25.499996185 35.499996185 22.500000000 26.499954224 35.499954224 22.500000000 27.499591827 35.499591827 22.500000000 28.497470856 35.497474670 22.500000000 29.488407135 35.488403320 22.500000000 30.458978653 35.458980560 22.500000000 31.384418488 35.384422302 22.500000000 32.233222961 35.233222961 22.500000000 32.981101990 34.981101990 22.500000000 -33.981101990 -35.981101990 23.499998093 -33.233226776 -36.233222961 23.500000000 -32.384422302 -36.384418488 23.500000000 -31.458978653 -36.458980560 23.500000000 -30.488407135 -36.488403320 23.500000000 -29.497472763 -36.497474670 23.500000000 -28.499593735 -36.499591827 23.500000000 -27.499954224 -36.499954224 23.500000000 -26.499996185 -36.499996185 23.500000000 -25.500000000 -36.500000000 23.500000000 -24.500000000 -36.500000000 23.500000000 -23.500000000 -36.500000000 23.500000000 -22.500000000 -36.500000000 23.500000000 -21.500000000 -36.500000000 23.500000000 -20.500000000 -36.500000000 23.500000000 -19.500000000 -36.500000000 23.500000000 -18.500000000 -36.500000000 
23.500000000 -17.500000000 -36.500000000 23.500000000 -16.500000000 -36.500000000 23.500000000 -15.500000000 -36.500000000 23.500000000 -14.500000000 -36.500000000 23.500000000 -13.500000000 -36.500000000 23.500000000 -12.500000000 -36.500000000 23.500000000 -11.500000000 -36.500000000 23.500000000 -10.500000000 -36.500000000 23.500000000 -9.500000000 -36.500000000 23.500000000 -8.500000000 -36.500000000 23.500000000 -7.500000000 -36.500000000 23.500000000 -6.500000000 -36.500000000 23.500000000 -5.500000000 -36.500000000 23.500000000 -4.500000000 -36.500000000 23.500000000 -3.500000000 -36.500000000 23.500000000 -2.500000000 -36.500000000 23.500000000 -1.500000000 -36.500000000 23.500000000 -0.500000000 -36.500000000 23.500000000 0.500000000 -36.500000000 23.500000000 1.500000000 -36.500000000 23.500000000 2.500000000 -36.500000000 23.500000000 3.500000000 -36.500000000 23.500000000 4.500000000 -36.500000000 23.500000000 5.500000000 -36.500000000 23.500000000 6.500000000 -36.500000000 23.500000000 7.500000000 -36.500000000 23.500000000 8.500000000 -36.500000000 23.500000000 9.500000000 -36.500000000 23.500000000 10.500000000 -36.500000000 23.500000000 11.500000000 -36.500000000 23.500000000 12.500000000 -36.500000000 23.500000000 13.500000000 -36.500000000 23.500000000 14.500000000 -36.500000000 23.500000000 15.500000000 -36.500000000 23.500000000 16.500000000 -36.500000000 23.500000000 17.500000000 -36.500000000 23.500000000 18.500000000 -36.500000000 23.500000000 19.500000000 -36.500000000 23.500000000 20.500000000 -36.500000000 23.500000000 21.500000000 -36.500000000 23.500000000 22.500000000 -36.500000000 23.500000000 23.500000000 -36.500000000 23.500000000 24.500000000 -36.500000000 23.500000000 25.499996185 -36.499996185 23.500000000 26.499954224 -36.499954224 23.500000000 27.499591827 -36.499591827 23.500000000 28.497470856 -36.497467041 23.500000000 29.488407135 -36.488403320 23.500000000 30.458978653 -36.458980560 23.500000000 31.384418488 -36.384422302 
23.500000000 32.233222961 -36.233222961 23.500000000 32.981101990 -35.981101990 23.499998093 -35.167964935 -35.167964935 23.500000000 -34.622867584 -35.622867584 23.500000000 33.622871399 -35.622871399 23.500000000 34.167961121 -35.167961121 23.500000000 -35.981101990 -33.981101990 23.499998093 -35.622867584 -34.622867584 23.500000000 34.622867584 -34.622867584 23.500000000 34.981101990 -33.981101990 23.499998093 -36.233222961 -33.233222961 23.500000000 35.233222961 -33.233226776 23.500000000 -36.384418488 -32.384422302 23.500000000 35.384418488 -32.384422302 23.500000000 -36.458976746 -31.458978653 23.500000000 35.458980560 -31.458978653 23.500000000 -36.488403320 -30.488407135 23.500000000 35.488403320 -30.488407135 23.500000000 -36.497467041 -29.497472763 23.500000000 35.497474670 -29.497472763 23.500000000 -36.499591827 -28.499593735 23.500000000 35.499591827 -28.499593735 23.500000000 -36.499954224 -27.499954224 23.500000000 35.499954224 -27.499954224 23.500000000 -36.499996185 -26.499996185 23.500000000 35.499996185 -26.499996185 23.500000000 -36.500000000 -25.500000000 23.500000000 35.500000000 -25.500000000 23.500000000 -36.500000000 -24.500000000 23.500000000 35.500000000 -24.500000000 23.500000000 -36.500000000 -23.500000000 23.500000000 35.500000000 -23.500000000 23.500000000 -36.500000000 -22.500000000 23.500000000 35.500000000 -22.500000000 23.500000000 -36.500000000 -21.500000000 23.500000000 35.500000000 -21.500000000 23.500000000 -36.500000000 -20.500000000 23.500000000 35.500000000 -20.500000000 23.500000000 -36.500000000 -19.500000000 23.500000000 35.500000000 -19.500000000 23.500000000 -36.500000000 -18.500000000 23.500000000 35.500000000 -18.500000000 23.500000000 -36.500000000 -17.500000000 23.500000000 35.500000000 -17.500000000 23.500000000 -36.500000000 -16.500000000 23.500000000 35.500000000 -16.500000000 23.500000000 -36.500000000 -15.500000000 23.500000000 35.500000000 -15.500000000 23.500000000 -36.500000000 -14.500000000 23.500000000 
35.500000000 -14.500000000 23.500000000 -36.500000000 -13.500000000 23.500000000 35.500000000 -13.500000000 23.500000000 -36.500000000 -12.500000000 23.500000000 35.500000000 -12.500000000 23.500000000 -36.500000000 -11.500000000 23.500000000 35.500000000 -11.500000000 23.500000000 -36.500000000 -10.500000000 23.500000000 35.500000000 -10.500000000 23.500000000 -36.500000000 -9.500000000 23.500000000 35.500000000 -9.500000000 23.500000000 -36.500000000 -8.500000000 23.500000000 35.500000000 -8.500000000 23.500000000 -36.500000000 -7.500000000 23.500000000 35.500000000 -7.500000000 23.500000000 -36.500000000 -6.500000000 23.500000000 35.500000000 -6.500000000 23.500000000 -36.500000000 -5.500000000 23.500000000 35.500000000 -5.500000000 23.500000000 -36.500000000 -4.500000000 23.500000000 35.500000000 -4.500000000 23.500000000 -36.500000000 -3.500000000 23.500000000 35.500000000 -3.500000000 23.500000000 -36.500000000 -2.500000000 23.500000000 35.500000000 -2.500000000 23.500000000 -36.500000000 -1.500000000 23.500000000 35.500000000 -1.500000000 23.500000000 -36.500000000 -0.500000000 23.500000000 35.500000000 -0.500000000 23.500000000 -36.500000000 0.500000000 23.500000000 35.500000000 0.500000000 23.500000000 -36.500000000 1.500000000 23.500000000 35.500000000 1.500000000 23.500000000 -36.500000000 2.500000000 23.500000000 35.500000000 2.500000000 23.500000000 -36.500000000 3.500000000 23.500000000 35.500000000 3.500000000 23.500000000 -36.500000000 4.500000000 23.500000000 35.500000000 4.500000000 23.500000000 -36.500000000 5.500000000 23.500000000 35.500000000 5.500000000 23.500000000 -36.500000000 6.500000000 23.500000000 35.500000000 6.500000000 23.500000000 -36.500000000 7.500000000 23.500000000 35.500000000 7.500000000 23.500000000 -36.500000000 8.500000000 23.500000000 35.500000000 8.500000000 23.500000000 -36.500000000 9.500000000 23.500000000 35.500000000 9.500000000 23.500000000 -36.500000000 10.500000000 23.500000000 35.500000000 10.500000000 
23.500000000 -36.500000000 11.500000000 23.500000000 35.500000000 11.500000000 23.500000000 -36.500000000 12.500000000 23.500000000 35.500000000 12.500000000 23.500000000 -36.500000000 13.500000000 23.500000000 35.500000000 13.500000000 23.500000000 -36.500000000 14.500000000 23.500000000 35.500000000 14.500000000 23.500000000 -36.500000000 15.500000000 23.500000000 35.500000000 15.500000000 23.500000000 -36.500000000 16.500000000 23.500000000 35.500000000 16.500000000 23.500000000 -36.500000000 17.500000000 23.500000000 35.500000000 17.500000000 23.500000000 -36.500000000 18.500000000 23.500000000 35.500000000 18.500000000 23.500000000 -36.500000000 19.500000000 23.500000000 35.500000000 19.500000000 23.500000000 -36.500000000 20.500000000 23.500000000 35.500000000 20.500000000 23.500000000 -36.500000000 21.500000000 23.500000000 35.500000000 21.500000000 23.500000000 -36.500000000 22.500000000 23.500000000 35.500000000 22.500000000 23.500000000 -36.500000000 23.500000000 23.500000000 35.500000000 23.500000000 23.500000000 -36.500000000 24.500000000 23.500000000 35.500000000 24.500000000 23.500000000 -36.499996185 25.499996185 23.500000000 35.499996185 25.499996185 23.500000000 -36.499954224 26.499954224 23.500000000 35.499954224 26.499954224 23.500000000 -36.499591827 27.499591827 23.500000000 35.499591827 27.499591827 23.500000000 -36.497474670 28.497470856 23.500000000 35.497467041 28.497470856 23.500000000 -36.488403320 29.488407135 23.500000000 35.488403320 29.488407135 23.500000000 -36.458980560 30.458978653 23.500000000 35.458980560 30.458978653 23.500000000 -36.384422302 31.384418488 23.500000000 35.384422302 31.384418488 23.500000000 -36.233222961 32.233222961 23.500000000 35.233222961 32.233222961 23.500000000 -35.981101990 32.981101990 23.499998093 -35.622867584 33.622867584 23.500000000 34.622871399 33.622867584 23.500000000 34.981101990 32.981101990 23.499998093 -35.167964935 34.167964935 23.500000000 -34.622867584 34.622867584 23.500000000 
33.622867584 34.622867584 23.500000000 34.167961121 34.167961121 23.500000000 -33.981101990 34.981101990 23.499998093 -33.233222961 35.233222961 23.500000000 -32.384422302 35.384418488 23.500000000 -31.458978653 35.458976746 23.500000000 -30.488407135 35.488403320 23.500000000 -29.497472763 35.497467041 23.500000000 -28.499593735 35.499591827 23.500000000 -27.499954224 35.499954224 23.500000000 -26.499996185 35.499996185 23.500000000 -25.500000000 35.500000000 23.500000000 -24.500000000 35.500000000 23.500000000 -23.500000000 35.500000000 23.500000000 -22.500000000 35.500000000 23.500000000 -21.500000000 35.500000000 23.500000000 -20.500000000 35.500000000 23.500000000 -19.500000000 35.500000000 23.500000000 -18.500000000 35.500000000 23.500000000 -17.500000000 35.500000000 23.500000000 -16.500000000 35.500000000 23.500000000 -15.500000000 35.500000000 23.500000000 -14.500000000 35.500000000 23.500000000 -13.500000000 35.500000000 23.500000000 -12.500000000 35.500000000 23.500000000 -11.500000000 35.500000000 23.500000000 -10.500000000 35.500000000 23.500000000 -9.500000000 35.500000000 23.500000000 -8.500000000 35.500000000 23.500000000 -7.500000000 35.500000000 23.500000000 -6.500000000 35.500000000 23.500000000 -5.500000000 35.500000000 23.500000000 -4.500000000 35.500000000 23.500000000 -3.500000000 35.500000000 23.500000000 -2.500000000 35.500000000 23.500000000 -1.500000000 35.500000000 23.500000000 -0.500000000 35.500000000 23.500000000 0.500000000 35.500000000 23.500000000 1.500000000 35.500000000 23.500000000 2.500000000 35.500000000 23.500000000 3.500000000 35.500000000 23.500000000 4.500000000 35.500000000 23.500000000 5.500000000 35.500000000 23.500000000 6.500000000 35.500000000 23.500000000 7.500000000 35.500000000 23.500000000 8.500000000 35.500000000 23.500000000 9.500000000 35.500000000 23.500000000 10.500000000 35.500000000 23.500000000 11.500000000 35.500000000 23.500000000 12.500000000 35.500000000 23.500000000 13.500000000 35.500000000 
23.500000000 14.500000000 35.500000000 23.500000000 15.500000000 35.500000000 23.500000000 16.500000000 35.500000000 23.500000000 17.500000000 35.500000000 23.500000000 18.500000000 35.500000000 23.500000000 19.500000000 35.500000000 23.500000000 20.500000000 35.500000000 23.500000000 21.500000000 35.500000000 23.500000000 22.500000000 35.500000000 23.500000000 23.500000000 35.500000000 23.500000000 24.500000000 35.500000000 23.500000000 25.499996185 35.499996185 23.500000000 26.499954224 35.499954224 23.500000000 27.499591827 35.499591827 23.500000000 28.497470856 35.497474670 23.500000000 29.488407135 35.488403320 23.500000000 30.458978653 35.458980560 23.500000000 31.384418488 35.384422302 23.500000000 32.233222961 35.233222961 23.500000000 32.981101990 34.981101990 23.499998093 -33.981086731 -35.981086731 24.499979019 -33.233219147 -36.233203888 24.499984741 -32.384422302 -36.384407043 24.499996185 -31.458978653 -36.458972931 24.500000000 -30.488407135 -36.488403320 24.500000000 -29.497472763 -36.497474670 24.500000000 -28.499593735 -36.499591827 24.500000000 -27.499954224 -36.499954224 24.500000000 -26.499996185 -36.499996185 24.500000000 -25.500000000 -36.500000000 24.500000000 -24.500000000 -36.500000000 24.500000000 -23.500000000 -36.500000000 24.500000000 -22.500000000 -36.500000000 24.500000000 -21.500000000 -36.500000000 24.500000000 -20.500000000 -36.500000000 24.500000000 -19.500000000 -36.500000000 24.500000000 -18.500000000 -36.500000000 24.500000000 -17.500000000 -36.500000000 24.500000000 -16.500000000 -36.500000000 24.500000000 -15.500000000 -36.500000000 24.500000000 -14.500000000 -36.500000000 24.500000000 -13.500000000 -36.500000000 24.500000000 -12.500000000 -36.500000000 24.500000000 -11.500000000 -36.500000000 24.500000000 -10.500000000 -36.500000000 24.500000000 -9.500000000 -36.500000000 24.500000000 -8.500000000 -36.500000000 24.500000000 -7.500000000 -36.500000000 24.500000000 -6.500000000 -36.500000000 24.500000000 -5.500000000 
-36.500000000 24.500000000 -4.500000000 -36.500000000 24.500000000 -3.500000000 -36.500000000 24.500000000 -2.500000000 -36.500000000 24.500000000 -1.500000000 -36.500000000 24.500000000 -0.500000000 -36.500000000 24.500000000 0.500000000 -36.500000000 24.500000000 1.500000000 -36.500000000 24.500000000 2.500000000 -36.500000000 24.500000000 3.500000000 -36.500000000 24.500000000 4.500000000 -36.500000000 24.500000000 5.500000000 -36.500000000 24.500000000 6.500000000 -36.500000000 24.500000000 7.500000000 -36.500000000 24.500000000 8.500000000 -36.500000000 24.500000000 9.500000000 -36.500000000 24.500000000 10.500000000 -36.500000000 24.500000000 11.500000000 -36.500000000 24.500000000 12.500000000 -36.500000000 24.500000000 13.500000000 -36.500000000 24.500000000 14.500000000 -36.500000000 24.500000000 15.500000000 -36.500000000 24.500000000 16.500000000 -36.500000000 24.500000000 17.500000000 -36.500000000 24.500000000 18.500000000 -36.500000000 24.500000000 19.500000000 -36.500000000 24.500000000 20.500000000 -36.500000000 24.500000000 21.500000000 -36.500000000 24.500000000 22.500000000 -36.500000000 24.500000000 23.500000000 -36.500000000 24.500000000 24.500000000 -36.500000000 24.500000000 25.499996185 -36.499996185 24.500000000 26.499954224 -36.499954224 24.500000000 27.499591827 -36.499591827 24.500000000 28.497470856 -36.497467041 24.500000000 29.488407135 -36.488403320 24.500000000 30.458978653 -36.458976746 24.500000000 31.384418488 -36.384407043 24.499996185 32.233219147 -36.233207703 24.499988556 32.981086731 -35.981086731 24.499979019 -35.167949677 -35.167949677 24.499992371 -34.622844696 -35.622856140 24.499986649 33.622844696 -35.622856140 24.499986649 34.167949677 -35.167949677 24.499992371 -35.981086731 -33.981086731 24.499979019 -35.622856140 -34.622844696 24.499988556 34.622856140 -34.622844696 24.499988556 34.981086731 -33.981086731 24.499979019 -36.233207703 -33.233219147 24.499984741 35.233203888 -33.233219147 24.499984741 -36.384407043 
-32.384422302 24.499996185 35.384407043 -32.384422302 24.499996185 -36.458972931 -31.458978653 24.500000000 35.458972931 -31.458978653 24.500000000 -36.488403320 -30.488407135 24.500000000 35.488403320 -30.488407135 24.500000000 -36.497467041 -29.497472763 24.500000000 35.497474670 -29.497472763 24.500000000 -36.499591827 -28.499593735 24.500000000 35.499591827 -28.499593735 24.500000000 -36.499954224 -27.499954224 24.500000000 35.499954224 -27.499954224 24.500000000 -36.499996185 -26.499996185 24.500000000 35.499996185 -26.499996185 24.500000000 -36.500000000 -25.500000000 24.500000000 35.500000000 -25.500000000 24.500000000 -36.500000000 -24.500000000 24.500000000 35.500000000 -24.500000000 24.500000000 -36.500000000 -23.500000000 24.500000000 35.500000000 -23.500000000 24.500000000 -36.500000000 -22.500000000 24.500000000 35.500000000 -22.500000000 24.500000000 -36.500000000 -21.500000000 24.500000000 35.500000000 -21.500000000 24.500000000 -36.500000000 -20.500000000 24.500000000 35.500000000 -20.500000000 24.500000000 -36.500000000 -19.500000000 24.500000000 35.500000000 -19.500000000 24.500000000 -36.500000000 -18.500000000 24.500000000 35.500000000 -18.500000000 24.500000000 -36.500000000 -17.500000000 24.500000000 35.500000000 -17.500000000 24.500000000 -36.500000000 -16.500000000 24.500000000 35.500000000 -16.500000000 24.500000000 -36.500000000 -15.500000000 24.500000000 35.500000000 -15.500000000 24.500000000 -36.500000000 -14.500000000 24.500000000 35.500000000 -14.500000000 24.500000000 -36.500000000 -13.500000000 24.500000000 35.500000000 -13.500000000 24.500000000 -36.500000000 -12.500000000 24.500000000 35.500000000 -12.500000000 24.500000000 -36.500000000 -11.500000000 24.500000000 35.500000000 -11.500000000 24.500000000 -36.500000000 -10.500000000 24.500000000 35.500000000 -10.500000000 24.500000000 -36.500000000 -9.500000000 24.500000000 35.500000000 -9.500000000 24.500000000 -36.500000000 -8.500000000 24.500000000 35.500000000 -8.500000000 
24.500000000 -36.500000000 -7.500000000 24.500000000 35.500000000 -7.500000000 24.500000000 -36.500000000 -6.500000000 24.500000000 35.500000000 -6.500000000 24.500000000 -36.500000000 -5.500000000 24.500000000 35.500000000 -5.500000000 24.500000000 -36.500000000 -4.500000000 24.500000000 35.500000000 -4.500000000 24.500000000 -36.500000000 -3.500000000 24.500000000 35.500000000 -3.500000000 24.500000000 -36.500000000 -2.500000000 24.500000000 35.500000000 -2.500000000 24.500000000 -36.500000000 -1.500000000 24.500000000 35.500000000 -1.500000000 24.500000000 -36.500000000 -0.500000000 24.500000000 35.500000000 -0.500000000 24.500000000 -36.500000000 0.500000000 24.500000000 35.500000000 0.500000000 24.500000000 -36.500000000 1.500000000 24.500000000 35.500000000 1.500000000 24.500000000 -36.500000000 2.500000000 24.500000000 35.500000000 2.500000000 24.500000000 -36.500000000 3.500000000 24.500000000 35.500000000 3.500000000 24.500000000 -36.500000000 4.500000000 24.500000000 35.500000000 4.500000000 24.500000000 -36.500000000 5.500000000 24.500000000 35.500000000 5.500000000 24.500000000 -36.500000000 6.500000000 24.500000000 35.500000000 6.500000000 24.500000000 -36.500000000 7.500000000 24.500000000 35.500000000 7.500000000 24.500000000 -36.500000000 8.500000000 24.500000000 35.500000000 8.500000000 24.500000000 -36.500000000 9.500000000 24.500000000 35.500000000 9.500000000 24.500000000 -36.500000000 10.500000000 24.500000000 35.500000000 10.500000000 24.500000000 -36.500000000 11.500000000 24.500000000 35.500000000 11.500000000 24.500000000 -36.500000000 12.500000000 24.500000000 35.500000000 12.500000000 24.500000000 -36.500000000 13.500000000 24.500000000 35.500000000 13.500000000 24.500000000 -36.500000000 14.500000000 24.500000000 35.500000000 14.500000000 24.500000000 -36.500000000 15.500000000 24.500000000 35.500000000 15.500000000 24.500000000 -36.500000000 16.500000000 24.500000000 35.500000000 16.500000000 24.500000000 -36.500000000 17.500000000 
24.500000000 35.500000000 17.500000000 24.500000000 -36.500000000 18.500000000 24.500000000 35.500000000 18.500000000 24.500000000 -36.500000000 19.500000000 24.500000000 35.500000000 19.500000000 24.500000000 -36.500000000 20.500000000 24.500000000 35.500000000 20.500000000 24.500000000 -36.500000000 21.500000000 24.500000000 35.500000000 21.500000000 24.500000000 -36.500000000 22.500000000 24.500000000 35.500000000 22.500000000 24.500000000 -36.500000000 23.500000000 24.500000000 35.500000000 23.500000000 24.500000000 -36.500000000 24.500000000 24.500000000 35.500000000 24.500000000 24.500000000 -36.499996185 25.499996185 24.500000000 35.499996185 25.499996185 24.500000000 -36.499954224 26.499954224 24.500000000 35.499954224 26.499954224 24.500000000 -36.499591827 27.499591827 24.500000000 35.499591827 27.499591827 24.500000000 -36.497474670 28.497470856 24.500000000 35.497467041 28.497470856 24.500000000 -36.488403320 29.488407135 24.500000000 35.488403320 29.488407135 24.500000000 -36.458976746 30.458978653 24.500000000 35.458976746 30.458978653 24.500000000 -36.384407043 31.384418488 24.499996185 35.384407043 31.384418488 24.499996185 -36.233207703 32.233219147 24.499988556 35.233207703 32.233219147 24.499988556 -35.981086731 32.981086731 24.499979019 -35.622856140 33.622844696 24.499986649 34.622856140 33.622844696 24.499986649 34.981086731 32.981086731 24.499979019 -35.167949677 34.167949677 24.499992371 -34.622844696 34.622856140 24.499988556 33.622844696 34.622856140 24.499988556 34.167949677 34.167949677 24.499992371 -33.981086731 34.981086731 24.499979019 -33.233219147 35.233207703 24.499984741 -32.384422302 35.384407043 24.499996185 -31.458978653 35.458972931 24.500000000 -30.488407135 35.488403320 24.500000000 -29.497472763 35.497467041 24.500000000 -28.499593735 35.499591827 24.500000000 -27.499954224 35.499954224 24.500000000 -26.499996185 35.499996185 24.500000000 -25.500000000 35.500000000 24.500000000 -24.500000000 35.500000000 24.500000000 
-23.500000000 35.500000000 24.500000000 -22.500000000 35.500000000 24.500000000 -21.500000000 35.500000000 24.500000000 -20.500000000 35.500000000 24.500000000 -19.500000000 35.500000000 24.500000000 -18.500000000 35.500000000 24.500000000 -17.500000000 35.500000000 24.500000000 -16.500000000 35.500000000 24.500000000 -15.500000000 35.500000000 24.500000000 -14.500000000 35.500000000 24.500000000 -13.500000000 35.500000000 24.500000000 -12.500000000 35.500000000 24.500000000 -11.500000000 35.500000000 24.500000000 -10.500000000 35.500000000 24.500000000 -9.500000000 35.500000000 24.500000000 -8.500000000 35.500000000 24.500000000 -7.500000000 35.500000000 24.500000000 -6.500000000 35.500000000 24.500000000 -5.500000000 35.500000000 24.500000000 -4.500000000 35.500000000 24.500000000 -3.500000000 35.500000000 24.500000000 -2.500000000 35.500000000 24.500000000 -1.500000000 35.500000000 24.500000000 -0.500000000 35.500000000 24.500000000 0.500000000 35.500000000 24.500000000 1.500000000 35.500000000 24.500000000 2.500000000 35.500000000 24.500000000 3.500000000 35.500000000 24.500000000 4.500000000 35.500000000 24.500000000 5.500000000 35.500000000 24.500000000 6.500000000 35.500000000 24.500000000 7.500000000 35.500000000 24.500000000 8.500000000 35.500000000 24.500000000 9.500000000 35.500000000 24.500000000 10.500000000 35.500000000 24.500000000 11.500000000 35.500000000 24.500000000 12.500000000 35.500000000 24.500000000 13.500000000 35.500000000 24.500000000 14.500000000 35.500000000 24.500000000 15.500000000 35.500000000 24.500000000 16.500000000 35.500000000 24.500000000 17.500000000 35.500000000 24.500000000 18.500000000 35.500000000 24.500000000 19.500000000 35.500000000 24.500000000 20.500000000 35.500000000 24.500000000 21.500000000 35.500000000 24.500000000 22.500000000 35.500000000 24.500000000 23.500000000 35.500000000 24.500000000 24.500000000 35.500000000 24.500000000 25.499996185 35.499996185 24.500000000 26.499954224 35.499954224 24.500000000 
27.499591827 35.499591827 24.500000000 28.497470856 35.497474670 24.500000000 29.488407135 35.488403320 24.500000000 30.458978653 35.458976746 24.500000000 31.384418488 35.384407043 24.499996185 32.233219147 35.233207703 24.499988556 32.981086731 34.981086731 24.499979019 -33.980976105 -35.980957031 25.499824524 -33.233169556 -36.233074188 25.499874115 -32.384407043 -36.384307861 25.499948502 -31.458978653 -36.458930969 25.499988556 -30.488407135 -36.488388062 25.499996185 -29.497472763 -36.497470856 25.499996185 -28.499593735 -36.499588013 25.499996185 -27.499954224 -36.499950409 25.499996185 -26.499996185 -36.499992371 25.499996185 -25.500000000 -36.499996185 25.499996185 -24.500000000 -36.499996185 25.499996185 -23.500000000 -36.499996185 25.499996185 -22.500000000 -36.499996185 25.499996185 -21.500000000 -36.499996185 25.499996185 -20.500000000 -36.499996185 25.499996185 -19.500000000 -36.499996185 25.499996185 -18.500000000 -36.499996185 25.499996185 -17.500000000 -36.499996185 25.499996185 -16.500000000 -36.499996185 25.499996185 -15.500000000 -36.499996185 25.499996185 -14.500000000 -36.499996185 25.499996185 -13.500000000 -36.499996185 25.499996185 -12.500000000 -36.499996185 25.499996185 -11.500000000 -36.499996185 25.499996185 -10.500000000 -36.499996185 25.499996185 -9.500000000 -36.499996185 25.499996185 -8.500000000 -36.499996185 25.499996185 -7.500000000 -36.499996185 25.499996185 -6.500000000 -36.499996185 25.499996185 -5.500000000 -36.499996185 25.499996185 -4.500000000 -36.499996185 25.499996185 -3.500000000 -36.499996185 25.499996185 -2.500000000 -36.499996185 25.499996185 -1.500000000 -36.499996185 25.499996185 -0.500000000 -36.499996185 25.499996185 0.500000000 -36.499996185 25.499996185 1.500000000 -36.499996185 25.499996185 2.500000000 -36.499996185 25.499996185 3.500000000 -36.499996185 25.499996185 4.500000000 -36.499996185 25.499996185 5.500000000 -36.499996185 25.499996185 6.500000000 -36.499996185 25.499996185 7.500000000 -36.499996185 
25.499996185 8.500000000 -36.499996185 25.499996185 9.500000000 -36.499996185 25.499996185 10.500000000 -36.499996185 25.499996185 11.500000000 -36.499996185 25.499996185 12.500000000 -36.499996185 25.499996185 13.500000000 -36.499996185 25.499996185 14.500000000 -36.499996185 25.499996185 15.500000000 -36.499996185 25.499996185 16.500000000 -36.499996185 25.499996185 17.500000000 -36.499996185 25.499996185 18.500000000 -36.499996185 25.499996185 19.500000000 -36.499996185 25.499996185 20.500000000 -36.499996185 25.499996185 21.500000000 -36.499996185 25.499996185 22.500000000 -36.499996185 25.499996185 23.500000000 -36.499996185 25.499996185 24.500000000 -36.499996185 25.499996185 25.499996185 -36.499992371 25.499996185 26.499954224 -36.499950409 25.499996185 27.499591827 -36.499588013 25.499996185 28.497470856 -36.497467041 25.499996185 29.488407135 -36.488391876 25.499996185 30.458974838 -36.458934784 25.499988556 31.384403229 -36.384307861 25.499948502 32.233165741 -36.233070374 25.499874115 32.980976105 -35.980957031 25.499826431 -35.167804718 -35.167808533 25.499902725 -34.622692108 -35.622734070 25.499866486 33.622692108 -35.622734070 25.499866486 34.167808533 -35.167804718 25.499902725 -35.980957031 -33.980976105 25.499824524 -35.622734070 -34.622692108 25.499866486 34.622734070 -34.622692108 25.499868393 34.980957031 -33.980976105 25.499824524 -36.233074188 -33.233169556 25.499874115 35.233074188 -33.233165741 25.499874115 -36.384307861 -32.384407043 25.499948502 35.384307861 -32.384407043 25.499948502 -36.458930969 -31.458978653 25.499988556 35.458930969 -31.458978653 25.499988556 -36.488388062 -30.488407135 25.499996185 35.488388062 -30.488407135 25.499996185 -36.497467041 -29.497472763 25.499996185 35.497470856 -29.497472763 25.499996185 -36.499584198 -28.499593735 25.499996185 35.499588013 -28.499593735 25.499996185 -36.499950409 -27.499954224 25.499996185 35.499950409 -27.499954224 25.499996185 -36.499992371 -26.499996185 25.499996185 35.499992371 
-26.499996185 25.499996185 -36.499996185 -25.500000000 25.499996185 35.499996185 -25.500000000 25.499996185 -36.499996185 -24.500000000 25.499996185 35.499996185 -24.500000000 25.499996185 -36.499996185 -23.500000000 25.499996185 35.499996185 -23.500000000 25.499996185 -36.499996185 -22.500000000 25.499996185 35.499996185 -22.500000000 25.499996185 -36.499996185 -21.500000000 25.499996185 35.499996185 -21.500000000 25.499996185 -36.499996185 -20.500000000 25.499996185 35.499996185 -20.500000000 25.499996185 -36.499996185 -19.500000000 25.499996185 35.499996185 -19.500000000 25.499996185 -36.499996185 -18.500000000 25.499996185 35.499996185 -18.500000000 25.499996185 -36.499996185 -17.500000000 25.499996185 35.499996185 -17.500000000 25.499996185 -36.499996185 -16.500000000 25.499996185 35.499996185 -16.500000000 25.499996185 -36.499996185 -15.500000000 25.499996185 35.499996185 -15.500000000 25.499996185 -36.499996185 -14.500000000 25.499996185 35.499996185 -14.500000000 25.499996185 -36.499996185 -13.500000000 25.499996185 35.499996185 -13.500000000 25.499996185 -36.499996185 -12.500000000 25.499996185 35.499996185 -12.500000000 25.499996185 -36.499996185 -11.500000000 25.499996185 35.499996185 -11.500000000 25.499996185 -36.499996185 -10.500000000 25.499996185 35.499996185 -10.500000000 25.499996185 -36.499996185 -9.500000000 25.499996185 35.499996185 -9.500000000 25.499996185 -36.499996185 -8.500000000 25.499996185 35.499996185 -8.500000000 25.499996185 -36.499996185 -7.500000000 25.499996185 35.499996185 -7.500000000 25.499996185 -36.499996185 -6.500000000 25.499996185 35.499996185 -6.500000000 25.499996185 -36.499996185 -5.500000000 25.499996185 35.499996185 -5.500000000 25.499996185 -36.499996185 -4.500000000 25.499996185 35.499996185 -4.500000000 25.499996185 -36.499996185 -3.500000000 25.499996185 35.499996185 -3.500000000 25.499996185 -36.499996185 -2.500000000 25.499996185 35.499996185 -2.500000000 25.499996185 -36.499996185 -1.500000000 25.499996185 
35.499996185 -1.500000000 25.499996185 -36.499996185 -0.500000000 25.499996185 35.499996185 -0.500000000 25.499996185 -36.499996185 0.500000000 25.499996185 35.499996185 0.500000000 25.499996185 -36.499996185 1.500000000 25.499996185 35.499996185 1.500000000 25.499996185 -36.499996185 2.500000000 25.499996185 35.499996185 2.500000000 25.499996185 -36.499996185 3.500000000 25.499996185 35.499996185 3.500000000 25.499996185 -36.499996185 4.500000000 25.499996185 35.499996185 4.500000000 25.499996185 -36.499996185 5.500000000 25.499996185 35.499996185 5.500000000 25.499996185 -36.499996185 6.500000000 25.499996185 35.499996185 6.500000000 25.499996185 -36.499996185 7.500000000 25.499996185 35.499996185 7.500000000 25.499996185 -36.499996185 8.500000000 25.499996185 35.499996185 8.500000000 25.499996185 -36.499996185 9.500000000 25.499996185 35.499996185 9.500000000 25.499996185 -36.499996185 10.500000000 25.499996185 35.499996185 10.500000000 25.499996185 -36.499996185 11.500000000 25.499996185 35.499996185 11.500000000 25.499996185 -36.499996185 12.500000000 25.499996185 35.499996185 12.500000000 25.499996185 -36.499996185 13.500000000 25.499996185 35.499996185 13.500000000 25.499996185 -36.499996185 14.500000000 25.499996185 35.499996185 14.500000000 25.499996185 -36.499996185 15.500000000 25.499996185 35.499996185 15.500000000 25.499996185 -36.499996185 16.500000000 25.499996185 35.499996185 16.500000000 25.499996185 -36.499996185 17.500000000 25.499996185 35.499996185 17.500000000 25.499996185 -36.499996185 18.500000000 25.499996185 35.499996185 18.500000000 25.499996185 -36.499996185 19.500000000 25.499996185 35.499996185 19.500000000 25.499996185 -36.499996185 20.500000000 25.499996185 35.499996185 20.500000000 25.499996185 -36.499996185 21.500000000 25.499996185 35.499996185 21.500000000 25.499996185 -36.499996185 22.500000000 25.499996185 35.499996185 22.500000000 25.499996185 -36.499996185 23.500000000 25.499996185 35.499996185 23.500000000 25.499996185 
-36.499996185 24.500000000 25.499996185 35.499996185 24.500000000 25.499996185 -36.499992371 25.499996185 25.499996185 35.499992371 25.499996185 25.499996185 -36.499950409 26.499954224 25.499996185 35.499950409 26.499954224 25.499996185 -36.499588013 27.499591827 25.499996185 35.499588013 27.499591827 25.499996185 -36.497467041 28.497470856 25.499996185 35.497467041 28.497470856 25.499996185 -36.488388062 29.488407135 25.499996185 35.488391876 29.488407135 25.499996185 -36.458934784 30.458974838 25.499988556 35.458934784 30.458974838 25.499988556 -36.384307861 31.384399414 25.499948502 35.384307861 31.384403229 25.499948502 -36.233074188 32.233165741 25.499874115 35.233070374 32.233165741 25.499874115 -35.980957031 32.980976105 25.499824524 -35.622734070 33.622692108 25.499866486 34.622734070 33.622692108 25.499866486 34.980957031 32.980976105 25.499826431 -35.167808533 34.167804718 25.499902725 -34.622692108 34.622734070 25.499866486 33.622692108 34.622734070 25.499868393 34.167804718 34.167808533 25.499902725 -33.980976105 34.980957031 25.499824524 -33.233169556 35.233074188 25.499874115 -32.384407043 35.384307861 25.499948502 -31.458978653 35.458930969 25.499988556 -30.488407135 35.488388062 25.499996185 -29.497472763 35.497467041 25.499996185 -28.499593735 35.499584198 25.499996185 -27.499954224 35.499950409 25.499996185 -26.499996185 35.499992371 25.499996185 -25.500000000 35.499996185 25.499996185 -24.500000000 35.499996185 25.499996185 -23.500000000 35.499996185 25.499996185 -22.500000000 35.499996185 25.499996185 -21.500000000 35.499996185 25.499996185 -20.500000000 35.499996185 25.499996185 -19.500000000 35.499996185 25.499996185 -18.500000000 35.499996185 25.499996185 -17.500000000 35.499996185 25.499996185 -16.500000000 35.499996185 25.499996185 -15.500000000 35.499996185 25.499996185 -14.500000000 35.499996185 25.499996185 -13.500000000 35.499996185 25.499996185 -12.500000000 35.499996185 25.499996185 -11.500000000 35.499996185 25.499996185 
-10.500000000 35.499996185 25.499996185 -9.500000000 35.499996185 25.499996185 -8.500000000 35.499996185 25.499996185 -7.500000000 35.499996185 25.499996185 -6.500000000 35.499996185 25.499996185 -5.500000000 35.499996185 25.499996185 -4.500000000 35.499996185 25.499996185 -3.500000000 35.499996185 25.499996185 -2.500000000 35.499996185 25.499996185 -1.500000000 35.499996185 25.499996185 -0.500000000 35.499996185 25.499996185 0.500000000 35.499996185 25.499996185 1.500000000 35.499996185 25.499996185 2.500000000 35.499996185 25.499996185 3.500000000 35.499996185 25.499996185 4.500000000 35.499996185 25.499996185 5.500000000 35.499996185 25.499996185 6.500000000 35.499996185 25.499996185 7.500000000 35.499996185 25.499996185 8.500000000 35.499996185 25.499996185 9.500000000 35.499996185 25.499996185 10.500000000 35.499996185 25.499996185 11.500000000 35.499996185 25.499996185 12.500000000 35.499996185 25.499996185 13.500000000 35.499996185 25.499996185 14.500000000 35.499996185 25.499996185 15.500000000 35.499996185 25.499996185 16.500000000 35.499996185 25.499996185 17.500000000 35.499996185 25.499996185 18.500000000 35.499996185 25.499996185 19.500000000 35.499996185 25.499996185 20.500000000 35.499996185 25.499996185 21.500000000 35.499996185 25.499996185 22.500000000 35.499996185 25.499996185 23.500000000 35.499996185 25.499996185 24.500000000 35.499996185 25.499996185 25.499996185 35.499992371 25.499996185 26.499954224 35.499950409 25.499996185 27.499591827 35.499588013 25.499996185 28.497470856 35.497467041 25.499996185 29.488407135 35.488388062 25.499996185 30.458974838 35.458934784 25.499988556 31.384403229 35.384307861 25.499948502 32.233165741 35.233074188 25.499874115 32.980968475 34.980957031 25.499824524 -33.980335236 -35.980194092 26.498950958 -33.232864380 -36.232299805 26.499225616 -32.384296417 -36.383720398 26.499622345 -31.458948135 -36.458606720 26.499858856 -30.488397598 -36.488258362 26.499938965 -29.497472763 -36.497406006 26.499954224 
-28.499593735 -36.499549866 26.499954224 -27.499954224 -36.499908447 26.499954224 -26.499996185 -36.499950409 26.499954224 -25.500000000 -36.499954224 26.499954224 -24.500000000 -36.499954224 26.499954224 -23.500000000 -36.499954224 26.499954224 -22.500000000 -36.499954224 26.499954224 -21.500000000 -36.499954224 26.499954224 -20.500000000 -36.499954224 26.499954224 -19.500000000 -36.499954224 26.499954224 -18.500000000 -36.499954224 26.499954224 -17.500000000 -36.499954224 26.499954224 -16.500000000 -36.499954224 26.499954224 -15.500000000 -36.499954224 26.499954224 -14.500000000 -36.499954224 26.499954224 -13.500000000 -36.499954224 26.499954224 -12.500000000 -36.499954224 26.499954224 -11.500000000 -36.499954224 26.499954224 -10.500000000 -36.499954224 26.499954224 -9.500000000 -36.499954224 26.499954224 -8.500000000 -36.499954224 26.499954224 -7.500000000 -36.499954224 26.499954224 -6.500000000 -36.499954224 26.499954224 -5.500000000 -36.499954224 26.499954224 -4.500000000 -36.499954224 26.499954224 -3.500000000 -36.499954224 26.499954224 -2.500000000 -36.499954224 26.499954224 -1.500000000 -36.499954224 26.499954224 -0.500000000 -36.499954224 26.499954224 0.500000000 -36.499954224 26.499954224 1.500000000 -36.499954224 26.499954224 2.500000000 -36.499954224 26.499954224 3.500000000 -36.499954224 26.499954224 4.500000000 -36.499954224 26.499954224 5.500000000 -36.499954224 26.499954224 6.500000000 -36.499954224 26.499954224 7.500000000 -36.499954224 26.499954224 8.500000000 -36.499954224 26.499954224 9.500000000 -36.499954224 26.499954224 10.500000000 -36.499954224 26.499954224 11.500000000 -36.499954224 26.499954224 12.500000000 -36.499954224 26.499954224 13.500000000 -36.499954224 26.499954224 14.500000000 -36.499954224 26.499954224 15.500000000 -36.499954224 26.499954224 16.500000000 -36.499954224 26.499954224 17.500000000 -36.499954224 26.499954224 18.500000000 -36.499954224 26.499954224 19.500000000 -36.499954224 26.499954224 20.500000000 -36.499954224 
26.499954224 21.500000000 -36.499954224 26.499954224 22.500000000 -36.499954224 26.499954224 23.500000000 -36.499954224 26.499954224 24.500000000 -36.499954224 26.499954224 25.499996185 -36.499950409 26.499954224 26.499954224 -36.499908447 26.499954224 27.499591827 -36.499542236 26.499954224 28.497470856 -36.497409821 26.499954224 29.488397598 -36.488258362 26.499938965 30.458948135 -36.458606720 26.499858856 31.384296417 -36.383720398 26.499622345 32.232860565 -36.232303619 26.499225616 32.980335236 -35.980201721 26.498950958 -35.166954041 -35.166954041 26.499298096 -34.621799469 -35.621978760 26.499156952 33.621799469 -35.621978760 26.499156952 34.166961670 -35.166957855 26.499298096 -35.980201721 -33.980327606 26.498950958 -35.621978760 -34.621799469 26.499160767 34.621982574 -34.621799469 26.499156952 34.980201721 -33.980331421 26.498950958 -36.232303619 -33.232860565 26.499225616 35.232303619 -33.232864380 26.499225616 -36.383720398 -32.384296417 26.499618530 35.383720398 -32.384296417 26.499622345 -36.458606720 -31.458948135 26.499858856 35.458606720 -31.458948135 26.499858856 -36.488258362 -30.488397598 26.499938965 35.488258362 -30.488397598 26.499938965 -36.497406006 -29.497472763 26.499954224 35.497406006 -29.497472763 26.499954224 -36.499542236 -28.499593735 26.499954224 35.499549866 -28.499593735 26.499954224 -36.499908447 -27.499954224 26.499954224 35.499908447 -27.499954224 26.499954224 -36.499950409 -26.499996185 26.499954224 35.499950409 -26.499996185 26.499954224 -36.499950409 -25.500000000 26.499954224 35.499954224 -25.500000000 26.499954224 -36.499950409 -24.500000000 26.499954224 35.499954224 -24.500000000 26.499954224 -36.499950409 -23.500000000 26.499954224 35.499954224 -23.500000000 26.499954224 -36.499950409 -22.500000000 26.499954224 35.499954224 -22.500000000 26.499954224 -36.499950409 -21.500000000 26.499954224 35.499954224 -21.500000000 26.499954224 -36.499950409 -20.500000000 26.499954224 35.499954224 -20.500000000 26.499954224 
-36.499950409 -19.500000000 26.499954224 35.499954224 -19.500000000 26.499954224 -36.499950409 -18.500000000 26.499954224 35.499954224 -18.500000000 26.499954224 -36.499950409 -17.500000000 26.499954224 35.499954224 -17.500000000 26.499954224 -36.499950409 -16.500000000 26.499954224 35.499954224 -16.500000000 26.499954224 -36.499950409 -15.500000000 26.499954224 35.499954224 -15.500000000 26.499954224 -36.499950409 -14.500000000 26.499954224 35.499954224 -14.500000000 26.499954224 -36.499950409 -13.500000000 26.499954224 35.499954224 -13.500000000 26.499954224 -36.499950409 -12.500000000 26.499954224 35.499954224 -12.500000000 26.499954224 -36.499950409 -11.500000000 26.499954224 35.499954224 -11.500000000 26.499954224 -36.499950409 -10.500000000 26.499954224 35.499954224 -10.500000000 26.499954224 -36.499950409 -9.500000000 26.499954224 35.499954224 -9.500000000 26.499954224 -36.499950409 -8.500000000 26.499954224 35.499954224 -8.500000000 26.499954224 -36.499950409 -7.500000000 26.499954224 35.499954224 -7.500000000 26.499954224 -36.499950409 -6.500000000 26.499954224 35.499954224 -6.500000000 26.499954224 -36.499950409 -5.500000000 26.499954224 35.499954224 -5.500000000 26.499954224 -36.499950409 -4.500000000 26.499954224 35.499954224 -4.500000000 26.499954224 -36.499950409 -3.500000000 26.499954224 35.499954224 -3.500000000 26.499954224 -36.499950409 -2.500000000 26.499954224 35.499954224 -2.500000000 26.499954224 -36.499950409 -1.500000000 26.499954224 35.499954224 -1.500000000 26.499954224 -36.499950409 -0.500000000 26.499954224 35.499954224 -0.500000000 26.499954224 -36.499950409 0.500000000 26.499954224 35.499954224 0.500000000 26.499954224 -36.499950409 1.500000000 26.499954224 35.499954224 1.500000000 26.499954224 -36.499950409 2.500000000 26.499954224 35.499954224 2.500000000 26.499954224 -36.499950409 3.500000000 26.499954224 35.499954224 3.500000000 26.499954224 -36.499950409 4.500000000 26.499954224 35.499954224 4.500000000 26.499954224 -36.499950409 
5.500000000 26.499954224 35.499954224 5.500000000 26.499954224 -36.499950409 6.500000000 26.499954224 35.499954224 6.500000000 26.499954224 -36.499950409 7.500000000 26.499954224 35.499954224 7.500000000 26.499954224 -36.499950409 8.500000000 26.499954224 35.499954224 8.500000000 26.499954224 -36.499950409 9.500000000 26.499954224 35.499954224 9.500000000 26.499954224 -36.499950409 10.500000000 26.499954224 35.499954224 10.500000000 26.499954224 -36.499950409 11.500000000 26.499954224 35.499954224 11.500000000 26.499954224 -36.499950409 12.500000000 26.499954224 35.499954224 12.500000000 26.499954224 -36.499950409 13.500000000 26.499954224 35.499954224 13.500000000 26.499954224 -36.499950409 14.500000000 26.499954224 35.499954224 14.500000000 26.499954224 -36.499950409 15.500000000 26.499954224 35.499954224 15.500000000 26.499954224 -36.499950409 16.500000000 26.499954224 35.499954224 16.500000000 26.499954224 -36.499950409 17.500000000 26.499954224 35.499954224 17.500000000 26.499954224 -36.499950409 18.500000000 26.499954224 35.499954224 18.500000000 26.499954224 -36.499950409 19.500000000 26.499954224 35.499954224 19.500000000 26.499954224 -36.499950409 20.500000000 26.499954224 35.499954224 20.500000000 26.499954224 -36.499950409 21.500000000 26.499954224 35.499954224 21.500000000 26.499954224 -36.499950409 22.500000000 26.499954224 35.499954224 22.500000000 26.499954224 -36.499950409 23.500000000 26.499954224 35.499954224 23.500000000 26.499954224 -36.499950409 24.500000000 26.499954224 35.499954224 24.500000000 26.499954224 -36.499950409 25.499996185 26.499954224 35.499950409 25.499996185 26.499954224 -36.499908447 26.499954224 26.499954224 35.499908447 26.499954224 26.499954224 -36.499542236 27.499591827 26.499954224 35.499542236 27.499591827 26.499954224 -36.497406006 28.497470856 26.499954224 35.497409821 28.497470856 26.499954224 -36.488258362 29.488397598 26.499938965 35.488258362 29.488397598 26.499938965 -36.458606720 30.458948135 26.499858856 
35.458606720 30.458948135 26.499858856 -36.383720398 31.384296417 26.499622345 35.383720398 31.384296417 26.499622345 -36.232299805 32.232860565 26.499225616 35.232303619 32.232860565 26.499225616 -35.980194092 32.980335236 26.498950958 -35.621978760 33.621799469 26.499156952 34.621978760 33.621799469 26.499156952 34.980201721 32.980335236 26.498950958 -35.166954041 34.166954041 26.499298096 -34.621799469 34.621978760 26.499160767 33.621799469 34.621982574 26.499160767 34.166954041 34.166961670 26.499298096 -33.980327606 34.980201721 26.498950958 -33.232860565 35.232303619 26.499225616 -32.384296417 35.383720398 26.499618530 -31.458948135 35.458606720 26.499858856 -30.488397598 35.488258362 26.499938965 -29.497472763 35.497406006 26.499954224 -28.499593735 35.499542236 26.499954224 -27.499954224 35.499908447 26.499954224 -26.499996185 35.499950409 26.499954224 -25.500000000 35.499950409 26.499954224 -24.500000000 35.499950409 26.499954224 -23.500000000 35.499950409 26.499954224 -22.500000000 35.499950409 26.499954224 -21.500000000 35.499950409 26.499954224 -20.500000000 35.499950409 26.499954224 -19.500000000 35.499950409 26.499954224 -18.500000000 35.499950409 26.499954224 -17.500000000 35.499950409 26.499954224 -16.500000000 35.499950409 26.499954224 -15.500000000 35.499950409 26.499954224 -14.500000000 35.499950409 26.499954224 -13.500000000 35.499950409 26.499954224 -12.500000000 35.499950409 26.499954224 -11.500000000 35.499950409 26.499954224 -10.500000000 35.499950409 26.499954224 -9.500000000 35.499950409 26.499954224 -8.500000000 35.499950409 26.499954224 -7.500000000 35.499950409 26.499954224 -6.500000000 35.499950409 26.499954224 -5.500000000 35.499950409 26.499954224 -4.500000000 35.499950409 26.499954224 -3.500000000 35.499950409 26.499954224 -2.500000000 35.499950409 26.499954224 -1.500000000 35.499950409 26.499954224 -0.500000000 35.499950409 26.499954224 0.500000000 35.499950409 26.499954224 1.500000000 35.499950409 26.499954224 2.500000000 
35.499950409 26.499954224 3.500000000 35.499950409 26.499954224 4.500000000 35.499950409 26.499954224 5.500000000 35.499950409 26.499954224 6.500000000 35.499950409 26.499954224 7.500000000 35.499950409 26.499954224 8.500000000 35.499950409 26.499954224 9.500000000 35.499950409 26.499954224 10.500000000 35.499950409 26.499954224 11.500000000 35.499950409 26.499954224 12.500000000 35.499950409 26.499954224 13.500000000 35.499950409 26.499954224 14.500000000 35.499950409 26.499954224 15.500000000 35.499950409 26.499954224 16.500000000 35.499950409 26.499954224 17.500000000 35.499950409 26.499954224 18.500000000 35.499950409 26.499954224 19.500000000 35.499950409 26.499954224 20.500000000 35.499950409 26.499954224 21.500000000 35.499950409 26.499954224 22.500000000 35.499950409 26.499954224 23.500000000 35.499950409 26.499954224 24.500000000 35.499950409 26.499954224 25.499996185 35.499950409 26.499954224 26.499954224 35.499908447 26.499954224 27.499591827 35.499542236 26.499954224 28.497470856 35.497406006 26.499954224 29.488397598 35.488258362 26.499938965 30.458948135 35.458606720 26.499858856 31.384296417 35.383720398 26.499622345 32.232860565 35.232303619 26.499225616 32.980327606 34.980201721 26.498950958 -33.977619171 -35.976856232 27.495168686 -33.231597900 -36.228954315 27.496377945 -32.383811951 -36.381027222 27.498052597 -31.458766937 -36.456939697 27.499073029 -30.488346100 -36.487373352 27.499475479 -29.497461319 -36.496910095 27.499576569 -28.499593735 -36.499160767 27.499591827 -27.499954224 -36.499542236 27.499591827 -26.499996185 -36.499591827 27.499591827 -25.500000000 -36.499591827 27.499591827 -24.500000000 -36.499591827 27.499591827 -23.500000000 -36.499591827 27.499591827 -22.500000000 -36.499591827 27.499591827 -21.500000000 -36.499591827 27.499591827 -20.500000000 -36.499591827 27.499591827 -19.500000000 -36.499591827 27.499591827 -18.500000000 -36.499591827 27.499591827 -17.500000000 -36.499591827 27.499591827 -16.500000000 -36.499591827 
27.499591827 -15.500000000 -36.499591827 27.499591827 -14.500000000 -36.499591827 27.499591827 -13.500000000 -36.499591827 27.499591827 -12.500000000 -36.499591827 27.499591827 -11.500000000 -36.499591827 27.499591827 -10.500000000 -36.499591827 27.499591827 -9.500000000 -36.499591827 27.499591827 -8.500000000 -36.499591827 27.499591827 -7.500000000 -36.499591827 27.499591827 -6.500000000 -36.499591827 27.499591827 -5.500000000 -36.499591827 27.499591827 -4.500000000 -36.499591827 27.499591827 -3.500000000 -36.499591827 27.499591827 -2.500000000 -36.499591827 27.499591827 -1.500000000 -36.499591827 27.499591827 -0.500000000 -36.499591827 27.499591827 0.500000000 -36.499591827 27.499591827 1.500000000 -36.499591827 27.499591827 2.500000000 -36.499591827 27.499591827 3.500000000 -36.499591827 27.499591827 4.500000000 -36.499591827 27.499591827 5.500000000 -36.499591827 27.499591827 6.500000000 -36.499591827 27.499591827 7.500000000 -36.499591827 27.499591827 8.500000000 -36.499591827 27.499591827 9.500000000 -36.499591827 27.499591827 10.500000000 -36.499591827 27.499591827 11.500000000 -36.499591827 27.499591827 12.500000000 -36.499591827 27.499591827 13.500000000 -36.499591827 27.499591827 14.500000000 -36.499591827 27.499591827 15.500000000 -36.499591827 27.499591827 16.500000000 -36.499591827 27.499591827 17.500000000 -36.499591827 27.499591827 18.500000000 -36.499591827 27.499591827 19.500000000 -36.499591827 27.499591827 20.500000000 -36.499591827 27.499591827 21.500000000 -36.499591827 27.499591827 22.500000000 -36.499591827 27.499591827 23.500000000 -36.499591827 27.499591827 24.500000000 -36.499591827 27.499591827 25.499996185 -36.499591827 27.499591827 26.499954224 -36.499546051 27.499591827 27.499591827 -36.499164581 27.499591827 28.497457504 -36.496910095 27.499576569 29.488346100 -36.487373352 27.499475479 30.458766937 -36.456939697 27.499073029 31.383810043 -36.381027222 27.498052597 32.231597900 -36.228958130 27.496377945 32.977615356 -35.976860046 
27.495168686 -35.163158417 -35.163158417 27.496557236 -34.617927551 -35.618598938 27.496026993 33.617927551 -35.618602753 27.496023178 34.163158417 -35.163158417 27.496557236 -35.976860046 -33.977615356 27.495168686 -35.618602753 -34.617927551 27.496026993 34.618602753 -34.617927551 27.496026993 34.976860046 -33.977619171 27.495168686 -36.228958130 -33.231597900 27.496377945 35.228958130 -33.231597900 27.496377945 -36.381027222 -32.383811951 27.498052597 35.381027222 -32.383811951 27.498052597 -36.456939697 -31.458766937 27.499073029 35.456939697 -31.458766937 27.499073029 -36.487373352 -30.488346100 27.499475479 35.487373352 -30.488346100 27.499475479 -36.496910095 -29.497461319 27.499576569 35.496910095 -29.497461319 27.499576569 -36.499160767 -28.499593735 27.499591827 35.499160767 -28.499593735 27.499591827 -36.499542236 -27.499954224 27.499591827 35.499542236 -27.499954224 27.499591827 -36.499584198 -26.499996185 27.499591827 35.499591827 -26.499996185 27.499591827 -36.499591827 -25.500000000 27.499591827 35.499591827 -25.500000000 27.499591827 -36.499591827 -24.500000000 27.499591827 35.499591827 -24.500000000 27.499591827 -36.499591827 -23.500000000 27.499591827 35.499591827 -23.500000000 27.499591827 -36.499591827 -22.500000000 27.499591827 35.499591827 -22.500000000 27.499591827 -36.499591827 -21.500000000 27.499591827 35.499591827 -21.500000000 27.499591827 -36.499591827 -20.500000000 27.499591827 35.499591827 -20.500000000 27.499591827 -36.499591827 -19.500000000 27.499591827 35.499591827 -19.500000000 27.499591827 -36.499591827 -18.500000000 27.499591827 35.499591827 -18.500000000 27.499591827 -36.499591827 -17.500000000 27.499591827 35.499591827 -17.500000000 27.499591827 -36.499591827 -16.500000000 27.499591827 35.499591827 -16.500000000 27.499591827 -36.499591827 -15.500000000 27.499591827 35.499591827 -15.500000000 27.499591827 -36.499591827 -14.500000000 27.499591827 35.499591827 -14.500000000 27.499591827 -36.499591827 -13.500000000 27.499591827 
35.499591827 -13.500000000 27.499591827 -36.499591827 -12.500000000 27.499591827 35.499591827 -12.500000000 27.499591827 -36.499591827 -11.500000000 27.499591827 35.499591827 -11.500000000 27.499591827 -36.499591827 -10.500000000 27.499591827 35.499591827 -10.500000000 27.499591827 -36.499591827 -9.500000000 27.499591827 35.499591827 -9.500000000 27.499591827 -36.499591827 -8.500000000 27.499591827 35.499591827 -8.500000000 27.499591827 -36.499591827 -7.500000000 27.499591827 35.499591827 -7.500000000 27.499591827 -36.499591827 -6.500000000 27.499591827 35.499591827 -6.500000000 27.499591827 -36.499591827 -5.500000000 27.499591827 35.499591827 -5.500000000 27.499591827 -36.499591827 -4.500000000 27.499591827 35.499591827 -4.500000000 27.499591827 -36.499591827 -3.500000000 27.499591827 35.499591827 -3.500000000 27.499591827 -36.499591827 -2.500000000 27.499591827 35.499591827 -2.500000000 27.499591827 -36.499591827 -1.500000000 27.499591827 35.499591827 -1.500000000 27.499591827 -36.499591827 -0.500000000 27.499591827 35.499591827 -0.500000000 27.499591827 -36.499591827 0.500000000 27.499591827 35.499591827 0.500000000 27.499591827 -36.499591827 1.500000000 27.499591827 35.499591827 1.500000000 27.499591827 -36.499591827 2.500000000 27.499591827 35.499591827 2.500000000 27.499591827 -36.499591827 3.500000000 27.499591827 35.499591827 3.500000000 27.499591827 -36.499591827 4.500000000 27.499591827 35.499591827 4.500000000 27.499591827 -36.499591827 5.500000000 27.499591827 35.499591827 5.500000000 27.499591827 -36.499591827 6.500000000 27.499591827 35.499591827 6.500000000 27.499591827 -36.499591827 7.500000000 27.499591827 35.499591827 7.500000000 27.499591827 -36.499591827 8.500000000 27.499591827 35.499591827 8.500000000 27.499591827 -36.499591827 9.500000000 27.499591827 35.499591827 9.500000000 27.499591827 -36.499591827 10.500000000 27.499591827 35.499591827 10.500000000 27.499591827 -36.499591827 11.500000000 27.499591827 35.499591827 11.500000000 
27.499591827 -36.499591827 12.500000000 27.499591827 35.499591827 12.500000000 27.499591827 -36.499591827 13.500000000 27.499591827 35.499591827 13.500000000 27.499591827 -36.499591827 14.500000000 27.499591827 35.499591827 14.500000000 27.499591827 -36.499591827 15.500000000 27.499591827 35.499591827 15.500000000 27.499591827 -36.499591827 16.500000000 27.499591827 35.499591827 16.500000000 27.499591827 -36.499591827 17.500000000 27.499591827 35.499591827 17.500000000 27.499591827 -36.499591827 18.500000000 27.499591827 35.499591827 18.500000000 27.499591827 -36.499591827 19.500000000 27.499591827 35.499591827 19.500000000 27.499591827 -36.499591827 20.500000000 27.499591827 35.499591827 20.500000000 27.499591827 -36.499591827 21.500000000 27.499591827 35.499591827 21.500000000 27.499591827 -36.499591827 22.500000000 27.499591827 35.499591827 22.500000000 27.499591827 -36.499591827 23.500000000 27.499591827 35.499591827 23.500000000 27.499591827 -36.499591827 24.500000000 27.499591827 35.499591827 24.500000000 27.499591827 -36.499584198 25.499996185 27.499591827 35.499591827 25.499996185 27.499591827 -36.499542236 26.499954224 27.499591827 35.499546051 26.499954224 27.499591827 -36.499160767 27.499591827 27.499591827 35.499164581 27.499591827 27.499591827 -36.496910095 28.497457504 27.499576569 35.496910095 28.497457504 27.499576569 -36.487373352 29.488346100 27.499475479 35.487373352 29.488346100 27.499475479 -36.456939697 30.458766937 27.499073029 35.456939697 30.458766937 27.499073029 -36.381027222 31.383810043 27.498052597 35.381027222 31.383810043 27.498052597 -36.228950500 32.231597900 27.496377945 35.228958130 32.231597900 27.496377945 -35.976856232 32.977611542 27.495168686 -35.618598938 33.617927551 27.496026993 34.618602753 33.617927551 27.496023178 34.976860046 32.977611542 27.495168686 -35.163158417 34.163158417 27.496557236 -34.617927551 34.618602753 27.496026993 33.617927551 34.618602753 27.496026993 34.163158417 34.163158417 27.496557236 
-33.977615356 34.976860046 27.495168686 -33.231597900 35.228958130 27.496377945 -32.383811951 35.381027222 27.498052597 -31.458766937 35.456939697 27.499073029 -30.488346100 35.487373352 27.499475479 -29.497461319 35.496910095 27.499576569 -28.499593735 35.499160767 27.499591827 -27.499954224 35.499542236 27.499591827 -26.499996185 35.499584198 27.499591827 -25.500000000 35.499591827 27.499591827 -24.500000000 35.499591827 27.499591827 -23.500000000 35.499591827 27.499591827 -22.500000000 35.499591827 27.499591827 -21.500000000 35.499591827 27.499591827 -20.500000000 35.499591827 27.499591827 -19.500000000 35.499591827 27.499591827 -18.500000000 35.499591827 27.499591827 -17.500000000 35.499591827 27.499591827 -16.500000000 35.499591827 27.499591827 -15.500000000 35.499591827 27.499591827 -14.500000000 35.499591827 27.499591827 -13.500000000 35.499591827 27.499591827 -12.500000000 35.499591827 27.499591827 -11.500000000 35.499591827 27.499591827 -10.500000000 35.499591827 27.499591827 -9.500000000 35.499591827 27.499591827 -8.500000000 35.499591827 27.499591827 -7.500000000 35.499591827 27.499591827 -6.500000000 35.499591827 27.499591827 -5.500000000 35.499591827 27.499591827 -4.500000000 35.499591827 27.499591827 -3.500000000 35.499591827 27.499591827 -2.500000000 35.499591827 27.499591827 -1.500000000 35.499591827 27.499591827 -0.500000000 35.499591827 27.499591827 0.500000000 35.499591827 27.499591827 1.500000000 35.499591827 27.499591827 2.500000000 35.499591827 27.499591827 3.500000000 35.499591827 27.499591827 4.500000000 35.499591827 27.499591827 5.500000000 35.499591827 27.499591827 6.500000000 35.499591827 27.499591827 7.500000000 35.499591827 27.499591827 8.500000000 35.499591827 27.499591827 9.500000000 35.499591827 27.499591827 10.500000000 35.499591827 27.499591827 11.500000000 35.499591827 27.499591827 12.500000000 35.499591827 27.499591827 13.500000000 35.499591827 27.499591827 14.500000000 35.499591827 27.499591827 15.500000000 35.499591827 
27.499591827 16.500000000 35.499591827 27.499591827 17.500000000 35.499591827 27.499591827 18.500000000 35.499591827 27.499591827 19.500000000 35.499591827 27.499591827 20.500000000 35.499591827 27.499591827 21.500000000 35.499591827 27.499591827 22.500000000 35.499591827 27.499591827 23.500000000 35.499591827 27.499591827 24.500000000 35.499591827 27.499591827 25.499996185 35.499584198 27.499591827 26.499954224 35.499542236 27.499591827 27.499591827 35.499160767 27.499591827 28.497457504 35.496910095 27.499576569 29.488346100 35.487373352 27.499475479 30.458766937 35.456939697 27.499073029 31.383810043 35.381027222 27.498052597 32.231597900 35.228958130 27.496377945 32.977615356 34.976860046 27.495168686 -33.968864441 -35.965759277 28.481937408 -33.227870941 -36.217510223 28.486719131 -32.382308960 -36.371490479 28.492321014 -31.458078384 -36.450428009 28.495611191 -30.488048553 -36.483341217 28.496957779 -29.497375488 -36.494178772 28.497371674 -28.499578476 -36.496910095 28.497461319 -27.499954224 -36.497402191 28.497470856 -26.499996185 -36.497467041 28.497470856 -25.500000000 -36.497470856 28.497470856 -24.500000000 -36.497470856 28.497470856 -23.500000000 -36.497470856 28.497470856 -22.500000000 -36.497470856 28.497470856 -21.500000000 -36.497470856 28.497470856 -20.500000000 -36.497470856 28.497470856 -19.500000000 -36.497470856 28.497470856 -18.500000000 -36.497470856 28.497470856 -17.500000000 -36.497470856 28.497470856 -16.500000000 -36.497470856 28.497470856 -15.500000000 -36.497470856 28.497470856 -14.500000000 -36.497470856 28.497470856 -13.500000000 -36.497470856 28.497470856 -12.500000000 -36.497470856 28.497470856 -11.500000000 -36.497470856 28.497470856 -10.500000000 -36.497470856 28.497470856 -9.500000000 -36.497470856 28.497470856 -8.500000000 -36.497470856 28.497470856 -7.500000000 -36.497470856 28.497470856 -6.500000000 -36.497470856 28.497470856 -5.500000000 -36.497470856 28.497470856 -4.500000000 -36.497470856 28.497470856 -3.500000000 
-36.497470856 28.497470856 -2.500000000 -36.497470856 28.497470856 -1.500000000 -36.497470856 28.497470856 -0.500000000 -36.497470856 28.497470856 0.500000000 -36.497470856 28.497470856 1.500000000 -36.497470856 28.497470856 2.500000000 -36.497470856 28.497470856 3.500000000 -36.497470856 28.497470856 4.500000000 -36.497470856 28.497470856 5.500000000 -36.497470856 28.497470856 6.500000000 -36.497470856 28.497470856 7.500000000 -36.497470856 28.497470856 8.500000000 -36.497470856 28.497470856 9.500000000 -36.497470856 28.497470856 10.500000000 -36.497470856 28.497470856 11.500000000 -36.497470856 28.497470856 12.500000000 -36.497470856 28.497470856 13.500000000 -36.497470856 28.497470856 14.500000000 -36.497470856 28.497470856 15.500000000 -36.497470856 28.497470856 16.500000000 -36.497470856 28.497470856 17.500000000 -36.497470856 28.497470856 18.500000000 -36.497470856 28.497470856 19.500000000 -36.497470856 28.497470856 20.500000000 -36.497470856 28.497470856 21.500000000 -36.497470856 28.497470856 22.500000000 -36.497470856 28.497470856 23.500000000 -36.497470856 28.497470856 24.500000000 -36.497470856 28.497470856 25.499996185 -36.497467041 28.497470856 26.499954224 -36.497406006 28.497470856 27.499576569 -36.496910095 28.497457504 28.497371674 -36.494174957 28.497375488 29.488048553 -36.483337402 28.496957779 30.458078384 -36.450428009 28.495611191 31.382312775 -36.371490479 28.492321014 32.227874756 -36.217517853 28.486719131 32.968864441 -35.965766907 28.481933594 -35.150249481 -35.150249481 28.487289429 -34.604705811 -35.607204437 28.485630035 33.604705811 -35.607208252 28.485630035 34.150249481 -35.150249481 28.487289429 -35.965766907 -33.968864441 28.481937408 -35.607204437 -34.604705811 28.485631943 34.607204437 -34.604705811 28.485631943 34.965766907 -33.968868256 28.481937408 -36.217517853 -33.227874756 28.486719131 35.217517853 -33.227878571 28.486719131 -36.371490479 -32.382312775 28.492321014 35.371490479 -32.382308960 28.492321014 -36.450428009 
-31.458078384 28.495611191 35.450428009 -31.458078384 28.495611191 -36.483337402 -30.488048553 28.496957779 35.483341217 -30.488048553 28.496957779 -36.494178772 -29.497375488 28.497371674 35.494178772 -29.497375488 28.497371674 -36.496910095 -28.499578476 28.497461319 35.496910095 -28.499578476 28.497461319 -36.497406006 -27.499954224 28.497470856 35.497402191 -27.499954224 28.497470856 -36.497467041 -26.499996185 28.497470856 35.497467041 -26.499996185 28.497470856 -36.497467041 -25.500000000 28.497470856 35.497470856 -25.500000000 28.497470856 -36.497467041 -24.500000000 28.497470856 35.497470856 -24.500000000 28.497470856 -36.497467041 -23.500000000 28.497470856 35.497470856 -23.500000000 28.497470856 -36.497467041 -22.500000000 28.497470856 35.497470856 -22.500000000 28.497470856 -36.497467041 -21.500000000 28.497470856 35.497470856 -21.500000000 28.497470856 -36.497467041 -20.500000000 28.497470856 35.497470856 -20.500000000 28.497470856 -36.497467041 -19.500000000 28.497470856 35.497470856 -19.500000000 28.497470856 -36.497467041 -18.500000000 28.497470856 35.497470856 -18.500000000 28.497470856 -36.497467041 -17.500000000 28.497470856 35.497470856 -17.500000000 28.497470856 -36.497467041 -16.500000000 28.497470856 35.497470856 -16.500000000 28.497470856 -36.497467041 -15.500000000 28.497470856 35.497470856 -15.500000000 28.497470856 -36.497467041 -14.500000000 28.497470856 35.497470856 -14.500000000 28.497470856 -36.497467041 -13.500000000 28.497470856 35.497470856 -13.500000000 28.497470856 -36.497467041 -12.500000000 28.497470856 35.497470856 -12.500000000 28.497470856 -36.497467041 -11.500000000 28.497470856 35.497470856 -11.500000000 28.497470856 -36.497467041 -10.500000000 28.497470856 35.497470856 -10.500000000 28.497470856 -36.497467041 -9.500000000 28.497470856 35.497470856 -9.500000000 28.497470856 -36.497467041 -8.500000000 28.497470856 35.497470856 -8.500000000 28.497470856 -36.497467041 -7.500000000 28.497470856 35.497470856 -7.500000000 
28.497470856 -36.497467041 -6.500000000 28.497470856 35.497470856 -6.500000000 28.497470856 -36.497467041 -5.500000000 28.497470856 35.497470856 -5.500000000 28.497470856 -36.497467041 -4.500000000 28.497470856 35.497470856 -4.500000000 28.497470856 -36.497467041 -3.500000000 28.497470856 35.497470856 -3.500000000 28.497470856 -36.497467041 -2.500000000 28.497470856 35.497470856 -2.500000000 28.497470856 -36.497467041 -1.500000000 28.497470856 35.497470856 -1.500000000 28.497470856 -36.497467041 -0.500000000 28.497470856 35.497470856 -0.500000000 28.497470856 -36.497467041 0.500000000 28.497470856 35.497470856 0.500000000 28.497470856 -36.497467041 1.500000000 28.497470856 35.497470856 1.500000000 28.497470856 -36.497467041 2.500000000 28.497470856 35.497470856 2.500000000 28.497470856 -36.497467041 3.500000000 28.497470856 35.497470856 3.500000000 28.497470856 -36.497467041 4.500000000 28.497470856 35.497470856 4.500000000 28.497470856 -36.497467041 5.500000000 28.497470856 35.497470856 5.500000000 28.497470856 -36.497467041 6.500000000 28.497470856 35.497470856 6.500000000 28.497470856 -36.497467041 7.500000000 28.497470856 35.497470856 7.500000000 28.497470856 -36.497467041 8.500000000 28.497470856 35.497470856 8.500000000 28.497470856 -36.497467041 9.500000000 28.497470856 35.497470856 9.500000000 28.497470856 -36.497467041 10.500000000 28.497470856 35.497470856 10.500000000 28.497470856 -36.497467041 11.500000000 28.497470856 35.497470856 11.500000000 28.497470856 -36.497467041 12.500000000 28.497470856 35.497470856 12.500000000 28.497470856 -36.497467041 13.500000000 28.497470856 35.497470856 13.500000000 28.497470856 -36.497467041 14.500000000 28.497470856 35.497470856 14.500000000 28.497470856 -36.497467041 15.500000000 28.497470856 35.497470856 15.500000000 28.497470856 -36.497467041 16.500000000 28.497470856 35.497470856 16.500000000 28.497470856 -36.497467041 17.500000000 28.497470856 35.497470856 17.500000000 28.497470856 -36.497467041 18.500000000 
28.497470856 35.497470856 18.500000000 28.497470856 -36.497467041 19.500000000 28.497470856 35.497470856 19.500000000 28.497470856 -36.497467041 20.500000000 28.497470856 35.497470856 20.500000000 28.497470856 -36.497467041 21.500000000 28.497470856 35.497470856 21.500000000 28.497470856 -36.497467041 22.500000000 28.497470856 35.497470856 22.500000000 28.497470856 -36.497467041 23.500000000 28.497470856 35.497470856 23.500000000 28.497470856 -36.497467041 24.500000000 28.497470856 35.497470856 24.500000000 28.497470856 -36.497467041 25.499996185 28.497470856 35.497467041 25.499996185 28.497470856 -36.497406006 26.499954224 28.497470856 35.497406006 26.499954224 28.497470856 -36.496910095 27.499576569 28.497457504 35.496910095 27.499576569 28.497457504 -36.494178772 28.497375488 28.497371674 35.494174957 28.497371674 28.497375488 -36.483337402 29.488048553 28.496957779 35.483337402 29.488048553 28.496957779 -36.450428009 30.458078384 28.495609283 35.450428009 30.458078384 28.495611191 -36.371490479 31.382312775 28.492321014 35.371490479 31.382312775 28.492321014 -36.217510223 32.227870941 28.486719131 35.217510223 32.227870941 28.486719131 -35.965759277 32.968864441 28.481937408 -35.607204437 33.604705811 28.485630035 34.607204437 33.604705811 28.485630035 34.965759277 32.968864441 28.481937408 -35.150249481 34.150249481 28.487289429 -34.604705811 34.607204437 28.485631943 33.604705811 34.607204437 28.485631943 34.150249481 34.150249481 28.487293243 -33.968864441 34.965766907 28.481937408 -33.227874756 35.217517853 28.486719131 -32.382312775 35.371490479 28.492321014 -31.458078384 35.450428009 28.495611191 -30.488048553 35.483337402 28.496957779 -29.497375488 35.494178772 28.497371674 -28.499578476 35.496910095 28.497461319 -27.499954224 35.497406006 28.497470856 -26.499996185 35.497467041 28.497470856 -25.500000000 35.497467041 28.497470856 -24.500000000 35.497467041 28.497470856 -23.500000000 35.497467041 28.497470856 -22.500000000 35.497467041 28.497470856 
-21.500000000 35.497467041 28.497470856 -20.500000000 35.497467041 28.497470856 -19.500000000 35.497467041 28.497470856 -18.500000000 35.497467041 28.497470856 -17.500000000 35.497467041 28.497470856 -16.500000000 35.497467041 28.497470856 -15.500000000 35.497467041 28.497470856 -14.500000000 35.497467041 28.497470856 -13.500000000 35.497467041 28.497470856 -12.500000000 35.497467041 28.497470856 -11.500000000 35.497467041 28.497470856 -10.500000000 35.497467041 28.497470856 -9.500000000 35.497467041 28.497470856 -8.500000000 35.497467041 28.497470856 -7.500000000 35.497467041 28.497470856 -6.500000000 35.497467041 28.497470856 -5.500000000 35.497467041 28.497470856 -4.500000000 35.497467041 28.497470856 -3.500000000 35.497467041 28.497470856 -2.500000000 35.497467041 28.497470856 -1.500000000 35.497467041 28.497470856 -0.500000000 35.497467041 28.497470856 0.500000000 35.497467041 28.497470856 1.500000000 35.497467041 28.497470856 2.500000000 35.497467041 28.497470856 3.500000000 35.497467041 28.497470856 4.500000000 35.497467041 28.497470856 5.500000000 35.497467041 28.497470856 6.500000000 35.497467041 28.497470856 7.500000000 35.497467041 28.497470856 8.500000000 35.497467041 28.497470856 9.500000000 35.497467041 28.497470856 10.500000000 35.497467041 28.497470856 11.500000000 35.497467041 28.497470856 12.500000000 35.497467041 28.497470856 13.500000000 35.497467041 28.497470856 14.500000000 35.497467041 28.497470856 15.500000000 35.497467041 28.497470856 16.500000000 35.497467041 28.497470856 17.500000000 35.497467041 28.497470856 18.500000000 35.497467041 28.497470856 19.500000000 35.497467041 28.497470856 20.500000000 35.497467041 28.497470856 21.500000000 35.497467041 28.497470856 22.500000000 35.497467041 28.497470856 23.500000000 35.497467041 28.497470856 24.500000000 35.497467041 28.497470856 25.499996185 35.497467041 28.497470856 26.499954224 35.497406006 28.497470856 27.499576569 35.496910095 28.497457504 28.497375488 35.494178772 28.497371674 
29.488048553 35.483337402 28.496957779 30.458078384 35.450428009 28.495609283 31.382308960 35.371490479 28.492321014 32.227870941 35.217517853 28.486719131 32.968864441 34.965766907 28.481937408 -33.946807861 -35.937091827 29.442840576 -33.220714569 -36.185966492 29.460592270 -32.379219055 -36.344280243 29.476003647 -31.456085205 -36.430480957 29.483789444 -30.486968994 -36.469238281 29.486968994 -29.496959686 -36.483337402 29.488048553 -28.499475479 -36.487373352 29.488346100 -27.499938965 -36.488258362 29.488399506 -26.499996185 -36.488391876 29.488407135 -25.500000000 -36.488403320 29.488407135 -24.500000000 -36.488407135 29.488407135 -23.500000000 -36.488407135 29.488407135 -22.500000000 -36.488407135 29.488407135 -21.500000000 -36.488407135 29.488407135 -20.500000000 -36.488407135 29.488407135 -19.500000000 -36.488407135 29.488407135 -18.500000000 -36.488407135 29.488407135 -17.500000000 -36.488407135 29.488407135 -16.500000000 -36.488407135 29.488407135 -15.500000000 -36.488407135 29.488407135 -14.500000000 -36.488407135 29.488407135 -13.500000000 -36.488407135 29.488407135 -12.500000000 -36.488407135 29.488407135 -11.500000000 -36.488407135 29.488407135 -10.500000000 -36.488407135 29.488407135 -9.500000000 -36.488407135 29.488407135 -8.500000000 -36.488407135 29.488407135 -7.500000000 -36.488407135 29.488407135 -6.500000000 -36.488407135 29.488407135 -5.500000000 -36.488407135 29.488407135 -4.500000000 -36.488407135 29.488407135 -3.500000000 -36.488407135 29.488407135 -2.500000000 -36.488407135 29.488407135 -1.500000000 -36.488407135 29.488407135 -0.500000000 -36.488407135 29.488407135 0.500000000 -36.488407135 29.488407135 1.500000000 -36.488407135 29.488407135 2.500000000 -36.488407135 29.488407135 3.500000000 -36.488407135 29.488407135 4.500000000 -36.488407135 29.488407135 5.500000000 -36.488407135 29.488407135 6.500000000 -36.488407135 29.488407135 7.500000000 -36.488407135 29.488407135 8.500000000 -36.488407135 29.488407135 9.500000000 -36.488407135 
29.488407135 10.500000000 -36.488407135 29.488407135 11.500000000 -36.488407135 29.488407135 12.500000000 -36.488407135 29.488407135 13.500000000 -36.488407135 29.488407135 14.500000000 -36.488407135 29.488407135 15.500000000 -36.488407135 29.488407135 16.500000000 -36.488407135 29.488407135 17.500000000 -36.488407135 29.488407135 18.500000000 -36.488407135 29.488407135 19.500000000 -36.488407135 29.488407135 20.500000000 -36.488407135 29.488407135 21.500000000 -36.488407135 29.488407135 22.500000000 -36.488407135 29.488407135 23.500000000 -36.488407135 29.488407135 24.500000000 -36.488407135 29.488407135 25.499996185 -36.488391876 29.488407135 26.499938965 -36.488258362 29.488399506 27.499475479 -36.487373352 29.488346100 28.496959686 -36.483337402 29.488048553 29.486968994 -36.469238281 29.486965179 30.456085205 -36.430480957 29.483789444 31.379222870 -36.344280243 29.476003647 32.220714569 -36.185966492 29.460596085 32.946807861 -35.937091827 29.442840576 -35.115642548 -35.115642548 29.463253021 -34.567741394 -35.577739716 29.459178925 33.567741394 -35.577739716 29.459178925 34.115642548 -35.115642548 29.463253021 -35.937091827 -33.946807861 29.442840576 -35.577739716 -34.567741394 29.459178925 34.577739716 -34.567749023 29.459177017 34.937091827 -33.946807861 29.442840576 -36.185966492 -33.220714569 29.460592270 35.185966492 -33.220718384 29.460592270 -36.344280243 -32.379222870 29.476003647 35.344280243 -32.379222870 29.476003647 -36.430480957 -31.456085205 29.483789444 35.430480957 -31.456085205 29.483789444 -36.469230652 -30.486968994 29.486965179 35.469238281 -30.486968994 29.486968994 -36.483337402 -29.496959686 29.488052368 35.483337402 -29.496959686 29.488048553 -36.487373352 -28.499475479 29.488346100 35.487373352 -28.499475479 29.488346100 -36.488258362 -27.499938965 29.488399506 35.488258362 -27.499938965 29.488399506 -36.488391876 -26.499996185 29.488407135 35.488391876 -26.499996185 29.488407135 -36.488403320 -25.500000000 29.488407135 35.488403320 
-25.500000000 29.488407135 -36.488403320 -24.500000000 29.488407135 35.488407135 -24.500000000 29.488407135 -36.488403320 -23.500000000 29.488407135 35.488407135 -23.500000000 29.488407135 -36.488403320 -22.500000000 29.488407135 35.488407135 -22.500000000 29.488407135 -36.488403320 -21.500000000 29.488407135 35.488407135 -21.500000000 29.488407135 -36.488403320 -20.500000000 29.488407135 35.488407135 -20.500000000 29.488407135 -36.488403320 -19.500000000 29.488407135 35.488407135 -19.500000000 29.488407135 -36.488403320 -18.500000000 29.488407135 35.488407135 -18.500000000 29.488407135 -36.488403320 -17.500000000 29.488407135 35.488407135 -17.500000000 29.488407135 -36.488403320 -16.500000000 29.488407135 35.488407135 -16.500000000 29.488407135 -36.488403320 -15.500000000 29.488407135 35.488407135 -15.500000000 29.488407135 -36.488403320 -14.500000000 29.488407135 35.488407135 -14.500000000 29.488407135 -36.488403320 -13.500000000 29.488407135 35.488407135 -13.500000000 29.488407135 -36.488403320 -12.500000000 29.488407135 35.488407135 -12.500000000 29.488407135 -36.488403320 -11.500000000 29.488407135 35.488407135 -11.500000000 29.488407135 -36.488403320 -10.500000000 29.488407135 35.488407135 -10.500000000 29.488407135 -36.488403320 -9.500000000 29.488407135 35.488407135 -9.500000000 29.488407135 -36.488403320 -8.500000000 29.488407135 35.488407135 -8.500000000 29.488407135 -36.488403320 -7.500000000 29.488407135 35.488407135 -7.500000000 29.488407135 -36.488403320 -6.500000000 29.488407135 35.488407135 -6.500000000 29.488407135 -36.488403320 -5.500000000 29.488407135 35.488407135 -5.500000000 29.488407135 -36.488403320 -4.500000000 29.488407135 35.488407135 -4.500000000 29.488407135 -36.488403320 -3.500000000 29.488407135 35.488407135 -3.500000000 29.488407135 -36.488403320 -2.500000000 29.488407135 35.488407135 -2.500000000 29.488407135 -36.488403320 -1.500000000 29.488407135 35.488407135 -1.500000000 29.488407135 -36.488403320 -0.500000000 29.488407135 
35.488407135 -0.500000000 29.488407135 -36.488403320 0.500000000 29.488407135 35.488407135 0.500000000 29.488407135 -36.488403320 1.500000000 29.488407135 35.488407135 1.500000000 29.488407135 -36.488403320 2.500000000 29.488407135 35.488407135 2.500000000 29.488407135 -36.488403320 3.500000000 29.488407135 35.488407135 3.500000000 29.488407135 -36.488403320 4.500000000 29.488407135 35.488407135 4.500000000 29.488407135 -36.488403320 5.500000000 29.488407135 35.488407135 5.500000000 29.488407135 -36.488403320 6.500000000 29.488407135 35.488407135 6.500000000 29.488407135 -36.488403320 7.500000000 29.488407135 35.488407135 7.500000000 29.488407135 -36.488403320 8.500000000 29.488407135 35.488407135 8.500000000 29.488407135 -36.488403320 9.500000000 29.488407135 35.488407135 9.500000000 29.488407135 -36.488403320 10.500000000 29.488407135 35.488407135 10.500000000 29.488407135 -36.488403320 11.500000000 29.488407135 35.488407135 11.500000000 29.488407135 -36.488403320 12.500000000 29.488407135 35.488407135 12.500000000 29.488407135 -36.488403320 13.500000000 29.488407135 35.488407135 13.500000000 29.488407135 -36.488403320 14.500000000 29.488407135 35.488407135 14.500000000 29.488407135 -36.488403320 15.500000000 29.488407135 35.488407135 15.500000000 29.488407135 -36.488403320 16.500000000 29.488407135 35.488407135 16.500000000 29.488407135 -36.488403320 17.500000000 29.488407135 35.488407135 17.500000000 29.488407135 -36.488403320 18.500000000 29.488407135 35.488407135 18.500000000 29.488407135 -36.488403320 19.500000000 29.488407135 35.488407135 19.500000000 29.488407135 -36.488403320 20.500000000 29.488407135 35.488407135 20.500000000 29.488407135 -36.488403320 21.500000000 29.488407135 35.488407135 21.500000000 29.488407135 -36.488403320 22.500000000 29.488407135 35.488407135 22.500000000 29.488407135 -36.488403320 23.500000000 29.488407135 35.488407135 23.500000000 29.488407135 -36.488403320 24.500000000 29.488407135 35.488407135 24.500000000 29.488407135 
-36.488391876 25.499996185 29.488407135 35.488391876 25.499996185 29.488407135 -36.488258362 26.499938965 29.488399506 35.488258362 26.499938965 29.488399506 -36.487377167 27.499475479 29.488348007 35.487373352 27.499475479 29.488346100 -36.483337402 28.496957779 29.488052368 35.483337402 28.496959686 29.488048553 -36.469238281 29.486965179 29.486968994 35.469238281 29.486968994 29.486965179 -36.430473328 30.456085205 29.483785629 35.430480957 30.456085205 29.483789444 -36.344280243 31.379220963 29.476003647 35.344280243 31.379222870 29.476003647 -36.185966492 32.220710754 29.460596085 35.185966492 32.220714569 29.460596085 -35.937091827 32.946807861 29.442840576 -35.577739716 33.567741394 29.459178925 34.577739716 33.567741394 29.459178925 34.937091827 32.946807861 29.442840576 -35.115642548 34.115642548 29.463253021 -34.567741394 34.577739716 29.459178925 33.567741394 34.577739716 29.459178925 34.115642548 34.115642548 29.463256836 -33.946807861 34.937091827 29.442840576 -33.220714569 35.185966492 29.460592270 -32.379222870 35.344280243 29.476003647 -31.456085205 35.430480957 29.483789444 -30.486968994 35.469230652 29.486965179 -29.496959686 35.483337402 29.488052368 -28.499475479 35.487373352 29.488346100 -27.499938965 35.488258362 29.488399506 -26.499996185 35.488391876 29.488407135 -25.500000000 35.488403320 29.488407135 -24.500000000 35.488403320 29.488407135 -23.500000000 35.488403320 29.488407135 -22.500000000 35.488403320 29.488407135 -21.500000000 35.488403320 29.488407135 -20.500000000 35.488403320 29.488407135 -19.500000000 35.488403320 29.488407135 -18.500000000 35.488403320 29.488407135 -17.500000000 35.488403320 29.488407135 -16.500000000 35.488403320 29.488407135 -15.500000000 35.488403320 29.488407135 -14.500000000 35.488403320 29.488407135 -13.500000000 35.488403320 29.488407135 -12.500000000 35.488403320 29.488407135 -11.500000000 35.488403320 29.488407135 -10.500000000 35.488403320 29.488407135 -9.500000000 35.488403320 29.488407135 -8.500000000 
35.488403320 29.488407135 -7.500000000 35.488403320 29.488407135 -6.500000000 35.488403320 29.488407135 -5.500000000 35.488403320 29.488407135 -4.500000000 35.488403320 29.488407135 -3.500000000 35.488403320 29.488407135 -2.500000000 35.488403320 29.488407135 -1.500000000 35.488403320 29.488407135 -0.500000000 35.488403320 29.488407135 0.500000000 35.488403320 29.488407135 1.500000000 35.488403320 29.488407135 2.500000000 35.488403320 29.488407135 3.500000000 35.488403320 29.488407135 4.500000000 35.488403320 29.488407135 5.500000000 35.488403320 29.488407135 6.500000000 35.488403320 29.488407135 7.500000000 35.488403320 29.488407135 8.500000000 35.488403320 29.488407135 9.500000000 35.488403320 29.488407135 10.500000000 35.488403320 29.488407135 11.500000000 35.488403320 29.488407135 12.500000000 35.488403320 29.488407135 13.500000000 35.488403320 29.488407135 14.500000000 35.488403320 29.488407135 15.500000000 35.488403320 29.488407135 16.500000000 35.488403320 29.488407135 17.500000000 35.488403320 29.488407135 18.500000000 35.488403320 29.488407135 19.500000000 35.488403320 29.488407135 20.500000000 35.488403320 29.488407135 21.500000000 35.488403320 29.488407135 22.500000000 35.488403320 29.488407135 23.500000000 35.488403320 29.488407135 24.500000000 35.488403320 29.488407135 25.499996185 35.488391876 29.488407135 26.499938965 35.488258362 29.488399506 27.499475479 35.487377167 29.488348007 28.496957779 35.483337402 29.488052368 29.486965179 35.469238281 29.486968994 30.456085205 35.430473328 29.483785629 31.379220963 35.344280243 29.476003647 32.220710754 35.185966492 29.460596085 32.946807861 34.937091827 29.442840576 -33.903377533 -35.879737854 30.336599350 -33.216838837 -36.112342834 30.405670166 -32.375358582 -36.280395508 30.438953400 -31.451217651 -36.380989075 30.451217651 -30.483789444 -36.430473328 30.456085205 -29.495611191 -36.450424194 30.458078384 -28.499073029 -36.456939697 30.458770752 -27.499858856 -36.458606720 30.458950043 -26.499988556 
-36.458930969 30.458980560 -25.500000000 -36.458976746 30.458980560 -24.500000000 -36.458976746 30.458980560 -23.500000000 -36.458976746 30.458980560 -22.500000000 -36.458976746 30.458980560 -21.500000000 -36.458976746 30.458980560 -20.500000000 -36.458976746 30.458980560 -19.500000000 -36.458976746 30.458980560 -18.500000000 -36.458976746 30.458980560 -17.500000000 -36.458976746 30.458980560 -16.500000000 -36.458976746 30.458980560 -15.500000000 -36.458976746 30.458980560 -14.500000000 -36.458976746 30.458980560 -13.500000000 -36.458976746 30.458980560 -12.500000000 -36.458976746 30.458980560 -11.500000000 -36.458976746 30.458980560 -10.500000000 -36.458976746 30.458980560 -9.500000000 -36.458976746 30.458980560 -8.500000000 -36.458976746 30.458980560 -7.500000000 -36.458976746 30.458980560 -6.500000000 -36.458976746 30.458980560 -5.500000000 -36.458976746 30.458980560 -4.500000000 -36.458976746 30.458980560 -3.500000000 -36.458976746 30.458980560 -2.500000000 -36.458976746 30.458980560 -1.500000000 -36.458976746 30.458980560 -0.500000000 -36.458976746 30.458980560 0.500000000 -36.458976746 30.458980560 1.500000000 -36.458976746 30.458980560 2.500000000 -36.458976746 30.458980560 3.500000000 -36.458976746 30.458980560 4.500000000 -36.458976746 30.458980560 5.500000000 -36.458976746 30.458980560 6.500000000 -36.458976746 30.458980560 7.500000000 -36.458976746 30.458980560 8.500000000 -36.458976746 30.458980560 9.500000000 -36.458976746 30.458980560 10.500000000 -36.458976746 30.458980560 11.500000000 -36.458976746 30.458980560 12.500000000 -36.458976746 30.458980560 13.500000000 -36.458976746 30.458980560 14.500000000 -36.458976746 30.458980560 15.500000000 -36.458976746 30.458980560 16.500000000 -36.458976746 30.458980560 17.500000000 -36.458976746 30.458980560 18.500000000 -36.458976746 30.458980560 19.500000000 -36.458976746 30.458980560 20.500000000 -36.458976746 30.458980560 21.500000000 -36.458976746 30.458980560 22.500000000 -36.458976746 30.458980560 
23.500000000 -36.458976746 30.458980560 24.500000000 -36.458972931 30.458980560 25.499988556 -36.458930969 30.458978653 26.499858856 -36.458606720 30.458948135 27.499073029 -36.456939697 30.458770752 28.495611191 -36.450424194 30.458080292 29.483789444 -36.430473328 30.456085205 30.451217651 -36.380989075 30.451217651 31.375356674 -36.280395508 30.438949585 32.216838837 -36.112342834 30.405673981 32.903377533 -35.879737854 30.336599350 -35.040893555 -35.040893555 30.413049698 -34.477191925 -35.518615723 30.410833359 33.477191925 -35.518619537 30.410833359 34.040893555 -35.040897369 30.413049698 -35.879737854 -33.903377533 30.336599350 -35.518615723 -34.477191925 30.410833359 34.518619537 -34.477191925 30.410833359 34.879737854 -33.903381348 30.336603165 -36.112342834 -33.216838837 30.405670166 35.112342834 -33.216838837 30.405670166 -36.280395508 -32.375358582 30.438953400 35.280395508 -32.375358582 30.438953400 -36.380989075 -31.451217651 30.451217651 35.380989075 -31.451217651 30.451217651 -36.430473328 -30.483789444 30.456085205 35.430473328 -30.483789444 30.456085205 -36.450424194 -29.495611191 30.458078384 35.450424194 -29.495611191 30.458078384 -36.456939697 -28.499073029 30.458770752 35.456939697 -28.499073029 30.458770752 -36.458606720 -27.499858856 30.458950043 35.458606720 -27.499858856 30.458950043 -36.458930969 -26.499988556 30.458980560 35.458930969 -26.499988556 30.458980560 -36.458976746 -25.500000000 30.458980560 35.458976746 -25.500000000 30.458980560 -36.458980560 -24.500000000 30.458980560 35.458976746 -24.500000000 30.458980560 -36.458980560 -23.500000000 30.458980560 35.458976746 -23.500000000 30.458980560 -36.458980560 -22.500000000 30.458980560 35.458976746 -22.500000000 30.458980560 -36.458980560 -21.500000000 30.458980560 35.458976746 -21.500000000 30.458980560 -36.458980560 -20.500000000 30.458980560 35.458976746 -20.500000000 30.458980560 -36.458980560 -19.500000000 30.458980560 35.458976746 -19.500000000 30.458980560 -36.458980560 
-18.500000000 30.458980560 35.458976746 -18.500000000 30.458980560 -36.458980560 -17.500000000 30.458980560 35.458976746 -17.500000000 30.458980560 -36.458980560 -16.500000000 30.458980560 35.458976746 -16.500000000 30.458980560 -36.458980560 -15.500000000 30.458980560 35.458976746 -15.500000000 30.458980560 -36.458980560 -14.500000000 30.458980560 35.458976746 -14.500000000 30.458980560 -36.458980560 -13.500000000 30.458980560 35.458976746 -13.500000000 30.458980560 -36.458980560 -12.500000000 30.458980560 35.458976746 -12.500000000 30.458980560 -36.458980560 -11.500000000 30.458980560 35.458976746 -11.500000000 30.458980560 -36.458980560 -10.500000000 30.458980560 35.458976746 -10.500000000 30.458980560 -36.458980560 -9.500000000 30.458980560 35.458976746 -9.500000000 30.458980560 -36.458980560 -8.500000000 30.458980560 35.458976746 -8.500000000 30.458980560 -36.458980560 -7.500000000 30.458980560 35.458976746 -7.500000000 30.458980560 -36.458980560 -6.500000000 30.458980560 35.458976746 -6.500000000 30.458980560 -36.458980560 -5.500000000 30.458980560 35.458976746 -5.500000000 30.458980560 -36.458980560 -4.500000000 30.458980560 35.458976746 -4.500000000 30.458980560 -36.458980560 -3.500000000 30.458980560 35.458976746 -3.500000000 30.458980560 -36.458980560 -2.500000000 30.458980560 35.458976746 -2.500000000 30.458980560 -36.458980560 -1.500000000 30.458980560 35.458976746 -1.500000000 30.458980560 -36.458980560 -0.500000000 30.458980560 35.458976746 -0.500000000 30.458980560 -36.458980560 0.500000000 30.458980560 35.458976746 0.500000000 30.458980560 -36.458980560 1.500000000 30.458980560 35.458976746 1.500000000 30.458980560 -36.458980560 2.500000000 30.458980560 35.458976746 2.500000000 30.458980560 -36.458980560 3.500000000 30.458980560 35.458976746 3.500000000 30.458980560 -36.458980560 4.500000000 30.458980560 35.458976746 4.500000000 30.458980560 -36.458980560 5.500000000 30.458980560 35.458976746 5.500000000 30.458980560 -36.458980560 6.500000000 
30.458980560 35.458976746 6.500000000 30.458980560 -36.458980560 7.500000000 30.458980560 35.458976746 7.500000000 30.458980560 -36.458980560 8.500000000 30.458980560 35.458976746 8.500000000 30.458980560 -36.458980560 9.500000000 30.458980560 35.458976746 9.500000000 30.458980560 -36.458980560 10.500000000 30.458980560 35.458976746 10.500000000 30.458980560 -36.458980560 11.500000000 30.458980560 35.458976746 11.500000000 30.458980560 -36.458980560 12.500000000 30.458980560 35.458976746 12.500000000 30.458980560 -36.458980560 13.500000000 30.458980560 35.458976746 13.500000000 30.458980560 -36.458980560 14.500000000 30.458980560 35.458976746 14.500000000 30.458980560 -36.458980560 15.500000000 30.458980560 35.458976746 15.500000000 30.458980560 -36.458980560 16.500000000 30.458980560 35.458976746 16.500000000 30.458980560 -36.458980560 17.500000000 30.458980560 35.458976746 17.500000000 30.458980560 -36.458980560 18.500000000 30.458980560 35.458976746 18.500000000 30.458980560 -36.458980560 19.500000000 30.458980560 35.458976746 19.500000000 30.458980560 -36.458980560 20.500000000 30.458980560 35.458976746 20.500000000 30.458980560 -36.458980560 21.500000000 30.458980560 35.458976746 21.500000000 30.458980560 -36.458980560 22.500000000 30.458980560 35.458976746 22.500000000 30.458980560 -36.458980560 23.500000000 30.458980560 35.458976746 23.500000000 30.458980560 -36.458976746 24.500000000 30.458980560 35.458972931 24.500000000 30.458980560 -36.458934784 25.499988556 30.458978653 35.458930969 25.499988556 30.458978653 -36.458606720 26.499858856 30.458948135 35.458606720 26.499858856 30.458948135 -36.456939697 27.499073029 30.458770752 35.456939697 27.499073029 30.458770752 -36.450428009 28.495611191 30.458080292 35.450424194 28.495611191 30.458080292 -36.430473328 29.483789444 30.456085205 35.430473328 29.483789444 30.456085205 -36.380989075 30.451217651 30.451217651 35.380989075 30.451217651 30.451217651 -36.280395508 31.375356674 30.438953400 35.280395508 
31.375356674 30.438949585 -36.112342834 32.216835022 30.405673981 35.112342834 32.216835022 30.405673981 -35.879737854 32.903377533 30.336599350 -35.518615723 33.477191925 30.410833359 34.518615723 33.477191925 30.410833359 34.879737854 32.903377533 30.336599350 -35.040893555 34.040893555 30.413049698 -34.477191925 34.518615723 30.410833359 33.477191925 34.518615723 30.410833359 34.040893555 34.040893555 30.413049698 -33.903377533 34.879737854 30.336599350 -33.216838837 35.112342834 30.405670166 -32.375358582 35.280395508 30.438953400 -31.451217651 35.380989075 30.451217651 -30.483789444 35.430473328 30.456085205 -29.495611191 35.450424194 30.458078384 -28.499073029 35.456939697 30.458770752 -27.499858856 35.458606720 30.458950043 -26.499988556 35.458930969 30.458980560 -25.500000000 35.458976746 30.458980560 -24.500000000 35.458980560 30.458980560 -23.500000000 35.458980560 30.458980560 -22.500000000 35.458980560 30.458980560 -21.500000000 35.458980560 30.458980560 -20.500000000 35.458980560 30.458980560 -19.500000000 35.458980560 30.458980560 -18.500000000 35.458980560 30.458980560 -17.500000000 35.458980560 30.458980560 -16.500000000 35.458980560 30.458980560 -15.500000000 35.458980560 30.458980560 -14.500000000 35.458980560 30.458980560 -13.500000000 35.458980560 30.458980560 -12.500000000 35.458980560 30.458980560 -11.500000000 35.458980560 30.458980560 -10.500000000 35.458980560 30.458980560 -9.500000000 35.458980560 30.458980560 -8.500000000 35.458980560 30.458980560 -7.500000000 35.458980560 30.458980560 -6.500000000 35.458980560 30.458980560 -5.500000000 35.458980560 30.458980560 -4.500000000 35.458980560 30.458980560 -3.500000000 35.458980560 30.458980560 -2.500000000 35.458980560 30.458980560 -1.500000000 35.458980560 30.458980560 -0.500000000 35.458980560 30.458980560 0.500000000 35.458980560 30.458980560 1.500000000 35.458980560 30.458980560 2.500000000 35.458980560 30.458980560 3.500000000 35.458980560 30.458980560 4.500000000 35.458980560 
30.458980560 5.500000000 35.458980560 30.458980560 6.500000000 35.458980560 30.458980560 7.500000000 35.458980560 30.458980560 8.500000000 35.458980560 30.458980560 9.500000000 35.458980560 30.458980560 10.500000000 35.458980560 30.458980560 11.500000000 35.458980560 30.458980560 12.500000000 35.458980560 30.458980560 13.500000000 35.458980560 30.458980560 14.500000000 35.458980560 30.458980560 15.500000000 35.458980560 30.458980560 16.500000000 35.458980560 30.458980560 17.500000000 35.458980560 30.458980560 18.500000000 35.458980560 30.458980560 19.500000000 35.458980560 30.458980560 20.500000000 35.458980560 30.458980560 21.500000000 35.458980560 30.458980560 22.500000000 35.458980560 30.458980560 23.500000000 35.458980560 30.458980560 24.500000000 35.458976746 30.458980560 25.499988556 35.458934784 30.458978653 26.499858856 35.458606720 30.458948135 27.499073029 35.456939697 30.458770752 28.495611191 35.450428009 30.458080292 29.483789444 35.430473328 30.456085205 30.451217651 35.380989075 30.451217651 31.375360489 35.280395508 30.438953400 32.216842651 35.112342834 30.405673981 32.903377533 34.879737854 30.336603165 -33.840934753 -35.797355652 31.035345078 -33.254272461 -35.954723358 31.334243774 -32.371025085 -36.154674530 31.371026993 -31.438953400 -36.280391693 31.375360489 -30.476007462 -36.344280243 31.379222870 -29.492319107 -36.371486664 31.382312775 -28.498052597 -36.381027222 31.383813858 -27.499622345 -36.383720398 31.384298325 -26.499948502 -36.384304047 31.384403229 -25.499996185 -36.384407043 31.384418488 -24.500000000 -36.384418488 31.384418488 -23.500000000 -36.384418488 31.384418488 -22.500000000 -36.384418488 31.384418488 -21.500000000 -36.384418488 31.384418488 -20.500000000 -36.384418488 31.384418488 -19.500000000 -36.384418488 31.384418488 -18.500000000 -36.384418488 31.384418488 -17.500000000 -36.384418488 31.384418488 -16.500000000 -36.384418488 31.384418488 -15.500000000 -36.384418488 31.384418488 -14.500000000 -36.384418488 31.384418488 
-13.500000000 -36.384418488 31.384418488 -12.500000000 -36.384418488 31.384418488 -11.500000000 -36.384418488 31.384418488 -10.500000000 -36.384418488 31.384418488 -9.500000000 -36.384418488 31.384418488 -8.500000000 -36.384418488 31.384418488 -7.500000000 -36.384418488 31.384418488 -6.500000000 -36.384418488 31.384418488 -5.500000000 -36.384418488 31.384418488 -4.500000000 -36.384418488 31.384418488 -3.500000000 -36.384418488 31.384418488 -2.500000000 -36.384418488 31.384418488 -1.500000000 -36.384418488 31.384418488 -0.500000000 -36.384418488 31.384418488 0.500000000 -36.384418488 31.384418488 1.500000000 -36.384418488 31.384418488 2.500000000 -36.384418488 31.384418488 3.500000000 -36.384418488 31.384418488 4.500000000 -36.384418488 31.384418488 5.500000000 -36.384418488 31.384418488 6.500000000 -36.384418488 31.384418488 7.500000000 -36.384418488 31.384418488 8.500000000 -36.384418488 31.384418488 9.500000000 -36.384418488 31.384418488 10.500000000 -36.384418488 31.384418488 11.500000000 -36.384418488 31.384418488 12.500000000 -36.384418488 31.384418488 13.500000000 -36.384418488 31.384418488 14.500000000 -36.384418488 31.384418488 15.500000000 -36.384418488 31.384418488 16.500000000 -36.384418488 31.384418488 17.500000000 -36.384418488 31.384418488 18.500000000 -36.384418488 31.384418488 19.500000000 -36.384418488 31.384418488 20.500000000 -36.384418488 31.384418488 21.500000000 -36.384418488 31.384418488 22.500000000 -36.384418488 31.384418488 23.500000000 -36.384410858 31.384418488 24.499996185 -36.384407043 31.384418488 25.499948502 -36.384307861 31.384405136 26.499618530 -36.383720398 31.384296417 27.498050690 -36.381027222 31.383813858 28.492321014 -36.371486664 31.382312775 29.476007462 -36.344284058 31.379222870 30.438953400 -36.280395508 31.375360489 31.371026993 -36.154670715 31.371026993 32.254276276 -35.954719543 31.334241867 32.840930939 -35.797355652 31.035345078 -34.912059784 -34.912059784 31.313602448 -34.260543823 -35.428531647 31.380739212 
-33.678352356 -35.670326233 31.736053467 32.678352356 -35.670326233 31.736053467 33.260547638 -35.428535461 31.380739212 33.912063599 -34.912063599 31.313602448 -35.797355652 -33.840930939 31.035345078 -35.428535461 -34.260543823 31.380739212 34.428535461 -34.260543823 31.380739212 34.797355652 -33.840930939 31.035345078 -35.954723358 -33.254272461 31.334243774 -35.670322418 -33.678352356 31.736053467 34.670326233 -33.678352356 31.736053467 34.954723358 -33.254272461 31.334243774 -36.154674530 -32.371025085 31.371026993 35.154674530 -32.371025085 31.371026993 -36.280391693 -31.438953400 31.375360489 35.280395508 -31.438953400 31.375360489 -36.344280243 -30.476007462 31.379222870 35.344280243 -30.476007462 31.379222870 -36.371486664 -29.492321014 31.382312775 35.371486664 -29.492319107 31.382312775 -36.381027222 -28.498050690 31.383813858 35.381027222 -28.498052597 31.383813858 -36.383720398 -27.499618530 31.384296417 35.383720398 -27.499622345 31.384298325 -36.384307861 -26.499948502 31.384403229 35.384304047 -26.499948502 31.384403229 -36.384407043 -25.499996185 31.384418488 35.384407043 -25.499996185 31.384418488 -36.384418488 -24.500000000 31.384418488 35.384418488 -24.500000000 31.384418488 -36.384418488 -23.500000000 31.384418488 35.384418488 -23.500000000 31.384418488 -36.384418488 -22.500000000 31.384418488 35.384418488 -22.500000000 31.384418488 -36.384418488 -21.500000000 31.384418488 35.384418488 -21.500000000 31.384418488 -36.384418488 -20.500000000 31.384418488 35.384418488 -20.500000000 31.384418488 -36.384418488 -19.500000000 31.384418488 35.384418488 -19.500000000 31.384418488 -36.384418488 -18.500000000 31.384418488 35.384418488 -18.500000000 31.384418488 -36.384418488 -17.500000000 31.384418488 35.384418488 -17.500000000 31.384418488 -36.384418488 -16.500000000 31.384418488 35.384418488 -16.500000000 31.384418488 -36.384418488 -15.500000000 31.384418488 35.384418488 -15.500000000 31.384418488 -36.384418488 -14.500000000 31.384418488 35.384418488 
-14.500000000 31.384418488 -36.384418488 -13.500000000 31.384418488 35.384418488 -13.500000000 31.384418488 -36.384418488 -12.500000000 31.384418488 35.384418488 -12.500000000 31.384418488 -36.384418488 -11.500000000 31.384418488 35.384418488 -11.500000000 31.384418488 -36.384418488 -10.500000000 31.384418488 35.384418488 -10.500000000 31.384418488 -36.384418488 -9.500000000 31.384418488 35.384418488 -9.500000000 31.384418488 -36.384418488 -8.500000000 31.384418488 35.384418488 -8.500000000 31.384418488 -36.384418488 -7.500000000 31.384418488 35.384418488 -7.500000000 31.384418488 -36.384418488 -6.500000000 31.384418488 35.384418488 -6.500000000 31.384418488 -36.384418488 -5.500000000 31.384418488 35.384418488 -5.500000000 31.384418488 -36.384418488 -4.500000000 31.384418488 35.384418488 -4.500000000 31.384418488 -36.384418488 -3.500000000 31.384418488 35.384418488 -3.500000000 31.384418488 -36.384418488 -2.500000000 31.384418488 35.384418488 -2.500000000 31.384418488 -36.384418488 -1.500000000 31.384418488 35.384418488 -1.500000000 31.384418488 -36.384418488 -0.500000000 31.384418488 35.384418488 -0.500000000 31.384418488 -36.384418488 0.500000000 31.384418488 35.384418488 0.500000000 31.384418488 -36.384418488 1.500000000 31.384418488 35.384418488 1.500000000 31.384418488 -36.384418488 2.500000000 31.384418488 35.384418488 2.500000000 31.384418488 -36.384418488 3.500000000 31.384418488 35.384418488 3.500000000 31.384418488 -36.384418488 4.500000000 31.384418488 35.384418488 4.500000000 31.384418488 -36.384418488 5.500000000 31.384418488 35.384418488 5.500000000 31.384418488 -36.384418488 6.500000000 31.384418488 35.384418488 6.500000000 31.384418488 -36.384418488 7.500000000 31.384418488 35.384418488 7.500000000 31.384418488 -36.384418488 8.500000000 31.384418488 35.384418488 8.500000000 31.384418488 -36.384418488 9.500000000 31.384418488 35.384418488 9.500000000 31.384418488 -36.384418488 10.500000000 31.384418488 35.384418488 10.500000000 31.384418488 
-36.384418488 11.500000000 31.384418488 35.384418488 11.500000000 31.384418488 -36.384418488 12.500000000 31.384418488 35.384418488 12.500000000 31.384418488 -36.384418488 13.500000000 31.384418488 35.384418488 13.500000000 31.384418488 -36.384418488 14.500000000 31.384418488 35.384418488 14.500000000 31.384418488 -36.384418488 15.500000000 31.384418488 35.384418488 15.500000000 31.384418488 -36.384418488 16.500000000 31.384418488 35.384418488 16.500000000 31.384418488 -36.384418488 17.500000000 31.384418488 35.384418488 17.500000000 31.384418488 -36.384418488 18.500000000 31.384418488 35.384418488 18.500000000 31.384418488 -36.384418488 19.500000000 31.384418488 35.384418488 19.500000000 31.384418488 -36.384418488 20.500000000 31.384418488 35.384418488 20.500000000 31.384418488 -36.384418488 21.500000000 31.384418488 35.384418488 21.500000000 31.384418488 -36.384418488 22.500000000 31.384418488 35.384418488 22.500000000 31.384418488 -36.384418488 23.500000000 31.384418488 35.384410858 23.500000000 31.384418488 -36.384407043 24.499996185 31.384418488 35.384407043 24.499996185 31.384418488 -36.384311676 25.499948502 31.384403229 35.384307861 25.499948502 31.384405136 -36.383720398 26.499622345 31.384296417 35.383720398 26.499618530 31.384296417 -36.381027222 27.498052597 31.383813858 35.381023407 27.498050690 31.383813858 -36.371486664 28.492321014 31.382312775 35.371486664 28.492321014 31.382312775 -36.344280243 29.476007462 31.379222870 35.344280243 29.476007462 31.379222870 -36.280395508 30.438953400 31.375360489 35.280395508 30.438953400 31.375360489 -36.154670715 31.371026993 31.371028900 35.154670715 31.371026993 31.371026993 -35.954723358 32.254276276 31.334243774 -35.670322418 32.678352356 31.736053467 34.670326233 32.678352356 31.736053467 34.954719543 32.254276276 31.334241867 -35.797355652 32.840934753 31.035345078 -35.428531647 33.260543823 31.380739212 34.428531647 33.260543823 31.380739212 34.797351837 32.840930939 31.035345078 -34.912059784 
33.912059784 31.313602448 -34.260543823 34.428535461 31.380739212 -33.678352356 34.670322418 31.736053467 32.678352356 34.670326233 31.736053467 33.260543823 34.428535461 31.380739212 33.912063599 33.912063599 31.313602448 -33.840930939 34.797355652 31.035345078 -33.254272461 34.954723358 31.334243774 -32.371025085 35.154674530 31.371026993 -31.438953400 35.280391693 31.375360489 -30.476007462 35.344280243 31.379222870 -29.492321014 35.371486664 31.382312775 -28.498050690 35.381027222 31.383813858 -27.499618530 35.383720398 31.384296417 -26.499948502 35.384307861 31.384403229 -25.499996185 35.384407043 31.384418488 -24.500000000 35.384418488 31.384418488 -23.500000000 35.384418488 31.384418488 -22.500000000 35.384418488 31.384418488 -21.500000000 35.384418488 31.384418488 -20.500000000 35.384418488 31.384418488 -19.500000000 35.384418488 31.384418488 -18.500000000 35.384418488 31.384418488 -17.500000000 35.384418488 31.384418488 -16.500000000 35.384418488 31.384418488 -15.500000000 35.384418488 31.384418488 -14.500000000 35.384418488 31.384418488 -13.500000000 35.384418488 31.384418488 -12.500000000 35.384418488 31.384418488 -11.500000000 35.384418488 31.384418488 -10.500000000 35.384418488 31.384418488 -9.500000000 35.384418488 31.384418488 -8.500000000 35.384418488 31.384418488 -7.500000000 35.384418488 31.384418488 -6.500000000 35.384418488 31.384418488 -5.500000000 35.384418488 31.384418488 -4.500000000 35.384418488 31.384418488 -3.500000000 35.384418488 31.384418488 -2.500000000 35.384418488 31.384418488 -1.500000000 35.384418488 31.384418488 -0.500000000 35.384418488 31.384418488 0.500000000 35.384418488 31.384418488 1.500000000 35.384418488 31.384418488 2.500000000 35.384418488 31.384418488 3.500000000 35.384418488 31.384418488 4.500000000 35.384418488 31.384418488 5.500000000 35.384418488 31.384418488 6.500000000 35.384418488 31.384418488 7.500000000 35.384418488 31.384418488 8.500000000 35.384418488 31.384418488 9.500000000 35.384418488 31.384418488 
10.500000000 35.384418488 31.384418488 11.500000000 35.384418488 31.384418488 12.500000000 35.384418488 31.384418488 13.500000000 35.384418488 31.384418488 14.500000000 35.384418488 31.384418488 15.500000000 35.384418488 31.384418488 16.500000000 35.384418488 31.384418488 17.500000000 35.384418488 31.384418488 18.500000000 35.384418488 31.384418488 19.500000000 35.384418488 31.384418488 20.500000000 35.384418488 31.384418488 21.500000000 35.384418488 31.384418488 22.500000000 35.384418488 31.384418488 23.500000000 35.384418488 31.384418488 24.499996185 35.384407043 31.384418488 25.499948502 35.384311676 31.384403229 26.499622345 35.383720398 31.384296417 27.498052597 35.381027222 31.383813858 28.492321014 35.371486664 31.382312775 29.476007462 35.344280243 31.379222870 30.438953400 35.280395508 31.375360489 31.371026993 35.154674530 31.371028900 32.254272461 34.954723358 31.334243774 32.840934753 34.797355652 31.035345078 -33.030693054 -35.846317291 32.030693054 -32.334243774 -35.954723358 32.254276276 -31.405670166 -36.112342834 32.216838837 -30.460596085 -36.185962677 32.220710754 -29.486719131 -36.217510223 32.227870941 -28.496377945 -36.228950500 32.231601715 -27.499225616 -36.232303619 32.232860565 -26.499872208 -36.233070374 32.233165741 -25.499986649 -36.233203888 32.233215332 -24.499998093 -36.233222961 32.233222961 -23.500000000 -36.233222961 32.233222961 -22.500000000 -36.233222961 32.233222961 -21.500000000 -36.233222961 32.233222961 -20.500000000 -36.233222961 32.233222961 -19.500000000 -36.233222961 32.233222961 -18.500000000 -36.233222961 32.233222961 -17.500000000 -36.233222961 32.233222961 -16.500000000 -36.233222961 32.233222961 -15.500000000 -36.233222961 32.233222961 -14.500000000 -36.233222961 32.233222961 -13.500000000 -36.233222961 32.233222961 -12.500000000 -36.233222961 32.233222961 -11.500000000 -36.233222961 32.233222961 -10.500000000 -36.233222961 32.233222961 -9.500000000 -36.233222961 32.233222961 -8.500000000 -36.233222961 32.233222961 
-7.500000000 -36.233222961 32.233222961 -6.500000000 -36.233222961 32.233222961 -5.500000000 -36.233222961 32.233222961 -4.500000000 -36.233222961 32.233222961 -3.500000000 -36.233222961 32.233222961 -2.500000000 -36.233222961 32.233222961 -1.500000000 -36.233222961 32.233222961 -0.500000000 -36.233222961 32.233222961 0.500000000 -36.233222961 32.233222961 1.500000000 -36.233222961 32.233222961 2.500000000 -36.233222961 32.233222961 3.500000000 -36.233222961 32.233222961 4.500000000 -36.233222961 32.233222961 5.500000000 -36.233222961 32.233222961 6.500000000 -36.233222961 32.233222961 7.500000000 -36.233222961 32.233222961 8.500000000 -36.233222961 32.233222961 9.500000000 -36.233222961 32.233222961 10.500000000 -36.233222961 32.233222961 11.500000000 -36.233222961 32.233222961 12.500000000 -36.233222961 32.233222961 13.500000000 -36.233222961 32.233222961 14.500000000 -36.233222961 32.233222961 15.500000000 -36.233222961 32.233222961 16.500000000 -36.233222961 32.233222961 17.500000000 -36.233222961 32.233222961 18.500000000 -36.233222961 32.233222961 19.500000000 -36.233222961 32.233222961 20.500000000 -36.233222961 32.233222961 21.500000000 -36.233222961 32.233222961 22.500000000 -36.233222961 32.233222961 23.499998093 -36.233222961 32.233222961 24.499986649 -36.233203888 32.233219147 25.499874115 -36.233070374 32.233165741 26.499225616 -36.232303619 32.232860565 27.496377945 -36.228954315 32.231597900 28.486715317 -36.217510223 32.227874756 29.460596085 -36.185966492 32.220710754 30.405673981 -36.112342834 32.216835022 31.334245682 -35.954723358 32.254276276 32.030693054 -35.846313477 32.030693054 -34.772159576 -34.772163391 32.006500244 -34.231872559 -35.057430267 32.316947937 -33.424438477 -35.498546600 32.424438477 -32.736053467 -35.670326233 32.678352356 31.736051559 -35.670322418 32.678352356 32.424442291 -35.498542786 32.424442291 33.231868744 -35.057430267 32.316951752 33.772163391 -34.772163391 32.006500244 -35.057430267 -34.231868744 32.316947937 
-34.565631866 -34.565628052 32.672851562 33.565631866 -34.565631866 32.672851562 34.057430267 -34.231872559 32.316947937 -35.846313477 -33.030693054 32.030693054 -35.498538971 -33.424438477 32.424442291 34.498542786 -33.424442291 32.424438477 34.846317291 -33.030693054 32.030693054 -35.954715729 -32.334243774 32.254276276 -35.670322418 -32.736049652 32.678356171 34.670326233 -32.736049652 32.678352356 34.954723358 -32.334243774 32.254276276 -36.112346649 -31.405673981 32.216838837 35.112342834 -31.405673981 32.216838837 -36.185966492 -30.460596085 32.220714569 35.185966492 -30.460596085 32.220710754 -36.217510223 -29.486719131 32.227870941 35.217510223 -29.486719131 32.227870941 -36.228950500 -28.496377945 32.231597900 35.228954315 -28.496377945 32.231597900 -36.232299805 -27.499221802 32.232860565 35.232303619 -27.499225616 32.232860565 -36.233070374 -26.499872208 32.233165741 35.233070374 -26.499872208 32.233165741 -36.233207703 -25.499986649 32.233215332 35.233203888 -25.499986649 32.233215332 -36.233222961 -24.499998093 32.233222961 35.233222961 -24.499998093 32.233222961 -36.233222961 -23.500000000 32.233222961 35.233222961 -23.500000000 32.233222961 -36.233222961 -22.500000000 32.233222961 35.233222961 -22.500000000 32.233222961 -36.233222961 -21.500000000 32.233222961 35.233222961 -21.500000000 32.233222961 -36.233222961 -20.500000000 32.233222961 35.233222961 -20.500000000 32.233222961 -36.233222961 -19.500000000 32.233222961 35.233222961 -19.500000000 32.233222961 -36.233222961 -18.500000000 32.233222961 35.233222961 -18.500000000 32.233222961 -36.233222961 -17.500000000 32.233222961 35.233222961 -17.500000000 32.233222961 -36.233222961 -16.500000000 32.233222961 35.233222961 -16.500000000 32.233222961 -36.233222961 -15.500000000 32.233222961 35.233222961 -15.500000000 32.233222961 -36.233222961 -14.500000000 32.233222961 35.233222961 -14.500000000 32.233222961 -36.233222961 -13.500000000 32.233222961 35.233222961 -13.500000000 32.233222961 -36.233222961 
-12.500000000 32.233222961 35.233222961 -12.500000000 32.233222961 -36.233222961 -11.500000000 32.233222961 35.233222961 -11.500000000 32.233222961 -36.233222961 -10.500000000 32.233222961 35.233222961 -10.500000000 32.233222961 -36.233222961 -9.500000000 32.233222961 35.233222961 -9.500000000 32.233222961 -36.233222961 -8.500000000 32.233222961 35.233222961 -8.500000000 32.233222961 -36.233222961 -7.500000000 32.233222961 35.233222961 -7.500000000 32.233222961 -36.233222961 -6.500000000 32.233222961 35.233222961 -6.500000000 32.233222961 -36.233222961 -5.500000000 32.233222961 35.233222961 -5.500000000 32.233222961 -36.233222961 -4.500000000 32.233222961 35.233222961 -4.500000000 32.233222961 -36.233222961 -3.500000000 32.233222961 35.233222961 -3.500000000 32.233222961 -36.233222961 -2.500000000 32.233222961 35.233222961 -2.500000000 32.233222961 -36.233222961 -1.500000000 32.233222961 35.233222961 -1.500000000 32.233222961 -36.233222961 -0.500000000 32.233222961 35.233222961 -0.500000000 32.233222961 -36.233222961 0.500000000 32.233222961 35.233222961 0.500000000 32.233222961 -36.233222961 1.500000000 32.233222961 35.233222961 1.500000000 32.233222961 -36.233222961 2.500000000 32.233222961 35.233222961 2.500000000 32.233222961 -36.233222961 3.500000000 32.233222961 35.233222961 3.500000000 32.233222961 -36.233222961 4.500000000 32.233222961 35.233222961 4.500000000 32.233222961 -36.233222961 5.500000000 32.233222961 35.233222961 5.500000000 32.233222961 -36.233222961 6.500000000 32.233222961 35.233222961 6.500000000 32.233222961 -36.233222961 7.500000000 32.233222961 35.233222961 7.500000000 32.233222961 -36.233222961 8.500000000 32.233222961 35.233222961 8.500000000 32.233222961 -36.233222961 9.500000000 32.233222961 35.233222961 9.500000000 32.233222961 -36.233222961 10.500000000 32.233222961 35.233222961 10.500000000 32.233222961 -36.233222961 11.500000000 32.233222961 35.233222961 11.500000000 32.233222961 -36.233222961 12.500000000 32.233222961 35.233222961 
12.500000000 32.233222961 -36.233222961 13.500000000 32.233222961 35.233222961 13.500000000 32.233222961 -36.233222961 14.500000000 32.233222961 35.233222961 14.500000000 32.233222961 -36.233222961 15.500000000 32.233222961 35.233222961 15.500000000 32.233222961 -36.233222961 16.500000000 32.233222961 35.233222961 16.500000000 32.233222961 -36.233222961 17.500000000 32.233222961 35.233222961 17.500000000 32.233222961 -36.233222961 18.500000000 32.233222961 35.233222961 18.500000000 32.233222961 -36.233222961 19.500000000 32.233222961 35.233222961 19.500000000 32.233222961 -36.233222961 20.500000000 32.233222961 35.233222961 20.500000000 32.233222961 -36.233222961 21.500000000 32.233222961 35.233222961 21.500000000 32.233222961 -36.233222961 22.500000000 32.233222961 35.233222961 22.500000000 32.233222961 -36.233222961 23.499998093 32.233222961 35.233222961 23.499998093 32.233222961 -36.233207703 24.499986649 32.233219147 35.233203888 24.499986649 32.233219147 -36.233070374 25.499872208 32.233165741 35.233070374 25.499874115 32.233165741 -36.232303619 26.499225616 32.232860565 35.232303619 26.499225616 32.232860565 -36.228954315 27.496377945 32.231597900 35.228954315 27.496377945 32.231597900 -36.217510223 28.486719131 32.227874756 35.217510223 28.486715317 32.227874756 -36.185966492 29.460592270 32.220714569 35.185962677 29.460596085 32.220710754 -36.112342834 30.405673981 32.216838837 35.112342834 30.405673981 32.216835022 -35.954723358 31.334241867 32.254276276 -35.670322418 31.736051559 32.678356171 34.670322418 31.736051559 32.678352356 34.954723358 31.334245682 32.254276276 -35.846313477 32.030693054 32.030693054 -35.498542786 32.424438477 32.424442291 34.498542786 32.424442291 32.424438477 34.846313477 32.030693054 32.030693054 -35.057430267 33.231868744 32.316951752 -34.565628052 33.565628052 32.672851562 33.565628052 33.565628052 32.672851562 34.057430267 33.231868744 32.316951752 -34.772159576 33.772159576 32.006500244 -34.231868744 34.057430267 
32.316947937 -33.424438477 34.498538971 32.424442291 -32.736049652 34.670322418 32.678356171 31.736051559 34.670322418 32.678356171 32.424438477 34.498542786 32.424442291 33.231868744 34.057430267 32.316947937 33.772163391 33.772163391 32.006500244 -33.030693054 34.846313477 32.030693054 -32.334243774 34.954715729 32.254276276 -31.405673981 35.112346649 32.216838837 -30.460596085 35.185966492 32.220714569 -29.486719131 35.217510223 32.227870941 -28.496377945 35.228954315 32.231597900 -27.499221802 35.232299805 32.232860565 -26.499872208 35.233070374 32.233165741 -25.499986649 35.233207703 32.233215332 -24.499998093 35.233222961 32.233222961 -23.500000000 35.233222961 32.233222961 -22.500000000 35.233222961 32.233222961 -21.500000000 35.233222961 32.233222961 -20.500000000 35.233222961 32.233222961 -19.500000000 35.233222961 32.233222961 -18.500000000 35.233222961 32.233222961 -17.500000000 35.233222961 32.233222961 -16.500000000 35.233222961 32.233222961 -15.500000000 35.233222961 32.233222961 -14.500000000 35.233222961 32.233222961 -13.500000000 35.233222961 32.233222961 -12.500000000 35.233222961 32.233222961 -11.500000000 35.233222961 32.233222961 -10.500000000 35.233222961 32.233222961 -9.500000000 35.233222961 32.233222961 -8.500000000 35.233222961 32.233222961 -7.500000000 35.233222961 32.233222961 -6.500000000 35.233222961 32.233222961 -5.500000000 35.233222961 32.233222961 -4.500000000 35.233222961 32.233222961 -3.500000000 35.233222961 32.233222961 -2.500000000 35.233222961 32.233222961 -1.500000000 35.233222961 32.233222961 -0.500000000 35.233222961 32.233222961 0.500000000 35.233222961 32.233222961 1.500000000 35.233222961 32.233222961 2.500000000 35.233222961 32.233222961 3.500000000 35.233222961 32.233222961 4.500000000 35.233222961 32.233222961 5.500000000 35.233222961 32.233222961 6.500000000 35.233222961 32.233222961 7.500000000 35.233222961 32.233222961 8.500000000 35.233222961 32.233222961 9.500000000 35.233222961 32.233222961 10.500000000 
35.233222961 32.233222961 11.500000000 35.233222961 32.233222961 12.500000000 35.233222961 32.233222961 13.500000000 35.233222961 32.233222961 14.500000000 35.233222961 32.233222961 15.500000000 35.233222961 32.233222961 16.500000000 35.233222961 32.233222961 17.500000000 35.233222961 32.233222961 18.500000000 35.233222961 32.233222961 19.500000000 35.233222961 32.233222961 20.500000000 35.233222961 32.233222961 21.500000000 35.233222961 32.233222961 22.500000000 35.233222961 32.233222961 23.499998093 35.233222961 32.233222961 24.499986649 35.233207703 32.233219147 25.499872208 35.233070374 32.233165741 26.499225616 35.232303619 32.232860565 27.496377945 35.228954315 32.231597900 28.486719131 35.217510223 32.227874756 29.460592270 35.185966492 32.220714569 30.405673981 35.112346649 32.216838837 31.334241867 34.954723358 32.254276276 32.030689240 34.846317291 32.030693054 -32.035343170 -35.797355652 32.840930939 -31.336603165 -35.879737854 32.903373718 -30.442844391 -35.937091827 32.946807861 -29.481937408 -35.965759277 32.968868256 -28.495168686 -35.976860046 32.977619171 -27.498952866 -35.980201721 32.980335236 -26.499826431 -35.980957031 32.980976105 -25.499975204 -35.981086731 32.981086731 -24.499998093 -35.981101990 32.981105804 -23.500000000 -35.981105804 32.981105804 -22.500000000 -35.981105804 32.981105804 -21.500000000 -35.981105804 32.981105804 -20.500000000 -35.981105804 32.981105804 -19.500000000 -35.981105804 32.981105804 -18.500000000 -35.981105804 32.981105804 -17.500000000 -35.981105804 32.981105804 -16.500000000 -35.981105804 32.981105804 -15.500000000 -35.981105804 32.981105804 -14.500000000 -35.981105804 32.981105804 -13.500000000 -35.981105804 32.981105804 -12.500000000 -35.981105804 32.981105804 -11.500000000 -35.981105804 32.981105804 -10.500000000 -35.981105804 32.981105804 -9.500000000 -35.981105804 32.981105804 -8.500000000 -35.981105804 32.981105804 -7.500000000 -35.981105804 32.981105804 -6.500000000 -35.981105804 32.981105804 -5.500000000 
-35.981105804 32.981105804 -4.500000000 -35.981105804 32.981105804 -3.500000000 -35.981105804 32.981105804 -2.500000000 -35.981105804 32.981105804 -1.500000000 -35.981105804 32.981105804 -0.500000000 -35.981105804 32.981105804 0.500000000 -35.981105804 32.981105804 1.500000000 -35.981105804 32.981105804 2.500000000 -35.981105804 32.981105804 3.500000000 -35.981105804 32.981105804 4.500000000 -35.981105804 32.981105804 5.500000000 -35.981105804 32.981105804 6.500000000 -35.981105804 32.981105804 7.500000000 -35.981105804 32.981105804 8.500000000 -35.981105804 32.981105804 9.500000000 -35.981105804 32.981105804 10.500000000 -35.981105804 32.981105804 11.500000000 -35.981105804 32.981105804 12.500000000 -35.981105804 32.981105804 13.500000000 -35.981105804 32.981105804 14.500000000 -35.981105804 32.981105804 15.500000000 -35.981105804 32.981105804 16.500000000 -35.981105804 32.981105804 17.500000000 -35.981105804 32.981105804 18.500000000 -35.981105804 32.981105804 19.500000000 -35.981105804 32.981105804 20.500000000 -35.981105804 32.981105804 21.500000000 -35.981105804 32.981105804 22.500000000 -35.981105804 32.981105804 23.499998093 -35.981105804 32.981101990 24.499979019 -35.981086731 32.981090546 25.499826431 -35.980960846 32.980976105 26.498950958 -35.980201721 32.980335236 27.495168686 -35.976860046 32.977615356 28.481937408 -35.965759277 32.968864441 29.442840576 -35.937095642 32.946807861 30.336603165 -35.879737854 32.903373718 31.035345078 -35.797355652 32.840927124 -33.980762482 -34.842590332 32.980762482 -33.316951752 -35.057430267 33.231868744 -32.380741119 -35.428535461 33.260547638 -31.410833359 -35.518615723 33.477191925 -30.459177017 -35.577739716 33.567745209 -29.485630035 -35.607204437 33.604709625 -28.496026993 -35.618598938 33.617927551 -27.499156952 -35.621978760 33.621795654 -26.499868393 -35.622734070 33.622692108 -25.499986649 -35.622856140 33.622852325 -24.500000000 -35.622871399 33.622871399 -23.500000000 -35.622871399 33.622871399 
-22.500000000 -35.622871399 33.622871399 -21.500000000 -35.622871399 33.622871399 -20.500000000 -35.622871399 33.622871399 -19.500000000 -35.622871399 33.622871399 -18.500000000 -35.622871399 33.622871399 -17.500000000 -35.622871399 33.622871399 -16.500000000 -35.622871399 33.622871399 -15.500000000 -35.622871399 33.622871399 -14.500000000 -35.622871399 33.622871399 -13.500000000 -35.622871399 33.622871399 -12.500000000 -35.622871399 33.622871399 -11.500000000 -35.622871399 33.622871399 -10.500000000 -35.622871399 33.622871399 -9.500000000 -35.622871399 33.622871399 -8.500000000 -35.622871399 33.622871399 -7.500000000 -35.622871399 33.622871399 -6.500000000 -35.622871399 33.622871399 -5.500000000 -35.622871399 33.622871399 -4.500000000 -35.622871399 33.622871399 -3.500000000 -35.622871399 33.622871399 -2.500000000 -35.622871399 33.622871399 -1.500000000 -35.622871399 33.622871399 -0.500000000 -35.622871399 33.622871399 0.500000000 -35.622871399 33.622871399 1.500000000 -35.622871399 33.622871399 2.500000000 -35.622871399 33.622871399 3.500000000 -35.622871399 33.622871399 4.500000000 -35.622871399 33.622871399 5.500000000 -35.622871399 33.622871399 6.500000000 -35.622871399 33.622871399 7.500000000 -35.622871399 33.622871399 8.500000000 -35.622871399 33.622871399 9.500000000 -35.622871399 33.622871399 10.500000000 -35.622871399 33.622871399 11.500000000 -35.622871399 33.622871399 12.500000000 -35.622871399 33.622871399 13.500000000 -35.622871399 33.622871399 14.500000000 -35.622871399 33.622871399 15.500000000 -35.622871399 33.622871399 16.500000000 -35.622871399 33.622871399 17.500000000 -35.622871399 33.622871399 18.500000000 -35.622871399 33.622871399 19.500000000 -35.622871399 33.622871399 20.500000000 -35.622871399 33.622871399 21.500000000 -35.622871399 33.622871399 22.500000000 -35.622871399 33.622871399 23.500000000 -35.622871399 33.622871399 24.499986649 -35.622856140 33.622852325 25.499868393 -35.622741699 33.622692108 26.499156952 -35.621986389 
33.621795654 27.496026993 -35.618598938 33.617927551 28.485630035 -35.607208252 33.604709625 29.459178925 -35.577739716 33.567745209 30.410833359 -35.518615723 33.477184296 31.380737305 -35.428535461 33.260547638 32.316951752 -35.057430267 33.231868744 32.980762482 -34.842594147 32.980762482 -34.842590332 -33.980758667 32.980762482 -34.297924042 -34.297924042 33.297924042 -33.672851562 -34.565624237 33.565631866 32.672851562 -34.565628052 33.565631866 33.297924042 -34.297924042 33.297924042 33.842590332 -33.980762482 32.980758667 -35.057430267 -33.316947937 33.231872559 -34.565628052 -33.672847748 33.565631866 33.565628052 -33.672851562 33.565628052 34.057430267 -33.316947937 33.231868744 -35.797355652 -32.035343170 32.840934753 -35.428535461 -32.380737305 33.260543823 34.428535461 -32.380741119 33.260540009 34.797359467 -32.035343170 32.840927124 -35.879737854 -31.336603165 32.903373718 -35.518615723 -31.410833359 33.477191925 34.518615723 -31.410833359 33.477188110 34.879737854 -31.336603165 32.903373718 -35.937095642 -30.442844391 32.946807861 -35.577735901 -30.459178925 33.567745209 34.577739716 -30.459178925 33.567745209 34.937091827 -30.442844391 32.946807861 -35.965759277 -29.481937408 32.968864441 -35.607208252 -29.485630035 33.604705811 34.607204437 -29.485630035 33.604709625 34.965766907 -29.481937408 32.968864441 -35.976860046 -28.495168686 32.977611542 -35.618598938 -28.496026993 33.617927551 34.618598938 -28.496026993 33.617927551 34.976860046 -28.495168686 32.977615356 -35.980201721 -27.498950958 32.980331421 -35.621982574 -27.499156952 33.621795654 34.621982574 -27.499156952 33.621799469 34.980201721 -27.498952866 32.980331421 -35.980957031 -26.499826431 32.980968475 -35.622734070 -26.499868393 33.622692108 34.622734070 -26.499868393 33.622692108 34.980957031 -26.499826431 32.980976105 -35.981086731 -25.499979019 32.981086731 -35.622856140 -25.499986649 33.622844696 34.622856140 -25.499986649 33.622852325 34.981086731 -25.499975204 32.981090546 
-35.981101990 -24.499998093 32.981101990 -35.622871399 -24.500000000 33.622871399 34.622871399 -24.500000000 33.622871399 34.981101990 -24.499998093 32.981101990 -35.981101990 -23.500000000 32.981101990 -35.622871399 -23.500000000 33.622871399 34.622871399 -23.500000000 33.622871399 34.981101990 -23.500000000 32.981101990 -35.981101990 -22.500000000 32.981101990 -35.622871399 -22.500000000 33.622871399 34.622871399 -22.500000000 33.622871399 34.981101990 -22.500000000 32.981101990 -35.981101990 -21.500000000 32.981101990 -35.622871399 -21.500000000 33.622871399 34.622871399 -21.500000000 33.622871399 34.981101990 -21.500000000 32.981101990 -35.981101990 -20.500000000 32.981101990 -35.622871399 -20.500000000 33.622871399 34.622871399 -20.500000000 33.622871399 34.981101990 -20.500000000 32.981101990 -35.981101990 -19.500000000 32.981101990 -35.622871399 -19.500000000 33.622871399 34.622871399 -19.500000000 33.622871399 34.981101990 -19.500000000 32.981101990 -35.981101990 -18.500000000 32.981101990 -35.622871399 -18.500000000 33.622871399 34.622871399 -18.500000000 33.622871399 34.981101990 -18.500000000 32.981101990 -35.981101990 -17.500000000 32.981101990 -35.622871399 -17.500000000 33.622871399 34.622871399 -17.500000000 33.622871399 34.981101990 -17.500000000 32.981101990 -35.981101990 -16.500000000 32.981101990 -35.622871399 -16.500000000 33.622871399 34.622871399 -16.500000000 33.622871399 34.981101990 -16.500000000 32.981101990 -35.981101990 -15.500000000 32.981101990 -35.622871399 -15.500000000 33.622871399 34.622871399 -15.500000000 33.622871399 34.981101990 -15.500000000 32.981101990 -35.981101990 -14.500000000 32.981101990 -35.622871399 -14.500000000 33.622871399 34.622871399 -14.500000000 33.622871399 34.981101990 -14.500000000 32.981101990 -35.981101990 -13.500000000 32.981101990 -35.622871399 -13.500000000 33.622871399 34.622871399 -13.500000000 33.622871399 34.981101990 -13.500000000 32.981101990 -35.981101990 -12.500000000 32.981101990 -35.622871399 
-12.500000000 33.622871399 34.622871399 -12.500000000 33.622871399 34.981101990 -12.500000000 32.981101990 -35.981101990 -11.500000000 32.981101990 -35.622871399 -11.500000000 33.622871399 34.622871399 -11.500000000 33.622871399 34.981101990 -11.500000000 32.981101990 -35.981101990 -10.500000000 32.981101990 -35.622871399 -10.500000000 33.622871399 34.622871399 -10.500000000 33.622871399 34.981101990 -10.500000000 32.981101990 -35.981101990 -9.500000000 32.981101990 -35.622871399 -9.500000000 33.622871399 34.622871399 -9.500000000 33.622871399 34.981101990 -9.500000000 32.981101990 -35.981101990 -8.500000000 32.981101990 -35.622871399 -8.500000000 33.622871399 34.622871399 -8.500000000 33.622871399 34.981101990 -8.500000000 32.981101990 -35.981101990 -7.500000000 32.981101990 -35.622871399 -7.500000000 33.622871399 34.622871399 -7.500000000 33.622871399 34.981101990 -7.500000000 32.981101990 -35.981101990 -6.500000000 32.981101990 -35.622871399 -6.500000000 33.622871399 34.622871399 -6.500000000 33.622871399 34.981101990 -6.500000000 32.981101990 -35.981101990 -5.500000000 32.981101990 -35.622871399 -5.500000000 33.622871399 34.622871399 -5.500000000 33.622871399 34.981101990 -5.500000000 32.981101990 -35.981101990 -4.500000000 32.981101990 -35.622871399 -4.500000000 33.622871399 34.622871399 -4.500000000 33.622871399 34.981101990 -4.500000000 32.981101990 -35.981101990 -3.500000000 32.981101990 -35.622871399 -3.500000000 33.622871399 34.622871399 -3.500000000 33.622871399 34.981101990 -3.500000000 32.981101990 -35.981101990 -2.500000000 32.981101990 -35.622871399 -2.500000000 33.622871399 34.622871399 -2.500000000 33.622871399 34.981101990 -2.500000000 32.981101990 -35.981101990 -1.500000000 32.981101990 -35.622871399 -1.500000000 33.622871399 34.622871399 -1.500000000 33.622871399 34.981101990 -1.500000000 32.981101990 -35.981101990 -0.500000000 32.981101990 -35.622871399 -0.500000000 33.622871399 34.622871399 -0.500000000 33.622871399 34.981101990 -0.500000000 
32.981101990 -35.981101990 0.500000000 32.981101990 -35.622871399 0.500000000 33.622871399 34.622871399 0.500000000 33.622871399 34.981101990 0.500000000 32.981101990 -35.981101990 1.500000000 32.981101990 -35.622871399 1.500000000 33.622871399 34.622871399 1.500000000 33.622871399 34.981101990 1.500000000 32.981101990 -35.981101990 2.500000000 32.981101990 -35.622871399 2.500000000 33.622871399 34.622871399 2.500000000 33.622871399 34.981101990 2.500000000 32.981101990 -35.981101990 3.500000000 32.981101990 -35.622871399 3.500000000 33.622871399 34.622871399 3.500000000 33.622871399 34.981101990 3.500000000 32.981101990 -35.981101990 4.500000000 32.981101990 -35.622871399 4.500000000 33.622871399 34.622871399 4.500000000 33.622871399 34.981101990 4.500000000 32.981101990 -35.981101990 5.500000000 32.981101990 -35.622871399 5.500000000 33.622871399 34.622871399 5.500000000 33.622871399 34.981101990 5.500000000 32.981101990 -35.981101990 6.500000000 32.981101990 -35.622871399 6.500000000 33.622871399 34.622871399 6.500000000 33.622871399 34.981101990 6.500000000 32.981101990 -35.981101990 7.500000000 32.981101990 -35.622871399 7.500000000 33.622871399 34.622871399 7.500000000 33.622871399 34.981101990 7.500000000 32.981101990 -35.981101990 8.500000000 32.981101990 -35.622871399 8.500000000 33.622871399 34.622871399 8.500000000 33.622871399 34.981101990 8.500000000 32.981101990 -35.981101990 9.500000000 32.981101990 -35.622871399 9.500000000 33.622871399 34.622871399 9.500000000 33.622871399 34.981101990 9.500000000 32.981101990 -35.981101990 10.500000000 32.981101990 -35.622871399 10.500000000 33.622871399 34.622871399 10.500000000 33.622871399 34.981101990 10.500000000 32.981101990 -35.981101990 11.500000000 32.981101990 -35.622871399 11.500000000 33.622871399 34.622871399 11.500000000 33.622871399 34.981101990 11.500000000 32.981101990 -35.981101990 12.500000000 32.981101990 -35.622871399 12.500000000 33.622871399 34.622871399 12.500000000 33.622871399 
34.981101990 12.500000000 32.981101990 -35.981101990 13.500000000 32.981101990 -35.622871399 13.500000000 33.622871399 34.622871399 13.500000000 33.622871399 34.981101990 13.500000000 32.981101990 -35.981101990 14.500000000 32.981101990 -35.622871399 14.500000000 33.622871399 34.622871399 14.500000000 33.622871399 34.981101990 14.500000000 32.981101990 -35.981101990 15.500000000 32.981101990 -35.622871399 15.500000000 33.622871399 34.622871399 15.500000000 33.622871399 34.981101990 15.500000000 32.981101990 -35.981101990 16.500000000 32.981101990 -35.622871399 16.500000000 33.622871399 34.622871399 16.500000000 33.622871399 34.981101990 16.500000000 32.981101990 -35.981101990 17.500000000 32.981101990 -35.622871399 17.500000000 33.622871399 34.622871399 17.500000000 33.622871399 34.981101990 17.500000000 32.981101990 -35.981101990 18.500000000 32.981101990 -35.622871399 18.500000000 33.622871399 34.622871399 18.500000000 33.622871399 34.981101990 18.500000000 32.981101990 -35.981101990 19.500000000 32.981101990 -35.622871399 19.500000000 33.622871399 34.622871399 19.500000000 33.622871399 34.981101990 19.500000000 32.981101990 -35.981101990 20.500000000 32.981101990 -35.622871399 20.500000000 33.622871399 34.622871399 20.500000000 33.622871399 34.981101990 20.500000000 32.981101990 -35.981101990 21.500000000 32.981101990 -35.622871399 21.500000000 33.622871399 34.622871399 21.500000000 33.622871399 34.981101990 21.500000000 32.981101990 -35.981101990 22.500000000 32.981101990 -35.622871399 22.500000000 33.622871399 34.622871399 22.500000000 33.622871399 34.981101990 22.500000000 32.981101990 -35.981101990 23.499998093 32.981101990 -35.622871399 23.500000000 33.622871399 34.622871399 23.500000000 33.622871399 34.981101990 23.499998093 32.981101990 -35.981086731 24.499979019 32.981086731 -35.622856140 24.499986649 33.622844696 34.622856140 24.499986649 33.622852325 34.981086731 24.499979019 32.981090546 -35.980957031 25.499826431 32.980968475 -35.622734070 
25.499868393 33.622692108 34.622734070 25.499868393 33.622692108 34.980957031 25.499826431 32.980976105 -35.980201721 26.498950958 32.980335236 -35.621978760 26.499156952 33.621799469 34.621978760 26.499156952 33.621799469 34.980201721 26.498950958 32.980335236 -35.976860046 27.495168686 32.977615356 -35.618598938 27.496026993 33.617927551 34.618598938 27.496026993 33.617927551 34.976860046 27.495168686 32.977615356 -35.965766907 28.481937408 32.968864441 -35.607208252 28.485630035 33.604705811 34.607208252 28.485630035 33.604705811 34.965759277 28.481937408 32.968864441 -35.937091827 29.442840576 32.946807861 -35.577739716 29.459178925 33.567749023 34.577739716 29.459178925 33.567745209 34.937091827 29.442840576 32.946807861 -35.879737854 30.336603165 32.903373718 -35.518615723 30.410833359 33.477191925 34.518619537 30.410833359 33.477184296 34.879737854 30.336603165 32.903377533 -35.797355652 31.035345078 32.840930939 -35.428535461 31.380737305 33.260547638 34.428535461 31.380737305 33.260547638 34.797355652 31.035345078 32.840927124 -35.057430267 32.316951752 33.231872559 -34.565628052 32.672847748 33.565631866 33.565628052 32.672847748 33.565628052 34.057430267 32.316951752 33.231868744 -34.842590332 32.980758667 32.980762482 -34.297924042 33.297924042 33.297924042 -33.672851562 33.565624237 33.565628052 32.672851562 33.565624237 33.565628052 33.297924042 33.297924042 33.297924042 33.842590332 32.980758667 32.980762482 -33.980758667 33.842590332 32.980762482 -33.316947937 34.057430267 33.231872559 -32.380737305 34.428535461 33.260543823 -31.410833359 34.518615723 33.477188110 -30.459178925 34.577739716 33.567745209 -29.485630035 34.607204437 33.604709625 -28.496026993 34.618602753 33.617927551 -27.499156952 34.621982574 33.621795654 -26.499868393 34.622737885 33.622692108 -25.499986649 34.622856140 33.622844696 -24.500000000 34.622871399 33.622871399 -23.500000000 34.622871399 33.622871399 -22.500000000 34.622871399 33.622871399 -21.500000000 34.622871399 
33.622871399 -20.500000000 34.622871399 33.622871399 -19.500000000 34.622871399 33.622871399 -18.500000000 34.622871399 33.622871399 -17.500000000 34.622871399 33.622871399 -16.500000000 34.622871399 33.622871399 -15.500000000 34.622871399 33.622871399 -14.500000000 34.622871399 33.622871399 -13.500000000 34.622871399 33.622871399 -12.500000000 34.622871399 33.622871399 -11.500000000 34.622871399 33.622871399 -10.500000000 34.622871399 33.622871399 -9.500000000 34.622871399 33.622871399 -8.500000000 34.622871399 33.622871399 -7.500000000 34.622871399 33.622871399 -6.500000000 34.622871399 33.622871399 -5.500000000 34.622871399 33.622871399 -4.500000000 34.622871399 33.622871399 -3.500000000 34.622871399 33.622871399 -2.500000000 34.622871399 33.622871399 -1.500000000 34.622871399 33.622871399 -0.500000000 34.622871399 33.622871399 0.500000000 34.622871399 33.622871399 1.500000000 34.622871399 33.622871399 2.500000000 34.622871399 33.622871399 3.500000000 34.622871399 33.622871399 4.500000000 34.622871399 33.622871399 5.500000000 34.622871399 33.622871399 6.500000000 34.622871399 33.622871399 7.500000000 34.622871399 33.622871399 8.500000000 34.622871399 33.622871399 9.500000000 34.622871399 33.622871399 10.500000000 34.622871399 33.622871399 11.500000000 34.622871399 33.622871399 12.500000000 34.622871399 33.622871399 13.500000000 34.622871399 33.622871399 14.500000000 34.622871399 33.622871399 15.500000000 34.622871399 33.622871399 16.500000000 34.622871399 33.622871399 17.500000000 34.622871399 33.622871399 18.500000000 34.622871399 33.622871399 19.500000000 34.622871399 33.622871399 20.500000000 34.622871399 33.622871399 21.500000000 34.622871399 33.622871399 22.500000000 34.622871399 33.622871399 23.500000000 34.622871399 33.622871399 24.499986649 34.622856140 33.622844696 25.499868393 34.622734070 33.622692108 26.499156952 34.621978760 33.621799469 27.496026993 34.618598938 33.617927551 28.485630035 34.607208252 33.604705811 29.459178925 34.577739716 
33.567749023 30.410833359 34.518615723 33.477191925 31.380737305 34.428535461 33.260547638 32.316947937 34.057430267 33.231872559 32.980758667 33.842590332 32.980762482 -32.035343170 34.797355652 32.840934753 -31.336603165 34.879737854 32.903373718 -30.442844391 34.937095642 32.946804047 -29.481937408 34.965759277 32.968864441 -28.495168686 34.976860046 32.977615356 -27.498950958 34.980201721 32.980331421 -26.499826431 34.980957031 32.980968475 -25.499979019 34.981086731 32.981086731 -24.499998093 34.981101990 32.981101990 -23.500000000 34.981101990 32.981101990 -22.500000000 34.981101990 32.981101990 -21.500000000 34.981101990 32.981101990 -20.500000000 34.981101990 32.981101990 -19.500000000 34.981101990 32.981101990 -18.500000000 34.981101990 32.981101990 -17.500000000 34.981101990 32.981101990 -16.500000000 34.981101990 32.981101990 -15.500000000 34.981101990 32.981101990 -14.500000000 34.981101990 32.981101990 -13.500000000 34.981101990 32.981101990 -12.500000000 34.981101990 32.981101990 -11.500000000 34.981101990 32.981101990 -10.500000000 34.981101990 32.981101990 -9.500000000 34.981101990 32.981101990 -8.500000000 34.981101990 32.981101990 -7.500000000 34.981101990 32.981101990 -6.500000000 34.981101990 32.981101990 -5.500000000 34.981101990 32.981101990 -4.500000000 34.981101990 32.981101990 -3.500000000 34.981101990 32.981101990 -2.500000000 34.981101990 32.981101990 -1.500000000 34.981101990 32.981101990 -0.500000000 34.981101990 32.981101990 0.500000000 34.981101990 32.981101990 1.500000000 34.981101990 32.981101990 2.500000000 34.981101990 32.981101990 3.500000000 34.981101990 32.981101990 4.500000000 34.981101990 32.981101990 5.500000000 34.981101990 32.981101990 6.500000000 34.981101990 32.981101990 7.500000000 34.981101990 32.981101990 8.500000000 34.981101990 32.981101990 9.500000000 34.981101990 32.981101990 10.500000000 34.981101990 32.981101990 11.500000000 34.981101990 32.981101990 12.500000000 34.981101990 32.981101990 13.500000000 
34.981101990 32.981101990 14.500000000 34.981101990 32.981101990 15.500000000 34.981101990 32.981101990 16.500000000 34.981101990 32.981101990 17.500000000 34.981101990 32.981101990 18.500000000 34.981101990 32.981101990 19.500000000 34.981101990 32.981101990 20.500000000 34.981101990 32.981101990 21.500000000 34.981101990 32.981101990 22.500000000 34.981101990 32.981101990 23.499998093 34.981101990 32.981101990 24.499979019 34.981086731 32.981086731 25.499826431 34.980957031 32.980968475 26.498950958 34.980201721 32.980335236 27.495168686 34.976860046 32.977615356 28.481937408 34.965766907 32.968864441 29.442840576 34.937091827 32.946807861 30.336603165 34.879737854 32.903373718 31.035345078 34.797359467 32.840934753 -33.006500244 -34.772159576 33.772163391 -32.313602448 -34.912063599 33.912063599 -31.413049698 -35.040893555 34.040893555 -30.463253021 -35.115642548 34.115642548 -29.487289429 -35.150253296 34.150257111 -28.496557236 -35.163162231 34.163158417 -27.499298096 -35.166961670 34.166954041 -26.499902725 -35.167808533 34.167808533 -25.499990463 -35.167949677 34.167949677 -24.500000000 -35.167968750 34.167968750 -23.500000000 -35.167968750 34.167968750 -22.500000000 -35.167968750 34.167968750 -21.500000000 -35.167968750 34.167968750 -20.500000000 -35.167968750 34.167968750 -19.500000000 -35.167968750 34.167968750 -18.500000000 -35.167968750 34.167968750 -17.500000000 -35.167968750 34.167968750 -16.500000000 -35.167968750 34.167968750 -15.500000000 -35.167968750 34.167968750 -14.500000000 -35.167968750 34.167968750 -13.500000000 -35.167968750 34.167968750 -12.500000000 -35.167968750 34.167968750 -11.500000000 -35.167968750 34.167968750 -10.500000000 -35.167968750 34.167968750 -9.500000000 -35.167968750 34.167968750 -8.500000000 -35.167968750 34.167968750 -7.500000000 -35.167968750 34.167968750 -6.500000000 -35.167968750 34.167968750 -5.500000000 -35.167968750 34.167968750 -4.500000000 -35.167968750 34.167968750 -3.500000000 -35.167968750 34.167968750 
-2.500000000 -35.167968750 34.167968750 -1.500000000 -35.167968750 34.167968750 -0.500000000 -35.167968750 34.167968750 0.500000000 -35.167968750 34.167968750 1.500000000 -35.167968750 34.167968750 2.500000000 -35.167968750 34.167968750 3.500000000 -35.167968750 34.167968750 4.500000000 -35.167968750 34.167968750 5.500000000 -35.167968750 34.167968750 6.500000000 -35.167968750 34.167968750 7.500000000 -35.167968750 34.167968750 8.500000000 -35.167968750 34.167968750 9.500000000 -35.167968750 34.167968750 10.500000000 -35.167968750 34.167968750 11.500000000 -35.167968750 34.167968750 12.500000000 -35.167968750 34.167968750 13.500000000 -35.167968750 34.167968750 14.500000000 -35.167968750 34.167968750 15.500000000 -35.167968750 34.167968750 16.500000000 -35.167968750 34.167968750 17.500000000 -35.167968750 34.167968750 18.500000000 -35.167968750 34.167968750 19.500000000 -35.167968750 34.167968750 20.500000000 -35.167968750 34.167968750 21.500000000 -35.167968750 34.167968750 22.500000000 -35.167968750 34.167968750 23.500000000 -35.167968750 34.167968750 24.499994278 -35.167949677 34.167949677 25.499898911 -35.167804718 34.167808533 26.499298096 -35.166954041 34.166954041 27.496557236 -35.163154602 34.163158417 28.487289429 -35.150249481 34.150249481 29.463253021 -35.115642548 34.115642548 30.413049698 -35.040893555 34.040893555 31.313602448 -34.912059784 33.912063599 32.006500244 -34.772159576 33.772163391 -33.980758667 -33.980758667 33.842590332 -33.316947937 -34.231868744 34.057434082 -32.380737305 -34.260547638 34.428531647 -31.410831451 -34.477184296 34.518615723 -30.459177017 -34.567749023 34.577739716 -29.485630035 -34.604709625 34.607204437 -28.496026993 -34.617927551 34.618598938 -27.499160767 -34.621795654 34.621982574 -26.499868393 -34.622692108 34.622734070 -25.499986649 -34.622844696 34.622856140 -24.500000000 -34.622871399 34.622871399 -23.500000000 -34.622871399 34.622871399 -22.500000000 -34.622871399 34.622871399 -21.500000000 -34.622871399 
34.622871399 -20.500000000 -34.622871399 34.622871399 -19.500000000 -34.622871399 34.622871399 -18.500000000 -34.622871399 34.622871399 -17.500000000 -34.622871399 34.622871399 -16.500000000 -34.622871399 34.622871399 -15.500000000 -34.622871399 34.622871399 -14.500000000 -34.622871399 34.622871399 -13.500000000 -34.622871399 34.622871399 -12.500000000 -34.622871399 34.622871399 -11.500000000 -34.622871399 34.622871399 -10.500000000 -34.622871399 34.622871399 -9.500000000 -34.622871399 34.622871399 -8.500000000 -34.622871399 34.622871399 -7.500000000 -34.622871399 34.622871399 -6.500000000 -34.622871399 34.622871399 -5.500000000 -34.622871399 34.622871399 -4.500000000 -34.622871399 34.622871399 -3.500000000 -34.622871399 34.622871399 -2.500000000 -34.622871399 34.622871399 -1.500000000 -34.622871399 34.622871399 -0.500000000 -34.622871399 34.622871399 0.500000000 -34.622871399 34.622871399 1.500000000 -34.622871399 34.622871399 2.500000000 -34.622871399 34.622871399 3.500000000 -34.622871399 34.622871399 4.500000000 -34.622871399 34.622871399 5.500000000 -34.622871399 34.622871399 6.500000000 -34.622871399 34.622871399 7.500000000 -34.622871399 34.622871399 8.500000000 -34.622871399 34.622871399 9.500000000 -34.622871399 34.622871399 10.500000000 -34.622871399 34.622871399 11.500000000 -34.622871399 34.622871399 12.500000000 -34.622871399 34.622871399 13.500000000 -34.622871399 34.622871399 14.500000000 -34.622871399 34.622871399 15.500000000 -34.622871399 34.622871399 16.500000000 -34.622871399 34.622871399 17.500000000 -34.622871399 34.622871399 18.500000000 -34.622871399 34.622871399 19.500000000 -34.622871399 34.622871399 20.500000000 -34.622871399 34.622871399 21.500000000 -34.622871399 34.622871399 22.500000000 -34.622871399 34.622871399 23.500000000 -34.622871399 34.622871399 24.499986649 -34.622844696 34.622856140 25.499868393 -34.622692108 34.622734070 26.499156952 -34.621795654 34.621978760 27.496026993 -34.617927551 34.618598938 28.485631943 
-34.604705811 34.607204437 29.459178925 -34.567741394 34.577739716 30.410831451 -34.477191925 34.518615723 31.380739212 -34.260543823 34.428535461 32.316947937 -34.231868744 34.057430267 32.980758667 -33.980758667 33.842590332 -34.772159576 -33.006500244 33.772163391 -34.231868744 -33.316947937 34.057434082 -33.424438477 -33.424438477 34.498546600 -32.736049652 -33.678352356 34.670326233 31.736051559 -33.678352356 34.670322418 32.424438477 -33.424442291 34.498542786 33.231868744 -33.316947937 34.057430267 33.772159576 -33.006500244 33.772155762 -34.912063599 -32.313598633 33.912063599 -34.260547638 -32.380737305 34.428535461 -33.678352356 -32.736053467 34.670326233 32.678352356 -32.736053467 34.670322418 33.260543823 -32.380741119 34.428531647 33.912059784 -32.313602448 33.912059784 -35.040893555 -31.413049698 34.040893555 -34.477191925 -31.410831451 34.518615723 33.477191925 -31.410831451 34.518611908 34.040893555 -31.413049698 34.040893555 -35.115642548 -30.463253021 34.115642548 -34.567745209 -30.459178925 34.577739716 33.567745209 -30.459178925 34.577739716 34.115642548 -30.463253021 34.115638733 -35.150253296 -29.487289429 34.150249481 -34.604709625 -29.485630035 34.607204437 33.604709625 -29.485630035 34.607208252 34.150253296 -29.487293243 34.150257111 -35.163154602 -28.496557236 34.163158417 -34.617927551 -28.496026993 34.618598938 33.617927551 -28.496026993 34.618598938 34.163154602 -28.496557236 34.163158417 -35.166954041 -27.499298096 34.166954041 -34.621795654 -27.499156952 34.621982574 33.621795654 -27.499156952 34.621982574 34.166957855 -27.499298096 34.166961670 -35.167808533 -26.499898911 34.167808533 -34.622692108 -26.499868393 34.622734070 33.622692108 -26.499868393 34.622734070 34.167808533 -26.499902725 34.167808533 -35.167949677 -25.499990463 34.167949677 -34.622844696 -25.499986649 34.622856140 33.622844696 -25.499986649 34.622852325 34.167949677 -25.499990463 34.167945862 -35.167961121 -24.500000000 34.167961121 -34.622863770 -24.500000000 
34.622871399 33.622871399 -24.500000000 34.622871399 34.167968750 -24.500000000 34.167968750 -35.167961121 -23.500000000 34.167961121 -34.622871399 -23.500000000 34.622871399 33.622871399 -23.500000000 34.622871399 34.167968750 -23.500000000 34.167968750 -35.167961121 -22.500000000 34.167961121 -34.622871399 -22.500000000 34.622871399 33.622871399 -22.500000000 34.622871399 34.167968750 -22.500000000 34.167968750 -35.167961121 -21.500000000 34.167961121 -34.622871399 -21.500000000 34.622871399 33.622871399 -21.500000000 34.622871399 34.167968750 -21.500000000 34.167968750 -35.167961121 -20.500000000 34.167961121 -34.622871399 -20.500000000 34.622871399 33.622871399 -20.500000000 34.622871399 34.167968750 -20.500000000 34.167968750 -35.167961121 -19.500000000 34.167961121 -34.622871399 -19.500000000 34.622871399 33.622871399 -19.500000000 34.622871399 34.167968750 -19.500000000 34.167968750 -35.167961121 -18.500000000 34.167961121 -34.622871399 -18.500000000 34.622871399 33.622871399 -18.500000000 34.622871399 34.167968750 -18.500000000 34.167968750 -35.167961121 -17.500000000 34.167961121 -34.622871399 -17.500000000 34.622871399 33.622871399 -17.500000000 34.622871399 34.167968750 -17.500000000 34.167968750 -35.167961121 -16.500000000 34.167961121 -34.622871399 -16.500000000 34.622871399 33.622871399 -16.500000000 34.622871399 34.167968750 -16.500000000 34.167968750 -35.167961121 -15.500000000 34.167961121 -34.622871399 -15.500000000 34.622871399 33.622871399 -15.500000000 34.622871399 34.167968750 -15.500000000 34.167968750 -35.167961121 -14.500000000 34.167961121 -34.622871399 -14.500000000 34.622871399 33.622871399 -14.500000000 34.622871399 34.167968750 -14.500000000 34.167968750 -35.167961121 -13.500000000 34.167961121 -34.622871399 -13.500000000 34.622871399 33.622871399 -13.500000000 34.622871399 34.167968750 -13.500000000 34.167968750 -35.167961121 -12.500000000 34.167961121 -34.622871399 -12.500000000 34.622871399 33.622871399 -12.500000000 34.622871399 
34.167968750 -12.500000000 34.167968750 -35.167961121 -11.500000000 34.167961121 -34.622871399 -11.500000000 34.622871399 33.622871399 -11.500000000 34.622871399 34.167968750 -11.500000000 34.167968750 -35.167961121 -10.500000000 34.167961121 -34.622871399 -10.500000000 34.622871399 33.622871399 -10.500000000 34.622871399 34.167968750 -10.500000000 34.167968750 -35.167961121 -9.500000000 34.167961121 -34.622871399 -9.500000000 34.622871399 33.622871399 -9.500000000 34.622871399 34.167968750 -9.500000000 34.167968750 -35.167961121 -8.500000000 34.167961121 -34.622871399 -8.500000000 34.622871399 33.622871399 -8.500000000 34.622871399 34.167968750 -8.500000000 34.167968750 -35.167961121 -7.500000000 34.167961121 -34.622871399 -7.500000000 34.622871399 33.622871399 -7.500000000 34.622871399 34.167968750 -7.500000000 34.167968750 -35.167961121 -6.500000000 34.167961121 -34.622871399 -6.500000000 34.622871399 33.622871399 -6.500000000 34.622871399 34.167968750 -6.500000000 34.167968750 -35.167961121 -5.500000000 34.167961121 -34.622871399 -5.500000000 34.622871399 33.622871399 -5.500000000 34.622871399 34.167968750 -5.500000000 34.167968750 -35.167961121 -4.500000000 34.167961121 -34.622871399 -4.500000000 34.622871399 33.622871399 -4.500000000 34.622871399 34.167968750 -4.500000000 34.167968750 -35.167961121 -3.500000000 34.167961121 -34.622871399 -3.500000000 34.622871399 33.622871399 -3.500000000 34.622871399 34.167968750 -3.500000000 34.167968750 -35.167961121 -2.500000000 34.167961121 -34.622871399 -2.500000000 34.622871399 33.622871399 -2.500000000 34.622871399 34.167968750 -2.500000000 34.167968750 -35.167961121 -1.500000000 34.167961121 -34.622871399 -1.500000000 34.622871399 33.622871399 -1.500000000 34.622871399 34.167968750 -1.500000000 34.167968750 -35.167961121 -0.500000000 34.167961121 -34.622871399 -0.500000000 34.622871399 33.622871399 -0.500000000 34.622871399 34.167968750 -0.500000000 34.167968750 -35.167961121 0.500000000 34.167961121 -34.622871399 
0.500000000 34.622871399 33.622871399 0.500000000 34.622871399 34.167968750 0.500000000 34.167968750 -35.167961121 1.500000000 34.167961121 -34.622871399 1.500000000 34.622871399 33.622871399 1.500000000 34.622871399 34.167968750 1.500000000 34.167968750 -35.167961121 2.500000000 34.167961121 -34.622871399 2.500000000 34.622871399 33.622871399 2.500000000 34.622871399 34.167968750 2.500000000 34.167968750 -35.167961121 3.500000000 34.167961121 -34.622871399 3.500000000 34.622871399 33.622871399 3.500000000 34.622871399 34.167968750 3.500000000 34.167968750 -35.167961121 4.500000000 34.167961121 -34.622871399 4.500000000 34.622871399 33.622871399 4.500000000 34.622871399 34.167968750 4.500000000 34.167968750 -35.167961121 5.500000000 34.167961121 -34.622871399 5.500000000 34.622871399 33.622871399 5.500000000 34.622871399 34.167968750 5.500000000 34.167968750 -35.167961121 6.500000000 34.167961121 -34.622871399 6.500000000 34.622871399 33.622871399 6.500000000 34.622871399 34.167968750 6.500000000 34.167968750 -35.167961121 7.500000000 34.167961121 -34.622871399 7.500000000 34.622871399 33.622871399 7.500000000 34.622871399 34.167968750 7.500000000 34.167968750 -35.167961121 8.500000000 34.167961121 -34.622871399 8.500000000 34.622871399 33.622871399 8.500000000 34.622871399 34.167968750 8.500000000 34.167968750 -35.167961121 9.500000000 34.167961121 -34.622871399 9.500000000 34.622871399 33.622871399 9.500000000 34.622871399 34.167968750 9.500000000 34.167968750 -35.167961121 10.500000000 34.167961121 -34.622871399 10.500000000 34.622871399 33.622871399 10.500000000 34.622871399 34.167968750 10.500000000 34.167968750 -35.167961121 11.500000000 34.167961121 -34.622871399 11.500000000 34.622871399 33.622871399 11.500000000 34.622871399 34.167968750 11.500000000 34.167968750 -35.167961121 12.500000000 34.167961121 -34.622871399 12.500000000 34.622871399 33.622871399 12.500000000 34.622871399 34.167968750 12.500000000 34.167968750 -35.167961121 13.500000000 
34.167961121 -34.622871399 13.500000000 34.622871399 33.622871399 13.500000000 34.622871399 34.167968750 13.500000000 34.167968750 -35.167961121 14.500000000 34.167961121 -34.622871399 14.500000000 34.622871399 33.622871399 14.500000000 34.622871399 34.167968750 14.500000000 34.167968750 -35.167961121 15.500000000 34.167961121 -34.622871399 15.500000000 34.622871399 33.622871399 15.500000000 34.622871399 34.167968750 15.500000000 34.167968750 -35.167961121 16.500000000 34.167961121 -34.622871399 16.500000000 34.622871399 33.622871399 16.500000000 34.622871399 34.167968750 16.500000000 34.167968750 -35.167961121 17.500000000 34.167961121 -34.622871399 17.500000000 34.622871399 33.622871399 17.500000000 34.622871399 34.167968750 17.500000000 34.167968750 -35.167961121 18.500000000 34.167961121 -34.622871399 18.500000000 34.622871399 33.622871399 18.500000000 34.622871399 34.167968750 18.500000000 34.167968750 -35.167961121 19.500000000 34.167961121 -34.622871399 19.500000000 34.622871399 33.622871399 19.500000000 34.622871399 34.167968750 19.500000000 34.167968750 -35.167961121 20.500000000 34.167961121 -34.622871399 20.500000000 34.622871399 33.622871399 20.500000000 34.622871399 34.167968750 20.500000000 34.167968750 -35.167961121 21.500000000 34.167961121 -34.622871399 21.500000000 34.622871399 33.622871399 21.500000000 34.622871399 34.167968750 21.500000000 34.167968750 -35.167961121 22.500000000 34.167961121 -34.622871399 22.500000000 34.622871399 33.622871399 22.500000000 34.622871399 34.167968750 22.500000000 34.167968750 -35.167961121 23.500000000 34.167961121 -34.622867584 23.500000000 34.622867584 33.622867584 23.500000000 34.622867584 34.167968750 23.500000000 34.167968750 -35.167949677 24.499994278 34.167949677 -34.622844696 24.499988556 34.622856140 33.622844696 24.499988556 34.622856140 34.167949677 24.499994278 34.167949677 -35.167804718 25.499900818 34.167808533 -34.622692108 25.499868393 34.622734070 33.622692108 25.499868393 34.622734070 
34.167804718 25.499900818 34.167808533 -35.166954041 26.499298096 34.166961670 -34.621799469 26.499156952 34.621982574 33.621799469 26.499160767 34.621982574 34.166954041 26.499298096 34.166954041 -35.163158417 27.496557236 34.163154602 -34.617927551 27.496026993 34.618598938 33.617927551 27.496026993 34.618598938 34.163158417 27.496557236 34.163158417 -35.150249481 28.487293243 34.150249481 -34.604705811 28.485630035 34.607204437 33.604705811 28.485631943 34.607204437 34.150249481 28.487289429 34.150249481 -35.115642548 29.463253021 34.115638733 -34.567741394 29.459178925 34.577739716 33.567741394 29.459178925 34.577739716 34.115638733 29.463256836 34.115642548 -35.040893555 30.413049698 34.040893555 -34.477191925 30.410831451 34.518619537 33.477191925 30.410833359 34.518615723 34.040893555 30.413049698 34.040893555 -34.912063599 31.313602448 33.912063599 -34.260543823 31.380737305 34.428535461 -33.678352356 31.736051559 34.670322418 32.678352356 31.736051559 34.670322418 33.260543823 31.380739212 34.428535461 33.912059784 31.313604355 33.912059784 -34.772159576 32.006500244 33.772163391 -34.231868744 32.316947937 34.057430267 -33.424438477 32.424438477 34.498542786 -32.736053467 32.678352356 34.670322418 31.736051559 32.678352356 34.670322418 32.424438477 32.424434662 34.498542786 33.231868744 32.316951752 34.057430267 33.772159576 32.006500244 33.772159576 -33.980758667 32.980758667 33.842590332 -33.316947937 33.231868744 34.057430267 -32.380737305 33.260543823 34.428539276 -31.410833359 33.477191925 34.518619537 -30.459178925 33.567745209 34.577747345 -29.485630035 33.604709625 34.607208252 -28.496026993 33.617927551 34.618598938 -27.499156952 33.621795654 34.621982574 -26.499868393 33.622692108 34.622734070 -25.499986649 33.622844696 34.622856140 -24.500000000 33.622871399 34.622871399 -23.500000000 33.622871399 34.622871399 -22.500000000 33.622871399 34.622871399 -21.500000000 33.622871399 34.622871399 -20.500000000 33.622871399 34.622871399 -19.500000000 
33.622871399 34.622871399 -18.500000000 33.622871399 34.622871399 -17.500000000 33.622871399 34.622871399 -16.500000000 33.622871399 34.622871399 -15.500000000 33.622871399 34.622871399 -14.500000000 33.622871399 34.622871399 -13.500000000 33.622871399 34.622871399 -12.500000000 33.622871399 34.622871399 -11.500000000 33.622871399 34.622871399 -10.500000000 33.622871399 34.622871399 -9.500000000 33.622871399 34.622871399 -8.500000000 33.622871399 34.622871399 -7.500000000 33.622871399 34.622871399 -6.500000000 33.622871399 34.622871399 -5.500000000 33.622871399 34.622871399 -4.500000000 33.622871399 34.622871399 -3.500000000 33.622871399 34.622871399 -2.500000000 33.622871399 34.622871399 -1.500000000 33.622871399 34.622871399 -0.500000000 33.622871399 34.622871399 0.500000000 33.622871399 34.622871399 1.500000000 33.622871399 34.622871399 2.500000000 33.622871399 34.622871399 3.500000000 33.622871399 34.622871399 4.500000000 33.622871399 34.622871399 5.500000000 33.622871399 34.622871399 6.500000000 33.622871399 34.622871399 7.500000000 33.622871399 34.622871399 8.500000000 33.622871399 34.622871399 9.500000000 33.622871399 34.622871399 10.500000000 33.622871399 34.622871399 11.500000000 33.622871399 34.622871399 12.500000000 33.622871399 34.622871399 13.500000000 33.622871399 34.622871399 14.500000000 33.622871399 34.622871399 15.500000000 33.622871399 34.622871399 16.500000000 33.622871399 34.622871399 17.500000000 33.622871399 34.622871399 18.500000000 33.622871399 34.622871399 19.500000000 33.622871399 34.622871399 20.500000000 33.622871399 34.622871399 21.500000000 33.622871399 34.622871399 22.500000000 33.622871399 34.622871399 23.500000000 33.622867584 34.622867584 24.499988556 33.622844696 34.622856140 25.499868393 33.622692108 34.622734070 26.499160767 33.621799469 34.621982574 27.496026993 33.617927551 34.618598938 28.485631943 33.604705811 34.607204437 29.459178925 33.567749023 34.577739716 30.410833359 33.477184296 34.518615723 31.380737305 
33.260543823 34.428535461 32.316947937 33.231868744 34.057430267 32.980758667 32.980758667 33.842590332 -33.006500244 33.772159576 33.772163391 -32.313598633 33.912063599 33.912063599 -31.413049698 34.040893555 34.040893555 -30.463253021 34.115642548 34.115638733 -29.487289429 34.150253296 34.150249481 -28.496557236 34.163162231 34.163158417 -27.499298096 34.166961670 34.166961670 -26.499898911 34.167808533 34.167808533 -25.499990463 34.167949677 34.167949677 -24.500000000 34.167961121 34.167961121 -23.500000000 34.167961121 34.167961121 -22.500000000 34.167961121 34.167961121 -21.500000000 34.167961121 34.167961121 -20.500000000 34.167961121 34.167961121 -19.500000000 34.167961121 34.167961121 -18.500000000 34.167961121 34.167961121 -17.500000000 34.167961121 34.167961121 -16.500000000 34.167961121 34.167961121 -15.500000000 34.167961121 34.167961121 -14.500000000 34.167961121 34.167961121 -13.500000000 34.167961121 34.167961121 -12.500000000 34.167961121 34.167961121 -11.500000000 34.167961121 34.167961121 -10.500000000 34.167961121 34.167961121 -9.500000000 34.167961121 34.167961121 -8.500000000 34.167961121 34.167961121 -7.500000000 34.167961121 34.167961121 -6.500000000 34.167961121 34.167961121 -5.500000000 34.167961121 34.167961121 -4.500000000 34.167961121 34.167961121 -3.500000000 34.167961121 34.167961121 -2.500000000 34.167961121 34.167961121 -1.500000000 34.167961121 34.167961121 -0.500000000 34.167961121 34.167961121 0.500000000 34.167961121 34.167961121 1.500000000 34.167961121 34.167961121 2.500000000 34.167961121 34.167961121 3.500000000 34.167961121 34.167961121 4.500000000 34.167961121 34.167961121 5.500000000 34.167961121 34.167961121 6.500000000 34.167961121 34.167961121 7.500000000 34.167961121 34.167961121 8.500000000 34.167961121 34.167961121 9.500000000 34.167961121 34.167961121 10.500000000 34.167961121 34.167961121 11.500000000 34.167961121 34.167961121 12.500000000 34.167961121 34.167961121 13.500000000 34.167961121 34.167961121 
14.500000000 34.167961121 34.167961121 15.500000000 34.167961121 34.167961121 16.500000000 34.167961121 34.167961121 17.500000000 34.167961121 34.167961121 18.500000000 34.167961121 34.167961121 19.500000000 34.167961121 34.167961121 20.500000000 34.167961121 34.167961121 21.500000000 34.167961121 34.167961121 22.500000000 34.167961121 34.167961121 23.500000000 34.167961121 34.167961121 24.499994278 34.167949677 34.167949677 25.499900818 34.167804718 34.167808533 26.499298096 34.166954041 34.166961670 27.496557236 34.163158417 34.163154602 28.487293243 34.150249481 34.150249481 29.463253021 34.115642548 34.115638733 30.413049698 34.040893555 34.040893555 31.313604355 33.912059784 33.912063599 32.006500244 33.772159576 33.772163391 -32.035343170 -33.840930939 34.797355652 -31.336603165 -33.903373718 34.879734039 -30.442840576 -33.946807861 34.937091827 -29.481933594 -33.968864441 34.965763092 -28.495172501 -33.977615356 34.976860046 -27.498952866 -33.980331421 34.980201721 -26.499826431 -33.980972290 34.980957031 -25.499977112 -33.981090546 34.981086731 -24.499998093 -33.981101990 34.981101990 -23.500000000 -33.981101990 34.981101990 -22.500000000 -33.981101990 34.981101990 -21.500000000 -33.981101990 34.981101990 -20.500000000 -33.981101990 34.981101990 -19.500000000 -33.981101990 34.981101990 -18.500000000 -33.981101990 34.981101990 -17.500000000 -33.981101990 34.981101990 -16.500000000 -33.981101990 34.981101990 -15.500000000 -33.981101990 34.981101990 -14.500000000 -33.981101990 34.981101990 -13.500000000 -33.981101990 34.981101990 -12.500000000 -33.981101990 34.981101990 -11.500000000 -33.981101990 34.981101990 -10.500000000 -33.981101990 34.981101990 -9.500000000 -33.981101990 34.981101990 -8.500000000 -33.981101990 34.981101990 -7.500000000 -33.981101990 34.981101990 -6.500000000 -33.981101990 34.981101990 -5.500000000 -33.981101990 34.981101990 -4.500000000 -33.981101990 34.981101990 -3.500000000 -33.981101990 34.981101990 -2.500000000 -33.981101990 
34.981101990 -1.500000000 -33.981101990 34.981101990 -0.500000000 -33.981101990 34.981101990 0.500000000 -33.981101990 34.981101990 1.500000000 -33.981101990 34.981101990 2.500000000 -33.981101990 34.981101990 3.500000000 -33.981101990 34.981101990 4.500000000 -33.981101990 34.981101990 5.500000000 -33.981101990 34.981101990 6.500000000 -33.981101990 34.981101990 7.500000000 -33.981101990 34.981101990 8.500000000 -33.981101990 34.981101990 9.500000000 -33.981101990 34.981101990 10.500000000 -33.981101990 34.981101990 11.500000000 -33.981101990 34.981101990 12.500000000 -33.981101990 34.981101990 13.500000000 -33.981101990 34.981101990 14.500000000 -33.981101990 34.981101990 15.500000000 -33.981101990 34.981101990 16.500000000 -33.981101990 34.981101990 17.500000000 -33.981101990 34.981101990 18.500000000 -33.981101990 34.981101990 19.500000000 -33.981101990 34.981101990 20.500000000 -33.981101990 34.981101990 21.500000000 -33.981101990 34.981101990 22.500000000 -33.981101990 34.981101990 23.499998093 -33.981101990 34.981101990 24.499979019 -33.981090546 34.981086731 25.499826431 -33.980972290 34.980957031 26.498950958 -33.980331421 34.980201721 27.495172501 -33.977611542 34.976860046 28.481937408 -33.968864441 34.965763092 29.442840576 -33.946807861 34.937091827 30.336599350 -33.903373718 34.879737854 31.035345078 -33.840927124 34.797355652 -33.030689240 -33.030693054 34.846317291 -32.334243774 -33.254272461 34.954723358 -31.405673981 -33.216838837 35.112342834 -30.460596085 -33.220710754 35.185966492 -29.486719131 -33.227874756 35.217510223 -28.496377945 -33.231601715 35.228954315 -27.499221802 -33.232868195 35.232299805 -26.499872208 -33.233169556 35.233074188 -25.499986649 -33.233219147 35.233207703 -24.499998093 -33.233222961 35.233222961 -23.500000000 -33.233222961 35.233222961 -22.500000000 -33.233222961 35.233222961 -21.500000000 -33.233222961 35.233222961 -20.500000000 -33.233222961 35.233222961 -19.500000000 -33.233222961 35.233222961 -18.500000000 
-33.233222961 35.233222961 -17.500000000 -33.233222961 35.233222961 -16.500000000 -33.233222961 35.233222961 -15.500000000 -33.233222961 35.233222961 -14.500000000 -33.233222961 35.233222961 -13.500000000 -33.233222961 35.233222961 -12.500000000 -33.233222961 35.233222961 -11.500000000 -33.233222961 35.233222961 -10.500000000 -33.233222961 35.233222961 -9.500000000 -33.233222961 35.233222961 -8.500000000 -33.233222961 35.233222961 -7.500000000 -33.233222961 35.233222961 -6.500000000 -33.233222961 35.233222961 -5.500000000 -33.233222961 35.233222961 -4.500000000 -33.233222961 35.233222961 -3.500000000 -33.233222961 35.233222961 -2.500000000 -33.233222961 35.233222961 -1.500000000 -33.233222961 35.233222961 -0.500000000 -33.233222961 35.233222961 0.500000000 -33.233222961 35.233222961 1.500000000 -33.233222961 35.233222961 2.500000000 -33.233222961 35.233222961 3.500000000 -33.233222961 35.233222961 4.500000000 -33.233222961 35.233222961 5.500000000 -33.233222961 35.233222961 6.500000000 -33.233222961 35.233222961 7.500000000 -33.233222961 35.233222961 8.500000000 -33.233222961 35.233222961 9.500000000 -33.233222961 35.233222961 10.500000000 -33.233222961 35.233222961 11.500000000 -33.233222961 35.233222961 12.500000000 -33.233222961 35.233222961 13.500000000 -33.233222961 35.233222961 14.500000000 -33.233222961 35.233222961 15.500000000 -33.233222961 35.233222961 16.500000000 -33.233222961 35.233222961 17.500000000 -33.233222961 35.233222961 18.500000000 -33.233222961 35.233222961 19.500000000 -33.233222961 35.233222961 20.500000000 -33.233222961 35.233222961 21.500000000 -33.233222961 35.233222961 22.500000000 -33.233222961 35.233222961 23.500000000 -33.233222961 35.233222961 24.499984741 -33.233219147 35.233207703 25.499874115 -33.233169556 35.233074188 26.499225616 -33.232864380 35.232299805 27.496379852 -33.231597900 35.228954315 28.486719131 -33.227867126 35.217514038 29.460596085 -33.220710754 35.185966492 30.405673981 -33.216835022 35.112342834 31.334243774 
-33.254272461 34.954723358 32.030693054 -33.030693054 34.846313477 -33.840934753 -32.035343170 34.797355652 -33.254272461 -32.334239960 34.954723358 -32.371025085 -32.371025085 35.154670715 -31.438953400 -32.375358582 35.280395508 -30.476007462 -32.379222870 35.344284058 -29.492319107 -32.382308960 35.371490479 -28.498050690 -32.383811951 35.381023407 -27.499622345 -32.384300232 35.383720398 -26.499948502 -32.384403229 35.384311676 -25.499996185 -32.384422302 35.384407043 -24.500000000 -32.384422302 35.384418488 -23.500000000 -32.384422302 35.384418488 -22.500000000 -32.384422302 35.384418488 -21.500000000 -32.384422302 35.384418488 -20.500000000 -32.384422302 35.384418488 -19.500000000 -32.384422302 35.384418488 -18.500000000 -32.384422302 35.384418488 -17.500000000 -32.384422302 35.384418488 -16.500000000 -32.384422302 35.384418488 -15.500000000 -32.384422302 35.384418488 -14.500000000 -32.384422302 35.384418488 -13.500000000 -32.384422302 35.384418488 -12.500000000 -32.384422302 35.384418488 -11.500000000 -32.384422302 35.384418488 -10.500000000 -32.384422302 35.384418488 -9.500000000 -32.384422302 35.384418488 -8.500000000 -32.384422302 35.384418488 -7.500000000 -32.384422302 35.384418488 -6.500000000 -32.384422302 35.384418488 -5.500000000 -32.384422302 35.384418488 -4.500000000 -32.384422302 35.384418488 -3.500000000 -32.384422302 35.384418488 -2.500000000 -32.384422302 35.384418488 -1.500000000 -32.384422302 35.384418488 -0.500000000 -32.384422302 35.384418488 0.500000000 -32.384422302 35.384418488 1.500000000 -32.384422302 35.384418488 2.500000000 -32.384422302 35.384418488 3.500000000 -32.384422302 35.384418488 4.500000000 -32.384422302 35.384418488 5.500000000 -32.384422302 35.384418488 6.500000000 -32.384422302 35.384418488 7.500000000 -32.384422302 35.384418488 8.500000000 -32.384422302 35.384418488 9.500000000 -32.384422302 35.384418488 10.500000000 -32.384422302 35.384418488 11.500000000 -32.384422302 35.384418488 12.500000000 -32.384422302 
35.384418488 13.500000000 -32.384422302 35.384418488 14.500000000 -32.384422302 35.384418488 15.500000000 -32.384422302 35.384418488 16.500000000 -32.384422302 35.384418488 17.500000000 -32.384422302 35.384418488 18.500000000 -32.384422302 35.384418488 19.500000000 -32.384422302 35.384418488 20.500000000 -32.384422302 35.384418488 21.500000000 -32.384422302 35.384418488 22.500000000 -32.384422302 35.384418488 23.500000000 -32.384422302 35.384418488 24.499996185 -32.384418488 35.384407043 25.499948502 -32.384407043 35.384307861 26.499622345 -32.384300232 35.383720398 27.498052597 -32.383811951 35.381023407 28.492321014 -32.382316589 35.371486664 29.476007462 -32.379222870 35.344280243 30.438953400 -32.375358582 35.280395508 31.371026993 -32.371025085 35.154674530 32.254272461 -32.334243774 34.954723358 32.840930939 -32.035346985 34.797355652 -33.903373718 -31.336603165 34.879734039 -33.216838837 -31.405673981 35.112346649 -32.375358582 -31.438955307 35.280395508 -31.451217651 -31.451221466 35.380989075 -30.483785629 -31.456085205 35.430477142 -29.495611191 -31.458078384 35.450424194 -28.499073029 -31.458770752 35.456939697 -27.499858856 -31.458948135 35.458606720 -26.499988556 -31.458978653 35.458930969 -25.500000000 -31.458980560 35.458976746 -24.500000000 -31.458980560 35.458976746 -23.500000000 -31.458980560 35.458976746 -22.500000000 -31.458980560 35.458976746 -21.500000000 -31.458980560 35.458976746 -20.500000000 -31.458980560 35.458976746 -19.500000000 -31.458980560 35.458976746 -18.500000000 -31.458980560 35.458976746 -17.500000000 -31.458980560 35.458976746 -16.500000000 -31.458980560 35.458976746 -15.500000000 -31.458980560 35.458976746 -14.500000000 -31.458980560 35.458976746 -13.500000000 -31.458980560 35.458976746 -12.500000000 -31.458980560 35.458976746 -11.500000000 -31.458980560 35.458976746 -10.500000000 -31.458980560 35.458976746 -9.500000000 -31.458980560 35.458976746 -8.500000000 -31.458980560 35.458976746 -7.500000000 -31.458980560 35.458976746 
-6.500000000 -31.458980560 35.458976746 -5.500000000 -31.458980560 35.458976746 -4.500000000 -31.458980560 35.458976746 -3.500000000 -31.458980560 35.458976746 -2.500000000 -31.458980560 35.458976746 -1.500000000 -31.458980560 35.458976746 -0.500000000 -31.458980560 35.458976746 0.500000000 -31.458980560 35.458976746 1.500000000 -31.458980560 35.458976746 2.500000000 -31.458980560 35.458976746 3.500000000 -31.458980560 35.458976746 4.500000000 -31.458980560 35.458976746 5.500000000 -31.458980560 35.458976746 6.500000000 -31.458980560 35.458976746 7.500000000 -31.458980560 35.458976746 8.500000000 -31.458980560 35.458976746 9.500000000 -31.458980560 35.458976746 10.500000000 -31.458980560 35.458976746 11.500000000 -31.458980560 35.458976746 12.500000000 -31.458980560 35.458976746 13.500000000 -31.458980560 35.458976746 14.500000000 -31.458980560 35.458976746 15.500000000 -31.458980560 35.458976746 16.500000000 -31.458980560 35.458976746 17.500000000 -31.458980560 35.458976746 18.500000000 -31.458980560 35.458976746 19.500000000 -31.458980560 35.458976746 20.500000000 -31.458980560 35.458976746 21.500000000 -31.458980560 35.458976746 22.500000000 -31.458980560 35.458976746 23.500000000 -31.458980560 35.458976746 24.500000000 -31.458980560 35.458976746 25.499988556 -31.458978653 35.458930969 26.499858856 -31.458948135 35.458606720 27.499073029 -31.458770752 35.456935883 28.495611191 -31.458078384 35.450424194 29.483785629 -31.456085205 35.430473328 30.451217651 -31.451217651 35.380989075 31.375356674 -31.438953400 35.280395508 32.216842651 -31.405675888 35.112346649 32.903373718 -31.336603165 34.879734039 -33.946807861 -30.442844391 34.937091827 -33.220714569 -30.460596085 35.185966492 -32.379222870 -30.476007462 35.344284058 -31.456081390 -30.483789444 35.430477142 -30.486965179 -30.486968994 35.469234467 -29.496957779 -30.488048553 35.483337402 -28.499475479 -30.488346100 35.487373352 -27.499938965 -30.488399506 35.488258362 -26.499996185 -30.488403320 35.488391876 
-25.500000000 -30.488403320 35.488403320 -24.500000000 -30.488403320 35.488403320 -23.500000000 -30.488403320 35.488403320 -22.500000000 -30.488403320 35.488403320 -21.500000000 -30.488403320 35.488403320 -20.500000000 -30.488403320 35.488403320 -19.500000000 -30.488403320 35.488403320 -18.500000000 -30.488403320 35.488403320 -17.500000000 -30.488403320 35.488403320 -16.500000000 -30.488403320 35.488403320 -15.500000000 -30.488403320 35.488403320 -14.500000000 -30.488403320 35.488403320 -13.500000000 -30.488403320 35.488403320 -12.500000000 -30.488403320 35.488403320 -11.500000000 -30.488403320 35.488403320 -10.500000000 -30.488403320 35.488403320 -9.500000000 -30.488403320 35.488403320 -8.500000000 -30.488403320 35.488403320 -7.500000000 -30.488403320 35.488403320 -6.500000000 -30.488403320 35.488403320 -5.500000000 -30.488403320 35.488403320 -4.500000000 -30.488403320 35.488403320 -3.500000000 -30.488403320 35.488403320 -2.500000000 -30.488403320 35.488403320 -1.500000000 -30.488403320 35.488403320 -0.500000000 -30.488403320 35.488403320 0.500000000 -30.488403320 35.488403320 1.500000000 -30.488403320 35.488403320 2.500000000 -30.488403320 35.488403320 3.500000000 -30.488403320 35.488403320 4.500000000 -30.488403320 35.488403320 5.500000000 -30.488403320 35.488403320 6.500000000 -30.488403320 35.488403320 7.500000000 -30.488403320 35.488403320 8.500000000 -30.488403320 35.488403320 9.500000000 -30.488403320 35.488403320 10.500000000 -30.488403320 35.488403320 11.500000000 -30.488403320 35.488403320 12.500000000 -30.488403320 35.488403320 13.500000000 -30.488403320 35.488403320 14.500000000 -30.488403320 35.488403320 15.500000000 -30.488403320 35.488403320 16.500000000 -30.488403320 35.488403320 17.500000000 -30.488403320 35.488403320 18.500000000 -30.488403320 35.488403320 19.500000000 -30.488403320 35.488403320 20.500000000 -30.488403320 35.488403320 21.500000000 -30.488403320 35.488403320 22.500000000 -30.488403320 35.488403320 23.500000000 -30.488403320 
35.488403320 24.500000000 -30.488403320 35.488403320 25.499996185 -30.488403320 35.488391876 26.499938965 -30.488399506 35.488258362 27.499475479 -30.488346100 35.487373352 28.496959686 -30.488048553 35.483337402 29.486968994 -30.486968994 35.469234467 30.456085205 -30.483785629 35.430473328 31.379222870 -30.476007462 35.344284058 32.220714569 -30.460596085 35.185966492 32.946807861 -30.442840576 34.937091827 -33.968864441 -29.481937408 34.965763092 -33.227874756 -29.486719131 35.217510223 -32.382308960 -29.492319107 35.371490479 -31.458078384 -29.495611191 35.450428009 -30.488048553 -29.496957779 35.483337402 -29.497375488 -29.497371674 35.494174957 -28.499578476 -29.497461319 35.496910095 -27.499954224 -29.497470856 35.497409821 -26.499996185 -29.497470856 35.497467041 -25.500000000 -29.497470856 35.497474670 -24.500000000 -29.497470856 35.497474670 -23.500000000 -29.497470856 35.497474670 -22.500000000 -29.497470856 35.497474670 -21.500000000 -29.497470856 35.497474670 -20.500000000 -29.497470856 35.497474670 -19.500000000 -29.497470856 35.497474670 -18.500000000 -29.497470856 35.497474670 -17.500000000 -29.497470856 35.497474670 -16.500000000 -29.497470856 35.497474670 -15.500000000 -29.497470856 35.497474670 -14.500000000 -29.497470856 35.497474670 -13.500000000 -29.497470856 35.497474670 -12.500000000 -29.497470856 35.497474670 -11.500000000 -29.497470856 35.497474670 -10.500000000 -29.497470856 35.497474670 -9.500000000 -29.497470856 35.497474670 -8.500000000 -29.497470856 35.497474670 -7.500000000 -29.497470856 35.497474670 -6.500000000 -29.497470856 35.497474670 -5.500000000 -29.497470856 35.497474670 -4.500000000 -29.497470856 35.497474670 -3.500000000 -29.497470856 35.497474670 -2.500000000 -29.497470856 35.497474670 -1.500000000 -29.497470856 35.497474670 -0.500000000 -29.497470856 35.497474670 0.500000000 -29.497470856 35.497474670 1.500000000 -29.497470856 35.497474670 2.500000000 -29.497470856 35.497474670 3.500000000 -29.497470856 35.497474670 
4.500000000 -29.497470856 35.497474670 5.500000000 -29.497470856 35.497474670 6.500000000 -29.497470856 35.497474670 7.500000000 -29.497470856 35.497474670 8.500000000 -29.497470856 35.497474670 9.500000000 -29.497470856 35.497474670 10.500000000 -29.497470856 35.497474670 11.500000000 -29.497470856 35.497474670 12.500000000 -29.497470856 35.497474670 13.500000000 -29.497470856 35.497474670 14.500000000 -29.497470856 35.497474670 15.500000000 -29.497470856 35.497474670 16.500000000 -29.497470856 35.497474670 17.500000000 -29.497470856 35.497474670 18.500000000 -29.497470856 35.497474670 19.500000000 -29.497470856 35.497474670 20.500000000 -29.497470856 35.497474670 21.500000000 -29.497470856 35.497474670 22.500000000 -29.497470856 35.497474670 23.500000000 -29.497470856 35.497474670 24.500000000 -29.497470856 35.497474670 25.499996185 -29.497470856 35.497470856 26.499954224 -29.497470856 35.497406006 27.499576569 -29.497461319 35.496910095 28.497375488 -29.497375488 35.494174957 29.488048553 -29.496957779 35.483337402 30.458078384 -29.495611191 35.450428009 31.382312775 -29.492321014 35.371490479 32.227874756 -29.486719131 35.217510223 32.968864441 -29.481937408 34.965763092 -33.977611542 -28.495172501 34.976860046 -33.231597900 -28.496379852 35.228954315 -32.383808136 -28.498052597 35.381023407 -31.458766937 -28.499073029 35.456939697 -30.488346100 -28.499475479 35.487373352 -29.497461319 -28.499576569 35.496910095 -28.499593735 -28.499591827 35.499164581 -27.499954224 -28.499591827 35.499546051 -26.499996185 -28.499591827 35.499588013 -25.500000000 -28.499591827 35.499591827 -24.500000000 -28.499591827 35.499591827 -23.500000000 -28.499591827 35.499591827 -22.500000000 -28.499591827 35.499591827 -21.500000000 -28.499591827 35.499591827 -20.500000000 -28.499591827 35.499591827 -19.500000000 -28.499591827 35.499591827 -18.500000000 -28.499591827 35.499591827 -17.500000000 -28.499591827 35.499591827 -16.500000000 -28.499591827 35.499591827 -15.500000000 
-28.499591827 35.499591827 -14.500000000 -28.499591827 35.499591827 -13.500000000 -28.499591827 35.499591827 -12.500000000 -28.499591827 35.499591827 -11.500000000 -28.499591827 35.499591827 -10.500000000 -28.499591827 35.499591827 -9.500000000 -28.499591827 35.499591827 -8.500000000 -28.499591827 35.499591827 -7.500000000 -28.499591827 35.499591827 -6.500000000 -28.499591827 35.499591827 -5.500000000 -28.499591827 35.499591827 -4.500000000 -28.499591827 35.499591827 -3.500000000 -28.499591827 35.499591827 -2.500000000 -28.499591827 35.499591827 -1.500000000 -28.499591827 35.499591827 -0.500000000 -28.499591827 35.499591827 0.500000000 -28.499591827 35.499591827 1.500000000 -28.499591827 35.499591827 2.500000000 -28.499591827 35.499591827 3.500000000 -28.499591827 35.499591827 4.500000000 -28.499591827 35.499591827 5.500000000 -28.499591827 35.499591827 6.500000000 -28.499591827 35.499591827 7.500000000 -28.499591827 35.499591827 8.500000000 -28.499591827 35.499591827 9.500000000 -28.499591827 35.499591827 10.500000000 -28.499591827 35.499591827 11.500000000 -28.499591827 35.499591827 12.500000000 -28.499591827 35.499591827 13.500000000 -28.499591827 35.499591827 14.500000000 -28.499591827 35.499591827 15.500000000 -28.499591827 35.499591827 16.500000000 -28.499591827 35.499591827 17.500000000 -28.499591827 35.499591827 18.500000000 -28.499591827 35.499591827 19.500000000 -28.499591827 35.499591827 20.500000000 -28.499591827 35.499591827 21.500000000 -28.499591827 35.499591827 22.500000000 -28.499591827 35.499591827 23.500000000 -28.499591827 35.499591827 24.500000000 -28.499591827 35.499591827 25.499996185 -28.499591827 35.499588013 26.499954224 -28.499591827 35.499542236 27.499591827 -28.499591827 35.499160767 28.497461319 -28.499576569 35.496910095 29.488346100 -28.499475479 35.487377167 30.458766937 -28.499073029 35.456939697 31.383810043 -28.498052597 35.381023407 32.231597900 -28.496377945 35.228954315 32.977611542 -28.495172501 34.976860046 -33.980331421 
-27.498952866 34.980201721 -33.232864380 -27.499225616 35.232307434 -32.384296417 -27.499622345 35.383720398 -31.458948135 -27.499858856 35.458606720 -30.488399506 -27.499938965 35.488258362 -29.497472763 -27.499954224 35.497406006 -28.499593735 -27.499954224 35.499549866 -27.499954224 -27.499954224 35.499908447 -26.499996185 -27.499954224 35.499950409 -25.500000000 -27.499954224 35.499954224 -24.500000000 -27.499954224 35.499954224 -23.500000000 -27.499954224 35.499954224 -22.500000000 -27.499954224 35.499954224 -21.500000000 -27.499954224 35.499954224 -20.500000000 -27.499954224 35.499954224 -19.500000000 -27.499954224 35.499954224 -18.500000000 -27.499954224 35.499954224 -17.500000000 -27.499954224 35.499954224 -16.500000000 -27.499954224 35.499954224 -15.500000000 -27.499954224 35.499954224 -14.500000000 -27.499954224 35.499954224 -13.500000000 -27.499954224 35.499954224 -12.500000000 -27.499954224 35.499954224 -11.500000000 -27.499954224 35.499954224 -10.500000000 -27.499954224 35.499954224 -9.500000000 -27.499954224 35.499954224 -8.500000000 -27.499954224 35.499954224 -7.500000000 -27.499954224 35.499954224 -6.500000000 -27.499954224 35.499954224 -5.500000000 -27.499954224 35.499954224 -4.500000000 -27.499954224 35.499954224 -3.500000000 -27.499954224 35.499954224 -2.500000000 -27.499954224 35.499954224 -1.500000000 -27.499954224 35.499954224 -0.500000000 -27.499954224 35.499954224 0.500000000 -27.499954224 35.499954224 1.500000000 -27.499954224 35.499954224 2.500000000 -27.499954224 35.499954224 3.500000000 -27.499954224 35.499954224 4.500000000 -27.499954224 35.499954224 5.500000000 -27.499954224 35.499954224 6.500000000 -27.499954224 35.499954224 7.500000000 -27.499954224 35.499954224 8.500000000 -27.499954224 35.499954224 9.500000000 -27.499954224 35.499954224 10.500000000 -27.499954224 35.499954224 11.500000000 -27.499954224 35.499954224 12.500000000 -27.499954224 35.499954224 13.500000000 -27.499954224 35.499954224 14.500000000 -27.499954224 
35.499954224 15.500000000 -27.499954224 35.499954224 16.500000000 -27.499954224 35.499954224 17.500000000 -27.499954224 35.499954224 18.500000000 -27.499954224 35.499954224 19.500000000 -27.499954224 35.499954224 20.500000000 -27.499954224 35.499954224 21.500000000 -27.499954224 35.499954224 22.500000000 -27.499954224 35.499954224 23.500000000 -27.499954224 35.499954224 24.500000000 -27.499954224 35.499954224 25.499996185 -27.499954224 35.499950409 26.499954224 -27.499954224 35.499908447 27.499591827 -27.499954224 35.499546051 28.497470856 -27.499954224 35.497413635 29.488399506 -27.499938965 35.488258362 30.458948135 -27.499858856 35.458606720 31.384298325 -27.499618530 35.383720398 32.232860565 -27.499225616 35.232307434 32.980335236 -27.498952866 34.980201721 -33.980972290 -26.499826431 34.980957031 -33.233165741 -26.499874115 35.233070374 -32.384403229 -26.499948502 35.384307861 -31.458978653 -26.499988556 35.458930969 -30.488407135 -26.499996185 35.488384247 -29.497472763 -26.499996185 35.497467041 -28.499593735 -26.499996185 35.499591827 -27.499954224 -26.499996185 35.499950409 -26.499996185 -26.499996185 35.499992371 -25.500000000 -26.499996185 35.499996185 -24.500000000 -26.499996185 35.499996185 -23.500000000 -26.499996185 35.499996185 -22.500000000 -26.499996185 35.499996185 -21.500000000 -26.499996185 35.499996185 -20.500000000 -26.499996185 35.499996185 -19.500000000 -26.499996185 35.499996185 -18.500000000 -26.499996185 35.499996185 -17.500000000 -26.499996185 35.499996185 -16.500000000 -26.499996185 35.499996185 -15.500000000 -26.499996185 35.499996185 -14.500000000 -26.499996185 35.499996185 -13.500000000 -26.499996185 35.499996185 -12.500000000 -26.499996185 35.499996185 -11.500000000 -26.499996185 35.499996185 -10.500000000 -26.499996185 35.499996185 -9.500000000 -26.499996185 35.499996185 -8.500000000 -26.499996185 35.499996185 -7.500000000 -26.499996185 35.499996185 -6.500000000 -26.499996185 35.499996185 -5.500000000 -26.499996185 35.499996185 
-4.500000000 -26.499996185 35.499996185 -3.500000000 -26.499996185 35.499996185 -2.500000000 -26.499996185 35.499996185 -1.500000000 -26.499996185 35.499996185 -0.500000000 -26.499996185 35.499996185 0.500000000 -26.499996185 35.499996185 1.500000000 -26.499996185 35.499996185 2.500000000 -26.499996185 35.499996185 3.500000000 -26.499996185 35.499996185 4.500000000 -26.499996185 35.499996185 5.500000000 -26.499996185 35.499996185 6.500000000 -26.499996185 35.499996185 7.500000000 -26.499996185 35.499996185 8.500000000 -26.499996185 35.499996185 9.500000000 -26.499996185 35.499996185 10.500000000 -26.499996185 35.499996185 11.500000000 -26.499996185 35.499996185 12.500000000 -26.499996185 35.499996185 13.500000000 -26.499996185 35.499996185 14.500000000 -26.499996185 35.499996185 15.500000000 -26.499996185 35.499996185 16.500000000 -26.499996185 35.499996185 17.500000000 -26.499996185 35.499996185 18.500000000 -26.499996185 35.499996185 19.500000000 -26.499996185 35.499996185 20.500000000 -26.499996185 35.499996185 21.500000000 -26.499996185 35.499996185 22.500000000 -26.499996185 35.499996185 23.500000000 -26.499996185 35.499996185 24.500000000 -26.499996185 35.499996185 25.499996185 -26.499996185 35.499992371 26.499954224 -26.499996185 35.499950409 27.499591827 -26.499996185 35.499591827 28.497470856 -26.499996185 35.497467041 29.488407135 -26.499996185 35.488391876 30.458978653 -26.499988556 35.458934784 31.384403229 -26.499948502 35.384307861 32.233165741 -26.499872208 35.233070374 32.980972290 -26.499826431 34.980957031 -33.981090546 -25.499979019 34.981086731 -33.233222961 -25.499984741 35.233203888 -32.384422302 -25.499996185 35.384407043 -31.458978653 -25.500000000 35.458976746 -30.488407135 -25.500000000 35.488403320 -29.497472763 -25.500000000 35.497474670 -28.499593735 -25.500000000 35.499591827 -27.499954224 -25.500000000 35.499954224 -26.499996185 -25.500000000 35.499996185 -25.500000000 -25.500000000 35.500000000 -24.500000000 -25.500000000 
35.500000000 -23.500000000 -25.500000000 35.500000000 -22.500000000 -25.500000000 35.500000000 -21.500000000 -25.500000000 35.500000000 -20.500000000 -25.500000000 35.500000000 -19.500000000 -25.500000000 35.500000000 -18.500000000 -25.500000000 35.500000000 -17.500000000 -25.500000000 35.500000000 -16.500000000 -25.500000000 35.500000000 -15.500000000 -25.500000000 35.500000000 -14.500000000 -25.500000000 35.500000000 -13.500000000 -25.500000000 35.500000000 -12.500000000 -25.500000000 35.500000000 -11.500000000 -25.500000000 35.500000000 -10.500000000 -25.500000000 35.500000000 -9.500000000 -25.500000000 35.500000000 -8.500000000 -25.500000000 35.500000000 -7.500000000 -25.500000000 35.500000000 -6.500000000 -25.500000000 35.500000000 -5.500000000 -25.500000000 35.500000000 -4.500000000 -25.500000000 35.500000000 -3.500000000 -25.500000000 35.500000000 -2.500000000 -25.500000000 35.500000000 -1.500000000 -25.500000000 35.500000000 -0.500000000 -25.500000000 35.500000000 0.500000000 -25.500000000 35.500000000 1.500000000 -25.500000000 35.500000000 2.500000000 -25.500000000 35.500000000 3.500000000 -25.500000000 35.500000000 4.500000000 -25.500000000 35.500000000 5.500000000 -25.500000000 35.500000000 6.500000000 -25.500000000 35.500000000 7.500000000 -25.500000000 35.500000000 8.500000000 -25.500000000 35.500000000 9.500000000 -25.500000000 35.500000000 10.500000000 -25.500000000 35.500000000 11.500000000 -25.500000000 35.500000000 12.500000000 -25.500000000 35.500000000 13.500000000 -25.500000000 35.500000000 14.500000000 -25.500000000 35.500000000 15.500000000 -25.500000000 35.500000000 16.500000000 -25.500000000 35.500000000 17.500000000 -25.500000000 35.500000000 18.500000000 -25.500000000 35.500000000 19.500000000 -25.500000000 35.500000000 20.500000000 -25.500000000 35.500000000 21.500000000 -25.500000000 35.500000000 22.500000000 -25.500000000 35.500000000 23.500000000 -25.500000000 35.500000000 24.500000000 -25.500000000 35.500000000 25.499996185 
-25.500000000 35.499996185 26.499954224 -25.500000000 35.499954224 27.499591827 -25.500000000 35.499591827 28.497470856 -25.500000000 35.497467041 29.488407135 -25.500000000 35.488403320 30.458978653 -25.500000000 35.458976746 31.384418488 -25.499996185 35.384407043 32.233215332 -25.499986649 35.233203888 32.981086731 -25.499977112 34.981086731 -33.981101990 -24.499998093 34.981101990 -33.233226776 -24.500000000 35.233222961 -32.384422302 -24.500000000 35.384418488 -31.458978653 -24.500000000 35.458980560 -30.488407135 -24.500000000 35.488403320 -29.497472763 -24.500000000 35.497474670 -28.499593735 -24.500000000 35.499591827 -27.499954224 -24.500000000 35.499954224 -26.499996185 -24.500000000 35.499996185 -25.500000000 -24.500000000 35.500000000 -24.500000000 -24.500000000 35.500000000 -23.500000000 -24.500000000 35.500000000 -22.500000000 -24.500000000 35.500000000 -21.500000000 -24.500000000 35.500000000 -20.500000000 -24.500000000 35.500000000 -19.500000000 -24.500000000 35.500000000 -18.500000000 -24.500000000 35.500000000 -17.500000000 -24.500000000 35.500000000 -16.500000000 -24.500000000 35.500000000 -15.500000000 -24.500000000 35.500000000 -14.500000000 -24.500000000 35.500000000 -13.500000000 -24.500000000 35.500000000 -12.500000000 -24.500000000 35.500000000 -11.500000000 -24.500000000 35.500000000 -10.500000000 -24.500000000 35.500000000 -9.500000000 -24.500000000 35.500000000 -8.500000000 -24.500000000 35.500000000 -7.500000000 -24.500000000 35.500000000 -6.500000000 -24.500000000 35.500000000 -5.500000000 -24.500000000 35.500000000 -4.500000000 -24.500000000 35.500000000 -3.500000000 -24.500000000 35.500000000 -2.500000000 -24.500000000 35.500000000 -1.500000000 -24.500000000 35.500000000 -0.500000000 -24.500000000 35.500000000 0.500000000 -24.500000000 35.500000000 1.500000000 -24.500000000 35.500000000 2.500000000 -24.500000000 35.500000000 3.500000000 -24.500000000 35.500000000 4.500000000 -24.500000000 35.500000000 5.500000000 -24.500000000 
35.500000000 6.500000000 -24.500000000 35.500000000 7.500000000 -24.500000000 35.500000000 8.500000000 -24.500000000 35.500000000 9.500000000 -24.500000000 35.500000000 10.500000000 -24.500000000 35.500000000 11.500000000 -24.500000000 35.500000000 12.500000000 -24.500000000 35.500000000 13.500000000 -24.500000000 35.500000000 14.500000000 -24.500000000 35.500000000 15.500000000 -24.500000000 35.500000000 16.500000000 -24.500000000 35.500000000 17.500000000 -24.500000000 35.500000000 18.500000000 -24.500000000 35.500000000 19.500000000 -24.500000000 35.500000000 20.500000000 -24.500000000 35.500000000 21.500000000 -24.500000000 35.500000000 22.500000000 -24.500000000 35.500000000 23.500000000 -24.500000000 35.500000000 24.500000000 -24.500000000 35.500000000 25.499996185 -24.500000000 35.499996185 26.499954224 -24.500000000 35.499954224 27.499591827 -24.500000000 35.499591827 28.497470856 -24.500000000 35.497467041 29.488407135 -24.500000000 35.488403320 30.458978653 -24.500000000 35.458980560 31.384418488 -24.500000000 35.384418488 32.233222961 -24.499998093 35.233222961 32.981101990 -24.499998093 34.981101990 -33.981101990 -23.500000000 34.981101990 -33.233226776 -23.500000000 35.233222961 -32.384422302 -23.500000000 35.384418488 -31.458978653 -23.500000000 35.458980560 -30.488407135 -23.500000000 35.488403320 -29.497472763 -23.500000000 35.497474670 -28.499593735 -23.500000000 35.499591827 -27.499954224 -23.500000000 35.499954224 -26.499996185 -23.500000000 35.499996185 -25.500000000 -23.500000000 35.500000000 -24.500000000 -23.500000000 35.500000000 -23.500000000 -23.500000000 35.500000000 -22.500000000 -23.500000000 35.500000000 -21.500000000 -23.500000000 35.500000000 -20.500000000 -23.500000000 35.500000000 -19.500000000 -23.500000000 35.500000000 -18.500000000 -23.500000000 35.500000000 -17.500000000 -23.500000000 35.500000000 -16.500000000 -23.500000000 35.500000000 -15.500000000 -23.500000000 35.500000000 -14.500000000 -23.500000000 35.500000000 
-13.500000000 -23.500000000 35.500000000 -12.500000000 -23.500000000 35.500000000 -11.500000000 -23.500000000 35.500000000 -10.500000000 -23.500000000 35.500000000 -9.500000000 -23.500000000 35.500000000 -8.500000000 -23.500000000 35.500000000 -7.500000000 -23.500000000 35.500000000 -6.500000000 -23.500000000 35.500000000 -5.500000000 -23.500000000 35.500000000 -4.500000000 -23.500000000 35.500000000 -3.500000000 -23.500000000 35.500000000 -2.500000000 -23.500000000 35.500000000 -1.500000000 -23.500000000 35.500000000 -0.500000000 -23.500000000 35.500000000 0.500000000 -23.500000000 35.500000000 1.500000000 -23.500000000 35.500000000 2.500000000 -23.500000000 35.500000000 3.500000000 -23.500000000 35.500000000 4.500000000 -23.500000000 35.500000000 5.500000000 -23.500000000 35.500000000 6.500000000 -23.500000000 35.500000000 7.500000000 -23.500000000 35.500000000 8.500000000 -23.500000000 35.500000000 9.500000000 -23.500000000 35.500000000 10.500000000 -23.500000000 35.500000000 11.500000000 -23.500000000 35.500000000 12.500000000 -23.500000000 35.500000000 13.500000000 -23.500000000 35.500000000 14.500000000 -23.500000000 35.500000000 15.500000000 -23.500000000 35.500000000 16.500000000 -23.500000000 35.500000000 17.500000000 -23.500000000 35.500000000 18.500000000 -23.500000000 35.500000000 19.500000000 -23.500000000 35.500000000 20.500000000 -23.500000000 35.500000000 21.500000000 -23.500000000 35.500000000 22.500000000 -23.500000000 35.500000000 23.500000000 -23.500000000 35.500000000 24.500000000 -23.500000000 35.500000000 25.499996185 -23.500000000 35.499996185 26.499954224 -23.500000000 35.499954224 27.499591827 -23.500000000 35.499591827 28.497470856 -23.500000000 35.497467041 29.488407135 -23.500000000 35.488403320 30.458978653 -23.500000000 35.458980560 31.384418488 -23.500000000 35.384422302 32.233222961 -23.500000000 35.233222961 32.981101990 -23.500000000 34.981101990 -33.981101990 -22.500000000 34.981101990 -33.233226776 -22.500000000 35.233222961 
-32.384422302 -22.500000000 35.384418488 -31.458978653 -22.500000000 35.458980560 -30.488407135 -22.500000000 35.488403320 -29.497472763 -22.500000000 35.497474670 -28.499593735 -22.500000000 35.499591827 -27.499954224 -22.500000000 35.499954224 -26.499996185 -22.500000000 35.499996185 -25.500000000 -22.500000000 35.500000000 -24.500000000 -22.500000000 35.500000000 -23.500000000 -22.500000000 35.500000000 -22.500000000 -22.500000000 35.500000000 -21.500000000 -22.500000000 35.500000000 -20.500000000 -22.500000000 35.500000000 -19.500000000 -22.500000000 35.500000000 -18.500000000 -22.500000000 35.500000000 -17.500000000 -22.500000000 35.500000000 -16.500000000 -22.500000000 35.500000000 -15.500000000 -22.500000000 35.500000000 -14.500000000 -22.500000000 35.500000000 -13.500000000 -22.500000000 35.500000000 -12.500000000 -22.500000000 35.500000000 -11.500000000 -22.500000000 35.500000000 -10.500000000 -22.500000000 35.500000000 -9.500000000 -22.500000000 35.500000000 -8.500000000 -22.500000000 35.500000000 -7.500000000 -22.500000000 35.500000000 -6.500000000 -22.500000000 35.500000000 -5.500000000 -22.500000000 35.500000000 -4.500000000 -22.500000000 35.500000000 -3.500000000 -22.500000000 35.500000000 -2.500000000 -22.500000000 35.500000000 -1.500000000 -22.500000000 35.500000000 -0.500000000 -22.500000000 35.500000000 0.500000000 -22.500000000 35.500000000 1.500000000 -22.500000000 35.500000000 2.500000000 -22.500000000 35.500000000 3.500000000 -22.500000000 35.500000000 4.500000000 -22.500000000 35.500000000 5.500000000 -22.500000000 35.500000000 6.500000000 -22.500000000 35.500000000 7.500000000 -22.500000000 35.500000000 8.500000000 -22.500000000 35.500000000 9.500000000 -22.500000000 35.500000000 10.500000000 -22.500000000 35.500000000 11.500000000 -22.500000000 35.500000000 12.500000000 -22.500000000 35.500000000 13.500000000 -22.500000000 35.500000000 14.500000000 -22.500000000 35.500000000 15.500000000 -22.500000000 35.500000000 16.500000000 -22.500000000 
35.500000000 17.500000000 -22.500000000 35.500000000 18.500000000 -22.500000000 35.500000000 19.500000000 -22.500000000 35.500000000 20.500000000 -22.500000000 35.500000000 21.500000000 -22.500000000 35.500000000 22.500000000 -22.500000000 35.500000000 23.500000000 -22.500000000 35.500000000 24.500000000 -22.500000000 35.500000000 25.499996185 -22.500000000 35.499996185 26.499954224 -22.500000000 35.499954224 27.499591827 -22.500000000 35.499591827 28.497470856 -22.500000000 35.497467041 29.488407135 -22.500000000 35.488403320 30.458978653 -22.500000000 35.458980560 31.384418488 -22.500000000 35.384422302 32.233222961 -22.500000000 35.233222961 32.981101990 -22.500000000 34.981101990 -33.981101990 -21.500000000 34.981101990 -33.233226776 -21.500000000 35.233222961 -32.384422302 -21.500000000 35.384418488 -31.458978653 -21.500000000 35.458980560 -30.488407135 -21.500000000 35.488403320 -29.497472763 -21.500000000 35.497474670 -28.499593735 -21.500000000 35.499591827 -27.499954224 -21.500000000 35.499954224 -26.499996185 -21.500000000 35.499996185 -25.500000000 -21.500000000 35.500000000 -24.500000000 -21.500000000 35.500000000 -23.500000000 -21.500000000 35.500000000 -22.500000000 -21.500000000 35.500000000 -21.500000000 -21.500000000 35.500000000 -20.500000000 -21.500000000 35.500000000 -19.500000000 -21.500000000 35.500000000 -18.500000000 -21.500000000 35.500000000 -17.500000000 -21.500000000 35.500000000 -16.500000000 -21.500000000 35.500000000 -15.500000000 -21.500000000 35.500000000 -14.500000000 -21.500000000 35.500000000 -13.500000000 -21.500000000 35.500000000 -12.500000000 -21.500000000 35.500000000 -11.500000000 -21.500000000 35.500000000 -10.500000000 -21.500000000 35.500000000 -9.500000000 -21.500000000 35.500000000 -8.500000000 -21.500000000 35.500000000 -7.500000000 -21.500000000 35.500000000 -6.500000000 -21.500000000 35.500000000 -5.500000000 -21.500000000 35.500000000 -4.500000000 -21.500000000 35.500000000 -3.500000000 -21.500000000 35.500000000 
-2.500000000 -21.500000000 35.500000000 -1.500000000 -21.500000000 35.500000000 -0.500000000 -21.500000000 35.500000000 0.500000000 -21.500000000 35.500000000 1.500000000 -21.500000000 35.500000000 2.500000000 -21.500000000 35.500000000 3.500000000 -21.500000000 35.500000000 4.500000000 -21.500000000 35.500000000 5.500000000 -21.500000000 35.500000000 6.500000000 -21.500000000 35.500000000 7.500000000 -21.500000000 35.500000000 8.500000000 -21.500000000 35.500000000 9.500000000 -21.500000000 35.500000000 10.500000000 -21.500000000 35.500000000 11.500000000 -21.500000000 35.500000000 12.500000000 -21.500000000 35.500000000 13.500000000 -21.500000000 35.500000000 14.500000000 -21.500000000 35.500000000 15.500000000 -21.500000000 35.500000000 16.500000000 -21.500000000 35.500000000 17.500000000 -21.500000000 35.500000000 18.500000000 -21.500000000 35.500000000 19.500000000 -21.500000000 35.500000000 20.500000000 -21.500000000 35.500000000 21.500000000 -21.500000000 35.500000000 22.500000000 -21.500000000 35.500000000 23.500000000 -21.500000000 35.500000000 24.500000000 -21.500000000 35.500000000 25.499996185 -21.500000000 35.499996185 26.499954224 -21.500000000 35.499954224 27.499591827 -21.500000000 35.499591827 28.497470856 -21.500000000 35.497467041 29.488407135 -21.500000000 35.488403320 30.458978653 -21.500000000 35.458980560 31.384418488 -21.500000000 35.384422302 32.233222961 -21.500000000 35.233222961 32.981101990 -21.500000000 34.981101990 -33.981101990 -20.500000000 34.981101990 -33.233226776 -20.500000000 35.233222961 -32.384422302 -20.500000000 35.384418488 -31.458978653 -20.500000000 35.458980560 -30.488407135 -20.500000000 35.488403320 -29.497472763 -20.500000000 35.497474670 -28.499593735 -20.500000000 35.499591827 -27.499954224 -20.500000000 35.499954224 -26.499996185 -20.500000000 35.499996185 -25.500000000 -20.500000000 35.500000000 -24.500000000 -20.500000000 35.500000000 -23.500000000 -20.500000000 35.500000000 -22.500000000 -20.500000000 
35.500000000 -21.500000000 -20.500000000 35.500000000 -20.500000000 -20.500000000 35.500000000 -19.500000000 -20.500000000 35.500000000 -18.500000000 -20.500000000 35.500000000 -17.500000000 -20.500000000 35.500000000 -16.500000000 -20.500000000 35.500000000 -15.500000000 -20.500000000 35.500000000 -14.500000000 -20.500000000 35.500000000 -13.500000000 -20.500000000 35.500000000 -12.500000000 -20.500000000 35.500000000 -11.500000000 -20.500000000 35.500000000 -10.500000000 -20.500000000 35.500000000 -9.500000000 -20.500000000 35.500000000 -8.500000000 -20.500000000 35.500000000 -7.500000000 -20.500000000 35.500000000 -6.500000000 -20.500000000 35.500000000 -5.500000000 -20.500000000 35.500000000 -4.500000000 -20.500000000 35.500000000 -3.500000000 -20.500000000 35.500000000 -2.500000000 -20.500000000 35.500000000 -1.500000000 -20.500000000 35.500000000 -0.500000000 -20.500000000 35.500000000 0.500000000 -20.500000000 35.500000000 1.500000000 -20.500000000 35.500000000 2.500000000 -20.500000000 35.500000000 3.500000000 -20.500000000 35.500000000 4.500000000 -20.500000000 35.500000000 5.500000000 -20.500000000 35.500000000 6.500000000 -20.500000000 35.500000000 7.500000000 -20.500000000 35.500000000 8.500000000 -20.500000000 35.500000000 9.500000000 -20.500000000 35.500000000 10.500000000 -20.500000000 35.500000000 11.500000000 -20.500000000 35.500000000 12.500000000 -20.500000000 35.500000000 13.500000000 -20.500000000 35.500000000 14.500000000 -20.500000000 35.500000000 15.500000000 -20.500000000 35.500000000 16.500000000 -20.500000000 35.500000000 17.500000000 -20.500000000 35.500000000 18.500000000 -20.500000000 35.500000000 19.500000000 -20.500000000 35.500000000 20.500000000 -20.500000000 35.500000000 21.500000000 -20.500000000 35.500000000 22.500000000 -20.500000000 35.500000000 23.500000000 -20.500000000 35.500000000 24.500000000 -20.500000000 35.500000000 25.499996185 -20.500000000 35.499996185 26.499954224 -20.500000000 35.499954224 27.499591827 
-20.500000000 35.499591827 28.497470856 -20.500000000 35.497467041 29.488407135 -20.500000000 35.488403320 30.458978653 -20.500000000 35.458980560 31.384418488 -20.500000000 35.384422302 32.233222961 -20.500000000 35.233222961 32.981101990 -20.500000000 34.981101990 -33.981101990 -19.500000000 34.981101990 -33.233226776 -19.500000000 35.233222961 -32.384422302 -19.500000000 35.384418488 -31.458978653 -19.500000000 35.458980560 -30.488407135 -19.500000000 35.488403320 -29.497472763 -19.500000000 35.497474670 -28.499593735 -19.500000000 35.499591827 -27.499954224 -19.500000000 35.499954224 -26.499996185 -19.500000000 35.499996185 -25.500000000 -19.500000000 35.500000000 -24.500000000 -19.500000000 35.500000000 -23.500000000 -19.500000000 35.500000000 -22.500000000 -19.500000000 35.500000000 -21.500000000 -19.500000000 35.500000000 -20.500000000 -19.500000000 35.500000000 -19.500000000 -19.500000000 35.500000000 -18.500000000 -19.500000000 35.500000000 -17.500000000 -19.500000000 35.500000000 -16.500000000 -19.500000000 35.500000000 -15.500000000 -19.500000000 35.500000000 -14.500000000 -19.500000000 35.500000000 -13.500000000 -19.500000000 35.500000000 -12.500000000 -19.500000000 35.500000000 -11.500000000 -19.500000000 35.500000000 -10.500000000 -19.500000000 35.500000000 -9.500000000 -19.500000000 35.500000000 -8.500000000 -19.500000000 35.500000000 -7.500000000 -19.500000000 35.500000000 -6.500000000 -19.500000000 35.500000000 -5.500000000 -19.500000000 35.500000000 -4.500000000 -19.500000000 35.500000000 -3.500000000 -19.500000000 35.500000000 -2.500000000 -19.500000000 35.500000000 -1.500000000 -19.500000000 35.500000000 -0.500000000 -19.500000000 35.500000000 0.500000000 -19.500000000 35.500000000 1.500000000 -19.500000000 35.500000000 2.500000000 -19.500000000 35.500000000 3.500000000 -19.500000000 35.500000000 4.500000000 -19.500000000 35.500000000 5.500000000 -19.500000000 35.500000000 6.500000000 -19.500000000 35.500000000 7.500000000 -19.500000000 
35.500000000 8.500000000 -19.500000000 35.500000000 9.500000000 -19.500000000 35.500000000 10.500000000 -19.500000000 35.500000000 11.500000000 -19.500000000 35.500000000 12.500000000 -19.500000000 35.500000000 13.500000000 -19.500000000 35.500000000 14.500000000 -19.500000000 35.500000000 15.500000000 -19.500000000 35.500000000 16.500000000 -19.500000000 35.500000000 17.500000000 -19.500000000 35.500000000 18.500000000 -19.500000000 35.500000000 19.500000000 -19.500000000 35.500000000 20.500000000 -19.500000000 35.500000000 21.500000000 -19.500000000 35.500000000 22.500000000 -19.500000000 35.500000000 23.500000000 -19.500000000 35.500000000 24.500000000 -19.500000000 35.500000000 25.499996185 -19.500000000 35.499996185 26.499954224 -19.500000000 35.499954224 27.499591827 -19.500000000 35.499591827 28.497470856 -19.500000000 35.497467041 29.488407135 -19.500000000 35.488403320 30.458978653 -19.500000000 35.458980560 31.384418488 -19.500000000 35.384422302 32.233222961 -19.500000000 35.233222961 32.981101990 -19.500000000 34.981101990 -33.981101990 -18.500000000 34.981101990 -33.233226776 -18.500000000 35.233222961 -32.384422302 -18.500000000 35.384418488 -31.458978653 -18.500000000 35.458980560 -30.488407135 -18.500000000 35.488403320 -29.497472763 -18.500000000 35.497474670 -28.499593735 -18.500000000 35.499591827 -27.499954224 -18.500000000 35.499954224 -26.499996185 -18.500000000 35.499996185 -25.500000000 -18.500000000 35.500000000 -24.500000000 -18.500000000 35.500000000 -23.500000000 -18.500000000 35.500000000 -22.500000000 -18.500000000 35.500000000 -21.500000000 -18.500000000 35.500000000 -20.500000000 -18.500000000 35.500000000 -19.500000000 -18.500000000 35.500000000 -18.500000000 -18.500000000 35.500000000 -17.500000000 -18.500000000 35.500000000 -16.500000000 -18.500000000 35.500000000 -15.500000000 -18.500000000 35.500000000 -14.500000000 -18.500000000 35.500000000 -13.500000000 -18.500000000 35.500000000 -12.500000000 -18.500000000 35.500000000 
-11.500000000 -18.500000000 35.500000000 -10.500000000 -18.500000000 35.500000000 -9.500000000 -18.500000000 35.500000000 -8.500000000 -18.500000000 35.500000000 -7.500000000 -18.500000000 35.500000000 -6.500000000 -18.500000000 35.500000000 -5.500000000 -18.500000000 35.500000000 -4.500000000 -18.500000000 35.500000000 -3.500000000 -18.500000000 35.500000000 -2.500000000 -18.500000000 35.500000000 -1.500000000 -18.500000000 35.500000000 -0.500000000 -18.500000000 35.500000000 0.500000000 -18.500000000 35.500000000 1.500000000 -18.500000000 35.500000000 2.500000000 -18.500000000 35.500000000 3.500000000 -18.500000000 35.500000000 4.500000000 -18.500000000 35.500000000 5.500000000 -18.500000000 35.500000000 6.500000000 -18.500000000 35.500000000 7.500000000 -18.500000000 35.500000000 8.500000000 -18.500000000 35.500000000 9.500000000 -18.500000000 35.500000000 10.500000000 -18.500000000 35.500000000 11.500000000 -18.500000000 35.500000000 12.500000000 -18.500000000 35.500000000 13.500000000 -18.500000000 35.500000000 14.500000000 -18.500000000 35.500000000 15.500000000 -18.500000000 35.500000000 16.500000000 -18.500000000 35.500000000 17.500000000 -18.500000000 35.500000000 18.500000000 -18.500000000 35.500000000 19.500000000 -18.500000000 35.500000000 20.500000000 -18.500000000 35.500000000 21.500000000 -18.500000000 35.500000000 22.500000000 -18.500000000 35.500000000 23.500000000 -18.500000000 35.500000000 24.500000000 -18.500000000 35.500000000 25.499996185 -18.500000000 35.499996185 26.499954224 -18.500000000 35.499954224 27.499591827 -18.500000000 35.499591827 28.497470856 -18.500000000 35.497467041 29.488407135 -18.500000000 35.488403320 30.458978653 -18.500000000 35.458980560 31.384418488 -18.500000000 35.384422302 32.233222961 -18.500000000 35.233222961 32.981101990 -18.500000000 34.981101990 -33.981101990 -17.500000000 34.981101990 -33.233226776 -17.500000000 35.233222961 -32.384422302 -17.500000000 35.384418488 -31.458978653 -17.500000000 35.458980560 
-30.488407135 -17.500000000 35.488403320 -29.497472763 -17.500000000 35.497474670 -28.499593735 -17.500000000 35.499591827 -27.499954224 -17.500000000 35.499954224 -26.499996185 -17.500000000 35.499996185 -25.500000000 -17.500000000 35.500000000 -24.500000000 -17.500000000 35.500000000 -23.500000000 -17.500000000 35.500000000 -22.500000000 -17.500000000 35.500000000 -21.500000000 -17.500000000 35.500000000 -20.500000000 -17.500000000 35.500000000 -19.500000000 -17.500000000 35.500000000 -18.500000000 -17.500000000 35.500000000 -17.500000000 -17.500000000 35.500000000 -16.500000000 -17.500000000 35.500000000 -15.500000000 -17.500000000 35.500000000 -14.500000000 -17.500000000 35.500000000 -13.500000000 -17.500000000 35.500000000 -12.500000000 -17.500000000 35.500000000 -11.500000000 -17.500000000 35.500000000 -10.500000000 -17.500000000 35.500000000 -9.500000000 -17.500000000 35.500000000 -8.500000000 -17.500000000 35.500000000 -7.500000000 -17.500000000 35.500000000 -6.500000000 -17.500000000 35.500000000 -5.500000000 -17.500000000 35.500000000 -4.500000000 -17.500000000 35.500000000 -3.500000000 -17.500000000 35.500000000 -2.500000000 -17.500000000 35.500000000 -1.500000000 -17.500000000 35.500000000 -0.500000000 -17.500000000 35.500000000 0.500000000 -17.500000000 35.500000000 1.500000000 -17.500000000 35.500000000 2.500000000 -17.500000000 35.500000000 3.500000000 -17.500000000 35.500000000 4.500000000 -17.500000000 35.500000000 5.500000000 -17.500000000 35.500000000 6.500000000 -17.500000000 35.500000000 7.500000000 -17.500000000 35.500000000 8.500000000 -17.500000000 35.500000000 9.500000000 -17.500000000 35.500000000 10.500000000 -17.500000000 35.500000000 11.500000000 -17.500000000 35.500000000 12.500000000 -17.500000000 35.500000000 13.500000000 -17.500000000 35.500000000 14.500000000 -17.500000000 35.500000000 15.500000000 -17.500000000 35.500000000 16.500000000 -17.500000000 35.500000000 17.500000000 -17.500000000 35.500000000 18.500000000 -17.500000000 
35.500000000 19.500000000 -17.500000000 35.500000000 20.500000000 -17.500000000 35.500000000 21.500000000 -17.500000000 35.500000000 22.500000000 -17.500000000 35.500000000 23.500000000 -17.500000000 35.500000000 24.500000000 -17.500000000 35.500000000 25.499996185 -17.500000000 35.499996185 26.499954224 -17.500000000 35.499954224 27.499591827 -17.500000000 35.499591827 28.497470856 -17.500000000 35.497467041 29.488407135 -17.500000000 35.488403320 30.458978653 -17.500000000 35.458980560 31.384418488 -17.500000000 35.384422302 32.233222961 -17.500000000 35.233222961 32.981101990 -17.500000000 34.981101990 -33.981101990 -16.500000000 34.981101990 -33.233226776 -16.500000000 35.233222961 -32.384422302 -16.500000000 35.384418488 -31.458978653 -16.500000000 35.458980560 -30.488407135 -16.500000000 35.488403320 -29.497472763 -16.500000000 35.497474670 -28.499593735 -16.500000000 35.499591827 -27.499954224 -16.500000000 35.499954224 -26.499996185 -16.500000000 35.499996185 -25.500000000 -16.500000000 35.500000000 -24.500000000 -16.500000000 35.500000000 -23.500000000 -16.500000000 35.500000000 -22.500000000 -16.500000000 35.500000000 -21.500000000 -16.500000000 35.500000000 -20.500000000 -16.500000000 35.500000000 -19.500000000 -16.500000000 35.500000000 -18.500000000 -16.500000000 35.500000000 -17.500000000 -16.500000000 35.500000000 -16.500000000 -16.500000000 35.500000000 -15.500000000 -16.500000000 35.500000000 -14.500000000 -16.500000000 35.500000000 -13.500000000 -16.500000000 35.500000000 -12.500000000 -16.500000000 35.500000000 -11.500000000 -16.500000000 35.500000000 -10.500000000 -16.500000000 35.500000000 -9.500000000 -16.500000000 35.500000000 -8.500000000 -16.500000000 35.500000000 -7.500000000 -16.500000000 35.500000000 -6.500000000 -16.500000000 35.500000000 -5.500000000 -16.500000000 35.500000000 -4.500000000 -16.500000000 35.500000000 -3.500000000 -16.500000000 35.500000000 -2.500000000 -16.500000000 35.500000000 -1.500000000 -16.500000000 35.500000000 
-0.500000000 -16.500000000 35.500000000 0.500000000 -16.500000000 35.500000000 1.500000000 -16.500000000 35.500000000 2.500000000 -16.500000000 35.500000000 3.500000000 -16.500000000 35.500000000 4.500000000 -16.500000000 35.500000000 5.500000000 -16.500000000 35.500000000 6.500000000 -16.500000000 35.500000000 7.500000000 -16.500000000 35.500000000 8.500000000 -16.500000000 35.500000000 9.500000000 -16.500000000 35.500000000 10.500000000 -16.500000000 35.500000000 11.500000000 -16.500000000 35.500000000 12.500000000 -16.500000000 35.500000000 13.500000000 -16.500000000 35.500000000 14.500000000 -16.500000000 35.500000000 15.500000000 -16.500000000 35.500000000 16.500000000 -16.500000000 35.500000000 17.500000000 -16.500000000 35.500000000 18.500000000 -16.500000000 35.500000000 19.500000000 -16.500000000 35.500000000 20.500000000 -16.500000000 35.500000000 21.500000000 -16.500000000 35.500000000 22.500000000 -16.500000000 35.500000000 23.500000000 -16.500000000 35.500000000 24.500000000 -16.500000000 35.500000000 25.499996185 -16.500000000 35.499996185 26.499954224 -16.500000000 35.499954224 27.499591827 -16.500000000 35.499591827 28.497470856 -16.500000000 35.497467041 29.488407135 -16.500000000 35.488403320 30.458978653 -16.500000000 35.458980560 31.384418488 -16.500000000 35.384422302 32.233222961 -16.500000000 35.233222961 32.981101990 -16.500000000 34.981101990 -33.981101990 -15.500000000 34.981101990 -33.233226776 -15.500000000 35.233222961 -32.384422302 -15.500000000 35.384418488 -31.458978653 -15.500000000 35.458980560 -30.488407135 -15.500000000 35.488403320 -29.497472763 -15.500000000 35.497474670 -28.499593735 -15.500000000 35.499591827 -27.499954224 -15.500000000 35.499954224 -26.499996185 -15.500000000 35.499996185 -25.500000000 -15.500000000 35.500000000 -24.500000000 -15.500000000 35.500000000 -23.500000000 -15.500000000 35.500000000 -22.500000000 -15.500000000 35.500000000 -21.500000000 -15.500000000 35.500000000 -20.500000000 -15.500000000 
35.500000000 -19.500000000 -15.500000000 35.500000000 -18.500000000 -15.500000000 35.500000000 -17.500000000 -15.500000000 35.500000000 -16.500000000 -15.500000000 35.500000000 -15.500000000 -15.500000000 35.500000000 -14.500000000 -15.500000000 35.500000000 -13.500000000 -15.500000000 35.500000000 -12.500000000 -15.500000000 35.500000000 -11.500000000 -15.500000000 35.500000000 -10.500000000 -15.500000000 35.500000000 -9.500000000 -15.500000000 35.500000000 -8.500000000 -15.500000000 35.500000000 -7.500000000 -15.500000000 35.500000000 -6.500000000 -15.500000000 35.500000000 -5.500000000 -15.500000000 35.500000000 -4.500000000 -15.500000000 35.500000000 -3.500000000 -15.500000000 35.500000000 -2.500000000 -15.500000000 35.500000000 -1.500000000 -15.500000000 35.500000000 -0.500000000 -15.500000000 35.500000000 0.500000000 -15.500000000 35.500000000 1.500000000 -15.500000000 35.500000000 2.500000000 -15.500000000 35.500000000 3.500000000 -15.500000000 35.500000000 4.500000000 -15.500000000 35.500000000 5.500000000 -15.500000000 35.500000000 6.500000000 -15.500000000 35.500000000 7.500000000 -15.500000000 35.500000000 8.500000000 -15.500000000 35.500000000 9.500000000 -15.500000000 35.500000000 10.500000000 -15.500000000 35.500000000 11.500000000 -15.500000000 35.500000000 12.500000000 -15.500000000 35.500000000 13.500000000 -15.500000000 35.500000000 14.500000000 -15.500000000 35.500000000 15.500000000 -15.500000000 35.500000000 16.500000000 -15.500000000 35.500000000 17.500000000 -15.500000000 35.500000000 18.500000000 -15.500000000 35.500000000 19.500000000 -15.500000000 35.500000000 20.500000000 -15.500000000 35.500000000 21.500000000 -15.500000000 35.500000000 22.500000000 -15.500000000 35.500000000 23.500000000 -15.500000000 35.500000000 24.500000000 -15.500000000 35.500000000 25.499996185 -15.500000000 35.499996185 26.499954224 -15.500000000 35.499954224 27.499591827 -15.500000000 35.499591827 28.497470856 -15.500000000 35.497467041 29.488407135 -15.500000000 
35.488403320 30.458978653 -15.500000000 35.458980560 31.384418488 -15.500000000 35.384422302 32.233222961 -15.500000000 35.233222961 32.981101990 -15.500000000 34.981101990 -33.981101990 -14.500000000 34.981101990 -33.233226776 -14.500000000 35.233222961 -32.384422302 -14.500000000 35.384418488 -31.458978653 -14.500000000 35.458980560 -30.488407135 -14.500000000 35.488403320 -29.497472763 -14.500000000 35.497474670 -28.499593735 -14.500000000 35.499591827 -27.499954224 -14.500000000 35.499954224 -26.499996185 -14.500000000 35.499996185 -25.500000000 -14.500000000 35.500000000 -24.500000000 -14.500000000 35.500000000 -23.500000000 -14.500000000 35.500000000 -22.500000000 -14.500000000 35.500000000 -21.500000000 -14.500000000 35.500000000 -20.500000000 -14.500000000 35.500000000 -19.500000000 -14.500000000 35.500000000 -18.500000000 -14.500000000 35.500000000 -17.500000000 -14.500000000 35.500000000 -16.500000000 -14.500000000 35.500000000 -15.500000000 -14.500000000 35.500000000 -14.500000000 -14.500000000 35.500000000 -13.500000000 -14.500000000 35.500000000 -12.500000000 -14.500000000 35.500000000 -11.500000000 -14.500000000 35.500000000 -10.500000000 -14.500000000 35.500000000 -9.500000000 -14.500000000 35.500000000 -8.500000000 -14.500000000 35.500000000 -7.500000000 -14.500000000 35.500000000 -6.500000000 -14.500000000 35.500000000 -5.500000000 -14.500000000 35.500000000 -4.500000000 -14.500000000 35.500000000 -3.500000000 -14.500000000 35.500000000 -2.500000000 -14.500000000 35.500000000 -1.500000000 -14.500000000 35.500000000 -0.500000000 -14.500000000 35.500000000 0.500000000 -14.500000000 35.500000000 1.500000000 -14.500000000 35.500000000 2.500000000 -14.500000000 35.500000000 3.500000000 -14.500000000 35.500000000 4.500000000 -14.500000000 35.500000000 5.500000000 -14.500000000 35.500000000 6.500000000 -14.500000000 35.500000000 7.500000000 -14.500000000 35.500000000 8.500000000 -14.500000000 35.500000000 9.500000000 -14.500000000 35.500000000 
10.500000000 -14.500000000 35.500000000 11.500000000 -14.500000000 35.500000000 12.500000000 -14.500000000 35.500000000 13.500000000 -14.500000000 35.500000000 14.500000000 -14.500000000 35.500000000 15.500000000 -14.500000000 35.500000000 16.500000000 -14.500000000 35.500000000 17.500000000 -14.500000000 35.500000000 18.500000000 -14.500000000 35.500000000 19.500000000 -14.500000000 35.500000000 20.500000000 -14.500000000 35.500000000 21.500000000 -14.500000000 35.500000000 22.500000000 -14.500000000 35.500000000 23.500000000 -14.500000000 35.500000000 24.500000000 -14.500000000 35.500000000 25.499996185 -14.500000000 35.499996185 26.499954224 -14.500000000 35.499954224 27.499591827 -14.500000000 35.499591827 28.497470856 -14.500000000 35.497467041 29.488407135 -14.500000000 35.488403320 30.458978653 -14.500000000 35.458980560 31.384418488 -14.500000000 35.384422302 32.233222961 -14.500000000 35.233222961 32.981101990 -14.500000000 34.981101990 -33.981101990 -13.500000000 34.981101990 -33.233226776 -13.500000000 35.233222961 -32.384422302 -13.500000000 35.384418488 -31.458978653 -13.500000000 35.458980560 -30.488407135 -13.500000000 35.488403320 -29.497472763 -13.500000000 35.497474670 -28.499593735 -13.500000000 35.499591827 -27.499954224 -13.500000000 35.499954224 -26.499996185 -13.500000000 35.499996185 -25.500000000 -13.500000000 35.500000000 -24.500000000 -13.500000000 35.500000000 -23.500000000 -13.500000000 35.500000000 -22.500000000 -13.500000000 35.500000000 -21.500000000 -13.500000000 35.500000000 -20.500000000 -13.500000000 35.500000000 -19.500000000 -13.500000000 35.500000000 -18.500000000 -13.500000000 35.500000000 -17.500000000 -13.500000000 35.500000000 -16.500000000 -13.500000000 35.500000000 -15.500000000 -13.500000000 35.500000000 -14.500000000 -13.500000000 35.500000000 -13.500000000 -13.500000000 35.500000000 -12.500000000 -13.500000000 35.500000000 -11.500000000 -13.500000000 35.500000000 -10.500000000 -13.500000000 35.500000000 -9.500000000 
-13.500000000 35.500000000 -8.500000000 -13.500000000 35.500000000 -7.500000000 -13.500000000 35.500000000 -6.500000000 -13.500000000 35.500000000 -5.500000000 -13.500000000 35.500000000 -4.500000000 -13.500000000 35.500000000 -3.500000000 -13.500000000 35.500000000 -2.500000000 -13.500000000 35.500000000 -1.500000000 -13.500000000 35.500000000 -0.500000000 -13.500000000 35.500000000 0.500000000 -13.500000000 35.500000000 1.500000000 -13.500000000 35.500000000 2.500000000 -13.500000000 35.500000000 3.500000000 -13.500000000 35.500000000 4.500000000 -13.500000000 35.500000000 5.500000000 -13.500000000 35.500000000 6.500000000 -13.500000000 35.500000000 7.500000000 -13.500000000 35.500000000 8.500000000 -13.500000000 35.500000000 9.500000000 -13.500000000 35.500000000 10.500000000 -13.500000000 35.500000000 11.500000000 -13.500000000 35.500000000 12.500000000 -13.500000000 35.500000000 13.500000000 -13.500000000 35.500000000 14.500000000 -13.500000000 35.500000000 15.500000000 -13.500000000 35.500000000 16.500000000 -13.500000000 35.500000000 17.500000000 -13.500000000 35.500000000 18.500000000 -13.500000000 35.500000000 19.500000000 -13.500000000 35.500000000 20.500000000 -13.500000000 35.500000000 21.500000000 -13.500000000 35.500000000 22.500000000 -13.500000000 35.500000000 23.500000000 -13.500000000 35.500000000 24.500000000 -13.500000000 35.500000000 25.499996185 -13.500000000 35.499996185 26.499954224 -13.500000000 35.499954224 27.499591827 -13.500000000 35.499591827 28.497470856 -13.500000000 35.497467041 29.488407135 -13.500000000 35.488403320 30.458978653 -13.500000000 35.458980560 31.384418488 -13.500000000 35.384422302 32.233222961 -13.500000000 35.233222961 32.981101990 -13.500000000 34.981101990 -33.981101990 -12.500000000 34.981101990 -33.233226776 -12.500000000 35.233222961 -32.384422302 -12.500000000 35.384418488 -31.458978653 -12.500000000 35.458980560 -30.488407135 -12.500000000 35.488403320 -29.497472763 -12.500000000 35.497474670 -28.499593735 
-12.500000000 35.499591827 -27.499954224 -12.500000000 35.499954224 -26.499996185 -12.500000000 35.499996185 -25.500000000 -12.500000000 35.500000000 -24.500000000 -12.500000000 35.500000000 -23.500000000 -12.500000000 35.500000000 -22.500000000 -12.500000000 35.500000000 -21.500000000 -12.500000000 35.500000000 -20.500000000 -12.500000000 35.500000000 -19.500000000 -12.500000000 35.500000000 -18.500000000 -12.500000000 35.500000000 -17.500000000 -12.500000000 35.500000000 -16.500000000 -12.500000000 35.500000000 -15.500000000 -12.500000000 35.500000000 -14.500000000 -12.500000000 35.500000000 -13.500000000 -12.500000000 35.500000000 -12.500000000 -12.500000000 35.500000000 -11.500000000 -12.500000000 35.500000000 -10.500000000 -12.500000000 35.500000000 -9.500000000 -12.500000000 35.500000000 -8.500000000 -12.500000000 35.500000000 -7.500000000 -12.500000000 35.500000000 -6.500000000 -12.500000000 35.500000000 -5.500000000 -12.500000000 35.500000000 -4.500000000 -12.500000000 35.500000000 -3.500000000 -12.500000000 35.500000000 -2.500000000 -12.500000000 35.500000000 -1.500000000 -12.500000000 35.500000000 -0.500000000 -12.500000000 35.500000000 0.500000000 -12.500000000 35.500000000 1.500000000 -12.500000000 35.500000000 2.500000000 -12.500000000 35.500000000 3.500000000 -12.500000000 35.500000000 4.500000000 -12.500000000 35.500000000 5.500000000 -12.500000000 35.500000000 6.500000000 -12.500000000 35.500000000 7.500000000 -12.500000000 35.500000000 8.500000000 -12.500000000 35.500000000 9.500000000 -12.500000000 35.500000000 10.500000000 -12.500000000 35.500000000 11.500000000 -12.500000000 35.500000000 12.500000000 -12.500000000 35.500000000 13.500000000 -12.500000000 35.500000000 14.500000000 -12.500000000 35.500000000 15.500000000 -12.500000000 35.500000000 16.500000000 -12.500000000 35.500000000 17.500000000 -12.500000000 35.500000000 18.500000000 -12.500000000 35.500000000 19.500000000 -12.500000000 35.500000000 20.500000000 -12.500000000 35.500000000 
21.500000000 -12.500000000 35.500000000 22.500000000 -12.500000000 35.500000000 23.500000000 -12.500000000 35.500000000 24.500000000 -12.500000000 35.500000000 25.499996185 -12.500000000 35.499996185 26.499954224 -12.500000000 35.499954224 27.499591827 -12.500000000 35.499591827 28.497470856 -12.500000000 35.497467041 29.488407135 -12.500000000 35.488403320 30.458978653 -12.500000000 35.458980560 31.384418488 -12.500000000 35.384422302 32.233222961 -12.500000000 35.233222961 32.981101990 -12.500000000 34.981101990 -33.981101990 -11.500000000 34.981101990 -33.233226776 -11.500000000 35.233222961 -32.384422302 -11.500000000 35.384418488 -31.458978653 -11.500000000 35.458980560 -30.488407135 -11.500000000 35.488403320 -29.497472763 -11.500000000 35.497474670 -28.499593735 -11.500000000 35.499591827 -27.499954224 -11.500000000 35.499954224 -26.499996185 -11.500000000 35.499996185 -25.500000000 -11.500000000 35.500000000 -24.500000000 -11.500000000 35.500000000 -23.500000000 -11.500000000 35.500000000 -22.500000000 -11.500000000 35.500000000 -21.500000000 -11.500000000 35.500000000 -20.500000000 -11.500000000 35.500000000 -19.500000000 -11.500000000 35.500000000 -18.500000000 -11.500000000 35.500000000 -17.500000000 -11.500000000 35.500000000 -16.500000000 -11.500000000 35.500000000 -15.500000000 -11.500000000 35.500000000 -14.500000000 -11.500000000 35.500000000 -13.500000000 -11.500000000 35.500000000 -12.500000000 -11.500000000 35.500000000 -11.500000000 -11.500000000 35.500000000 -10.500000000 -11.500000000 35.500000000 -9.500000000 -11.500000000 35.500000000 -8.500000000 -11.500000000 35.500000000 -7.500000000 -11.500000000 35.500000000 -6.500000000 -11.500000000 35.500000000 -5.500000000 -11.500000000 35.500000000 -4.500000000 -11.500000000 35.500000000 -3.500000000 -11.500000000 35.500000000 -2.500000000 -11.500000000 35.500000000 -1.500000000 -11.500000000 35.500000000 -0.500000000 -11.500000000 35.500000000 0.500000000 -11.500000000 35.500000000 1.500000000 
-11.500000000 35.500000000 2.500000000 -11.500000000 35.500000000 3.500000000 -11.500000000 35.500000000 4.500000000 -11.500000000 35.500000000 5.500000000 -11.500000000 35.500000000 6.500000000 -11.500000000 35.500000000 7.500000000 -11.500000000 35.500000000 8.500000000 -11.500000000 35.500000000 9.500000000 -11.500000000 35.500000000 10.500000000 -11.500000000 35.500000000 11.500000000 -11.500000000 35.500000000 12.500000000 -11.500000000 35.500000000 13.500000000 -11.500000000 35.500000000 14.500000000 -11.500000000 35.500000000 15.500000000 -11.500000000 35.500000000 16.500000000 -11.500000000 35.500000000 17.500000000 -11.500000000 35.500000000 18.500000000 -11.500000000 35.500000000 19.500000000 -11.500000000 35.500000000 20.500000000 -11.500000000 35.500000000 21.500000000 -11.500000000 35.500000000 22.500000000 -11.500000000 35.500000000 23.500000000 -11.500000000 35.500000000 24.500000000 -11.500000000 35.500000000 25.499996185 -11.500000000 35.499996185 26.499954224 -11.500000000 35.499954224 27.499591827 -11.500000000 35.499591827 28.497470856 -11.500000000 35.497467041 29.488407135 -11.500000000 35.488403320 30.458978653 -11.500000000 35.458980560 31.384418488 -11.500000000 35.384422302 32.233222961 -11.500000000 35.233222961 32.981101990 -11.500000000 34.981101990 -33.981101990 -10.500000000 34.981101990 -33.233226776 -10.500000000 35.233222961 -32.384422302 -10.500000000 35.384418488 -31.458978653 -10.500000000 35.458980560 -30.488407135 -10.500000000 35.488403320 -29.497472763 -10.500000000 35.497474670 -28.499593735 -10.500000000 35.499591827 -27.499954224 -10.500000000 35.499954224 -26.499996185 -10.500000000 35.499996185 -25.500000000 -10.500000000 35.500000000 -24.500000000 -10.500000000 35.500000000 -23.500000000 -10.500000000 35.500000000 -22.500000000 -10.500000000 35.500000000 -21.500000000 -10.500000000 35.500000000 -20.500000000 -10.500000000 35.500000000 -19.500000000 -10.500000000 35.500000000 -18.500000000 -10.500000000 35.500000000 
-17.500000000 -10.500000000 35.500000000 -16.500000000 -10.500000000 35.500000000 -15.500000000 -10.500000000 35.500000000 -14.500000000 -10.500000000 35.500000000 -13.500000000 -10.500000000 35.500000000 -12.500000000 -10.500000000 35.500000000 -11.500000000 -10.500000000 35.500000000 -10.500000000 -10.500000000 35.500000000 -9.500000000 -10.500000000 35.500000000 -8.500000000 -10.500000000 35.500000000 -7.500000000 -10.500000000 35.500000000 -6.500000000 -10.500000000 35.500000000 -5.500000000 -10.500000000 35.500000000 -4.500000000 -10.500000000 35.500000000 -3.500000000 -10.500000000 35.500000000 -2.500000000 -10.500000000 35.500000000 -1.500000000 -10.500000000 35.500000000 -0.500000000 -10.500000000 35.500000000 0.500000000 -10.500000000 35.500000000 1.500000000 -10.500000000 35.500000000 2.500000000 -10.500000000 35.500000000 3.500000000 -10.500000000 35.500000000 4.500000000 -10.500000000 35.500000000 5.500000000 -10.500000000 35.500000000 6.500000000 -10.500000000 35.500000000 7.500000000 -10.500000000 35.500000000 8.500000000 -10.500000000 35.500000000 9.500000000 -10.500000000 35.500000000 10.500000000 -10.500000000 35.500000000 11.500000000 -10.500000000 35.500000000 12.500000000 -10.500000000 35.500000000 13.500000000 -10.500000000 35.500000000 14.500000000 -10.500000000 35.500000000 15.500000000 -10.500000000 35.500000000 16.500000000 -10.500000000 35.500000000 17.500000000 -10.500000000 35.500000000 18.500000000 -10.500000000 35.500000000 19.500000000 -10.500000000 35.500000000 20.500000000 -10.500000000 35.500000000 21.500000000 -10.500000000 35.500000000 22.500000000 -10.500000000 35.500000000 23.500000000 -10.500000000 35.500000000 24.500000000 -10.500000000 35.500000000 25.499996185 -10.500000000 35.499996185 26.499954224 -10.500000000 35.499954224 27.499591827 -10.500000000 35.499591827 28.497470856 -10.500000000 35.497467041 29.488407135 -10.500000000 35.488403320 30.458978653 -10.500000000 35.458980560 31.384418488 -10.500000000 35.384422302 
32.233222961 -10.500000000 35.233222961 32.981101990 -10.500000000 34.981101990 -33.981101990 -9.500000000 34.981101990 -33.233226776 -9.500000000 35.233222961 -32.384422302 -9.500000000 35.384418488 -31.458978653 -9.500000000 35.458980560 -30.488407135 -9.500000000 35.488403320 -29.497472763 -9.500000000 35.497474670 -28.499593735 -9.500000000 35.499591827 -27.499954224 -9.500000000 35.499954224 -26.499996185 -9.500000000 35.499996185 -25.500000000 -9.500000000 35.500000000 -24.500000000 -9.500000000 35.500000000 -23.500000000 -9.500000000 35.500000000 -22.500000000 -9.500000000 35.500000000 -21.500000000 -9.500000000 35.500000000 -20.500000000 -9.500000000 35.500000000 -19.500000000 -9.500000000 35.500000000 -18.500000000 -9.500000000 35.500000000 -17.500000000 -9.500000000 35.500000000 -16.500000000 -9.500000000 35.500000000 -15.500000000 -9.500000000 35.500000000 -14.500000000 -9.500000000 35.500000000 -13.500000000 -9.500000000 35.500000000 -12.500000000 -9.500000000 35.500000000 -11.500000000 -9.500000000 35.500000000 -10.500000000 -9.500000000 35.500000000 -9.500000000 -9.500000000 35.500000000 -8.500000000 -9.500000000 35.500000000 -7.500000000 -9.500000000 35.500000000 -6.500000000 -9.500000000 35.500000000 -5.500000000 -9.500000000 35.500000000 -4.500000000 -9.500000000 35.500000000 -3.500000000 -9.500000000 35.500000000 -2.500000000 -9.500000000 35.500000000 -1.500000000 -9.500000000 35.500000000 -0.500000000 -9.500000000 35.500000000 0.500000000 -9.500000000 35.500000000 1.500000000 -9.500000000 35.500000000 2.500000000 -9.500000000 35.500000000 3.500000000 -9.500000000 35.500000000 4.500000000 -9.500000000 35.500000000 5.500000000 -9.500000000 35.500000000 6.500000000 -9.500000000 35.500000000 7.500000000 -9.500000000 35.500000000 8.500000000 -9.500000000 35.500000000 9.500000000 -9.500000000 35.500000000 10.500000000 -9.500000000 35.500000000 11.500000000 -9.500000000 35.500000000 12.500000000 -9.500000000 35.500000000 13.500000000 -9.500000000 
35.500000000 14.500000000 -9.500000000 35.500000000 15.500000000 -9.500000000 35.500000000 16.500000000 -9.500000000 35.500000000 17.500000000 -9.500000000 35.500000000 18.500000000 -9.500000000 35.500000000 19.500000000 -9.500000000 35.500000000 20.500000000 -9.500000000 35.500000000 21.500000000 -9.500000000 35.500000000 22.500000000 -9.500000000 35.500000000 23.500000000 -9.500000000 35.500000000 24.500000000 -9.500000000 35.500000000 25.499996185 -9.500000000 35.499996185 26.499954224 -9.500000000 35.499954224 27.499591827 -9.500000000 35.499591827 28.497470856 -9.500000000 35.497467041 29.488407135 -9.500000000 35.488403320 30.458978653 -9.500000000 35.458980560 31.384418488 -9.500000000 35.384422302 32.233222961 -9.500000000 35.233222961 32.981101990 -9.500000000 34.981101990 -33.981101990 -8.500000000 34.981101990 -33.233226776 -8.500000000 35.233222961 -32.384422302 -8.500000000 35.384418488 -31.458978653 -8.500000000 35.458980560 -30.488407135 -8.500000000 35.488403320 -29.497472763 -8.500000000 35.497474670 -28.499593735 -8.500000000 35.499591827 -27.499954224 -8.500000000 35.499954224 -26.499996185 -8.500000000 35.499996185 -25.500000000 -8.500000000 35.500000000 -24.500000000 -8.500000000 35.500000000 -23.500000000 -8.500000000 35.500000000 -22.500000000 -8.500000000 35.500000000 -21.500000000 -8.500000000 35.500000000 -20.500000000 -8.500000000 35.500000000 -19.500000000 -8.500000000 35.500000000 -18.500000000 -8.500000000 35.500000000 -17.500000000 -8.500000000 35.500000000 -16.500000000 -8.500000000 35.500000000 -15.500000000 -8.500000000 35.500000000 -14.500000000 -8.500000000 35.500000000 -13.500000000 -8.500000000 35.500000000 -12.500000000 -8.500000000 35.500000000 -11.500000000 -8.500000000 35.500000000 -10.500000000 -8.500000000 35.500000000 -9.500000000 -8.500000000 35.500000000 -8.500000000 -8.500000000 35.500000000 -7.500000000 -8.500000000 35.500000000 -6.500000000 -8.500000000 35.500000000 -5.500000000 -8.500000000 35.500000000 
-4.500000000 -8.500000000 35.500000000 -3.500000000 -8.500000000 35.500000000 -2.500000000 -8.500000000 35.500000000 -1.500000000 -8.500000000 35.500000000 -0.500000000 -8.500000000 35.500000000 0.500000000 -8.500000000 35.500000000 1.500000000 -8.500000000 35.500000000 2.500000000 -8.500000000 35.500000000 3.500000000 -8.500000000 35.500000000 4.500000000 -8.500000000 35.500000000 5.500000000 -8.500000000 35.500000000 6.500000000 -8.500000000 35.500000000 7.500000000 -8.500000000 35.500000000 8.500000000 -8.500000000 35.500000000 9.500000000 -8.500000000 35.500000000 10.500000000 -8.500000000 35.500000000 11.500000000 -8.500000000 35.500000000 12.500000000 -8.500000000 35.500000000 13.500000000 -8.500000000 35.500000000 14.500000000 -8.500000000 35.500000000 15.500000000 -8.500000000 35.500000000 16.500000000 -8.500000000 35.500000000 17.500000000 -8.500000000 35.500000000 18.500000000 -8.500000000 35.500000000 19.500000000 -8.500000000 35.500000000 20.500000000 -8.500000000 35.500000000 21.500000000 -8.500000000 35.500000000 22.500000000 -8.500000000 35.500000000 23.500000000 -8.500000000 35.500000000 24.500000000 -8.500000000 35.500000000 25.499996185 -8.500000000 35.499996185 26.499954224 -8.500000000 35.499954224 27.499591827 -8.500000000 35.499591827 28.497470856 -8.500000000 35.497467041 29.488407135 -8.500000000 35.488403320 30.458978653 -8.500000000 35.458980560 31.384418488 -8.500000000 35.384422302 32.233222961 -8.500000000 35.233222961 32.981101990 -8.500000000 34.981101990 -33.981101990 -7.500000000 34.981101990 -33.233226776 -7.500000000 35.233222961 -32.384422302 -7.500000000 35.384418488 -31.458978653 -7.500000000 35.458980560 -30.488407135 -7.500000000 35.488403320 -29.497472763 -7.500000000 35.497474670 -28.499593735 -7.500000000 35.499591827 -27.499954224 -7.500000000 35.499954224 -26.499996185 -7.500000000 35.499996185 -25.500000000 -7.500000000 35.500000000 -24.500000000 -7.500000000 35.500000000 -23.500000000 -7.500000000 35.500000000 
-22.500000000 -7.500000000 35.500000000 -21.500000000 -7.500000000 35.500000000 -20.500000000 -7.500000000 35.500000000 -19.500000000 -7.500000000 35.500000000 -18.500000000 -7.500000000 35.500000000 -17.500000000 -7.500000000 35.500000000 -16.500000000 -7.500000000 35.500000000 -15.500000000 -7.500000000 35.500000000 -14.500000000 -7.500000000 35.500000000 -13.500000000 -7.500000000 35.500000000 -12.500000000 -7.500000000 35.500000000 -11.500000000 -7.500000000 35.500000000 -10.500000000 -7.500000000 35.500000000 -9.500000000 -7.500000000 35.500000000 -8.500000000 -7.500000000 35.500000000 -7.500000000 -7.500000000 35.500000000 -6.500000000 -7.500000000 35.500000000 -5.500000000 -7.500000000 35.500000000 -4.500000000 -7.500000000 35.500000000 -3.500000000 -7.500000000 35.500000000 -2.500000000 -7.500000000 35.500000000 -1.500000000 -7.500000000 35.500000000 -0.500000000 -7.500000000 35.500000000 0.500000000 -7.500000000 35.500000000 1.500000000 -7.500000000 35.500000000 2.500000000 -7.500000000 35.500000000 3.500000000 -7.500000000 35.500000000 4.500000000 -7.500000000 35.500000000 5.500000000 -7.500000000 35.500000000 6.500000000 -7.500000000 35.500000000 7.500000000 -7.500000000 35.500000000 8.500000000 -7.500000000 35.500000000 9.500000000 -7.500000000 35.500000000 10.500000000 -7.500000000 35.500000000 11.500000000 -7.500000000 35.500000000 12.500000000 -7.500000000 35.500000000 13.500000000 -7.500000000 35.500000000 14.500000000 -7.500000000 35.500000000 15.500000000 -7.500000000 35.500000000 16.500000000 -7.500000000 35.500000000 17.500000000 -7.500000000 35.500000000 18.500000000 -7.500000000 35.500000000 19.500000000 -7.500000000 35.500000000 20.500000000 -7.500000000 35.500000000 21.500000000 -7.500000000 35.500000000 22.500000000 -7.500000000 35.500000000 23.500000000 -7.500000000 35.500000000 24.500000000 -7.500000000 35.500000000 25.499996185 -7.500000000 35.499996185 26.499954224 -7.500000000 35.499954224 27.499591827 -7.500000000 35.499591827 
28.497470856 -7.500000000 35.497467041 29.488407135 -7.500000000 35.488403320 30.458978653 -7.500000000 35.458980560 31.384418488 -7.500000000 35.384422302 32.233222961 -7.500000000 35.233222961 32.981101990 -7.500000000 34.981101990 -33.981101990 -6.500000000 34.981101990 -33.233226776 -6.500000000 35.233222961 -32.384422302 -6.500000000 35.384418488 -31.458978653 -6.500000000 35.458980560 -30.488407135 -6.500000000 35.488403320 -29.497472763 -6.500000000 35.497474670 -28.499593735 -6.500000000 35.499591827 -27.499954224 -6.500000000 35.499954224 -26.499996185 -6.500000000 35.499996185 -25.500000000 -6.500000000 35.500000000 -24.500000000 -6.500000000 35.500000000 -23.500000000 -6.500000000 35.500000000 -22.500000000 -6.500000000 35.500000000 -21.500000000 -6.500000000 35.500000000 -20.500000000 -6.500000000 35.500000000 -19.500000000 -6.500000000 35.500000000 -18.500000000 -6.500000000 35.500000000 -17.500000000 -6.500000000 35.500000000 -16.500000000 -6.500000000 35.500000000 -15.500000000 -6.500000000 35.500000000 -14.500000000 -6.500000000 35.500000000 -13.500000000 -6.500000000 35.500000000 -12.500000000 -6.500000000 35.500000000 -11.500000000 -6.500000000 35.500000000 -10.500000000 -6.500000000 35.500000000 -9.500000000 -6.500000000 35.500000000 -8.500000000 -6.500000000 35.500000000 -7.500000000 -6.500000000 35.500000000 -6.500000000 -6.500000000 35.500000000 -5.500000000 -6.500000000 35.500000000 -4.500000000 -6.500000000 35.500000000 -3.500000000 -6.500000000 35.500000000 -2.500000000 -6.500000000 35.500000000 -1.500000000 -6.500000000 35.500000000 -0.500000000 -6.500000000 35.500000000 0.500000000 -6.500000000 35.500000000 1.500000000 -6.500000000 35.500000000 2.500000000 -6.500000000 35.500000000 3.500000000 -6.500000000 35.500000000 4.500000000 -6.500000000 35.500000000 5.500000000 -6.500000000 35.500000000 6.500000000 -6.500000000 35.500000000 7.500000000 -6.500000000 35.500000000 8.500000000 -6.500000000 35.500000000 9.500000000 -6.500000000 
35.500000000 10.500000000 -6.500000000 35.500000000 11.500000000 -6.500000000 35.500000000 12.500000000 -6.500000000 35.500000000 13.500000000 -6.500000000 35.500000000 14.500000000 -6.500000000 35.500000000 15.500000000 -6.500000000 35.500000000 16.500000000 -6.500000000 35.500000000 17.500000000 -6.500000000 35.500000000 18.500000000 -6.500000000 35.500000000 19.500000000 -6.500000000 35.500000000 20.500000000 -6.500000000 35.500000000 21.500000000 -6.500000000 35.500000000 22.500000000 -6.500000000 35.500000000 23.500000000 -6.500000000 35.500000000 24.500000000 -6.500000000 35.500000000 25.499996185 -6.500000000 35.499996185 26.499954224 -6.500000000 35.499954224 27.499591827 -6.500000000 35.499591827 28.497470856 -6.500000000 35.497467041 29.488407135 -6.500000000 35.488403320 30.458978653 -6.500000000 35.458980560 31.384418488 -6.500000000 35.384422302 32.233222961 -6.500000000 35.233222961 32.981101990 -6.500000000 34.981101990 -33.981101990 -5.500000000 34.981101990 -33.233226776 -5.500000000 35.233222961 -32.384422302 -5.500000000 35.384418488 -31.458978653 -5.500000000 35.458980560 -30.488407135 -5.500000000 35.488403320 -29.497472763 -5.500000000 35.497474670 -28.499593735 -5.500000000 35.499591827 -27.499954224 -5.500000000 35.499954224 -26.499996185 -5.500000000 35.499996185 -25.500000000 -5.500000000 35.500000000 -24.500000000 -5.500000000 35.500000000 -23.500000000 -5.500000000 35.500000000 -22.500000000 -5.500000000 35.500000000 -21.500000000 -5.500000000 35.500000000 -20.500000000 -5.500000000 35.500000000 -19.500000000 -5.500000000 35.500000000 -18.500000000 -5.500000000 35.500000000 -17.500000000 -5.500000000 35.500000000 -16.500000000 -5.500000000 35.500000000 -15.500000000 -5.500000000 35.500000000 -14.500000000 -5.500000000 35.500000000 -13.500000000 -5.500000000 35.500000000 -12.500000000 -5.500000000 35.500000000 -11.500000000 -5.500000000 35.500000000 -10.500000000 -5.500000000 35.500000000 -9.500000000 -5.500000000 35.500000000 
-8.500000000 -5.500000000 35.500000000 -7.500000000 -5.500000000 35.500000000 -6.500000000 -5.500000000 35.500000000 -5.500000000 -5.500000000 35.500000000 -4.500000000 -5.500000000 35.500000000 -3.500000000 -5.500000000 35.500000000 -2.500000000 -5.500000000 35.500000000 -1.500000000 -5.500000000 35.500000000 -0.500000000 -5.500000000 35.500000000 0.500000000 -5.500000000 35.500000000 1.500000000 -5.500000000 35.500000000 2.500000000 -5.500000000 35.500000000 3.500000000 -5.500000000 35.500000000 4.500000000 -5.500000000 35.500000000 5.500000000 -5.500000000 35.500000000 6.500000000 -5.500000000 35.500000000 7.500000000 -5.500000000 35.500000000 8.500000000 -5.500000000 35.500000000 9.500000000 -5.500000000 35.500000000 10.500000000 -5.500000000 35.500000000 11.500000000 -5.500000000 35.500000000 12.500000000 -5.500000000 35.500000000 13.500000000 -5.500000000 35.500000000 14.500000000 -5.500000000 35.500000000 15.500000000 -5.500000000 35.500000000 16.500000000 -5.500000000 35.500000000 17.500000000 -5.500000000 35.500000000 18.500000000 -5.500000000 35.500000000 19.500000000 -5.500000000 35.500000000 20.500000000 -5.500000000 35.500000000 21.500000000 -5.500000000 35.500000000 22.500000000 -5.500000000 35.500000000 23.500000000 -5.500000000 35.500000000 24.500000000 -5.500000000 35.500000000 25.499996185 -5.500000000 35.499996185 26.499954224 -5.500000000 35.499954224 27.499591827 -5.500000000 35.499591827 28.497470856 -5.500000000 35.497467041 29.488407135 -5.500000000 35.488403320 30.458978653 -5.500000000 35.458980560 31.384418488 -5.500000000 35.384422302 32.233222961 -5.500000000 35.233222961 32.981101990 -5.500000000 34.981101990 -33.981101990 -4.500000000 34.981101990 -33.233226776 -4.500000000 35.233222961 -32.384422302 -4.500000000 35.384418488 -31.458978653 -4.500000000 35.458980560 -30.488407135 -4.500000000 35.488403320 -29.497472763 -4.500000000 35.497474670 -28.499593735 -4.500000000 35.499591827 -27.499954224 -4.500000000 35.499954224 
-26.499996185 -4.500000000 35.499996185 -25.500000000 -4.500000000 35.500000000 -24.500000000 -4.500000000 35.500000000 -23.500000000 -4.500000000 35.500000000 -22.500000000 -4.500000000 35.500000000 -21.500000000 -4.500000000 35.500000000 -20.500000000 -4.500000000 35.500000000 -19.500000000 -4.500000000 35.500000000 -18.500000000 -4.500000000 35.500000000 -17.500000000 -4.500000000 35.500000000 -16.500000000 -4.500000000 35.500000000 -15.500000000 -4.500000000 35.500000000 -14.500000000 -4.500000000 35.500000000 -13.500000000 -4.500000000 35.500000000 -12.500000000 -4.500000000 35.500000000 -11.500000000 -4.500000000 35.500000000 -10.500000000 -4.500000000 35.500000000 -9.500000000 -4.500000000 35.500000000 -8.500000000 -4.500000000 35.500000000 -7.500000000 -4.500000000 35.500000000 -6.500000000 -4.500000000 35.500000000 -5.500000000 -4.500000000 35.500000000 -4.500000000 -4.500000000 35.500000000 -3.500000000 -4.500000000 35.500000000 -2.500000000 -4.500000000 35.500000000 -1.500000000 -4.500000000 35.500000000 -0.500000000 -4.500000000 35.500000000 0.500000000 -4.500000000 35.500000000 1.500000000 -4.500000000 35.500000000 2.500000000 -4.500000000 35.500000000 3.500000000 -4.500000000 35.500000000 4.500000000 -4.500000000 35.500000000 5.500000000 -4.500000000 35.500000000 6.500000000 -4.500000000 35.500000000 7.500000000 -4.500000000 35.500000000 8.500000000 -4.500000000 35.500000000 9.500000000 -4.500000000 35.500000000 10.500000000 -4.500000000 35.500000000 11.500000000 -4.500000000 35.500000000 12.500000000 -4.500000000 35.500000000 13.500000000 -4.500000000 35.500000000 14.500000000 -4.500000000 35.500000000 15.500000000 -4.500000000 35.500000000 16.500000000 -4.500000000 35.500000000 17.500000000 -4.500000000 35.500000000 18.500000000 -4.500000000 35.500000000 19.500000000 -4.500000000 35.500000000 20.500000000 -4.500000000 35.500000000 21.500000000 -4.500000000 35.500000000 22.500000000 -4.500000000 35.500000000 23.500000000 -4.500000000 35.500000000 
24.500000000 -4.500000000 35.500000000 25.499996185 -4.500000000 35.499996185 26.499954224 -4.500000000 35.499954224 27.499591827 -4.500000000 35.499591827 28.497470856 -4.500000000 35.497467041 29.488407135 -4.500000000 35.488403320 30.458978653 -4.500000000 35.458980560 31.384418488 -4.500000000 35.384422302 32.233222961 -4.500000000 35.233222961 32.981101990 -4.500000000 34.981101990 -33.981101990 -3.500000000 34.981101990 -33.233226776 -3.500000000 35.233222961 -32.384422302 -3.500000000 35.384418488 -31.458978653 -3.500000000 35.458980560 -30.488407135 -3.500000000 35.488403320 -29.497472763 -3.500000000 35.497474670 -28.499593735 -3.500000000 35.499591827 -27.499954224 -3.500000000 35.499954224 -26.499996185 -3.500000000 35.499996185 -25.500000000 -3.500000000 35.500000000 -24.500000000 -3.500000000 35.500000000 -23.500000000 -3.500000000 35.500000000 -22.500000000 -3.500000000 35.500000000 -21.500000000 -3.500000000 35.500000000 -20.500000000 -3.500000000 35.500000000 -19.500000000 -3.500000000 35.500000000 -18.500000000 -3.500000000 35.500000000 -17.500000000 -3.500000000 35.500000000 -16.500000000 -3.500000000 35.500000000 -15.500000000 -3.500000000 35.500000000 -14.500000000 -3.500000000 35.500000000 -13.500000000 -3.500000000 35.500000000 -12.500000000 -3.500000000 35.500000000 -11.500000000 -3.500000000 35.500000000 -10.500000000 -3.500000000 35.500000000 -9.500000000 -3.500000000 35.500000000 -8.500000000 -3.500000000 35.500000000 -7.500000000 -3.500000000 35.500000000 -6.500000000 -3.500000000 35.500000000 -5.500000000 -3.500000000 35.500000000 -4.500000000 -3.500000000 35.500000000 -3.500000000 -3.500000000 35.500000000 -2.500000000 -3.500000000 35.500000000 -1.500000000 -3.500000000 35.500000000 -0.500000000 -3.500000000 35.500000000 0.500000000 -3.500000000 35.500000000 1.500000000 -3.500000000 35.500000000 2.500000000 -3.500000000 35.500000000 3.500000000 -3.500000000 35.500000000 4.500000000 -3.500000000 35.500000000 5.500000000 -3.500000000 
35.500000000 6.500000000 -3.500000000 35.500000000 7.500000000 -3.500000000 35.500000000 8.500000000 -3.500000000 35.500000000 9.500000000 -3.500000000 35.500000000 10.500000000 -3.500000000 35.500000000 11.500000000 -3.500000000 35.500000000 12.500000000 -3.500000000 35.500000000 13.500000000 -3.500000000 35.500000000 14.500000000 -3.500000000 35.500000000 15.500000000 -3.500000000 35.500000000 16.500000000 -3.500000000 35.500000000 17.500000000 -3.500000000 35.500000000 18.500000000 -3.500000000 35.500000000 19.500000000 -3.500000000 35.500000000 20.500000000 -3.500000000 35.500000000 21.500000000 -3.500000000 35.500000000 22.500000000 -3.500000000 35.500000000 23.500000000 -3.500000000 35.500000000 24.500000000 -3.500000000 35.500000000 25.499996185 -3.500000000 35.499996185 26.499954224 -3.500000000 35.499954224 27.499591827 -3.500000000 35.499591827 28.497470856 -3.500000000 35.497467041 29.488407135 -3.500000000 35.488403320 30.458978653 -3.500000000 35.458980560 31.384418488 -3.500000000 35.384422302 32.233222961 -3.500000000 35.233222961 32.981101990 -3.500000000 34.981101990 -33.981101990 -2.500000000 34.981101990 -33.233226776 -2.500000000 35.233222961 -32.384422302 -2.500000000 35.384418488 -31.458978653 -2.500000000 35.458980560 -30.488407135 -2.500000000 35.488403320 -29.497472763 -2.500000000 35.497474670 -28.499593735 -2.500000000 35.499591827 -27.499954224 -2.500000000 35.499954224 -26.499996185 -2.500000000 35.499996185 -25.500000000 -2.500000000 35.500000000 -24.500000000 -2.500000000 35.500000000 -23.500000000 -2.500000000 35.500000000 -22.500000000 -2.500000000 35.500000000 -21.500000000 -2.500000000 35.500000000 -20.500000000 -2.500000000 35.500000000 -19.500000000 -2.500000000 35.500000000 -18.500000000 -2.500000000 35.500000000 -17.500000000 -2.500000000 35.500000000 -16.500000000 -2.500000000 35.500000000 -15.500000000 -2.500000000 35.500000000 -14.500000000 -2.500000000 35.500000000 -13.500000000 -2.500000000 35.500000000 -12.500000000 
-2.500000000 35.500000000 -11.500000000 -2.500000000 35.500000000 -10.500000000 -2.500000000 35.500000000 -9.500000000 -2.500000000 35.500000000 -8.500000000 -2.500000000 35.500000000 -7.500000000 -2.500000000 35.500000000 -6.500000000 -2.500000000 35.500000000 -5.500000000 -2.500000000 35.500000000 -4.500000000 -2.500000000 35.500000000 -3.500000000 -2.500000000 35.500000000 -2.500000000 -2.500000000 35.500000000 -1.500000000 -2.500000000 35.500000000 -0.500000000 -2.500000000 35.500000000 0.500000000 -2.500000000 35.500000000 1.500000000 -2.500000000 35.500000000 2.500000000 -2.500000000 35.500000000 3.500000000 -2.500000000 35.500000000 4.500000000 -2.500000000 35.500000000 5.500000000 -2.500000000 35.500000000 6.500000000 -2.500000000 35.500000000 7.500000000 -2.500000000 35.500000000 8.500000000 -2.500000000 35.500000000 9.500000000 -2.500000000 35.500000000 10.500000000 -2.500000000 35.500000000 11.500000000 -2.500000000 35.500000000 12.500000000 -2.500000000 35.500000000 13.500000000 -2.500000000 35.500000000 14.500000000 -2.500000000 35.500000000 15.500000000 -2.500000000 35.500000000 16.500000000 -2.500000000 35.500000000 17.500000000 -2.500000000 35.500000000 18.500000000 -2.500000000 35.500000000 19.500000000 -2.500000000 35.500000000 20.500000000 -2.500000000 35.500000000 21.500000000 -2.500000000 35.500000000 22.500000000 -2.500000000 35.500000000 23.500000000 -2.500000000 35.500000000 24.500000000 -2.500000000 35.500000000 25.499996185 -2.500000000 35.499996185 26.499954224 -2.500000000 35.499954224 27.499591827 -2.500000000 35.499591827 28.497470856 -2.500000000 35.497467041 29.488407135 -2.500000000 35.488403320 30.458978653 -2.500000000 35.458980560 31.384418488 -2.500000000 35.384422302 32.233222961 -2.500000000 35.233222961 32.981101990 -2.500000000 34.981101990 -33.981101990 -1.500000000 34.981101990 -33.233226776 -1.500000000 35.233222961 -32.384422302 -1.500000000 35.384418488 -31.458978653 -1.500000000 35.458980560 -30.488407135 -1.500000000 
35.488403320 -29.497472763 -1.500000000 35.497474670 -28.499593735 -1.500000000 35.499591827 -27.499954224 -1.500000000 35.499954224 -26.499996185 -1.500000000 35.499996185 -25.500000000 -1.500000000 35.500000000 -24.500000000 -1.500000000 35.500000000 -23.500000000 -1.500000000 35.500000000 -22.500000000 -1.500000000 35.500000000 -21.500000000 -1.500000000 35.500000000 -20.500000000 -1.500000000 35.500000000 -19.500000000 -1.500000000 35.500000000 -18.500000000 -1.500000000 35.500000000 -17.500000000 -1.500000000 35.500000000 -16.500000000 -1.500000000 35.500000000 -15.500000000 -1.500000000 35.500000000 -14.500000000 -1.500000000 35.500000000 -13.500000000 -1.500000000 35.500000000 -12.500000000 -1.500000000 35.500000000 -11.500000000 -1.500000000 35.500000000 -10.500000000 -1.500000000 35.500000000 -9.500000000 -1.500000000 35.500000000 -8.500000000 -1.500000000 35.500000000 -7.500000000 -1.500000000 35.500000000 -6.500000000 -1.500000000 35.500000000 -5.500000000 -1.500000000 35.500000000 -4.500000000 -1.500000000 35.500000000 -3.500000000 -1.500000000 35.500000000 -2.500000000 -1.500000000 35.500000000 -1.500000000 -1.500000000 35.500000000 -0.500000000 -1.500000000 35.500000000 0.500000000 -1.500000000 35.500000000 1.500000000 -1.500000000 35.500000000 2.500000000 -1.500000000 35.500000000 3.500000000 -1.500000000 35.500000000 4.500000000 -1.500000000 35.500000000 5.500000000 -1.500000000 35.500000000 6.500000000 -1.500000000 35.500000000 7.500000000 -1.500000000 35.500000000 8.500000000 -1.500000000 35.500000000 9.500000000 -1.500000000 35.500000000 10.500000000 -1.500000000 35.500000000 11.500000000 -1.500000000 35.500000000 12.500000000 -1.500000000 35.500000000 13.500000000 -1.500000000 35.500000000 14.500000000 -1.500000000 35.500000000 15.500000000 -1.500000000 35.500000000 16.500000000 -1.500000000 35.500000000 17.500000000 -1.500000000 35.500000000 18.500000000 -1.500000000 35.500000000 19.500000000 -1.500000000 35.500000000 20.500000000 -1.500000000 
35.500000000 21.500000000 -1.500000000 35.500000000 22.500000000 -1.500000000 35.500000000 23.500000000 -1.500000000 35.500000000 24.500000000 -1.500000000 35.500000000 25.499996185 -1.500000000 35.499996185 26.499954224 -1.500000000 35.499954224 27.499591827 -1.500000000 35.499591827 28.497470856 -1.500000000 35.497467041 29.488407135 -1.500000000 35.488403320 30.458978653 -1.500000000 35.458980560 31.384418488 -1.500000000 35.384422302 32.233222961 -1.500000000 35.233222961 32.981101990 -1.500000000 34.981101990 -33.981101990 -0.500000000 34.981101990 -33.233226776 -0.500000000 35.233222961 -32.384422302 -0.500000000 35.384418488 -31.458978653 -0.500000000 35.458980560 -30.488407135 -0.500000000 35.488403320 -29.497472763 -0.500000000 35.497474670 -28.499593735 -0.500000000 35.499591827 -27.499954224 -0.500000000 35.499954224 -26.499996185 -0.500000000 35.499996185 -25.500000000 -0.500000000 35.500000000 -24.500000000 -0.500000000 35.500000000 -23.500000000 -0.500000000 35.500000000 -22.500000000 -0.500000000 35.500000000 -21.500000000 -0.500000000 35.500000000 -20.500000000 -0.500000000 35.500000000 -19.500000000 -0.500000000 35.500000000 -18.500000000 -0.500000000 35.500000000 -17.500000000 -0.500000000 35.500000000 -16.500000000 -0.500000000 35.500000000 -15.500000000 -0.500000000 35.500000000 -14.500000000 -0.500000000 35.500000000 -13.500000000 -0.500000000 35.500000000 -12.500000000 -0.500000000 35.500000000 -11.500000000 -0.500000000 35.500000000 -10.500000000 -0.500000000 35.500000000 -9.500000000 -0.500000000 35.500000000 -8.500000000 -0.500000000 35.500000000 -7.500000000 -0.500000000 35.500000000 -6.500000000 -0.500000000 35.500000000 -5.500000000 -0.500000000 35.500000000 -4.500000000 -0.500000000 35.500000000 -3.500000000 -0.500000000 35.500000000 -2.500000000 -0.500000000 35.500000000 -1.500000000 -0.500000000 35.500000000 -0.500000000 -0.500000000 35.500000000 0.500000000 -0.500000000 35.500000000 1.500000000 -0.500000000 35.500000000 2.500000000 
-0.500000000 35.500000000 3.500000000 -0.500000000 35.500000000 4.500000000 -0.500000000 35.500000000 5.500000000 -0.500000000 35.500000000 6.500000000 -0.500000000 35.500000000 7.500000000 -0.500000000 35.500000000 8.500000000 -0.500000000 35.500000000 9.500000000 -0.500000000 35.500000000 10.500000000 -0.500000000 35.500000000 11.500000000 -0.500000000 35.500000000 12.500000000 -0.500000000 35.500000000 13.500000000 -0.500000000 35.500000000 14.500000000 -0.500000000 35.500000000 15.500000000 -0.500000000 35.500000000 16.500000000 -0.500000000 35.500000000 17.500000000 -0.500000000 35.500000000 18.500000000 -0.500000000 35.500000000 19.500000000 -0.500000000 35.500000000 20.500000000 -0.500000000 35.500000000 21.500000000 -0.500000000 35.500000000 22.500000000 -0.500000000 35.500000000 23.500000000 -0.500000000 35.500000000 24.500000000 -0.500000000 35.500000000 25.499996185 -0.500000000 35.499996185 26.499954224 -0.500000000 35.499954224 27.499591827 -0.500000000 35.499591827 28.497470856 -0.500000000 35.497467041 29.488407135 -0.500000000 35.488403320 30.458978653 -0.500000000 35.458980560 31.384418488 -0.500000000 35.384422302 32.233222961 -0.500000000 35.233222961 32.981101990 -0.500000000 34.981101990 -33.981101990 0.500000000 34.981101990 -33.233226776 0.500000000 35.233222961 -32.384422302 0.500000000 35.384418488 -31.458978653 0.500000000 35.458980560 -30.488407135 0.500000000 35.488403320 -29.497472763 0.500000000 35.497474670 -28.499593735 0.500000000 35.499591827 -27.499954224 0.500000000 35.499954224 -26.499996185 0.500000000 35.499996185 -25.500000000 0.500000000 35.500000000 -24.500000000 0.500000000 35.500000000 -23.500000000 0.500000000 35.500000000 -22.500000000 0.500000000 35.500000000 -21.500000000 0.500000000 35.500000000 -20.500000000 0.500000000 35.500000000 -19.500000000 0.500000000 35.500000000 -18.500000000 0.500000000 35.500000000 -17.500000000 0.500000000 35.500000000 -16.500000000 0.500000000 35.500000000 -15.500000000 0.500000000 
35.500000000 -14.500000000 0.500000000 35.500000000 -13.500000000 0.500000000 35.500000000 -12.500000000 0.500000000 35.500000000 -11.500000000 0.500000000 35.500000000 -10.500000000 0.500000000 35.500000000 -9.500000000 0.500000000 35.500000000 -8.500000000 0.500000000 35.500000000 -7.500000000 0.500000000 35.500000000 -6.500000000 0.500000000 35.500000000 -5.500000000 0.500000000 35.500000000 -4.500000000 0.500000000 35.500000000 -3.500000000 0.500000000 35.500000000 -2.500000000 0.500000000 35.500000000 -1.500000000 0.500000000 35.500000000 -0.500000000 0.500000000 35.500000000 0.500000000 0.500000000 35.500000000 1.500000000 0.500000000 35.500000000 2.500000000 0.500000000 35.500000000 3.500000000 0.500000000 35.500000000 4.500000000 0.500000000 35.500000000 5.500000000 0.500000000 35.500000000 6.500000000 0.500000000 35.500000000 7.500000000 0.500000000 35.500000000 8.500000000 0.500000000 35.500000000 9.500000000 0.500000000 35.500000000 10.500000000 0.500000000 35.500000000 11.500000000 0.500000000 35.500000000 12.500000000 0.500000000 35.500000000 13.500000000 0.500000000 35.500000000 14.500000000 0.500000000 35.500000000 15.500000000 0.500000000 35.500000000 16.500000000 0.500000000 35.500000000 17.500000000 0.500000000 35.500000000 18.500000000 0.500000000 35.500000000 19.500000000 0.500000000 35.500000000 20.500000000 0.500000000 35.500000000 21.500000000 0.500000000 35.500000000 22.500000000 0.500000000 35.500000000 23.500000000 0.500000000 35.500000000 24.500000000 0.500000000 35.500000000 25.499996185 0.500000000 35.499996185 26.499954224 0.500000000 35.499954224 27.499591827 0.500000000 35.499591827 28.497470856 0.500000000 35.497467041 29.488407135 0.500000000 35.488403320 30.458978653 0.500000000 35.458980560 31.384418488 0.500000000 35.384422302 32.233222961 0.500000000 35.233222961 32.981101990 0.500000000 34.981101990 -33.981101990 1.500000000 34.981101990 -33.233226776 1.500000000 35.233222961 -32.384422302 1.500000000 35.384418488 
-31.458978653 1.500000000 35.458980560 -30.488407135 1.500000000 35.488403320 -29.497472763 1.500000000 35.497474670 -28.499593735 1.500000000 35.499591827 -27.499954224 1.500000000 35.499954224 -26.499996185 1.500000000 35.499996185 -25.500000000 1.500000000 35.500000000 -24.500000000 1.500000000 35.500000000 -23.500000000 1.500000000 35.500000000 -22.500000000 1.500000000 35.500000000 -21.500000000 1.500000000 35.500000000 -20.500000000 1.500000000 35.500000000 -19.500000000 1.500000000 35.500000000 -18.500000000 1.500000000 35.500000000 -17.500000000 1.500000000 35.500000000 -16.500000000 1.500000000 35.500000000 -15.500000000 1.500000000 35.500000000 -14.500000000 1.500000000 35.500000000 -13.500000000 1.500000000 35.500000000 -12.500000000 1.500000000 35.500000000 -11.500000000 1.500000000 35.500000000 -10.500000000 1.500000000 35.500000000 -9.500000000 1.500000000 35.500000000 -8.500000000 1.500000000 35.500000000 -7.500000000 1.500000000 35.500000000 -6.500000000 1.500000000 35.500000000 -5.500000000 1.500000000 35.500000000 -4.500000000 1.500000000 35.500000000 -3.500000000 1.500000000 35.500000000 -2.500000000 1.500000000 35.500000000 -1.500000000 1.500000000 35.500000000 -0.500000000 1.500000000 35.500000000 0.500000000 1.500000000 35.500000000 1.500000000 1.500000000 35.500000000 2.500000000 1.500000000 35.500000000 3.500000000 1.500000000 35.500000000 4.500000000 1.500000000 35.500000000 5.500000000 1.500000000 35.500000000 6.500000000 1.500000000 35.500000000 7.500000000 1.500000000 35.500000000 8.500000000 1.500000000 35.500000000 9.500000000 1.500000000 35.500000000 10.500000000 1.500000000 35.500000000 11.500000000 1.500000000 35.500000000 12.500000000 1.500000000 35.500000000 13.500000000 1.500000000 35.500000000 14.500000000 1.500000000 35.500000000 15.500000000 1.500000000 35.500000000 16.500000000 1.500000000 35.500000000 17.500000000 1.500000000 35.500000000 18.500000000 1.500000000 35.500000000 19.500000000 1.500000000 35.500000000 
20.500000000 1.500000000 35.500000000 21.500000000 1.500000000 35.500000000 22.500000000 1.500000000 35.500000000 23.500000000 1.500000000 35.500000000 24.500000000 1.500000000 35.500000000 25.499996185 1.500000000 35.499996185 26.499954224 1.500000000 35.499954224 27.499591827 1.500000000 35.499591827 28.497470856 1.500000000 35.497467041 29.488407135 1.500000000 35.488403320 30.458978653 1.500000000 35.458980560 31.384418488 1.500000000 35.384422302 32.233222961 1.500000000 35.233222961 32.981101990 1.500000000 34.981101990 -33.981101990 2.500000000 34.981101990 -33.233226776 2.500000000 35.233222961 -32.384422302 2.500000000 35.384418488 -31.458978653 2.500000000 35.458980560 -30.488407135 2.500000000 35.488403320 -29.497472763 2.500000000 35.497474670 -28.499593735 2.500000000 35.499591827 -27.499954224 2.500000000 35.499954224 -26.499996185 2.500000000 35.499996185 -25.500000000 2.500000000 35.500000000 -24.500000000 2.500000000 35.500000000 -23.500000000 2.500000000 35.500000000 -22.500000000 2.500000000 35.500000000 -21.500000000 2.500000000 35.500000000 -20.500000000 2.500000000 35.500000000 -19.500000000 2.500000000 35.500000000 -18.500000000 2.500000000 35.500000000 -17.500000000 2.500000000 35.500000000 -16.500000000 2.500000000 35.500000000 -15.500000000 2.500000000 35.500000000 -14.500000000 2.500000000 35.500000000 -13.500000000 2.500000000 35.500000000 -12.500000000 2.500000000 35.500000000 -11.500000000 2.500000000 35.500000000 -10.500000000 2.500000000 35.500000000 -9.500000000 2.500000000 35.500000000 -8.500000000 2.500000000 35.500000000 -7.500000000 2.500000000 35.500000000 -6.500000000 2.500000000 35.500000000 -5.500000000 2.500000000 35.500000000 -4.500000000 2.500000000 35.500000000 -3.500000000 2.500000000 35.500000000 -2.500000000 2.500000000 35.500000000 -1.500000000 2.500000000 35.500000000 -0.500000000 2.500000000 35.500000000 0.500000000 2.500000000 35.500000000 1.500000000 2.500000000 35.500000000 2.500000000 2.500000000 35.500000000 
3.500000000 2.500000000 35.500000000 4.500000000 2.500000000 35.500000000 5.500000000 2.500000000 35.500000000 6.500000000 2.500000000 35.500000000 7.500000000 2.500000000 35.500000000 8.500000000 2.500000000 35.500000000 9.500000000 2.500000000 35.500000000 10.500000000 2.500000000 35.500000000 11.500000000 2.500000000 35.500000000 12.500000000 2.500000000 35.500000000 13.500000000 2.500000000 35.500000000 14.500000000 2.500000000 35.500000000 15.500000000 2.500000000 35.500000000 16.500000000 2.500000000 35.500000000 17.500000000 2.500000000 35.500000000 18.500000000 2.500000000 35.500000000 19.500000000 2.500000000 35.500000000 20.500000000 2.500000000 35.500000000 21.500000000 2.500000000 35.500000000 22.500000000 2.500000000 35.500000000 23.500000000 2.500000000 35.500000000 24.500000000 2.500000000 35.500000000 25.499996185 2.500000000 35.499996185 26.499954224 2.500000000 35.499954224 27.499591827 2.500000000 35.499591827 28.497470856 2.500000000 35.497467041 29.488407135 2.500000000 35.488403320 30.458978653 2.500000000 35.458980560 31.384418488 2.500000000 35.384422302 32.233222961 2.500000000 35.233222961 32.981101990 2.500000000 34.981101990 -33.981101990 3.500000000 34.981101990 -33.233226776 3.500000000 35.233222961 -32.384422302 3.500000000 35.384418488 -31.458978653 3.500000000 35.458980560 -30.488407135 3.500000000 35.488403320 -29.497472763 3.500000000 35.497474670 -28.499593735 3.500000000 35.499591827 -27.499954224 3.500000000 35.499954224 -26.499996185 3.500000000 35.499996185 -25.500000000 3.500000000 35.500000000 -24.500000000 3.500000000 35.500000000 -23.500000000 3.500000000 35.500000000 -22.500000000 3.500000000 35.500000000 -21.500000000 3.500000000 35.500000000 -20.500000000 3.500000000 35.500000000 -19.500000000 3.500000000 35.500000000 -18.500000000 3.500000000 35.500000000 -17.500000000 3.500000000 35.500000000 -16.500000000 3.500000000 35.500000000 -15.500000000 3.500000000 35.500000000 -14.500000000 3.500000000 35.500000000 
-13.500000000 3.500000000 35.500000000 -12.500000000 3.500000000 35.500000000 -11.500000000 3.500000000 35.500000000 -10.500000000 3.500000000 35.500000000 -9.500000000 3.500000000 35.500000000 -8.500000000 3.500000000 35.500000000 -7.500000000 3.500000000 35.500000000 -6.500000000 3.500000000 35.500000000 -5.500000000 3.500000000 35.500000000 -4.500000000 3.500000000 35.500000000 -3.500000000 3.500000000 35.500000000 -2.500000000 3.500000000 35.500000000 -1.500000000 3.500000000 35.500000000 -0.500000000 3.500000000 35.500000000 0.500000000 3.500000000 35.500000000 1.500000000 3.500000000 35.500000000 2.500000000 3.500000000 35.500000000 3.500000000 3.500000000 35.500000000 4.500000000 3.500000000 35.500000000 5.500000000 3.500000000 35.500000000 6.500000000 3.500000000 35.500000000 7.500000000 3.500000000 35.500000000 8.500000000 3.500000000 35.500000000 9.500000000 3.500000000 35.500000000 10.500000000 3.500000000 35.500000000 11.500000000 3.500000000 35.500000000 12.500000000 3.500000000 35.500000000 13.500000000 3.500000000 35.500000000 14.500000000 3.500000000 35.500000000 15.500000000 3.500000000 35.500000000 16.500000000 3.500000000 35.500000000 17.500000000 3.500000000 35.500000000 18.500000000 3.500000000 35.500000000 19.500000000 3.500000000 35.500000000 20.500000000 3.500000000 35.500000000 21.500000000 3.500000000 35.500000000 22.500000000 3.500000000 35.500000000 23.500000000 3.500000000 35.500000000 24.500000000 3.500000000 35.500000000 25.499996185 3.500000000 35.499996185 26.499954224 3.500000000 35.499954224 27.499591827 3.500000000 35.499591827 28.497470856 3.500000000 35.497467041 29.488407135 3.500000000 35.488403320 30.458978653 3.500000000 35.458980560 31.384418488 3.500000000 35.384422302 32.233222961 3.500000000 35.233222961 32.981101990 3.500000000 34.981101990 -33.981101990 4.500000000 34.981101990 -33.233226776 4.500000000 35.233222961 -32.384422302 4.500000000 35.384418488 -31.458978653 4.500000000 35.458980560 -30.488407135 4.500000000 
35.488403320 -29.497472763 4.500000000 35.497474670 -28.499593735 4.500000000 35.499591827 -27.499954224 4.500000000 35.499954224 -26.499996185 4.500000000 35.499996185 -25.500000000 4.500000000 35.500000000 -24.500000000 4.500000000 35.500000000 -23.500000000 4.500000000 35.500000000 -22.500000000 4.500000000 35.500000000 -21.500000000 4.500000000 35.500000000 -20.500000000 4.500000000 35.500000000 -19.500000000 4.500000000 35.500000000 -18.500000000 4.500000000 35.500000000 -17.500000000 4.500000000 35.500000000 -16.500000000 4.500000000 35.500000000 -15.500000000 4.500000000 35.500000000 -14.500000000 4.500000000 35.500000000 -13.500000000 4.500000000 35.500000000 -12.500000000 4.500000000 35.500000000 -11.500000000 4.500000000 35.500000000 -10.500000000 4.500000000 35.500000000 -9.500000000 4.500000000 35.500000000 -8.500000000 4.500000000 35.500000000 -7.500000000 4.500000000 35.500000000 -6.500000000 4.500000000 35.500000000 -5.500000000 4.500000000 35.500000000 -4.500000000 4.500000000 35.500000000 -3.500000000 4.500000000 35.500000000 -2.500000000 4.500000000 35.500000000 -1.500000000 4.500000000 35.500000000 -0.500000000 4.500000000 35.500000000 0.500000000 4.500000000 35.500000000 1.500000000 4.500000000 35.500000000 2.500000000 4.500000000 35.500000000 3.500000000 4.500000000 35.500000000 4.500000000 4.500000000 35.500000000 5.500000000 4.500000000 35.500000000 6.500000000 4.500000000 35.500000000 7.500000000 4.500000000 35.500000000 8.500000000 4.500000000 35.500000000 9.500000000 4.500000000 35.500000000 10.500000000 4.500000000 35.500000000 11.500000000 4.500000000 35.500000000 12.500000000 4.500000000 35.500000000 13.500000000 4.500000000 35.500000000 14.500000000 4.500000000 35.500000000 15.500000000 4.500000000 35.500000000 16.500000000 4.500000000 35.500000000 17.500000000 4.500000000 35.500000000 18.500000000 4.500000000 35.500000000 19.500000000 4.500000000 35.500000000 20.500000000 4.500000000 35.500000000 21.500000000 4.500000000 35.500000000 
22.500000000 4.500000000 35.500000000 23.500000000 4.500000000 35.500000000 24.500000000 4.500000000 35.500000000 25.499996185 4.500000000 35.499996185 26.499954224 4.500000000 35.499954224 27.499591827 4.500000000 35.499591827 28.497470856 4.500000000 35.497467041 29.488407135 4.500000000 35.488403320 30.458978653 4.500000000 35.458980560 31.384418488 4.500000000 35.384422302 32.233222961 4.500000000 35.233222961 32.981101990 4.500000000 34.981101990 -33.981101990 5.500000000 34.981101990 -33.233226776 5.500000000 35.233222961 -32.384422302 5.500000000 35.384418488 -31.458978653 5.500000000 35.458980560 -30.488407135 5.500000000 35.488403320 -29.497472763 5.500000000 35.497474670 -28.499593735 5.500000000 35.499591827 -27.499954224 5.500000000 35.499954224 -26.499996185 5.500000000 35.499996185 -25.500000000 5.500000000 35.500000000 -24.500000000 5.500000000 35.500000000 -23.500000000 5.500000000 35.500000000 -22.500000000 5.500000000 35.500000000 -21.500000000 5.500000000 35.500000000 -20.500000000 5.500000000 35.500000000 -19.500000000 5.500000000 35.500000000 -18.500000000 5.500000000 35.500000000 -17.500000000 5.500000000 35.500000000 -16.500000000 5.500000000 35.500000000 -15.500000000 5.500000000 35.500000000 -14.500000000 5.500000000 35.500000000 -13.500000000 5.500000000 35.500000000 -12.500000000 5.500000000 35.500000000 -11.500000000 5.500000000 35.500000000 -10.500000000 5.500000000 35.500000000 -9.500000000 5.500000000 35.500000000 -8.500000000 5.500000000 35.500000000 -7.500000000 5.500000000 35.500000000 -6.500000000 5.500000000 35.500000000 -5.500000000 5.500000000 35.500000000 -4.500000000 5.500000000 35.500000000 -3.500000000 5.500000000 35.500000000 -2.500000000 5.500000000 35.500000000 -1.500000000 5.500000000 35.500000000 -0.500000000 5.500000000 35.500000000 0.500000000 5.500000000 35.500000000 1.500000000 5.500000000 35.500000000 2.500000000 5.500000000 35.500000000 3.500000000 5.500000000 35.500000000 4.500000000 5.500000000 35.500000000 
5.500000000 5.500000000 35.500000000 6.500000000 5.500000000 35.500000000 7.500000000 5.500000000 35.500000000 8.500000000 5.500000000 35.500000000 9.500000000 5.500000000 35.500000000 10.500000000 5.500000000 35.500000000 11.500000000 5.500000000 35.500000000 12.500000000 5.500000000 35.500000000 13.500000000 5.500000000 35.500000000 14.500000000 5.500000000 35.500000000 15.500000000 5.500000000 35.500000000 16.500000000 5.500000000 35.500000000 17.500000000 5.500000000 35.500000000 18.500000000 5.500000000 35.500000000 19.500000000 5.500000000 35.500000000 20.500000000 5.500000000 35.500000000 21.500000000 5.500000000 35.500000000 22.500000000 5.500000000 35.500000000 23.500000000 5.500000000 35.500000000 24.500000000 5.500000000 35.500000000 25.499996185 5.500000000 35.499996185 26.499954224 5.500000000 35.499954224 27.499591827 5.500000000 35.499591827 28.497470856 5.500000000 35.497467041 29.488407135 5.500000000 35.488403320 30.458978653 5.500000000 35.458980560 31.384418488 5.500000000 35.384422302 32.233222961 5.500000000 35.233222961 32.981101990 5.500000000 34.981101990 -33.981101990 6.500000000 34.981101990 -33.233226776 6.500000000 35.233222961 -32.384422302 6.500000000 35.384418488 -31.458978653 6.500000000 35.458980560 -30.488407135 6.500000000 35.488403320 -29.497472763 6.500000000 35.497474670 -28.499593735 6.500000000 35.499591827 -27.499954224 6.500000000 35.499954224 -26.499996185 6.500000000 35.499996185 -25.500000000 6.500000000 35.500000000 -24.500000000 6.500000000 35.500000000 -23.500000000 6.500000000 35.500000000 -22.500000000 6.500000000 35.500000000 -21.500000000 6.500000000 35.500000000 -20.500000000 6.500000000 35.500000000 -19.500000000 6.500000000 35.500000000 -18.500000000 6.500000000 35.500000000 -17.500000000 6.500000000 35.500000000 -16.500000000 6.500000000 35.500000000 -15.500000000 6.500000000 35.500000000 -14.500000000 6.500000000 35.500000000 -13.500000000 6.500000000 35.500000000 -12.500000000 6.500000000 35.500000000 
-11.500000000 6.500000000 35.500000000 -10.500000000 6.500000000 35.500000000 -9.500000000 6.500000000 35.500000000 -8.500000000 6.500000000 35.500000000 -7.500000000 6.500000000 35.500000000 -6.500000000 6.500000000 35.500000000 -5.500000000 6.500000000 35.500000000 -4.500000000 6.500000000 35.500000000 -3.500000000 6.500000000 35.500000000 -2.500000000 6.500000000 35.500000000 -1.500000000 6.500000000 35.500000000 -0.500000000 6.500000000 35.500000000 0.500000000 6.500000000 35.500000000 1.500000000 6.500000000 35.500000000 2.500000000 6.500000000 35.500000000 3.500000000 6.500000000 35.500000000 4.500000000 6.500000000 35.500000000 5.500000000 6.500000000 35.500000000 6.500000000 6.500000000 35.500000000 7.500000000 6.500000000 35.500000000 8.500000000 6.500000000 35.500000000 9.500000000 6.500000000 35.500000000 10.500000000 6.500000000 35.500000000 11.500000000 6.500000000 35.500000000 12.500000000 6.500000000 35.500000000 13.500000000 6.500000000 35.500000000 14.500000000 6.500000000 35.500000000 15.500000000 6.500000000 35.500000000 16.500000000 6.500000000 35.500000000 17.500000000 6.500000000 35.500000000 18.500000000 6.500000000 35.500000000 19.500000000 6.500000000 35.500000000 20.500000000 6.500000000 35.500000000 21.500000000 6.500000000 35.500000000 22.500000000 6.500000000 35.500000000 23.500000000 6.500000000 35.500000000 24.500000000 6.500000000 35.500000000 25.499996185 6.500000000 35.499996185 26.499954224 6.500000000 35.499954224 27.499591827 6.500000000 35.499591827 28.497470856 6.500000000 35.497467041 29.488407135 6.500000000 35.488403320 30.458978653 6.500000000 35.458980560 31.384418488 6.500000000 35.384422302 32.233222961 6.500000000 35.233222961 32.981101990 6.500000000 34.981101990 -33.981101990 7.500000000 34.981101990 -33.233226776 7.500000000 35.233222961 -32.384422302 7.500000000 35.384418488 -31.458978653 7.500000000 35.458980560 -30.488407135 7.500000000 35.488403320 -29.497472763 7.500000000 35.497474670 -28.499593735 7.500000000 
35.499591827 -27.499954224 7.500000000 35.499954224 -26.499996185 7.500000000 35.499996185 -25.500000000 7.500000000 35.500000000 -24.500000000 7.500000000 35.500000000 -23.500000000 7.500000000 35.500000000 -22.500000000 7.500000000 35.500000000 -21.500000000 7.500000000 35.500000000 -20.500000000 7.500000000 35.500000000 -19.500000000 7.500000000 35.500000000 -18.500000000 7.500000000 35.500000000 -17.500000000 7.500000000 35.500000000 -16.500000000 7.500000000 35.500000000 -15.500000000 7.500000000 35.500000000 -14.500000000 7.500000000 35.500000000 -13.500000000 7.500000000 35.500000000 -12.500000000 7.500000000 35.500000000 -11.500000000 7.500000000 35.500000000 -10.500000000 7.500000000 35.500000000 -9.500000000 7.500000000 35.500000000 -8.500000000 7.500000000 35.500000000 -7.500000000 7.500000000 35.500000000 -6.500000000 7.500000000 35.500000000 -5.500000000 7.500000000 35.500000000 -4.500000000 7.500000000 35.500000000 -3.500000000 7.500000000 35.500000000 -2.500000000 7.500000000 35.500000000 -1.500000000 7.500000000 35.500000000 -0.500000000 7.500000000 35.500000000 0.500000000 7.500000000 35.500000000 1.500000000 7.500000000 35.500000000 2.500000000 7.500000000 35.500000000 3.500000000 7.500000000 35.500000000 4.500000000 7.500000000 35.500000000 5.500000000 7.500000000 35.500000000 6.500000000 7.500000000 35.500000000 7.500000000 7.500000000 35.500000000 8.500000000 7.500000000 35.500000000 9.500000000 7.500000000 35.500000000 10.500000000 7.500000000 35.500000000 11.500000000 7.500000000 35.500000000 12.500000000 7.500000000 35.500000000 13.500000000 7.500000000 35.500000000 14.500000000 7.500000000 35.500000000 15.500000000 7.500000000 35.500000000 16.500000000 7.500000000 35.500000000 17.500000000 7.500000000 35.500000000 18.500000000 7.500000000 35.500000000 19.500000000 7.500000000 35.500000000 20.500000000 7.500000000 35.500000000 21.500000000 7.500000000 35.500000000 22.500000000 7.500000000 35.500000000 23.500000000 7.500000000 35.500000000 
24.500000000 7.500000000 35.500000000 25.499996185 7.500000000 35.499996185 26.499954224 7.500000000 35.499954224 27.499591827 7.500000000 35.499591827 28.497470856 7.500000000 35.497467041 29.488407135 7.500000000 35.488403320 30.458978653 7.500000000 35.458980560 31.384418488 7.500000000 35.384422302 32.233222961 7.500000000 35.233222961 32.981101990 7.500000000 34.981101990 -33.981101990 8.500000000 34.981101990 -33.233226776 8.500000000 35.233222961 -32.384422302 8.500000000 35.384418488 -31.458978653 8.500000000 35.458980560 -30.488407135 8.500000000 35.488403320 -29.497472763 8.500000000 35.497474670 -28.499593735 8.500000000 35.499591827 -27.499954224 8.500000000 35.499954224 -26.499996185 8.500000000 35.499996185 -25.500000000 8.500000000 35.500000000 -24.500000000 8.500000000 35.500000000 -23.500000000 8.500000000 35.500000000 -22.500000000 8.500000000 35.500000000 -21.500000000 8.500000000 35.500000000 -20.500000000 8.500000000 35.500000000 -19.500000000 8.500000000 35.500000000 -18.500000000 8.500000000 35.500000000 -17.500000000 8.500000000 35.500000000 -16.500000000 8.500000000 35.500000000 -15.500000000 8.500000000 35.500000000 -14.500000000 8.500000000 35.500000000 -13.500000000 8.500000000 35.500000000 -12.500000000 8.500000000 35.500000000 -11.500000000 8.500000000 35.500000000 -10.500000000 8.500000000 35.500000000 -9.500000000 8.500000000 35.500000000 -8.500000000 8.500000000 35.500000000 -7.500000000 8.500000000 35.500000000 -6.500000000 8.500000000 35.500000000 -5.500000000 8.500000000 35.500000000 -4.500000000 8.500000000 35.500000000 -3.500000000 8.500000000 35.500000000 -2.500000000 8.500000000 35.500000000 -1.500000000 8.500000000 35.500000000 -0.500000000 8.500000000 35.500000000 0.500000000 8.500000000 35.500000000 1.500000000 8.500000000 35.500000000 2.500000000 8.500000000 35.500000000 3.500000000 8.500000000 35.500000000 4.500000000 8.500000000 35.500000000 5.500000000 8.500000000 35.500000000 6.500000000 8.500000000 35.500000000 
7.500000000 8.500000000 35.500000000 8.500000000 8.500000000 35.500000000 9.500000000 8.500000000 35.500000000 10.500000000 8.500000000 35.500000000 11.500000000 8.500000000 35.500000000 12.500000000 8.500000000 35.500000000 13.500000000 8.500000000 35.500000000 14.500000000 8.500000000 35.500000000 15.500000000 8.500000000 35.500000000 16.500000000 8.500000000 35.500000000 17.500000000 8.500000000 35.500000000 18.500000000 8.500000000 35.500000000 19.500000000 8.500000000 35.500000000 20.500000000 8.500000000 35.500000000 21.500000000 8.500000000 35.500000000 22.500000000 8.500000000 35.500000000 23.500000000 8.500000000 35.500000000 24.500000000 8.500000000 35.500000000 25.499996185 8.500000000 35.499996185 26.499954224 8.500000000 35.499954224 27.499591827 8.500000000 35.499591827 28.497470856 8.500000000 35.497467041 29.488407135 8.500000000 35.488403320 30.458978653 8.500000000 35.458980560 31.384418488 8.500000000 35.384422302 32.233222961 8.500000000 35.233222961 32.981101990 8.500000000 34.981101990 -33.981101990 9.500000000 34.981101990 -33.233226776 9.500000000 35.233222961 -32.384422302 9.500000000 35.384418488 -31.458978653 9.500000000 35.458980560 -30.488407135 9.500000000 35.488403320 -29.497472763 9.500000000 35.497474670 -28.499593735 9.500000000 35.499591827 -27.499954224 9.500000000 35.499954224 -26.499996185 9.500000000 35.499996185 -25.500000000 9.500000000 35.500000000 -24.500000000 9.500000000 35.500000000 -23.500000000 9.500000000 35.500000000 -22.500000000 9.500000000 35.500000000 -21.500000000 9.500000000 35.500000000 -20.500000000 9.500000000 35.500000000 -19.500000000 9.500000000 35.500000000 -18.500000000 9.500000000 35.500000000 -17.500000000 9.500000000 35.500000000 -16.500000000 9.500000000 35.500000000 -15.500000000 9.500000000 35.500000000 -14.500000000 9.500000000 35.500000000 -13.500000000 9.500000000 35.500000000 -12.500000000 9.500000000 35.500000000 -11.500000000 9.500000000 35.500000000 -10.500000000 9.500000000 35.500000000 
-9.500000000 9.500000000 35.500000000 -8.500000000 9.500000000 35.500000000 -7.500000000 9.500000000 35.500000000 -6.500000000 9.500000000 35.500000000 -5.500000000 9.500000000 35.500000000 -4.500000000 9.500000000 35.500000000 -3.500000000 9.500000000 35.500000000 -2.500000000 9.500000000 35.500000000 -1.500000000 9.500000000 35.500000000 -0.500000000 9.500000000 35.500000000 0.500000000 9.500000000 35.500000000 1.500000000 9.500000000 35.500000000 2.500000000 9.500000000 35.500000000 3.500000000 9.500000000 35.500000000 4.500000000 9.500000000 35.500000000 5.500000000 9.500000000 35.500000000 6.500000000 9.500000000 35.500000000 7.500000000 9.500000000 35.500000000 8.500000000 9.500000000 35.500000000 9.500000000 9.500000000 35.500000000 10.500000000 9.500000000 35.500000000 11.500000000 9.500000000 35.500000000 12.500000000 9.500000000 35.500000000 13.500000000 9.500000000 35.500000000 14.500000000 9.500000000 35.500000000 15.500000000 9.500000000 35.500000000 16.500000000 9.500000000 35.500000000 17.500000000 9.500000000 35.500000000 18.500000000 9.500000000 35.500000000 19.500000000 9.500000000 35.500000000 20.500000000 9.500000000 35.500000000 21.500000000 9.500000000 35.500000000 22.500000000 9.500000000 35.500000000 23.500000000 9.500000000 35.500000000 24.500000000 9.500000000 35.500000000 25.499996185 9.500000000 35.499996185 26.499954224 9.500000000 35.499954224 27.499591827 9.500000000 35.499591827 28.497470856 9.500000000 35.497467041 29.488407135 9.500000000 35.488403320 30.458978653 9.500000000 35.458980560 31.384418488 9.500000000 35.384422302 32.233222961 9.500000000 35.233222961 32.981101990 9.500000000 34.981101990 -33.981101990 10.500000000 34.981101990 -33.233226776 10.500000000 35.233222961 -32.384422302 10.500000000 35.384418488 -31.458978653 10.500000000 35.458980560 -30.488407135 10.500000000 35.488403320 -29.497472763 10.500000000 35.497474670 -28.499593735 10.500000000 35.499591827 -27.499954224 10.500000000 35.499954224 -26.499996185 
10.500000000 35.499996185 -25.500000000 10.500000000 35.500000000 -24.500000000 10.500000000 35.500000000 -23.500000000 10.500000000 35.500000000 -22.500000000 10.500000000 35.500000000 -21.500000000 10.500000000 35.500000000 -20.500000000 10.500000000 35.500000000 -19.500000000 10.500000000 35.500000000 -18.500000000 10.500000000 35.500000000 -17.500000000 10.500000000 35.500000000 -16.500000000 10.500000000 35.500000000 -15.500000000 10.500000000 35.500000000 -14.500000000 10.500000000 35.500000000 -13.500000000 10.500000000 35.500000000 -12.500000000 10.500000000 35.500000000 -11.500000000 10.500000000 35.500000000 -10.500000000 10.500000000 35.500000000 -9.500000000 10.500000000 35.500000000 -8.500000000 10.500000000 35.500000000 -7.500000000 10.500000000 35.500000000 -6.500000000 10.500000000 35.500000000 -5.500000000 10.500000000 35.500000000 -4.500000000 10.500000000 35.500000000 -3.500000000 10.500000000 35.500000000 -2.500000000 10.500000000 35.500000000 -1.500000000 10.500000000 35.500000000 -0.500000000 10.500000000 35.500000000 0.500000000 10.500000000 35.500000000 1.500000000 10.500000000 35.500000000 2.500000000 10.500000000 35.500000000 3.500000000 10.500000000 35.500000000 4.500000000 10.500000000 35.500000000 5.500000000 10.500000000 35.500000000 6.500000000 10.500000000 35.500000000 7.500000000 10.500000000 35.500000000 8.500000000 10.500000000 35.500000000 9.500000000 10.500000000 35.500000000 10.500000000 10.500000000 35.500000000 11.500000000 10.500000000 35.500000000 12.500000000 10.500000000 35.500000000 13.500000000 10.500000000 35.500000000 14.500000000 10.500000000 35.500000000 15.500000000 10.500000000 35.500000000 16.500000000 10.500000000 35.500000000 17.500000000 10.500000000 35.500000000 18.500000000 10.500000000 35.500000000 19.500000000 10.500000000 35.500000000 20.500000000 10.500000000 35.500000000 21.500000000 10.500000000 35.500000000 22.500000000 10.500000000 35.500000000 23.500000000 10.500000000 35.500000000 24.500000000 
10.500000000 35.500000000 25.499996185 10.500000000 35.499996185 26.499954224 10.500000000 35.499954224 27.499591827 10.500000000 35.499591827 28.497470856 10.500000000 35.497467041 29.488407135 10.500000000 35.488403320 30.458978653 10.500000000 35.458980560 31.384418488 10.500000000 35.384422302 32.233222961 10.500000000 35.233222961 32.981101990 10.500000000 34.981101990 -33.981101990 11.500000000 34.981101990 -33.233226776 11.500000000 35.233222961 -32.384422302 11.500000000 35.384418488 -31.458978653 11.500000000 35.458980560 -30.488407135 11.500000000 35.488403320 -29.497472763 11.500000000 35.497474670 -28.499593735 11.500000000 35.499591827 -27.499954224 11.500000000 35.499954224 -26.499996185 11.500000000 35.499996185 -25.500000000 11.500000000 35.500000000 -24.500000000 11.500000000 35.500000000 -23.500000000 11.500000000 35.500000000 -22.500000000 11.500000000 35.500000000 -21.500000000 11.500000000 35.500000000 -20.500000000 11.500000000 35.500000000 -19.500000000 11.500000000 35.500000000 -18.500000000 11.500000000 35.500000000 -17.500000000 11.500000000 35.500000000 -16.500000000 11.500000000 35.500000000 -15.500000000 11.500000000 35.500000000 -14.500000000 11.500000000 35.500000000 -13.500000000 11.500000000 35.500000000 -12.500000000 11.500000000 35.500000000 -11.500000000 11.500000000 35.500000000 -10.500000000 11.500000000 35.500000000 -9.500000000 11.500000000 35.500000000 -8.500000000 11.500000000 35.500000000 -7.500000000 11.500000000 35.500000000 -6.500000000 11.500000000 35.500000000 -5.500000000 11.500000000 35.500000000 -4.500000000 11.500000000 35.500000000 -3.500000000 11.500000000 35.500000000 -2.500000000 11.500000000 35.500000000 -1.500000000 11.500000000 35.500000000 -0.500000000 11.500000000 35.500000000 0.500000000 11.500000000 35.500000000 1.500000000 11.500000000 35.500000000 2.500000000 11.500000000 35.500000000 3.500000000 11.500000000 35.500000000 4.500000000 11.500000000 35.500000000 5.500000000 11.500000000 35.500000000 
6.500000000 11.500000000 35.500000000 7.500000000 11.500000000 35.500000000 8.500000000 11.500000000 35.500000000 9.500000000 11.500000000 35.500000000 10.500000000 11.500000000 35.500000000 11.500000000 11.500000000 35.500000000 12.500000000 11.500000000 35.500000000 13.500000000 11.500000000 35.500000000 14.500000000 11.500000000 35.500000000 15.500000000 11.500000000 35.500000000 16.500000000 11.500000000 35.500000000 17.500000000 11.500000000 35.500000000 18.500000000 11.500000000 35.500000000 19.500000000 11.500000000 35.500000000 20.500000000 11.500000000 35.500000000 21.500000000 11.500000000 35.500000000 22.500000000 11.500000000 35.500000000 23.500000000 11.500000000 35.500000000 24.500000000 11.500000000 35.500000000 25.499996185 11.500000000 35.499996185 26.499954224 11.500000000 35.499954224 27.499591827 11.500000000 35.499591827 28.497470856 11.500000000 35.497467041 29.488407135 11.500000000 35.488403320 30.458978653 11.500000000 35.458980560 31.384418488 11.500000000 35.384422302 32.233222961 11.500000000 35.233222961 32.981101990 11.500000000 34.981101990 -33.981101990 12.500000000 34.981101990 -33.233226776 12.500000000 35.233222961 -32.384422302 12.500000000 35.384418488 -31.458978653 12.500000000 35.458980560 -30.488407135 12.500000000 35.488403320 -29.497472763 12.500000000 35.497474670 -28.499593735 12.500000000 35.499591827 -27.499954224 12.500000000 35.499954224 -26.499996185 12.500000000 35.499996185 -25.500000000 12.500000000 35.500000000 -24.500000000 12.500000000 35.500000000 -23.500000000 12.500000000 35.500000000 -22.500000000 12.500000000 35.500000000 -21.500000000 12.500000000 35.500000000 -20.500000000 12.500000000 35.500000000 -19.500000000 12.500000000 35.500000000 -18.500000000 12.500000000 35.500000000 -17.500000000 12.500000000 35.500000000 -16.500000000 12.500000000 35.500000000 -15.500000000 12.500000000 35.500000000 -14.500000000 12.500000000 35.500000000 -13.500000000 12.500000000 35.500000000 -12.500000000 12.500000000 
35.500000000 -11.500000000 12.500000000 35.500000000 -10.500000000 12.500000000 35.500000000 -9.500000000 12.500000000 35.500000000 -8.500000000 12.500000000 35.500000000 -7.500000000 12.500000000 35.500000000 -6.500000000 12.500000000 35.500000000 -5.500000000 12.500000000 35.500000000 -4.500000000 12.500000000 35.500000000 -3.500000000 12.500000000 35.500000000 -2.500000000 12.500000000 35.500000000 -1.500000000 12.500000000 35.500000000 -0.500000000 12.500000000 35.500000000 0.500000000 12.500000000 35.500000000 1.500000000 12.500000000 35.500000000 2.500000000 12.500000000 35.500000000 3.500000000 12.500000000 35.500000000 4.500000000 12.500000000 35.500000000 5.500000000 12.500000000 35.500000000 6.500000000 12.500000000 35.500000000 7.500000000 12.500000000 35.500000000 8.500000000 12.500000000 35.500000000 9.500000000 12.500000000 35.500000000 10.500000000 12.500000000 35.500000000 11.500000000 12.500000000 35.500000000 12.500000000 12.500000000 35.500000000 13.500000000 12.500000000 35.500000000 14.500000000 12.500000000 35.500000000 15.500000000 12.500000000 35.500000000 16.500000000 12.500000000 35.500000000 17.500000000 12.500000000 35.500000000 18.500000000 12.500000000 35.500000000 19.500000000 12.500000000 35.500000000 20.500000000 12.500000000 35.500000000 21.500000000 12.500000000 35.500000000 22.500000000 12.500000000 35.500000000 23.500000000 12.500000000 35.500000000 24.500000000 12.500000000 35.500000000 25.499996185 12.500000000 35.499996185 26.499954224 12.500000000 35.499954224 27.499591827 12.500000000 35.499591827 28.497470856 12.500000000 35.497467041 29.488407135 12.500000000 35.488403320 30.458978653 12.500000000 35.458980560 31.384418488 12.500000000 35.384422302 32.233222961 12.500000000 35.233222961 32.981101990 12.500000000 34.981101990 -33.981101990 13.500000000 34.981101990 -33.233226776 13.500000000 35.233222961 -32.384422302 13.500000000 35.384418488 -31.458978653 13.500000000 35.458980560 -30.488407135 13.500000000 35.488403320 
-29.497472763 13.500000000 35.497474670 -28.499593735 13.500000000 35.499591827 -27.499954224 13.500000000 35.499954224 -26.499996185 13.500000000 35.499996185 -25.500000000 13.500000000 35.500000000 -24.500000000 13.500000000 35.500000000 -23.500000000 13.500000000 35.500000000 -22.500000000 13.500000000 35.500000000 -21.500000000 13.500000000 35.500000000 -20.500000000 13.500000000 35.500000000 -19.500000000 13.500000000 35.500000000 -18.500000000 13.500000000 35.500000000 -17.500000000 13.500000000 35.500000000 -16.500000000 13.500000000 35.500000000 -15.500000000 13.500000000 35.500000000 -14.500000000 13.500000000 35.500000000 -13.500000000 13.500000000 35.500000000 -12.500000000 13.500000000 35.500000000 -11.500000000 13.500000000 35.500000000 -10.500000000 13.500000000 35.500000000 -9.500000000 13.500000000 35.500000000 -8.500000000 13.500000000 35.500000000 -7.500000000 13.500000000 35.500000000 -6.500000000 13.500000000 35.500000000 -5.500000000 13.500000000 35.500000000 -4.500000000 13.500000000 35.500000000 -3.500000000 13.500000000 35.500000000 -2.500000000 13.500000000 35.500000000 -1.500000000 13.500000000 35.500000000 -0.500000000 13.500000000 35.500000000 0.500000000 13.500000000 35.500000000 1.500000000 13.500000000 35.500000000 2.500000000 13.500000000 35.500000000 3.500000000 13.500000000 35.500000000 4.500000000 13.500000000 35.500000000 5.500000000 13.500000000 35.500000000 6.500000000 13.500000000 35.500000000 7.500000000 13.500000000 35.500000000 8.500000000 13.500000000 35.500000000 9.500000000 13.500000000 35.500000000 10.500000000 13.500000000 35.500000000 11.500000000 13.500000000 35.500000000 12.500000000 13.500000000 35.500000000 13.500000000 13.500000000 35.500000000 14.500000000 13.500000000 35.500000000 15.500000000 13.500000000 35.500000000 16.500000000 13.500000000 35.500000000 17.500000000 13.500000000 35.500000000 18.500000000 13.500000000 35.500000000 19.500000000 13.500000000 35.500000000 20.500000000 13.500000000 35.500000000 
21.500000000 13.500000000 35.500000000 22.500000000 13.500000000 35.500000000 23.500000000 13.500000000 35.500000000 24.500000000 13.500000000 35.500000000 25.499996185 13.500000000 35.499996185 26.499954224 13.500000000 35.499954224 27.499591827 13.500000000 35.499591827 28.497470856 13.500000000 35.497467041 29.488407135 13.500000000 35.488403320 30.458978653 13.500000000 35.458980560 31.384418488 13.500000000 35.384422302 32.233222961 13.500000000 35.233222961 32.981101990 13.500000000 34.981101990 -33.981101990 14.500000000 34.981101990 -33.233226776 14.500000000 35.233222961 -32.384422302 14.500000000 35.384418488 -31.458978653 14.500000000 35.458980560 -30.488407135 14.500000000 35.488403320 -29.497472763 14.500000000 35.497474670 -28.499593735 14.500000000 35.499591827 -27.499954224 14.500000000 35.499954224 -26.499996185 14.500000000 35.499996185 -25.500000000 14.500000000 35.500000000 -24.500000000 14.500000000 35.500000000 -23.500000000 14.500000000 35.500000000 -22.500000000 14.500000000 35.500000000 -21.500000000 14.500000000 35.500000000 -20.500000000 14.500000000 35.500000000 -19.500000000 14.500000000 35.500000000 -18.500000000 14.500000000 35.500000000 -17.500000000 14.500000000 35.500000000 -16.500000000 14.500000000 35.500000000 -15.500000000 14.500000000 35.500000000 -14.500000000 14.500000000 35.500000000 -13.500000000 14.500000000 35.500000000 -12.500000000 14.500000000 35.500000000 -11.500000000 14.500000000 35.500000000 -10.500000000 14.500000000 35.500000000 -9.500000000 14.500000000 35.500000000 -8.500000000 14.500000000 35.500000000 -7.500000000 14.500000000 35.500000000 -6.500000000 14.500000000 35.500000000 -5.500000000 14.500000000 35.500000000 -4.500000000 14.500000000 35.500000000 -3.500000000 14.500000000 35.500000000 -2.500000000 14.500000000 35.500000000 -1.500000000 14.500000000 35.500000000 -0.500000000 14.500000000 35.500000000 0.500000000 14.500000000 35.500000000 1.500000000 14.500000000 35.500000000 2.500000000 14.500000000 
35.500000000 3.500000000 14.500000000 35.500000000 4.500000000 14.500000000 35.500000000 5.500000000 14.500000000 35.500000000 6.500000000 14.500000000 35.500000000 7.500000000 14.500000000 35.500000000 8.500000000 14.500000000 35.500000000 9.500000000 14.500000000 35.500000000 10.500000000 14.500000000 35.500000000 11.500000000 14.500000000 35.500000000 12.500000000 14.500000000 35.500000000 13.500000000 14.500000000 35.500000000 14.500000000 14.500000000 35.500000000 15.500000000 14.500000000 35.500000000 16.500000000 14.500000000 35.500000000 17.500000000 14.500000000 35.500000000 18.500000000 14.500000000 35.500000000 19.500000000 14.500000000 35.500000000 20.500000000 14.500000000 35.500000000 21.500000000 14.500000000 35.500000000 22.500000000 14.500000000 35.500000000 23.500000000 14.500000000 35.500000000 24.500000000 14.500000000 35.500000000 25.499996185 14.500000000 35.499996185 26.499954224 14.500000000 35.499954224 27.499591827 14.500000000 35.499591827 28.497470856 14.500000000 35.497467041 29.488407135 14.500000000 35.488403320 30.458978653 14.500000000 35.458980560 31.384418488 14.500000000 35.384422302 32.233222961 14.500000000 35.233222961 32.981101990 14.500000000 34.981101990 -33.981101990 15.500000000 34.981101990 -33.233226776 15.500000000 35.233222961 -32.384422302 15.500000000 35.384418488 -31.458978653 15.500000000 35.458980560 -30.488407135 15.500000000 35.488403320 -29.497472763 15.500000000 35.497474670 -28.499593735 15.500000000 35.499591827 -27.499954224 15.500000000 35.499954224 -26.499996185 15.500000000 35.499996185 -25.500000000 15.500000000 35.500000000 -24.500000000 15.500000000 35.500000000 -23.500000000 15.500000000 35.500000000 -22.500000000 15.500000000 35.500000000 -21.500000000 15.500000000 35.500000000 -20.500000000 15.500000000 35.500000000 -19.500000000 15.500000000 35.500000000 -18.500000000 15.500000000 35.500000000 -17.500000000 15.500000000 35.500000000 -16.500000000 15.500000000 35.500000000 -15.500000000 
15.500000000 35.500000000 -14.500000000 15.500000000 35.500000000 -13.500000000 15.500000000 35.500000000 -12.500000000 15.500000000 35.500000000 -11.500000000 15.500000000 35.500000000 -10.500000000 15.500000000 35.500000000 -9.500000000 15.500000000 35.500000000 -8.500000000 15.500000000 35.500000000 -7.500000000 15.500000000 35.500000000 -6.500000000 15.500000000 35.500000000 -5.500000000 15.500000000 35.500000000 -4.500000000 15.500000000 35.500000000 -3.500000000 15.500000000 35.500000000 -2.500000000 15.500000000 35.500000000 -1.500000000 15.500000000 35.500000000 -0.500000000 15.500000000 35.500000000 0.500000000 15.500000000 35.500000000 1.500000000 15.500000000 35.500000000 2.500000000 15.500000000 35.500000000 3.500000000 15.500000000 35.500000000 4.500000000 15.500000000 35.500000000 5.500000000 15.500000000 35.500000000 6.500000000 15.500000000 35.500000000 7.500000000 15.500000000 35.500000000 8.500000000 15.500000000 35.500000000 9.500000000 15.500000000 35.500000000 10.500000000 15.500000000 35.500000000 11.500000000 15.500000000 35.500000000 12.500000000 15.500000000 35.500000000 13.500000000 15.500000000 35.500000000 14.500000000 15.500000000 35.500000000 15.500000000 15.500000000 35.500000000 16.500000000 15.500000000 35.500000000 17.500000000 15.500000000 35.500000000 18.500000000 15.500000000 35.500000000 19.500000000 15.500000000 35.500000000 20.500000000 15.500000000 35.500000000 21.500000000 15.500000000 35.500000000 22.500000000 15.500000000 35.500000000 23.500000000 15.500000000 35.500000000 24.500000000 15.500000000 35.500000000 25.499996185 15.500000000 35.499996185 26.499954224 15.500000000 35.499954224 27.499591827 15.500000000 35.499591827 28.497470856 15.500000000 35.497467041 29.488407135 15.500000000 35.488403320 30.458978653 15.500000000 35.458980560 31.384418488 15.500000000 35.384422302 32.233222961 15.500000000 35.233222961 32.981101990 15.500000000 34.981101990 -33.981101990 16.500000000 34.981101990 -33.233226776 16.500000000 
35.233222961 -32.384422302 16.500000000 35.384418488 -31.458978653 16.500000000 35.458980560 -30.488407135 16.500000000 35.488403320 -29.497472763 16.500000000 35.497474670 -28.499593735 16.500000000 35.499591827 -27.499954224 16.500000000 35.499954224 -26.499996185 16.500000000 35.499996185 -25.500000000 16.500000000 35.500000000 -24.500000000 16.500000000 35.500000000 -23.500000000 16.500000000 35.500000000 -22.500000000 16.500000000 35.500000000 -21.500000000 16.500000000 35.500000000 -20.500000000 16.500000000 35.500000000 -19.500000000 16.500000000 35.500000000 -18.500000000 16.500000000 35.500000000 -17.500000000 16.500000000 35.500000000 -16.500000000 16.500000000 35.500000000 -15.500000000 16.500000000 35.500000000 -14.500000000 16.500000000 35.500000000 -13.500000000 16.500000000 35.500000000 -12.500000000 16.500000000 35.500000000 -11.500000000 16.500000000 35.500000000 -10.500000000 16.500000000 35.500000000 -9.500000000 16.500000000 35.500000000 -8.500000000 16.500000000 35.500000000 -7.500000000 16.500000000 35.500000000 -6.500000000 16.500000000 35.500000000 -5.500000000 16.500000000 35.500000000 -4.500000000 16.500000000 35.500000000 -3.500000000 16.500000000 35.500000000 -2.500000000 16.500000000 35.500000000 -1.500000000 16.500000000 35.500000000 -0.500000000 16.500000000 35.500000000 0.500000000 16.500000000 35.500000000 1.500000000 16.500000000 35.500000000 2.500000000 16.500000000 35.500000000 3.500000000 16.500000000 35.500000000 4.500000000 16.500000000 35.500000000 5.500000000 16.500000000 35.500000000 6.500000000 16.500000000 35.500000000 7.500000000 16.500000000 35.500000000 8.500000000 16.500000000 35.500000000 9.500000000 16.500000000 35.500000000 10.500000000 16.500000000 35.500000000 11.500000000 16.500000000 35.500000000 12.500000000 16.500000000 35.500000000 13.500000000 16.500000000 35.500000000 14.500000000 16.500000000 35.500000000 15.500000000 16.500000000 35.500000000 16.500000000 16.500000000 35.500000000 17.500000000 
16.500000000 35.500000000 18.500000000 16.500000000 35.500000000 19.500000000 16.500000000 35.500000000 20.500000000 16.500000000 35.500000000 21.500000000 16.500000000 35.500000000 22.500000000 16.500000000 35.500000000 23.500000000 16.500000000 35.500000000 24.500000000 16.500000000 35.500000000 25.499996185 16.500000000 35.499996185 26.499954224 16.500000000 35.499954224 27.499591827 16.500000000 35.499591827 28.497470856 16.500000000 35.497467041 29.488407135 16.500000000 35.488403320 30.458978653 16.500000000 35.458980560 31.384418488 16.500000000 35.384422302 32.233222961 16.500000000 35.233222961 32.981101990 16.500000000 34.981101990 -33.981101990 17.500000000 34.981101990 -33.233226776 17.500000000 35.233222961 -32.384422302 17.500000000 35.384418488 -31.458978653 17.500000000 35.458980560 -30.488407135 17.500000000 35.488403320 -29.497472763 17.500000000 35.497474670 -28.499593735 17.500000000 35.499591827 -27.499954224 17.500000000 35.499954224 -26.499996185 17.500000000 35.499996185 -25.500000000 17.500000000 35.500000000 -24.500000000 17.500000000 35.500000000 -23.500000000 17.500000000 35.500000000 -22.500000000 17.500000000 35.500000000 -21.500000000 17.500000000 35.500000000 -20.500000000 17.500000000 35.500000000 -19.500000000 17.500000000 35.500000000 -18.500000000 17.500000000 35.500000000 -17.500000000 17.500000000 35.500000000 -16.500000000 17.500000000 35.500000000 -15.500000000 17.500000000 35.500000000 -14.500000000 17.500000000 35.500000000 -13.500000000 17.500000000 35.500000000 -12.500000000 17.500000000 35.500000000 -11.500000000 17.500000000 35.500000000 -10.500000000 17.500000000 35.500000000 -9.500000000 17.500000000 35.500000000 -8.500000000 17.500000000 35.500000000 -7.500000000 17.500000000 35.500000000 -6.500000000 17.500000000 35.500000000 -5.500000000 17.500000000 35.500000000 -4.500000000 17.500000000 35.500000000 -3.500000000 17.500000000 35.500000000 -2.500000000 17.500000000 35.500000000 -1.500000000 17.500000000 
35.500000000 -0.500000000 17.500000000 35.500000000 0.500000000 17.500000000 35.500000000 1.500000000 17.500000000 35.500000000 2.500000000 17.500000000 35.500000000 3.500000000 17.500000000 35.500000000 4.500000000 17.500000000 35.500000000 5.500000000 17.500000000 35.500000000 6.500000000 17.500000000 35.500000000 7.500000000 17.500000000 35.500000000 8.500000000 17.500000000 35.500000000 9.500000000 17.500000000 35.500000000 10.500000000 17.500000000 35.500000000 11.500000000 17.500000000 35.500000000 12.500000000 17.500000000 35.500000000 13.500000000 17.500000000 35.500000000 14.500000000 17.500000000 35.500000000 15.500000000 17.500000000 35.500000000 16.500000000 17.500000000 35.500000000 17.500000000 17.500000000 35.500000000 18.500000000 17.500000000 35.500000000 19.500000000 17.500000000 35.500000000 20.500000000 17.500000000 35.500000000 21.500000000 17.500000000 35.500000000 22.500000000 17.500000000 35.500000000 23.500000000 17.500000000 35.500000000 24.500000000 17.500000000 35.500000000 25.499996185 17.500000000 35.499996185 26.499954224 17.500000000 35.499954224 27.499591827 17.500000000 35.499591827 28.497470856 17.500000000 35.497467041 29.488407135 17.500000000 35.488403320 30.458978653 17.500000000 35.458980560 31.384418488 17.500000000 35.384422302 32.233222961 17.500000000 35.233222961 32.981101990 17.500000000 34.981101990 -33.981101990 18.500000000 34.981101990 -33.233226776 18.500000000 35.233222961 -32.384422302 18.500000000 35.384418488 -31.458978653 18.500000000 35.458980560 -30.488407135 18.500000000 35.488403320 -29.497472763 18.500000000 35.497474670 -28.499593735 18.500000000 35.499591827 -27.499954224 18.500000000 35.499954224 -26.499996185 18.500000000 35.499996185 -25.500000000 18.500000000 35.500000000 -24.500000000 18.500000000 35.500000000 -23.500000000 18.500000000 35.500000000 -22.500000000 18.500000000 35.500000000 -21.500000000 18.500000000 35.500000000 -20.500000000 18.500000000 35.500000000 -19.500000000 18.500000000 
35.500000000 -18.500000000 18.500000000 35.500000000 -17.500000000 18.500000000 35.500000000 -16.500000000 18.500000000 35.500000000 -15.500000000 18.500000000 35.500000000 -14.500000000 18.500000000 35.500000000 -13.500000000 18.500000000 35.500000000 -12.500000000 18.500000000 35.500000000 -11.500000000 18.500000000 35.500000000 -10.500000000 18.500000000 35.500000000 -9.500000000 18.500000000 35.500000000 -8.500000000 18.500000000 35.500000000 -7.500000000 18.500000000 35.500000000 -6.500000000 18.500000000 35.500000000 -5.500000000 18.500000000 35.500000000 -4.500000000 18.500000000 35.500000000 -3.500000000 18.500000000 35.500000000 -2.500000000 18.500000000 35.500000000 -1.500000000 18.500000000 35.500000000 -0.500000000 18.500000000 35.500000000 0.500000000 18.500000000 35.500000000 1.500000000 18.500000000 35.500000000 2.500000000 18.500000000 35.500000000 3.500000000 18.500000000 35.500000000 4.500000000 18.500000000 35.500000000 5.500000000 18.500000000 35.500000000 6.500000000 18.500000000 35.500000000 7.500000000 18.500000000 35.500000000 8.500000000 18.500000000 35.500000000 9.500000000 18.500000000 35.500000000 10.500000000 18.500000000 35.500000000 11.500000000 18.500000000 35.500000000 12.500000000 18.500000000 35.500000000 13.500000000 18.500000000 35.500000000 14.500000000 18.500000000 35.500000000 15.500000000 18.500000000 35.500000000 16.500000000 18.500000000 35.500000000 17.500000000 18.500000000 35.500000000 18.500000000 18.500000000 35.500000000 19.500000000 18.500000000 35.500000000 20.500000000 18.500000000 35.500000000 21.500000000 18.500000000 35.500000000 22.500000000 18.500000000 35.500000000 23.500000000 18.500000000 35.500000000 24.500000000 18.500000000 35.500000000 25.499996185 18.500000000 35.499996185 26.499954224 18.500000000 35.499954224 27.499591827 18.500000000 35.499591827 28.497470856 18.500000000 35.497467041 29.488407135 18.500000000 35.488403320 30.458978653 18.500000000 35.458980560 31.384418488 18.500000000 
35.384422302 32.233222961 18.500000000 35.233222961 32.981101990 18.500000000 34.981101990 -33.981101990 19.500000000 34.981101990 -33.233226776 19.500000000 35.233222961 -32.384422302 19.500000000 35.384418488 -31.458978653 19.500000000 35.458980560 -30.488407135 19.500000000 35.488403320 -29.497472763 19.500000000 35.497474670 -28.499593735 19.500000000 35.499591827 -27.499954224 19.500000000 35.499954224 -26.499996185 19.500000000 35.499996185 -25.500000000 19.500000000 35.500000000 -24.500000000 19.500000000 35.500000000 -23.500000000 19.500000000 35.500000000 -22.500000000 19.500000000 35.500000000 -21.500000000 19.500000000 35.500000000 -20.500000000 19.500000000 35.500000000 -19.500000000 19.500000000 35.500000000 -18.500000000 19.500000000 35.500000000 -17.500000000 19.500000000 35.500000000 -16.500000000 19.500000000 35.500000000 -15.500000000 19.500000000 35.500000000 -14.500000000 19.500000000 35.500000000 -13.500000000 19.500000000 35.500000000 -12.500000000 19.500000000 35.500000000 -11.500000000 19.500000000 35.500000000 -10.500000000 19.500000000 35.500000000 -9.500000000 19.500000000 35.500000000 -8.500000000 19.500000000 35.500000000 -7.500000000 19.500000000 35.500000000 -6.500000000 19.500000000 35.500000000 -5.500000000 19.500000000 35.500000000 -4.500000000 19.500000000 35.500000000 -3.500000000 19.500000000 35.500000000 -2.500000000 19.500000000 35.500000000 -1.500000000 19.500000000 35.500000000 -0.500000000 19.500000000 35.500000000 0.500000000 19.500000000 35.500000000 1.500000000 19.500000000 35.500000000 2.500000000 19.500000000 35.500000000 3.500000000 19.500000000 35.500000000 4.500000000 19.500000000 35.500000000 5.500000000 19.500000000 35.500000000 6.500000000 19.500000000 35.500000000 7.500000000 19.500000000 35.500000000 8.500000000 19.500000000 35.500000000 9.500000000 19.500000000 35.500000000 10.500000000 19.500000000 35.500000000 11.500000000 19.500000000 35.500000000 12.500000000 19.500000000 35.500000000 13.500000000 
19.500000000 35.500000000 14.500000000 19.500000000 35.500000000 15.500000000 19.500000000 35.500000000 16.500000000 19.500000000 35.500000000 17.500000000 19.500000000 35.500000000 18.500000000 19.500000000 35.500000000 19.500000000 19.500000000 35.500000000 20.500000000 19.500000000 35.500000000 21.500000000 19.500000000 35.500000000 22.500000000 19.500000000 35.500000000 23.500000000 19.500000000 35.500000000 24.500000000 19.500000000 35.500000000 25.499996185 19.500000000 35.499996185 26.499954224 19.500000000 35.499954224 27.499591827 19.500000000 35.499591827 28.497470856 19.500000000 35.497467041 29.488407135 19.500000000 35.488403320 30.458978653 19.500000000 35.458980560 31.384418488 19.500000000 35.384422302 32.233222961 19.500000000 35.233222961 32.981101990 19.500000000 34.981101990 -33.981101990 20.500000000 34.981101990 -33.233226776 20.500000000 35.233222961 -32.384422302 20.500000000 35.384418488 -31.458978653 20.500000000 35.458980560 -30.488407135 20.500000000 35.488403320 -29.497472763 20.500000000 35.497474670 -28.499593735 20.500000000 35.499591827 -27.499954224 20.500000000 35.499954224 -26.499996185 20.500000000 35.499996185 -25.500000000 20.500000000 35.500000000 -24.500000000 20.500000000 35.500000000 -23.500000000 20.500000000 35.500000000 -22.500000000 20.500000000 35.500000000 -21.500000000 20.500000000 35.500000000 -20.500000000 20.500000000 35.500000000 -19.500000000 20.500000000 35.500000000 -18.500000000 20.500000000 35.500000000 -17.500000000 20.500000000 35.500000000 -16.500000000 20.500000000 35.500000000 -15.500000000 20.500000000 35.500000000 -14.500000000 20.500000000 35.500000000 -13.500000000 20.500000000 35.500000000 -12.500000000 20.500000000 35.500000000 -11.500000000 20.500000000 35.500000000 -10.500000000 20.500000000 35.500000000 -9.500000000 20.500000000 35.500000000 -8.500000000 20.500000000 35.500000000 -7.500000000 20.500000000 35.500000000 -6.500000000 20.500000000 35.500000000 -5.500000000 20.500000000 
35.500000000 -4.500000000 20.500000000 35.500000000 -3.500000000 20.500000000 35.500000000 -2.500000000 20.500000000 35.500000000 -1.500000000 20.500000000 35.500000000 -0.500000000 20.500000000 35.500000000 0.500000000 20.500000000 35.500000000 1.500000000 20.500000000 35.500000000 2.500000000 20.500000000 35.500000000 3.500000000 20.500000000 35.500000000 4.500000000 20.500000000 35.500000000 5.500000000 20.500000000 35.500000000 6.500000000 20.500000000 35.500000000 7.500000000 20.500000000 35.500000000 8.500000000 20.500000000 35.500000000 9.500000000 20.500000000 35.500000000 10.500000000 20.500000000 35.500000000 11.500000000 20.500000000 35.500000000 12.500000000 20.500000000 35.500000000 13.500000000 20.500000000 35.500000000 14.500000000 20.500000000 35.500000000 15.500000000 20.500000000 35.500000000 16.500000000 20.500000000 35.500000000 17.500000000 20.500000000 35.500000000 18.500000000 20.500000000 35.500000000 19.500000000 20.500000000 35.500000000 20.500000000 20.500000000 35.500000000 21.500000000 20.500000000 35.500000000 22.500000000 20.500000000 35.500000000 23.500000000 20.500000000 35.500000000 24.500000000 20.500000000 35.500000000 25.499996185 20.500000000 35.499996185 26.499954224 20.500000000 35.499954224 27.499591827 20.500000000 35.499591827 28.497470856 20.500000000 35.497467041 29.488407135 20.500000000 35.488403320 30.458978653 20.500000000 35.458980560 31.384418488 20.500000000 35.384422302 32.233222961 20.500000000 35.233222961 32.981101990 20.500000000 34.981101990 -33.981101990 21.500000000 34.981101990 -33.233226776 21.500000000 35.233222961 -32.384422302 21.500000000 35.384418488 -31.458978653 21.500000000 35.458980560 -30.488407135 21.500000000 35.488403320 -29.497472763 21.500000000 35.497474670 -28.499593735 21.500000000 35.499591827 -27.499954224 21.500000000 35.499954224 -26.499996185 21.500000000 35.499996185 -25.500000000 21.500000000 35.500000000 -24.500000000 21.500000000 35.500000000 -23.500000000 21.500000000 
35.500000000 -22.500000000 21.500000000 35.500000000 -21.500000000 21.500000000 35.500000000 -20.500000000 21.500000000 35.500000000 -19.500000000 21.500000000 35.500000000 -18.500000000 21.500000000 35.500000000 -17.500000000 21.500000000 35.500000000 -16.500000000 21.500000000 35.500000000 -15.500000000 21.500000000 35.500000000 -14.500000000 21.500000000 35.500000000 -13.500000000 21.500000000 35.500000000 -12.500000000 21.500000000 35.500000000 -11.500000000 21.500000000 35.500000000 -10.500000000 21.500000000 35.500000000 -9.500000000 21.500000000 35.500000000 -8.500000000 21.500000000 35.500000000 -7.500000000 21.500000000 35.500000000 -6.500000000 21.500000000 35.500000000 -5.500000000 21.500000000 35.500000000 -4.500000000 21.500000000 35.500000000 -3.500000000 21.500000000 35.500000000 -2.500000000 21.500000000 35.500000000 -1.500000000 21.500000000 35.500000000 -0.500000000 21.500000000 35.500000000 0.500000000 21.500000000 35.500000000 1.500000000 21.500000000 35.500000000 2.500000000 21.500000000 35.500000000 3.500000000 21.500000000 35.500000000 4.500000000 21.500000000 35.500000000 5.500000000 21.500000000 35.500000000 6.500000000 21.500000000 35.500000000 7.500000000 21.500000000 35.500000000 8.500000000 21.500000000 35.500000000 9.500000000 21.500000000 35.500000000 10.500000000 21.500000000 35.500000000 11.500000000 21.500000000 35.500000000 12.500000000 21.500000000 35.500000000 13.500000000 21.500000000 35.500000000 14.500000000 21.500000000 35.500000000 15.500000000 21.500000000 35.500000000 16.500000000 21.500000000 35.500000000 17.500000000 21.500000000 35.500000000 18.500000000 21.500000000 35.500000000 19.500000000 21.500000000 35.500000000 20.500000000 21.500000000 35.500000000 21.500000000 21.500000000 35.500000000 22.500000000 21.500000000 35.500000000 23.500000000 21.500000000 35.500000000 24.500000000 21.500000000 35.500000000 25.499996185 21.500000000 35.499996185 26.499954224 21.500000000 35.499954224 27.499591827 21.500000000 
35.499591827 28.497470856 21.500000000 35.497467041 29.488407135 21.500000000 35.488403320 30.458978653 21.500000000 35.458980560 31.384418488 21.500000000 35.384422302 32.233222961 21.500000000 35.233222961 32.981101990 21.500000000 34.981101990 -33.981101990 22.500000000 34.981101990 -33.233226776 22.500000000 35.233222961 -32.384422302 22.500000000 35.384418488 -31.458978653 22.500000000 35.458980560 -30.488407135 22.500000000 35.488403320 -29.497472763 22.500000000 35.497474670 -28.499593735 22.500000000 35.499591827 -27.499954224 22.500000000 35.499954224 -26.499996185 22.500000000 35.499996185 -25.500000000 22.500000000 35.500000000 -24.500000000 22.500000000 35.500000000 -23.500000000 22.500000000 35.500000000 -22.500000000 22.500000000 35.500000000 -21.500000000 22.500000000 35.500000000 -20.500000000 22.500000000 35.500000000 -19.500000000 22.500000000 35.500000000 -18.500000000 22.500000000 35.500000000 -17.500000000 22.500000000 35.500000000 -16.500000000 22.500000000 35.500000000 -15.500000000 22.500000000 35.500000000 -14.500000000 22.500000000 35.500000000 -13.500000000 22.500000000 35.500000000 -12.500000000 22.500000000 35.500000000 -11.500000000 22.500000000 35.500000000 -10.500000000 22.500000000 35.500000000 -9.500000000 22.500000000 35.500000000 -8.500000000 22.500000000 35.500000000 -7.500000000 22.500000000 35.500000000 -6.500000000 22.500000000 35.500000000 -5.500000000 22.500000000 35.500000000 -4.500000000 22.500000000 35.500000000 -3.500000000 22.500000000 35.500000000 -2.500000000 22.500000000 35.500000000 -1.500000000 22.500000000 35.500000000 -0.500000000 22.500000000 35.500000000 0.500000000 22.500000000 35.500000000 1.500000000 22.500000000 35.500000000 2.500000000 22.500000000 35.500000000 3.500000000 22.500000000 35.500000000 4.500000000 22.500000000 35.500000000 5.500000000 22.500000000 35.500000000 6.500000000 22.500000000 35.500000000 7.500000000 22.500000000 35.500000000 8.500000000 22.500000000 35.500000000 9.500000000 
22.500000000 35.500000000 10.500000000 22.500000000 35.500000000 11.500000000 22.500000000 35.500000000 12.500000000 22.500000000 35.500000000 13.500000000 22.500000000 35.500000000 14.500000000 22.500000000 35.500000000 15.500000000 22.500000000 35.500000000 16.500000000 22.500000000 35.500000000 17.500000000 22.500000000 35.500000000 18.500000000 22.500000000 35.500000000 19.500000000 22.500000000 35.500000000 20.500000000 22.500000000 35.500000000 21.500000000 22.500000000 35.500000000 22.500000000 22.500000000 35.500000000 23.500000000 22.500000000 35.500000000 24.500000000 22.500000000 35.500000000 25.499996185 22.500000000 35.499996185 26.499954224 22.500000000 35.499954224 27.499591827 22.500000000 35.499591827 28.497470856 22.500000000 35.497467041 29.488407135 22.500000000 35.488403320 30.458978653 22.500000000 35.458980560 31.384418488 22.500000000 35.384422302 32.233222961 22.500000000 35.233222961 32.981101990 22.500000000 34.981101990 -33.981101990 23.499998093 34.981101990 -33.233226776 23.500000000 35.233222961 -32.384422302 23.500000000 35.384418488 -31.458978653 23.500000000 35.458980560 -30.488407135 23.500000000 35.488403320 -29.497472763 23.500000000 35.497474670 -28.499593735 23.500000000 35.499591827 -27.499954224 23.500000000 35.499954224 -26.499996185 23.500000000 35.499996185 -25.500000000 23.500000000 35.500000000 -24.500000000 23.500000000 35.500000000 -23.500000000 23.500000000 35.500000000 -22.500000000 23.500000000 35.500000000 -21.500000000 23.500000000 35.500000000 -20.500000000 23.500000000 35.500000000 -19.500000000 23.500000000 35.500000000 -18.500000000 23.500000000 35.500000000 -17.500000000 23.500000000 35.500000000 -16.500000000 23.500000000 35.500000000 -15.500000000 23.500000000 35.500000000 -14.500000000 23.500000000 35.500000000 -13.500000000 23.500000000 35.500000000 -12.500000000 23.500000000 35.500000000 -11.500000000 23.500000000 35.500000000 -10.500000000 23.500000000 35.500000000 -9.500000000 23.500000000 
35.500000000 -8.500000000 23.500000000 35.500000000 -7.500000000 23.500000000 35.500000000 -6.500000000 23.500000000 35.500000000 -5.500000000 23.500000000 35.500000000 -4.500000000 23.500000000 35.500000000 -3.500000000 23.500000000 35.500000000 -2.500000000 23.500000000 35.500000000 -1.500000000 23.500000000 35.500000000 -0.500000000 23.500000000 35.500000000 0.500000000 23.500000000 35.500000000 1.500000000 23.500000000 35.500000000 2.500000000 23.500000000 35.500000000 3.500000000 23.500000000 35.500000000 4.500000000 23.500000000 35.500000000 5.500000000 23.500000000 35.500000000 6.500000000 23.500000000 35.500000000 7.500000000 23.500000000 35.500000000 8.500000000 23.500000000 35.500000000 9.500000000 23.500000000 35.500000000 10.500000000 23.500000000 35.500000000 11.500000000 23.500000000 35.500000000 12.500000000 23.500000000 35.500000000 13.500000000 23.500000000 35.500000000 14.500000000 23.500000000 35.500000000 15.500000000 23.500000000 35.500000000 16.500000000 23.500000000 35.500000000 17.500000000 23.500000000 35.500000000 18.500000000 23.500000000 35.500000000 19.500000000 23.500000000 35.500000000 20.500000000 23.500000000 35.500000000 21.500000000 23.500000000 35.500000000 22.500000000 23.500000000 35.500000000 23.500000000 23.500000000 35.500000000 24.500000000 23.500000000 35.500000000 25.499996185 23.500000000 35.499996185 26.499954224 23.500000000 35.499954224 27.499591827 23.500000000 35.499591827 28.497470856 23.500000000 35.497467041 29.488407135 23.500000000 35.488403320 30.458978653 23.500000000 35.458980560 31.384418488 23.500000000 35.384422302 32.233222961 23.500000000 35.233222961 32.981101990 23.499998093 34.981101990 -33.981086731 24.499979019 34.981086731 -33.233219147 24.499984741 35.233203888 -32.384422302 24.499996185 35.384407043 -31.458978653 24.500000000 35.458972931 -30.488407135 24.500000000 35.488403320 -29.497472763 24.500000000 35.497474670 -28.499593735 24.500000000 35.499591827 -27.499954224 24.500000000 35.499954224 
-26.499996185 24.500000000 35.499996185 -25.500000000 24.500000000 35.500000000 -24.500000000 24.500000000 35.500000000 -23.500000000 24.500000000 35.500000000 -22.500000000 24.500000000 35.500000000 -21.500000000 24.500000000 35.500000000 -20.500000000 24.500000000 35.500000000 -19.500000000 24.500000000 35.500000000 -18.500000000 24.500000000 35.500000000 -17.500000000 24.500000000 35.500000000 -16.500000000 24.500000000 35.500000000 -15.500000000 24.500000000 35.500000000 -14.500000000 24.500000000 35.500000000 -13.500000000 24.500000000 35.500000000 -12.500000000 24.500000000 35.500000000 -11.500000000 24.500000000 35.500000000 -10.500000000 24.500000000 35.500000000 -9.500000000 24.500000000 35.500000000 -8.500000000 24.500000000 35.500000000 -7.500000000 24.500000000 35.500000000 -6.500000000 24.500000000 35.500000000 -5.500000000 24.500000000 35.500000000 -4.500000000 24.500000000 35.500000000 -3.500000000 24.500000000 35.500000000 -2.500000000 24.500000000 35.500000000 -1.500000000 24.500000000 35.500000000 -0.500000000 24.500000000 35.500000000 0.500000000 24.500000000 35.500000000 1.500000000 24.500000000 35.500000000 2.500000000 24.500000000 35.500000000 3.500000000 24.500000000 35.500000000 4.500000000 24.500000000 35.500000000 5.500000000 24.500000000 35.500000000 6.500000000 24.500000000 35.500000000 7.500000000 24.500000000 35.500000000 8.500000000 24.500000000 35.500000000 9.500000000 24.500000000 35.500000000 10.500000000 24.500000000 35.500000000 11.500000000 24.500000000 35.500000000 12.500000000 24.500000000 35.500000000 13.500000000 24.500000000 35.500000000 14.500000000 24.500000000 35.500000000 15.500000000 24.500000000 35.500000000 16.500000000 24.500000000 35.500000000 17.500000000 24.500000000 35.500000000 18.500000000 24.500000000 35.500000000 19.500000000 24.500000000 35.500000000 20.500000000 24.500000000 35.500000000 21.500000000 24.500000000 35.500000000 22.500000000 24.500000000 35.500000000 23.500000000 24.500000000 35.500000000 
24.500000000 24.500000000 35.500000000 25.499996185 24.500000000 35.499996185 26.499954224 24.500000000 35.499954224 27.499591827 24.500000000 35.499591827 28.497470856 24.500000000 35.497467041 29.488407135 24.500000000 35.488403320 30.458978653 24.500000000 35.458976746 31.384418488 24.499996185 35.384407043 32.233219147 24.499988556 35.233207703 32.981086731 24.499979019 34.981086731 -33.980972290 25.499826431 34.980957031 -33.233169556 25.499874115 35.233074188 -32.384407043 25.499950409 35.384307861 -31.458978653 25.499988556 35.458930969 -30.488407135 25.499996185 35.488388062 -29.497472763 25.499996185 35.497470856 -28.499593735 25.499996185 35.499588013 -27.499954224 25.499996185 35.499950409 -26.499996185 25.499996185 35.499992371 -25.500000000 25.499996185 35.499996185 -24.500000000 25.499996185 35.499996185 -23.500000000 25.499996185 35.499996185 -22.500000000 25.499996185 35.499996185 -21.500000000 25.499996185 35.499996185 -20.500000000 25.499996185 35.499996185 -19.500000000 25.499996185 35.499996185 -18.500000000 25.499996185 35.499996185 -17.500000000 25.499996185 35.499996185 -16.500000000 25.499996185 35.499996185 -15.500000000 25.499996185 35.499996185 -14.500000000 25.499996185 35.499996185 -13.500000000 25.499996185 35.499996185 -12.500000000 25.499996185 35.499996185 -11.500000000 25.499996185 35.499996185 -10.500000000 25.499996185 35.499996185 -9.500000000 25.499996185 35.499996185 -8.500000000 25.499996185 35.499996185 -7.500000000 25.499996185 35.499996185 -6.500000000 25.499996185 35.499996185 -5.500000000 25.499996185 35.499996185 -4.500000000 25.499996185 35.499996185 -3.500000000 25.499996185 35.499996185 -2.500000000 25.499996185 35.499996185 -1.500000000 25.499996185 35.499996185 -0.500000000 25.499996185 35.499996185 0.500000000 25.499996185 35.499996185 1.500000000 25.499996185 35.499996185 2.500000000 25.499996185 35.499996185 3.500000000 25.499996185 35.499996185 4.500000000 25.499996185 35.499996185 5.500000000 25.499996185 
35.499996185 6.500000000 25.499996185 35.499996185 7.500000000 25.499996185 35.499996185 8.500000000 25.499996185 35.499996185 9.500000000 25.499996185 35.499996185 10.500000000 25.499996185 35.499996185 11.500000000 25.499996185 35.499996185 12.500000000 25.499996185 35.499996185 13.500000000 25.499996185 35.499996185 14.500000000 25.499996185 35.499996185 15.500000000 25.499996185 35.499996185 16.500000000 25.499996185 35.499996185 17.500000000 25.499996185 35.499996185 18.500000000 25.499996185 35.499996185 19.500000000 25.499996185 35.499996185 20.500000000 25.499996185 35.499996185 21.500000000 25.499996185 35.499996185 22.500000000 25.499996185 35.499996185 23.500000000 25.499996185 35.499996185 24.500000000 25.499996185 35.499996185 25.499996185 25.499996185 35.499992371 26.499954224 25.499996185 35.499950409 27.499591827 25.499996185 35.499588013 28.497470856 25.499996185 35.497467041 29.488407135 25.499996185 35.488391876 30.458974838 25.499988556 35.458934784 31.384403229 25.499950409 35.384307861 32.233165741 25.499874115 35.233070374 32.980972290 25.499826431 34.980957031 -33.980331421 26.498952866 34.980201721 -33.232864380 26.499227524 35.232303619 -32.384296417 26.499622345 35.383720398 -31.458948135 26.499858856 35.458606720 -30.488397598 26.499938965 35.488258362 -29.497472763 26.499954224 35.497406006 -28.499593735 26.499954224 35.499549866 -27.499954224 26.499954224 35.499908447 -26.499996185 26.499954224 35.499950409 -25.500000000 26.499954224 35.499954224 -24.500000000 26.499954224 35.499954224 -23.500000000 26.499954224 35.499954224 -22.500000000 26.499954224 35.499954224 -21.500000000 26.499954224 35.499954224 -20.500000000 26.499954224 35.499954224 -19.500000000 26.499954224 35.499954224 -18.500000000 26.499954224 35.499954224 -17.500000000 26.499954224 35.499954224 -16.500000000 26.499954224 35.499954224 -15.500000000 26.499954224 35.499954224 -14.500000000 26.499954224 35.499954224 -13.500000000 26.499954224 35.499954224 -12.500000000 
26.499954224 35.499954224 -11.500000000 26.499954224 35.499954224 -10.500000000 26.499954224 35.499954224 -9.500000000 26.499954224 35.499954224 -8.500000000 26.499954224 35.499954224 -7.500000000 26.499954224 35.499954224 -6.500000000 26.499954224 35.499954224 -5.500000000 26.499954224 35.499954224 -4.500000000 26.499954224 35.499954224 -3.500000000 26.499954224 35.499954224 -2.500000000 26.499954224 35.499954224 -1.500000000 26.499954224 35.499954224 -0.500000000 26.499954224 35.499954224 0.500000000 26.499954224 35.499954224 1.500000000 26.499954224 35.499954224 2.500000000 26.499954224 35.499954224 3.500000000 26.499954224 35.499954224 4.500000000 26.499954224 35.499954224 5.500000000 26.499954224 35.499954224 6.500000000 26.499954224 35.499954224 7.500000000 26.499954224 35.499954224 8.500000000 26.499954224 35.499954224 9.500000000 26.499954224 35.499954224 10.500000000 26.499954224 35.499954224 11.500000000 26.499954224 35.499954224 12.500000000 26.499954224 35.499954224 13.500000000 26.499954224 35.499954224 14.500000000 26.499954224 35.499954224 15.500000000 26.499954224 35.499954224 16.500000000 26.499954224 35.499954224 17.500000000 26.499954224 35.499954224 18.500000000 26.499954224 35.499954224 19.500000000 26.499954224 35.499954224 20.500000000 26.499954224 35.499954224 21.500000000 26.499954224 35.499954224 22.500000000 26.499954224 35.499954224 23.500000000 26.499954224 35.499954224 24.500000000 26.499954224 35.499954224 25.499996185 26.499954224 35.499950409 26.499954224 26.499954224 35.499908447 27.499591827 26.499954224 35.499542236 28.497470856 26.499954224 35.497409821 29.488397598 26.499938965 35.488258362 30.458948135 26.499862671 35.458606720 31.384296417 26.499622345 35.383720398 32.232860565 26.499225616 35.232303619 32.980327606 26.498952866 34.980201721 -33.977615356 27.495172501 34.976860046 -33.231597900 27.496379852 35.228954315 -32.383811951 27.498052597 35.381027222 -31.458766937 27.499073029 35.456939697 -30.488346100 27.499475479 
35.487373352 -29.497461319 27.499576569 35.496910095 -28.499593735 27.499591827 35.499160767 -27.499954224 27.499591827 35.499542236 -26.499996185 27.499591827 35.499591827 -25.500000000 27.499591827 35.499591827 -24.500000000 27.499591827 35.499591827 -23.500000000 27.499591827 35.499591827 -22.500000000 27.499591827 35.499591827 -21.500000000 27.499591827 35.499591827 -20.500000000 27.499591827 35.499591827 -19.500000000 27.499591827 35.499591827 -18.500000000 27.499591827 35.499591827 -17.500000000 27.499591827 35.499591827 -16.500000000 27.499591827 35.499591827 -15.500000000 27.499591827 35.499591827 -14.500000000 27.499591827 35.499591827 -13.500000000 27.499591827 35.499591827 -12.500000000 27.499591827 35.499591827 -11.500000000 27.499591827 35.499591827 -10.500000000 27.499591827 35.499591827 -9.500000000 27.499591827 35.499591827 -8.500000000 27.499591827 35.499591827 -7.500000000 27.499591827 35.499591827 -6.500000000 27.499591827 35.499591827 -5.500000000 27.499591827 35.499591827 -4.500000000 27.499591827 35.499591827 -3.500000000 27.499591827 35.499591827 -2.500000000 27.499591827 35.499591827 -1.500000000 27.499591827 35.499591827 -0.500000000 27.499591827 35.499591827 0.500000000 27.499591827 35.499591827 1.500000000 27.499591827 35.499591827 2.500000000 27.499591827 35.499591827 3.500000000 27.499591827 35.499591827 4.500000000 27.499591827 35.499591827 5.500000000 27.499591827 35.499591827 6.500000000 27.499591827 35.499591827 7.500000000 27.499591827 35.499591827 8.500000000 27.499591827 35.499591827 9.500000000 27.499591827 35.499591827 10.500000000 27.499591827 35.499591827 11.500000000 27.499591827 35.499591827 12.500000000 27.499591827 35.499591827 13.500000000 27.499591827 35.499591827 14.500000000 27.499591827 35.499591827 15.500000000 27.499591827 35.499591827 16.500000000 27.499591827 35.499591827 17.500000000 27.499591827 35.499591827 18.500000000 27.499591827 35.499591827 19.500000000 27.499591827 35.499591827 20.500000000 27.499591827 
35.499591827 21.500000000 27.499591827 35.499591827 22.500000000 27.499591827 35.499591827 23.500000000 27.499591827 35.499591827 24.500000000 27.499591827 35.499591827 25.499996185 27.499591827 35.499591827 26.499954224 27.499591827 35.499546051 27.499591827 27.499591827 35.499164581 28.497457504 27.499576569 35.496910095 29.488346100 27.499475479 35.487373352 30.458766937 27.499073029 35.456939697 31.383810043 27.498052597 35.381027222 32.231597900 27.496379852 35.228954315 32.977619171 27.495172501 34.976860046 -33.968864441 28.481937408 34.965763092 -33.227870941 28.486719131 35.217517853 -32.382308960 28.492321014 35.371490479 -31.458078384 28.495611191 35.450428009 -30.488048553 28.496957779 35.483341217 -29.497375488 28.497371674 35.494178772 -28.499578476 28.497461319 35.496910095 -27.499954224 28.497470856 35.497402191 -26.499996185 28.497470856 35.497467041 -25.500000000 28.497470856 35.497470856 -24.500000000 28.497470856 35.497470856 -23.500000000 28.497470856 35.497470856 -22.500000000 28.497470856 35.497470856 -21.500000000 28.497470856 35.497470856 -20.500000000 28.497470856 35.497470856 -19.500000000 28.497470856 35.497470856 -18.500000000 28.497470856 35.497470856 -17.500000000 28.497470856 35.497470856 -16.500000000 28.497470856 35.497470856 -15.500000000 28.497470856 35.497470856 -14.500000000 28.497470856 35.497470856 -13.500000000 28.497470856 35.497470856 -12.500000000 28.497470856 35.497470856 -11.500000000 28.497470856 35.497470856 -10.500000000 28.497470856 35.497470856 -9.500000000 28.497470856 35.497470856 -8.500000000 28.497470856 35.497470856 -7.500000000 28.497470856 35.497470856 -6.500000000 28.497470856 35.497470856 -5.500000000 28.497470856 35.497470856 -4.500000000 28.497470856 35.497470856 -3.500000000 28.497470856 35.497470856 -2.500000000 28.497470856 35.497470856 -1.500000000 28.497470856 35.497470856 -0.500000000 28.497470856 35.497470856 0.500000000 28.497470856 35.497470856 1.500000000 28.497470856 35.497470856 2.500000000 
28.497470856 35.497470856 3.500000000 28.497470856 35.497470856 4.500000000 28.497470856 35.497470856 5.500000000 28.497470856 35.497470856 6.500000000 28.497470856 35.497470856 7.500000000 28.497470856 35.497470856 8.500000000 28.497470856 35.497470856 9.500000000 28.497470856 35.497470856 10.500000000 28.497470856 35.497470856 11.500000000 28.497470856 35.497470856 12.500000000 28.497470856 35.497470856 13.500000000 28.497470856 35.497470856 14.500000000 28.497470856 35.497470856 15.500000000 28.497470856 35.497470856 16.500000000 28.497470856 35.497470856 17.500000000 28.497470856 35.497470856 18.500000000 28.497470856 35.497470856 19.500000000 28.497470856 35.497470856 20.500000000 28.497470856 35.497470856 21.500000000 28.497470856 35.497470856 22.500000000 28.497470856 35.497470856 23.500000000 28.497470856 35.497470856 24.500000000 28.497470856 35.497470856 25.499996185 28.497470856 35.497467041 26.499954224 28.497470856 35.497406006 27.499576569 28.497457504 35.496910095 28.497371674 28.497375488 35.494174957 29.488048553 28.496957779 35.483337402 30.458078384 28.495611191 35.450428009 31.382312775 28.492321014 35.371490479 32.227874756 28.486719131 35.217510223 32.968864441 28.481939316 34.965759277 -33.946811676 29.442840576 34.937091827 -33.220714569 29.460596085 35.185966492 -32.379219055 29.476003647 35.344280243 -31.456085205 29.483789444 35.430480957 -30.486968994 29.486968994 35.469238281 -29.496959686 29.488048553 35.483337402 -28.499475479 29.488346100 35.487373352 -27.499938965 29.488399506 35.488258362 -26.499996185 29.488407135 35.488391876 -25.500000000 29.488407135 35.488403320 -24.500000000 29.488407135 35.488407135 -23.500000000 29.488407135 35.488407135 -22.500000000 29.488407135 35.488407135 -21.500000000 29.488407135 35.488407135 -20.500000000 29.488407135 35.488407135 -19.500000000 29.488407135 35.488407135 -18.500000000 29.488407135 35.488407135 -17.500000000 29.488407135 35.488407135 -16.500000000 29.488407135 35.488407135 
-15.500000000 29.488407135 35.488407135 -14.500000000 29.488407135 35.488407135 -13.500000000 29.488407135 35.488407135 -12.500000000 29.488407135 35.488407135 -11.500000000 29.488407135 35.488407135 -10.500000000 29.488407135 35.488407135 -9.500000000 29.488407135 35.488407135 -8.500000000 29.488407135 35.488407135 -7.500000000 29.488407135 35.488407135 -6.500000000 29.488407135 35.488407135 -5.500000000 29.488407135 35.488407135 -4.500000000 29.488407135 35.488407135 -3.500000000 29.488407135 35.488407135 -2.500000000 29.488407135 35.488407135 -1.500000000 29.488407135 35.488407135 -0.500000000 29.488407135 35.488407135 0.500000000 29.488407135 35.488407135 1.500000000 29.488407135 35.488407135 2.500000000 29.488407135 35.488407135 3.500000000 29.488407135 35.488407135 4.500000000 29.488407135 35.488407135 5.500000000 29.488407135 35.488407135 6.500000000 29.488407135 35.488407135 7.500000000 29.488407135 35.488407135 8.500000000 29.488407135 35.488407135 9.500000000 29.488407135 35.488407135 10.500000000 29.488407135 35.488407135 11.500000000 29.488407135 35.488407135 12.500000000 29.488407135 35.488407135 13.500000000 29.488407135 35.488407135 14.500000000 29.488407135 35.488407135 15.500000000 29.488407135 35.488407135 16.500000000 29.488407135 35.488407135 17.500000000 29.488407135 35.488407135 18.500000000 29.488407135 35.488407135 19.500000000 29.488407135 35.488407135 20.500000000 29.488407135 35.488407135 21.500000000 29.488407135 35.488407135 22.500000000 29.488407135 35.488407135 23.500000000 29.488407135 35.488407135 24.500000000 29.488407135 35.488407135 25.499996185 29.488407135 35.488391876 26.499938965 29.488399506 35.488258362 27.499475479 29.488346100 35.487373352 28.496959686 29.488048553 35.483337402 29.486968994 29.486965179 35.469238281 30.456085205 29.483789444 35.430480957 31.379222870 29.476003647 35.344280243 32.220714569 29.460596085 35.185966492 32.946811676 29.442840576 34.937091827 -33.903377533 30.336603165 34.879737854 -33.216838837 
30.405670166 35.112342834 -32.375358582 30.438953400 35.280399323 -31.451217651 30.451217651 35.380989075 -30.483789444 30.456085205 35.430473328 -29.495611191 30.458078384 35.450424194 -28.499073029 30.458770752 35.456939697 -27.499858856 30.458950043 35.458606720 -26.499988556 30.458980560 35.458930969 -25.500000000 30.458980560 35.458976746 -24.500000000 30.458980560 35.458976746 -23.500000000 30.458980560 35.458976746 -22.500000000 30.458980560 35.458976746 -21.500000000 30.458980560 35.458976746 -20.500000000 30.458980560 35.458976746 -19.500000000 30.458980560 35.458976746 -18.500000000 30.458980560 35.458976746 -17.500000000 30.458980560 35.458976746 -16.500000000 30.458980560 35.458976746 -15.500000000 30.458980560 35.458976746 -14.500000000 30.458980560 35.458976746 -13.500000000 30.458980560 35.458976746 -12.500000000 30.458980560 35.458976746 -11.500000000 30.458980560 35.458976746 -10.500000000 30.458980560 35.458976746 -9.500000000 30.458980560 35.458976746 -8.500000000 30.458980560 35.458976746 -7.500000000 30.458980560 35.458976746 -6.500000000 30.458980560 35.458976746 -5.500000000 30.458980560 35.458976746 -4.500000000 30.458980560 35.458976746 -3.500000000 30.458980560 35.458976746 -2.500000000 30.458980560 35.458976746 -1.500000000 30.458980560 35.458976746 -0.500000000 30.458980560 35.458976746 0.500000000 30.458980560 35.458976746 1.500000000 30.458980560 35.458976746 2.500000000 30.458980560 35.458976746 3.500000000 30.458980560 35.458976746 4.500000000 30.458980560 35.458976746 5.500000000 30.458980560 35.458976746 6.500000000 30.458980560 35.458976746 7.500000000 30.458980560 35.458976746 8.500000000 30.458980560 35.458976746 9.500000000 30.458980560 35.458976746 10.500000000 30.458980560 35.458976746 11.500000000 30.458980560 35.458976746 12.500000000 30.458980560 35.458976746 13.500000000 30.458980560 35.458976746 14.500000000 30.458980560 35.458976746 15.500000000 30.458980560 35.458976746 16.500000000 30.458980560 35.458976746 
17.500000000 30.458980560 35.458976746 18.500000000 30.458980560 35.458976746 19.500000000 30.458980560 35.458976746 20.500000000 30.458980560 35.458976746 21.500000000 30.458980560 35.458976746 22.500000000 30.458980560 35.458976746 23.500000000 30.458980560 35.458976746 24.500000000 30.458980560 35.458972931 25.499988556 30.458978653 35.458930969 26.499858856 30.458948135 35.458606720 27.499073029 30.458770752 35.456939697 28.495611191 30.458080292 35.450424194 29.483789444 30.456085205 35.430473328 30.451217651 30.451217651 35.380989075 31.375356674 30.438949585 35.280395508 32.216835022 30.405673981 35.112342834 32.903373718 30.336603165 34.879737854 -33.840930939 31.035345078 34.797355652 -33.254276276 31.334241867 34.954723358 -32.371025085 31.371026993 35.154674530 -31.438953400 31.375360489 35.280391693 -30.476007462 31.379222870 35.344280243 -29.492319107 31.382312775 35.371486664 -28.498052597 31.383813858 35.381027222 -27.499622345 31.384298325 35.383720398 -26.499948502 31.384403229 35.384304047 -25.499996185 31.384418488 35.384407043 -24.500000000 31.384418488 35.384414673 -23.500000000 31.384418488 35.384414673 -22.500000000 31.384418488 35.384414673 -21.500000000 31.384418488 35.384414673 -20.500000000 31.384418488 35.384414673 -19.500000000 31.384418488 35.384414673 -18.500000000 31.384418488 35.384414673 -17.500000000 31.384418488 35.384414673 -16.500000000 31.384418488 35.384414673 -15.500000000 31.384418488 35.384414673 -14.500000000 31.384418488 35.384414673 -13.500000000 31.384418488 35.384414673 -12.500000000 31.384418488 35.384414673 -11.500000000 31.384418488 35.384414673 -10.500000000 31.384418488 35.384414673 -9.500000000 31.384418488 35.384414673 -8.500000000 31.384418488 35.384414673 -7.500000000 31.384418488 35.384414673 -6.500000000 31.384418488 35.384414673 -5.500000000 31.384418488 35.384414673 -4.500000000 31.384418488 35.384414673 -3.500000000 31.384418488 35.384414673 -2.500000000 31.384418488 35.384414673 -1.500000000 
31.384418488 35.384414673 -0.500000000 31.384418488 35.384414673 0.500000000 31.384418488 35.384414673 1.500000000 31.384418488 35.384414673 2.500000000 31.384418488 35.384414673 3.500000000 31.384418488 35.384414673 4.500000000 31.384418488 35.384414673 5.500000000 31.384418488 35.384414673 6.500000000 31.384418488 35.384414673 7.500000000 31.384418488 35.384414673 8.500000000 31.384418488 35.384414673 9.500000000 31.384418488 35.384414673 10.500000000 31.384418488 35.384414673 11.500000000 31.384418488 35.384414673 12.500000000 31.384418488 35.384414673 13.500000000 31.384418488 35.384414673 14.500000000 31.384418488 35.384414673 15.500000000 31.384418488 35.384414673 16.500000000 31.384418488 35.384414673 17.500000000 31.384418488 35.384414673 18.500000000 31.384418488 35.384414673 19.500000000 31.384418488 35.384414673 20.500000000 31.384418488 35.384414673 21.500000000 31.384418488 35.384414673 22.500000000 31.384418488 35.384414673 23.500000000 31.384418488 35.384410858 24.499996185 31.384418488 35.384407043 25.499948502 31.384403229 35.384304047 26.499618530 31.384296417 35.383716583 27.498050690 31.383813858 35.381023407 28.492321014 31.382312775 35.371486664 29.476007462 31.379222870 35.344280243 30.438953400 31.375360489 35.280395508 31.371026993 31.371026993 35.154670715 32.254272461 31.334243774 34.954723358 32.840927124 31.035345078 34.797355652 -33.030693054 32.030693054 34.846317291 -32.334243774 32.254272461 34.954723358 -31.405673981 32.216838837 35.112342834 -30.460596085 32.220714569 35.185966492 -29.486719131 32.227870941 35.217510223 -28.496377945 32.231601715 35.228950500 -27.499225616 32.232860565 35.232303619 -26.499872208 32.233165741 35.233070374 -25.499986649 32.233215332 35.233203888 -24.499998093 32.233222961 35.233219147 -23.500000000 32.233222961 35.233219147 -22.500000000 32.233222961 35.233219147 -21.500000000 32.233222961 35.233219147 -20.500000000 32.233222961 35.233219147 -19.500000000 32.233222961 35.233219147 -18.500000000 
32.233222961 35.233219147 -17.500000000 32.233222961 35.233219147 -16.500000000 32.233222961 35.233219147 -15.500000000 32.233222961 35.233219147 -14.500000000 32.233222961 35.233219147 -13.500000000 32.233222961 35.233219147 -12.500000000 32.233222961 35.233219147 -11.500000000 32.233222961 35.233219147 -10.500000000 32.233222961 35.233219147 -9.500000000 32.233222961 35.233219147 -8.500000000 32.233222961 35.233219147 -7.500000000 32.233222961 35.233219147 -6.500000000 32.233222961 35.233219147 -5.500000000 32.233222961 35.233219147 -4.500000000 32.233222961 35.233219147 -3.500000000 32.233222961 35.233219147 -2.500000000 32.233222961 35.233219147 -1.500000000 32.233222961 35.233219147 -0.500000000 32.233222961 35.233219147 0.500000000 32.233222961 35.233219147 1.500000000 32.233222961 35.233219147 2.500000000 32.233222961 35.233219147 3.500000000 32.233222961 35.233219147 4.500000000 32.233222961 35.233219147 5.500000000 32.233222961 35.233219147 6.500000000 32.233222961 35.233219147 7.500000000 32.233222961 35.233219147 8.500000000 32.233222961 35.233219147 9.500000000 32.233222961 35.233219147 10.500000000 32.233222961 35.233219147 11.500000000 32.233222961 35.233219147 12.500000000 32.233222961 35.233219147 13.500000000 32.233222961 35.233219147 14.500000000 32.233222961 35.233219147 15.500000000 32.233222961 35.233219147 16.500000000 32.233222961 35.233219147 17.500000000 32.233222961 35.233219147 18.500000000 32.233222961 35.233219147 19.500000000 32.233222961 35.233219147 20.500000000 32.233222961 35.233219147 21.500000000 32.233222961 35.233219147 22.500000000 32.233222961 35.233219147 23.499998093 32.233222961 35.233219147 24.499986649 32.233219147 35.233203888 25.499874115 32.233161926 35.233070374 26.499225616 32.232856750 35.232299805 27.496377945 32.231597900 35.228950500 28.486719131 32.227874756 35.217510223 29.460596085 32.220714569 35.185962677 30.405673981 32.216838837 35.112342834 31.334243774 32.254276276 34.954723358 32.030693054 32.030693054 
34.846313477 -32.035343170 32.840934753 34.797355652 -31.336603165 32.903373718 34.879737854 -30.442840576 32.946811676 34.937095642 -29.481933594 32.968864441 34.965763092 -28.495172501 32.977619171 34.976860046 -27.498952866 32.980335236 34.980201721 -26.499826431 32.980972290 34.980957031 -25.499977112 32.981086731 34.981086731 -24.499998093 32.981101990 34.981098175 -23.500000000 32.981101990 34.981098175 -22.500000000 32.981101990 34.981098175 -21.500000000 32.981101990 34.981098175 -20.500000000 32.981101990 34.981098175 -19.500000000 32.981101990 34.981098175 -18.500000000 32.981101990 34.981098175 -17.500000000 32.981101990 34.981098175 -16.500000000 32.981101990 34.981098175 -15.500000000 32.981101990 34.981098175 -14.500000000 32.981101990 34.981098175 -13.500000000 32.981101990 34.981098175 -12.500000000 32.981101990 34.981098175 -11.500000000 32.981101990 34.981098175 -10.500000000 32.981101990 34.981098175 -9.500000000 32.981101990 34.981098175 -8.500000000 32.981101990 34.981098175 -7.500000000 32.981101990 34.981098175 -6.500000000 32.981101990 34.981098175 -5.500000000 32.981101990 34.981098175 -4.500000000 32.981101990 34.981098175 -3.500000000 32.981101990 34.981098175 -2.500000000 32.981101990 34.981098175 -1.500000000 32.981101990 34.981098175 -0.500000000 32.981101990 34.981098175 0.500000000 32.981101990 34.981098175 1.500000000 32.981101990 34.981098175 2.500000000 32.981101990 34.981098175 3.500000000 32.981101990 34.981098175 4.500000000 32.981101990 34.981098175 5.500000000 32.981101990 34.981098175 6.500000000 32.981101990 34.981098175 7.500000000 32.981101990 34.981098175 8.500000000 32.981101990 34.981098175 9.500000000 32.981101990 34.981098175 10.500000000 32.981101990 34.981098175 11.500000000 32.981101990 34.981098175 12.500000000 32.981101990 34.981098175 13.500000000 32.981101990 34.981098175 14.500000000 32.981101990 34.981098175 15.500000000 32.981101990 34.981098175 16.500000000 32.981101990 34.981098175 17.500000000 
32.981101990 34.981098175 18.500000000 32.981101990 34.981098175 19.500000000 32.981101990 34.981098175 20.500000000 32.981101990 34.981098175 21.500000000 32.981101990 34.981098175 22.500000000 32.981101990 34.981098175 23.499998093 32.981101990 34.981098175 24.499977112 32.981086731 34.981086731 25.499826431 32.980972290 34.980957031 26.498952866 32.980327606 34.980201721 27.495172501 32.977611542 34.976860046 28.481937408 32.968864441 34.965763092 29.442840576 32.946811676 34.937091827 30.336603165 32.903381348 34.879737854 31.035345078 32.840930939 34.797355652 POLYGONS 62064 248256 3 0 1 66 3 67 66 1 3 0 4818 1 3 4819 1 4818 3 0 66 4888 3 0 4888 4818 3 1 2 67 3 68 67 2 3 1 4819 4820 3 1 4820 2 3 2 3 69 3 2 69 68 3 2 4820 3 3 4821 3 4820 3 3 4 69 3 70 69 4 3 3 4821 4 3 4822 4 4821 3 4 5 71 3 4 71 70 3 4 4822 4823 3 4 4823 5 3 5 6 71 3 72 71 6 3 5 4823 4824 3 5 4824 6 3 6 7 72 3 73 72 7 3 6 4824 4825 3 6 4825 7 3 7 8 74 3 7 74 73 3 7 4825 8 3 4826 8 4825 3 8 9 74 3 75 74 9 3 8 4826 9 3 4827 9 4826 3 9 10 75 3 76 75 10 3 9 4827 10 3 4828 10 4827 3 10 11 77 3 10 77 76 3 10 4828 11 3 4829 11 4828 3 11 12 78 3 11 78 77 3 11 4829 4830 3 11 4830 12 3 12 13 78 3 79 78 13 3 12 4830 4831 3 12 4831 13 3 13 14 79 3 80 79 14 3 13 4831 4832 3 13 4832 14 3 14 15 80 3 81 80 15 3 14 4832 4833 3 14 4833 15 3 15 16 82 3 15 82 81 3 15 4833 4834 3 15 4834 16 3 16 17 83 3 16 83 82 3 16 4834 17 3 4835 17 4834 3 17 18 83 3 84 83 18 3 17 4835 18 3 4836 18 4835 3 18 19 84 3 85 84 19 3 18 4836 19 3 4837 19 4836 3 19 20 85 3 86 85 20 3 19 4837 20 3 4838 20 4837 3 20 21 87 3 20 87 86 3 20 4838 21 3 4839 21 4838 3 21 22 88 3 21 88 87 3 21 4839 22 3 4840 22 4839 3 22 23 89 3 22 89 88 3 22 4840 4841 3 22 4841 23 3 23 24 89 3 90 89 24 3 23 4841 4842 3 23 4842 24 3 24 25 90 3 91 90 25 3 24 4842 4843 3 24 4843 25 3 25 26 91 3 92 91 26 3 25 4843 4844 3 25 4844 26 3 26 27 93 3 26 93 92 3 26 4844 4845 3 26 4845 27 3 27 28 94 3 27 94 93 3 27 4845 4846 3 27 4846 28 3 28 29 95 3 28 95 94 3 28 4846 29 
3 4847 29 4846 3 29 30 95 3 96 95 30 3 29 4847 30 3 4848 30 4847 3 30 31 96 3 97 96 31 3 30 4848 31 3 4849 31 4848 3 31 32 97 3 98 97 32 3 31 4849 32 3 4850 32 4849 3 32 33 99 3 32 99 98 3 32 4850 33 3 4851 33 4850 3 33 34 100 3 33 100 99 3 33 4851 34 3 4852 34 4851 3 34 35 101 3 34 101 100 3 34 4852 35 3 4853 35 4852 3 35 36 102 3 35 102 101 3 35 4853 36 3 4854 36 4853 3 36 37 102 3 103 102 37 3 36 4854 4855 3 36 4855 37 3 37 38 103 3 104 103 38 3 37 4855 4856 3 37 4856 38 3 38 39 104 3 105 104 39 3 38 4856 4857 3 38 4857 39 3 39 40 105 3 106 105 40 3 39 4857 4858 3 39 4858 40 3 40 41 107 3 40 107 106 3 40 4858 4859 3 40 4859 41 3 41 42 108 3 41 108 107 3 41 4859 4860 3 41 4860 42 3 42 43 109 3 42 109 108 3 42 4860 4861 3 42 4861 43 3 43 44 110 3 43 110 109 3 43 4861 4862 3 43 4862 44 3 44 45 110 3 111 110 45 3 44 4862 45 3 4863 45 4862 3 45 46 111 3 112 111 46 3 45 4863 46 3 4864 46 4863 3 46 47 112 3 113 112 47 3 46 4864 47 3 4865 47 4864 3 47 48 113 3 114 113 48 3 47 4865 48 3 4866 48 4865 3 48 49 115 3 48 115 114 3 48 4866 49 3 4867 49 4866 3 49 50 116 3 49 116 115 3 49 4867 50 3 4868 50 4867 3 50 51 117 3 50 117 116 3 50 4868 51 3 4869 51 4868 3 51 52 118 3 51 118 117 3 51 4869 52 3 4870 52 4869 3 52 53 118 3 119 118 53 3 52 4870 53 3 4871 53 4870 3 53 54 119 3 120 119 54 3 53 4871 4872 3 53 4872 54 3 54 55 120 3 121 120 55 3 54 4872 4873 3 54 4873 55 3 55 56 121 3 122 121 56 3 55 4873 4874 3 55 4874 56 3 56 57 122 3 123 122 57 3 56 4874 4875 3 56 4875 57 3 57 58 124 3 57 124 123 3 57 4875 4876 3 57 4876 58 3 58 59 125 3 58 125 124 3 58 4876 4877 3 58 4877 59 3 59 60 126 3 59 126 125 3 59 4877 4878 3 59 4878 60 3 60 61 127 3 60 127 126 3 60 4878 4879 3 60 4879 61 3 61 62 128 3 61 128 127 3 61 4879 4880 3 61 4880 62 3 62 63 128 3 129 128 63 3 62 4880 63 3 4881 63 4880 3 63 64 129 3 130 129 64 3 63 4881 64 3 4882 64 4881 3 64 4882 130 3 4889 130 4882 3 65 66 133 3 134 133 66 3 65 4887 66 3 4888 66 4887 3 65 133 4895 3 65 4895 4887 3 66 67 135 3 66 135 134 3 67 
68 136 3 67 136 135 3 68 69 137 3 68 137 136 3 69 70 138 3 69 138 137 3 70 71 139 3 70 139 138 3 71 72 140 3 71 140 139 3 72 73 140 3 141 140 73 3 73 74 141 3 142 141 74 3 74 75 142 3 143 142 75 3 75 76 143 3 144 143 76 3 76 77 144 3 145 144 77 3 77 78 146 3 77 146 145 3 78 79 147 3 78 147 146 3 79 80 148 3 79 148 147 3 80 81 149 3 80 149 148 3 81 82 150 3 81 150 149 3 82 83 150 3 151 150 83 3 83 84 151 3 152 151 84 3 84 85 152 3 153 152 85 3 85 86 153 3 154 153 86 3 86 87 154 3 155 154 87 3 87 88 155 3 156 155 88 3 88 89 157 3 88 157 156 3 89 90 158 3 89 158 157 3 90 91 159 3 90 159 158 3 91 92 160 3 91 160 159 3 92 93 161 3 92 161 160 3 93 94 162 3 93 162 161 3 94 95 162 3 163 162 95 3 95 96 163 3 164 163 96 3 96 97 164 3 165 164 97 3 97 98 165 3 166 165 98 3 98 99 166 3 167 166 99 3 99 100 167 3 168 167 100 3 100 101 169 3 100 169 168 3 101 102 170 3 101 170 169 3 102 103 171 3 102 171 170 3 103 104 172 3 103 172 171 3 104 105 173 3 104 173 172 3 105 106 174 3 105 174 173 3 106 107 174 3 175 174 107 3 107 108 175 3 176 175 108 3 108 109 176 3 177 176 109 3 109 110 177 3 178 177 110 3 110 111 178 3 179 178 111 3 111 112 179 3 180 179 112 3 112 113 180 3 181 180 113 3 113 114 182 3 113 182 181 3 114 115 183 3 114 183 182 3 115 116 184 3 115 184 183 3 116 117 185 3 116 185 184 3 117 118 186 3 117 186 185 3 118 119 187 3 118 187 186 3 119 120 187 3 188 187 120 3 120 121 188 3 189 188 121 3 121 122 189 3 190 189 122 3 122 123 190 3 191 190 123 3 123 124 191 3 192 191 124 3 124 125 192 3 193 192 125 3 125 126 193 3 194 193 126 3 126 127 195 3 126 195 194 3 127 128 196 3 127 196 195 3 128 129 197 3 128 197 196 3 129 130 198 3 129 198 197 3 130 131 199 3 130 199 198 3 130 4889 4890 3 130 4890 131 3 131 4890 4896 3 131 4896 199 3 132 133 202 3 132 202 201 3 132 4894 4895 3 132 4895 133 3 132 201 4894 3 4900 4894 201 3 133 134 202 3 203 202 134 3 134 135 203 3 204 203 135 3 135 136 204 3 205 204 136 3 136 137 205 3 206 205 137 3 137 138 206 3 207 206 138 3 138 139 207 3 
208 207 139 3 139 140 208 3 209 208 140 3 140 141 210 3 140 210 209 3 141 142 211 3 141 211 210 3 142 143 212 3 142 212 211 3 143 144 213 3 143 213 212 3 144 145 214 3 144 214 213 3 145 146 215 3 145 215 214 3 146 147 216 3 146 216 215 3 147 148 217 3 147 217 216 3 148 149 217 3 218 217 149 3 149 150 218 3 219 218 150 3 150 151 219 3 220 219 151 3 151 152 220 3 221 220 152 3 152 153 221 3 222 221 153 3 153 154 222 3 223 222 154 3 154 155 223 3 224 223 155 3 155 156 225 3 155 225 224 3 156 157 226 3 156 226 225 3 157 158 227 3 157 227 226 3 158 159 228 3 158 228 227 3 159 160 229 3 159 229 228 3 160 161 230 3 160 230 229 3 161 162 231 3 161 231 230 3 162 163 232 3 162 232 231 3 163 164 232 3 233 232 164 3 164 165 233 3 234 233 165 3 165 166 234 3 235 234 166 3 166 167 235 3 236 235 167 3 167 168 236 3 237 236 168 3 168 169 237 3 238 237 169 3 169 170 238 3 239 238 170 3 170 171 239 3 240 239 171 3 171 172 241 3 171 241 240 3 172 173 242 3 172 242 241 3 173 174 243 3 173 243 242 3 174 175 244 3 174 244 243 3 175 176 245 3 175 245 244 3 176 177 246 3 176 246 245 3 177 178 247 3 177 247 246 3 178 179 248 3 178 248 247 3 179 180 248 3 249 248 180 3 180 181 249 3 250 249 181 3 181 182 250 3 251 250 182 3 182 183 251 3 252 251 183 3 183 184 252 3 253 252 184 3 184 185 253 3 254 253 185 3 185 186 254 3 255 254 186 3 186 187 255 3 256 255 187 3 187 188 257 3 187 257 256 3 188 189 258 3 188 258 257 3 189 190 259 3 189 259 258 3 190 191 260 3 190 260 259 3 191 192 261 3 191 261 260 3 192 193 262 3 192 262 261 3 193 194 263 3 193 263 262 3 194 195 264 3 194 264 263 3 195 196 265 3 195 265 264 3 196 197 265 3 266 265 197 3 197 198 266 3 267 266 198 3 198 199 267 3 268 267 199 3 199 200 268 3 269 268 200 3 199 4896 200 3 4897 200 4896 3 200 4897 269 3 4901 269 4897 3 201 202 270 3 271 270 202 3 201 270 4900 3 4904 4900 270 3 202 203 271 3 272 271 203 3 203 204 272 3 273 272 204 3 204 205 273 3 274 273 205 3 205 206 275 3 205 275 274 3 206 207 276 3 206 276 275 3 207 208 277 3 
207 277 276 3 208 209 278 3 208 278 277 3 209 210 279 3 209 279 278 3 210 211 280 3 210 280 279 3 211 212 281 3 211 281 280 3 212 213 282 3 212 282 281 3 213 214 283 3 213 283 282 3 214 215 283 3 284 283 215 3 215 216 284 3 285 284 216 3 216 217 285 3 286 285 217 3 217 218 286 3 287 286 218 3 218 219 287 3 288 287 219 3 219 220 288 3 289 288 220 3 220 221 289 3 290 289 221 3 221 222 290 3 291 290 222 3 222 223 291 3 292 291 223 3 223 224 293 3 223 293 292 3 224 225 294 3 224 294 293 3 225 226 295 3 225 295 294 3 226 227 296 3 226 296 295 3 227 228 297 3 227 297 296 3 228 229 298 3 228 298 297 3 229 230 299 3 229 299 298 3 230 231 300 3 230 300 299 3 231 232 301 3 231 301 300 3 232 233 301 3 302 301 233 3 233 234 302 3 303 302 234 3 234 235 303 3 304 303 235 3 235 236 304 3 305 304 236 3 236 237 305 3 306 305 237 3 237 238 306 3 307 306 238 3 238 239 307 3 308 307 239 3 239 240 308 3 309 308 240 3 240 241 309 3 310 309 241 3 241 242 311 3 241 311 310 3 242 243 312 3 242 312 311 3 243 244 313 3 243 313 312 3 244 245 314 3 244 314 313 3 245 246 315 3 245 315 314 3 246 247 316 3 246 316 315 3 247 248 317 3 247 317 316 3 248 249 318 3 248 318 317 3 249 250 319 3 249 319 318 3 250 251 320 3 250 320 319 3 251 252 320 3 321 320 252 3 252 253 321 3 322 321 253 3 253 254 322 3 323 322 254 3 254 255 323 3 324 323 255 3 255 256 324 3 325 324 256 3 256 257 325 3 326 325 257 3 257 258 326 3 327 326 258 3 258 259 327 3 328 327 259 3 259 260 328 3 329 328 260 3 260 261 329 3 330 329 261 3 261 262 331 3 261 331 330 3 262 263 332 3 262 332 331 3 263 264 333 3 263 333 332 3 264 265 334 3 264 334 333 3 265 266 335 3 265 335 334 3 266 267 336 3 266 336 335 3 267 268 337 3 267 337 336 3 268 269 338 3 268 338 337 3 269 4901 338 3 4905 338 4901 3 270 271 340 3 270 340 339 3 270 339 4908 3 270 4908 4904 3 271 272 340 3 341 340 272 3 272 273 341 3 342 341 273 3 273 274 342 3 343 342 274 3 274 275 343 3 344 343 275 3 275 276 344 3 345 344 276 3 276 277 345 3 346 345 277 3 277 278 346 3 347 
346 278 3 278 279 347 3 348 347 279 3 279 280 348 3 349 348 280 3 280 281 349 3 350 349 281 3 281 282 351 3 281 351 350 3 282 283 352 3 282 352 351 3 283 284 353 3 283 353 352 3 284 285 354 3 284 354 353 3 285 286 355 3 285 355 354 3 286 287 356 3 286 356 355 3 287 288 357 3 287 357 356 3 288 289 358 3 288 358 357 3 289 290 359 3 289 359 358 3 290 291 360 3 290 360 359 3 291 292 361 3 291 361 360 3 292 293 361 3 362 361 293 3 293 294 362 3 363 362 294 3 294 295 363 3 364 363 295 3 295 296 364 3 365 364 296 3 296 297 365 3 366 365 297 3 297 298 366 3 367 366 298 3 298 299 367 3 368 367 299 3 299 300 368 3 369 368 300 3 300 301 369 3 370 369 301 3 301 302 370 3 371 370 302 3 302 303 372 3 302 372 371 3 303 304 373 3 303 373 372 3 304 305 374 3 304 374 373 3 305 306 375 3 305 375 374 3 306 307 376 3 306 376 375 3 307 308 377 3 307 377 376 3 308 309 378 3 308 378 377 3 309 310 379 3 309 379 378 3 310 311 380 3 310 380 379 3 311 312 381 3 311 381 380 3 312 313 382 3 312 382 381 3 313 314 382 3 383 382 314 3 314 315 383 3 384 383 315 3 315 316 384 3 385 384 316 3 316 317 385 3 386 385 317 3 317 318 386 3 387 386 318 3 318 319 387 3 388 387 319 3 319 320 388 3 389 388 320 3 320 321 389 3 390 389 321 3 321 322 390 3 391 390 322 3 322 323 391 3 392 391 323 3 323 324 392 3 393 392 324 3 324 325 394 3 324 394 393 3 325 326 395 3 325 395 394 3 326 327 396 3 326 396 395 3 327 328 397 3 327 397 396 3 328 329 398 3 328 398 397 3 329 330 399 3 329 399 398 3 330 331 400 3 330 400 399 3 331 332 401 3 331 401 400 3 332 333 402 3 332 402 401 3 333 334 403 3 333 403 402 3 334 335 404 3 334 404 403 3 335 336 404 3 405 404 336 3 336 337 405 3 406 405 337 3 337 338 406 3 407 406 338 3 338 4905 407 3 4909 407 4905 3 339 340 408 3 409 408 340 3 339 408 4912 3 339 4912 4908 3 340 341 409 3 410 409 341 3 341 342 410 3 411 410 342 3 342 343 411 3 412 411 343 3 343 344 412 3 413 412 344 3 344 345 413 3 414 413 345 3 345 346 414 3 415 414 346 3 346 347 415 3 416 415 347 3 347 348 417 3 347 417 
416 3 348 349 418 3 348 418 417 3 349 350 419 3 349 419 418 3 350 351 420 3 350 420 419 3 351 352 421 3 351 421 420 3 352 353 422 3 352 422 421 3 353 354 423 3 353 423 422 3 354 355 424 3 354 424 423 3 355 356 425 3 355 425 424 3 356 357 426 3 356 426 425 3 357 358 427 3 357 427 426 3 358 359 427 3 428 427 359 3 359 360 428 3 429 428 360 3 360 361 429 3 430 429 361 3 361 362 430 3 431 430 362 3 362 363 431 3 432 431 363 3 363 364 432 3 433 432 364 3 364 365 433 3 434 433 365 3 365 366 434 3 435 434 366 3 366 367 435 3 436 435 367 3 367 368 436 3 437 436 368 3 368 369 437 3 438 437 369 3 369 370 438 3 439 438 370 3 370 371 440 3 370 440 439 3 371 372 441 3 371 441 440 3 372 373 442 3 372 442 441 3 373 374 443 3 373 443 442 3 374 375 444 3 374 444 443 3 375 376 445 3 375 445 444 3 376 377 446 3 376 446 445 3 377 378 447 3 377 447 446 3 378 379 448 3 378 448 447 3 379 380 449 3 379 449 448 3 380 381 450 3 380 450 449 3 381 382 451 3 381 451 450 3 382 383 451 3 452 451 383 3 383 384 452 3 453 452 384 3 384 385 453 3 454 453 385 3 385 386 454 3 455 454 386 3 386 387 455 3 456 455 387 3 387 388 456 3 457 456 388 3 388 389 457 3 458 457 389 3 389 390 458 3 459 458 390 3 390 391 459 3 460 459 391 3 391 392 460 3 461 460 392 3 392 393 461 3 462 461 393 3 393 394 462 3 463 462 394 3 394 395 464 3 394 464 463 3 395 396 465 3 395 465 464 3 396 397 466 3 396 466 465 3 397 398 467 3 397 467 466 3 398 399 468 3 398 468 467 3 399 400 469 3 399 469 468 3 400 401 470 3 400 470 469 3 401 402 471 3 401 471 470 3 402 403 472 3 402 472 471 3 403 404 473 3 403 473 472 3 404 405 474 3 404 474 473 3 405 406 475 3 405 475 474 3 406 407 475 3 476 475 407 3 407 4909 4913 3 407 4913 476 3 408 409 477 3 478 477 409 3 408 477 4916 3 408 4916 4912 3 409 410 478 3 479 478 410 3 410 411 479 3 480 479 411 3 411 412 480 3 481 480 412 3 412 413 481 3 482 481 413 3 413 414 482 3 483 482 414 3 414 415 483 3 484 483 415 3 415 416 484 3 485 484 416 3 416 417 485 3 486 485 417 3 417 418 486 3 487 486 418 3 
418 419 487 3 488 487 419 3 419 420 489 3 419 489 488 3 420 421 490 3 420 490 489 3 421 422 491 3 421 491 490 3 422 423 492 3 422 492 491 3 423 424 493 3 423 493 492 3 424 425 494 3 424 494 493 3 425 426 495 3 425 495 494 3 426 427 496 3 426 496 495 3 427 428 497 3 427 497 496 3 428 429 498 3 428 498 497 3 429 430 499 3 429 499 498 3 430 431 500 3 430 500 499 3 431 432 500 3 501 500 432 3 432 433 501 3 502 501 433 3 433 434 502 3 503 502 434 3 434 435 503 3 504 503 435 3 435 436 504 3 505 504 436 3 436 437 505 3 506 505 437 3 437 438 506 3 507 506 438 3 438 439 507 3 508 507 439 3 439 440 508 3 509 508 440 3 440 441 509 3 510 509 441 3 441 442 510 3 511 510 442 3 442 443 511 3 512 511 443 3 443 444 512 3 513 512 444 3 444 445 514 3 444 514 513 3 445 446 515 3 445 515 514 3 446 447 516 3 446 516 515 3 447 448 517 3 447 517 516 3 448 449 518 3 448 518 517 3 449 450 519 3 449 519 518 3 450 451 520 3 450 520 519 3 451 452 521 3 451 521 520 3 452 453 522 3 452 522 521 3 453 454 523 3 453 523 522 3 454 455 524 3 454 524 523 3 455 456 525 3 455 525 524 3 456 457 526 3 456 526 525 3 457 458 526 3 527 526 458 3 458 459 527 3 528 527 459 3 459 460 528 3 529 528 460 3 460 461 529 3 530 529 461 3 461 462 530 3 531 530 462 3 462 463 531 3 532 531 463 3 463 464 532 3 533 532 464 3 464 465 533 3 534 533 465 3 465 466 534 3 535 534 466 3 466 467 535 3 536 535 467 3 467 468 536 3 537 536 468 3 468 469 537 3 538 537 469 3 469 470 538 3 539 538 470 3 470 471 539 3 540 539 471 3 471 472 541 3 471 541 540 3 472 473 542 3 472 542 541 3 473 474 543 3 473 543 542 3 474 475 544 3 474 544 543 3 475 476 545 3 475 545 544 3 476 4913 4917 3 476 4917 545 3 477 478 547 3 477 547 546 3 477 546 4916 3 4920 4916 546 3 478 479 548 3 478 548 547 3 479 480 549 3 479 549 548 3 480 481 550 3 480 550 549 3 481 482 551 3 481 551 550 3 482 483 552 3 482 552 551 3 483 484 553 3 483 553 552 3 484 485 553 3 554 553 485 3 485 486 554 3 555 554 486 3 486 487 555 3 556 555 487 3 487 488 556 3 557 556 488 3 488 
489 557 3 558 557 489 3 489 490 558 3 559 558 490 3 490 491 559 3 560 559 491 3 491 492 560 3 561 560 492 3 492 493 561 3 562 561 493 3 493 494 562 3 563 562 494 3 494 495 563 3 564 563 495 3 495 496 564 3 565 564 496 3 496 497 565 3 566 565 497 3 497 498 566 3 567 566 498 3 498 499 568 3 498 568 567 3 499 500 569 3 499 569 568 3 500 501 570 3 500 570 569 3 501 502 571 3 501 571 570 3 502 503 572 3 502 572 571 3 503 504 573 3 503 573 572 3 504 505 574 3 504 574 573 3 505 506 575 3 505 575 574 3 506 507 576 3 506 576 575 3 507 508 577 3 507 577 576 3 508 509 578 3 508 578 577 3 509 510 579 3 509 579 578 3 510 511 580 3 510 580 579 3 511 512 581 3 511 581 580 3 512 513 581 3 582 581 513 3 513 514 582 3 583 582 514 3 514 515 583 3 584 583 515 3 515 516 584 3 585 584 516 3 516 517 585 3 586 585 517 3 517 518 586 3 587 586 518 3 518 519 587 3 588 587 519 3 519 520 588 3 589 588 520 3 520 521 589 3 590 589 521 3 521 522 590 3 591 590 522 3 522 523 591 3 592 591 523 3 523 524 592 3 593 592 524 3 524 525 593 3 594 593 525 3 525 526 594 3 595 594 526 3 526 527 596 3 526 596 595 3 527 528 597 3 527 597 596 3 528 529 598 3 528 598 597 3 529 530 599 3 529 599 598 3 530 531 600 3 530 600 599 3 531 532 601 3 531 601 600 3 532 533 602 3 532 602 601 3 533 534 603 3 533 603 602 3 534 535 604 3 534 604 603 3 535 536 605 3 535 605 604 3 536 537 606 3 536 606 605 3 537 538 607 3 537 607 606 3 538 539 608 3 538 608 607 3 539 540 609 3 539 609 608 3 540 541 609 3 610 609 541 3 541 542 610 3 611 610 542 3 542 543 611 3 612 611 543 3 543 544 612 3 613 612 544 3 544 545 613 3 614 613 545 3 545 4917 4921 3 545 4921 614 3 546 547 615 3 616 615 547 3 546 615 4924 3 546 4924 4920 3 547 548 616 3 617 616 548 3 548 549 617 3 618 617 549 3 549 550 618 3 619 618 550 3 550 551 619 3 620 619 551 3 551 552 620 3 621 620 552 3 552 553 621 3 622 621 553 3 553 554 622 3 623 622 554 3 554 555 624 3 554 624 623 3 555 556 625 3 555 625 624 3 556 557 626 3 556 626 625 3 557 558 627 3 557 627 626 3 558 559 
628 3 558 628 627 3 559 560 629 3 559 629 628 3 560 561 630 3 560 630 629 3 561 562 631 3 561 631 630 3 562 563 632 3 562 632 631 3 563 564 633 3 563 633 632 3 564 565 634 3 564 634 633 3 565 566 635 3 565 635 634 3 566 567 636 3 566 636 635 3 567 568 637 3 567 637 636 3 568 569 638 3 568 638 637 3 569 570 638 3 639 638 570 3 570 571 639 3 640 639 571 3 571 572 640 3 641 640 572 3 572 573 641 3 642 641 573 3 573 574 642 3 643 642 574 3 574 575 643 3 644 643 575 3 575 576 644 3 645 644 576 3 576 577 645 3 646 645 577 3 577 578 646 3 647 646 578 3 578 579 647 3 648 647 579 3 579 580 648 3 649 648 580 3 580 581 649 3 650 649 581 3 581 582 650 3 651 650 582 3 582 583 651 3 652 651 583 3 583 584 652 3 653 652 584 3 584 585 654 3 584 654 653 3 585 586 655 3 585 655 654 3 586 587 656 3 586 656 655 3 587 588 657 3 587 657 656 3 588 589 658 3 588 658 657 3 589 590 659 3 589 659 658 3 590 591 660 3 590 660 659 3 591 592 661 3 591 661 660 3 592 593 662 3 592 662 661 3 593 594 663 3 593 663 662 3 594 595 664 3 594 664 663 3 595 596 665 3 595 665 664 3 596 597 666 3 596 666 665 3 597 598 667 3 597 667 666 3 598 599 668 3 598 668 667 3 599 600 668 3 669 668 600 3 600 601 669 3 670 669 601 3 601 602 670 3 671 670 602 3 602 603 671 3 672 671 603 3 603 604 672 3 673 672 604 3 604 605 673 3 674 673 605 3 605 606 674 3 675 674 606 3 606 607 675 3 676 675 607 3 607 608 676 3 677 676 608 3 608 609 677 3 678 677 609 3 609 610 678 3 679 678 610 3 610 611 679 3 680 679 611 3 611 612 680 3 681 680 612 3 612 613 681 3 682 681 613 3 613 614 682 3 683 682 614 3 614 4921 4925 3 614 4925 683 3 615 616 685 3 615 685 684 3 615 684 4928 3 615 4928 4924 3 616 617 686 3 616 686 685 3 617 618 687 3 617 687 686 3 618 619 688 3 618 688 687 3 619 620 689 3 619 689 688 3 620 621 690 3 620 690 689 3 621 622 691 3 621 691 690 3 622 623 692 3 622 692 691 3 623 624 693 3 623 693 692 3 624 625 694 3 624 694 693 3 625 626 695 3 625 695 694 3 626 627 696 3 626 696 695 3 627 628 697 3 627 697 696 3 628 629 698 3 
628 698 697 3 629 630 698 3 699 698 630 3 630 631 699 3 700 699 631 3 631 632 700 3 701 700 632 3 632 633 701 3 702 701 633 3 633 634 702 3 703 702 634 3 634 635 703 3 704 703 635 3 635 636 704 3 705 704 636 3 636 637 705 3 706 705 637 3 637 638 706 3 707 706 638 3 638 639 707 3 708 707 639 3 639 640 708 3 709 708 640 3 640 641 709 3 710 709 641 3 641 642 710 3 711 710 642 3 642 643 711 3 712 711 643 3 643 644 712 3 713 712 644 3 644 645 713 3 714 713 645 3 645 646 715 3 645 715 714 3 646 647 716 3 646 716 715 3 647 648 717 3 647 717 716 3 648 649 718 3 648 718 717 3 649 650 719 3 649 719 718 3 650 651 720 3 650 720 719 3 651 652 721 3 651 721 720 3 652 653 722 3 652 722 721 3 653 654 723 3 653 723 722 3 654 655 724 3 654 724 723 3 655 656 725 3 655 725 724 3 656 657 726 3 656 726 725 3 657 658 727 3 657 727 726 3 658 659 728 3 658 728 727 3 659 660 729 3 659 729 728 3 660 661 729 3 730 729 661 3 661 662 730 3 731 730 662 3 662 663 731 3 732 731 663 3 663 664 732 3 733 732 664 3 664 665 733 3 734 733 665 3 665 666 734 3 735 734 666 3 666 667 735 3 736 735 667 3 667 668 736 3 737 736 668 3 668 669 737 3 738 737 669 3 669 670 738 3 739 738 670 3 670 671 739 3 740 739 671 3 671 672 740 3 741 740 672 3 672 673 741 3 742 741 673 3 673 674 742 3 743 742 674 3 674 675 743 3 744 743 675 3 675 676 744 3 745 744 676 3 676 677 746 3 676 746 745 3 677 678 747 3 677 747 746 3 678 679 748 3 678 748 747 3 679 680 749 3 679 749 748 3 680 681 750 3 680 750 749 3 681 682 751 3 681 751 750 3 682 683 752 3 682 752 751 3 683 4925 4929 3 683 4929 752 3 684 685 754 3 684 754 753 3 684 753 4932 3 684 4932 4928 3 685 686 755 3 685 755 754 3 686 687 756 3 686 756 755 3 687 688 757 3 687 757 756 3 688 689 758 3 688 758 757 3 689 690 759 3 689 759 758 3 690 691 760 3 690 760 759 3 691 692 761 3 691 761 760 3 692 693 761 3 762 761 693 3 693 694 762 3 763 762 694 3 694 695 763 3 764 763 695 3 695 696 764 3 765 764 696 3 696 697 765 3 766 765 697 3 697 698 766 3 767 766 698 3 698 699 767 3 768 
767 699 3 699 700 768 3 769 768 700 3 700 701 769 3 770 769 701 3 701 702 770 3 771 770 702 3 702 703 771 3 772 771 703 3 703 704 772 3 773 772 704 3 704 705 773 3 774 773 705 3 705 706 774 3 775 774 706 3 706 707 775 3 776 775 707 3 707 708 776 3 777 776 708 3 708 709 777 3 778 777 709 3 709 710 779 3 709 779 778 3 710 711 780 3 710 780 779 3 711 712 781 3 711 781 780 3 712 713 782 3 712 782 781 3 713 714 783 3 713 783 782 3 714 715 784 3 714 784 783 3 715 716 785 3 715 785 784 3 716 717 786 3 716 786 785 3 717 718 787 3 717 787 786 3 718 719 788 3 718 788 787 3 719 720 789 3 719 789 788 3 720 721 790 3 720 790 789 3 721 722 791 3 721 791 790 3 722 723 792 3 722 792 791 3 723 724 793 3 723 793 792 3 724 725 794 3 724 794 793 3 725 726 794 3 795 794 726 3 726 727 795 3 796 795 727 3 727 728 796 3 797 796 728 3 728 729 797 3 798 797 729 3 729 730 798 3 799 798 730 3 730 731 799 3 800 799 731 3 731 732 800 3 801 800 732 3 732 733 801 3 802 801 733 3 733 734 802 3 803 802 734 3 734 735 803 3 804 803 735 3 735 736 804 3 805 804 736 3 736 737 805 3 806 805 737 3 737 738 806 3 807 806 738 3 738 739 807 3 808 807 739 3 739 740 808 3 809 808 740 3 740 741 809 3 810 809 741 3 741 742 810 3 811 810 742 3 742 743 812 3 742 812 811 3 743 744 813 3 743 813 812 3 744 745 814 3 744 814 813 3 745 746 815 3 745 815 814 3 746 747 816 3 746 816 815 3 747 748 817 3 747 817 816 3 748 749 818 3 748 818 817 3 749 750 819 3 749 819 818 3 750 751 820 3 750 820 819 3 751 752 821 3 751 821 820 3 752 4929 4933 3 752 4933 821 3 753 754 823 3 753 823 822 3 753 822 4936 3 753 4936 4932 3 754 755 824 3 754 824 823 3 755 756 825 3 755 825 824 3 756 757 826 3 756 826 825 3 757 758 827 3 757 827 826 3 758 759 828 3 758 828 827 3 759 760 828 3 829 828 760 3 760 761 829 3 830 829 761 3 761 762 830 3 831 830 762 3 762 763 831 3 832 831 763 3 763 764 832 3 833 832 764 3 764 765 833 3 834 833 765 3 765 766 834 3 835 834 766 3 766 767 835 3 836 835 767 3 767 768 836 3 837 836 768 3 768 769 837 3 838 837 
769 3 769 770 838 3 839 838 770 3 770 771 839 3 840 839 771 3 771 772 840 3 841 840 772 3 772 773 841 3 842 841 773 3 773 774 842 3 843 842 774 3 774 775 843 3 844 843 775 3 775 776 844 3 845 844 776 3 776 777 846 3 776 846 845 3 777 778 847 3 777 847 846 3 778 779 848 3 778 848 847 3 779 780 849 3 779 849 848 3 780 781 850 3 780 850 849 3 781 782 851 3 781 851 850 3 782 783 852 3 782 852 851 3 783 784 853 3 783 853 852 3 784 785 854 3 784 854 853 3 785 786 855 3 785 855 854 3 786 787 856 3 786 856 855 3 787 788 857 3 787 857 856 3 788 789 858 3 788 858 857 3 789 790 859 3 789 859 858 3 790 791 860 3 790 860 859 3 791 792 861 3 791 861 860 3 792 793 862 3 792 862 861 3 793 794 862 3 863 862 794 3 794 795 863 3 864 863 795 3 795 796 864 3 865 864 796 3 796 797 865 3 866 865 797 3 797 798 866 3 867 866 798 3 798 799 867 3 868 867 799 3 799 800 868 3 869 868 800 3 800 801 869 3 870 869 801 3 801 802 870 3 871 870 802 3 802 803 871 3 872 871 803 3 803 804 872 3 873 872 804 3 804 805 873 3 874 873 805 3 805 806 874 3 875 874 806 3 806 807 875 3 876 875 807 3 807 808 876 3 877 876 808 3 808 809 877 3 878 877 809 3 809 810 878 3 879 878 810 3 810 811 880 3 810 880 879 3 811 812 881 3 811 881 880 3 812 813 882 3 812 882 881 3 813 814 883 3 813 883 882 3 814 815 884 3 814 884 883 3 815 816 885 3 815 885 884 3 816 817 886 3 816 886 885 3 817 818 887 3 817 887 886 3 818 819 888 3 818 888 887 3 819 820 889 3 819 889 888 3 820 821 890 3 820 890 889 3 821 4933 4937 3 821 4937 890 3 822 823 892 3 822 892 891 3 822 891 4940 3 822 4940 4936 3 823 824 893 3 823 893 892 3 824 825 894 3 824 894 893 3 825 826 895 3 825 895 894 3 826 827 896 3 826 896 895 3 827 828 897 3 827 897 896 3 828 829 897 3 898 897 829 3 829 830 898 3 899 898 830 3 830 831 899 3 900 899 831 3 831 832 900 3 901 900 832 3 832 833 901 3 902 901 833 3 833 834 902 3 903 902 834 3 834 835 903 3 904 903 835 3 835 836 904 3 905 904 836 3 836 837 905 3 906 905 837 3 837 838 906 3 907 906 838 3 838 839 907 3 908 907 839 3 
839 840 908 3 909 908 840 3 840 841 909 3 910 909 841 3 841 842 910 3 911 910 842 3 842 843 911 3 912 911 843 3 843 844 912 3 913 912 844 3 844 845 913 3 914 913 845 3 845 846 914 3 915 914 846 3 846 847 916 3 846 916 915 3 847 848 917 3 847 917 916 3 848 849 918 3 848 918 917 3 849 850 919 3 849 919 918 3 850 851 920 3 850 920 919 3 851 852 921 3 851 921 920 3 852 853 922 3 852 922 921 3 853 854 923 3 853 923 922 3 854 855 924 3 854 924 923 3 855 856 925 3 855 925 924 3 856 857 926 3 856 926 925 3 857 858 927 3 857 927 926 3 858 859 928 3 858 928 927 3 859 860 929 3 859 929 928 3 860 861 930 3 860 930 929 3 861 862 931 3 861 931 930 3 862 863 932 3 862 932 931 3 863 864 933 3 863 933 932 3 864 865 933 3 934 933 865 3 865 866 934 3 935 934 866 3 866 867 935 3 936 935 867 3 867 868 936 3 937 936 868 3 868 869 937 3 938 937 869 3 869 870 938 3 939 938 870 3 870 871 939 3 940 939 871 3 871 872 940 3 941 940 872 3 872 873 941 3 942 941 873 3 873 874 942 3 943 942 874 3 874 875 943 3 944 943 875 3 875 876 944 3 945 944 876 3 876 877 945 3 946 945 877 3 877 878 946 3 947 946 878 3 878 879 947 3 948 947 879 3 879 880 948 3 949 948 880 3 880 881 949 3 950 949 881 3 881 882 950 3 951 950 882 3 882 883 952 3 882 952 951 3 883 884 953 3 883 953 952 3 884 885 954 3 884 954 953 3 885 886 955 3 885 955 954 3 886 887 956 3 886 956 955 3 887 888 957 3 887 957 956 3 888 889 958 3 888 958 957 3 889 890 959 3 889 959 958 3 890 4937 4941 3 890 4941 959 3 891 892 961 3 891 961 960 3 891 960 4944 3 891 4944 4940 3 892 893 962 3 892 962 961 3 893 894 963 3 893 963 962 3 894 895 964 3 894 964 963 3 895 896 965 3 895 965 964 3 896 897 966 3 896 966 965 3 897 898 967 3 897 967 966 3 898 899 968 3 898 968 967 3 899 900 969 3 899 969 968 3 900 901 969 3 970 969 901 3 901 902 970 3 971 970 902 3 902 903 971 3 972 971 903 3 903 904 972 3 973 972 904 3 904 905 973 3 974 973 905 3 905 906 974 3 975 974 906 3 906 907 975 3 976 975 907 3 907 908 976 3 977 976 908 3 908 909 977 3 978 977 909 3 909 
910 978 3 979 978 910 3 910 911 979 3 980 979 911 3 911 912 980 3 981 980 912 3 912 913 981 3 982 981 913 3 913 914 982 3 983 982 914 3 914 915 983 3 984 983 915 3 915 916 984 3 985 984 916 3 916 917 985 3 986 985 917 3 917 918 986 3 987 986 918 3 918 919 987 3 988 987 919 3 919 920 989 3 919 989 988 3 920 921 990 3 920 990 989 3 921 922 991 3 921 991 990 3 922 923 992 3 922 992 991 3 923 924 993 3 923 993 992 3 924 925 994 3 924 994 993 3 925 926 995 3 925 995 994 3 926 927 996 3 926 996 995 3 927 928 997 3 927 997 996 3 928 929 998 3 928 998 997 3 929 930 999 3 929 999 998 3 930 931 1000 3 930 1000 999 3 931 932 1001 3 931 1001 1000 3 932 933 1002 3 932 1002 1001 3 933 934 1003 3 933 1003 1002 3 934 935 1004 3 934 1004 1003 3 935 936 1005 3 935 1005 1004 3 936 937 1006 3 936 1006 1005 3 937 938 1007 3 937 1007 1006 3 938 939 1007 3 1008 1007 939 3 939 940 1008 3 1009 1008 940 3 940 941 1009 3 1010 1009 941 3 941 942 1010 3 1011 1010 942 3 942 943 1011 3 1012 1011 943 3 943 944 1012 3 1013 1012 944 3 944 945 1013 3 1014 1013 945 3 945 946 1014 3 1015 1014 946 3 946 947 1015 3 1016 1015 947 3 947 948 1016 3 1017 1016 948 3 948 949 1017 3 1018 1017 949 3 949 950 1018 3 1019 1018 950 3 950 951 1019 3 1020 1019 951 3 951 952 1020 3 1021 1020 952 3 952 953 1021 3 1022 1021 953 3 953 954 1022 3 1023 1022 954 3 954 955 1023 3 1024 1023 955 3 955 956 1024 3 1025 1024 956 3 956 957 1025 3 1026 1025 957 3 957 958 1027 3 957 1027 1026 3 958 959 1028 3 958 1028 1027 3 959 4941 1028 3 4945 1028 4941 3 960 961 1030 3 960 1030 1029 3 960 1029 4948 3 960 4948 4944 3 961 962 1031 3 961 1031 1030 3 962 963 1032 3 962 1032 1031 3 963 964 1033 3 963 1033 1032 3 964 965 1034 3 964 1034 1033 3 965 966 1035 3 965 1035 1034 3 966 967 1036 3 966 1036 1035 3 967 968 1037 3 967 1037 1036 3 968 969 1038 3 968 1038 1037 3 969 970 1039 3 969 1039 1038 3 970 971 1040 3 970 1040 1039 3 971 972 1041 3 971 1041 1040 3 972 973 1042 3 972 1042 1041 3 973 974 1043 3 973 1043 1042 3 974 975 1044 3 974 
1044 1043 3 975 976 1045 3 975 1045 1044 3 976 977 1045 3 1046 1045 977 3 977 978 1046 3 1047 1046 978 3 978 979 1047 3 1048 1047 979 3 979 980 1048 3 1049 1048 980 3 980 981 1049 3 1050 1049 981 3 981 982 1050 3 1051 1050 982 3 982 983 1051 3 1052 1051 983 3 983 984 1052 3 1053 1052 984 3 984 985 1053 3 1054 1053 985 3 985 986 1054 3 1055 1054 986 3 986 987 1055 3 1056 1055 987 3 987 988 1056 3 1057 1056 988 3 988 989 1057 3 1058 1057 989 3 989 990 1058 3 1059 1058 990 3 990 991 1059 3 1060 1059 991 3 991 992 1060 3 1061 1060 992 3 992 993 1061 3 1062 1061 993 3 993 994 1062 3 1063 1062 994 3 994 995 1063 3 1064 1063 995 3 995 996 1065 3 995 1065 1064 3 996 997 1066 3 996 1066 1065 3 997 998 1067 3 997 1067 1066 3 998 999 1068 3 998 1068 1067 3 999 1000 1069 3 999 1069 1068 3 1000 1001 1070 3 1000 1070 1069 3 1001 1002 1071 3 1001 1071 1070 3 1002 1003 1072 3 1002 1072 1071 3 1003 1004 1073 3 1003 1073 1072 3 1004 1005 1074 3 1004 1074 1073 3 1005 1006 1075 3 1005 1075 1074 3 1006 1007 1076 3 1006 1076 1075 3 1007 1008 1077 3 1007 1077 1076 3 1008 1009 1078 3 1008 1078 1077 3 1009 1010 1079 3 1009 1079 1078 3 1010 1011 1080 3 1010 1080 1079 3 1011 1012 1081 3 1011 1081 1080 3 1012 1013 1082 3 1012 1082 1081 3 1013 1014 1083 3 1013 1083 1082 3 1014 1015 1084 3 1014 1084 1083 3 1015 1016 1084 3 1085 1084 1016 3 1016 1017 1085 3 1086 1085 1017 3 1017 1018 1086 3 1087 1086 1018 3 1018 1019 1087 3 1088 1087 1019 3 1019 1020 1088 3 1089 1088 1020 3 1020 1021 1089 3 1090 1089 1021 3 1021 1022 1090 3 1091 1090 1022 3 1022 1023 1091 3 1092 1091 1023 3 1023 1024 1092 3 1093 1092 1024 3 1024 1025 1093 3 1094 1093 1025 3 1025 1026 1094 3 1095 1094 1026 3 1026 1027 1095 3 1096 1095 1027 3 1027 1028 1096 3 1097 1096 1028 3 1028 4945 1097 3 4949 1097 4945 3 1029 1030 1098 3 1099 1098 1030 3 1029 1098 4948 3 4952 4948 1098 3 1030 1031 1099 3 1100 1099 1031 3 1031 1032 1100 3 1101 1100 1032 3 1032 1033 1101 3 1102 1101 1033 3 1033 1034 1102 3 1103 1102 1034 3 1034 1035 1104 3 1034 
1104 1103 3 1035 1036 1105 3 1035 1105 1104 3 1036 1037 1106 3 1036 1106 1105 3 1037 1038 1107 3 1037 1107 1106 3 1038 1039 1108 3 1038 1108 1107 3 1039 1040 1109 3 1039 1109 1108 3 1040 1041 1110 3 1040 1110 1109 3 1041 1042 1111 3 1041 1111 1110 3 1042 1043 1112 3 1042 1112 1111 3 1043 1044 1113 3 1043 1113 1112 3 1044 1045 1114 3 1044 1114 1113 3 1045 1046 1115 3 1045 1115 1114 3 1046 1047 1116 3 1046 1116 1115 3 1047 1048 1117 3 1047 1117 1116 3 1048 1049 1118 3 1048 1118 1117 3 1049 1050 1119 3 1049 1119 1118 3 1050 1051 1120 3 1050 1120 1119 3 1051 1052 1121 3 1051 1121 1120 3 1052 1053 1122 3 1052 1122 1121 3 1053 1054 1123 3 1053 1123 1122 3 1054 1055 1123 3 1124 1123 1055 3 1055 1056 1124 3 1125 1124 1056 3 1056 1057 1125 3 1126 1125 1057 3 1057 1058 1126 3 1127 1126 1058 3 1058 1059 1127 3 1128 1127 1059 3 1059 1060 1128 3 1129 1128 1060 3 1060 1061 1129 3 1130 1129 1061 3 1061 1062 1130 3 1131 1130 1062 3 1062 1063 1131 3 1132 1131 1063 3 1063 1064 1132 3 1133 1132 1064 3 1064 1065 1133 3 1134 1133 1065 3 1065 1066 1134 3 1135 1134 1066 3 1066 1067 1135 3 1136 1135 1067 3 1067 1068 1136 3 1137 1136 1068 3 1068 1069 1137 3 1138 1137 1069 3 1069 1070 1138 3 1139 1138 1070 3 1070 1071 1139 3 1140 1139 1071 3 1071 1072 1140 3 1141 1140 1072 3 1072 1073 1141 3 1142 1141 1073 3 1073 1074 1142 3 1143 1142 1074 3 1074 1075 1144 3 1074 1144 1143 3 1075 1076 1145 3 1075 1145 1144 3 1076 1077 1146 3 1076 1146 1145 3 1077 1078 1147 3 1077 1147 1146 3 1078 1079 1148 3 1078 1148 1147 3 1079 1080 1149 3 1079 1149 1148 3 1080 1081 1150 3 1080 1150 1149 3 1081 1082 1151 3 1081 1151 1150 3 1082 1083 1152 3 1082 1152 1151 3 1083 1084 1153 3 1083 1153 1152 3 1084 1085 1154 3 1084 1154 1153 3 1085 1086 1155 3 1085 1155 1154 3 1086 1087 1156 3 1086 1156 1155 3 1087 1088 1157 3 1087 1157 1156 3 1088 1089 1158 3 1088 1158 1157 3 1089 1090 1159 3 1089 1159 1158 3 1090 1091 1160 3 1090 1160 1159 3 1091 1092 1161 3 1091 1161 1160 3 1092 1093 1162 3 1092 1162 1161 3 1093 1094 1163 
3 1093 1163 1162 3 1094 1095 1164 3 1094 1164 1163 3 1095 1096 1164 3 1165 1164 1096 3 1096 1097 1165 3 1166 1165 1097 3 1097 4949 4953 3 1097 4953 1166 3 1098 1099 1167 3 1168 1167 1099 3 1098 1167 4952 3 4956 4952 1167 3 1099 1100 1168 3 1169 1168 1100 3 1100 1101 1169 3 1170 1169 1101 3 1101 1102 1170 3 1171 1170 1102 3 1102 1103 1171 3 1172 1171 1103 3 1103 1104 1172 3 1173 1172 1104 3 1104 1105 1173 3 1174 1173 1105 3 1105 1106 1174 3 1175 1174 1106 3 1106 1107 1175 3 1176 1175 1107 3 1107 1108 1176 3 1177 1176 1108 3 1108 1109 1177 3 1178 1177 1109 3 1109 1110 1178 3 1179 1178 1110 3 1110 1111 1179 3 1180 1179 1111 3 1111 1112 1180 3 1181 1180 1112 3 1112 1113 1181 3 1182 1181 1113 3 1113 1114 1182 3 1183 1182 1114 3 1114 1115 1183 3 1184 1183 1115 3 1115 1116 1185 3 1115 1185 1184 3 1116 1117 1186 3 1116 1186 1185 3 1117 1118 1187 3 1117 1187 1186 3 1118 1119 1188 3 1118 1188 1187 3 1119 1120 1189 3 1119 1189 1188 3 1120 1121 1190 3 1120 1190 1189 3 1121 1122 1191 3 1121 1191 1190 3 1122 1123 1192 3 1122 1192 1191 3 1123 1124 1193 3 1123 1193 1192 3 1124 1125 1194 3 1124 1194 1193 3 1125 1126 1195 3 1125 1195 1194 3 1126 1127 1196 3 1126 1196 1195 3 1127 1128 1197 3 1127 1197 1196 3 1128 1129 1198 3 1128 1198 1197 3 1129 1130 1199 3 1129 1199 1198 3 1130 1131 1200 3 1130 1200 1199 3 1131 1132 1201 3 1131 1201 1200 3 1132 1133 1202 3 1132 1202 1201 3 1133 1134 1203 3 1133 1203 1202 3 1134 1135 1204 3 1134 1204 1203 3 1135 1136 1205 3 1135 1205 1204 3 1136 1137 1205 3 1206 1205 1137 3 1137 1138 1206 3 1207 1206 1138 3 1138 1139 1207 3 1208 1207 1139 3 1139 1140 1208 3 1209 1208 1140 3 1140 1141 1209 3 1210 1209 1141 3 1141 1142 1210 3 1211 1210 1142 3 1142 1143 1211 3 1212 1211 1143 3 1143 1144 1212 3 1213 1212 1144 3 1144 1145 1213 3 1214 1213 1145 3 1145 1146 1214 3 1215 1214 1146 3 1146 1147 1215 3 1216 1215 1147 3 1147 1148 1216 3 1217 1216 1148 3 1148 1149 1217 3 1218 1217 1149 3 1149 1150 1218 3 1219 1218 1150 3 1150 1151 1219 3 1220 1219 1151 3 1151 
1152 1220 3 1221 1220 1152 3 1152 1153 1221 3 1222 1221 1153 3 1153 1154 1222 3 1223 1222 1154 3 1154 1155 1223 3 1224 1223 1155 3 1155 1156 1224 3 1225 1224 1156 3 1156 1157 1226 3 1156 1226 1225 3 1157 1158 1227 3 1157 1227 1226 3 1158 1159 1228 3 1158 1228 1227 3 1159 1160 1229 3 1159 1229 1228 3 1160 1161 1230 3 1160 1230 1229 3 1161 1162 1231 3 1161 1231 1230 3 1162 1163 1232 3 1162 1232 1231 3 1163 1164 1233 3 1163 1233 1232 3 1164 1165 1234 3 1164 1234 1233 3 1165 1166 1235 3 1165 1235 1234 3 1166 4953 4957 3 1166 4957 1235 3 1167 1168 1237 3 1167 1237 1236 3 1167 1236 4960 3 1167 4960 4956 3 1168 1169 1238 3 1168 1238 1237 3 1169 1170 1239 3 1169 1239 1238 3 1170 1171 1240 3 1170 1240 1239 3 1171 1172 1241 3 1171 1241 1240 3 1172 1173 1242 3 1172 1242 1241 3 1173 1174 1243 3 1173 1243 1242 3 1174 1175 1244 3 1174 1244 1243 3 1175 1176 1245 3 1175 1245 1244 3 1176 1177 1246 3 1176 1246 1245 3 1177 1178 1246 3 1247 1246 1178 3 1178 1179 1247 3 1248 1247 1179 3 1179 1180 1248 3 1249 1248 1180 3 1180 1181 1249 3 1250 1249 1181 3 1181 1182 1250 3 1251 1250 1182 3 1182 1183 1251 3 1252 1251 1183 3 1183 1184 1252 3 1253 1252 1184 3 1184 1185 1253 3 1254 1253 1185 3 1185 1186 1254 3 1255 1254 1186 3 1186 1187 1255 3 1256 1255 1187 3 1187 1188 1256 3 1257 1256 1188 3 1188 1189 1257 3 1258 1257 1189 3 1189 1190 1258 3 1259 1258 1190 3 1190 1191 1259 3 1260 1259 1191 3 1191 1192 1260 3 1261 1260 1192 3 1192 1193 1261 3 1262 1261 1193 3 1193 1194 1262 3 1263 1262 1194 3 1194 1195 1263 3 1264 1263 1195 3 1195 1196 1264 3 1265 1264 1196 3 1196 1197 1265 3 1266 1265 1197 3 1197 1198 1266 3 1267 1266 1198 3 1198 1199 1267 3 1268 1267 1199 3 1199 1200 1269 3 1199 1269 1268 3 1200 1201 1270 3 1200 1270 1269 3 1201 1202 1271 3 1201 1271 1270 3 1202 1203 1272 3 1202 1272 1271 3 1203 1204 1273 3 1203 1273 1272 3 1204 1205 1274 3 1204 1274 1273 3 1205 1206 1275 3 1205 1275 1274 3 1206 1207 1276 3 1206 1276 1275 3 1207 1208 1277 3 1207 1277 1276 3 1208 1209 1278 3 1208 1278 1277 
3 1209 1210 1279 3 1209 1279 1278 3 1210 1211 1280 3 1210 1280 1279 3 1211 1212 1281 3 1211 1281 1280 3 1212 1213 1282 3 1212 1282 1281 3 1213 1214 1283 3 1213 1283 1282 3 1214 1215 1284 3 1214 1284 1283 3 1215 1216 1285 3 1215 1285 1284 3 1216 1217 1286 3 1216 1286 1285 3 1217 1218 1287 3 1217 1287 1286 3 1218 1219 1288 3 1218 1288 1287 3 1219 1220 1289 3 1219 1289 1288 3 1220 1221 1289 3 1290 1289 1221 3 1221 1222 1290 3 1291 1290 1222 3 1222 1223 1291 3 1292 1291 1223 3 1223 1224 1292 3 1293 1292 1224 3 1224 1225 1293 3 1294 1293 1225 3 1225 1226 1294 3 1295 1294 1226 3 1226 1227 1295 3 1296 1295 1227 3 1227 1228 1296 3 1297 1296 1228 3 1228 1229 1297 3 1298 1297 1229 3 1229 1230 1298 3 1299 1298 1230 3 1230 1231 1299 3 1300 1299 1231 3 1231 1232 1300 3 1301 1300 1232 3 1232 1233 1301 3 1302 1301 1233 3 1233 1234 1302 3 1303 1302 1234 3 1234 1235 1303 3 1304 1303 1235 3 1235 4957 1304 3 4961 1304 4957 3 1236 1237 1305 3 1306 1305 1237 3 1236 1305 4960 3 4964 4960 1305 3 1237 1238 1306 3 1307 1306 1238 3 1238 1239 1307 3 1308 1307 1239 3 1239 1240 1308 3 1309 1308 1240 3 1240 1241 1309 3 1310 1309 1241 3 1241 1242 1310 3 1311 1310 1242 3 1242 1243 1312 3 1242 1312 1311 3 1243 1244 1313 3 1243 1313 1312 3 1244 1245 1314 3 1244 1314 1313 3 1245 1246 1315 3 1245 1315 1314 3 1246 1247 1316 3 1246 1316 1315 3 1247 1248 1317 3 1247 1317 1316 3 1248 1249 1318 3 1248 1318 1317 3 1249 1250 1319 3 1249 1319 1318 3 1250 1251 1320 3 1250 1320 1319 3 1251 1252 1321 3 1251 1321 1320 3 1252 1253 1322 3 1252 1322 1321 3 1253 1254 1323 3 1253 1323 1322 3 1254 1255 1324 3 1254 1324 1323 3 1255 1256 1325 3 1255 1325 1324 3 1256 1257 1326 3 1256 1326 1325 3 1257 1258 1327 3 1257 1327 1326 3 1258 1259 1328 3 1258 1328 1327 3 1259 1260 1329 3 1259 1329 1328 3 1260 1261 1330 3 1260 1330 1329 3 1261 1262 1331 3 1261 1331 1330 3 1262 1263 1332 3 1262 1332 1331 3 1263 1264 1333 3 1263 1333 1332 3 1264 1265 1333 3 1334 1333 1265 3 1265 1266 1334 3 1335 1334 1266 3 1266 1267 1335 3 1336 
1335 1267 3 1267 1268 1336 3 1337 1336 1268 3 1268 1269 1337 3 1338 1337 1269 3 1269 1270 1338 3 1339 1338 1270 3 1270 1271 1339 3 1340 1339 1271 3 1271 1272 1340 3 1341 1340 1272 3 1272 1273 1341 3 1342 1341 1273 3 1273 1274 1342 3 1343 1342 1274 3 1274 1275 1343 3 1344 1343 1275 3 1275 1276 1344 3 1345 1344 1276 3 1276 1277 1345 3 1346 1345 1277 3 1277 1278 1346 3 1347 1346 1278 3 1278 1279 1347 3 1348 1347 1279 3 1279 1280 1348 3 1349 1348 1280 3 1280 1281 1349 3 1350 1349 1281 3 1281 1282 1350 3 1351 1350 1282 3 1282 1283 1351 3 1352 1351 1283 3 1283 1284 1352 3 1353 1352 1284 3 1284 1285 1353 3 1354 1353 1285 3 1285 1286 1355 3 1285 1355 1354 3 1286 1287 1356 3 1286 1356 1355 3 1287 1288 1357 3 1287 1357 1356 3 1288 1289 1358 3 1288 1358 1357 3 1289 1290 1359 3 1289 1359 1358 3 1290 1291 1360 3 1290 1360 1359 3 1291 1292 1361 3 1291 1361 1360 3 1292 1293 1362 3 1292 1362 1361 3 1293 1294 1363 3 1293 1363 1362 3 1294 1295 1364 3 1294 1364 1363 3 1295 1296 1365 3 1295 1365 1364 3 1296 1297 1366 3 1296 1366 1365 3 1297 1298 1367 3 1297 1367 1366 3 1298 1299 1368 3 1298 1368 1367 3 1299 1300 1369 3 1299 1369 1368 3 1300 1301 1370 3 1300 1370 1369 3 1301 1302 1371 3 1301 1371 1370 3 1302 1303 1372 3 1302 1372 1371 3 1303 1304 1373 3 1303 1373 1372 3 1304 4961 1373 3 4965 1373 4961 3 1305 1306 1375 3 1305 1375 1374 3 1305 1374 4968 3 1305 4968 4964 3 1306 1307 1376 3 1306 1376 1375 3 1307 1308 1377 3 1307 1377 1376 3 1308 1309 1377 3 1378 1377 1309 3 1309 1310 1378 3 1379 1378 1310 3 1310 1311 1379 3 1380 1379 1311 3 1311 1312 1380 3 1381 1380 1312 3 1312 1313 1381 3 1382 1381 1313 3 1313 1314 1382 3 1383 1382 1314 3 1314 1315 1383 3 1384 1383 1315 3 1315 1316 1384 3 1385 1384 1316 3 1316 1317 1385 3 1386 1385 1317 3 1317 1318 1386 3 1387 1386 1318 3 1318 1319 1387 3 1388 1387 1319 3 1319 1320 1388 3 1389 1388 1320 3 1320 1321 1389 3 1390 1389 1321 3 1321 1322 1390 3 1391 1390 1322 3 1322 1323 1391 3 1392 1391 1323 3 1323 1324 1392 3 1393 1392 1324 3 1324 1325 1393 
3 1394 1393 1325 3 1325 1326 1394 3 1395 1394 1326 3 1326 1327 1395 3 1396 1395 1327 3 1327 1328 1396 3 1397 1396 1328 3 1328 1329 1397 3 1398 1397 1329 3 1329 1330 1398 3 1399 1398 1330 3 1330 1331 1400 3 1330 1400 1399 3 1331 1332 1401 3 1331 1401 1400 3 1332 1333 1402 3 1332 1402 1401 3 1333 1334 1403 3 1333 1403 1402 3 1334 1335 1404 3 1334 1404 1403 3 1335 1336 1405 3 1335 1405 1404 3 1336 1337 1406 3 1336 1406 1405 3 1337 1338 1407 3 1337 1407 1406 3 1338 1339 1408 3 1338 1408 1407 3 1339 1340 1409 3 1339 1409 1408 3 1340 1341 1410 3 1340 1410 1409 3 1341 1342 1411 3 1341 1411 1410 3 1342 1343 1412 3 1342 1412 1411 3 1343 1344 1413 3 1343 1413 1412 3 1344 1345 1414 3 1344 1414 1413 3 1345 1346 1415 3 1345 1415 1414 3 1346 1347 1416 3 1346 1416 1415 3 1347 1348 1417 3 1347 1417 1416 3 1348 1349 1418 3 1348 1418 1417 3 1349 1350 1419 3 1349 1419 1418 3 1350 1351 1420 3 1350 1420 1419 3 1351 1352 1421 3 1351 1421 1420 3 1352 1353 1422 3 1352 1422 1421 3 1353 1354 1422 3 1423 1422 1354 3 1354 1355 1423 3 1424 1423 1355 3 1355 1356 1424 3 1425 1424 1356 3 1356 1357 1425 3 1426 1425 1357 3 1357 1358 1426 3 1427 1426 1358 3 1358 1359 1427 3 1428 1427 1359 3 1359 1360 1428 3 1429 1428 1360 3 1360 1361 1429 3 1430 1429 1361 3 1361 1362 1430 3 1431 1430 1362 3 1362 1363 1431 3 1432 1431 1363 3 1363 1364 1432 3 1433 1432 1364 3 1364 1365 1433 3 1434 1433 1365 3 1365 1366 1434 3 1435 1434 1366 3 1366 1367 1435 3 1436 1435 1367 3 1367 1368 1436 3 1437 1436 1368 3 1368 1369 1437 3 1438 1437 1369 3 1369 1370 1438 3 1439 1438 1370 3 1370 1371 1439 3 1440 1439 1371 3 1371 1372 1440 3 1441 1440 1372 3 1372 1373 1441 3 1442 1441 1373 3 1373 4965 4969 3 1373 4969 1442 3 1374 1375 1443 3 1444 1443 1375 3 1374 1443 4968 3 4972 4968 1443 3 1375 1376 1445 3 1375 1445 1444 3 1376 1377 1446 3 1376 1446 1445 3 1377 1378 1447 3 1377 1447 1446 3 1378 1379 1448 3 1378 1448 1447 3 1379 1380 1449 3 1379 1449 1448 3 1380 1381 1450 3 1380 1450 1449 3 1381 1382 1451 3 1381 1451 1450 3 1382 
1383 1452 3 1382 1452 1451 3 1383 1384 1453 3 1383 1453 1452 3 1384 1385 1454 3 1384 1454 1453 3 1385 1386 1455 3 1385 1455 1454 3 1386 1387 1456 3 1386 1456 1455 3 1387 1388 1457 3 1387 1457 1456 3 1388 1389 1458 3 1388 1458 1457 3 1389 1390 1459 3 1389 1459 1458 3 1390 1391 1460 3 1390 1460 1459 3 1391 1392 1461 3 1391 1461 1460 3 1392 1393 1462 3 1392 1462 1461 3 1393 1394 1463 3 1393 1463 1462 3 1394 1395 1464 3 1394 1464 1463 3 1395 1396 1465 3 1395 1465 1464 3 1396 1397 1466 3 1396 1466 1465 3 1397 1398 1467 3 1397 1467 1466 3 1398 1399 1467 3 1468 1467 1399 3 1399 1400 1468 3 1469 1468 1400 3 1400 1401 1469 3 1470 1469 1401 3 1401 1402 1470 3 1471 1470 1402 3 1402 1403 1471 3 1472 1471 1403 3 1403 1404 1472 3 1473 1472 1404 3 1404 1405 1473 3 1474 1473 1405 3 1405 1406 1474 3 1475 1474 1406 3 1406 1407 1475 3 1476 1475 1407 3 1407 1408 1476 3 1477 1476 1408 3 1408 1409 1477 3 1478 1477 1409 3 1409 1410 1478 3 1479 1478 1410 3 1410 1411 1479 3 1480 1479 1411 3 1411 1412 1480 3 1481 1480 1412 3 1412 1413 1481 3 1482 1481 1413 3 1413 1414 1482 3 1483 1482 1414 3 1414 1415 1483 3 1484 1483 1415 3 1415 1416 1484 3 1485 1484 1416 3 1416 1417 1485 3 1486 1485 1417 3 1417 1418 1486 3 1487 1486 1418 3 1418 1419 1487 3 1488 1487 1419 3 1419 1420 1488 3 1489 1488 1420 3 1420 1421 1489 3 1490 1489 1421 3 1421 1422 1491 3 1421 1491 1490 3 1422 1423 1492 3 1422 1492 1491 3 1423 1424 1493 3 1423 1493 1492 3 1424 1425 1494 3 1424 1494 1493 3 1425 1426 1495 3 1425 1495 1494 3 1426 1427 1496 3 1426 1496 1495 3 1427 1428 1497 3 1427 1497 1496 3 1428 1429 1498 3 1428 1498 1497 3 1429 1430 1499 3 1429 1499 1498 3 1430 1431 1500 3 1430 1500 1499 3 1431 1432 1501 3 1431 1501 1500 3 1432 1433 1502 3 1432 1502 1501 3 1433 1434 1503 3 1433 1503 1502 3 1434 1435 1504 3 1434 1504 1503 3 1435 1436 1505 3 1435 1505 1504 3 1436 1437 1506 3 1436 1506 1505 3 1437 1438 1507 3 1437 1507 1506 3 1438 1439 1508 3 1438 1508 1507 3 1439 1440 1509 3 1439 1509 1508 3 1440 1441 1510 3 1440 1510 1509 
3 1441 1442 1511 3 1441 1511 1510 3 1442 4969 1511 3 4973 1511 4969 3 1443 1444 1513 3 1443 1513 1512 3 1443 1512 4976 3 1443 4976 4972 3 1444 1445 1514 3 1444 1514 1513 3 1445 1446 1514 3 1515 1514 1446 3 1446 1447 1515 3 1516 1515 1447 3 1447 1448 1516 3 1517 1516 1448 3 1448 1449 1517 3 1518 1517 1449 3 1449 1450 1518 3 1519 1518 1450 3 1450 1451 1519 3 1520 1519 1451 3 1451 1452 1520 3 1521 1520 1452 3 1452 1453 1521 3 1522 1521 1453 3 1453 1454 1522 3 1523 1522 1454 3 1454 1455 1523 3 1524 1523 1455 3 1455 1456 1524 3 1525 1524 1456 3 1456 1457 1525 3 1526 1525 1457 3 1457 1458 1526 3 1527 1526 1458 3 1458 1459 1527 3 1528 1527 1459 3 1459 1460 1528 3 1529 1528 1460 3 1460 1461 1529 3 1530 1529 1461 3 1461 1462 1530 3 1531 1530 1462 3 1462 1463 1531 3 1532 1531 1463 3 1463 1464 1532 3 1533 1532 1464 3 1464 1465 1533 3 1534 1533 1465 3 1465 1466 1534 3 1535 1534 1466 3 1466 1467 1535 3 1536 1535 1467 3 1467 1468 1536 3 1537 1536 1468 3 1468 1469 1538 3 1468 1538 1537 3 1469 1470 1539 3 1469 1539 1538 3 1470 1471 1540 3 1470 1540 1539 3 1471 1472 1541 3 1471 1541 1540 3 1472 1473 1542 3 1472 1542 1541 3 1473 1474 1543 3 1473 1543 1542 3 1474 1475 1544 3 1474 1544 1543 3 1475 1476 1545 3 1475 1545 1544 3 1476 1477 1546 3 1476 1546 1545 3 1477 1478 1547 3 1477 1547 1546 3 1478 1479 1548 3 1478 1548 1547 3 1479 1480 1549 3 1479 1549 1548 3 1480 1481 1550 3 1480 1550 1549 3 1481 1482 1551 3 1481 1551 1550 3 1482 1483 1552 3 1482 1552 1551 3 1483 1484 1553 3 1483 1553 1552 3 1484 1485 1554 3 1484 1554 1553 3 1485 1486 1555 3 1485 1555 1554 3 1486 1487 1556 3 1486 1556 1555 3 1487 1488 1557 3 1487 1557 1556 3 1488 1489 1558 3 1488 1558 1557 3 1489 1490 1559 3 1489 1559 1558 3 1490 1491 1560 3 1490 1560 1559 3 1491 1492 1561 3 1491 1561 1560 3 1492 1493 1561 3 1562 1561 1493 3 1493 1494 1562 3 1563 1562 1494 3 1494 1495 1563 3 1564 1563 1495 3 1495 1496 1564 3 1565 1564 1496 3 1496 1497 1565 3 1566 1565 1497 3 1497 1498 1566 3 1567 1566 1498 3 1498 1499 1567 3 1568 
1567 1499 3 1499 1500 1568 3 1569 1568 1500 3 1500 1501 1569 3 1570 1569 1501 3 1501 1502 1570 3 1571 1570 1502 3 1502 1503 1571 3 1572 1571 1503 3 1503 1504 1572 3 1573 1572 1504 3 1504 1505 1573 3 1574 1573 1505 3 1505 1506 1574 3 1575 1574 1506 3 1506 1507 1575 3 1576 1575 1507 3 1507 1508 1576 3 1577 1576 1508 3 1508 1509 1577 3 1578 1577 1509 3 1509 1510 1578 3 1579 1578 1510 3 1510 1511 1579 3 1580 1579 1511 3 1511 4973 1580 3 4977 1580 4973 3 1512 1513 1581 3 1582 1581 1513 3 1512 1581 4976 3 4980 4976 1581 3 1513 1514 1582 3 1583 1582 1514 3 1514 1515 1583 3 1584 1583 1515 3 1515 1516 1584 3 1585 1584 1516 3 1516 1517 1586 3 1516 1586 1585 3 1517 1518 1587 3 1517 1587 1586 3 1518 1519 1588 3 1518 1588 1587 3 1519 1520 1589 3 1519 1589 1588 3 1520 1521 1590 3 1520 1590 1589 3 1521 1522 1591 3 1521 1591 1590 3 1522 1523 1592 3 1522 1592 1591 3 1523 1524 1593 3 1523 1593 1592 3 1524 1525 1594 3 1524 1594 1593 3 1525 1526 1595 3 1525 1595 1594 3 1526 1527 1596 3 1526 1596 1595 3 1527 1528 1597 3 1527 1597 1596 3 1528 1529 1598 3 1528 1598 1597 3 1529 1530 1599 3 1529 1599 1598 3 1530 1531 1600 3 1530 1600 1599 3 1531 1532 1601 3 1531 1601 1600 3 1532 1533 1602 3 1532 1602 1601 3 1533 1534 1603 3 1533 1603 1602 3 1534 1535 1604 3 1534 1604 1603 3 1535 1536 1605 3 1535 1605 1604 3 1536 1537 1606 3 1536 1606 1605 3 1537 1538 1607 3 1537 1607 1606 3 1538 1539 1608 3 1538 1608 1607 3 1539 1540 1609 3 1539 1609 1608 3 1540 1541 1609 3 1610 1609 1541 3 1541 1542 1610 3 1611 1610 1542 3 1542 1543 1611 3 1612 1611 1543 3 1543 1544 1612 3 1613 1612 1544 3 1544 1545 1613 3 1614 1613 1545 3 1545 1546 1614 3 1615 1614 1546 3 1546 1547 1615 3 1616 1615 1547 3 1547 1548 1616 3 1617 1616 1548 3 1548 1549 1617 3 1618 1617 1549 3 1549 1550 1618 3 1619 1618 1550 3 1550 1551 1619 3 1620 1619 1551 3 1551 1552 1620 3 1621 1620 1552 3 1552 1553 1621 3 1622 1621 1553 3 1553 1554 1622 3 1623 1622 1554 3 1554 1555 1623 3 1624 1623 1555 3 1555 1556 1624 3 1625 1624 1556 3 1556 1557 1625 
3 1626 1625 1557 3 1557 1558 1626 3 1627 1626 1558 3 1558 1559 1627 3 1628 1627 1559 3 1559 1560 1628 3 1629 1628 1560 3 1560 1561 1629 3 1630 1629 1561 3 1561 1562 1630 3 1631 1630 1562 3 1562 1563 1631 3 1632 1631 1563 3 1563 1564 1632 3 1633 1632 1564 3 1564 1565 1634 3 1564 1634 1633 3 1565 1566 1635 3 1565 1635 1634 3 1566 1567 1636 3 1566 1636 1635 3 1567 1568 1637 3 1567 1637 1636 3 1568 1569 1638 3 1568 1638 1637 3 1569 1570 1639 3 1569 1639 1638 3 1570 1571 1640 3 1570 1640 1639 3 1571 1572 1641 3 1571 1641 1640 3 1572 1573 1642 3 1572 1642 1641 3 1573 1574 1643 3 1573 1643 1642 3 1574 1575 1644 3 1574 1644 1643 3 1575 1576 1645 3 1575 1645 1644 3 1576 1577 1646 3 1576 1646 1645 3 1577 1578 1647 3 1577 1647 1646 3 1578 1579 1648 3 1578 1648 1647 3 1579 1580 1649 3 1579 1649 1648 3 1580 4977 4981 3 1580 4981 1649 3 1581 1582 1651 3 1581 1651 1650 3 1581 1650 4984 3 1581 4984 4980 3 1582 1583 1652 3 1582 1652 1651 3 1583 1584 1653 3 1583 1653 1652 3 1584 1585 1654 3 1584 1654 1653 3 1585 1586 1655 3 1585 1655 1654 3 1586 1587 1656 3 1586 1656 1655 3 1587 1588 1657 3 1587 1657 1656 3 1588 1589 1657 3 1658 1657 1589 3 1589 1590 1658 3 1659 1658 1590 3 1590 1591 1659 3 1660 1659 1591 3 1591 1592 1660 3 1661 1660 1592 3 1592 1593 1661 3 1662 1661 1593 3 1593 1594 1662 3 1663 1662 1594 3 1594 1595 1663 3 1664 1663 1595 3 1595 1596 1664 3 1665 1664 1596 3 1596 1597 1665 3 1666 1665 1597 3 1597 1598 1666 3 1667 1666 1598 3 1598 1599 1667 3 1668 1667 1599 3 1599 1600 1668 3 1669 1668 1600 3 1600 1601 1669 3 1670 1669 1601 3 1601 1602 1670 3 1671 1670 1602 3 1602 1603 1671 3 1672 1671 1603 3 1603 1604 1672 3 1673 1672 1604 3 1604 1605 1673 3 1674 1673 1605 3 1605 1606 1674 3 1675 1674 1606 3 1606 1607 1675 3 1676 1675 1607 3 1607 1608 1676 3 1677 1676 1608 3 1608 1609 1677 3 1678 1677 1609 3 1609 1610 1678 3 1679 1678 1610 3 1610 1611 1679 3 1680 1679 1611 3 1611 1612 1680 3 1681 1680 1612 3 1612 1613 1681 3 1682 1681 1613 3 1613 1614 1683 3 1613 1683 1682 3 1614 
1615 1684 3 1614 1684 1683 3 1615 1616 1685 3 1615 1685 1684 3 1616 1617 1686 3 1616 1686 1685 3 1617 1618 1687 3 1617 1687 1686 3 1618 1619 1688 3 1618 1688 1687 3 1619 1620 1689 3 1619 1689 1688 3 1620 1621 1690 3 1620 1690 1689 3 1621 1622 1691 3 1621 1691 1690 3 1622 1623 1692 3 1622 1692 1691 3 1623 1624 1693 3 1623 1693 1692 3 1624 1625 1694 3 1624 1694 1693 3 1625 1626 1695 3 1625 1695 1694 3 1626 1627 1696 3 1626 1696 1695 3 1627 1628 1697 3 1627 1697 1696 3 1628 1629 1698 3 1628 1698 1697 3 1629 1630 1699 3 1629 1699 1698 3 1630 1631 1700 3 1630 1700 1699 3 1631 1632 1701 3 1631 1701 1700 3 1632 1633 1702 3 1632 1702 1701 3 1633 1634 1703 3 1633 1703 1702 3 1634 1635 1704 3 1634 1704 1703 3 1635 1636 1705 3 1635 1705 1704 3 1636 1637 1706 3 1636 1706 1705 3 1637 1638 1707 3 1637 1707 1706 3 1638 1639 1707 3 1708 1707 1639 3 1639 1640 1708 3 1709 1708 1640 3 1640 1641 1709 3 1710 1709 1641 3 1641 1642 1710 3 1711 1710 1642 3 1642 1643 1711 3 1712 1711 1643 3 1643 1644 1712 3 1713 1712 1644 3 1644 1645 1713 3 1714 1713 1645 3 1645 1646 1714 3 1715 1714 1646 3 1646 1647 1715 3 1716 1715 1647 3 1647 1648 1716 3 1717 1716 1648 3 1648 1649 1717 3 1718 1717 1649 3 1649 4981 1718 3 4985 1718 4981 3 1650 1651 1719 3 1720 1719 1651 3 1650 1719 4984 3 4988 4984 1719 3 1651 1652 1720 3 1721 1720 1652 3 1652 1653 1721 3 1722 1721 1653 3 1653 1654 1722 3 1723 1722 1654 3 1654 1655 1723 3 1724 1723 1655 3 1655 1656 1724 3 1725 1724 1656 3 1656 1657 1725 3 1726 1725 1657 3 1657 1658 1726 3 1727 1726 1658 3 1658 1659 1727 3 1728 1727 1659 3 1659 1660 1728 3 1729 1728 1660 3 1660 1661 1729 3 1730 1729 1661 3 1661 1662 1730 3 1731 1730 1662 3 1662 1663 1731 3 1732 1731 1663 3 1663 1664 1733 3 1663 1733 1732 3 1664 1665 1734 3 1664 1734 1733 3 1665 1666 1735 3 1665 1735 1734 3 1666 1667 1736 3 1666 1736 1735 3 1667 1668 1737 3 1667 1737 1736 3 1668 1669 1738 3 1668 1738 1737 3 1669 1670 1739 3 1669 1739 1738 3 1670 1671 1740 3 1670 1740 1739 3 1671 1672 1741 3 1671 1741 1740 
3 1672 1673 1742 3 1672 1742 1741 3 1673 1674 1743 3 1673 1743 1742 3 1674 1675 1744 3 1674 1744 1743 3 1675 1676 1745 3 1675 1745 1744 3 1676 1677 1746 3 1676 1746 1745 3 1677 1678 1747 3 1677 1747 1746 3 1678 1679 1748 3 1678 1748 1747 3 1679 1680 1749 3 1679 1749 1748 3 1680 1681 1750 3 1680 1750 1749 3 1681 1682 1751 3 1681 1751 1750 3 1682 1683 1752 3 1682 1752 1751 3 1683 1684 1753 3 1683 1753 1752 3 1684 1685 1754 3 1684 1754 1753 3 1685 1686 1755 3 1685 1755 1754 3 1686 1687 1756 3 1686 1756 1755 3 1687 1688 1757 3 1687 1757 1756 3 1688 1689 1757 3 1758 1757 1689 3 1689 1690 1758 3 1759 1758 1690 3 1690 1691 1759 3 1760 1759 1691 3 1691 1692 1760 3 1761 1760 1692 3 1692 1693 1761 3 1762 1761 1693 3 1693 1694 1762 3 1763 1762 1694 3 1694 1695 1763 3 1764 1763 1695 3 1695 1696 1764 3 1765 1764 1696 3 1696 1697 1765 3 1766 1765 1697 3 1697 1698 1766 3 1767 1766 1698 3 1698 1699 1767 3 1768 1767 1699 3 1699 1700 1768 3 1769 1768 1700 3 1700 1701 1769 3 1770 1769 1701 3 1701 1702 1770 3 1771 1770 1702 3 1702 1703 1771 3 1772 1771 1703 3 1703 1704 1772 3 1773 1772 1704 3 1704 1705 1773 3 1774 1773 1705 3 1705 1706 1774 3 1775 1774 1706 3 1706 1707 1775 3 1776 1775 1707 3 1707 1708 1776 3 1777 1776 1708 3 1708 1709 1777 3 1778 1777 1709 3 1709 1710 1778 3 1779 1778 1710 3 1710 1711 1779 3 1780 1779 1711 3 1711 1712 1780 3 1781 1780 1712 3 1712 1713 1781 3 1782 1781 1713 3 1713 1714 1783 3 1713 1783 1782 3 1714 1715 1784 3 1714 1784 1783 3 1715 1716 1785 3 1715 1785 1784 3 1716 1717 1786 3 1716 1786 1785 3 1717 1718 1787 3 1717 1787 1786 3 1718 4985 4989 3 1718 4989 1787 3 1719 1720 1789 3 1719 1789 1788 3 1719 1788 4988 3 4992 4988 1788 3 1720 1721 1790 3 1720 1790 1789 3 1721 1722 1791 3 1721 1791 1790 3 1722 1723 1792 3 1722 1792 1791 3 1723 1724 1793 3 1723 1793 1792 3 1724 1725 1794 3 1724 1794 1793 3 1725 1726 1795 3 1725 1795 1794 3 1726 1727 1796 3 1726 1796 1795 3 1727 1728 1797 3 1727 1797 1796 3 1728 1729 1798 3 1728 1798 1797 3 1729 1730 1799 3 1729 
1799 1798 3 1730 1731 1800 3 1730 1800 1799 3 1731 1732 1801 3 1731 1801 1800 3 1732 1733 1802 3 1732 1802 1801 3 1733 1734 1803 3 1733 1803 1802 3 1734 1735 1804 3 1734 1804 1803 3 1735 1736 1805 3 1735 1805 1804 3 1736 1737 1806 3 1736 1806 1805 3 1737 1738 1807 3 1737 1807 1806 3 1738 1739 1808 3 1738 1808 1807 3 1739 1740 1808 3 1809 1808 1740 3 1740 1741 1809 3 1810 1809 1741 3 1741 1742 1810 3 1811 1810 1742 3 1742 1743 1811 3 1812 1811 1743 3 1743 1744 1812 3 1813 1812 1744 3 1744 1745 1813 3 1814 1813 1745 3 1745 1746 1814 3 1815 1814 1746 3 1746 1747 1815 3 1816 1815 1747 3 1747 1748 1816 3 1817 1816 1748 3 1748 1749 1817 3 1818 1817 1749 3 1749 1750 1818 3 1819 1818 1750 3 1750 1751 1819 3 1820 1819 1751 3 1751 1752 1820 3 1821 1820 1752 3 1752 1753 1821 3 1822 1821 1753 3 1753 1754 1822 3 1823 1822 1754 3 1754 1755 1823 3 1824 1823 1755 3 1755 1756 1824 3 1825 1824 1756 3 1756 1757 1825 3 1826 1825 1757 3 1757 1758 1826 3 1827 1826 1758 3 1758 1759 1827 3 1828 1827 1759 3 1759 1760 1828 3 1829 1828 1760 3 1760 1761 1829 3 1830 1829 1761 3 1761 1762 1830 3 1831 1830 1762 3 1762 1763 1831 3 1832 1831 1763 3 1763 1764 1832 3 1833 1832 1764 3 1764 1765 1833 3 1834 1833 1765 3 1765 1766 1835 3 1765 1835 1834 3 1766 1767 1836 3 1766 1836 1835 3 1767 1768 1837 3 1767 1837 1836 3 1768 1769 1838 3 1768 1838 1837 3 1769 1770 1839 3 1769 1839 1838 3 1770 1771 1840 3 1770 1840 1839 3 1771 1772 1841 3 1771 1841 1840 3 1772 1773 1842 3 1772 1842 1841 3 1773 1774 1843 3 1773 1843 1842 3 1774 1775 1844 3 1774 1844 1843 3 1775 1776 1845 3 1775 1845 1844 3 1776 1777 1846 3 1776 1846 1845 3 1777 1778 1847 3 1777 1847 1846 3 1778 1779 1848 3 1778 1848 1847 3 1779 1780 1849 3 1779 1849 1848 3 1780 1781 1850 3 1780 1850 1849 3 1781 1782 1851 3 1781 1851 1850 3 1782 1783 1852 3 1782 1852 1851 3 1783 1784 1853 3 1783 1853 1852 3 1784 1785 1854 3 1784 1854 1853 3 1785 1786 1855 3 1785 1855 1854 3 1786 1787 1856 3 1786 1856 1855 3 1787 4989 1856 3 4993 1856 4989 3 1788 1789 1858 
3 1788 1858 1857 3 1788 1857 4996 3 1788 4996 4992 3 1789 1790 1859 3 1789 1859 1858 3 1790 1791 1860 3 1790 1860 1859 3 1791 1792 1860 3 1861 1860 1792 3 1792 1793 1861 3 1862 1861 1793 3 1793 1794 1862 3 1863 1862 1794 3 1794 1795 1863 3 1864 1863 1795 3 1795 1796 1864 3 1865 1864 1796 3 1796 1797 1865 3 1866 1865 1797 3 1797 1798 1866 3 1867 1866 1798 3 1798 1799 1867 3 1868 1867 1799 3 1799 1800 1868 3 1869 1868 1800 3 1800 1801 1869 3 1870 1869 1801 3 1801 1802 1870 3 1871 1870 1802 3 1802 1803 1871 3 1872 1871 1803 3 1803 1804 1872 3 1873 1872 1804 3 1804 1805 1873 3 1874 1873 1805 3 1805 1806 1874 3 1875 1874 1806 3 1806 1807 1875 3 1876 1875 1807 3 1807 1808 1876 3 1877 1876 1808 3 1808 1809 1877 3 1878 1877 1809 3 1809 1810 1878 3 1879 1878 1810 3 1810 1811 1879 3 1880 1879 1811 3 1811 1812 1880 3 1881 1880 1812 3 1812 1813 1881 3 1882 1881 1813 3 1813 1814 1882 3 1883 1882 1814 3 1814 1815 1883 3 1884 1883 1815 3 1815 1816 1884 3 1885 1884 1816 3 1816 1817 1885 3 1886 1885 1817 3 1817 1818 1887 3 1817 1887 1886 3 1818 1819 1888 3 1818 1888 1887 3 1819 1820 1889 3 1819 1889 1888 3 1820 1821 1890 3 1820 1890 1889 3 1821 1822 1891 3 1821 1891 1890 3 1822 1823 1892 3 1822 1892 1891 3 1823 1824 1893 3 1823 1893 1892 3 1824 1825 1894 3 1824 1894 1893 3 1825 1826 1895 3 1825 1895 1894 3 1826 1827 1896 3 1826 1896 1895 3 1827 1828 1897 3 1827 1897 1896 3 1828 1829 1898 3 1828 1898 1897 3 1829 1830 1899 3 1829 1899 1898 3 1830 1831 1900 3 1830 1900 1899 3 1831 1832 1901 3 1831 1901 1900 3 1832 1833 1902 3 1832 1902 1901 3 1833 1834 1903 3 1833 1903 1902 3 1834 1835 1904 3 1834 1904 1903 3 1835 1836 1905 3 1835 1905 1904 3 1836 1837 1906 3 1836 1906 1905 3 1837 1838 1907 3 1837 1907 1906 3 1838 1839 1908 3 1838 1908 1907 3 1839 1840 1909 3 1839 1909 1908 3 1840 1841 1910 3 1840 1910 1909 3 1841 1842 1911 3 1841 1911 1910 3 1842 1843 1912 3 1842 1912 1911 3 1843 1844 1912 3 1913 1912 1844 3 1844 1845 1913 3 1914 1913 1845 3 1845 1846 1914 3 1915 1914 1846 3 1846 
1847 1915 3 1916 1915 1847 3 1847 1848 1916 3 1917 1916 1848 3 1848 1849 1917 3 1918 1917 1849 3 1849 1850 1918 3 1919 1918 1850 3 1850 1851 1919 3 1920 1919 1851 3 1851 1852 1920 3 1921 1920 1852 3 1852 1853 1921 3 1922 1921 1853 3 1853 1854 1922 3 1923 1922 1854 3 1854 1855 1923 3 1924 1923 1855 3 1855 1856 1924 3 1925 1924 1856 3 1856 4993 1925 3 4997 1925 4993 3 1857 1858 1926 3 1927 1926 1858 3 1857 1926 4996 3 5000 4996 1926 3 1858 1859 1927 3 1928 1927 1859 3 1859 1860 1928 3 1929 1928 1860 3 1860 1861 1929 3 1930 1929 1861 3 1861 1862 1930 3 1931 1930 1862 3 1862 1863 1931 3 1932 1931 1863 3 1863 1864 1932 3 1933 1932 1864 3 1864 1865 1933 3 1934 1933 1865 3 1865 1866 1934 3 1935 1934 1866 3 1866 1867 1935 3 1936 1935 1867 3 1867 1868 1936 3 1937 1936 1868 3 1868 1869 1937 3 1938 1937 1869 3 1869 1870 1938 3 1939 1938 1870 3 1870 1871 1940 3 1870 1940 1939 3 1871 1872 1941 3 1871 1941 1940 3 1872 1873 1942 3 1872 1942 1941 3 1873 1874 1943 3 1873 1943 1942 3 1874 1875 1944 3 1874 1944 1943 3 1875 1876 1945 3 1875 1945 1944 3 1876 1877 1946 3 1876 1946 1945 3 1877 1878 1947 3 1877 1947 1946 3 1878 1879 1948 3 1878 1948 1947 3 1879 1880 1949 3 1879 1949 1948 3 1880 1881 1950 3 1880 1950 1949 3 1881 1882 1951 3 1881 1951 1950 3 1882 1883 1952 3 1882 1952 1951 3 1883 1884 1953 3 1883 1953 1952 3 1884 1885 1954 3 1884 1954 1953 3 1885 1886 1955 3 1885 1955 1954 3 1886 1887 1956 3 1886 1956 1955 3 1887 1888 1957 3 1887 1957 1956 3 1888 1889 1958 3 1888 1958 1957 3 1889 1890 1959 3 1889 1959 1958 3 1890 1891 1960 3 1890 1960 1959 3 1891 1892 1961 3 1891 1961 1960 3 1892 1893 1962 3 1892 1962 1961 3 1893 1894 1963 3 1893 1963 1962 3 1894 1895 1964 3 1894 1964 1963 3 1895 1896 1965 3 1895 1965 1964 3 1896 1897 1965 3 1966 1965 1897 3 1897 1898 1966 3 1967 1966 1898 3 1898 1899 1967 3 1968 1967 1899 3 1899 1900 1968 3 1969 1968 1900 3 1900 1901 1969 3 1970 1969 1901 3 1901 1902 1970 3 1971 1970 1902 3 1902 1903 1971 3 1972 1971 1903 3 1903 1904 1972 3 1973 1972 1904 
3 1904 1905 1973 3 1974 1973 1905 3 1905 1906 1974 3 1975 1974 1906 3 1906 1907 1975 3 1976 1975 1907 3 1907 1908 1976 3 1977 1976 1908 3 1908 1909 1977 3 1978 1977 1909 3 1909 1910 1978 3 1979 1978 1910 3 1910 1911 1979 3 1980 1979 1911 3 1911 1912 1980 3 1981 1980 1912 3 1912 1913 1981 3 1982 1981 1913 3 1913 1914 1982 3 1983 1982 1914 3 1914 1915 1983 3 1984 1983 1915 3 1915 1916 1984 3 1985 1984 1916 3 1916 1917 1985 3 1986 1985 1917 3 1917 1918 1986 3 1987 1986 1918 3 1918 1919 1987 3 1988 1987 1919 3 1919 1920 1988 3 1989 1988 1920 3 1920 1921 1989 3 1990 1989 1921 3 1921 1922 1990 3 1991 1990 1922 3 1922 1923 1991 3 1992 1991 1923 3 1923 1924 1993 3 1923 1993 1992 3 1924 1925 1994 3 1924 1994 1993 3 1925 4997 5001 3 1925 5001 1994 3 1926 1927 1996 3 1926 1996 1995 3 1926 1995 5000 3 5004 5000 1995 3 1927 1928 1997 3 1927 1997 1996 3 1928 1929 1998 3 1928 1998 1997 3 1929 1930 1999 3 1929 1999 1998 3 1930 1931 2000 3 1930 2000 1999 3 1931 1932 2001 3 1931 2001 2000 3 1932 1933 2002 3 1932 2002 2001 3 1933 1934 2003 3 1933 2003 2002 3 1934 1935 2004 3 1934 2004 2003 3 1935 1936 2005 3 1935 2005 2004 3 1936 1937 2006 3 1936 2006 2005 3 1937 1938 2007 3 1937 2007 2006 3 1938 1939 2008 3 1938 2008 2007 3 1939 1940 2009 3 1939 2009 2008 3 1940 1941 2010 3 1940 2010 2009 3 1941 1942 2011 3 1941 2011 2010 3 1942 1943 2012 3 1942 2012 2011 3 1943 1944 2013 3 1943 2013 2012 3 1944 1945 2014 3 1944 2014 2013 3 1945 1946 2015 3 1945 2015 2014 3 1946 1947 2016 3 1946 2016 2015 3 1947 1948 2017 3 1947 2017 2016 3 1948 1949 2018 3 1948 2018 2017 3 1949 1950 2019 3 1949 2019 2018 3 1950 1951 2019 3 2020 2019 1951 3 1951 1952 2020 3 2021 2020 1952 3 1952 1953 2021 3 2022 2021 1953 3 1953 1954 2022 3 2023 2022 1954 3 1954 1955 2023 3 2024 2023 1955 3 1955 1956 2024 3 2025 2024 1956 3 1956 1957 2025 3 2026 2025 1957 3 1957 1958 2026 3 2027 2026 1958 3 1958 1959 2027 3 2028 2027 1959 3 1959 1960 2028 3 2029 2028 1960 3 1960 1961 2029 3 2030 2029 1961 3 1961 1962 2030 3 2031 
2030 1962 3 1962 1963 2031 3 2032 2031 1963 3 1963 1964 2032 3 2033 2032 1964 3 1964 1965 2033 3 2034 2033 1965 3 1965 1966 2034 3 2035 2034 1966 3 1966 1967 2035 3 2036 2035 1967 3 1967 1968 2036 3 2037 2036 1968 3 1968 1969 2037 3 2038 2037 1969 3 1969 1970 2038 3 2039 2038 1970 3 1970 1971 2039 3 2040 2039 1971 3 1971 1972 2040 3 2041 2040 1972 3 1972 1973 2041 3 2042 2041 1973 3 1973 1974 2042 3 2043 2042 1974 3 1974 1975 2043 3 2044 2043 1975 3 1975 1976 2044 3 2045 2044 1976 3 1976 1977 2045 3 2046 2045 1977 3 1977 1978 2046 3 2047 2046 1978 3 1978 1979 2048 3 1978 2048 2047 3 1979 1980 2049 3 1979 2049 2048 3 1980 1981 2050 3 1980 2050 2049 3 1981 1982 2051 3 1981 2051 2050 3 1982 1983 2052 3 1982 2052 2051 3 1983 1984 2053 3 1983 2053 2052 3 1984 1985 2054 3 1984 2054 2053 3 1985 1986 2055 3 1985 2055 2054 3 1986 1987 2056 3 1986 2056 2055 3 1987 1988 2057 3 1987 2057 2056 3 1988 1989 2058 3 1988 2058 2057 3 1989 1990 2059 3 1989 2059 2058 3 1990 1991 2060 3 1990 2060 2059 3 1991 1992 2061 3 1991 2061 2060 3 1992 1993 2062 3 1992 2062 2061 3 1993 1994 2063 3 1993 2063 2062 3 1994 5001 2063 3 5005 2063 5001 3 1995 1996 2065 3 1995 2065 2064 3 1995 2064 5008 3 1995 5008 5004 3 1996 1997 2066 3 1996 2066 2065 3 1997 1998 2067 3 1997 2067 2066 3 1998 1999 2068 3 1998 2068 2067 3 1999 2000 2069 3 1999 2069 2068 3 2000 2001 2070 3 2000 2070 2069 3 2001 2002 2071 3 2001 2071 2070 3 2002 2003 2072 3 2002 2072 2071 3 2003 2004 2073 3 2003 2073 2072 3 2004 2005 2074 3 2004 2074 2073 3 2005 2006 2074 3 2075 2074 2006 3 2006 2007 2075 3 2076 2075 2007 3 2007 2008 2076 3 2077 2076 2008 3 2008 2009 2077 3 2078 2077 2009 3 2009 2010 2078 3 2079 2078 2010 3 2010 2011 2079 3 2080 2079 2011 3 2011 2012 2080 3 2081 2080 2012 3 2012 2013 2081 3 2082 2081 2013 3 2013 2014 2082 3 2083 2082 2014 3 2014 2015 2083 3 2084 2083 2015 3 2015 2016 2084 3 2085 2084 2016 3 2016 2017 2085 3 2086 2085 2017 3 2017 2018 2086 3 2087 2086 2018 3 2018 2019 2087 3 2088 2087 2019 3 2019 2020 2088 
3 2089 2088 2020 3 2020 2021 2089 3 2090 2089 2021 3 2021 2022 2090 3 2091 2090 2022 3 2022 2023 2091 3 2092 2091 2023 3 2023 2024 2092 3 2093 2092 2024 3 2024 2025 2093 3 2094 2093 2025 3 2025 2026 2094 3 2095 2094 2026 3 2026 2027 2095 3 2096 2095 2027 3 2027 2028 2096 3 2097 2096 2028 3 2028 2029 2097 3 2098 2097 2029 3 2029 2030 2098 3 2099 2098 2030 3 2030 2031 2099 3 2100 2099 2031 3 2031 2032 2100 3 2101 2100 2032 3 2032 2033 2101 3 2102 2101 2033 3 2033 2034 2103 3 2033 2103 2102 3 2034 2035 2104 3 2034 2104 2103 3 2035 2036 2105 3 2035 2105 2104 3 2036 2037 2106 3 2036 2106 2105 3 2037 2038 2107 3 2037 2107 2106 3 2038 2039 2108 3 2038 2108 2107 3 2039 2040 2109 3 2039 2109 2108 3 2040 2041 2110 3 2040 2110 2109 3 2041 2042 2111 3 2041 2111 2110 3 2042 2043 2112 3 2042 2112 2111 3 2043 2044 2113 3 2043 2113 2112 3 2044 2045 2114 3 2044 2114 2113 3 2045 2046 2115 3 2045 2115 2114 3 2046 2047 2116 3 2046 2116 2115 3 2047 2048 2117 3 2047 2117 2116 3 2048 2049 2118 3 2048 2118 2117 3 2049 2050 2119 3 2049 2119 2118 3 2050 2051 2120 3 2050 2120 2119 3 2051 2052 2121 3 2051 2121 2120 3 2052 2053 2122 3 2052 2122 2121 3 2053 2054 2123 3 2053 2123 2122 3 2054 2055 2124 3 2054 2124 2123 3 2055 2056 2125 3 2055 2125 2124 3 2056 2057 2126 3 2056 2126 2125 3 2057 2058 2127 3 2057 2127 2126 3 2058 2059 2128 3 2058 2128 2127 3 2059 2060 2129 3 2059 2129 2128 3 2060 2061 2130 3 2060 2130 2129 3 2061 2062 2130 3 2131 2130 2062 3 2062 2063 2131 3 2132 2131 2063 3 2063 5005 5009 3 2063 5009 2132 3 2064 2065 2133 3 2134 2133 2065 3 2064 2133 5012 3 2064 5012 5008 3 2065 2066 2134 3 2135 2134 2066 3 2066 2067 2135 3 2136 2135 2067 3 2067 2068 2136 3 2137 2136 2068 3 2068 2069 2137 3 2138 2137 2069 3 2069 2070 2138 3 2139 2138 2070 3 2070 2071 2139 3 2140 2139 2071 3 2071 2072 2140 3 2141 2140 2072 3 2072 2073 2141 3 2142 2141 2073 3 2073 2074 2142 3 2143 2142 2074 3 2074 2075 2143 3 2144 2143 2075 3 2075 2076 2144 3 2145 2144 2076 3 2076 2077 2145 3 2146 2145 2077 3 2077 
2078 2146 3 2147 2146 2078 3 2078 2079 2147 3 2148 2147 2079 3 2079 2080 2148 3 2149 2148 2080 3 2080 2081 2149 3 2150 2149 2081 3 2081 2082 2150 3 2151 2150 2082 3 2082 2083 2151 3 2152 2151 2083 3 2083 2084 2152 3 2153 2152 2084 3 2084 2085 2153 3 2154 2153 2085 3 2085 2086 2154 3 2155 2154 2086 3 2086 2087 2155 3 2156 2155 2087 3 2087 2088 2156 3 2157 2156 2088 3 2088 2089 2157 3 2158 2157 2089 3 2089 2090 2159 3 2089 2159 2158 3 2090 2091 2160 3 2090 2160 2159 3 2091 2092 2161 3 2091 2161 2160 3 2092 2093 2162 3 2092 2162 2161 3 2093 2094 2163 3 2093 2163 2162 3 2094 2095 2164 3 2094 2164 2163 3 2095 2096 2165 3 2095 2165 2164 3 2096 2097 2166 3 2096 2166 2165 3 2097 2098 2167 3 2097 2167 2166 3 2098 2099 2168 3 2098 2168 2167 3 2099 2100 2169 3 2099 2169 2168 3 2100 2101 2170 3 2100 2170 2169 3 2101 2102 2171 3 2101 2171 2170 3 2102 2103 2172 3 2102 2172 2171 3 2103 2104 2173 3 2103 2173 2172 3 2104 2105 2174 3 2104 2174 2173 3 2105 2106 2175 3 2105 2175 2174 3 2106 2107 2176 3 2106 2176 2175 3 2107 2108 2177 3 2107 2177 2176 3 2108 2109 2178 3 2108 2178 2177 3 2109 2110 2179 3 2109 2179 2178 3 2110 2111 2180 3 2110 2180 2179 3 2111 2112 2181 3 2111 2181 2180 3 2112 2113 2182 3 2112 2182 2181 3 2113 2114 2183 3 2113 2183 2182 3 2114 2115 2184 3 2114 2184 2183 3 2115 2116 2185 3 2115 2185 2184 3 2116 2117 2186 3 2116 2186 2185 3 2117 2118 2186 3 2187 2186 2118 3 2118 2119 2187 3 2188 2187 2119 3 2119 2120 2188 3 2189 2188 2120 3 2120 2121 2189 3 2190 2189 2121 3 2121 2122 2190 3 2191 2190 2122 3 2122 2123 2191 3 2192 2191 2123 3 2123 2124 2192 3 2193 2192 2124 3 2124 2125 2193 3 2194 2193 2125 3 2125 2126 2194 3 2195 2194 2126 3 2126 2127 2195 3 2196 2195 2127 3 2127 2128 2196 3 2197 2196 2128 3 2128 2129 2197 3 2198 2197 2129 3 2129 2130 2198 3 2199 2198 2130 3 2130 2131 2199 3 2200 2199 2131 3 2131 2132 2200 3 2201 2200 2132 3 2132 5009 2201 3 5013 2201 5009 3 2133 2134 2202 3 2203 2202 2134 3 2133 2202 5016 3 2133 5016 5012 3 2134 2135 2203 3 2204 2203 2135 
3 2135 2136 2204 3 2205 2204 2136 3 2136 2137 2205 3 2206 2205 2137 3 2137 2138 2206 3 2207 2206 2138 3 2138 2139 2207 3 2208 2207 2139 3 2139 2140 2208 3 2209 2208 2140 3 2140 2141 2209 3 2210 2209 2141 3 2141 2142 2210 3 2211 2210 2142 3 2142 2143 2211 3 2212 2211 2143 3 2143 2144 2212 3 2213 2212 2144 3 2144 2145 2213 3 2214 2213 2145 3 2145 2146 2215 3 2145 2215 2214 3 2146 2147 2216 3 2146 2216 2215 3 2147 2148 2217 3 2147 2217 2216 3 2148 2149 2218 3 2148 2218 2217 3 2149 2150 2219 3 2149 2219 2218 3 2150 2151 2220 3 2150 2220 2219 3 2151 2152 2221 3 2151 2221 2220 3 2152 2153 2222 3 2152 2222 2221 3 2153 2154 2223 3 2153 2223 2222 3 2154 2155 2224 3 2154 2224 2223 3 2155 2156 2225 3 2155 2225 2224 3 2156 2157 2226 3 2156 2226 2225 3 2157 2158 2227 3 2157 2227 2226 3 2158 2159 2228 3 2158 2228 2227 3 2159 2160 2229 3 2159 2229 2228 3 2160 2161 2230 3 2160 2230 2229 3 2161 2162 2231 3 2161 2231 2230 3 2162 2163 2232 3 2162 2232 2231 3 2163 2164 2233 3 2163 2233 2232 3 2164 2165 2234 3 2164 2234 2233 3 2165 2166 2235 3 2165 2235 2234 3 2166 2167 2236 3 2166 2236 2235 3 2167 2168 2237 3 2167 2237 2236 3 2168 2169 2238 3 2168 2238 2237 3 2169 2170 2239 3 2169 2239 2238 3 2170 2171 2240 3 2170 2240 2239 3 2171 2172 2241 3 2171 2241 2240 3 2172 2173 2242 3 2172 2242 2241 3 2173 2174 2243 3 2173 2243 2242 3 2174 2175 2243 3 2244 2243 2175 3 2175 2176 2244 3 2245 2244 2176 3 2176 2177 2245 3 2246 2245 2177 3 2177 2178 2246 3 2247 2246 2178 3 2178 2179 2247 3 2248 2247 2179 3 2179 2180 2248 3 2249 2248 2180 3 2180 2181 2249 3 2250 2249 2181 3 2181 2182 2250 3 2251 2250 2182 3 2182 2183 2251 3 2252 2251 2183 3 2183 2184 2252 3 2253 2252 2184 3 2184 2185 2253 3 2254 2253 2185 3 2185 2186 2254 3 2255 2254 2186 3 2186 2187 2255 3 2256 2255 2187 3 2187 2188 2256 3 2257 2256 2188 3 2188 2189 2257 3 2258 2257 2189 3 2189 2190 2258 3 2259 2258 2190 3 2190 2191 2259 3 2260 2259 2191 3 2191 2192 2260 3 2261 2260 2192 3 2192 2193 2261 3 2262 2261 2193 3 2193 2194 2262 3 2263 
2262 2194 3 2194 2195 2263 3 2264 2263 2195 3 2195 2196 2264 3 2265 2264 2196 3 2196 2197 2265 3 2266 2265 2197 3 2197 2198 2266 3 2267 2266 2198 3 2198 2199 2267 3 2268 2267 2199 3 2199 2200 2268 3 2269 2268 2200 3 2200 2201 2269 3 2270 2269 2201 3 2201 5013 5017 3 2201 5017 2270 3 2202 2203 2271 3 2272 2271 2203 3 2202 2271 5016 3 5020 5016 2271 3 2203 2204 2273 3 2203 2273 2272 3 2204 2205 2274 3 2204 2274 2273 3 2205 2206 2275 3 2205 2275 2274 3 2206 2207 2276 3 2206 2276 2275 3 2207 2208 2277 3 2207 2277 2276 3 2208 2209 2278 3 2208 2278 2277 3 2209 2210 2279 3 2209 2279 2278 3 2210 2211 2280 3 2210 2280 2279 3 2211 2212 2281 3 2211 2281 2280 3 2212 2213 2282 3 2212 2282 2281 3 2213 2214 2283 3 2213 2283 2282 3 2214 2215 2284 3 2214 2284 2283 3 2215 2216 2285 3 2215 2285 2284 3 2216 2217 2286 3 2216 2286 2285 3 2217 2218 2287 3 2217 2287 2286 3 2218 2219 2288 3 2218 2288 2287 3 2219 2220 2289 3 2219 2289 2288 3 2220 2221 2290 3 2220 2290 2289 3 2221 2222 2291 3 2221 2291 2290 3 2222 2223 2292 3 2222 2292 2291 3 2223 2224 2293 3 2223 2293 2292 3 2224 2225 2294 3 2224 2294 2293 3 2225 2226 2295 3 2225 2295 2294 3 2226 2227 2296 3 2226 2296 2295 3 2227 2228 2297 3 2227 2297 2296 3 2228 2229 2298 3 2228 2298 2297 3 2229 2230 2299 3 2229 2299 2298 3 2230 2231 2300 3 2230 2300 2299 3 2231 2232 2301 3 2231 2301 2300 3 2232 2233 2301 3 2302 2301 2233 3 2233 2234 2302 3 2303 2302 2234 3 2234 2235 2303 3 2304 2303 2235 3 2235 2236 2304 3 2305 2304 2236 3 2236 2237 2305 3 2306 2305 2237 3 2237 2238 2306 3 2307 2306 2238 3 2238 2239 2307 3 2308 2307 2239 3 2239 2240 2308 3 2309 2308 2240 3 2240 2241 2309 3 2310 2309 2241 3 2241 2242 2310 3 2311 2310 2242 3 2242 2243 2311 3 2312 2311 2243 3 2243 2244 2312 3 2313 2312 2244 3 2244 2245 2313 3 2314 2313 2245 3 2245 2246 2314 3 2315 2314 2246 3 2246 2247 2315 3 2316 2315 2247 3 2247 2248 2316 3 2317 2316 2248 3 2248 2249 2317 3 2318 2317 2249 3 2249 2250 2318 3 2319 2318 2250 3 2250 2251 2319 3 2320 2319 2251 3 2251 2252 2320 
3 2321 2320 2252 3 2252 2253 2321 3 2322 2321 2253 3 2253 2254 2322 3 2323 2322 2254 3 2254 2255 2323 3 2324 2323 2255 3 2255 2256 2324 3 2325 2324 2256 3 2256 2257 2325 3 2326 2325 2257 3 2257 2258 2326 3 2327 2326 2258 3 2258 2259 2327 3 2328 2327 2259 3 2259 2260 2328 3 2329 2328 2260 3 2260 2261 2329 3 2330 2329 2261 3 2261 2262 2331 3 2261 2331 2330 3 2262 2263 2332 3 2262 2332 2331 3 2263 2264 2333 3 2263 2333 2332 3 2264 2265 2334 3 2264 2334 2333 3 2265 2266 2335 3 2265 2335 2334 3 2266 2267 2336 3 2266 2336 2335 3 2267 2268 2337 3 2267 2337 2336 3 2268 2269 2338 3 2268 2338 2337 3 2269 2270 2339 3 2269 2339 2338 3 2270 5017 2339 3 5021 2339 5017 3 2271 2272 2341 3 2271 2341 2340 3 2271 2340 5020 3 5024 5020 2340 3 2272 2273 2342 3 2272 2342 2341 3 2273 2274 2343 3 2273 2343 2342 3 2274 2275 2344 3 2274 2344 2343 3 2275 2276 2345 3 2275 2345 2344 3 2276 2277 2346 3 2276 2346 2345 3 2277 2278 2347 3 2277 2347 2346 3 2278 2279 2348 3 2278 2348 2347 3 2279 2280 2349 3 2279 2349 2348 3 2280 2281 2350 3 2280 2350 2349 3 2281 2282 2351 3 2281 2351 2350 3 2282 2283 2352 3 2282 2352 2351 3 2283 2284 2353 3 2283 2353 2352 3 2284 2285 2354 3 2284 2354 2353 3 2285 2286 2355 3 2285 2355 2354 3 2286 2287 2356 3 2286 2356 2355 3 2287 2288 2357 3 2287 2357 2356 3 2288 2289 2358 3 2288 2358 2357 3 2289 2290 2359 3 2289 2359 2358 3 2290 2291 2359 3 2360 2359 2291 3 2291 2292 2360 3 2361 2360 2292 3 2292 2293 2361 3 2362 2361 2293 3 2293 2294 2362 3 2363 2362 2294 3 2294 2295 2363 3 2364 2363 2295 3 2295 2296 2364 3 2365 2364 2296 3 2296 2297 2365 3 2366 2365 2297 3 2297 2298 2366 3 2367 2366 2298 3 2298 2299 2367 3 2368 2367 2299 3 2299 2300 2368 3 2369 2368 2300 3 2300 2301 2369 3 2370 2369 2301 3 2301 2302 2370 3 2371 2370 2302 3 2302 2303 2371 3 2372 2371 2303 3 2303 2304 2372 3 2373 2372 2304 3 2304 2305 2373 3 2374 2373 2305 3 2305 2306 2374 3 2375 2374 2306 3 2306 2307 2375 3 2376 2375 2307 3 2307 2308 2376 3 2377 2376 2308 3 2308 2309 2377 3 2378 2377 2309 3 2309 
2310 2378 3 2379 2378 2310 3 2310 2311 2379 3 2380 2379 2311 3 2311 2312 2380 3 2381 2380 2312 3 2312 2313 2381 3 2382 2381 2313 3 2313 2314 2382 3 2383 2382 2314 3 2314 2315 2383 3 2384 2383 2315 3 2315 2316 2384 3 2385 2384 2316 3 2316 2317 2385 3 2386 2385 2317 3 2317 2318 2386 3 2387 2386 2318 3 2318 2319 2387 3 2388 2387 2319 3 2319 2320 2388 3 2389 2388 2320 3 2320 2321 2390 3 2320 2390 2389 3 2321 2322 2391 3 2321 2391 2390 3 2322 2323 2392 3 2322 2392 2391 3 2323 2324 2393 3 2323 2393 2392 3 2324 2325 2394 3 2324 2394 2393 3 2325 2326 2395 3 2325 2395 2394 3 2326 2327 2396 3 2326 2396 2395 3 2327 2328 2397 3 2327 2397 2396 3 2328 2329 2398 3 2328 2398 2397 3 2329 2330 2399 3 2329 2399 2398 3 2330 2331 2400 3 2330 2400 2399 3 2331 2332 2401 3 2331 2401 2400 3 2332 2333 2402 3 2332 2402 2401 3 2333 2334 2403 3 2333 2403 2402 3 2334 2335 2404 3 2334 2404 2403 3 2335 2336 2405 3 2335 2405 2404 3 2336 2337 2406 3 2336 2406 2405 3 2337 2338 2407 3 2337 2407 2406 3 2338 2339 2408 3 2338 2408 2407 3 2339 5021 5025 3 2339 5025 2408 3 2340 2341 2410 3 2340 2410 2409 3 2340 2409 5028 3 2340 5028 5024 3 2341 2342 2411 3 2341 2411 2410 3 2342 2343 2412 3 2342 2412 2411 3 2343 2344 2413 3 2343 2413 2412 3 2344 2345 2414 3 2344 2414 2413 3 2345 2346 2415 3 2345 2415 2414 3 2346 2347 2416 3 2346 2416 2415 3 2347 2348 2417 3 2347 2417 2416 3 2348 2349 2418 3 2348 2418 2417 3 2349 2350 2419 3 2349 2419 2418 3 2350 2351 2419 3 2420 2419 2351 3 2351 2352 2420 3 2421 2420 2352 3 2352 2353 2421 3 2422 2421 2353 3 2353 2354 2422 3 2423 2422 2354 3 2354 2355 2423 3 2424 2423 2355 3 2355 2356 2424 3 2425 2424 2356 3 2356 2357 2425 3 2426 2425 2357 3 2357 2358 2426 3 2427 2426 2358 3 2358 2359 2427 3 2428 2427 2359 3 2359 2360 2428 3 2429 2428 2360 3 2360 2361 2429 3 2430 2429 2361 3 2361 2362 2430 3 2431 2430 2362 3 2362 2363 2431 3 2432 2431 2363 3 2363 2364 2432 3 2433 2432 2364 3 2364 2365 2433 3 2434 2433 2365 3 2365 2366 2434 3 2435 2434 2366 3 2366 2367 2435 3 2436 2435 2367 
3 2367 2368 2436 3 2437 2436 2368 3 2368 2369 2437 3 2438 2437 2369 3 2369 2370 2438 3 2439 2438 2370 3 2370 2371 2439 3 2440 2439 2371 3 2371 2372 2440 3 2441 2440 2372 3 2372 2373 2441 3 2442 2441 2373 3 2373 2374 2442 3 2443 2442 2374 3 2374 2375 2443 3 2444 2443 2375 3 2375 2376 2444 3 2445 2444 2376 3 2376 2377 2445 3 2446 2445 2377 3 2377 2378 2446 3 2447 2446 2378 3 2378 2379 2447 3 2448 2447 2379 3 2379 2380 2449 3 2379 2449 2448 3 2380 2381 2450 3 2380 2450 2449 3 2381 2382 2451 3 2381 2451 2450 3 2382 2383 2452 3 2382 2452 2451 3 2383 2384 2453 3 2383 2453 2452 3 2384 2385 2454 3 2384 2454 2453 3 2385 2386 2455 3 2385 2455 2454 3 2386 2387 2456 3 2386 2456 2455 3 2387 2388 2457 3 2387 2457 2456 3 2388 2389 2458 3 2388 2458 2457 3 2389 2390 2459 3 2389 2459 2458 3 2390 2391 2460 3 2390 2460 2459 3 2391 2392 2461 3 2391 2461 2460 3 2392 2393 2462 3 2392 2462 2461 3 2393 2394 2463 3 2393 2463 2462 3 2394 2395 2464 3 2394 2464 2463 3 2395 2396 2465 3 2395 2465 2464 3 2396 2397 2466 3 2396 2466 2465 3 2397 2398 2467 3 2397 2467 2466 3 2398 2399 2468 3 2398 2468 2467 3 2399 2400 2469 3 2399 2469 2468 3 2400 2401 2470 3 2400 2470 2469 3 2401 2402 2471 3 2401 2471 2470 3 2402 2403 2472 3 2402 2472 2471 3 2403 2404 2473 3 2403 2473 2472 3 2404 2405 2474 3 2404 2474 2473 3 2405 2406 2475 3 2405 2475 2474 3 2406 2407 2476 3 2406 2476 2475 3 2407 2408 2477 3 2407 2477 2476 3 2408 5025 2477 3 5029 2477 5025 3 2409 2410 2479 3 2409 2479 2478 3 2409 2478 5032 3 2409 5032 5028 3 2410 2411 2479 3 2480 2479 2411 3 2411 2412 2480 3 2481 2480 2412 3 2412 2413 2481 3 2482 2481 2413 3 2413 2414 2482 3 2483 2482 2414 3 2414 2415 2483 3 2484 2483 2415 3 2415 2416 2484 3 2485 2484 2416 3 2416 2417 2485 3 2486 2485 2417 3 2417 2418 2486 3 2487 2486 2418 3 2418 2419 2487 3 2488 2487 2419 3 2419 2420 2488 3 2489 2488 2420 3 2420 2421 2489 3 2490 2489 2421 3 2421 2422 2490 3 2491 2490 2422 3 2422 2423 2491 3 2492 2491 2423 3 2423 2424 2492 3 2493 2492 2424 3 2424 2425 2493 3 2494 
2493 2425 3 2425 2426 2494 3 2495 2494 2426 3 2426 2427 2495 3 2496 2495 2427 3 2427 2428 2496 3 2497 2496 2428 3 2428 2429 2497 3 2498 2497 2429 3 2429 2430 2498 3 2499 2498 2430 3 2430 2431 2499 3 2500 2499 2431 3 2431 2432 2500 3 2501 2500 2432 3 2432 2433 2501 3 2502 2501 2433 3 2433 2434 2502 3 2503 2502 2434 3 2434 2435 2503 3 2504 2503 2435 3 2435 2436 2504 3 2505 2504 2436 3 2436 2437 2505 3 2506 2505 2437 3 2437 2438 2506 3 2507 2506 2438 3 2438 2439 2507 3 2508 2507 2439 3 2439 2440 2508 3 2509 2508 2440 3 2440 2441 2510 3 2440 2510 2509 3 2441 2442 2511 3 2441 2511 2510 3 2442 2443 2512 3 2442 2512 2511 3 2443 2444 2513 3 2443 2513 2512 3 2444 2445 2514 3 2444 2514 2513 3 2445 2446 2515 3 2445 2515 2514 3 2446 2447 2516 3 2446 2516 2515 3 2447 2448 2517 3 2447 2517 2516 3 2448 2449 2518 3 2448 2518 2517 3 2449 2450 2519 3 2449 2519 2518 3 2450 2451 2520 3 2450 2520 2519 3 2451 2452 2521 3 2451 2521 2520 3 2452 2453 2522 3 2452 2522 2521 3 2453 2454 2523 3 2453 2523 2522 3 2454 2455 2524 3 2454 2524 2523 3 2455 2456 2525 3 2455 2525 2524 3 2456 2457 2526 3 2456 2526 2525 3 2457 2458 2527 3 2457 2527 2526 3 2458 2459 2528 3 2458 2528 2527 3 2459 2460 2529 3 2459 2529 2528 3 2460 2461 2530 3 2460 2530 2529 3 2461 2462 2531 3 2461 2531 2530 3 2462 2463 2532 3 2462 2532 2531 3 2463 2464 2533 3 2463 2533 2532 3 2464 2465 2534 3 2464 2534 2533 3 2465 2466 2535 3 2465 2535 2534 3 2466 2467 2536 3 2466 2536 2535 3 2467 2468 2537 3 2467 2537 2536 3 2468 2469 2538 3 2468 2538 2537 3 2469 2470 2539 3 2469 2539 2538 3 2470 2471 2539 3 2540 2539 2471 3 2471 2472 2540 3 2541 2540 2472 3 2472 2473 2541 3 2542 2541 2473 3 2473 2474 2542 3 2543 2542 2474 3 2474 2475 2543 3 2544 2543 2475 3 2475 2476 2544 3 2545 2544 2476 3 2476 2477 2545 3 2546 2545 2477 3 2477 5029 5033 3 2477 5033 2546 3 2478 2479 2547 3 2548 2547 2479 3 2478 2547 5036 3 2478 5036 5032 3 2479 2480 2548 3 2549 2548 2480 3 2480 2481 2549 3 2550 2549 2481 3 2481 2482 2550 3 2551 2550 2482 3 2482 2483 2551 
3 2552 2551 2483 3 2483 2484 2552 3 2553 2552 2484 3 2484 2485 2553 3 2554 2553 2485 3 2485 2486 2554 3 2555 2554 2486 3 2486 2487 2555 3 2556 2555 2487 3 2487 2488 2556 3 2557 2556 2488 3 2488 2489 2557 3 2558 2557 2489 3 2489 2490 2558 3 2559 2558 2490 3 2490 2491 2559 3 2560 2559 2491 3 2491 2492 2560 3 2561 2560 2492 3 2492 2493 2561 3 2562 2561 2493 3 2493 2494 2562 3 2563 2562 2494 3 2494 2495 2563 3 2564 2563 2495 3 2495 2496 2564 3 2565 2564 2496 3 2496 2497 2565 3 2566 2565 2497 3 2497 2498 2566 3 2567 2566 2498 3 2498 2499 2567 3 2568 2567 2499 3 2499 2500 2568 3 2569 2568 2500 3 2500 2501 2569 3 2570 2569 2501 3 2501 2502 2571 3 2501 2571 2570 3 2502 2503 2572 3 2502 2572 2571 3 2503 2504 2573 3 2503 2573 2572 3 2504 2505 2574 3 2504 2574 2573 3 2505 2506 2575 3 2505 2575 2574 3 2506 2507 2576 3 2506 2576 2575 3 2507 2508 2577 3 2507 2577 2576 3 2508 2509 2578 3 2508 2578 2577 3 2509 2510 2579 3 2509 2579 2578 3 2510 2511 2580 3 2510 2580 2579 3 2511 2512 2581 3 2511 2581 2580 3 2512 2513 2582 3 2512 2582 2581 3 2513 2514 2583 3 2513 2583 2582 3 2514 2515 2584 3 2514 2584 2583 3 2515 2516 2585 3 2515 2585 2584 3 2516 2517 2586 3 2516 2586 2585 3 2517 2518 2587 3 2517 2587 2586 3 2518 2519 2588 3 2518 2588 2587 3 2519 2520 2589 3 2519 2589 2588 3 2520 2521 2590 3 2520 2590 2589 3 2521 2522 2591 3 2521 2591 2590 3 2522 2523 2592 3 2522 2592 2591 3 2523 2524 2593 3 2523 2593 2592 3 2524 2525 2594 3 2524 2594 2593 3 2525 2526 2595 3 2525 2595 2594 3 2526 2527 2596 3 2526 2596 2595 3 2527 2528 2597 3 2527 2597 2596 3 2528 2529 2598 3 2528 2598 2597 3 2529 2530 2599 3 2529 2599 2598 3 2530 2531 2600 3 2530 2600 2599 3 2531 2532 2601 3 2531 2601 2600 3 2532 2533 2601 3 2602 2601 2533 3 2533 2534 2602 3 2603 2602 2534 3 2534 2535 2603 3 2604 2603 2535 3 2535 2536 2604 3 2605 2604 2536 3 2536 2537 2605 3 2606 2605 2537 3 2537 2538 2606 3 2607 2606 2538 3 2538 2539 2607 3 2608 2607 2539 3 2539 2540 2608 3 2609 2608 2540 3 2540 2541 2609 3 2610 2609 2541 3 2541 
2542 2610 3 2611 2610 2542 3 2542 2543 2611 3 2612 2611 2543 3 2543 2544 2612 3 2613 2612 2544 3 2544 2545 2613 3 2614 2613 2545 3 2545 2546 2614 3 2615 2614 2546 3 2546 5033 2615 3 5037 2615 5033 3 2547 2548 2616 3 2617 2616 2548 3 2547 2616 5040 3 2547 5040 5036 3 2548 2549 2617 3 2618 2617 2549 3 2549 2550 2618 3 2619 2618 2550 3 2550 2551 2619 3 2620 2619 2551 3 2551 2552 2620 3 2621 2620 2552 3 2552 2553 2621 3 2622 2621 2553 3 2553 2554 2622 3 2623 2622 2554 3 2554 2555 2623 3 2624 2623 2555 3 2555 2556 2624 3 2625 2624 2556 3 2556 2557 2625 3 2626 2625 2557 3 2557 2558 2626 3 2627 2626 2558 3 2558 2559 2627 3 2628 2627 2559 3 2559 2560 2628 3 2629 2628 2560 3 2560 2561 2629 3 2630 2629 2561 3 2561 2562 2630 3 2631 2630 2562 3 2562 2563 2631 3 2632 2631 2563 3 2563 2564 2633 3 2563 2633 2632 3 2564 2565 2634 3 2564 2634 2633 3 2565 2566 2635 3 2565 2635 2634 3 2566 2567 2636 3 2566 2636 2635 3 2567 2568 2637 3 2567 2637 2636 3 2568 2569 2638 3 2568 2638 2637 3 2569 2570 2639 3 2569 2639 2638 3 2570 2571 2640 3 2570 2640 2639 3 2571 2572 2641 3 2571 2641 2640 3 2572 2573 2642 3 2572 2642 2641 3 2573 2574 2643 3 2573 2643 2642 3 2574 2575 2644 3 2574 2644 2643 3 2575 2576 2645 3 2575 2645 2644 3 2576 2577 2646 3 2576 2646 2645 3 2577 2578 2647 3 2577 2647 2646 3 2578 2579 2648 3 2578 2648 2647 3 2579 2580 2649 3 2579 2649 2648 3 2580 2581 2650 3 2580 2650 2649 3 2581 2582 2651 3 2581 2651 2650 3 2582 2583 2652 3 2582 2652 2651 3 2583 2584 2653 3 2583 2653 2652 3 2584 2585 2654 3 2584 2654 2653 3 2585 2586 2655 3 2585 2655 2654 3 2586 2587 2656 3 2586 2656 2655 3 2587 2588 2657 3 2587 2657 2656 3 2588 2589 2658 3 2588 2658 2657 3 2589 2590 2659 3 2589 2659 2658 3 2590 2591 2660 3 2590 2660 2659 3 2591 2592 2661 3 2591 2661 2660 3 2592 2593 2662 3 2592 2662 2661 3 2593 2594 2663 3 2593 2663 2662 3 2594 2595 2663 3 2664 2663 2595 3 2595 2596 2664 3 2665 2664 2596 3 2596 2597 2665 3 2666 2665 2597 3 2597 2598 2666 3 2667 2666 2598 3 2598 2599 2667 3 2668 2667 2599 
3 2599 2600 2668 3 2669 2668 2600 3 2600 2601 2669 3 2670 2669 2601 3 2601 2602 2670 3 2671 2670 2602 3 2602 2603 2671 3 2672 2671 2603 3 2603 2604 2672 3 2673 2672 2604 3 2604 2605 2673 3 2674 2673 2605 3 2605 2606 2674 3 2675 2674 2606 3 2606 2607 2675 3 2676 2675 2607 3 2607 2608 2676 3 2677 2676 2608 3 2608 2609 2677 3 2678 2677 2609 3 2609 2610 2678 3 2679 2678 2610 3 2610 2611 2679 3 2680 2679 2611 3 2611 2612 2680 3 2681 2680 2612 3 2612 2613 2681 3 2682 2681 2613 3 2613 2614 2682 3 2683 2682 2614 3 2614 2615 2683 3 2684 2683 2615 3 2615 5037 5041 3 2615 5041 2684 3 2616 2617 2685 3 2686 2685 2617 3 2616 2685 5044 3 2616 5044 5040 3 2617 2618 2686 3 2687 2686 2618 3 2618 2619 2687 3 2688 2687 2619 3 2619 2620 2688 3 2689 2688 2620 3 2620 2621 2689 3 2690 2689 2621 3 2621 2622 2690 3 2691 2690 2622 3 2622 2623 2691 3 2692 2691 2623 3 2623 2624 2692 3 2693 2692 2624 3 2624 2625 2693 3 2694 2693 2625 3 2625 2626 2694 3 2695 2694 2626 3 2626 2627 2696 3 2626 2696 2695 3 2627 2628 2697 3 2627 2697 2696 3 2628 2629 2698 3 2628 2698 2697 3 2629 2630 2699 3 2629 2699 2698 3 2630 2631 2700 3 2630 2700 2699 3 2631 2632 2701 3 2631 2701 2700 3 2632 2633 2702 3 2632 2702 2701 3 2633 2634 2703 3 2633 2703 2702 3 2634 2635 2704 3 2634 2704 2703 3 2635 2636 2705 3 2635 2705 2704 3 2636 2637 2706 3 2636 2706 2705 3 2637 2638 2707 3 2637 2707 2706 3 2638 2639 2708 3 2638 2708 2707 3 2639 2640 2709 3 2639 2709 2708 3 2640 2641 2710 3 2640 2710 2709 3 2641 2642 2711 3 2641 2711 2710 3 2642 2643 2712 3 2642 2712 2711 3 2643 2644 2713 3 2643 2713 2712 3 2644 2645 2714 3 2644 2714 2713 3 2645 2646 2715 3 2645 2715 2714 3 2646 2647 2716 3 2646 2716 2715 3 2647 2648 2717 3 2647 2717 2716 3 2648 2649 2718 3 2648 2718 2717 3 2649 2650 2719 3 2649 2719 2718 3 2650 2651 2720 3 2650 2720 2719 3 2651 2652 2721 3 2651 2721 2720 3 2652 2653 2722 3 2652 2722 2721 3 2653 2654 2723 3 2653 2723 2722 3 2654 2655 2724 3 2654 2724 2723 3 2655 2656 2725 3 2655 2725 2724 3 2656 2657 2726 3 2656 
2726 2725 3 2657 2658 2726 3 2727 2726 2658 3 2658 2659 2727 3 2728 2727 2659 3 2659 2660 2728 3 2729 2728 2660 3 2660 2661 2729 3 2730 2729 2661 3 2661 2662 2730 3 2731 2730 2662 3 2662 2663 2731 3 2732 2731 2663 3 2663 2664 2732 3 2733 2732 2664 3 2664 2665 2733 3 2734 2733 2665 3 2665 2666 2734 3 2735 2734 2666 3 2666 2667 2735 3 2736 2735 2667 3 2667 2668 2736 3 2737 2736 2668 3 2668 2669 2737 3 2738 2737 2669 3 2669 2670 2738 3 2739 2738 2670 3 2670 2671 2739 3 2740 2739 2671 3 2671 2672 2740 3 2741 2740 2672 3 2672 2673 2741 3 2742 2741 2673 3 2673 2674 2742 3 2743 2742 2674 3 2674 2675 2743 3 2744 2743 2675 3 2675 2676 2744 3 2745 2744 2676 3 2676 2677 2745 3 2746 2745 2677 3 2677 2678 2746 3 2747 2746 2678 3 2678 2679 2747 3 2748 2747 2679 3 2679 2680 2748 3 2749 2748 2680 3 2680 2681 2749 3 2750 2749 2681 3 2681 2682 2750 3 2751 2750 2682 3 2682 2683 2751 3 2752 2751 2683 3 2683 2684 2752 3 2753 2752 2684 3 2684 5041 2753 3 5045 2753 5041 3 2685 2686 2754 3 2755 2754 2686 3 2685 2754 5044 3 5048 5044 2754 3 2686 2687 2755 3 2756 2755 2687 3 2687 2688 2756 3 2757 2756 2688 3 2688 2689 2757 3 2758 2757 2689 3 2689 2690 2759 3 2689 2759 2758 3 2690 2691 2760 3 2690 2760 2759 3 2691 2692 2761 3 2691 2761 2760 3 2692 2693 2762 3 2692 2762 2761 3 2693 2694 2763 3 2693 2763 2762 3 2694 2695 2764 3 2694 2764 2763 3 2695 2696 2765 3 2695 2765 2764 3 2696 2697 2766 3 2696 2766 2765 3 2697 2698 2767 3 2697 2767 2766 3 2698 2699 2768 3 2698 2768 2767 3 2699 2700 2769 3 2699 2769 2768 3 2700 2701 2770 3 2700 2770 2769 3 2701 2702 2771 3 2701 2771 2770 3 2702 2703 2772 3 2702 2772 2771 3 2703 2704 2773 3 2703 2773 2772 3 2704 2705 2774 3 2704 2774 2773 3 2705 2706 2775 3 2705 2775 2774 3 2706 2707 2776 3 2706 2776 2775 3 2707 2708 2777 3 2707 2777 2776 3 2708 2709 2778 3 2708 2778 2777 3 2709 2710 2779 3 2709 2779 2778 3 2710 2711 2780 3 2710 2780 2779 3 2711 2712 2781 3 2711 2781 2780 3 2712 2713 2782 3 2712 2782 2781 3 2713 2714 2783 3 2713 2783 2782 3 2714 2715 2784 
3 2714 2784 2783 3 2715 2716 2785 3 2715 2785 2784 3 2716 2717 2786 3 2716 2786 2785 3 2717 2718 2787 3 2717 2787 2786 3 2718 2719 2788 3 2718 2788 2787 3 2719 2720 2789 3 2719 2789 2788 3 2720 2721 2790 3 2720 2790 2789 3 2721 2722 2790 3 2791 2790 2722 3 2722 2723 2791 3 2792 2791 2723 3 2723 2724 2792 3 2793 2792 2724 3 2724 2725 2793 3 2794 2793 2725 3 2725 2726 2794 3 2795 2794 2726 3 2726 2727 2795 3 2796 2795 2727 3 2727 2728 2796 3 2797 2796 2728 3 2728 2729 2797 3 2798 2797 2729 3 2729 2730 2798 3 2799 2798 2730 3 2730 2731 2799 3 2800 2799 2731 3 2731 2732 2800 3 2801 2800 2732 3 2732 2733 2801 3 2802 2801 2733 3 2733 2734 2802 3 2803 2802 2734 3 2734 2735 2803 3 2804 2803 2735 3 2735 2736 2804 3 2805 2804 2736 3 2736 2737 2805 3 2806 2805 2737 3 2737 2738 2806 3 2807 2806 2738 3 2738 2739 2807 3 2808 2807 2739 3 2739 2740 2808 3 2809 2808 2740 3 2740 2741 2809 3 2810 2809 2741 3 2741 2742 2810 3 2811 2810 2742 3 2742 2743 2811 3 2812 2811 2743 3 2743 2744 2812 3 2813 2812 2744 3 2744 2745 2813 3 2814 2813 2745 3 2745 2746 2814 3 2815 2814 2746 3 2746 2747 2815 3 2816 2815 2747 3 2747 2748 2816 3 2817 2816 2748 3 2748 2749 2817 3 2818 2817 2749 3 2749 2750 2818 3 2819 2818 2750 3 2750 2751 2819 3 2820 2819 2751 3 2751 2752 2820 3 2821 2820 2752 3 2752 2753 2821 3 2822 2821 2753 3 2753 5045 5049 3 2753 5049 2822 3 2754 2755 2824 3 2754 2824 2823 3 2754 2823 5048 3 5052 5048 2823 3 2755 2756 2825 3 2755 2825 2824 3 2756 2757 2826 3 2756 2826 2825 3 2757 2758 2827 3 2757 2827 2826 3 2758 2759 2828 3 2758 2828 2827 3 2759 2760 2829 3 2759 2829 2828 3 2760 2761 2830 3 2760 2830 2829 3 2761 2762 2831 3 2761 2831 2830 3 2762 2763 2832 3 2762 2832 2831 3 2763 2764 2833 3 2763 2833 2832 3 2764 2765 2834 3 2764 2834 2833 3 2765 2766 2835 3 2765 2835 2834 3 2766 2767 2836 3 2766 2836 2835 3 2767 2768 2837 3 2767 2837 2836 3 2768 2769 2838 3 2768 2838 2837 3 2769 2770 2839 3 2769 2839 2838 3 2770 2771 2840 3 2770 2840 2839 3 2771 2772 2841 3 2771 2841 2840 3 2772 
2773 2842 3 2772 2842 2841 3 2773 2774 2843 3 2773 2843 2842 3 2774 2775 2844 3 2774 2844 2843 3 2775 2776 2845 3 2775 2845 2844 3 2776 2777 2846 3 2776 2846 2845 3 2777 2778 2847 3 2777 2847 2846 3 2778 2779 2848 3 2778 2848 2847 3 2779 2780 2849 3 2779 2849 2848 3 2780 2781 2850 3 2780 2850 2849 3 2781 2782 2851 3 2781 2851 2850 3 2782 2783 2852 3 2782 2852 2851 3 2783 2784 2853 3 2783 2853 2852 3 2784 2785 2854 3 2784 2854 2853 3 2785 2786 2855 3 2785 2855 2854 3 2786 2787 2855 3 2856 2855 2787 3 2787 2788 2856 3 2857 2856 2788 3 2788 2789 2857 3 2858 2857 2789 3 2789 2790 2858 3 2859 2858 2790 3 2790 2791 2859 3 2860 2859 2791 3 2791 2792 2860 3 2861 2860 2792 3 2792 2793 2861 3 2862 2861 2793 3 2793 2794 2862 3 2863 2862 2794 3 2794 2795 2863 3 2864 2863 2795 3 2795 2796 2864 3 2865 2864 2796 3 2796 2797 2865 3 2866 2865 2797 3 2797 2798 2866 3 2867 2866 2798 3 2798 2799 2867 3 2868 2867 2799 3 2799 2800 2868 3 2869 2868 2800 3 2800 2801 2869 3 2870 2869 2801 3 2801 2802 2870 3 2871 2870 2802 3 2802 2803 2871 3 2872 2871 2803 3 2803 2804 2872 3 2873 2872 2804 3 2804 2805 2873 3 2874 2873 2805 3 2805 2806 2874 3 2875 2874 2806 3 2806 2807 2875 3 2876 2875 2807 3 2807 2808 2876 3 2877 2876 2808 3 2808 2809 2877 3 2878 2877 2809 3 2809 2810 2878 3 2879 2878 2810 3 2810 2811 2879 3 2880 2879 2811 3 2811 2812 2880 3 2881 2880 2812 3 2812 2813 2881 3 2882 2881 2813 3 2813 2814 2882 3 2883 2882 2814 3 2814 2815 2883 3 2884 2883 2815 3 2815 2816 2884 3 2885 2884 2816 3 2816 2817 2885 3 2886 2885 2817 3 2817 2818 2886 3 2887 2886 2818 3 2818 2819 2888 3 2818 2888 2887 3 2819 2820 2889 3 2819 2889 2888 3 2820 2821 2890 3 2820 2890 2889 3 2821 2822 2891 3 2821 2891 2890 3 2822 5049 2891 3 5053 2891 5049 3 2823 2824 2893 3 2823 2893 2892 3 2823 2892 5052 3 5056 5052 2892 3 2824 2825 2894 3 2824 2894 2893 3 2825 2826 2895 3 2825 2895 2894 3 2826 2827 2896 3 2826 2896 2895 3 2827 2828 2897 3 2827 2897 2896 3 2828 2829 2898 3 2828 2898 2897 3 2829 2830 2899 3 2829 2899 2898 
3 2830 2831 2900 3 2830 2900 2899 3 2831 2832 2901 3 2831 2901 2900 3 2832 2833 2902 3 2832 2902 2901 3 2833 2834 2903 3 2833 2903 2902 3 2834 2835 2904 3 2834 2904 2903 3 2835 2836 2905 3 2835 2905 2904 3 2836 2837 2906 3 2836 2906 2905 3 2837 2838 2907 3 2837 2907 2906 3 2838 2839 2908 3 2838 2908 2907 3 2839 2840 2909 3 2839 2909 2908 3 2840 2841 2910 3 2840 2910 2909 3 2841 2842 2911 3 2841 2911 2910 3 2842 2843 2912 3 2842 2912 2911 3 2843 2844 2913 3 2843 2913 2912 3 2844 2845 2914 3 2844 2914 2913 3 2845 2846 2915 3 2845 2915 2914 3 2846 2847 2916 3 2846 2916 2915 3 2847 2848 2917 3 2847 2917 2916 3 2848 2849 2918 3 2848 2918 2917 3 2849 2850 2919 3 2849 2919 2918 3 2850 2851 2920 3 2850 2920 2919 3 2851 2852 2920 3 2921 2920 2852 3 2852 2853 2921 3 2922 2921 2853 3 2853 2854 2922 3 2923 2922 2854 3 2854 2855 2923 3 2924 2923 2855 3 2855 2856 2924 3 2925 2924 2856 3 2856 2857 2925 3 2926 2925 2857 3 2857 2858 2926 3 2927 2926 2858 3 2858 2859 2927 3 2928 2927 2859 3 2859 2860 2928 3 2929 2928 2860 3 2860 2861 2929 3 2930 2929 2861 3 2861 2862 2930 3 2931 2930 2862 3 2862 2863 2931 3 2932 2931 2863 3 2863 2864 2932 3 2933 2932 2864 3 2864 2865 2933 3 2934 2933 2865 3 2865 2866 2934 3 2935 2934 2866 3 2866 2867 2935 3 2936 2935 2867 3 2867 2868 2936 3 2937 2936 2868 3 2868 2869 2937 3 2938 2937 2869 3 2869 2870 2938 3 2939 2938 2870 3 2870 2871 2939 3 2940 2939 2871 3 2871 2872 2940 3 2941 2940 2872 3 2872 2873 2941 3 2942 2941 2873 3 2873 2874 2942 3 2943 2942 2874 3 2874 2875 2943 3 2944 2943 2875 3 2875 2876 2944 3 2945 2944 2876 3 2876 2877 2945 3 2946 2945 2877 3 2877 2878 2946 3 2947 2946 2878 3 2878 2879 2947 3 2948 2947 2879 3 2879 2880 2948 3 2949 2948 2880 3 2880 2881 2949 3 2950 2949 2881 3 2881 2882 2950 3 2951 2950 2882 3 2882 2883 2951 3 2952 2951 2883 3 2883 2884 2952 3 2953 2952 2884 3 2884 2885 2954 3 2884 2954 2953 3 2885 2886 2955 3 2885 2955 2954 3 2886 2887 2956 3 2886 2956 2955 3 2887 2888 2957 3 2887 2957 2956 3 2888 2889 2958 3 2888 
2958 2957 3 2889 2890 2959 3 2889 2959 2958 3 2890 2891 2960 3 2890 2960 2959 3 2891 5053 5057 3 2891 5057 2960 3 2892 2893 2962 3 2892 2962 2961 3 2892 2961 5056 3 5060 5056 2961 3 2893 2894 2963 3 2893 2963 2962 3 2894 2895 2964 3 2894 2964 2963 3 2895 2896 2965 3 2895 2965 2964 3 2896 2897 2966 3 2896 2966 2965 3 2897 2898 2967 3 2897 2967 2966 3 2898 2899 2968 3 2898 2968 2967 3 2899 2900 2969 3 2899 2969 2968 3 2900 2901 2970 3 2900 2970 2969 3 2901 2902 2971 3 2901 2971 2970 3 2902 2903 2972 3 2902 2972 2971 3 2903 2904 2973 3 2903 2973 2972 3 2904 2905 2974 3 2904 2974 2973 3 2905 2906 2975 3 2905 2975 2974 3 2906 2907 2976 3 2906 2976 2975 3 2907 2908 2977 3 2907 2977 2976 3 2908 2909 2978 3 2908 2978 2977 3 2909 2910 2979 3 2909 2979 2978 3 2910 2911 2980 3 2910 2980 2979 3 2911 2912 2981 3 2911 2981 2980 3 2912 2913 2982 3 2912 2982 2981 3 2913 2914 2983 3 2913 2983 2982 3 2914 2915 2984 3 2914 2984 2983 3 2915 2916 2985 3 2915 2985 2984 3 2916 2917 2986 3 2916 2986 2985 3 2917 2918 2986 3 2987 2986 2918 3 2918 2919 2987 3 2988 2987 2919 3 2919 2920 2988 3 2989 2988 2920 3 2920 2921 2989 3 2990 2989 2921 3 2921 2922 2990 3 2991 2990 2922 3 2922 2923 2991 3 2992 2991 2923 3 2923 2924 2992 3 2993 2992 2924 3 2924 2925 2993 3 2994 2993 2925 3 2925 2926 2994 3 2995 2994 2926 3 2926 2927 2995 3 2996 2995 2927 3 2927 2928 2996 3 2997 2996 2928 3 2928 2929 2997 3 2998 2997 2929 3 2929 2930 2998 3 2999 2998 2930 3 2930 2931 2999 3 3000 2999 2931 3 2931 2932 3000 3 3001 3000 2932 3 2932 2933 3001 3 3002 3001 2933 3 2933 2934 3002 3 3003 3002 2934 3 2934 2935 3003 3 3004 3003 2935 3 2935 2936 3004 3 3005 3004 2936 3 2936 2937 3005 3 3006 3005 2937 3 2937 2938 3006 3 3007 3006 2938 3 2938 2939 3007 3 3008 3007 2939 3 2939 2940 3008 3 3009 3008 2940 3 2940 2941 3009 3 3010 3009 2941 3 2941 2942 3010 3 3011 3010 2942 3 2942 2943 3011 3 3012 3011 2943 3 2943 2944 3012 3 3013 3012 2944 3 2944 2945 3013 3 3014 3013 2945 3 2945 2946 3014 3 3015 3014 2946 3 2946 2947 3015 
3 3016 3015 2947 3 2947 2948 3016 3 3017 3016 2948 3 2948 2949 3017 3 3018 3017 2949 3 2949 2950 3018 3 3019 3018 2950 3 2950 2951 3019 3 3020 3019 2951 3 2951 2952 3021 3 2951 3021 3020 3 2952 2953 3022 3 2952 3022 3021 3 2953 2954 3023 3 2953 3023 3022 3 2954 2955 3024 3 2954 3024 3023 3 2955 2956 3025 3 2955 3025 3024 3 2956 2957 3026 3 2956 3026 3025 3 2957 2958 3027 3 2957 3027 3026 3 2958 2959 3028 3 2958 3028 3027 3 2959 2960 3029 3 2959 3029 3028 3 2960 5057 3029 3 5061 3029 5057 3 2961 2962 3031 3 2961 3031 3030 3 2961 3030 5060 3 5064 5060 3030 3 2962 2963 3032 3 2962 3032 3031 3 2963 2964 3033 3 2963 3033 3032 3 2964 2965 3034 3 2964 3034 3033 3 2965 2966 3035 3 2965 3035 3034 3 2966 2967 3036 3 2966 3036 3035 3 2967 2968 3037 3 2967 3037 3036 3 2968 2969 3038 3 2968 3038 3037 3 2969 2970 3039 3 2969 3039 3038 3 2970 2971 3040 3 2970 3040 3039 3 2971 2972 3041 3 2971 3041 3040 3 2972 2973 3042 3 2972 3042 3041 3 2973 2974 3043 3 2973 3043 3042 3 2974 2975 3044 3 2974 3044 3043 3 2975 2976 3045 3 2975 3045 3044 3 2976 2977 3046 3 2976 3046 3045 3 2977 2978 3047 3 2977 3047 3046 3 2978 2979 3048 3 2978 3048 3047 3 2979 2980 3049 3 2979 3049 3048 3 2980 2981 3050 3 2980 3050 3049 3 2981 2982 3051 3 2981 3051 3050 3 2982 2983 3052 3 2982 3052 3051 3 2983 2984 3053 3 2983 3053 3052 3 2984 2985 3053 3 3054 3053 2985 3 2985 2986 3054 3 3055 3054 2986 3 2986 2987 3055 3 3056 3055 2987 3 2987 2988 3056 3 3057 3056 2988 3 2988 2989 3057 3 3058 3057 2989 3 2989 2990 3058 3 3059 3058 2990 3 2990 2991 3059 3 3060 3059 2991 3 2991 2992 3060 3 3061 3060 2992 3 2992 2993 3061 3 3062 3061 2993 3 2993 2994 3062 3 3063 3062 2994 3 2994 2995 3063 3 3064 3063 2995 3 2995 2996 3064 3 3065 3064 2996 3 2996 2997 3065 3 3066 3065 2997 3 2997 2998 3066 3 3067 3066 2998 3 2998 2999 3067 3 3068 3067 2999 3 2999 3000 3068 3 3069 3068 3000 3 3000 3001 3069 3 3070 3069 3001 3 3001 3002 3070 3 3071 3070 3002 3 3002 3003 3071 3 3072 3071 3003 3 3003 3004 3072 3 3073 3072 3004 3 3004 
3005 3073 3 3074 3073 3005 3 3005 3006 3074 3 3075 3074 3006 3 3006 3007 3075 3 3076 3075 3007 3 3007 3008 3076 3 3077 3076 3008 3 3008 3009 3077 3 3078 3077 3009 3 3009 3010 3078 3 3079 3078 3010 3 3010 3011 3079 3 3080 3079 3011 3 3011 3012 3080 3 3081 3080 3012 3 3012 3013 3081 3 3082 3081 3013 3 3013 3014 3082 3 3083 3082 3014 3 3014 3015 3083 3 3084 3083 3015 3 3015 3016 3084 3 3085 3084 3016 3 3016 3017 3085 3 3086 3085 3017 3 3017 3018 3086 3 3087 3086 3018 3 3018 3019 3088 3 3018 3088 3087 3 3019 3020 3089 3 3019 3089 3088 3 3020 3021 3090 3 3020 3090 3089 3 3021 3022 3091 3 3021 3091 3090 3 3022 3023 3092 3 3022 3092 3091 3 3023 3024 3093 3 3023 3093 3092 3 3024 3025 3094 3 3024 3094 3093 3 3025 3026 3095 3 3025 3095 3094 3 3026 3027 3096 3 3026 3096 3095 3 3027 3028 3097 3 3027 3097 3096 3 3028 3029 3098 3 3028 3098 3097 3 3029 5061 5065 3 3029 5065 3098 3 3030 3031 3100 3 3030 3100 3099 3 3030 3099 5064 3 5068 5064 3099 3 3031 3032 3101 3 3031 3101 3100 3 3032 3033 3102 3 3032 3102 3101 3 3033 3034 3103 3 3033 3103 3102 3 3034 3035 3104 3 3034 3104 3103 3 3035 3036 3105 3 3035 3105 3104 3 3036 3037 3106 3 3036 3106 3105 3 3037 3038 3107 3 3037 3107 3106 3 3038 3039 3108 3 3038 3108 3107 3 3039 3040 3109 3 3039 3109 3108 3 3040 3041 3110 3 3040 3110 3109 3 3041 3042 3111 3 3041 3111 3110 3 3042 3043 3112 3 3042 3112 3111 3 3043 3044 3113 3 3043 3113 3112 3 3044 3045 3114 3 3044 3114 3113 3 3045 3046 3115 3 3045 3115 3114 3 3046 3047 3116 3 3046 3116 3115 3 3047 3048 3117 3 3047 3117 3116 3 3048 3049 3118 3 3048 3118 3117 3 3049 3050 3119 3 3049 3119 3118 3 3050 3051 3120 3 3050 3120 3119 3 3051 3052 3121 3 3051 3121 3120 3 3052 3053 3121 3 3122 3121 3053 3 3053 3054 3122 3 3123 3122 3054 3 3054 3055 3123 3 3124 3123 3055 3 3055 3056 3124 3 3125 3124 3056 3 3056 3057 3125 3 3126 3125 3057 3 3057 3058 3126 3 3127 3126 3058 3 3058 3059 3127 3 3128 3127 3059 3 3059 3060 3128 3 3129 3128 3060 3 3060 3061 3129 3 3130 3129 3061 3 3061 3062 3130 3 3131 3130 3062 
3 3062 3063 3131 3 3132 3131 3063 3 3063 3064 3132 3 3133 3132 3064 3 3064 3065 3133 3 3134 3133 3065 3 3065 3066 3134 3 3135 3134 3066 3 3066 3067 3135 3 3136 3135 3067 3 3067 3068 3136 3 3137 3136 3068 3 3068 3069 3137 3 3138 3137 3069 3 3069 3070 3138 3 3139 3138 3070 3 3070 3071 3139 3 3140 3139 3071 3 3071 3072 3140 3 3141 3140 3072 3 3072 3073 3141 3 3142 3141 3073 3 3073 3074 3142 3 3143 3142 3074 3 3074 3075 3143 3 3144 3143 3075 3 3075 3076 3144 3 3145 3144 3076 3 3076 3077 3145 3 3146 3145 3077 3 3077 3078 3146 3 3147 3146 3078 3 3078 3079 3147 3 3148 3147 3079 3 3079 3080 3148 3 3149 3148 3080 3 3080 3081 3149 3 3150 3149 3081 3 3081 3082 3150 3 3151 3150 3082 3 3082 3083 3151 3 3152 3151 3083 3 3083 3084 3152 3 3153 3152 3084 3 3084 3085 3153 3 3154 3153 3085 3 3085 3086 3154 3 3155 3154 3086 3 3086 3087 3156 3 3086 3156 3155 3 3087 3088 3157 3 3087 3157 3156 3 3088 3089 3158 3 3088 3158 3157 3 3089 3090 3159 3 3089 3159 3158 3 3090 3091 3160 3 3090 3160 3159 3 3091 3092 3161 3 3091 3161 3160 3 3092 3093 3162 3 3092 3162 3161 3 3093 3094 3163 3 3093 3163 3162 3 3094 3095 3164 3 3094 3164 3163 3 3095 3096 3165 3 3095 3165 3164 3 3096 3097 3166 3 3096 3166 3165 3 3097 3098 3167 3 3097 3167 3166 3 3098 5065 3167 3 5069 3167 5065 3 3099 3100 3169 3 3099 3169 3168 3 3099 3168 5068 3 5072 5068 3168 3 3100 3101 3170 3 3100 3170 3169 3 3101 3102 3171 3 3101 3171 3170 3 3102 3103 3172 3 3102 3172 3171 3 3103 3104 3173 3 3103 3173 3172 3 3104 3105 3174 3 3104 3174 3173 3 3105 3106 3175 3 3105 3175 3174 3 3106 3107 3176 3 3106 3176 3175 3 3107 3108 3177 3 3107 3177 3176 3 3108 3109 3178 3 3108 3178 3177 3 3109 3110 3179 3 3109 3179 3178 3 3110 3111 3180 3 3110 3180 3179 3 3111 3112 3181 3 3111 3181 3180 3 3112 3113 3182 3 3112 3182 3181 3 3113 3114 3183 3 3113 3183 3182 3 3114 3115 3184 3 3114 3184 3183 3 3115 3116 3185 3 3115 3185 3184 3 3116 3117 3186 3 3116 3186 3185 3 3117 3118 3187 3 3117 3187 3186 3 3118 3119 3188 3 3118 3188 3187 3 3119 3120 3189 3 3119 
3189 3188 3 3120 3121 3189 3 3190 3189 3121 3 3121 3122 3190 3 3191 3190 3122 3 3122 3123 3191 3 3192 3191 3123 3 3123 3124 3192 3 3193 3192 3124 3 3124 3125 3193 3 3194 3193 3125 3 3125 3126 3194 3 3195 3194 3126 3 3126 3127 3195 3 3196 3195 3127 3 3127 3128 3196 3 3197 3196 3128 3 3128 3129 3197 3 3198 3197 3129 3 3129 3130 3198 3 3199 3198 3130 3 3130 3131 3199 3 3200 3199 3131 3 3131 3132 3200 3 3201 3200 3132 3 3132 3133 3201 3 3202 3201 3133 3 3133 3134 3202 3 3203 3202 3134 3 3134 3135 3203 3 3204 3203 3135 3 3135 3136 3204 3 3205 3204 3136 3 3136 3137 3205 3 3206 3205 3137 3 3137 3138 3206 3 3207 3206 3138 3 3138 3139 3207 3 3208 3207 3139 3 3139 3140 3208 3 3209 3208 3140 3 3140 3141 3209 3 3210 3209 3141 3 3141 3142 3210 3 3211 3210 3142 3 3142 3143 3211 3 3212 3211 3143 3 3143 3144 3212 3 3213 3212 3144 3 3144 3145 3213 3 3214 3213 3145 3 3145 3146 3214 3 3215 3214 3146 3 3146 3147 3215 3 3216 3215 3147 3 3147 3148 3216 3 3217 3216 3148 3 3148 3149 3217 3 3218 3217 3149 3 3149 3150 3218 3 3219 3218 3150 3 3150 3151 3219 3 3220 3219 3151 3 3151 3152 3220 3 3221 3220 3152 3 3152 3153 3221 3 3222 3221 3153 3 3153 3154 3222 3 3223 3222 3154 3 3154 3155 3223 3 3224 3223 3155 3 3155 3156 3225 3 3155 3225 3224 3 3156 3157 3226 3 3156 3226 3225 3 3157 3158 3227 3 3157 3227 3226 3 3158 3159 3228 3 3158 3228 3227 3 3159 3160 3229 3 3159 3229 3228 3 3160 3161 3230 3 3160 3230 3229 3 3161 3162 3231 3 3161 3231 3230 3 3162 3163 3232 3 3162 3232 3231 3 3163 3164 3233 3 3163 3233 3232 3 3164 3165 3234 3 3164 3234 3233 3 3165 3166 3235 3 3165 3235 3234 3 3166 3167 3236 3 3166 3236 3235 3 3167 5069 5073 3 3167 5073 3236 3 3168 3169 3238 3 3168 3238 3237 3 3168 3237 5072 3 5076 5072 3237 3 3169 3170 3239 3 3169 3239 3238 3 3170 3171 3240 3 3170 3240 3239 3 3171 3172 3241 3 3171 3241 3240 3 3172 3173 3242 3 3172 3242 3241 3 3173 3174 3243 3 3173 3243 3242 3 3174 3175 3244 3 3174 3244 3243 3 3175 3176 3245 3 3175 3245 3244 3 3176 3177 3246 3 3176 3246 3245 3 3177 3178 3247 
3 3177 3247 3246 3 3178 3179 3248 3 3178 3248 3247 3 3179 3180 3249 3 3179 3249 3248 3 3180 3181 3250 3 3180 3250 3249 3 3181 3182 3251 3 3181 3251 3250 3 3182 3183 3252 3 3182 3252 3251 3 3183 3184 3253 3 3183 3253 3252 3 3184 3185 3254 3 3184 3254 3253 3 3185 3186 3255 3 3185 3255 3254 3 3186 3187 3256 3 3186 3256 3255 3 3187 3188 3257 3 3187 3257 3256 3 3188 3189 3258 3 3188 3258 3257 3 3189 3190 3258 3 3259 3258 3190 3 3190 3191 3259 3 3260 3259 3191 3 3191 3192 3260 3 3261 3260 3192 3 3192 3193 3261 3 3262 3261 3193 3 3193 3194 3262 3 3263 3262 3194 3 3194 3195 3263 3 3264 3263 3195 3 3195 3196 3264 3 3265 3264 3196 3 3196 3197 3265 3 3266 3265 3197 3 3197 3198 3266 3 3267 3266 3198 3 3198 3199 3267 3 3268 3267 3199 3 3199 3200 3268 3 3269 3268 3200 3 3200 3201 3269 3 3270 3269 3201 3 3201 3202 3270 3 3271 3270 3202 3 3202 3203 3271 3 3272 3271 3203 3 3203 3204 3272 3 3273 3272 3204 3 3204 3205 3273 3 3274 3273 3205 3 3205 3206 3274 3 3275 3274 3206 3 3206 3207 3275 3 3276 3275 3207 3 3207 3208 3276 3 3277 3276 3208 3 3208 3209 3277 3 3278 3277 3209 3 3209 3210 3278 3 3279 3278 3210 3 3210 3211 3279 3 3280 3279 3211 3 3211 3212 3280 3 3281 3280 3212 3 3212 3213 3281 3 3282 3281 3213 3 3213 3214 3282 3 3283 3282 3214 3 3214 3215 3283 3 3284 3283 3215 3 3215 3216 3284 3 3285 3284 3216 3 3216 3217 3285 3 3286 3285 3217 3 3217 3218 3286 3 3287 3286 3218 3 3218 3219 3287 3 3288 3287 3219 3 3219 3220 3288 3 3289 3288 3220 3 3220 3221 3289 3 3290 3289 3221 3 3221 3222 3290 3 3291 3290 3222 3 3222 3223 3291 3 3292 3291 3223 3 3223 3224 3292 3 3293 3292 3224 3 3224 3225 3294 3 3224 3294 3293 3 3225 3226 3295 3 3225 3295 3294 3 3226 3227 3296 3 3226 3296 3295 3 3227 3228 3297 3 3227 3297 3296 3 3228 3229 3298 3 3228 3298 3297 3 3229 3230 3299 3 3229 3299 3298 3 3230 3231 3300 3 3230 3300 3299 3 3231 3232 3301 3 3231 3301 3300 3 3232 3233 3302 3 3232 3302 3301 3 3233 3234 3303 3 3233 3303 3302 3 3234 3235 3304 3 3234 3304 3303 3 3235 3236 3305 3 3235 3305 3304 3 3236 
5073 3305 3 5077 3305 5073 3 3237 3238 3307 3 3237 3307 3306 3 3237 3306 5076 3 5080 5076 3306 3 3238 3239 3308 3 3238 3308 3307 3 3239 3240 3309 3 3239 3309 3308 3 3240 3241 3310 3 3240 3310 3309 3 3241 3242 3311 3 3241 3311 3310 3 3242 3243 3312 3 3242 3312 3311 3 3243 3244 3313 3 3243 3313 3312 3 3244 3245 3314 3 3244 3314 3313 3 3245 3246 3315 3 3245 3315 3314 3 3246 3247 3316 3 3246 3316 3315 3 3247 3248 3317 3 3247 3317 3316 3 3248 3249 3318 3 3248 3318 3317 3 3249 3250 3319 3 3249 3319 3318 3 3250 3251 3320 3 3250 3320 3319 3 3251 3252 3321 3 3251 3321 3320 3 3252 3253 3322 3 3252 3322 3321 3 3253 3254 3323 3 3253 3323 3322 3 3254 3255 3324 3 3254 3324 3323 3 3255 3256 3325 3 3255 3325 3324 3 3256 3257 3326 3 3256 3326 3325 3 3257 3258 3327 3 3257 3327 3326 3 3258 3259 3328 3 3258 3328 3327 3 3259 3260 3328 3 3329 3328 3260 3 3260 3261 3329 3 3330 3329 3261 3 3261 3262 3330 3 3331 3330 3262 3 3262 3263 3331 3 3332 3331 3263 3 3263 3264 3332 3 3333 3332 3264 3 3264 3265 3333 3 3334 3333 3265 3 3265 3266 3334 3 3335 3334 3266 3 3266 3267 3335 3 3336 3335 3267 3 3267 3268 3336 3 3337 3336 3268 3 3268 3269 3337 3 3338 3337 3269 3 3269 3270 3338 3 3339 3338 3270 3 3270 3271 3339 3 3340 3339 3271 3 3271 3272 3340 3 3341 3340 3272 3 3272 3273 3341 3 3342 3341 3273 3 3273 3274 3342 3 3343 3342 3274 3 3274 3275 3343 3 3344 3343 3275 3 3275 3276 3344 3 3345 3344 3276 3 3276 3277 3345 3 3346 3345 3277 3 3277 3278 3346 3 3347 3346 3278 3 3278 3279 3347 3 3348 3347 3279 3 3279 3280 3348 3 3349 3348 3280 3 3280 3281 3349 3 3350 3349 3281 3 3281 3282 3350 3 3351 3350 3282 3 3282 3283 3351 3 3352 3351 3283 3 3283 3284 3352 3 3353 3352 3284 3 3284 3285 3353 3 3354 3353 3285 3 3285 3286 3354 3 3355 3354 3286 3 3286 3287 3355 3 3356 3355 3287 3 3287 3288 3356 3 3357 3356 3288 3 3288 3289 3357 3 3358 3357 3289 3 3289 3290 3358 3 3359 3358 3290 3 3290 3291 3359 3 3360 3359 3291 3 3291 3292 3360 3 3361 3360 3292 3 3292 3293 3361 3 3362 3361 3293 3 3293 3294 3362 3 3363 3362 3294 
3 3294 3295 3364 3 3294 3364 3363 3 3295 3296 3365 3 3295 3365 3364 3 3296 3297 3366 3 3296 3366 3365 3 3297 3298 3367 3 3297 3367 3366 3 3298 3299 3368 3 3298 3368 3367 3 3299 3300 3369 3 3299 3369 3368 3 3300 3301 3370 3 3300 3370 3369 3 3301 3302 3371 3 3301 3371 3370 3 3302 3303 3372 3 3302 3372 3371 3 3303 3304 3373 3 3303 3373 3372 3 3304 3305 3374 3 3304 3374 3373 3 3305 5077 5081 3 3305 5081 3374 3 3306 3307 3376 3 3306 3376 3375 3 3306 3375 5080 3 5084 5080 3375 3 3307 3308 3377 3 3307 3377 3376 3 3308 3309 3378 3 3308 3378 3377 3 3309 3310 3379 3 3309 3379 3378 3 3310 3311 3380 3 3310 3380 3379 3 3311 3312 3381 3 3311 3381 3380 3 3312 3313 3382 3 3312 3382 3381 3 3313 3314 3383 3 3313 3383 3382 3 3314 3315 3384 3 3314 3384 3383 3 3315 3316 3385 3 3315 3385 3384 3 3316 3317 3386 3 3316 3386 3385 3 3317 3318 3387 3 3317 3387 3386 3 3318 3319 3388 3 3318 3388 3387 3 3319 3320 3389 3 3319 3389 3388 3 3320 3321 3390 3 3320 3390 3389 3 3321 3322 3391 3 3321 3391 3390 3 3322 3323 3392 3 3322 3392 3391 3 3323 3324 3393 3 3323 3393 3392 3 3324 3325 3394 3 3324 3394 3393 3 3325 3326 3395 3 3325 3395 3394 3 3326 3327 3396 3 3326 3396 3395 3 3327 3328 3397 3 3327 3397 3396 3 3328 3329 3398 3 3328 3398 3397 3 3329 3330 3399 3 3329 3399 3398 3 3330 3331 3399 3 3400 3399 3331 3 3331 3332 3400 3 3401 3400 3332 3 3332 3333 3401 3 3402 3401 3333 3 3333 3334 3402 3 3403 3402 3334 3 3334 3335 3403 3 3404 3403 3335 3 3335 3336 3404 3 3405 3404 3336 3 3336 3337 3405 3 3406 3405 3337 3 3337 3338 3406 3 3407 3406 3338 3 3338 3339 3407 3 3408 3407 3339 3 3339 3340 3408 3 3409 3408 3340 3 3340 3341 3409 3 3410 3409 3341 3 3341 3342 3410 3 3411 3410 3342 3 3342 3343 3411 3 3412 3411 3343 3 3343 3344 3412 3 3413 3412 3344 3 3344 3345 3413 3 3414 3413 3345 3 3345 3346 3414 3 3415 3414 3346 3 3346 3347 3415 3 3416 3415 3347 3 3347 3348 3416 3 3417 3416 3348 3 3348 3349 3417 3 3418 3417 3349 3 3349 3350 3418 3 3419 3418 3350 3 3350 3351 3419 3 3420 3419 3351 3 3351 3352 3420 3 3421 
3420 3352 3 3352 3353 3421 3 3422 3421 3353 3 3353 3354 3422 3 3423 3422 3354 3 3354 3355 3423 3 3424 3423 3355 3 3355 3356 3424 3 3425 3424 3356 3 3356 3357 3425 3 3426 3425 3357 3 3357 3358 3426 3 3427 3426 3358 3 3358 3359 3427 3 3428 3427 3359 3 3359 3360 3428 3 3429 3428 3360 3 3360 3361 3429 3 3430 3429 3361 3 3361 3362 3430 3 3431 3430 3362 3 3362 3363 3431 3 3432 3431 3363 3 3363 3364 3432 3 3433 3432 3364 3 3364 3365 3433 3 3434 3433 3365 3 3365 3366 3434 3 3435 3434 3366 3 3366 3367 3436 3 3366 3436 3435 3 3367 3368 3437 3 3367 3437 3436 3 3368 3369 3438 3 3368 3438 3437 3 3369 3370 3439 3 3369 3439 3438 3 3370 3371 3440 3 3370 3440 3439 3 3371 3372 3441 3 3371 3441 3440 3 3372 3373 3442 3 3372 3442 3441 3 3373 3374 3443 3 3373 3443 3442 3 3374 5081 5085 3 3374 5085 3443 3 3375 3376 3445 3 3375 3445 3444 3 3375 3444 5084 3 5088 5084 3444 3 3376 3377 3446 3 3376 3446 3445 3 3377 3378 3447 3 3377 3447 3446 3 3378 3379 3448 3 3378 3448 3447 3 3379 3380 3449 3 3379 3449 3448 3 3380 3381 3450 3 3380 3450 3449 3 3381 3382 3451 3 3381 3451 3450 3 3382 3383 3452 3 3382 3452 3451 3 3383 3384 3453 3 3383 3453 3452 3 3384 3385 3454 3 3384 3454 3453 3 3385 3386 3455 3 3385 3455 3454 3 3386 3387 3456 3 3386 3456 3455 3 3387 3388 3457 3 3387 3457 3456 3 3388 3389 3458 3 3388 3458 3457 3 3389 3390 3459 3 3389 3459 3458 3 3390 3391 3460 3 3390 3460 3459 3 3391 3392 3461 3 3391 3461 3460 3 3392 3393 3462 3 3392 3462 3461 3 3393 3394 3463 3 3393 3463 3462 3 3394 3395 3464 3 3394 3464 3463 3 3395 3396 3465 3 3395 3465 3464 3 3396 3397 3466 3 3396 3466 3465 3 3397 3398 3467 3 3397 3467 3466 3 3398 3399 3468 3 3398 3468 3467 3 3399 3400 3469 3 3399 3469 3468 3 3400 3401 3470 3 3400 3470 3469 3 3401 3402 3470 3 3471 3470 3402 3 3402 3403 3471 3 3472 3471 3403 3 3403 3404 3472 3 3473 3472 3404 3 3404 3405 3473 3 3474 3473 3405 3 3405 3406 3474 3 3475 3474 3406 3 3406 3407 3475 3 3476 3475 3407 3 3407 3408 3476 3 3477 3476 3408 3 3408 3409 3477 3 3478 3477 3409 3 3409 3410 3478 
3 3479 3478 3410 3 3410 3411 3479 3 3480 3479 3411 3 3411 3412 3480 3 3481 3480 3412 3 3412 3413 3481 3 3482 3481 3413 3 3413 3414 3482 3 3483 3482 3414 3 3414 3415 3483 3 3484 3483 3415 3 3415 3416 3484 3 3485 3484 3416 3 3416 3417 3485 3 3486 3485 3417 3 3417 3418 3486 3 3487 3486 3418 3 3418 3419 3487 3 3488 3487 3419 3 3419 3420 3488 3 3489 3488 3420 3 3420 3421 3489 3 3490 3489 3421 3 3421 3422 3490 3 3491 3490 3422 3 3422 3423 3491 3 3492 3491 3423 3 3423 3424 3492 3 3493 3492 3424 3 3424 3425 3493 3 3494 3493 3425 3 3425 3426 3494 3 3495 3494 3426 3 3426 3427 3495 3 3496 3495 3427 3 3427 3428 3496 3 3497 3496 3428 3 3428 3429 3497 3 3498 3497 3429 3 3429 3430 3498 3 3499 3498 3430 3 3430 3431 3499 3 3500 3499 3431 3 3431 3432 3500 3 3501 3500 3432 3 3432 3433 3501 3 3502 3501 3433 3 3433 3434 3502 3 3503 3502 3434 3 3434 3435 3503 3 3504 3503 3435 3 3435 3436 3504 3 3505 3504 3436 3 3436 3437 3505 3 3506 3505 3437 3 3437 3438 3507 3 3437 3507 3506 3 3438 3439 3508 3 3438 3508 3507 3 3439 3440 3509 3 3439 3509 3508 3 3440 3441 3510 3 3440 3510 3509 3 3441 3442 3511 3 3441 3511 3510 3 3442 3443 3512 3 3442 3512 3511 3 3443 5085 3512 3 5089 3512 5085 3 3444 3445 3514 3 3444 3514 3513 3 3444 3513 5088 3 5092 5088 3513 3 3445 3446 3515 3 3445 3515 3514 3 3446 3447 3516 3 3446 3516 3515 3 3447 3448 3517 3 3447 3517 3516 3 3448 3449 3518 3 3448 3518 3517 3 3449 3450 3519 3 3449 3519 3518 3 3450 3451 3520 3 3450 3520 3519 3 3451 3452 3521 3 3451 3521 3520 3 3452 3453 3522 3 3452 3522 3521 3 3453 3454 3523 3 3453 3523 3522 3 3454 3455 3524 3 3454 3524 3523 3 3455 3456 3525 3 3455 3525 3524 3 3456 3457 3526 3 3456 3526 3525 3 3457 3458 3527 3 3457 3527 3526 3 3458 3459 3528 3 3458 3528 3527 3 3459 3460 3529 3 3459 3529 3528 3 3460 3461 3530 3 3460 3530 3529 3 3461 3462 3531 3 3461 3531 3530 3 3462 3463 3532 3 3462 3532 3531 3 3463 3464 3533 3 3463 3533 3532 3 3464 3465 3534 3 3464 3534 3533 3 3465 3466 3535 3 3465 3535 3534 3 3466 3467 3536 3 3466 3536 3535 3 3467 
3468 3537 3 3467 3537 3536 3 3468 3469 3538 3 3468 3538 3537 3 3469 3470 3539 3 3469 3539 3538 3 3470 3471 3540 3 3470 3540 3539 3 3471 3472 3541 3 3471 3541 3540 3 3472 3473 3542 3 3472 3542 3541 3 3473 3474 3542 3 3543 3542 3474 3 3474 3475 3543 3 3544 3543 3475 3 3475 3476 3544 3 3545 3544 3476 3 3476 3477 3545 3 3546 3545 3477 3 3477 3478 3546 3 3547 3546 3478 3 3478 3479 3547 3 3548 3547 3479 3 3479 3480 3548 3 3549 3548 3480 3 3480 3481 3549 3 3550 3549 3481 3 3481 3482 3550 3 3551 3550 3482 3 3482 3483 3551 3 3552 3551 3483 3 3483 3484 3552 3 3553 3552 3484 3 3484 3485 3553 3 3554 3553 3485 3 3485 3486 3554 3 3555 3554 3486 3 3486 3487 3555 3 3556 3555 3487 3 3487 3488 3556 3 3557 3556 3488 3 3488 3489 3557 3 3558 3557 3489 3 3489 3490 3558 3 3559 3558 3490 3 3490 3491 3559 3 3560 3559 3491 3 3491 3492 3560 3 3561 3560 3492 3 3492 3493 3561 3 3562 3561 3493 3 3493 3494 3562 3 3563 3562 3494 3 3494 3495 3563 3 3564 3563 3495 3 3495 3496 3564 3 3565 3564 3496 3 3496 3497 3565 3 3566 3565 3497 3 3497 3498 3566 3 3567 3566 3498 3 3498 3499 3567 3 3568 3567 3499 3 3499 3500 3568 3 3569 3568 3500 3 3500 3501 3569 3 3570 3569 3501 3 3501 3502 3570 3 3571 3570 3502 3 3502 3503 3571 3 3572 3571 3503 3 3503 3504 3572 3 3573 3572 3504 3 3504 3505 3573 3 3574 3573 3505 3 3505 3506 3574 3 3575 3574 3506 3 3506 3507 3575 3 3576 3575 3507 3 3507 3508 3576 3 3577 3576 3508 3 3508 3509 3577 3 3578 3577 3509 3 3509 3510 3578 3 3579 3578 3510 3 3510 3511 3580 3 3510 3580 3579 3 3511 3512 3581 3 3511 3581 3580 3 3512 5089 5093 3 3512 5093 3581 3 3513 3514 3583 3 3513 3583 3582 3 3513 3582 5092 3 5096 5092 3582 3 3514 3515 3584 3 3514 3584 3583 3 3515 3516 3585 3 3515 3585 3584 3 3516 3517 3586 3 3516 3586 3585 3 3517 3518 3587 3 3517 3587 3586 3 3518 3519 3588 3 3518 3588 3587 3 3519 3520 3589 3 3519 3589 3588 3 3520 3521 3590 3 3520 3590 3589 3 3521 3522 3591 3 3521 3591 3590 3 3522 3523 3592 3 3522 3592 3591 3 3523 3524 3593 3 3523 3593 3592 3 3524 3525 3594 3 3524 3594 3593 
3 3525 3526 3595 3 3525 3595 3594 3 3526 3527 3596 3 3526 3596 3595 3 3527 3528 3597 3 3527 3597 3596 3 3528 3529 3598 3 3528 3598 3597 3 3529 3530 3599 3 3529 3599 3598 3 3530 3531 3600 3 3530 3600 3599 3 3531 3532 3601 3 3531 3601 3600 3 3532 3533 3602 3 3532 3602 3601 3 3533 3534 3603 3 3533 3603 3602 3 3534 3535 3604 3 3534 3604 3603 3 3535 3536 3605 3 3535 3605 3604 3 3536 3537 3606 3 3536 3606 3605 3 3537 3538 3607 3 3537 3607 3606 3 3538 3539 3608 3 3538 3608 3607 3 3539 3540 3609 3 3539 3609 3608 3 3540 3541 3610 3 3540 3610 3609 3 3541 3542 3611 3 3541 3611 3610 3 3542 3543 3612 3 3542 3612 3611 3 3543 3544 3613 3 3543 3613 3612 3 3544 3545 3614 3 3544 3614 3613 3 3545 3546 3615 3 3545 3615 3614 3 3546 3547 3615 3 3616 3615 3547 3 3547 3548 3616 3 3617 3616 3548 3 3548 3549 3617 3 3618 3617 3549 3 3549 3550 3618 3 3619 3618 3550 3 3550 3551 3619 3 3620 3619 3551 3 3551 3552 3620 3 3621 3620 3552 3 3552 3553 3621 3 3622 3621 3553 3 3553 3554 3622 3 3623 3622 3554 3 3554 3555 3623 3 3624 3623 3555 3 3555 3556 3624 3 3625 3624 3556 3 3556 3557 3625 3 3626 3625 3557 3 3557 3558 3626 3 3627 3626 3558 3 3558 3559 3627 3 3628 3627 3559 3 3559 3560 3628 3 3629 3628 3560 3 3560 3561 3629 3 3630 3629 3561 3 3561 3562 3630 3 3631 3630 3562 3 3562 3563 3631 3 3632 3631 3563 3 3563 3564 3632 3 3633 3632 3564 3 3564 3565 3633 3 3634 3633 3565 3 3565 3566 3634 3 3635 3634 3566 3 3566 3567 3635 3 3636 3635 3567 3 3567 3568 3636 3 3637 3636 3568 3 3568 3569 3637 3 3638 3637 3569 3 3569 3570 3638 3 3639 3638 3570 3 3570 3571 3639 3 3640 3639 3571 3 3571 3572 3640 3 3641 3640 3572 3 3572 3573 3641 3 3642 3641 3573 3 3573 3574 3642 3 3643 3642 3574 3 3574 3575 3643 3 3644 3643 3575 3 3575 3576 3644 3 3645 3644 3576 3 3576 3577 3645 3 3646 3645 3577 3 3577 3578 3646 3 3647 3646 3578 3 3578 3579 3647 3 3648 3647 3579 3 3579 3580 3648 3 3649 3648 3580 3 3580 3581 3649 3 3650 3649 3581 3 3581 5093 3650 3 5097 3650 5093 3 3582 3583 3651 3 3652 3651 3583 3 3582 3651 5096 3 5100 
5096 3651 3 3583 3584 3653 3 3583 3653 3652 3 3584 3585 3654 3 3584 3654 3653 3 3585 3586 3655 3 3585 3655 3654 3 3586 3587 3656 3 3586 3656 3655 3 3587 3588 3657 3 3587 3657 3656 3 3588 3589 3658 3 3588 3658 3657 3 3589 3590 3659 3 3589 3659 3658 3 3590 3591 3660 3 3590 3660 3659 3 3591 3592 3661 3 3591 3661 3660 3 3592 3593 3662 3 3592 3662 3661 3 3593 3594 3663 3 3593 3663 3662 3 3594 3595 3664 3 3594 3664 3663 3 3595 3596 3665 3 3595 3665 3664 3 3596 3597 3666 3 3596 3666 3665 3 3597 3598 3667 3 3597 3667 3666 3 3598 3599 3668 3 3598 3668 3667 3 3599 3600 3669 3 3599 3669 3668 3 3600 3601 3670 3 3600 3670 3669 3 3601 3602 3671 3 3601 3671 3670 3 3602 3603 3672 3 3602 3672 3671 3 3603 3604 3673 3 3603 3673 3672 3 3604 3605 3674 3 3604 3674 3673 3 3605 3606 3675 3 3605 3675 3674 3 3606 3607 3676 3 3606 3676 3675 3 3607 3608 3677 3 3607 3677 3676 3 3608 3609 3678 3 3608 3678 3677 3 3609 3610 3679 3 3609 3679 3678 3 3610 3611 3680 3 3610 3680 3679 3 3611 3612 3681 3 3611 3681 3680 3 3612 3613 3682 3 3612 3682 3681 3 3613 3614 3683 3 3613 3683 3682 3 3614 3615 3684 3 3614 3684 3683 3 3615 3616 3685 3 3615 3685 3684 3 3616 3617 3686 3 3616 3686 3685 3 3617 3618 3687 3 3617 3687 3686 3 3618 3619 3688 3 3618 3688 3687 3 3619 3620 3689 3 3619 3689 3688 3 3620 3621 3689 3 3690 3689 3621 3 3621 3622 3690 3 3691 3690 3622 3 3622 3623 3691 3 3692 3691 3623 3 3623 3624 3692 3 3693 3692 3624 3 3624 3625 3693 3 3694 3693 3625 3 3625 3626 3694 3 3695 3694 3626 3 3626 3627 3695 3 3696 3695 3627 3 3627 3628 3696 3 3697 3696 3628 3 3628 3629 3697 3 3698 3697 3629 3 3629 3630 3698 3 3699 3698 3630 3 3630 3631 3699 3 3700 3699 3631 3 3631 3632 3700 3 3701 3700 3632 3 3632 3633 3701 3 3702 3701 3633 3 3633 3634 3702 3 3703 3702 3634 3 3634 3635 3703 3 3704 3703 3635 3 3635 3636 3704 3 3705 3704 3636 3 3636 3637 3705 3 3706 3705 3637 3 3637 3638 3706 3 3707 3706 3638 3 3638 3639 3707 3 3708 3707 3639 3 3639 3640 3708 3 3709 3708 3640 3 3640 3641 3709 3 3710 3709 3641 3 3641 3642 3710 
3 3711 3710 3642 3 3642 3643 3711 3 3712 3711 3643 3 3643 3644 3712 3 3713 3712 3644 3 3644 3645 3713 3 3714 3713 3645 3 3645 3646 3714 3 3715 3714 3646 3 3646 3647 3715 3 3716 3715 3647 3 3647 3648 3716 3 3717 3716 3648 3 3648 3649 3717 3 3718 3717 3649 3 3649 3650 3718 3 3719 3718 3650 3 3650 5097 5101 3 3650 5101 3719 3 3651 3652 3720 3 3721 3720 3652 3 3651 3720 5104 3 3651 5104 5100 3 3652 3653 3721 3 3722 3721 3653 3 3653 3654 3722 3 3723 3722 3654 3 3654 3655 3723 3 3724 3723 3655 3 3655 3656 3724 3 3725 3724 3656 3 3656 3657 3725 3 3726 3725 3657 3 3657 3658 3727 3 3657 3727 3726 3 3658 3659 3728 3 3658 3728 3727 3 3659 3660 3729 3 3659 3729 3728 3 3660 3661 3730 3 3660 3730 3729 3 3661 3662 3731 3 3661 3731 3730 3 3662 3663 3732 3 3662 3732 3731 3 3663 3664 3733 3 3663 3733 3732 3 3664 3665 3734 3 3664 3734 3733 3 3665 3666 3735 3 3665 3735 3734 3 3666 3667 3736 3 3666 3736 3735 3 3667 3668 3737 3 3667 3737 3736 3 3668 3669 3738 3 3668 3738 3737 3 3669 3670 3739 3 3669 3739 3738 3 3670 3671 3740 3 3670 3740 3739 3 3671 3672 3741 3 3671 3741 3740 3 3672 3673 3742 3 3672 3742 3741 3 3673 3674 3743 3 3673 3743 3742 3 3674 3675 3744 3 3674 3744 3743 3 3675 3676 3745 3 3675 3745 3744 3 3676 3677 3746 3 3676 3746 3745 3 3677 3678 3747 3 3677 3747 3746 3 3678 3679 3748 3 3678 3748 3747 3 3679 3680 3749 3 3679 3749 3748 3 3680 3681 3750 3 3680 3750 3749 3 3681 3682 3751 3 3681 3751 3750 3 3682 3683 3752 3 3682 3752 3751 3 3683 3684 3753 3 3683 3753 3752 3 3684 3685 3754 3 3684 3754 3753 3 3685 3686 3755 3 3685 3755 3754 3 3686 3687 3756 3 3686 3756 3755 3 3687 3688 3757 3 3687 3757 3756 3 3688 3689 3758 3 3688 3758 3757 3 3689 3690 3759 3 3689 3759 3758 3 3690 3691 3760 3 3690 3760 3759 3 3691 3692 3761 3 3691 3761 3760 3 3692 3693 3762 3 3692 3762 3761 3 3693 3694 3763 3 3693 3763 3762 3 3694 3695 3764 3 3694 3764 3763 3 3695 3696 3764 3 3765 3764 3696 3 3696 3697 3765 3 3766 3765 3697 3 3697 3698 3766 3 3767 3766 3698 3 3698 3699 3767 3 3768 3767 3699 3 3699 
3700 3768 3 3769 3768 3700 3 3700 3701 3769 3 3770 3769 3701 3 3701 3702 3770 3 3771 3770 3702 3 3702 3703 3771 3 3772 3771 3703 3 3703 3704 3772 3 3773 3772 3704 3 3704 3705 3773 3 3774 3773 3705 3 3705 3706 3774 3 3775 3774 3706 3 3706 3707 3775 3 3776 3775 3707 3 3707 3708 3776 3 3777 3776 3708 3 3708 3709 3777 3 3778 3777 3709 3 3709 3710 3778 3 3779 3778 3710 3 3710 3711 3779 3 3780 3779 3711 3 3711 3712 3780 3 3781 3780 3712 3 3712 3713 3781 3 3782 3781 3713 3 3713 3714 3782 3 3783 3782 3714 3 3714 3715 3783 3 3784 3783 3715 3 3715 3716 3784 3 3785 3784 3716 3 3716 3717 3785 3 3786 3785 3717 3 3717 3718 3786 3 3787 3786 3718 3 3718 3719 3787 3 3788 3787 3719 3 3719 5101 3788 3 5105 3788 5101 3 3720 3721 3789 3 3790 3789 3721 3 3720 3789 5108 3 3720 5108 5104 3 3721 3722 3790 3 3791 3790 3722 3 3722 3723 3791 3 3792 3791 3723 3 3723 3724 3792 3 3793 3792 3724 3 3724 3725 3793 3 3794 3793 3725 3 3725 3726 3794 3 3795 3794 3726 3 3726 3727 3795 3 3796 3795 3727 3 3727 3728 3796 3 3797 3796 3728 3 3728 3729 3797 3 3798 3797 3729 3 3729 3730 3798 3 3799 3798 3730 3 3730 3731 3799 3 3800 3799 3731 3 3731 3732 3800 3 3801 3800 3732 3 3732 3733 3802 3 3732 3802 3801 3 3733 3734 3803 3 3733 3803 3802 3 3734 3735 3804 3 3734 3804 3803 3 3735 3736 3805 3 3735 3805 3804 3 3736 3737 3806 3 3736 3806 3805 3 3737 3738 3807 3 3737 3807 3806 3 3738 3739 3808 3 3738 3808 3807 3 3739 3740 3809 3 3739 3809 3808 3 3740 3741 3810 3 3740 3810 3809 3 3741 3742 3811 3 3741 3811 3810 3 3742 3743 3812 3 3742 3812 3811 3 3743 3744 3813 3 3743 3813 3812 3 3744 3745 3814 3 3744 3814 3813 3 3745 3746 3815 3 3745 3815 3814 3 3746 3747 3816 3 3746 3816 3815 3 3747 3748 3817 3 3747 3817 3816 3 3748 3749 3818 3 3748 3818 3817 3 3749 3750 3819 3 3749 3819 3818 3 3750 3751 3820 3 3750 3820 3819 3 3751 3752 3821 3 3751 3821 3820 3 3752 3753 3822 3 3752 3822 3821 3 3753 3754 3823 3 3753 3823 3822 3 3754 3755 3824 3 3754 3824 3823 3 3755 3756 3825 3 3755 3825 3824 3 3756 3757 3826 3 3756 3826 3825 
3 3757 3758 3827 3 3757 3827 3826 3 3758 3759 3828 3 3758 3828 3827 3 3759 3760 3829 3 3759 3829 3828 3 3760 3761 3830 3 3760 3830 3829 3 3761 3762 3831 3 3761 3831 3830 3 3762 3763 3832 3 3762 3832 3831 3 3763 3764 3833 3 3763 3833 3832 3 3764 3765 3834 3 3764 3834 3833 3 3765 3766 3835 3 3765 3835 3834 3 3766 3767 3836 3 3766 3836 3835 3 3767 3768 3837 3 3767 3837 3836 3 3768 3769 3838 3 3768 3838 3837 3 3769 3770 3839 3 3769 3839 3838 3 3770 3771 3839 3 3840 3839 3771 3 3771 3772 3840 3 3841 3840 3772 3 3772 3773 3841 3 3842 3841 3773 3 3773 3774 3842 3 3843 3842 3774 3 3774 3775 3843 3 3844 3843 3775 3 3775 3776 3844 3 3845 3844 3776 3 3776 3777 3845 3 3846 3845 3777 3 3777 3778 3846 3 3847 3846 3778 3 3778 3779 3847 3 3848 3847 3779 3 3779 3780 3848 3 3849 3848 3780 3 3780 3781 3849 3 3850 3849 3781 3 3781 3782 3850 3 3851 3850 3782 3 3782 3783 3851 3 3852 3851 3783 3 3783 3784 3852 3 3853 3852 3784 3 3784 3785 3853 3 3854 3853 3785 3 3785 3786 3854 3 3855 3854 3786 3 3786 3787 3855 3 3856 3855 3787 3 3787 3788 3856 3 3857 3856 3788 3 3788 5105 5109 3 3788 5109 3857 3 3789 3790 3858 3 3859 3858 3790 3 3789 3858 5112 3 3789 5112 5108 3 3790 3791 3859 3 3860 3859 3791 3 3791 3792 3860 3 3861 3860 3792 3 3792 3793 3861 3 3862 3861 3793 3 3793 3794 3862 3 3863 3862 3794 3 3794 3795 3863 3 3864 3863 3795 3 3795 3796 3864 3 3865 3864 3796 3 3796 3797 3865 3 3866 3865 3797 3 3797 3798 3866 3 3867 3866 3798 3 3798 3799 3867 3 3868 3867 3799 3 3799 3800 3868 3 3869 3868 3800 3 3800 3801 3869 3 3870 3869 3801 3 3801 3802 3870 3 3871 3870 3802 3 3802 3803 3871 3 3872 3871 3803 3 3803 3804 3872 3 3873 3872 3804 3 3804 3805 3873 3 3874 3873 3805 3 3805 3806 3874 3 3875 3874 3806 3 3806 3807 3875 3 3876 3875 3807 3 3807 3808 3876 3 3877 3876 3808 3 3808 3809 3878 3 3808 3878 3877 3 3809 3810 3879 3 3809 3879 3878 3 3810 3811 3880 3 3810 3880 3879 3 3811 3812 3881 3 3811 3881 3880 3 3812 3813 3882 3 3812 3882 3881 3 3813 3814 3883 3 3813 3883 3882 3 3814 3815 3884 3 3814 
3884 3883 3 3815 3816 3885 3 3815 3885 3884 3 3816 3817 3886 3 3816 3886 3885 3 3817 3818 3887 3 3817 3887 3886 3 3818 3819 3888 3 3818 3888 3887 3 3819 3820 3889 3 3819 3889 3888 3 3820 3821 3890 3 3820 3890 3889 3 3821 3822 3891 3 3821 3891 3890 3 3822 3823 3892 3 3822 3892 3891 3 3823 3824 3893 3 3823 3893 3892 3 3824 3825 3894 3 3824 3894 3893 3 3825 3826 3895 3 3825 3895 3894 3 3826 3827 3896 3 3826 3896 3895 3 3827 3828 3897 3 3827 3897 3896 3 3828 3829 3898 3 3828 3898 3897 3 3829 3830 3899 3 3829 3899 3898 3 3830 3831 3900 3 3830 3900 3899 3 3831 3832 3901 3 3831 3901 3900 3 3832 3833 3902 3 3832 3902 3901 3 3833 3834 3903 3 3833 3903 3902 3 3834 3835 3904 3 3834 3904 3903 3 3835 3836 3905 3 3835 3905 3904 3 3836 3837 3906 3 3836 3906 3905 3 3837 3838 3907 3 3837 3907 3906 3 3838 3839 3908 3 3838 3908 3907 3 3839 3840 3909 3 3839 3909 3908 3 3840 3841 3910 3 3840 3910 3909 3 3841 3842 3911 3 3841 3911 3910 3 3842 3843 3912 3 3842 3912 3911 3 3843 3844 3913 3 3843 3913 3912 3 3844 3845 3914 3 3844 3914 3913 3 3845 3846 3915 3 3845 3915 3914 3 3846 3847 3915 3 3916 3915 3847 3 3847 3848 3916 3 3917 3916 3848 3 3848 3849 3917 3 3918 3917 3849 3 3849 3850 3918 3 3919 3918 3850 3 3850 3851 3919 3 3920 3919 3851 3 3851 3852 3920 3 3921 3920 3852 3 3852 3853 3921 3 3922 3921 3853 3 3853 3854 3922 3 3923 3922 3854 3 3854 3855 3923 3 3924 3923 3855 3 3855 3856 3924 3 3925 3924 3856 3 3856 3857 3925 3 3926 3925 3857 3 3857 5109 5113 3 3857 5113 3926 3 3858 3859 3927 3 3928 3927 3859 3 3858 3927 5116 3 3858 5116 5112 3 3859 3860 3928 3 3929 3928 3860 3 3860 3861 3929 3 3930 3929 3861 3 3861 3862 3930 3 3931 3930 3862 3 3862 3863 3931 3 3932 3931 3863 3 3863 3864 3932 3 3933 3932 3864 3 3864 3865 3933 3 3934 3933 3865 3 3865 3866 3934 3 3935 3934 3866 3 3866 3867 3935 3 3936 3935 3867 3 3867 3868 3936 3 3937 3936 3868 3 3868 3869 3937 3 3938 3937 3869 3 3869 3870 3938 3 3939 3938 3870 3 3870 3871 3939 3 3940 3939 3871 3 3871 3872 3940 3 3941 3940 3872 3 3872 3873 3941 
3 3942 3941 3873 3 3873 3874 3942 3 3943 3942 3874 3 3874 3875 3943 3 3944 3943 3875 3 3875 3876 3944 3 3945 3944 3876 3 3876 3877 3945 3 3946 3945 3877 3 3877 3878 3946 3 3947 3946 3878 3 3878 3879 3947 3 3948 3947 3879 3 3879 3880 3948 3 3949 3948 3880 3 3880 3881 3949 3 3950 3949 3881 3 3881 3882 3950 3 3951 3950 3882 3 3882 3883 3951 3 3952 3951 3883 3 3883 3884 3952 3 3953 3952 3884 3 3884 3885 3954 3 3884 3954 3953 3 3885 3886 3955 3 3885 3955 3954 3 3886 3887 3956 3 3886 3956 3955 3 3887 3888 3957 3 3887 3957 3956 3 3888 3889 3958 3 3888 3958 3957 3 3889 3890 3959 3 3889 3959 3958 3 3890 3891 3960 3 3890 3960 3959 3 3891 3892 3961 3 3891 3961 3960 3 3892 3893 3962 3 3892 3962 3961 3 3893 3894 3963 3 3893 3963 3962 3 3894 3895 3964 3 3894 3964 3963 3 3895 3896 3965 3 3895 3965 3964 3 3896 3897 3966 3 3896 3966 3965 3 3897 3898 3967 3 3897 3967 3966 3 3898 3899 3968 3 3898 3968 3967 3 3899 3900 3969 3 3899 3969 3968 3 3900 3901 3970 3 3900 3970 3969 3 3901 3902 3971 3 3901 3971 3970 3 3902 3903 3972 3 3902 3972 3971 3 3903 3904 3973 3 3903 3973 3972 3 3904 3905 3974 3 3904 3974 3973 3 3905 3906 3975 3 3905 3975 3974 3 3906 3907 3976 3 3906 3976 3975 3 3907 3908 3977 3 3907 3977 3976 3 3908 3909 3978 3 3908 3978 3977 3 3909 3910 3979 3 3909 3979 3978 3 3910 3911 3980 3 3910 3980 3979 3 3911 3912 3981 3 3911 3981 3980 3 3912 3913 3982 3 3912 3982 3981 3 3913 3914 3983 3 3913 3983 3982 3 3914 3915 3984 3 3914 3984 3983 3 3915 3916 3985 3 3915 3985 3984 3 3916 3917 3986 3 3916 3986 3985 3 3917 3918 3987 3 3917 3987 3986 3 3918 3919 3988 3 3918 3988 3987 3 3919 3920 3989 3 3919 3989 3988 3 3920 3921 3990 3 3920 3990 3989 3 3921 3922 3991 3 3921 3991 3990 3 3922 3923 3991 3 3992 3991 3923 3 3923 3924 3992 3 3993 3992 3924 3 3924 3925 3993 3 3994 3993 3925 3 3925 3926 3994 3 3995 3994 3926 3 3926 5113 3995 3 5117 3995 5113 3 3927 3928 3996 3 3997 3996 3928 3 3927 3996 5120 3 3927 5120 5116 3 3928 3929 3997 3 3998 3997 3929 3 3929 3930 3998 3 3999 3998 3930 3 3930 
3931 3999 3 4000 3999 3931 3 3931 3932 4000 3 4001 4000 3932 3 3932 3933 4001 3 4002 4001 3933 3 3933 3934 4002 3 4003 4002 3934 3 3934 3935 4003 3 4004 4003 3935 3 3935 3936 4004 3 4005 4004 3936 3 3936 3937 4005 3 4006 4005 3937 3 3937 3938 4006 3 4007 4006 3938 3 3938 3939 4007 3 4008 4007 3939 3 3939 3940 4008 3 4009 4008 3940 3 3940 3941 4009 3 4010 4009 3941 3 3941 3942 4010 3 4011 4010 3942 3 3942 3943 4011 3 4012 4011 3943 3 3943 3944 4012 3 4013 4012 3944 3 3944 3945 4013 3 4014 4013 3945 3 3945 3946 4014 3 4015 4014 3946 3 3946 3947 4015 3 4016 4015 3947 3 3947 3948 4016 3 4017 4016 3948 3 3948 3949 4017 3 4018 4017 3949 3 3949 3950 4018 3 4019 4018 3950 3 3950 3951 4019 3 4020 4019 3951 3 3951 3952 4020 3 4021 4020 3952 3 3952 3953 4021 3 4022 4021 3953 3 3953 3954 4022 3 4023 4022 3954 3 3954 3955 4023 3 4024 4023 3955 3 3955 3956 4024 3 4025 4024 3956 3 3956 3957 4025 3 4026 4025 3957 3 3957 3958 4026 3 4027 4026 3958 3 3958 3959 4027 3 4028 4027 3959 3 3959 3960 4028 3 4029 4028 3960 3 3960 3961 4029 3 4030 4029 3961 3 3961 3962 4031 3 3961 4031 4030 3 3962 3963 4032 3 3962 4032 4031 3 3963 3964 4033 3 3963 4033 4032 3 3964 3965 4034 3 3964 4034 4033 3 3965 3966 4035 3 3965 4035 4034 3 3966 3967 4036 3 3966 4036 4035 3 3967 3968 4037 3 3967 4037 4036 3 3968 3969 4038 3 3968 4038 4037 3 3969 3970 4039 3 3969 4039 4038 3 3970 3971 4040 3 3970 4040 4039 3 3971 3972 4041 3 3971 4041 4040 3 3972 3973 4042 3 3972 4042 4041 3 3973 3974 4043 3 3973 4043 4042 3 3974 3975 4044 3 3974 4044 4043 3 3975 3976 4045 3 3975 4045 4044 3 3976 3977 4046 3 3976 4046 4045 3 3977 3978 4047 3 3977 4047 4046 3 3978 3979 4048 3 3978 4048 4047 3 3979 3980 4049 3 3979 4049 4048 3 3980 3981 4050 3 3980 4050 4049 3 3981 3982 4051 3 3981 4051 4050 3 3982 3983 4052 3 3982 4052 4051 3 3983 3984 4053 3 3983 4053 4052 3 3984 3985 4054 3 3984 4054 4053 3 3985 3986 4055 3 3985 4055 4054 3 3986 3987 4056 3 3986 4056 4055 3 3987 3988 4057 3 3987 4057 4056 3 3988 3989 4058 3 3988 4058 4057 
3 3989 3990 4059 3 3989 4059 4058 3 3990 3991 4060 3 3990 4060 4059 3 3991 3992 4061 3 3991 4061 4060 3 3992 3993 4062 3 3992 4062 4061 3 3993 3994 4063 3 3993 4063 4062 3 3994 3995 4064 3 3994 4064 4063 3 3995 5117 5121 3 3995 5121 4064 3 3996 3997 4066 3 3996 4066 4065 3 3996 4065 5120 3 5124 5120 4065 3 3997 3998 4067 3 3997 4067 4066 3 3998 3999 4068 3 3998 4068 4067 3 3999 4000 4069 3 3999 4069 4068 3 4000 4001 4069 3 4070 4069 4001 3 4001 4002 4070 3 4071 4070 4002 3 4002 4003 4071 3 4072 4071 4003 3 4003 4004 4072 3 4073 4072 4004 3 4004 4005 4073 3 4074 4073 4005 3 4005 4006 4074 3 4075 4074 4006 3 4006 4007 4075 3 4076 4075 4007 3 4007 4008 4076 3 4077 4076 4008 3 4008 4009 4077 3 4078 4077 4009 3 4009 4010 4078 3 4079 4078 4010 3 4010 4011 4079 3 4080 4079 4011 3 4011 4012 4080 3 4081 4080 4012 3 4012 4013 4081 3 4082 4081 4013 3 4013 4014 4082 3 4083 4082 4014 3 4014 4015 4083 3 4084 4083 4015 3 4015 4016 4084 3 4085 4084 4016 3 4016 4017 4085 3 4086 4085 4017 3 4017 4018 4086 3 4087 4086 4018 3 4018 4019 4087 3 4088 4087 4019 3 4019 4020 4088 3 4089 4088 4020 3 4020 4021 4089 3 4090 4089 4021 3 4021 4022 4090 3 4091 4090 4022 3 4022 4023 4091 3 4092 4091 4023 3 4023 4024 4092 3 4093 4092 4024 3 4024 4025 4093 3 4094 4093 4025 3 4025 4026 4094 3 4095 4094 4026 3 4026 4027 4095 3 4096 4095 4027 3 4027 4028 4096 3 4097 4096 4028 3 4028 4029 4097 3 4098 4097 4029 3 4029 4030 4098 3 4099 4098 4030 3 4030 4031 4099 3 4100 4099 4031 3 4031 4032 4100 3 4101 4100 4032 3 4032 4033 4101 3 4102 4101 4033 3 4033 4034 4102 3 4103 4102 4034 3 4034 4035 4103 3 4104 4103 4035 3 4035 4036 4104 3 4105 4104 4036 3 4036 4037 4105 3 4106 4105 4037 3 4037 4038 4106 3 4107 4106 4038 3 4038 4039 4107 3 4108 4107 4039 3 4039 4040 4109 3 4039 4109 4108 3 4040 4041 4110 3 4040 4110 4109 3 4041 4042 4111 3 4041 4111 4110 3 4042 4043 4112 3 4042 4112 4111 3 4043 4044 4113 3 4043 4113 4112 3 4044 4045 4114 3 4044 4114 4113 3 4045 4046 4115 3 4045 4115 4114 3 4046 4047 4116 3 4046 
4116 4115 3 4047 4048 4117 3 4047 4117 4116 3 4048 4049 4118 3 4048 4118 4117 3 4049 4050 4119 3 4049 4119 4118 3 4050 4051 4120 3 4050 4120 4119 3 4051 4052 4121 3 4051 4121 4120 3 4052 4053 4122 3 4052 4122 4121 3 4053 4054 4123 3 4053 4123 4122 3 4054 4055 4124 3 4054 4124 4123 3 4055 4056 4125 3 4055 4125 4124 3 4056 4057 4126 3 4056 4126 4125 3 4057 4058 4127 3 4057 4127 4126 3 4058 4059 4128 3 4058 4128 4127 3 4059 4060 4129 3 4059 4129 4128 3 4060 4061 4130 3 4060 4130 4129 3 4061 4062 4131 3 4061 4131 4130 3 4062 4063 4132 3 4062 4132 4131 3 4063 4064 4133 3 4063 4133 4132 3 4064 5121 4133 3 5125 4133 5121 3 4065 4066 4135 3 4065 4135 4134 3 4065 4134 5124 3 5128 5124 4134 3 4066 4067 4136 3 4066 4136 4135 3 4067 4068 4137 3 4067 4137 4136 3 4068 4069 4138 3 4068 4138 4137 3 4069 4070 4139 3 4069 4139 4138 3 4070 4071 4140 3 4070 4140 4139 3 4071 4072 4141 3 4071 4141 4140 3 4072 4073 4142 3 4072 4142 4141 3 4073 4074 4143 3 4073 4143 4142 3 4074 4075 4144 3 4074 4144 4143 3 4075 4076 4145 3 4075 4145 4144 3 4076 4077 4146 3 4076 4146 4145 3 4077 4078 4147 3 4077 4147 4146 3 4078 4079 4147 3 4148 4147 4079 3 4079 4080 4148 3 4149 4148 4080 3 4080 4081 4149 3 4150 4149 4081 3 4081 4082 4150 3 4151 4150 4082 3 4082 4083 4151 3 4152 4151 4083 3 4083 4084 4152 3 4153 4152 4084 3 4084 4085 4153 3 4154 4153 4085 3 4085 4086 4154 3 4155 4154 4086 3 4086 4087 4155 3 4156 4155 4087 3 4087 4088 4156 3 4157 4156 4088 3 4088 4089 4157 3 4158 4157 4089 3 4089 4090 4158 3 4159 4158 4090 3 4090 4091 4159 3 4160 4159 4091 3 4091 4092 4160 3 4161 4160 4092 3 4092 4093 4161 3 4162 4161 4093 3 4093 4094 4162 3 4163 4162 4094 3 4094 4095 4163 3 4164 4163 4095 3 4095 4096 4164 3 4165 4164 4096 3 4096 4097 4165 3 4166 4165 4097 3 4097 4098 4166 3 4167 4166 4098 3 4098 4099 4167 3 4168 4167 4099 3 4099 4100 4168 3 4169 4168 4100 3 4100 4101 4169 3 4170 4169 4101 3 4101 4102 4170 3 4171 4170 4102 3 4102 4103 4171 3 4172 4171 4103 3 4103 4104 4172 3 4173 4172 4104 3 4104 4105 4173 
3 4174 4173 4105 3 4105 4106 4174 3 4175 4174 4106 3 4106 4107 4175 3 4176 4175 4107 3 4107 4108 4176 3 4177 4176 4108 3 4108 4109 4177 3 4178 4177 4109 3 4109 4110 4178 3 4179 4178 4110 3 4110 4111 4179 3 4180 4179 4111 3 4111 4112 4180 3 4181 4180 4112 3 4112 4113 4181 3 4182 4181 4113 3 4113 4114 4182 3 4183 4182 4114 3 4114 4115 4183 3 4184 4183 4115 3 4115 4116 4184 3 4185 4184 4116 3 4116 4117 4185 3 4186 4185 4117 3 4117 4118 4186 3 4187 4186 4118 3 4118 4119 4188 3 4118 4188 4187 3 4119 4120 4189 3 4119 4189 4188 3 4120 4121 4190 3 4120 4190 4189 3 4121 4122 4191 3 4121 4191 4190 3 4122 4123 4192 3 4122 4192 4191 3 4123 4124 4193 3 4123 4193 4192 3 4124 4125 4194 3 4124 4194 4193 3 4125 4126 4195 3 4125 4195 4194 3 4126 4127 4196 3 4126 4196 4195 3 4127 4128 4197 3 4127 4197 4196 3 4128 4129 4198 3 4128 4198 4197 3 4129 4130 4199 3 4129 4199 4198 3 4130 4131 4200 3 4130 4200 4199 3 4131 4132 4201 3 4131 4201 4200 3 4132 4133 4202 3 4132 4202 4201 3 4133 5125 5129 3 4133 5129 4202 3 4134 4135 4204 3 4134 4204 4203 3 4134 4203 5128 3 5132 5128 4203 3 4135 4136 4205 3 4135 4205 4204 3 4136 4137 4206 3 4136 4206 4205 3 4137 4138 4207 3 4137 4207 4206 3 4138 4139 4208 3 4138 4208 4207 3 4139 4140 4209 3 4139 4209 4208 3 4140 4141 4210 3 4140 4210 4209 3 4141 4142 4211 3 4141 4211 4210 3 4142 4143 4212 3 4142 4212 4211 3 4143 4144 4213 3 4143 4213 4212 3 4144 4145 4214 3 4144 4214 4213 3 4145 4146 4215 3 4145 4215 4214 3 4146 4147 4216 3 4146 4216 4215 3 4147 4148 4217 3 4147 4217 4216 3 4148 4149 4218 3 4148 4218 4217 3 4149 4150 4219 3 4149 4219 4218 3 4150 4151 4220 3 4150 4220 4219 3 4151 4152 4221 3 4151 4221 4220 3 4152 4153 4222 3 4152 4222 4221 3 4153 4154 4223 3 4153 4223 4222 3 4154 4155 4224 3 4154 4224 4223 3 4155 4156 4225 3 4155 4225 4224 3 4156 4157 4226 3 4156 4226 4225 3 4157 4158 4226 3 4227 4226 4158 3 4158 4159 4227 3 4228 4227 4159 3 4159 4160 4228 3 4229 4228 4160 3 4160 4161 4229 3 4230 4229 4161 3 4161 4162 4230 3 4231 4230 4162 3 4162 
4163 4231 3 4232 4231 4163 3 4163 4164 4232 3 4233 4232 4164 3 4164 4165 4233 3 4234 4233 4165 3 4165 4166 4234 3 4235 4234 4166 3 4166 4167 4235 3 4236 4235 4167 3 4167 4168 4236 3 4237 4236 4168 3 4168 4169 4237 3 4238 4237 4169 3 4169 4170 4238 3 4239 4238 4170 3 4170 4171 4239 3 4240 4239 4171 3 4171 4172 4240 3 4241 4240 4172 3 4172 4173 4241 3 4242 4241 4173 3 4173 4174 4242 3 4243 4242 4174 3 4174 4175 4243 3 4244 4243 4175 3 4175 4176 4244 3 4245 4244 4176 3 4176 4177 4245 3 4246 4245 4177 3 4177 4178 4246 3 4247 4246 4178 3 4178 4179 4247 3 4248 4247 4179 3 4179 4180 4248 3 4249 4248 4180 3 4180 4181 4249 3 4250 4249 4181 3 4181 4182 4250 3 4251 4250 4182 3 4182 4183 4251 3 4252 4251 4183 3 4183 4184 4252 3 4253 4252 4184 3 4184 4185 4253 3 4254 4253 4185 3 4185 4186 4254 3 4255 4254 4186 3 4186 4187 4255 3 4256 4255 4187 3 4187 4188 4256 3 4257 4256 4188 3 4188 4189 4257 3 4258 4257 4189 3 4189 4190 4258 3 4259 4258 4190 3 4190 4191 4259 3 4260 4259 4191 3 4191 4192 4260 3 4261 4260 4192 3 4192 4193 4261 3 4262 4261 4193 3 4193 4194 4262 3 4263 4262 4194 3 4194 4195 4263 3 4264 4263 4195 3 4195 4196 4264 3 4265 4264 4196 3 4196 4197 4265 3 4266 4265 4197 3 4197 4198 4267 3 4197 4267 4266 3 4198 4199 4268 3 4198 4268 4267 3 4199 4200 4269 3 4199 4269 4268 3 4200 4201 4270 3 4200 4270 4269 3 4201 4202 4271 3 4201 4271 4270 3 4202 5129 5133 3 4202 5133 4271 3 4203 4204 4273 3 4203 4273 4272 3 4203 4272 5132 3 5136 5132 4272 3 4204 4205 4274 3 4204 4274 4273 3 4205 4206 4275 3 4205 4275 4274 3 4206 4207 4276 3 4206 4276 4275 3 4207 4208 4277 3 4207 4277 4276 3 4208 4209 4278 3 4208 4278 4277 3 4209 4210 4279 3 4209 4279 4278 3 4210 4211 4280 3 4210 4280 4279 3 4211 4212 4281 3 4211 4281 4280 3 4212 4213 4282 3 4212 4282 4281 3 4213 4214 4283 3 4213 4283 4282 3 4214 4215 4284 3 4214 4284 4283 3 4215 4216 4285 3 4215 4285 4284 3 4216 4217 4286 3 4216 4286 4285 3 4217 4218 4287 3 4217 4287 4286 3 4218 4219 4288 3 4218 4288 4287 3 4219 4220 4289 3 4219 4289 4288 
3 4220 4221 4290 3 4220 4290 4289 3 4221 4222 4291 3 4221 4291 4290 3 4222 4223 4292 3 4222 4292 4291 3 4223 4224 4293 3 4223 4293 4292 3 4224 4225 4294 3 4224 4294 4293 3 4225 4226 4295 3 4225 4295 4294 3 4226 4227 4296 3 4226 4296 4295 3 4227 4228 4297 3 4227 4297 4296 3 4228 4229 4298 3 4228 4298 4297 3 4229 4230 4299 3 4229 4299 4298 3 4230 4231 4300 3 4230 4300 4299 3 4231 4232 4301 3 4231 4301 4300 3 4232 4233 4302 3 4232 4302 4301 3 4233 4234 4303 3 4233 4303 4302 3 4234 4235 4304 3 4234 4304 4303 3 4235 4236 4305 3 4235 4305 4304 3 4236 4237 4306 3 4236 4306 4305 3 4237 4238 4306 3 4307 4306 4238 3 4238 4239 4307 3 4308 4307 4239 3 4239 4240 4308 3 4309 4308 4240 3 4240 4241 4309 3 4310 4309 4241 3 4241 4242 4310 3 4311 4310 4242 3 4242 4243 4311 3 4312 4311 4243 3 4243 4244 4312 3 4313 4312 4244 3 4244 4245 4313 3 4314 4313 4245 3 4245 4246 4314 3 4315 4314 4246 3 4246 4247 4315 3 4316 4315 4247 3 4247 4248 4316 3 4317 4316 4248 3 4248 4249 4317 3 4318 4317 4249 3 4249 4250 4318 3 4319 4318 4250 3 4250 4251 4319 3 4320 4319 4251 3 4251 4252 4320 3 4321 4320 4252 3 4252 4253 4321 3 4322 4321 4253 3 4253 4254 4322 3 4323 4322 4254 3 4254 4255 4323 3 4324 4323 4255 3 4255 4256 4324 3 4325 4324 4256 3 4256 4257 4325 3 4326 4325 4257 3 4257 4258 4326 3 4327 4326 4258 3 4258 4259 4327 3 4328 4327 4259 3 4259 4260 4328 3 4329 4328 4260 3 4260 4261 4329 3 4330 4329 4261 3 4261 4262 4330 3 4331 4330 4262 3 4262 4263 4331 3 4332 4331 4263 3 4263 4264 4332 3 4333 4332 4264 3 4264 4265 4333 3 4334 4333 4265 3 4265 4266 4334 3 4335 4334 4266 3 4266 4267 4335 3 4336 4335 4267 3 4267 4268 4336 3 4337 4336 4268 3 4268 4269 4337 3 4338 4337 4269 3 4269 4270 4338 3 4339 4338 4270 3 4270 4271 4339 3 4340 4339 4271 3 4271 5133 4340 3 5137 4340 5133 3 4272 4273 4341 3 4342 4341 4273 3 4272 4341 5140 3 4272 5140 5136 3 4273 4274 4342 3 4343 4342 4274 3 4274 4275 4343 3 4344 4343 4275 3 4275 4276 4344 3 4345 4344 4276 3 4276 4277 4345 3 4346 4345 4277 3 4277 4278 4347 3 4277 
4347 4346 3 4278 4279 4348 3 4278 4348 4347 3 4279 4280 4349 3 4279 4349 4348 3 4280 4281 4350 3 4280 4350 4349 3 4281 4282 4351 3 4281 4351 4350 3 4282 4283 4352 3 4282 4352 4351 3 4283 4284 4353 3 4283 4353 4352 3 4284 4285 4354 3 4284 4354 4353 3 4285 4286 4355 3 4285 4355 4354 3 4286 4287 4356 3 4286 4356 4355 3 4287 4288 4357 3 4287 4357 4356 3 4288 4289 4358 3 4288 4358 4357 3 4289 4290 4359 3 4289 4359 4358 3 4290 4291 4360 3 4290 4360 4359 3 4291 4292 4361 3 4291 4361 4360 3 4292 4293 4362 3 4292 4362 4361 3 4293 4294 4363 3 4293 4363 4362 3 4294 4295 4364 3 4294 4364 4363 3 4295 4296 4365 3 4295 4365 4364 3 4296 4297 4366 3 4296 4366 4365 3 4297 4298 4367 3 4297 4367 4366 3 4298 4299 4368 3 4298 4368 4367 3 4299 4300 4369 3 4299 4369 4368 3 4300 4301 4370 3 4300 4370 4369 3 4301 4302 4371 3 4301 4371 4370 3 4302 4303 4372 3 4302 4372 4371 3 4303 4304 4373 3 4303 4373 4372 3 4304 4305 4374 3 4304 4374 4373 3 4305 4306 4375 3 4305 4375 4374 3 4306 4307 4376 3 4306 4376 4375 3 4307 4308 4377 3 4307 4377 4376 3 4308 4309 4378 3 4308 4378 4377 3 4309 4310 4379 3 4309 4379 4378 3 4310 4311 4380 3 4310 4380 4379 3 4311 4312 4381 3 4311 4381 4380 3 4312 4313 4382 3 4312 4382 4381 3 4313 4314 4383 3 4313 4383 4382 3 4314 4315 4384 3 4314 4384 4383 3 4315 4316 4385 3 4315 4385 4384 3 4316 4317 4386 3 4316 4386 4385 3 4317 4318 4386 3 4387 4386 4318 3 4318 4319 4387 3 4388 4387 4319 3 4319 4320 4388 3 4389 4388 4320 3 4320 4321 4389 3 4390 4389 4321 3 4321 4322 4390 3 4391 4390 4322 3 4322 4323 4391 3 4392 4391 4323 3 4323 4324 4392 3 4393 4392 4324 3 4324 4325 4393 3 4394 4393 4325 3 4325 4326 4394 3 4395 4394 4326 3 4326 4327 4395 3 4396 4395 4327 3 4327 4328 4396 3 4397 4396 4328 3 4328 4329 4397 3 4398 4397 4329 3 4329 4330 4398 3 4399 4398 4330 3 4330 4331 4399 3 4400 4399 4331 3 4331 4332 4400 3 4401 4400 4332 3 4332 4333 4401 3 4402 4401 4333 3 4333 4334 4402 3 4403 4402 4334 3 4334 4335 4403 3 4404 4403 4335 3 4335 4336 4404 3 4405 4404 4336 3 4336 4337 4405 
3 4406 4405 4337 3 4337 4338 4406 3 4407 4406 4338 3 4338 4339 4407 3 4408 4407 4339 3 4339 4340 4408 3 4409 4408 4340 3 4340 5137 5141 3 4340 5141 4409 3 4341 4342 4410 3 4411 4410 4342 3 4341 4410 5144 3 4341 5144 5140 3 4342 4343 4411 3 4412 4411 4343 3 4343 4344 4412 3 4413 4412 4344 3 4344 4345 4413 3 4414 4413 4345 3 4345 4346 4414 3 4415 4414 4346 3 4346 4347 4415 3 4416 4415 4347 3 4347 4348 4416 3 4417 4416 4348 3 4348 4349 4417 3 4418 4417 4349 3 4349 4350 4418 3 4419 4418 4350 3 4350 4351 4419 3 4420 4419 4351 3 4351 4352 4420 3 4421 4420 4352 3 4352 4353 4421 3 4422 4421 4353 3 4353 4354 4422 3 4423 4422 4354 3 4354 4355 4423 3 4424 4423 4355 3 4355 4356 4424 3 4425 4424 4356 3 4356 4357 4425 3 4426 4425 4357 3 4357 4358 4426 3 4427 4426 4358 3 4358 4359 4428 3 4358 4428 4427 3 4359 4360 4429 3 4359 4429 4428 3 4360 4361 4430 3 4360 4430 4429 3 4361 4362 4431 3 4361 4431 4430 3 4362 4363 4432 3 4362 4432 4431 3 4363 4364 4433 3 4363 4433 4432 3 4364 4365 4434 3 4364 4434 4433 3 4365 4366 4435 3 4365 4435 4434 3 4366 4367 4436 3 4366 4436 4435 3 4367 4368 4437 3 4367 4437 4436 3 4368 4369 4438 3 4368 4438 4437 3 4369 4370 4439 3 4369 4439 4438 3 4370 4371 4440 3 4370 4440 4439 3 4371 4372 4441 3 4371 4441 4440 3 4372 4373 4442 3 4372 4442 4441 3 4373 4374 4443 3 4373 4443 4442 3 4374 4375 4444 3 4374 4444 4443 3 4375 4376 4445 3 4375 4445 4444 3 4376 4377 4446 3 4376 4446 4445 3 4377 4378 4447 3 4377 4447 4446 3 4378 4379 4448 3 4378 4448 4447 3 4379 4380 4449 3 4379 4449 4448 3 4380 4381 4450 3 4380 4450 4449 3 4381 4382 4451 3 4381 4451 4450 3 4382 4383 4452 3 4382 4452 4451 3 4383 4384 4453 3 4383 4453 4452 3 4384 4385 4454 3 4384 4454 4453 3 4385 4386 4455 3 4385 4455 4454 3 4386 4387 4456 3 4386 4456 4455 3 4387 4388 4457 3 4387 4457 4456 3 4388 4389 4458 3 4388 4458 4457 3 4389 4390 4459 3 4389 4459 4458 3 4390 4391 4460 3 4390 4460 4459 3 4391 4392 4461 3 4391 4461 4460 3 4392 4393 4462 3 4392 4462 4461 3 4393 4394 4463 3 4393 4463 4462 3 4394 
4395 4464 3 4394 4464 4463 3 4395 4396 4465 3 4395 4465 4464 3 4396 4397 4466 3 4396 4466 4465 3 4397 4398 4467 3 4397 4467 4466 3 4398 4399 4468 3 4398 4468 4467 3 4399 4400 4468 3 4469 4468 4400 3 4400 4401 4469 3 4470 4469 4401 3 4401 4402 4470 3 4471 4470 4402 3 4402 4403 4471 3 4472 4471 4403 3 4403 4404 4472 3 4473 4472 4404 3 4404 4405 4473 3 4474 4473 4405 3 4405 4406 4474 3 4475 4474 4406 3 4406 4407 4475 3 4476 4475 4407 3 4407 4408 4476 3 4477 4476 4408 3 4408 4409 4477 3 4478 4477 4409 3 4409 5141 4478 3 5145 4478 5141 3 4410 4411 4479 3 4480 4479 4411 3 4410 4479 5148 3 4410 5148 5144 3 4411 4412 4480 3 4481 4480 4412 3 4412 4413 4481 3 4482 4481 4413 3 4413 4414 4482 3 4483 4482 4414 3 4414 4415 4483 3 4484 4483 4415 3 4415 4416 4484 3 4485 4484 4416 3 4416 4417 4485 3 4486 4485 4417 3 4417 4418 4486 3 4487 4486 4418 3 4418 4419 4487 3 4488 4487 4419 3 4419 4420 4488 3 4489 4488 4420 3 4420 4421 4489 3 4490 4489 4421 3 4421 4422 4490 3 4491 4490 4422 3 4422 4423 4491 3 4492 4491 4423 3 4423 4424 4492 3 4493 4492 4424 3 4424 4425 4493 3 4494 4493 4425 3 4425 4426 4494 3 4495 4494 4426 3 4426 4427 4495 3 4496 4495 4427 3 4427 4428 4496 3 4497 4496 4428 3 4428 4429 4497 3 4498 4497 4429 3 4429 4430 4498 3 4499 4498 4430 3 4430 4431 4499 3 4500 4499 4431 3 4431 4432 4500 3 4501 4500 4432 3 4432 4433 4501 3 4502 4501 4433 3 4433 4434 4502 3 4503 4502 4434 3 4434 4435 4503 3 4504 4503 4435 3 4435 4436 4504 3 4505 4504 4436 3 4436 4437 4505 3 4506 4505 4437 3 4437 4438 4506 3 4507 4506 4438 3 4438 4439 4507 3 4508 4507 4439 3 4439 4440 4508 3 4509 4508 4440 3 4440 4441 4510 3 4440 4510 4509 3 4441 4442 4511 3 4441 4511 4510 3 4442 4443 4512 3 4442 4512 4511 3 4443 4444 4513 3 4443 4513 4512 3 4444 4445 4514 3 4444 4514 4513 3 4445 4446 4515 3 4445 4515 4514 3 4446 4447 4516 3 4446 4516 4515 3 4447 4448 4517 3 4447 4517 4516 3 4448 4449 4518 3 4448 4518 4517 3 4449 4450 4519 3 4449 4519 4518 3 4450 4451 4520 3 4450 4520 4519 3 4451 4452 4521 3 4451 4521 4520 
3 4452 4453 4522 3 4452 4522 4521 3 4453 4454 4523 3 4453 4523 4522 3 4454 4455 4524 3 4454 4524 4523 3 4455 4456 4525 3 4455 4525 4524 3 4456 4457 4526 3 4456 4526 4525 3 4457 4458 4527 3 4457 4527 4526 3 4458 4459 4528 3 4458 4528 4527 3 4459 4460 4529 3 4459 4529 4528 3 4460 4461 4530 3 4460 4530 4529 3 4461 4462 4531 3 4461 4531 4530 3 4462 4463 4532 3 4462 4532 4531 3 4463 4464 4533 3 4463 4533 4532 3 4464 4465 4534 3 4464 4534 4533 3 4465 4466 4535 3 4465 4535 4534 3 4466 4467 4536 3 4466 4536 4535 3 4467 4468 4537 3 4467 4537 4536 3 4468 4469 4538 3 4468 4538 4537 3 4469 4470 4539 3 4469 4539 4538 3 4470 4471 4540 3 4470 4540 4539 3 4471 4472 4541 3 4471 4541 4540 3 4472 4473 4542 3 4472 4542 4541 3 4473 4474 4543 3 4473 4543 4542 3 4474 4475 4544 3 4474 4544 4543 3 4475 4476 4545 3 4475 4545 4544 3 4476 4477 4546 3 4476 4546 4545 3 4477 4478 4547 3 4477 4547 4546 3 4478 5145 4547 3 5149 4547 5145 3 4479 4480 4549 3 4479 4549 4548 3 4479 4548 5148 3 5152 5148 4548 3 4480 4481 4550 3 4480 4550 4549 3 4481 4482 4550 3 4551 4550 4482 3 4482 4483 4551 3 4552 4551 4483 3 4483 4484 4552 3 4553 4552 4484 3 4484 4485 4553 3 4554 4553 4485 3 4485 4486 4554 3 4555 4554 4486 3 4486 4487 4555 3 4556 4555 4487 3 4487 4488 4556 3 4557 4556 4488 3 4488 4489 4557 3 4558 4557 4489 3 4489 4490 4558 3 4559 4558 4490 3 4490 4491 4559 3 4560 4559 4491 3 4491 4492 4560 3 4561 4560 4492 3 4492 4493 4561 3 4562 4561 4493 3 4493 4494 4562 3 4563 4562 4494 3 4494 4495 4563 3 4564 4563 4495 3 4495 4496 4564 3 4565 4564 4496 3 4496 4497 4565 3 4566 4565 4497 3 4497 4498 4566 3 4567 4566 4498 3 4498 4499 4567 3 4568 4567 4499 3 4499 4500 4568 3 4569 4568 4500 3 4500 4501 4569 3 4570 4569 4501 3 4501 4502 4570 3 4571 4570 4502 3 4502 4503 4571 3 4572 4571 4503 3 4503 4504 4572 3 4573 4572 4504 3 4504 4505 4573 3 4574 4573 4505 3 4505 4506 4574 3 4575 4574 4506 3 4506 4507 4575 3 4576 4575 4507 3 4507 4508 4576 3 4577 4576 4508 3 4508 4509 4577 3 4578 4577 4509 3 4509 4510 4578 3 4579 
4578 4510 3 4510 4511 4579 3 4580 4579 4511 3 4511 4512 4580 3 4581 4580 4512 3 4512 4513 4581 3 4582 4581 4513 3 4513 4514 4582 3 4583 4582 4514 3 4514 4515 4583 3 4584 4583 4515 3 4515 4516 4584 3 4585 4584 4516 3 4516 4517 4585 3 4586 4585 4517 3 4517 4518 4586 3 4587 4586 4518 3 4518 4519 4587 3 4588 4587 4519 3 4519 4520 4588 3 4589 4588 4520 3 4520 4521 4589 3 4590 4589 4521 3 4521 4522 4590 3 4591 4590 4522 3 4522 4523 4592 3 4522 4592 4591 3 4523 4524 4593 3 4523 4593 4592 3 4524 4525 4594 3 4524 4594 4593 3 4525 4526 4595 3 4525 4595 4594 3 4526 4527 4596 3 4526 4596 4595 3 4527 4528 4597 3 4527 4597 4596 3 4528 4529 4598 3 4528 4598 4597 3 4529 4530 4599 3 4529 4599 4598 3 4530 4531 4600 3 4530 4600 4599 3 4531 4532 4601 3 4531 4601 4600 3 4532 4533 4602 3 4532 4602 4601 3 4533 4534 4603 3 4533 4603 4602 3 4534 4535 4604 3 4534 4604 4603 3 4535 4536 4605 3 4535 4605 4604 3 4536 4537 4606 3 4536 4606 4605 3 4537 4538 4607 3 4537 4607 4606 3 4538 4539 4608 3 4538 4608 4607 3 4539 4540 4609 3 4539 4609 4608 3 4540 4541 4610 3 4540 4610 4609 3 4541 4542 4611 3 4541 4611 4610 3 4542 4543 4612 3 4542 4612 4611 3 4543 4544 4613 3 4543 4613 4612 3 4544 4545 4614 3 4544 4614 4613 3 4545 4546 4615 3 4545 4615 4614 3 4546 4547 4616 3 4546 4616 4615 3 4547 5149 5155 3 4547 5155 4616 3 4548 4549 5153 3 4548 5153 5152 3 4549 4550 4618 3 4549 4618 4617 3 4549 4617 5153 3 5159 5153 4617 3 4550 4551 4619 3 4550 4619 4618 3 4551 4552 4620 3 4551 4620 4619 3 4552 4553 4621 3 4552 4621 4620 3 4553 4554 4622 3 4553 4622 4621 3 4554 4555 4623 3 4554 4623 4622 3 4555 4556 4624 3 4555 4624 4623 3 4556 4557 4625 3 4556 4625 4624 3 4557 4558 4626 3 4557 4626 4625 3 4558 4559 4627 3 4558 4627 4626 3 4559 4560 4628 3 4559 4628 4627 3 4560 4561 4629 3 4560 4629 4628 3 4561 4562 4630 3 4561 4630 4629 3 4562 4563 4631 3 4562 4631 4630 3 4563 4564 4632 3 4563 4632 4631 3 4564 4565 4632 3 4633 4632 4565 3 4565 4566 4633 3 4634 4633 4566 3 4566 4567 4634 3 4635 4634 4567 3 4567 4568 4635 
3 4636 4635 4568 3 4568 4569 4636 3 4637 4636 4569 3 4569 4570 4637 3 4638 4637 4570 3 4570 4571 4638 3 4639 4638 4571 3 4571 4572 4639 3 4640 4639 4572 3 4572 4573 4640 3 4641 4640 4573 3 4573 4574 4641 3 4642 4641 4574 3 4574 4575 4642 3 4643 4642 4575 3 4575 4576 4643 3 4644 4643 4576 3 4576 4577 4644 3 4645 4644 4577 3 4577 4578 4645 3 4646 4645 4578 3 4578 4579 4646 3 4647 4646 4579 3 4579 4580 4647 3 4648 4647 4580 3 4580 4581 4648 3 4649 4648 4581 3 4581 4582 4649 3 4650 4649 4582 3 4582 4583 4650 3 4651 4650 4583 3 4583 4584 4651 3 4652 4651 4584 3 4584 4585 4652 3 4653 4652 4585 3 4585 4586 4653 3 4654 4653 4586 3 4586 4587 4654 3 4655 4654 4587 3 4587 4588 4655 3 4656 4655 4588 3 4588 4589 4656 3 4657 4656 4589 3 4589 4590 4657 3 4658 4657 4590 3 4590 4591 4658 3 4659 4658 4591 3 4591 4592 4659 3 4660 4659 4592 3 4592 4593 4660 3 4661 4660 4593 3 4593 4594 4661 3 4662 4661 4594 3 4594 4595 4662 3 4663 4662 4595 3 4595 4596 4663 3 4664 4663 4596 3 4596 4597 4664 3 4665 4664 4597 3 4597 4598 4665 3 4666 4665 4598 3 4598 4599 4666 3 4667 4666 4599 3 4599 4600 4667 3 4668 4667 4600 3 4600 4601 4668 3 4669 4668 4601 3 4601 4602 4669 3 4670 4669 4602 3 4602 4603 4670 3 4671 4670 4603 3 4603 4604 4671 3 4672 4671 4604 3 4604 4605 4672 3 4673 4672 4605 3 4605 4606 4674 3 4605 4674 4673 3 4606 4607 4675 3 4606 4675 4674 3 4607 4608 4676 3 4607 4676 4675 3 4608 4609 4677 3 4608 4677 4676 3 4609 4610 4678 3 4609 4678 4677 3 4610 4611 4679 3 4610 4679 4678 3 4611 4612 4680 3 4611 4680 4679 3 4612 4613 4681 3 4612 4681 4680 3 4613 4614 4682 3 4613 4682 4681 3 4614 4615 4683 3 4614 4683 4682 3 4615 4616 5155 3 4615 5155 5154 3 4615 5154 4683 3 5162 4683 5154 3 4617 4618 5160 3 4617 5160 5159 3 4618 4619 4685 3 4618 4685 4684 3 4618 4684 5160 3 5167 5160 4684 3 4619 4620 4686 3 4619 4686 4685 3 4620 4621 4687 3 4620 4687 4686 3 4621 4622 4688 3 4621 4688 4687 3 4622 4623 4689 3 4622 4689 4688 3 4623 4624 4690 3 4623 4690 4689 3 4624 4625 4691 3 4624 4691 4690 3 4625 
4626 4692 3 4625 4692 4691 3 4626 4627 4693 3 4626 4693 4692 3 4627 4628 4694 3 4627 4694 4693 3 4628 4629 4695 3 4628 4695 4694 3 4629 4630 4696 3 4629 4696 4695 3 4630 4631 4697 3 4630 4697 4696 3 4631 4632 4698 3 4631 4698 4697 3 4632 4633 4699 3 4632 4699 4698 3 4633 4634 4700 3 4633 4700 4699 3 4634 4635 4701 3 4634 4701 4700 3 4635 4636 4702 3 4635 4702 4701 3 4636 4637 4703 3 4636 4703 4702 3 4637 4638 4704 3 4637 4704 4703 3 4638 4639 4705 3 4638 4705 4704 3 4639 4640 4706 3 4639 4706 4705 3 4640 4641 4707 3 4640 4707 4706 3 4641 4642 4708 3 4641 4708 4707 3 4642 4643 4709 3 4642 4709 4708 3 4643 4644 4710 3 4643 4710 4709 3 4644 4645 4711 3 4644 4711 4710 3 4645 4646 4712 3 4645 4712 4711 3 4646 4647 4713 3 4646 4713 4712 3 4647 4648 4713 3 4714 4713 4648 3 4648 4649 4714 3 4715 4714 4649 3 4649 4650 4715 3 4716 4715 4650 3 4650 4651 4716 3 4717 4716 4651 3 4651 4652 4717 3 4718 4717 4652 3 4652 4653 4718 3 4719 4718 4653 3 4653 4654 4719 3 4720 4719 4654 3 4654 4655 4720 3 4721 4720 4655 3 4655 4656 4721 3 4722 4721 4656 3 4656 4657 4722 3 4723 4722 4657 3 4657 4658 4723 3 4724 4723 4658 3 4658 4659 4724 3 4725 4724 4659 3 4659 4660 4725 3 4726 4725 4660 3 4660 4661 4726 3 4727 4726 4661 3 4661 4662 4727 3 4728 4727 4662 3 4662 4663 4728 3 4729 4728 4663 3 4663 4664 4729 3 4730 4729 4664 3 4664 4665 4730 3 4731 4730 4665 3 4665 4666 4731 3 4732 4731 4666 3 4666 4667 4732 3 4733 4732 4667 3 4667 4668 4733 3 4734 4733 4668 3 4668 4669 4734 3 4735 4734 4669 3 4669 4670 4735 3 4736 4735 4670 3 4670 4671 4736 3 4737 4736 4671 3 4671 4672 4737 3 4738 4737 4672 3 4672 4673 4738 3 4739 4738 4673 3 4673 4674 4739 3 4740 4739 4674 3 4674 4675 4740 3 4741 4740 4675 3 4675 4676 4741 3 4742 4741 4676 3 4676 4677 4742 3 4743 4742 4677 3 4677 4678 4743 3 4744 4743 4678 3 4678 4679 4744 3 4745 4744 4679 3 4679 4680 4745 3 4746 4745 4680 3 4680 4681 4746 3 4747 4746 4681 3 4681 4682 4747 3 4748 4747 4682 3 4682 4683 5161 3 5162 5161 4683 3 4682 5161 5231 3 4682 5231 4748 
3 4684 4685 5167 3 5168 5167 4685 3 4685 4686 5168 3 5169 5168 4686 3 4686 4687 5169 3 5170 5169 4687 3 4687 4688 5170 3 5171 5170 4688 3 4688 4689 5171 3 5172 5171 4689 3 4689 4690 5173 3 4689 5173 5172 3 4690 4691 5174 3 4690 5174 5173 3 4691 4692 5175 3 4691 5175 5174 3 4692 4693 5176 3 4692 5176 5175 3 4693 4694 5177 3 4693 5177 5176 3 4694 4695 5178 3 4694 5178 5177 3 4695 4696 5179 3 4695 5179 5178 3 4696 4697 5180 3 4696 5180 5179 3 4697 4698 5181 3 4697 5181 5180 3 4698 4699 5182 3 4698 5182 5181 3 4699 4700 5183 3 4699 5183 5182 3 4700 4701 5184 3 4700 5184 5183 3 4701 4702 5185 3 4701 5185 5184 3 4702 4703 5186 3 4702 5186 5185 3 4703 4704 5187 3 4703 5187 5186 3 4704 4705 5188 3 4704 5188 5187 3 4705 4706 5189 3 4705 5189 5188 3 4706 4707 5190 3 4706 5190 5189 3 4707 4708 5191 3 4707 5191 5190 3 4708 4709 5192 3 4708 5192 5191 3 4709 4710 5193 3 4709 5193 5192 3 4710 4711 5194 3 4710 5194 5193 3 4711 4712 5195 3 4711 5195 5194 3 4712 4713 5196 3 4712 5196 5195 3 4713 4714 5197 3 4713 5197 5196 3 4714 4715 5198 3 4714 5198 5197 3 4715 4716 5199 3 4715 5199 5198 3 4716 4717 5200 3 4716 5200 5199 3 4717 4718 5201 3 4717 5201 5200 3 4718 4719 5202 3 4718 5202 5201 3 4719 4720 5203 3 4719 5203 5202 3 4720 4721 5204 3 4720 5204 5203 3 4721 4722 5205 3 4721 5205 5204 3 4722 4723 5206 3 4722 5206 5205 3 4723 4724 5207 3 4723 5207 5206 3 4724 4725 5208 3 4724 5208 5207 3 4725 4726 5209 3 4725 5209 5208 3 4726 4727 5210 3 4726 5210 5209 3 4727 4728 5211 3 4727 5211 5210 3 4728 4729 5212 3 4728 5212 5211 3 4729 4730 5213 3 4729 5213 5212 3 4730 4731 5214 3 4730 5214 5213 3 4731 4732 5214 3 5215 5214 4732 3 4732 4733 5215 3 5216 5215 4733 3 4733 4734 5216 3 5217 5216 4734 3 4734 4735 5217 3 5218 5217 4735 3 4735 4736 5218 3 5219 5218 4736 3 4736 4737 5219 3 5220 5219 4737 3 4737 4738 5220 3 5221 5220 4738 3 4738 4739 5221 3 5222 5221 4739 3 4739 4740 5222 3 5223 5222 4740 3 4740 4741 5223 3 5224 5223 4741 3 4741 4742 5224 3 5225 5224 4742 3 4742 4743 5225 3 5226 
5225 4743 3 4743 4744 5226 3 5227 5226 4744 3 4744 4745 5227 3 5228 5227 4745 3 4745 4746 5228 3 5229 5228 4746 3 4746 4747 5229 3 5230 5229 4747 3 4747 4748 5230 3 5231 5230 4748 3 4749 4750 4817 3 4818 4817 4750 3 4749 5367 4750 3 5368 4750 5367 3 4749 4817 5437 3 4749 5437 5367 3 4750 4751 4818 3 4819 4818 4751 3 4750 5368 4751 3 5369 4751 5368 3 4751 4752 4819 3 4820 4819 4752 3 4751 5369 4752 3 5370 4752 5369 3 4752 4753 4820 3 4821 4820 4753 3 4752 5370 4753 3 5371 4753 5370 3 4753 4754 4821 3 4822 4821 4754 3 4753 5371 4754 3 5372 4754 5371 3 4754 4755 4822 3 4823 4822 4755 3 4754 5372 4755 3 5373 4755 5372 3 4755 4756 4823 3 4824 4823 4756 3 4755 5373 4756 3 5374 4756 5373 3 4756 4757 4824 3 4825 4824 4757 3 4756 5374 4757 3 5375 4757 5374 3 4757 4758 4825 3 4826 4825 4758 3 4757 5375 4758 3 5376 4758 5375 3 4758 4759 4826 3 4827 4826 4759 3 4758 5376 4759 3 5377 4759 5376 3 4759 4760 4827 3 4828 4827 4760 3 4759 5377 4760 3 5378 4760 5377 3 4760 4761 4828 3 4829 4828 4761 3 4760 5378 4761 3 5379 4761 5378 3 4761 4762 4829 3 4830 4829 4762 3 4761 5379 4762 3 5380 4762 5379 3 4762 4763 4830 3 4831 4830 4763 3 4762 5380 4763 3 5381 4763 5380 3 4763 4764 4831 3 4832 4831 4764 3 4763 5381 4764 3 5382 4764 5381 3 4764 4765 4832 3 4833 4832 4765 3 4764 5382 4765 3 5383 4765 5382 3 4765 4766 4833 3 4834 4833 4766 3 4765 5383 4766 3 5384 4766 5383 3 4766 4767 4834 3 4835 4834 4767 3 4766 5384 4767 3 5385 4767 5384 3 4767 4768 4835 3 4836 4835 4768 3 4767 5385 4768 3 5386 4768 5385 3 4768 4769 4836 3 4837 4836 4769 3 4768 5386 4769 3 5387 4769 5386 3 4769 4770 4837 3 4838 4837 4770 3 4769 5387 5388 3 4769 5388 4770 3 4770 4771 4838 3 4839 4838 4771 3 4770 5388 5389 3 4770 5389 4771 3 4771 4772 4839 3 4840 4839 4772 3 4771 5389 5390 3 4771 5390 4772 3 4772 4773 4840 3 4841 4840 4773 3 4772 5390 5391 3 4772 5391 4773 3 4773 4774 4841 3 4842 4841 4774 3 4773 5391 5392 3 4773 5392 4774 3 4774 4775 4843 3 4774 4843 4842 3 4774 5392 5393 3 4774 5393 4775 3 4775 4776 4844 
3 4775 4844 4843 3 4775 5393 5394 3 4775 5394 4776 3 4776 4777 4845 3 4776 4845 4844 3 4776 5394 5395 3 4776 5395 4777 3 4777 4778 4846 3 4777 4846 4845 3 4777 5395 5396 3 4777 5396 4778 3 4778 4779 4847 3 4778 4847 4846 3 4778 5396 5397 3 4778 5397 4779 3 4779 4780 4848 3 4779 4848 4847 3 4779 5397 5398 3 4779 5398 4780 3 4780 4781 4849 3 4780 4849 4848 3 4780 5398 5399 3 4780 5399 4781 3 4781 4782 4850 3 4781 4850 4849 3 4781 5399 5400 3 4781 5400 4782 3 4782 4783 4851 3 4782 4851 4850 3 4782 5400 5401 3 4782 5401 4783 3 4783 4784 4852 3 4783 4852 4851 3 4783 5401 5402 3 4783 5402 4784 3 4784 4785 4853 3 4784 4853 4852 3 4784 5402 5403 3 4784 5403 4785 3 4785 4786 4854 3 4785 4854 4853 3 4785 5403 5404 3 4785 5404 4786 3 4786 4787 4855 3 4786 4855 4854 3 4786 5404 5405 3 4786 5405 4787 3 4787 4788 4856 3 4787 4856 4855 3 4787 5405 5406 3 4787 5406 4788 3 4788 4789 4857 3 4788 4857 4856 3 4788 5406 5407 3 4788 5407 4789 3 4789 4790 4858 3 4789 4858 4857 3 4789 5407 5408 3 4789 5408 4790 3 4790 4791 4859 3 4790 4859 4858 3 4790 5408 5409 3 4790 5409 4791 3 4791 4792 4860 3 4791 4860 4859 3 4791 5409 5410 3 4791 5410 4792 3 4792 4793 4861 3 4792 4861 4860 3 4792 5410 5411 3 4792 5411 4793 3 4793 4794 4862 3 4793 4862 4861 3 4793 5411 5412 3 4793 5412 4794 3 4794 4795 4863 3 4794 4863 4862 3 4794 5412 5413 3 4794 5413 4795 3 4795 4796 4864 3 4795 4864 4863 3 4795 5413 5414 3 4795 5414 4796 3 4796 4797 4865 3 4796 4865 4864 3 4796 5414 5415 3 4796 5415 4797 3 4797 4798 4866 3 4797 4866 4865 3 4797 5415 5416 3 4797 5416 4798 3 4798 4799 4867 3 4798 4867 4866 3 4798 5416 5417 3 4798 5417 4799 3 4799 4800 4868 3 4799 4868 4867 3 4799 5417 5418 3 4799 5418 4800 3 4800 4801 4869 3 4800 4869 4868 3 4800 5418 5419 3 4800 5419 4801 3 4801 4802 4870 3 4801 4870 4869 3 4801 5419 5420 3 4801 5420 4802 3 4802 4803 4871 3 4802 4871 4870 3 4802 5420 5421 3 4802 5421 4803 3 4803 4804 4872 3 4803 4872 4871 3 4803 5421 5422 3 4803 5422 4804 3 4804 4805 4873 3 4804 4873 4872 3 4804 
5422 5423 3 4804 5423 4805 3 4805 4806 4874 3 4805 4874 4873 3 4805 5423 5424 3 4805 5424 4806 3 4806 4807 4875 3 4806 4875 4874 3 4806 5424 5425 3 4806 5425 4807 3 4807 4808 4876 3 4807 4876 4875 3 4807 5425 5426 3 4807 5426 4808 3 4808 4809 4877 3 4808 4877 4876 3 4808 5426 5427 3 4808 5427 4809 3 4809 4810 4878 3 4809 4878 4877 3 4809 5427 5428 3 4809 5428 4810 3 4810 4811 4879 3 4810 4879 4878 3 4810 5428 5429 3 4810 5429 4811 3 4811 4812 4880 3 4811 4880 4879 3 4811 5429 5430 3 4811 5430 4812 3 4812 4813 4881 3 4812 4881 4880 3 4812 5430 5431 3 4812 5431 4813 3 4813 4814 4882 3 4813 4882 4881 3 4813 5431 5432 3 4813 5432 4814 3 4814 4815 4883 3 4814 4883 4882 3 4814 5432 4815 3 5433 4815 5432 3 4815 5433 4883 3 5438 4883 5433 3 4816 4817 4886 3 4887 4886 4817 3 4816 5436 4817 3 5437 4817 5436 3 4816 4886 5436 3 5442 5436 4886 3 4817 4818 4887 3 4888 4887 4818 3 4882 4883 4890 3 4882 4890 4889 3 4883 4884 4891 3 4883 4891 4890 3 4883 5438 4884 3 5439 4884 5438 3 4884 5439 4891 3 5443 4891 5439 3 4885 4886 4894 3 4885 4894 4893 3 4885 5441 4886 3 5442 4886 5441 3 4885 4893 5446 3 4885 5446 5441 3 4886 4887 4895 3 4886 4895 4894 3 4890 4891 4897 3 4890 4897 4896 3 4891 4892 4898 3 4891 4898 4897 3 4891 5443 4892 3 5444 4892 5443 3 4892 5444 4898 3 5447 4898 5444 3 4893 4894 4900 3 4893 4900 4899 3 4893 4899 5450 3 4893 5450 5446 3 4897 4898 4902 3 4897 4902 4901 3 4898 5447 5451 3 4898 5451 4902 3 4899 4900 4904 3 4899 4904 4903 3 4899 4903 5454 3 4899 5454 5450 3 4901 4902 4906 3 4901 4906 4905 3 4902 5451 5455 3 4902 5455 4906 3 4903 4904 4907 3 4908 4907 4904 3 4903 4907 5454 3 5458 5454 4907 3 4905 4906 4909 3 4910 4909 4906 3 4906 5455 5459 3 4906 5459 4910 3 4907 4908 4911 3 4912 4911 4908 3 4907 4911 5458 3 5462 5458 4911 3 4909 4910 4913 3 4914 4913 4910 3 4910 5459 5463 3 4910 5463 4914 3 4911 4912 4915 3 4916 4915 4912 3 4911 4915 5462 3 5466 5462 4915 3 4913 4914 4917 3 4918 4917 4914 3 4914 5463 5467 3 4914 5467 4918 3 4915 4916 4919 3 4920 4919 4916 
3 4915 4919 5466 3 5470 5466 4919 3 4917 4918 4921 3 4922 4921 4918 3 4918 5467 5471 3 4918 5471 4922 3 4919 4920 4923 3 4924 4923 4920 3 4919 4923 5470 3 5474 5470 4923 3 4921 4922 4925 3 4926 4925 4922 3 4922 5471 5475 3 4922 5475 4926 3 4923 4924 4927 3 4928 4927 4924 3 4923 4927 5474 3 5478 5474 4927 3 4925 4926 4929 3 4930 4929 4926 3 4926 5475 5479 3 4926 5479 4930 3 4927 4928 4931 3 4932 4931 4928 3 4927 4931 5478 3 5482 5478 4931 3 4929 4930 4933 3 4934 4933 4930 3 4930 5479 5483 3 4930 5483 4934 3 4931 4932 4935 3 4936 4935 4932 3 4931 4935 5482 3 5486 5482 4935 3 4933 4934 4937 3 4938 4937 4934 3 4934 5483 5487 3 4934 5487 4938 3 4935 4936 4939 3 4940 4939 4936 3 4935 4939 5486 3 5490 5486 4939 3 4937 4938 4941 3 4942 4941 4938 3 4938 5487 5491 3 4938 5491 4942 3 4939 4940 4943 3 4944 4943 4940 3 4939 4943 5490 3 5494 5490 4943 3 4941 4942 4945 3 4946 4945 4942 3 4942 5491 4946 3 5495 4946 5491 3 4943 4944 4947 3 4948 4947 4944 3 4943 4947 5494 3 5498 5494 4947 3 4945 4946 4950 3 4945 4950 4949 3 4946 5495 4950 3 5499 4950 5495 3 4947 4948 4952 3 4947 4952 4951 3 4947 4951 5502 3 4947 5502 5498 3 4949 4950 4954 3 4949 4954 4953 3 4950 5499 4954 3 5503 4954 5499 3 4951 4952 4956 3 4951 4956 4955 3 4951 4955 5506 3 4951 5506 5502 3 4953 4954 4958 3 4953 4958 4957 3 4954 5503 4958 3 5507 4958 5503 3 4955 4956 4960 3 4955 4960 4959 3 4955 4959 5510 3 4955 5510 5506 3 4957 4958 4962 3 4957 4962 4961 3 4958 5507 4962 3 5511 4962 5507 3 4959 4960 4964 3 4959 4964 4963 3 4959 4963 5514 3 4959 5514 5510 3 4961 4962 4966 3 4961 4966 4965 3 4962 5511 4966 3 5515 4966 5511 3 4963 4964 4968 3 4963 4968 4967 3 4963 4967 5518 3 4963 5518 5514 3 4965 4966 4970 3 4965 4970 4969 3 4966 5515 4970 3 5519 4970 5515 3 4967 4968 4972 3 4967 4972 4971 3 4967 4971 5522 3 4967 5522 5518 3 4969 4970 4974 3 4969 4974 4973 3 4970 5519 4974 3 5523 4974 5519 3 4971 4972 4976 3 4971 4976 4975 3 4971 4975 5526 3 4971 5526 5522 3 4973 4974 4978 3 4973 4978 4977 3 4974 5523 4978 3 5527 
4978 5523 3 4975 4976 4980 3 4975 4980 4979 3 4975 4979 5530 3 4975 5530 5526 3 4977 4978 4982 3 4977 4982 4981 3 4978 5527 4982 3 5531 4982 5527 3 4979 4980 4984 3 4979 4984 4983 3 4979 4983 5534 3 4979 5534 5530 3 4981 4982 4986 3 4981 4986 4985 3 4982 5531 4986 3 5535 4986 5531 3 4983 4984 4988 3 4983 4988 4987 3 4983 4987 5538 3 4983 5538 5534 3 4985 4986 4990 3 4985 4990 4989 3 4986 5535 5539 3 4986 5539 4990 3 4987 4988 4992 3 4987 4992 4991 3 4987 4991 5538 3 5542 5538 4991 3 4989 4990 4993 3 4994 4993 4990 3 4990 5539 5543 3 4990 5543 4994 3 4991 4992 4995 3 4996 4995 4992 3 4991 4995 5542 3 5546 5542 4995 3 4993 4994 4997 3 4998 4997 4994 3 4994 5543 5547 3 4994 5547 4998 3 4995 4996 4999 3 5000 4999 4996 3 4995 4999 5546 3 5550 5546 4999 3 4997 4998 5001 3 5002 5001 4998 3 4998 5547 5551 3 4998 5551 5002 3 4999 5000 5003 3 5004 5003 5000 3 4999 5003 5550 3 5554 5550 5003 3 5001 5002 5005 3 5006 5005 5002 3 5002 5551 5555 3 5002 5555 5006 3 5003 5004 5007 3 5008 5007 5004 3 5003 5007 5554 3 5558 5554 5007 3 5005 5006 5009 3 5010 5009 5006 3 5006 5555 5559 3 5006 5559 5010 3 5007 5008 5011 3 5012 5011 5008 3 5007 5011 5558 3 5562 5558 5011 3 5009 5010 5013 3 5014 5013 5010 3 5010 5559 5563 3 5010 5563 5014 3 5011 5012 5015 3 5016 5015 5012 3 5011 5015 5562 3 5566 5562 5015 3 5013 5014 5017 3 5018 5017 5014 3 5014 5563 5567 3 5014 5567 5018 3 5015 5016 5019 3 5020 5019 5016 3 5015 5019 5566 3 5570 5566 5019 3 5017 5018 5021 3 5022 5021 5018 3 5018 5567 5571 3 5018 5571 5022 3 5019 5020 5023 3 5024 5023 5020 3 5019 5023 5570 3 5574 5570 5023 3 5021 5022 5025 3 5026 5025 5022 3 5022 5571 5575 3 5022 5575 5026 3 5023 5024 5027 3 5028 5027 5024 3 5023 5027 5574 3 5578 5574 5027 3 5025 5026 5029 3 5030 5029 5026 3 5026 5575 5579 3 5026 5579 5030 3 5027 5028 5031 3 5032 5031 5028 3 5027 5031 5578 3 5582 5578 5031 3 5029 5030 5033 3 5034 5033 5030 3 5030 5579 5583 3 5030 5583 5034 3 5031 5032 5035 3 5036 5035 5032 3 5031 5035 5586 3 5031 5586 5582 3 5033 5034 5038 
3 5033 5038 5037 3 5034 5583 5038 3 5587 5038 5583 3 5035 5036 5040 3 5035 5040 5039 3 5035 5039 5590 3 5035 5590 5586 3 5037 5038 5042 3 5037 5042 5041 3 5038 5587 5042 3 5591 5042 5587 3 5039 5040 5044 3 5039 5044 5043 3 5039 5043 5594 3 5039 5594 5590 3 5041 5042 5046 3 5041 5046 5045 3 5042 5591 5046 3 5595 5046 5591 3 5043 5044 5048 3 5043 5048 5047 3 5043 5047 5598 3 5043 5598 5594 3 5045 5046 5050 3 5045 5050 5049 3 5046 5595 5050 3 5599 5050 5595 3 5047 5048 5052 3 5047 5052 5051 3 5047 5051 5602 3 5047 5602 5598 3 5049 5050 5054 3 5049 5054 5053 3 5050 5599 5054 3 5603 5054 5599 3 5051 5052 5056 3 5051 5056 5055 3 5051 5055 5606 3 5051 5606 5602 3 5053 5054 5058 3 5053 5058 5057 3 5054 5603 5058 3 5607 5058 5603 3 5055 5056 5060 3 5055 5060 5059 3 5055 5059 5610 3 5055 5610 5606 3 5057 5058 5062 3 5057 5062 5061 3 5058 5607 5062 3 5611 5062 5607 3 5059 5060 5064 3 5059 5064 5063 3 5059 5063 5614 3 5059 5614 5610 3 5061 5062 5066 3 5061 5066 5065 3 5062 5611 5066 3 5615 5066 5611 3 5063 5064 5068 3 5063 5068 5067 3 5063 5067 5618 3 5063 5618 5614 3 5065 5066 5070 3 5065 5070 5069 3 5066 5615 5070 3 5619 5070 5615 3 5067 5068 5072 3 5067 5072 5071 3 5067 5071 5622 3 5067 5622 5618 3 5069 5070 5074 3 5069 5074 5073 3 5070 5619 5074 3 5623 5074 5619 3 5071 5072 5076 3 5071 5076 5075 3 5071 5075 5626 3 5071 5626 5622 3 5073 5074 5078 3 5073 5078 5077 3 5074 5623 5078 3 5627 5078 5623 3 5075 5076 5080 3 5075 5080 5079 3 5075 5079 5626 3 5630 5626 5079 3 5077 5078 5081 3 5082 5081 5078 3 5078 5627 5631 3 5078 5631 5082 3 5079 5080 5083 3 5084 5083 5080 3 5079 5083 5630 3 5634 5630 5083 3 5081 5082 5085 3 5086 5085 5082 3 5082 5631 5635 3 5082 5635 5086 3 5083 5084 5087 3 5088 5087 5084 3 5083 5087 5634 3 5638 5634 5087 3 5085 5086 5089 3 5090 5089 5086 3 5086 5635 5639 3 5086 5639 5090 3 5087 5088 5091 3 5092 5091 5088 3 5087 5091 5638 3 5642 5638 5091 3 5089 5090 5093 3 5094 5093 5090 3 5090 5639 5643 3 5090 5643 5094 3 5091 5092 5095 3 5096 5095 5092 3 5091 
5095 5642 3 5646 5642 5095 3 5093 5094 5097 3 5098 5097 5094 3 5094 5643 5647 3 5094 5647 5098 3 5095 5096 5099 3 5100 5099 5096 3 5095 5099 5646 3 5650 5646 5099 3 5097 5098 5101 3 5102 5101 5098 3 5098 5647 5651 3 5098 5651 5102 3 5099 5100 5103 3 5104 5103 5100 3 5099 5103 5650 3 5654 5650 5103 3 5101 5102 5105 3 5106 5105 5102 3 5102 5651 5655 3 5102 5655 5106 3 5103 5104 5107 3 5108 5107 5104 3 5103 5107 5654 3 5658 5654 5107 3 5105 5106 5109 3 5110 5109 5106 3 5106 5655 5659 3 5106 5659 5110 3 5107 5108 5111 3 5112 5111 5108 3 5107 5111 5658 3 5662 5658 5111 3 5109 5110 5113 3 5114 5113 5110 3 5110 5659 5663 3 5110 5663 5114 3 5111 5112 5115 3 5116 5115 5112 3 5111 5115 5662 3 5666 5662 5115 3 5113 5114 5117 3 5118 5117 5114 3 5114 5663 5667 3 5114 5667 5118 3 5115 5116 5119 3 5120 5119 5116 3 5115 5119 5666 3 5670 5666 5119 3 5117 5118 5121 3 5122 5121 5118 3 5118 5667 5671 3 5118 5671 5122 3 5119 5120 5123 3 5124 5123 5120 3 5119 5123 5674 3 5119 5674 5670 3 5121 5122 5126 3 5121 5126 5125 3 5122 5671 5126 3 5675 5126 5671 3 5123 5124 5128 3 5123 5128 5127 3 5123 5127 5678 3 5123 5678 5674 3 5125 5126 5130 3 5125 5130 5129 3 5126 5675 5130 3 5679 5130 5675 3 5127 5128 5132 3 5127 5132 5131 3 5127 5131 5682 3 5127 5682 5678 3 5129 5130 5134 3 5129 5134 5133 3 5130 5679 5134 3 5683 5134 5679 3 5131 5132 5136 3 5131 5136 5135 3 5131 5135 5686 3 5131 5686 5682 3 5133 5134 5138 3 5133 5138 5137 3 5134 5683 5138 3 5687 5138 5683 3 5135 5136 5140 3 5135 5140 5139 3 5135 5139 5690 3 5135 5690 5686 3 5137 5138 5142 3 5137 5142 5141 3 5138 5687 5142 3 5691 5142 5687 3 5139 5140 5144 3 5139 5144 5143 3 5139 5143 5694 3 5139 5694 5690 3 5141 5142 5146 3 5141 5146 5145 3 5142 5691 5146 3 5695 5146 5691 3 5143 5144 5148 3 5143 5148 5147 3 5143 5147 5698 3 5143 5698 5694 3 5145 5146 5150 3 5145 5150 5149 3 5146 5695 5150 3 5699 5150 5695 3 5147 5148 5152 3 5147 5152 5151 3 5147 5151 5702 3 5147 5702 5698 3 5149 5150 5156 3 5149 5156 5155 3 5150 5699 5156 3 5703 5156 5699 
3 5151 5152 5158 3 5151 5158 5157 3 5151 5157 5705 3 5151 5705 5702 3 5152 5153 5159 3 5152 5159 5158 3 5154 5155 5163 3 5154 5163 5162 3 5155 5156 5164 3 5155 5164 5163 3 5156 5703 5164 3 5708 5164 5703 3 5157 5158 5706 3 5157 5706 5705 3 5158 5159 5166 3 5158 5166 5165 3 5158 5165 5710 3 5158 5710 5706 3 5159 5160 5167 3 5159 5167 5166 3 5161 5162 5232 3 5161 5232 5231 3 5162 5163 5233 3 5162 5233 5232 3 5163 5164 5708 3 5163 5708 5707 3 5163 5707 5233 3 5713 5233 5707 3 5165 5166 5710 3 5711 5710 5166 3 5166 5167 5234 3 5235 5234 5167 3 5166 5234 5711 3 5716 5711 5234 3 5167 5168 5235 3 5236 5235 5168 3 5168 5169 5236 3 5237 5236 5169 3 5169 5170 5237 3 5238 5237 5170 3 5170 5171 5238 3 5239 5238 5171 3 5171 5172 5239 3 5240 5239 5172 3 5172 5173 5240 3 5241 5240 5173 3 5173 5174 5241 3 5242 5241 5174 3 5174 5175 5242 3 5243 5242 5175 3 5175 5176 5243 3 5244 5243 5176 3 5176 5177 5244 3 5245 5244 5177 3 5177 5178 5245 3 5246 5245 5178 3 5178 5179 5246 3 5247 5246 5179 3 5179 5180 5247 3 5248 5247 5180 3 5180 5181 5248 3 5249 5248 5181 3 5181 5182 5249 3 5250 5249 5182 3 5182 5183 5250 3 5251 5250 5183 3 5183 5184 5251 3 5252 5251 5184 3 5184 5185 5252 3 5253 5252 5185 3 5185 5186 5253 3 5254 5253 5186 3 5186 5187 5254 3 5255 5254 5187 3 5187 5188 5255 3 5256 5255 5188 3 5188 5189 5256 3 5257 5256 5189 3 5189 5190 5257 3 5258 5257 5190 3 5190 5191 5258 3 5259 5258 5191 3 5191 5192 5259 3 5260 5259 5192 3 5192 5193 5260 3 5261 5260 5193 3 5193 5194 5261 3 5262 5261 5194 3 5194 5195 5262 3 5263 5262 5195 3 5195 5196 5263 3 5264 5263 5196 3 5196 5197 5264 3 5265 5264 5197 3 5197 5198 5265 3 5266 5265 5198 3 5198 5199 5266 3 5267 5266 5199 3 5199 5200 5267 3 5268 5267 5200 3 5200 5201 5268 3 5269 5268 5201 3 5201 5202 5269 3 5270 5269 5202 3 5202 5203 5270 3 5271 5270 5203 3 5203 5204 5271 3 5272 5271 5204 3 5204 5205 5272 3 5273 5272 5205 3 5205 5206 5273 3 5274 5273 5206 3 5206 5207 5274 3 5275 5274 5207 3 5207 5208 5275 3 5276 5275 5208 3 5208 5209 5276 3 5277 
5276 5209 3 5209 5210 5278 3 5209 5278 5277 3 5210 5211 5279 3 5210 5279 5278 3 5211 5212 5280 3 5211 5280 5279 3 5212 5213 5281 3 5212 5281 5280 3 5213 5214 5282 3 5213 5282 5281 3 5214 5215 5283 3 5214 5283 5282 3 5215 5216 5284 3 5215 5284 5283 3 5216 5217 5285 3 5216 5285 5284 3 5217 5218 5286 3 5217 5286 5285 3 5218 5219 5287 3 5218 5287 5286 3 5219 5220 5288 3 5219 5288 5287 3 5220 5221 5289 3 5220 5289 5288 3 5221 5222 5290 3 5221 5290 5289 3 5222 5223 5291 3 5222 5291 5290 3 5223 5224 5292 3 5223 5292 5291 3 5224 5225 5293 3 5224 5293 5292 3 5225 5226 5294 3 5225 5294 5293 3 5226 5227 5295 3 5226 5295 5294 3 5227 5228 5296 3 5227 5296 5295 3 5228 5229 5297 3 5228 5297 5296 3 5229 5230 5298 3 5229 5298 5297 3 5230 5231 5299 3 5230 5299 5298 3 5231 5232 5300 3 5231 5300 5299 3 5232 5233 5713 3 5232 5713 5712 3 5232 5712 5782 3 5232 5782 5300 3 5234 5235 5717 3 5234 5717 5716 3 5235 5236 5718 3 5235 5718 5717 3 5236 5237 5719 3 5236 5719 5718 3 5237 5238 5720 3 5237 5720 5719 3 5238 5239 5721 3 5238 5721 5720 3 5239 5240 5722 3 5239 5722 5721 3 5240 5241 5723 3 5240 5723 5722 3 5241 5242 5724 3 5241 5724 5723 3 5242 5243 5725 3 5242 5725 5724 3 5243 5244 5726 3 5243 5726 5725 3 5244 5245 5727 3 5244 5727 5726 3 5245 5246 5728 3 5245 5728 5727 3 5246 5247 5729 3 5246 5729 5728 3 5247 5248 5730 3 5247 5730 5729 3 5248 5249 5731 3 5248 5731 5730 3 5249 5250 5732 3 5249 5732 5731 3 5250 5251 5733 3 5250 5733 5732 3 5251 5252 5734 3 5251 5734 5733 3 5252 5253 5735 3 5252 5735 5734 3 5253 5254 5735 3 5736 5735 5254 3 5254 5255 5736 3 5737 5736 5255 3 5255 5256 5737 3 5738 5737 5256 3 5256 5257 5738 3 5739 5738 5257 3 5257 5258 5739 3 5740 5739 5258 3 5258 5259 5740 3 5741 5740 5259 3 5259 5260 5741 3 5742 5741 5260 3 5260 5261 5742 3 5743 5742 5261 3 5261 5262 5743 3 5744 5743 5262 3 5262 5263 5744 3 5745 5744 5263 3 5263 5264 5745 3 5746 5745 5264 3 5264 5265 5746 3 5747 5746 5265 3 5265 5266 5747 3 5748 5747 5266 3 5266 5267 5748 3 5749 5748 5267 3 5267 5268 5749 
3 5750 5749 5268 3 5268 5269 5750 3 5751 5750 5269 3 5269 5270 5751 3 5752 5751 5270 3 5270 5271 5752 3 5753 5752 5271 3 5271 5272 5753 3 5754 5753 5272 3 5272 5273 5754 3 5755 5754 5273 3 5273 5274 5755 3 5756 5755 5274 3 5274 5275 5756 3 5757 5756 5275 3 5275 5276 5757 3 5758 5757 5276 3 5276 5277 5758 3 5759 5758 5277 3 5277 5278 5759 3 5760 5759 5278 3 5278 5279 5760 3 5761 5760 5279 3 5279 5280 5761 3 5762 5761 5280 3 5280 5281 5762 3 5763 5762 5281 3 5281 5282 5763 3 5764 5763 5282 3 5282 5283 5764 3 5765 5764 5283 3 5283 5284 5765 3 5766 5765 5284 3 5284 5285 5766 3 5767 5766 5285 3 5285 5286 5767 3 5768 5767 5286 3 5286 5287 5768 3 5769 5768 5287 3 5287 5288 5769 3 5770 5769 5288 3 5288 5289 5770 3 5771 5770 5289 3 5289 5290 5771 3 5772 5771 5290 3 5290 5291 5772 3 5773 5772 5291 3 5291 5292 5773 3 5774 5773 5292 3 5292 5293 5774 3 5775 5774 5293 3 5293 5294 5775 3 5776 5775 5294 3 5294 5295 5776 3 5777 5776 5295 3 5295 5296 5777 3 5778 5777 5296 3 5296 5297 5778 3 5779 5778 5297 3 5297 5298 5779 3 5780 5779 5298 3 5298 5299 5781 3 5298 5781 5780 3 5299 5300 5782 3 5299 5782 5781 3 5301 5302 5369 3 5301 5369 5368 3 5301 5850 5851 3 5301 5851 5302 3 5301 5368 5919 3 5301 5919 5850 3 5302 5303 5370 3 5302 5370 5369 3 5302 5851 5852 3 5302 5852 5303 3 5303 5304 5371 3 5303 5371 5370 3 5303 5852 5853 3 5303 5853 5304 3 5304 5305 5372 3 5304 5372 5371 3 5304 5853 5305 3 5854 5305 5853 3 5305 5306 5373 3 5305 5373 5372 3 5305 5854 5306 3 5855 5306 5854 3 5306 5307 5374 3 5306 5374 5373 3 5306 5855 5307 3 5856 5307 5855 3 5307 5308 5375 3 5307 5375 5374 3 5307 5856 5308 3 5857 5308 5856 3 5308 5309 5376 3 5308 5376 5375 3 5308 5857 5309 3 5858 5309 5857 3 5309 5310 5377 3 5309 5377 5376 3 5309 5858 5310 3 5859 5310 5858 3 5310 5311 5378 3 5310 5378 5377 3 5310 5859 5311 3 5860 5311 5859 3 5311 5312 5379 3 5311 5379 5378 3 5311 5860 5312 3 5861 5312 5860 3 5312 5313 5380 3 5312 5380 5379 3 5312 5861 5313 3 5862 5313 5861 3 5313 5314 5381 3 5313 5381 5380 3 5313 
5862 5314 3 5863 5314 5862 3 5314 5315 5382 3 5314 5382 5381 3 5314 5863 5315 3 5864 5315 5863 3 5315 5316 5383 3 5315 5383 5382 3 5315 5864 5316 3 5865 5316 5864 3 5316 5317 5384 3 5316 5384 5383 3 5316 5865 5317 3 5866 5317 5865 3 5317 5318 5385 3 5317 5385 5384 3 5317 5866 5318 3 5867 5318 5866 3 5318 5319 5386 3 5318 5386 5385 3 5318 5867 5319 3 5868 5319 5867 3 5319 5320 5387 3 5319 5387 5386 3 5319 5868 5320 3 5869 5320 5868 3 5320 5321 5388 3 5320 5388 5387 3 5320 5869 5321 3 5870 5321 5869 3 5321 5322 5389 3 5321 5389 5388 3 5321 5870 5322 3 5871 5322 5870 3 5322 5323 5390 3 5322 5390 5389 3 5322 5871 5323 3 5872 5323 5871 3 5323 5324 5391 3 5323 5391 5390 3 5323 5872 5324 3 5873 5324 5872 3 5324 5325 5392 3 5324 5392 5391 3 5324 5873 5325 3 5874 5325 5873 3 5325 5326 5393 3 5325 5393 5392 3 5325 5874 5326 3 5875 5326 5874 3 5326 5327 5394 3 5326 5394 5393 3 5326 5875 5327 3 5876 5327 5875 3 5327 5328 5395 3 5327 5395 5394 3 5327 5876 5328 3 5877 5328 5876 3 5328 5329 5396 3 5328 5396 5395 3 5328 5877 5329 3 5878 5329 5877 3 5329 5330 5397 3 5329 5397 5396 3 5329 5878 5330 3 5879 5330 5878 3 5330 5331 5398 3 5330 5398 5397 3 5330 5879 5331 3 5880 5331 5879 3 5331 5332 5399 3 5331 5399 5398 3 5331 5880 5332 3 5881 5332 5880 3 5332 5333 5400 3 5332 5400 5399 3 5332 5881 5333 3 5882 5333 5881 3 5333 5334 5401 3 5333 5401 5400 3 5333 5882 5334 3 5883 5334 5882 3 5334 5335 5402 3 5334 5402 5401 3 5334 5883 5335 3 5884 5335 5883 3 5335 5336 5403 3 5335 5403 5402 3 5335 5884 5336 3 5885 5336 5884 3 5336 5337 5404 3 5336 5404 5403 3 5336 5885 5337 3 5886 5337 5885 3 5337 5338 5405 3 5337 5405 5404 3 5337 5886 5338 3 5887 5338 5886 3 5338 5339 5406 3 5338 5406 5405 3 5338 5887 5339 3 5888 5339 5887 3 5339 5340 5407 3 5339 5407 5406 3 5339 5888 5340 3 5889 5340 5888 3 5340 5341 5408 3 5340 5408 5407 3 5340 5889 5341 3 5890 5341 5889 3 5341 5342 5409 3 5341 5409 5408 3 5341 5890 5342 3 5891 5342 5890 3 5342 5343 5410 3 5342 5410 5409 3 5342 5891 5343 3 5892 5343 5891 
3 5343 5344 5410 3 5411 5410 5344 3 5343 5892 5344 3 5893 5344 5892 3 5344 5345 5411 3 5412 5411 5345 3 5344 5893 5345 3 5894 5345 5893 3 5345 5346 5412 3 5413 5412 5346 3 5345 5894 5346 3 5895 5346 5894 3 5346 5347 5413 3 5414 5413 5347 3 5346 5895 5347 3 5896 5347 5895 3 5347 5348 5414 3 5415 5414 5348 3 5347 5896 5348 3 5897 5348 5896 3 5348 5349 5415 3 5416 5415 5349 3 5348 5897 5349 3 5898 5349 5897 3 5349 5350 5416 3 5417 5416 5350 3 5349 5898 5350 3 5899 5350 5898 3 5350 5351 5417 3 5418 5417 5351 3 5350 5899 5900 3 5350 5900 5351 3 5351 5352 5418 3 5419 5418 5352 3 5351 5900 5901 3 5351 5901 5352 3 5352 5353 5419 3 5420 5419 5353 3 5352 5901 5902 3 5352 5902 5353 3 5353 5354 5420 3 5421 5420 5354 3 5353 5902 5903 3 5353 5903 5354 3 5354 5355 5421 3 5422 5421 5355 3 5354 5903 5904 3 5354 5904 5355 3 5355 5356 5422 3 5423 5422 5356 3 5355 5904 5905 3 5355 5905 5356 3 5356 5357 5423 3 5424 5423 5357 3 5356 5905 5906 3 5356 5906 5357 3 5357 5358 5424 3 5425 5424 5358 3 5357 5906 5907 3 5357 5907 5358 3 5358 5359 5425 3 5426 5425 5359 3 5358 5907 5908 3 5358 5908 5359 3 5359 5360 5426 3 5427 5426 5360 3 5359 5908 5909 3 5359 5909 5360 3 5360 5361 5427 3 5428 5427 5361 3 5360 5909 5910 3 5360 5910 5361 3 5361 5362 5428 3 5429 5428 5362 3 5361 5910 5911 3 5361 5911 5362 3 5362 5363 5429 3 5430 5429 5363 3 5362 5911 5912 3 5362 5912 5363 3 5363 5364 5430 3 5431 5430 5364 3 5363 5912 5913 3 5363 5913 5364 3 5364 5365 5431 3 5432 5431 5365 3 5364 5913 5914 3 5364 5914 5365 3 5365 5914 5920 3 5365 5920 5432 3 5366 5367 5436 3 5437 5436 5367 3 5366 5917 5918 3 5366 5918 5367 3 5366 5436 5925 3 5366 5925 5917 3 5367 5918 5919 3 5367 5919 5368 3 5432 5920 5433 3 5921 5433 5920 3 5433 5434 5438 3 5439 5438 5434 3 5433 5921 5434 3 5922 5434 5921 3 5434 5922 5439 3 5926 5439 5922 3 5435 5436 5441 3 5442 5441 5436 3 5435 5924 5436 3 5925 5436 5924 3 5435 5441 5924 3 5929 5924 5441 3 5439 5440 5443 3 5444 5443 5440 3 5439 5926 5440 3 5927 5440 5926 3 5440 5927 5444 3 5930 
5444 5927 3 5441 5446 5929 3 5933 5929 5446 3 5444 5930 5447 3 5934 5447 5930 3 5445 5446 5449 3 5450 5449 5446 3 5445 5932 5446 3 5933 5446 5932 3 5445 5449 5932 3 5936 5932 5449 3 5447 5448 5451 3 5452 5451 5448 3 5447 5934 5448 3 5935 5448 5934 3 5448 5935 5452 3 5937 5452 5935 3 5449 5450 5453 3 5454 5453 5450 3 5449 5453 5936 3 5938 5936 5453 3 5451 5452 5455 3 5456 5455 5452 3 5452 5937 5456 3 5939 5456 5937 3 5453 5454 5457 3 5458 5457 5454 3 5453 5457 5938 3 5940 5938 5457 3 5455 5456 5459 3 5460 5459 5456 3 5456 5939 5460 3 5941 5460 5939 3 5457 5458 5461 3 5462 5461 5458 3 5457 5461 5940 3 5942 5940 5461 3 5459 5460 5463 3 5464 5463 5460 3 5460 5941 5464 3 5943 5464 5941 3 5461 5462 5465 3 5466 5465 5462 3 5461 5465 5942 3 5944 5942 5465 3 5463 5464 5467 3 5468 5467 5464 3 5464 5943 5468 3 5945 5468 5943 3 5465 5466 5469 3 5470 5469 5466 3 5465 5469 5944 3 5946 5944 5469 3 5467 5468 5471 3 5472 5471 5468 3 5468 5945 5472 3 5947 5472 5945 3 5469 5470 5473 3 5474 5473 5470 3 5469 5473 5946 3 5948 5946 5473 3 5471 5472 5475 3 5476 5475 5472 3 5472 5947 5476 3 5949 5476 5947 3 5473 5474 5477 3 5478 5477 5474 3 5473 5477 5948 3 5950 5948 5477 3 5475 5476 5479 3 5480 5479 5476 3 5476 5949 5480 3 5951 5480 5949 3 5477 5478 5481 3 5482 5481 5478 3 5477 5481 5952 3 5477 5952 5950 3 5479 5480 5484 3 5479 5484 5483 3 5480 5951 5484 3 5953 5484 5951 3 5481 5482 5486 3 5481 5486 5485 3 5481 5485 5954 3 5481 5954 5952 3 5483 5484 5488 3 5483 5488 5487 3 5484 5953 5488 3 5955 5488 5953 3 5485 5486 5490 3 5485 5490 5489 3 5485 5489 5956 3 5485 5956 5954 3 5487 5488 5492 3 5487 5492 5491 3 5488 5955 5492 3 5957 5492 5955 3 5489 5490 5494 3 5489 5494 5493 3 5489 5493 5958 3 5489 5958 5956 3 5491 5492 5496 3 5491 5496 5495 3 5492 5957 5959 3 5492 5959 5496 3 5493 5494 5498 3 5493 5498 5497 3 5493 5497 5960 3 5493 5960 5958 3 5495 5496 5500 3 5495 5500 5499 3 5496 5959 5961 3 5496 5961 5500 3 5497 5498 5502 3 5497 5502 5501 3 5497 5501 5962 3 5497 5962 5960 3 5499 5500 5504 
3 5499 5504 5503 3 5500 5961 5963 3 5500 5963 5504 3 5501 5502 5506 3 5501 5506 5505 3 5501 5505 5964 3 5501 5964 5962 3 5503 5504 5508 3 5503 5508 5507 3 5504 5963 5965 3 5504 5965 5508 3 5505 5506 5510 3 5505 5510 5509 3 5505 5509 5966 3 5505 5966 5964 3 5507 5508 5512 3 5507 5512 5511 3 5508 5965 5967 3 5508 5967 5512 3 5509 5510 5514 3 5509 5514 5513 3 5509 5513 5968 3 5509 5968 5966 3 5511 5512 5516 3 5511 5516 5515 3 5512 5967 5969 3 5512 5969 5516 3 5513 5514 5518 3 5513 5518 5517 3 5513 5517 5970 3 5513 5970 5968 3 5515 5516 5520 3 5515 5520 5519 3 5516 5969 5971 3 5516 5971 5520 3 5517 5518 5522 3 5517 5522 5521 3 5517 5521 5972 3 5517 5972 5970 3 5519 5520 5524 3 5519 5524 5523 3 5520 5971 5973 3 5520 5973 5524 3 5521 5522 5526 3 5521 5526 5525 3 5521 5525 5974 3 5521 5974 5972 3 5523 5524 5528 3 5523 5528 5527 3 5524 5973 5975 3 5524 5975 5528 3 5525 5526 5529 3 5530 5529 5526 3 5525 5529 5974 3 5976 5974 5529 3 5527 5528 5531 3 5532 5531 5528 3 5528 5975 5977 3 5528 5977 5532 3 5529 5530 5533 3 5534 5533 5530 3 5529 5533 5976 3 5978 5976 5533 3 5531 5532 5535 3 5536 5535 5532 3 5532 5977 5979 3 5532 5979 5536 3 5533 5534 5537 3 5538 5537 5534 3 5533 5537 5978 3 5980 5978 5537 3 5535 5536 5539 3 5540 5539 5536 3 5536 5979 5981 3 5536 5981 5540 3 5537 5538 5541 3 5542 5541 5538 3 5537 5541 5980 3 5982 5980 5541 3 5539 5540 5543 3 5544 5543 5540 3 5540 5981 5983 3 5540 5983 5544 3 5541 5542 5545 3 5546 5545 5542 3 5541 5545 5982 3 5984 5982 5545 3 5543 5544 5547 3 5548 5547 5544 3 5544 5983 5985 3 5544 5985 5548 3 5545 5546 5549 3 5550 5549 5546 3 5545 5549 5984 3 5986 5984 5549 3 5547 5548 5551 3 5552 5551 5548 3 5548 5985 5987 3 5548 5987 5552 3 5549 5550 5553 3 5554 5553 5550 3 5549 5553 5986 3 5988 5986 5553 3 5551 5552 5555 3 5556 5555 5552 3 5552 5987 5989 3 5552 5989 5556 3 5553 5554 5557 3 5558 5557 5554 3 5553 5557 5988 3 5990 5988 5557 3 5555 5556 5559 3 5560 5559 5556 3 5556 5989 5560 3 5991 5560 5989 3 5557 5558 5561 3 5562 5561 5558 3 5557 
5561 5990 3 5992 5990 5561 3 5559 5560 5563 3 5564 5563 5560 3 5560 5991 5564 3 5993 5564 5991 3 5561 5562 5565 3 5566 5565 5562 3 5561 5565 5992 3 5994 5992 5565 3 5563 5564 5567 3 5568 5567 5564 3 5564 5993 5568 3 5995 5568 5993 3 5565 5566 5569 3 5570 5569 5566 3 5565 5569 5994 3 5996 5994 5569 3 5567 5568 5571 3 5572 5571 5568 3 5568 5995 5572 3 5997 5572 5995 3 5569 5570 5573 3 5574 5573 5570 3 5569 5573 5998 3 5569 5998 5996 3 5571 5572 5576 3 5571 5576 5575 3 5572 5997 5576 3 5999 5576 5997 3 5573 5574 5578 3 5573 5578 5577 3 5573 5577 6000 3 5573 6000 5998 3 5575 5576 5580 3 5575 5580 5579 3 5576 5999 5580 3 6001 5580 5999 3 5577 5578 5582 3 5577 5582 5581 3 5577 5581 6002 3 5577 6002 6000 3 5579 5580 5584 3 5579 5584 5583 3 5580 6001 5584 3 6003 5584 6001 3 5581 5582 5586 3 5581 5586 5585 3 5581 5585 6004 3 5581 6004 6002 3 5583 5584 5588 3 5583 5588 5587 3 5584 6003 5588 3 6005 5588 6003 3 5585 5586 5590 3 5585 5590 5589 3 5585 5589 6006 3 5585 6006 6004 3 5587 5588 5592 3 5587 5592 5591 3 5588 6005 5592 3 6007 5592 6005 3 5589 5590 5594 3 5589 5594 5593 3 5589 5593 6008 3 5589 6008 6006 3 5591 5592 5596 3 5591 5596 5595 3 5592 6007 5596 3 6009 5596 6007 3 5593 5594 5598 3 5593 5598 5597 3 5593 5597 6010 3 5593 6010 6008 3 5595 5596 5600 3 5595 5600 5599 3 5596 6009 5600 3 6011 5600 6009 3 5597 5598 5602 3 5597 5602 5601 3 5597 5601 6012 3 5597 6012 6010 3 5599 5600 5604 3 5599 5604 5603 3 5600 6011 5604 3 6013 5604 6011 3 5601 5602 5606 3 5601 5606 5605 3 5601 5605 6014 3 5601 6014 6012 3 5603 5604 5608 3 5603 5608 5607 3 5604 6013 5608 3 6015 5608 6013 3 5605 5606 5610 3 5605 5610 5609 3 5605 5609 6016 3 5605 6016 6014 3 5607 5608 5612 3 5607 5612 5611 3 5608 6015 5612 3 6017 5612 6015 3 5609 5610 5614 3 5609 5614 5613 3 5609 5613 6018 3 5609 6018 6016 3 5611 5612 5616 3 5611 5616 5615 3 5612 6017 5616 3 6019 5616 6017 3 5613 5614 5618 3 5613 5618 5617 3 5613 5617 6020 3 5613 6020 6018 3 5615 5616 5620 3 5615 5620 5619 3 5616 6019 5620 3 6021 5620 6019 
3 5617 5618 5621 3 5622 5621 5618 3 5617 5621 6020 3 6022 6020 5621 3 5619 5620 5623 3 5624 5623 5620 3 5620 6021 6023 3 5620 6023 5624 3 5621 5622 5625 3 5626 5625 5622 3 5621 5625 6022 3 6024 6022 5625 3 5623 5624 5627 3 5628 5627 5624 3 5624 6023 6025 3 5624 6025 5628 3 5625 5626 5629 3 5630 5629 5626 3 5625 5629 6024 3 6026 6024 5629 3 5627 5628 5631 3 5632 5631 5628 3 5628 6025 6027 3 5628 6027 5632 3 5629 5630 5633 3 5634 5633 5630 3 5629 5633 6026 3 6028 6026 5633 3 5631 5632 5635 3 5636 5635 5632 3 5632 6027 6029 3 5632 6029 5636 3 5633 5634 5637 3 5638 5637 5634 3 5633 5637 6028 3 6030 6028 5637 3 5635 5636 5639 3 5640 5639 5636 3 5636 6029 6031 3 5636 6031 5640 3 5637 5638 5641 3 5642 5641 5638 3 5637 5641 6030 3 6032 6030 5641 3 5639 5640 5643 3 5644 5643 5640 3 5640 6031 6033 3 5640 6033 5644 3 5641 5642 5645 3 5646 5645 5642 3 5641 5645 6032 3 6034 6032 5645 3 5643 5644 5647 3 5648 5647 5644 3 5644 6033 6035 3 5644 6035 5648 3 5645 5646 5649 3 5650 5649 5646 3 5645 5649 6034 3 6036 6034 5649 3 5647 5648 5651 3 5652 5651 5648 3 5648 6035 6037 3 5648 6037 5652 3 5649 5650 5653 3 5654 5653 5650 3 5649 5653 6036 3 6038 6036 5653 3 5651 5652 5655 3 5656 5655 5652 3 5652 6037 6039 3 5652 6039 5656 3 5653 5654 5657 3 5658 5657 5654 3 5653 5657 6038 3 6040 6038 5657 3 5655 5656 5659 3 5660 5659 5656 3 5656 6039 6041 3 5656 6041 5660 3 5657 5658 5661 3 5662 5661 5658 3 5657 5661 6040 3 6042 6040 5661 3 5659 5660 5663 3 5664 5663 5660 3 5660 6041 6043 3 5660 6043 5664 3 5661 5662 5665 3 5666 5665 5662 3 5661 5665 6044 3 5661 6044 6042 3 5663 5664 5668 3 5663 5668 5667 3 5664 6043 6045 3 5664 6045 5668 3 5665 5666 5670 3 5665 5670 5669 3 5665 5669 6046 3 5665 6046 6044 3 5667 5668 5672 3 5667 5672 5671 3 5668 6045 6047 3 5668 6047 5672 3 5669 5670 5674 3 5669 5674 5673 3 5669 5673 6048 3 5669 6048 6046 3 5671 5672 5676 3 5671 5676 5675 3 5672 6047 6049 3 5672 6049 5676 3 5673 5674 5678 3 5673 5678 5677 3 5673 5677 6050 3 5673 6050 6048 3 5675 5676 5680 3 5675 
5680 5679 3 5676 6049 6051 3 5676 6051 5680 3 5677 5678 5682 3 5677 5682 5681 3 5677 5681 6052 3 5677 6052 6050 3 5679 5680 5684 3 5679 5684 5683 3 5680 6051 6053 3 5680 6053 5684 3 5681 5682 5686 3 5681 5686 5685 3 5681 5685 6054 3 5681 6054 6052 3 5683 5684 5688 3 5683 5688 5687 3 5684 6053 6055 3 5684 6055 5688 3 5685 5686 5690 3 5685 5690 5689 3 5685 5689 6056 3 5685 6056 6054 3 5687 5688 5692 3 5687 5692 5691 3 5688 6055 5692 3 6057 5692 6055 3 5689 5690 5694 3 5689 5694 5693 3 5689 5693 6058 3 5689 6058 6056 3 5691 5692 5696 3 5691 5696 5695 3 5692 6057 5696 3 6059 5696 6057 3 5693 5694 5698 3 5693 5698 5697 3 5693 5697 6060 3 5693 6060 6058 3 5695 5696 5700 3 5695 5700 5699 3 5696 6059 5700 3 6061 5700 6059 3 5697 5698 5702 3 5697 5702 5701 3 5697 5701 6062 3 5697 6062 6060 3 5699 5700 5704 3 5699 5704 5703 3 5700 6061 5704 3 6065 5704 6061 3 5701 5702 6063 3 5701 6063 6062 3 5702 5705 6067 3 5702 6067 6063 3 5703 5704 6065 3 5703 6065 6064 3 5703 6064 5708 3 6068 5708 6064 3 5705 5706 5710 3 5705 5710 5709 3 5705 5709 6070 3 5705 6070 6067 3 5707 5708 5714 3 5707 5714 5713 3 5708 6068 5714 3 6073 5714 6068 3 5709 5710 6070 3 6071 6070 5710 3 5710 5711 5715 3 5716 5715 5711 3 5710 5715 6071 3 6075 6071 5715 3 5712 5713 5782 3 5783 5782 5713 3 5713 5714 6072 3 6073 6072 5714 3 5713 6072 5783 3 6080 5783 6072 3 5715 5716 6075 3 6076 6075 5716 3 5716 5717 6076 3 6077 6076 5717 3 5717 5718 5784 3 5785 5784 5718 3 5717 5784 6083 3 5717 6083 6077 3 5718 5719 5785 3 5786 5785 5719 3 5719 5720 5786 3 5787 5786 5720 3 5720 5721 5787 3 5788 5787 5721 3 5721 5722 5788 3 5789 5788 5722 3 5722 5723 5789 3 5790 5789 5723 3 5723 5724 5790 3 5791 5790 5724 3 5724 5725 5791 3 5792 5791 5725 3 5725 5726 5792 3 5793 5792 5726 3 5726 5727 5793 3 5794 5793 5727 3 5727 5728 5794 3 5795 5794 5728 3 5728 5729 5795 3 5796 5795 5729 3 5729 5730 5796 3 5797 5796 5730 3 5730 5731 5797 3 5798 5797 5731 3 5731 5732 5798 3 5799 5798 5732 3 5732 5733 5799 3 5800 5799 5733 3 5733 5734 5800 
3 5801 5800 5734 3 5734 5735 5801 3 5802 5801 5735 3 5735 5736 5802 3 5803 5802 5736 3 5736 5737 5803 3 5804 5803 5737 3 5737 5738 5804 3 5805 5804 5738 3 5738 5739 5805 3 5806 5805 5739 3 5739 5740 5806 3 5807 5806 5740 3 5740 5741 5807 3 5808 5807 5741 3 5741 5742 5808 3 5809 5808 5742 3 5742 5743 5809 3 5810 5809 5743 3 5743 5744 5810 3 5811 5810 5744 3 5744 5745 5811 3 5812 5811 5745 3 5745 5746 5812 3 5813 5812 5746 3 5746 5747 5813 3 5814 5813 5747 3 5747 5748 5814 3 5815 5814 5748 3 5748 5749 5815 3 5816 5815 5749 3 5749 5750 5816 3 5817 5816 5750 3 5750 5751 5817 3 5818 5817 5751 3 5751 5752 5818 3 5819 5818 5752 3 5752 5753 5819 3 5820 5819 5753 3 5753 5754 5820 3 5821 5820 5754 3 5754 5755 5821 3 5822 5821 5755 3 5755 5756 5823 3 5755 5823 5822 3 5756 5757 5824 3 5756 5824 5823 3 5757 5758 5825 3 5757 5825 5824 3 5758 5759 5826 3 5758 5826 5825 3 5759 5760 5827 3 5759 5827 5826 3 5760 5761 5828 3 5760 5828 5827 3 5761 5762 5829 3 5761 5829 5828 3 5762 5763 5830 3 5762 5830 5829 3 5763 5764 5831 3 5763 5831 5830 3 5764 5765 5832 3 5764 5832 5831 3 5765 5766 5833 3 5765 5833 5832 3 5766 5767 5834 3 5766 5834 5833 3 5767 5768 5835 3 5767 5835 5834 3 5768 5769 5836 3 5768 5836 5835 3 5769 5770 5837 3 5769 5837 5836 3 5770 5771 5838 3 5770 5838 5837 3 5771 5772 5839 3 5771 5839 5838 3 5772 5773 5840 3 5772 5840 5839 3 5773 5774 5841 3 5773 5841 5840 3 5774 5775 5842 3 5774 5842 5841 3 5775 5776 5843 3 5775 5843 5842 3 5776 5777 5844 3 5776 5844 5843 3 5777 5778 5845 3 5777 5845 5844 3 5778 5779 5846 3 5778 5846 5845 3 5779 5780 5847 3 5779 5847 5846 3 5780 5781 5848 3 5780 5848 5847 3 5781 5782 6079 3 5781 6079 6078 3 5781 6078 6147 3 5781 6147 5848 3 5782 5783 6080 3 5782 6080 6079 3 5784 5785 6084 3 5784 6084 6083 3 5785 5786 6085 3 5785 6085 6084 3 5786 5787 6086 3 5786 6086 6085 3 5787 5788 6087 3 5787 6087 6086 3 5788 5789 6088 3 5788 6088 6087 3 5789 5790 6089 3 5789 6089 6088 3 5790 5791 6090 3 5790 6090 6089 3 5791 5792 6091 3 5791 6091 6090 3 5792 
5793 6092 3 5792 6092 6091 3 5793 5794 6093 3 5793 6093 6092 3 5794 5795 6094 3 5794 6094 6093 3 5795 5796 6095 3 5795 6095 6094 3 5796 5797 6096 3 5796 6096 6095 3 5797 5798 6097 3 5797 6097 6096 3 5798 5799 6098 3 5798 6098 6097 3 5799 5800 6099 3 5799 6099 6098 3 5800 5801 6100 3 5800 6100 6099 3 5801 5802 6101 3 5801 6101 6100 3 5802 5803 6101 3 6102 6101 5803 3 5803 5804 6102 3 6103 6102 5804 3 5804 5805 6103 3 6104 6103 5805 3 5805 5806 6104 3 6105 6104 5806 3 5806 5807 6105 3 6106 6105 5807 3 5807 5808 6106 3 6107 6106 5808 3 5808 5809 6107 3 6108 6107 5809 3 5809 5810 6108 3 6109 6108 5810 3 5810 5811 6109 3 6110 6109 5811 3 5811 5812 6110 3 6111 6110 5812 3 5812 5813 6111 3 6112 6111 5813 3 5813 5814 6112 3 6113 6112 5814 3 5814 5815 6113 3 6114 6113 5815 3 5815 5816 6114 3 6115 6114 5816 3 5816 5817 6115 3 6116 6115 5817 3 5817 5818 6116 3 6117 6116 5818 3 5818 5819 6117 3 6118 6117 5819 3 5819 5820 6118 3 6119 6118 5820 3 5820 5821 6119 3 6120 6119 5821 3 5821 5822 6120 3 6121 6120 5822 3 5822 5823 6121 3 6122 6121 5823 3 5823 5824 6122 3 6123 6122 5824 3 5824 5825 6123 3 6124 6123 5825 3 5825 5826 6124 3 6125 6124 5826 3 5826 5827 6125 3 6126 6125 5827 3 5827 5828 6126 3 6127 6126 5828 3 5828 5829 6127 3 6128 6127 5829 3 5829 5830 6128 3 6129 6128 5830 3 5830 5831 6129 3 6130 6129 5831 3 5831 5832 6130 3 6131 6130 5832 3 5832 5833 6131 3 6132 6131 5833 3 5833 5834 6132 3 6133 6132 5834 3 5834 5835 6133 3 6134 6133 5835 3 5835 5836 6134 3 6135 6134 5836 3 5836 5837 6135 3 6136 6135 5837 3 5837 5838 6136 3 6137 6136 5838 3 5838 5839 6137 3 6138 6137 5839 3 5839 5840 6138 3 6139 6138 5840 3 5840 5841 6139 3 6140 6139 5841 3 5841 5842 6140 3 6141 6140 5842 3 5842 5843 6141 3 6142 6141 5843 3 5843 5844 6142 3 6143 6142 5844 3 5844 5845 6143 3 6144 6143 5845 3 5845 5846 6144 3 6145 6144 5846 3 5846 5847 6145 3 6146 6145 5847 3 5847 5848 6146 3 6147 6146 5848 3 5849 5850 5919 3 5849 5919 5918 3 5849 6150 5850 3 6151 5850 6150 3 5849 5918 6220 3 5849 6220 6150 
3 5850 6151 5851 3 6152 5851 6151 3 5851 6152 5852 3 6153 5852 6152 3 5852 6153 5853 3 6154 5853 6153 3 5853 6154 5854 3 6155 5854 6154 3 5854 6155 5855 3 6156 5855 6155 3 5855 6156 5856 3 6157 5856 6156 3 5856 6157 5857 3 6158 5857 6157 3 5857 6158 5858 3 6159 5858 6158 3 5858 6159 5859 3 6160 5859 6159 3 5859 6160 5860 3 6161 5860 6160 3 5860 6161 5861 3 6162 5861 6161 3 5861 6162 5862 3 6163 5862 6162 3 5862 6163 5863 3 6164 5863 6163 3 5863 6164 5864 3 6165 5864 6164 3 5864 6165 5865 3 6166 5865 6165 3 5865 6166 5866 3 6167 5866 6166 3 5866 6167 5867 3 6168 5867 6167 3 5867 6168 6169 3 5867 6169 5868 3 5868 6169 6170 3 5868 6170 5869 3 5869 6170 6171 3 5869 6171 5870 3 5870 6171 6172 3 5870 6172 5871 3 5871 6172 6173 3 5871 6173 5872 3 5872 6173 6174 3 5872 6174 5873 3 5873 6174 6175 3 5873 6175 5874 3 5874 6175 6176 3 5874 6176 5875 3 5875 6176 6177 3 5875 6177 5876 3 5876 6177 6178 3 5876 6178 5877 3 5877 6178 6179 3 5877 6179 5878 3 5878 6179 6180 3 5878 6180 5879 3 5879 6180 6181 3 5879 6181 5880 3 5880 6181 6182 3 5880 6182 5881 3 5881 6182 6183 3 5881 6183 5882 3 5882 6183 6184 3 5882 6184 5883 3 5883 6184 6185 3 5883 6185 5884 3 5884 6185 6186 3 5884 6186 5885 3 5885 6186 6187 3 5885 6187 5886 3 5886 6187 6188 3 5886 6188 5887 3 5887 6188 6189 3 5887 6189 5888 3 5888 6189 6190 3 5888 6190 5889 3 5889 6190 6191 3 5889 6191 5890 3 5890 6191 6192 3 5890 6192 5891 3 5891 6192 6193 3 5891 6193 5892 3 5892 6193 6194 3 5892 6194 5893 3 5893 6194 6195 3 5893 6195 5894 3 5894 6195 6196 3 5894 6196 5895 3 5895 6196 6197 3 5895 6197 5896 3 5896 6197 6198 3 5896 6198 5897 3 5897 6198 6199 3 5897 6199 5898 3 5898 6199 6200 3 5898 6200 5899 3 5899 6200 6201 3 5899 6201 5900 3 5900 6201 6202 3 5900 6202 5901 3 5901 6202 6203 3 5901 6203 5902 3 5902 6203 6204 3 5902 6204 5903 3 5903 6204 6205 3 5903 6205 5904 3 5904 6205 6206 3 5904 6206 5905 3 5905 6206 6207 3 5905 6207 5906 3 5906 6207 6208 3 5906 6208 5907 3 5907 6208 6209 3 5907 6209 5908 3 5908 6209 6210 3 5908 
6210 5909 3 5909 6210 6211 3 5909 6211 5910 3 5910 6211 6212 3 5910 6212 5911 3 5911 6212 6213 3 5911 6213 5912 3 5912 6213 6214 3 5912 6214 5913 3 5913 6214 6215 3 5913 6215 5914 3 5914 5915 5920 3 5921 5920 5915 3 5914 6215 6216 3 5914 6216 5915 3 5915 6216 5921 3 6221 5921 6216 3 5916 5917 5924 3 5925 5924 5917 3 5916 6218 5917 3 6219 5917 6218 3 5916 5924 6218 3 6225 6218 5924 3 5917 6219 5918 3 6220 5918 6219 3 5921 6221 5922 3 6222 5922 6221 3 5922 5923 5926 3 5927 5926 5923 3 5922 6222 5923 3 6223 5923 6222 3 5923 6223 5927 3 6226 5927 6223 3 5924 5929 6225 3 6229 6225 5929 3 5927 6226 5930 3 6230 5930 6226 3 5928 5929 5932 3 5933 5932 5929 3 5928 6228 5929 3 6229 5929 6228 3 5928 5932 6228 3 6232 6228 5932 3 5930 5931 5934 3 5935 5934 5931 3 5930 6230 5931 3 6231 5931 6230 3 5931 6231 5935 3 6233 5935 6231 3 5932 5936 6232 3 6234 6232 5936 3 5935 6233 5937 3 6235 5937 6233 3 5936 5938 6234 3 6236 6234 5938 3 5937 6235 5939 3 6237 5939 6235 3 5938 5940 6236 3 6238 6236 5940 3 5939 6237 5941 3 6239 5941 6237 3 5940 5942 6238 3 6240 6238 5942 3 5941 6239 5943 3 6241 5943 6239 3 5942 5944 6240 3 6242 6240 5944 3 5943 6241 5945 3 6243 5945 6241 3 5944 5946 6244 3 5944 6244 6242 3 5945 6243 5947 3 6245 5947 6243 3 5946 5948 6246 3 5946 6246 6244 3 5947 6245 5949 3 6247 5949 6245 3 5948 5950 6248 3 5948 6248 6246 3 5949 6247 5951 3 6249 5951 6247 3 5950 5952 6250 3 5950 6250 6248 3 5951 6249 5953 3 6251 5953 6249 3 5952 5954 6252 3 5952 6252 6250 3 5953 6251 5955 3 6253 5955 6251 3 5954 5956 6254 3 5954 6254 6252 3 5955 6253 5957 3 6255 5957 6253 3 5956 5958 6256 3 5956 6256 6254 3 5957 6255 5959 3 6257 5959 6255 3 5958 5960 6258 3 5958 6258 6256 3 5959 6257 5961 3 6259 5961 6257 3 5960 5962 6260 3 5960 6260 6258 3 5961 6259 5963 3 6261 5963 6259 3 5962 5964 6262 3 5962 6262 6260 3 5963 6261 5965 3 6263 5965 6261 3 5964 5966 6264 3 5964 6264 6262 3 5965 6263 6265 3 5965 6265 5967 3 5966 5968 6266 3 5966 6266 6264 3 5967 6265 6267 3 5967 6267 5969 3 5968 5970 6268 
3 5968 6268 6266 3 5969 6267 6269 3 5969 6269 5971 3 5970 5972 6270 3 5970 6270 6268 3 5971 6269 6271 3 5971 6271 5973 3 5972 5974 6272 3 5972 6272 6270 3 5973 6271 6273 3 5973 6273 5975 3 5974 5976 6274 3 5974 6274 6272 3 5975 6273 6275 3 5975 6275 5977 3 5976 5978 6276 3 5976 6276 6274 3 5977 6275 6277 3 5977 6277 5979 3 5978 5980 6278 3 5978 6278 6276 3 5979 6277 6279 3 5979 6279 5981 3 5980 5982 6280 3 5980 6280 6278 3 5981 6279 6281 3 5981 6281 5983 3 5982 5984 6282 3 5982 6282 6280 3 5983 6281 6283 3 5983 6283 5985 3 5984 5986 6284 3 5984 6284 6282 3 5985 6283 6285 3 5985 6285 5987 3 5986 5988 6286 3 5986 6286 6284 3 5987 6285 6287 3 5987 6287 5989 3 5988 5990 6288 3 5988 6288 6286 3 5989 6287 6289 3 5989 6289 5991 3 5990 5992 6290 3 5990 6290 6288 3 5991 6289 6291 3 5991 6291 5993 3 5992 5994 6290 3 6292 6290 5994 3 5993 6291 6293 3 5993 6293 5995 3 5994 5996 6292 3 6294 6292 5996 3 5995 6293 6295 3 5995 6295 5997 3 5996 5998 6294 3 6296 6294 5998 3 5997 6295 6297 3 5997 6297 5999 3 5998 6000 6296 3 6298 6296 6000 3 5999 6297 6299 3 5999 6299 6001 3 6000 6002 6298 3 6300 6298 6002 3 6001 6299 6301 3 6001 6301 6003 3 6002 6004 6300 3 6302 6300 6004 3 6003 6301 6303 3 6003 6303 6005 3 6004 6006 6302 3 6304 6302 6006 3 6005 6303 6305 3 6005 6305 6007 3 6006 6008 6304 3 6306 6304 6008 3 6007 6305 6307 3 6007 6307 6009 3 6008 6010 6306 3 6308 6306 6010 3 6009 6307 6309 3 6009 6309 6011 3 6010 6012 6308 3 6310 6308 6012 3 6011 6309 6311 3 6011 6311 6013 3 6012 6014 6310 3 6312 6310 6014 3 6013 6311 6015 3 6313 6015 6311 3 6014 6016 6312 3 6314 6312 6016 3 6015 6313 6017 3 6315 6017 6313 3 6016 6018 6314 3 6316 6314 6018 3 6017 6315 6019 3 6317 6019 6315 3 6018 6020 6316 3 6318 6316 6020 3 6019 6317 6021 3 6319 6021 6317 3 6020 6022 6318 3 6320 6318 6022 3 6021 6319 6023 3 6321 6023 6319 3 6022 6024 6320 3 6322 6320 6024 3 6023 6321 6025 3 6323 6025 6321 3 6024 6026 6322 3 6324 6322 6026 3 6025 6323 6027 3 6325 6027 6323 3 6026 6028 6324 3 6326 6324 6028 3 6027 
6325 6029 3 6327 6029 6325 3 6028 6030 6326 3 6328 6326 6030 3 6029 6327 6031 3 6329 6031 6327 3 6030 6032 6328 3 6330 6328 6032 3 6031 6329 6033 3 6331 6033 6329 3 6032 6034 6330 3 6332 6330 6034 3 6033 6331 6035 3 6333 6035 6331 3 6034 6036 6332 3 6334 6332 6036 3 6035 6333 6037 3 6335 6037 6333 3 6036 6038 6334 3 6336 6334 6038 3 6037 6335 6039 3 6337 6039 6335 3 6038 6040 6338 3 6038 6338 6336 3 6039 6337 6041 3 6339 6041 6337 3 6040 6042 6340 3 6040 6340 6338 3 6041 6339 6043 3 6341 6043 6339 3 6042 6044 6342 3 6042 6342 6340 3 6043 6341 6045 3 6343 6045 6341 3 6044 6046 6344 3 6044 6344 6342 3 6045 6343 6047 3 6345 6047 6343 3 6046 6048 6346 3 6046 6346 6344 3 6047 6345 6049 3 6347 6049 6345 3 6048 6050 6348 3 6048 6348 6346 3 6049 6347 6051 3 6349 6051 6347 3 6050 6052 6350 3 6050 6350 6348 3 6051 6349 6053 3 6351 6053 6349 3 6052 6054 6352 3 6052 6352 6350 3 6053 6351 6055 3 6353 6055 6351 3 6054 6056 6354 3 6054 6354 6352 3 6055 6353 6057 3 6355 6057 6353 3 6056 6058 6356 3 6056 6356 6354 3 6057 6355 6059 3 6357 6059 6355 3 6058 6060 6358 3 6058 6358 6356 3 6059 6357 6061 3 6359 6061 6357 3 6060 6062 6360 3 6060 6360 6358 3 6061 6359 6065 3 6361 6065 6359 3 6062 6063 6067 3 6062 6067 6066 3 6062 6066 6362 3 6062 6362 6360 3 6064 6065 6069 3 6064 6069 6068 3 6065 6361 6365 3 6065 6365 6069 3 6066 6067 6363 3 6066 6363 6362 3 6067 6070 6367 3 6067 6367 6363 3 6068 6069 6365 3 6068 6365 6364 3 6068 6364 6368 3 6068 6368 6073 3 6070 6071 6075 3 6070 6075 6074 3 6070 6074 6370 3 6070 6370 6367 3 6072 6073 6081 3 6072 6081 6080 3 6073 6368 6375 3 6073 6375 6081 3 6074 6075 6371 3 6074 6371 6370 3 6075 6076 6372 3 6075 6372 6371 3 6076 6077 6083 3 6076 6083 6082 3 6076 6082 6377 3 6076 6377 6372 3 6078 6079 6148 3 6078 6148 6147 3 6079 6080 6374 3 6079 6374 6373 3 6079 6373 6443 3 6079 6443 6148 3 6080 6081 6375 3 6080 6375 6374 3 6082 6083 6378 3 6082 6378 6377 3 6083 6084 6379 3 6083 6379 6378 3 6084 6085 6380 3 6084 6380 6379 3 6085 6086 6381 3 6085 6381 6380 
3 6086 6087 6382 3 6086 6382 6381 3 6087 6088 6382 3 6383 6382 6088 3 6088 6089 6383 3 6384 6383 6089 3 6089 6090 6384 3 6385 6384 6090 3 6090 6091 6385 3 6386 6385 6091 3 6091 6092 6386 3 6387 6386 6092 3 6092 6093 6387 3 6388 6387 6093 3 6093 6094 6388 3 6389 6388 6094 3 6094 6095 6389 3 6390 6389 6095 3 6095 6096 6390 3 6391 6390 6096 3 6096 6097 6391 3 6392 6391 6097 3 6097 6098 6392 3 6393 6392 6098 3 6098 6099 6393 3 6394 6393 6099 3 6099 6100 6394 3 6395 6394 6100 3 6100 6101 6395 3 6396 6395 6101 3 6101 6102 6396 3 6397 6396 6102 3 6102 6103 6397 3 6398 6397 6103 3 6103 6104 6398 3 6399 6398 6104 3 6104 6105 6399 3 6400 6399 6105 3 6105 6106 6400 3 6401 6400 6106 3 6106 6107 6401 3 6402 6401 6107 3 6107 6108 6402 3 6403 6402 6108 3 6108 6109 6403 3 6404 6403 6109 3 6109 6110 6404 3 6405 6404 6110 3 6110 6111 6405 3 6406 6405 6111 3 6111 6112 6406 3 6407 6406 6112 3 6112 6113 6407 3 6408 6407 6113 3 6113 6114 6408 3 6409 6408 6114 3 6114 6115 6409 3 6410 6409 6115 3 6115 6116 6410 3 6411 6410 6116 3 6116 6117 6411 3 6412 6411 6117 3 6117 6118 6412 3 6413 6412 6118 3 6118 6119 6413 3 6414 6413 6119 3 6119 6120 6414 3 6415 6414 6120 3 6120 6121 6415 3 6416 6415 6121 3 6121 6122 6416 3 6417 6416 6122 3 6122 6123 6417 3 6418 6417 6123 3 6123 6124 6418 3 6419 6418 6124 3 6124 6125 6419 3 6420 6419 6125 3 6125 6126 6420 3 6421 6420 6126 3 6126 6127 6421 3 6422 6421 6127 3 6127 6128 6422 3 6423 6422 6128 3 6128 6129 6423 3 6424 6423 6129 3 6129 6130 6424 3 6425 6424 6130 3 6130 6131 6425 3 6426 6425 6131 3 6131 6132 6426 3 6427 6426 6132 3 6132 6133 6427 3 6428 6427 6133 3 6133 6134 6428 3 6429 6428 6134 3 6134 6135 6429 3 6430 6429 6135 3 6135 6136 6431 3 6135 6431 6430 3 6136 6137 6432 3 6136 6432 6431 3 6137 6138 6433 3 6137 6433 6432 3 6138 6139 6434 3 6138 6434 6433 3 6139 6140 6435 3 6139 6435 6434 3 6140 6141 6436 3 6140 6436 6435 3 6141 6142 6437 3 6141 6437 6436 3 6142 6143 6438 3 6142 6438 6437 3 6143 6144 6439 3 6143 6439 6438 3 6144 6145 6440 3 6144 
6440 6439 3 6145 6146 6441 3 6145 6441 6440 3 6146 6147 6442 3 6146 6442 6441 3 6147 6148 6443 3 6147 6443 6442 3 6149 6150 6220 3 6149 6220 6219 3 6149 6445 6150 3 6446 6150 6445 3 6149 6219 6445 3 6515 6445 6219 3 6150 6446 6151 3 6447 6151 6446 3 6151 6447 6152 3 6448 6152 6447 3 6152 6448 6153 3 6449 6153 6448 3 6153 6449 6154 3 6450 6154 6449 3 6154 6450 6155 3 6451 6155 6450 3 6155 6451 6156 3 6452 6156 6451 3 6156 6452 6157 3 6453 6157 6452 3 6157 6453 6158 3 6454 6158 6453 3 6158 6454 6159 3 6455 6159 6454 3 6159 6455 6160 3 6456 6160 6455 3 6160 6456 6457 3 6160 6457 6161 3 6161 6457 6458 3 6161 6458 6162 3 6162 6458 6459 3 6162 6459 6163 3 6163 6459 6460 3 6163 6460 6164 3 6164 6460 6461 3 6164 6461 6165 3 6165 6461 6462 3 6165 6462 6166 3 6166 6462 6463 3 6166 6463 6167 3 6167 6463 6464 3 6167 6464 6168 3 6168 6464 6465 3 6168 6465 6169 3 6169 6465 6466 3 6169 6466 6170 3 6170 6466 6467 3 6170 6467 6171 3 6171 6467 6468 3 6171 6468 6172 3 6172 6468 6469 3 6172 6469 6173 3 6173 6469 6470 3 6173 6470 6174 3 6174 6470 6471 3 6174 6471 6175 3 6175 6471 6472 3 6175 6472 6176 3 6176 6472 6473 3 6176 6473 6177 3 6177 6473 6474 3 6177 6474 6178 3 6178 6474 6475 3 6178 6475 6179 3 6179 6475 6476 3 6179 6476 6180 3 6180 6476 6477 3 6180 6477 6181 3 6181 6477 6478 3 6181 6478 6182 3 6182 6478 6479 3 6182 6479 6183 3 6183 6479 6480 3 6183 6480 6184 3 6184 6480 6481 3 6184 6481 6185 3 6185 6481 6482 3 6185 6482 6186 3 6186 6482 6483 3 6186 6483 6187 3 6187 6483 6484 3 6187 6484 6188 3 6188 6484 6485 3 6188 6485 6189 3 6189 6485 6486 3 6189 6486 6190 3 6190 6486 6487 3 6190 6487 6191 3 6191 6487 6488 3 6191 6488 6192 3 6192 6488 6489 3 6192 6489 6193 3 6193 6489 6490 3 6193 6490 6194 3 6194 6490 6491 3 6194 6491 6195 3 6195 6491 6492 3 6195 6492 6196 3 6196 6492 6493 3 6196 6493 6197 3 6197 6493 6494 3 6197 6494 6198 3 6198 6494 6495 3 6198 6495 6199 3 6199 6495 6496 3 6199 6496 6200 3 6200 6496 6497 3 6200 6497 6201 3 6201 6497 6498 3 6201 6498 6202 3 6202 6498 6499 
3 6202 6499 6203 3 6203 6499 6500 3 6203 6500 6204 3 6204 6500 6501 3 6204 6501 6205 3 6205 6501 6502 3 6205 6502 6206 3 6206 6502 6503 3 6206 6503 6207 3 6207 6503 6504 3 6207 6504 6208 3 6208 6504 6505 3 6208 6505 6209 3 6209 6505 6210 3 6506 6210 6505 3 6210 6506 6211 3 6507 6211 6506 3 6211 6507 6212 3 6508 6212 6507 3 6212 6508 6213 3 6509 6213 6508 3 6213 6509 6214 3 6510 6214 6509 3 6214 6510 6215 3 6511 6215 6510 3 6215 6511 6216 3 6512 6216 6511 3 6216 6217 6221 3 6222 6221 6217 3 6216 6512 6217 3 6513 6217 6512 3 6217 6513 6222 3 6516 6222 6513 3 6218 6514 6219 3 6515 6219 6514 3 6218 6225 6514 3 6519 6514 6225 3 6222 6516 6223 3 6517 6223 6516 3 6223 6517 6226 3 6520 6226 6517 3 6224 6225 6228 3 6229 6228 6225 3 6224 6518 6225 3 6519 6225 6518 3 6224 6228 6518 3 6522 6518 6228 3 6226 6227 6230 3 6231 6230 6227 3 6226 6520 6227 3 6521 6227 6520 3 6227 6521 6231 3 6523 6231 6521 3 6228 6232 6522 3 6524 6522 6232 3 6231 6523 6233 3 6525 6233 6523 3 6232 6234 6526 3 6232 6526 6524 3 6233 6525 6235 3 6527 6235 6525 3 6234 6236 6528 3 6234 6528 6526 3 6235 6527 6237 3 6529 6237 6527 3 6236 6238 6530 3 6236 6530 6528 3 6237 6529 6239 3 6531 6239 6529 3 6238 6240 6532 3 6238 6532 6530 3 6239 6531 6241 3 6533 6241 6531 3 6240 6242 6534 3 6240 6534 6532 3 6241 6533 6243 3 6535 6243 6533 3 6242 6244 6536 3 6242 6536 6534 3 6243 6535 6245 3 6537 6245 6535 3 6244 6246 6538 3 6244 6538 6536 3 6245 6537 6247 3 6539 6247 6537 3 6246 6248 6540 3 6246 6540 6538 3 6247 6539 6249 3 6541 6249 6539 3 6248 6250 6542 3 6248 6542 6540 3 6249 6541 6251 3 6543 6251 6541 3 6250 6252 6544 3 6250 6544 6542 3 6251 6543 6253 3 6545 6253 6543 3 6252 6254 6546 3 6252 6546 6544 3 6253 6545 6255 3 6547 6255 6545 3 6254 6256 6548 3 6254 6548 6546 3 6255 6547 6257 3 6549 6257 6547 3 6256 6258 6550 3 6256 6550 6548 3 6257 6549 6259 3 6551 6259 6549 3 6258 6260 6552 3 6258 6552 6550 3 6259 6551 6261 3 6553 6261 6551 3 6260 6262 6554 3 6260 6554 6552 3 6261 6553 6555 3 6261 6555 6263 3 6262 
6264 6556 3 6262 6556 6554 3 6263 6555 6557 3 6263 6557 6265 3 6264 6266 6558 3 6264 6558 6556 3 6265 6557 6559 3 6265 6559 6267 3 6266 6268 6560 3 6266 6560 6558 3 6267 6559 6561 3 6267 6561 6269 3 6268 6270 6562 3 6268 6562 6560 3 6269 6561 6563 3 6269 6563 6271 3 6270 6272 6564 3 6270 6564 6562 3 6271 6563 6565 3 6271 6565 6273 3 6272 6274 6566 3 6272 6566 6564 3 6273 6565 6567 3 6273 6567 6275 3 6274 6276 6568 3 6274 6568 6566 3 6275 6567 6569 3 6275 6569 6277 3 6276 6278 6570 3 6276 6570 6568 3 6277 6569 6571 3 6277 6571 6279 3 6278 6280 6572 3 6278 6572 6570 3 6279 6571 6573 3 6279 6573 6281 3 6280 6282 6572 3 6574 6572 6282 3 6281 6573 6575 3 6281 6575 6283 3 6282 6284 6574 3 6576 6574 6284 3 6283 6575 6577 3 6283 6577 6285 3 6284 6286 6576 3 6578 6576 6286 3 6285 6577 6579 3 6285 6579 6287 3 6286 6288 6578 3 6580 6578 6288 3 6287 6579 6581 3 6287 6581 6289 3 6288 6290 6580 3 6582 6580 6290 3 6289 6581 6583 3 6289 6583 6291 3 6290 6292 6582 3 6584 6582 6292 3 6291 6583 6585 3 6291 6585 6293 3 6292 6294 6584 3 6586 6584 6294 3 6293 6585 6587 3 6293 6587 6295 3 6294 6296 6586 3 6588 6586 6296 3 6295 6587 6589 3 6295 6589 6297 3 6296 6298 6588 3 6590 6588 6298 3 6297 6589 6591 3 6297 6591 6299 3 6298 6300 6590 3 6592 6590 6300 3 6299 6591 6593 3 6299 6593 6301 3 6300 6302 6592 3 6594 6592 6302 3 6301 6593 6595 3 6301 6595 6303 3 6302 6304 6594 3 6596 6594 6304 3 6303 6595 6597 3 6303 6597 6305 3 6304 6306 6596 3 6598 6596 6306 3 6305 6597 6599 3 6305 6599 6307 3 6306 6308 6598 3 6600 6598 6308 3 6307 6599 6601 3 6307 6601 6309 3 6308 6310 6600 3 6602 6600 6310 3 6309 6601 6603 3 6309 6603 6311 3 6310 6312 6602 3 6604 6602 6312 3 6311 6603 6313 3 6605 6313 6603 3 6312 6314 6604 3 6606 6604 6314 3 6313 6605 6315 3 6607 6315 6605 3 6314 6316 6606 3 6608 6606 6316 3 6315 6607 6317 3 6609 6317 6607 3 6316 6318 6608 3 6610 6608 6318 3 6317 6609 6319 3 6611 6319 6609 3 6318 6320 6610 3 6612 6610 6320 3 6319 6611 6321 3 6613 6321 6611 3 6320 6322 6612 3 6614 6612 6322 
3 6321 6613 6323 3 6615 6323 6613 3 6322 6324 6614 3 6616 6614 6324 3 6323 6615 6325 3 6617 6325 6615 3 6324 6326 6616 3 6618 6616 6326 3 6325 6617 6327 3 6619 6327 6617 3 6326 6328 6618 3 6620 6618 6328 3 6327 6619 6329 3 6621 6329 6619 3 6328 6330 6620 3 6622 6620 6330 3 6329 6621 6331 3 6623 6331 6621 3 6330 6332 6624 3 6330 6624 6622 3 6331 6623 6333 3 6625 6333 6623 3 6332 6334 6626 3 6332 6626 6624 3 6333 6625 6335 3 6627 6335 6625 3 6334 6336 6628 3 6334 6628 6626 3 6335 6627 6337 3 6629 6337 6627 3 6336 6338 6630 3 6336 6630 6628 3 6337 6629 6339 3 6631 6339 6629 3 6338 6340 6632 3 6338 6632 6630 3 6339 6631 6341 3 6633 6341 6631 3 6340 6342 6634 3 6340 6634 6632 3 6341 6633 6343 3 6635 6343 6633 3 6342 6344 6636 3 6342 6636 6634 3 6343 6635 6345 3 6637 6345 6635 3 6344 6346 6638 3 6344 6638 6636 3 6345 6637 6347 3 6639 6347 6637 3 6346 6348 6640 3 6346 6640 6638 3 6347 6639 6349 3 6641 6349 6639 3 6348 6350 6642 3 6348 6642 6640 3 6349 6641 6351 3 6643 6351 6641 3 6350 6352 6644 3 6350 6644 6642 3 6351 6643 6353 3 6645 6353 6643 3 6352 6354 6646 3 6352 6646 6644 3 6353 6645 6355 3 6647 6355 6645 3 6354 6356 6648 3 6354 6648 6646 3 6355 6647 6357 3 6649 6357 6647 3 6356 6358 6650 3 6356 6650 6648 3 6357 6649 6359 3 6651 6359 6649 3 6358 6360 6652 3 6358 6652 6650 3 6359 6651 6361 3 6653 6361 6651 3 6360 6362 6654 3 6360 6654 6652 3 6361 6653 6655 3 6361 6655 6365 3 6362 6363 6367 3 6362 6367 6366 3 6362 6366 6656 3 6362 6656 6654 3 6364 6365 6369 3 6364 6369 6368 3 6365 6655 6659 3 6365 6659 6369 3 6366 6367 6657 3 6366 6657 6656 3 6367 6370 6660 3 6367 6660 6657 3 6368 6369 6659 3 6368 6659 6658 3 6368 6658 6663 3 6368 6663 6375 3 6370 6371 6661 3 6370 6661 6660 3 6371 6372 6377 3 6371 6377 6376 3 6371 6376 6664 3 6371 6664 6661 3 6373 6374 6444 3 6373 6444 6443 3 6374 6375 6663 3 6374 6663 6662 3 6374 6662 6732 3 6374 6732 6444 3 6376 6377 6665 3 6376 6665 6664 3 6377 6378 6666 3 6377 6666 6665 3 6378 6379 6666 3 6667 6666 6379 3 6379 6380 6667 3 6668 
6667 6380 3 6380 6381 6668 3 6669 6668 6381 3 6381 6382 6669 3 6670 6669 6382 3 6382 6383 6670 3 6671 6670 6383 3 6383 6384 6671 3 6672 6671 6384 3 6384 6385 6672 3 6673 6672 6385 3 6385 6386 6673 3 6674 6673 6386 3 6386 6387 6674 3 6675 6674 6387 3 6387 6388 6675 3 6676 6675 6388 3 6388 6389 6676 3 6677 6676 6389 3 6389 6390 6677 3 6678 6677 6390 3 6390 6391 6678 3 6679 6678 6391 3 6391 6392 6679 3 6680 6679 6392 3 6392 6393 6680 3 6681 6680 6393 3 6393 6394 6681 3 6682 6681 6394 3 6394 6395 6682 3 6683 6682 6395 3 6395 6396 6683 3 6684 6683 6396 3 6396 6397 6684 3 6685 6684 6397 3 6397 6398 6685 3 6686 6685 6398 3 6398 6399 6686 3 6687 6686 6399 3 6399 6400 6687 3 6688 6687 6400 3 6400 6401 6688 3 6689 6688 6401 3 6401 6402 6689 3 6690 6689 6402 3 6402 6403 6690 3 6691 6690 6403 3 6403 6404 6691 3 6692 6691 6404 3 6404 6405 6692 3 6693 6692 6405 3 6405 6406 6693 3 6694 6693 6406 3 6406 6407 6694 3 6695 6694 6407 3 6407 6408 6695 3 6696 6695 6408 3 6408 6409 6696 3 6697 6696 6409 3 6409 6410 6697 3 6698 6697 6410 3 6410 6411 6698 3 6699 6698 6411 3 6411 6412 6699 3 6700 6699 6412 3 6412 6413 6700 3 6701 6700 6413 3 6413 6414 6701 3 6702 6701 6414 3 6414 6415 6702 3 6703 6702 6415 3 6415 6416 6703 3 6704 6703 6416 3 6416 6417 6704 3 6705 6704 6417 3 6417 6418 6705 3 6706 6705 6418 3 6418 6419 6706 3 6707 6706 6419 3 6419 6420 6707 3 6708 6707 6420 3 6420 6421 6708 3 6709 6708 6421 3 6421 6422 6709 3 6710 6709 6422 3 6422 6423 6710 3 6711 6710 6423 3 6423 6424 6711 3 6712 6711 6424 3 6424 6425 6712 3 6713 6712 6425 3 6425 6426 6713 3 6714 6713 6426 3 6426 6427 6714 3 6715 6714 6427 3 6427 6428 6715 3 6716 6715 6428 3 6428 6429 6717 3 6428 6717 6716 3 6429 6430 6718 3 6429 6718 6717 3 6430 6431 6719 3 6430 6719 6718 3 6431 6432 6720 3 6431 6720 6719 3 6432 6433 6721 3 6432 6721 6720 3 6433 6434 6722 3 6433 6722 6721 3 6434 6435 6723 3 6434 6723 6722 3 6435 6436 6724 3 6435 6724 6723 3 6436 6437 6725 3 6436 6725 6724 3 6437 6438 6726 3 6437 6726 6725 3 6438 6439 6727 
3 6438 6727 6726 3 6439 6440 6728 3 6439 6728 6727 3 6440 6441 6729 3 6440 6729 6728 3 6441 6442 6730 3 6441 6730 6729 3 6442 6443 6731 3 6442 6731 6730 3 6443 6444 6732 3 6443 6732 6731 3 6445 6733 6446 3 6734 6446 6733 3 6445 6515 6733 3 6803 6733 6515 3 6446 6734 6447 3 6735 6447 6734 3 6447 6735 6448 3 6736 6448 6735 3 6448 6736 6449 3 6737 6449 6736 3 6449 6737 6450 3 6738 6450 6737 3 6450 6738 6451 3 6739 6451 6738 3 6451 6739 6452 3 6740 6452 6739 3 6452 6740 6453 3 6741 6453 6740 3 6453 6741 6454 3 6742 6454 6741 3 6454 6742 6455 3 6743 6455 6742 3 6455 6743 6456 3 6744 6456 6743 3 6456 6744 6457 3 6745 6457 6744 3 6457 6745 6458 3 6746 6458 6745 3 6458 6746 6459 3 6747 6459 6746 3 6459 6747 6460 3 6748 6460 6747 3 6460 6748 6461 3 6749 6461 6748 3 6461 6749 6462 3 6750 6462 6749 3 6462 6750 6751 3 6462 6751 6463 3 6463 6751 6752 3 6463 6752 6464 3 6464 6752 6753 3 6464 6753 6465 3 6465 6753 6754 3 6465 6754 6466 3 6466 6754 6755 3 6466 6755 6467 3 6467 6755 6756 3 6467 6756 6468 3 6468 6756 6757 3 6468 6757 6469 3 6469 6757 6758 3 6469 6758 6470 3 6470 6758 6759 3 6470 6759 6471 3 6471 6759 6760 3 6471 6760 6472 3 6472 6760 6761 3 6472 6761 6473 3 6473 6761 6762 3 6473 6762 6474 3 6474 6762 6763 3 6474 6763 6475 3 6475 6763 6764 3 6475 6764 6476 3 6476 6764 6765 3 6476 6765 6477 3 6477 6765 6766 3 6477 6766 6478 3 6478 6766 6767 3 6478 6767 6479 3 6479 6767 6768 3 6479 6768 6480 3 6480 6768 6769 3 6480 6769 6481 3 6481 6769 6770 3 6481 6770 6482 3 6482 6770 6771 3 6482 6771 6483 3 6483 6771 6772 3 6483 6772 6484 3 6484 6772 6773 3 6484 6773 6485 3 6485 6773 6774 3 6485 6774 6486 3 6486 6774 6775 3 6486 6775 6487 3 6487 6775 6776 3 6487 6776 6488 3 6488 6776 6777 3 6488 6777 6489 3 6489 6777 6778 3 6489 6778 6490 3 6490 6778 6779 3 6490 6779 6491 3 6491 6779 6780 3 6491 6780 6492 3 6492 6780 6781 3 6492 6781 6493 3 6493 6781 6782 3 6493 6782 6494 3 6494 6782 6783 3 6494 6783 6495 3 6495 6783 6784 3 6495 6784 6496 3 6496 6784 6785 3 6496 6785 6497 3 6497 
6785 6786 3 6497 6786 6498 3 6498 6786 6787 3 6498 6787 6499 3 6499 6787 6788 3 6499 6788 6500 3 6500 6788 6789 3 6500 6789 6501 3 6501 6789 6790 3 6501 6790 6502 3 6502 6790 6791 3 6502 6791 6503 3 6503 6791 6792 3 6503 6792 6504 3 6504 6792 6793 3 6504 6793 6505 3 6505 6793 6794 3 6505 6794 6506 3 6506 6794 6795 3 6506 6795 6507 3 6507 6795 6796 3 6507 6796 6508 3 6508 6796 6797 3 6508 6797 6509 3 6509 6797 6798 3 6509 6798 6510 3 6510 6798 6799 3 6510 6799 6511 3 6511 6799 6800 3 6511 6800 6512 3 6512 6800 6513 3 6801 6513 6800 3 6513 6801 6516 3 6804 6516 6801 3 6514 6802 6515 3 6803 6515 6802 3 6514 6519 6802 3 6807 6802 6519 3 6516 6804 6517 3 6805 6517 6804 3 6517 6805 6520 3 6808 6520 6805 3 6518 6806 6519 3 6807 6519 6806 3 6518 6522 6806 3 6810 6806 6522 3 6520 6808 6521 3 6809 6521 6808 3 6521 6809 6523 3 6811 6523 6809 3 6522 6524 6810 3 6812 6810 6524 3 6523 6811 6525 3 6813 6525 6811 3 6524 6526 6812 3 6814 6812 6526 3 6525 6813 6527 3 6815 6527 6813 3 6526 6528 6816 3 6526 6816 6814 3 6527 6815 6529 3 6817 6529 6815 3 6528 6530 6818 3 6528 6818 6816 3 6529 6817 6531 3 6819 6531 6817 3 6530 6532 6820 3 6530 6820 6818 3 6531 6819 6533 3 6821 6533 6819 3 6532 6534 6822 3 6532 6822 6820 3 6533 6821 6535 3 6823 6535 6821 3 6534 6536 6824 3 6534 6824 6822 3 6535 6823 6537 3 6825 6537 6823 3 6536 6538 6826 3 6536 6826 6824 3 6537 6825 6539 3 6827 6539 6825 3 6538 6540 6828 3 6538 6828 6826 3 6539 6827 6541 3 6829 6541 6827 3 6540 6542 6830 3 6540 6830 6828 3 6541 6829 6543 3 6831 6543 6829 3 6542 6544 6832 3 6542 6832 6830 3 6543 6831 6545 3 6833 6545 6831 3 6544 6546 6834 3 6544 6834 6832 3 6545 6833 6547 3 6835 6547 6833 3 6546 6548 6836 3 6546 6836 6834 3 6547 6835 6549 3 6837 6549 6835 3 6548 6550 6838 3 6548 6838 6836 3 6549 6837 6551 3 6839 6551 6837 3 6550 6552 6840 3 6550 6840 6838 3 6551 6839 6553 3 6841 6553 6839 3 6552 6554 6842 3 6552 6842 6840 3 6553 6841 6555 3 6843 6555 6841 3 6554 6556 6844 3 6554 6844 6842 3 6555 6843 6557 3 6845 6557 6843 
3 6556 6558 6846 3 6556 6846 6844 3 6557 6845 6559 3 6847 6559 6845 3 6558 6560 6848 3 6558 6848 6846 3 6559 6847 6561 3 6849 6561 6847 3 6560 6562 6850 3 6560 6850 6848 3 6561 6849 6563 3 6851 6563 6849 3 6562 6564 6852 3 6562 6852 6850 3 6563 6851 6853 3 6563 6853 6565 3 6564 6566 6854 3 6564 6854 6852 3 6565 6853 6855 3 6565 6855 6567 3 6566 6568 6856 3 6566 6856 6854 3 6567 6855 6857 3 6567 6857 6569 3 6568 6570 6858 3 6568 6858 6856 3 6569 6857 6859 3 6569 6859 6571 3 6570 6572 6860 3 6570 6860 6858 3 6571 6859 6861 3 6571 6861 6573 3 6572 6574 6862 3 6572 6862 6860 3 6573 6861 6863 3 6573 6863 6575 3 6574 6576 6864 3 6574 6864 6862 3 6575 6863 6865 3 6575 6865 6577 3 6576 6578 6864 3 6866 6864 6578 3 6577 6865 6867 3 6577 6867 6579 3 6578 6580 6866 3 6868 6866 6580 3 6579 6867 6869 3 6579 6869 6581 3 6580 6582 6868 3 6870 6868 6582 3 6581 6869 6871 3 6581 6871 6583 3 6582 6584 6870 3 6872 6870 6584 3 6583 6871 6873 3 6583 6873 6585 3 6584 6586 6872 3 6874 6872 6586 3 6585 6873 6875 3 6585 6875 6587 3 6586 6588 6874 3 6876 6874 6588 3 6587 6875 6877 3 6587 6877 6589 3 6588 6590 6876 3 6878 6876 6590 3 6589 6877 6879 3 6589 6879 6591 3 6590 6592 6878 3 6880 6878 6592 3 6591 6879 6881 3 6591 6881 6593 3 6592 6594 6880 3 6882 6880 6594 3 6593 6881 6883 3 6593 6883 6595 3 6594 6596 6882 3 6884 6882 6596 3 6595 6883 6885 3 6595 6885 6597 3 6596 6598 6884 3 6886 6884 6598 3 6597 6885 6887 3 6597 6887 6599 3 6598 6600 6886 3 6888 6886 6600 3 6599 6887 6889 3 6599 6889 6601 3 6600 6602 6888 3 6890 6888 6602 3 6601 6889 6891 3 6601 6891 6603 3 6602 6604 6890 3 6892 6890 6604 3 6603 6891 6893 3 6603 6893 6605 3 6604 6606 6892 3 6894 6892 6606 3 6605 6893 6895 3 6605 6895 6607 3 6606 6608 6894 3 6896 6894 6608 3 6607 6895 6897 3 6607 6897 6609 3 6608 6610 6896 3 6898 6896 6610 3 6609 6897 6899 3 6609 6899 6611 3 6610 6612 6898 3 6900 6898 6612 3 6611 6899 6901 3 6611 6901 6613 3 6612 6614 6900 3 6902 6900 6614 3 6613 6901 6615 3 6903 6615 6901 3 6614 6616 6902 3 6904 
6902 6616 3 6615 6903 6617 3 6905 6617 6903 3 6616 6618 6904 3 6906 6904 6618 3 6617 6905 6619 3 6907 6619 6905 3 6618 6620 6906 3 6908 6906 6620 3 6619 6907 6621 3 6909 6621 6907 3 6620 6622 6908 3 6910 6908 6622 3 6621 6909 6623 3 6911 6623 6909 3 6622 6624 6910 3 6912 6910 6624 3 6623 6911 6625 3 6913 6625 6911 3 6624 6626 6912 3 6914 6912 6626 3 6625 6913 6627 3 6915 6627 6913 3 6626 6628 6916 3 6626 6916 6914 3 6627 6915 6629 3 6917 6629 6915 3 6628 6630 6918 3 6628 6918 6916 3 6629 6917 6631 3 6919 6631 6917 3 6630 6632 6920 3 6630 6920 6918 3 6631 6919 6633 3 6921 6633 6919 3 6632 6634 6922 3 6632 6922 6920 3 6633 6921 6635 3 6923 6635 6921 3 6634 6636 6924 3 6634 6924 6922 3 6635 6923 6637 3 6925 6637 6923 3 6636 6638 6926 3 6636 6926 6924 3 6637 6925 6639 3 6927 6639 6925 3 6638 6640 6928 3 6638 6928 6926 3 6639 6927 6641 3 6929 6641 6927 3 6640 6642 6930 3 6640 6930 6928 3 6641 6929 6643 3 6931 6643 6929 3 6642 6644 6932 3 6642 6932 6930 3 6643 6931 6645 3 6933 6645 6931 3 6644 6646 6934 3 6644 6934 6932 3 6645 6933 6647 3 6935 6647 6933 3 6646 6648 6936 3 6646 6936 6934 3 6647 6935 6649 3 6937 6649 6935 3 6648 6650 6938 3 6648 6938 6936 3 6649 6937 6651 3 6939 6651 6937 3 6650 6652 6940 3 6650 6940 6938 3 6651 6939 6653 3 6941 6653 6939 3 6652 6654 6942 3 6652 6942 6940 3 6653 6941 6655 3 6943 6655 6941 3 6654 6656 6944 3 6654 6944 6942 3 6655 6943 6659 3 6947 6659 6943 3 6656 6657 6945 3 6656 6945 6944 3 6657 6660 6948 3 6657 6948 6945 3 6658 6659 6947 3 6658 6947 6946 3 6658 6946 6663 3 6951 6663 6946 3 6660 6661 6949 3 6660 6949 6948 3 6661 6664 6952 3 6661 6952 6949 3 6662 6663 6951 3 6662 6951 6950 3 6662 6950 6732 3 7020 6732 6950 3 6664 6665 6953 3 6664 6953 6952 3 6665 6666 6954 3 6665 6954 6953 3 6666 6667 6955 3 6666 6955 6954 3 6667 6668 6956 3 6667 6956 6955 3 6668 6669 6957 3 6668 6957 6956 3 6669 6670 6958 3 6669 6958 6957 3 6670 6671 6959 3 6670 6959 6958 3 6671 6672 6960 3 6671 6960 6959 3 6672 6673 6961 3 6672 6961 6960 3 6673 6674 6962 
3 6673 6962 6961 3 6674 6675 6963 3 6674 6963 6962 3 6675 6676 6964 3 6675 6964 6963 3 6676 6677 6965 3 6676 6965 6964 3 6677 6678 6965 3 6966 6965 6678 3 6678 6679 6966 3 6967 6966 6679 3 6679 6680 6967 3 6968 6967 6680 3 6680 6681 6968 3 6969 6968 6681 3 6681 6682 6969 3 6970 6969 6682 3 6682 6683 6970 3 6971 6970 6683 3 6683 6684 6971 3 6972 6971 6684 3 6684 6685 6972 3 6973 6972 6685 3 6685 6686 6973 3 6974 6973 6686 3 6686 6687 6974 3 6975 6974 6687 3 6687 6688 6975 3 6976 6975 6688 3 6688 6689 6976 3 6977 6976 6689 3 6689 6690 6977 3 6978 6977 6690 3 6690 6691 6978 3 6979 6978 6691 3 6691 6692 6979 3 6980 6979 6692 3 6692 6693 6980 3 6981 6980 6693 3 6693 6694 6981 3 6982 6981 6694 3 6694 6695 6982 3 6983 6982 6695 3 6695 6696 6983 3 6984 6983 6696 3 6696 6697 6984 3 6985 6984 6697 3 6697 6698 6985 3 6986 6985 6698 3 6698 6699 6986 3 6987 6986 6699 3 6699 6700 6987 3 6988 6987 6700 3 6700 6701 6988 3 6989 6988 6701 3 6701 6702 6989 3 6990 6989 6702 3 6702 6703 6990 3 6991 6990 6703 3 6703 6704 6991 3 6992 6991 6704 3 6704 6705 6992 3 6993 6992 6705 3 6705 6706 6993 3 6994 6993 6706 3 6706 6707 6994 3 6995 6994 6707 3 6707 6708 6995 3 6996 6995 6708 3 6708 6709 6996 3 6997 6996 6709 3 6709 6710 6997 3 6998 6997 6710 3 6710 6711 6998 3 6999 6998 6711 3 6711 6712 6999 3 7000 6999 6712 3 6712 6713 7000 3 7001 7000 6713 3 6713 6714 7001 3 7002 7001 6714 3 6714 6715 7002 3 7003 7002 6715 3 6715 6716 7003 3 7004 7003 6716 3 6716 6717 7004 3 7005 7004 6717 3 6717 6718 7005 3 7006 7005 6718 3 6718 6719 7006 3 7007 7006 6719 3 6719 6720 7007 3 7008 7007 6720 3 6720 6721 7008 3 7009 7008 6721 3 6721 6722 7009 3 7010 7009 6722 3 6722 6723 7010 3 7011 7010 6723 3 6723 6724 7011 3 7012 7011 6724 3 6724 6725 7012 3 7013 7012 6725 3 6725 6726 7013 3 7014 7013 6726 3 6726 6727 7014 3 7015 7014 6727 3 6727 6728 7016 3 6727 7016 7015 3 6728 6729 7017 3 6728 7017 7016 3 6729 6730 7018 3 6729 7018 7017 3 6730 6731 7019 3 6730 7019 7018 3 6731 6732 7020 3 6731 7020 7019 3 6733 
7021 6734 3 7022 6734 7021 3 6733 6803 7091 3 6733 7091 7021 3 6734 7022 6735 3 7023 6735 7022 3 6735 7023 6736 3 7024 6736 7023 3 6736 7024 6737 3 7025 6737 7024 3 6737 7025 6738 3 7026 6738 7025 3 6738 7026 6739 3 7027 6739 7026 3 6739 7027 6740 3 7028 6740 7027 3 6740 7028 6741 3 7029 6741 7028 3 6741 7029 6742 3 7030 6742 7029 3 6742 7030 6743 3 7031 6743 7030 3 6743 7031 6744 3 7032 6744 7031 3 6744 7032 6745 3 7033 6745 7032 3 6745 7033 6746 3 7034 6746 7033 3 6746 7034 6747 3 7035 6747 7034 3 6747 7035 6748 3 7036 6748 7035 3 6748 7036 6749 3 7037 6749 7036 3 6749 7037 6750 3 7038 6750 7037 3 6750 7038 6751 3 7039 6751 7038 3 6751 7039 6752 3 7040 6752 7039 3 6752 7040 6753 3 7041 6753 7040 3 6753 7041 6754 3 7042 6754 7041 3 6754 7042 6755 3 7043 6755 7042 3 6755 7043 6756 3 7044 6756 7043 3 6756 7044 6757 3 7045 6757 7044 3 6757 7045 6758 3 7046 6758 7045 3 6758 7046 6759 3 7047 6759 7046 3 6759 7047 6760 3 7048 6760 7047 3 6760 7048 6761 3 7049 6761 7048 3 6761 7049 6762 3 7050 6762 7049 3 6762 7050 6763 3 7051 6763 7050 3 6763 7051 6764 3 7052 6764 7051 3 6764 7052 6765 3 7053 6765 7052 3 6765 7053 6766 3 7054 6766 7053 3 6766 7054 7055 3 6766 7055 6767 3 6767 7055 7056 3 6767 7056 6768 3 6768 7056 7057 3 6768 7057 6769 3 6769 7057 7058 3 6769 7058 6770 3 6770 7058 7059 3 6770 7059 6771 3 6771 7059 7060 3 6771 7060 6772 3 6772 7060 7061 3 6772 7061 6773 3 6773 7061 7062 3 6773 7062 6774 3 6774 7062 7063 3 6774 7063 6775 3 6775 7063 7064 3 6775 7064 6776 3 6776 7064 7065 3 6776 7065 6777 3 6777 7065 7066 3 6777 7066 6778 3 6778 7066 7067 3 6778 7067 6779 3 6779 7067 7068 3 6779 7068 6780 3 6780 7068 7069 3 6780 7069 6781 3 6781 7069 7070 3 6781 7070 6782 3 6782 7070 7071 3 6782 7071 6783 3 6783 7071 7072 3 6783 7072 6784 3 6784 7072 7073 3 6784 7073 6785 3 6785 7073 7074 3 6785 7074 6786 3 6786 7074 7075 3 6786 7075 6787 3 6787 7075 7076 3 6787 7076 6788 3 6788 7076 7077 3 6788 7077 6789 3 6789 7077 7078 3 6789 7078 6790 3 6790 7078 7079 3 6790 7079 6791 
3 6791 7079 7080 3 6791 7080 6792 3 6792 7080 7081 3 6792 7081 6793 3 6793 7081 7082 3 6793 7082 6794 3 6794 7082 7083 3 6794 7083 6795 3 6795 7083 7084 3 6795 7084 6796 3 6796 7084 7085 3 6796 7085 6797 3 6797 7085 7086 3 6797 7086 6798 3 6798 7086 7087 3 6798 7087 6799 3 6799 7087 7088 3 6799 7088 6800 3 6800 7088 7089 3 6800 7089 6801 3 6801 7089 7092 3 6801 7092 6804 3 6802 7090 7091 3 6802 7091 6803 3 6802 6807 7090 3 7095 7090 6807 3 6804 7092 7093 3 6804 7093 6805 3 6805 7093 7096 3 6805 7096 6808 3 6806 7094 7095 3 6806 7095 6807 3 6806 6810 7094 3 7098 7094 6810 3 6808 7096 7097 3 6808 7097 6809 3 6809 7097 7099 3 6809 7099 6811 3 6810 6812 7098 3 7100 7098 6812 3 6811 7099 7101 3 6811 7101 6813 3 6812 6814 7100 3 7102 7100 6814 3 6813 7101 7103 3 6813 7103 6815 3 6814 6816 7102 3 7104 7102 6816 3 6815 7103 7105 3 6815 7105 6817 3 6816 6818 7104 3 7106 7104 6818 3 6817 7105 6819 3 7107 6819 7105 3 6818 6820 7106 3 7108 7106 6820 3 6819 7107 6821 3 7109 6821 7107 3 6820 6822 7108 3 7110 7108 6822 3 6821 7109 6823 3 7111 6823 7109 3 6822 6824 7110 3 7112 7110 6824 3 6823 7111 6825 3 7113 6825 7111 3 6824 6826 7112 3 7114 7112 6826 3 6825 7113 6827 3 7115 6827 7113 3 6826 6828 7114 3 7116 7114 6828 3 6827 7115 6829 3 7117 6829 7115 3 6828 6830 7118 3 6828 7118 7116 3 6829 7117 6831 3 7119 6831 7117 3 6830 6832 7120 3 6830 7120 7118 3 6831 7119 6833 3 7121 6833 7119 3 6832 6834 7122 3 6832 7122 7120 3 6833 7121 6835 3 7123 6835 7121 3 6834 6836 7124 3 6834 7124 7122 3 6835 7123 6837 3 7125 6837 7123 3 6836 6838 7126 3 6836 7126 7124 3 6837 7125 6839 3 7127 6839 7125 3 6838 6840 7128 3 6838 7128 7126 3 6839 7127 6841 3 7129 6841 7127 3 6840 6842 7130 3 6840 7130 7128 3 6841 7129 6843 3 7131 6843 7129 3 6842 6844 7132 3 6842 7132 7130 3 6843 7131 6845 3 7133 6845 7131 3 6844 6846 7134 3 6844 7134 7132 3 6845 7133 6847 3 7135 6847 7133 3 6846 6848 7136 3 6846 7136 7134 3 6847 7135 6849 3 7137 6849 7135 3 6848 6850 7138 3 6848 7138 7136 3 6849 7137 6851 3 7139 
6851 7137 3 6850 6852 7140 3 6850 7140 7138 3 6851 7139 6853 3 7141 6853 7139 3 6852 6854 7142 3 6852 7142 7140 3 6853 7141 6855 3 7143 6855 7141 3 6854 6856 7144 3 6854 7144 7142 3 6855 7143 6857 3 7145 6857 7143 3 6856 6858 7146 3 6856 7146 7144 3 6857 7145 6859 3 7147 6859 7145 3 6858 6860 7148 3 6858 7148 7146 3 6859 7147 6861 3 7149 6861 7147 3 6860 6862 7150 3 6860 7150 7148 3 6861 7149 6863 3 7151 6863 7149 3 6862 6864 7152 3 6862 7152 7150 3 6863 7151 6865 3 7153 6865 7151 3 6864 6866 7154 3 6864 7154 7152 3 6865 7153 6867 3 7155 6867 7153 3 6866 6868 7156 3 6866 7156 7154 3 6867 7155 6869 3 7157 6869 7155 3 6868 6870 7158 3 6868 7158 7156 3 6869 7157 7159 3 6869 7159 6871 3 6870 6872 7160 3 6870 7160 7158 3 6871 7159 7161 3 6871 7161 6873 3 6872 6874 7162 3 6872 7162 7160 3 6873 7161 7163 3 6873 7163 6875 3 6874 6876 7164 3 6874 7164 7162 3 6875 7163 7165 3 6875 7165 6877 3 6876 6878 7166 3 6876 7166 7164 3 6877 7165 7167 3 6877 7167 6879 3 6878 6880 7168 3 6878 7168 7166 3 6879 7167 7169 3 6879 7169 6881 3 6880 6882 7168 3 7170 7168 6882 3 6881 7169 7171 3 6881 7171 6883 3 6882 6884 7170 3 7172 7170 6884 3 6883 7171 7173 3 6883 7173 6885 3 6884 6886 7172 3 7174 7172 6886 3 6885 7173 7175 3 6885 7175 6887 3 6886 6888 7174 3 7176 7174 6888 3 6887 7175 7177 3 6887 7177 6889 3 6888 6890 7176 3 7178 7176 6890 3 6889 7177 7179 3 6889 7179 6891 3 6890 6892 7178 3 7180 7178 6892 3 6891 7179 7181 3 6891 7181 6893 3 6892 6894 7180 3 7182 7180 6894 3 6893 7181 7183 3 6893 7183 6895 3 6894 6896 7182 3 7184 7182 6896 3 6895 7183 7185 3 6895 7185 6897 3 6896 6898 7184 3 7186 7184 6898 3 6897 7185 7187 3 6897 7187 6899 3 6898 6900 7186 3 7188 7186 6900 3 6899 7187 7189 3 6899 7189 6901 3 6900 6902 7188 3 7190 7188 6902 3 6901 7189 7191 3 6901 7191 6903 3 6902 6904 7190 3 7192 7190 6904 3 6903 7191 7193 3 6903 7193 6905 3 6904 6906 7192 3 7194 7192 6906 3 6905 7193 7195 3 6905 7195 6907 3 6906 6908 7194 3 7196 7194 6908 3 6907 7195 7197 3 6907 7197 6909 3 6908 6910 7196 
3 7198 7196 6910 3 6909 7197 7199 3 6909 7199 6911 3 6910 6912 7198 3 7200 7198 6912 3 6911 7199 7201 3 6911 7201 6913 3 6912 6914 7200 3 7202 7200 6914 3 6913 7201 7203 3 6913 7203 6915 3 6914 6916 7202 3 7204 7202 6916 3 6915 7203 7205 3 6915 7205 6917 3 6916 6918 7204 3 7206 7204 6918 3 6917 7205 7207 3 6917 7207 6919 3 6918 6920 7206 3 7208 7206 6920 3 6919 7207 7209 3 6919 7209 6921 3 6920 6922 7208 3 7210 7208 6922 3 6921 7209 6923 3 7211 6923 7209 3 6922 6924 7210 3 7212 7210 6924 3 6923 7211 6925 3 7213 6925 7211 3 6924 6926 7212 3 7214 7212 6926 3 6925 7213 6927 3 7215 6927 7213 3 6926 6928 7214 3 7216 7214 6928 3 6927 7215 6929 3 7217 6929 7215 3 6928 6930 7216 3 7218 7216 6930 3 6929 7217 6931 3 7219 6931 7217 3 6930 6932 7220 3 6930 7220 7218 3 6931 7219 6933 3 7221 6933 7219 3 6932 6934 7222 3 6932 7222 7220 3 6933 7221 6935 3 7223 6935 7221 3 6934 6936 7224 3 6934 7224 7222 3 6935 7223 6937 3 7225 6937 7223 3 6936 6938 7226 3 6936 7226 7224 3 6937 7225 6939 3 7227 6939 7225 3 6938 6940 7228 3 6938 7228 7226 3 6939 7227 6941 3 7229 6941 7227 3 6940 6942 7230 3 6940 7230 7228 3 6941 7229 6943 3 7231 6943 7229 3 6942 6944 7232 3 6942 7232 7230 3 6943 7231 6947 3 7235 6947 7231 3 6944 6945 7233 3 6944 7233 7232 3 6945 6948 7236 3 6945 7236 7233 3 6946 6947 7235 3 6946 7235 7234 3 6946 7234 6951 3 7239 6951 7234 3 6948 6949 7237 3 6948 7237 7236 3 6949 6952 7240 3 6949 7240 7237 3 6950 6951 7239 3 6950 7239 7238 3 6950 7238 7020 3 7308 7020 7238 3 6952 6953 7241 3 6952 7241 7240 3 6953 6954 7242 3 6953 7242 7241 3 6954 6955 7243 3 6954 7243 7242 3 6955 6956 7244 3 6955 7244 7243 3 6956 6957 7245 3 6956 7245 7244 3 6957 6958 7246 3 6957 7246 7245 3 6958 6959 7247 3 6958 7247 7246 3 6959 6960 7248 3 6959 7248 7247 3 6960 6961 7249 3 6960 7249 7248 3 6961 6962 7250 3 6961 7250 7249 3 6962 6963 7251 3 6962 7251 7250 3 6963 6964 7252 3 6963 7252 7251 3 6964 6965 7253 3 6964 7253 7252 3 6965 6966 7254 3 6965 7254 7253 3 6966 6967 7255 3 6966 7255 7254 3 6967 
6968 7256 3 6967 7256 7255 3 6968 6969 7257 3 6968 7257 7256 3 6969 6970 7258 3 6969 7258 7257 3 6970 6971 7259 3 6970 7259 7258 3 6971 6972 7260 3 6971 7260 7259 3 6972 6973 7261 3 6972 7261 7260 3 6973 6974 7262 3 6973 7262 7261 3 6974 6975 7263 3 6974 7263 7262 3 6975 6976 7264 3 6975 7264 7263 3 6976 6977 7265 3 6976 7265 7264 3 6977 6978 7266 3 6977 7266 7265 3 6978 6979 7267 3 6978 7267 7266 3 6979 6980 7268 3 6979 7268 7267 3 6980 6981 7269 3 6980 7269 7268 3 6981 6982 7270 3 6981 7270 7269 3 6982 6983 7270 3 7271 7270 6983 3 6983 6984 7271 3 7272 7271 6984 3 6984 6985 7272 3 7273 7272 6985 3 6985 6986 7273 3 7274 7273 6986 3 6986 6987 7274 3 7275 7274 6987 3 6987 6988 7275 3 7276 7275 6988 3 6988 6989 7276 3 7277 7276 6989 3 6989 6990 7277 3 7278 7277 6990 3 6990 6991 7278 3 7279 7278 6991 3 6991 6992 7279 3 7280 7279 6992 3 6992 6993 7280 3 7281 7280 6993 3 6993 6994 7281 3 7282 7281 6994 3 6994 6995 7282 3 7283 7282 6995 3 6995 6996 7283 3 7284 7283 6996 3 6996 6997 7284 3 7285 7284 6997 3 6997 6998 7285 3 7286 7285 6998 3 6998 6999 7286 3 7287 7286 6999 3 6999 7000 7287 3 7288 7287 7000 3 7000 7001 7288 3 7289 7288 7001 3 7001 7002 7289 3 7290 7289 7002 3 7002 7003 7290 3 7291 7290 7003 3 7003 7004 7291 3 7292 7291 7004 3 7004 7005 7292 3 7293 7292 7005 3 7005 7006 7293 3 7294 7293 7006 3 7006 7007 7294 3 7295 7294 7007 3 7007 7008 7295 3 7296 7295 7008 3 7008 7009 7296 3 7297 7296 7009 3 7009 7010 7297 3 7298 7297 7010 3 7010 7011 7298 3 7299 7298 7011 3 7011 7012 7299 3 7300 7299 7012 3 7012 7013 7300 3 7301 7300 7013 3 7013 7014 7301 3 7302 7301 7014 3 7014 7015 7302 3 7303 7302 7015 3 7015 7016 7303 3 7304 7303 7016 3 7016 7017 7304 3 7305 7304 7017 3 7017 7018 7305 3 7306 7305 7018 3 7018 7019 7306 3 7307 7306 7019 3 7019 7020 7307 3 7308 7307 7020 3 7021 7309 7310 3 7021 7310 7022 3 7021 7091 7379 3 7021 7379 7309 3 7022 7310 7311 3 7022 7311 7023 3 7023 7311 7312 3 7023 7312 7024 3 7024 7312 7313 3 7024 7313 7025 3 7025 7313 7026 3 7314 7026 7313 
3 7026 7314 7027 3 7315 7027 7314 3 7027 7315 7028 3 7316 7028 7315 3 7028 7316 7029 3 7317 7029 7316 3 7029 7317 7030 3 7318 7030 7317 3 7030 7318 7031 3 7319 7031 7318 3 7031 7319 7032 3 7320 7032 7319 3 7032 7320 7033 3 7321 7033 7320 3 7033 7321 7034 3 7322 7034 7321 3 7034 7322 7035 3 7323 7035 7322 3 7035 7323 7036 3 7324 7036 7323 3 7036 7324 7037 3 7325 7037 7324 3 7037 7325 7038 3 7326 7038 7325 3 7038 7326 7039 3 7327 7039 7326 3 7039 7327 7040 3 7328 7040 7327 3 7040 7328 7041 3 7329 7041 7328 3 7041 7329 7042 3 7330 7042 7329 3 7042 7330 7043 3 7331 7043 7330 3 7043 7331 7044 3 7332 7044 7331 3 7044 7332 7045 3 7333 7045 7332 3 7045 7333 7046 3 7334 7046 7333 3 7046 7334 7047 3 7335 7047 7334 3 7047 7335 7048 3 7336 7048 7335 3 7048 7336 7049 3 7337 7049 7336 3 7049 7337 7050 3 7338 7050 7337 3 7050 7338 7051 3 7339 7051 7338 3 7051 7339 7052 3 7340 7052 7339 3 7052 7340 7053 3 7341 7053 7340 3 7053 7341 7054 3 7342 7054 7341 3 7054 7342 7055 3 7343 7055 7342 3 7055 7343 7056 3 7344 7056 7343 3 7056 7344 7057 3 7345 7057 7344 3 7057 7345 7058 3 7346 7058 7345 3 7058 7346 7059 3 7347 7059 7346 3 7059 7347 7060 3 7348 7060 7347 3 7060 7348 7061 3 7349 7061 7348 3 7061 7349 7062 3 7350 7062 7349 3 7062 7350 7063 3 7351 7063 7350 3 7063 7351 7064 3 7352 7064 7351 3 7064 7352 7065 3 7353 7065 7352 3 7065 7353 7066 3 7354 7066 7353 3 7066 7354 7067 3 7355 7067 7354 3 7067 7355 7068 3 7356 7068 7355 3 7068 7356 7069 3 7357 7069 7356 3 7069 7357 7070 3 7358 7070 7357 3 7070 7358 7071 3 7359 7071 7358 3 7071 7359 7072 3 7360 7072 7359 3 7072 7360 7073 3 7361 7073 7360 3 7073 7361 7074 3 7362 7074 7361 3 7074 7362 7075 3 7363 7075 7362 3 7075 7363 7076 3 7364 7076 7363 3 7076 7364 7077 3 7365 7077 7364 3 7077 7365 7366 3 7077 7366 7078 3 7078 7366 7367 3 7078 7367 7079 3 7079 7367 7368 3 7079 7368 7080 3 7080 7368 7369 3 7080 7369 7081 3 7081 7369 7370 3 7081 7370 7082 3 7082 7370 7371 3 7082 7371 7083 3 7083 7371 7372 3 7083 7372 7084 3 7084 7372 7373 3 7084 
7373 7085 3 7085 7373 7374 3 7085 7374 7086 3 7086 7374 7375 3 7086 7375 7087 3 7087 7375 7376 3 7087 7376 7088 3 7088 7376 7377 3 7088 7377 7089 3 7089 7377 7380 3 7089 7380 7092 3 7090 7378 7379 3 7090 7379 7091 3 7090 7095 7378 3 7383 7378 7095 3 7092 7380 7381 3 7092 7381 7093 3 7093 7381 7384 3 7093 7384 7096 3 7094 7382 7383 3 7094 7383 7095 3 7094 7098 7382 3 7386 7382 7098 3 7096 7384 7385 3 7096 7385 7097 3 7097 7385 7387 3 7097 7387 7099 3 7098 7100 7386 3 7388 7386 7100 3 7099 7387 7389 3 7099 7389 7101 3 7100 7102 7388 3 7390 7388 7102 3 7101 7389 7391 3 7101 7391 7103 3 7102 7104 7390 3 7392 7390 7104 3 7103 7391 7393 3 7103 7393 7105 3 7104 7106 7392 3 7394 7392 7106 3 7105 7393 7395 3 7105 7395 7107 3 7106 7108 7394 3 7396 7394 7108 3 7107 7395 7397 3 7107 7397 7109 3 7108 7110 7396 3 7398 7396 7110 3 7109 7397 7399 3 7109 7399 7111 3 7110 7112 7398 3 7400 7398 7112 3 7111 7399 7401 3 7111 7401 7113 3 7112 7114 7400 3 7402 7400 7114 3 7113 7401 7403 3 7113 7403 7115 3 7114 7116 7402 3 7404 7402 7116 3 7115 7403 7405 3 7115 7405 7117 3 7116 7118 7404 3 7406 7404 7118 3 7117 7405 7407 3 7117 7407 7119 3 7118 7120 7406 3 7408 7406 7120 3 7119 7407 7409 3 7119 7409 7121 3 7120 7122 7408 3 7410 7408 7122 3 7121 7409 7411 3 7121 7411 7123 3 7122 7124 7410 3 7412 7410 7124 3 7123 7411 7413 3 7123 7413 7125 3 7124 7126 7412 3 7414 7412 7126 3 7125 7413 7415 3 7125 7415 7127 3 7126 7128 7414 3 7416 7414 7128 3 7127 7415 7417 3 7127 7417 7129 3 7128 7130 7416 3 7418 7416 7130 3 7129 7417 7131 3 7419 7131 7417 3 7130 7132 7418 3 7420 7418 7132 3 7131 7419 7133 3 7421 7133 7419 3 7132 7134 7420 3 7422 7420 7134 3 7133 7421 7135 3 7423 7135 7421 3 7134 7136 7422 3 7424 7422 7136 3 7135 7423 7137 3 7425 7137 7423 3 7136 7138 7426 3 7136 7426 7424 3 7137 7425 7139 3 7427 7139 7425 3 7138 7140 7428 3 7138 7428 7426 3 7139 7427 7141 3 7429 7141 7427 3 7140 7142 7430 3 7140 7430 7428 3 7141 7429 7143 3 7431 7143 7429 3 7142 7144 7432 3 7142 7432 7430 3 7143 7431 7145 
3 7433 7145 7431 3 7144 7146 7434 3 7144 7434 7432 3 7145 7433 7147 3 7435 7147 7433 3 7146 7148 7436 3 7146 7436 7434 3 7147 7435 7149 3 7437 7149 7435 3 7148 7150 7438 3 7148 7438 7436 3 7149 7437 7151 3 7439 7151 7437 3 7150 7152 7440 3 7150 7440 7438 3 7151 7439 7153 3 7441 7153 7439 3 7152 7154 7442 3 7152 7442 7440 3 7153 7441 7155 3 7443 7155 7441 3 7154 7156 7444 3 7154 7444 7442 3 7155 7443 7157 3 7445 7157 7443 3 7156 7158 7446 3 7156 7446 7444 3 7157 7445 7159 3 7447 7159 7445 3 7158 7160 7448 3 7158 7448 7446 3 7159 7447 7161 3 7449 7161 7447 3 7160 7162 7450 3 7160 7450 7448 3 7161 7449 7163 3 7451 7163 7449 3 7162 7164 7452 3 7162 7452 7450 3 7163 7451 7165 3 7453 7165 7451 3 7164 7166 7454 3 7164 7454 7452 3 7165 7453 7167 3 7455 7167 7453 3 7166 7168 7456 3 7166 7456 7454 3 7167 7455 7169 3 7457 7169 7455 3 7168 7170 7458 3 7168 7458 7456 3 7169 7457 7171 3 7459 7171 7457 3 7170 7172 7460 3 7170 7460 7458 3 7171 7459 7173 3 7461 7173 7459 3 7172 7174 7462 3 7172 7462 7460 3 7173 7461 7175 3 7463 7175 7461 3 7174 7176 7464 3 7174 7464 7462 3 7175 7463 7177 3 7465 7177 7463 3 7176 7178 7466 3 7176 7466 7464 3 7177 7465 7179 3 7467 7179 7465 3 7178 7180 7468 3 7178 7468 7466 3 7179 7467 7181 3 7469 7181 7467 3 7180 7182 7470 3 7180 7470 7468 3 7181 7469 7183 3 7471 7183 7469 3 7182 7184 7472 3 7182 7472 7470 3 7183 7471 7473 3 7183 7473 7185 3 7184 7186 7474 3 7184 7474 7472 3 7185 7473 7475 3 7185 7475 7187 3 7186 7188 7476 3 7186 7476 7474 3 7187 7475 7477 3 7187 7477 7189 3 7188 7190 7478 3 7188 7478 7476 3 7189 7477 7479 3 7189 7479 7191 3 7190 7192 7478 3 7480 7478 7192 3 7191 7479 7481 3 7191 7481 7193 3 7192 7194 7480 3 7482 7480 7194 3 7193 7481 7483 3 7193 7483 7195 3 7194 7196 7482 3 7484 7482 7196 3 7195 7483 7485 3 7195 7485 7197 3 7196 7198 7484 3 7486 7484 7198 3 7197 7485 7487 3 7197 7487 7199 3 7198 7200 7486 3 7488 7486 7200 3 7199 7487 7489 3 7199 7489 7201 3 7200 7202 7488 3 7490 7488 7202 3 7201 7489 7491 3 7201 7491 7203 3 7202 
7204 7490 3 7492 7490 7204 3 7203 7491 7493 3 7203 7493 7205 3 7204 7206 7492 3 7494 7492 7206 3 7205 7493 7495 3 7205 7495 7207 3 7206 7208 7494 3 7496 7494 7208 3 7207 7495 7497 3 7207 7497 7209 3 7208 7210 7496 3 7498 7496 7210 3 7209 7497 7499 3 7209 7499 7211 3 7210 7212 7498 3 7500 7498 7212 3 7211 7499 7501 3 7211 7501 7213 3 7212 7214 7500 3 7502 7500 7214 3 7213 7501 7503 3 7213 7503 7215 3 7214 7216 7502 3 7504 7502 7216 3 7215 7503 7505 3 7215 7505 7217 3 7216 7218 7504 3 7506 7504 7218 3 7217 7505 7507 3 7217 7507 7219 3 7218 7220 7506 3 7508 7506 7220 3 7219 7507 7509 3 7219 7509 7221 3 7220 7222 7508 3 7510 7508 7222 3 7221 7509 7511 3 7221 7511 7223 3 7222 7224 7510 3 7512 7510 7224 3 7223 7511 7513 3 7223 7513 7225 3 7224 7226 7512 3 7514 7512 7226 3 7225 7513 7515 3 7225 7515 7227 3 7226 7228 7514 3 7516 7514 7228 3 7227 7515 7517 3 7227 7517 7229 3 7228 7230 7516 3 7518 7516 7230 3 7229 7517 7519 3 7229 7519 7231 3 7230 7232 7518 3 7520 7518 7232 3 7231 7519 7523 3 7231 7523 7235 3 7232 7233 7520 3 7521 7520 7233 3 7233 7236 7521 3 7524 7521 7236 3 7234 7235 7522 3 7523 7522 7235 3 7234 7522 7527 3 7234 7527 7239 3 7236 7237 7524 3 7525 7524 7237 3 7237 7240 7525 3 7528 7525 7240 3 7238 7239 7526 3 7527 7526 7239 3 7238 7526 7308 3 7596 7308 7526 3 7240 7241 7528 3 7529 7528 7241 3 7241 7242 7530 3 7241 7530 7529 3 7242 7243 7531 3 7242 7531 7530 3 7243 7244 7532 3 7243 7532 7531 3 7244 7245 7533 3 7244 7533 7532 3 7245 7246 7534 3 7245 7534 7533 3 7246 7247 7535 3 7246 7535 7534 3 7247 7248 7536 3 7247 7536 7535 3 7248 7249 7537 3 7248 7537 7536 3 7249 7250 7538 3 7249 7538 7537 3 7250 7251 7539 3 7250 7539 7538 3 7251 7252 7540 3 7251 7540 7539 3 7252 7253 7541 3 7252 7541 7540 3 7253 7254 7542 3 7253 7542 7541 3 7254 7255 7543 3 7254 7543 7542 3 7255 7256 7544 3 7255 7544 7543 3 7256 7257 7545 3 7256 7545 7544 3 7257 7258 7546 3 7257 7546 7545 3 7258 7259 7547 3 7258 7547 7546 3 7259 7260 7548 3 7259 7548 7547 3 7260 7261 7549 3 7260 7549 7548 
3 7261 7262 7550 3 7261 7550 7549 3 7262 7263 7551 3 7262 7551 7550 3 7263 7264 7552 3 7263 7552 7551 3 7264 7265 7553 3 7264 7553 7552 3 7265 7266 7554 3 7265 7554 7553 3 7266 7267 7555 3 7266 7555 7554 3 7267 7268 7556 3 7267 7556 7555 3 7268 7269 7557 3 7268 7557 7556 3 7269 7270 7558 3 7269 7558 7557 3 7270 7271 7559 3 7270 7559 7558 3 7271 7272 7560 3 7271 7560 7559 3 7272 7273 7561 3 7272 7561 7560 3 7273 7274 7562 3 7273 7562 7561 3 7274 7275 7563 3 7274 7563 7562 3 7275 7276 7564 3 7275 7564 7563 3 7276 7277 7565 3 7276 7565 7564 3 7277 7278 7566 3 7277 7566 7565 3 7278 7279 7567 3 7278 7567 7566 3 7279 7280 7568 3 7279 7568 7567 3 7280 7281 7569 3 7280 7569 7568 3 7281 7282 7570 3 7281 7570 7569 3 7282 7283 7571 3 7282 7571 7570 3 7283 7284 7572 3 7283 7572 7571 3 7284 7285 7573 3 7284 7573 7572 3 7285 7286 7574 3 7285 7574 7573 3 7286 7287 7575 3 7286 7575 7574 3 7287 7288 7576 3 7287 7576 7575 3 7288 7289 7577 3 7288 7577 7576 3 7289 7290 7578 3 7289 7578 7577 3 7290 7291 7579 3 7290 7579 7578 3 7291 7292 7580 3 7291 7580 7579 3 7292 7293 7581 3 7292 7581 7580 3 7293 7294 7582 3 7293 7582 7581 3 7294 7295 7582 3 7583 7582 7295 3 7295 7296 7583 3 7584 7583 7296 3 7296 7297 7584 3 7585 7584 7297 3 7297 7298 7585 3 7586 7585 7298 3 7298 7299 7586 3 7587 7586 7299 3 7299 7300 7587 3 7588 7587 7300 3 7300 7301 7588 3 7589 7588 7301 3 7301 7302 7589 3 7590 7589 7302 3 7302 7303 7590 3 7591 7590 7303 3 7303 7304 7591 3 7592 7591 7304 3 7304 7305 7592 3 7593 7592 7305 3 7305 7306 7593 3 7594 7593 7306 3 7306 7307 7594 3 7595 7594 7307 3 7307 7308 7595 3 7596 7595 7308 3 7309 7597 7598 3 7309 7598 7310 3 7309 7379 7667 3 7309 7667 7597 3 7310 7598 7599 3 7310 7599 7311 3 7311 7599 7600 3 7311 7600 7312 3 7312 7600 7601 3 7312 7601 7313 3 7313 7601 7602 3 7313 7602 7314 3 7314 7602 7603 3 7314 7603 7315 3 7315 7603 7604 3 7315 7604 7316 3 7316 7604 7605 3 7316 7605 7317 3 7317 7605 7606 3 7317 7606 7318 3 7318 7606 7607 3 7318 7607 7319 3 7319 7607 7608 3 7319 
7608 7320 3 7320 7608 7609 3 7320 7609 7321 3 7321 7609 7610 3 7321 7610 7322 3 7322 7610 7611 3 7322 7611 7323 3 7323 7611 7612 3 7323 7612 7324 3 7324 7612 7613 3 7324 7613 7325 3 7325 7613 7614 3 7325 7614 7326 3 7326 7614 7615 3 7326 7615 7327 3 7327 7615 7616 3 7327 7616 7328 3 7328 7616 7617 3 7328 7617 7329 3 7329 7617 7618 3 7329 7618 7330 3 7330 7618 7619 3 7330 7619 7331 3 7331 7619 7620 3 7331 7620 7332 3 7332 7620 7621 3 7332 7621 7333 3 7333 7621 7622 3 7333 7622 7334 3 7334 7622 7623 3 7334 7623 7335 3 7335 7623 7624 3 7335 7624 7336 3 7336 7624 7625 3 7336 7625 7337 3 7337 7625 7626 3 7337 7626 7338 3 7338 7626 7627 3 7338 7627 7339 3 7339 7627 7628 3 7339 7628 7340 3 7340 7628 7629 3 7340 7629 7341 3 7341 7629 7342 3 7630 7342 7629 3 7342 7630 7343 3 7631 7343 7630 3 7343 7631 7344 3 7632 7344 7631 3 7344 7632 7345 3 7633 7345 7632 3 7345 7633 7346 3 7634 7346 7633 3 7346 7634 7347 3 7635 7347 7634 3 7347 7635 7348 3 7636 7348 7635 3 7348 7636 7349 3 7637 7349 7636 3 7349 7637 7350 3 7638 7350 7637 3 7350 7638 7351 3 7639 7351 7638 3 7351 7639 7352 3 7640 7352 7639 3 7352 7640 7353 3 7641 7353 7640 3 7353 7641 7354 3 7642 7354 7641 3 7354 7642 7355 3 7643 7355 7642 3 7355 7643 7356 3 7644 7356 7643 3 7356 7644 7357 3 7645 7357 7644 3 7357 7645 7358 3 7646 7358 7645 3 7358 7646 7359 3 7647 7359 7646 3 7359 7647 7360 3 7648 7360 7647 3 7360 7648 7361 3 7649 7361 7648 3 7361 7649 7362 3 7650 7362 7649 3 7362 7650 7363 3 7651 7363 7650 3 7363 7651 7364 3 7652 7364 7651 3 7364 7652 7365 3 7653 7365 7652 3 7365 7653 7366 3 7654 7366 7653 3 7366 7654 7367 3 7655 7367 7654 3 7367 7655 7368 3 7656 7368 7655 3 7368 7656 7369 3 7657 7369 7656 3 7369 7657 7370 3 7658 7370 7657 3 7370 7658 7371 3 7659 7371 7658 3 7371 7659 7372 3 7660 7372 7659 3 7372 7660 7373 3 7661 7373 7660 3 7373 7661 7374 3 7662 7374 7661 3 7374 7662 7375 3 7663 7375 7662 3 7375 7663 7376 3 7664 7376 7663 3 7376 7664 7377 3 7665 7377 7664 3 7377 7665 7380 3 7668 7380 7665 3 7378 7666 7379 
3 7667 7379 7666 3 7378 7383 7671 3 7378 7671 7666 3 7380 7668 7381 3 7669 7381 7668 3 7381 7669 7384 3 7672 7384 7669 3 7382 7670 7383 3 7671 7383 7670 3 7382 7386 7674 3 7382 7674 7670 3 7384 7672 7385 3 7673 7385 7672 3 7385 7673 7387 3 7675 7387 7673 3 7386 7388 7676 3 7386 7676 7674 3 7387 7675 7389 3 7677 7389 7675 3 7388 7390 7678 3 7388 7678 7676 3 7389 7677 7391 3 7679 7391 7677 3 7390 7392 7680 3 7390 7680 7678 3 7391 7679 7393 3 7681 7393 7679 3 7392 7394 7682 3 7392 7682 7680 3 7393 7681 7395 3 7683 7395 7681 3 7394 7396 7684 3 7394 7684 7682 3 7395 7683 7685 3 7395 7685 7397 3 7396 7398 7686 3 7396 7686 7684 3 7397 7685 7687 3 7397 7687 7399 3 7398 7400 7688 3 7398 7688 7686 3 7399 7687 7689 3 7399 7689 7401 3 7400 7402 7688 3 7690 7688 7402 3 7401 7689 7691 3 7401 7691 7403 3 7402 7404 7690 3 7692 7690 7404 3 7403 7691 7693 3 7403 7693 7405 3 7404 7406 7692 3 7694 7692 7406 3 7405 7693 7695 3 7405 7695 7407 3 7406 7408 7694 3 7696 7694 7408 3 7407 7695 7697 3 7407 7697 7409 3 7408 7410 7696 3 7698 7696 7410 3 7409 7697 7699 3 7409 7699 7411 3 7410 7412 7698 3 7700 7698 7412 3 7411 7699 7701 3 7411 7701 7413 3 7412 7414 7700 3 7702 7700 7414 3 7413 7701 7703 3 7413 7703 7415 3 7414 7416 7702 3 7704 7702 7416 3 7415 7703 7705 3 7415 7705 7417 3 7416 7418 7704 3 7706 7704 7418 3 7417 7705 7707 3 7417 7707 7419 3 7418 7420 7706 3 7708 7706 7420 3 7419 7707 7709 3 7419 7709 7421 3 7420 7422 7708 3 7710 7708 7422 3 7421 7709 7711 3 7421 7711 7423 3 7422 7424 7710 3 7712 7710 7424 3 7423 7711 7713 3 7423 7713 7425 3 7424 7426 7712 3 7714 7712 7426 3 7425 7713 7715 3 7425 7715 7427 3 7426 7428 7714 3 7716 7714 7428 3 7427 7715 7717 3 7427 7717 7429 3 7428 7430 7716 3 7718 7716 7430 3 7429 7717 7719 3 7429 7719 7431 3 7430 7432 7718 3 7720 7718 7432 3 7431 7719 7721 3 7431 7721 7433 3 7432 7434 7720 3 7722 7720 7434 3 7433 7721 7723 3 7433 7723 7435 3 7434 7436 7722 3 7724 7722 7436 3 7435 7723 7725 3 7435 7725 7437 3 7436 7438 7724 3 7726 7724 7438 3 7437 
7725 7727 3 7437 7727 7439 3 7438 7440 7726 3 7728 7726 7440 3 7439 7727 7729 3 7439 7729 7441 3 7440 7442 7728 3 7730 7728 7442 3 7441 7729 7731 3 7441 7731 7443 3 7442 7444 7730 3 7732 7730 7444 3 7443 7731 7733 3 7443 7733 7445 3 7444 7446 7732 3 7734 7732 7446 3 7445 7733 7735 3 7445 7735 7447 3 7446 7448 7734 3 7736 7734 7448 3 7447 7735 7737 3 7447 7737 7449 3 7448 7450 7736 3 7738 7736 7450 3 7449 7737 7451 3 7739 7451 7737 3 7450 7452 7738 3 7740 7738 7452 3 7451 7739 7453 3 7741 7453 7739 3 7452 7454 7742 3 7452 7742 7740 3 7453 7741 7455 3 7743 7455 7741 3 7454 7456 7744 3 7454 7744 7742 3 7455 7743 7457 3 7745 7457 7743 3 7456 7458 7746 3 7456 7746 7744 3 7457 7745 7459 3 7747 7459 7745 3 7458 7460 7748 3 7458 7748 7746 3 7459 7747 7461 3 7749 7461 7747 3 7460 7462 7750 3 7460 7750 7748 3 7461 7749 7463 3 7751 7463 7749 3 7462 7464 7752 3 7462 7752 7750 3 7463 7751 7465 3 7753 7465 7751 3 7464 7466 7754 3 7464 7754 7752 3 7465 7753 7467 3 7755 7467 7753 3 7466 7468 7756 3 7466 7756 7754 3 7467 7755 7469 3 7757 7469 7755 3 7468 7470 7758 3 7468 7758 7756 3 7469 7757 7471 3 7759 7471 7757 3 7470 7472 7760 3 7470 7760 7758 3 7471 7759 7473 3 7761 7473 7759 3 7472 7474 7762 3 7472 7762 7760 3 7473 7761 7475 3 7763 7475 7761 3 7474 7476 7764 3 7474 7764 7762 3 7475 7763 7477 3 7765 7477 7763 3 7476 7478 7766 3 7476 7766 7764 3 7477 7765 7479 3 7767 7479 7765 3 7478 7480 7768 3 7478 7768 7766 3 7479 7767 7481 3 7769 7481 7767 3 7480 7482 7770 3 7480 7770 7768 3 7481 7769 7483 3 7771 7483 7769 3 7482 7484 7772 3 7482 7772 7770 3 7483 7771 7485 3 7773 7485 7771 3 7484 7486 7774 3 7484 7774 7772 3 7485 7773 7487 3 7775 7487 7773 3 7486 7488 7776 3 7486 7776 7774 3 7487 7775 7489 3 7777 7489 7775 3 7488 7490 7778 3 7488 7778 7776 3 7489 7777 7491 3 7779 7491 7777 3 7490 7492 7780 3 7490 7780 7778 3 7491 7779 7493 3 7781 7493 7779 3 7492 7494 7782 3 7492 7782 7780 3 7493 7781 7495 3 7783 7495 7781 3 7494 7496 7784 3 7494 7784 7782 3 7495 7783 7497 3 7785 7497 7783 
3 7496 7498 7786 3 7496 7786 7784 3 7497 7785 7499 3 7787 7499 7785 3 7498 7500 7788 3 7498 7788 7786 3 7499 7787 7501 3 7789 7501 7787 3 7500 7502 7790 3 7500 7790 7788 3 7501 7789 7503 3 7791 7503 7789 3 7502 7504 7792 3 7502 7792 7790 3 7503 7791 7793 3 7503 7793 7505 3 7504 7506 7794 3 7504 7794 7792 3 7505 7793 7795 3 7505 7795 7507 3 7506 7508 7794 3 7796 7794 7508 3 7507 7795 7797 3 7507 7797 7509 3 7508 7510 7796 3 7798 7796 7510 3 7509 7797 7799 3 7509 7799 7511 3 7510 7512 7798 3 7800 7798 7512 3 7511 7799 7801 3 7511 7801 7513 3 7512 7514 7800 3 7802 7800 7514 3 7513 7801 7803 3 7513 7803 7515 3 7514 7516 7802 3 7804 7802 7516 3 7515 7803 7805 3 7515 7805 7517 3 7516 7518 7804 3 7806 7804 7518 3 7517 7805 7807 3 7517 7807 7519 3 7518 7520 7806 3 7808 7806 7520 3 7519 7807 7811 3 7519 7811 7523 3 7520 7521 7808 3 7809 7808 7521 3 7521 7524 7809 3 7812 7809 7524 3 7522 7523 7810 3 7811 7810 7523 3 7522 7810 7815 3 7522 7815 7527 3 7524 7525 7812 3 7813 7812 7525 3 7525 7528 7813 3 7816 7813 7528 3 7526 7527 7814 3 7815 7814 7527 3 7526 7814 7884 3 7526 7884 7596 3 7528 7529 7816 3 7817 7816 7529 3 7529 7530 7817 3 7818 7817 7530 3 7530 7531 7818 3 7819 7818 7531 3 7531 7532 7819 3 7820 7819 7532 3 7532 7533 7820 3 7821 7820 7533 3 7533 7534 7821 3 7822 7821 7534 3 7534 7535 7822 3 7823 7822 7535 3 7535 7536 7823 3 7824 7823 7536 3 7536 7537 7824 3 7825 7824 7537 3 7537 7538 7825 3 7826 7825 7538 3 7538 7539 7826 3 7827 7826 7539 3 7539 7540 7827 3 7828 7827 7540 3 7540 7541 7828 3 7829 7828 7541 3 7541 7542 7829 3 7830 7829 7542 3 7542 7543 7830 3 7831 7830 7543 3 7543 7544 7831 3 7832 7831 7544 3 7544 7545 7832 3 7833 7832 7545 3 7545 7546 7833 3 7834 7833 7546 3 7546 7547 7834 3 7835 7834 7547 3 7547 7548 7835 3 7836 7835 7548 3 7548 7549 7836 3 7837 7836 7549 3 7549 7550 7837 3 7838 7837 7550 3 7550 7551 7838 3 7839 7838 7551 3 7551 7552 7839 3 7840 7839 7552 3 7552 7553 7840 3 7841 7840 7553 3 7553 7554 7841 3 7842 7841 7554 3 7554 7555 7842 3 7843 
7842 7555 3 7555 7556 7843 3 7844 7843 7556 3 7556 7557 7844 3 7845 7844 7557 3 7557 7558 7845 3 7846 7845 7558 3 7558 7559 7846 3 7847 7846 7559 3 7559 7560 7848 3 7559 7848 7847 3 7560 7561 7849 3 7560 7849 7848 3 7561 7562 7850 3 7561 7850 7849 3 7562 7563 7851 3 7562 7851 7850 3 7563 7564 7852 3 7563 7852 7851 3 7564 7565 7853 3 7564 7853 7852 3 7565 7566 7854 3 7565 7854 7853 3 7566 7567 7855 3 7566 7855 7854 3 7567 7568 7856 3 7567 7856 7855 3 7568 7569 7857 3 7568 7857 7856 3 7569 7570 7858 3 7569 7858 7857 3 7570 7571 7859 3 7570 7859 7858 3 7571 7572 7860 3 7571 7860 7859 3 7572 7573 7861 3 7572 7861 7860 3 7573 7574 7862 3 7573 7862 7861 3 7574 7575 7863 3 7574 7863 7862 3 7575 7576 7864 3 7575 7864 7863 3 7576 7577 7865 3 7576 7865 7864 3 7577 7578 7866 3 7577 7866 7865 3 7578 7579 7867 3 7578 7867 7866 3 7579 7580 7868 3 7579 7868 7867 3 7580 7581 7869 3 7580 7869 7868 3 7581 7582 7870 3 7581 7870 7869 3 7582 7583 7871 3 7582 7871 7870 3 7583 7584 7872 3 7583 7872 7871 3 7584 7585 7873 3 7584 7873 7872 3 7585 7586 7874 3 7585 7874 7873 3 7586 7587 7875 3 7586 7875 7874 3 7587 7588 7876 3 7587 7876 7875 3 7588 7589 7877 3 7588 7877 7876 3 7589 7590 7878 3 7589 7878 7877 3 7590 7591 7879 3 7590 7879 7878 3 7591 7592 7880 3 7591 7880 7879 3 7592 7593 7881 3 7592 7881 7880 3 7593 7594 7882 3 7593 7882 7881 3 7594 7595 7883 3 7594 7883 7882 3 7595 7596 7884 3 7595 7884 7883 3 7597 7885 7598 3 7886 7598 7885 3 7597 7667 7885 3 7955 7885 7667 3 7598 7886 7599 3 7887 7599 7886 3 7599 7887 7600 3 7888 7600 7887 3 7600 7888 7601 3 7889 7601 7888 3 7601 7889 7602 3 7890 7602 7889 3 7602 7890 7603 3 7891 7603 7890 3 7603 7891 7604 3 7892 7604 7891 3 7604 7892 7605 3 7893 7605 7892 3 7605 7893 7606 3 7894 7606 7893 3 7606 7894 7607 3 7895 7607 7894 3 7607 7895 7608 3 7896 7608 7895 3 7608 7896 7609 3 7897 7609 7896 3 7609 7897 7610 3 7898 7610 7897 3 7610 7898 7899 3 7610 7899 7611 3 7611 7899 7900 3 7611 7900 7612 3 7612 7900 7901 3 7612 7901 7613 3 7613 7901 7902 
3 7613 7902 7614 3 7614 7902 7903 3 7614 7903 7615 3 7615 7903 7904 3 7615 7904 7616 3 7616 7904 7905 3 7616 7905 7617 3 7617 7905 7906 3 7617 7906 7618 3 7618 7906 7907 3 7618 7907 7619 3 7619 7907 7908 3 7619 7908 7620 3 7620 7908 7909 3 7620 7909 7621 3 7621 7909 7910 3 7621 7910 7622 3 7622 7910 7911 3 7622 7911 7623 3 7623 7911 7912 3 7623 7912 7624 3 7624 7912 7913 3 7624 7913 7625 3 7625 7913 7914 3 7625 7914 7626 3 7626 7914 7915 3 7626 7915 7627 3 7627 7915 7916 3 7627 7916 7628 3 7628 7916 7917 3 7628 7917 7629 3 7629 7917 7918 3 7629 7918 7630 3 7630 7918 7919 3 7630 7919 7631 3 7631 7919 7920 3 7631 7920 7632 3 7632 7920 7921 3 7632 7921 7633 3 7633 7921 7922 3 7633 7922 7634 3 7634 7922 7923 3 7634 7923 7635 3 7635 7923 7924 3 7635 7924 7636 3 7636 7924 7925 3 7636 7925 7637 3 7637 7925 7926 3 7637 7926 7638 3 7638 7926 7927 3 7638 7927 7639 3 7639 7927 7928 3 7639 7928 7640 3 7640 7928 7929 3 7640 7929 7641 3 7641 7929 7930 3 7641 7930 7642 3 7642 7930 7931 3 7642 7931 7643 3 7643 7931 7932 3 7643 7932 7644 3 7644 7932 7933 3 7644 7933 7645 3 7645 7933 7934 3 7645 7934 7646 3 7646 7934 7935 3 7646 7935 7647 3 7647 7935 7936 3 7647 7936 7648 3 7648 7936 7937 3 7648 7937 7649 3 7649 7937 7938 3 7649 7938 7650 3 7650 7938 7939 3 7650 7939 7651 3 7651 7939 7940 3 7651 7940 7652 3 7652 7940 7941 3 7652 7941 7653 3 7653 7941 7942 3 7653 7942 7654 3 7654 7942 7943 3 7654 7943 7655 3 7655 7943 7944 3 7655 7944 7656 3 7656 7944 7945 3 7656 7945 7657 3 7657 7945 7946 3 7657 7946 7658 3 7658 7946 7947 3 7658 7947 7659 3 7659 7947 7948 3 7659 7948 7660 3 7660 7948 7949 3 7660 7949 7661 3 7661 7949 7950 3 7661 7950 7662 3 7662 7950 7951 3 7662 7951 7663 3 7663 7951 7952 3 7663 7952 7664 3 7664 7952 7665 3 7953 7665 7952 3 7665 7953 7668 3 7956 7668 7953 3 7666 7954 7667 3 7955 7667 7954 3 7666 7671 7959 3 7666 7959 7954 3 7668 7956 7669 3 7957 7669 7956 3 7669 7957 7672 3 7960 7672 7957 3 7670 7958 7671 3 7959 7671 7958 3 7670 7674 7962 3 7670 7962 7958 3 7672 
7960 7673 3 7961 7673 7960 3 7673 7961 7675 3 7963 7675 7961 3 7674 7676 7964 3 7674 7964 7962 3 7675 7963 7677 3 7965 7677 7963 3 7676 7678 7966 3 7676 7966 7964 3 7677 7965 7679 3 7967 7679 7965 3 7678 7680 7968 3 7678 7968 7966 3 7679 7967 7681 3 7969 7681 7967 3 7680 7682 7970 3 7680 7970 7968 3 7681 7969 7683 3 7971 7683 7969 3 7682 7684 7972 3 7682 7972 7970 3 7683 7971 7685 3 7973 7685 7971 3 7684 7686 7974 3 7684 7974 7972 3 7685 7973 7687 3 7975 7687 7973 3 7686 7688 7976 3 7686 7976 7974 3 7687 7975 7689 3 7977 7689 7975 3 7688 7690 7978 3 7688 7978 7976 3 7689 7977 7691 3 7979 7691 7977 3 7690 7692 7980 3 7690 7980 7978 3 7691 7979 7693 3 7981 7693 7979 3 7692 7694 7982 3 7692 7982 7980 3 7693 7981 7695 3 7983 7695 7981 3 7694 7696 7984 3 7694 7984 7982 3 7695 7983 7697 3 7985 7697 7983 3 7696 7698 7986 3 7696 7986 7984 3 7697 7985 7699 3 7987 7699 7985 3 7698 7700 7988 3 7698 7988 7986 3 7699 7987 7701 3 7989 7701 7987 3 7700 7702 7990 3 7700 7990 7988 3 7701 7989 7703 3 7991 7703 7989 3 7702 7704 7992 3 7702 7992 7990 3 7703 7991 7705 3 7993 7705 7991 3 7704 7706 7994 3 7704 7994 7992 3 7705 7993 7707 3 7995 7707 7993 3 7706 7708 7996 3 7706 7996 7994 3 7707 7995 7709 3 7997 7709 7995 3 7708 7710 7998 3 7708 7998 7996 3 7709 7997 7711 3 7999 7711 7997 3 7710 7712 8000 3 7710 8000 7998 3 7711 7999 7713 3 8001 7713 7999 3 7712 7714 8002 3 7712 8002 8000 3 7713 8001 7715 3 8003 7715 8001 3 7714 7716 8004 3 7714 8004 8002 3 7715 8003 7717 3 8005 7717 8003 3 7716 7718 8006 3 7716 8006 8004 3 7717 8005 7719 3 8007 7719 8005 3 7718 7720 8008 3 7718 8008 8006 3 7719 8007 8009 3 7719 8009 7721 3 7720 7722 8008 3 8010 8008 7722 3 7721 8009 8011 3 7721 8011 7723 3 7722 7724 8010 3 8012 8010 7724 3 7723 8011 8013 3 7723 8013 7725 3 7724 7726 8012 3 8014 8012 7726 3 7725 8013 8015 3 7725 8015 7727 3 7726 7728 8014 3 8016 8014 7728 3 7727 8015 8017 3 7727 8017 7729 3 7728 7730 8016 3 8018 8016 7730 3 7729 8017 8019 3 7729 8019 7731 3 7730 7732 8018 3 8020 8018 7732 
3 7731 8019 8021 3 7731 8021 7733 3 7732 7734 8020 3 8022 8020 7734 3 7733 8021 8023 3 7733 8023 7735 3 7734 7736 8022 3 8024 8022 7736 3 7735 8023 8025 3 7735 8025 7737 3 7736 7738 8024 3 8026 8024 7738 3 7737 8025 8027 3 7737 8027 7739 3 7738 7740 8026 3 8028 8026 7740 3 7739 8027 8029 3 7739 8029 7741 3 7740 7742 8028 3 8030 8028 7742 3 7741 8029 8031 3 7741 8031 7743 3 7742 7744 8030 3 8032 8030 7744 3 7743 8031 8033 3 7743 8033 7745 3 7744 7746 8032 3 8034 8032 7746 3 7745 8033 8035 3 7745 8035 7747 3 7746 7748 8034 3 8036 8034 7748 3 7747 8035 8037 3 7747 8037 7749 3 7748 7750 8036 3 8038 8036 7750 3 7749 8037 8039 3 7749 8039 7751 3 7750 7752 8038 3 8040 8038 7752 3 7751 8039 8041 3 7751 8041 7753 3 7752 7754 8040 3 8042 8040 7754 3 7753 8041 8043 3 7753 8043 7755 3 7754 7756 8042 3 8044 8042 7756 3 7755 8043 8045 3 7755 8045 7757 3 7756 7758 8044 3 8046 8044 7758 3 7757 8045 8047 3 7757 8047 7759 3 7758 7760 8046 3 8048 8046 7760 3 7759 8047 8049 3 7759 8049 7761 3 7760 7762 8048 3 8050 8048 7762 3 7761 8049 8051 3 7761 8051 7763 3 7762 7764 8050 3 8052 8050 7764 3 7763 8051 8053 3 7763 8053 7765 3 7764 7766 8052 3 8054 8052 7766 3 7765 8053 8055 3 7765 8055 7767 3 7766 7768 8054 3 8056 8054 7768 3 7767 8055 8057 3 7767 8057 7769 3 7768 7770 8056 3 8058 8056 7770 3 7769 8057 8059 3 7769 8059 7771 3 7770 7772 8058 3 8060 8058 7772 3 7771 8059 8061 3 7771 8061 7773 3 7772 7774 8060 3 8062 8060 7774 3 7773 8061 8063 3 7773 8063 7775 3 7774 7776 8064 3 7774 8064 8062 3 7775 8063 7777 3 8065 7777 8063 3 7776 7778 8066 3 7776 8066 8064 3 7777 8065 7779 3 8067 7779 8065 3 7778 7780 8068 3 7778 8068 8066 3 7779 8067 7781 3 8069 7781 8067 3 7780 7782 8070 3 7780 8070 8068 3 7781 8069 7783 3 8071 7783 8069 3 7782 7784 8072 3 7782 8072 8070 3 7783 8071 7785 3 8073 7785 8071 3 7784 7786 8074 3 7784 8074 8072 3 7785 8073 7787 3 8075 7787 8073 3 7786 7788 8076 3 7786 8076 8074 3 7787 8075 7789 3 8077 7789 8075 3 7788 7790 8078 3 7788 8078 8076 3 7789 8077 7791 3 8079 
7791 8077 3 7790 7792 8080 3 7790 8080 8078 3 7791 8079 7793 3 8081 7793 8079 3 7792 7794 8082 3 7792 8082 8080 3 7793 8081 7795 3 8083 7795 8081 3 7794 7796 8084 3 7794 8084 8082 3 7795 8083 7797 3 8085 7797 8083 3 7796 7798 8086 3 7796 8086 8084 3 7797 8085 7799 3 8087 7799 8085 3 7798 7800 8088 3 7798 8088 8086 3 7799 8087 7801 3 8089 7801 8087 3 7800 7802 8090 3 7800 8090 8088 3 7801 8089 7803 3 8091 7803 8089 3 7802 7804 8092 3 7802 8092 8090 3 7803 8091 7805 3 8093 7805 8091 3 7804 7806 8094 3 7804 8094 8092 3 7805 8093 7807 3 8095 7807 8093 3 7806 7808 8096 3 7806 8096 8094 3 7807 8095 7811 3 8099 7811 8095 3 7808 7809 8097 3 7808 8097 8096 3 7809 7812 8100 3 7809 8100 8097 3 7810 7811 8099 3 7810 8099 8098 3 7810 8098 7815 3 8103 7815 8098 3 7812 7813 8101 3 7812 8101 8100 3 7813 7816 8104 3 7813 8104 8101 3 7814 7815 8103 3 7814 8103 8102 3 7814 8102 7884 3 8172 7884 8102 3 7816 7817 8105 3 7816 8105 8104 3 7817 7818 8106 3 7817 8106 8105 3 7818 7819 8107 3 7818 8107 8106 3 7819 7820 8108 3 7819 8108 8107 3 7820 7821 8109 3 7820 8109 8108 3 7821 7822 8110 3 7821 8110 8109 3 7822 7823 8111 3 7822 8111 8110 3 7823 7824 8112 3 7823 8112 8111 3 7824 7825 8113 3 7824 8113 8112 3 7825 7826 8114 3 7825 8114 8113 3 7826 7827 8115 3 7826 8115 8114 3 7827 7828 8116 3 7827 8116 8115 3 7828 7829 8117 3 7828 8117 8116 3 7829 7830 8117 3 8118 8117 7830 3 7830 7831 8118 3 8119 8118 7831 3 7831 7832 8119 3 8120 8119 7832 3 7832 7833 8120 3 8121 8120 7833 3 7833 7834 8121 3 8122 8121 7834 3 7834 7835 8122 3 8123 8122 7835 3 7835 7836 8123 3 8124 8123 7836 3 7836 7837 8124 3 8125 8124 7837 3 7837 7838 8125 3 8126 8125 7838 3 7838 7839 8126 3 8127 8126 7839 3 7839 7840 8127 3 8128 8127 7840 3 7840 7841 8128 3 8129 8128 7841 3 7841 7842 8129 3 8130 8129 7842 3 7842 7843 8130 3 8131 8130 7843 3 7843 7844 8131 3 8132 8131 7844 3 7844 7845 8132 3 8133 8132 7845 3 7845 7846 8133 3 8134 8133 7846 3 7846 7847 8134 3 8135 8134 7847 3 7847 7848 8135 3 8136 8135 7848 3 7848 7849 8136 
3 8137 8136 7849 3 7849 7850 8137 3 8138 8137 7850 3 7850 7851 8138 3 8139 8138 7851 3 7851 7852 8139 3 8140 8139 7852 3 7852 7853 8140 3 8141 8140 7853 3 7853 7854 8141 3 8142 8141 7854 3 7854 7855 8142 3 8143 8142 7855 3 7855 7856 8143 3 8144 8143 7856 3 7856 7857 8144 3 8145 8144 7857 3 7857 7858 8145 3 8146 8145 7858 3 7858 7859 8146 3 8147 8146 7859 3 7859 7860 8147 3 8148 8147 7860 3 7860 7861 8148 3 8149 8148 7861 3 7861 7862 8149 3 8150 8149 7862 3 7862 7863 8150 3 8151 8150 7863 3 7863 7864 8151 3 8152 8151 7864 3 7864 7865 8152 3 8153 8152 7865 3 7865 7866 8153 3 8154 8153 7866 3 7866 7867 8154 3 8155 8154 7867 3 7867 7868 8155 3 8156 8155 7868 3 7868 7869 8156 3 8157 8156 7869 3 7869 7870 8157 3 8158 8157 7870 3 7870 7871 8158 3 8159 8158 7871 3 7871 7872 8159 3 8160 8159 7872 3 7872 7873 8160 3 8161 8160 7873 3 7873 7874 8161 3 8162 8161 7874 3 7874 7875 8162 3 8163 8162 7875 3 7875 7876 8163 3 8164 8163 7876 3 7876 7877 8164 3 8165 8164 7877 3 7877 7878 8165 3 8166 8165 7878 3 7878 7879 8166 3 8167 8166 7879 3 7879 7880 8167 3 8168 8167 7880 3 7880 7881 8168 3 8169 8168 7881 3 7881 7882 8169 3 8170 8169 7882 3 7882 7883 8170 3 8171 8170 7883 3 7883 7884 8171 3 8172 8171 7884 3 7885 8173 7886 3 8174 7886 8173 3 7885 7955 8243 3 7885 8243 8173 3 7886 8174 7887 3 8175 7887 8174 3 7887 8175 7888 3 8176 7888 8175 3 7888 8176 7889 3 8177 7889 8176 3 7889 8177 7890 3 8178 7890 8177 3 7890 8178 7891 3 8179 7891 8178 3 7891 8179 7892 3 8180 7892 8179 3 7892 8180 7893 3 8181 7893 8180 3 7893 8181 7894 3 8182 7894 8181 3 7894 8182 7895 3 8183 7895 8182 3 7895 8183 7896 3 8184 7896 8183 3 7896 8184 7897 3 8185 7897 8184 3 7897 8185 7898 3 8186 7898 8185 3 7898 8186 7899 3 8187 7899 8186 3 7899 8187 7900 3 8188 7900 8187 3 7900 8188 7901 3 8189 7901 8188 3 7901 8189 7902 3 8190 7902 8189 3 7902 8190 7903 3 8191 7903 8190 3 7903 8191 7904 3 8192 7904 8191 3 7904 8192 7905 3 8193 7905 8192 3 7905 8193 7906 3 8194 7906 8193 3 7906 8194 7907 3 8195 7907 8194 3 7907 
8195 7908 3 8196 7908 8195 3 7908 8196 7909 3 8197 7909 8196 3 7909 8197 7910 3 8198 7910 8197 3 7910 8198 7911 3 8199 7911 8198 3 7911 8199 7912 3 8200 7912 8199 3 7912 8200 7913 3 8201 7913 8200 3 7913 8201 7914 3 8202 7914 8201 3 7914 8202 7915 3 8203 7915 8202 3 7915 8203 7916 3 8204 7916 8203 3 7916 8204 7917 3 8205 7917 8204 3 7917 8205 7918 3 8206 7918 8205 3 7918 8206 7919 3 8207 7919 8206 3 7919 8207 7920 3 8208 7920 8207 3 7920 8208 7921 3 8209 7921 8208 3 7921 8209 7922 3 8210 7922 8209 3 7922 8210 7923 3 8211 7923 8210 3 7923 8211 7924 3 8212 7924 8211 3 7924 8212 7925 3 8213 7925 8212 3 7925 8213 7926 3 8214 7926 8213 3 7926 8214 7927 3 8215 7927 8214 3 7927 8215 7928 3 8216 7928 8215 3 7928 8216 7929 3 8217 7929 8216 3 7929 8217 7930 3 8218 7930 8217 3 7930 8218 7931 3 8219 7931 8218 3 7931 8219 7932 3 8220 7932 8219 3 7932 8220 7933 3 8221 7933 8220 3 7933 8221 7934 3 8222 7934 8221 3 7934 8222 7935 3 8223 7935 8222 3 7935 8223 7936 3 8224 7936 8223 3 7936 8224 7937 3 8225 7937 8224 3 7937 8225 7938 3 8226 7938 8225 3 7938 8226 7939 3 8227 7939 8226 3 7939 8227 8228 3 7939 8228 7940 3 7940 8228 8229 3 7940 8229 7941 3 7941 8229 8230 3 7941 8230 7942 3 7942 8230 8231 3 7942 8231 7943 3 7943 8231 8232 3 7943 8232 7944 3 7944 8232 8233 3 7944 8233 7945 3 7945 8233 8234 3 7945 8234 7946 3 7946 8234 8235 3 7946 8235 7947 3 7947 8235 8236 3 7947 8236 7948 3 7948 8236 8237 3 7948 8237 7949 3 7949 8237 8238 3 7949 8238 7950 3 7950 8238 8239 3 7950 8239 7951 3 7951 8239 8240 3 7951 8240 7952 3 7952 8240 8241 3 7952 8241 7953 3 7953 8241 8244 3 7953 8244 7956 3 7954 8242 8243 3 7954 8243 7955 3 7954 7959 8242 3 8247 8242 7959 3 7956 8244 8245 3 7956 8245 7957 3 7957 8245 8248 3 7957 8248 7960 3 7958 8246 8247 3 7958 8247 7959 3 7958 7962 8246 3 8250 8246 7962 3 7960 8248 8249 3 7960 8249 7961 3 7961 8249 8251 3 7961 8251 7963 3 7962 7964 8250 3 8252 8250 7964 3 7963 8251 8253 3 7963 8253 7965 3 7964 7966 8252 3 8254 8252 7966 3 7965 8253 8255 3 7965 8255 7967 
3 7966 7968 8254 3 8256 8254 7968 3 7967 8255 8257 3 7967 8257 7969 3 7968 7970 8256 3 8258 8256 7970 3 7969 8257 8259 3 7969 8259 7971 3 7970 7972 8258 3 8260 8258 7972 3 7971 8259 8261 3 7971 8261 7973 3 7972 7974 8260 3 8262 8260 7974 3 7973 8261 8263 3 7973 8263 7975 3 7974 7976 8262 3 8264 8262 7976 3 7975 8263 8265 3 7975 8265 7977 3 7976 7978 8264 3 8266 8264 7978 3 7977 8265 8267 3 7977 8267 7979 3 7978 7980 8266 3 8268 8266 7980 3 7979 8267 8269 3 7979 8269 7981 3 7980 7982 8268 3 8270 8268 7982 3 7981 8269 8271 3 7981 8271 7983 3 7982 7984 8270 3 8272 8270 7984 3 7983 8271 8273 3 7983 8273 7985 3 7984 7986 8272 3 8274 8272 7986 3 7985 8273 8275 3 7985 8275 7987 3 7986 7988 8274 3 8276 8274 7988 3 7987 8275 8277 3 7987 8277 7989 3 7988 7990 8276 3 8278 8276 7990 3 7989 8277 8279 3 7989 8279 7991 3 7990 7992 8278 3 8280 8278 7992 3 7991 8279 8281 3 7991 8281 7993 3 7992 7994 8280 3 8282 8280 7994 3 7993 8281 8283 3 7993 8283 7995 3 7994 7996 8284 3 7994 8284 8282 3 7995 8283 7997 3 8285 7997 8283 3 7996 7998 8286 3 7996 8286 8284 3 7997 8285 7999 3 8287 7999 8285 3 7998 8000 8288 3 7998 8288 8286 3 7999 8287 8001 3 8289 8001 8287 3 8000 8002 8290 3 8000 8290 8288 3 8001 8289 8003 3 8291 8003 8289 3 8002 8004 8292 3 8002 8292 8290 3 8003 8291 8005 3 8293 8005 8291 3 8004 8006 8294 3 8004 8294 8292 3 8005 8293 8007 3 8295 8007 8293 3 8006 8008 8296 3 8006 8296 8294 3 8007 8295 8009 3 8297 8009 8295 3 8008 8010 8298 3 8008 8298 8296 3 8009 8297 8011 3 8299 8011 8297 3 8010 8012 8300 3 8010 8300 8298 3 8011 8299 8013 3 8301 8013 8299 3 8012 8014 8302 3 8012 8302 8300 3 8013 8301 8015 3 8303 8015 8301 3 8014 8016 8304 3 8014 8304 8302 3 8015 8303 8017 3 8305 8017 8303 3 8016 8018 8306 3 8016 8306 8304 3 8017 8305 8019 3 8307 8019 8305 3 8018 8020 8308 3 8018 8308 8306 3 8019 8307 8021 3 8309 8021 8307 3 8020 8022 8310 3 8020 8310 8308 3 8021 8309 8023 3 8311 8023 8309 3 8022 8024 8312 3 8022 8312 8310 3 8023 8311 8025 3 8313 8025 8311 3 8024 8026 8314 3 8024 
8314 8312 3 8025 8313 8027 3 8315 8027 8313 3 8026 8028 8316 3 8026 8316 8314 3 8027 8315 8029 3 8317 8029 8315 3 8028 8030 8318 3 8028 8318 8316 3 8029 8317 8031 3 8319 8031 8317 3 8030 8032 8320 3 8030 8320 8318 3 8031 8319 8033 3 8321 8033 8319 3 8032 8034 8322 3 8032 8322 8320 3 8033 8321 8035 3 8323 8035 8321 3 8034 8036 8324 3 8034 8324 8322 3 8035 8323 8037 3 8325 8037 8323 3 8036 8038 8326 3 8036 8326 8324 3 8037 8325 8039 3 8327 8039 8325 3 8038 8040 8328 3 8038 8328 8326 3 8039 8327 8041 3 8329 8041 8327 3 8040 8042 8330 3 8040 8330 8328 3 8041 8329 8043 3 8331 8043 8329 3 8042 8044 8332 3 8042 8332 8330 3 8043 8331 8045 3 8333 8045 8331 3 8044 8046 8334 3 8044 8334 8332 3 8045 8333 8047 3 8335 8047 8333 3 8046 8048 8336 3 8046 8336 8334 3 8047 8335 8049 3 8337 8049 8335 3 8048 8050 8336 3 8338 8336 8050 3 8049 8337 8051 3 8339 8051 8337 3 8050 8052 8338 3 8340 8338 8052 3 8051 8339 8341 3 8051 8341 8053 3 8052 8054 8340 3 8342 8340 8054 3 8053 8341 8343 3 8053 8343 8055 3 8054 8056 8342 3 8344 8342 8056 3 8055 8343 8345 3 8055 8345 8057 3 8056 8058 8344 3 8346 8344 8058 3 8057 8345 8347 3 8057 8347 8059 3 8058 8060 8346 3 8348 8346 8060 3 8059 8347 8349 3 8059 8349 8061 3 8060 8062 8348 3 8350 8348 8062 3 8061 8349 8351 3 8061 8351 8063 3 8062 8064 8350 3 8352 8350 8064 3 8063 8351 8353 3 8063 8353 8065 3 8064 8066 8352 3 8354 8352 8066 3 8065 8353 8355 3 8065 8355 8067 3 8066 8068 8354 3 8356 8354 8068 3 8067 8355 8357 3 8067 8357 8069 3 8068 8070 8356 3 8358 8356 8070 3 8069 8357 8359 3 8069 8359 8071 3 8070 8072 8358 3 8360 8358 8072 3 8071 8359 8361 3 8071 8361 8073 3 8072 8074 8360 3 8362 8360 8074 3 8073 8361 8363 3 8073 8363 8075 3 8074 8076 8362 3 8364 8362 8076 3 8075 8363 8365 3 8075 8365 8077 3 8076 8078 8364 3 8366 8364 8078 3 8077 8365 8367 3 8077 8367 8079 3 8078 8080 8366 3 8368 8366 8080 3 8079 8367 8369 3 8079 8369 8081 3 8080 8082 8368 3 8370 8368 8082 3 8081 8369 8371 3 8081 8371 8083 3 8082 8084 8370 3 8372 8370 8084 3 8083 8371 8373 
3 8083 8373 8085 3 8084 8086 8372 3 8374 8372 8086 3 8085 8373 8375 3 8085 8375 8087 3 8086 8088 8374 3 8376 8374 8088 3 8087 8375 8377 3 8087 8377 8089 3 8088 8090 8376 3 8378 8376 8090 3 8089 8377 8379 3 8089 8379 8091 3 8090 8092 8378 3 8380 8378 8092 3 8091 8379 8381 3 8091 8381 8093 3 8092 8094 8380 3 8382 8380 8094 3 8093 8381 8383 3 8093 8383 8095 3 8094 8096 8382 3 8384 8382 8096 3 8095 8383 8387 3 8095 8387 8099 3 8096 8097 8384 3 8385 8384 8097 3 8097 8100 8385 3 8388 8385 8100 3 8098 8099 8386 3 8387 8386 8099 3 8098 8386 8391 3 8098 8391 8103 3 8100 8101 8388 3 8389 8388 8101 3 8101 8104 8389 3 8392 8389 8104 3 8102 8103 8390 3 8391 8390 8103 3 8102 8390 8460 3 8102 8460 8172 3 8104 8105 8393 3 8104 8393 8392 3 8105 8106 8394 3 8105 8394 8393 3 8106 8107 8395 3 8106 8395 8394 3 8107 8108 8396 3 8107 8396 8395 3 8108 8109 8397 3 8108 8397 8396 3 8109 8110 8398 3 8109 8398 8397 3 8110 8111 8399 3 8110 8399 8398 3 8111 8112 8400 3 8111 8400 8399 3 8112 8113 8401 3 8112 8401 8400 3 8113 8114 8402 3 8113 8402 8401 3 8114 8115 8403 3 8114 8403 8402 3 8115 8116 8404 3 8115 8404 8403 3 8116 8117 8405 3 8116 8405 8404 3 8117 8118 8406 3 8117 8406 8405 3 8118 8119 8407 3 8118 8407 8406 3 8119 8120 8408 3 8119 8408 8407 3 8120 8121 8409 3 8120 8409 8408 3 8121 8122 8410 3 8121 8410 8409 3 8122 8123 8411 3 8122 8411 8410 3 8123 8124 8412 3 8123 8412 8411 3 8124 8125 8413 3 8124 8413 8412 3 8125 8126 8414 3 8125 8414 8413 3 8126 8127 8415 3 8126 8415 8414 3 8127 8128 8416 3 8127 8416 8415 3 8128 8129 8417 3 8128 8417 8416 3 8129 8130 8418 3 8129 8418 8417 3 8130 8131 8419 3 8130 8419 8418 3 8131 8132 8420 3 8131 8420 8419 3 8132 8133 8421 3 8132 8421 8420 3 8133 8134 8422 3 8133 8422 8421 3 8134 8135 8423 3 8134 8423 8422 3 8135 8136 8424 3 8135 8424 8423 3 8136 8137 8425 3 8136 8425 8424 3 8137 8138 8426 3 8137 8426 8425 3 8138 8139 8427 3 8138 8427 8426 3 8139 8140 8428 3 8139 8428 8427 3 8140 8141 8429 3 8140 8429 8428 3 8141 8142 8430 3 8141 8430 8429 3 8142 
8143 8431 3 8142 8431 8430 3 8143 8144 8432 3 8143 8432 8431 3 8144 8145 8433 3 8144 8433 8432 3 8145 8146 8434 3 8145 8434 8433 3 8146 8147 8435 3 8146 8435 8434 3 8147 8148 8436 3 8147 8436 8435 3 8148 8149 8437 3 8148 8437 8436 3 8149 8150 8438 3 8149 8438 8437 3 8150 8151 8439 3 8150 8439 8438 3 8151 8152 8440 3 8151 8440 8439 3 8152 8153 8441 3 8152 8441 8440 3 8153 8154 8442 3 8153 8442 8441 3 8154 8155 8443 3 8154 8443 8442 3 8155 8156 8444 3 8155 8444 8443 3 8156 8157 8445 3 8156 8445 8444 3 8157 8158 8446 3 8157 8446 8445 3 8158 8159 8447 3 8158 8447 8446 3 8159 8160 8447 3 8448 8447 8160 3 8160 8161 8448 3 8449 8448 8161 3 8161 8162 8449 3 8450 8449 8162 3 8162 8163 8450 3 8451 8450 8163 3 8163 8164 8451 3 8452 8451 8164 3 8164 8165 8452 3 8453 8452 8165 3 8165 8166 8453 3 8454 8453 8166 3 8166 8167 8454 3 8455 8454 8167 3 8167 8168 8455 3 8456 8455 8168 3 8168 8169 8456 3 8457 8456 8169 3 8169 8170 8457 3 8458 8457 8170 3 8170 8171 8458 3 8459 8458 8171 3 8171 8172 8459 3 8460 8459 8172 3 8173 8461 8462 3 8173 8462 8174 3 8173 8243 8461 3 8531 8461 8243 3 8174 8462 8463 3 8174 8463 8175 3 8175 8463 8464 3 8175 8464 8176 3 8176 8464 8465 3 8176 8465 8177 3 8177 8465 8466 3 8177 8466 8178 3 8178 8466 8467 3 8178 8467 8179 3 8179 8467 8468 3 8179 8468 8180 3 8180 8468 8469 3 8180 8469 8181 3 8181 8469 8470 3 8181 8470 8182 3 8182 8470 8471 3 8182 8471 8183 3 8183 8471 8472 3 8183 8472 8184 3 8184 8472 8473 3 8184 8473 8185 3 8185 8473 8474 3 8185 8474 8186 3 8186 8474 8475 3 8186 8475 8187 3 8187 8475 8476 3 8187 8476 8188 3 8188 8476 8477 3 8188 8477 8189 3 8189 8477 8478 3 8189 8478 8190 3 8190 8478 8479 3 8190 8479 8191 3 8191 8479 8480 3 8191 8480 8192 3 8192 8480 8481 3 8192 8481 8193 3 8193 8481 8482 3 8193 8482 8194 3 8194 8482 8483 3 8194 8483 8195 3 8195 8483 8484 3 8195 8484 8196 3 8196 8484 8485 3 8196 8485 8197 3 8197 8485 8486 3 8197 8486 8198 3 8198 8486 8487 3 8198 8487 8199 3 8199 8487 8488 3 8199 8488 8200 3 8200 8488 8489 3 8200 8489 8201 
3 8201 8489 8490 3 8201 8490 8202 3 8202 8490 8491 3 8202 8491 8203 3 8203 8491 8492 3 8203 8492 8204 3 8204 8492 8493 3 8204 8493 8205 3 8205 8493 8494 3 8205 8494 8206 3 8206 8494 8495 3 8206 8495 8207 3 8207 8495 8496 3 8207 8496 8208 3 8208 8496 8497 3 8208 8497 8209 3 8209 8497 8498 3 8209 8498 8210 3 8210 8498 8499 3 8210 8499 8211 3 8211 8499 8500 3 8211 8500 8212 3 8212 8500 8501 3 8212 8501 8213 3 8213 8501 8502 3 8213 8502 8214 3 8214 8502 8503 3 8214 8503 8215 3 8215 8503 8504 3 8215 8504 8216 3 8216 8504 8505 3 8216 8505 8217 3 8217 8505 8506 3 8217 8506 8218 3 8218 8506 8507 3 8218 8507 8219 3 8219 8507 8220 3 8508 8220 8507 3 8220 8508 8221 3 8509 8221 8508 3 8221 8509 8222 3 8510 8222 8509 3 8222 8510 8223 3 8511 8223 8510 3 8223 8511 8224 3 8512 8224 8511 3 8224 8512 8225 3 8513 8225 8512 3 8225 8513 8226 3 8514 8226 8513 3 8226 8514 8227 3 8515 8227 8514 3 8227 8515 8228 3 8516 8228 8515 3 8228 8516 8229 3 8517 8229 8516 3 8229 8517 8230 3 8518 8230 8517 3 8230 8518 8231 3 8519 8231 8518 3 8231 8519 8232 3 8520 8232 8519 3 8232 8520 8233 3 8521 8233 8520 3 8233 8521 8234 3 8522 8234 8521 3 8234 8522 8235 3 8523 8235 8522 3 8235 8523 8236 3 8524 8236 8523 3 8236 8524 8237 3 8525 8237 8524 3 8237 8525 8238 3 8526 8238 8525 3 8238 8526 8239 3 8527 8239 8526 3 8239 8527 8240 3 8528 8240 8527 3 8240 8528 8241 3 8529 8241 8528 3 8241 8529 8244 3 8532 8244 8529 3 8242 8530 8243 3 8531 8243 8530 3 8242 8247 8535 3 8242 8535 8530 3 8244 8532 8245 3 8533 8245 8532 3 8245 8533 8248 3 8536 8248 8533 3 8246 8534 8247 3 8535 8247 8534 3 8246 8250 8538 3 8246 8538 8534 3 8248 8536 8249 3 8537 8249 8536 3 8249 8537 8251 3 8539 8251 8537 3 8250 8252 8540 3 8250 8540 8538 3 8251 8539 8253 3 8541 8253 8539 3 8252 8254 8542 3 8252 8542 8540 3 8253 8541 8255 3 8543 8255 8541 3 8254 8256 8544 3 8254 8544 8542 3 8255 8543 8257 3 8545 8257 8543 3 8256 8258 8546 3 8256 8546 8544 3 8257 8545 8259 3 8547 8259 8545 3 8258 8260 8548 3 8258 8548 8546 3 8259 8547 8261 3 8549 
8261 8547 3 8260 8262 8550 3 8260 8550 8548 3 8261 8549 8263 3 8551 8263 8549 3 8262 8264 8552 3 8262 8552 8550 3 8263 8551 8265 3 8553 8265 8551 3 8264 8266 8554 3 8264 8554 8552 3 8265 8553 8267 3 8555 8267 8553 3 8266 8268 8556 3 8266 8556 8554 3 8267 8555 8269 3 8557 8269 8555 3 8268 8270 8558 3 8268 8558 8556 3 8269 8557 8271 3 8559 8271 8557 3 8270 8272 8558 3 8560 8558 8272 3 8271 8559 8273 3 8561 8273 8559 3 8272 8274 8560 3 8562 8560 8274 3 8273 8561 8275 3 8563 8275 8561 3 8274 8276 8562 3 8564 8562 8276 3 8275 8563 8565 3 8275 8565 8277 3 8276 8278 8564 3 8566 8564 8278 3 8277 8565 8567 3 8277 8567 8279 3 8278 8280 8566 3 8568 8566 8280 3 8279 8567 8569 3 8279 8569 8281 3 8280 8282 8568 3 8570 8568 8282 3 8281 8569 8571 3 8281 8571 8283 3 8282 8284 8570 3 8572 8570 8284 3 8283 8571 8573 3 8283 8573 8285 3 8284 8286 8572 3 8574 8572 8286 3 8285 8573 8575 3 8285 8575 8287 3 8286 8288 8574 3 8576 8574 8288 3 8287 8575 8577 3 8287 8577 8289 3 8288 8290 8576 3 8578 8576 8290 3 8289 8577 8579 3 8289 8579 8291 3 8290 8292 8578 3 8580 8578 8292 3 8291 8579 8581 3 8291 8581 8293 3 8292 8294 8580 3 8582 8580 8294 3 8293 8581 8583 3 8293 8583 8295 3 8294 8296 8582 3 8584 8582 8296 3 8295 8583 8585 3 8295 8585 8297 3 8296 8298 8584 3 8586 8584 8298 3 8297 8585 8587 3 8297 8587 8299 3 8298 8300 8586 3 8588 8586 8300 3 8299 8587 8589 3 8299 8589 8301 3 8300 8302 8588 3 8590 8588 8302 3 8301 8589 8591 3 8301 8591 8303 3 8302 8304 8590 3 8592 8590 8304 3 8303 8591 8593 3 8303 8593 8305 3 8304 8306 8592 3 8594 8592 8306 3 8305 8593 8595 3 8305 8595 8307 3 8306 8308 8594 3 8596 8594 8308 3 8307 8595 8597 3 8307 8597 8309 3 8308 8310 8596 3 8598 8596 8310 3 8309 8597 8599 3 8309 8599 8311 3 8310 8312 8598 3 8600 8598 8312 3 8311 8599 8601 3 8311 8601 8313 3 8312 8314 8600 3 8602 8600 8314 3 8313 8601 8603 3 8313 8603 8315 3 8314 8316 8602 3 8604 8602 8316 3 8315 8603 8605 3 8315 8605 8317 3 8316 8318 8604 3 8606 8604 8318 3 8317 8605 8607 3 8317 8607 8319 3 8318 8320 8606 
3 8608 8606 8320 3 8319 8607 8609 3 8319 8609 8321 3 8320 8322 8608 3 8610 8608 8322 3 8321 8609 8611 3 8321 8611 8323 3 8322 8324 8610 3 8612 8610 8324 3 8323 8611 8613 3 8323 8613 8325 3 8324 8326 8612 3 8614 8612 8326 3 8325 8613 8615 3 8325 8615 8327 3 8326 8328 8616 3 8326 8616 8614 3 8327 8615 8617 3 8327 8617 8329 3 8328 8330 8618 3 8328 8618 8616 3 8329 8617 8619 3 8329 8619 8331 3 8330 8332 8620 3 8330 8620 8618 3 8331 8619 8621 3 8331 8621 8333 3 8332 8334 8622 3 8332 8622 8620 3 8333 8621 8335 3 8623 8335 8621 3 8334 8336 8624 3 8334 8624 8622 3 8335 8623 8337 3 8625 8337 8623 3 8336 8338 8626 3 8336 8626 8624 3 8337 8625 8339 3 8627 8339 8625 3 8338 8340 8628 3 8338 8628 8626 3 8339 8627 8341 3 8629 8341 8627 3 8340 8342 8630 3 8340 8630 8628 3 8341 8629 8343 3 8631 8343 8629 3 8342 8344 8632 3 8342 8632 8630 3 8343 8631 8345 3 8633 8345 8631 3 8344 8346 8634 3 8344 8634 8632 3 8345 8633 8347 3 8635 8347 8633 3 8346 8348 8636 3 8346 8636 8634 3 8347 8635 8349 3 8637 8349 8635 3 8348 8350 8638 3 8348 8638 8636 3 8349 8637 8351 3 8639 8351 8637 3 8350 8352 8640 3 8350 8640 8638 3 8351 8639 8353 3 8641 8353 8639 3 8352 8354 8642 3 8352 8642 8640 3 8353 8641 8355 3 8643 8355 8641 3 8354 8356 8644 3 8354 8644 8642 3 8355 8643 8357 3 8645 8357 8643 3 8356 8358 8646 3 8356 8646 8644 3 8357 8645 8359 3 8647 8359 8645 3 8358 8360 8648 3 8358 8648 8646 3 8359 8647 8361 3 8649 8361 8647 3 8360 8362 8650 3 8360 8650 8648 3 8361 8649 8363 3 8651 8363 8649 3 8362 8364 8652 3 8362 8652 8650 3 8363 8651 8365 3 8653 8365 8651 3 8364 8366 8654 3 8364 8654 8652 3 8365 8653 8367 3 8655 8367 8653 3 8366 8368 8656 3 8366 8656 8654 3 8367 8655 8369 3 8657 8369 8655 3 8368 8370 8658 3 8368 8658 8656 3 8369 8657 8371 3 8659 8371 8657 3 8370 8372 8660 3 8370 8660 8658 3 8371 8659 8373 3 8661 8373 8659 3 8372 8374 8662 3 8372 8662 8660 3 8373 8661 8375 3 8663 8375 8661 3 8374 8376 8664 3 8374 8664 8662 3 8375 8663 8377 3 8665 8377 8663 3 8376 8378 8666 3 8376 8666 8664 3 8377 
8665 8379 3 8667 8379 8665 3 8378 8380 8668 3 8378 8668 8666 3 8379 8667 8381 3 8669 8381 8667 3 8380 8382 8670 3 8380 8670 8668 3 8381 8669 8383 3 8671 8383 8669 3 8382 8384 8672 3 8382 8672 8670 3 8383 8671 8387 3 8675 8387 8671 3 8384 8385 8672 3 8673 8672 8385 3 8385 8388 8673 3 8676 8673 8388 3 8386 8387 8674 3 8675 8674 8387 3 8386 8674 8391 3 8679 8391 8674 3 8388 8389 8676 3 8677 8676 8389 3 8389 8392 8677 3 8680 8677 8392 3 8390 8391 8678 3 8679 8678 8391 3 8390 8678 8748 3 8390 8748 8460 3 8392 8393 8680 3 8681 8680 8393 3 8393 8394 8681 3 8682 8681 8394 3 8394 8395 8682 3 8683 8682 8395 3 8395 8396 8683 3 8684 8683 8396 3 8396 8397 8684 3 8685 8684 8397 3 8397 8398 8685 3 8686 8685 8398 3 8398 8399 8686 3 8687 8686 8399 3 8399 8400 8687 3 8688 8687 8400 3 8400 8401 8688 3 8689 8688 8401 3 8401 8402 8689 3 8690 8689 8402 3 8402 8403 8690 3 8691 8690 8403 3 8403 8404 8691 3 8692 8691 8404 3 8404 8405 8692 3 8693 8692 8405 3 8405 8406 8693 3 8694 8693 8406 3 8406 8407 8694 3 8695 8694 8407 3 8407 8408 8695 3 8696 8695 8408 3 8408 8409 8696 3 8697 8696 8409 3 8409 8410 8697 3 8698 8697 8410 3 8410 8411 8698 3 8699 8698 8411 3 8411 8412 8699 3 8700 8699 8412 3 8412 8413 8700 3 8701 8700 8413 3 8413 8414 8701 3 8702 8701 8414 3 8414 8415 8702 3 8703 8702 8415 3 8415 8416 8703 3 8704 8703 8416 3 8416 8417 8704 3 8705 8704 8417 3 8417 8418 8705 3 8706 8705 8418 3 8418 8419 8706 3 8707 8706 8419 3 8419 8420 8707 3 8708 8707 8420 3 8420 8421 8708 3 8709 8708 8421 3 8421 8422 8709 3 8710 8709 8422 3 8422 8423 8710 3 8711 8710 8423 3 8423 8424 8711 3 8712 8711 8424 3 8424 8425 8712 3 8713 8712 8425 3 8425 8426 8713 3 8714 8713 8426 3 8426 8427 8714 3 8715 8714 8427 3 8427 8428 8715 3 8716 8715 8428 3 8428 8429 8716 3 8717 8716 8429 3 8429 8430 8717 3 8718 8717 8430 3 8430 8431 8718 3 8719 8718 8431 3 8431 8432 8719 3 8720 8719 8432 3 8432 8433 8720 3 8721 8720 8433 3 8433 8434 8721 3 8722 8721 8434 3 8434 8435 8722 3 8723 8722 8435 3 8435 8436 8723 3 8724 8723 8436 
3 8436 8437 8724 3 8725 8724 8437 3 8437 8438 8725 3 8726 8725 8438 3 8438 8439 8726 3 8727 8726 8439 3 8439 8440 8727 3 8728 8727 8440 3 8440 8441 8729 3 8440 8729 8728 3 8441 8442 8730 3 8441 8730 8729 3 8442 8443 8731 3 8442 8731 8730 3 8443 8444 8732 3 8443 8732 8731 3 8444 8445 8733 3 8444 8733 8732 3 8445 8446 8734 3 8445 8734 8733 3 8446 8447 8735 3 8446 8735 8734 3 8447 8448 8736 3 8447 8736 8735 3 8448 8449 8737 3 8448 8737 8736 3 8449 8450 8738 3 8449 8738 8737 3 8450 8451 8739 3 8450 8739 8738 3 8451 8452 8740 3 8451 8740 8739 3 8452 8453 8741 3 8452 8741 8740 3 8453 8454 8742 3 8453 8742 8741 3 8454 8455 8743 3 8454 8743 8742 3 8455 8456 8744 3 8455 8744 8743 3 8456 8457 8745 3 8456 8745 8744 3 8457 8458 8746 3 8457 8746 8745 3 8458 8459 8747 3 8458 8747 8746 3 8459 8460 8748 3 8459 8748 8747 3 8461 8749 8462 3 8750 8462 8749 3 8461 8531 8749 3 8819 8749 8531 3 8462 8750 8463 3 8751 8463 8750 3 8463 8751 8464 3 8752 8464 8751 3 8464 8752 8465 3 8753 8465 8752 3 8465 8753 8466 3 8754 8466 8753 3 8466 8754 8467 3 8755 8467 8754 3 8467 8755 8468 3 8756 8468 8755 3 8468 8756 8469 3 8757 8469 8756 3 8469 8757 8470 3 8758 8470 8757 3 8470 8758 8471 3 8759 8471 8758 3 8471 8759 8472 3 8760 8472 8759 3 8472 8760 8473 3 8761 8473 8760 3 8473 8761 8474 3 8762 8474 8761 3 8474 8762 8475 3 8763 8475 8762 3 8475 8763 8476 3 8764 8476 8763 3 8476 8764 8477 3 8765 8477 8764 3 8477 8765 8478 3 8766 8478 8765 3 8478 8766 8479 3 8767 8479 8766 3 8479 8767 8480 3 8768 8480 8767 3 8480 8768 8481 3 8769 8481 8768 3 8481 8769 8482 3 8770 8482 8769 3 8482 8770 8483 3 8771 8483 8770 3 8483 8771 8484 3 8772 8484 8771 3 8484 8772 8485 3 8773 8485 8772 3 8485 8773 8486 3 8774 8486 8773 3 8486 8774 8487 3 8775 8487 8774 3 8487 8775 8488 3 8776 8488 8775 3 8488 8776 8489 3 8777 8489 8776 3 8489 8777 8490 3 8778 8490 8777 3 8490 8778 8491 3 8779 8491 8778 3 8491 8779 8492 3 8780 8492 8779 3 8492 8780 8493 3 8781 8493 8780 3 8493 8781 8494 3 8782 8494 8781 3 8494 8782 8495 3 8783 
8495 8782 3 8495 8783 8496 3 8784 8496 8783 3 8496 8784 8497 3 8785 8497 8784 3 8497 8785 8498 3 8786 8498 8785 3 8498 8786 8499 3 8787 8499 8786 3 8499 8787 8500 3 8788 8500 8787 3 8500 8788 8501 3 8789 8501 8788 3 8501 8789 8502 3 8790 8502 8789 3 8502 8790 8503 3 8791 8503 8790 3 8503 8791 8792 3 8503 8792 8504 3 8504 8792 8793 3 8504 8793 8505 3 8505 8793 8794 3 8505 8794 8506 3 8506 8794 8795 3 8506 8795 8507 3 8507 8795 8796 3 8507 8796 8508 3 8508 8796 8797 3 8508 8797 8509 3 8509 8797 8798 3 8509 8798 8510 3 8510 8798 8799 3 8510 8799 8511 3 8511 8799 8800 3 8511 8800 8512 3 8512 8800 8801 3 8512 8801 8513 3 8513 8801 8802 3 8513 8802 8514 3 8514 8802 8803 3 8514 8803 8515 3 8515 8803 8804 3 8515 8804 8516 3 8516 8804 8805 3 8516 8805 8517 3 8517 8805 8806 3 8517 8806 8518 3 8518 8806 8807 3 8518 8807 8519 3 8519 8807 8808 3 8519 8808 8520 3 8520 8808 8809 3 8520 8809 8521 3 8521 8809 8810 3 8521 8810 8522 3 8522 8810 8811 3 8522 8811 8523 3 8523 8811 8812 3 8523 8812 8524 3 8524 8812 8813 3 8524 8813 8525 3 8525 8813 8814 3 8525 8814 8526 3 8526 8814 8815 3 8526 8815 8527 3 8527 8815 8816 3 8527 8816 8528 3 8528 8816 8817 3 8528 8817 8529 3 8529 8817 8820 3 8529 8820 8532 3 8530 8818 8819 3 8530 8819 8531 3 8530 8535 8818 3 8823 8818 8535 3 8532 8820 8821 3 8532 8821 8533 3 8533 8821 8824 3 8533 8824 8536 3 8534 8822 8823 3 8534 8823 8535 3 8534 8538 8822 3 8826 8822 8538 3 8536 8824 8825 3 8536 8825 8537 3 8537 8825 8827 3 8537 8827 8539 3 8538 8540 8826 3 8828 8826 8540 3 8539 8827 8829 3 8539 8829 8541 3 8540 8542 8828 3 8830 8828 8542 3 8541 8829 8831 3 8541 8831 8543 3 8542 8544 8830 3 8832 8830 8544 3 8543 8831 8833 3 8543 8833 8545 3 8544 8546 8832 3 8834 8832 8546 3 8545 8833 8835 3 8545 8835 8547 3 8546 8548 8834 3 8836 8834 8548 3 8547 8835 8837 3 8547 8837 8549 3 8548 8550 8836 3 8838 8836 8550 3 8549 8837 8839 3 8549 8839 8551 3 8550 8552 8838 3 8840 8838 8552 3 8551 8839 8841 3 8551 8841 8553 3 8552 8554 8840 3 8842 8840 8554 3 8553 8841 8843 
3 8553 8843 8555 3 8554 8556 8844 3 8554 8844 8842 3 8555 8843 8845 3 8555 8845 8557 3 8556 8558 8846 3 8556 8846 8844 3 8557 8845 8847 3 8557 8847 8559 3 8558 8560 8848 3 8558 8848 8846 3 8559 8847 8849 3 8559 8849 8561 3 8560 8562 8850 3 8560 8850 8848 3 8561 8849 8563 3 8851 8563 8849 3 8562 8564 8852 3 8562 8852 8850 3 8563 8851 8565 3 8853 8565 8851 3 8564 8566 8854 3 8564 8854 8852 3 8565 8853 8567 3 8855 8567 8853 3 8566 8568 8856 3 8566 8856 8854 3 8567 8855 8569 3 8857 8569 8855 3 8568 8570 8858 3 8568 8858 8856 3 8569 8857 8571 3 8859 8571 8857 3 8570 8572 8860 3 8570 8860 8858 3 8571 8859 8573 3 8861 8573 8859 3 8572 8574 8862 3 8572 8862 8860 3 8573 8861 8575 3 8863 8575 8861 3 8574 8576 8864 3 8574 8864 8862 3 8575 8863 8577 3 8865 8577 8863 3 8576 8578 8866 3 8576 8866 8864 3 8577 8865 8579 3 8867 8579 8865 3 8578 8580 8868 3 8578 8868 8866 3 8579 8867 8581 3 8869 8581 8867 3 8580 8582 8870 3 8580 8870 8868 3 8581 8869 8583 3 8871 8583 8869 3 8582 8584 8872 3 8582 8872 8870 3 8583 8871 8585 3 8873 8585 8871 3 8584 8586 8874 3 8584 8874 8872 3 8585 8873 8587 3 8875 8587 8873 3 8586 8588 8876 3 8586 8876 8874 3 8587 8875 8589 3 8877 8589 8875 3 8588 8590 8878 3 8588 8878 8876 3 8589 8877 8591 3 8879 8591 8877 3 8590 8592 8880 3 8590 8880 8878 3 8591 8879 8593 3 8881 8593 8879 3 8592 8594 8882 3 8592 8882 8880 3 8593 8881 8595 3 8883 8595 8881 3 8594 8596 8884 3 8594 8884 8882 3 8595 8883 8597 3 8885 8597 8883 3 8596 8598 8886 3 8596 8886 8884 3 8597 8885 8599 3 8887 8599 8885 3 8598 8600 8888 3 8598 8888 8886 3 8599 8887 8601 3 8889 8601 8887 3 8600 8602 8890 3 8600 8890 8888 3 8601 8889 8603 3 8891 8603 8889 3 8602 8604 8892 3 8602 8892 8890 3 8603 8891 8605 3 8893 8605 8891 3 8604 8606 8894 3 8604 8894 8892 3 8605 8893 8607 3 8895 8607 8893 3 8606 8608 8896 3 8606 8896 8894 3 8607 8895 8609 3 8897 8609 8895 3 8608 8610 8898 3 8608 8898 8896 3 8609 8897 8611 3 8899 8611 8897 3 8610 8612 8898 3 8900 8898 8612 3 8611 8899 8613 3 8901 8613 8899 3 8612 
8614 8900 3 8902 8900 8614 3 8613 8901 8615 3 8903 8615 8901 3 8614 8616 8902 3 8904 8902 8616 3 8615 8903 8617 3 8905 8617 8903 3 8616 8618 8904 3 8906 8904 8618 3 8617 8905 8619 3 8907 8619 8905 3 8618 8620 8906 3 8908 8906 8620 3 8619 8907 8909 3 8619 8909 8621 3 8620 8622 8908 3 8910 8908 8622 3 8621 8909 8911 3 8621 8911 8623 3 8622 8624 8910 3 8912 8910 8624 3 8623 8911 8913 3 8623 8913 8625 3 8624 8626 8912 3 8914 8912 8626 3 8625 8913 8915 3 8625 8915 8627 3 8626 8628 8914 3 8916 8914 8628 3 8627 8915 8917 3 8627 8917 8629 3 8628 8630 8916 3 8918 8916 8630 3 8629 8917 8919 3 8629 8919 8631 3 8630 8632 8918 3 8920 8918 8632 3 8631 8919 8921 3 8631 8921 8633 3 8632 8634 8920 3 8922 8920 8634 3 8633 8921 8923 3 8633 8923 8635 3 8634 8636 8922 3 8924 8922 8636 3 8635 8923 8925 3 8635 8925 8637 3 8636 8638 8924 3 8926 8924 8638 3 8637 8925 8927 3 8637 8927 8639 3 8638 8640 8926 3 8928 8926 8640 3 8639 8927 8929 3 8639 8929 8641 3 8640 8642 8928 3 8930 8928 8642 3 8641 8929 8931 3 8641 8931 8643 3 8642 8644 8930 3 8932 8930 8644 3 8643 8931 8933 3 8643 8933 8645 3 8644 8646 8932 3 8934 8932 8646 3 8645 8933 8935 3 8645 8935 8647 3 8646 8648 8934 3 8936 8934 8648 3 8647 8935 8937 3 8647 8937 8649 3 8648 8650 8936 3 8938 8936 8650 3 8649 8937 8939 3 8649 8939 8651 3 8650 8652 8938 3 8940 8938 8652 3 8651 8939 8941 3 8651 8941 8653 3 8652 8654 8940 3 8942 8940 8654 3 8653 8941 8943 3 8653 8943 8655 3 8654 8656 8942 3 8944 8942 8656 3 8655 8943 8945 3 8655 8945 8657 3 8656 8658 8944 3 8946 8944 8658 3 8657 8945 8947 3 8657 8947 8659 3 8658 8660 8946 3 8948 8946 8660 3 8659 8947 8949 3 8659 8949 8661 3 8660 8662 8948 3 8950 8948 8662 3 8661 8949 8951 3 8661 8951 8663 3 8662 8664 8950 3 8952 8950 8664 3 8663 8951 8953 3 8663 8953 8665 3 8664 8666 8952 3 8954 8952 8666 3 8665 8953 8955 3 8665 8955 8667 3 8666 8668 8954 3 8956 8954 8668 3 8667 8955 8957 3 8667 8957 8669 3 8668 8670 8958 3 8668 8958 8956 3 8669 8957 8959 3 8669 8959 8671 3 8670 8672 8960 3 8670 8960 8958 
3 8671 8959 8963 3 8671 8963 8675 3 8672 8673 8961 3 8672 8961 8960 3 8673 8676 8964 3 8673 8964 8961 3 8674 8675 8963 3 8674 8963 8962 3 8674 8962 8967 3 8674 8967 8679 3 8676 8677 8965 3 8676 8965 8964 3 8677 8680 8968 3 8677 8968 8965 3 8678 8679 8967 3 8678 8967 8966 3 8678 8966 8748 3 9036 8748 8966 3 8680 8681 8969 3 8680 8969 8968 3 8681 8682 8970 3 8681 8970 8969 3 8682 8683 8971 3 8682 8971 8970 3 8683 8684 8972 3 8683 8972 8971 3 8684 8685 8973 3 8684 8973 8972 3 8685 8686 8974 3 8685 8974 8973 3 8686 8687 8975 3 8686 8975 8974 3 8687 8688 8976 3 8687 8976 8975 3 8688 8689 8977 3 8688 8977 8976 3 8689 8690 8978 3 8689 8978 8977 3 8690 8691 8979 3 8690 8979 8978 3 8691 8692 8980 3 8691 8980 8979 3 8692 8693 8981 3 8692 8981 8980 3 8693 8694 8982 3 8693 8982 8981 3 8694 8695 8983 3 8694 8983 8982 3 8695 8696 8984 3 8695 8984 8983 3 8696 8697 8985 3 8696 8985 8984 3 8697 8698 8986 3 8697 8986 8985 3 8698 8699 8987 3 8698 8987 8986 3 8699 8700 8988 3 8699 8988 8987 3 8700 8701 8989 3 8700 8989 8988 3 8701 8702 8990 3 8701 8990 8989 3 8702 8703 8991 3 8702 8991 8990 3 8703 8704 8992 3 8703 8992 8991 3 8704 8705 8993 3 8704 8993 8992 3 8705 8706 8994 3 8705 8994 8993 3 8706 8707 8995 3 8706 8995 8994 3 8707 8708 8996 3 8707 8996 8995 3 8708 8709 8997 3 8708 8997 8996 3 8709 8710 8998 3 8709 8998 8997 3 8710 8711 8999 3 8710 8999 8998 3 8711 8712 9000 3 8711 9000 8999 3 8712 8713 9001 3 8712 9001 9000 3 8713 8714 9002 3 8713 9002 9001 3 8714 8715 9003 3 8714 9003 9002 3 8715 8716 9004 3 8715 9004 9003 3 8716 8717 9005 3 8716 9005 9004 3 8717 8718 9006 3 8717 9006 9005 3 8718 8719 9007 3 8718 9007 9006 3 8719 8720 9008 3 8719 9008 9007 3 8720 8721 9009 3 8720 9009 9008 3 8721 8722 9010 3 8721 9010 9009 3 8722 8723 9011 3 8722 9011 9010 3 8723 8724 9012 3 8723 9012 9011 3 8724 8725 9013 3 8724 9013 9012 3 8725 8726 9013 3 9014 9013 8726 3 8726 8727 9014 3 9015 9014 8727 3 8727 8728 9015 3 9016 9015 8728 3 8728 8729 9016 3 9017 9016 8729 3 8729 8730 9017 3 9018 
9017 8730 3 8730 8731 9018 3 9019 9018 8731 3 8731 8732 9019 3 9020 9019 8732 3 8732 8733 9020 3 9021 9020 8733 3 8733 8734 9021 3 9022 9021 8734 3 8734 8735 9022 3 9023 9022 8735 3 8735 8736 9023 3 9024 9023 8736 3 8736 8737 9024 3 9025 9024 8737 3 8737 8738 9025 3 9026 9025 8738 3 8738 8739 9026 3 9027 9026 8739 3 8739 8740 9027 3 9028 9027 8740 3 8740 8741 9028 3 9029 9028 8741 3 8741 8742 9029 3 9030 9029 8742 3 8742 8743 9030 3 9031 9030 8743 3 8743 8744 9031 3 9032 9031 8744 3 8744 8745 9032 3 9033 9032 8745 3 8745 8746 9033 3 9034 9033 8746 3 8746 8747 9034 3 9035 9034 8747 3 8747 8748 9035 3 9036 9035 8748 3 8749 9037 9038 3 8749 9038 8750 3 8749 8819 9107 3 8749 9107 9037 3 8750 9038 9039 3 8750 9039 8751 3 8751 9039 9040 3 8751 9040 8752 3 8752 9040 9041 3 8752 9041 8753 3 8753 9041 9042 3 8753 9042 8754 3 8754 9042 9043 3 8754 9043 8755 3 8755 9043 9044 3 8755 9044 8756 3 8756 9044 9045 3 8756 9045 8757 3 8757 9045 9046 3 8757 9046 8758 3 8758 9046 9047 3 8758 9047 8759 3 8759 9047 9048 3 8759 9048 8760 3 8760 9048 9049 3 8760 9049 8761 3 8761 9049 9050 3 8761 9050 8762 3 8762 9050 9051 3 8762 9051 8763 3 8763 9051 9052 3 8763 9052 8764 3 8764 9052 9053 3 8764 9053 8765 3 8765 9053 9054 3 8765 9054 8766 3 8766 9054 9055 3 8766 9055 8767 3 8767 9055 9056 3 8767 9056 8768 3 8768 9056 9057 3 8768 9057 8769 3 8769 9057 9058 3 8769 9058 8770 3 8770 9058 9059 3 8770 9059 8771 3 8771 9059 9060 3 8771 9060 8772 3 8772 9060 9061 3 8772 9061 8773 3 8773 9061 9062 3 8773 9062 8774 3 8774 9062 9063 3 8774 9063 8775 3 8775 9063 9064 3 8775 9064 8776 3 8776 9064 9065 3 8776 9065 8777 3 8777 9065 9066 3 8777 9066 8778 3 8778 9066 9067 3 8778 9067 8779 3 8779 9067 9068 3 8779 9068 8780 3 8780 9068 9069 3 8780 9069 8781 3 8781 9069 9070 3 8781 9070 8782 3 8782 9070 9071 3 8782 9071 8783 3 8783 9071 9072 3 8783 9072 8784 3 8784 9072 9073 3 8784 9073 8785 3 8785 9073 9074 3 8785 9074 8786 3 8786 9074 9075 3 8786 9075 8787 3 8787 9075 9076 3 8787 9076 8788 3 8788 9076 9077 
3 8788 9077 8789 3 8789 9077 9078 3 8789 9078 8790 3 8790 9078 9079 3 8790 9079 8791 3 8791 9079 9080 3 8791 9080 8792 3 8792 9080 8793 3 9081 8793 9080 3 8793 9081 8794 3 9082 8794 9081 3 8794 9082 8795 3 9083 8795 9082 3 8795 9083 8796 3 9084 8796 9083 3 8796 9084 8797 3 9085 8797 9084 3 8797 9085 8798 3 9086 8798 9085 3 8798 9086 8799 3 9087 8799 9086 3 8799 9087 8800 3 9088 8800 9087 3 8800 9088 8801 3 9089 8801 9088 3 8801 9089 8802 3 9090 8802 9089 3 8802 9090 8803 3 9091 8803 9090 3 8803 9091 8804 3 9092 8804 9091 3 8804 9092 8805 3 9093 8805 9092 3 8805 9093 8806 3 9094 8806 9093 3 8806 9094 8807 3 9095 8807 9094 3 8807 9095 8808 3 9096 8808 9095 3 8808 9096 8809 3 9097 8809 9096 3 8809 9097 8810 3 9098 8810 9097 3 8810 9098 8811 3 9099 8811 9098 3 8811 9099 8812 3 9100 8812 9099 3 8812 9100 8813 3 9101 8813 9100 3 8813 9101 8814 3 9102 8814 9101 3 8814 9102 8815 3 9103 8815 9102 3 8815 9103 8816 3 9104 8816 9103 3 8816 9104 8817 3 9105 8817 9104 3 8817 9105 8820 3 9108 8820 9105 3 8818 9106 8819 3 9107 8819 9106 3 8818 8823 9111 3 8818 9111 9106 3 8820 9108 8821 3 9109 8821 9108 3 8821 9109 8824 3 9112 8824 9109 3 8822 9110 8823 3 9111 8823 9110 3 8822 8826 9114 3 8822 9114 9110 3 8824 9112 8825 3 9113 8825 9112 3 8825 9113 8827 3 9115 8827 9113 3 8826 8828 9116 3 8826 9116 9114 3 8827 9115 8829 3 9117 8829 9115 3 8828 8830 9118 3 8828 9118 9116 3 8829 9117 8831 3 9119 8831 9117 3 8830 8832 9120 3 8830 9120 9118 3 8831 9119 8833 3 9121 8833 9119 3 8832 8834 9122 3 8832 9122 9120 3 8833 9121 8835 3 9123 8835 9121 3 8834 8836 9124 3 8834 9124 9122 3 8835 9123 8837 3 9125 8837 9123 3 8836 8838 9126 3 8836 9126 9124 3 8837 9125 8839 3 9127 8839 9125 3 8838 8840 9128 3 8838 9128 9126 3 8839 9127 8841 3 9129 8841 9127 3 8840 8842 9128 3 9130 9128 8842 3 8841 9129 8843 3 9131 8843 9129 3 8842 8844 9130 3 9132 9130 8844 3 8843 9131 8845 3 9133 8845 9131 3 8844 8846 9132 3 9134 9132 8846 3 8845 9133 8847 3 9135 8847 9133 3 8846 8848 9134 3 9136 9134 8848 3 8847 
9135 8849 3 9137 8849 9135 3 8848 8850 9136 3 9138 9136 8850 3 8849 9137 8851 3 9139 8851 9137 3 8850 8852 9138 3 9140 9138 8852 3 8851 9139 9141 3 8851 9141 8853 3 8852 8854 9140 3 9142 9140 8854 3 8853 9141 9143 3 8853 9143 8855 3 8854 8856 9142 3 9144 9142 8856 3 8855 9143 9145 3 8855 9145 8857 3 8856 8858 9144 3 9146 9144 8858 3 8857 9145 9147 3 8857 9147 8859 3 8858 8860 9146 3 9148 9146 8860 3 8859 9147 9149 3 8859 9149 8861 3 8860 8862 9148 3 9150 9148 8862 3 8861 9149 9151 3 8861 9151 8863 3 8862 8864 9150 3 9152 9150 8864 3 8863 9151 9153 3 8863 9153 8865 3 8864 8866 9152 3 9154 9152 8866 3 8865 9153 9155 3 8865 9155 8867 3 8866 8868 9154 3 9156 9154 8868 3 8867 9155 9157 3 8867 9157 8869 3 8868 8870 9156 3 9158 9156 8870 3 8869 9157 9159 3 8869 9159 8871 3 8870 8872 9158 3 9160 9158 8872 3 8871 9159 9161 3 8871 9161 8873 3 8872 8874 9160 3 9162 9160 8874 3 8873 9161 9163 3 8873 9163 8875 3 8874 8876 9162 3 9164 9162 8876 3 8875 9163 9165 3 8875 9165 8877 3 8876 8878 9164 3 9166 9164 8878 3 8877 9165 9167 3 8877 9167 8879 3 8878 8880 9166 3 9168 9166 8880 3 8879 9167 9169 3 8879 9169 8881 3 8880 8882 9168 3 9170 9168 8882 3 8881 9169 9171 3 8881 9171 8883 3 8882 8884 9170 3 9172 9170 8884 3 8883 9171 9173 3 8883 9173 8885 3 8884 8886 9172 3 9174 9172 8886 3 8885 9173 9175 3 8885 9175 8887 3 8886 8888 9174 3 9176 9174 8888 3 8887 9175 9177 3 8887 9177 8889 3 8888 8890 9176 3 9178 9176 8890 3 8889 9177 9179 3 8889 9179 8891 3 8890 8892 9178 3 9180 9178 8892 3 8891 9179 9181 3 8891 9181 8893 3 8892 8894 9180 3 9182 9180 8894 3 8893 9181 9183 3 8893 9183 8895 3 8894 8896 9182 3 9184 9182 8896 3 8895 9183 9185 3 8895 9185 8897 3 8896 8898 9184 3 9186 9184 8898 3 8897 9185 9187 3 8897 9187 8899 3 8898 8900 9188 3 8898 9188 9186 3 8899 9187 9189 3 8899 9189 8901 3 8900 8902 9190 3 8900 9190 9188 3 8901 9189 9191 3 8901 9191 8903 3 8902 8904 9192 3 8902 9192 9190 3 8903 9191 9193 3 8903 9193 8905 3 8904 8906 9194 3 8904 9194 9192 3 8905 9193 9195 3 8905 9195 8907 
3 8906 8908 9196 3 8906 9196 9194 3 8907 9195 9197 3 8907 9197 8909 3 8908 8910 9198 3 8908 9198 9196 3 8909 9197 8911 3 9199 8911 9197 3 8910 8912 9200 3 8910 9200 9198 3 8911 9199 8913 3 9201 8913 9199 3 8912 8914 9202 3 8912 9202 9200 3 8913 9201 8915 3 9203 8915 9201 3 8914 8916 9204 3 8914 9204 9202 3 8915 9203 8917 3 9205 8917 9203 3 8916 8918 9206 3 8916 9206 9204 3 8917 9205 8919 3 9207 8919 9205 3 8918 8920 9208 3 8918 9208 9206 3 8919 9207 8921 3 9209 8921 9207 3 8920 8922 9210 3 8920 9210 9208 3 8921 9209 8923 3 9211 8923 9209 3 8922 8924 9212 3 8922 9212 9210 3 8923 9211 8925 3 9213 8925 9211 3 8924 8926 9214 3 8924 9214 9212 3 8925 9213 8927 3 9215 8927 9213 3 8926 8928 9216 3 8926 9216 9214 3 8927 9215 8929 3 9217 8929 9215 3 8928 8930 9218 3 8928 9218 9216 3 8929 9217 8931 3 9219 8931 9217 3 8930 8932 9220 3 8930 9220 9218 3 8931 9219 8933 3 9221 8933 9219 3 8932 8934 9222 3 8932 9222 9220 3 8933 9221 8935 3 9223 8935 9221 3 8934 8936 9224 3 8934 9224 9222 3 8935 9223 8937 3 9225 8937 9223 3 8936 8938 9226 3 8936 9226 9224 3 8937 9225 8939 3 9227 8939 9225 3 8938 8940 9228 3 8938 9228 9226 3 8939 9227 8941 3 9229 8941 9227 3 8940 8942 9230 3 8940 9230 9228 3 8941 9229 8943 3 9231 8943 9229 3 8942 8944 9232 3 8942 9232 9230 3 8943 9231 8945 3 9233 8945 9231 3 8944 8946 9234 3 8944 9234 9232 3 8945 9233 8947 3 9235 8947 9233 3 8946 8948 9236 3 8946 9236 9234 3 8947 9235 8949 3 9237 8949 9235 3 8948 8950 9238 3 8948 9238 9236 3 8949 9237 8951 3 9239 8951 9237 3 8950 8952 9240 3 8950 9240 9238 3 8951 9239 8953 3 9241 8953 9239 3 8952 8954 9242 3 8952 9242 9240 3 8953 9241 8955 3 9243 8955 9241 3 8954 8956 9244 3 8954 9244 9242 3 8955 9243 8957 3 9245 8957 9243 3 8956 8958 9244 3 9246 9244 8958 3 8957 9245 8959 3 9247 8959 9245 3 8958 8960 9246 3 9248 9246 8960 3 8959 9247 8963 3 9251 8963 9247 3 8960 8961 9248 3 9249 9248 8961 3 8961 8964 9249 3 9252 9249 8964 3 8962 8963 9250 3 9251 9250 8963 3 8962 9250 8967 3 9255 8967 9250 3 8964 8965 9252 3 9253 
9252 8965 3 8965 8968 9253 3 9256 9253 8968 3 8966 8967 9254 3 9255 9254 8967 3 8966 9254 9036 3 9324 9036 9254 3 8968 8969 9256 3 9257 9256 8969 3 8969 8970 9257 3 9258 9257 8970 3 8970 8971 9258 3 9259 9258 8971 3 8971 8972 9259 3 9260 9259 8972 3 8972 8973 9260 3 9261 9260 8973 3 8973 8974 9261 3 9262 9261 8974 3 8974 8975 9262 3 9263 9262 8975 3 8975 8976 9263 3 9264 9263 8976 3 8976 8977 9264 3 9265 9264 8977 3 8977 8978 9265 3 9266 9265 8978 3 8978 8979 9266 3 9267 9266 8979 3 8979 8980 9267 3 9268 9267 8980 3 8980 8981 9268 3 9269 9268 8981 3 8981 8982 9269 3 9270 9269 8982 3 8982 8983 9270 3 9271 9270 8983 3 8983 8984 9271 3 9272 9271 8984 3 8984 8985 9272 3 9273 9272 8985 3 8985 8986 9273 3 9274 9273 8986 3 8986 8987 9274 3 9275 9274 8987 3 8987 8988 9275 3 9276 9275 8988 3 8988 8989 9276 3 9277 9276 8989 3 8989 8990 9277 3 9278 9277 8990 3 8990 8991 9278 3 9279 9278 8991 3 8991 8992 9279 3 9280 9279 8992 3 8992 8993 9280 3 9281 9280 8993 3 8993 8994 9281 3 9282 9281 8994 3 8994 8995 9282 3 9283 9282 8995 3 8995 8996 9283 3 9284 9283 8996 3 8996 8997 9284 3 9285 9284 8997 3 8997 8998 9285 3 9286 9285 8998 3 8998 8999 9286 3 9287 9286 8999 3 8999 9000 9287 3 9288 9287 9000 3 9000 9001 9288 3 9289 9288 9001 3 9001 9002 9289 3 9290 9289 9002 3 9002 9003 9290 3 9291 9290 9003 3 9003 9004 9291 3 9292 9291 9004 3 9004 9005 9292 3 9293 9292 9005 3 9005 9006 9293 3 9294 9293 9006 3 9006 9007 9294 3 9295 9294 9007 3 9007 9008 9295 3 9296 9295 9008 3 9008 9009 9296 3 9297 9296 9009 3 9009 9010 9297 3 9298 9297 9010 3 9010 9011 9298 3 9299 9298 9011 3 9011 9012 9299 3 9300 9299 9012 3 9012 9013 9300 3 9301 9300 9013 3 9013 9014 9301 3 9302 9301 9014 3 9014 9015 9302 3 9303 9302 9015 3 9015 9016 9304 3 9015 9304 9303 3 9016 9017 9305 3 9016 9305 9304 3 9017 9018 9306 3 9017 9306 9305 3 9018 9019 9307 3 9018 9307 9306 3 9019 9020 9308 3 9019 9308 9307 3 9020 9021 9309 3 9020 9309 9308 3 9021 9022 9310 3 9021 9310 9309 3 9022 9023 9311 3 9022 9311 9310 3 9023 9024 9312 
3 9023 9312 9311 3 9024 9025 9313 3 9024 9313 9312 3 9025 9026 9314 3 9025 9314 9313 3 9026 9027 9315 3 9026 9315 9314 3 9027 9028 9316 3 9027 9316 9315 3 9028 9029 9317 3 9028 9317 9316 3 9029 9030 9318 3 9029 9318 9317 3 9030 9031 9319 3 9030 9319 9318 3 9031 9032 9320 3 9031 9320 9319 3 9032 9033 9321 3 9032 9321 9320 3 9033 9034 9322 3 9033 9322 9321 3 9034 9035 9323 3 9034 9323 9322 3 9035 9036 9324 3 9035 9324 9323 3 9037 9325 9038 3 9326 9038 9325 3 9037 9107 9325 3 9395 9325 9107 3 9038 9326 9039 3 9327 9039 9326 3 9039 9327 9040 3 9328 9040 9327 3 9040 9328 9041 3 9329 9041 9328 3 9041 9329 9042 3 9330 9042 9329 3 9042 9330 9043 3 9331 9043 9330 3 9043 9331 9044 3 9332 9044 9331 3 9044 9332 9045 3 9333 9045 9332 3 9045 9333 9046 3 9334 9046 9333 3 9046 9334 9047 3 9335 9047 9334 3 9047 9335 9048 3 9336 9048 9335 3 9048 9336 9049 3 9337 9049 9336 3 9049 9337 9050 3 9338 9050 9337 3 9050 9338 9051 3 9339 9051 9338 3 9051 9339 9052 3 9340 9052 9339 3 9052 9340 9053 3 9341 9053 9340 3 9053 9341 9054 3 9342 9054 9341 3 9054 9342 9055 3 9343 9055 9342 3 9055 9343 9056 3 9344 9056 9343 3 9056 9344 9057 3 9345 9057 9344 3 9057 9345 9058 3 9346 9058 9345 3 9058 9346 9059 3 9347 9059 9346 3 9059 9347 9060 3 9348 9060 9347 3 9060 9348 9061 3 9349 9061 9348 3 9061 9349 9062 3 9350 9062 9349 3 9062 9350 9063 3 9351 9063 9350 3 9063 9351 9064 3 9352 9064 9351 3 9064 9352 9065 3 9353 9065 9352 3 9065 9353 9066 3 9354 9066 9353 3 9066 9354 9067 3 9355 9067 9354 3 9067 9355 9068 3 9356 9068 9355 3 9068 9356 9069 3 9357 9069 9356 3 9069 9357 9070 3 9358 9070 9357 3 9070 9358 9071 3 9359 9071 9358 3 9071 9359 9072 3 9360 9072 9359 3 9072 9360 9073 3 9361 9073 9360 3 9073 9361 9074 3 9362 9074 9361 3 9074 9362 9075 3 9363 9075 9362 3 9075 9363 9076 3 9364 9076 9363 3 9076 9364 9077 3 9365 9077 9364 3 9077 9365 9078 3 9366 9078 9365 3 9078 9366 9079 3 9367 9079 9366 3 9079 9367 9080 3 9368 9080 9367 3 9080 9368 9081 3 9369 9081 9368 3 9081 9369 9082 3 9370 9082 9369 3 9082 
9370 9083 3 9371 9083 9370 3 9083 9371 9084 3 9372 9084 9371 3 9084 9372 9085 3 9373 9085 9372 3 9085 9373 9374 3 9085 9374 9086 3 9086 9374 9375 3 9086 9375 9087 3 9087 9375 9376 3 9087 9376 9088 3 9088 9376 9377 3 9088 9377 9089 3 9089 9377 9378 3 9089 9378 9090 3 9090 9378 9379 3 9090 9379 9091 3 9091 9379 9380 3 9091 9380 9092 3 9092 9380 9381 3 9092 9381 9093 3 9093 9381 9382 3 9093 9382 9094 3 9094 9382 9383 3 9094 9383 9095 3 9095 9383 9384 3 9095 9384 9096 3 9096 9384 9385 3 9096 9385 9097 3 9097 9385 9386 3 9097 9386 9098 3 9098 9386 9387 3 9098 9387 9099 3 9099 9387 9388 3 9099 9388 9100 3 9100 9388 9389 3 9100 9389 9101 3 9101 9389 9390 3 9101 9390 9102 3 9102 9390 9391 3 9102 9391 9103 3 9103 9391 9392 3 9103 9392 9104 3 9104 9392 9393 3 9104 9393 9105 3 9105 9393 9396 3 9105 9396 9108 3 9106 9394 9395 3 9106 9395 9107 3 9106 9111 9394 3 9399 9394 9111 3 9108 9396 9397 3 9108 9397 9109 3 9109 9397 9400 3 9109 9400 9112 3 9110 9398 9399 3 9110 9399 9111 3 9110 9114 9398 3 9402 9398 9114 3 9112 9400 9401 3 9112 9401 9113 3 9113 9401 9403 3 9113 9403 9115 3 9114 9116 9402 3 9404 9402 9116 3 9115 9403 9405 3 9115 9405 9117 3 9116 9118 9404 3 9406 9404 9118 3 9117 9405 9407 3 9117 9407 9119 3 9118 9120 9406 3 9408 9406 9120 3 9119 9407 9409 3 9119 9409 9121 3 9120 9122 9408 3 9410 9408 9122 3 9121 9409 9411 3 9121 9411 9123 3 9122 9124 9410 3 9412 9410 9124 3 9123 9411 9413 3 9123 9413 9125 3 9124 9126 9412 3 9414 9412 9126 3 9125 9413 9415 3 9125 9415 9127 3 9126 9128 9414 3 9416 9414 9128 3 9127 9415 9417 3 9127 9417 9129 3 9128 9130 9416 3 9418 9416 9130 3 9129 9417 9419 3 9129 9419 9131 3 9130 9132 9418 3 9420 9418 9132 3 9131 9419 9421 3 9131 9421 9133 3 9132 9134 9422 3 9132 9422 9420 3 9133 9421 9423 3 9133 9423 9135 3 9134 9136 9424 3 9134 9424 9422 3 9135 9423 9425 3 9135 9425 9137 3 9136 9138 9426 3 9136 9426 9424 3 9137 9425 9427 3 9137 9427 9139 3 9138 9140 9428 3 9138 9428 9426 3 9139 9427 9429 3 9139 9429 9141 3 9140 9142 9430 3 9140 9430 9428 
3 9141 9429 9431 3 9141 9431 9143 3 9142 9144 9432 3 9142 9432 9430 3 9143 9431 9433 3 9143 9433 9145 3 9144 9146 9434 3 9144 9434 9432 3 9145 9433 9147 3 9435 9147 9433 3 9146 9148 9436 3 9146 9436 9434 3 9147 9435 9149 3 9437 9149 9435 3 9148 9150 9438 3 9148 9438 9436 3 9149 9437 9151 3 9439 9151 9437 3 9150 9152 9440 3 9150 9440 9438 3 9151 9439 9153 3 9441 9153 9439 3 9152 9154 9442 3 9152 9442 9440 3 9153 9441 9155 3 9443 9155 9441 3 9154 9156 9444 3 9154 9444 9442 3 9155 9443 9157 3 9445 9157 9443 3 9156 9158 9446 3 9156 9446 9444 3 9157 9445 9159 3 9447 9159 9445 3 9158 9160 9448 3 9158 9448 9446 3 9159 9447 9161 3 9449 9161 9447 3 9160 9162 9450 3 9160 9450 9448 3 9161 9449 9163 3 9451 9163 9449 3 9162 9164 9452 3 9162 9452 9450 3 9163 9451 9165 3 9453 9165 9451 3 9164 9166 9454 3 9164 9454 9452 3 9165 9453 9167 3 9455 9167 9453 3 9166 9168 9456 3 9166 9456 9454 3 9167 9455 9169 3 9457 9169 9455 3 9168 9170 9458 3 9168 9458 9456 3 9169 9457 9171 3 9459 9171 9457 3 9170 9172 9460 3 9170 9460 9458 3 9171 9459 9173 3 9461 9173 9459 3 9172 9174 9462 3 9172 9462 9460 3 9173 9461 9175 3 9463 9175 9461 3 9174 9176 9464 3 9174 9464 9462 3 9175 9463 9177 3 9465 9177 9463 3 9176 9178 9466 3 9176 9466 9464 3 9177 9465 9179 3 9467 9179 9465 3 9178 9180 9468 3 9178 9468 9466 3 9179 9467 9181 3 9469 9181 9467 3 9180 9182 9470 3 9180 9470 9468 3 9181 9469 9183 3 9471 9183 9469 3 9182 9184 9472 3 9182 9472 9470 3 9183 9471 9185 3 9473 9185 9471 3 9184 9186 9474 3 9184 9474 9472 3 9185 9473 9187 3 9475 9187 9473 3 9186 9188 9476 3 9186 9476 9474 3 9187 9475 9189 3 9477 9189 9475 3 9188 9190 9478 3 9188 9478 9476 3 9189 9477 9191 3 9479 9191 9477 3 9190 9192 9478 3 9480 9478 9192 3 9191 9479 9193 3 9481 9193 9479 3 9192 9194 9480 3 9482 9480 9194 3 9193 9481 9195 3 9483 9195 9481 3 9194 9196 9482 3 9484 9482 9196 3 9195 9483 9197 3 9485 9197 9483 3 9196 9198 9484 3 9486 9484 9198 3 9197 9485 9199 3 9487 9199 9485 3 9198 9200 9486 3 9488 9486 9200 3 9199 9487 9201 3 9489 
9201 9487 3 9200 9202 9488 3 9490 9488 9202 3 9201 9489 9203 3 9491 9203 9489 3 9202 9204 9490 3 9492 9490 9204 3 9203 9491 9205 3 9493 9205 9491 3 9204 9206 9492 3 9494 9492 9206 3 9205 9493 9495 3 9205 9495 9207 3 9206 9208 9494 3 9496 9494 9208 3 9207 9495 9497 3 9207 9497 9209 3 9208 9210 9496 3 9498 9496 9210 3 9209 9497 9499 3 9209 9499 9211 3 9210 9212 9498 3 9500 9498 9212 3 9211 9499 9501 3 9211 9501 9213 3 9212 9214 9500 3 9502 9500 9214 3 9213 9501 9503 3 9213 9503 9215 3 9214 9216 9502 3 9504 9502 9216 3 9215 9503 9505 3 9215 9505 9217 3 9216 9218 9504 3 9506 9504 9218 3 9217 9505 9507 3 9217 9507 9219 3 9218 9220 9506 3 9508 9506 9220 3 9219 9507 9509 3 9219 9509 9221 3 9220 9222 9508 3 9510 9508 9222 3 9221 9509 9511 3 9221 9511 9223 3 9222 9224 9510 3 9512 9510 9224 3 9223 9511 9513 3 9223 9513 9225 3 9224 9226 9512 3 9514 9512 9226 3 9225 9513 9515 3 9225 9515 9227 3 9226 9228 9514 3 9516 9514 9228 3 9227 9515 9517 3 9227 9517 9229 3 9228 9230 9516 3 9518 9516 9230 3 9229 9517 9519 3 9229 9519 9231 3 9230 9232 9518 3 9520 9518 9232 3 9231 9519 9521 3 9231 9521 9233 3 9232 9234 9520 3 9522 9520 9234 3 9233 9521 9523 3 9233 9523 9235 3 9234 9236 9522 3 9524 9522 9236 3 9235 9523 9525 3 9235 9525 9237 3 9236 9238 9524 3 9526 9524 9238 3 9237 9525 9527 3 9237 9527 9239 3 9238 9240 9526 3 9528 9526 9240 3 9239 9527 9529 3 9239 9529 9241 3 9240 9242 9528 3 9530 9528 9242 3 9241 9529 9531 3 9241 9531 9243 3 9242 9244 9530 3 9532 9530 9244 3 9243 9531 9533 3 9243 9533 9245 3 9244 9246 9532 3 9534 9532 9246 3 9245 9533 9535 3 9245 9535 9247 3 9246 9248 9534 3 9536 9534 9248 3 9247 9535 9539 3 9247 9539 9251 3 9248 9249 9536 3 9537 9536 9249 3 9249 9252 9540 3 9249 9540 9537 3 9250 9251 9539 3 9250 9539 9538 3 9250 9538 9543 3 9250 9543 9255 3 9252 9253 9541 3 9252 9541 9540 3 9253 9256 9544 3 9253 9544 9541 3 9254 9255 9543 3 9254 9543 9542 3 9254 9542 9612 3 9254 9612 9324 3 9256 9257 9545 3 9256 9545 9544 3 9257 9258 9546 3 9257 9546 9545 3 9258 9259 9547 
3 9258 9547 9546 3 9259 9260 9548 3 9259 9548 9547 3 9260 9261 9549 3 9260 9549 9548 3 9261 9262 9550 3 9261 9550 9549 3 9262 9263 9551 3 9262 9551 9550 3 9263 9264 9552 3 9263 9552 9551 3 9264 9265 9553 3 9264 9553 9552 3 9265 9266 9554 3 9265 9554 9553 3 9266 9267 9555 3 9266 9555 9554 3 9267 9268 9556 3 9267 9556 9555 3 9268 9269 9557 3 9268 9557 9556 3 9269 9270 9558 3 9269 9558 9557 3 9270 9271 9559 3 9270 9559 9558 3 9271 9272 9560 3 9271 9560 9559 3 9272 9273 9561 3 9272 9561 9560 3 9273 9274 9562 3 9273 9562 9561 3 9274 9275 9563 3 9274 9563 9562 3 9275 9276 9564 3 9275 9564 9563 3 9276 9277 9565 3 9276 9565 9564 3 9277 9278 9566 3 9277 9566 9565 3 9278 9279 9567 3 9278 9567 9566 3 9279 9280 9568 3 9279 9568 9567 3 9280 9281 9569 3 9280 9569 9568 3 9281 9282 9570 3 9281 9570 9569 3 9282 9283 9571 3 9282 9571 9570 3 9283 9284 9572 3 9283 9572 9571 3 9284 9285 9573 3 9284 9573 9572 3 9285 9286 9574 3 9285 9574 9573 3 9286 9287 9575 3 9286 9575 9574 3 9287 9288 9576 3 9287 9576 9575 3 9288 9289 9577 3 9288 9577 9576 3 9289 9290 9578 3 9289 9578 9577 3 9290 9291 9579 3 9290 9579 9578 3 9291 9292 9580 3 9291 9580 9579 3 9292 9293 9581 3 9292 9581 9580 3 9293 9294 9582 3 9293 9582 9581 3 9294 9295 9583 3 9294 9583 9582 3 9295 9296 9584 3 9295 9584 9583 3 9296 9297 9585 3 9296 9585 9584 3 9297 9298 9586 3 9297 9586 9585 3 9298 9299 9587 3 9298 9587 9586 3 9299 9300 9588 3 9299 9588 9587 3 9300 9301 9589 3 9300 9589 9588 3 9301 9302 9590 3 9301 9590 9589 3 9302 9303 9591 3 9302 9591 9590 3 9303 9304 9592 3 9303 9592 9591 3 9304 9305 9593 3 9304 9593 9592 3 9305 9306 9594 3 9305 9594 9593 3 9306 9307 9595 3 9306 9595 9594 3 9307 9308 9596 3 9307 9596 9595 3 9308 9309 9597 3 9308 9597 9596 3 9309 9310 9597 3 9598 9597 9310 3 9310 9311 9598 3 9599 9598 9311 3 9311 9312 9599 3 9600 9599 9312 3 9312 9313 9600 3 9601 9600 9313 3 9313 9314 9601 3 9602 9601 9314 3 9314 9315 9602 3 9603 9602 9315 3 9315 9316 9603 3 9604 9603 9316 3 9316 9317 9604 3 9605 9604 9317 3 9317 
9318 9605 3 9606 9605 9318 3 9318 9319 9606 3 9607 9606 9319 3 9319 9320 9607 3 9608 9607 9320 3 9320 9321 9608 3 9609 9608 9321 3 9321 9322 9609 3 9610 9609 9322 3 9322 9323 9610 3 9611 9610 9323 3 9323 9324 9611 3 9612 9611 9324 3 9325 9613 9614 3 9325 9614 9326 3 9325 9395 9613 3 9683 9613 9395 3 9326 9614 9615 3 9326 9615 9327 3 9327 9615 9616 3 9327 9616 9328 3 9328 9616 9617 3 9328 9617 9329 3 9329 9617 9618 3 9329 9618 9330 3 9330 9618 9619 3 9330 9619 9331 3 9331 9619 9620 3 9331 9620 9332 3 9332 9620 9621 3 9332 9621 9333 3 9333 9621 9622 3 9333 9622 9334 3 9334 9622 9623 3 9334 9623 9335 3 9335 9623 9624 3 9335 9624 9336 3 9336 9624 9625 3 9336 9625 9337 3 9337 9625 9626 3 9337 9626 9338 3 9338 9626 9627 3 9338 9627 9339 3 9339 9627 9628 3 9339 9628 9340 3 9340 9628 9629 3 9340 9629 9341 3 9341 9629 9630 3 9341 9630 9342 3 9342 9630 9631 3 9342 9631 9343 3 9343 9631 9632 3 9343 9632 9344 3 9344 9632 9633 3 9344 9633 9345 3 9345 9633 9634 3 9345 9634 9346 3 9346 9634 9635 3 9346 9635 9347 3 9347 9635 9636 3 9347 9636 9348 3 9348 9636 9637 3 9348 9637 9349 3 9349 9637 9638 3 9349 9638 9350 3 9350 9638 9639 3 9350 9639 9351 3 9351 9639 9640 3 9351 9640 9352 3 9352 9640 9641 3 9352 9641 9353 3 9353 9641 9642 3 9353 9642 9354 3 9354 9642 9643 3 9354 9643 9355 3 9355 9643 9644 3 9355 9644 9356 3 9356 9644 9645 3 9356 9645 9357 3 9357 9645 9646 3 9357 9646 9358 3 9358 9646 9647 3 9358 9647 9359 3 9359 9647 9648 3 9359 9648 9360 3 9360 9648 9649 3 9360 9649 9361 3 9361 9649 9650 3 9361 9650 9362 3 9362 9650 9651 3 9362 9651 9363 3 9363 9651 9652 3 9363 9652 9364 3 9364 9652 9653 3 9364 9653 9365 3 9365 9653 9654 3 9365 9654 9366 3 9366 9654 9655 3 9366 9655 9367 3 9367 9655 9656 3 9367 9656 9368 3 9368 9656 9657 3 9368 9657 9369 3 9369 9657 9658 3 9369 9658 9370 3 9370 9658 9659 3 9370 9659 9371 3 9371 9659 9660 3 9371 9660 9372 3 9372 9660 9661 3 9372 9661 9373 3 9373 9661 9662 3 9373 9662 9374 3 9374 9662 9663 3 9374 9663 9375 3 9375 9663 9664 3 9375 9664 9376 
3 9376 9664 9665 3 9376 9665 9377 3 9377 9665 9666 3 9377 9666 9378 3 9378 9666 9667 3 9378 9667 9379 3 9379 9667 9668 3 9379 9668 9380 3 9380 9668 9669 3 9380 9669 9381 3 9381 9669 9670 3 9381 9670 9382 3 9382 9670 9671 3 9382 9671 9383 3 9383 9671 9384 3 9672 9384 9671 3 9384 9672 9385 3 9673 9385 9672 3 9385 9673 9386 3 9674 9386 9673 3 9386 9674 9387 3 9675 9387 9674 3 9387 9675 9388 3 9676 9388 9675 3 9388 9676 9389 3 9677 9389 9676 3 9389 9677 9390 3 9678 9390 9677 3 9390 9678 9391 3 9679 9391 9678 3 9391 9679 9392 3 9680 9392 9679 3 9392 9680 9393 3 9681 9393 9680 3 9393 9681 9396 3 9684 9396 9681 3 9394 9682 9395 3 9683 9395 9682 3 9394 9399 9687 3 9394 9687 9682 3 9396 9684 9397 3 9685 9397 9684 3 9397 9685 9400 3 9688 9400 9685 3 9398 9686 9399 3 9687 9399 9686 3 9398 9402 9690 3 9398 9690 9686 3 9400 9688 9401 3 9689 9401 9688 3 9401 9689 9403 3 9691 9403 9689 3 9402 9404 9692 3 9402 9692 9690 3 9403 9691 9405 3 9693 9405 9691 3 9404 9406 9694 3 9404 9694 9692 3 9405 9693 9407 3 9695 9407 9693 3 9406 9408 9696 3 9406 9696 9694 3 9407 9695 9409 3 9697 9409 9695 3 9408 9410 9698 3 9408 9698 9696 3 9409 9697 9411 3 9699 9411 9697 3 9410 9412 9700 3 9410 9700 9698 3 9411 9699 9413 3 9701 9413 9699 3 9412 9414 9702 3 9412 9702 9700 3 9413 9701 9415 3 9703 9415 9701 3 9414 9416 9704 3 9414 9704 9702 3 9415 9703 9417 3 9705 9417 9703 3 9416 9418 9706 3 9416 9706 9704 3 9417 9705 9419 3 9707 9419 9705 3 9418 9420 9708 3 9418 9708 9706 3 9419 9707 9421 3 9709 9421 9707 3 9420 9422 9710 3 9420 9710 9708 3 9421 9709 9423 3 9711 9423 9709 3 9422 9424 9712 3 9422 9712 9710 3 9423 9711 9425 3 9713 9425 9711 3 9424 9426 9714 3 9424 9714 9712 3 9425 9713 9427 3 9715 9427 9713 3 9426 9428 9716 3 9426 9716 9714 3 9427 9715 9429 3 9717 9429 9715 3 9428 9430 9716 3 9718 9716 9430 3 9429 9717 9431 3 9719 9431 9717 3 9430 9432 9718 3 9720 9718 9432 3 9431 9719 9433 3 9721 9433 9719 3 9432 9434 9720 3 9722 9720 9434 3 9433 9721 9435 3 9723 9435 9721 3 9434 9436 9722 3 9724 
9722 9436 3 9435 9723 9437 3 9725 9437 9723 3 9436 9438 9724 3 9726 9724 9438 3 9437 9725 9439 3 9727 9439 9725 3 9438 9440 9726 3 9728 9726 9440 3 9439 9727 9441 3 9729 9441 9727 3 9440 9442 9728 3 9730 9728 9442 3 9441 9729 9443 3 9731 9443 9729 3 9442 9444 9730 3 9732 9730 9444 3 9443 9731 9445 3 9733 9445 9731 3 9444 9446 9732 3 9734 9732 9446 3 9445 9733 9735 3 9445 9735 9447 3 9446 9448 9734 3 9736 9734 9448 3 9447 9735 9737 3 9447 9737 9449 3 9448 9450 9736 3 9738 9736 9450 3 9449 9737 9739 3 9449 9739 9451 3 9450 9452 9738 3 9740 9738 9452 3 9451 9739 9741 3 9451 9741 9453 3 9452 9454 9740 3 9742 9740 9454 3 9453 9741 9743 3 9453 9743 9455 3 9454 9456 9742 3 9744 9742 9456 3 9455 9743 9745 3 9455 9745 9457 3 9456 9458 9744 3 9746 9744 9458 3 9457 9745 9747 3 9457 9747 9459 3 9458 9460 9746 3 9748 9746 9460 3 9459 9747 9749 3 9459 9749 9461 3 9460 9462 9748 3 9750 9748 9462 3 9461 9749 9751 3 9461 9751 9463 3 9462 9464 9750 3 9752 9750 9464 3 9463 9751 9753 3 9463 9753 9465 3 9464 9466 9752 3 9754 9752 9466 3 9465 9753 9755 3 9465 9755 9467 3 9466 9468 9754 3 9756 9754 9468 3 9467 9755 9757 3 9467 9757 9469 3 9468 9470 9756 3 9758 9756 9470 3 9469 9757 9759 3 9469 9759 9471 3 9470 9472 9758 3 9760 9758 9472 3 9471 9759 9761 3 9471 9761 9473 3 9472 9474 9760 3 9762 9760 9474 3 9473 9761 9763 3 9473 9763 9475 3 9474 9476 9762 3 9764 9762 9476 3 9475 9763 9765 3 9475 9765 9477 3 9476 9478 9764 3 9766 9764 9478 3 9477 9765 9767 3 9477 9767 9479 3 9478 9480 9766 3 9768 9766 9480 3 9479 9767 9769 3 9479 9769 9481 3 9480 9482 9768 3 9770 9768 9482 3 9481 9769 9771 3 9481 9771 9483 3 9482 9484 9770 3 9772 9770 9484 3 9483 9771 9773 3 9483 9773 9485 3 9484 9486 9772 3 9774 9772 9486 3 9485 9773 9775 3 9485 9775 9487 3 9486 9488 9774 3 9776 9774 9488 3 9487 9775 9777 3 9487 9777 9489 3 9488 9490 9778 3 9488 9778 9776 3 9489 9777 9779 3 9489 9779 9491 3 9490 9492 9780 3 9490 9780 9778 3 9491 9779 9781 3 9491 9781 9493 3 9492 9494 9782 3 9492 9782 9780 3 9493 9781 9783 
3 9493 9783 9495 3 9494 9496 9784 3 9494 9784 9782 3 9495 9783 9785 3 9495 9785 9497 3 9496 9498 9786 3 9496 9786 9784 3 9497 9785 9787 3 9497 9787 9499 3 9498 9500 9788 3 9498 9788 9786 3 9499 9787 9789 3 9499 9789 9501 3 9500 9502 9790 3 9500 9790 9788 3 9501 9789 9791 3 9501 9791 9503 3 9502 9504 9792 3 9502 9792 9790 3 9503 9791 9793 3 9503 9793 9505 3 9504 9506 9794 3 9504 9794 9792 3 9505 9793 9507 3 9795 9507 9793 3 9506 9508 9796 3 9506 9796 9794 3 9507 9795 9509 3 9797 9509 9795 3 9508 9510 9798 3 9508 9798 9796 3 9509 9797 9511 3 9799 9511 9797 3 9510 9512 9800 3 9510 9800 9798 3 9511 9799 9513 3 9801 9513 9799 3 9512 9514 9802 3 9512 9802 9800 3 9513 9801 9515 3 9803 9515 9801 3 9514 9516 9804 3 9514 9804 9802 3 9515 9803 9517 3 9805 9517 9803 3 9516 9518 9806 3 9516 9806 9804 3 9517 9805 9519 3 9807 9519 9805 3 9518 9520 9808 3 9518 9808 9806 3 9519 9807 9521 3 9809 9521 9807 3 9520 9522 9810 3 9520 9810 9808 3 9521 9809 9523 3 9811 9523 9809 3 9522 9524 9812 3 9522 9812 9810 3 9523 9811 9525 3 9813 9525 9811 3 9524 9526 9814 3 9524 9814 9812 3 9525 9813 9527 3 9815 9527 9813 3 9526 9528 9816 3 9526 9816 9814 3 9527 9815 9529 3 9817 9529 9815 3 9528 9530 9818 3 9528 9818 9816 3 9529 9817 9531 3 9819 9531 9817 3 9530 9532 9820 3 9530 9820 9818 3 9531 9819 9533 3 9821 9533 9819 3 9532 9534 9822 3 9532 9822 9820 3 9533 9821 9535 3 9823 9535 9821 3 9534 9536 9824 3 9534 9824 9822 3 9535 9823 9539 3 9827 9539 9823 3 9536 9537 9825 3 9536 9825 9824 3 9537 9540 9828 3 9537 9828 9825 3 9538 9539 9827 3 9538 9827 9826 3 9538 9826 9543 3 9831 9543 9826 3 9540 9541 9829 3 9540 9829 9828 3 9541 9544 9832 3 9541 9832 9829 3 9542 9543 9831 3 9542 9831 9830 3 9542 9830 9612 3 9900 9612 9830 3 9544 9545 9833 3 9544 9833 9832 3 9545 9546 9834 3 9545 9834 9833 3 9546 9547 9835 3 9546 9835 9834 3 9547 9548 9836 3 9547 9836 9835 3 9548 9549 9836 3 9837 9836 9549 3 9549 9550 9837 3 9838 9837 9550 3 9550 9551 9838 3 9839 9838 9551 3 9551 9552 9839 3 9840 9839 9552 3 9552 
9553 9840 3 9841 9840 9553 3 9553 9554 9841 3 9842 9841 9554 3 9554 9555 9842 3 9843 9842 9555 3 9555 9556 9843 3 9844 9843 9556 3 9556 9557 9844 3 9845 9844 9557 3 9557 9558 9845 3 9846 9845 9558 3 9558 9559 9846 3 9847 9846 9559 3 9559 9560 9847 3 9848 9847 9560 3 9560 9561 9848 3 9849 9848 9561 3 9561 9562 9849 3 9850 9849 9562 3 9562 9563 9850 3 9851 9850 9563 3 9563 9564 9851 3 9852 9851 9564 3 9564 9565 9852 3 9853 9852 9565 3 9565 9566 9853 3 9854 9853 9566 3 9566 9567 9854 3 9855 9854 9567 3 9567 9568 9855 3 9856 9855 9568 3 9568 9569 9856 3 9857 9856 9569 3 9569 9570 9857 3 9858 9857 9570 3 9570 9571 9858 3 9859 9858 9571 3 9571 9572 9859 3 9860 9859 9572 3 9572 9573 9860 3 9861 9860 9573 3 9573 9574 9861 3 9862 9861 9574 3 9574 9575 9862 3 9863 9862 9575 3 9575 9576 9863 3 9864 9863 9576 3 9576 9577 9864 3 9865 9864 9577 3 9577 9578 9865 3 9866 9865 9578 3 9578 9579 9866 3 9867 9866 9579 3 9579 9580 9867 3 9868 9867 9580 3 9580 9581 9868 3 9869 9868 9581 3 9581 9582 9869 3 9870 9869 9582 3 9582 9583 9870 3 9871 9870 9583 3 9583 9584 9871 3 9872 9871 9584 3 9584 9585 9872 3 9873 9872 9585 3 9585 9586 9873 3 9874 9873 9586 3 9586 9587 9874 3 9875 9874 9587 3 9587 9588 9875 3 9876 9875 9588 3 9588 9589 9876 3 9877 9876 9589 3 9589 9590 9877 3 9878 9877 9590 3 9590 9591 9878 3 9879 9878 9591 3 9591 9592 9879 3 9880 9879 9592 3 9592 9593 9880 3 9881 9880 9593 3 9593 9594 9881 3 9882 9881 9594 3 9594 9595 9882 3 9883 9882 9595 3 9595 9596 9883 3 9884 9883 9596 3 9596 9597 9884 3 9885 9884 9597 3 9597 9598 9885 3 9886 9885 9598 3 9598 9599 9886 3 9887 9886 9599 3 9599 9600 9887 3 9888 9887 9600 3 9600 9601 9888 3 9889 9888 9601 3 9601 9602 9889 3 9890 9889 9602 3 9602 9603 9890 3 9891 9890 9603 3 9603 9604 9891 3 9892 9891 9604 3 9604 9605 9892 3 9893 9892 9605 3 9605 9606 9893 3 9894 9893 9606 3 9606 9607 9894 3 9895 9894 9607 3 9607 9608 9895 3 9896 9895 9608 3 9608 9609 9897 3 9608 9897 9896 3 9609 9610 9898 3 9609 9898 9897 3 9610 9611 9899 3 9610 9899 9898 
3 9611 9612 9900 3 9611 9900 9899 3 9613 9901 9902 3 9613 9902 9614 3 9613 9683 9971 3 9613 9971 9901 3 9614 9902 9903 3 9614 9903 9615 3 9615 9903 9904 3 9615 9904 9616 3 9616 9904 9905 3 9616 9905 9617 3 9617 9905 9906 3 9617 9906 9618 3 9618 9906 9907 3 9618 9907 9619 3 9619 9907 9908 3 9619 9908 9620 3 9620 9908 9909 3 9620 9909 9621 3 9621 9909 9910 3 9621 9910 9622 3 9622 9910 9911 3 9622 9911 9623 3 9623 9911 9912 3 9623 9912 9624 3 9624 9912 9913 3 9624 9913 9625 3 9625 9913 9914 3 9625 9914 9626 3 9626 9914 9627 3 9915 9627 9914 3 9627 9915 9628 3 9916 9628 9915 3 9628 9916 9629 3 9917 9629 9916 3 9629 9917 9630 3 9918 9630 9917 3 9630 9918 9631 3 9919 9631 9918 3 9631 9919 9632 3 9920 9632 9919 3 9632 9920 9633 3 9921 9633 9920 3 9633 9921 9634 3 9922 9634 9921 3 9634 9922 9635 3 9923 9635 9922 3 9635 9923 9636 3 9924 9636 9923 3 9636 9924 9637 3 9925 9637 9924 3 9637 9925 9638 3 9926 9638 9925 3 9638 9926 9639 3 9927 9639 9926 3 9639 9927 9640 3 9928 9640 9927 3 9640 9928 9641 3 9929 9641 9928 3 9641 9929 9642 3 9930 9642 9929 3 9642 9930 9643 3 9931 9643 9930 3 9643 9931 9644 3 9932 9644 9931 3 9644 9932 9645 3 9933 9645 9932 3 9645 9933 9646 3 9934 9646 9933 3 9646 9934 9647 3 9935 9647 9934 3 9647 9935 9648 3 9936 9648 9935 3 9648 9936 9649 3 9937 9649 9936 3 9649 9937 9650 3 9938 9650 9937 3 9650 9938 9651 3 9939 9651 9938 3 9651 9939 9652 3 9940 9652 9939 3 9652 9940 9653 3 9941 9653 9940 3 9653 9941 9654 3 9942 9654 9941 3 9654 9942 9655 3 9943 9655 9942 3 9655 9943 9656 3 9944 9656 9943 3 9656 9944 9657 3 9945 9657 9944 3 9657 9945 9658 3 9946 9658 9945 3 9658 9946 9659 3 9947 9659 9946 3 9659 9947 9660 3 9948 9660 9947 3 9660 9948 9661 3 9949 9661 9948 3 9661 9949 9662 3 9950 9662 9949 3 9662 9950 9663 3 9951 9663 9950 3 9663 9951 9664 3 9952 9664 9951 3 9664 9952 9665 3 9953 9665 9952 3 9665 9953 9666 3 9954 9666 9953 3 9666 9954 9667 3 9955 9667 9954 3 9667 9955 9668 3 9956 9668 9955 3 9668 9956 9669 3 9957 9669 9956 3 9669 9957 9670 3 9958 
9670 9957 3 9670 9958 9671 3 9959 9671 9958 3 9671 9959 9672 3 9960 9672 9959 3 9672 9960 9673 3 9961 9673 9960 3 9673 9961 9674 3 9962 9674 9961 3 9674 9962 9675 3 9963 9675 9962 3 9675 9963 9676 3 9964 9676 9963 3 9676 9964 9677 3 9965 9677 9964 3 9677 9965 9678 3 9966 9678 9965 3 9678 9966 9679 3 9967 9679 9966 3 9679 9967 9680 3 9968 9680 9967 3 9680 9968 9681 3 9969 9681 9968 3 9681 9969 9684 3 9972 9684 9969 3 9682 9970 9683 3 9971 9683 9970 3 9682 9687 9970 3 9975 9970 9687 3 9684 9972 9685 3 9973 9685 9972 3 9685 9973 9688 3 9976 9688 9973 3 9686 9974 9687 3 9975 9687 9974 3 9686 9690 9974 3 9978 9974 9690 3 9688 9976 9977 3 9688 9977 9689 3 9689 9977 9979 3 9689 9979 9691 3 9690 9692 9978 3 9980 9978 9692 3 9691 9979 9981 3 9691 9981 9693 3 9692 9694 9980 3 9982 9980 9694 3 9693 9981 9983 3 9693 9983 9695 3 9694 9696 9982 3 9984 9982 9696 3 9695 9983 9985 3 9695 9985 9697 3 9696 9698 9984 3 9986 9984 9698 3 9697 9985 9987 3 9697 9987 9699 3 9698 9700 9986 3 9988 9986 9700 3 9699 9987 9989 3 9699 9989 9701 3 9700 9702 9988 3 9990 9988 9702 3 9701 9989 9991 3 9701 9991 9703 3 9702 9704 9990 3 9992 9990 9704 3 9703 9991 9993 3 9703 9993 9705 3 9704 9706 9992 3 9994 9992 9706 3 9705 9993 9995 3 9705 9995 9707 3 9706 9708 9994 3 9996 9994 9708 3 9707 9995 9997 3 9707 9997 9709 3 9708 9710 9996 3 9998 9996 9710 3 9709 9997 9999 3 9709 9999 9711 3 9710 9712 9998 3 10000 9998 9712 3 9711 9999 10001 3 9711 10001 9713 3 9712 9714 10000 3 10002 10000 9714 3 9713 10001 10003 3 9713 10003 9715 3 9714 9716 10002 3 10004 10002 9716 3 9715 10003 10005 3 9715 10005 9717 3 9716 9718 10004 3 10006 10004 9718 3 9717 10005 10007 3 9717 10007 9719 3 9718 9720 10006 3 10008 10006 9720 3 9719 10007 10009 3 9719 10009 9721 3 9720 9722 10008 3 10010 10008 9722 3 9721 10009 10011 3 9721 10011 9723 3 9722 9724 10010 3 10012 10010 9724 3 9723 10011 10013 3 9723 10013 9725 3 9724 9726 10012 3 10014 10012 9726 3 9725 10013 10015 3 9725 10015 9727 3 9726 9728 10014 3 10016 10014 9728 3 
9727 10015 10017 3 9727 10017 9729 3 9728 9730 10016 3 10018 10016 9730 3 9729 10017 10019 3 9729 10019 9731 3 9730 9732 10020 3 9730 10020 10018 3 9731 10019 10021 3 9731 10021 9733 3 9732 9734 10022 3 9732 10022 10020 3 9733 10021 10023 3 9733 10023 9735 3 9734 9736 10024 3 9734 10024 10022 3 9735 10023 10025 3 9735 10025 9737 3 9736 9738 10026 3 9736 10026 10024 3 9737 10025 10027 3 9737 10027 9739 3 9738 9740 10028 3 9738 10028 10026 3 9739 10027 10029 3 9739 10029 9741 3 9740 9742 10030 3 9740 10030 10028 3 9741 10029 10031 3 9741 10031 9743 3 9742 9744 10032 3 9742 10032 10030 3 9743 10031 10033 3 9743 10033 9745 3 9744 9746 10034 3 9744 10034 10032 3 9745 10033 10035 3 9745 10035 9747 3 9746 9748 10036 3 9746 10036 10034 3 9747 10035 10037 3 9747 10037 9749 3 9748 9750 10038 3 9748 10038 10036 3 9749 10037 9751 3 10039 9751 10037 3 9750 9752 10040 3 9750 10040 10038 3 9751 10039 9753 3 10041 9753 10039 3 9752 9754 10042 3 9752 10042 10040 3 9753 10041 9755 3 10043 9755 10041 3 9754 9756 10044 3 9754 10044 10042 3 9755 10043 9757 3 10045 9757 10043 3 9756 9758 10046 3 9756 10046 10044 3 9757 10045 9759 3 10047 9759 10045 3 9758 9760 10048 3 9758 10048 10046 3 9759 10047 9761 3 10049 9761 10047 3 9760 9762 10050 3 9760 10050 10048 3 9761 10049 9763 3 10051 9763 10049 3 9762 9764 10052 3 9762 10052 10050 3 9763 10051 9765 3 10053 9765 10051 3 9764 9766 10054 3 9764 10054 10052 3 9765 10053 9767 3 10055 9767 10053 3 9766 9768 10056 3 9766 10056 10054 3 9767 10055 9769 3 10057 9769 10055 3 9768 9770 10058 3 9768 10058 10056 3 9769 10057 9771 3 10059 9771 10057 3 9770 9772 10060 3 9770 10060 10058 3 9771 10059 9773 3 10061 9773 10059 3 9772 9774 10062 3 9772 10062 10060 3 9773 10061 9775 3 10063 9775 10061 3 9774 9776 10064 3 9774 10064 10062 3 9775 10063 9777 3 10065 9777 10063 3 9776 9778 10066 3 9776 10066 10064 3 9777 10065 9779 3 10067 9779 10065 3 9778 9780 10068 3 9778 10068 10066 3 9779 10067 9781 3 10069 9781 10067 3 9780 9782 10070 3 9780 10070 10068 3 
9781 10069 9783 3 10071 9783 10069 3 9782 9784 10072 3 9782 10072 10070 3 9783 10071 9785 3 10073 9785 10071 3 9784 9786 10074 3 9784 10074 10072 3 9785 10073 9787 3 10075 9787 10073 3 9786 9788 10076 3 9786 10076 10074 3 9787 10075 9789 3 10077 9789 10075 3 9788 9790 10078 3 9788 10078 10076 3 9789 10077 9791 3 10079 9791 10077 3 9790 9792 10078 3 10080 10078 9792 3 9791 10079 9793 3 10081 9793 10079 3 9792 9794 10080 3 10082 10080 9794 3 9793 10081 9795 3 10083 9795 10081 3 9794 9796 10082 3 10084 10082 9796 3 9795 10083 9797 3 10085 9797 10083 3 9796 9798 10084 3 10086 10084 9798 3 9797 10085 9799 3 10087 9799 10085 3 9798 9800 10086 3 10088 10086 9800 3 9799 10087 9801 3 10089 9801 10087 3 9800 9802 10088 3 10090 10088 9802 3 9801 10089 9803 3 10091 9803 10089 3 9802 9804 10090 3 10092 10090 9804 3 9803 10091 9805 3 10093 9805 10091 3 9804 9806 10092 3 10094 10092 9806 3 9805 10093 9807 3 10095 9807 10093 3 9806 9808 10094 3 10096 10094 9808 3 9807 10095 9809 3 10097 9809 10095 3 9808 9810 10096 3 10098 10096 9810 3 9809 10097 10099 3 9809 10099 9811 3 9810 9812 10098 3 10100 10098 9812 3 9811 10099 10101 3 9811 10101 9813 3 9812 9814 10100 3 10102 10100 9814 3 9813 10101 10103 3 9813 10103 9815 3 9814 9816 10102 3 10104 10102 9816 3 9815 10103 10105 3 9815 10105 9817 3 9816 9818 10104 3 10106 10104 9818 3 9817 10105 10107 3 9817 10107 9819 3 9818 9820 10106 3 10108 10106 9820 3 9819 10107 10109 3 9819 10109 9821 3 9820 9822 10108 3 10110 10108 9822 3 9821 10109 10111 3 9821 10111 9823 3 9822 9824 10110 3 10112 10110 9824 3 9823 10111 10115 3 9823 10115 9827 3 9824 9825 10112 3 10113 10112 9825 3 9825 9828 10113 3 10116 10113 9828 3 9826 9827 10114 3 10115 10114 9827 3 9826 10114 10119 3 9826 10119 9831 3 9828 9829 10116 3 10117 10116 9829 3 9829 9832 10117 3 10120 10117 9832 3 9830 9831 10118 3 10119 10118 9831 3 9830 10118 10188 3 9830 10188 9900 3 9832 9833 10120 3 10121 10120 9833 3 9833 9834 10121 3 10122 10121 9834 3 9834 9835 10122 3 10123 10122 9835 3 
9835 9836 10123 3 10124 10123 9836 3 9836 9837 10124 3 10125 10124 9837 3 9837 9838 10125 3 10126 10125 9838 3 9838 9839 10126 3 10127 10126 9839 3 9839 9840 10127 3 10128 10127 9840 3 9840 9841 10128 3 10129 10128 9841 3 9841 9842 10129 3 10130 10129 9842 3 9842 9843 10130 3 10131 10130 9843 3 9843 9844 10131 3 10132 10131 9844 3 9844 9845 10132 3 10133 10132 9845 3 9845 9846 10133 3 10134 10133 9846 3 9846 9847 10134 3 10135 10134 9847 3 9847 9848 10135 3 10136 10135 9848 3 9848 9849 10136 3 10137 10136 9849 3 9849 9850 10137 3 10138 10137 9850 3 9850 9851 10138 3 10139 10138 9851 3 9851 9852 10140 3 9851 10140 10139 3 9852 9853 10141 3 9852 10141 10140 3 9853 9854 10142 3 9853 10142 10141 3 9854 9855 10143 3 9854 10143 10142 3 9855 9856 10144 3 9855 10144 10143 3 9856 9857 10145 3 9856 10145 10144 3 9857 9858 10146 3 9857 10146 10145 3 9858 9859 10147 3 9858 10147 10146 3 9859 9860 10148 3 9859 10148 10147 3 9860 9861 10149 3 9860 10149 10148 3 9861 9862 10150 3 9861 10150 10149 3 9862 9863 10151 3 9862 10151 10150 3 9863 9864 10152 3 9863 10152 10151 3 9864 9865 10153 3 9864 10153 10152 3 9865 9866 10154 3 9865 10154 10153 3 9866 9867 10155 3 9866 10155 10154 3 9867 9868 10156 3 9867 10156 10155 3 9868 9869 10157 3 9868 10157 10156 3 9869 9870 10158 3 9869 10158 10157 3 9870 9871 10159 3 9870 10159 10158 3 9871 9872 10160 3 9871 10160 10159 3 9872 9873 10161 3 9872 10161 10160 3 9873 9874 10162 3 9873 10162 10161 3 9874 9875 10163 3 9874 10163 10162 3 9875 9876 10164 3 9875 10164 10163 3 9876 9877 10165 3 9876 10165 10164 3 9877 9878 10166 3 9877 10166 10165 3 9878 9879 10167 3 9878 10167 10166 3 9879 9880 10168 3 9879 10168 10167 3 9880 9881 10169 3 9880 10169 10168 3 9881 9882 10170 3 9881 10170 10169 3 9882 9883 10171 3 9882 10171 10170 3 9883 9884 10172 3 9883 10172 10171 3 9884 9885 10173 3 9884 10173 10172 3 9885 9886 10174 3 9885 10174 10173 3 9886 9887 10175 3 9886 10175 10174 3 9887 9888 10176 3 9887 10176 10175 3 9888 9889 10177 3 9888 10177 10176 3 
9889 9890 10178 3 9889 10178 10177 3 9890 9891 10179 3 9890 10179 10178 3 9891 9892 10180 3 9891 10180 10179 3 9892 9893 10181 3 9892 10181 10180 3 9893 9894 10182 3 9893 10182 10181 3 9894 9895 10183 3 9894 10183 10182 3 9895 9896 10184 3 9895 10184 10183 3 9896 9897 10185 3 9896 10185 10184 3 9897 9898 10186 3 9897 10186 10185 3 9898 9899 10187 3 9898 10187 10186 3 9899 9900 10188 3 9899 10188 10187 3 9901 10189 9902 3 10190 9902 10189 3 9901 9971 10189 3 10259 10189 9971 3 9902 10190 9903 3 10191 9903 10190 3 9903 10191 9904 3 10192 9904 10191 3 9904 10192 9905 3 10193 9905 10192 3 9905 10193 9906 3 10194 9906 10193 3 9906 10194 9907 3 10195 9907 10194 3 9907 10195 9908 3 10196 9908 10195 3 9908 10196 9909 3 10197 9909 10196 3 9909 10197 9910 3 10198 9910 10197 3 9910 10198 9911 3 10199 9911 10198 3 9911 10199 9912 3 10200 9912 10199 3 9912 10200 9913 3 10201 9913 10200 3 9913 10201 9914 3 10202 9914 10201 3 9914 10202 9915 3 10203 9915 10202 3 9915 10203 9916 3 10204 9916 10203 3 9916 10204 9917 3 10205 9917 10204 3 9917 10205 9918 3 10206 9918 10205 3 9918 10206 9919 3 10207 9919 10206 3 9919 10207 9920 3 10208 9920 10207 3 9920 10208 9921 3 10209 9921 10208 3 9921 10209 9922 3 10210 9922 10209 3 9922 10210 9923 3 10211 9923 10210 3 9923 10211 9924 3 10212 9924 10211 3 9924 10212 9925 3 10213 9925 10212 3 9925 10213 9926 3 10214 9926 10213 3 9926 10214 9927 3 10215 9927 10214 3 9927 10215 9928 3 10216 9928 10215 3 9928 10216 9929 3 10217 9929 10216 3 9929 10217 9930 3 10218 9930 10217 3 9930 10218 9931 3 10219 9931 10218 3 9931 10219 9932 3 10220 9932 10219 3 9932 10220 10221 3 9932 10221 9933 3 9933 10221 10222 3 9933 10222 9934 3 9934 10222 10223 3 9934 10223 9935 3 9935 10223 10224 3 9935 10224 9936 3 9936 10224 10225 3 9936 10225 9937 3 9937 10225 10226 3 9937 10226 9938 3 9938 10226 10227 3 9938 10227 9939 3 9939 10227 10228 3 9939 10228 9940 3 9940 10228 10229 3 9940 10229 9941 3 9941 10229 10230 3 9941 10230 9942 3 9942 10230 10231 3 9942 10231 9943 3 
9943 10231 10232 3 9943 10232 9944 3 9944 10232 10233 3 9944 10233 9945 3 9945 10233 10234 3 9945 10234 9946 3 9946 10234 10235 3 9946 10235 9947 3 9947 10235 10236 3 9947 10236 9948 3 9948 10236 10237 3 9948 10237 9949 3 9949 10237 10238 3 9949 10238 9950 3 9950 10238 10239 3 9950 10239 9951 3 9951 10239 10240 3 9951 10240 9952 3 9952 10240 10241 3 9952 10241 9953 3 9953 10241 10242 3 9953 10242 9954 3 9954 10242 10243 3 9954 10243 9955 3 9955 10243 10244 3 9955 10244 9956 3 9956 10244 10245 3 9956 10245 9957 3 9957 10245 10246 3 9957 10246 9958 3 9958 10246 10247 3 9958 10247 9959 3 9959 10247 10248 3 9959 10248 9960 3 9960 10248 10249 3 9960 10249 9961 3 9961 10249 10250 3 9961 10250 9962 3 9962 10250 10251 3 9962 10251 9963 3 9963 10251 10252 3 9963 10252 9964 3 9964 10252 10253 3 9964 10253 9965 3 9965 10253 10254 3 9965 10254 9966 3 9966 10254 10255 3 9966 10255 9967 3 9967 10255 10256 3 9967 10256 9968 3 9968 10256 10257 3 9968 10257 9969 3 9969 10257 10260 3 9969 10260 9972 3 9970 10258 10259 3 9970 10259 9971 3 9970 9975 10258 3 10263 10258 9975 3 9972 10260 10261 3 9972 10261 9973 3 9973 10261 10264 3 9973 10264 9976 3 9974 10262 10263 3 9974 10263 9975 3 9974 9978 10266 3 9974 10266 10262 3 9976 10264 10265 3 9976 10265 9977 3 9977 10265 10267 3 9977 10267 9979 3 9978 9980 10268 3 9978 10268 10266 3 9979 10267 10269 3 9979 10269 9981 3 9980 9982 10270 3 9980 10270 10268 3 9981 10269 10271 3 9981 10271 9983 3 9982 9984 10272 3 9982 10272 10270 3 9983 10271 10273 3 9983 10273 9985 3 9984 9986 10274 3 9984 10274 10272 3 9985 10273 10275 3 9985 10275 9987 3 9986 9988 10276 3 9986 10276 10274 3 9987 10275 10277 3 9987 10277 9989 3 9988 9990 10278 3 9988 10278 10276 3 9989 10277 10279 3 9989 10279 9991 3 9990 9992 10280 3 9990 10280 10278 3 9991 10279 10281 3 9991 10281 9993 3 9992 9994 10282 3 9992 10282 10280 3 9993 10281 10283 3 9993 10283 9995 3 9994 9996 10284 3 9994 10284 10282 3 9995 10283 9997 3 10285 9997 10283 3 9996 9998 10286 3 9996 10286 10284 3 
9997 10285 9999 3 10287 9999 10285 3 9998 10000 10288 3 9998 10288 10286 3 9999 10287 10001 3 10289 10001 10287 3 10000 10002 10290 3 10000 10290 10288 3 10001 10289 10003 3 10291 10003 10289 3 10002 10004 10292 3 10002 10292 10290 3 10003 10291 10005 3 10293 10005 10291 3 10004 10006 10294 3 10004 10294 10292 3 10005 10293 10007 3 10295 10007 10293 3 10006 10008 10296 3 10006 10296 10294 3 10007 10295 10009 3 10297 10009 10295 3 10008 10010 10298 3 10008 10298 10296 3 10009 10297 10011 3 10299 10011 10297 3 10010 10012 10300 3 10010 10300 10298 3 10011 10299 10013 3 10301 10013 10299 3 10012 10014 10302 3 10012 10302 10300 3 10013 10301 10015 3 10303 10015 10301 3 10014 10016 10304 3 10014 10304 10302 3 10015 10303 10017 3 10305 10017 10303 3 10016 10018 10306 3 10016 10306 10304 3 10017 10305 10019 3 10307 10019 10305 3 10018 10020 10308 3 10018 10308 10306 3 10019 10307 10021 3 10309 10021 10307 3 10020 10022 10310 3 10020 10310 10308 3 10021 10309 10023 3 10311 10023 10309 3 10022 10024 10312 3 10022 10312 10310 3 10023 10311 10025 3 10313 10025 10311 3 10024 10026 10314 3 10024 10314 10312 3 10025 10313 10027 3 10315 10027 10313 3 10026 10028 10316 3 10026 10316 10314 3 10027 10315 10029 3 10317 10029 10315 3 10028 10030 10318 3 10028 10318 10316 3 10029 10317 10031 3 10319 10031 10317 3 10030 10032 10320 3 10030 10320 10318 3 10031 10319 10033 3 10321 10033 10319 3 10032 10034 10322 3 10032 10322 10320 3 10033 10321 10035 3 10323 10035 10321 3 10034 10036 10324 3 10034 10324 10322 3 10035 10323 10037 3 10325 10037 10323 3 10036 10038 10324 3 10326 10324 10038 3 10037 10325 10039 3 10327 10039 10325 3 10038 10040 10326 3 10328 10326 10040 3 10039 10327 10041 3 10329 10041 10327 3 10040 10042 10328 3 10330 10328 10042 3 10041 10329 10043 3 10331 10043 10329 3 10042 10044 10330 3 10332 10330 10044 3 10043 10331 10045 3 10333 10045 10331 3 10044 10046 10332 3 10334 10332 10046 3 10045 10333 10047 3 10335 10047 10333 3 10046 10048 10334 3 10336 10334 10048 3 10047 
10335 10049 3 10337 10049 10335 3 10048 10050 10336 3 10338 10336 10050 3 10049 10337 10051 3 10339 10051 10337 3 10050 10052 10338 3 10340 10338 10052 3 10051 10339 10053 3 10341 10053 10339 3 10052 10054 10340 3 10342 10340 10054 3 10053 10341 10055 3 10343 10055 10341 3 10054 10056 10342 3 10344 10342 10056 3 10055 10343 10057 3 10345 10057 10343 3 10056 10058 10344 3 10346 10344 10058 3 10057 10345 10347 3 10057 10347 10059 3 10058 10060 10346 3 10348 10346 10060 3 10059 10347 10349 3 10059 10349 10061 3 10060 10062 10348 3 10350 10348 10062 3 10061 10349 10351 3 10061 10351 10063 3 10062 10064 10350 3 10352 10350 10064 3 10063 10351 10353 3 10063 10353 10065 3 10064 10066 10352 3 10354 10352 10066 3 10065 10353 10355 3 10065 10355 10067 3 10066 10068 10354 3 10356 10354 10068 3 10067 10355 10357 3 10067 10357 10069 3 10068 10070 10356 3 10358 10356 10070 3 10069 10357 10359 3 10069 10359 10071 3 10070 10072 10358 3 10360 10358 10072 3 10071 10359 10361 3 10071 10361 10073 3 10072 10074 10360 3 10362 10360 10074 3 10073 10361 10363 3 10073 10363 10075 3 10074 10076 10362 3 10364 10362 10076 3 10075 10363 10365 3 10075 10365 10077 3 10076 10078 10364 3 10366 10364 10078 3 10077 10365 10367 3 10077 10367 10079 3 10078 10080 10366 3 10368 10366 10080 3 10079 10367 10369 3 10079 10369 10081 3 10080 10082 10368 3 10370 10368 10082 3 10081 10369 10371 3 10081 10371 10083 3 10082 10084 10370 3 10372 10370 10084 3 10083 10371 10373 3 10083 10373 10085 3 10084 10086 10372 3 10374 10372 10086 3 10085 10373 10375 3 10085 10375 10087 3 10086 10088 10374 3 10376 10374 10088 3 10087 10375 10377 3 10087 10377 10089 3 10088 10090 10376 3 10378 10376 10090 3 10089 10377 10379 3 10089 10379 10091 3 10090 10092 10378 3 10380 10378 10092 3 10091 10379 10381 3 10091 10381 10093 3 10092 10094 10380 3 10382 10380 10094 3 10093 10381 10383 3 10093 10383 10095 3 10094 10096 10382 3 10384 10382 10096 3 10095 10383 10385 3 10095 10385 10097 3 10096 10098 10384 3 10386 10384 10098 3 10097 
10385 10387 3 10097 10387 10099 3 10098 10100 10388 3 10098 10388 10386 3 10099 10387 10389 3 10099 10389 10101 3 10100 10102 10390 3 10100 10390 10388 3 10101 10389 10391 3 10101 10391 10103 3 10102 10104 10392 3 10102 10392 10390 3 10103 10391 10393 3 10103 10393 10105 3 10104 10106 10394 3 10104 10394 10392 3 10105 10393 10395 3 10105 10395 10107 3 10106 10108 10396 3 10106 10396 10394 3 10107 10395 10397 3 10107 10397 10109 3 10108 10110 10398 3 10108 10398 10396 3 10109 10397 10399 3 10109 10399 10111 3 10110 10112 10400 3 10110 10400 10398 3 10111 10399 10403 3 10111 10403 10115 3 10112 10113 10401 3 10112 10401 10400 3 10113 10116 10404 3 10113 10404 10401 3 10114 10115 10403 3 10114 10403 10402 3 10114 10402 10407 3 10114 10407 10119 3 10116 10117 10405 3 10116 10405 10404 3 10117 10120 10408 3 10117 10408 10405 3 10118 10119 10407 3 10118 10407 10406 3 10118 10406 10476 3 10118 10476 10188 3 10120 10121 10409 3 10120 10409 10408 3 10121 10122 10410 3 10121 10410 10409 3 10122 10123 10411 3 10122 10411 10410 3 10123 10124 10412 3 10123 10412 10411 3 10124 10125 10413 3 10124 10413 10412 3 10125 10126 10414 3 10125 10414 10413 3 10126 10127 10415 3 10126 10415 10414 3 10127 10128 10416 3 10127 10416 10415 3 10128 10129 10417 3 10128 10417 10416 3 10129 10130 10418 3 10129 10418 10417 3 10130 10131 10419 3 10130 10419 10418 3 10131 10132 10420 3 10131 10420 10419 3 10132 10133 10421 3 10132 10421 10420 3 10133 10134 10422 3 10133 10422 10421 3 10134 10135 10423 3 10134 10423 10422 3 10135 10136 10424 3 10135 10424 10423 3 10136 10137 10425 3 10136 10425 10424 3 10137 10138 10426 3 10137 10426 10425 3 10138 10139 10427 3 10138 10427 10426 3 10139 10140 10428 3 10139 10428 10427 3 10140 10141 10429 3 10140 10429 10428 3 10141 10142 10430 3 10141 10430 10429 3 10142 10143 10431 3 10142 10431 10430 3 10143 10144 10432 3 10143 10432 10431 3 10144 10145 10433 3 10144 10433 10432 3 10145 10146 10434 3 10145 10434 10433 3 10146 10147 10435 3 10146 10435 10434 3 10147 
10148 10436 3 10147 10436 10435 3 10148 10149 10437 3 10148 10437 10436 3 10149 10150 10438 3 10149 10438 10437 3 10150 10151 10439 3 10150 10439 10438 3 10151 10152 10440 3 10151 10440 10439 3 10152 10153 10441 3 10152 10441 10440 3 10153 10154 10442 3 10153 10442 10441 3 10154 10155 10443 3 10154 10443 10442 3 10155 10156 10444 3 10155 10444 10443 3 10156 10157 10445 3 10156 10445 10444 3 10157 10158 10446 3 10157 10446 10445 3 10158 10159 10447 3 10158 10447 10446 3 10159 10160 10447 3 10448 10447 10160 3 10160 10161 10448 3 10449 10448 10161 3 10161 10162 10449 3 10450 10449 10162 3 10162 10163 10450 3 10451 10450 10163 3 10163 10164 10451 3 10452 10451 10164 3 10164 10165 10452 3 10453 10452 10165 3 10165 10166 10453 3 10454 10453 10166 3 10166 10167 10454 3 10455 10454 10167 3 10167 10168 10455 3 10456 10455 10168 3 10168 10169 10456 3 10457 10456 10169 3 10169 10170 10457 3 10458 10457 10170 3 10170 10171 10458 3 10459 10458 10171 3 10171 10172 10459 3 10460 10459 10172 3 10172 10173 10460 3 10461 10460 10173 3 10173 10174 10461 3 10462 10461 10174 3 10174 10175 10462 3 10463 10462 10175 3 10175 10176 10463 3 10464 10463 10176 3 10176 10177 10464 3 10465 10464 10177 3 10177 10178 10465 3 10466 10465 10178 3 10178 10179 10466 3 10467 10466 10179 3 10179 10180 10467 3 10468 10467 10180 3 10180 10181 10468 3 10469 10468 10181 3 10181 10182 10469 3 10470 10469 10182 3 10182 10183 10470 3 10471 10470 10183 3 10183 10184 10471 3 10472 10471 10184 3 10184 10185 10472 3 10473 10472 10185 3 10185 10186 10473 3 10474 10473 10186 3 10186 10187 10474 3 10475 10474 10187 3 10187 10188 10475 3 10476 10475 10188 3 10189 10477 10478 3 10189 10478 10190 3 10189 10259 10547 3 10189 10547 10477 3 10190 10478 10479 3 10190 10479 10191 3 10191 10479 10480 3 10191 10480 10192 3 10192 10480 10481 3 10192 10481 10193 3 10193 10481 10482 3 10193 10482 10194 3 10194 10482 10483 3 10194 10483 10195 3 10195 10483 10484 3 10195 10484 10196 3 10196 10484 10485 3 10196 10485 10197 3 10197 
10485 10486 3 10197 10486 10198 3 10198 10486 10487 3 10198 10487 10199 3 10199 10487 10488 3 10199 10488 10200 3 10200 10488 10489 3 10200 10489 10201 3 10201 10489 10490 3 10201 10490 10202 3 10202 10490 10491 3 10202 10491 10203 3 10203 10491 10492 3 10203 10492 10204 3 10204 10492 10493 3 10204 10493 10205 3 10205 10493 10494 3 10205 10494 10206 3 10206 10494 10495 3 10206 10495 10207 3 10207 10495 10496 3 10207 10496 10208 3 10208 10496 10497 3 10208 10497 10209 3 10209 10497 10498 3 10209 10498 10210 3 10210 10498 10499 3 10210 10499 10211 3 10211 10499 10500 3 10211 10500 10212 3 10212 10500 10501 3 10212 10501 10213 3 10213 10501 10502 3 10213 10502 10214 3 10214 10502 10503 3 10214 10503 10215 3 10215 10503 10504 3 10215 10504 10216 3 10216 10504 10505 3 10216 10505 10217 3 10217 10505 10506 3 10217 10506 10218 3 10218 10506 10507 3 10218 10507 10219 3 10219 10507 10508 3 10219 10508 10220 3 10220 10508 10509 3 10220 10509 10221 3 10221 10509 10510 3 10221 10510 10222 3 10222 10510 10511 3 10222 10511 10223 3 10223 10511 10512 3 10223 10512 10224 3 10224 10512 10513 3 10224 10513 10225 3 10225 10513 10514 3 10225 10514 10226 3 10226 10514 10515 3 10226 10515 10227 3 10227 10515 10516 3 10227 10516 10228 3 10228 10516 10517 3 10228 10517 10229 3 10229 10517 10518 3 10229 10518 10230 3 10230 10518 10519 3 10230 10519 10231 3 10231 10519 10520 3 10231 10520 10232 3 10232 10520 10521 3 10232 10521 10233 3 10233 10521 10522 3 10233 10522 10234 3 10234 10522 10523 3 10234 10523 10235 3 10235 10523 10524 3 10235 10524 10236 3 10236 10524 10525 3 10236 10525 10237 3 10237 10525 10526 3 10237 10526 10238 3 10238 10526 10527 3 10238 10527 10239 3 10239 10527 10528 3 10239 10528 10240 3 10240 10528 10529 3 10240 10529 10241 3 10241 10529 10530 3 10241 10530 10242 3 10242 10530 10531 3 10242 10531 10243 3 10243 10531 10532 3 10243 10532 10244 3 10244 10532 10245 3 10533 10245 10532 3 10245 10533 10246 3 10534 10246 10533 3 10246 10534 10247 3 10535 10247 10534 3 10247 
10535 10248 3 10536 10248 10535 3 10248 10536 10249 3 10537 10249 10536 3 10249 10537 10250 3 10538 10250 10537 3 10250 10538 10251 3 10539 10251 10538 3 10251 10539 10252 3 10540 10252 10539 3 10252 10540 10253 3 10541 10253 10540 3 10253 10541 10254 3 10542 10254 10541 3 10254 10542 10255 3 10543 10255 10542 3 10255 10543 10256 3 10544 10256 10543 3 10256 10544 10257 3 10545 10257 10544 3 10257 10545 10260 3 10548 10260 10545 3 10258 10546 10259 3 10547 10259 10546 3 10258 10263 10551 3 10258 10551 10546 3 10260 10548 10261 3 10549 10261 10548 3 10261 10549 10264 3 10552 10264 10549 3 10262 10550 10263 3 10551 10263 10550 3 10262 10266 10554 3 10262 10554 10550 3 10264 10552 10265 3 10553 10265 10552 3 10265 10553 10267 3 10555 10267 10553 3 10266 10268 10556 3 10266 10556 10554 3 10267 10555 10269 3 10557 10269 10555 3 10268 10270 10558 3 10268 10558 10556 3 10269 10557 10271 3 10559 10271 10557 3 10270 10272 10560 3 10270 10560 10558 3 10271 10559 10273 3 10561 10273 10559 3 10272 10274 10562 3 10272 10562 10560 3 10273 10561 10275 3 10563 10275 10561 3 10274 10276 10564 3 10274 10564 10562 3 10275 10563 10277 3 10565 10277 10563 3 10276 10278 10566 3 10276 10566 10564 3 10277 10565 10279 3 10567 10279 10565 3 10278 10280 10568 3 10278 10568 10566 3 10279 10567 10281 3 10569 10281 10567 3 10280 10282 10570 3 10280 10570 10568 3 10281 10569 10283 3 10571 10283 10569 3 10282 10284 10572 3 10282 10572 10570 3 10283 10571 10285 3 10573 10285 10571 3 10284 10286 10572 3 10574 10572 10286 3 10285 10573 10287 3 10575 10287 10573 3 10286 10288 10574 3 10576 10574 10288 3 10287 10575 10289 3 10577 10289 10575 3 10288 10290 10576 3 10578 10576 10290 3 10289 10577 10291 3 10579 10291 10577 3 10290 10292 10578 3 10580 10578 10292 3 10291 10579 10293 3 10581 10293 10579 3 10292 10294 10580 3 10582 10580 10294 3 10293 10581 10295 3 10583 10295 10581 3 10294 10296 10582 3 10584 10582 10296 3 10295 10583 10297 3 10585 10297 10583 3 10296 10298 10584 3 10586 10584 10298 3 10297 
10585 10299 3 10587 10299 10585 3 10298 10300 10586 3 10588 10586 10300 3 10299 10587 10301 3 10589 10301 10587 3 10300 10302 10588 3 10590 10588 10302 3 10301 10589 10303 3 10591 10303 10589 3 10302 10304 10590 3 10592 10590 10304 3 10303 10591 10305 3 10593 10305 10591 3 10304 10306 10592 3 10594 10592 10306 3 10305 10593 10307 3 10595 10307 10593 3 10306 10308 10594 3 10596 10594 10308 3 10307 10595 10597 3 10307 10597 10309 3 10308 10310 10596 3 10598 10596 10310 3 10309 10597 10599 3 10309 10599 10311 3 10310 10312 10598 3 10600 10598 10312 3 10311 10599 10601 3 10311 10601 10313 3 10312 10314 10600 3 10602 10600 10314 3 10313 10601 10603 3 10313 10603 10315 3 10314 10316 10602 3 10604 10602 10316 3 10315 10603 10605 3 10315 10605 10317 3 10316 10318 10604 3 10606 10604 10318 3 10317 10605 10607 3 10317 10607 10319 3 10318 10320 10606 3 10608 10606 10320 3 10319 10607 10609 3 10319 10609 10321 3 10320 10322 10608 3 10610 10608 10322 3 10321 10609 10611 3 10321 10611 10323 3 10322 10324 10610 3 10612 10610 10324 3 10323 10611 10613 3 10323 10613 10325 3 10324 10326 10612 3 10614 10612 10326 3 10325 10613 10615 3 10325 10615 10327 3 10326 10328 10614 3 10616 10614 10328 3 10327 10615 10617 3 10327 10617 10329 3 10328 10330 10616 3 10618 10616 10330 3 10329 10617 10619 3 10329 10619 10331 3 10330 10332 10618 3 10620 10618 10332 3 10331 10619 10621 3 10331 10621 10333 3 10332 10334 10620 3 10622 10620 10334 3 10333 10621 10623 3 10333 10623 10335 3 10334 10336 10622 3 10624 10622 10336 3 10335 10623 10625 3 10335 10625 10337 3 10336 10338 10624 3 10626 10624 10338 3 10337 10625 10627 3 10337 10627 10339 3 10338 10340 10626 3 10628 10626 10340 3 10339 10627 10629 3 10339 10629 10341 3 10340 10342 10628 3 10630 10628 10342 3 10341 10629 10631 3 10341 10631 10343 3 10342 10344 10630 3 10632 10630 10344 3 10343 10631 10633 3 10343 10633 10345 3 10344 10346 10632 3 10634 10632 10346 3 10345 10633 10635 3 10345 10635 10347 3 10346 10348 10636 3 10346 10636 10634 3 10347 
10635 10637 3 10347 10637 10349 3 10348 10350 10638 3 10348 10638 10636 3 10349 10637 10639 3 10349 10639 10351 3 10350 10352 10640 3 10350 10640 10638 3 10351 10639 10641 3 10351 10641 10353 3 10352 10354 10642 3 10352 10642 10640 3 10353 10641 10643 3 10353 10643 10355 3 10354 10356 10644 3 10354 10644 10642 3 10355 10643 10645 3 10355 10645 10357 3 10356 10358 10646 3 10356 10646 10644 3 10357 10645 10647 3 10357 10647 10359 3 10358 10360 10648 3 10358 10648 10646 3 10359 10647 10649 3 10359 10649 10361 3 10360 10362 10650 3 10360 10650 10648 3 10361 10649 10651 3 10361 10651 10363 3 10362 10364 10652 3 10362 10652 10650 3 10363 10651 10653 3 10363 10653 10365 3 10364 10366 10654 3 10364 10654 10652 3 10365 10653 10655 3 10365 10655 10367 3 10366 10368 10656 3 10366 10656 10654 3 10367 10655 10657 3 10367 10657 10369 3 10368 10370 10658 3 10368 10658 10656 3 10369 10657 10659 3 10369 10659 10371 3 10370 10372 10660 3 10370 10660 10658 3 10371 10659 10373 3 10661 10373 10659 3 10372 10374 10662 3 10372 10662 10660 3 10373 10661 10375 3 10663 10375 10661 3 10374 10376 10664 3 10374 10664 10662 3 10375 10663 10377 3 10665 10377 10663 3 10376 10378 10666 3 10376 10666 10664 3 10377 10665 10379 3 10667 10379 10665 3 10378 10380 10668 3 10378 10668 10666 3 10379 10667 10381 3 10669 10381 10667 3 10380 10382 10670 3 10380 10670 10668 3 10381 10669 10383 3 10671 10383 10669 3 10382 10384 10672 3 10382 10672 10670 3 10383 10671 10385 3 10673 10385 10671 3 10384 10386 10674 3 10384 10674 10672 3 10385 10673 10387 3 10675 10387 10673 3 10386 10388 10676 3 10386 10676 10674 3 10387 10675 10389 3 10677 10389 10675 3 10388 10390 10678 3 10388 10678 10676 3 10389 10677 10391 3 10679 10391 10677 3 10390 10392 10680 3 10390 10680 10678 3 10391 10679 10393 3 10681 10393 10679 3 10392 10394 10682 3 10392 10682 10680 3 10393 10681 10395 3 10683 10395 10681 3 10394 10396 10684 3 10394 10684 10682 3 10395 10683 10397 3 10685 10397 10683 3 10396 10398 10686 3 10396 10686 10684 3 10397 
10685 10399 3 10687 10399 10685 3 10398 10400 10688 3 10398 10688 10686 3 10399 10687 10403 3 10691 10403 10687 3 10400 10401 10689 3 10400 10689 10688 3 10401 10404 10692 3 10401 10692 10689 3 10402 10403 10691 3 10402 10691 10690 3 10402 10690 10407 3 10695 10407 10690 3 10404 10405 10693 3 10404 10693 10692 3 10405 10408 10696 3 10405 10696 10693 3 10406 10407 10695 3 10406 10695 10694 3 10406 10694 10476 3 10764 10476 10694 3 10408 10409 10697 3 10408 10697 10696 3 10409 10410 10697 3 10698 10697 10410 3 10410 10411 10698 3 10699 10698 10411 3 10411 10412 10699 3 10700 10699 10412 3 10412 10413 10700 3 10701 10700 10413 3 10413 10414 10701 3 10702 10701 10414 3 10414 10415 10702 3 10703 10702 10415 3 10415 10416 10703 3 10704 10703 10416 3 10416 10417 10704 3 10705 10704 10417 3 10417 10418 10705 3 10706 10705 10418 3 10418 10419 10706 3 10707 10706 10419 3 10419 10420 10707 3 10708 10707 10420 3 10420 10421 10708 3 10709 10708 10421 3 10421 10422 10709 3 10710 10709 10422 3 10422 10423 10710 3 10711 10710 10423 3 10423 10424 10711 3 10712 10711 10424 3 10424 10425 10712 3 10713 10712 10425 3 10425 10426 10713 3 10714 10713 10426 3 10426 10427 10714 3 10715 10714 10427 3 10427 10428 10715 3 10716 10715 10428 3 10428 10429 10716 3 10717 10716 10429 3 10429 10430 10717 3 10718 10717 10430 3 10430 10431 10718 3 10719 10718 10431 3 10431 10432 10719 3 10720 10719 10432 3 10432 10433 10720 3 10721 10720 10433 3 10433 10434 10721 3 10722 10721 10434 3 10434 10435 10722 3 10723 10722 10435 3 10435 10436 10723 3 10724 10723 10436 3 10436 10437 10724 3 10725 10724 10437 3 10437 10438 10725 3 10726 10725 10438 3 10438 10439 10726 3 10727 10726 10439 3 10439 10440 10727 3 10728 10727 10440 3 10440 10441 10728 3 10729 10728 10441 3 10441 10442 10729 3 10730 10729 10442 3 10442 10443 10730 3 10731 10730 10443 3 10443 10444 10731 3 10732 10731 10444 3 10444 10445 10732 3 10733 10732 10445 3 10445 10446 10733 3 10734 10733 10446 3 10446 10447 10734 3 10735 10734 10447 3 10447 
10448 10735 3 10736 10735 10448 3 10448 10449 10736 3 10737 10736 10449 3 10449 10450 10737 3 10738 10737 10450 3 10450 10451 10738 3 10739 10738 10451 3 10451 10452 10739 3 10740 10739 10452 3 10452 10453 10740 3 10741 10740 10453 3 10453 10454 10741 3 10742 10741 10454 3 10454 10455 10742 3 10743 10742 10455 3 10455 10456 10743 3 10744 10743 10456 3 10456 10457 10744 3 10745 10744 10457 3 10457 10458 10745 3 10746 10745 10458 3 10458 10459 10746 3 10747 10746 10459 3 10459 10460 10747 3 10748 10747 10460 3 10460 10461 10748 3 10749 10748 10461 3 10461 10462 10749 3 10750 10749 10462 3 10462 10463 10750 3 10751 10750 10463 3 10463 10464 10751 3 10752 10751 10464 3 10464 10465 10752 3 10753 10752 10465 3 10465 10466 10753 3 10754 10753 10466 3 10466 10467 10754 3 10755 10754 10467 3 10467 10468 10755 3 10756 10755 10468 3 10468 10469 10756 3 10757 10756 10469 3 10469 10470 10757 3 10758 10757 10470 3 10470 10471 10758 3 10759 10758 10471 3 10471 10472 10759 3 10760 10759 10472 3 10472 10473 10761 3 10472 10761 10760 3 10473 10474 10762 3 10473 10762 10761 3 10474 10475 10763 3 10474 10763 10762 3 10475 10476 10764 3 10475 10764 10763 3 10477 10765 10766 3 10477 10766 10478 3 10477 10547 10835 3 10477 10835 10765 3 10478 10766 10767 3 10478 10767 10479 3 10479 10767 10768 3 10479 10768 10480 3 10480 10768 10769 3 10480 10769 10481 3 10481 10769 10770 3 10481 10770 10482 3 10482 10770 10771 3 10482 10771 10483 3 10483 10771 10772 3 10483 10772 10484 3 10484 10772 10773 3 10484 10773 10485 3 10485 10773 10774 3 10485 10774 10486 3 10486 10774 10775 3 10486 10775 10487 3 10487 10775 10776 3 10487 10776 10488 3 10488 10776 10777 3 10488 10777 10489 3 10489 10777 10778 3 10489 10778 10490 3 10490 10778 10779 3 10490 10779 10491 3 10491 10779 10780 3 10491 10780 10492 3 10492 10780 10781 3 10492 10781 10493 3 10493 10781 10782 3 10493 10782 10494 3 10494 10782 10783 3 10494 10783 10495 3 10495 10783 10784 3 10495 10784 10496 3 10496 10784 10785 3 10496 10785 10497 3 10497 
10785 10498 3 10786 10498 10785 3 10498 10786 10499 3 10787 10499 10786 3 10499 10787 10500 3 10788 10500 10787 3 10500 10788 10501 3 10789 10501 10788 3 10501 10789 10502 3 10790 10502 10789 3 10502 10790 10503 3 10791 10503 10790 3 10503 10791 10504 3 10792 10504 10791 3 10504 10792 10505 3 10793 10505 10792 3 10505 10793 10506 3 10794 10506 10793 3 10506 10794 10507 3 10795 10507 10794 3 10507 10795 10508 3 10796 10508 10795 3 10508 10796 10509 3 10797 10509 10796 3 10509 10797 10510 3 10798 10510 10797 3 10510 10798 10511 3 10799 10511 10798 3 10511 10799 10512 3 10800 10512 10799 3 10512 10800 10513 3 10801 10513 10800 3 10513 10801 10514 3 10802 10514 10801 3 10514 10802 10515 3 10803 10515 10802 3 10515 10803 10516 3 10804 10516 10803 3 10516 10804 10517 3 10805 10517 10804 3 10517 10805 10518 3 10806 10518 10805 3 10518 10806 10519 3 10807 10519 10806 3 10519 10807 10520 3 10808 10520 10807 3 10520 10808 10521 3 10809 10521 10808 3 10521 10809 10522 3 10810 10522 10809 3 10522 10810 10523 3 10811 10523 10810 3 10523 10811 10524 3 10812 10524 10811 3 10524 10812 10525 3 10813 10525 10812 3 10525 10813 10526 3 10814 10526 10813 3 10526 10814 10527 3 10815 10527 10814 3 10527 10815 10528 3 10816 10528 10815 3 10528 10816 10529 3 10817 10529 10816 3 10529 10817 10530 3 10818 10530 10817 3 10530 10818 10531 3 10819 10531 10818 3 10531 10819 10532 3 10820 10532 10819 3 10532 10820 10533 3 10821 10533 10820 3 10533 10821 10534 3 10822 10534 10821 3 10534 10822 10535 3 10823 10535 10822 3 10535 10823 10536 3 10824 10536 10823 3 10536 10824 10537 3 10825 10537 10824 3 10537 10825 10538 3 10826 10538 10825 3 10538 10826 10539 3 10827 10539 10826 3 10539 10827 10540 3 10828 10540 10827 3 10540 10828 10541 3 10829 10541 10828 3 10541 10829 10542 3 10830 10542 10829 3 10542 10830 10543 3 10831 10543 10830 3 10543 10831 10544 3 10832 10544 10831 3 10544 10832 10545 3 10833 10545 10832 3 10545 10833 10548 3 10836 10548 10833 3 10546 10834 10547 3 10835 10547 10834 3 10546 
10551 10834 3 10839 10834 10551 3 10548 10836 10549 3 10837 10549 10836 3 10549 10837 10552 3 10840 10552 10837 3 10550 10838 10551 3 10839 10551 10838 3 10550 10554 10838 3 10842 10838 10554 3 10552 10840 10553 3 10841 10553 10840 3 10553 10841 10555 3 10843 10555 10841 3 10554 10556 10842 3 10844 10842 10556 3 10555 10843 10557 3 10845 10557 10843 3 10556 10558 10844 3 10846 10844 10558 3 10557 10845 10559 3 10847 10559 10845 3 10558 10560 10846 3 10848 10846 10560 3 10559 10847 10561 3 10849 10561 10847 3 10560 10562 10848 3 10850 10848 10562 3 10561 10849 10851 3 10561 10851 10563 3 10562 10564 10850 3 10852 10850 10564 3 10563 10851 10853 3 10563 10853 10565 3 10564 10566 10852 3 10854 10852 10566 3 10565 10853 10855 3 10565 10855 10567 3 10566 10568 10854 3 10856 10854 10568 3 10567 10855 10857 3 10567 10857 10569 3 10568 10570 10856 3 10858 10856 10570 3 10569 10857 10859 3 10569 10859 10571 3 10570 10572 10858 3 10860 10858 10572 3 10571 10859 10861 3 10571 10861 10573 3 10572 10574 10860 3 10862 10860 10574 3 10573 10861 10863 3 10573 10863 10575 3 10574 10576 10862 3 10864 10862 10576 3 10575 10863 10865 3 10575 10865 10577 3 10576 10578 10864 3 10866 10864 10578 3 10577 10865 10867 3 10577 10867 10579 3 10578 10580 10866 3 10868 10866 10580 3 10579 10867 10869 3 10579 10869 10581 3 10580 10582 10868 3 10870 10868 10582 3 10581 10869 10871 3 10581 10871 10583 3 10582 10584 10870 3 10872 10870 10584 3 10583 10871 10873 3 10583 10873 10585 3 10584 10586 10872 3 10874 10872 10586 3 10585 10873 10875 3 10585 10875 10587 3 10586 10588 10874 3 10876 10874 10588 3 10587 10875 10877 3 10587 10877 10589 3 10588 10590 10876 3 10878 10876 10590 3 10589 10877 10879 3 10589 10879 10591 3 10590 10592 10878 3 10880 10878 10592 3 10591 10879 10881 3 10591 10881 10593 3 10592 10594 10880 3 10882 10880 10594 3 10593 10881 10883 3 10593 10883 10595 3 10594 10596 10882 3 10884 10882 10596 3 10595 10883 10885 3 10595 10885 10597 3 10596 10598 10884 3 10886 10884 10598 3 10597 
10885 10887 3 10597 10887 10599 3 10598 10600 10888 3 10598 10888 10886 3 10599 10887 10889 3 10599 10889 10601 3 10600 10602 10890 3 10600 10890 10888 3 10601 10889 10891 3 10601 10891 10603 3 10602 10604 10892 3 10602 10892 10890 3 10603 10891 10893 3 10603 10893 10605 3 10604 10606 10894 3 10604 10894 10892 3 10605 10893 10895 3 10605 10895 10607 3 10606 10608 10896 3 10606 10896 10894 3 10607 10895 10897 3 10607 10897 10609 3 10608 10610 10898 3 10608 10898 10896 3 10609 10897 10899 3 10609 10899 10611 3 10610 10612 10900 3 10610 10900 10898 3 10611 10899 10901 3 10611 10901 10613 3 10612 10614 10902 3 10612 10902 10900 3 10613 10901 10903 3 10613 10903 10615 3 10614 10616 10904 3 10614 10904 10902 3 10615 10903 10905 3 10615 10905 10617 3 10616 10618 10906 3 10616 10906 10904 3 10617 10905 10907 3 10617 10907 10619 3 10618 10620 10908 3 10618 10908 10906 3 10619 10907 10909 3 10619 10909 10621 3 10620 10622 10910 3 10620 10910 10908 3 10621 10909 10911 3 10621 10911 10623 3 10622 10624 10912 3 10622 10912 10910 3 10623 10911 10913 3 10623 10913 10625 3 10624 10626 10914 3 10624 10914 10912 3 10625 10913 10627 3 10915 10627 10913 3 10626 10628 10916 3 10626 10916 10914 3 10627 10915 10629 3 10917 10629 10915 3 10628 10630 10918 3 10628 10918 10916 3 10629 10917 10631 3 10919 10631 10917 3 10630 10632 10920 3 10630 10920 10918 3 10631 10919 10633 3 10921 10633 10919 3 10632 10634 10922 3 10632 10922 10920 3 10633 10921 10635 3 10923 10635 10921 3 10634 10636 10924 3 10634 10924 10922 3 10635 10923 10637 3 10925 10637 10923 3 10636 10638 10926 3 10636 10926 10924 3 10637 10925 10639 3 10927 10639 10925 3 10638 10640 10928 3 10638 10928 10926 3 10639 10927 10641 3 10929 10641 10927 3 10640 10642 10930 3 10640 10930 10928 3 10641 10929 10643 3 10931 10643 10929 3 10642 10644 10932 3 10642 10932 10930 3 10643 10931 10645 3 10933 10645 10931 3 10644 10646 10934 3 10644 10934 10932 3 10645 10933 10647 3 10935 10647 10933 3 10646 10648 10936 3 10646 10936 10934 3 10647 
10935 10649 3 10937 10649 10935 3 10648 10650 10938 3 10648 10938 10936 3 10649 10937 10651 3 10939 10651 10937 3 10650 10652 10940 3 10650 10940 10938 3 10651 10939 10653 3 10941 10653 10939 3 10652 10654 10942 3 10652 10942 10940 3 10653 10941 10655 3 10943 10655 10941 3 10654 10656 10944 3 10654 10944 10942 3 10655 10943 10657 3 10945 10657 10943 3 10656 10658 10946 3 10656 10946 10944 3 10657 10945 10659 3 10947 10659 10945 3 10658 10660 10948 3 10658 10948 10946 3 10659 10947 10661 3 10949 10661 10947 3 10660 10662 10950 3 10660 10950 10948 3 10661 10949 10663 3 10951 10663 10949 3 10662 10664 10950 3 10952 10950 10664 3 10663 10951 10665 3 10953 10665 10951 3 10664 10666 10952 3 10954 10952 10666 3 10665 10953 10667 3 10955 10667 10953 3 10666 10668 10954 3 10956 10954 10668 3 10667 10955 10669 3 10957 10669 10955 3 10668 10670 10956 3 10958 10956 10670 3 10669 10957 10671 3 10959 10671 10957 3 10670 10672 10958 3 10960 10958 10672 3 10671 10959 10673 3 10961 10673 10959 3 10672 10674 10960 3 10962 10960 10674 3 10673 10961 10675 3 10963 10675 10961 3 10674 10676 10962 3 10964 10962 10676 3 10675 10963 10677 3 10965 10677 10963 3 10676 10678 10964 3 10966 10964 10678 3 10677 10965 10679 3 10967 10679 10965 3 10678 10680 10966 3 10968 10966 10680 3 10679 10967 10681 3 10969 10681 10967 3 10680 10682 10968 3 10970 10968 10682 3 10681 10969 10683 3 10971 10683 10969 3 10682 10684 10970 3 10972 10970 10684 3 10683 10971 10685 3 10973 10685 10971 3 10684 10686 10972 3 10974 10972 10686 3 10685 10973 10687 3 10975 10687 10973 3 10686 10688 10974 3 10976 10974 10688 3 10687 10975 10691 3 10979 10691 10975 3 10688 10689 10976 3 10977 10976 10689 3 10689 10692 10977 3 10980 10977 10692 3 10690 10691 10978 3 10979 10978 10691 3 10690 10978 10983 3 10690 10983 10695 3 10692 10693 10980 3 10981 10980 10693 3 10693 10696 10981 3 10984 10981 10696 3 10694 10695 10982 3 10983 10982 10695 3 10694 10982 11052 3 10694 11052 10764 3 10696 10697 10984 3 10985 10984 10697 3 10697 
10698 10985 3 10986 10985 10698 3 10698 10699 10986 3 10987 10986 10699 3 10699 10700 10987 3 10988 10987 10700 3 10700 10701 10988 3 10989 10988 10701 3 10701 10702 10989 3 10990 10989 10702 3 10702 10703 10990 3 10991 10990 10703 3 10703 10704 10991 3 10992 10991 10704 3 10704 10705 10992 3 10993 10992 10705 3 10705 10706 10993 3 10994 10993 10706 3 10706 10707 10994 3 10995 10994 10707 3 10707 10708 10995 3 10996 10995 10708 3 10708 10709 10996 3 10997 10996 10709 3 10709 10710 10997 3 10998 10997 10710 3 10710 10711 10998 3 10999 10998 10711 3 10711 10712 10999 3 11000 10999 10712 3 10712 10713 11000 3 11001 11000 10713 3 10713 10714 11001 3 11002 11001 10714 3 10714 10715 11002 3 11003 11002 10715 3 10715 10716 11003 3 11004 11003 10716 3 10716 10717 11004 3 11005 11004 10717 3 10717 10718 11005 3 11006 11005 10718 3 10718 10719 11006 3 11007 11006 10719 3 10719 10720 11007 3 11008 11007 10720 3 10720 10721 11008 3 11009 11008 10721 3 10721 10722 11009 3 11010 11009 10722 3 10722 10723 11010 3 11011 11010 10723 3 10723 10724 11011 3 11012 11011 10724 3 10724 10725 11012 3 11013 11012 10725 3 10725 10726 11014 3 10725 11014 11013 3 10726 10727 11015 3 10726 11015 11014 3 10727 10728 11016 3 10727 11016 11015 3 10728 10729 11017 3 10728 11017 11016 3 10729 10730 11018 3 10729 11018 11017 3 10730 10731 11019 3 10730 11019 11018 3 10731 10732 11020 3 10731 11020 11019 3 10732 10733 11021 3 10732 11021 11020 3 10733 10734 11022 3 10733 11022 11021 3 10734 10735 11023 3 10734 11023 11022 3 10735 10736 11024 3 10735 11024 11023 3 10736 10737 11025 3 10736 11025 11024 3 10737 10738 11026 3 10737 11026 11025 3 10738 10739 11027 3 10738 11027 11026 3 10739 10740 11028 3 10739 11028 11027 3 10740 10741 11029 3 10740 11029 11028 3 10741 10742 11030 3 10741 11030 11029 3 10742 10743 11031 3 10742 11031 11030 3 10743 10744 11032 3 10743 11032 11031 3 10744 10745 11033 3 10744 11033 11032 3 10745 10746 11034 3 10745 11034 11033 3 10746 10747 11035 3 10746 11035 11034 3 10747 
10748 11036 3 10747 11036 11035 3 10748 10749 11037 3 10748 11037 11036 3 10749 10750 11038 3 10749 11038 11037 3 10750 10751 11039 3 10750 11039 11038 3 10751 10752 11040 3 10751 11040 11039 3 10752 10753 11041 3 10752 11041 11040 3 10753 10754 11042 3 10753 11042 11041 3 10754 10755 11043 3 10754 11043 11042 3 10755 10756 11044 3 10755 11044 11043 3 10756 10757 11045 3 10756 11045 11044 3 10757 10758 11046 3 10757 11046 11045 3 10758 10759 11047 3 10758 11047 11046 3 10759 10760 11048 3 10759 11048 11047 3 10760 10761 11049 3 10760 11049 11048 3 10761 10762 11050 3 10761 11050 11049 3 10762 10763 11051 3 10762 11051 11050 3 10763 10764 11052 3 10763 11052 11051 3 10765 11053 10766 3 11054 10766 11053 3 10765 10835 11053 3 11123 11053 10835 3 10766 11054 10767 3 11055 10767 11054 3 10767 11055 10768 3 11056 10768 11055 3 10768 11056 10769 3 11057 10769 11056 3 10769 11057 10770 3 11058 10770 11057 3 10770 11058 10771 3 11059 10771 11058 3 10771 11059 10772 3 11060 10772 11059 3 10772 11060 10773 3 11061 10773 11060 3 10773 11061 10774 3 11062 10774 11061 3 10774 11062 10775 3 11063 10775 11062 3 10775 11063 10776 3 11064 10776 11063 3 10776 11064 10777 3 11065 10777 11064 3 10777 11065 10778 3 11066 10778 11065 3 10778 11066 10779 3 11067 10779 11066 3 10779 11067 10780 3 11068 10780 11067 3 10780 11068 10781 3 11069 10781 11068 3 10781 11069 10782 3 11070 10782 11069 3 10782 11070 10783 3 11071 10783 11070 3 10783 11071 10784 3 11072 10784 11071 3 10784 11072 10785 3 11073 10785 11072 3 10785 11073 10786 3 11074 10786 11073 3 10786 11074 10787 3 11075 10787 11074 3 10787 11075 10788 3 11076 10788 11075 3 10788 11076 10789 3 11077 10789 11076 3 10789 11077 10790 3 11078 10790 11077 3 10790 11078 10791 3 11079 10791 11078 3 10791 11079 10792 3 11080 10792 11079 3 10792 11080 10793 3 11081 10793 11080 3 10793 11081 10794 3 11082 10794 11081 3 10794 11082 10795 3 11083 10795 11082 3 10795 11083 10796 3 11084 10796 11083 3 10796 11084 10797 3 11085 10797 11084 3 10797 
11085 10798 3 11086 10798 11085 3 10798 11086 10799 3 11087 10799 11086 3 10799 11087 10800 3 11088 10800 11087 3 10800 11088 10801 3 11089 10801 11088 3 10801 11089 10802 3 11090 10802 11089 3 10802 11090 10803 3 11091 10803 11090 3 10803 11091 10804 3 11092 10804 11091 3 10804 11092 10805 3 11093 10805 11092 3 10805 11093 10806 3 11094 10806 11093 3 10806 11094 10807 3 11095 10807 11094 3 10807 11095 10808 3 11096 10808 11095 3 10808 11096 10809 3 11097 10809 11096 3 10809 11097 10810 3 11098 10810 11097 3 10810 11098 10811 3 11099 10811 11098 3 10811 11099 10812 3 11100 10812 11099 3 10812 11100 10813 3 11101 10813 11100 3 10813 11101 10814 3 11102 10814 11101 3 10814 11102 10815 3 11103 10815 11102 3 10815 11103 10816 3 11104 10816 11103 3 10816 11104 10817 3 11105 10817 11104 3 10817 11105 11106 3 10817 11106 10818 3 10818 11106 11107 3 10818 11107 10819 3 10819 11107 11108 3 10819 11108 10820 3 10820 11108 11109 3 10820 11109 10821 3 10821 11109 11110 3 10821 11110 10822 3 10822 11110 11111 3 10822 11111 10823 3 10823 11111 11112 3 10823 11112 10824 3 10824 11112 11113 3 10824 11113 10825 3 10825 11113 11114 3 10825 11114 10826 3 10826 11114 11115 3 10826 11115 10827 3 10827 11115 11116 3 10827 11116 10828 3 10828 11116 11117 3 10828 11117 10829 3 10829 11117 11118 3 10829 11118 10830 3 10830 11118 11119 3 10830 11119 10831 3 10831 11119 11120 3 10831 11120 10832 3 10832 11120 11121 3 10832 11121 10833 3 10833 11121 11124 3 10833 11124 10836 3 10834 11122 11123 3 10834 11123 10835 3 10834 10839 11122 3 11127 11122 10839 3 10836 11124 11125 3 10836 11125 10837 3 10837 11125 11128 3 10837 11128 10840 3 10838 11126 11127 3 10838 11127 10839 3 10838 10842 11126 3 11130 11126 10842 3 10840 11128 11129 3 10840 11129 10841 3 10841 11129 11131 3 10841 11131 10843 3 10842 10844 11130 3 11132 11130 10844 3 10843 11131 11133 3 10843 11133 10845 3 10844 10846 11132 3 11134 11132 10846 3 10845 11133 11135 3 10845 11135 10847 3 10846 10848 11134 3 11136 11134 10848 3 10847 
11135 11137 3 10847 11137 10849 3 10848 10850 11136 3 11138 11136 10850 3 10849 11137 11139 3 10849 11139 10851 3 10850 10852 11138 3 11140 11138 10852 3 10851 11139 11141 3 10851 11141 10853 3 10852 10854 11142 3 10852 11142 11140 3 10853 11141 11143 3 10853 11143 10855 3 10854 10856 11144 3 10854 11144 11142 3 10855 11143 11145 3 10855 11145 10857 3 10856 10858 11146 3 10856 11146 11144 3 10857 11145 11147 3 10857 11147 10859 3 10858 10860 11148 3 10858 11148 11146 3 10859 11147 11149 3 10859 11149 10861 3 10860 10862 11150 3 10860 11150 11148 3 10861 11149 11151 3 10861 11151 10863 3 10862 10864 11152 3 10862 11152 11150 3 10863 11151 11153 3 10863 11153 10865 3 10864 10866 11154 3 10864 11154 11152 3 10865 11153 11155 3 10865 11155 10867 3 10866 10868 11156 3 10866 11156 11154 3 10867 11155 11157 3 10867 11157 10869 3 10868 10870 11158 3 10868 11158 11156 3 10869 11157 11159 3 10869 11159 10871 3 10870 10872 11160 3 10870 11160 11158 3 10871 11159 11161 3 10871 11161 10873 3 10872 10874 11162 3 10872 11162 11160 3 10873 11161 11163 3 10873 11163 10875 3 10874 10876 11164 3 10874 11164 11162 3 10875 11163 11165 3 10875 11165 10877 3 10876 10878 11166 3 10876 11166 11164 3 10877 11165 11167 3 10877 11167 10879 3 10878 10880 11168 3 10878 11168 11166 3 10879 11167 11169 3 10879 11169 10881 3 10880 10882 11170 3 10880 11170 11168 3 10881 11169 11171 3 10881 11171 10883 3 10882 10884 11172 3 10882 11172 11170 3 10883 11171 10885 3 11173 10885 11171 3 10884 10886 11174 3 10884 11174 11172 3 10885 11173 10887 3 11175 10887 11173 3 10886 10888 11176 3 10886 11176 11174 3 10887 11175 10889 3 11177 10889 11175 3 10888 10890 11178 3 10888 11178 11176 3 10889 11177 10891 3 11179 10891 11177 3 10890 10892 11180 3 10890 11180 11178 3 10891 11179 10893 3 11181 10893 11179 3 10892 10894 11182 3 10892 11182 11180 3 10893 11181 10895 3 11183 10895 11181 3 10894 10896 11184 3 10894 11184 11182 3 10895 11183 10897 3 11185 10897 11183 3 10896 10898 11186 3 10896 11186 11184 3 10897 
11185 10899 3 11187 10899 11185 3 10898 10900 11188 3 10898 11188 11186 3 10899 11187 10901 3 11189 10901 11187 3 10900 10902 11190 3 10900 11190 11188 3 10901 11189 10903 3 11191 10903 11189 3 10902 10904 11192 3 10902 11192 11190 3 10903 11191 10905 3 11193 10905 11191 3 10904 10906 11194 3 10904 11194 11192 3 10905 11193 10907 3 11195 10907 11193 3 10906 10908 11196 3 10906 11196 11194 3 10907 11195 10909 3 11197 10909 11195 3 10908 10910 11198 3 10908 11198 11196 3 10909 11197 10911 3 11199 10911 11197 3 10910 10912 11200 3 10910 11200 11198 3 10911 11199 10913 3 11201 10913 11199 3 10912 10914 11202 3 10912 11202 11200 3 10913 11201 10915 3 11203 10915 11201 3 10914 10916 11204 3 10914 11204 11202 3 10915 11203 10917 3 11205 10917 11203 3 10916 10918 11206 3 10916 11206 11204 3 10917 11205 10919 3 11207 10919 11205 3 10918 10920 11206 3 11208 11206 10920 3 10919 11207 10921 3 11209 10921 11207 3 10920 10922 11208 3 11210 11208 10922 3 10921 11209 10923 3 11211 10923 11209 3 10922 10924 11210 3 11212 11210 10924 3 10923 11211 10925 3 11213 10925 11211 3 10924 10926 11212 3 11214 11212 10926 3 10925 11213 10927 3 11215 10927 11213 3 10926 10928 11214 3 11216 11214 10928 3 10927 11215 10929 3 11217 10929 11215 3 10928 10930 11216 3 11218 11216 10930 3 10929 11217 10931 3 11219 10931 11217 3 10930 10932 11218 3 11220 11218 10932 3 10931 11219 10933 3 11221 10933 11219 3 10932 10934 11220 3 11222 11220 10934 3 10933 11221 10935 3 11223 10935 11221 3 10934 10936 11222 3 11224 11222 10936 3 10935 11223 10937 3 11225 10937 11223 3 10936 10938 11224 3 11226 11224 10938 3 10937 11225 10939 3 11227 10939 11225 3 10938 10940 11226 3 11228 11226 10940 3 10939 11227 10941 3 11229 10941 11227 3 10940 10942 11228 3 11230 11228 10942 3 10941 11229 10943 3 11231 10943 11229 3 10942 10944 11230 3 11232 11230 10944 3 10943 11231 10945 3 11233 10945 11231 3 10944 10946 11232 3 11234 11232 10946 3 10945 11233 10947 3 11235 10947 11233 3 10946 10948 11234 3 11236 11234 10948 3 10947 
11235 11237 3 10947 11237 10949 3 10948 10950 11236 3 11238 11236 10950 3 10949 11237 11239 3 10949 11239 10951 3 10950 10952 11238 3 11240 11238 10952 3 10951 11239 11241 3 10951 11241 10953 3 10952 10954 11240 3 11242 11240 10954 3 10953 11241 11243 3 10953 11243 10955 3 10954 10956 11242 3 11244 11242 10956 3 10955 11243 11245 3 10955 11245 10957 3 10956 10958 11244 3 11246 11244 10958 3 10957 11245 11247 3 10957 11247 10959 3 10958 10960 11246 3 11248 11246 10960 3 10959 11247 11249 3 10959 11249 10961 3 10960 10962 11248 3 11250 11248 10962 3 10961 11249 11251 3 10961 11251 10963 3 10962 10964 11250 3 11252 11250 10964 3 10963 11251 11253 3 10963 11253 10965 3 10964 10966 11252 3 11254 11252 10966 3 10965 11253 11255 3 10965 11255 10967 3 10966 10968 11254 3 11256 11254 10968 3 10967 11255 11257 3 10967 11257 10969 3 10968 10970 11256 3 11258 11256 10970 3 10969 11257 11259 3 10969 11259 10971 3 10970 10972 11258 3 11260 11258 10972 3 10971 11259 11261 3 10971 11261 10973 3 10972 10974 11260 3 11262 11260 10974 3 10973 11261 11263 3 10973 11263 10975 3 10974 10976 11262 3 11264 11262 10976 3 10975 11263 11267 3 10975 11267 10979 3 10976 10977 11264 3 11265 11264 10977 3 10977 10980 11265 3 11268 11265 10980 3 10978 10979 11266 3 11267 11266 10979 3 10978 11266 11271 3 10978 11271 10983 3 10980 10981 11268 3 11269 11268 10981 3 10981 10984 11272 3 10981 11272 11269 3 10982 10983 11271 3 10982 11271 11270 3 10982 11270 11340 3 10982 11340 11052 3 10984 10985 11273 3 10984 11273 11272 3 10985 10986 11274 3 10985 11274 11273 3 10986 10987 11275 3 10986 11275 11274 3 10987 10988 11276 3 10987 11276 11275 3 10988 10989 11277 3 10988 11277 11276 3 10989 10990 11278 3 10989 11278 11277 3 10990 10991 11279 3 10990 11279 11278 3 10991 10992 11280 3 10991 11280 11279 3 10992 10993 11281 3 10992 11281 11280 3 10993 10994 11282 3 10993 11282 11281 3 10994 10995 11283 3 10994 11283 11282 3 10995 10996 11284 3 10995 11284 11283 3 10996 10997 11285 3 10996 11285 11284 3 10997 
10998 11286 3 10997 11286 11285 3 10998 10999 11287 3 10998 11287 11286 3 10999 11000 11288 3 10999 11288 11287 3 11000 11001 11289 3 11000 11289 11288 3 11001 11002 11290 3 11001 11290 11289 3 11002 11003 11291 3 11002 11291 11290 3 11003 11004 11292 3 11003 11292 11291 3 11004 11005 11293 3 11004 11293 11292 3 11005 11006 11294 3 11005 11294 11293 3 11006 11007 11295 3 11006 11295 11294 3 11007 11008 11296 3 11007 11296 11295 3 11008 11009 11297 3 11008 11297 11296 3 11009 11010 11298 3 11009 11298 11297 3 11010 11011 11299 3 11010 11299 11298 3 11011 11012 11300 3 11011 11300 11299 3 11012 11013 11301 3 11012 11301 11300 3 11013 11014 11302 3 11013 11302 11301 3 11014 11015 11303 3 11014 11303 11302 3 11015 11016 11304 3 11015 11304 11303 3 11016 11017 11305 3 11016 11305 11304 3 11017 11018 11306 3 11017 11306 11305 3 11018 11019 11307 3 11018 11307 11306 3 11019 11020 11308 3 11019 11308 11307 3 11020 11021 11309 3 11020 11309 11308 3 11021 11022 11310 3 11021 11310 11309 3 11022 11023 11311 3 11022 11311 11310 3 11023 11024 11312 3 11023 11312 11311 3 11024 11025 11313 3 11024 11313 11312 3 11025 11026 11314 3 11025 11314 11313 3 11026 11027 11315 3 11026 11315 11314 3 11027 11028 11316 3 11027 11316 11315 3 11028 11029 11317 3 11028 11317 11316 3 11029 11030 11318 3 11029 11318 11317 3 11030 11031 11319 3 11030 11319 11318 3 11031 11032 11320 3 11031 11320 11319 3 11032 11033 11321 3 11032 11321 11320 3 11033 11034 11322 3 11033 11322 11321 3 11034 11035 11323 3 11034 11323 11322 3 11035 11036 11324 3 11035 11324 11323 3 11036 11037 11325 3 11036 11325 11324 3 11037 11038 11326 3 11037 11326 11325 3 11038 11039 11327 3 11038 11327 11326 3 11039 11040 11328 3 11039 11328 11327 3 11040 11041 11329 3 11040 11329 11328 3 11041 11042 11330 3 11041 11330 11329 3 11042 11043 11331 3 11042 11331 11330 3 11043 11044 11332 3 11043 11332 11331 3 11044 11045 11333 3 11044 11333 11332 3 11045 11046 11334 3 11045 11334 11333 3 11046 11047 11334 3 11335 11334 11047 3 11047 
11048 11335 3 11336 11335 11048 3 11048 11049 11336 3 11337 11336 11049 3 11049 11050 11337 3 11338 11337 11050 3 11050 11051 11338 3 11339 11338 11051 3 11051 11052 11339 3 11340 11339 11052 3 11053 11341 11054 3 11342 11054 11341 3 11053 11123 11341 3 11411 11341 11123 3 11054 11342 11055 3 11343 11055 11342 3 11055 11343 11056 3 11344 11056 11343 3 11056 11344 11057 3 11345 11057 11344 3 11057 11345 11058 3 11346 11058 11345 3 11058 11346 11059 3 11347 11059 11346 3 11059 11347 11060 3 11348 11060 11347 3 11060 11348 11061 3 11349 11061 11348 3 11061 11349 11062 3 11350 11062 11349 3 11062 11350 11063 3 11351 11063 11350 3 11063 11351 11064 3 11352 11064 11351 3 11064 11352 11065 3 11353 11065 11352 3 11065 11353 11066 3 11354 11066 11353 3 11066 11354 11067 3 11355 11067 11354 3 11067 11355 11068 3 11356 11068 11355 3 11068 11356 11069 3 11357 11069 11356 3 11069 11357 11070 3 11358 11070 11357 3 11070 11358 11071 3 11359 11071 11358 3 11071 11359 11072 3 11360 11072 11359 3 11072 11360 11073 3 11361 11073 11360 3 11073 11361 11074 3 11362 11074 11361 3 11074 11362 11075 3 11363 11075 11362 3 11075 11363 11076 3 11364 11076 11363 3 11076 11364 11365 3 11076 11365 11077 3 11077 11365 11366 3 11077 11366 11078 3 11078 11366 11367 3 11078 11367 11079 3 11079 11367 11368 3 11079 11368 11080 3 11080 11368 11369 3 11080 11369 11081 3 11081 11369 11370 3 11081 11370 11082 3 11082 11370 11371 3 11082 11371 11083 3 11083 11371 11372 3 11083 11372 11084 3 11084 11372 11373 3 11084 11373 11085 3 11085 11373 11374 3 11085 11374 11086 3 11086 11374 11375 3 11086 11375 11087 3 11087 11375 11376 3 11087 11376 11088 3 11088 11376 11377 3 11088 11377 11089 3 11089 11377 11378 3 11089 11378 11090 3 11090 11378 11379 3 11090 11379 11091 3 11091 11379 11380 3 11091 11380 11092 3 11092 11380 11381 3 11092 11381 11093 3 11093 11381 11382 3 11093 11382 11094 3 11094 11382 11383 3 11094 11383 11095 3 11095 11383 11384 3 11095 11384 11096 3 11096 11384 11385 3 11096 11385 11097 3 11097 
11385 11386 3 11097 11386 11098 3 11098 11386 11387 3 11098 11387 11099 3 11099 11387 11388 3 11099 11388 11100 3 11100 11388 11389 3 11100 11389 11101 3 11101 11389 11390 3 11101 11390 11102 3 11102 11390 11391 3 11102 11391 11103 3 11103 11391 11392 3 11103 11392 11104 3 11104 11392 11393 3 11104 11393 11105 3 11105 11393 11394 3 11105 11394 11106 3 11106 11394 11395 3 11106 11395 11107 3 11107 11395 11396 3 11107 11396 11108 3 11108 11396 11397 3 11108 11397 11109 3 11109 11397 11398 3 11109 11398 11110 3 11110 11398 11399 3 11110 11399 11111 3 11111 11399 11400 3 11111 11400 11112 3 11112 11400 11401 3 11112 11401 11113 3 11113 11401 11402 3 11113 11402 11114 3 11114 11402 11403 3 11114 11403 11115 3 11115 11403 11404 3 11115 11404 11116 3 11116 11404 11405 3 11116 11405 11117 3 11117 11405 11406 3 11117 11406 11118 3 11118 11406 11407 3 11118 11407 11119 3 11119 11407 11408 3 11119 11408 11120 3 11120 11408 11409 3 11120 11409 11121 3 11121 11409 11412 3 11121 11412 11124 3 11122 11410 11411 3 11122 11411 11123 3 11122 11127 11415 3 11122 11415 11410 3 11124 11412 11413 3 11124 11413 11125 3 11125 11413 11416 3 11125 11416 11128 3 11126 11414 11415 3 11126 11415 11127 3 11126 11130 11418 3 11126 11418 11414 3 11128 11416 11417 3 11128 11417 11129 3 11129 11417 11419 3 11129 11419 11131 3 11130 11132 11420 3 11130 11420 11418 3 11131 11419 11421 3 11131 11421 11133 3 11132 11134 11422 3 11132 11422 11420 3 11133 11421 11423 3 11133 11423 11135 3 11134 11136 11424 3 11134 11424 11422 3 11135 11423 11425 3 11135 11425 11137 3 11136 11138 11426 3 11136 11426 11424 3 11137 11425 11427 3 11137 11427 11139 3 11138 11140 11428 3 11138 11428 11426 3 11139 11427 11429 3 11139 11429 11141 3 11140 11142 11430 3 11140 11430 11428 3 11141 11429 11431 3 11141 11431 11143 3 11142 11144 11432 3 11142 11432 11430 3 11143 11431 11145 3 11433 11145 11431 3 11144 11146 11434 3 11144 11434 11432 3 11145 11433 11147 3 11435 11147 11433 3 11146 11148 11436 3 11146 11436 11434 3 11147 
11435 11149 3 11437 11149 11435 3 11148 11150 11438 3 11148 11438 11436 3 11149 11437 11151 3 11439 11151 11437 3 11150 11152 11440 3 11150 11440 11438 3 11151 11439 11153 3 11441 11153 11439 3 11152 11154 11442 3 11152 11442 11440 3 11153 11441 11155 3 11443 11155 11441 3 11154 11156 11444 3 11154 11444 11442 3 11155 11443 11157 3 11445 11157 11443 3 11156 11158 11446 3 11156 11446 11444 3 11157 11445 11159 3 11447 11159 11445 3 11158 11160 11448 3 11158 11448 11446 3 11159 11447 11161 3 11449 11161 11447 3 11160 11162 11450 3 11160 11450 11448 3 11161 11449 11163 3 11451 11163 11449 3 11162 11164 11452 3 11162 11452 11450 3 11163 11451 11165 3 11453 11165 11451 3 11164 11166 11454 3 11164 11454 11452 3 11165 11453 11167 3 11455 11167 11453 3 11166 11168 11456 3 11166 11456 11454 3 11167 11455 11169 3 11457 11169 11455 3 11168 11170 11458 3 11168 11458 11456 3 11169 11457 11171 3 11459 11171 11457 3 11170 11172 11460 3 11170 11460 11458 3 11171 11459 11173 3 11461 11173 11459 3 11172 11174 11462 3 11172 11462 11460 3 11173 11461 11175 3 11463 11175 11461 3 11174 11176 11464 3 11174 11464 11462 3 11175 11463 11177 3 11465 11177 11463 3 11176 11178 11464 3 11466 11464 11178 3 11177 11465 11179 3 11467 11179 11465 3 11178 11180 11466 3 11468 11466 11180 3 11179 11467 11181 3 11469 11181 11467 3 11180 11182 11468 3 11470 11468 11182 3 11181 11469 11183 3 11471 11183 11469 3 11182 11184 11470 3 11472 11470 11184 3 11183 11471 11185 3 11473 11185 11471 3 11184 11186 11472 3 11474 11472 11186 3 11185 11473 11187 3 11475 11187 11473 3 11186 11188 11474 3 11476 11474 11188 3 11187 11475 11189 3 11477 11189 11475 3 11188 11190 11476 3 11478 11476 11190 3 11189 11477 11191 3 11479 11191 11477 3 11190 11192 11478 3 11480 11478 11192 3 11191 11479 11193 3 11481 11193 11479 3 11192 11194 11480 3 11482 11480 11194 3 11193 11481 11195 3 11483 11195 11481 3 11194 11196 11482 3 11484 11482 11196 3 11195 11483 11197 3 11485 11197 11483 3 11196 11198 11484 3 11486 11484 11198 3 11197 
11485 11199 3 11487 11199 11485 3 11198 11200 11486 3 11488 11486 11200 3 11199 11487 11201 3 11489 11201 11487 3 11200 11202 11488 3 11490 11488 11202 3 11201 11489 11203 3 11491 11203 11489 3 11202 11204 11490 3 11492 11490 11204 3 11203 11491 11205 3 11493 11205 11491 3 11204 11206 11492 3 11494 11492 11206 3 11205 11493 11207 3 11495 11207 11493 3 11206 11208 11494 3 11496 11494 11208 3 11207 11495 11497 3 11207 11497 11209 3 11208 11210 11496 3 11498 11496 11210 3 11209 11497 11499 3 11209 11499 11211 3 11210 11212 11498 3 11500 11498 11212 3 11211 11499 11501 3 11211 11501 11213 3 11212 11214 11500 3 11502 11500 11214 3 11213 11501 11503 3 11213 11503 11215 3 11214 11216 11502 3 11504 11502 11216 3 11215 11503 11505 3 11215 11505 11217 3 11216 11218 11504 3 11506 11504 11218 3 11217 11505 11507 3 11217 11507 11219 3 11218 11220 11506 3 11508 11506 11220 3 11219 11507 11509 3 11219 11509 11221 3 11220 11222 11508 3 11510 11508 11222 3 11221 11509 11511 3 11221 11511 11223 3 11222 11224 11510 3 11512 11510 11224 3 11223 11511 11513 3 11223 11513 11225 3 11224 11226 11512 3 11514 11512 11226 3 11225 11513 11515 3 11225 11515 11227 3 11226 11228 11514 3 11516 11514 11228 3 11227 11515 11517 3 11227 11517 11229 3 11228 11230 11516 3 11518 11516 11230 3 11229 11517 11519 3 11229 11519 11231 3 11230 11232 11518 3 11520 11518 11232 3 11231 11519 11521 3 11231 11521 11233 3 11232 11234 11520 3 11522 11520 11234 3 11233 11521 11523 3 11233 11523 11235 3 11234 11236 11522 3 11524 11522 11236 3 11235 11523 11525 3 11235 11525 11237 3 11236 11238 11524 3 11526 11524 11238 3 11237 11525 11527 3 11237 11527 11239 3 11238 11240 11526 3 11528 11526 11240 3 11239 11527 11529 3 11239 11529 11241 3 11240 11242 11528 3 11530 11528 11242 3 11241 11529 11531 3 11241 11531 11243 3 11242 11244 11532 3 11242 11532 11530 3 11243 11531 11533 3 11243 11533 11245 3 11244 11246 11534 3 11244 11534 11532 3 11245 11533 11535 3 11245 11535 11247 3 11246 11248 11536 3 11246 11536 11534 3 11247 
11535 11537 3 11247 11537 11249 3 11248 11250 11538 3 11248 11538 11536 3 11249 11537 11539 3 11249 11539 11251 3 11250 11252 11540 3 11250 11540 11538 3 11251 11539 11541 3 11251 11541 11253 3 11252 11254 11542 3 11252 11542 11540 3 11253 11541 11543 3 11253 11543 11255 3 11254 11256 11544 3 11254 11544 11542 3 11255 11543 11545 3 11255 11545 11257 3 11256 11258 11546 3 11256 11546 11544 3 11257 11545 11547 3 11257 11547 11259 3 11258 11260 11548 3 11258 11548 11546 3 11259 11547 11549 3 11259 11549 11261 3 11260 11262 11550 3 11260 11550 11548 3 11261 11549 11551 3 11261 11551 11263 3 11262 11264 11552 3 11262 11552 11550 3 11263 11551 11555 3 11263 11555 11267 3 11264 11265 11553 3 11264 11553 11552 3 11265 11268 11556 3 11265 11556 11553 3 11266 11267 11555 3 11266 11555 11554 3 11266 11554 11559 3 11266 11559 11271 3 11268 11269 11557 3 11268 11557 11556 3 11269 11272 11560 3 11269 11560 11557 3 11270 11271 11559 3 11270 11559 11558 3 11270 11558 11628 3 11270 11628 11340 3 11272 11273 11561 3 11272 11561 11560 3 11273 11274 11562 3 11273 11562 11561 3 11274 11275 11563 3 11274 11563 11562 3 11275 11276 11564 3 11275 11564 11563 3 11276 11277 11565 3 11276 11565 11564 3 11277 11278 11566 3 11277 11566 11565 3 11278 11279 11567 3 11278 11567 11566 3 11279 11280 11568 3 11279 11568 11567 3 11280 11281 11569 3 11280 11569 11568 3 11281 11282 11570 3 11281 11570 11569 3 11282 11283 11571 3 11282 11571 11570 3 11283 11284 11572 3 11283 11572 11571 3 11284 11285 11573 3 11284 11573 11572 3 11285 11286 11574 3 11285 11574 11573 3 11286 11287 11575 3 11286 11575 11574 3 11287 11288 11576 3 11287 11576 11575 3 11288 11289 11577 3 11288 11577 11576 3 11289 11290 11578 3 11289 11578 11577 3 11290 11291 11579 3 11290 11579 11578 3 11291 11292 11580 3 11291 11580 11579 3 11292 11293 11581 3 11292 11581 11580 3 11293 11294 11582 3 11293 11582 11581 3 11294 11295 11583 3 11294 11583 11582 3 11295 11296 11584 3 11295 11584 11583 3 11296 11297 11585 3 11296 11585 11584 3 11297 
11298 11586 3 11297 11586 11585 3 11298 11299 11587 3 11298 11587 11586 3 11299 11300 11588 3 11299 11588 11587 3 11300 11301 11589 3 11300 11589 11588 3 11301 11302 11590 3 11301 11590 11589 3 11302 11303 11591 3 11302 11591 11590 3 11303 11304 11592 3 11303 11592 11591 3 11304 11305 11593 3 11304 11593 11592 3 11305 11306 11594 3 11305 11594 11593 3 11306 11307 11595 3 11306 11595 11594 3 11307 11308 11595 3 11596 11595 11308 3 11308 11309 11596 3 11597 11596 11309 3 11309 11310 11597 3 11598 11597 11310 3 11310 11311 11598 3 11599 11598 11311 3 11311 11312 11599 3 11600 11599 11312 3 11312 11313 11600 3 11601 11600 11313 3 11313 11314 11601 3 11602 11601 11314 3 11314 11315 11602 3 11603 11602 11315 3 11315 11316 11603 3 11604 11603 11316 3 11316 11317 11604 3 11605 11604 11317 3 11317 11318 11605 3 11606 11605 11318 3 11318 11319 11606 3 11607 11606 11319 3 11319 11320 11607 3 11608 11607 11320 3 11320 11321 11608 3 11609 11608 11321 3 11321 11322 11609 3 11610 11609 11322 3 11322 11323 11610 3 11611 11610 11323 3 11323 11324 11611 3 11612 11611 11324 3 11324 11325 11612 3 11613 11612 11325 3 11325 11326 11613 3 11614 11613 11326 3 11326 11327 11614 3 11615 11614 11327 3 11327 11328 11615 3 11616 11615 11328 3 11328 11329 11616 3 11617 11616 11329 3 11329 11330 11617 3 11618 11617 11330 3 11330 11331 11618 3 11619 11618 11331 3 11331 11332 11619 3 11620 11619 11332 3 11332 11333 11620 3 11621 11620 11333 3 11333 11334 11621 3 11622 11621 11334 3 11334 11335 11622 3 11623 11622 11335 3 11335 11336 11623 3 11624 11623 11336 3 11336 11337 11624 3 11625 11624 11337 3 11337 11338 11625 3 11626 11625 11338 3 11338 11339 11626 3 11627 11626 11339 3 11339 11340 11627 3 11628 11627 11340 3 11341 11629 11630 3 11341 11630 11342 3 11341 11411 11699 3 11341 11699 11629 3 11342 11630 11631 3 11342 11631 11343 3 11343 11631 11632 3 11343 11632 11344 3 11344 11632 11633 3 11344 11633 11345 3 11345 11633 11634 3 11345 11634 11346 3 11346 11634 11635 3 11346 11635 11347 3 11347 
11635 11636 3 11347 11636 11348 3 11348 11636 11637 3 11348 11637 11349 3 11349 11637 11638 3 11349 11638 11350 3 11350 11638 11639 3 11350 11639 11351 3 11351 11639 11640 3 11351 11640 11352 3 11352 11640 11641 3 11352 11641 11353 3 11353 11641 11642 3 11353 11642 11354 3 11354 11642 11643 3 11354 11643 11355 3 11355 11643 11644 3 11355 11644 11356 3 11356 11644 11645 3 11356 11645 11357 3 11357 11645 11646 3 11357 11646 11358 3 11358 11646 11647 3 11358 11647 11359 3 11359 11647 11648 3 11359 11648 11360 3 11360 11648 11649 3 11360 11649 11361 3 11361 11649 11650 3 11361 11650 11362 3 11362 11650 11651 3 11362 11651 11363 3 11363 11651 11652 3 11363 11652 11364 3 11364 11652 11653 3 11364 11653 11365 3 11365 11653 11654 3 11365 11654 11366 3 11366 11654 11655 3 11366 11655 11367 3 11367 11655 11656 3 11367 11656 11368 3 11368 11656 11657 3 11368 11657 11369 3 11369 11657 11658 3 11369 11658 11370 3 11370 11658 11659 3 11370 11659 11371 3 11371 11659 11660 3 11371 11660 11372 3 11372 11660 11661 3 11372 11661 11373 3 11373 11661 11662 3 11373 11662 11374 3 11374 11662 11663 3 11374 11663 11375 3 11375 11663 11664 3 11375 11664 11376 3 11376 11664 11665 3 11376 11665 11377 3 11377 11665 11666 3 11377 11666 11378 3 11378 11666 11667 3 11378 11667 11379 3 11379 11667 11668 3 11379 11668 11380 3 11380 11668 11669 3 11380 11669 11381 3 11381 11669 11670 3 11381 11670 11382 3 11382 11670 11671 3 11382 11671 11383 3 11383 11671 11672 3 11383 11672 11384 3 11384 11672 11673 3 11384 11673 11385 3 11385 11673 11674 3 11385 11674 11386 3 11386 11674 11675 3 11386 11675 11387 3 11387 11675 11676 3 11387 11676 11388 3 11388 11676 11677 3 11388 11677 11389 3 11389 11677 11678 3 11389 11678 11390 3 11390 11678 11679 3 11390 11679 11391 3 11391 11679 11680 3 11391 11680 11392 3 11392 11680 11681 3 11392 11681 11393 3 11393 11681 11682 3 11393 11682 11394 3 11394 11682 11683 3 11394 11683 11395 3 11395 11683 11684 3 11395 11684 11396 3 11396 11684 11685 3 11396 11685 11397 3 11397 
11685 11686 3 11397 11686 11398 3 11398 11686 11687 3 11398 11687 11399 3 11399 11687 11688 3 11399 11688 11400 3 11400 11688 11689 3 11400 11689 11401 3 11401 11689 11690 3 11401 11690 11402 3 11402 11690 11691 3 11402 11691 11403 3 11403 11691 11692 3 11403 11692 11404 3 11404 11692 11693 3 11404 11693 11405 3 11405 11693 11406 3 11694 11406 11693 3 11406 11694 11407 3 11695 11407 11694 3 11407 11695 11408 3 11696 11408 11695 3 11408 11696 11409 3 11697 11409 11696 3 11409 11697 11412 3 11700 11412 11697 3 11410 11698 11411 3 11699 11411 11698 3 11410 11415 11703 3 11410 11703 11698 3 11412 11700 11413 3 11701 11413 11700 3 11413 11701 11416 3 11704 11416 11701 3 11414 11702 11415 3 11703 11415 11702 3 11414 11418 11706 3 11414 11706 11702 3 11416 11704 11417 3 11705 11417 11704 3 11417 11705 11419 3 11707 11419 11705 3 11418 11420 11708 3 11418 11708 11706 3 11419 11707 11421 3 11709 11421 11707 3 11420 11422 11710 3 11420 11710 11708 3 11421 11709 11423 3 11711 11423 11709 3 11422 11424 11712 3 11422 11712 11710 3 11423 11711 11425 3 11713 11425 11711 3 11424 11426 11714 3 11424 11714 11712 3 11425 11713 11427 3 11715 11427 11713 3 11426 11428 11716 3 11426 11716 11714 3 11427 11715 11429 3 11717 11429 11715 3 11428 11430 11718 3 11428 11718 11716 3 11429 11717 11431 3 11719 11431 11717 3 11430 11432 11720 3 11430 11720 11718 3 11431 11719 11433 3 11721 11433 11719 3 11432 11434 11722 3 11432 11722 11720 3 11433 11721 11435 3 11723 11435 11721 3 11434 11436 11724 3 11434 11724 11722 3 11435 11723 11437 3 11725 11437 11723 3 11436 11438 11726 3 11436 11726 11724 3 11437 11725 11439 3 11727 11439 11725 3 11438 11440 11726 3 11728 11726 11440 3 11439 11727 11441 3 11729 11441 11727 3 11440 11442 11728 3 11730 11728 11442 3 11441 11729 11443 3 11731 11443 11729 3 11442 11444 11730 3 11732 11730 11444 3 11443 11731 11445 3 11733 11445 11731 3 11444 11446 11732 3 11734 11732 11446 3 11445 11733 11447 3 11735 11447 11733 3 11446 11448 11734 3 11736 11734 11448 3 11447 
11735 11449 3 11737 11449 11735 3 11448 11450 11736 3 11738 11736 11450 3 11449 11737 11451 3 11739 11451 11737 3 11450 11452 11738 3 11740 11738 11452 3 11451 11739 11453 3 11741 11453 11739 3 11452 11454 11740 3 11742 11740 11454 3 11453 11741 11455 3 11743 11455 11741 3 11454 11456 11742 3 11744 11742 11456 3 11455 11743 11457 3 11745 11457 11743 3 11456 11458 11744 3 11746 11744 11458 3 11457 11745 11459 3 11747 11459 11745 3 11458 11460 11746 3 11748 11746 11460 3 11459 11747 11461 3 11749 11461 11747 3 11460 11462 11748 3 11750 11748 11462 3 11461 11749 11463 3 11751 11463 11749 3 11462 11464 11750 3 11752 11750 11464 3 11463 11751 11465 3 11753 11465 11751 3 11464 11466 11752 3 11754 11752 11466 3 11465 11753 11467 3 11755 11467 11753 3 11466 11468 11754 3 11756 11754 11468 3 11467 11755 11469 3 11757 11469 11755 3 11468 11470 11756 3 11758 11756 11470 3 11469 11757 11471 3 11759 11471 11757 3 11470 11472 11758 3 11760 11758 11472 3 11471 11759 11761 3 11471 11761 11473 3 11472 11474 11760 3 11762 11760 11474 3 11473 11761 11763 3 11473 11763 11475 3 11474 11476 11762 3 11764 11762 11476 3 11475 11763 11765 3 11475 11765 11477 3 11476 11478 11764 3 11766 11764 11478 3 11477 11765 11767 3 11477 11767 11479 3 11478 11480 11766 3 11768 11766 11480 3 11479 11767 11769 3 11479 11769 11481 3 11480 11482 11768 3 11770 11768 11482 3 11481 11769 11771 3 11481 11771 11483 3 11482 11484 11770 3 11772 11770 11484 3 11483 11771 11773 3 11483 11773 11485 3 11484 11486 11772 3 11774 11772 11486 3 11485 11773 11775 3 11485 11775 11487 3 11486 11488 11774 3 11776 11774 11488 3 11487 11775 11777 3 11487 11777 11489 3 11488 11490 11776 3 11778 11776 11490 3 11489 11777 11779 3 11489 11779 11491 3 11490 11492 11778 3 11780 11778 11492 3 11491 11779 11781 3 11491 11781 11493 3 11492 11494 11780 3 11782 11780 11494 3 11493 11781 11783 3 11493 11783 11495 3 11494 11496 11782 3 11784 11782 11496 3 11495 11783 11785 3 11495 11785 11497 3 11496 11498 11784 3 11786 11784 11498 3 11497 
11785 11787 3 11497 11787 11499 3 11498 11500 11786 3 11788 11786 11500 3 11499 11787 11789 3 11499 11789 11501 3 11500 11502 11788 3 11790 11788 11502 3 11501 11789 11791 3 11501 11791 11503 3 11502 11504 11790 3 11792 11790 11504 3 11503 11791 11793 3 11503 11793 11505 3 11504 11506 11794 3 11504 11794 11792 3 11505 11793 11795 3 11505 11795 11507 3 11506 11508 11796 3 11506 11796 11794 3 11507 11795 11797 3 11507 11797 11509 3 11508 11510 11798 3 11508 11798 11796 3 11509 11797 11799 3 11509 11799 11511 3 11510 11512 11800 3 11510 11800 11798 3 11511 11799 11801 3 11511 11801 11513 3 11512 11514 11802 3 11512 11802 11800 3 11513 11801 11803 3 11513 11803 11515 3 11514 11516 11804 3 11514 11804 11802 3 11515 11803 11805 3 11515 11805 11517 3 11516 11518 11806 3 11516 11806 11804 3 11517 11805 11807 3 11517 11807 11519 3 11518 11520 11808 3 11518 11808 11806 3 11519 11807 11809 3 11519 11809 11521 3 11520 11522 11810 3 11520 11810 11808 3 11521 11809 11811 3 11521 11811 11523 3 11522 11524 11812 3 11522 11812 11810 3 11523 11811 11813 3 11523 11813 11525 3 11524 11526 11814 3 11524 11814 11812 3 11525 11813 11815 3 11525 11815 11527 3 11526 11528 11816 3 11526 11816 11814 3 11527 11815 11817 3 11527 11817 11529 3 11528 11530 11818 3 11528 11818 11816 3 11529 11817 11819 3 11529 11819 11531 3 11530 11532 11820 3 11530 11820 11818 3 11531 11819 11821 3 11531 11821 11533 3 11532 11534 11822 3 11532 11822 11820 3 11533 11821 11823 3 11533 11823 11535 3 11534 11536 11824 3 11534 11824 11822 3 11535 11823 11825 3 11535 11825 11537 3 11536 11538 11826 3 11536 11826 11824 3 11537 11825 11827 3 11537 11827 11539 3 11538 11540 11828 3 11538 11828 11826 3 11539 11827 11541 3 11829 11541 11827 3 11540 11542 11830 3 11540 11830 11828 3 11541 11829 11543 3 11831 11543 11829 3 11542 11544 11832 3 11542 11832 11830 3 11543 11831 11545 3 11833 11545 11831 3 11544 11546 11834 3 11544 11834 11832 3 11545 11833 11547 3 11835 11547 11833 3 11546 11548 11836 3 11546 11836 11834 3 11547 
11835 11549 3 11837 11549 11835 3 11548 11550 11838 3 11548 11838 11836 3 11549 11837 11551 3 11839 11551 11837 3 11550 11552 11840 3 11550 11840 11838 3 11551 11839 11555 3 11843 11555 11839 3 11552 11553 11841 3 11552 11841 11840 3 11553 11556 11844 3 11553 11844 11841 3 11554 11555 11843 3 11554 11843 11842 3 11554 11842 11559 3 11847 11559 11842 3 11556 11557 11845 3 11556 11845 11844 3 11557 11560 11848 3 11557 11848 11845 3 11558 11559 11847 3 11558 11847 11846 3 11558 11846 11628 3 11916 11628 11846 3 11560 11561 11849 3 11560 11849 11848 3 11561 11562 11850 3 11561 11850 11849 3 11562 11563 11851 3 11562 11851 11850 3 11563 11564 11852 3 11563 11852 11851 3 11564 11565 11853 3 11564 11853 11852 3 11565 11566 11854 3 11565 11854 11853 3 11566 11567 11855 3 11566 11855 11854 3 11567 11568 11856 3 11567 11856 11855 3 11568 11569 11857 3 11568 11857 11856 3 11569 11570 11858 3 11569 11858 11857 3 11570 11571 11858 3 11859 11858 11571 3 11571 11572 11859 3 11860 11859 11572 3 11572 11573 11860 3 11861 11860 11573 3 11573 11574 11861 3 11862 11861 11574 3 11574 11575 11862 3 11863 11862 11575 3 11575 11576 11863 3 11864 11863 11576 3 11576 11577 11864 3 11865 11864 11577 3 11577 11578 11865 3 11866 11865 11578 3 11578 11579 11866 3 11867 11866 11579 3 11579 11580 11867 3 11868 11867 11580 3 11580 11581 11868 3 11869 11868 11581 3 11581 11582 11869 3 11870 11869 11582 3 11582 11583 11870 3 11871 11870 11583 3 11583 11584 11871 3 11872 11871 11584 3 11584 11585 11872 3 11873 11872 11585 3 11585 11586 11873 3 11874 11873 11586 3 11586 11587 11874 3 11875 11874 11587 3 11587 11588 11875 3 11876 11875 11588 3 11588 11589 11876 3 11877 11876 11589 3 11589 11590 11877 3 11878 11877 11590 3 11590 11591 11878 3 11879 11878 11591 3 11591 11592 11879 3 11880 11879 11592 3 11592 11593 11880 3 11881 11880 11593 3 11593 11594 11881 3 11882 11881 11594 3 11594 11595 11882 3 11883 11882 11595 3 11595 11596 11883 3 11884 11883 11596 3 11596 11597 11884 3 11885 11884 11597 3 11597 
11598 11885 3 11886 11885 11598 3 11598 11599 11886 3 11887 11886 11599 3 11599 11600 11887 3 11888 11887 11600 3 11600 11601 11888 3 11889 11888 11601 3 11601 11602 11889 3 11890 11889 11602 3 11602 11603 11890 3 11891 11890 11603 3 11603 11604 11891 3 11892 11891 11604 3 11604 11605 11892 3 11893 11892 11605 3 11605 11606 11893 3 11894 11893 11606 3 11606 11607 11894 3 11895 11894 11607 3 11607 11608 11895 3 11896 11895 11608 3 11608 11609 11896 3 11897 11896 11609 3 11609 11610 11897 3 11898 11897 11610 3 11610 11611 11898 3 11899 11898 11611 3 11611 11612 11899 3 11900 11899 11612 3 11612 11613 11900 3 11901 11900 11613 3 11613 11614 11901 3 11902 11901 11614 3 11614 11615 11902 3 11903 11902 11615 3 11615 11616 11903 3 11904 11903 11616 3 11616 11617 11904 3 11905 11904 11617 3 11617 11618 11905 3 11906 11905 11618 3 11618 11619 11906 3 11907 11906 11619 3 11619 11620 11907 3 11908 11907 11620 3 11620 11621 11908 3 11909 11908 11621 3 11621 11622 11909 3 11910 11909 11622 3 11622 11623 11910 3 11911 11910 11623 3 11623 11624 11911 3 11912 11911 11624 3 11624 11625 11912 3 11913 11912 11625 3 11625 11626 11913 3 11914 11913 11626 3 11626 11627 11914 3 11915 11914 11627 3 11627 11628 11915 3 11916 11915 11628 3 11629 11917 11918 3 11629 11918 11630 3 11629 11699 11987 3 11629 11987 11917 3 11630 11918 11919 3 11630 11919 11631 3 11631 11919 11920 3 11631 11920 11632 3 11632 11920 11921 3 11632 11921 11633 3 11633 11921 11922 3 11633 11922 11634 3 11634 11922 11923 3 11634 11923 11635 3 11635 11923 11924 3 11635 11924 11636 3 11636 11924 11925 3 11636 11925 11637 3 11637 11925 11926 3 11637 11926 11638 3 11638 11926 11927 3 11638 11927 11639 3 11639 11927 11928 3 11639 11928 11640 3 11640 11928 11929 3 11640 11929 11641 3 11641 11929 11930 3 11641 11930 11642 3 11642 11930 11931 3 11642 11931 11643 3 11643 11931 11932 3 11643 11932 11644 3 11644 11932 11933 3 11644 11933 11645 3 11645 11933 11934 3 11645 11934 11646 3 11646 11934 11935 3 11646 11935 11647 3 11647 
11935 11936 3 11647 11936 11648 3 11648 11936 11937 3 11648 11937 11649 3 11649 11937 11938 3 11649 11938 11650 3 11650 11938 11939 3 11650 11939 11651 3 11651 11939 11940 3 11651 11940 11652 3 11652 11940 11941 3 11652 11941 11653 3 11653 11941 11942 3 11653 11942 11654 3 11654 11942 11943 3 11654 11943 11655 3 11655 11943 11944 3 11655 11944 11656 3 11656 11944 11945 3 11656 11945 11657 3 11657 11945 11946 3 11657 11946 11658 3 11658 11946 11947 3 11658 11947 11659 3 11659 11947 11948 3 11659 11948 11660 3 11660 11948 11949 3 11660 11949 11661 3 11661 11949 11950 3 11661 11950 11662 3 11662 11950 11951 3 11662 11951 11663 3 11663 11951 11952 3 11663 11952 11664 3 11664 11952 11953 3 11664 11953 11665 3 11665 11953 11954 3 11665 11954 11666 3 11666 11954 11955 3 11666 11955 11667 3 11667 11955 11956 3 11667 11956 11668 3 11668 11956 11957 3 11668 11957 11669 3 11669 11957 11958 3 11669 11958 11670 3 11670 11958 11959 3 11670 11959 11671 3 11671 11959 11672 3 11960 11672 11959 3 11672 11960 11673 3 11961 11673 11960 3 11673 11961 11674 3 11962 11674 11961 3 11674 11962 11675 3 11963 11675 11962 3 11675 11963 11676 3 11964 11676 11963 3 11676 11964 11677 3 11965 11677 11964 3 11677 11965 11678 3 11966 11678 11965 3 11678 11966 11679 3 11967 11679 11966 3 11679 11967 11680 3 11968 11680 11967 3 11680 11968 11681 3 11969 11681 11968 3 11681 11969 11682 3 11970 11682 11969 3 11682 11970 11683 3 11971 11683 11970 3 11683 11971 11684 3 11972 11684 11971 3 11684 11972 11685 3 11973 11685 11972 3 11685 11973 11686 3 11974 11686 11973 3 11686 11974 11687 3 11975 11687 11974 3 11687 11975 11688 3 11976 11688 11975 3 11688 11976 11689 3 11977 11689 11976 3 11689 11977 11690 3 11978 11690 11977 3 11690 11978 11691 3 11979 11691 11978 3 11691 11979 11692 3 11980 11692 11979 3 11692 11980 11693 3 11981 11693 11980 3 11693 11981 11694 3 11982 11694 11981 3 11694 11982 11695 3 11983 11695 11982 3 11695 11983 11696 3 11984 11696 11983 3 11696 11984 11697 3 11985 11697 11984 3 11697 
11985 11700 3 11988 11700 11985 3 11698 11986 11699 3 11987 11699 11986 3 11698 11703 11991 3 11698 11991 11986 3 11700 11988 11701 3 11989 11701 11988 3 11701 11989 11704 3 11992 11704 11989 3 11702 11990 11703 3 11991 11703 11990 3 11702 11706 11990 3 11994 11990 11706 3 11704 11992 11705 3 11993 11705 11992 3 11705 11993 11707 3 11995 11707 11993 3 11706 11708 11994 3 11996 11994 11708 3 11707 11995 11709 3 11997 11709 11995 3 11708 11710 11996 3 11998 11996 11710 3 11709 11997 11711 3 11999 11711 11997 3 11710 11712 11998 3 12000 11998 11712 3 11711 11999 11713 3 12001 11713 11999 3 11712 11714 12000 3 12002 12000 11714 3 11713 12001 11715 3 12003 11715 12001 3 11714 11716 12002 3 12004 12002 11716 3 11715 12003 11717 3 12005 11717 12003 3 11716 11718 12004 3 12006 12004 11718 3 11717 12005 11719 3 12007 11719 12005 3 11718 11720 12006 3 12008 12006 11720 3 11719 12007 11721 3 12009 11721 12007 3 11720 11722 12008 3 12010 12008 11722 3 11721 12009 11723 3 12011 11723 12009 3 11722 11724 12010 3 12012 12010 11724 3 11723 12011 11725 3 12013 11725 12011 3 11724 11726 12012 3 12014 12012 11726 3 11725 12013 11727 3 12015 11727 12013 3 11726 11728 12014 3 12016 12014 11728 3 11727 12015 11729 3 12017 11729 12015 3 11728 11730 12016 3 12018 12016 11730 3 11729 12017 11731 3 12019 11731 12017 3 11730 11732 12018 3 12020 12018 11732 3 11731 12019 11733 3 12021 11733 12019 3 11732 11734 12020 3 12022 12020 11734 3 11733 12021 11735 3 12023 11735 12021 3 11734 11736 12022 3 12024 12022 11736 3 11735 12023 11737 3 12025 11737 12023 3 11736 11738 12024 3 12026 12024 11738 3 11737 12025 11739 3 12027 11739 12025 3 11738 11740 12026 3 12028 12026 11740 3 11739 12027 12029 3 11739 12029 11741 3 11740 11742 12028 3 12030 12028 11742 3 11741 12029 12031 3 11741 12031 11743 3 11742 11744 12030 3 12032 12030 11744 3 11743 12031 12033 3 11743 12033 11745 3 11744 11746 12032 3 12034 12032 11746 3 11745 12033 12035 3 11745 12035 11747 3 11746 11748 12034 3 12036 12034 11748 3 11747 
12035 12037 3 11747 12037 11749 3 11748 11750 12036 3 12038 12036 11750 3 11749 12037 12039 3 11749 12039 11751 3 11750 11752 12038 3 12040 12038 11752 3 11751 12039 12041 3 11751 12041 11753 3 11752 11754 12040 3 12042 12040 11754 3 11753 12041 12043 3 11753 12043 11755 3 11754 11756 12042 3 12044 12042 11756 3 11755 12043 12045 3 11755 12045 11757 3 11756 11758 12044 3 12046 12044 11758 3 11757 12045 12047 3 11757 12047 11759 3 11758 11760 12046 3 12048 12046 11760 3 11759 12047 12049 3 11759 12049 11761 3 11760 11762 12048 3 12050 12048 11762 3 11761 12049 12051 3 11761 12051 11763 3 11762 11764 12050 3 12052 12050 11764 3 11763 12051 12053 3 11763 12053 11765 3 11764 11766 12052 3 12054 12052 11766 3 11765 12053 12055 3 11765 12055 11767 3 11766 11768 12054 3 12056 12054 11768 3 11767 12055 12057 3 11767 12057 11769 3 11768 11770 12056 3 12058 12056 11770 3 11769 12057 12059 3 11769 12059 11771 3 11770 11772 12060 3 11770 12060 12058 3 11771 12059 12061 3 11771 12061 11773 3 11772 11774 12062 3 11772 12062 12060 3 11773 12061 12063 3 11773 12063 11775 3 11774 11776 12064 3 11774 12064 12062 3 11775 12063 12065 3 11775 12065 11777 3 11776 11778 12066 3 11776 12066 12064 3 11777 12065 12067 3 11777 12067 11779 3 11778 11780 12068 3 11778 12068 12066 3 11779 12067 12069 3 11779 12069 11781 3 11780 11782 12070 3 11780 12070 12068 3 11781 12069 12071 3 11781 12071 11783 3 11782 11784 12072 3 11782 12072 12070 3 11783 12071 12073 3 11783 12073 11785 3 11784 11786 12074 3 11784 12074 12072 3 11785 12073 12075 3 11785 12075 11787 3 11786 11788 12076 3 11786 12076 12074 3 11787 12075 12077 3 11787 12077 11789 3 11788 11790 12078 3 11788 12078 12076 3 11789 12077 12079 3 11789 12079 11791 3 11790 11792 12080 3 11790 12080 12078 3 11791 12079 12081 3 11791 12081 11793 3 11792 11794 12082 3 11792 12082 12080 3 11793 12081 12083 3 11793 12083 11795 3 11794 11796 12084 3 11794 12084 12082 3 11795 12083 12085 3 11795 12085 11797 3 11796 11798 12086 3 11796 12086 12084 3 11797 
12085 12087 3 11797 12087 11799 3 11798 11800 12088 3 11798 12088 12086 3 11799 12087 12089 3 11799 12089 11801 3 11800 11802 12090 3 11800 12090 12088 3 11801 12089 12091 3 11801 12091 11803 3 11802 11804 12092 3 11802 12092 12090 3 11803 12091 12093 3 11803 12093 11805 3 11804 11806 12094 3 11804 12094 12092 3 11805 12093 12095 3 11805 12095 11807 3 11806 11808 12096 3 11806 12096 12094 3 11807 12095 11809 3 12097 11809 12095 3 11808 11810 12098 3 11808 12098 12096 3 11809 12097 11811 3 12099 11811 12097 3 11810 11812 12100 3 11810 12100 12098 3 11811 12099 11813 3 12101 11813 12099 3 11812 11814 12102 3 11812 12102 12100 3 11813 12101 11815 3 12103 11815 12101 3 11814 11816 12104 3 11814 12104 12102 3 11815 12103 11817 3 12105 11817 12103 3 11816 11818 12106 3 11816 12106 12104 3 11817 12105 11819 3 12107 11819 12105 3 11818 11820 12108 3 11818 12108 12106 3 11819 12107 11821 3 12109 11821 12107 3 11820 11822 12110 3 11820 12110 12108 3 11821 12109 11823 3 12111 11823 12109 3 11822 11824 12112 3 11822 12112 12110 3 11823 12111 11825 3 12113 11825 12111 3 11824 11826 12114 3 11824 12114 12112 3 11825 12113 11827 3 12115 11827 12113 3 11826 11828 12116 3 11826 12116 12114 3 11827 12115 11829 3 12117 11829 12115 3 11828 11830 12118 3 11828 12118 12116 3 11829 12117 11831 3 12119 11831 12117 3 11830 11832 12120 3 11830 12120 12118 3 11831 12119 11833 3 12121 11833 12119 3 11832 11834 12122 3 11832 12122 12120 3 11833 12121 11835 3 12123 11835 12121 3 11834 11836 12124 3 11834 12124 12122 3 11835 12123 11837 3 12125 11837 12123 3 11836 11838 12124 3 12126 12124 11838 3 11837 12125 11839 3 12127 11839 12125 3 11838 11840 12126 3 12128 12126 11840 3 11839 12127 11843 3 12131 11843 12127 3 11840 11841 12128 3 12129 12128 11841 3 11841 11844 12129 3 12132 12129 11844 3 11842 11843 12130 3 12131 12130 11843 3 11842 12130 11847 3 12135 11847 12130 3 11844 11845 12132 3 12133 12132 11845 3 11845 11848 12133 3 12136 12133 11848 3 11846 11847 12134 3 12135 12134 11847 3 11846 
12134 11916 3 12204 11916 12134 3 11848 11849 12136 3 12137 12136 11849 3 11849 11850 12137 3 12138 12137 11850 3 11850 11851 12138 3 12139 12138 11851 3 11851 11852 12139 3 12140 12139 11852 3 11852 11853 12140 3 12141 12140 11853 3 11853 11854 12141 3 12142 12141 11854 3 11854 11855 12142 3 12143 12142 11855 3 11855 11856 12143 3 12144 12143 11856 3 11856 11857 12144 3 12145 12144 11857 3 11857 11858 12145 3 12146 12145 11858 3 11858 11859 12146 3 12147 12146 11859 3 11859 11860 12147 3 12148 12147 11860 3 11860 11861 12148 3 12149 12148 11861 3 11861 11862 12149 3 12150 12149 11862 3 11862 11863 12150 3 12151 12150 11863 3 11863 11864 12151 3 12152 12151 11864 3 11864 11865 12152 3 12153 12152 11865 3 11865 11866 12153 3 12154 12153 11866 3 11866 11867 12154 3 12155 12154 11867 3 11867 11868 12155 3 12156 12155 11868 3 11868 11869 12156 3 12157 12156 11869 3 11869 11870 12157 3 12158 12157 11870 3 11870 11871 12158 3 12159 12158 11871 3 11871 11872 12159 3 12160 12159 11872 3 11872 11873 12160 3 12161 12160 11873 3 11873 11874 12161 3 12162 12161 11874 3 11874 11875 12162 3 12163 12162 11875 3 11875 11876 12163 3 12164 12163 11876 3 11876 11877 12164 3 12165 12164 11877 3 11877 11878 12165 3 12166 12165 11878 3 11878 11879 12166 3 12167 12166 11879 3 11879 11880 12167 3 12168 12167 11880 3 11880 11881 12168 3 12169 12168 11881 3 11881 11882 12169 3 12170 12169 11882 3 11882 11883 12170 3 12171 12170 11883 3 11883 11884 12171 3 12172 12171 11884 3 11884 11885 12172 3 12173 12172 11885 3 11885 11886 12173 3 12174 12173 11886 3 11886 11887 12174 3 12175 12174 11887 3 11887 11888 12175 3 12176 12175 11888 3 11888 11889 12176 3 12177 12176 11889 3 11889 11890 12177 3 12178 12177 11890 3 11890 11891 12178 3 12179 12178 11891 3 11891 11892 12179 3 12180 12179 11892 3 11892 11893 12180 3 12181 12180 11893 3 11893 11894 12181 3 12182 12181 11894 3 11894 11895 12182 3 12183 12182 11895 3 11895 11896 12183 3 12184 12183 11896 3 11896 11897 12184 3 12185 12184 11897 3 11897 
11898 12185 3 12186 12185 11898 3 11898 11899 12186 3 12187 12186 11899 3 11899 11900 12187 3 12188 12187 11900 3 11900 11901 12188 3 12189 12188 11901 3 11901 11902 12189 3 12190 12189 11902 3 11902 11903 12190 3 12191 12190 11903 3 11903 11904 12192 3 11903 12192 12191 3 11904 11905 12193 3 11904 12193 12192 3 11905 11906 12194 3 11905 12194 12193 3 11906 11907 12195 3 11906 12195 12194 3 11907 11908 12196 3 11907 12196 12195 3 11908 11909 12197 3 11908 12197 12196 3 11909 11910 12198 3 11909 12198 12197 3 11910 11911 12199 3 11910 12199 12198 3 11911 11912 12200 3 11911 12200 12199 3 11912 11913 12201 3 11912 12201 12200 3 11913 11914 12202 3 11913 12202 12201 3 11914 11915 12203 3 11914 12203 12202 3 11915 11916 12204 3 11915 12204 12203 3 11917 12205 12206 3 11917 12206 11918 3 11917 11987 12275 3 11917 12275 12205 3 11918 12206 12207 3 11918 12207 11919 3 11919 12207 12208 3 11919 12208 11920 3 11920 12208 12209 3 11920 12209 11921 3 11921 12209 12210 3 11921 12210 11922 3 11922 12210 12211 3 11922 12211 11923 3 11923 12211 12212 3 11923 12212 11924 3 11924 12212 12213 3 11924 12213 11925 3 11925 12213 12214 3 11925 12214 11926 3 11926 12214 12215 3 11926 12215 11927 3 11927 12215 12216 3 11927 12216 11928 3 11928 12216 12217 3 11928 12217 11929 3 11929 12217 12218 3 11929 12218 11930 3 11930 12218 12219 3 11930 12219 11931 3 11931 12219 12220 3 11931 12220 11932 3 11932 12220 12221 3 11932 12221 11933 3 11933 12221 12222 3 11933 12222 11934 3 11934 12222 12223 3 11934 12223 11935 3 11935 12223 12224 3 11935 12224 11936 3 11936 12224 12225 3 11936 12225 11937 3 11937 12225 12226 3 11937 12226 11938 3 11938 12226 12227 3 11938 12227 11939 3 11939 12227 12228 3 11939 12228 11940 3 11940 12228 12229 3 11940 12229 11941 3 11941 12229 11942 3 12230 11942 12229 3 11942 12230 11943 3 12231 11943 12230 3 11943 12231 11944 3 12232 11944 12231 3 11944 12232 11945 3 12233 11945 12232 3 11945 12233 11946 3 12234 11946 12233 3 11946 12234 11947 3 12235 11947 12234 3 11947 
12235 11948 3 12236 11948 12235 3 11948 12236 11949 3 12237 11949 12236 3 11949 12237 11950 3 12238 11950 12237 3 11950 12238 11951 3 12239 11951 12238 3 11951 12239 11952 3 12240 11952 12239 3 11952 12240 11953 3 12241 11953 12240 3 11953 12241 11954 3 12242 11954 12241 3 11954 12242 11955 3 12243 11955 12242 3 11955 12243 11956 3 12244 11956 12243 3 11956 12244 11957 3 12245 11957 12244 3 11957 12245 11958 3 12246 11958 12245 3 11958 12246 11959 3 12247 11959 12246 3 11959 12247 11960 3 12248 11960 12247 3 11960 12248 11961 3 12249 11961 12248 3 11961 12249 11962 3 12250 11962 12249 3 11962 12250 11963 3 12251 11963 12250 3 11963 12251 11964 3 12252 11964 12251 3 11964 12252 11965 3 12253 11965 12252 3 11965 12253 11966 3 12254 11966 12253 3 11966 12254 11967 3 12255 11967 12254 3 11967 12255 11968 3 12256 11968 12255 3 11968 12256 11969 3 12257 11969 12256 3 11969 12257 11970 3 12258 11970 12257 3 11970 12258 11971 3 12259 11971 12258 3 11971 12259 11972 3 12260 11972 12259 3 11972 12260 11973 3 12261 11973 12260 3 11973 12261 11974 3 12262 11974 12261 3 11974 12262 11975 3 12263 11975 12262 3 11975 12263 11976 3 12264 11976 12263 3 11976 12264 11977 3 12265 11977 12264 3 11977 12265 11978 3 12266 11978 12265 3 11978 12266 11979 3 12267 11979 12266 3 11979 12267 11980 3 12268 11980 12267 3 11980 12268 11981 3 12269 11981 12268 3 11981 12269 11982 3 12270 11982 12269 3 11982 12270 11983 3 12271 11983 12270 3 11983 12271 11984 3 12272 11984 12271 3 11984 12272 11985 3 12273 11985 12272 3 11985 12273 11988 3 12276 11988 12273 3 11986 12274 11987 3 12275 11987 12274 3 11986 11991 12274 3 12279 12274 11991 3 11988 12276 11989 3 12277 11989 12276 3 11989 12277 11992 3 12280 11992 12277 3 11990 12278 11991 3 12279 11991 12278 3 11990 11994 12278 3 12282 12278 11994 3 11992 12280 11993 3 12281 11993 12280 3 11993 12281 11995 3 12283 11995 12281 3 11994 11996 12282 3 12284 12282 11996 3 11995 12283 11997 3 12285 11997 12283 3 11996 11998 12284 3 12286 12284 11998 3 11997 
12285 11999 3 12287 11999 12285 3 11998 12000 12286 3 12288 12286 12000 3 11999 12287 12001 3 12289 12001 12287 3 12000 12002 12288 3 12290 12288 12002 3 12001 12289 12003 3 12291 12003 12289 3 12002 12004 12290 3 12292 12290 12004 3 12003 12291 12005 3 12293 12005 12291 3 12004 12006 12292 3 12294 12292 12006 3 12005 12293 12007 3 12295 12007 12293 3 12006 12008 12294 3 12296 12294 12008 3 12007 12295 12009 3 12297 12009 12295 3 12008 12010 12296 3 12298 12296 12010 3 12009 12297 12299 3 12009 12299 12011 3 12010 12012 12298 3 12300 12298 12012 3 12011 12299 12301 3 12011 12301 12013 3 12012 12014 12300 3 12302 12300 12014 3 12013 12301 12303 3 12013 12303 12015 3 12014 12016 12302 3 12304 12302 12016 3 12015 12303 12305 3 12015 12305 12017 3 12016 12018 12304 3 12306 12304 12018 3 12017 12305 12307 3 12017 12307 12019 3 12018 12020 12306 3 12308 12306 12020 3 12019 12307 12309 3 12019 12309 12021 3 12020 12022 12308 3 12310 12308 12022 3 12021 12309 12311 3 12021 12311 12023 3 12022 12024 12310 3 12312 12310 12024 3 12023 12311 12313 3 12023 12313 12025 3 12024 12026 12312 3 12314 12312 12026 3 12025 12313 12315 3 12025 12315 12027 3 12026 12028 12314 3 12316 12314 12028 3 12027 12315 12317 3 12027 12317 12029 3 12028 12030 12316 3 12318 12316 12030 3 12029 12317 12319 3 12029 12319 12031 3 12030 12032 12318 3 12320 12318 12032 3 12031 12319 12321 3 12031 12321 12033 3 12032 12034 12320 3 12322 12320 12034 3 12033 12321 12323 3 12033 12323 12035 3 12034 12036 12322 3 12324 12322 12036 3 12035 12323 12325 3 12035 12325 12037 3 12036 12038 12324 3 12326 12324 12038 3 12037 12325 12327 3 12037 12327 12039 3 12038 12040 12328 3 12038 12328 12326 3 12039 12327 12329 3 12039 12329 12041 3 12040 12042 12330 3 12040 12330 12328 3 12041 12329 12331 3 12041 12331 12043 3 12042 12044 12332 3 12042 12332 12330 3 12043 12331 12333 3 12043 12333 12045 3 12044 12046 12334 3 12044 12334 12332 3 12045 12333 12335 3 12045 12335 12047 3 12046 12048 12336 3 12046 12336 12334 3 12047 
12335 12337 3 12047 12337 12049 3 12048 12050 12338 3 12048 12338 12336 3 12049 12337 12339 3 12049 12339 12051 3 12050 12052 12340 3 12050 12340 12338 3 12051 12339 12341 3 12051 12341 12053 3 12052 12054 12342 3 12052 12342 12340 3 12053 12341 12343 3 12053 12343 12055 3 12054 12056 12344 3 12054 12344 12342 3 12055 12343 12345 3 12055 12345 12057 3 12056 12058 12346 3 12056 12346 12344 3 12057 12345 12347 3 12057 12347 12059 3 12058 12060 12348 3 12058 12348 12346 3 12059 12347 12349 3 12059 12349 12061 3 12060 12062 12350 3 12060 12350 12348 3 12061 12349 12351 3 12061 12351 12063 3 12062 12064 12352 3 12062 12352 12350 3 12063 12351 12353 3 12063 12353 12065 3 12064 12066 12354 3 12064 12354 12352 3 12065 12353 12355 3 12065 12355 12067 3 12066 12068 12356 3 12066 12356 12354 3 12067 12355 12357 3 12067 12357 12069 3 12068 12070 12358 3 12068 12358 12356 3 12069 12357 12359 3 12069 12359 12071 3 12070 12072 12360 3 12070 12360 12358 3 12071 12359 12361 3 12071 12361 12073 3 12072 12074 12362 3 12072 12362 12360 3 12073 12361 12363 3 12073 12363 12075 3 12074 12076 12364 3 12074 12364 12362 3 12075 12363 12365 3 12075 12365 12077 3 12076 12078 12366 3 12076 12366 12364 3 12077 12365 12079 3 12367 12079 12365 3 12078 12080 12368 3 12078 12368 12366 3 12079 12367 12081 3 12369 12081 12367 3 12080 12082 12370 3 12080 12370 12368 3 12081 12369 12083 3 12371 12083 12369 3 12082 12084 12372 3 12082 12372 12370 3 12083 12371 12085 3 12373 12085 12371 3 12084 12086 12374 3 12084 12374 12372 3 12085 12373 12087 3 12375 12087 12373 3 12086 12088 12376 3 12086 12376 12374 3 12087 12375 12089 3 12377 12089 12375 3 12088 12090 12378 3 12088 12378 12376 3 12089 12377 12091 3 12379 12091 12377 3 12090 12092 12380 3 12090 12380 12378 3 12091 12379 12093 3 12381 12093 12379 3 12092 12094 12382 3 12092 12382 12380 3 12093 12381 12095 3 12383 12095 12381 3 12094 12096 12384 3 12094 12384 12382 3 12095 12383 12097 3 12385 12097 12383 3 12096 12098 12386 3 12096 12386 12384 3 12097 
12385 12099 3 12387 12099 12385 3 12098 12100 12388 3 12098 12388 12386 3 12099 12387 12101 3 12389 12101 12387 3 12100 12102 12390 3 12100 12390 12388 3 12101 12389 12103 3 12391 12103 12389 3 12102 12104 12392 3 12102 12392 12390 3 12103 12391 12105 3 12393 12105 12391 3 12104 12106 12394 3 12104 12394 12392 3 12105 12393 12107 3 12395 12107 12393 3 12106 12108 12394 3 12396 12394 12108 3 12107 12395 12109 3 12397 12109 12395 3 12108 12110 12396 3 12398 12396 12110 3 12109 12397 12111 3 12399 12111 12397 3 12110 12112 12398 3 12400 12398 12112 3 12111 12399 12113 3 12401 12113 12399 3 12112 12114 12400 3 12402 12400 12114 3 12113 12401 12115 3 12403 12115 12401 3 12114 12116 12402 3 12404 12402 12116 3 12115 12403 12117 3 12405 12117 12403 3 12116 12118 12404 3 12406 12404 12118 3 12117 12405 12119 3 12407 12119 12405 3 12118 12120 12406 3 12408 12406 12120 3 12119 12407 12121 3 12409 12121 12407 3 12120 12122 12408 3 12410 12408 12122 3 12121 12409 12123 3 12411 12123 12409 3 12122 12124 12410 3 12412 12410 12124 3 12123 12411 12125 3 12413 12125 12411 3 12124 12126 12412 3 12414 12412 12126 3 12125 12413 12127 3 12415 12127 12413 3 12126 12128 12414 3 12416 12414 12128 3 12127 12415 12131 3 12419 12131 12415 3 12128 12129 12416 3 12417 12416 12129 3 12129 12132 12417 3 12420 12417 12132 3 12130 12131 12418 3 12419 12418 12131 3 12130 12418 12135 3 12423 12135 12418 3 12132 12133 12420 3 12421 12420 12133 3 12133 12136 12421 3 12424 12421 12136 3 12134 12135 12422 3 12423 12422 12135 3 12134 12422 12204 3 12492 12204 12422 3 12136 12137 12424 3 12425 12424 12137 3 12137 12138 12425 3 12426 12425 12138 3 12138 12139 12426 3 12427 12426 12139 3 12139 12140 12427 3 12428 12427 12140 3 12140 12141 12428 3 12429 12428 12141 3 12141 12142 12429 3 12430 12429 12142 3 12142 12143 12430 3 12431 12430 12143 3 12143 12144 12431 3 12432 12431 12144 3 12144 12145 12432 3 12433 12432 12145 3 12145 12146 12433 3 12434 12433 12146 3 12146 12147 12434 3 12435 12434 12147 3 12147 
12148 12435 3 12436 12435 12148 3 12148 12149 12436 3 12437 12436 12149 3 12149 12150 12437 3 12438 12437 12150 3 12150 12151 12438 3 12439 12438 12151 3 12151 12152 12439 3 12440 12439 12152 3 12152 12153 12440 3 12441 12440 12153 3 12153 12154 12441 3 12442 12441 12154 3 12154 12155 12442 3 12443 12442 12155 3 12155 12156 12443 3 12444 12443 12156 3 12156 12157 12444 3 12445 12444 12157 3 12157 12158 12445 3 12446 12445 12158 3 12158 12159 12446 3 12447 12446 12159 3 12159 12160 12447 3 12448 12447 12160 3 12160 12161 12448 3 12449 12448 12161 3 12161 12162 12449 3 12450 12449 12162 3 12162 12163 12450 3 12451 12450 12163 3 12163 12164 12451 3 12452 12451 12164 3 12164 12165 12452 3 12453 12452 12165 3 12165 12166 12453 3 12454 12453 12166 3 12166 12167 12454 3 12455 12454 12167 3 12167 12168 12455 3 12456 12455 12168 3 12168 12169 12456 3 12457 12456 12169 3 12169 12170 12457 3 12458 12457 12170 3 12170 12171 12458 3 12459 12458 12171 3 12171 12172 12459 3 12460 12459 12172 3 12172 12173 12460 3 12461 12460 12173 3 12173 12174 12462 3 12173 12462 12461 3 12174 12175 12463 3 12174 12463 12462 3 12175 12176 12464 3 12175 12464 12463 3 12176 12177 12465 3 12176 12465 12464 3 12177 12178 12466 3 12177 12466 12465 3 12178 12179 12467 3 12178 12467 12466 3 12179 12180 12468 3 12179 12468 12467 3 12180 12181 12469 3 12180 12469 12468 3 12181 12182 12470 3 12181 12470 12469 3 12182 12183 12471 3 12182 12471 12470 3 12183 12184 12472 3 12183 12472 12471 3 12184 12185 12473 3 12184 12473 12472 3 12185 12186 12474 3 12185 12474 12473 3 12186 12187 12475 3 12186 12475 12474 3 12187 12188 12476 3 12187 12476 12475 3 12188 12189 12477 3 12188 12477 12476 3 12189 12190 12478 3 12189 12478 12477 3 12190 12191 12479 3 12190 12479 12478 3 12191 12192 12480 3 12191 12480 12479 3 12192 12193 12481 3 12192 12481 12480 3 12193 12194 12482 3 12193 12482 12481 3 12194 12195 12483 3 12194 12483 12482 3 12195 12196 12484 3 12195 12484 12483 3 12196 12197 12485 3 12196 12485 12484 3 12197 
12198 12486 3 12197 12486 12485 3 12198 12199 12487 3 12198 12487 12486 3 12199 12200 12488 3 12199 12488 12487 3 12200 12201 12489 3 12200 12489 12488 3 12201 12202 12490 3 12201 12490 12489 3 12202 12203 12491 3 12202 12491 12490 3 12203 12204 12492 3 12203 12492 12491 3 12205 12493 12494 3 12205 12494 12206 3 12205 12275 12493 3 12563 12493 12275 3 12206 12494 12495 3 12206 12495 12207 3 12207 12495 12496 3 12207 12496 12208 3 12208 12496 12497 3 12208 12497 12209 3 12209 12497 12498 3 12209 12498 12210 3 12210 12498 12499 3 12210 12499 12211 3 12211 12499 12500 3 12211 12500 12212 3 12212 12500 12501 3 12212 12501 12213 3 12213 12501 12214 3 12502 12214 12501 3 12214 12502 12215 3 12503 12215 12502 3 12215 12503 12216 3 12504 12216 12503 3 12216 12504 12217 3 12505 12217 12504 3 12217 12505 12218 3 12506 12218 12505 3 12218 12506 12219 3 12507 12219 12506 3 12219 12507 12220 3 12508 12220 12507 3 12220 12508 12221 3 12509 12221 12508 3 12221 12509 12222 3 12510 12222 12509 3 12222 12510 12223 3 12511 12223 12510 3 12223 12511 12224 3 12512 12224 12511 3 12224 12512 12225 3 12513 12225 12512 3 12225 12513 12226 3 12514 12226 12513 3 12226 12514 12227 3 12515 12227 12514 3 12227 12515 12228 3 12516 12228 12515 3 12228 12516 12229 3 12517 12229 12516 3 12229 12517 12230 3 12518 12230 12517 3 12230 12518 12231 3 12519 12231 12518 3 12231 12519 12232 3 12520 12232 12519 3 12232 12520 12233 3 12521 12233 12520 3 12233 12521 12234 3 12522 12234 12521 3 12234 12522 12235 3 12523 12235 12522 3 12235 12523 12236 3 12524 12236 12523 3 12236 12524 12237 3 12525 12237 12524 3 12237 12525 12238 3 12526 12238 12525 3 12238 12526 12239 3 12527 12239 12526 3 12239 12527 12240 3 12528 12240 12527 3 12240 12528 12241 3 12529 12241 12528 3 12241 12529 12242 3 12530 12242 12529 3 12242 12530 12243 3 12531 12243 12530 3 12243 12531 12244 3 12532 12244 12531 3 12244 12532 12245 3 12533 12245 12532 3 12245 12533 12246 3 12534 12246 12533 3 12246 12534 12247 3 12535 12247 12534 3 12247 
12535 12248 3 12536 12248 12535 3 12248 12536 12249 3 12537 12249 12536 3 12249 12537 12250 3 12538 12250 12537 3 12250 12538 12251 3 12539 12251 12538 3 12251 12539 12252 3 12540 12252 12539 3 12252 12540 12253 3 12541 12253 12540 3 12253 12541 12254 3 12542 12254 12541 3 12254 12542 12255 3 12543 12255 12542 3 12255 12543 12256 3 12544 12256 12543 3 12256 12544 12257 3 12545 12257 12544 3 12257 12545 12258 3 12546 12258 12545 3 12258 12546 12259 3 12547 12259 12546 3 12259 12547 12260 3 12548 12260 12547 3 12260 12548 12261 3 12549 12261 12548 3 12261 12549 12262 3 12550 12262 12549 3 12262 12550 12263 3 12551 12263 12550 3 12263 12551 12264 3 12552 12264 12551 3 12264 12552 12265 3 12553 12265 12552 3 12265 12553 12266 3 12554 12266 12553 3 12266 12554 12267 3 12555 12267 12554 3 12267 12555 12268 3 12556 12268 12555 3 12268 12556 12269 3 12557 12269 12556 3 12269 12557 12270 3 12558 12270 12557 3 12270 12558 12271 3 12559 12271 12558 3 12271 12559 12272 3 12560 12272 12559 3 12272 12560 12273 3 12561 12273 12560 3 12273 12561 12276 3 12564 12276 12561 3 12274 12562 12275 3 12563 12275 12562 3 12274 12279 12562 3 12567 12562 12279 3 12276 12564 12277 3 12565 12277 12564 3 12277 12565 12280 3 12568 12280 12565 3 12278 12566 12279 3 12567 12279 12566 3 12278 12282 12566 3 12570 12566 12282 3 12280 12568 12281 3 12569 12281 12568 3 12281 12569 12283 3 12571 12283 12569 3 12282 12284 12570 3 12572 12570 12284 3 12283 12571 12573 3 12283 12573 12285 3 12284 12286 12572 3 12574 12572 12286 3 12285 12573 12575 3 12285 12575 12287 3 12286 12288 12574 3 12576 12574 12288 3 12287 12575 12577 3 12287 12577 12289 3 12288 12290 12576 3 12578 12576 12290 3 12289 12577 12579 3 12289 12579 12291 3 12290 12292 12578 3 12580 12578 12292 3 12291 12579 12581 3 12291 12581 12293 3 12292 12294 12580 3 12582 12580 12294 3 12293 12581 12583 3 12293 12583 12295 3 12294 12296 12582 3 12584 12582 12296 3 12295 12583 12585 3 12295 12585 12297 3 12296 12298 12584 3 12586 12584 12298 3 12297 
12585 12587 3 12297 12587 12299 3 12298 12300 12586 3 12588 12586 12300 3 12299 12587 12589 3 12299 12589 12301 3 12300 12302 12588 3 12590 12588 12302 3 12301 12589 12591 3 12301 12591 12303 3 12302 12304 12590 3 12592 12590 12304 3 12303 12591 12593 3 12303 12593 12305 3 12304 12306 12592 3 12594 12592 12306 3 12305 12593 12595 3 12305 12595 12307 3 12306 12308 12594 3 12596 12594 12308 3 12307 12595 12597 3 12307 12597 12309 3 12308 12310 12596 3 12598 12596 12310 3 12309 12597 12599 3 12309 12599 12311 3 12310 12312 12600 3 12310 12600 12598 3 12311 12599 12601 3 12311 12601 12313 3 12312 12314 12602 3 12312 12602 12600 3 12313 12601 12603 3 12313 12603 12315 3 12314 12316 12604 3 12314 12604 12602 3 12315 12603 12605 3 12315 12605 12317 3 12316 12318 12606 3 12316 12606 12604 3 12317 12605 12607 3 12317 12607 12319 3 12318 12320 12608 3 12318 12608 12606 3 12319 12607 12609 3 12319 12609 12321 3 12320 12322 12610 3 12320 12610 12608 3 12321 12609 12611 3 12321 12611 12323 3 12322 12324 12612 3 12322 12612 12610 3 12323 12611 12613 3 12323 12613 12325 3 12324 12326 12614 3 12324 12614 12612 3 12325 12613 12615 3 12325 12615 12327 3 12326 12328 12616 3 12326 12616 12614 3 12327 12615 12617 3 12327 12617 12329 3 12328 12330 12618 3 12328 12618 12616 3 12329 12617 12619 3 12329 12619 12331 3 12330 12332 12620 3 12330 12620 12618 3 12331 12619 12621 3 12331 12621 12333 3 12332 12334 12622 3 12332 12622 12620 3 12333 12621 12623 3 12333 12623 12335 3 12334 12336 12624 3 12334 12624 12622 3 12335 12623 12625 3 12335 12625 12337 3 12336 12338 12626 3 12336 12626 12624 3 12337 12625 12627 3 12337 12627 12339 3 12338 12340 12628 3 12338 12628 12626 3 12339 12627 12629 3 12339 12629 12341 3 12340 12342 12630 3 12340 12630 12628 3 12341 12629 12631 3 12341 12631 12343 3 12342 12344 12632 3 12342 12632 12630 3 12343 12631 12633 3 12343 12633 12345 3 12344 12346 12634 3 12344 12634 12632 3 12345 12633 12635 3 12345 12635 12347 3 12346 12348 12636 3 12346 12636 12634 3 12347 
12635 12637 3 12347 12637 12349 3 12348 12350 12638 3 12348 12638 12636 3 12349 12637 12639 3 12349 12639 12351 3 12350 12352 12640 3 12350 12640 12638 3 12351 12639 12353 3 12641 12353 12639 3 12352 12354 12642 3 12352 12642 12640 3 12353 12641 12355 3 12643 12355 12641 3 12354 12356 12644 3 12354 12644 12642 3 12355 12643 12357 3 12645 12357 12643 3 12356 12358 12646 3 12356 12646 12644 3 12357 12645 12359 3 12647 12359 12645 3 12358 12360 12648 3 12358 12648 12646 3 12359 12647 12361 3 12649 12361 12647 3 12360 12362 12650 3 12360 12650 12648 3 12361 12649 12363 3 12651 12363 12649 3 12362 12364 12652 3 12362 12652 12650 3 12363 12651 12365 3 12653 12365 12651 3 12364 12366 12654 3 12364 12654 12652 3 12365 12653 12367 3 12655 12367 12653 3 12366 12368 12656 3 12366 12656 12654 3 12367 12655 12369 3 12657 12369 12655 3 12368 12370 12658 3 12368 12658 12656 3 12369 12657 12371 3 12659 12371 12657 3 12370 12372 12660 3 12370 12660 12658 3 12371 12659 12373 3 12661 12373 12659 3 12372 12374 12662 3 12372 12662 12660 3 12373 12661 12375 3 12663 12375 12661 3 12374 12376 12664 3 12374 12664 12662 3 12375 12663 12377 3 12665 12377 12663 3 12376 12378 12666 3 12376 12666 12664 3 12377 12665 12379 3 12667 12379 12665 3 12378 12380 12666 3 12668 12666 12380 3 12379 12667 12381 3 12669 12381 12667 3 12380 12382 12668 3 12670 12668 12382 3 12381 12669 12383 3 12671 12383 12669 3 12382 12384 12670 3 12672 12670 12384 3 12383 12671 12385 3 12673 12385 12671 3 12384 12386 12672 3 12674 12672 12386 3 12385 12673 12387 3 12675 12387 12673 3 12386 12388 12674 3 12676 12674 12388 3 12387 12675 12389 3 12677 12389 12675 3 12388 12390 12676 3 12678 12676 12390 3 12389 12677 12391 3 12679 12391 12677 3 12390 12392 12678 3 12680 12678 12392 3 12391 12679 12393 3 12681 12393 12679 3 12392 12394 12680 3 12682 12680 12394 3 12393 12681 12395 3 12683 12395 12681 3 12394 12396 12682 3 12684 12682 12396 3 12395 12683 12397 3 12685 12397 12683 3 12396 12398 12684 3 12686 12684 12398 3 12397 
12685 12399 3 12687 12399 12685 3 12398 12400 12686 3 12688 12686 12400 3 12399 12687 12401 3 12689 12401 12687 3 12400 12402 12688 3 12690 12688 12402 3 12401 12689 12403 3 12691 12403 12689 3 12402 12404 12690 3 12692 12690 12404 3 12403 12691 12405 3 12693 12405 12691 3 12404 12406 12692 3 12694 12692 12406 3 12405 12693 12407 3 12695 12407 12693 3 12406 12408 12694 3 12696 12694 12408 3 12407 12695 12409 3 12697 12409 12695 3 12408 12410 12696 3 12698 12696 12410 3 12409 12697 12411 3 12699 12411 12697 3 12410 12412 12698 3 12700 12698 12412 3 12411 12699 12413 3 12701 12413 12699 3 12412 12414 12700 3 12702 12700 12414 3 12413 12701 12415 3 12703 12415 12701 3 12414 12416 12702 3 12704 12702 12416 3 12415 12703 12419 3 12707 12419 12703 3 12416 12417 12704 3 12705 12704 12417 3 12417 12420 12705 3 12708 12705 12420 3 12418 12419 12706 3 12707 12706 12419 3 12418 12706 12423 3 12711 12423 12706 3 12420 12421 12708 3 12709 12708 12421 3 12421 12424 12709 3 12712 12709 12424 3 12422 12423 12710 3 12711 12710 12423 3 12422 12710 12780 3 12422 12780 12492 3 12424 12425 12712 3 12713 12712 12425 3 12425 12426 12713 3 12714 12713 12426 3 12426 12427 12714 3 12715 12714 12427 3 12427 12428 12715 3 12716 12715 12428 3 12428 12429 12716 3 12717 12716 12429 3 12429 12430 12717 3 12718 12717 12430 3 12430 12431 12718 3 12719 12718 12431 3 12431 12432 12719 3 12720 12719 12432 3 12432 12433 12720 3 12721 12720 12433 3 12433 12434 12721 3 12722 12721 12434 3 12434 12435 12722 3 12723 12722 12435 3 12435 12436 12723 3 12724 12723 12436 3 12436 12437 12724 3 12725 12724 12437 3 12437 12438 12725 3 12726 12725 12438 3 12438 12439 12726 3 12727 12726 12439 3 12439 12440 12727 3 12728 12727 12440 3 12440 12441 12728 3 12729 12728 12441 3 12441 12442 12729 3 12730 12729 12442 3 12442 12443 12730 3 12731 12730 12443 3 12443 12444 12731 3 12732 12731 12444 3 12444 12445 12732 3 12733 12732 12445 3 12445 12446 12733 3 12734 12733 12446 3 12446 12447 12734 3 12735 12734 12447 3 12447 
12448 12736 3 12447 12736 12735 3 12448 12449 12737 3 12448 12737 12736 3 12449 12450 12738 3 12449 12738 12737 3 12450 12451 12739 3 12450 12739 12738 3 12451 12452 12740 3 12451 12740 12739 3 12452 12453 12741 3 12452 12741 12740 3 12453 12454 12742 3 12453 12742 12741 3 12454 12455 12743 3 12454 12743 12742 3 12455 12456 12744 3 12455 12744 12743 3 12456 12457 12745 3 12456 12745 12744 3 12457 12458 12746 3 12457 12746 12745 3 12458 12459 12747 3 12458 12747 12746 3 12459 12460 12748 3 12459 12748 12747 3 12460 12461 12749 3 12460 12749 12748 3 12461 12462 12750 3 12461 12750 12749 3 12462 12463 12751 3 12462 12751 12750 3 12463 12464 12752 3 12463 12752 12751 3 12464 12465 12753 3 12464 12753 12752 3 12465 12466 12754 3 12465 12754 12753 3 12466 12467 12755 3 12466 12755 12754 3 12467 12468 12756 3 12467 12756 12755 3 12468 12469 12757 3 12468 12757 12756 3 12469 12470 12758 3 12469 12758 12757 3 12470 12471 12759 3 12470 12759 12758 3 12471 12472 12760 3 12471 12760 12759 3 12472 12473 12761 3 12472 12761 12760 3 12473 12474 12762 3 12473 12762 12761 3 12474 12475 12763 3 12474 12763 12762 3 12475 12476 12764 3 12475 12764 12763 3 12476 12477 12765 3 12476 12765 12764 3 12477 12478 12766 3 12477 12766 12765 3 12478 12479 12767 3 12478 12767 12766 3 12479 12480 12768 3 12479 12768 12767 3 12480 12481 12769 3 12480 12769 12768 3 12481 12482 12770 3 12481 12770 12769 3 12482 12483 12771 3 12482 12771 12770 3 12483 12484 12772 3 12483 12772 12771 3 12484 12485 12773 3 12484 12773 12772 3 12485 12486 12774 3 12485 12774 12773 3 12486 12487 12775 3 12486 12775 12774 3 12487 12488 12776 3 12487 12776 12775 3 12488 12489 12777 3 12488 12777 12776 3 12489 12490 12778 3 12489 12778 12777 3 12490 12491 12779 3 12490 12779 12778 3 12491 12492 12780 3 12491 12780 12779 3 12493 12781 12494 3 12782 12494 12781 3 12493 12563 12781 3 12851 12781 12563 3 12494 12782 12495 3 12783 12495 12782 3 12495 12783 12496 3 12784 12496 12783 3 12496 12784 12497 3 12785 12497 12784 3 12497 
12785 12498 3 12786 12498 12785 3 12498 12786 12499 3 12787 12499 12786 3 12499 12787 12500 3 12788 12500 12787 3 12500 12788 12501 3 12789 12501 12788 3 12501 12789 12502 3 12790 12502 12789 3 12502 12790 12503 3 12791 12503 12790 3 12503 12791 12504 3 12792 12504 12791 3 12504 12792 12505 3 12793 12505 12792 3 12505 12793 12506 3 12794 12506 12793 3 12506 12794 12507 3 12795 12507 12794 3 12507 12795 12508 3 12796 12508 12795 3 12508 12796 12509 3 12797 12509 12796 3 12509 12797 12510 3 12798 12510 12797 3 12510 12798 12511 3 12799 12511 12798 3 12511 12799 12512 3 12800 12512 12799 3 12512 12800 12513 3 12801 12513 12800 3 12513 12801 12514 3 12802 12514 12801 3 12514 12802 12515 3 12803 12515 12802 3 12515 12803 12516 3 12804 12516 12803 3 12516 12804 12517 3 12805 12517 12804 3 12517 12805 12518 3 12806 12518 12805 3 12518 12806 12519 3 12807 12519 12806 3 12519 12807 12520 3 12808 12520 12807 3 12520 12808 12521 3 12809 12521 12808 3 12521 12809 12522 3 12810 12522 12809 3 12522 12810 12523 3 12811 12523 12810 3 12523 12811 12524 3 12812 12524 12811 3 12524 12812 12525 3 12813 12525 12812 3 12525 12813 12526 3 12814 12526 12813 3 12526 12814 12527 3 12815 12527 12814 3 12527 12815 12528 3 12816 12528 12815 3 12528 12816 12529 3 12817 12529 12816 3 12529 12817 12530 3 12818 12530 12817 3 12530 12818 12531 3 12819 12531 12818 3 12531 12819 12532 3 12820 12532 12819 3 12532 12820 12533 3 12821 12533 12820 3 12533 12821 12534 3 12822 12534 12821 3 12534 12822 12535 3 12823 12535 12822 3 12535 12823 12536 3 12824 12536 12823 3 12536 12824 12537 3 12825 12537 12824 3 12537 12825 12538 3 12826 12538 12825 3 12538 12826 12539 3 12827 12539 12826 3 12539 12827 12540 3 12828 12540 12827 3 12540 12828 12541 3 12829 12541 12828 3 12541 12829 12542 3 12830 12542 12829 3 12542 12830 12543 3 12831 12543 12830 3 12543 12831 12544 3 12832 12544 12831 3 12544 12832 12545 3 12833 12545 12832 3 12545 12833 12546 3 12834 12546 12833 3 12546 12834 12547 3 12835 12547 12834 3 12547 
12835 12548 3 12836 12548 12835 3 12548 12836 12549 3 12837 12549 12836 3 12549 12837 12550 3 12838 12550 12837 3 12550 12838 12551 3 12839 12551 12838 3 12551 12839 12552 3 12840 12552 12839 3 12552 12840 12553 3 12841 12553 12840 3 12553 12841 12554 3 12842 12554 12841 3 12554 12842 12555 3 12843 12555 12842 3 12555 12843 12556 3 12844 12556 12843 3 12556 12844 12557 3 12845 12557 12844 3 12557 12845 12558 3 12846 12558 12845 3 12558 12846 12847 3 12558 12847 12559 3 12559 12847 12848 3 12559 12848 12560 3 12560 12848 12849 3 12560 12849 12561 3 12561 12849 12852 3 12561 12852 12564 3 12562 12850 12851 3 12562 12851 12563 3 12562 12567 12850 3 12855 12850 12567 3 12564 12852 12853 3 12564 12853 12565 3 12565 12853 12856 3 12565 12856 12568 3 12566 12854 12855 3 12566 12855 12567 3 12566 12570 12854 3 12858 12854 12570 3 12568 12856 12857 3 12568 12857 12569 3 12569 12857 12859 3 12569 12859 12571 3 12570 12572 12858 3 12860 12858 12572 3 12571 12859 12861 3 12571 12861 12573 3 12572 12574 12860 3 12862 12860 12574 3 12573 12861 12863 3 12573 12863 12575 3 12574 12576 12862 3 12864 12862 12576 3 12575 12863 12865 3 12575 12865 12577 3 12576 12578 12864 3 12866 12864 12578 3 12577 12865 12867 3 12577 12867 12579 3 12578 12580 12866 3 12868 12866 12580 3 12579 12867 12869 3 12579 12869 12581 3 12580 12582 12868 3 12870 12868 12582 3 12581 12869 12871 3 12581 12871 12583 3 12582 12584 12870 3 12872 12870 12584 3 12583 12871 12873 3 12583 12873 12585 3 12584 12586 12874 3 12584 12874 12872 3 12585 12873 12875 3 12585 12875 12587 3 12586 12588 12876 3 12586 12876 12874 3 12587 12875 12877 3 12587 12877 12589 3 12588 12590 12878 3 12588 12878 12876 3 12589 12877 12879 3 12589 12879 12591 3 12590 12592 12880 3 12590 12880 12878 3 12591 12879 12881 3 12591 12881 12593 3 12592 12594 12882 3 12592 12882 12880 3 12593 12881 12883 3 12593 12883 12595 3 12594 12596 12884 3 12594 12884 12882 3 12595 12883 12885 3 12595 12885 12597 3 12596 12598 12886 3 12596 12886 12884 3 12597 
12885 12887 3 12597 12887 12599 3 12598 12600 12888 3 12598 12888 12886 3 12599 12887 12889 3 12599 12889 12601 3 12600 12602 12890 3 12600 12890 12888 3 12601 12889 12891 3 12601 12891 12603 3 12602 12604 12892 3 12602 12892 12890 3 12603 12891 12893 3 12603 12893 12605 3 12604 12606 12894 3 12604 12894 12892 3 12605 12893 12895 3 12605 12895 12607 3 12606 12608 12896 3 12606 12896 12894 3 12607 12895 12897 3 12607 12897 12609 3 12608 12610 12898 3 12608 12898 12896 3 12609 12897 12899 3 12609 12899 12611 3 12610 12612 12900 3 12610 12900 12898 3 12611 12899 12901 3 12611 12901 12613 3 12612 12614 12902 3 12612 12902 12900 3 12613 12901 12903 3 12613 12903 12615 3 12614 12616 12904 3 12614 12904 12902 3 12615 12903 12905 3 12615 12905 12617 3 12616 12618 12906 3 12616 12906 12904 3 12617 12905 12907 3 12617 12907 12619 3 12618 12620 12908 3 12618 12908 12906 3 12619 12907 12909 3 12619 12909 12621 3 12620 12622 12910 3 12620 12910 12908 3 12621 12909 12911 3 12621 12911 12623 3 12622 12624 12912 3 12622 12912 12910 3 12623 12911 12913 3 12623 12913 12625 3 12624 12626 12914 3 12624 12914 12912 3 12625 12913 12915 3 12625 12915 12627 3 12626 12628 12916 3 12626 12916 12914 3 12627 12915 12629 3 12917 12629 12915 3 12628 12630 12918 3 12628 12918 12916 3 12629 12917 12631 3 12919 12631 12917 3 12630 12632 12920 3 12630 12920 12918 3 12631 12919 12633 3 12921 12633 12919 3 12632 12634 12922 3 12632 12922 12920 3 12633 12921 12635 3 12923 12635 12921 3 12634 12636 12924 3 12634 12924 12922 3 12635 12923 12637 3 12925 12637 12923 3 12636 12638 12926 3 12636 12926 12924 3 12637 12925 12639 3 12927 12639 12925 3 12638 12640 12928 3 12638 12928 12926 3 12639 12927 12641 3 12929 12641 12927 3 12640 12642 12930 3 12640 12930 12928 3 12641 12929 12643 3 12931 12643 12929 3 12642 12644 12932 3 12642 12932 12930 3 12643 12931 12645 3 12933 12645 12931 3 12644 12646 12934 3 12644 12934 12932 3 12645 12933 12647 3 12935 12647 12933 3 12646 12648 12936 3 12646 12936 12934 3 12647 
12935 12649 3 12937 12649 12935 3 12648 12650 12938 3 12648 12938 12936 3 12649 12937 12651 3 12939 12651 12937 3 12650 12652 12940 3 12650 12940 12938 3 12651 12939 12653 3 12941 12653 12939 3 12652 12654 12942 3 12652 12942 12940 3 12653 12941 12655 3 12943 12655 12941 3 12654 12656 12942 3 12944 12942 12656 3 12655 12943 12657 3 12945 12657 12943 3 12656 12658 12944 3 12946 12944 12658 3 12657 12945 12659 3 12947 12659 12945 3 12658 12660 12946 3 12948 12946 12660 3 12659 12947 12661 3 12949 12661 12947 3 12660 12662 12948 3 12950 12948 12662 3 12661 12949 12663 3 12951 12663 12949 3 12662 12664 12950 3 12952 12950 12664 3 12663 12951 12665 3 12953 12665 12951 3 12664 12666 12952 3 12954 12952 12666 3 12665 12953 12667 3 12955 12667 12953 3 12666 12668 12954 3 12956 12954 12668 3 12667 12955 12669 3 12957 12669 12955 3 12668 12670 12956 3 12958 12956 12670 3 12669 12957 12671 3 12959 12671 12957 3 12670 12672 12958 3 12960 12958 12672 3 12671 12959 12673 3 12961 12673 12959 3 12672 12674 12960 3 12962 12960 12674 3 12673 12961 12675 3 12963 12675 12961 3 12674 12676 12962 3 12964 12962 12676 3 12675 12963 12677 3 12965 12677 12963 3 12676 12678 12964 3 12966 12964 12678 3 12677 12965 12679 3 12967 12679 12965 3 12678 12680 12966 3 12968 12966 12680 3 12679 12967 12681 3 12969 12681 12967 3 12680 12682 12968 3 12970 12968 12682 3 12681 12969 12683 3 12971 12683 12969 3 12682 12684 12970 3 12972 12970 12684 3 12683 12971 12685 3 12973 12685 12971 3 12684 12686 12972 3 12974 12972 12686 3 12685 12973 12687 3 12975 12687 12973 3 12686 12688 12974 3 12976 12974 12688 3 12687 12975 12689 3 12977 12689 12975 3 12688 12690 12976 3 12978 12976 12690 3 12689 12977 12691 3 12979 12691 12977 3 12690 12692 12978 3 12980 12978 12692 3 12691 12979 12693 3 12981 12693 12979 3 12692 12694 12980 3 12982 12980 12694 3 12693 12981 12695 3 12983 12695 12981 3 12694 12696 12982 3 12984 12982 12696 3 12695 12983 12697 3 12985 12697 12983 3 12696 12698 12984 3 12986 12984 12698 3 12697 
12985 12987 3 12697 12987 12699 3 12698 12700 12986 3 12988 12986 12700 3 12699 12987 12989 3 12699 12989 12701 3 12700 12702 12988 3 12990 12988 12702 3 12701 12989 12991 3 12701 12991 12703 3 12702 12704 12990 3 12992 12990 12704 3 12703 12991 12995 3 12703 12995 12707 3 12704 12705 12992 3 12993 12992 12705 3 12705 12708 12993 3 12996 12993 12708 3 12706 12707 12994 3 12995 12994 12707 3 12706 12994 12999 3 12706 12999 12711 3 12708 12709 12996 3 12997 12996 12709 3 12709 12712 12997 3 13000 12997 12712 3 12710 12711 12998 3 12999 12998 12711 3 12710 12998 13068 3 12710 13068 12780 3 12712 12713 13000 3 13001 13000 12713 3 12713 12714 13001 3 13002 13001 12714 3 12714 12715 13002 3 13003 13002 12715 3 12715 12716 13003 3 13004 13003 12716 3 12716 12717 13004 3 13005 13004 12717 3 12717 12718 13005 3 13006 13005 12718 3 12718 12719 13006 3 13007 13006 12719 3 12719 12720 13007 3 13008 13007 12720 3 12720 12721 13008 3 13009 13008 12721 3 12721 12722 13009 3 13010 13009 12722 3 12722 12723 13010 3 13011 13010 12723 3 12723 12724 13012 3 12723 13012 13011 3 12724 12725 13013 3 12724 13013 13012 3 12725 12726 13014 3 12725 13014 13013 3 12726 12727 13015 3 12726 13015 13014 3 12727 12728 13016 3 12727 13016 13015 3 12728 12729 13017 3 12728 13017 13016 3 12729 12730 13018 3 12729 13018 13017 3 12730 12731 13019 3 12730 13019 13018 3 12731 12732 13020 3 12731 13020 13019 3 12732 12733 13021 3 12732 13021 13020 3 12733 12734 13022 3 12733 13022 13021 3 12734 12735 13023 3 12734 13023 13022 3 12735 12736 13024 3 12735 13024 13023 3 12736 12737 13025 3 12736 13025 13024 3 12737 12738 13026 3 12737 13026 13025 3 12738 12739 13027 3 12738 13027 13026 3 12739 12740 13028 3 12739 13028 13027 3 12740 12741 13029 3 12740 13029 13028 3 12741 12742 13030 3 12741 13030 13029 3 12742 12743 13031 3 12742 13031 13030 3 12743 12744 13032 3 12743 13032 13031 3 12744 12745 13033 3 12744 13033 13032 3 12745 12746 13034 3 12745 13034 13033 3 12746 12747 13035 3 12746 13035 13034 3 12747 
12748 13036 3 12747 13036 13035 3 12748 12749 13037 3 12748 13037 13036 3 12749 12750 13038 3 12749 13038 13037 3 12750 12751 13039 3 12750 13039 13038 3 12751 12752 13040 3 12751 13040 13039 3 12752 12753 13041 3 12752 13041 13040 3 12753 12754 13042 3 12753 13042 13041 3 12754 12755 13043 3 12754 13043 13042 3 12755 12756 13044 3 12755 13044 13043 3 12756 12757 13045 3 12756 13045 13044 3 12757 12758 13046 3 12757 13046 13045 3 12758 12759 13047 3 12758 13047 13046 3 12759 12760 13048 3 12759 13048 13047 3 12760 12761 13049 3 12760 13049 13048 3 12761 12762 13050 3 12761 13050 13049 3 12762 12763 13051 3 12762 13051 13050 3 12763 12764 13052 3 12763 13052 13051 3 12764 12765 13053 3 12764 13053 13052 3 12765 12766 13054 3 12765 13054 13053 3 12766 12767 13055 3 12766 13055 13054 3 12767 12768 13056 3 12767 13056 13055 3 12768 12769 13057 3 12768 13057 13056 3 12769 12770 13058 3 12769 13058 13057 3 12770 12771 13059 3 12770 13059 13058 3 12771 12772 13060 3 12771 13060 13059 3 12772 12773 13061 3 12772 13061 13060 3 12773 12774 13062 3 12773 13062 13061 3 12774 12775 13063 3 12774 13063 13062 3 12775 12776 13064 3 12775 13064 13063 3 12776 12777 13065 3 12776 13065 13064 3 12777 12778 13066 3 12777 13066 13065 3 12778 12779 13067 3 12778 13067 13066 3 12779 12780 13068 3 12779 13068 13067 3 12781 13069 12782 3 13070 12782 13069 3 12781 12851 13069 3 13139 13069 12851 3 12782 13070 12783 3 13071 12783 13070 3 12783 13071 12784 3 13072 12784 13071 3 12784 13072 12785 3 13073 12785 13072 3 12785 13073 12786 3 13074 12786 13073 3 12786 13074 12787 3 13075 12787 13074 3 12787 13075 12788 3 13076 12788 13075 3 12788 13076 12789 3 13077 12789 13076 3 12789 13077 12790 3 13078 12790 13077 3 12790 13078 12791 3 13079 12791 13078 3 12791 13079 12792 3 13080 12792 13079 3 12792 13080 12793 3 13081 12793 13080 3 12793 13081 12794 3 13082 12794 13081 3 12794 13082 12795 3 13083 12795 13082 3 12795 13083 12796 3 13084 12796 13083 3 12796 13084 12797 3 13085 12797 13084 3 12797 
13085 12798 3 13086 12798 13085 3 12798 13086 12799 3 13087 12799 13086 3 12799 13087 12800 3 13088 12800 13087 3 12800 13088 12801 3 13089 12801 13088 3 12801 13089 12802 3 13090 12802 13089 3 12802 13090 12803 3 13091 12803 13090 3 12803 13091 12804 3 13092 12804 13091 3 12804 13092 12805 3 13093 12805 13092 3 12805 13093 12806 3 13094 12806 13093 3 12806 13094 12807 3 13095 12807 13094 3 12807 13095 12808 3 13096 12808 13095 3 12808 13096 12809 3 13097 12809 13096 3 12809 13097 12810 3 13098 12810 13097 3 12810 13098 12811 3 13099 12811 13098 3 12811 13099 12812 3 13100 12812 13099 3 12812 13100 12813 3 13101 12813 13100 3 12813 13101 12814 3 13102 12814 13101 3 12814 13102 12815 3 13103 12815 13102 3 12815 13103 12816 3 13104 12816 13103 3 12816 13104 12817 3 13105 12817 13104 3 12817 13105 12818 3 13106 12818 13105 3 12818 13106 12819 3 13107 12819 13106 3 12819 13107 12820 3 13108 12820 13107 3 12820 13108 12821 3 13109 12821 13108 3 12821 13109 12822 3 13110 12822 13109 3 12822 13110 12823 3 13111 12823 13110 3 12823 13111 12824 3 13112 12824 13111 3 12824 13112 12825 3 13113 12825 13112 3 12825 13113 12826 3 13114 12826 13113 3 12826 13114 12827 3 13115 12827 13114 3 12827 13115 12828 3 13116 12828 13115 3 12828 13116 12829 3 13117 12829 13116 3 12829 13117 12830 3 13118 12830 13117 3 12830 13118 12831 3 13119 12831 13118 3 12831 13119 12832 3 13120 12832 13119 3 12832 13120 12833 3 13121 12833 13120 3 12833 13121 12834 3 13122 12834 13121 3 12834 13122 12835 3 13123 12835 13122 3 12835 13123 12836 3 13124 12836 13123 3 12836 13124 12837 3 13125 12837 13124 3 12837 13125 13126 3 12837 13126 12838 3 12838 13126 13127 3 12838 13127 12839 3 12839 13127 13128 3 12839 13128 12840 3 12840 13128 13129 3 12840 13129 12841 3 12841 13129 13130 3 12841 13130 12842 3 12842 13130 13131 3 12842 13131 12843 3 12843 13131 13132 3 12843 13132 12844 3 12844 13132 13133 3 12844 13133 12845 3 12845 13133 13134 3 12845 13134 12846 3 12846 13134 13135 3 12846 13135 12847 3 12847 
13135 13136 3 12847 13136 12848 3 12848 13136 13137 3 12848 13137 12849 3 12849 13137 13140 3 12849 13140 12852 3 12850 13138 13139 3 12850 13139 12851 3 12850 12855 13138 3 13143 13138 12855 3 12852 13140 13141 3 12852 13141 12853 3 12853 13141 13144 3 12853 13144 12856 3 12854 13142 13143 3 12854 13143 12855 3 12854 12858 13142 3 13146 13142 12858 3 12856 13144 13145 3 12856 13145 12857 3 12857 13145 13147 3 12857 13147 12859 3 12858 12860 13146 3 13148 13146 12860 3 12859 13147 13149 3 12859 13149 12861 3 12860 12862 13148 3 13150 13148 12862 3 12861 13149 13151 3 12861 13151 12863 3 12862 12864 13152 3 12862 13152 13150 3 12863 13151 13153 3 12863 13153 12865 3 12864 12866 13154 3 12864 13154 13152 3 12865 13153 13155 3 12865 13155 12867 3 12866 12868 13156 3 12866 13156 13154 3 12867 13155 13157 3 12867 13157 12869 3 12868 12870 13158 3 12868 13158 13156 3 12869 13157 13159 3 12869 13159 12871 3 12870 12872 13160 3 12870 13160 13158 3 12871 13159 13161 3 12871 13161 12873 3 12872 12874 13162 3 12872 13162 13160 3 12873 13161 13163 3 12873 13163 12875 3 12874 12876 13164 3 12874 13164 13162 3 12875 13163 13165 3 12875 13165 12877 3 12876 12878 13166 3 12876 13166 13164 3 12877 13165 13167 3 12877 13167 12879 3 12878 12880 13168 3 12878 13168 13166 3 12879 13167 13169 3 12879 13169 12881 3 12880 12882 13170 3 12880 13170 13168 3 12881 13169 13171 3 12881 13171 12883 3 12882 12884 13172 3 12882 13172 13170 3 12883 13171 13173 3 12883 13173 12885 3 12884 12886 13174 3 12884 13174 13172 3 12885 13173 13175 3 12885 13175 12887 3 12886 12888 13176 3 12886 13176 13174 3 12887 13175 13177 3 12887 13177 12889 3 12888 12890 13178 3 12888 13178 13176 3 12889 13177 13179 3 12889 13179 12891 3 12890 12892 13180 3 12890 13180 13178 3 12891 13179 13181 3 12891 13181 12893 3 12892 12894 13182 3 12892 13182 13180 3 12893 13181 13183 3 12893 13183 12895 3 12894 12896 13184 3 12894 13184 13182 3 12895 13183 13185 3 12895 13185 12897 3 12896 12898 13186 3 12896 13186 13184 3 12897 
13185 13187 3 12897 13187 12899 3 12898 12900 13188 3 12898 13188 13186 3 12899 13187 13189 3 12899 13189 12901 3 12900 12902 13190 3 12900 13190 13188 3 12901 13189 13191 3 12901 13191 12903 3 12902 12904 13192 3 12902 13192 13190 3 12903 13191 13193 3 12903 13193 12905 3 12904 12906 13194 3 12904 13194 13192 3 12905 13193 13195 3 12905 13195 12907 3 12906 12908 13196 3 12906 13196 13194 3 12907 13195 12909 3 13197 12909 13195 3 12908 12910 13198 3 12908 13198 13196 3 12909 13197 12911 3 13199 12911 13197 3 12910 12912 13200 3 12910 13200 13198 3 12911 13199 12913 3 13201 12913 13199 3 12912 12914 13202 3 12912 13202 13200 3 12913 13201 12915 3 13203 12915 13201 3 12914 12916 13204 3 12914 13204 13202 3 12915 13203 12917 3 13205 12917 13203 3 12916 12918 13206 3 12916 13206 13204 3 12917 13205 12919 3 13207 12919 13205 3 12918 12920 13208 3 12918 13208 13206 3 12919 13207 12921 3 13209 12921 13207 3 12920 12922 13210 3 12920 13210 13208 3 12921 13209 12923 3 13211 12923 13209 3 12922 12924 13212 3 12922 13212 13210 3 12923 13211 12925 3 13213 12925 13211 3 12924 12926 13214 3 12924 13214 13212 3 12925 13213 12927 3 13215 12927 13213 3 12926 12928 13216 3 12926 13216 13214 3 12927 13215 12929 3 13217 12929 13215 3 12928 12930 13218 3 12928 13218 13216 3 12929 13217 12931 3 13219 12931 13217 3 12930 12932 13220 3 12930 13220 13218 3 12931 13219 12933 3 13221 12933 13219 3 12932 12934 13220 3 13222 13220 12934 3 12933 13221 12935 3 13223 12935 13221 3 12934 12936 13222 3 13224 13222 12936 3 12935 13223 12937 3 13225 12937 13223 3 12936 12938 13224 3 13226 13224 12938 3 12937 13225 12939 3 13227 12939 13225 3 12938 12940 13226 3 13228 13226 12940 3 12939 13227 12941 3 13229 12941 13227 3 12940 12942 13228 3 13230 13228 12942 3 12941 13229 12943 3 13231 12943 13229 3 12942 12944 13230 3 13232 13230 12944 3 12943 13231 12945 3 13233 12945 13231 3 12944 12946 13232 3 13234 13232 12946 3 12945 13233 12947 3 13235 12947 13233 3 12946 12948 13234 3 13236 13234 12948 3 12947 
13235 12949 3 13237 12949 13235 3 12948 12950 13236 3 13238 13236 12950 3 12949 13237 12951 3 13239 12951 13237 3 12950 12952 13238 3 13240 13238 12952 3 12951 13239 12953 3 13241 12953 13239 3 12952 12954 13240 3 13242 13240 12954 3 12953 13241 12955 3 13243 12955 13241 3 12954 12956 13242 3 13244 13242 12956 3 12955 13243 12957 3 13245 12957 13243 3 12956 12958 13244 3 13246 13244 12958 3 12957 13245 12959 3 13247 12959 13245 3 12958 12960 13246 3 13248 13246 12960 3 12959 13247 12961 3 13249 12961 13247 3 12960 12962 13248 3 13250 13248 12962 3 12961 13249 12963 3 13251 12963 13249 3 12962 12964 13250 3 13252 13250 12964 3 12963 13251 12965 3 13253 12965 13251 3 12964 12966 13252 3 13254 13252 12966 3 12965 13253 12967 3 13255 12967 13253 3 12966 12968 13254 3 13256 13254 12968 3 12967 13255 12969 3 13257 12969 13255 3 12968 12970 13256 3 13258 13256 12970 3 12969 13257 12971 3 13259 12971 13257 3 12970 12972 13258 3 13260 13258 12972 3 12971 13259 12973 3 13261 12973 13259 3 12972 12974 13260 3 13262 13260 12974 3 12973 13261 12975 3 13263 12975 13261 3 12974 12976 13262 3 13264 13262 12976 3 12975 13263 12977 3 13265 12977 13263 3 12976 12978 13264 3 13266 13264 12978 3 12977 13265 12979 3 13267 12979 13265 3 12978 12980 13266 3 13268 13266 12980 3 12979 13267 13269 3 12979 13269 12981 3 12980 12982 13268 3 13270 13268 12982 3 12981 13269 13271 3 12981 13271 12983 3 12982 12984 13270 3 13272 13270 12984 3 12983 13271 13273 3 12983 13273 12985 3 12984 12986 13272 3 13274 13272 12986 3 12985 13273 13275 3 12985 13275 12987 3 12986 12988 13274 3 13276 13274 12988 3 12987 13275 13277 3 12987 13277 12989 3 12988 12990 13276 3 13278 13276 12990 3 12989 13277 13279 3 12989 13279 12991 3 12990 12992 13278 3 13280 13278 12992 3 12991 13279 13283 3 12991 13283 12995 3 12992 12993 13280 3 13281 13280 12993 3 12993 12996 13281 3 13284 13281 12996 3 12994 12995 13282 3 13283 13282 12995 3 12994 13282 13287 3 12994 13287 12999 3 12996 12997 13284 3 13285 13284 12997 3 12997 
13000 13285 3 13288 13285 13000 3 12998 12999 13286 3 13287 13286 12999 3 12998 13286 13356 3 12998 13356 13068 3 13000 13001 13288 3 13289 13288 13001 3 13001 13002 13289 3 13290 13289 13002 3 13002 13003 13291 3 13002 13291 13290 3 13003 13004 13292 3 13003 13292 13291 3 13004 13005 13293 3 13004 13293 13292 3 13005 13006 13294 3 13005 13294 13293 3 13006 13007 13295 3 13006 13295 13294 3 13007 13008 13296 3 13007 13296 13295 3 13008 13009 13297 3 13008 13297 13296 3 13009 13010 13298 3 13009 13298 13297 3 13010 13011 13299 3 13010 13299 13298 3 13011 13012 13300 3 13011 13300 13299 3 13012 13013 13301 3 13012 13301 13300 3 13013 13014 13302 3 13013 13302 13301 3 13014 13015 13303 3 13014 13303 13302 3 13015 13016 13304 3 13015 13304 13303 3 13016 13017 13305 3 13016 13305 13304 3 13017 13018 13306 3 13017 13306 13305 3 13018 13019 13307 3 13018 13307 13306 3 13019 13020 13308 3 13019 13308 13307 3 13020 13021 13309 3 13020 13309 13308 3 13021 13022 13310 3 13021 13310 13309 3 13022 13023 13311 3 13022 13311 13310 3 13023 13024 13312 3 13023 13312 13311 3 13024 13025 13313 3 13024 13313 13312 3 13025 13026 13314 3 13025 13314 13313 3 13026 13027 13315 3 13026 13315 13314 3 13027 13028 13316 3 13027 13316 13315 3 13028 13029 13317 3 13028 13317 13316 3 13029 13030 13318 3 13029 13318 13317 3 13030 13031 13319 3 13030 13319 13318 3 13031 13032 13320 3 13031 13320 13319 3 13032 13033 13321 3 13032 13321 13320 3 13033 13034 13322 3 13033 13322 13321 3 13034 13035 13323 3 13034 13323 13322 3 13035 13036 13324 3 13035 13324 13323 3 13036 13037 13325 3 13036 13325 13324 3 13037 13038 13326 3 13037 13326 13325 3 13038 13039 13327 3 13038 13327 13326 3 13039 13040 13328 3 13039 13328 13327 3 13040 13041 13329 3 13040 13329 13328 3 13041 13042 13330 3 13041 13330 13329 3 13042 13043 13331 3 13042 13331 13330 3 13043 13044 13332 3 13043 13332 13331 3 13044 13045 13333 3 13044 13333 13332 3 13045 13046 13334 3 13045 13334 13333 3 13046 13047 13335 3 13046 13335 13334 3 13047 
13048 13336 3 13047 13336 13335 3 13048 13049 13337 3 13048 13337 13336 3 13049 13050 13338 3 13049 13338 13337 3 13050 13051 13339 3 13050 13339 13338 3 13051 13052 13340 3 13051 13340 13339 3 13052 13053 13341 3 13052 13341 13340 3 13053 13054 13342 3 13053 13342 13341 3 13054 13055 13343 3 13054 13343 13342 3 13055 13056 13344 3 13055 13344 13343 3 13056 13057 13345 3 13056 13345 13344 3 13057 13058 13346 3 13057 13346 13345 3 13058 13059 13347 3 13058 13347 13346 3 13059 13060 13348 3 13059 13348 13347 3 13060 13061 13349 3 13060 13349 13348 3 13061 13062 13350 3 13061 13350 13349 3 13062 13063 13351 3 13062 13351 13350 3 13063 13064 13352 3 13063 13352 13351 3 13064 13065 13353 3 13064 13353 13352 3 13065 13066 13354 3 13065 13354 13353 3 13066 13067 13355 3 13066 13355 13354 3 13067 13068 13356 3 13067 13356 13355 3 13069 13357 13070 3 13358 13070 13357 3 13069 13139 13357 3 13427 13357 13139 3 13070 13358 13071 3 13359 13071 13358 3 13071 13359 13072 3 13360 13072 13359 3 13072 13360 13073 3 13361 13073 13360 3 13073 13361 13074 3 13362 13074 13361 3 13074 13362 13075 3 13363 13075 13362 3 13075 13363 13076 3 13364 13076 13363 3 13076 13364 13077 3 13365 13077 13364 3 13077 13365 13078 3 13366 13078 13365 3 13078 13366 13079 3 13367 13079 13366 3 13079 13367 13080 3 13368 13080 13367 3 13080 13368 13081 3 13369 13081 13368 3 13081 13369 13082 3 13370 13082 13369 3 13082 13370 13083 3 13371 13083 13370 3 13083 13371 13084 3 13372 13084 13371 3 13084 13372 13085 3 13373 13085 13372 3 13085 13373 13086 3 13374 13086 13373 3 13086 13374 13087 3 13375 13087 13374 3 13087 13375 13088 3 13376 13088 13375 3 13088 13376 13089 3 13377 13089 13376 3 13089 13377 13090 3 13378 13090 13377 3 13090 13378 13091 3 13379 13091 13378 3 13091 13379 13092 3 13380 13092 13379 3 13092 13380 13093 3 13381 13093 13380 3 13093 13381 13094 3 13382 13094 13381 3 13094 13382 13095 3 13383 13095 13382 3 13095 13383 13096 3 13384 13096 13383 3 13096 13384 13097 3 13385 13097 13384 3 13097 
13385 13098 3 13386 13098 13385 3 13098 13386 13099 3 13387 13099 13386 3 13099 13387 13100 3 13388 13100 13387 3 13100 13388 13101 3 13389 13101 13388 3 13101 13389 13102 3 13390 13102 13389 3 13102 13390 13103 3 13391 13103 13390 3 13103 13391 13104 3 13392 13104 13391 3 13104 13392 13105 3 13393 13105 13392 3 13105 13393 13106 3 13394 13106 13393 3 13106 13394 13107 3 13395 13107 13394 3 13107 13395 13108 3 13396 13108 13395 3 13108 13396 13109 3 13397 13109 13396 3 13109 13397 13110 3 13398 13110 13397 3 13110 13398 13111 3 13399 13111 13398 3 13111 13399 13112 3 13400 13112 13399 3 13112 13400 13113 3 13401 13113 13400 3 13113 13401 13114 3 13402 13114 13401 3 13114 13402 13115 3 13403 13115 13402 3 13115 13403 13116 3 13404 13116 13403 3 13116 13404 13117 3 13405 13117 13404 3 13117 13405 13118 3 13406 13118 13405 3 13118 13406 13119 3 13407 13119 13406 3 13119 13407 13408 3 13119 13408 13120 3 13120 13408 13409 3 13120 13409 13121 3 13121 13409 13410 3 13121 13410 13122 3 13122 13410 13411 3 13122 13411 13123 3 13123 13411 13412 3 13123 13412 13124 3 13124 13412 13413 3 13124 13413 13125 3 13125 13413 13414 3 13125 13414 13126 3 13126 13414 13415 3 13126 13415 13127 3 13127 13415 13416 3 13127 13416 13128 3 13128 13416 13417 3 13128 13417 13129 3 13129 13417 13418 3 13129 13418 13130 3 13130 13418 13419 3 13130 13419 13131 3 13131 13419 13420 3 13131 13420 13132 3 13132 13420 13421 3 13132 13421 13133 3 13133 13421 13422 3 13133 13422 13134 3 13134 13422 13423 3 13134 13423 13135 3 13135 13423 13424 3 13135 13424 13136 3 13136 13424 13425 3 13136 13425 13137 3 13137 13425 13428 3 13137 13428 13140 3 13138 13426 13427 3 13138 13427 13139 3 13138 13143 13426 3 13431 13426 13143 3 13140 13428 13429 3 13140 13429 13141 3 13141 13429 13432 3 13141 13432 13144 3 13142 13430 13431 3 13142 13431 13143 3 13142 13146 13434 3 13142 13434 13430 3 13144 13432 13433 3 13144 13433 13145 3 13145 13433 13435 3 13145 13435 13147 3 13146 13148 13436 3 13146 13436 13434 3 13147 
13435 13437 3 13147 13437 13149 3 13148 13150 13438 3 13148 13438 13436 3 13149 13437 13439 3 13149 13439 13151 3 13150 13152 13440 3 13150 13440 13438 3 13151 13439 13441 3 13151 13441 13153 3 13152 13154 13442 3 13152 13442 13440 3 13153 13441 13443 3 13153 13443 13155 3 13154 13156 13444 3 13154 13444 13442 3 13155 13443 13445 3 13155 13445 13157 3 13156 13158 13446 3 13156 13446 13444 3 13157 13445 13447 3 13157 13447 13159 3 13158 13160 13448 3 13158 13448 13446 3 13159 13447 13449 3 13159 13449 13161 3 13160 13162 13450 3 13160 13450 13448 3 13161 13449 13451 3 13161 13451 13163 3 13162 13164 13452 3 13162 13452 13450 3 13163 13451 13453 3 13163 13453 13165 3 13164 13166 13454 3 13164 13454 13452 3 13165 13453 13455 3 13165 13455 13167 3 13166 13168 13456 3 13166 13456 13454 3 13167 13455 13457 3 13167 13457 13169 3 13168 13170 13458 3 13168 13458 13456 3 13169 13457 13459 3 13169 13459 13171 3 13170 13172 13460 3 13170 13460 13458 3 13171 13459 13461 3 13171 13461 13173 3 13172 13174 13462 3 13172 13462 13460 3 13173 13461 13463 3 13173 13463 13175 3 13174 13176 13464 3 13174 13464 13462 3 13175 13463 13465 3 13175 13465 13177 3 13176 13178 13466 3 13176 13466 13464 3 13177 13465 13467 3 13177 13467 13179 3 13178 13180 13468 3 13178 13468 13466 3 13179 13467 13469 3 13179 13469 13181 3 13180 13182 13470 3 13180 13470 13468 3 13181 13469 13471 3 13181 13471 13183 3 13182 13184 13472 3 13182 13472 13470 3 13183 13471 13473 3 13183 13473 13185 3 13184 13186 13474 3 13184 13474 13472 3 13185 13473 13475 3 13185 13475 13187 3 13186 13188 13476 3 13186 13476 13474 3 13187 13475 13477 3 13187 13477 13189 3 13188 13190 13478 3 13188 13478 13476 3 13189 13477 13479 3 13189 13479 13191 3 13190 13192 13480 3 13190 13480 13478 3 13191 13479 13193 3 13481 13193 13479 3 13192 13194 13482 3 13192 13482 13480 3 13193 13481 13195 3 13483 13195 13481 3 13194 13196 13484 3 13194 13484 13482 3 13195 13483 13197 3 13485 13197 13483 3 13196 13198 13486 3 13196 13486 13484 3 13197 
13485 13199 3 13487 13199 13485 3 13198 13200 13488 3 13198 13488 13486 3 13199 13487 13201 3 13489 13201 13487 3 13200 13202 13490 3 13200 13490 13488 3 13201 13489 13203 3 13491 13203 13489 3 13202 13204 13492 3 13202 13492 13490 3 13203 13491 13205 3 13493 13205 13491 3 13204 13206 13494 3 13204 13494 13492 3 13205 13493 13207 3 13495 13207 13493 3 13206 13208 13496 3 13206 13496 13494 3 13207 13495 13209 3 13497 13209 13495 3 13208 13210 13498 3 13208 13498 13496 3 13209 13497 13211 3 13499 13211 13497 3 13210 13212 13500 3 13210 13500 13498 3 13211 13499 13213 3 13501 13213 13499 3 13212 13214 13502 3 13212 13502 13500 3 13213 13501 13215 3 13503 13215 13501 3 13214 13216 13502 3 13504 13502 13216 3 13215 13503 13217 3 13505 13217 13503 3 13216 13218 13504 3 13506 13504 13218 3 13217 13505 13219 3 13507 13219 13505 3 13218 13220 13506 3 13508 13506 13220 3 13219 13507 13221 3 13509 13221 13507 3 13220 13222 13508 3 13510 13508 13222 3 13221 13509 13223 3 13511 13223 13509 3 13222 13224 13510 3 13512 13510 13224 3 13223 13511 13225 3 13513 13225 13511 3 13224 13226 13512 3 13514 13512 13226 3 13225 13513 13227 3 13515 13227 13513 3 13226 13228 13514 3 13516 13514 13228 3 13227 13515 13229 3 13517 13229 13515 3 13228 13230 13516 3 13518 13516 13230 3 13229 13517 13231 3 13519 13231 13517 3 13230 13232 13518 3 13520 13518 13232 3 13231 13519 13233 3 13521 13233 13519 3 13232 13234 13520 3 13522 13520 13234 3 13233 13521 13235 3 13523 13235 13521 3 13234 13236 13522 3 13524 13522 13236 3 13235 13523 13237 3 13525 13237 13523 3 13236 13238 13524 3 13526 13524 13238 3 13237 13525 13239 3 13527 13239 13525 3 13238 13240 13526 3 13528 13526 13240 3 13239 13527 13241 3 13529 13241 13527 3 13240 13242 13528 3 13530 13528 13242 3 13241 13529 13243 3 13531 13243 13529 3 13242 13244 13530 3 13532 13530 13244 3 13243 13531 13245 3 13533 13245 13531 3 13244 13246 13532 3 13534 13532 13246 3 13245 13533 13247 3 13535 13247 13533 3 13246 13248 13534 3 13536 13534 13248 3 13247 
13535 13249 3 13537 13249 13535 3 13248 13250 13536 3 13538 13536 13250 3 13249 13537 13251 3 13539 13251 13537 3 13250 13252 13538 3 13540 13538 13252 3 13251 13539 13253 3 13541 13253 13539 3 13252 13254 13540 3 13542 13540 13254 3 13253 13541 13255 3 13543 13255 13541 3 13254 13256 13542 3 13544 13542 13256 3 13255 13543 13257 3 13545 13257 13543 3 13256 13258 13544 3 13546 13544 13258 3 13257 13545 13259 3 13547 13259 13545 3 13258 13260 13546 3 13548 13546 13260 3 13259 13547 13261 3 13549 13261 13547 3 13260 13262 13548 3 13550 13548 13262 3 13261 13549 13263 3 13551 13263 13549 3 13262 13264 13550 3 13552 13550 13264 3 13263 13551 13553 3 13263 13553 13265 3 13264 13266 13552 3 13554 13552 13266 3 13265 13553 13555 3 13265 13555 13267 3 13266 13268 13554 3 13556 13554 13268 3 13267 13555 13557 3 13267 13557 13269 3 13268 13270 13556 3 13558 13556 13270 3 13269 13557 13559 3 13269 13559 13271 3 13270 13272 13558 3 13560 13558 13272 3 13271 13559 13561 3 13271 13561 13273 3 13272 13274 13560 3 13562 13560 13274 3 13273 13561 13563 3 13273 13563 13275 3 13274 13276 13562 3 13564 13562 13276 3 13275 13563 13565 3 13275 13565 13277 3 13276 13278 13564 3 13566 13564 13278 3 13277 13565 13567 3 13277 13567 13279 3 13278 13280 13566 3 13568 13566 13280 3 13279 13567 13571 3 13279 13571 13283 3 13280 13281 13568 3 13569 13568 13281 3 13281 13284 13569 3 13572 13569 13284 3 13282 13283 13570 3 13571 13570 13283 3 13282 13570 13575 3 13282 13575 13287 3 13284 13285 13573 3 13284 13573 13572 3 13285 13288 13576 3 13285 13576 13573 3 13286 13287 13575 3 13286 13575 13574 3 13286 13574 13644 3 13286 13644 13356 3 13288 13289 13577 3 13288 13577 13576 3 13289 13290 13578 3 13289 13578 13577 3 13290 13291 13579 3 13290 13579 13578 3 13291 13292 13580 3 13291 13580 13579 3 13292 13293 13581 3 13292 13581 13580 3 13293 13294 13582 3 13293 13582 13581 3 13294 13295 13583 3 13294 13583 13582 3 13295 13296 13584 3 13295 13584 13583 3 13296 13297 13585 3 13296 13585 13584 3 13297 
13298 13586 3 13297 13586 13585 3 13298 13299 13587 3 13298 13587 13586 3 13299 13300 13588 3 13299 13588 13587 3 13300 13301 13589 3 13300 13589 13588 3 13301 13302 13590 3 13301 13590 13589 3 13302 13303 13591 3 13302 13591 13590 3 13303 13304 13592 3 13303 13592 13591 3 13304 13305 13593 3 13304 13593 13592 3 13305 13306 13594 3 13305 13594 13593 3 13306 13307 13595 3 13306 13595 13594 3 13307 13308 13596 3 13307 13596 13595 3 13308 13309 13597 3 13308 13597 13596 3 13309 13310 13598 3 13309 13598 13597 3 13310 13311 13599 3 13310 13599 13598 3 13311 13312 13600 3 13311 13600 13599 3 13312 13313 13601 3 13312 13601 13600 3 13313 13314 13602 3 13313 13602 13601 3 13314 13315 13603 3 13314 13603 13602 3 13315 13316 13604 3 13315 13604 13603 3 13316 13317 13605 3 13316 13605 13604 3 13317 13318 13606 3 13317 13606 13605 3 13318 13319 13607 3 13318 13607 13606 3 13319 13320 13608 3 13319 13608 13607 3 13320 13321 13609 3 13320 13609 13608 3 13321 13322 13610 3 13321 13610 13609 3 13322 13323 13611 3 13322 13611 13610 3 13323 13324 13612 3 13323 13612 13611 3 13324 13325 13613 3 13324 13613 13612 3 13325 13326 13614 3 13325 13614 13613 3 13326 13327 13615 3 13326 13615 13614 3 13327 13328 13616 3 13327 13616 13615 3 13328 13329 13617 3 13328 13617 13616 3 13329 13330 13618 3 13329 13618 13617 3 13330 13331 13619 3 13330 13619 13618 3 13331 13332 13620 3 13331 13620 13619 3 13332 13333 13621 3 13332 13621 13620 3 13333 13334 13622 3 13333 13622 13621 3 13334 13335 13623 3 13334 13623 13622 3 13335 13336 13624 3 13335 13624 13623 3 13336 13337 13625 3 13336 13625 13624 3 13337 13338 13626 3 13337 13626 13625 3 13338 13339 13627 3 13338 13627 13626 3 13339 13340 13628 3 13339 13628 13627 3 13340 13341 13629 3 13340 13629 13628 3 13341 13342 13630 3 13341 13630 13629 3 13342 13343 13631 3 13342 13631 13630 3 13343 13344 13632 3 13343 13632 13631 3 13344 13345 13633 3 13344 13633 13632 3 13345 13346 13634 3 13345 13634 13633 3 13346 13347 13635 3 13346 13635 13634 3 13347 
13348 13636 3 13347 13636 13635 3 13348 13349 13637 3 13348 13637 13636 3 13349 13350 13638 3 13349 13638 13637 3 13350 13351 13639 3 13350 13639 13638 3 13351 13352 13640 3 13351 13640 13639 3 13352 13353 13641 3 13352 13641 13640 3 13353 13354 13642 3 13353 13642 13641 3 13354 13355 13643 3 13354 13643 13642 3 13355 13356 13643 3 13644 13643 13356 3 13357 13645 13358 3 13646 13358 13645 3 13357 13427 13645 3 13715 13645 13427 3 13358 13646 13359 3 13647 13359 13646 3 13359 13647 13360 3 13648 13360 13647 3 13360 13648 13361 3 13649 13361 13648 3 13361 13649 13362 3 13650 13362 13649 3 13362 13650 13363 3 13651 13363 13650 3 13363 13651 13364 3 13652 13364 13651 3 13364 13652 13365 3 13653 13365 13652 3 13365 13653 13366 3 13654 13366 13653 3 13366 13654 13367 3 13655 13367 13654 3 13367 13655 13368 3 13656 13368 13655 3 13368 13656 13369 3 13657 13369 13656 3 13369 13657 13370 3 13658 13370 13657 3 13370 13658 13371 3 13659 13371 13658 3 13371 13659 13372 3 13660 13372 13659 3 13372 13660 13373 3 13661 13373 13660 3 13373 13661 13374 3 13662 13374 13661 3 13374 13662 13375 3 13663 13375 13662 3 13375 13663 13376 3 13664 13376 13663 3 13376 13664 13377 3 13665 13377 13664 3 13377 13665 13378 3 13666 13378 13665 3 13378 13666 13379 3 13667 13379 13666 3 13379 13667 13380 3 13668 13380 13667 3 13380 13668 13381 3 13669 13381 13668 3 13381 13669 13382 3 13670 13382 13669 3 13382 13670 13383 3 13671 13383 13670 3 13383 13671 13384 3 13672 13384 13671 3 13384 13672 13385 3 13673 13385 13672 3 13385 13673 13386 3 13674 13386 13673 3 13386 13674 13387 3 13675 13387 13674 3 13387 13675 13388 3 13676 13388 13675 3 13388 13676 13389 3 13677 13389 13676 3 13389 13677 13390 3 13678 13390 13677 3 13390 13678 13391 3 13679 13391 13678 3 13391 13679 13392 3 13680 13392 13679 3 13392 13680 13393 3 13681 13393 13680 3 13393 13681 13394 3 13682 13394 13681 3 13394 13682 13395 3 13683 13395 13682 3 13395 13683 13396 3 13684 13396 13683 3 13396 13684 13397 3 13685 13397 13684 3 13397 
13685 13398 3 13686 13398 13685 3 13398 13686 13399 3 13687 13399 13686 3 13399 13687 13400 3 13688 13400 13687 3 13400 13688 13401 3 13689 13401 13688 3 13401 13689 13402 3 13690 13402 13689 3 13402 13690 13403 3 13691 13403 13690 3 13403 13691 13404 3 13692 13404 13691 3 13404 13692 13405 3 13693 13405 13692 3 13405 13693 13694 3 13405 13694 13406 3 13406 13694 13695 3 13406 13695 13407 3 13407 13695 13696 3 13407 13696 13408 3 13408 13696 13697 3 13408 13697 13409 3 13409 13697 13698 3 13409 13698 13410 3 13410 13698 13699 3 13410 13699 13411 3 13411 13699 13700 3 13411 13700 13412 3 13412 13700 13701 3 13412 13701 13413 3 13413 13701 13702 3 13413 13702 13414 3 13414 13702 13703 3 13414 13703 13415 3 13415 13703 13704 3 13415 13704 13416 3 13416 13704 13705 3 13416 13705 13417 3 13417 13705 13706 3 13417 13706 13418 3 13418 13706 13707 3 13418 13707 13419 3 13419 13707 13708 3 13419 13708 13420 3 13420 13708 13709 3 13420 13709 13421 3 13421 13709 13710 3 13421 13710 13422 3 13422 13710 13711 3 13422 13711 13423 3 13423 13711 13712 3 13423 13712 13424 3 13424 13712 13713 3 13424 13713 13425 3 13425 13713 13716 3 13425 13716 13428 3 13426 13714 13715 3 13426 13715 13427 3 13426 13431 13719 3 13426 13719 13714 3 13428 13716 13717 3 13428 13717 13429 3 13429 13717 13720 3 13429 13720 13432 3 13430 13718 13719 3 13430 13719 13431 3 13430 13434 13722 3 13430 13722 13718 3 13432 13720 13721 3 13432 13721 13433 3 13433 13721 13723 3 13433 13723 13435 3 13434 13436 13724 3 13434 13724 13722 3 13435 13723 13725 3 13435 13725 13437 3 13436 13438 13726 3 13436 13726 13724 3 13437 13725 13727 3 13437 13727 13439 3 13438 13440 13728 3 13438 13728 13726 3 13439 13727 13729 3 13439 13729 13441 3 13440 13442 13730 3 13440 13730 13728 3 13441 13729 13731 3 13441 13731 13443 3 13442 13444 13732 3 13442 13732 13730 3 13443 13731 13733 3 13443 13733 13445 3 13444 13446 13734 3 13444 13734 13732 3 13445 13733 13735 3 13445 13735 13447 3 13446 13448 13736 3 13446 13736 13734 3 13447 
13735 13737 3 13447 13737 13449 3 13448 13450 13738 3 13448 13738 13736 3 13449 13737 13739 3 13449 13739 13451 3 13450 13452 13740 3 13450 13740 13738 3 13451 13739 13741 3 13451 13741 13453 3 13452 13454 13742 3 13452 13742 13740 3 13453 13741 13743 3 13453 13743 13455 3 13454 13456 13744 3 13454 13744 13742 3 13455 13743 13745 3 13455 13745 13457 3 13456 13458 13746 3 13456 13746 13744 3 13457 13745 13747 3 13457 13747 13459 3 13458 13460 13748 3 13458 13748 13746 3 13459 13747 13749 3 13459 13749 13461 3 13460 13462 13750 3 13460 13750 13748 3 13461 13749 13751 3 13461 13751 13463 3 13462 13464 13752 3 13462 13752 13750 3 13463 13751 13753 3 13463 13753 13465 3 13464 13466 13754 3 13464 13754 13752 3 13465 13753 13755 3 13465 13755 13467 3 13466 13468 13756 3 13466 13756 13754 3 13467 13755 13757 3 13467 13757 13469 3 13468 13470 13758 3 13468 13758 13756 3 13469 13757 13759 3 13469 13759 13471 3 13470 13472 13760 3 13470 13760 13758 3 13471 13759 13761 3 13471 13761 13473 3 13472 13474 13762 3 13472 13762 13760 3 13473 13761 13763 3 13473 13763 13475 3 13474 13476 13764 3 13474 13764 13762 3 13475 13763 13765 3 13475 13765 13477 3 13476 13478 13766 3 13476 13766 13764 3 13477 13765 13479 3 13767 13479 13765 3 13478 13480 13768 3 13478 13768 13766 3 13479 13767 13481 3 13769 13481 13767 3 13480 13482 13770 3 13480 13770 13768 3 13481 13769 13483 3 13771 13483 13769 3 13482 13484 13772 3 13482 13772 13770 3 13483 13771 13485 3 13773 13485 13771 3 13484 13486 13774 3 13484 13774 13772 3 13485 13773 13487 3 13775 13487 13773 3 13486 13488 13776 3 13486 13776 13774 3 13487 13775 13489 3 13777 13489 13775 3 13488 13490 13778 3 13488 13778 13776 3 13489 13777 13491 3 13779 13491 13777 3 13490 13492 13780 3 13490 13780 13778 3 13491 13779 13493 3 13781 13493 13779 3 13492 13494 13782 3 13492 13782 13780 3 13493 13781 13495 3 13783 13495 13781 3 13494 13496 13784 3 13494 13784 13782 3 13495 13783 13497 3 13785 13497 13783 3 13496 13498 13786 3 13496 13786 13784 3 13497 
13785 13499 3 13787 13499 13785 3 13498 13500 13786 3 13788 13786 13500 3 13499 13787 13501 3 13789 13501 13787 3 13500 13502 13788 3 13790 13788 13502 3 13501 13789 13503 3 13791 13503 13789 3 13502 13504 13790 3 13792 13790 13504 3 13503 13791 13505 3 13793 13505 13791 3 13504 13506 13792 3 13794 13792 13506 3 13505 13793 13507 3 13795 13507 13793 3 13506 13508 13794 3 13796 13794 13508 3 13507 13795 13509 3 13797 13509 13795 3 13508 13510 13796 3 13798 13796 13510 3 13509 13797 13511 3 13799 13511 13797 3 13510 13512 13798 3 13800 13798 13512 3 13511 13799 13513 3 13801 13513 13799 3 13512 13514 13800 3 13802 13800 13514 3 13513 13801 13515 3 13803 13515 13801 3 13514 13516 13802 3 13804 13802 13516 3 13515 13803 13517 3 13805 13517 13803 3 13516 13518 13804 3 13806 13804 13518 3 13517 13805 13519 3 13807 13519 13805 3 13518 13520 13806 3 13808 13806 13520 3 13519 13807 13521 3 13809 13521 13807 3 13520 13522 13808 3 13810 13808 13522 3 13521 13809 13523 3 13811 13523 13809 3 13522 13524 13810 3 13812 13810 13524 3 13523 13811 13525 3 13813 13525 13811 3 13524 13526 13812 3 13814 13812 13526 3 13525 13813 13527 3 13815 13527 13813 3 13526 13528 13814 3 13816 13814 13528 3 13527 13815 13529 3 13817 13529 13815 3 13528 13530 13816 3 13818 13816 13530 3 13529 13817 13531 3 13819 13531 13817 3 13530 13532 13818 3 13820 13818 13532 3 13531 13819 13533 3 13821 13533 13819 3 13532 13534 13820 3 13822 13820 13534 3 13533 13821 13535 3 13823 13535 13821 3 13534 13536 13822 3 13824 13822 13536 3 13535 13823 13537 3 13825 13537 13823 3 13536 13538 13824 3 13826 13824 13538 3 13537 13825 13539 3 13827 13539 13825 3 13538 13540 13826 3 13828 13826 13540 3 13539 13827 13541 3 13829 13541 13827 3 13540 13542 13828 3 13830 13828 13542 3 13541 13829 13543 3 13831 13543 13829 3 13542 13544 13830 3 13832 13830 13544 3 13543 13831 13545 3 13833 13545 13831 3 13544 13546 13832 3 13834 13832 13546 3 13545 13833 13547 3 13835 13547 13833 3 13546 13548 13834 3 13836 13834 13548 3 13547 
13835 13549 3 13837 13549 13835 3 13548 13550 13836 3 13838 13836 13550 3 13549 13837 13839 3 13549 13839 13551 3 13550 13552 13838 3 13840 13838 13552 3 13551 13839 13841 3 13551 13841 13553 3 13552 13554 13840 3 13842 13840 13554 3 13553 13841 13843 3 13553 13843 13555 3 13554 13556 13842 3 13844 13842 13556 3 13555 13843 13845 3 13555 13845 13557 3 13556 13558 13844 3 13846 13844 13558 3 13557 13845 13847 3 13557 13847 13559 3 13558 13560 13846 3 13848 13846 13560 3 13559 13847 13849 3 13559 13849 13561 3 13560 13562 13848 3 13850 13848 13562 3 13561 13849 13851 3 13561 13851 13563 3 13562 13564 13850 3 13852 13850 13564 3 13563 13851 13853 3 13563 13853 13565 3 13564 13566 13852 3 13854 13852 13566 3 13565 13853 13855 3 13565 13855 13567 3 13566 13568 13854 3 13856 13854 13568 3 13567 13855 13859 3 13567 13859 13571 3 13568 13569 13856 3 13857 13856 13569 3 13569 13572 13860 3 13569 13860 13857 3 13570 13571 13859 3 13570 13859 13858 3 13570 13858 13863 3 13570 13863 13575 3 13572 13573 13861 3 13572 13861 13860 3 13573 13576 13864 3 13573 13864 13861 3 13574 13575 13863 3 13574 13863 13862 3 13574 13862 13932 3 13574 13932 13644 3 13576 13577 13865 3 13576 13865 13864 3 13577 13578 13866 3 13577 13866 13865 3 13578 13579 13867 3 13578 13867 13866 3 13579 13580 13868 3 13579 13868 13867 3 13580 13581 13869 3 13580 13869 13868 3 13581 13582 13870 3 13581 13870 13869 3 13582 13583 13871 3 13582 13871 13870 3 13583 13584 13872 3 13583 13872 13871 3 13584 13585 13873 3 13584 13873 13872 3 13585 13586 13874 3 13585 13874 13873 3 13586 13587 13875 3 13586 13875 13874 3 13587 13588 13876 3 13587 13876 13875 3 13588 13589 13877 3 13588 13877 13876 3 13589 13590 13878 3 13589 13878 13877 3 13590 13591 13879 3 13590 13879 13878 3 13591 13592 13880 3 13591 13880 13879 3 13592 13593 13881 3 13592 13881 13880 3 13593 13594 13882 3 13593 13882 13881 3 13594 13595 13883 3 13594 13883 13882 3 13595 13596 13884 3 13595 13884 13883 3 13596 13597 13885 3 13596 13885 13884 3 13597 
13598 13886 3 13597 13886 13885 3 13598 13599 13887 3 13598 13887 13886 3 13599 13600 13888 3 13599 13888 13887 3 13600 13601 13889 3 13600 13889 13888 3 13601 13602 13890 3 13601 13890 13889 3 13602 13603 13891 3 13602 13891 13890 3 13603 13604 13892 3 13603 13892 13891 3 13604 13605 13893 3 13604 13893 13892 3 13605 13606 13894 3 13605 13894 13893 3 13606 13607 13895 3 13606 13895 13894 3 13607 13608 13896 3 13607 13896 13895 3 13608 13609 13897 3 13608 13897 13896 3 13609 13610 13898 3 13609 13898 13897 3 13610 13611 13899 3 13610 13899 13898 3 13611 13612 13900 3 13611 13900 13899 3 13612 13613 13901 3 13612 13901 13900 3 13613 13614 13902 3 13613 13902 13901 3 13614 13615 13903 3 13614 13903 13902 3 13615 13616 13904 3 13615 13904 13903 3 13616 13617 13905 3 13616 13905 13904 3 13617 13618 13906 3 13617 13906 13905 3 13618 13619 13907 3 13618 13907 13906 3 13619 13620 13908 3 13619 13908 13907 3 13620 13621 13909 3 13620 13909 13908 3 13621 13622 13910 3 13621 13910 13909 3 13622 13623 13911 3 13622 13911 13910 3 13623 13624 13912 3 13623 13912 13911 3 13624 13625 13913 3 13624 13913 13912 3 13625 13626 13914 3 13625 13914 13913 3 13626 13627 13915 3 13626 13915 13914 3 13627 13628 13916 3 13627 13916 13915 3 13628 13629 13917 3 13628 13917 13916 3 13629 13630 13918 3 13629 13918 13917 3 13630 13631 13919 3 13630 13919 13918 3 13631 13632 13920 3 13631 13920 13919 3 13632 13633 13921 3 13632 13921 13920 3 13633 13634 13922 3 13633 13922 13921 3 13634 13635 13923 3 13634 13923 13922 3 13635 13636 13924 3 13635 13924 13923 3 13636 13637 13925 3 13636 13925 13924 3 13637 13638 13926 3 13637 13926 13925 3 13638 13639 13927 3 13638 13927 13926 3 13639 13640 13928 3 13639 13928 13927 3 13640 13641 13929 3 13640 13929 13928 3 13641 13642 13929 3 13930 13929 13642 3 13642 13643 13930 3 13931 13930 13643 3 13643 13644 13931 3 13932 13931 13644 3 13645 13933 13646 3 13934 13646 13933 3 13645 13715 13933 3 14003 13933 13715 3 13646 13934 13647 3 13935 13647 13934 3 13647 
13935 13648 3 13936 13648 13935 3 13648 13936 13649 3 13937 13649 13936 3 13649 13937 13650 3 13938 13650 13937 3 13650 13938 13651 3 13939 13651 13938 3 13651 13939 13652 3 13940 13652 13939 3 13652 13940 13653 3 13941 13653 13940 3 13653 13941 13654 3 13942 13654 13941 3 13654 13942 13655 3 13943 13655 13942 3 13655 13943 13656 3 13944 13656 13943 3 13656 13944 13657 3 13945 13657 13944 3 13657 13945 13658 3 13946 13658 13945 3 13658 13946 13659 3 13947 13659 13946 3 13659 13947 13660 3 13948 13660 13947 3 13660 13948 13661 3 13949 13661 13948 3 13661 13949 13662 3 13950 13662 13949 3 13662 13950 13663 3 13951 13663 13950 3 13663 13951 13664 3 13952 13664 13951 3 13664 13952 13665 3 13953 13665 13952 3 13665 13953 13666 3 13954 13666 13953 3 13666 13954 13667 3 13955 13667 13954 3 13667 13955 13668 3 13956 13668 13955 3 13668 13956 13669 3 13957 13669 13956 3 13669 13957 13670 3 13958 13670 13957 3 13670 13958 13671 3 13959 13671 13958 3 13671 13959 13672 3 13960 13672 13959 3 13672 13960 13673 3 13961 13673 13960 3 13673 13961 13674 3 13962 13674 13961 3 13674 13962 13675 3 13963 13675 13962 3 13675 13963 13676 3 13964 13676 13963 3 13676 13964 13677 3 13965 13677 13964 3 13677 13965 13678 3 13966 13678 13965 3 13678 13966 13679 3 13967 13679 13966 3 13679 13967 13680 3 13968 13680 13967 3 13680 13968 13681 3 13969 13681 13968 3 13681 13969 13682 3 13970 13682 13969 3 13682 13970 13683 3 13971 13683 13970 3 13683 13971 13684 3 13972 13684 13971 3 13684 13972 13685 3 13973 13685 13972 3 13685 13973 13686 3 13974 13686 13973 3 13686 13974 13687 3 13975 13687 13974 3 13687 13975 13688 3 13976 13688 13975 3 13688 13976 13689 3 13977 13689 13976 3 13689 13977 13690 3 13978 13690 13977 3 13690 13978 13691 3 13979 13691 13978 3 13691 13979 13692 3 13980 13692 13979 3 13692 13980 13693 3 13981 13693 13980 3 13693 13981 13982 3 13693 13982 13694 3 13694 13982 13983 3 13694 13983 13695 3 13695 13983 13984 3 13695 13984 13696 3 13696 13984 13985 3 13696 13985 13697 3 13697 
13985 13986 3 13697 13986 13698 3 13698 13986 13987 3 13698 13987 13699 3 13699 13987 13988 3 13699 13988 13700 3 13700 13988 13989 3 13700 13989 13701 3 13701 13989 13990 3 13701 13990 13702 3 13702 13990 13991 3 13702 13991 13703 3 13703 13991 13992 3 13703 13992 13704 3 13704 13992 13993 3 13704 13993 13705 3 13705 13993 13994 3 13705 13994 13706 3 13706 13994 13995 3 13706 13995 13707 3 13707 13995 13996 3 13707 13996 13708 3 13708 13996 13997 3 13708 13997 13709 3 13709 13997 13998 3 13709 13998 13710 3 13710 13998 13999 3 13710 13999 13711 3 13711 13999 14000 3 13711 14000 13712 3 13712 14000 14001 3 13712 14001 13713 3 13713 14001 14004 3 13713 14004 13716 3 13714 14002 14003 3 13714 14003 13715 3 13714 13719 14007 3 13714 14007 14002 3 13716 14004 14005 3 13716 14005 13717 3 13717 14005 14008 3 13717 14008 13720 3 13718 14006 14007 3 13718 14007 13719 3 13718 13722 14010 3 13718 14010 14006 3 13720 14008 14009 3 13720 14009 13721 3 13721 14009 14011 3 13721 14011 13723 3 13722 13724 14012 3 13722 14012 14010 3 13723 14011 14013 3 13723 14013 13725 3 13724 13726 14014 3 13724 14014 14012 3 13725 14013 14015 3 13725 14015 13727 3 13726 13728 14016 3 13726 14016 14014 3 13727 14015 14017 3 13727 14017 13729 3 13728 13730 14018 3 13728 14018 14016 3 13729 14017 14019 3 13729 14019 13731 3 13730 13732 14020 3 13730 14020 14018 3 13731 14019 14021 3 13731 14021 13733 3 13732 13734 14022 3 13732 14022 14020 3 13733 14021 14023 3 13733 14023 13735 3 13734 13736 14024 3 13734 14024 14022 3 13735 14023 14025 3 13735 14025 13737 3 13736 13738 14026 3 13736 14026 14024 3 13737 14025 14027 3 13737 14027 13739 3 13738 13740 14028 3 13738 14028 14026 3 13739 14027 14029 3 13739 14029 13741 3 13740 13742 14030 3 13740 14030 14028 3 13741 14029 14031 3 13741 14031 13743 3 13742 13744 14032 3 13742 14032 14030 3 13743 14031 14033 3 13743 14033 13745 3 13744 13746 14034 3 13744 14034 14032 3 13745 14033 14035 3 13745 14035 13747 3 13746 13748 14036 3 13746 14036 14034 3 13747 
14035 14037 3 13747 14037 13749 3 13748 13750 14038 3 13748 14038 14036 3 13749 14037 14039 3 13749 14039 13751 3 13750 13752 14040 3 13750 14040 14038 3 13751 14039 14041 3 13751 14041 13753 3 13752 13754 14042 3 13752 14042 14040 3 13753 14041 14043 3 13753 14043 13755 3 13754 13756 14044 3 13754 14044 14042 3 13755 14043 14045 3 13755 14045 13757 3 13756 13758 14046 3 13756 14046 14044 3 13757 14045 14047 3 13757 14047 13759 3 13758 13760 14048 3 13758 14048 14046 3 13759 14047 14049 3 13759 14049 13761 3 13760 13762 14050 3 13760 14050 14048 3 13761 14049 14051 3 13761 14051 13763 3 13762 13764 14052 3 13762 14052 14050 3 13763 14051 14053 3 13763 14053 13765 3 13764 13766 14054 3 13764 14054 14052 3 13765 14053 14055 3 13765 14055 13767 3 13766 13768 14056 3 13766 14056 14054 3 13767 14055 13769 3 14057 13769 14055 3 13768 13770 14058 3 13768 14058 14056 3 13769 14057 13771 3 14059 13771 14057 3 13770 13772 14060 3 13770 14060 14058 3 13771 14059 13773 3 14061 13773 14059 3 13772 13774 14062 3 13772 14062 14060 3 13773 14061 13775 3 14063 13775 14061 3 13774 13776 14064 3 13774 14064 14062 3 13775 14063 13777 3 14065 13777 14063 3 13776 13778 14066 3 13776 14066 14064 3 13777 14065 13779 3 14067 13779 14065 3 13778 13780 14068 3 13778 14068 14066 3 13779 14067 13781 3 14069 13781 14067 3 13780 13782 14070 3 13780 14070 14068 3 13781 14069 13783 3 14071 13783 14069 3 13782 13784 14072 3 13782 14072 14070 3 13783 14071 13785 3 14073 13785 14071 3 13784 13786 14074 3 13784 14074 14072 3 13785 14073 13787 3 14075 13787 14073 3 13786 13788 14074 3 14076 14074 13788 3 13787 14075 13789 3 14077 13789 14075 3 13788 13790 14076 3 14078 14076 13790 3 13789 14077 13791 3 14079 13791 14077 3 13790 13792 14078 3 14080 14078 13792 3 13791 14079 13793 3 14081 13793 14079 3 13792 13794 14080 3 14082 14080 13794 3 13793 14081 13795 3 14083 13795 14081 3 13794 13796 14082 3 14084 14082 13796 3 13795 14083 13797 3 14085 13797 14083 3 13796 13798 14084 3 14086 14084 13798 3 13797 
14085 13799 3 14087 13799 14085 3 13798 13800 14086 3 14088 14086 13800 3 13799 14087 13801 3 14089 13801 14087 3 13800 13802 14088 3 14090 14088 13802 3 13801 14089 13803 3 14091 13803 14089 3 13802 13804 14090 3 14092 14090 13804 3 13803 14091 13805 3 14093 13805 14091 3 13804 13806 14092 3 14094 14092 13806 3 13805 14093 13807 3 14095 13807 14093 3 13806 13808 14094 3 14096 14094 13808 3 13807 14095 13809 3 14097 13809 14095 3 13808 13810 14096 3 14098 14096 13810 3 13809 14097 13811 3 14099 13811 14097 3 13810 13812 14098 3 14100 14098 13812 3 13811 14099 13813 3 14101 13813 14099 3 13812 13814 14100 3 14102 14100 13814 3 13813 14101 13815 3 14103 13815 14101 3 13814 13816 14102 3 14104 14102 13816 3 13815 14103 13817 3 14105 13817 14103 3 13816 13818 14104 3 14106 14104 13818 3 13817 14105 13819 3 14107 13819 14105 3 13818 13820 14106 3 14108 14106 13820 3 13819 14107 13821 3 14109 13821 14107 3 13820 13822 14108 3 14110 14108 13822 3 13821 14109 13823 3 14111 13823 14109 3 13822 13824 14110 3 14112 14110 13824 3 13823 14111 13825 3 14113 13825 14111 3 13824 13826 14112 3 14114 14112 13826 3 13825 14113 13827 3 14115 13827 14113 3 13826 13828 14114 3 14116 14114 13828 3 13827 14115 13829 3 14117 13829 14115 3 13828 13830 14116 3 14118 14116 13830 3 13829 14117 13831 3 14119 13831 14117 3 13830 13832 14118 3 14120 14118 13832 3 13831 14119 13833 3 14121 13833 14119 3 13832 13834 14120 3 14122 14120 13834 3 13833 14121 13835 3 14123 13835 14121 3 13834 13836 14122 3 14124 14122 13836 3 13835 14123 13837 3 14125 13837 14123 3 13836 13838 14124 3 14126 14124 13838 3 13837 14125 13839 3 14127 13839 14125 3 13838 13840 14126 3 14128 14126 13840 3 13839 14127 14129 3 13839 14129 13841 3 13840 13842 14128 3 14130 14128 13842 3 13841 14129 14131 3 13841 14131 13843 3 13842 13844 14130 3 14132 14130 13844 3 13843 14131 14133 3 13843 14133 13845 3 13844 13846 14132 3 14134 14132 13846 3 13845 14133 14135 3 13845 14135 13847 3 13846 13848 14134 3 14136 14134 13848 3 13847 
14135 14137 3 13847 14137 13849 3 13848 13850 14136 3 14138 14136 13850 3 13849 14137 14139 3 13849 14139 13851 3 13850 13852 14138 3 14140 14138 13852 3 13851 14139 14141 3 13851 14141 13853 3 13852 13854 14140 3 14142 14140 13854 3 13853 14141 14143 3 13853 14143 13855 3 13854 13856 14142 3 14144 14142 13856 3 13855 14143 14147 3 13855 14147 13859 3 13856 13857 14144 3 14145 14144 13857 3 13857 13860 14148 3 13857 14148 14145 3 13858 13859 14147 3 13858 14147 14146 3 13858 14146 14151 3 13858 14151 13863 3 13860 13861 14149 3 13860 14149 14148 3 13861 13864 14152 3 13861 14152 14149 3 13862 13863 14151 3 13862 14151 14150 3 13862 14150 14220 3 13862 14220 13932 3 13864 13865 14153 3 13864 14153 14152 3 13865 13866 14154 3 13865 14154 14153 3 13866 13867 14155 3 13866 14155 14154 3 13867 13868 14156 3 13867 14156 14155 3 13868 13869 14157 3 13868 14157 14156 3 13869 13870 14158 3 13869 14158 14157 3 13870 13871 14159 3 13870 14159 14158 3 13871 13872 14160 3 13871 14160 14159 3 13872 13873 14161 3 13872 14161 14160 3 13873 13874 14162 3 13873 14162 14161 3 13874 13875 14163 3 13874 14163 14162 3 13875 13876 14164 3 13875 14164 14163 3 13876 13877 14165 3 13876 14165 14164 3 13877 13878 14166 3 13877 14166 14165 3 13878 13879 14167 3 13878 14167 14166 3 13879 13880 14168 3 13879 14168 14167 3 13880 13881 14169 3 13880 14169 14168 3 13881 13882 14170 3 13881 14170 14169 3 13882 13883 14171 3 13882 14171 14170 3 13883 13884 14172 3 13883 14172 14171 3 13884 13885 14173 3 13884 14173 14172 3 13885 13886 14174 3 13885 14174 14173 3 13886 13887 14175 3 13886 14175 14174 3 13887 13888 14176 3 13887 14176 14175 3 13888 13889 14177 3 13888 14177 14176 3 13889 13890 14178 3 13889 14178 14177 3 13890 13891 14179 3 13890 14179 14178 3 13891 13892 14180 3 13891 14180 14179 3 13892 13893 14181 3 13892 14181 14180 3 13893 13894 14182 3 13893 14182 14181 3 13894 13895 14183 3 13894 14183 14182 3 13895 13896 14184 3 13895 14184 14183 3 13896 13897 14185 3 13896 14185 14184 3 13897 
13898 14186 3 13897 14186 14185 3 13898 13899 14187 3 13898 14187 14186 3 13899 13900 14188 3 13899 14188 14187 3 13900 13901 14189 3 13900 14189 14188 3 13901 13902 14190 3 13901 14190 14189 3 13902 13903 14191 3 13902 14191 14190 3 13903 13904 14192 3 13903 14192 14191 3 13904 13905 14193 3 13904 14193 14192 3 13905 13906 14194 3 13905 14194 14193 3 13906 13907 14195 3 13906 14195 14194 3 13907 13908 14196 3 13907 14196 14195 3 13908 13909 14197 3 13908 14197 14196 3 13909 13910 14198 3 13909 14198 14197 3 13910 13911 14199 3 13910 14199 14198 3 13911 13912 14200 3 13911 14200 14199 3 13912 13913 14201 3 13912 14201 14200 3 13913 13914 14202 3 13913 14202 14201 3 13914 13915 14203 3 13914 14203 14202 3 13915 13916 14204 3 13915 14204 14203 3 13916 13917 14205 3 13916 14205 14204 3 13917 13918 14206 3 13917 14206 14205 3 13918 13919 14207 3 13918 14207 14206 3 13919 13920 14208 3 13919 14208 14207 3 13920 13921 14209 3 13920 14209 14208 3 13921 13922 14210 3 13921 14210 14209 3 13922 13923 14211 3 13922 14211 14210 3 13923 13924 14212 3 13923 14212 14211 3 13924 13925 14213 3 13924 14213 14212 3 13925 13926 14214 3 13925 14214 14213 3 13926 13927 14215 3 13926 14215 14214 3 13927 13928 14216 3 13927 14216 14215 3 13928 13929 14217 3 13928 14217 14216 3 13929 13930 14218 3 13929 14218 14217 3 13930 13931 14218 3 14219 14218 13931 3 13931 13932 14219 3 14220 14219 13932 3 13933 14221 13934 3 14222 13934 14221 3 13933 14003 14221 3 14291 14221 14003 3 13934 14222 13935 3 14223 13935 14222 3 13935 14223 13936 3 14224 13936 14223 3 13936 14224 13937 3 14225 13937 14224 3 13937 14225 13938 3 14226 13938 14225 3 13938 14226 13939 3 14227 13939 14226 3 13939 14227 13940 3 14228 13940 14227 3 13940 14228 13941 3 14229 13941 14228 3 13941 14229 13942 3 14230 13942 14229 3 13942 14230 13943 3 14231 13943 14230 3 13943 14231 13944 3 14232 13944 14231 3 13944 14232 13945 3 14233 13945 14232 3 13945 14233 13946 3 14234 13946 14233 3 13946 14234 13947 3 14235 13947 14234 3 13947 
14235 13948 3 14236 13948 14235 3 13948 14236 13949 3 14237 13949 14236 3 13949 14237 13950 3 14238 13950 14237 3 13950 14238 13951 3 14239 13951 14238 3 13951 14239 13952 3 14240 13952 14239 3 13952 14240 13953 3 14241 13953 14240 3 13953 14241 13954 3 14242 13954 14241 3 13954 14242 13955 3 14243 13955 14242 3 13955 14243 13956 3 14244 13956 14243 3 13956 14244 13957 3 14245 13957 14244 3 13957 14245 13958 3 14246 13958 14245 3 13958 14246 13959 3 14247 13959 14246 3 13959 14247 13960 3 14248 13960 14247 3 13960 14248 13961 3 14249 13961 14248 3 13961 14249 13962 3 14250 13962 14249 3 13962 14250 13963 3 14251 13963 14250 3 13963 14251 13964 3 14252 13964 14251 3 13964 14252 13965 3 14253 13965 14252 3 13965 14253 13966 3 14254 13966 14253 3 13966 14254 13967 3 14255 13967 14254 3 13967 14255 13968 3 14256 13968 14255 3 13968 14256 13969 3 14257 13969 14256 3 13969 14257 13970 3 14258 13970 14257 3 13970 14258 13971 3 14259 13971 14258 3 13971 14259 13972 3 14260 13972 14259 3 13972 14260 13973 3 14261 13973 14260 3 13973 14261 13974 3 14262 13974 14261 3 13974 14262 13975 3 14263 13975 14262 3 13975 14263 13976 3 14264 13976 14263 3 13976 14264 13977 3 14265 13977 14264 3 13977 14265 13978 3 14266 13978 14265 3 13978 14266 13979 3 14267 13979 14266 3 13979 14267 13980 3 14268 13980 14267 3 13980 14268 13981 3 14269 13981 14268 3 13981 14269 13982 3 14270 13982 14269 3 13982 14270 13983 3 14271 13983 14270 3 13983 14271 13984 3 14272 13984 14271 3 13984 14272 14273 3 13984 14273 13985 3 13985 14273 14274 3 13985 14274 13986 3 13986 14274 14275 3 13986 14275 13987 3 13987 14275 14276 3 13987 14276 13988 3 13988 14276 14277 3 13988 14277 13989 3 13989 14277 14278 3 13989 14278 13990 3 13990 14278 14279 3 13990 14279 13991 3 13991 14279 14280 3 13991 14280 13992 3 13992 14280 14281 3 13992 14281 13993 3 13993 14281 14282 3 13993 14282 13994 3 13994 14282 14283 3 13994 14283 13995 3 13995 14283 14284 3 13995 14284 13996 3 13996 14284 14285 3 13996 14285 13997 3 13997 
14285 14286 3 13997 14286 13998 3 13998 14286 14287 3 13998 14287 13999 3 13999 14287 14288 3 13999 14288 14000 3 14000 14288 14289 3 14000 14289 14001 3 14001 14289 14292 3 14001 14292 14004 3 14002 14290 14291 3 14002 14291 14003 3 14002 14007 14295 3 14002 14295 14290 3 14004 14292 14293 3 14004 14293 14005 3 14005 14293 14296 3 14005 14296 14008 3 14006 14294 14295 3 14006 14295 14007 3 14006 14010 14298 3 14006 14298 14294 3 14008 14296 14297 3 14008 14297 14009 3 14009 14297 14299 3 14009 14299 14011 3 14010 14012 14300 3 14010 14300 14298 3 14011 14299 14301 3 14011 14301 14013 3 14012 14014 14302 3 14012 14302 14300 3 14013 14301 14303 3 14013 14303 14015 3 14014 14016 14304 3 14014 14304 14302 3 14015 14303 14305 3 14015 14305 14017 3 14016 14018 14306 3 14016 14306 14304 3 14017 14305 14307 3 14017 14307 14019 3 14018 14020 14308 3 14018 14308 14306 3 14019 14307 14309 3 14019 14309 14021 3 14020 14022 14310 3 14020 14310 14308 3 14021 14309 14311 3 14021 14311 14023 3 14022 14024 14312 3 14022 14312 14310 3 14023 14311 14313 3 14023 14313 14025 3 14024 14026 14314 3 14024 14314 14312 3 14025 14313 14315 3 14025 14315 14027 3 14026 14028 14316 3 14026 14316 14314 3 14027 14315 14317 3 14027 14317 14029 3 14028 14030 14318 3 14028 14318 14316 3 14029 14317 14319 3 14029 14319 14031 3 14030 14032 14320 3 14030 14320 14318 3 14031 14319 14321 3 14031 14321 14033 3 14032 14034 14322 3 14032 14322 14320 3 14033 14321 14323 3 14033 14323 14035 3 14034 14036 14324 3 14034 14324 14322 3 14035 14323 14325 3 14035 14325 14037 3 14036 14038 14326 3 14036 14326 14324 3 14037 14325 14327 3 14037 14327 14039 3 14038 14040 14328 3 14038 14328 14326 3 14039 14327 14329 3 14039 14329 14041 3 14040 14042 14330 3 14040 14330 14328 3 14041 14329 14331 3 14041 14331 14043 3 14042 14044 14332 3 14042 14332 14330 3 14043 14331 14333 3 14043 14333 14045 3 14044 14046 14334 3 14044 14334 14332 3 14045 14333 14335 3 14045 14335 14047 3 14046 14048 14336 3 14046 14336 14334 3 14047 
14335 14337 3 14047 14337 14049 3 14048 14050 14338 3 14048 14338 14336 3 14049 14337 14339 3 14049 14339 14051 3 14050 14052 14340 3 14050 14340 14338 3 14051 14339 14341 3 14051 14341 14053 3 14052 14054 14342 3 14052 14342 14340 3 14053 14341 14343 3 14053 14343 14055 3 14054 14056 14344 3 14054 14344 14342 3 14055 14343 14345 3 14055 14345 14057 3 14056 14058 14346 3 14056 14346 14344 3 14057 14345 14347 3 14057 14347 14059 3 14058 14060 14348 3 14058 14348 14346 3 14059 14347 14061 3 14349 14061 14347 3 14060 14062 14350 3 14060 14350 14348 3 14061 14349 14063 3 14351 14063 14349 3 14062 14064 14352 3 14062 14352 14350 3 14063 14351 14065 3 14353 14065 14351 3 14064 14066 14354 3 14064 14354 14352 3 14065 14353 14067 3 14355 14067 14353 3 14066 14068 14356 3 14066 14356 14354 3 14067 14355 14069 3 14357 14069 14355 3 14068 14070 14358 3 14068 14358 14356 3 14069 14357 14071 3 14359 14071 14357 3 14070 14072 14360 3 14070 14360 14358 3 14071 14359 14073 3 14361 14073 14359 3 14072 14074 14362 3 14072 14362 14360 3 14073 14361 14075 3 14363 14075 14361 3 14074 14076 14364 3 14074 14364 14362 3 14075 14363 14077 3 14365 14077 14363 3 14076 14078 14364 3 14366 14364 14078 3 14077 14365 14079 3 14367 14079 14365 3 14078 14080 14366 3 14368 14366 14080 3 14079 14367 14081 3 14369 14081 14367 3 14080 14082 14368 3 14370 14368 14082 3 14081 14369 14083 3 14371 14083 14369 3 14082 14084 14370 3 14372 14370 14084 3 14083 14371 14085 3 14373 14085 14371 3 14084 14086 14372 3 14374 14372 14086 3 14085 14373 14087 3 14375 14087 14373 3 14086 14088 14374 3 14376 14374 14088 3 14087 14375 14089 3 14377 14089 14375 3 14088 14090 14376 3 14378 14376 14090 3 14089 14377 14091 3 14379 14091 14377 3 14090 14092 14378 3 14380 14378 14092 3 14091 14379 14093 3 14381 14093 14379 3 14092 14094 14380 3 14382 14380 14094 3 14093 14381 14095 3 14383 14095 14381 3 14094 14096 14382 3 14384 14382 14096 3 14095 14383 14097 3 14385 14097 14383 3 14096 14098 14384 3 14386 14384 14098 3 14097 
14385 14099 3 14387 14099 14385 3 14098 14100 14386 3 14388 14386 14100 3 14099 14387 14101 3 14389 14101 14387 3 14100 14102 14388 3 14390 14388 14102 3 14101 14389 14103 3 14391 14103 14389 3 14102 14104 14390 3 14392 14390 14104 3 14103 14391 14105 3 14393 14105 14391 3 14104 14106 14392 3 14394 14392 14106 3 14105 14393 14107 3 14395 14107 14393 3 14106 14108 14394 3 14396 14394 14108 3 14107 14395 14109 3 14397 14109 14395 3 14108 14110 14396 3 14398 14396 14110 3 14109 14397 14111 3 14399 14111 14397 3 14110 14112 14398 3 14400 14398 14112 3 14111 14399 14113 3 14401 14113 14399 3 14112 14114 14400 3 14402 14400 14114 3 14113 14401 14115 3 14403 14115 14401 3 14114 14116 14402 3 14404 14402 14116 3 14115 14403 14117 3 14405 14117 14403 3 14116 14118 14404 3 14406 14404 14118 3 14117 14405 14119 3 14407 14119 14405 3 14118 14120 14406 3 14408 14406 14120 3 14119 14407 14121 3 14409 14121 14407 3 14120 14122 14408 3 14410 14408 14122 3 14121 14409 14123 3 14411 14123 14409 3 14122 14124 14410 3 14412 14410 14124 3 14123 14411 14125 3 14413 14125 14411 3 14124 14126 14412 3 14414 14412 14126 3 14125 14413 14127 3 14415 14127 14413 3 14126 14128 14414 3 14416 14414 14128 3 14127 14415 14129 3 14417 14129 14415 3 14128 14130 14416 3 14418 14416 14130 3 14129 14417 14131 3 14419 14131 14417 3 14130 14132 14418 3 14420 14418 14132 3 14131 14419 14421 3 14131 14421 14133 3 14132 14134 14420 3 14422 14420 14134 3 14133 14421 14423 3 14133 14423 14135 3 14134 14136 14422 3 14424 14422 14136 3 14135 14423 14425 3 14135 14425 14137 3 14136 14138 14424 3 14426 14424 14138 3 14137 14425 14427 3 14137 14427 14139 3 14138 14140 14426 3 14428 14426 14140 3 14139 14427 14429 3 14139 14429 14141 3 14140 14142 14428 3 14430 14428 14142 3 14141 14429 14431 3 14141 14431 14143 3 14142 14144 14430 3 14432 14430 14144 3 14143 14431 14435 3 14143 14435 14147 3 14144 14145 14432 3 14433 14432 14145 3 14145 14148 14433 3 14436 14433 14148 3 14146 14147 14434 3 14435 14434 14147 3 14146 
14434 14439 3 14146 14439 14151 3 14148 14149 14436 3 14437 14436 14149 3 14149 14152 14440 3 14149 14440 14437 3 14150 14151 14439 3 14150 14439 14438 3 14150 14438 14508 3 14150 14508 14220 3 14152 14153 14441 3 14152 14441 14440 3 14153 14154 14442 3 14153 14442 14441 3 14154 14155 14443 3 14154 14443 14442 3 14155 14156 14444 3 14155 14444 14443 3 14156 14157 14445 3 14156 14445 14444 3 14157 14158 14446 3 14157 14446 14445 3 14158 14159 14447 3 14158 14447 14446 3 14159 14160 14448 3 14159 14448 14447 3 14160 14161 14449 3 14160 14449 14448 3 14161 14162 14450 3 14161 14450 14449 3 14162 14163 14451 3 14162 14451 14450 3 14163 14164 14452 3 14163 14452 14451 3 14164 14165 14453 3 14164 14453 14452 3 14165 14166 14454 3 14165 14454 14453 3 14166 14167 14455 3 14166 14455 14454 3 14167 14168 14456 3 14167 14456 14455 3 14168 14169 14457 3 14168 14457 14456 3 14169 14170 14458 3 14169 14458 14457 3 14170 14171 14459 3 14170 14459 14458 3 14171 14172 14460 3 14171 14460 14459 3 14172 14173 14461 3 14172 14461 14460 3 14173 14174 14462 3 14173 14462 14461 3 14174 14175 14463 3 14174 14463 14462 3 14175 14176 14464 3 14175 14464 14463 3 14176 14177 14465 3 14176 14465 14464 3 14177 14178 14466 3 14177 14466 14465 3 14178 14179 14467 3 14178 14467 14466 3 14179 14180 14468 3 14179 14468 14467 3 14180 14181 14469 3 14180 14469 14468 3 14181 14182 14470 3 14181 14470 14469 3 14182 14183 14471 3 14182 14471 14470 3 14183 14184 14472 3 14183 14472 14471 3 14184 14185 14473 3 14184 14473 14472 3 14185 14186 14474 3 14185 14474 14473 3 14186 14187 14475 3 14186 14475 14474 3 14187 14188 14476 3 14187 14476 14475 3 14188 14189 14477 3 14188 14477 14476 3 14189 14190 14478 3 14189 14478 14477 3 14190 14191 14479 3 14190 14479 14478 3 14191 14192 14480 3 14191 14480 14479 3 14192 14193 14481 3 14192 14481 14480 3 14193 14194 14482 3 14193 14482 14481 3 14194 14195 14483 3 14194 14483 14482 3 14195 14196 14484 3 14195 14484 14483 3 14196 14197 14485 3 14196 14485 14484 3 14197 
14198 14486 3 14197 14486 14485 3 14198 14199 14487 3 14198 14487 14486 3 14199 14200 14488 3 14199 14488 14487 3 14200 14201 14489 3 14200 14489 14488 3 14201 14202 14490 3 14201 14490 14489 3 14202 14203 14491 3 14202 14491 14490 3 14203 14204 14492 3 14203 14492 14491 3 14204 14205 14493 3 14204 14493 14492 3 14205 14206 14494 3 14205 14494 14493 3 14206 14207 14495 3 14206 14495 14494 3 14207 14208 14496 3 14207 14496 14495 3 14208 14209 14497 3 14208 14497 14496 3 14209 14210 14498 3 14209 14498 14497 3 14210 14211 14499 3 14210 14499 14498 3 14211 14212 14500 3 14211 14500 14499 3 14212 14213 14501 3 14212 14501 14500 3 14213 14214 14502 3 14213 14502 14501 3 14214 14215 14503 3 14214 14503 14502 3 14215 14216 14504 3 14215 14504 14503 3 14216 14217 14505 3 14216 14505 14504 3 14217 14218 14506 3 14217 14506 14505 3 14218 14219 14507 3 14218 14507 14506 3 14219 14220 14508 3 14219 14508 14507 3 14221 14509 14222 3 14510 14222 14509 3 14221 14291 14509 3 14579 14509 14291 3 14222 14510 14223 3 14511 14223 14510 3 14223 14511 14224 3 14512 14224 14511 3 14224 14512 14225 3 14513 14225 14512 3 14225 14513 14226 3 14514 14226 14513 3 14226 14514 14227 3 14515 14227 14514 3 14227 14515 14228 3 14516 14228 14515 3 14228 14516 14229 3 14517 14229 14516 3 14229 14517 14230 3 14518 14230 14517 3 14230 14518 14231 3 14519 14231 14518 3 14231 14519 14232 3 14520 14232 14519 3 14232 14520 14233 3 14521 14233 14520 3 14233 14521 14234 3 14522 14234 14521 3 14234 14522 14235 3 14523 14235 14522 3 14235 14523 14236 3 14524 14236 14523 3 14236 14524 14237 3 14525 14237 14524 3 14237 14525 14238 3 14526 14238 14525 3 14238 14526 14239 3 14527 14239 14526 3 14239 14527 14240 3 14528 14240 14527 3 14240 14528 14241 3 14529 14241 14528 3 14241 14529 14242 3 14530 14242 14529 3 14242 14530 14243 3 14531 14243 14530 3 14243 14531 14244 3 14532 14244 14531 3 14244 14532 14245 3 14533 14245 14532 3 14245 14533 14246 3 14534 14246 14533 3 14246 14534 14247 3 14535 14247 14534 3 14247 
14535 14248 3 14536 14248 14535 3 14248 14536 14249 3 14537 14249 14536 3 14249 14537 14250 3 14538 14250 14537 3 14250 14538 14251 3 14539 14251 14538 3 14251 14539 14252 3 14540 14252 14539 3 14252 14540 14253 3 14541 14253 14540 3 14253 14541 14254 3 14542 14254 14541 3 14254 14542 14255 3 14543 14255 14542 3 14255 14543 14256 3 14544 14256 14543 3 14256 14544 14257 3 14545 14257 14544 3 14257 14545 14258 3 14546 14258 14545 3 14258 14546 14259 3 14547 14259 14546 3 14259 14547 14260 3 14548 14260 14547 3 14260 14548 14261 3 14549 14261 14548 3 14261 14549 14262 3 14550 14262 14549 3 14262 14550 14263 3 14551 14263 14550 3 14263 14551 14264 3 14552 14264 14551 3 14264 14552 14265 3 14553 14265 14552 3 14265 14553 14266 3 14554 14266 14553 3 14266 14554 14267 3 14555 14267 14554 3 14267 14555 14268 3 14556 14268 14555 3 14268 14556 14269 3 14557 14269 14556 3 14269 14557 14270 3 14558 14270 14557 3 14270 14558 14271 3 14559 14271 14558 3 14271 14559 14272 3 14560 14272 14559 3 14272 14560 14273 3 14561 14273 14560 3 14273 14561 14274 3 14562 14274 14561 3 14274 14562 14275 3 14563 14275 14562 3 14275 14563 14276 3 14564 14276 14563 3 14276 14564 14277 3 14565 14277 14564 3 14277 14565 14278 3 14566 14278 14565 3 14278 14566 14279 3 14567 14279 14566 3 14279 14567 14568 3 14279 14568 14280 3 14280 14568 14569 3 14280 14569 14281 3 14281 14569 14570 3 14281 14570 14282 3 14282 14570 14571 3 14282 14571 14283 3 14283 14571 14572 3 14283 14572 14284 3 14284 14572 14573 3 14284 14573 14285 3 14285 14573 14574 3 14285 14574 14286 3 14286 14574 14575 3 14286 14575 14287 3 14287 14575 14576 3 14287 14576 14288 3 14288 14576 14577 3 14288 14577 14289 3 14289 14577 14580 3 14289 14580 14292 3 14290 14578 14579 3 14290 14579 14291 3 14290 14295 14578 3 14583 14578 14295 3 14292 14580 14581 3 14292 14581 14293 3 14293 14581 14584 3 14293 14584 14296 3 14294 14582 14583 3 14294 14583 14295 3 14294 14298 14586 3 14294 14586 14582 3 14296 14584 14585 3 14296 14585 14297 3 14297 
14585 14587 3 14297 14587 14299 3 14298 14300 14588 3 14298 14588 14586 3 14299 14587 14589 3 14299 14589 14301 3 14300 14302 14590 3 14300 14590 14588 3 14301 14589 14591 3 14301 14591 14303 3 14302 14304 14592 3 14302 14592 14590 3 14303 14591 14593 3 14303 14593 14305 3 14304 14306 14594 3 14304 14594 14592 3 14305 14593 14595 3 14305 14595 14307 3 14306 14308 14596 3 14306 14596 14594 3 14307 14595 14597 3 14307 14597 14309 3 14308 14310 14598 3 14308 14598 14596 3 14309 14597 14599 3 14309 14599 14311 3 14310 14312 14600 3 14310 14600 14598 3 14311 14599 14601 3 14311 14601 14313 3 14312 14314 14602 3 14312 14602 14600 3 14313 14601 14603 3 14313 14603 14315 3 14314 14316 14604 3 14314 14604 14602 3 14315 14603 14605 3 14315 14605 14317 3 14316 14318 14606 3 14316 14606 14604 3 14317 14605 14607 3 14317 14607 14319 3 14318 14320 14608 3 14318 14608 14606 3 14319 14607 14609 3 14319 14609 14321 3 14320 14322 14610 3 14320 14610 14608 3 14321 14609 14611 3 14321 14611 14323 3 14322 14324 14612 3 14322 14612 14610 3 14323 14611 14613 3 14323 14613 14325 3 14324 14326 14614 3 14324 14614 14612 3 14325 14613 14615 3 14325 14615 14327 3 14326 14328 14616 3 14326 14616 14614 3 14327 14615 14617 3 14327 14617 14329 3 14328 14330 14618 3 14328 14618 14616 3 14329 14617 14619 3 14329 14619 14331 3 14330 14332 14620 3 14330 14620 14618 3 14331 14619 14621 3 14331 14621 14333 3 14332 14334 14622 3 14332 14622 14620 3 14333 14621 14623 3 14333 14623 14335 3 14334 14336 14624 3 14334 14624 14622 3 14335 14623 14625 3 14335 14625 14337 3 14336 14338 14626 3 14336 14626 14624 3 14337 14625 14627 3 14337 14627 14339 3 14338 14340 14628 3 14338 14628 14626 3 14339 14627 14629 3 14339 14629 14341 3 14340 14342 14630 3 14340 14630 14628 3 14341 14629 14631 3 14341 14631 14343 3 14342 14344 14632 3 14342 14632 14630 3 14343 14631 14633 3 14343 14633 14345 3 14344 14346 14634 3 14344 14634 14632 3 14345 14633 14635 3 14345 14635 14347 3 14346 14348 14636 3 14346 14636 14634 3 14347 
14635 14637 3 14347 14637 14349 3 14348 14350 14638 3 14348 14638 14636 3 14349 14637 14639 3 14349 14639 14351 3 14350 14352 14640 3 14350 14640 14638 3 14351 14639 14641 3 14351 14641 14353 3 14352 14354 14642 3 14352 14642 14640 3 14353 14641 14355 3 14643 14355 14641 3 14354 14356 14644 3 14354 14644 14642 3 14355 14643 14357 3 14645 14357 14643 3 14356 14358 14646 3 14356 14646 14644 3 14357 14645 14359 3 14647 14359 14645 3 14358 14360 14648 3 14358 14648 14646 3 14359 14647 14361 3 14649 14361 14647 3 14360 14362 14650 3 14360 14650 14648 3 14361 14649 14363 3 14651 14363 14649 3 14362 14364 14652 3 14362 14652 14650 3 14363 14651 14365 3 14653 14365 14651 3 14364 14366 14654 3 14364 14654 14652 3 14365 14653 14367 3 14655 14367 14653 3 14366 14368 14656 3 14366 14656 14654 3 14367 14655 14369 3 14657 14369 14655 3 14368 14370 14658 3 14368 14658 14656 3 14369 14657 14371 3 14659 14371 14657 3 14370 14372 14658 3 14660 14658 14372 3 14371 14659 14373 3 14661 14373 14659 3 14372 14374 14660 3 14662 14660 14374 3 14373 14661 14375 3 14663 14375 14661 3 14374 14376 14662 3 14664 14662 14376 3 14375 14663 14377 3 14665 14377 14663 3 14376 14378 14664 3 14666 14664 14378 3 14377 14665 14379 3 14667 14379 14665 3 14378 14380 14666 3 14668 14666 14380 3 14379 14667 14381 3 14669 14381 14667 3 14380 14382 14668 3 14670 14668 14382 3 14381 14669 14383 3 14671 14383 14669 3 14382 14384 14670 3 14672 14670 14384 3 14383 14671 14385 3 14673 14385 14671 3 14384 14386 14672 3 14674 14672 14386 3 14385 14673 14387 3 14675 14387 14673 3 14386 14388 14674 3 14676 14674 14388 3 14387 14675 14389 3 14677 14389 14675 3 14388 14390 14676 3 14678 14676 14390 3 14389 14677 14391 3 14679 14391 14677 3 14390 14392 14678 3 14680 14678 14392 3 14391 14679 14393 3 14681 14393 14679 3 14392 14394 14680 3 14682 14680 14394 3 14393 14681 14395 3 14683 14395 14681 3 14394 14396 14682 3 14684 14682 14396 3 14395 14683 14397 3 14685 14397 14683 3 14396 14398 14684 3 14686 14684 14398 3 14397 
14685 14399 3 14687 14399 14685 3 14398 14400 14686 3 14688 14686 14400 3 14399 14687 14401 3 14689 14401 14687 3 14400 14402 14688 3 14690 14688 14402 3 14401 14689 14403 3 14691 14403 14689 3 14402 14404 14690 3 14692 14690 14404 3 14403 14691 14405 3 14693 14405 14691 3 14404 14406 14692 3 14694 14692 14406 3 14405 14693 14407 3 14695 14407 14693 3 14406 14408 14694 3 14696 14694 14408 3 14407 14695 14409 3 14697 14409 14695 3 14408 14410 14696 3 14698 14696 14410 3 14409 14697 14411 3 14699 14411 14697 3 14410 14412 14698 3 14700 14698 14412 3 14411 14699 14413 3 14701 14413 14699 3 14412 14414 14700 3 14702 14700 14414 3 14413 14701 14415 3 14703 14415 14701 3 14414 14416 14702 3 14704 14702 14416 3 14415 14703 14417 3 14705 14417 14703 3 14416 14418 14704 3 14706 14704 14418 3 14417 14705 14419 3 14707 14419 14705 3 14418 14420 14706 3 14708 14706 14420 3 14419 14707 14421 3 14709 14421 14707 3 14420 14422 14708 3 14710 14708 14422 3 14421 14709 14423 3 14711 14423 14709 3 14422 14424 14710 3 14712 14710 14424 3 14423 14711 14425 3 14713 14425 14711 3 14424 14426 14712 3 14714 14712 14426 3 14425 14713 14427 3 14715 14427 14713 3 14426 14428 14714 3 14716 14714 14428 3 14427 14715 14717 3 14427 14717 14429 3 14428 14430 14716 3 14718 14716 14430 3 14429 14717 14719 3 14429 14719 14431 3 14430 14432 14718 3 14720 14718 14432 3 14431 14719 14723 3 14431 14723 14435 3 14432 14433 14720 3 14721 14720 14433 3 14433 14436 14721 3 14724 14721 14436 3 14434 14435 14722 3 14723 14722 14435 3 14434 14722 14727 3 14434 14727 14439 3 14436 14437 14724 3 14725 14724 14437 3 14437 14440 14725 3 14728 14725 14440 3 14438 14439 14726 3 14727 14726 14439 3 14438 14726 14796 3 14438 14796 14508 3 14440 14441 14728 3 14729 14728 14441 3 14441 14442 14729 3 14730 14729 14442 3 14442 14443 14730 3 14731 14730 14443 3 14443 14444 14731 3 14732 14731 14444 3 14444 14445 14733 3 14444 14733 14732 3 14445 14446 14734 3 14445 14734 14733 3 14446 14447 14735 3 14446 14735 14734 3 14447 
14448 14736 3 14447 14736 14735 3 14448 14449 14737 3 14448 14737 14736 3 14449 14450 14738 3 14449 14738 14737 3 14450 14451 14739 3 14450 14739 14738 3 14451 14452 14740 3 14451 14740 14739 3 14452 14453 14741 3 14452 14741 14740 3 14453 14454 14742 3 14453 14742 14741 3 14454 14455 14743 3 14454 14743 14742 3 14455 14456 14744 3 14455 14744 14743 3 14456 14457 14745 3 14456 14745 14744 3 14457 14458 14746 3 14457 14746 14745 3 14458 14459 14747 3 14458 14747 14746 3 14459 14460 14748 3 14459 14748 14747 3 14460 14461 14749 3 14460 14749 14748 3 14461 14462 14750 3 14461 14750 14749 3 14462 14463 14751 3 14462 14751 14750 3 14463 14464 14752 3 14463 14752 14751 3 14464 14465 14753 3 14464 14753 14752 3 14465 14466 14754 3 14465 14754 14753 3 14466 14467 14755 3 14466 14755 14754 3 14467 14468 14756 3 14467 14756 14755 3 14468 14469 14757 3 14468 14757 14756 3 14469 14470 14758 3 14469 14758 14757 3 14470 14471 14759 3 14470 14759 14758 3 14471 14472 14760 3 14471 14760 14759 3 14472 14473 14761 3 14472 14761 14760 3 14473 14474 14762 3 14473 14762 14761 3 14474 14475 14763 3 14474 14763 14762 3 14475 14476 14764 3 14475 14764 14763 3 14476 14477 14765 3 14476 14765 14764 3 14477 14478 14766 3 14477 14766 14765 3 14478 14479 14767 3 14478 14767 14766 3 14479 14480 14768 3 14479 14768 14767 3 14480 14481 14769 3 14480 14769 14768 3 14481 14482 14770 3 14481 14770 14769 3 14482 14483 14771 3 14482 14771 14770 3 14483 14484 14772 3 14483 14772 14771 3 14484 14485 14773 3 14484 14773 14772 3 14485 14486 14774 3 14485 14774 14773 3 14486 14487 14775 3 14486 14775 14774 3 14487 14488 14776 3 14487 14776 14775 3 14488 14489 14777 3 14488 14777 14776 3 14489 14490 14778 3 14489 14778 14777 3 14490 14491 14779 3 14490 14779 14778 3 14491 14492 14780 3 14491 14780 14779 3 14492 14493 14781 3 14492 14781 14780 3 14493 14494 14782 3 14493 14782 14781 3 14494 14495 14783 3 14494 14783 14782 3 14495 14496 14784 3 14495 14784 14783 3 14496 14497 14785 3 14496 14785 14784 3 14497 
14498 14786 3 14497 14786 14785 3 14498 14499 14787 3 14498 14787 14786 3 14499 14500 14788 3 14499 14788 14787 3 14500 14501 14789 3 14500 14789 14788 3 14501 14502 14790 3 14501 14790 14789 3 14502 14503 14791 3 14502 14791 14790 3 14503 14504 14792 3 14503 14792 14791 3 14504 14505 14793 3 14504 14793 14792 3 14505 14506 14794 3 14505 14794 14793 3 14506 14507 14795 3 14506 14795 14794 3 14507 14508 14796 3 14507 14796 14795 3 14509 14797 14510 3 14798 14510 14797 3 14509 14579 14797 3 14867 14797 14579 3 14510 14798 14511 3 14799 14511 14798 3 14511 14799 14512 3 14800 14512 14799 3 14512 14800 14513 3 14801 14513 14800 3 14513 14801 14514 3 14802 14514 14801 3 14514 14802 14515 3 14803 14515 14802 3 14515 14803 14516 3 14804 14516 14803 3 14516 14804 14517 3 14805 14517 14804 3 14517 14805 14518 3 14806 14518 14805 3 14518 14806 14519 3 14807 14519 14806 3 14519 14807 14520 3 14808 14520 14807 3 14520 14808 14521 3 14809 14521 14808 3 14521 14809 14522 3 14810 14522 14809 3 14522 14810 14523 3 14811 14523 14810 3 14523 14811 14524 3 14812 14524 14811 3 14524 14812 14525 3 14813 14525 14812 3 14525 14813 14526 3 14814 14526 14813 3 14526 14814 14527 3 14815 14527 14814 3 14527 14815 14528 3 14816 14528 14815 3 14528 14816 14529 3 14817 14529 14816 3 14529 14817 14530 3 14818 14530 14817 3 14530 14818 14531 3 14819 14531 14818 3 14531 14819 14532 3 14820 14532 14819 3 14532 14820 14533 3 14821 14533 14820 3 14533 14821 14534 3 14822 14534 14821 3 14534 14822 14535 3 14823 14535 14822 3 14535 14823 14536 3 14824 14536 14823 3 14536 14824 14537 3 14825 14537 14824 3 14537 14825 14538 3 14826 14538 14825 3 14538 14826 14539 3 14827 14539 14826 3 14539 14827 14540 3 14828 14540 14827 3 14540 14828 14541 3 14829 14541 14828 3 14541 14829 14542 3 14830 14542 14829 3 14542 14830 14543 3 14831 14543 14830 3 14543 14831 14544 3 14832 14544 14831 3 14544 14832 14545 3 14833 14545 14832 3 14545 14833 14546 3 14834 14546 14833 3 14546 14834 14547 3 14835 14547 14834 3 14547 
14835 14548 3 14836 14548 14835 3 14548 14836 14549 3 14837 14549 14836 3 14549 14837 14550 3 14838 14550 14837 3 14550 14838 14551 3 14839 14551 14838 3 14551 14839 14552 3 14840 14552 14839 3 14552 14840 14553 3 14841 14553 14840 3 14553 14841 14554 3 14842 14554 14841 3 14554 14842 14555 3 14843 14555 14842 3 14555 14843 14556 3 14844 14556 14843 3 14556 14844 14557 3 14845 14557 14844 3 14557 14845 14558 3 14846 14558 14845 3 14558 14846 14559 3 14847 14559 14846 3 14559 14847 14560 3 14848 14560 14847 3 14560 14848 14561 3 14849 14561 14848 3 14561 14849 14562 3 14850 14562 14849 3 14562 14850 14563 3 14851 14563 14850 3 14563 14851 14564 3 14852 14564 14851 3 14564 14852 14565 3 14853 14565 14852 3 14565 14853 14566 3 14854 14566 14853 3 14566 14854 14567 3 14855 14567 14854 3 14567 14855 14568 3 14856 14568 14855 3 14568 14856 14569 3 14857 14569 14856 3 14569 14857 14570 3 14858 14570 14857 3 14570 14858 14571 3 14859 14571 14858 3 14571 14859 14572 3 14860 14572 14859 3 14572 14860 14573 3 14861 14573 14860 3 14573 14861 14574 3 14862 14574 14861 3 14574 14862 14575 3 14863 14575 14862 3 14575 14863 14576 3 14864 14576 14863 3 14576 14864 14865 3 14576 14865 14577 3 14577 14865 14868 3 14577 14868 14580 3 14578 14866 14867 3 14578 14867 14579 3 14578 14583 14866 3 14871 14866 14583 3 14580 14868 14869 3 14580 14869 14581 3 14581 14869 14872 3 14581 14872 14584 3 14582 14870 14871 3 14582 14871 14583 3 14582 14586 14870 3 14874 14870 14586 3 14584 14872 14873 3 14584 14873 14585 3 14585 14873 14875 3 14585 14875 14587 3 14586 14588 14874 3 14876 14874 14588 3 14587 14875 14877 3 14587 14877 14589 3 14588 14590 14876 3 14878 14876 14590 3 14589 14877 14879 3 14589 14879 14591 3 14590 14592 14878 3 14880 14878 14592 3 14591 14879 14881 3 14591 14881 14593 3 14592 14594 14882 3 14592 14882 14880 3 14593 14881 14883 3 14593 14883 14595 3 14594 14596 14884 3 14594 14884 14882 3 14595 14883 14885 3 14595 14885 14597 3 14596 14598 14886 3 14596 14886 14884 3 14597 
14885 14887 3 14597 14887 14599 3 14598 14600 14888 3 14598 14888 14886 3 14599 14887 14889 3 14599 14889 14601 3 14600 14602 14890 3 14600 14890 14888 3 14601 14889 14891 3 14601 14891 14603 3 14602 14604 14892 3 14602 14892 14890 3 14603 14891 14893 3 14603 14893 14605 3 14604 14606 14894 3 14604 14894 14892 3 14605 14893 14895 3 14605 14895 14607 3 14606 14608 14896 3 14606 14896 14894 3 14607 14895 14897 3 14607 14897 14609 3 14608 14610 14898 3 14608 14898 14896 3 14609 14897 14899 3 14609 14899 14611 3 14610 14612 14900 3 14610 14900 14898 3 14611 14899 14901 3 14611 14901 14613 3 14612 14614 14902 3 14612 14902 14900 3 14613 14901 14903 3 14613 14903 14615 3 14614 14616 14904 3 14614 14904 14902 3 14615 14903 14905 3 14615 14905 14617 3 14616 14618 14906 3 14616 14906 14904 3 14617 14905 14907 3 14617 14907 14619 3 14618 14620 14908 3 14618 14908 14906 3 14619 14907 14909 3 14619 14909 14621 3 14620 14622 14910 3 14620 14910 14908 3 14621 14909 14911 3 14621 14911 14623 3 14622 14624 14912 3 14622 14912 14910 3 14623 14911 14913 3 14623 14913 14625 3 14624 14626 14914 3 14624 14914 14912 3 14625 14913 14915 3 14625 14915 14627 3 14626 14628 14916 3 14626 14916 14914 3 14627 14915 14917 3 14627 14917 14629 3 14628 14630 14918 3 14628 14918 14916 3 14629 14917 14919 3 14629 14919 14631 3 14630 14632 14920 3 14630 14920 14918 3 14631 14919 14921 3 14631 14921 14633 3 14632 14634 14922 3 14632 14922 14920 3 14633 14921 14923 3 14633 14923 14635 3 14634 14636 14924 3 14634 14924 14922 3 14635 14923 14925 3 14635 14925 14637 3 14636 14638 14926 3 14636 14926 14924 3 14637 14925 14927 3 14637 14927 14639 3 14638 14640 14928 3 14638 14928 14926 3 14639 14927 14929 3 14639 14929 14641 3 14640 14642 14930 3 14640 14930 14928 3 14641 14929 14931 3 14641 14931 14643 3 14642 14644 14932 3 14642 14932 14930 3 14643 14931 14933 3 14643 14933 14645 3 14644 14646 14934 3 14644 14934 14932 3 14645 14933 14935 3 14645 14935 14647 3 14646 14648 14936 3 14646 14936 14934 3 14647 
14935 14937 3 14647 14937 14649 3 14648 14650 14938 3 14648 14938 14936 3 14649 14937 14939 3 14649 14939 14651 3 14650 14652 14940 3 14650 14940 14938 3 14651 14939 14653 3 14941 14653 14939 3 14652 14654 14942 3 14652 14942 14940 3 14653 14941 14655 3 14943 14655 14941 3 14654 14656 14944 3 14654 14944 14942 3 14655 14943 14657 3 14945 14657 14943 3 14656 14658 14946 3 14656 14946 14944 3 14657 14945 14659 3 14947 14659 14945 3 14658 14660 14948 3 14658 14948 14946 3 14659 14947 14661 3 14949 14661 14947 3 14660 14662 14950 3 14660 14950 14948 3 14661 14949 14663 3 14951 14663 14949 3 14662 14664 14952 3 14662 14952 14950 3 14663 14951 14665 3 14953 14665 14951 3 14664 14666 14954 3 14664 14954 14952 3 14665 14953 14667 3 14955 14667 14953 3 14666 14668 14954 3 14956 14954 14668 3 14667 14955 14669 3 14957 14669 14955 3 14668 14670 14956 3 14958 14956 14670 3 14669 14957 14671 3 14959 14671 14957 3 14670 14672 14958 3 14960 14958 14672 3 14671 14959 14673 3 14961 14673 14959 3 14672 14674 14960 3 14962 14960 14674 3 14673 14961 14675 3 14963 14675 14961 3 14674 14676 14962 3 14964 14962 14676 3 14675 14963 14677 3 14965 14677 14963 3 14676 14678 14964 3 14966 14964 14678 3 14677 14965 14679 3 14967 14679 14965 3 14678 14680 14966 3 14968 14966 14680 3 14679 14967 14681 3 14969 14681 14967 3 14680 14682 14968 3 14970 14968 14682 3 14681 14969 14683 3 14971 14683 14969 3 14682 14684 14970 3 14972 14970 14684 3 14683 14971 14685 3 14973 14685 14971 3 14684 14686 14972 3 14974 14972 14686 3 14685 14973 14687 3 14975 14687 14973 3 14686 14688 14974 3 14976 14974 14688 3 14687 14975 14689 3 14977 14689 14975 3 14688 14690 14976 3 14978 14976 14690 3 14689 14977 14691 3 14979 14691 14977 3 14690 14692 14978 3 14980 14978 14692 3 14691 14979 14693 3 14981 14693 14979 3 14692 14694 14980 3 14982 14980 14694 3 14693 14981 14695 3 14983 14695 14981 3 14694 14696 14982 3 14984 14982 14696 3 14695 14983 14697 3 14985 14697 14983 3 14696 14698 14984 3 14986 14984 14698 3 14697 
14985 14699 3 14987 14699 14985 3 14698 14700 14986 3 14988 14986 14700 3 14699 14987 14701 3 14989 14701 14987 3 14700 14702 14988 3 14990 14988 14702 3 14701 14989 14703 3 14991 14703 14989 3 14702 14704 14990 3 14992 14990 14704 3 14703 14991 14705 3 14993 14705 14991 3 14704 14706 14992 3 14994 14992 14706 3 14705 14993 14707 3 14995 14707 14993 3 14706 14708 14994 3 14996 14994 14708 3 14707 14995 14709 3 14997 14709 14995 3 14708 14710 14996 3 14998 14996 14710 3 14709 14997 14711 3 14999 14711 14997 3 14710 14712 14998 3 15000 14998 14712 3 14711 14999 14713 3 15001 14713 14999 3 14712 14714 15000 3 15002 15000 14714 3 14713 15001 14715 3 15003 14715 15001 3 14714 14716 15002 3 15004 15002 14716 3 14715 15003 14717 3 15005 14717 15003 3 14716 14718 15004 3 15006 15004 14718 3 14717 15005 14719 3 15007 14719 15005 3 14718 14720 15006 3 15008 15006 14720 3 14719 15007 14723 3 15011 14723 15007 3 14720 14721 15008 3 15009 15008 14721 3 14721 14724 15009 3 15012 15009 14724 3 14722 14723 15010 3 15011 15010 14723 3 14722 15010 14727 3 15015 14727 15010 3 14724 14725 15012 3 15013 15012 14725 3 14725 14728 15013 3 15016 15013 14728 3 14726 14727 15014 3 15015 15014 14727 3 14726 15014 15084 3 14726 15084 14796 3 14728 14729 15016 3 15017 15016 14729 3 14729 14730 15017 3 15018 15017 14730 3 14730 14731 15018 3 15019 15018 14731 3 14731 14732 15019 3 15020 15019 14732 3 14732 14733 15020 3 15021 15020 14733 3 14733 14734 15021 3 15022 15021 14734 3 14734 14735 15022 3 15023 15022 14735 3 14735 14736 15023 3 15024 15023 14736 3 14736 14737 15024 3 15025 15024 14737 3 14737 14738 15025 3 15026 15025 14738 3 14738 14739 15026 3 15027 15026 14739 3 14739 14740 15027 3 15028 15027 14740 3 14740 14741 15028 3 15029 15028 14741 3 14741 14742 15030 3 14741 15030 15029 3 14742 14743 15031 3 14742 15031 15030 3 14743 14744 15032 3 14743 15032 15031 3 14744 14745 15033 3 14744 15033 15032 3 14745 14746 15034 3 14745 15034 15033 3 14746 14747 15035 3 14746 15035 15034 3 14747 
14748 15036 3 14747 15036 15035 3 14748 14749 15037 3 14748 15037 15036 3 14749 14750 15038 3 14749 15038 15037 3 14750 14751 15039 3 14750 15039 15038 3 14751 14752 15040 3 14751 15040 15039 3 14752 14753 15041 3 14752 15041 15040 3 14753 14754 15042 3 14753 15042 15041 3 14754 14755 15043 3 14754 15043 15042 3 14755 14756 15044 3 14755 15044 15043 3 14756 14757 15045 3 14756 15045 15044 3 14757 14758 15046 3 14757 15046 15045 3 14758 14759 15047 3 14758 15047 15046 3 14759 14760 15048 3 14759 15048 15047 3 14760 14761 15049 3 14760 15049 15048 3 14761 14762 15050 3 14761 15050 15049 3 14762 14763 15051 3 14762 15051 15050 3 14763 14764 15052 3 14763 15052 15051 3 14764 14765 15053 3 14764 15053 15052 3 14765 14766 15054 3 14765 15054 15053 3 14766 14767 15055 3 14766 15055 15054 3 14767 14768 15056 3 14767 15056 15055 3 14768 14769 15057 3 14768 15057 15056 3 14769 14770 15058 3 14769 15058 15057 3 14770 14771 15059 3 14770 15059 15058 3 14771 14772 15060 3 14771 15060 15059 3 14772 14773 15061 3 14772 15061 15060 3 14773 14774 15062 3 14773 15062 15061 3 14774 14775 15063 3 14774 15063 15062 3 14775 14776 15064 3 14775 15064 15063 3 14776 14777 15065 3 14776 15065 15064 3 14777 14778 15066 3 14777 15066 15065 3 14778 14779 15067 3 14778 15067 15066 3 14779 14780 15068 3 14779 15068 15067 3 14780 14781 15069 3 14780 15069 15068 3 14781 14782 15070 3 14781 15070 15069 3 14782 14783 15071 3 14782 15071 15070 3 14783 14784 15072 3 14783 15072 15071 3 14784 14785 15073 3 14784 15073 15072 3 14785 14786 15074 3 14785 15074 15073 3 14786 14787 15075 3 14786 15075 15074 3 14787 14788 15076 3 14787 15076 15075 3 14788 14789 15077 3 14788 15077 15076 3 14789 14790 15078 3 14789 15078 15077 3 14790 14791 15079 3 14790 15079 15078 3 14791 14792 15080 3 14791 15080 15079 3 14792 14793 15081 3 14792 15081 15080 3 14793 14794 15082 3 14793 15082 15081 3 14794 14795 15083 3 14794 15083 15082 3 14795 14796 15084 3 14795 15084 15083 3 14797 15085 15086 3 14797 15086 14798 3 14797 
14867 15085 3 15155 15085 14867 3 14798 15086 15087 3 14798 15087 14799 3 14799 15087 15088 3 14799 15088 14800 3 14800 15088 15089 3 14800 15089 14801 3 14801 15089 14802 3 15090 14802 15089 3 14802 15090 14803 3 15091 14803 15090 3 14803 15091 14804 3 15092 14804 15091 3 14804 15092 14805 3 15093 14805 15092 3 14805 15093 14806 3 15094 14806 15093 3 14806 15094 14807 3 15095 14807 15094 3 14807 15095 14808 3 15096 14808 15095 3 14808 15096 14809 3 15097 14809 15096 3 14809 15097 14810 3 15098 14810 15097 3 14810 15098 14811 3 15099 14811 15098 3 14811 15099 14812 3 15100 14812 15099 3 14812 15100 14813 3 15101 14813 15100 3 14813 15101 14814 3 15102 14814 15101 3 14814 15102 14815 3 15103 14815 15102 3 14815 15103 14816 3 15104 14816 15103 3 14816 15104 14817 3 15105 14817 15104 3 14817 15105 14818 3 15106 14818 15105 3 14818 15106 14819 3 15107 14819 15106 3 14819 15107 14820 3 15108 14820 15107 3 14820 15108 14821 3 15109 14821 15108 3 14821 15109 14822 3 15110 14822 15109 3 14822 15110 14823 3 15111 14823 15110 3 14823 15111 14824 3 15112 14824 15111 3 14824 15112 14825 3 15113 14825 15112 3 14825 15113 14826 3 15114 14826 15113 3 14826 15114 14827 3 15115 14827 15114 3 14827 15115 14828 3 15116 14828 15115 3 14828 15116 14829 3 15117 14829 15116 3 14829 15117 14830 3 15118 14830 15117 3 14830 15118 14831 3 15119 14831 15118 3 14831 15119 14832 3 15120 14832 15119 3 14832 15120 14833 3 15121 14833 15120 3 14833 15121 14834 3 15122 14834 15121 3 14834 15122 14835 3 15123 14835 15122 3 14835 15123 14836 3 15124 14836 15123 3 14836 15124 14837 3 15125 14837 15124 3 14837 15125 14838 3 15126 14838 15125 3 14838 15126 14839 3 15127 14839 15126 3 14839 15127 14840 3 15128 14840 15127 3 14840 15128 14841 3 15129 14841 15128 3 14841 15129 14842 3 15130 14842 15129 3 14842 15130 14843 3 15131 14843 15130 3 14843 15131 14844 3 15132 14844 15131 3 14844 15132 14845 3 15133 14845 15132 3 14845 15133 14846 3 15134 14846 15133 3 14846 15134 14847 3 15135 14847 15134 3 14847 
15135 14848 3 15136 14848 15135 3 14848 15136 14849 3 15137 14849 15136 3 14849 15137 14850 3 15138 14850 15137 3 14850 15138 14851 3 15139 14851 15138 3 14851 15139 14852 3 15140 14852 15139 3 14852 15140 14853 3 15141 14853 15140 3 14853 15141 14854 3 15142 14854 15141 3 14854 15142 14855 3 15143 14855 15142 3 14855 15143 14856 3 15144 14856 15143 3 14856 15144 14857 3 15145 14857 15144 3 14857 15145 14858 3 15146 14858 15145 3 14858 15146 14859 3 15147 14859 15146 3 14859 15147 14860 3 15148 14860 15147 3 14860 15148 14861 3 15149 14861 15148 3 14861 15149 14862 3 15150 14862 15149 3 14862 15150 14863 3 15151 14863 15150 3 14863 15151 14864 3 15152 14864 15151 3 14864 15152 14865 3 15153 14865 15152 3 14865 15153 14868 3 15156 14868 15153 3 14866 15154 14867 3 15155 14867 15154 3 14866 14871 15154 3 15159 15154 14871 3 14868 15156 14869 3 15157 14869 15156 3 14869 15157 14872 3 15160 14872 15157 3 14870 15158 14871 3 15159 14871 15158 3 14870 14874 15158 3 15162 15158 14874 3 14872 15160 14873 3 15161 14873 15160 3 14873 15161 14875 3 15163 14875 15161 3 14874 14876 15162 3 15164 15162 14876 3 14875 15163 14877 3 15165 14877 15163 3 14876 14878 15164 3 15166 15164 14878 3 14877 15165 15167 3 14877 15167 14879 3 14878 14880 15166 3 15168 15166 14880 3 14879 15167 15169 3 14879 15169 14881 3 14880 14882 15168 3 15170 15168 14882 3 14881 15169 15171 3 14881 15171 14883 3 14882 14884 15170 3 15172 15170 14884 3 14883 15171 15173 3 14883 15173 14885 3 14884 14886 15172 3 15174 15172 14886 3 14885 15173 15175 3 14885 15175 14887 3 14886 14888 15174 3 15176 15174 14888 3 14887 15175 15177 3 14887 15177 14889 3 14888 14890 15176 3 15178 15176 14890 3 14889 15177 15179 3 14889 15179 14891 3 14890 14892 15180 3 14890 15180 15178 3 14891 15179 15181 3 14891 15181 14893 3 14892 14894 15182 3 14892 15182 15180 3 14893 15181 15183 3 14893 15183 14895 3 14894 14896 15184 3 14894 15184 15182 3 14895 15183 15185 3 14895 15185 14897 3 14896 14898 15186 3 14896 15186 15184 3 14897 
15185 15187 3 14897 15187 14899 3 14898 14900 15188 3 14898 15188 15186 3 14899 15187 15189 3 14899 15189 14901 3 14900 14902 15190 3 14900 15190 15188 3 14901 15189 15191 3 14901 15191 14903 3 14902 14904 15192 3 14902 15192 15190 3 14903 15191 15193 3 14903 15193 14905 3 14904 14906 15194 3 14904 15194 15192 3 14905 15193 15195 3 14905 15195 14907 3 14906 14908 15196 3 14906 15196 15194 3 14907 15195 15197 3 14907 15197 14909 3 14908 14910 15198 3 14908 15198 15196 3 14909 15197 15199 3 14909 15199 14911 3 14910 14912 15200 3 14910 15200 15198 3 14911 15199 15201 3 14911 15201 14913 3 14912 14914 15202 3 14912 15202 15200 3 14913 15201 15203 3 14913 15203 14915 3 14914 14916 15204 3 14914 15204 15202 3 14915 15203 15205 3 14915 15205 14917 3 14916 14918 15206 3 14916 15206 15204 3 14917 15205 15207 3 14917 15207 14919 3 14918 14920 15208 3 14918 15208 15206 3 14919 15207 15209 3 14919 15209 14921 3 14920 14922 15210 3 14920 15210 15208 3 14921 15209 15211 3 14921 15211 14923 3 14922 14924 15212 3 14922 15212 15210 3 14923 15211 15213 3 14923 15213 14925 3 14924 14926 15214 3 14924 15214 15212 3 14925 15213 15215 3 14925 15215 14927 3 14926 14928 15216 3 14926 15216 15214 3 14927 15215 15217 3 14927 15217 14929 3 14928 14930 15218 3 14928 15218 15216 3 14929 15217 15219 3 14929 15219 14931 3 14930 14932 15220 3 14930 15220 15218 3 14931 15219 15221 3 14931 15221 14933 3 14932 14934 15222 3 14932 15222 15220 3 14933 15221 15223 3 14933 15223 14935 3 14934 14936 15224 3 14934 15224 15222 3 14935 15223 15225 3 14935 15225 14937 3 14936 14938 15226 3 14936 15226 15224 3 14937 15225 15227 3 14937 15227 14939 3 14938 14940 15228 3 14938 15228 15226 3 14939 15227 15229 3 14939 15229 14941 3 14940 14942 15230 3 14940 15230 15228 3 14941 15229 15231 3 14941 15231 14943 3 14942 14944 15232 3 14942 15232 15230 3 14943 15231 15233 3 14943 15233 14945 3 14944 14946 15234 3 14944 15234 15232 3 14945 15233 15235 3 14945 15235 14947 3 14946 14948 15236 3 14946 15236 15234 3 14947 
15235 15237 3 14947 15237 14949 3 14948 14950 15238 3 14948 15238 15236 3 14949 15237 15239 3 14949 15239 14951 3 14950 14952 15240 3 14950 15240 15238 3 14951 15239 15241 3 14951 15241 14953 3 14952 14954 15242 3 14952 15242 15240 3 14953 15241 14955 3 15243 14955 15241 3 14954 14956 15244 3 14954 15244 15242 3 14955 15243 14957 3 15245 14957 15243 3 14956 14958 15246 3 14956 15246 15244 3 14957 15245 14959 3 15247 14959 15245 3 14958 14960 15248 3 14958 15248 15246 3 14959 15247 14961 3 15249 14961 15247 3 14960 14962 15250 3 14960 15250 15248 3 14961 15249 14963 3 15251 14963 15249 3 14962 14964 15252 3 14962 15252 15250 3 14963 15251 14965 3 15253 14965 15251 3 14964 14966 15254 3 14964 15254 15252 3 14965 15253 14967 3 15255 14967 15253 3 14966 14968 15254 3 15256 15254 14968 3 14967 15255 14969 3 15257 14969 15255 3 14968 14970 15256 3 15258 15256 14970 3 14969 15257 14971 3 15259 14971 15257 3 14970 14972 15258 3 15260 15258 14972 3 14971 15259 14973 3 15261 14973 15259 3 14972 14974 15260 3 15262 15260 14974 3 14973 15261 14975 3 15263 14975 15261 3 14974 14976 15262 3 15264 15262 14976 3 14975 15263 14977 3 15265 14977 15263 3 14976 14978 15264 3 15266 15264 14978 3 14977 15265 14979 3 15267 14979 15265 3 14978 14980 15266 3 15268 15266 14980 3 14979 15267 14981 3 15269 14981 15267 3 14980 14982 15268 3 15270 15268 14982 3 14981 15269 14983 3 15271 14983 15269 3 14982 14984 15270 3 15272 15270 14984 3 14983 15271 14985 3 15273 14985 15271 3 14984 14986 15272 3 15274 15272 14986 3 14985 15273 14987 3 15275 14987 15273 3 14986 14988 15274 3 15276 15274 14988 3 14987 15275 14989 3 15277 14989 15275 3 14988 14990 15276 3 15278 15276 14990 3 14989 15277 14991 3 15279 14991 15277 3 14990 14992 15278 3 15280 15278 14992 3 14991 15279 14993 3 15281 14993 15279 3 14992 14994 15280 3 15282 15280 14994 3 14993 15281 14995 3 15283 14995 15281 3 14994 14996 15282 3 15284 15282 14996 3 14995 15283 14997 3 15285 14997 15283 3 14996 14998 15284 3 15286 15284 14998 3 14997 
15285 14999 3 15287 14999 15285 3 14998 15000 15286 3 15288 15286 15000 3 14999 15287 15001 3 15289 15001 15287 3 15000 15002 15288 3 15290 15288 15002 3 15001 15289 15003 3 15291 15003 15289 3 15002 15004 15290 3 15292 15290 15004 3 15003 15291 15005 3 15293 15005 15291 3 15004 15006 15292 3 15294 15292 15006 3 15005 15293 15007 3 15295 15007 15293 3 15006 15008 15294 3 15296 15294 15008 3 15007 15295 15011 3 15299 15011 15295 3 15008 15009 15296 3 15297 15296 15009 3 15009 15012 15297 3 15300 15297 15012 3 15010 15011 15298 3 15299 15298 15011 3 15010 15298 15015 3 15303 15015 15298 3 15012 15013 15300 3 15301 15300 15013 3 15013 15016 15301 3 15304 15301 15016 3 15014 15015 15302 3 15303 15302 15015 3 15014 15302 15084 3 15372 15084 15302 3 15016 15017 15304 3 15305 15304 15017 3 15017 15018 15305 3 15306 15305 15018 3 15018 15019 15306 3 15307 15306 15019 3 15019 15020 15307 3 15308 15307 15020 3 15020 15021 15308 3 15309 15308 15021 3 15021 15022 15309 3 15310 15309 15022 3 15022 15023 15310 3 15311 15310 15023 3 15023 15024 15311 3 15312 15311 15024 3 15024 15025 15312 3 15313 15312 15025 3 15025 15026 15313 3 15314 15313 15026 3 15026 15027 15314 3 15315 15314 15027 3 15027 15028 15315 3 15316 15315 15028 3 15028 15029 15316 3 15317 15316 15029 3 15029 15030 15317 3 15318 15317 15030 3 15030 15031 15318 3 15319 15318 15031 3 15031 15032 15319 3 15320 15319 15032 3 15032 15033 15320 3 15321 15320 15033 3 15033 15034 15321 3 15322 15321 15034 3 15034 15035 15322 3 15323 15322 15035 3 15035 15036 15323 3 15324 15323 15036 3 15036 15037 15324 3 15325 15324 15037 3 15037 15038 15325 3 15326 15325 15038 3 15038 15039 15326 3 15327 15326 15039 3 15039 15040 15327 3 15328 15327 15040 3 15040 15041 15328 3 15329 15328 15041 3 15041 15042 15329 3 15330 15329 15042 3 15042 15043 15331 3 15042 15331 15330 3 15043 15044 15332 3 15043 15332 15331 3 15044 15045 15333 3 15044 15333 15332 3 15045 15046 15334 3 15045 15334 15333 3 15046 15047 15335 3 15046 15335 15334 3 15047 
15048 15336 3 15047 15336 15335 3 15048 15049 15337 3 15048 15337 15336 3 15049 15050 15338 3 15049 15338 15337 3 15050 15051 15339 3 15050 15339 15338 3 15051 15052 15340 3 15051 15340 15339 3 15052 15053 15341 3 15052 15341 15340 3 15053 15054 15342 3 15053 15342 15341 3 15054 15055 15343 3 15054 15343 15342 3 15055 15056 15344 3 15055 15344 15343 3 15056 15057 15345 3 15056 15345 15344 3 15057 15058 15346 3 15057 15346 15345 3 15058 15059 15347 3 15058 15347 15346 3 15059 15060 15348 3 15059 15348 15347 3 15060 15061 15349 3 15060 15349 15348 3 15061 15062 15350 3 15061 15350 15349 3 15062 15063 15351 3 15062 15351 15350 3 15063 15064 15352 3 15063 15352 15351 3 15064 15065 15353 3 15064 15353 15352 3 15065 15066 15354 3 15065 15354 15353 3 15066 15067 15355 3 15066 15355 15354 3 15067 15068 15356 3 15067 15356 15355 3 15068 15069 15357 3 15068 15357 15356 3 15069 15070 15358 3 15069 15358 15357 3 15070 15071 15359 3 15070 15359 15358 3 15071 15072 15360 3 15071 15360 15359 3 15072 15073 15361 3 15072 15361 15360 3 15073 15074 15362 3 15073 15362 15361 3 15074 15075 15363 3 15074 15363 15362 3 15075 15076 15364 3 15075 15364 15363 3 15076 15077 15365 3 15076 15365 15364 3 15077 15078 15366 3 15077 15366 15365 3 15078 15079 15367 3 15078 15367 15366 3 15079 15080 15368 3 15079 15368 15367 3 15080 15081 15369 3 15080 15369 15368 3 15081 15082 15370 3 15081 15370 15369 3 15082 15083 15371 3 15082 15371 15370 3 15083 15084 15372 3 15083 15372 15371 3 15085 15373 15374 3 15085 15374 15086 3 15085 15155 15373 3 15443 15373 15155 3 15086 15374 15375 3 15086 15375 15087 3 15087 15375 15376 3 15087 15376 15088 3 15088 15376 15377 3 15088 15377 15089 3 15089 15377 15378 3 15089 15378 15090 3 15090 15378 15379 3 15090 15379 15091 3 15091 15379 15380 3 15091 15380 15092 3 15092 15380 15381 3 15092 15381 15093 3 15093 15381 15382 3 15093 15382 15094 3 15094 15382 15383 3 15094 15383 15095 3 15095 15383 15384 3 15095 15384 15096 3 15096 15384 15385 3 15096 15385 15097 3 15097 
15385 15386 3 15097 15386 15098 3 15098 15386 15387 3 15098 15387 15099 3 15099 15387 15388 3 15099 15388 15100 3 15100 15388 15389 3 15100 15389 15101 3 15101 15389 15390 3 15101 15390 15102 3 15102 15390 15391 3 15102 15391 15103 3 15103 15391 15392 3 15103 15392 15104 3 15104 15392 15105 3 15393 15105 15392 3 15105 15393 15106 3 15394 15106 15393 3 15106 15394 15107 3 15395 15107 15394 3 15107 15395 15108 3 15396 15108 15395 3 15108 15396 15109 3 15397 15109 15396 3 15109 15397 15110 3 15398 15110 15397 3 15110 15398 15111 3 15399 15111 15398 3 15111 15399 15112 3 15400 15112 15399 3 15112 15400 15113 3 15401 15113 15400 3 15113 15401 15114 3 15402 15114 15401 3 15114 15402 15115 3 15403 15115 15402 3 15115 15403 15116 3 15404 15116 15403 3 15116 15404 15117 3 15405 15117 15404 3 15117 15405 15118 3 15406 15118 15405 3 15118 15406 15119 3 15407 15119 15406 3 15119 15407 15120 3 15408 15120 15407 3 15120 15408 15121 3 15409 15121 15408 3 15121 15409 15122 3 15410 15122 15409 3 15122 15410 15123 3 15411 15123 15410 3 15123 15411 15124 3 15412 15124 15411 3 15124 15412 15125 3 15413 15125 15412 3 15125 15413 15126 3 15414 15126 15413 3 15126 15414 15127 3 15415 15127 15414 3 15127 15415 15128 3 15416 15128 15415 3 15128 15416 15129 3 15417 15129 15416 3 15129 15417 15130 3 15418 15130 15417 3 15130 15418 15131 3 15419 15131 15418 3 15131 15419 15132 3 15420 15132 15419 3 15132 15420 15133 3 15421 15133 15420 3 15133 15421 15134 3 15422 15134 15421 3 15134 15422 15135 3 15423 15135 15422 3 15135 15423 15136 3 15424 15136 15423 3 15136 15424 15137 3 15425 15137 15424 3 15137 15425 15138 3 15426 15138 15425 3 15138 15426 15139 3 15427 15139 15426 3 15139 15427 15140 3 15428 15140 15427 3 15140 15428 15141 3 15429 15141 15428 3 15141 15429 15142 3 15430 15142 15429 3 15142 15430 15143 3 15431 15143 15430 3 15143 15431 15144 3 15432 15144 15431 3 15144 15432 15145 3 15433 15145 15432 3 15145 15433 15146 3 15434 15146 15433 3 15146 15434 15147 3 15435 15147 15434 3 15147 
15435 15148 3 15436 15148 15435 3 15148 15436 15149 3 15437 15149 15436 3 15149 15437 15150 3 15438 15150 15437 3 15150 15438 15151 3 15439 15151 15438 3 15151 15439 15152 3 15440 15152 15439 3 15152 15440 15153 3 15441 15153 15440 3 15153 15441 15156 3 15444 15156 15441 3 15154 15442 15155 3 15443 15155 15442 3 15154 15159 15442 3 15447 15442 15159 3 15156 15444 15157 3 15445 15157 15444 3 15157 15445 15160 3 15448 15160 15445 3 15158 15446 15159 3 15447 15159 15446 3 15158 15162 15446 3 15450 15446 15162 3 15160 15448 15161 3 15449 15161 15448 3 15161 15449 15163 3 15451 15163 15449 3 15162 15164 15450 3 15452 15450 15164 3 15163 15451 15165 3 15453 15165 15451 3 15164 15166 15452 3 15454 15452 15166 3 15165 15453 15167 3 15455 15167 15453 3 15166 15168 15454 3 15456 15454 15168 3 15167 15455 15169 3 15457 15169 15455 3 15168 15170 15456 3 15458 15456 15170 3 15169 15457 15171 3 15459 15171 15457 3 15170 15172 15458 3 15460 15458 15172 3 15171 15459 15173 3 15461 15173 15459 3 15172 15174 15460 3 15462 15460 15174 3 15173 15461 15175 3 15463 15175 15461 3 15174 15176 15462 3 15464 15462 15176 3 15175 15463 15177 3 15465 15177 15463 3 15176 15178 15464 3 15466 15464 15178 3 15177 15465 15179 3 15467 15179 15465 3 15178 15180 15466 3 15468 15466 15180 3 15179 15467 15181 3 15469 15181 15467 3 15180 15182 15468 3 15470 15468 15182 3 15181 15469 15471 3 15181 15471 15183 3 15182 15184 15470 3 15472 15470 15184 3 15183 15471 15473 3 15183 15473 15185 3 15184 15186 15472 3 15474 15472 15186 3 15185 15473 15475 3 15185 15475 15187 3 15186 15188 15474 3 15476 15474 15188 3 15187 15475 15477 3 15187 15477 15189 3 15188 15190 15476 3 15478 15476 15190 3 15189 15477 15479 3 15189 15479 15191 3 15190 15192 15478 3 15480 15478 15192 3 15191 15479 15481 3 15191 15481 15193 3 15192 15194 15482 3 15192 15482 15480 3 15193 15481 15483 3 15193 15483 15195 3 15194 15196 15484 3 15194 15484 15482 3 15195 15483 15485 3 15195 15485 15197 3 15196 15198 15486 3 15196 15486 15484 3 15197 
15485 15487 3 15197 15487 15199 3 15198 15200 15488 3 15198 15488 15486 3 15199 15487 15489 3 15199 15489 15201 3 15200 15202 15490 3 15200 15490 15488 3 15201 15489 15491 3 15201 15491 15203 3 15202 15204 15492 3 15202 15492 15490 3 15203 15491 15493 3 15203 15493 15205 3 15204 15206 15494 3 15204 15494 15492 3 15205 15493 15495 3 15205 15495 15207 3 15206 15208 15496 3 15206 15496 15494 3 15207 15495 15497 3 15207 15497 15209 3 15208 15210 15498 3 15208 15498 15496 3 15209 15497 15499 3 15209 15499 15211 3 15210 15212 15500 3 15210 15500 15498 3 15211 15499 15501 3 15211 15501 15213 3 15212 15214 15502 3 15212 15502 15500 3 15213 15501 15503 3 15213 15503 15215 3 15214 15216 15504 3 15214 15504 15502 3 15215 15503 15505 3 15215 15505 15217 3 15216 15218 15506 3 15216 15506 15504 3 15217 15505 15507 3 15217 15507 15219 3 15218 15220 15508 3 15218 15508 15506 3 15219 15507 15509 3 15219 15509 15221 3 15220 15222 15510 3 15220 15510 15508 3 15221 15509 15511 3 15221 15511 15223 3 15222 15224 15512 3 15222 15512 15510 3 15223 15511 15513 3 15223 15513 15225 3 15224 15226 15514 3 15224 15514 15512 3 15225 15513 15515 3 15225 15515 15227 3 15226 15228 15516 3 15226 15516 15514 3 15227 15515 15517 3 15227 15517 15229 3 15228 15230 15518 3 15228 15518 15516 3 15229 15517 15519 3 15229 15519 15231 3 15230 15232 15520 3 15230 15520 15518 3 15231 15519 15521 3 15231 15521 15233 3 15232 15234 15522 3 15232 15522 15520 3 15233 15521 15523 3 15233 15523 15235 3 15234 15236 15524 3 15234 15524 15522 3 15235 15523 15525 3 15235 15525 15237 3 15236 15238 15526 3 15236 15526 15524 3 15237 15525 15527 3 15237 15527 15239 3 15238 15240 15528 3 15238 15528 15526 3 15239 15527 15529 3 15239 15529 15241 3 15240 15242 15530 3 15240 15530 15528 3 15241 15529 15531 3 15241 15531 15243 3 15242 15244 15532 3 15242 15532 15530 3 15243 15531 15533 3 15243 15533 15245 3 15244 15246 15534 3 15244 15534 15532 3 15245 15533 15535 3 15245 15535 15247 3 15246 15248 15536 3 15246 15536 15534 3 15247 
15535 15537 3 15247 15537 15249 3 15248 15250 15538 3 15248 15538 15536 3 15249 15537 15539 3 15249 15539 15251 3 15250 15252 15540 3 15250 15540 15538 3 15251 15539 15541 3 15251 15541 15253 3 15252 15254 15542 3 15252 15542 15540 3 15253 15541 15543 3 15253 15543 15255 3 15254 15256 15544 3 15254 15544 15542 3 15255 15543 15545 3 15255 15545 15257 3 15256 15258 15546 3 15256 15546 15544 3 15257 15545 15259 3 15547 15259 15545 3 15258 15260 15548 3 15258 15548 15546 3 15259 15547 15261 3 15549 15261 15547 3 15260 15262 15550 3 15260 15550 15548 3 15261 15549 15263 3 15551 15263 15549 3 15262 15264 15552 3 15262 15552 15550 3 15263 15551 15265 3 15553 15265 15551 3 15264 15266 15554 3 15264 15554 15552 3 15265 15553 15267 3 15555 15267 15553 3 15266 15268 15556 3 15266 15556 15554 3 15267 15555 15269 3 15557 15269 15555 3 15268 15270 15556 3 15558 15556 15270 3 15269 15557 15271 3 15559 15271 15557 3 15270 15272 15558 3 15560 15558 15272 3 15271 15559 15273 3 15561 15273 15559 3 15272 15274 15560 3 15562 15560 15274 3 15273 15561 15275 3 15563 15275 15561 3 15274 15276 15562 3 15564 15562 15276 3 15275 15563 15277 3 15565 15277 15563 3 15276 15278 15564 3 15566 15564 15278 3 15277 15565 15279 3 15567 15279 15565 3 15278 15280 15566 3 15568 15566 15280 3 15279 15567 15281 3 15569 15281 15567 3 15280 15282 15568 3 15570 15568 15282 3 15281 15569 15283 3 15571 15283 15569 3 15282 15284 15570 3 15572 15570 15284 3 15283 15571 15285 3 15573 15285 15571 3 15284 15286 15572 3 15574 15572 15286 3 15285 15573 15287 3 15575 15287 15573 3 15286 15288 15574 3 15576 15574 15288 3 15287 15575 15289 3 15577 15289 15575 3 15288 15290 15576 3 15578 15576 15290 3 15289 15577 15291 3 15579 15291 15577 3 15290 15292 15578 3 15580 15578 15292 3 15291 15579 15293 3 15581 15293 15579 3 15292 15294 15580 3 15582 15580 15294 3 15293 15581 15295 3 15583 15295 15581 3 15294 15296 15582 3 15584 15582 15296 3 15295 15583 15299 3 15587 15299 15583 3 15296 15297 15584 3 15585 15584 15297 3 15297 
15300 15585 3 15588 15585 15300 3 15298 15299 15586 3 15587 15586 15299 3 15298 15586 15303 3 15591 15303 15586 3 15300 15301 15588 3 15589 15588 15301 3 15301 15304 15589 3 15592 15589 15304 3 15302 15303 15590 3 15591 15590 15303 3 15302 15590 15372 3 15660 15372 15590 3 15304 15305 15592 3 15593 15592 15305 3 15305 15306 15593 3 15594 15593 15306 3 15306 15307 15594 3 15595 15594 15307 3 15307 15308 15595 3 15596 15595 15308 3 15308 15309 15596 3 15597 15596 15309 3 15309 15310 15597 3 15598 15597 15310 3 15310 15311 15598 3 15599 15598 15311 3 15311 15312 15599 3 15600 15599 15312 3 15312 15313 15600 3 15601 15600 15313 3 15313 15314 15601 3 15602 15601 15314 3 15314 15315 15602 3 15603 15602 15315 3 15315 15316 15603 3 15604 15603 15316 3 15316 15317 15604 3 15605 15604 15317 3 15317 15318 15605 3 15606 15605 15318 3 15318 15319 15606 3 15607 15606 15319 3 15319 15320 15607 3 15608 15607 15320 3 15320 15321 15608 3 15609 15608 15321 3 15321 15322 15609 3 15610 15609 15322 3 15322 15323 15610 3 15611 15610 15323 3 15323 15324 15611 3 15612 15611 15324 3 15324 15325 15612 3 15613 15612 15325 3 15325 15326 15613 3 15614 15613 15326 3 15326 15327 15614 3 15615 15614 15327 3 15327 15328 15615 3 15616 15615 15328 3 15328 15329 15616 3 15617 15616 15329 3 15329 15330 15617 3 15618 15617 15330 3 15330 15331 15618 3 15619 15618 15331 3 15331 15332 15619 3 15620 15619 15332 3 15332 15333 15620 3 15621 15620 15333 3 15333 15334 15621 3 15622 15621 15334 3 15334 15335 15622 3 15623 15622 15335 3 15335 15336 15623 3 15624 15623 15336 3 15336 15337 15624 3 15625 15624 15337 3 15337 15338 15625 3 15626 15625 15338 3 15338 15339 15626 3 15627 15626 15339 3 15339 15340 15627 3 15628 15627 15340 3 15340 15341 15628 3 15629 15628 15341 3 15341 15342 15629 3 15630 15629 15342 3 15342 15343 15630 3 15631 15630 15343 3 15343 15344 15631 3 15632 15631 15344 3 15344 15345 15632 3 15633 15632 15345 3 15345 15346 15634 3 15345 15634 15633 3 15346 15347 15635 3 15346 15635 15634 3 15347 
15348 15636 3 15347 15636 15635 3 15348 15349 15637 3 15348 15637 15636 3 15349 15350 15638 3 15349 15638 15637 3 15350 15351 15639 3 15350 15639 15638 3 15351 15352 15640 3 15351 15640 15639 3 15352 15353 15641 3 15352 15641 15640 3 15353 15354 15642 3 15353 15642 15641 3 15354 15355 15643 3 15354 15643 15642 3 15355 15356 15644 3 15355 15644 15643 3 15356 15357 15645 3 15356 15645 15644 3 15357 15358 15646 3 15357 15646 15645 3 15358 15359 15647 3 15358 15647 15646 3 15359 15360 15648 3 15359 15648 15647 3 15360 15361 15649 3 15360 15649 15648 3 15361 15362 15650 3 15361 15650 15649 3 15362 15363 15651 3 15362 15651 15650 3 15363 15364 15652 3 15363 15652 15651 3 15364 15365 15653 3 15364 15653 15652 3 15365 15366 15654 3 15365 15654 15653 3 15366 15367 15655 3 15366 15655 15654 3 15367 15368 15656 3 15367 15656 15655 3 15368 15369 15657 3 15368 15657 15656 3 15369 15370 15658 3 15369 15658 15657 3 15370 15371 15659 3 15370 15659 15658 3 15371 15372 15660 3 15371 15660 15659 3 15373 15661 15662 3 15373 15662 15374 3 15373 15443 15731 3 15373 15731 15661 3 15374 15662 15663 3 15374 15663 15375 3 15375 15663 15664 3 15375 15664 15376 3 15376 15664 15665 3 15376 15665 15377 3 15377 15665 15666 3 15377 15666 15378 3 15378 15666 15667 3 15378 15667 15379 3 15379 15667 15668 3 15379 15668 15380 3 15380 15668 15669 3 15380 15669 15381 3 15381 15669 15670 3 15381 15670 15382 3 15382 15670 15671 3 15382 15671 15383 3 15383 15671 15672 3 15383 15672 15384 3 15384 15672 15673 3 15384 15673 15385 3 15385 15673 15674 3 15385 15674 15386 3 15386 15674 15675 3 15386 15675 15387 3 15387 15675 15676 3 15387 15676 15388 3 15388 15676 15677 3 15388 15677 15389 3 15389 15677 15678 3 15389 15678 15390 3 15390 15678 15679 3 15390 15679 15391 3 15391 15679 15680 3 15391 15680 15392 3 15392 15680 15681 3 15392 15681 15393 3 15393 15681 15682 3 15393 15682 15394 3 15394 15682 15683 3 15394 15683 15395 3 15395 15683 15684 3 15395 15684 15396 3 15396 15684 15685 3 15396 15685 15397 3 15397 
15685 15686 3 15397 15686 15398 3 15398 15686 15687 3 15398 15687 15399 3 15399 15687 15688 3 15399 15688 15400 3 15400 15688 15689 3 15400 15689 15401 3 15401 15689 15690 3 15401 15690 15402 3 15402 15690 15691 3 15402 15691 15403 3 15403 15691 15692 3 15403 15692 15404 3 15404 15692 15693 3 15404 15693 15405 3 15405 15693 15694 3 15405 15694 15406 3 15406 15694 15695 3 15406 15695 15407 3 15407 15695 15696 3 15407 15696 15408 3 15408 15696 15697 3 15408 15697 15409 3 15409 15697 15698 3 15409 15698 15410 3 15410 15698 15411 3 15699 15411 15698 3 15411 15699 15412 3 15700 15412 15699 3 15412 15700 15413 3 15701 15413 15700 3 15413 15701 15414 3 15702 15414 15701 3 15414 15702 15415 3 15703 15415 15702 3 15415 15703 15416 3 15704 15416 15703 3 15416 15704 15417 3 15705 15417 15704 3 15417 15705 15418 3 15706 15418 15705 3 15418 15706 15419 3 15707 15419 15706 3 15419 15707 15420 3 15708 15420 15707 3 15420 15708 15421 3 15709 15421 15708 3 15421 15709 15422 3 15710 15422 15709 3 15422 15710 15423 3 15711 15423 15710 3 15423 15711 15424 3 15712 15424 15711 3 15424 15712 15425 3 15713 15425 15712 3 15425 15713 15426 3 15714 15426 15713 3 15426 15714 15427 3 15715 15427 15714 3 15427 15715 15428 3 15716 15428 15715 3 15428 15716 15429 3 15717 15429 15716 3 15429 15717 15430 3 15718 15430 15717 3 15430 15718 15431 3 15719 15431 15718 3 15431 15719 15432 3 15720 15432 15719 3 15432 15720 15433 3 15721 15433 15720 3 15433 15721 15434 3 15722 15434 15721 3 15434 15722 15435 3 15723 15435 15722 3 15435 15723 15436 3 15724 15436 15723 3 15436 15724 15437 3 15725 15437 15724 3 15437 15725 15438 3 15726 15438 15725 3 15438 15726 15439 3 15727 15439 15726 3 15439 15727 15440 3 15728 15440 15727 3 15440 15728 15441 3 15729 15441 15728 3 15441 15729 15444 3 15732 15444 15729 3 15442 15730 15443 3 15731 15443 15730 3 15442 15447 15730 3 15735 15730 15447 3 15444 15732 15445 3 15733 15445 15732 3 15445 15733 15448 3 15736 15448 15733 3 15446 15734 15447 3 15735 15447 15734 3 15446 
15450 15734 3 15738 15734 15450 3 15448 15736 15449 3 15737 15449 15736 3 15449 15737 15451 3 15739 15451 15737 3 15450 15452 15738 3 15740 15738 15452 3 15451 15739 15453 3 15741 15453 15739 3 15452 15454 15740 3 15742 15740 15454 3 15453 15741 15455 3 15743 15455 15741 3 15454 15456 15742 3 15744 15742 15456 3 15455 15743 15457 3 15745 15457 15743 3 15456 15458 15744 3 15746 15744 15458 3 15457 15745 15459 3 15747 15459 15745 3 15458 15460 15746 3 15748 15746 15460 3 15459 15747 15461 3 15749 15461 15747 3 15460 15462 15748 3 15750 15748 15462 3 15461 15749 15463 3 15751 15463 15749 3 15462 15464 15750 3 15752 15750 15464 3 15463 15751 15465 3 15753 15465 15751 3 15464 15466 15752 3 15754 15752 15466 3 15465 15753 15467 3 15755 15467 15753 3 15466 15468 15754 3 15756 15754 15468 3 15467 15755 15469 3 15757 15469 15755 3 15468 15470 15756 3 15758 15756 15470 3 15469 15757 15471 3 15759 15471 15757 3 15470 15472 15758 3 15760 15758 15472 3 15471 15759 15473 3 15761 15473 15759 3 15472 15474 15760 3 15762 15760 15474 3 15473 15761 15475 3 15763 15475 15761 3 15474 15476 15762 3 15764 15762 15476 3 15475 15763 15477 3 15765 15477 15763 3 15476 15478 15764 3 15766 15764 15478 3 15477 15765 15479 3 15767 15479 15765 3 15478 15480 15766 3 15768 15766 15480 3 15479 15767 15481 3 15769 15481 15767 3 15480 15482 15768 3 15770 15768 15482 3 15481 15769 15483 3 15771 15483 15769 3 15482 15484 15770 3 15772 15770 15484 3 15483 15771 15485 3 15773 15485 15771 3 15484 15486 15772 3 15774 15772 15486 3 15485 15773 15487 3 15775 15487 15773 3 15486 15488 15774 3 15776 15774 15488 3 15487 15775 15777 3 15487 15777 15489 3 15488 15490 15776 3 15778 15776 15490 3 15489 15777 15779 3 15489 15779 15491 3 15490 15492 15778 3 15780 15778 15492 3 15491 15779 15781 3 15491 15781 15493 3 15492 15494 15780 3 15782 15780 15494 3 15493 15781 15783 3 15493 15783 15495 3 15494 15496 15782 3 15784 15782 15496 3 15495 15783 15785 3 15495 15785 15497 3 15496 15498 15784 3 15786 15784 15498 3 15497 
15785 15787 3 15497 15787 15499 3 15498 15500 15788 3 15498 15788 15786 3 15499 15787 15789 3 15499 15789 15501 3 15500 15502 15790 3 15500 15790 15788 3 15501 15789 15791 3 15501 15791 15503 3 15502 15504 15792 3 15502 15792 15790 3 15503 15791 15793 3 15503 15793 15505 3 15504 15506 15794 3 15504 15794 15792 3 15505 15793 15795 3 15505 15795 15507 3 15506 15508 15796 3 15506 15796 15794 3 15507 15795 15797 3 15507 15797 15509 3 15508 15510 15798 3 15508 15798 15796 3 15509 15797 15799 3 15509 15799 15511 3 15510 15512 15800 3 15510 15800 15798 3 15511 15799 15801 3 15511 15801 15513 3 15512 15514 15802 3 15512 15802 15800 3 15513 15801 15803 3 15513 15803 15515 3 15514 15516 15804 3 15514 15804 15802 3 15515 15803 15805 3 15515 15805 15517 3 15516 15518 15806 3 15516 15806 15804 3 15517 15805 15807 3 15517 15807 15519 3 15518 15520 15808 3 15518 15808 15806 3 15519 15807 15809 3 15519 15809 15521 3 15520 15522 15810 3 15520 15810 15808 3 15521 15809 15811 3 15521 15811 15523 3 15522 15524 15812 3 15522 15812 15810 3 15523 15811 15813 3 15523 15813 15525 3 15524 15526 15814 3 15524 15814 15812 3 15525 15813 15815 3 15525 15815 15527 3 15526 15528 15816 3 15526 15816 15814 3 15527 15815 15817 3 15527 15817 15529 3 15528 15530 15818 3 15528 15818 15816 3 15529 15817 15819 3 15529 15819 15531 3 15530 15532 15820 3 15530 15820 15818 3 15531 15819 15821 3 15531 15821 15533 3 15532 15534 15822 3 15532 15822 15820 3 15533 15821 15823 3 15533 15823 15535 3 15534 15536 15824 3 15534 15824 15822 3 15535 15823 15825 3 15535 15825 15537 3 15536 15538 15826 3 15536 15826 15824 3 15537 15825 15827 3 15537 15827 15539 3 15538 15540 15828 3 15538 15828 15826 3 15539 15827 15829 3 15539 15829 15541 3 15540 15542 15830 3 15540 15830 15828 3 15541 15829 15831 3 15541 15831 15543 3 15542 15544 15832 3 15542 15832 15830 3 15543 15831 15833 3 15543 15833 15545 3 15544 15546 15834 3 15544 15834 15832 3 15545 15833 15835 3 15545 15835 15547 3 15546 15548 15836 3 15546 15836 15834 3 15547 
15835 15837 3 15547 15837 15549 3 15548 15550 15838 3 15548 15838 15836 3 15549 15837 15839 3 15549 15839 15551 3 15550 15552 15840 3 15550 15840 15838 3 15551 15839 15841 3 15551 15841 15553 3 15552 15554 15842 3 15552 15842 15840 3 15553 15841 15843 3 15553 15843 15555 3 15554 15556 15844 3 15554 15844 15842 3 15555 15843 15845 3 15555 15845 15557 3 15556 15558 15846 3 15556 15846 15844 3 15557 15845 15847 3 15557 15847 15559 3 15558 15560 15848 3 15558 15848 15846 3 15559 15847 15849 3 15559 15849 15561 3 15560 15562 15850 3 15560 15850 15848 3 15561 15849 15851 3 15561 15851 15563 3 15562 15564 15852 3 15562 15852 15850 3 15563 15851 15853 3 15563 15853 15565 3 15564 15566 15854 3 15564 15854 15852 3 15565 15853 15567 3 15855 15567 15853 3 15566 15568 15856 3 15566 15856 15854 3 15567 15855 15569 3 15857 15569 15855 3 15568 15570 15858 3 15568 15858 15856 3 15569 15857 15571 3 15859 15571 15857 3 15570 15572 15860 3 15570 15860 15858 3 15571 15859 15573 3 15861 15573 15859 3 15572 15574 15862 3 15572 15862 15860 3 15573 15861 15575 3 15863 15575 15861 3 15574 15576 15862 3 15864 15862 15576 3 15575 15863 15577 3 15865 15577 15863 3 15576 15578 15864 3 15866 15864 15578 3 15577 15865 15579 3 15867 15579 15865 3 15578 15580 15866 3 15868 15866 15580 3 15579 15867 15581 3 15869 15581 15867 3 15580 15582 15868 3 15870 15868 15582 3 15581 15869 15583 3 15871 15583 15869 3 15582 15584 15870 3 15872 15870 15584 3 15583 15871 15587 3 15875 15587 15871 3 15584 15585 15872 3 15873 15872 15585 3 15585 15588 15873 3 15876 15873 15588 3 15586 15587 15874 3 15875 15874 15587 3 15586 15874 15591 3 15879 15591 15874 3 15588 15589 15876 3 15877 15876 15589 3 15589 15592 15877 3 15880 15877 15592 3 15590 15591 15878 3 15879 15878 15591 3 15590 15878 15660 3 15948 15660 15878 3 15592 15593 15880 3 15881 15880 15593 3 15593 15594 15881 3 15882 15881 15594 3 15594 15595 15882 3 15883 15882 15595 3 15595 15596 15883 3 15884 15883 15596 3 15596 15597 15884 3 15885 15884 15597 3 15597 
15598 15885 3 15886 15885 15598 3 15598 15599 15886 3 15887 15886 15599 3 15599 15600 15887 3 15888 15887 15600 3 15600 15601 15888 3 15889 15888 15601 3 15601 15602 15889 3 15890 15889 15602 3 15602 15603 15890 3 15891 15890 15603 3 15603 15604 15891 3 15892 15891 15604 3 15604 15605 15892 3 15893 15892 15605 3 15605 15606 15893 3 15894 15893 15606 3 15606 15607 15894 3 15895 15894 15607 3 15607 15608 15895 3 15896 15895 15608 3 15608 15609 15896 3 15897 15896 15609 3 15609 15610 15897 3 15898 15897 15610 3 15610 15611 15898 3 15899 15898 15611 3 15611 15612 15899 3 15900 15899 15612 3 15612 15613 15900 3 15901 15900 15613 3 15613 15614 15901 3 15902 15901 15614 3 15614 15615 15902 3 15903 15902 15615 3 15615 15616 15903 3 15904 15903 15616 3 15616 15617 15904 3 15905 15904 15617 3 15617 15618 15905 3 15906 15905 15618 3 15618 15619 15906 3 15907 15906 15619 3 15619 15620 15907 3 15908 15907 15620 3 15620 15621 15908 3 15909 15908 15621 3 15621 15622 15909 3 15910 15909 15622 3 15622 15623 15910 3 15911 15910 15623 3 15623 15624 15911 3 15912 15911 15624 3 15624 15625 15912 3 15913 15912 15625 3 15625 15626 15913 3 15914 15913 15626 3 15626 15627 15914 3 15915 15914 15627 3 15627 15628 15915 3 15916 15915 15628 3 15628 15629 15916 3 15917 15916 15629 3 15629 15630 15917 3 15918 15917 15630 3 15630 15631 15918 3 15919 15918 15631 3 15631 15632 15919 3 15920 15919 15632 3 15632 15633 15920 3 15921 15920 15633 3 15633 15634 15921 3 15922 15921 15634 3 15634 15635 15922 3 15923 15922 15635 3 15635 15636 15923 3 15924 15923 15636 3 15636 15637 15924 3 15925 15924 15637 3 15637 15638 15925 3 15926 15925 15638 3 15638 15639 15926 3 15927 15926 15639 3 15639 15640 15927 3 15928 15927 15640 3 15640 15641 15928 3 15929 15928 15641 3 15641 15642 15929 3 15930 15929 15642 3 15642 15643 15930 3 15931 15930 15643 3 15643 15644 15931 3 15932 15931 15644 3 15644 15645 15932 3 15933 15932 15645 3 15645 15646 15933 3 15934 15933 15646 3 15646 15647 15934 3 15935 15934 15647 3 15647 
15648 15935 3 15936 15935 15648 3 15648 15649 15936 3 15937 15936 15649 3 15649 15650 15937 3 15938 15937 15650 3 15650 15651 15938 3 15939 15938 15651 3 15651 15652 15939 3 15940 15939 15652 3 15652 15653 15941 3 15652 15941 15940 3 15653 15654 15942 3 15653 15942 15941 3 15654 15655 15943 3 15654 15943 15942 3 15655 15656 15944 3 15655 15944 15943 3 15656 15657 15945 3 15656 15945 15944 3 15657 15658 15946 3 15657 15946 15945 3 15658 15659 15947 3 15658 15947 15946 3 15659 15660 15948 3 15659 15948 15947 3 15661 15949 15950 3 15661 15950 15662 3 15661 15731 16019 3 15661 16019 15949 3 15662 15950 15951 3 15662 15951 15663 3 15663 15951 15952 3 15663 15952 15664 3 15664 15952 15953 3 15664 15953 15665 3 15665 15953 15954 3 15665 15954 15666 3 15666 15954 15955 3 15666 15955 15667 3 15667 15955 15956 3 15667 15956 15668 3 15668 15956 15957 3 15668 15957 15669 3 15669 15957 15958 3 15669 15958 15670 3 15670 15958 15959 3 15670 15959 15671 3 15671 15959 15960 3 15671 15960 15672 3 15672 15960 15961 3 15672 15961 15673 3 15673 15961 15962 3 15673 15962 15674 3 15674 15962 15963 3 15674 15963 15675 3 15675 15963 15964 3 15675 15964 15676 3 15676 15964 15965 3 15676 15965 15677 3 15677 15965 15966 3 15677 15966 15678 3 15678 15966 15967 3 15678 15967 15679 3 15679 15967 15968 3 15679 15968 15680 3 15680 15968 15969 3 15680 15969 15681 3 15681 15969 15970 3 15681 15970 15682 3 15682 15970 15971 3 15682 15971 15683 3 15683 15971 15972 3 15683 15972 15684 3 15684 15972 15973 3 15684 15973 15685 3 15685 15973 15974 3 15685 15974 15686 3 15686 15974 15975 3 15686 15975 15687 3 15687 15975 15976 3 15687 15976 15688 3 15688 15976 15977 3 15688 15977 15689 3 15689 15977 15978 3 15689 15978 15690 3 15690 15978 15979 3 15690 15979 15691 3 15691 15979 15980 3 15691 15980 15692 3 15692 15980 15981 3 15692 15981 15693 3 15693 15981 15982 3 15693 15982 15694 3 15694 15982 15983 3 15694 15983 15695 3 15695 15983 15984 3 15695 15984 15696 3 15696 15984 15985 3 15696 15985 15697 3 15697 
15985 15986 3 15697 15986 15698 3 15698 15986 15987 3 15698 15987 15699 3 15699 15987 15988 3 15699 15988 15700 3 15700 15988 15989 3 15700 15989 15701 3 15701 15989 15990 3 15701 15990 15702 3 15702 15990 15991 3 15702 15991 15703 3 15703 15991 15992 3 15703 15992 15704 3 15704 15992 15993 3 15704 15993 15705 3 15705 15993 15994 3 15705 15994 15706 3 15706 15994 15995 3 15706 15995 15707 3 15707 15995 15996 3 15707 15996 15708 3 15708 15996 15997 3 15708 15997 15709 3 15709 15997 15998 3 15709 15998 15710 3 15710 15998 15999 3 15710 15999 15711 3 15711 15999 16000 3 15711 16000 15712 3 15712 16000 16001 3 15712 16001 15713 3 15713 16001 16002 3 15713 16002 15714 3 15714 16002 16003 3 15714 16003 15715 3 15715 16003 16004 3 15715 16004 15716 3 15716 16004 16005 3 15716 16005 15717 3 15717 16005 16006 3 15717 16006 15718 3 15718 16006 15719 3 16007 15719 16006 3 15719 16007 15720 3 16008 15720 16007 3 15720 16008 15721 3 16009 15721 16008 3 15721 16009 15722 3 16010 15722 16009 3 15722 16010 15723 3 16011 15723 16010 3 15723 16011 15724 3 16012 15724 16011 3 15724 16012 15725 3 16013 15725 16012 3 15725 16013 15726 3 16014 15726 16013 3 15726 16014 15727 3 16015 15727 16014 3 15727 16015 15728 3 16016 15728 16015 3 15728 16016 15729 3 16017 15729 16016 3 15729 16017 15732 3 16020 15732 16017 3 15730 16018 15731 3 16019 15731 16018 3 15730 15735 16018 3 16023 16018 15735 3 15732 16020 15733 3 16021 15733 16020 3 15733 16021 15736 3 16024 15736 16021 3 15734 16022 15735 3 16023 15735 16022 3 15734 15738 16022 3 16026 16022 15738 3 15736 16024 15737 3 16025 15737 16024 3 15737 16025 15739 3 16027 15739 16025 3 15738 15740 16026 3 16028 16026 15740 3 15739 16027 15741 3 16029 15741 16027 3 15740 15742 16028 3 16030 16028 15742 3 15741 16029 15743 3 16031 15743 16029 3 15742 15744 16030 3 16032 16030 15744 3 15743 16031 15745 3 16033 15745 16031 3 15744 15746 16032 3 16034 16032 15746 3 15745 16033 15747 3 16035 15747 16033 3 15746 15748 16034 3 16036 16034 15748 3 15747 
16035 15749 3 16037 15749 16035 3 15748 15750 16036 3 16038 16036 15750 3 15749 16037 15751 3 16039 15751 16037 3 15750 15752 16038 3 16040 16038 15752 3 15751 16039 15753 3 16041 15753 16039 3 15752 15754 16040 3 16042 16040 15754 3 15753 16041 15755 3 16043 15755 16041 3 15754 15756 16042 3 16044 16042 15756 3 15755 16043 15757 3 16045 15757 16043 3 15756 15758 16044 3 16046 16044 15758 3 15757 16045 15759 3 16047 15759 16045 3 15758 15760 16046 3 16048 16046 15760 3 15759 16047 15761 3 16049 15761 16047 3 15760 15762 16048 3 16050 16048 15762 3 15761 16049 15763 3 16051 15763 16049 3 15762 15764 16050 3 16052 16050 15764 3 15763 16051 15765 3 16053 15765 16051 3 15764 15766 16052 3 16054 16052 15766 3 15765 16053 15767 3 16055 15767 16053 3 15766 15768 16054 3 16056 16054 15768 3 15767 16055 15769 3 16057 15769 16055 3 15768 15770 16056 3 16058 16056 15770 3 15769 16057 15771 3 16059 15771 16057 3 15770 15772 16058 3 16060 16058 15772 3 15771 16059 15773 3 16061 15773 16059 3 15772 15774 16060 3 16062 16060 15774 3 15773 16061 15775 3 16063 15775 16061 3 15774 15776 16062 3 16064 16062 15776 3 15775 16063 15777 3 16065 15777 16063 3 15776 15778 16064 3 16066 16064 15778 3 15777 16065 15779 3 16067 15779 16065 3 15778 15780 16066 3 16068 16066 15780 3 15779 16067 15781 3 16069 15781 16067 3 15780 15782 16068 3 16070 16068 15782 3 15781 16069 15783 3 16071 15783 16069 3 15782 15784 16070 3 16072 16070 15784 3 15783 16071 15785 3 16073 15785 16071 3 15784 15786 16072 3 16074 16072 15786 3 15785 16073 15787 3 16075 15787 16073 3 15786 15788 16074 3 16076 16074 15788 3 15787 16075 15789 3 16077 15789 16075 3 15788 15790 16076 3 16078 16076 15790 3 15789 16077 15791 3 16079 15791 16077 3 15790 15792 16078 3 16080 16078 15792 3 15791 16079 15793 3 16081 15793 16079 3 15792 15794 16080 3 16082 16080 15794 3 15793 16081 15795 3 16083 15795 16081 3 15794 15796 16082 3 16084 16082 15796 3 15795 16083 15797 3 16085 15797 16083 3 15796 15798 16084 3 16086 16084 15798 3 15797 
16085 16087 3 15797 16087 15799 3 15798 15800 16086 3 16088 16086 15800 3 15799 16087 16089 3 15799 16089 15801 3 15800 15802 16088 3 16090 16088 15802 3 15801 16089 16091 3 15801 16091 15803 3 15802 15804 16090 3 16092 16090 15804 3 15803 16091 16093 3 15803 16093 15805 3 15804 15806 16092 3 16094 16092 15806 3 15805 16093 16095 3 15805 16095 15807 3 15806 15808 16096 3 15806 16096 16094 3 15807 16095 16097 3 15807 16097 15809 3 15808 15810 16098 3 15808 16098 16096 3 15809 16097 16099 3 15809 16099 15811 3 15810 15812 16100 3 15810 16100 16098 3 15811 16099 16101 3 15811 16101 15813 3 15812 15814 16102 3 15812 16102 16100 3 15813 16101 16103 3 15813 16103 15815 3 15814 15816 16104 3 15814 16104 16102 3 15815 16103 16105 3 15815 16105 15817 3 15816 15818 16106 3 15816 16106 16104 3 15817 16105 16107 3 15817 16107 15819 3 15818 15820 16108 3 15818 16108 16106 3 15819 16107 16109 3 15819 16109 15821 3 15820 15822 16110 3 15820 16110 16108 3 15821 16109 16111 3 15821 16111 15823 3 15822 15824 16112 3 15822 16112 16110 3 15823 16111 16113 3 15823 16113 15825 3 15824 15826 16114 3 15824 16114 16112 3 15825 16113 16115 3 15825 16115 15827 3 15826 15828 16116 3 15826 16116 16114 3 15827 16115 16117 3 15827 16117 15829 3 15828 15830 16118 3 15828 16118 16116 3 15829 16117 16119 3 15829 16119 15831 3 15830 15832 16120 3 15830 16120 16118 3 15831 16119 16121 3 15831 16121 15833 3 15832 15834 16122 3 15832 16122 16120 3 15833 16121 16123 3 15833 16123 15835 3 15834 15836 16124 3 15834 16124 16122 3 15835 16123 16125 3 15835 16125 15837 3 15836 15838 16126 3 15836 16126 16124 3 15837 16125 16127 3 15837 16127 15839 3 15838 15840 16128 3 15838 16128 16126 3 15839 16127 16129 3 15839 16129 15841 3 15840 15842 16130 3 15840 16130 16128 3 15841 16129 16131 3 15841 16131 15843 3 15842 15844 16132 3 15842 16132 16130 3 15843 16131 16133 3 15843 16133 15845 3 15844 15846 16134 3 15844 16134 16132 3 15845 16133 16135 3 15845 16135 15847 3 15846 15848 16136 3 15846 16136 16134 3 15847 
16135 16137 3 15847 16137 15849 3 15848 15850 16138 3 15848 16138 16136 3 15849 16137 16139 3 15849 16139 15851 3 15850 15852 16140 3 15850 16140 16138 3 15851 16139 16141 3 15851 16141 15853 3 15852 15854 16142 3 15852 16142 16140 3 15853 16141 16143 3 15853 16143 15855 3 15854 15856 16144 3 15854 16144 16142 3 15855 16143 16145 3 15855 16145 15857 3 15856 15858 16146 3 15856 16146 16144 3 15857 16145 16147 3 15857 16147 15859 3 15858 15860 16148 3 15858 16148 16146 3 15859 16147 16149 3 15859 16149 15861 3 15860 15862 16150 3 15860 16150 16148 3 15861 16149 16151 3 15861 16151 15863 3 15862 15864 16152 3 15862 16152 16150 3 15863 16151 16153 3 15863 16153 15865 3 15864 15866 16154 3 15864 16154 16152 3 15865 16153 16155 3 15865 16155 15867 3 15866 15868 16156 3 15866 16156 16154 3 15867 16155 16157 3 15867 16157 15869 3 15868 15870 16158 3 15868 16158 16156 3 15869 16157 16159 3 15869 16159 15871 3 15870 15872 16160 3 15870 16160 16158 3 15871 16159 16163 3 15871 16163 15875 3 15872 15873 16161 3 15872 16161 16160 3 15873 15876 16164 3 15873 16164 16161 3 15874 15875 16163 3 15874 16163 16162 3 15874 16162 15879 3 16167 15879 16162 3 15876 15877 16165 3 15876 16165 16164 3 15877 15880 16168 3 15877 16168 16165 3 15878 15879 16167 3 15878 16167 16166 3 15878 16166 15948 3 16236 15948 16166 3 15880 15881 16169 3 15880 16169 16168 3 15881 15882 16170 3 15881 16170 16169 3 15882 15883 16171 3 15882 16171 16170 3 15883 15884 16171 3 16172 16171 15884 3 15884 15885 16172 3 16173 16172 15885 3 15885 15886 16173 3 16174 16173 15886 3 15886 15887 16174 3 16175 16174 15887 3 15887 15888 16175 3 16176 16175 15888 3 15888 15889 16176 3 16177 16176 15889 3 15889 15890 16177 3 16178 16177 15890 3 15890 15891 16178 3 16179 16178 15891 3 15891 15892 16179 3 16180 16179 15892 3 15892 15893 16180 3 16181 16180 15893 3 15893 15894 16181 3 16182 16181 15894 3 15894 15895 16182 3 16183 16182 15895 3 15895 15896 16183 3 16184 16183 15896 3 15896 15897 16184 3 16185 16184 15897 3 15897 
15898 16185 3 16186 16185 15898 3 15898 15899 16186 3 16187 16186 15899 3 15899 15900 16187 3 16188 16187 15900 3 15900 15901 16188 3 16189 16188 15901 3 15901 15902 16189 3 16190 16189 15902 3 15902 15903 16190 3 16191 16190 15903 3 15903 15904 16191 3 16192 16191 15904 3 15904 15905 16192 3 16193 16192 15905 3 15905 15906 16193 3 16194 16193 15906 3 15906 15907 16194 3 16195 16194 15907 3 15907 15908 16195 3 16196 16195 15908 3 15908 15909 16196 3 16197 16196 15909 3 15909 15910 16197 3 16198 16197 15910 3 15910 15911 16198 3 16199 16198 15911 3 15911 15912 16199 3 16200 16199 15912 3 15912 15913 16200 3 16201 16200 15913 3 15913 15914 16201 3 16202 16201 15914 3 15914 15915 16202 3 16203 16202 15915 3 15915 15916 16203 3 16204 16203 15916 3 15916 15917 16204 3 16205 16204 15917 3 15917 15918 16205 3 16206 16205 15918 3 15918 15919 16206 3 16207 16206 15919 3 15919 15920 16207 3 16208 16207 15920 3 15920 15921 16208 3 16209 16208 15921 3 15921 15922 16209 3 16210 16209 15922 3 15922 15923 16210 3 16211 16210 15923 3 15923 15924 16211 3 16212 16211 15924 3 15924 15925 16212 3 16213 16212 15925 3 15925 15926 16213 3 16214 16213 15926 3 15926 15927 16214 3 16215 16214 15927 3 15927 15928 16215 3 16216 16215 15928 3 15928 15929 16216 3 16217 16216 15929 3 15929 15930 16217 3 16218 16217 15930 3 15930 15931 16218 3 16219 16218 15931 3 15931 15932 16219 3 16220 16219 15932 3 15932 15933 16220 3 16221 16220 15933 3 15933 15934 16221 3 16222 16221 15934 3 15934 15935 16222 3 16223 16222 15935 3 15935 15936 16223 3 16224 16223 15936 3 15936 15937 16224 3 16225 16224 15937 3 15937 15938 16225 3 16226 16225 15938 3 15938 15939 16226 3 16227 16226 15939 3 15939 15940 16227 3 16228 16227 15940 3 15940 15941 16228 3 16229 16228 15941 3 15941 15942 16229 3 16230 16229 15942 3 15942 15943 16230 3 16231 16230 15943 3 15943 15944 16231 3 16232 16231 15944 3 15944 15945 16232 3 16233 16232 15945 3 15945 15946 16233 3 16234 16233 15946 3 15946 15947 16234 3 16235 16234 15947 3 15947 
15948 16235 3 16236 16235 15948 3 15949 16237 15950 3 16238 15950 16237 3 15949 16019 16307 3 15949 16307 16237 3 15950 16238 15951 3 16239 15951 16238 3 15951 16239 15952 3 16240 15952 16239 3 15952 16240 16241 3 15952 16241 15953 3 15953 16241 16242 3 15953 16242 15954 3 15954 16242 16243 3 15954 16243 15955 3 15955 16243 16244 3 15955 16244 15956 3 15956 16244 16245 3 15956 16245 15957 3 15957 16245 16246 3 15957 16246 15958 3 15958 16246 16247 3 15958 16247 15959 3 15959 16247 16248 3 15959 16248 15960 3 15960 16248 16249 3 15960 16249 15961 3 15961 16249 16250 3 15961 16250 15962 3 15962 16250 16251 3 15962 16251 15963 3 15963 16251 16252 3 15963 16252 15964 3 15964 16252 16253 3 15964 16253 15965 3 15965 16253 16254 3 15965 16254 15966 3 15966 16254 16255 3 15966 16255 15967 3 15967 16255 16256 3 15967 16256 15968 3 15968 16256 16257 3 15968 16257 15969 3 15969 16257 16258 3 15969 16258 15970 3 15970 16258 16259 3 15970 16259 15971 3 15971 16259 16260 3 15971 16260 15972 3 15972 16260 16261 3 15972 16261 15973 3 15973 16261 16262 3 15973 16262 15974 3 15974 16262 16263 3 15974 16263 15975 3 15975 16263 16264 3 15975 16264 15976 3 15976 16264 16265 3 15976 16265 15977 3 15977 16265 16266 3 15977 16266 15978 3 15978 16266 16267 3 15978 16267 15979 3 15979 16267 16268 3 15979 16268 15980 3 15980 16268 16269 3 15980 16269 15981 3 15981 16269 16270 3 15981 16270 15982 3 15982 16270 16271 3 15982 16271 15983 3 15983 16271 16272 3 15983 16272 15984 3 15984 16272 16273 3 15984 16273 15985 3 15985 16273 16274 3 15985 16274 15986 3 15986 16274 16275 3 15986 16275 15987 3 15987 16275 16276 3 15987 16276 15988 3 15988 16276 16277 3 15988 16277 15989 3 15989 16277 16278 3 15989 16278 15990 3 15990 16278 16279 3 15990 16279 15991 3 15991 16279 16280 3 15991 16280 15992 3 15992 16280 16281 3 15992 16281 15993 3 15993 16281 16282 3 15993 16282 15994 3 15994 16282 16283 3 15994 16283 15995 3 15995 16283 16284 3 15995 16284 15996 3 15996 16284 16285 3 15996 16285 15997 3 15997 
16285 16286 3 15997 16286 15998 3 15998 16286 16287 3 15998 16287 15999 3 15999 16287 16288 3 15999 16288 16000 3 16000 16288 16289 3 16000 16289 16001 3 16001 16289 16290 3 16001 16290 16002 3 16002 16290 16291 3 16002 16291 16003 3 16003 16291 16292 3 16003 16292 16004 3 16004 16292 16293 3 16004 16293 16005 3 16005 16293 16294 3 16005 16294 16006 3 16006 16294 16295 3 16006 16295 16007 3 16007 16295 16296 3 16007 16296 16008 3 16008 16296 16297 3 16008 16297 16009 3 16009 16297 16298 3 16009 16298 16010 3 16010 16298 16299 3 16010 16299 16011 3 16011 16299 16300 3 16011 16300 16012 3 16012 16300 16301 3 16012 16301 16013 3 16013 16301 16302 3 16013 16302 16014 3 16014 16302 16303 3 16014 16303 16015 3 16015 16303 16304 3 16015 16304 16016 3 16016 16304 16305 3 16016 16305 16017 3 16017 16305 16308 3 16017 16308 16020 3 16018 16306 16307 3 16018 16307 16019 3 16018 16023 16311 3 16018 16311 16306 3 16020 16308 16309 3 16020 16309 16021 3 16021 16309 16312 3 16021 16312 16024 3 16022 16310 16311 3 16022 16311 16023 3 16022 16026 16314 3 16022 16314 16310 3 16024 16312 16313 3 16024 16313 16025 3 16025 16313 16315 3 16025 16315 16027 3 16026 16028 16316 3 16026 16316 16314 3 16027 16315 16317 3 16027 16317 16029 3 16028 16030 16318 3 16028 16318 16316 3 16029 16317 16319 3 16029 16319 16031 3 16030 16032 16320 3 16030 16320 16318 3 16031 16319 16033 3 16321 16033 16319 3 16032 16034 16322 3 16032 16322 16320 3 16033 16321 16035 3 16323 16035 16321 3 16034 16036 16324 3 16034 16324 16322 3 16035 16323 16037 3 16325 16037 16323 3 16036 16038 16326 3 16036 16326 16324 3 16037 16325 16039 3 16327 16039 16325 3 16038 16040 16326 3 16328 16326 16040 3 16039 16327 16041 3 16329 16041 16327 3 16040 16042 16328 3 16330 16328 16042 3 16041 16329 16043 3 16331 16043 16329 3 16042 16044 16330 3 16332 16330 16044 3 16043 16331 16045 3 16333 16045 16331 3 16044 16046 16332 3 16334 16332 16046 3 16045 16333 16047 3 16335 16047 16333 3 16046 16048 16334 3 16336 16334 16048 3 16047 
16335 16049 3 16337 16049 16335 3 16048 16050 16336 3 16338 16336 16050 3 16049 16337 16051 3 16339 16051 16337 3 16050 16052 16338 3 16340 16338 16052 3 16051 16339 16053 3 16341 16053 16339 3 16052 16054 16340 3 16342 16340 16054 3 16053 16341 16055 3 16343 16055 16341 3 16054 16056 16342 3 16344 16342 16056 3 16055 16343 16057 3 16345 16057 16343 3 16056 16058 16344 3 16346 16344 16058 3 16057 16345 16059 3 16347 16059 16345 3 16058 16060 16346 3 16348 16346 16060 3 16059 16347 16061 3 16349 16061 16347 3 16060 16062 16348 3 16350 16348 16062 3 16061 16349 16063 3 16351 16063 16349 3 16062 16064 16350 3 16352 16350 16064 3 16063 16351 16065 3 16353 16065 16351 3 16064 16066 16352 3 16354 16352 16066 3 16065 16353 16067 3 16355 16067 16353 3 16066 16068 16354 3 16356 16354 16068 3 16067 16355 16069 3 16357 16069 16355 3 16068 16070 16356 3 16358 16356 16070 3 16069 16357 16071 3 16359 16071 16357 3 16070 16072 16358 3 16360 16358 16072 3 16071 16359 16073 3 16361 16073 16359 3 16072 16074 16360 3 16362 16360 16074 3 16073 16361 16075 3 16363 16075 16361 3 16074 16076 16362 3 16364 16362 16076 3 16075 16363 16077 3 16365 16077 16363 3 16076 16078 16364 3 16366 16364 16078 3 16077 16365 16079 3 16367 16079 16365 3 16078 16080 16366 3 16368 16366 16080 3 16079 16367 16081 3 16369 16081 16367 3 16080 16082 16368 3 16370 16368 16082 3 16081 16369 16083 3 16371 16083 16369 3 16082 16084 16370 3 16372 16370 16084 3 16083 16371 16085 3 16373 16085 16371 3 16084 16086 16372 3 16374 16372 16086 3 16085 16373 16087 3 16375 16087 16373 3 16086 16088 16374 3 16376 16374 16088 3 16087 16375 16089 3 16377 16089 16375 3 16088 16090 16376 3 16378 16376 16090 3 16089 16377 16091 3 16379 16091 16377 3 16090 16092 16378 3 16380 16378 16092 3 16091 16379 16093 3 16381 16093 16379 3 16092 16094 16380 3 16382 16380 16094 3 16093 16381 16095 3 16383 16095 16381 3 16094 16096 16382 3 16384 16382 16096 3 16095 16383 16097 3 16385 16097 16383 3 16096 16098 16384 3 16386 16384 16098 3 16097 
16385 16099 3 16387 16099 16385 3 16098 16100 16386 3 16388 16386 16100 3 16099 16387 16101 3 16389 16101 16387 3 16100 16102 16388 3 16390 16388 16102 3 16101 16389 16103 3 16391 16103 16389 3 16102 16104 16390 3 16392 16390 16104 3 16103 16391 16105 3 16393 16105 16391 3 16104 16106 16392 3 16394 16392 16106 3 16105 16393 16107 3 16395 16107 16393 3 16106 16108 16394 3 16396 16394 16108 3 16107 16395 16109 3 16397 16109 16395 3 16108 16110 16396 3 16398 16396 16110 3 16109 16397 16399 3 16109 16399 16111 3 16110 16112 16398 3 16400 16398 16112 3 16111 16399 16401 3 16111 16401 16113 3 16112 16114 16400 3 16402 16400 16114 3 16113 16401 16403 3 16113 16403 16115 3 16114 16116 16402 3 16404 16402 16116 3 16115 16403 16405 3 16115 16405 16117 3 16116 16118 16406 3 16116 16406 16404 3 16117 16405 16407 3 16117 16407 16119 3 16118 16120 16408 3 16118 16408 16406 3 16119 16407 16409 3 16119 16409 16121 3 16120 16122 16410 3 16120 16410 16408 3 16121 16409 16411 3 16121 16411 16123 3 16122 16124 16412 3 16122 16412 16410 3 16123 16411 16413 3 16123 16413 16125 3 16124 16126 16414 3 16124 16414 16412 3 16125 16413 16415 3 16125 16415 16127 3 16126 16128 16416 3 16126 16416 16414 3 16127 16415 16417 3 16127 16417 16129 3 16128 16130 16418 3 16128 16418 16416 3 16129 16417 16419 3 16129 16419 16131 3 16130 16132 16420 3 16130 16420 16418 3 16131 16419 16421 3 16131 16421 16133 3 16132 16134 16422 3 16132 16422 16420 3 16133 16421 16423 3 16133 16423 16135 3 16134 16136 16424 3 16134 16424 16422 3 16135 16423 16425 3 16135 16425 16137 3 16136 16138 16426 3 16136 16426 16424 3 16137 16425 16427 3 16137 16427 16139 3 16138 16140 16428 3 16138 16428 16426 3 16139 16427 16429 3 16139 16429 16141 3 16140 16142 16430 3 16140 16430 16428 3 16141 16429 16431 3 16141 16431 16143 3 16142 16144 16432 3 16142 16432 16430 3 16143 16431 16433 3 16143 16433 16145 3 16144 16146 16434 3 16144 16434 16432 3 16145 16433 16435 3 16145 16435 16147 3 16146 16148 16436 3 16146 16436 16434 3 16147 
16435 16437 3 16147 16437 16149 3 16148 16150 16438 3 16148 16438 16436 3 16149 16437 16439 3 16149 16439 16151 3 16150 16152 16440 3 16150 16440 16438 3 16151 16439 16441 3 16151 16441 16153 3 16152 16154 16442 3 16152 16442 16440 3 16153 16441 16443 3 16153 16443 16155 3 16154 16156 16444 3 16154 16444 16442 3 16155 16443 16445 3 16155 16445 16157 3 16156 16158 16446 3 16156 16446 16444 3 16157 16445 16447 3 16157 16447 16159 3 16158 16160 16448 3 16158 16448 16446 3 16159 16447 16451 3 16159 16451 16163 3 16160 16161 16449 3 16160 16449 16448 3 16161 16164 16452 3 16161 16452 16449 3 16162 16163 16451 3 16162 16451 16450 3 16162 16450 16455 3 16162 16455 16167 3 16164 16165 16453 3 16164 16453 16452 3 16165 16168 16456 3 16165 16456 16453 3 16166 16167 16455 3 16166 16455 16454 3 16166 16454 16524 3 16166 16524 16236 3 16168 16169 16457 3 16168 16457 16456 3 16169 16170 16458 3 16169 16458 16457 3 16170 16171 16459 3 16170 16459 16458 3 16171 16172 16460 3 16171 16460 16459 3 16172 16173 16461 3 16172 16461 16460 3 16173 16174 16462 3 16173 16462 16461 3 16174 16175 16463 3 16174 16463 16462 3 16175 16176 16464 3 16175 16464 16463 3 16176 16177 16465 3 16176 16465 16464 3 16177 16178 16466 3 16177 16466 16465 3 16178 16179 16467 3 16178 16467 16466 3 16179 16180 16468 3 16179 16468 16467 3 16180 16181 16469 3 16180 16469 16468 3 16181 16182 16470 3 16181 16470 16469 3 16182 16183 16471 3 16182 16471 16470 3 16183 16184 16472 3 16183 16472 16471 3 16184 16185 16473 3 16184 16473 16472 3 16185 16186 16474 3 16185 16474 16473 3 16186 16187 16475 3 16186 16475 16474 3 16187 16188 16476 3 16187 16476 16475 3 16188 16189 16477 3 16188 16477 16476 3 16189 16190 16478 3 16189 16478 16477 3 16190 16191 16479 3 16190 16479 16478 3 16191 16192 16480 3 16191 16480 16479 3 16192 16193 16481 3 16192 16481 16480 3 16193 16194 16482 3 16193 16482 16481 3 16194 16195 16483 3 16194 16483 16482 3 16195 16196 16483 3 16484 16483 16196 3 16196 16197 16484 3 16485 16484 16197 3 16197 
16198 16485 3 16486 16485 16198 3 16198 16199 16486 3 16487 16486 16199 3 16199 16200 16487 3 16488 16487 16200 3 16200 16201 16488 3 16489 16488 16201 3 16201 16202 16489 3 16490 16489 16202 3 16202 16203 16490 3 16491 16490 16203 3 16203 16204 16491 3 16492 16491 16204 3 16204 16205 16492 3 16493 16492 16205 3 16205 16206 16493 3 16494 16493 16206 3 16206 16207 16494 3 16495 16494 16207 3 16207 16208 16495 3 16496 16495 16208 3 16208 16209 16496 3 16497 16496 16209 3 16209 16210 16497 3 16498 16497 16210 3 16210 16211 16498 3 16499 16498 16211 3 16211 16212 16499 3 16500 16499 16212 3 16212 16213 16500 3 16501 16500 16213 3 16213 16214 16501 3 16502 16501 16214 3 16214 16215 16502 3 16503 16502 16215 3 16215 16216 16503 3 16504 16503 16216 3 16216 16217 16504 3 16505 16504 16217 3 16217 16218 16505 3 16506 16505 16218 3 16218 16219 16506 3 16507 16506 16219 3 16219 16220 16507 3 16508 16507 16220 3 16220 16221 16508 3 16509 16508 16221 3 16221 16222 16509 3 16510 16509 16222 3 16222 16223 16510 3 16511 16510 16223 3 16223 16224 16511 3 16512 16511 16224 3 16224 16225 16512 3 16513 16512 16225 3 16225 16226 16513 3 16514 16513 16226 3 16226 16227 16514 3 16515 16514 16227 3 16227 16228 16515 3 16516 16515 16228 3 16228 16229 16516 3 16517 16516 16229 3 16229 16230 16517 3 16518 16517 16230 3 16230 16231 16518 3 16519 16518 16231 3 16231 16232 16519 3 16520 16519 16232 3 16232 16233 16520 3 16521 16520 16233 3 16233 16234 16521 3 16522 16521 16234 3 16234 16235 16522 3 16523 16522 16235 3 16235 16236 16523 3 16524 16523 16236 3 16237 16525 16238 3 16526 16238 16525 3 16237 16307 16595 3 16237 16595 16525 3 16238 16526 16239 3 16527 16239 16526 3 16239 16527 16240 3 16528 16240 16527 3 16240 16528 16241 3 16529 16241 16528 3 16241 16529 16242 3 16530 16242 16529 3 16242 16530 16243 3 16531 16243 16530 3 16243 16531 16244 3 16532 16244 16531 3 16244 16532 16245 3 16533 16245 16532 3 16245 16533 16246 3 16534 16246 16533 3 16246 16534 16247 3 16535 16247 16534 3 16247 
16535 16248 3 16536 16248 16535 3 16248 16536 16249 3 16537 16249 16536 3 16249 16537 16250 3 16538 16250 16537 3 16250 16538 16251 3 16539 16251 16538 3 16251 16539 16252 3 16540 16252 16539 3 16252 16540 16253 3 16541 16253 16540 3 16253 16541 16254 3 16542 16254 16541 3 16254 16542 16255 3 16543 16255 16542 3 16255 16543 16256 3 16544 16256 16543 3 16256 16544 16257 3 16545 16257 16544 3 16257 16545 16258 3 16546 16258 16545 3 16258 16546 16259 3 16547 16259 16546 3 16259 16547 16260 3 16548 16260 16547 3 16260 16548 16261 3 16549 16261 16548 3 16261 16549 16262 3 16550 16262 16549 3 16262 16550 16263 3 16551 16263 16550 3 16263 16551 16264 3 16552 16264 16551 3 16264 16552 16265 3 16553 16265 16552 3 16265 16553 16266 3 16554 16266 16553 3 16266 16554 16555 3 16266 16555 16267 3 16267 16555 16556 3 16267 16556 16268 3 16268 16556 16557 3 16268 16557 16269 3 16269 16557 16558 3 16269 16558 16270 3 16270 16558 16559 3 16270 16559 16271 3 16271 16559 16560 3 16271 16560 16272 3 16272 16560 16561 3 16272 16561 16273 3 16273 16561 16562 3 16273 16562 16274 3 16274 16562 16563 3 16274 16563 16275 3 16275 16563 16564 3 16275 16564 16276 3 16276 16564 16565 3 16276 16565 16277 3 16277 16565 16566 3 16277 16566 16278 3 16278 16566 16567 3 16278 16567 16279 3 16279 16567 16568 3 16279 16568 16280 3 16280 16568 16569 3 16280 16569 16281 3 16281 16569 16570 3 16281 16570 16282 3 16282 16570 16571 3 16282 16571 16283 3 16283 16571 16572 3 16283 16572 16284 3 16284 16572 16573 3 16284 16573 16285 3 16285 16573 16574 3 16285 16574 16286 3 16286 16574 16575 3 16286 16575 16287 3 16287 16575 16576 3 16287 16576 16288 3 16288 16576 16577 3 16288 16577 16289 3 16289 16577 16578 3 16289 16578 16290 3 16290 16578 16579 3 16290 16579 16291 3 16291 16579 16580 3 16291 16580 16292 3 16292 16580 16581 3 16292 16581 16293 3 16293 16581 16582 3 16293 16582 16294 3 16294 16582 16583 3 16294 16583 16295 3 16295 16583 16584 3 16295 16584 16296 3 16296 16584 16585 3 16296 16585 16297 3 16297 
16585 16586 3 16297 16586 16298 3 16298 16586 16587 3 16298 16587 16299 3 16299 16587 16588 3 16299 16588 16300 3 16300 16588 16589 3 16300 16589 16301 3 16301 16589 16590 3 16301 16590 16302 3 16302 16590 16591 3 16302 16591 16303 3 16303 16591 16592 3 16303 16592 16304 3 16304 16592 16593 3 16304 16593 16305 3 16305 16593 16596 3 16305 16596 16308 3 16306 16594 16595 3 16306 16595 16307 3 16306 16311 16599 3 16306 16599 16594 3 16308 16596 16597 3 16308 16597 16309 3 16309 16597 16600 3 16309 16600 16312 3 16310 16598 16599 3 16310 16599 16311 3 16310 16314 16602 3 16310 16602 16598 3 16312 16600 16601 3 16312 16601 16313 3 16313 16601 16603 3 16313 16603 16315 3 16314 16316 16604 3 16314 16604 16602 3 16315 16603 16605 3 16315 16605 16317 3 16316 16318 16606 3 16316 16606 16604 3 16317 16605 16607 3 16317 16607 16319 3 16318 16320 16608 3 16318 16608 16606 3 16319 16607 16609 3 16319 16609 16321 3 16320 16322 16610 3 16320 16610 16608 3 16321 16609 16611 3 16321 16611 16323 3 16322 16324 16612 3 16322 16612 16610 3 16323 16611 16613 3 16323 16613 16325 3 16324 16326 16614 3 16324 16614 16612 3 16325 16613 16615 3 16325 16615 16327 3 16326 16328 16616 3 16326 16616 16614 3 16327 16615 16617 3 16327 16617 16329 3 16328 16330 16618 3 16328 16618 16616 3 16329 16617 16619 3 16329 16619 16331 3 16330 16332 16620 3 16330 16620 16618 3 16331 16619 16621 3 16331 16621 16333 3 16332 16334 16622 3 16332 16622 16620 3 16333 16621 16623 3 16333 16623 16335 3 16334 16336 16624 3 16334 16624 16622 3 16335 16623 16625 3 16335 16625 16337 3 16336 16338 16626 3 16336 16626 16624 3 16337 16625 16627 3 16337 16627 16339 3 16338 16340 16628 3 16338 16628 16626 3 16339 16627 16629 3 16339 16629 16341 3 16340 16342 16630 3 16340 16630 16628 3 16341 16629 16631 3 16341 16631 16343 3 16342 16344 16632 3 16342 16632 16630 3 16343 16631 16633 3 16343 16633 16345 3 16344 16346 16634 3 16344 16634 16632 3 16345 16633 16347 3 16635 16347 16633 3 16346 16348 16636 3 16346 16636 16634 3 16347 
16635 16349 3 16637 16349 16635 3 16348 16350 16638 3 16348 16638 16636 3 16349 16637 16351 3 16639 16351 16637 3 16350 16352 16640 3 16350 16640 16638 3 16351 16639 16353 3 16641 16353 16639 3 16352 16354 16640 3 16642 16640 16354 3 16353 16641 16355 3 16643 16355 16641 3 16354 16356 16642 3 16644 16642 16356 3 16355 16643 16357 3 16645 16357 16643 3 16356 16358 16644 3 16646 16644 16358 3 16357 16645 16359 3 16647 16359 16645 3 16358 16360 16646 3 16648 16646 16360 3 16359 16647 16361 3 16649 16361 16647 3 16360 16362 16648 3 16650 16648 16362 3 16361 16649 16363 3 16651 16363 16649 3 16362 16364 16650 3 16652 16650 16364 3 16363 16651 16365 3 16653 16365 16651 3 16364 16366 16652 3 16654 16652 16366 3 16365 16653 16367 3 16655 16367 16653 3 16366 16368 16654 3 16656 16654 16368 3 16367 16655 16369 3 16657 16369 16655 3 16368 16370 16656 3 16658 16656 16370 3 16369 16657 16371 3 16659 16371 16657 3 16370 16372 16658 3 16660 16658 16372 3 16371 16659 16373 3 16661 16373 16659 3 16372 16374 16660 3 16662 16660 16374 3 16373 16661 16375 3 16663 16375 16661 3 16374 16376 16662 3 16664 16662 16376 3 16375 16663 16377 3 16665 16377 16663 3 16376 16378 16664 3 16666 16664 16378 3 16377 16665 16379 3 16667 16379 16665 3 16378 16380 16666 3 16668 16666 16380 3 16379 16667 16381 3 16669 16381 16667 3 16380 16382 16668 3 16670 16668 16382 3 16381 16669 16383 3 16671 16383 16669 3 16382 16384 16670 3 16672 16670 16384 3 16383 16671 16385 3 16673 16385 16671 3 16384 16386 16672 3 16674 16672 16386 3 16385 16673 16387 3 16675 16387 16673 3 16386 16388 16674 3 16676 16674 16388 3 16387 16675 16389 3 16677 16389 16675 3 16388 16390 16676 3 16678 16676 16390 3 16389 16677 16391 3 16679 16391 16677 3 16390 16392 16678 3 16680 16678 16392 3 16391 16679 16393 3 16681 16393 16679 3 16392 16394 16680 3 16682 16680 16394 3 16393 16681 16395 3 16683 16395 16681 3 16394 16396 16682 3 16684 16682 16396 3 16395 16683 16397 3 16685 16397 16683 3 16396 16398 16684 3 16686 16684 16398 3 16397 
16685 16399 3 16687 16399 16685 3 16398 16400 16686 3 16688 16686 16400 3 16399 16687 16401 3 16689 16401 16687 3 16400 16402 16688 3 16690 16688 16402 3 16401 16689 16403 3 16691 16403 16689 3 16402 16404 16690 3 16692 16690 16404 3 16403 16691 16405 3 16693 16405 16691 3 16404 16406 16692 3 16694 16692 16406 3 16405 16693 16407 3 16695 16407 16693 3 16406 16408 16694 3 16696 16694 16408 3 16407 16695 16409 3 16697 16409 16695 3 16408 16410 16696 3 16698 16696 16410 3 16409 16697 16411 3 16699 16411 16697 3 16410 16412 16698 3 16700 16698 16412 3 16411 16699 16413 3 16701 16413 16699 3 16412 16414 16700 3 16702 16700 16414 3 16413 16701 16415 3 16703 16415 16701 3 16414 16416 16702 3 16704 16702 16416 3 16415 16703 16417 3 16705 16417 16703 3 16416 16418 16704 3 16706 16704 16418 3 16417 16705 16419 3 16707 16419 16705 3 16418 16420 16706 3 16708 16706 16420 3 16419 16707 16421 3 16709 16421 16707 3 16420 16422 16708 3 16710 16708 16422 3 16421 16709 16423 3 16711 16423 16709 3 16422 16424 16710 3 16712 16710 16424 3 16423 16711 16425 3 16713 16425 16711 3 16424 16426 16712 3 16714 16712 16426 3 16425 16713 16715 3 16425 16715 16427 3 16426 16428 16714 3 16716 16714 16428 3 16427 16715 16717 3 16427 16717 16429 3 16428 16430 16716 3 16718 16716 16430 3 16429 16717 16719 3 16429 16719 16431 3 16430 16432 16718 3 16720 16718 16432 3 16431 16719 16721 3 16431 16721 16433 3 16432 16434 16722 3 16432 16722 16720 3 16433 16721 16723 3 16433 16723 16435 3 16434 16436 16724 3 16434 16724 16722 3 16435 16723 16725 3 16435 16725 16437 3 16436 16438 16726 3 16436 16726 16724 3 16437 16725 16727 3 16437 16727 16439 3 16438 16440 16728 3 16438 16728 16726 3 16439 16727 16729 3 16439 16729 16441 3 16440 16442 16730 3 16440 16730 16728 3 16441 16729 16731 3 16441 16731 16443 3 16442 16444 16732 3 16442 16732 16730 3 16443 16731 16733 3 16443 16733 16445 3 16444 16446 16734 3 16444 16734 16732 3 16445 16733 16735 3 16445 16735 16447 3 16446 16448 16736 3 16446 16736 16734 3 16447 
16735 16739 3 16447 16739 16451 3 16448 16449 16737 3 16448 16737 16736 3 16449 16452 16740 3 16449 16740 16737 3 16450 16451 16739 3 16450 16739 16738 3 16450 16738 16743 3 16450 16743 16455 3 16452 16453 16741 3 16452 16741 16740 3 16453 16456 16744 3 16453 16744 16741 3 16454 16455 16743 3 16454 16743 16742 3 16454 16742 16812 3 16454 16812 16524 3 16456 16457 16745 3 16456 16745 16744 3 16457 16458 16746 3 16457 16746 16745 3 16458 16459 16747 3 16458 16747 16746 3 16459 16460 16748 3 16459 16748 16747 3 16460 16461 16749 3 16460 16749 16748 3 16461 16462 16750 3 16461 16750 16749 3 16462 16463 16751 3 16462 16751 16750 3 16463 16464 16752 3 16463 16752 16751 3 16464 16465 16753 3 16464 16753 16752 3 16465 16466 16754 3 16465 16754 16753 3 16466 16467 16755 3 16466 16755 16754 3 16467 16468 16756 3 16467 16756 16755 3 16468 16469 16757 3 16468 16757 16756 3 16469 16470 16758 3 16469 16758 16757 3 16470 16471 16759 3 16470 16759 16758 3 16471 16472 16760 3 16471 16760 16759 3 16472 16473 16761 3 16472 16761 16760 3 16473 16474 16762 3 16473 16762 16761 3 16474 16475 16763 3 16474 16763 16762 3 16475 16476 16764 3 16475 16764 16763 3 16476 16477 16765 3 16476 16765 16764 3 16477 16478 16766 3 16477 16766 16765 3 16478 16479 16767 3 16478 16767 16766 3 16479 16480 16768 3 16479 16768 16767 3 16480 16481 16769 3 16480 16769 16768 3 16481 16482 16770 3 16481 16770 16769 3 16482 16483 16771 3 16482 16771 16770 3 16483 16484 16772 3 16483 16772 16771 3 16484 16485 16773 3 16484 16773 16772 3 16485 16486 16774 3 16485 16774 16773 3 16486 16487 16775 3 16486 16775 16774 3 16487 16488 16776 3 16487 16776 16775 3 16488 16489 16777 3 16488 16777 16776 3 16489 16490 16778 3 16489 16778 16777 3 16490 16491 16779 3 16490 16779 16778 3 16491 16492 16780 3 16491 16780 16779 3 16492 16493 16781 3 16492 16781 16780 3 16493 16494 16782 3 16493 16782 16781 3 16494 16495 16783 3 16494 16783 16782 3 16495 16496 16784 3 16495 16784 16783 3 16496 16497 16785 3 16496 16785 16784 3 16497 
16498 16786 3 16497 16786 16785 3 16498 16499 16787 3 16498 16787 16786 3 16499 16500 16788 3 16499 16788 16787 3 16500 16501 16789 3 16500 16789 16788 3 16501 16502 16790 3 16501 16790 16789 3 16502 16503 16791 3 16502 16791 16790 3 16503 16504 16792 3 16503 16792 16791 3 16504 16505 16793 3 16504 16793 16792 3 16505 16506 16794 3 16505 16794 16793 3 16506 16507 16795 3 16506 16795 16794 3 16507 16508 16796 3 16507 16796 16795 3 16508 16509 16797 3 16508 16797 16796 3 16509 16510 16798 3 16509 16798 16797 3 16510 16511 16798 3 16799 16798 16511 3 16511 16512 16799 3 16800 16799 16512 3 16512 16513 16800 3 16801 16800 16513 3 16513 16514 16801 3 16802 16801 16514 3 16514 16515 16802 3 16803 16802 16515 3 16515 16516 16803 3 16804 16803 16516 3 16516 16517 16804 3 16805 16804 16517 3 16517 16518 16805 3 16806 16805 16518 3 16518 16519 16806 3 16807 16806 16519 3 16519 16520 16807 3 16808 16807 16520 3 16520 16521 16808 3 16809 16808 16521 3 16521 16522 16809 3 16810 16809 16522 3 16522 16523 16810 3 16811 16810 16523 3 16523 16524 16811 3 16812 16811 16524 3 16525 16813 16526 3 16814 16526 16813 3 16525 16595 16813 3 16883 16813 16595 3 16526 16814 16527 3 16815 16527 16814 3 16527 16815 16528 3 16816 16528 16815 3 16528 16816 16529 3 16817 16529 16816 3 16529 16817 16530 3 16818 16530 16817 3 16530 16818 16531 3 16819 16531 16818 3 16531 16819 16532 3 16820 16532 16819 3 16532 16820 16533 3 16821 16533 16820 3 16533 16821 16534 3 16822 16534 16821 3 16534 16822 16535 3 16823 16535 16822 3 16535 16823 16536 3 16824 16536 16823 3 16536 16824 16537 3 16825 16537 16824 3 16537 16825 16538 3 16826 16538 16825 3 16538 16826 16539 3 16827 16539 16826 3 16539 16827 16540 3 16828 16540 16827 3 16540 16828 16541 3 16829 16541 16828 3 16541 16829 16542 3 16830 16542 16829 3 16542 16830 16543 3 16831 16543 16830 3 16543 16831 16544 3 16832 16544 16831 3 16544 16832 16545 3 16833 16545 16832 3 16545 16833 16546 3 16834 16546 16833 3 16546 16834 16547 3 16835 16547 16834 3 16547 
16835 16548 3 16836 16548 16835 3 16548 16836 16549 3 16837 16549 16836 3 16549 16837 16550 3 16838 16550 16837 3 16550 16838 16551 3 16839 16551 16838 3 16551 16839 16552 3 16840 16552 16839 3 16552 16840 16553 3 16841 16553 16840 3 16553 16841 16554 3 16842 16554 16841 3 16554 16842 16555 3 16843 16555 16842 3 16555 16843 16556 3 16844 16556 16843 3 16556 16844 16557 3 16845 16557 16844 3 16557 16845 16558 3 16846 16558 16845 3 16558 16846 16559 3 16847 16559 16846 3 16559 16847 16560 3 16848 16560 16847 3 16560 16848 16561 3 16849 16561 16848 3 16561 16849 16562 3 16850 16562 16849 3 16562 16850 16563 3 16851 16563 16850 3 16563 16851 16564 3 16852 16564 16851 3 16564 16852 16565 3 16853 16565 16852 3 16565 16853 16566 3 16854 16566 16853 3 16566 16854 16567 3 16855 16567 16854 3 16567 16855 16568 3 16856 16568 16855 3 16568 16856 16569 3 16857 16569 16856 3 16569 16857 16570 3 16858 16570 16857 3 16570 16858 16571 3 16859 16571 16858 3 16571 16859 16572 3 16860 16572 16859 3 16572 16860 16573 3 16861 16573 16860 3 16573 16861 16574 3 16862 16574 16861 3 16574 16862 16575 3 16863 16575 16862 3 16575 16863 16576 3 16864 16576 16863 3 16576 16864 16577 3 16865 16577 16864 3 16577 16865 16578 3 16866 16578 16865 3 16578 16866 16579 3 16867 16579 16866 3 16579 16867 16580 3 16868 16580 16867 3 16580 16868 16581 3 16869 16581 16868 3 16581 16869 16582 3 16870 16582 16869 3 16582 16870 16583 3 16871 16583 16870 3 16583 16871 16872 3 16583 16872 16584 3 16584 16872 16873 3 16584 16873 16585 3 16585 16873 16874 3 16585 16874 16586 3 16586 16874 16875 3 16586 16875 16587 3 16587 16875 16876 3 16587 16876 16588 3 16588 16876 16877 3 16588 16877 16589 3 16589 16877 16878 3 16589 16878 16590 3 16590 16878 16879 3 16590 16879 16591 3 16591 16879 16880 3 16591 16880 16592 3 16592 16880 16881 3 16592 16881 16593 3 16593 16881 16884 3 16593 16884 16596 3 16594 16882 16883 3 16594 16883 16595 3 16594 16599 16887 3 16594 16887 16882 3 16596 16884 16885 3 16596 16885 16597 3 16597 
16885 16888 3 16597 16888 16600 3 16598 16886 16887 3 16598 16887 16599 3 16598 16602 16890 3 16598 16890 16886 3 16600 16888 16889 3 16600 16889 16601 3 16601 16889 16891 3 16601 16891 16603 3 16602 16604 16892 3 16602 16892 16890 3 16603 16891 16893 3 16603 16893 16605 3 16604 16606 16894 3 16604 16894 16892 3 16605 16893 16895 3 16605 16895 16607 3 16606 16608 16896 3 16606 16896 16894 3 16607 16895 16897 3 16607 16897 16609 3 16608 16610 16898 3 16608 16898 16896 3 16609 16897 16899 3 16609 16899 16611 3 16610 16612 16900 3 16610 16900 16898 3 16611 16899 16901 3 16611 16901 16613 3 16612 16614 16902 3 16612 16902 16900 3 16613 16901 16903 3 16613 16903 16615 3 16614 16616 16904 3 16614 16904 16902 3 16615 16903 16905 3 16615 16905 16617 3 16616 16618 16906 3 16616 16906 16904 3 16617 16905 16907 3 16617 16907 16619 3 16618 16620 16908 3 16618 16908 16906 3 16619 16907 16909 3 16619 16909 16621 3 16620 16622 16910 3 16620 16910 16908 3 16621 16909 16911 3 16621 16911 16623 3 16622 16624 16912 3 16622 16912 16910 3 16623 16911 16913 3 16623 16913 16625 3 16624 16626 16914 3 16624 16914 16912 3 16625 16913 16915 3 16625 16915 16627 3 16626 16628 16916 3 16626 16916 16914 3 16627 16915 16917 3 16627 16917 16629 3 16628 16630 16918 3 16628 16918 16916 3 16629 16917 16919 3 16629 16919 16631 3 16630 16632 16920 3 16630 16920 16918 3 16631 16919 16921 3 16631 16921 16633 3 16632 16634 16922 3 16632 16922 16920 3 16633 16921 16923 3 16633 16923 16635 3 16634 16636 16924 3 16634 16924 16922 3 16635 16923 16925 3 16635 16925 16637 3 16636 16638 16926 3 16636 16926 16924 3 16637 16925 16927 3 16637 16927 16639 3 16638 16640 16928 3 16638 16928 16926 3 16639 16927 16929 3 16639 16929 16641 3 16640 16642 16930 3 16640 16930 16928 3 16641 16929 16931 3 16641 16931 16643 3 16642 16644 16932 3 16642 16932 16930 3 16643 16931 16933 3 16643 16933 16645 3 16644 16646 16934 3 16644 16934 16932 3 16645 16933 16935 3 16645 16935 16647 3 16646 16648 16936 3 16646 16936 16934 3 16647 
16935 16937 3 16647 16937 16649 3 16648 16650 16938 3 16648 16938 16936 3 16649 16937 16939 3 16649 16939 16651 3 16650 16652 16940 3 16650 16940 16938 3 16651 16939 16941 3 16651 16941 16653 3 16652 16654 16942 3 16652 16942 16940 3 16653 16941 16943 3 16653 16943 16655 3 16654 16656 16944 3 16654 16944 16942 3 16655 16943 16945 3 16655 16945 16657 3 16656 16658 16946 3 16656 16946 16944 3 16657 16945 16947 3 16657 16947 16659 3 16658 16660 16948 3 16658 16948 16946 3 16659 16947 16949 3 16659 16949 16661 3 16660 16662 16950 3 16660 16950 16948 3 16661 16949 16951 3 16661 16951 16663 3 16662 16664 16952 3 16662 16952 16950 3 16663 16951 16665 3 16953 16665 16951 3 16664 16666 16954 3 16664 16954 16952 3 16665 16953 16667 3 16955 16667 16953 3 16666 16668 16956 3 16666 16956 16954 3 16667 16955 16669 3 16957 16669 16955 3 16668 16670 16956 3 16958 16956 16670 3 16669 16957 16671 3 16959 16671 16957 3 16670 16672 16958 3 16960 16958 16672 3 16671 16959 16673 3 16961 16673 16959 3 16672 16674 16960 3 16962 16960 16674 3 16673 16961 16675 3 16963 16675 16961 3 16674 16676 16962 3 16964 16962 16676 3 16675 16963 16677 3 16965 16677 16963 3 16676 16678 16964 3 16966 16964 16678 3 16677 16965 16679 3 16967 16679 16965 3 16678 16680 16966 3 16968 16966 16680 3 16679 16967 16681 3 16969 16681 16967 3 16680 16682 16968 3 16970 16968 16682 3 16681 16969 16683 3 16971 16683 16969 3 16682 16684 16970 3 16972 16970 16684 3 16683 16971 16685 3 16973 16685 16971 3 16684 16686 16972 3 16974 16972 16686 3 16685 16973 16687 3 16975 16687 16973 3 16686 16688 16974 3 16976 16974 16688 3 16687 16975 16689 3 16977 16689 16975 3 16688 16690 16976 3 16978 16976 16690 3 16689 16977 16691 3 16979 16691 16977 3 16690 16692 16978 3 16980 16978 16692 3 16691 16979 16693 3 16981 16693 16979 3 16692 16694 16980 3 16982 16980 16694 3 16693 16981 16695 3 16983 16695 16981 3 16694 16696 16982 3 16984 16982 16696 3 16695 16983 16697 3 16985 16697 16983 3 16696 16698 16984 3 16986 16984 16698 3 16697 
16985 16699 3 16987 16699 16985 3 16698 16700 16986 3 16988 16986 16700 3 16699 16987 16701 3 16989 16701 16987 3 16700 16702 16988 3 16990 16988 16702 3 16701 16989 16703 3 16991 16703 16989 3 16702 16704 16990 3 16992 16990 16704 3 16703 16991 16705 3 16993 16705 16991 3 16704 16706 16992 3 16994 16992 16706 3 16705 16993 16707 3 16995 16707 16993 3 16706 16708 16994 3 16996 16994 16708 3 16707 16995 16709 3 16997 16709 16995 3 16708 16710 16996 3 16998 16996 16710 3 16709 16997 16711 3 16999 16711 16997 3 16710 16712 16998 3 17000 16998 16712 3 16711 16999 16713 3 17001 16713 16999 3 16712 16714 17000 3 17002 17000 16714 3 16713 17001 16715 3 17003 16715 17001 3 16714 16716 17002 3 17004 17002 16716 3 16715 17003 16717 3 17005 16717 17003 3 16716 16718 17004 3 17006 17004 16718 3 16717 17005 16719 3 17007 16719 17005 3 16718 16720 17006 3 17008 17006 16720 3 16719 17007 16721 3 17009 16721 17007 3 16720 16722 17008 3 17010 17008 16722 3 16721 17009 16723 3 17011 16723 17009 3 16722 16724 17010 3 17012 17010 16724 3 16723 17011 16725 3 17013 16725 17011 3 16724 16726 17012 3 17014 17012 16726 3 16725 17013 16727 3 17015 16727 17013 3 16726 16728 17014 3 17016 17014 16728 3 16727 17015 16729 3 17017 16729 17015 3 16728 16730 17016 3 17018 17016 16730 3 16729 17017 16731 3 17019 16731 17017 3 16730 16732 17018 3 17020 17018 16732 3 16731 17019 16733 3 17021 16733 17019 3 16732 16734 17020 3 17022 17020 16734 3 16733 17021 16735 3 17023 16735 17021 3 16734 16736 17022 3 17024 17022 16736 3 16735 17023 16739 3 17027 16739 17023 3 16736 16737 17024 3 17025 17024 16737 3 16737 16740 17025 3 17028 17025 16740 3 16738 16739 17026 3 17027 17026 16739 3 16738 17026 16743 3 17031 16743 17026 3 16740 16741 17028 3 17029 17028 16741 3 16741 16744 17029 3 17032 17029 16744 3 16742 16743 17030 3 17031 17030 16743 3 16742 17030 16812 3 17100 16812 17030 3 16744 16745 17032 3 17033 17032 16745 3 16745 16746 17033 3 17034 17033 16746 3 16746 16747 17034 3 17035 17034 16747 3 16747 
16748 17035 3 17036 17035 16748 3 16748 16749 17037 3 16748 17037 17036 3 16749 16750 17038 3 16749 17038 17037 3 16750 16751 17039 3 16750 17039 17038 3 16751 16752 17040 3 16751 17040 17039 3 16752 16753 17041 3 16752 17041 17040 3 16753 16754 17042 3 16753 17042 17041 3 16754 16755 17043 3 16754 17043 17042 3 16755 16756 17044 3 16755 17044 17043 3 16756 16757 17045 3 16756 17045 17044 3 16757 16758 17046 3 16757 17046 17045 3 16758 16759 17047 3 16758 17047 17046 3 16759 16760 17048 3 16759 17048 17047 3 16760 16761 17049 3 16760 17049 17048 3 16761 16762 17050 3 16761 17050 17049 3 16762 16763 17051 3 16762 17051 17050 3 16763 16764 17052 3 16763 17052 17051 3 16764 16765 17053 3 16764 17053 17052 3 16765 16766 17054 3 16765 17054 17053 3 16766 16767 17055 3 16766 17055 17054 3 16767 16768 17056 3 16767 17056 17055 3 16768 16769 17057 3 16768 17057 17056 3 16769 16770 17058 3 16769 17058 17057 3 16770 16771 17059 3 16770 17059 17058 3 16771 16772 17060 3 16771 17060 17059 3 16772 16773 17061 3 16772 17061 17060 3 16773 16774 17062 3 16773 17062 17061 3 16774 16775 17063 3 16774 17063 17062 3 16775 16776 17064 3 16775 17064 17063 3 16776 16777 17065 3 16776 17065 17064 3 16777 16778 17066 3 16777 17066 17065 3 16778 16779 17067 3 16778 17067 17066 3 16779 16780 17068 3 16779 17068 17067 3 16780 16781 17069 3 16780 17069 17068 3 16781 16782 17070 3 16781 17070 17069 3 16782 16783 17071 3 16782 17071 17070 3 16783 16784 17072 3 16783 17072 17071 3 16784 16785 17073 3 16784 17073 17072 3 16785 16786 17074 3 16785 17074 17073 3 16786 16787 17075 3 16786 17075 17074 3 16787 16788 17076 3 16787 17076 17075 3 16788 16789 17077 3 16788 17077 17076 3 16789 16790 17078 3 16789 17078 17077 3 16790 16791 17079 3 16790 17079 17078 3 16791 16792 17080 3 16791 17080 17079 3 16792 16793 17081 3 16792 17081 17080 3 16793 16794 17082 3 16793 17082 17081 3 16794 16795 17083 3 16794 17083 17082 3 16795 16796 17084 3 16795 17084 17083 3 16796 16797 17085 3 16796 17085 17084 3 16797 
16798 17086 3 16797 17086 17085 3 16798 16799 17087 3 16798 17087 17086 3 16799 16800 17088 3 16799 17088 17087 3 16800 16801 17089 3 16800 17089 17088 3 16801 16802 17090 3 16801 17090 17089 3 16802 16803 17091 3 16802 17091 17090 3 16803 16804 17092 3 16803 17092 17091 3 16804 16805 17093 3 16804 17093 17092 3 16805 16806 17094 3 16805 17094 17093 3 16806 16807 17095 3 16806 17095 17094 3 16807 16808 17096 3 16807 17096 17095 3 16808 16809 17097 3 16808 17097 17096 3 16809 16810 17098 3 16809 17098 17097 3 16810 16811 17099 3 16810 17099 17098 3 16811 16812 17100 3 16811 17100 17099 3 16813 17101 17102 3 16813 17102 16814 3 16813 16883 17101 3 17171 17101 16883 3 16814 17102 17103 3 16814 17103 16815 3 16815 17103 17104 3 16815 17104 16816 3 16816 17104 17105 3 16816 17105 16817 3 16817 17105 17106 3 16817 17106 16818 3 16818 17106 17107 3 16818 17107 16819 3 16819 17107 17108 3 16819 17108 16820 3 16820 17108 17109 3 16820 17109 16821 3 16821 17109 17110 3 16821 17110 16822 3 16822 17110 17111 3 16822 17111 16823 3 16823 17111 16824 3 17112 16824 17111 3 16824 17112 16825 3 17113 16825 17112 3 16825 17113 16826 3 17114 16826 17113 3 16826 17114 16827 3 17115 16827 17114 3 16827 17115 16828 3 17116 16828 17115 3 16828 17116 16829 3 17117 16829 17116 3 16829 17117 16830 3 17118 16830 17117 3 16830 17118 16831 3 17119 16831 17118 3 16831 17119 16832 3 17120 16832 17119 3 16832 17120 16833 3 17121 16833 17120 3 16833 17121 16834 3 17122 16834 17121 3 16834 17122 16835 3 17123 16835 17122 3 16835 17123 16836 3 17124 16836 17123 3 16836 17124 16837 3 17125 16837 17124 3 16837 17125 16838 3 17126 16838 17125 3 16838 17126 16839 3 17127 16839 17126 3 16839 17127 16840 3 17128 16840 17127 3 16840 17128 16841 3 17129 16841 17128 3 16841 17129 16842 3 17130 16842 17129 3 16842 17130 16843 3 17131 16843 17130 3 16843 17131 16844 3 17132 16844 17131 3 16844 17132 16845 3 17133 16845 17132 3 16845 17133 16846 3 17134 16846 17133 3 16846 17134 16847 3 17135 16847 17134 3 16847 
17135 16848 3 17136 16848 17135 3 16848 17136 16849 3 17137 16849 17136 3 16849 17137 16850 3 17138 16850 17137 3 16850 17138 16851 3 17139 16851 17138 3 16851 17139 16852 3 17140 16852 17139 3 16852 17140 16853 3 17141 16853 17140 3 16853 17141 16854 3 17142 16854 17141 3 16854 17142 16855 3 17143 16855 17142 3 16855 17143 16856 3 17144 16856 17143 3 16856 17144 16857 3 17145 16857 17144 3 16857 17145 16858 3 17146 16858 17145 3 16858 17146 16859 3 17147 16859 17146 3 16859 17147 16860 3 17148 16860 17147 3 16860 17148 16861 3 17149 16861 17148 3 16861 17149 16862 3 17150 16862 17149 3 16862 17150 16863 3 17151 16863 17150 3 16863 17151 16864 3 17152 16864 17151 3 16864 17152 16865 3 17153 16865 17152 3 16865 17153 16866 3 17154 16866 17153 3 16866 17154 16867 3 17155 16867 17154 3 16867 17155 16868 3 17156 16868 17155 3 16868 17156 16869 3 17157 16869 17156 3 16869 17157 16870 3 17158 16870 17157 3 16870 17158 16871 3 17159 16871 17158 3 16871 17159 16872 3 17160 16872 17159 3 16872 17160 16873 3 17161 16873 17160 3 16873 17161 16874 3 17162 16874 17161 3 16874 17162 16875 3 17163 16875 17162 3 16875 17163 16876 3 17164 16876 17163 3 16876 17164 16877 3 17165 16877 17164 3 16877 17165 16878 3 17166 16878 17165 3 16878 17166 16879 3 17167 16879 17166 3 16879 17167 16880 3 17168 16880 17167 3 16880 17168 16881 3 17169 16881 17168 3 16881 17169 16884 3 17172 16884 17169 3 16882 17170 16883 3 17171 16883 17170 3 16882 16887 17170 3 17175 17170 16887 3 16884 17172 16885 3 17173 16885 17172 3 16885 17173 16888 3 17176 16888 17173 3 16886 17174 16887 3 17175 16887 17174 3 16886 16890 17174 3 17178 17174 16890 3 16888 17176 16889 3 17177 16889 17176 3 16889 17177 16891 3 17179 16891 17177 3 16890 16892 17178 3 17180 17178 16892 3 16891 17179 16893 3 17181 16893 17179 3 16892 16894 17180 3 17182 17180 16894 3 16893 17181 16895 3 17183 16895 17181 3 16894 16896 17182 3 17184 17182 16896 3 16895 17183 16897 3 17185 16897 17183 3 16896 16898 17184 3 17186 17184 16898 3 16897 
17185 16899 3 17187 16899 17185 3 16898 16900 17186 3 17188 17186 16900 3 16899 17187 16901 3 17189 16901 17187 3 16900 16902 17188 3 17190 17188 16902 3 16901 17189 16903 3 17191 16903 17189 3 16902 16904 17190 3 17192 17190 16904 3 16903 17191 17193 3 16903 17193 16905 3 16904 16906 17192 3 17194 17192 16906 3 16905 17193 17195 3 16905 17195 16907 3 16906 16908 17194 3 17196 17194 16908 3 16907 17195 17197 3 16907 17197 16909 3 16908 16910 17198 3 16908 17198 17196 3 16909 17197 17199 3 16909 17199 16911 3 16910 16912 17200 3 16910 17200 17198 3 16911 17199 17201 3 16911 17201 16913 3 16912 16914 17202 3 16912 17202 17200 3 16913 17201 17203 3 16913 17203 16915 3 16914 16916 17204 3 16914 17204 17202 3 16915 17203 17205 3 16915 17205 16917 3 16916 16918 17206 3 16916 17206 17204 3 16917 17205 17207 3 16917 17207 16919 3 16918 16920 17208 3 16918 17208 17206 3 16919 17207 17209 3 16919 17209 16921 3 16920 16922 17210 3 16920 17210 17208 3 16921 17209 17211 3 16921 17211 16923 3 16922 16924 17212 3 16922 17212 17210 3 16923 17211 17213 3 16923 17213 16925 3 16924 16926 17214 3 16924 17214 17212 3 16925 17213 17215 3 16925 17215 16927 3 16926 16928 17216 3 16926 17216 17214 3 16927 17215 17217 3 16927 17217 16929 3 16928 16930 17218 3 16928 17218 17216 3 16929 17217 17219 3 16929 17219 16931 3 16930 16932 17220 3 16930 17220 17218 3 16931 17219 17221 3 16931 17221 16933 3 16932 16934 17222 3 16932 17222 17220 3 16933 17221 17223 3 16933 17223 16935 3 16934 16936 17224 3 16934 17224 17222 3 16935 17223 17225 3 16935 17225 16937 3 16936 16938 17226 3 16936 17226 17224 3 16937 17225 17227 3 16937 17227 16939 3 16938 16940 17228 3 16938 17228 17226 3 16939 17227 17229 3 16939 17229 16941 3 16940 16942 17230 3 16940 17230 17228 3 16941 17229 17231 3 16941 17231 16943 3 16942 16944 17232 3 16942 17232 17230 3 16943 17231 17233 3 16943 17233 16945 3 16944 16946 17234 3 16944 17234 17232 3 16945 17233 17235 3 16945 17235 16947 3 16946 16948 17236 3 16946 17236 17234 3 16947 
17235 17237 3 16947 17237 16949 3 16948 16950 17238 3 16948 17238 17236 3 16949 17237 17239 3 16949 17239 16951 3 16950 16952 17240 3 16950 17240 17238 3 16951 17239 17241 3 16951 17241 16953 3 16952 16954 17242 3 16952 17242 17240 3 16953 17241 17243 3 16953 17243 16955 3 16954 16956 17244 3 16954 17244 17242 3 16955 17243 17245 3 16955 17245 16957 3 16956 16958 17246 3 16956 17246 17244 3 16957 17245 17247 3 16957 17247 16959 3 16958 16960 17248 3 16958 17248 17246 3 16959 17247 17249 3 16959 17249 16961 3 16960 16962 17250 3 16960 17250 17248 3 16961 17249 17251 3 16961 17251 16963 3 16962 16964 17252 3 16962 17252 17250 3 16963 17251 17253 3 16963 17253 16965 3 16964 16966 17254 3 16964 17254 17252 3 16965 17253 17255 3 16965 17255 16967 3 16966 16968 17256 3 16966 17256 17254 3 16967 17255 17257 3 16967 17257 16969 3 16968 16970 17258 3 16968 17258 17256 3 16969 17257 17259 3 16969 17259 16971 3 16970 16972 17260 3 16970 17260 17258 3 16971 17259 17261 3 16971 17261 16973 3 16972 16974 17262 3 16972 17262 17260 3 16973 17261 17263 3 16973 17263 16975 3 16974 16976 17264 3 16974 17264 17262 3 16975 17263 17265 3 16975 17265 16977 3 16976 16978 17266 3 16976 17266 17264 3 16977 17265 17267 3 16977 17267 16979 3 16978 16980 17268 3 16978 17268 17266 3 16979 17267 17269 3 16979 17269 16981 3 16980 16982 17270 3 16980 17270 17268 3 16981 17269 17271 3 16981 17271 16983 3 16982 16984 17272 3 16982 17272 17270 3 16983 17271 17273 3 16983 17273 16985 3 16984 16986 17274 3 16984 17274 17272 3 16985 17273 16987 3 17275 16987 17273 3 16986 16988 17276 3 16986 17276 17274 3 16987 17275 16989 3 17277 16989 17275 3 16988 16990 17276 3 17278 17276 16990 3 16989 17277 16991 3 17279 16991 17277 3 16990 16992 17278 3 17280 17278 16992 3 16991 17279 16993 3 17281 16993 17279 3 16992 16994 17280 3 17282 17280 16994 3 16993 17281 16995 3 17283 16995 17281 3 16994 16996 17282 3 17284 17282 16996 3 16995 17283 16997 3 17285 16997 17283 3 16996 16998 17284 3 17286 17284 16998 3 16997 
17285 16999 3 17287 16999 17285 3 16998 17000 17286 3 17288 17286 17000 3 16999 17287 17001 3 17289 17001 17287 3 17000 17002 17288 3 17290 17288 17002 3 17001 17289 17003 3 17291 17003 17289 3 17002 17004 17290 3 17292 17290 17004 3 17003 17291 17005 3 17293 17005 17291 3 17004 17006 17292 3 17294 17292 17006 3 17005 17293 17007 3 17295 17007 17293 3 17006 17008 17294 3 17296 17294 17008 3 17007 17295 17009 3 17297 17009 17295 3 17008 17010 17296 3 17298 17296 17010 3 17009 17297 17011 3 17299 17011 17297 3 17010 17012 17298 3 17300 17298 17012 3 17011 17299 17013 3 17301 17013 17299 3 17012 17014 17300 3 17302 17300 17014 3 17013 17301 17015 3 17303 17015 17301 3 17014 17016 17302 3 17304 17302 17016 3 17015 17303 17017 3 17305 17017 17303 3 17016 17018 17304 3 17306 17304 17018 3 17017 17305 17019 3 17307 17019 17305 3 17018 17020 17306 3 17308 17306 17020 3 17019 17307 17021 3 17309 17021 17307 3 17020 17022 17308 3 17310 17308 17022 3 17021 17309 17023 3 17311 17023 17309 3 17022 17024 17310 3 17312 17310 17024 3 17023 17311 17027 3 17315 17027 17311 3 17024 17025 17312 3 17313 17312 17025 3 17025 17028 17313 3 17316 17313 17028 3 17026 17027 17314 3 17315 17314 17027 3 17026 17314 17031 3 17319 17031 17314 3 17028 17029 17316 3 17317 17316 17029 3 17029 17032 17317 3 17320 17317 17032 3 17030 17031 17318 3 17319 17318 17031 3 17030 17318 17100 3 17388 17100 17318 3 17032 17033 17320 3 17321 17320 17033 3 17033 17034 17321 3 17322 17321 17034 3 17034 17035 17322 3 17323 17322 17035 3 17035 17036 17323 3 17324 17323 17036 3 17036 17037 17324 3 17325 17324 17037 3 17037 17038 17325 3 17326 17325 17038 3 17038 17039 17326 3 17327 17326 17039 3 17039 17040 17327 3 17328 17327 17040 3 17040 17041 17328 3 17329 17328 17041 3 17041 17042 17329 3 17330 17329 17042 3 17042 17043 17330 3 17331 17330 17043 3 17043 17044 17331 3 17332 17331 17044 3 17044 17045 17332 3 17333 17332 17045 3 17045 17046 17333 3 17334 17333 17046 3 17046 17047 17334 3 17335 17334 17047 3 17047 
17048 17335 3 17336 17335 17048 3 17048 17049 17336 3 17337 17336 17049 3 17049 17050 17337 3 17338 17337 17050 3 17050 17051 17338 3 17339 17338 17051 3 17051 17052 17339 3 17340 17339 17052 3 17052 17053 17340 3 17341 17340 17053 3 17053 17054 17341 3 17342 17341 17054 3 17054 17055 17342 3 17343 17342 17055 3 17055 17056 17343 3 17344 17343 17056 3 17056 17057 17344 3 17345 17344 17057 3 17057 17058 17345 3 17346 17345 17058 3 17058 17059 17346 3 17347 17346 17059 3 17059 17060 17347 3 17348 17347 17060 3 17060 17061 17348 3 17349 17348 17061 3 17061 17062 17349 3 17350 17349 17062 3 17062 17063 17350 3 17351 17350 17063 3 17063 17064 17351 3 17352 17351 17064 3 17064 17065 17352 3 17353 17352 17065 3 17065 17066 17353 3 17354 17353 17066 3 17066 17067 17354 3 17355 17354 17067 3 17067 17068 17355 3 17356 17355 17068 3 17068 17069 17357 3 17068 17357 17356 3 17069 17070 17358 3 17069 17358 17357 3 17070 17071 17359 3 17070 17359 17358 3 17071 17072 17360 3 17071 17360 17359 3 17072 17073 17361 3 17072 17361 17360 3 17073 17074 17362 3 17073 17362 17361 3 17074 17075 17363 3 17074 17363 17362 3 17075 17076 17364 3 17075 17364 17363 3 17076 17077 17365 3 17076 17365 17364 3 17077 17078 17366 3 17077 17366 17365 3 17078 17079 17367 3 17078 17367 17366 3 17079 17080 17368 3 17079 17368 17367 3 17080 17081 17369 3 17080 17369 17368 3 17081 17082 17370 3 17081 17370 17369 3 17082 17083 17371 3 17082 17371 17370 3 17083 17084 17372 3 17083 17372 17371 3 17084 17085 17373 3 17084 17373 17372 3 17085 17086 17374 3 17085 17374 17373 3 17086 17087 17375 3 17086 17375 17374 3 17087 17088 17376 3 17087 17376 17375 3 17088 17089 17377 3 17088 17377 17376 3 17089 17090 17378 3 17089 17378 17377 3 17090 17091 17379 3 17090 17379 17378 3 17091 17092 17380 3 17091 17380 17379 3 17092 17093 17381 3 17092 17381 17380 3 17093 17094 17382 3 17093 17382 17381 3 17094 17095 17383 3 17094 17383 17382 3 17095 17096 17384 3 17095 17384 17383 3 17096 17097 17385 3 17096 17385 17384 3 17097 
17098 17386 3 17097 17386 17385 3 17098 17099 17387 3 17098 17387 17386 3 17099 17100 17388 3 17099 17388 17387 3 17101 17389 17390 3 17101 17390 17102 3 17101 17171 17459 3 17101 17459 17389 3 17102 17390 17391 3 17102 17391 17103 3 17103 17391 17392 3 17103 17392 17104 3 17104 17392 17393 3 17104 17393 17105 3 17105 17393 17394 3 17105 17394 17106 3 17106 17394 17395 3 17106 17395 17107 3 17107 17395 17396 3 17107 17396 17108 3 17108 17396 17397 3 17108 17397 17109 3 17109 17397 17398 3 17109 17398 17110 3 17110 17398 17399 3 17110 17399 17111 3 17111 17399 17400 3 17111 17400 17112 3 17112 17400 17401 3 17112 17401 17113 3 17113 17401 17402 3 17113 17402 17114 3 17114 17402 17403 3 17114 17403 17115 3 17115 17403 17404 3 17115 17404 17116 3 17116 17404 17405 3 17116 17405 17117 3 17117 17405 17406 3 17117 17406 17118 3 17118 17406 17407 3 17118 17407 17119 3 17119 17407 17408 3 17119 17408 17120 3 17120 17408 17409 3 17120 17409 17121 3 17121 17409 17410 3 17121 17410 17122 3 17122 17410 17411 3 17122 17411 17123 3 17123 17411 17412 3 17123 17412 17124 3 17124 17412 17413 3 17124 17413 17125 3 17125 17413 17414 3 17125 17414 17126 3 17126 17414 17415 3 17126 17415 17127 3 17127 17415 17416 3 17127 17416 17128 3 17128 17416 17417 3 17128 17417 17129 3 17129 17417 17418 3 17129 17418 17130 3 17130 17418 17419 3 17130 17419 17131 3 17131 17419 17420 3 17131 17420 17132 3 17132 17420 17421 3 17132 17421 17133 3 17133 17421 17422 3 17133 17422 17134 3 17134 17422 17423 3 17134 17423 17135 3 17135 17423 17424 3 17135 17424 17136 3 17136 17424 17425 3 17136 17425 17137 3 17137 17425 17426 3 17137 17426 17138 3 17138 17426 17427 3 17138 17427 17139 3 17139 17427 17428 3 17139 17428 17140 3 17140 17428 17429 3 17140 17429 17141 3 17141 17429 17430 3 17141 17430 17142 3 17142 17430 17431 3 17142 17431 17143 3 17143 17431 17432 3 17143 17432 17144 3 17144 17432 17433 3 17144 17433 17145 3 17145 17433 17434 3 17145 17434 17146 3 17146 17434 17147 3 17435 17147 17434 3 17147 
17435 17148 3 17436 17148 17435 3 17148 17436 17149 3 17437 17149 17436 3 17149 17437 17150 3 17438 17150 17437 3 17150 17438 17151 3 17439 17151 17438 3 17151 17439 17152 3 17440 17152 17439 3 17152 17440 17153 3 17441 17153 17440 3 17153 17441 17154 3 17442 17154 17441 3 17154 17442 17155 3 17443 17155 17442 3 17155 17443 17156 3 17444 17156 17443 3 17156 17444 17157 3 17445 17157 17444 3 17157 17445 17158 3 17446 17158 17445 3 17158 17446 17159 3 17447 17159 17446 3 17159 17447 17160 3 17448 17160 17447 3 17160 17448 17161 3 17449 17161 17448 3 17161 17449 17162 3 17450 17162 17449 3 17162 17450 17163 3 17451 17163 17450 3 17163 17451 17164 3 17452 17164 17451 3 17164 17452 17165 3 17453 17165 17452 3 17165 17453 17166 3 17454 17166 17453 3 17166 17454 17167 3 17455 17167 17454 3 17167 17455 17168 3 17456 17168 17455 3 17168 17456 17169 3 17457 17169 17456 3 17169 17457 17172 3 17460 17172 17457 3 17170 17458 17171 3 17459 17171 17458 3 17170 17175 17458 3 17463 17458 17175 3 17172 17460 17173 3 17461 17173 17460 3 17173 17461 17176 3 17464 17176 17461 3 17174 17462 17175 3 17463 17175 17462 3 17174 17178 17462 3 17466 17462 17178 3 17176 17464 17177 3 17465 17177 17464 3 17177 17465 17179 3 17467 17179 17465 3 17178 17180 17466 3 17468 17466 17180 3 17179 17467 17181 3 17469 17181 17467 3 17180 17182 17468 3 17470 17468 17182 3 17181 17469 17183 3 17471 17183 17469 3 17182 17184 17470 3 17472 17470 17184 3 17183 17471 17185 3 17473 17185 17471 3 17184 17186 17472 3 17474 17472 17186 3 17185 17473 17187 3 17475 17187 17473 3 17186 17188 17474 3 17476 17474 17188 3 17187 17475 17189 3 17477 17189 17475 3 17188 17190 17476 3 17478 17476 17190 3 17189 17477 17191 3 17479 17191 17477 3 17190 17192 17478 3 17480 17478 17192 3 17191 17479 17193 3 17481 17193 17479 3 17192 17194 17480 3 17482 17480 17194 3 17193 17481 17195 3 17483 17195 17481 3 17194 17196 17482 3 17484 17482 17196 3 17195 17483 17197 3 17485 17197 17483 3 17196 17198 17484 3 17486 17484 17198 3 17197 
17485 17199 3 17487 17199 17485 3 17198 17200 17486 3 17488 17486 17200 3 17199 17487 17201 3 17489 17201 17487 3 17200 17202 17488 3 17490 17488 17202 3 17201 17489 17203 3 17491 17203 17489 3 17202 17204 17490 3 17492 17490 17204 3 17203 17491 17205 3 17493 17205 17491 3 17204 17206 17492 3 17494 17492 17206 3 17205 17493 17207 3 17495 17207 17493 3 17206 17208 17494 3 17496 17494 17208 3 17207 17495 17209 3 17497 17209 17495 3 17208 17210 17496 3 17498 17496 17210 3 17209 17497 17211 3 17499 17211 17497 3 17210 17212 17498 3 17500 17498 17212 3 17211 17499 17213 3 17501 17213 17499 3 17212 17214 17500 3 17502 17500 17214 3 17213 17501 17215 3 17503 17215 17501 3 17214 17216 17502 3 17504 17502 17216 3 17215 17503 17217 3 17505 17217 17503 3 17216 17218 17504 3 17506 17504 17218 3 17217 17505 17219 3 17507 17219 17505 3 17218 17220 17506 3 17508 17506 17220 3 17219 17507 17221 3 17509 17221 17507 3 17220 17222 17508 3 17510 17508 17222 3 17221 17509 17223 3 17511 17223 17509 3 17222 17224 17510 3 17512 17510 17224 3 17223 17511 17225 3 17513 17225 17511 3 17224 17226 17512 3 17514 17512 17226 3 17225 17513 17227 3 17515 17227 17513 3 17226 17228 17514 3 17516 17514 17228 3 17227 17515 17517 3 17227 17517 17229 3 17228 17230 17516 3 17518 17516 17230 3 17229 17517 17519 3 17229 17519 17231 3 17230 17232 17520 3 17230 17520 17518 3 17231 17519 17521 3 17231 17521 17233 3 17232 17234 17522 3 17232 17522 17520 3 17233 17521 17523 3 17233 17523 17235 3 17234 17236 17524 3 17234 17524 17522 3 17235 17523 17525 3 17235 17525 17237 3 17236 17238 17526 3 17236 17526 17524 3 17237 17525 17527 3 17237 17527 17239 3 17238 17240 17528 3 17238 17528 17526 3 17239 17527 17529 3 17239 17529 17241 3 17240 17242 17530 3 17240 17530 17528 3 17241 17529 17531 3 17241 17531 17243 3 17242 17244 17532 3 17242 17532 17530 3 17243 17531 17533 3 17243 17533 17245 3 17244 17246 17534 3 17244 17534 17532 3 17245 17533 17535 3 17245 17535 17247 3 17246 17248 17536 3 17246 17536 17534 3 17247 
17535 17537 3 17247 17537 17249 3 17248 17250 17538 3 17248 17538 17536 3 17249 17537 17539 3 17249 17539 17251 3 17250 17252 17540 3 17250 17540 17538 3 17251 17539 17541 3 17251 17541 17253 3 17252 17254 17542 3 17252 17542 17540 3 17253 17541 17543 3 17253 17543 17255 3 17254 17256 17544 3 17254 17544 17542 3 17255 17543 17545 3 17255 17545 17257 3 17256 17258 17546 3 17256 17546 17544 3 17257 17545 17547 3 17257 17547 17259 3 17258 17260 17548 3 17258 17548 17546 3 17259 17547 17549 3 17259 17549 17261 3 17260 17262 17550 3 17260 17550 17548 3 17261 17549 17551 3 17261 17551 17263 3 17262 17264 17552 3 17262 17552 17550 3 17263 17551 17553 3 17263 17553 17265 3 17264 17266 17554 3 17264 17554 17552 3 17265 17553 17555 3 17265 17555 17267 3 17266 17268 17556 3 17266 17556 17554 3 17267 17555 17557 3 17267 17557 17269 3 17268 17270 17558 3 17268 17558 17556 3 17269 17557 17559 3 17269 17559 17271 3 17270 17272 17560 3 17270 17560 17558 3 17271 17559 17561 3 17271 17561 17273 3 17272 17274 17562 3 17272 17562 17560 3 17273 17561 17563 3 17273 17563 17275 3 17274 17276 17564 3 17274 17564 17562 3 17275 17563 17565 3 17275 17565 17277 3 17276 17278 17566 3 17276 17566 17564 3 17277 17565 17567 3 17277 17567 17279 3 17278 17280 17568 3 17278 17568 17566 3 17279 17567 17569 3 17279 17569 17281 3 17280 17282 17570 3 17280 17570 17568 3 17281 17569 17571 3 17281 17571 17283 3 17282 17284 17572 3 17282 17572 17570 3 17283 17571 17573 3 17283 17573 17285 3 17284 17286 17574 3 17284 17574 17572 3 17285 17573 17575 3 17285 17575 17287 3 17286 17288 17576 3 17286 17576 17574 3 17287 17575 17577 3 17287 17577 17289 3 17288 17290 17578 3 17288 17578 17576 3 17289 17577 17579 3 17289 17579 17291 3 17290 17292 17580 3 17290 17580 17578 3 17291 17579 17581 3 17291 17581 17293 3 17292 17294 17582 3 17292 17582 17580 3 17293 17581 17583 3 17293 17583 17295 3 17294 17296 17584 3 17294 17584 17582 3 17295 17583 17585 3 17295 17585 17297 3 17296 17298 17586 3 17296 17586 17584 3 17297 
17585 17587 3 17297 17587 17299 3 17298 17300 17588 3 17298 17588 17586 3 17299 17587 17589 3 17299 17589 17301 3 17300 17302 17590 3 17300 17590 17588 3 17301 17589 17591 3 17301 17591 17303 3 17302 17304 17592 3 17302 17592 17590 3 17303 17591 17593 3 17303 17593 17305 3 17304 17306 17594 3 17304 17594 17592 3 17305 17593 17595 3 17305 17595 17307 3 17306 17308 17596 3 17306 17596 17594 3 17307 17595 17597 3 17307 17597 17309 3 17308 17310 17598 3 17308 17598 17596 3 17309 17597 17311 3 17599 17311 17597 3 17310 17312 17598 3 17600 17598 17312 3 17311 17599 17315 3 17603 17315 17599 3 17312 17313 17600 3 17601 17600 17313 3 17313 17316 17601 3 17604 17601 17316 3 17314 17315 17602 3 17603 17602 17315 3 17314 17602 17319 3 17607 17319 17602 3 17316 17317 17604 3 17605 17604 17317 3 17317 17320 17605 3 17608 17605 17320 3 17318 17319 17606 3 17607 17606 17319 3 17318 17606 17388 3 17676 17388 17606 3 17320 17321 17608 3 17609 17608 17321 3 17321 17322 17609 3 17610 17609 17322 3 17322 17323 17610 3 17611 17610 17323 3 17323 17324 17611 3 17612 17611 17324 3 17324 17325 17612 3 17613 17612 17325 3 17325 17326 17613 3 17614 17613 17326 3 17326 17327 17614 3 17615 17614 17327 3 17327 17328 17615 3 17616 17615 17328 3 17328 17329 17616 3 17617 17616 17329 3 17329 17330 17617 3 17618 17617 17330 3 17330 17331 17618 3 17619 17618 17331 3 17331 17332 17619 3 17620 17619 17332 3 17332 17333 17620 3 17621 17620 17333 3 17333 17334 17621 3 17622 17621 17334 3 17334 17335 17622 3 17623 17622 17335 3 17335 17336 17623 3 17624 17623 17336 3 17336 17337 17624 3 17625 17624 17337 3 17337 17338 17625 3 17626 17625 17338 3 17338 17339 17626 3 17627 17626 17339 3 17339 17340 17627 3 17628 17627 17340 3 17340 17341 17628 3 17629 17628 17341 3 17341 17342 17629 3 17630 17629 17342 3 17342 17343 17630 3 17631 17630 17343 3 17343 17344 17631 3 17632 17631 17344 3 17344 17345 17632 3 17633 17632 17345 3 17345 17346 17633 3 17634 17633 17346 3 17346 17347 17634 3 17635 17634 17347 3 17347 
17348 17635 3 17636 17635 17348 3 17348 17349 17636 3 17637 17636 17349 3 17349 17350 17637 3 17638 17637 17350 3 17350 17351 17638 3 17639 17638 17351 3 17351 17352 17639 3 17640 17639 17352 3 17352 17353 17640 3 17641 17640 17353 3 17353 17354 17641 3 17642 17641 17354 3 17354 17355 17642 3 17643 17642 17355 3 17355 17356 17643 3 17644 17643 17356 3 17356 17357 17644 3 17645 17644 17357 3 17357 17358 17645 3 17646 17645 17358 3 17358 17359 17646 3 17647 17646 17359 3 17359 17360 17647 3 17648 17647 17360 3 17360 17361 17648 3 17649 17648 17361 3 17361 17362 17649 3 17650 17649 17362 3 17362 17363 17650 3 17651 17650 17363 3 17363 17364 17651 3 17652 17651 17364 3 17364 17365 17652 3 17653 17652 17365 3 17365 17366 17653 3 17654 17653 17366 3 17366 17367 17654 3 17655 17654 17367 3 17367 17368 17655 3 17656 17655 17368 3 17368 17369 17656 3 17657 17656 17369 3 17369 17370 17657 3 17658 17657 17370 3 17370 17371 17658 3 17659 17658 17371 3 17371 17372 17659 3 17660 17659 17372 3 17372 17373 17660 3 17661 17660 17373 3 17373 17374 17661 3 17662 17661 17374 3 17374 17375 17662 3 17663 17662 17375 3 17375 17376 17663 3 17664 17663 17376 3 17376 17377 17664 3 17665 17664 17377 3 17377 17378 17665 3 17666 17665 17378 3 17378 17379 17666 3 17667 17666 17379 3 17379 17380 17667 3 17668 17667 17380 3 17380 17381 17668 3 17669 17668 17381 3 17381 17382 17669 3 17670 17669 17382 3 17382 17383 17670 3 17671 17670 17383 3 17383 17384 17671 3 17672 17671 17384 3 17384 17385 17672 3 17673 17672 17385 3 17385 17386 17673 3 17674 17673 17386 3 17386 17387 17674 3 17675 17674 17387 3 17387 17388 17675 3 17676 17675 17388 3 17389 17677 17678 3 17389 17678 17390 3 17389 17459 17747 3 17389 17747 17677 3 17390 17678 17679 3 17390 17679 17391 3 17391 17679 17680 3 17391 17680 17392 3 17392 17680 17681 3 17392 17681 17393 3 17393 17681 17682 3 17393 17682 17394 3 17394 17682 17683 3 17394 17683 17395 3 17395 17683 17684 3 17395 17684 17396 3 17396 17684 17685 3 17396 17685 17397 3 17397 
17685 17686 3 17397 17686 17398 3 17398 17686 17687 3 17398 17687 17399 3 17399 17687 17688 3 17399 17688 17400 3 17400 17688 17689 3 17400 17689 17401 3 17401 17689 17690 3 17401 17690 17402 3 17402 17690 17691 3 17402 17691 17403 3 17403 17691 17692 3 17403 17692 17404 3 17404 17692 17693 3 17404 17693 17405 3 17405 17693 17694 3 17405 17694 17406 3 17406 17694 17695 3 17406 17695 17407 3 17407 17695 17696 3 17407 17696 17408 3 17408 17696 17697 3 17408 17697 17409 3 17409 17697 17698 3 17409 17698 17410 3 17410 17698 17699 3 17410 17699 17411 3 17411 17699 17700 3 17411 17700 17412 3 17412 17700 17701 3 17412 17701 17413 3 17413 17701 17702 3 17413 17702 17414 3 17414 17702 17703 3 17414 17703 17415 3 17415 17703 17704 3 17415 17704 17416 3 17416 17704 17705 3 17416 17705 17417 3 17417 17705 17706 3 17417 17706 17418 3 17418 17706 17707 3 17418 17707 17419 3 17419 17707 17708 3 17419 17708 17420 3 17420 17708 17709 3 17420 17709 17421 3 17421 17709 17710 3 17421 17710 17422 3 17422 17710 17711 3 17422 17711 17423 3 17423 17711 17712 3 17423 17712 17424 3 17424 17712 17713 3 17424 17713 17425 3 17425 17713 17714 3 17425 17714 17426 3 17426 17714 17715 3 17426 17715 17427 3 17427 17715 17716 3 17427 17716 17428 3 17428 17716 17717 3 17428 17717 17429 3 17429 17717 17718 3 17429 17718 17430 3 17430 17718 17719 3 17430 17719 17431 3 17431 17719 17720 3 17431 17720 17432 3 17432 17720 17721 3 17432 17721 17433 3 17433 17721 17722 3 17433 17722 17434 3 17434 17722 17723 3 17434 17723 17435 3 17435 17723 17724 3 17435 17724 17436 3 17436 17724 17725 3 17436 17725 17437 3 17437 17725 17726 3 17437 17726 17438 3 17438 17726 17727 3 17438 17727 17439 3 17439 17727 17728 3 17439 17728 17440 3 17440 17728 17729 3 17440 17729 17441 3 17441 17729 17730 3 17441 17730 17442 3 17442 17730 17731 3 17442 17731 17443 3 17443 17731 17732 3 17443 17732 17444 3 17444 17732 17733 3 17444 17733 17445 3 17445 17733 17734 3 17445 17734 17446 3 17446 17734 17735 3 17446 17735 17447 3 17447 
17735 17736 3 17447 17736 17448 3 17448 17736 17737 3 17448 17737 17449 3 17449 17737 17738 3 17449 17738 17450 3 17450 17738 17739 3 17450 17739 17451 3 17451 17739 17740 3 17451 17740 17452 3 17452 17740 17741 3 17452 17741 17453 3 17453 17741 17742 3 17453 17742 17454 3 17454 17742 17743 3 17454 17743 17455 3 17455 17743 17744 3 17455 17744 17456 3 17456 17744 17745 3 17456 17745 17457 3 17457 17745 17748 3 17457 17748 17460 3 17458 17746 17747 3 17458 17747 17459 3 17458 17463 17751 3 17458 17751 17746 3 17460 17748 17749 3 17460 17749 17461 3 17461 17749 17752 3 17461 17752 17464 3 17462 17750 17751 3 17462 17751 17463 3 17462 17466 17754 3 17462 17754 17750 3 17464 17752 17753 3 17464 17753 17465 3 17465 17753 17755 3 17465 17755 17467 3 17466 17468 17756 3 17466 17756 17754 3 17467 17755 17757 3 17467 17757 17469 3 17468 17470 17758 3 17468 17758 17756 3 17469 17757 17759 3 17469 17759 17471 3 17470 17472 17760 3 17470 17760 17758 3 17471 17759 17473 3 17761 17473 17759 3 17472 17474 17760 3 17762 17760 17474 3 17473 17761 17475 3 17763 17475 17761 3 17474 17476 17762 3 17764 17762 17476 3 17475 17763 17477 3 17765 17477 17763 3 17476 17478 17764 3 17766 17764 17478 3 17477 17765 17479 3 17767 17479 17765 3 17478 17480 17766 3 17768 17766 17480 3 17479 17767 17481 3 17769 17481 17767 3 17480 17482 17768 3 17770 17768 17482 3 17481 17769 17483 3 17771 17483 17769 3 17482 17484 17770 3 17772 17770 17484 3 17483 17771 17485 3 17773 17485 17771 3 17484 17486 17772 3 17774 17772 17486 3 17485 17773 17487 3 17775 17487 17773 3 17486 17488 17774 3 17776 17774 17488 3 17487 17775 17489 3 17777 17489 17775 3 17488 17490 17776 3 17778 17776 17490 3 17489 17777 17491 3 17779 17491 17777 3 17490 17492 17778 3 17780 17778 17492 3 17491 17779 17493 3 17781 17493 17779 3 17492 17494 17780 3 17782 17780 17494 3 17493 17781 17495 3 17783 17495 17781 3 17494 17496 17782 3 17784 17782 17496 3 17495 17783 17497 3 17785 17497 17783 3 17496 17498 17784 3 17786 17784 17498 3 17497 
17785 17499 3 17787 17499 17785 3 17498 17500 17786 3 17788 17786 17500 3 17499 17787 17501 3 17789 17501 17787 3 17500 17502 17788 3 17790 17788 17502 3 17501 17789 17503 3 17791 17503 17789 3 17502 17504 17790 3 17792 17790 17504 3 17503 17791 17505 3 17793 17505 17791 3 17504 17506 17792 3 17794 17792 17506 3 17505 17793 17507 3 17795 17507 17793 3 17506 17508 17794 3 17796 17794 17508 3 17507 17795 17509 3 17797 17509 17795 3 17508 17510 17796 3 17798 17796 17510 3 17509 17797 17511 3 17799 17511 17797 3 17510 17512 17798 3 17800 17798 17512 3 17511 17799 17513 3 17801 17513 17799 3 17512 17514 17800 3 17802 17800 17514 3 17513 17801 17515 3 17803 17515 17801 3 17514 17516 17802 3 17804 17802 17516 3 17515 17803 17517 3 17805 17517 17803 3 17516 17518 17804 3 17806 17804 17518 3 17517 17805 17519 3 17807 17519 17805 3 17518 17520 17806 3 17808 17806 17520 3 17519 17807 17521 3 17809 17521 17807 3 17520 17522 17808 3 17810 17808 17522 3 17521 17809 17523 3 17811 17523 17809 3 17522 17524 17810 3 17812 17810 17524 3 17523 17811 17525 3 17813 17525 17811 3 17524 17526 17812 3 17814 17812 17526 3 17525 17813 17527 3 17815 17527 17813 3 17526 17528 17814 3 17816 17814 17528 3 17527 17815 17529 3 17817 17529 17815 3 17528 17530 17816 3 17818 17816 17530 3 17529 17817 17531 3 17819 17531 17817 3 17530 17532 17818 3 17820 17818 17532 3 17531 17819 17533 3 17821 17533 17819 3 17532 17534 17820 3 17822 17820 17534 3 17533 17821 17535 3 17823 17535 17821 3 17534 17536 17822 3 17824 17822 17536 3 17535 17823 17537 3 17825 17537 17823 3 17536 17538 17824 3 17826 17824 17538 3 17537 17825 17539 3 17827 17539 17825 3 17538 17540 17826 3 17828 17826 17540 3 17539 17827 17541 3 17829 17541 17827 3 17540 17542 17828 3 17830 17828 17542 3 17541 17829 17543 3 17831 17543 17829 3 17542 17544 17830 3 17832 17830 17544 3 17543 17831 17545 3 17833 17545 17831 3 17544 17546 17832 3 17834 17832 17546 3 17545 17833 17547 3 17835 17547 17833 3 17546 17548 17834 3 17836 17834 17548 3 17547 
17835 17549 3 17837 17549 17835 3 17548 17550 17836 3 17838 17836 17550 3 17549 17837 17551 3 17839 17551 17837 3 17550 17552 17838 3 17840 17838 17552 3 17551 17839 17553 3 17841 17553 17839 3 17552 17554 17840 3 17842 17840 17554 3 17553 17841 17843 3 17553 17843 17555 3 17554 17556 17844 3 17554 17844 17842 3 17555 17843 17845 3 17555 17845 17557 3 17556 17558 17846 3 17556 17846 17844 3 17557 17845 17847 3 17557 17847 17559 3 17558 17560 17848 3 17558 17848 17846 3 17559 17847 17849 3 17559 17849 17561 3 17560 17562 17850 3 17560 17850 17848 3 17561 17849 17851 3 17561 17851 17563 3 17562 17564 17852 3 17562 17852 17850 3 17563 17851 17853 3 17563 17853 17565 3 17564 17566 17854 3 17564 17854 17852 3 17565 17853 17855 3 17565 17855 17567 3 17566 17568 17856 3 17566 17856 17854 3 17567 17855 17857 3 17567 17857 17569 3 17568 17570 17858 3 17568 17858 17856 3 17569 17857 17859 3 17569 17859 17571 3 17570 17572 17860 3 17570 17860 17858 3 17571 17859 17861 3 17571 17861 17573 3 17572 17574 17862 3 17572 17862 17860 3 17573 17861 17863 3 17573 17863 17575 3 17574 17576 17864 3 17574 17864 17862 3 17575 17863 17865 3 17575 17865 17577 3 17576 17578 17866 3 17576 17866 17864 3 17577 17865 17867 3 17577 17867 17579 3 17578 17580 17868 3 17578 17868 17866 3 17579 17867 17869 3 17579 17869 17581 3 17580 17582 17870 3 17580 17870 17868 3 17581 17869 17871 3 17581 17871 17583 3 17582 17584 17872 3 17582 17872 17870 3 17583 17871 17873 3 17583 17873 17585 3 17584 17586 17874 3 17584 17874 17872 3 17585 17873 17875 3 17585 17875 17587 3 17586 17588 17876 3 17586 17876 17874 3 17587 17875 17877 3 17587 17877 17589 3 17588 17590 17878 3 17588 17878 17876 3 17589 17877 17879 3 17589 17879 17591 3 17590 17592 17880 3 17590 17880 17878 3 17591 17879 17881 3 17591 17881 17593 3 17592 17594 17882 3 17592 17882 17880 3 17593 17881 17883 3 17593 17883 17595 3 17594 17596 17884 3 17594 17884 17882 3 17595 17883 17885 3 17595 17885 17597 3 17596 17598 17886 3 17596 17886 17884 3 17597 
17885 17887 3 17597 17887 17599 3 17598 17600 17888 3 17598 17888 17886 3 17599 17887 17891 3 17599 17891 17603 3 17600 17601 17889 3 17600 17889 17888 3 17601 17604 17892 3 17601 17892 17889 3 17602 17603 17891 3 17602 17891 17890 3 17602 17890 17895 3 17602 17895 17607 3 17604 17605 17893 3 17604 17893 17892 3 17605 17608 17896 3 17605 17896 17893 3 17606 17607 17895 3 17606 17895 17894 3 17606 17894 17964 3 17606 17964 17676 3 17608 17609 17897 3 17608 17897 17896 3 17609 17610 17898 3 17609 17898 17897 3 17610 17611 17899 3 17610 17899 17898 3 17611 17612 17900 3 17611 17900 17899 3 17612 17613 17901 3 17612 17901 17900 3 17613 17614 17902 3 17613 17902 17901 3 17614 17615 17903 3 17614 17903 17902 3 17615 17616 17904 3 17615 17904 17903 3 17616 17617 17905 3 17616 17905 17904 3 17617 17618 17906 3 17617 17906 17905 3 17618 17619 17907 3 17618 17907 17906 3 17619 17620 17908 3 17619 17908 17907 3 17620 17621 17909 3 17620 17909 17908 3 17621 17622 17910 3 17621 17910 17909 3 17622 17623 17911 3 17622 17911 17910 3 17623 17624 17912 3 17623 17912 17911 3 17624 17625 17913 3 17624 17913 17912 3 17625 17626 17914 3 17625 17914 17913 3 17626 17627 17915 3 17626 17915 17914 3 17627 17628 17916 3 17627 17916 17915 3 17628 17629 17917 3 17628 17917 17916 3 17629 17630 17918 3 17629 17918 17917 3 17630 17631 17919 3 17630 17919 17918 3 17631 17632 17920 3 17631 17920 17919 3 17632 17633 17921 3 17632 17921 17920 3 17633 17634 17922 3 17633 17922 17921 3 17634 17635 17923 3 17634 17923 17922 3 17635 17636 17924 3 17635 17924 17923 3 17636 17637 17924 3 17925 17924 17637 3 17637 17638 17925 3 17926 17925 17638 3 17638 17639 17926 3 17927 17926 17639 3 17639 17640 17927 3 17928 17927 17640 3 17640 17641 17928 3 17929 17928 17641 3 17641 17642 17929 3 17930 17929 17642 3 17642 17643 17930 3 17931 17930 17643 3 17643 17644 17931 3 17932 17931 17644 3 17644 17645 17932 3 17933 17932 17645 3 17645 17646 17933 3 17934 17933 17646 3 17646 17647 17934 3 17935 17934 17647 3 17647 
17648 17935 3 17936 17935 17648 3 17648 17649 17936 3 17937 17936 17649 3 17649 17650 17937 3 17938 17937 17650 3 17650 17651 17938 3 17939 17938 17651 3 17651 17652 17939 3 17940 17939 17652 3 17652 17653 17940 3 17941 17940 17653 3 17653 17654 17941 3 17942 17941 17654 3 17654 17655 17942 3 17943 17942 17655 3 17655 17656 17943 3 17944 17943 17656 3 17656 17657 17944 3 17945 17944 17657 3 17657 17658 17945 3 17946 17945 17658 3 17658 17659 17946 3 17947 17946 17659 3 17659 17660 17947 3 17948 17947 17660 3 17660 17661 17948 3 17949 17948 17661 3 17661 17662 17949 3 17950 17949 17662 3 17662 17663 17950 3 17951 17950 17663 3 17663 17664 17951 3 17952 17951 17664 3 17664 17665 17952 3 17953 17952 17665 3 17665 17666 17953 3 17954 17953 17666 3 17666 17667 17954 3 17955 17954 17667 3 17667 17668 17955 3 17956 17955 17668 3 17668 17669 17956 3 17957 17956 17669 3 17669 17670 17957 3 17958 17957 17670 3 17670 17671 17958 3 17959 17958 17671 3 17671 17672 17959 3 17960 17959 17672 3 17672 17673 17960 3 17961 17960 17673 3 17673 17674 17961 3 17962 17961 17674 3 17674 17675 17962 3 17963 17962 17675 3 17675 17676 17963 3 17964 17963 17676 3 17677 17965 17678 3 17966 17678 17965 3 17677 17747 17965 3 18035 17965 17747 3 17678 17966 17679 3 17967 17679 17966 3 17679 17967 17680 3 17968 17680 17967 3 17680 17968 17681 3 17969 17681 17968 3 17681 17969 17682 3 17970 17682 17969 3 17682 17970 17683 3 17971 17683 17970 3 17683 17971 17684 3 17972 17684 17971 3 17684 17972 17685 3 17973 17685 17972 3 17685 17973 17686 3 17974 17686 17973 3 17686 17974 17687 3 17975 17687 17974 3 17687 17975 17688 3 17976 17688 17975 3 17688 17976 17689 3 17977 17689 17976 3 17689 17977 17690 3 17978 17690 17977 3 17690 17978 17691 3 17979 17691 17978 3 17691 17979 17692 3 17980 17692 17979 3 17692 17980 17693 3 17981 17693 17980 3 17693 17981 17694 3 17982 17694 17981 3 17694 17982 17695 3 17983 17695 17982 3 17695 17983 17696 3 17984 17696 17983 3 17696 17984 17697 3 17985 17697 17984 3 17697 
17985 17698 3 17986 17698 17985 3 17698 17986 17699 3 17987 17699 17986 3 17699 17987 17700 3 17988 17700 17987 3 17700 17988 17701 3 17989 17701 17988 3 17701 17989 17702 3 17990 17702 17989 3 17702 17990 17703 3 17991 17703 17990 3 17703 17991 17704 3 17992 17704 17991 3 17704 17992 17705 3 17993 17705 17992 3 17705 17993 17706 3 17994 17706 17993 3 17706 17994 17707 3 17995 17707 17994 3 17707 17995 17708 3 17996 17708 17995 3 17708 17996 17709 3 17997 17709 17996 3 17709 17997 17710 3 17998 17710 17997 3 17710 17998 17711 3 17999 17711 17998 3 17711 17999 17712 3 18000 17712 17999 3 17712 18000 17713 3 18001 17713 18000 3 17713 18001 17714 3 18002 17714 18001 3 17714 18002 17715 3 18003 17715 18002 3 17715 18003 17716 3 18004 17716 18003 3 17716 18004 17717 3 18005 17717 18004 3 17717 18005 18006 3 17717 18006 17718 3 17718 18006 18007 3 17718 18007 17719 3 17719 18007 18008 3 17719 18008 17720 3 17720 18008 18009 3 17720 18009 17721 3 17721 18009 18010 3 17721 18010 17722 3 17722 18010 18011 3 17722 18011 17723 3 17723 18011 18012 3 17723 18012 17724 3 17724 18012 18013 3 17724 18013 17725 3 17725 18013 18014 3 17725 18014 17726 3 17726 18014 18015 3 17726 18015 17727 3 17727 18015 18016 3 17727 18016 17728 3 17728 18016 18017 3 17728 18017 17729 3 17729 18017 18018 3 17729 18018 17730 3 17730 18018 18019 3 17730 18019 17731 3 17731 18019 18020 3 17731 18020 17732 3 17732 18020 18021 3 17732 18021 17733 3 17733 18021 18022 3 17733 18022 17734 3 17734 18022 18023 3 17734 18023 17735 3 17735 18023 18024 3 17735 18024 17736 3 17736 18024 18025 3 17736 18025 17737 3 17737 18025 18026 3 17737 18026 17738 3 17738 18026 18027 3 17738 18027 17739 3 17739 18027 18028 3 17739 18028 17740 3 17740 18028 18029 3 17740 18029 17741 3 17741 18029 18030 3 17741 18030 17742 3 17742 18030 18031 3 17742 18031 17743 3 17743 18031 18032 3 17743 18032 17744 3 17744 18032 18033 3 17744 18033 17745 3 17745 18033 18036 3 17745 18036 17748 3 17746 18034 18035 3 17746 18035 17747 3 17746 
17751 18039 3 17746 18039 18034 3 17748 18036 18037 3 17748 18037 17749 3 17749 18037 18040 3 17749 18040 17752 3 17750 18038 18039 3 17750 18039 17751 3 17750 17754 18042 3 17750 18042 18038 3 17752 18040 18041 3 17752 18041 17753 3 17753 18041 18043 3 17753 18043 17755 3 17754 17756 18044 3 17754 18044 18042 3 17755 18043 18045 3 17755 18045 17757 3 17756 17758 18046 3 17756 18046 18044 3 17757 18045 18047 3 17757 18047 17759 3 17758 17760 18048 3 17758 18048 18046 3 17759 18047 18049 3 17759 18049 17761 3 17760 17762 18050 3 17760 18050 18048 3 17761 18049 18051 3 17761 18051 17763 3 17762 17764 18052 3 17762 18052 18050 3 17763 18051 18053 3 17763 18053 17765 3 17764 17766 18054 3 17764 18054 18052 3 17765 18053 18055 3 17765 18055 17767 3 17766 17768 18056 3 17766 18056 18054 3 17767 18055 18057 3 17767 18057 17769 3 17768 17770 18058 3 17768 18058 18056 3 17769 18057 18059 3 17769 18059 17771 3 17770 17772 18060 3 17770 18060 18058 3 17771 18059 18061 3 17771 18061 17773 3 17772 17774 18062 3 17772 18062 18060 3 17773 18061 18063 3 17773 18063 17775 3 17774 17776 18064 3 17774 18064 18062 3 17775 18063 18065 3 17775 18065 17777 3 17776 17778 18066 3 17776 18066 18064 3 17777 18065 18067 3 17777 18067 17779 3 17778 17780 18068 3 17778 18068 18066 3 17779 18067 18069 3 17779 18069 17781 3 17780 17782 18070 3 17780 18070 18068 3 17781 18069 18071 3 17781 18071 17783 3 17782 17784 18072 3 17782 18072 18070 3 17783 18071 18073 3 17783 18073 17785 3 17784 17786 18074 3 17784 18074 18072 3 17785 18073 18075 3 17785 18075 17787 3 17786 17788 18076 3 17786 18076 18074 3 17787 18075 18077 3 17787 18077 17789 3 17788 17790 18078 3 17788 18078 18076 3 17789 18077 18079 3 17789 18079 17791 3 17790 17792 18080 3 17790 18080 18078 3 17791 18079 18081 3 17791 18081 17793 3 17792 17794 18082 3 17792 18082 18080 3 17793 18081 18083 3 17793 18083 17795 3 17794 17796 18084 3 17794 18084 18082 3 17795 18083 18085 3 17795 18085 17797 3 17796 17798 18086 3 17796 18086 18084 3 17797 
18085 18087 3 17797 18087 17799 3 17798 17800 18088 3 17798 18088 18086 3 17799 18087 17801 3 18089 17801 18087 3 17800 17802 18088 3 18090 18088 17802 3 17801 18089 17803 3 18091 17803 18089 3 17802 17804 18090 3 18092 18090 17804 3 17803 18091 17805 3 18093 17805 18091 3 17804 17806 18092 3 18094 18092 17806 3 17805 18093 17807 3 18095 17807 18093 3 17806 17808 18094 3 18096 18094 17808 3 17807 18095 17809 3 18097 17809 18095 3 17808 17810 18096 3 18098 18096 17810 3 17809 18097 17811 3 18099 17811 18097 3 17810 17812 18098 3 18100 18098 17812 3 17811 18099 17813 3 18101 17813 18099 3 17812 17814 18100 3 18102 18100 17814 3 17813 18101 17815 3 18103 17815 18101 3 17814 17816 18102 3 18104 18102 17816 3 17815 18103 17817 3 18105 17817 18103 3 17816 17818 18104 3 18106 18104 17818 3 17817 18105 17819 3 18107 17819 18105 3 17818 17820 18106 3 18108 18106 17820 3 17819 18107 17821 3 18109 17821 18107 3 17820 17822 18108 3 18110 18108 17822 3 17821 18109 17823 3 18111 17823 18109 3 17822 17824 18110 3 18112 18110 17824 3 17823 18111 17825 3 18113 17825 18111 3 17824 17826 18112 3 18114 18112 17826 3 17825 18113 17827 3 18115 17827 18113 3 17826 17828 18114 3 18116 18114 17828 3 17827 18115 17829 3 18117 17829 18115 3 17828 17830 18116 3 18118 18116 17830 3 17829 18117 17831 3 18119 17831 18117 3 17830 17832 18118 3 18120 18118 17832 3 17831 18119 17833 3 18121 17833 18119 3 17832 17834 18120 3 18122 18120 17834 3 17833 18121 17835 3 18123 17835 18121 3 17834 17836 18122 3 18124 18122 17836 3 17835 18123 17837 3 18125 17837 18123 3 17836 17838 18124 3 18126 18124 17838 3 17837 18125 17839 3 18127 17839 18125 3 17838 17840 18126 3 18128 18126 17840 3 17839 18127 17841 3 18129 17841 18127 3 17840 17842 18128 3 18130 18128 17842 3 17841 18129 17843 3 18131 17843 18129 3 17842 17844 18130 3 18132 18130 17844 3 17843 18131 17845 3 18133 17845 18131 3 17844 17846 18132 3 18134 18132 17846 3 17845 18133 17847 3 18135 17847 18133 3 17846 17848 18134 3 18136 18134 17848 3 17847 
18135 17849 3 18137 17849 18135 3 17848 17850 18136 3 18138 18136 17850 3 17849 18137 17851 3 18139 17851 18137 3 17850 17852 18138 3 18140 18138 17852 3 17851 18139 17853 3 18141 17853 18139 3 17852 17854 18140 3 18142 18140 17854 3 17853 18141 17855 3 18143 17855 18141 3 17854 17856 18142 3 18144 18142 17856 3 17855 18143 17857 3 18145 17857 18143 3 17856 17858 18144 3 18146 18144 17858 3 17857 18145 17859 3 18147 17859 18145 3 17858 17860 18146 3 18148 18146 17860 3 17859 18147 17861 3 18149 17861 18147 3 17860 17862 18148 3 18150 18148 17862 3 17861 18149 17863 3 18151 17863 18149 3 17862 17864 18150 3 18152 18150 17864 3 17863 18151 17865 3 18153 17865 18151 3 17864 17866 18152 3 18154 18152 17866 3 17865 18153 17867 3 18155 17867 18153 3 17866 17868 18154 3 18156 18154 17868 3 17867 18155 17869 3 18157 17869 18155 3 17868 17870 18156 3 18158 18156 17870 3 17869 18157 17871 3 18159 17871 18157 3 17870 17872 18158 3 18160 18158 17872 3 17871 18159 17873 3 18161 17873 18159 3 17872 17874 18160 3 18162 18160 17874 3 17873 18161 17875 3 18163 17875 18161 3 17874 17876 18162 3 18164 18162 17876 3 17875 18163 17877 3 18165 17877 18163 3 17876 17878 18164 3 18166 18164 17878 3 17877 18165 17879 3 18167 17879 18165 3 17878 17880 18166 3 18168 18166 17880 3 17879 18167 17881 3 18169 17881 18167 3 17880 17882 18168 3 18170 18168 17882 3 17881 18169 17883 3 18171 17883 18169 3 17882 17884 18172 3 17882 18172 18170 3 17883 18171 18173 3 17883 18173 17885 3 17884 17886 18174 3 17884 18174 18172 3 17885 18173 18175 3 17885 18175 17887 3 17886 17888 18176 3 17886 18176 18174 3 17887 18175 18179 3 17887 18179 17891 3 17888 17889 18177 3 17888 18177 18176 3 17889 17892 18180 3 17889 18180 18177 3 17890 17891 18179 3 17890 18179 18178 3 17890 18178 18183 3 17890 18183 17895 3 17892 17893 18181 3 17892 18181 18180 3 17893 17896 18184 3 17893 18184 18181 3 17894 17895 18183 3 17894 18183 18182 3 17894 18182 18252 3 17894 18252 17964 3 17896 17897 18185 3 17896 18185 18184 3 17897 
17898 18186 3 17897 18186 18185 3 17898 17899 18187 3 17898 18187 18186 3 17899 17900 18188 3 17899 18188 18187 3 17900 17901 18189 3 17900 18189 18188 3 17901 17902 18190 3 17901 18190 18189 3 17902 17903 18191 3 17902 18191 18190 3 17903 17904 18192 3 17903 18192 18191 3 17904 17905 18193 3 17904 18193 18192 3 17905 17906 18194 3 17905 18194 18193 3 17906 17907 18195 3 17906 18195 18194 3 17907 17908 18196 3 17907 18196 18195 3 17908 17909 18197 3 17908 18197 18196 3 17909 17910 18198 3 17909 18198 18197 3 17910 17911 18199 3 17910 18199 18198 3 17911 17912 18200 3 17911 18200 18199 3 17912 17913 18201 3 17912 18201 18200 3 17913 17914 18202 3 17913 18202 18201 3 17914 17915 18203 3 17914 18203 18202 3 17915 17916 18204 3 17915 18204 18203 3 17916 17917 18205 3 17916 18205 18204 3 17917 17918 18206 3 17917 18206 18205 3 17918 17919 18207 3 17918 18207 18206 3 17919 17920 18208 3 17919 18208 18207 3 17920 17921 18209 3 17920 18209 18208 3 17921 17922 18210 3 17921 18210 18209 3 17922 17923 18211 3 17922 18211 18210 3 17923 17924 18212 3 17923 18212 18211 3 17924 17925 18213 3 17924 18213 18212 3 17925 17926 18214 3 17925 18214 18213 3 17926 17927 18215 3 17926 18215 18214 3 17927 17928 18216 3 17927 18216 18215 3 17928 17929 18217 3 17928 18217 18216 3 17929 17930 18218 3 17929 18218 18217 3 17930 17931 18219 3 17930 18219 18218 3 17931 17932 18220 3 17931 18220 18219 3 17932 17933 18221 3 17932 18221 18220 3 17933 17934 18222 3 17933 18222 18221 3 17934 17935 18223 3 17934 18223 18222 3 17935 17936 18224 3 17935 18224 18223 3 17936 17937 18225 3 17936 18225 18224 3 17937 17938 18226 3 17937 18226 18225 3 17938 17939 18227 3 17938 18227 18226 3 17939 17940 18228 3 17939 18228 18227 3 17940 17941 18229 3 17940 18229 18228 3 17941 17942 18230 3 17941 18230 18229 3 17942 17943 18231 3 17942 18231 18230 3 17943 17944 18232 3 17943 18232 18231 3 17944 17945 18233 3 17944 18233 18232 3 17945 17946 18234 3 17945 18234 18233 3 17946 17947 18235 3 17946 18235 18234 3 17947 
17948 18236 3 17947 18236 18235 3 17948 17949 18237 3 17948 18237 18236 3 17949 17950 18238 3 17949 18238 18237 3 17950 17951 18239 3 17950 18239 18238 3 17951 17952 18240 3 17951 18240 18239 3 17952 17953 18241 3 17952 18241 18240 3 17953 17954 18242 3 17953 18242 18241 3 17954 17955 18243 3 17954 18243 18242 3 17955 17956 18244 3 17955 18244 18243 3 17956 17957 18245 3 17956 18245 18244 3 17957 17958 18246 3 17957 18246 18245 3 17958 17959 18247 3 17958 18247 18246 3 17959 17960 18248 3 17959 18248 18247 3 17960 17961 18249 3 17960 18249 18248 3 17961 17962 18250 3 17961 18250 18249 3 17962 17963 18251 3 17962 18251 18250 3 17963 17964 18252 3 17963 18252 18251 3 17965 18253 17966 3 18254 17966 18253 3 17965 18035 18253 3 18323 18253 18035 3 17966 18254 17967 3 18255 17967 18254 3 17967 18255 17968 3 18256 17968 18255 3 17968 18256 17969 3 18257 17969 18256 3 17969 18257 17970 3 18258 17970 18257 3 17970 18258 17971 3 18259 17971 18258 3 17971 18259 17972 3 18260 17972 18259 3 17972 18260 17973 3 18261 17973 18260 3 17973 18261 17974 3 18262 17974 18261 3 17974 18262 17975 3 18263 17975 18262 3 17975 18263 17976 3 18264 17976 18263 3 17976 18264 17977 3 18265 17977 18264 3 17977 18265 17978 3 18266 17978 18265 3 17978 18266 17979 3 18267 17979 18266 3 17979 18267 17980 3 18268 17980 18267 3 17980 18268 17981 3 18269 17981 18268 3 17981 18269 17982 3 18270 17982 18269 3 17982 18270 17983 3 18271 17983 18270 3 17983 18271 17984 3 18272 17984 18271 3 17984 18272 17985 3 18273 17985 18272 3 17985 18273 17986 3 18274 17986 18273 3 17986 18274 17987 3 18275 17987 18274 3 17987 18275 17988 3 18276 17988 18275 3 17988 18276 17989 3 18277 17989 18276 3 17989 18277 17990 3 18278 17990 18277 3 17990 18278 17991 3 18279 17991 18278 3 17991 18279 17992 3 18280 17992 18279 3 17992 18280 17993 3 18281 17993 18280 3 17993 18281 17994 3 18282 17994 18281 3 17994 18282 17995 3 18283 17995 18282 3 17995 18283 17996 3 18284 17996 18283 3 17996 18284 17997 3 18285 17997 18284 3 17997 
18285 17998 3 18286 17998 18285 3 17998 18286 17999 3 18287 17999 18286 3 17999 18287 18000 3 18288 18000 18287 3 18000 18288 18001 3 18289 18001 18288 3 18001 18289 18002 3 18290 18002 18289 3 18002 18290 18003 3 18291 18003 18290 3 18003 18291 18004 3 18292 18004 18291 3 18004 18292 18005 3 18293 18005 18292 3 18005 18293 18006 3 18294 18006 18293 3 18006 18294 18007 3 18295 18007 18294 3 18007 18295 18008 3 18296 18008 18295 3 18008 18296 18009 3 18297 18009 18296 3 18009 18297 18010 3 18298 18010 18297 3 18010 18298 18011 3 18299 18011 18298 3 18011 18299 18012 3 18300 18012 18299 3 18012 18300 18013 3 18301 18013 18300 3 18013 18301 18014 3 18302 18014 18301 3 18014 18302 18015 3 18303 18015 18302 3 18015 18303 18016 3 18304 18016 18303 3 18016 18304 18017 3 18305 18017 18304 3 18017 18305 18018 3 18306 18018 18305 3 18018 18306 18019 3 18307 18019 18306 3 18019 18307 18020 3 18308 18020 18307 3 18020 18308 18021 3 18309 18021 18308 3 18021 18309 18022 3 18310 18022 18309 3 18022 18310 18023 3 18311 18023 18310 3 18023 18311 18024 3 18312 18024 18311 3 18024 18312 18025 3 18313 18025 18312 3 18025 18313 18026 3 18314 18026 18313 3 18026 18314 18027 3 18315 18027 18314 3 18027 18315 18028 3 18316 18028 18315 3 18028 18316 18029 3 18317 18029 18316 3 18029 18317 18030 3 18318 18030 18317 3 18030 18318 18031 3 18319 18031 18318 3 18031 18319 18032 3 18320 18032 18319 3 18032 18320 18033 3 18321 18033 18320 3 18033 18321 18036 3 18324 18036 18321 3 18034 18322 18035 3 18323 18035 18322 3 18034 18039 18322 3 18327 18322 18039 3 18036 18324 18037 3 18325 18037 18324 3 18037 18325 18040 3 18328 18040 18325 3 18038 18326 18039 3 18327 18039 18326 3 18038 18042 18326 3 18330 18326 18042 3 18040 18328 18041 3 18329 18041 18328 3 18041 18329 18043 3 18331 18043 18329 3 18042 18044 18330 3 18332 18330 18044 3 18043 18331 18045 3 18333 18045 18331 3 18044 18046 18332 3 18334 18332 18046 3 18045 18333 18047 3 18335 18047 18333 3 18046 18048 18336 3 18046 18336 18334 3 18047 
18335 18049 3 18337 18049 18335 3 18048 18050 18338 3 18048 18338 18336 3 18049 18337 18339 3 18049 18339 18051 3 18050 18052 18340 3 18050 18340 18338 3 18051 18339 18341 3 18051 18341 18053 3 18052 18054 18342 3 18052 18342 18340 3 18053 18341 18343 3 18053 18343 18055 3 18054 18056 18344 3 18054 18344 18342 3 18055 18343 18345 3 18055 18345 18057 3 18056 18058 18346 3 18056 18346 18344 3 18057 18345 18347 3 18057 18347 18059 3 18058 18060 18348 3 18058 18348 18346 3 18059 18347 18349 3 18059 18349 18061 3 18060 18062 18350 3 18060 18350 18348 3 18061 18349 18351 3 18061 18351 18063 3 18062 18064 18352 3 18062 18352 18350 3 18063 18351 18353 3 18063 18353 18065 3 18064 18066 18354 3 18064 18354 18352 3 18065 18353 18355 3 18065 18355 18067 3 18066 18068 18356 3 18066 18356 18354 3 18067 18355 18357 3 18067 18357 18069 3 18068 18070 18358 3 18068 18358 18356 3 18069 18357 18359 3 18069 18359 18071 3 18070 18072 18360 3 18070 18360 18358 3 18071 18359 18361 3 18071 18361 18073 3 18072 18074 18362 3 18072 18362 18360 3 18073 18361 18363 3 18073 18363 18075 3 18074 18076 18364 3 18074 18364 18362 3 18075 18363 18365 3 18075 18365 18077 3 18076 18078 18366 3 18076 18366 18364 3 18077 18365 18367 3 18077 18367 18079 3 18078 18080 18368 3 18078 18368 18366 3 18079 18367 18369 3 18079 18369 18081 3 18080 18082 18370 3 18080 18370 18368 3 18081 18369 18371 3 18081 18371 18083 3 18082 18084 18372 3 18082 18372 18370 3 18083 18371 18373 3 18083 18373 18085 3 18084 18086 18374 3 18084 18374 18372 3 18085 18373 18375 3 18085 18375 18087 3 18086 18088 18376 3 18086 18376 18374 3 18087 18375 18377 3 18087 18377 18089 3 18088 18090 18378 3 18088 18378 18376 3 18089 18377 18379 3 18089 18379 18091 3 18090 18092 18380 3 18090 18380 18378 3 18091 18379 18381 3 18091 18381 18093 3 18092 18094 18382 3 18092 18382 18380 3 18093 18381 18383 3 18093 18383 18095 3 18094 18096 18384 3 18094 18384 18382 3 18095 18383 18385 3 18095 18385 18097 3 18096 18098 18386 3 18096 18386 18384 3 18097 
18385 18387 3 18097 18387 18099 3 18098 18100 18388 3 18098 18388 18386 3 18099 18387 18389 3 18099 18389 18101 3 18100 18102 18390 3 18100 18390 18388 3 18101 18389 18391 3 18101 18391 18103 3 18102 18104 18392 3 18102 18392 18390 3 18103 18391 18393 3 18103 18393 18105 3 18104 18106 18394 3 18104 18394 18392 3 18105 18393 18395 3 18105 18395 18107 3 18106 18108 18396 3 18106 18396 18394 3 18107 18395 18397 3 18107 18397 18109 3 18108 18110 18398 3 18108 18398 18396 3 18109 18397 18399 3 18109 18399 18111 3 18110 18112 18400 3 18110 18400 18398 3 18111 18399 18401 3 18111 18401 18113 3 18112 18114 18402 3 18112 18402 18400 3 18113 18401 18403 3 18113 18403 18115 3 18114 18116 18404 3 18114 18404 18402 3 18115 18403 18405 3 18115 18405 18117 3 18116 18118 18406 3 18116 18406 18404 3 18117 18405 18407 3 18117 18407 18119 3 18118 18120 18408 3 18118 18408 18406 3 18119 18407 18409 3 18119 18409 18121 3 18120 18122 18410 3 18120 18410 18408 3 18121 18409 18411 3 18121 18411 18123 3 18122 18124 18412 3 18122 18412 18410 3 18123 18411 18413 3 18123 18413 18125 3 18124 18126 18414 3 18124 18414 18412 3 18125 18413 18415 3 18125 18415 18127 3 18126 18128 18416 3 18126 18416 18414 3 18127 18415 18417 3 18127 18417 18129 3 18128 18130 18418 3 18128 18418 18416 3 18129 18417 18419 3 18129 18419 18131 3 18130 18132 18418 3 18420 18418 18132 3 18131 18419 18133 3 18421 18133 18419 3 18132 18134 18420 3 18422 18420 18134 3 18133 18421 18135 3 18423 18135 18421 3 18134 18136 18422 3 18424 18422 18136 3 18135 18423 18137 3 18425 18137 18423 3 18136 18138 18424 3 18426 18424 18138 3 18137 18425 18139 3 18427 18139 18425 3 18138 18140 18426 3 18428 18426 18140 3 18139 18427 18141 3 18429 18141 18427 3 18140 18142 18428 3 18430 18428 18142 3 18141 18429 18143 3 18431 18143 18429 3 18142 18144 18430 3 18432 18430 18144 3 18143 18431 18145 3 18433 18145 18431 3 18144 18146 18432 3 18434 18432 18146 3 18145 18433 18147 3 18435 18147 18433 3 18146 18148 18434 3 18436 18434 18148 3 18147 
18435 18149 3 18437 18149 18435 3 18148 18150 18436 3 18438 18436 18150 3 18149 18437 18151 3 18439 18151 18437 3 18150 18152 18438 3 18440 18438 18152 3 18151 18439 18153 3 18441 18153 18439 3 18152 18154 18440 3 18442 18440 18154 3 18153 18441 18155 3 18443 18155 18441 3 18154 18156 18442 3 18444 18442 18156 3 18155 18443 18157 3 18445 18157 18443 3 18156 18158 18444 3 18446 18444 18158 3 18157 18445 18159 3 18447 18159 18445 3 18158 18160 18446 3 18448 18446 18160 3 18159 18447 18161 3 18449 18161 18447 3 18160 18162 18448 3 18450 18448 18162 3 18161 18449 18163 3 18451 18163 18449 3 18162 18164 18450 3 18452 18450 18164 3 18163 18451 18165 3 18453 18165 18451 3 18164 18166 18452 3 18454 18452 18166 3 18165 18453 18167 3 18455 18167 18453 3 18166 18168 18454 3 18456 18454 18168 3 18167 18455 18169 3 18457 18169 18455 3 18168 18170 18456 3 18458 18456 18170 3 18169 18457 18171 3 18459 18171 18457 3 18170 18172 18458 3 18460 18458 18172 3 18171 18459 18173 3 18461 18173 18459 3 18172 18174 18460 3 18462 18460 18174 3 18173 18461 18175 3 18463 18175 18461 3 18174 18176 18462 3 18464 18462 18176 3 18175 18463 18179 3 18467 18179 18463 3 18176 18177 18464 3 18465 18464 18177 3 18177 18180 18465 3 18468 18465 18180 3 18178 18179 18466 3 18467 18466 18179 3 18178 18466 18183 3 18471 18183 18466 3 18180 18181 18468 3 18469 18468 18181 3 18181 18184 18469 3 18472 18469 18184 3 18182 18183 18470 3 18471 18470 18183 3 18182 18470 18252 3 18540 18252 18470 3 18184 18185 18472 3 18473 18472 18185 3 18185 18186 18473 3 18474 18473 18186 3 18186 18187 18474 3 18475 18474 18187 3 18187 18188 18475 3 18476 18475 18188 3 18188 18189 18476 3 18477 18476 18189 3 18189 18190 18477 3 18478 18477 18190 3 18190 18191 18478 3 18479 18478 18191 3 18191 18192 18479 3 18480 18479 18192 3 18192 18193 18480 3 18481 18480 18193 3 18193 18194 18481 3 18482 18481 18194 3 18194 18195 18482 3 18483 18482 18195 3 18195 18196 18483 3 18484 18483 18196 3 18196 18197 18484 3 18485 18484 18197 3 18197 
18198 18485 3 18486 18485 18198 3 18198 18199 18486 3 18487 18486 18199 3 18199 18200 18487 3 18488 18487 18200 3 18200 18201 18488 3 18489 18488 18201 3 18201 18202 18489 3 18490 18489 18202 3 18202 18203 18490 3 18491 18490 18203 3 18203 18204 18491 3 18492 18491 18204 3 18204 18205 18492 3 18493 18492 18205 3 18205 18206 18493 3 18494 18493 18206 3 18206 18207 18494 3 18495 18494 18207 3 18207 18208 18495 3 18496 18495 18208 3 18208 18209 18496 3 18497 18496 18209 3 18209 18210 18497 3 18498 18497 18210 3 18210 18211 18498 3 18499 18498 18211 3 18211 18212 18499 3 18500 18499 18212 3 18212 18213 18500 3 18501 18500 18213 3 18213 18214 18502 3 18213 18502 18501 3 18214 18215 18503 3 18214 18503 18502 3 18215 18216 18504 3 18215 18504 18503 3 18216 18217 18505 3 18216 18505 18504 3 18217 18218 18506 3 18217 18506 18505 3 18218 18219 18507 3 18218 18507 18506 3 18219 18220 18508 3 18219 18508 18507 3 18220 18221 18509 3 18220 18509 18508 3 18221 18222 18510 3 18221 18510 18509 3 18222 18223 18511 3 18222 18511 18510 3 18223 18224 18512 3 18223 18512 18511 3 18224 18225 18513 3 18224 18513 18512 3 18225 18226 18514 3 18225 18514 18513 3 18226 18227 18515 3 18226 18515 18514 3 18227 18228 18516 3 18227 18516 18515 3 18228 18229 18517 3 18228 18517 18516 3 18229 18230 18518 3 18229 18518 18517 3 18230 18231 18519 3 18230 18519 18518 3 18231 18232 18520 3 18231 18520 18519 3 18232 18233 18521 3 18232 18521 18520 3 18233 18234 18522 3 18233 18522 18521 3 18234 18235 18523 3 18234 18523 18522 3 18235 18236 18524 3 18235 18524 18523 3 18236 18237 18525 3 18236 18525 18524 3 18237 18238 18526 3 18237 18526 18525 3 18238 18239 18527 3 18238 18527 18526 3 18239 18240 18528 3 18239 18528 18527 3 18240 18241 18529 3 18240 18529 18528 3 18241 18242 18530 3 18241 18530 18529 3 18242 18243 18531 3 18242 18531 18530 3 18243 18244 18532 3 18243 18532 18531 3 18244 18245 18533 3 18244 18533 18532 3 18245 18246 18534 3 18245 18534 18533 3 18246 18247 18535 3 18246 18535 18534 3 18247 
18248 18536 3 18247 18536 18535 3 18248 18249 18537 3 18248 18537 18536 3 18249 18250 18538 3 18249 18538 18537 3 18250 18251 18539 3 18250 18539 18538 3 18251 18252 18540 3 18251 18540 18539 3 18253 18541 18542 3 18253 18542 18254 3 18253 18323 18611 3 18253 18611 18541 3 18254 18542 18543 3 18254 18543 18255 3 18255 18543 18544 3 18255 18544 18256 3 18256 18544 18545 3 18256 18545 18257 3 18257 18545 18546 3 18257 18546 18258 3 18258 18546 18547 3 18258 18547 18259 3 18259 18547 18548 3 18259 18548 18260 3 18260 18548 18549 3 18260 18549 18261 3 18261 18549 18550 3 18261 18550 18262 3 18262 18550 18551 3 18262 18551 18263 3 18263 18551 18552 3 18263 18552 18264 3 18264 18552 18553 3 18264 18553 18265 3 18265 18553 18554 3 18265 18554 18266 3 18266 18554 18555 3 18266 18555 18267 3 18267 18555 18556 3 18267 18556 18268 3 18268 18556 18557 3 18268 18557 18269 3 18269 18557 18558 3 18269 18558 18270 3 18270 18558 18559 3 18270 18559 18271 3 18271 18559 18560 3 18271 18560 18272 3 18272 18560 18561 3 18272 18561 18273 3 18273 18561 18562 3 18273 18562 18274 3 18274 18562 18563 3 18274 18563 18275 3 18275 18563 18564 3 18275 18564 18276 3 18276 18564 18565 3 18276 18565 18277 3 18277 18565 18566 3 18277 18566 18278 3 18278 18566 18567 3 18278 18567 18279 3 18279 18567 18568 3 18279 18568 18280 3 18280 18568 18569 3 18280 18569 18281 3 18281 18569 18570 3 18281 18570 18282 3 18282 18570 18571 3 18282 18571 18283 3 18283 18571 18572 3 18283 18572 18284 3 18284 18572 18573 3 18284 18573 18285 3 18285 18573 18574 3 18285 18574 18286 3 18286 18574 18575 3 18286 18575 18287 3 18287 18575 18576 3 18287 18576 18288 3 18288 18576 18577 3 18288 18577 18289 3 18289 18577 18578 3 18289 18578 18290 3 18290 18578 18579 3 18290 18579 18291 3 18291 18579 18580 3 18291 18580 18292 3 18292 18580 18581 3 18292 18581 18293 3 18293 18581 18582 3 18293 18582 18294 3 18294 18582 18583 3 18294 18583 18295 3 18295 18583 18584 3 18295 18584 18296 3 18296 18584 18585 3 18296 18585 18297 3 18297 
18585 18586 3 18297 18586 18298 3 18298 18586 18299 3 18587 18299 18586 3 18299 18587 18300 3 18588 18300 18587 3 18300 18588 18301 3 18589 18301 18588 3 18301 18589 18302 3 18590 18302 18589 3 18302 18590 18303 3 18591 18303 18590 3 18303 18591 18304 3 18592 18304 18591 3 18304 18592 18305 3 18593 18305 18592 3 18305 18593 18306 3 18594 18306 18593 3 18306 18594 18307 3 18595 18307 18594 3 18307 18595 18308 3 18596 18308 18595 3 18308 18596 18309 3 18597 18309 18596 3 18309 18597 18310 3 18598 18310 18597 3 18310 18598 18311 3 18599 18311 18598 3 18311 18599 18312 3 18600 18312 18599 3 18312 18600 18313 3 18601 18313 18600 3 18313 18601 18314 3 18602 18314 18601 3 18314 18602 18315 3 18603 18315 18602 3 18315 18603 18316 3 18604 18316 18603 3 18316 18604 18317 3 18605 18317 18604 3 18317 18605 18318 3 18606 18318 18605 3 18318 18606 18319 3 18607 18319 18606 3 18319 18607 18320 3 18608 18320 18607 3 18320 18608 18321 3 18609 18321 18608 3 18321 18609 18324 3 18612 18324 18609 3 18322 18610 18323 3 18611 18323 18610 3 18322 18327 18610 3 18615 18610 18327 3 18324 18612 18325 3 18613 18325 18612 3 18325 18613 18328 3 18616 18328 18613 3 18326 18614 18327 3 18615 18327 18614 3 18326 18330 18614 3 18618 18614 18330 3 18328 18616 18329 3 18617 18329 18616 3 18329 18617 18331 3 18619 18331 18617 3 18330 18332 18618 3 18620 18618 18332 3 18331 18619 18333 3 18621 18333 18619 3 18332 18334 18620 3 18622 18620 18334 3 18333 18621 18335 3 18623 18335 18621 3 18334 18336 18622 3 18624 18622 18336 3 18335 18623 18337 3 18625 18337 18623 3 18336 18338 18624 3 18626 18624 18338 3 18337 18625 18339 3 18627 18339 18625 3 18338 18340 18626 3 18628 18626 18340 3 18339 18627 18341 3 18629 18341 18627 3 18340 18342 18628 3 18630 18628 18342 3 18341 18629 18343 3 18631 18343 18629 3 18342 18344 18630 3 18632 18630 18344 3 18343 18631 18345 3 18633 18345 18631 3 18344 18346 18632 3 18634 18632 18346 3 18345 18633 18347 3 18635 18347 18633 3 18346 18348 18634 3 18636 18634 18348 3 18347 
18635 18349 3 18637 18349 18635 3 18348 18350 18636 3 18638 18636 18350 3 18349 18637 18351 3 18639 18351 18637 3 18350 18352 18638 3 18640 18638 18352 3 18351 18639 18353 3 18641 18353 18639 3 18352 18354 18640 3 18642 18640 18354 3 18353 18641 18355 3 18643 18355 18641 3 18354 18356 18642 3 18644 18642 18356 3 18355 18643 18357 3 18645 18357 18643 3 18356 18358 18644 3 18646 18644 18358 3 18357 18645 18359 3 18647 18359 18645 3 18358 18360 18646 3 18648 18646 18360 3 18359 18647 18361 3 18649 18361 18647 3 18360 18362 18648 3 18650 18648 18362 3 18361 18649 18363 3 18651 18363 18649 3 18362 18364 18650 3 18652 18650 18364 3 18363 18651 18365 3 18653 18365 18651 3 18364 18366 18652 3 18654 18652 18366 3 18365 18653 18367 3 18655 18367 18653 3 18366 18368 18654 3 18656 18654 18368 3 18367 18655 18369 3 18657 18369 18655 3 18368 18370 18656 3 18658 18656 18370 3 18369 18657 18371 3 18659 18371 18657 3 18370 18372 18658 3 18660 18658 18372 3 18371 18659 18373 3 18661 18373 18659 3 18372 18374 18660 3 18662 18660 18374 3 18373 18661 18375 3 18663 18375 18661 3 18374 18376 18662 3 18664 18662 18376 3 18375 18663 18377 3 18665 18377 18663 3 18376 18378 18664 3 18666 18664 18378 3 18377 18665 18379 3 18667 18379 18665 3 18378 18380 18666 3 18668 18666 18380 3 18379 18667 18381 3 18669 18381 18667 3 18380 18382 18670 3 18380 18670 18668 3 18381 18669 18383 3 18671 18383 18669 3 18382 18384 18672 3 18382 18672 18670 3 18383 18671 18673 3 18383 18673 18385 3 18384 18386 18674 3 18384 18674 18672 3 18385 18673 18675 3 18385 18675 18387 3 18386 18388 18676 3 18386 18676 18674 3 18387 18675 18677 3 18387 18677 18389 3 18388 18390 18678 3 18388 18678 18676 3 18389 18677 18679 3 18389 18679 18391 3 18390 18392 18680 3 18390 18680 18678 3 18391 18679 18681 3 18391 18681 18393 3 18392 18394 18682 3 18392 18682 18680 3 18393 18681 18683 3 18393 18683 18395 3 18394 18396 18684 3 18394 18684 18682 3 18395 18683 18685 3 18395 18685 18397 3 18396 18398 18686 3 18396 18686 18684 3 18397 
18685 18687 3 18397 18687 18399 3 18398 18400 18688 3 18398 18688 18686 3 18399 18687 18689 3 18399 18689 18401 3 18400 18402 18690 3 18400 18690 18688 3 18401 18689 18691 3 18401 18691 18403 3 18402 18404 18692 3 18402 18692 18690 3 18403 18691 18693 3 18403 18693 18405 3 18404 18406 18694 3 18404 18694 18692 3 18405 18693 18695 3 18405 18695 18407 3 18406 18408 18696 3 18406 18696 18694 3 18407 18695 18697 3 18407 18697 18409 3 18408 18410 18698 3 18408 18698 18696 3 18409 18697 18699 3 18409 18699 18411 3 18410 18412 18700 3 18410 18700 18698 3 18411 18699 18701 3 18411 18701 18413 3 18412 18414 18702 3 18412 18702 18700 3 18413 18701 18703 3 18413 18703 18415 3 18414 18416 18704 3 18414 18704 18702 3 18415 18703 18705 3 18415 18705 18417 3 18416 18418 18706 3 18416 18706 18704 3 18417 18705 18707 3 18417 18707 18419 3 18418 18420 18708 3 18418 18708 18706 3 18419 18707 18709 3 18419 18709 18421 3 18420 18422 18710 3 18420 18710 18708 3 18421 18709 18711 3 18421 18711 18423 3 18422 18424 18712 3 18422 18712 18710 3 18423 18711 18713 3 18423 18713 18425 3 18424 18426 18714 3 18424 18714 18712 3 18425 18713 18715 3 18425 18715 18427 3 18426 18428 18716 3 18426 18716 18714 3 18427 18715 18717 3 18427 18717 18429 3 18428 18430 18718 3 18428 18718 18716 3 18429 18717 18719 3 18429 18719 18431 3 18430 18432 18720 3 18430 18720 18718 3 18431 18719 18721 3 18431 18721 18433 3 18432 18434 18722 3 18432 18722 18720 3 18433 18721 18723 3 18433 18723 18435 3 18434 18436 18724 3 18434 18724 18722 3 18435 18723 18725 3 18435 18725 18437 3 18436 18438 18726 3 18436 18726 18724 3 18437 18725 18727 3 18437 18727 18439 3 18438 18440 18728 3 18438 18728 18726 3 18439 18727 18729 3 18439 18729 18441 3 18440 18442 18730 3 18440 18730 18728 3 18441 18729 18731 3 18441 18731 18443 3 18442 18444 18732 3 18442 18732 18730 3 18443 18731 18733 3 18443 18733 18445 3 18444 18446 18734 3 18444 18734 18732 3 18445 18733 18735 3 18445 18735 18447 3 18446 18448 18736 3 18446 18736 18734 3 18447 
18735 18737 3 18447 18737 18449 3 18448 18450 18738 3 18448 18738 18736 3 18449 18737 18739 3 18449 18739 18451 3 18450 18452 18740 3 18450 18740 18738 3 18451 18739 18741 3 18451 18741 18453 3 18452 18454 18742 3 18452 18742 18740 3 18453 18741 18743 3 18453 18743 18455 3 18454 18456 18744 3 18454 18744 18742 3 18455 18743 18745 3 18455 18745 18457 3 18456 18458 18746 3 18456 18746 18744 3 18457 18745 18747 3 18457 18747 18459 3 18458 18460 18748 3 18458 18748 18746 3 18459 18747 18749 3 18459 18749 18461 3 18460 18462 18750 3 18460 18750 18748 3 18461 18749 18751 3 18461 18751 18463 3 18462 18464 18750 3 18752 18750 18464 3 18463 18751 18755 3 18463 18755 18467 3 18464 18465 18752 3 18753 18752 18465 3 18465 18468 18753 3 18756 18753 18468 3 18466 18467 18754 3 18755 18754 18467 3 18466 18754 18471 3 18759 18471 18754 3 18468 18469 18756 3 18757 18756 18469 3 18469 18472 18757 3 18760 18757 18472 3 18470 18471 18758 3 18759 18758 18471 3 18470 18758 18540 3 18828 18540 18758 3 18472 18473 18760 3 18761 18760 18473 3 18473 18474 18761 3 18762 18761 18474 3 18474 18475 18762 3 18763 18762 18475 3 18475 18476 18763 3 18764 18763 18476 3 18476 18477 18764 3 18765 18764 18477 3 18477 18478 18765 3 18766 18765 18478 3 18478 18479 18766 3 18767 18766 18479 3 18479 18480 18767 3 18768 18767 18480 3 18480 18481 18768 3 18769 18768 18481 3 18481 18482 18769 3 18770 18769 18482 3 18482 18483 18770 3 18771 18770 18483 3 18483 18484 18771 3 18772 18771 18484 3 18484 18485 18772 3 18773 18772 18485 3 18485 18486 18773 3 18774 18773 18486 3 18486 18487 18774 3 18775 18774 18487 3 18487 18488 18775 3 18776 18775 18488 3 18488 18489 18776 3 18777 18776 18489 3 18489 18490 18777 3 18778 18777 18490 3 18490 18491 18778 3 18779 18778 18491 3 18491 18492 18779 3 18780 18779 18492 3 18492 18493 18780 3 18781 18780 18493 3 18493 18494 18781 3 18782 18781 18494 3 18494 18495 18782 3 18783 18782 18495 3 18495 18496 18783 3 18784 18783 18496 3 18496 18497 18784 3 18785 18784 18497 3 18497 
18498 18785 3 18786 18785 18498 3 18498 18499 18786 3 18787 18786 18499 3 18499 18500 18787 3 18788 18787 18500 3 18500 18501 18788 3 18789 18788 18501 3 18501 18502 18789 3 18790 18789 18502 3 18502 18503 18790 3 18791 18790 18503 3 18503 18504 18791 3 18792 18791 18504 3 18504 18505 18792 3 18793 18792 18505 3 18505 18506 18793 3 18794 18793 18506 3 18506 18507 18794 3 18795 18794 18507 3 18507 18508 18795 3 18796 18795 18508 3 18508 18509 18796 3 18797 18796 18509 3 18509 18510 18797 3 18798 18797 18510 3 18510 18511 18798 3 18799 18798 18511 3 18511 18512 18799 3 18800 18799 18512 3 18512 18513 18800 3 18801 18800 18513 3 18513 18514 18801 3 18802 18801 18514 3 18514 18515 18802 3 18803 18802 18515 3 18515 18516 18803 3 18804 18803 18516 3 18516 18517 18804 3 18805 18804 18517 3 18517 18518 18805 3 18806 18805 18518 3 18518 18519 18806 3 18807 18806 18519 3 18519 18520 18807 3 18808 18807 18520 3 18520 18521 18808 3 18809 18808 18521 3 18521 18522 18809 3 18810 18809 18522 3 18522 18523 18810 3 18811 18810 18523 3 18523 18524 18811 3 18812 18811 18524 3 18524 18525 18812 3 18813 18812 18525 3 18525 18526 18813 3 18814 18813 18526 3 18526 18527 18814 3 18815 18814 18527 3 18527 18528 18815 3 18816 18815 18528 3 18528 18529 18816 3 18817 18816 18529 3 18529 18530 18817 3 18818 18817 18530 3 18530 18531 18818 3 18819 18818 18531 3 18531 18532 18819 3 18820 18819 18532 3 18532 18533 18820 3 18821 18820 18533 3 18533 18534 18821 3 18822 18821 18534 3 18534 18535 18822 3 18823 18822 18535 3 18535 18536 18823 3 18824 18823 18536 3 18536 18537 18824 3 18825 18824 18537 3 18537 18538 18825 3 18826 18825 18538 3 18538 18539 18826 3 18827 18826 18539 3 18539 18540 18827 3 18828 18827 18540 3 18541 18829 18542 3 18830 18542 18829 3 18541 18611 18899 3 18541 18899 18829 3 18542 18830 18543 3 18831 18543 18830 3 18543 18831 18544 3 18832 18544 18831 3 18544 18832 18545 3 18833 18545 18832 3 18545 18833 18546 3 18834 18546 18833 3 18546 18834 18547 3 18835 18547 18834 3 18547 
18835 18548 3 18836 18548 18835 3 18548 18836 18549 3 18837 18549 18836 3 18549 18837 18550 3 18838 18550 18837 3 18550 18838 18839 3 18550 18839 18551 3 18551 18839 18840 3 18551 18840 18552 3 18552 18840 18841 3 18552 18841 18553 3 18553 18841 18842 3 18553 18842 18554 3 18554 18842 18843 3 18554 18843 18555 3 18555 18843 18844 3 18555 18844 18556 3 18556 18844 18845 3 18556 18845 18557 3 18557 18845 18846 3 18557 18846 18558 3 18558 18846 18847 3 18558 18847 18559 3 18559 18847 18848 3 18559 18848 18560 3 18560 18848 18849 3 18560 18849 18561 3 18561 18849 18850 3 18561 18850 18562 3 18562 18850 18851 3 18562 18851 18563 3 18563 18851 18852 3 18563 18852 18564 3 18564 18852 18853 3 18564 18853 18565 3 18565 18853 18854 3 18565 18854 18566 3 18566 18854 18855 3 18566 18855 18567 3 18567 18855 18856 3 18567 18856 18568 3 18568 18856 18857 3 18568 18857 18569 3 18569 18857 18858 3 18569 18858 18570 3 18570 18858 18859 3 18570 18859 18571 3 18571 18859 18860 3 18571 18860 18572 3 18572 18860 18861 3 18572 18861 18573 3 18573 18861 18862 3 18573 18862 18574 3 18574 18862 18863 3 18574 18863 18575 3 18575 18863 18864 3 18575 18864 18576 3 18576 18864 18865 3 18576 18865 18577 3 18577 18865 18866 3 18577 18866 18578 3 18578 18866 18867 3 18578 18867 18579 3 18579 18867 18868 3 18579 18868 18580 3 18580 18868 18869 3 18580 18869 18581 3 18581 18869 18870 3 18581 18870 18582 3 18582 18870 18871 3 18582 18871 18583 3 18583 18871 18872 3 18583 18872 18584 3 18584 18872 18873 3 18584 18873 18585 3 18585 18873 18874 3 18585 18874 18586 3 18586 18874 18875 3 18586 18875 18587 3 18587 18875 18876 3 18587 18876 18588 3 18588 18876 18877 3 18588 18877 18589 3 18589 18877 18878 3 18589 18878 18590 3 18590 18878 18879 3 18590 18879 18591 3 18591 18879 18880 3 18591 18880 18592 3 18592 18880 18881 3 18592 18881 18593 3 18593 18881 18882 3 18593 18882 18594 3 18594 18882 18883 3 18594 18883 18595 3 18595 18883 18884 3 18595 18884 18596 3 18596 18884 18885 3 18596 18885 18597 3 18597 
18885 18886 3 18597 18886 18598 3 18598 18886 18887 3 18598 18887 18599 3 18599 18887 18888 3 18599 18888 18600 3 18600 18888 18889 3 18600 18889 18601 3 18601 18889 18890 3 18601 18890 18602 3 18602 18890 18891 3 18602 18891 18603 3 18603 18891 18892 3 18603 18892 18604 3 18604 18892 18893 3 18604 18893 18605 3 18605 18893 18894 3 18605 18894 18606 3 18606 18894 18895 3 18606 18895 18607 3 18607 18895 18896 3 18607 18896 18608 3 18608 18896 18897 3 18608 18897 18609 3 18609 18897 18900 3 18609 18900 18612 3 18610 18898 18899 3 18610 18899 18611 3 18610 18615 18903 3 18610 18903 18898 3 18612 18900 18901 3 18612 18901 18613 3 18613 18901 18904 3 18613 18904 18616 3 18614 18902 18903 3 18614 18903 18615 3 18614 18618 18906 3 18614 18906 18902 3 18616 18904 18905 3 18616 18905 18617 3 18617 18905 18907 3 18617 18907 18619 3 18618 18620 18908 3 18618 18908 18906 3 18619 18907 18909 3 18619 18909 18621 3 18620 18622 18910 3 18620 18910 18908 3 18621 18909 18911 3 18621 18911 18623 3 18622 18624 18912 3 18622 18912 18910 3 18623 18911 18913 3 18623 18913 18625 3 18624 18626 18914 3 18624 18914 18912 3 18625 18913 18915 3 18625 18915 18627 3 18626 18628 18916 3 18626 18916 18914 3 18627 18915 18917 3 18627 18917 18629 3 18628 18630 18918 3 18628 18918 18916 3 18629 18917 18919 3 18629 18919 18631 3 18630 18632 18918 3 18920 18918 18632 3 18631 18919 18921 3 18631 18921 18633 3 18632 18634 18920 3 18922 18920 18634 3 18633 18921 18923 3 18633 18923 18635 3 18634 18636 18922 3 18924 18922 18636 3 18635 18923 18637 3 18925 18637 18923 3 18636 18638 18924 3 18926 18924 18638 3 18637 18925 18639 3 18927 18639 18925 3 18638 18640 18926 3 18928 18926 18640 3 18639 18927 18641 3 18929 18641 18927 3 18640 18642 18928 3 18930 18928 18642 3 18641 18929 18643 3 18931 18643 18929 3 18642 18644 18930 3 18932 18930 18644 3 18643 18931 18645 3 18933 18645 18931 3 18644 18646 18932 3 18934 18932 18646 3 18645 18933 18647 3 18935 18647 18933 3 18646 18648 18934 3 18936 18934 18648 3 18647 
18935 18649 3 18937 18649 18935 3 18648 18650 18936 3 18938 18936 18650 3 18649 18937 18651 3 18939 18651 18937 3 18650 18652 18938 3 18940 18938 18652 3 18651 18939 18653 3 18941 18653 18939 3 18652 18654 18940 3 18942 18940 18654 3 18653 18941 18655 3 18943 18655 18941 3 18654 18656 18942 3 18944 18942 18656 3 18655 18943 18657 3 18945 18657 18943 3 18656 18658 18944 3 18946 18944 18658 3 18657 18945 18659 3 18947 18659 18945 3 18658 18660 18946 3 18948 18946 18660 3 18659 18947 18661 3 18949 18661 18947 3 18660 18662 18948 3 18950 18948 18662 3 18661 18949 18663 3 18951 18663 18949 3 18662 18664 18950 3 18952 18950 18664 3 18663 18951 18665 3 18953 18665 18951 3 18664 18666 18952 3 18954 18952 18666 3 18665 18953 18667 3 18955 18667 18953 3 18666 18668 18954 3 18956 18954 18668 3 18667 18955 18669 3 18957 18669 18955 3 18668 18670 18956 3 18958 18956 18670 3 18669 18957 18671 3 18959 18671 18957 3 18670 18672 18958 3 18960 18958 18672 3 18671 18959 18673 3 18961 18673 18959 3 18672 18674 18960 3 18962 18960 18674 3 18673 18961 18675 3 18963 18675 18961 3 18674 18676 18962 3 18964 18962 18676 3 18675 18963 18677 3 18965 18677 18963 3 18676 18678 18964 3 18966 18964 18678 3 18677 18965 18679 3 18967 18679 18965 3 18678 18680 18966 3 18968 18966 18680 3 18679 18967 18681 3 18969 18681 18967 3 18680 18682 18968 3 18970 18968 18682 3 18681 18969 18683 3 18971 18683 18969 3 18682 18684 18970 3 18972 18970 18684 3 18683 18971 18685 3 18973 18685 18971 3 18684 18686 18972 3 18974 18972 18686 3 18685 18973 18687 3 18975 18687 18973 3 18686 18688 18974 3 18976 18974 18688 3 18687 18975 18689 3 18977 18689 18975 3 18688 18690 18976 3 18978 18976 18690 3 18689 18977 18691 3 18979 18691 18977 3 18690 18692 18978 3 18980 18978 18692 3 18691 18979 18693 3 18981 18693 18979 3 18692 18694 18980 3 18982 18980 18694 3 18693 18981 18695 3 18983 18695 18981 3 18694 18696 18982 3 18984 18982 18696 3 18695 18983 18697 3 18985 18697 18983 3 18696 18698 18984 3 18986 18984 18698 3 18697 
18985 18699 3 18987 18699 18985 3 18698 18700 18986 3 18988 18986 18700 3 18699 18987 18701 3 18989 18701 18987 3 18700 18702 18988 3 18990 18988 18702 3 18701 18989 18703 3 18991 18703 18989 3 18702 18704 18990 3 18992 18990 18704 3 18703 18991 18705 3 18993 18705 18991 3 18704 18706 18992 3 18994 18992 18706 3 18705 18993 18707 3 18995 18707 18993 3 18706 18708 18994 3 18996 18994 18708 3 18707 18995 18709 3 18997 18709 18995 3 18708 18710 18996 3 18998 18996 18710 3 18709 18997 18711 3 18999 18711 18997 3 18710 18712 18998 3 19000 18998 18712 3 18711 18999 18713 3 19001 18713 18999 3 18712 18714 19000 3 19002 19000 18714 3 18713 19001 18715 3 19003 18715 19001 3 18714 18716 19004 3 18714 19004 19002 3 18715 19003 18717 3 19005 18717 19003 3 18716 18718 19006 3 18716 19006 19004 3 18717 19005 18719 3 19007 18719 19005 3 18718 18720 19008 3 18718 19008 19006 3 18719 19007 19009 3 18719 19009 18721 3 18720 18722 19010 3 18720 19010 19008 3 18721 19009 19011 3 18721 19011 18723 3 18722 18724 19012 3 18722 19012 19010 3 18723 19011 19013 3 18723 19013 18725 3 18724 18726 19014 3 18724 19014 19012 3 18725 19013 19015 3 18725 19015 18727 3 18726 18728 19016 3 18726 19016 19014 3 18727 19015 19017 3 18727 19017 18729 3 18728 18730 19018 3 18728 19018 19016 3 18729 19017 19019 3 18729 19019 18731 3 18730 18732 19020 3 18730 19020 19018 3 18731 19019 19021 3 18731 19021 18733 3 18732 18734 19022 3 18732 19022 19020 3 18733 19021 19023 3 18733 19023 18735 3 18734 18736 19024 3 18734 19024 19022 3 18735 19023 19025 3 18735 19025 18737 3 18736 18738 19026 3 18736 19026 19024 3 18737 19025 19027 3 18737 19027 18739 3 18738 18740 19028 3 18738 19028 19026 3 18739 19027 19029 3 18739 19029 18741 3 18740 18742 19030 3 18740 19030 19028 3 18741 19029 19031 3 18741 19031 18743 3 18742 18744 19032 3 18742 19032 19030 3 18743 19031 19033 3 18743 19033 18745 3 18744 18746 19034 3 18744 19034 19032 3 18745 19033 19035 3 18745 19035 18747 3 18746 18748 19036 3 18746 19036 19034 3 18747 
19035 19037 3 18747 19037 18749 3 18748 18750 19038 3 18748 19038 19036 3 18749 19037 19039 3 18749 19039 18751 3 18750 18752 19040 3 18750 19040 19038 3 18751 19039 19043 3 18751 19043 18755 3 18752 18753 19041 3 18752 19041 19040 3 18753 18756 19044 3 18753 19044 19041 3 18754 18755 19043 3 18754 19043 19042 3 18754 19042 19047 3 18754 19047 18759 3 18756 18757 19045 3 18756 19045 19044 3 18757 18760 19048 3 18757 19048 19045 3 18758 18759 19047 3 18758 19047 19046 3 18758 19046 19116 3 18758 19116 18828 3 18760 18761 19049 3 18760 19049 19048 3 18761 18762 19050 3 18761 19050 19049 3 18762 18763 19051 3 18762 19051 19050 3 18763 18764 19052 3 18763 19052 19051 3 18764 18765 19053 3 18764 19053 19052 3 18765 18766 19054 3 18765 19054 19053 3 18766 18767 19055 3 18766 19055 19054 3 18767 18768 19056 3 18767 19056 19055 3 18768 18769 19057 3 18768 19057 19056 3 18769 18770 19058 3 18769 19058 19057 3 18770 18771 19059 3 18770 19059 19058 3 18771 18772 19060 3 18771 19060 19059 3 18772 18773 19061 3 18772 19061 19060 3 18773 18774 19062 3 18773 19062 19061 3 18774 18775 19063 3 18774 19063 19062 3 18775 18776 19064 3 18775 19064 19063 3 18776 18777 19065 3 18776 19065 19064 3 18777 18778 19066 3 18777 19066 19065 3 18778 18779 19067 3 18778 19067 19066 3 18779 18780 19068 3 18779 19068 19067 3 18780 18781 19069 3 18780 19069 19068 3 18781 18782 19070 3 18781 19070 19069 3 18782 18783 19071 3 18782 19071 19070 3 18783 18784 19072 3 18783 19072 19071 3 18784 18785 19073 3 18784 19073 19072 3 18785 18786 19074 3 18785 19074 19073 3 18786 18787 19075 3 18786 19075 19074 3 18787 18788 19076 3 18787 19076 19075 3 18788 18789 19077 3 18788 19077 19076 3 18789 18790 19078 3 18789 19078 19077 3 18790 18791 19079 3 18790 19079 19078 3 18791 18792 19080 3 18791 19080 19079 3 18792 18793 19081 3 18792 19081 19080 3 18793 18794 19082 3 18793 19082 19081 3 18794 18795 19083 3 18794 19083 19082 3 18795 18796 19084 3 18795 19084 19083 3 18796 18797 19085 3 18796 19085 19084 3 18797 
18798 19086 3 18797 19086 19085 3 18798 18799 19087 3 18798 19087 19086 3 18799 18800 19087 3 19088 19087 18800 3 18800 18801 19088 3 19089 19088 18801 3 18801 18802 19089 3 19090 19089 18802 3 18802 18803 19090 3 19091 19090 18803 3 18803 18804 19091 3 19092 19091 18804 3 18804 18805 19092 3 19093 19092 18805 3 18805 18806 19093 3 19094 19093 18806 3 18806 18807 19094 3 19095 19094 18807 3 18807 18808 19095 3 19096 19095 18808 3 18808 18809 19096 3 19097 19096 18809 3 18809 18810 19097 3 19098 19097 18810 3 18810 18811 19098 3 19099 19098 18811 3 18811 18812 19099 3 19100 19099 18812 3 18812 18813 19100 3 19101 19100 18813 3 18813 18814 19101 3 19102 19101 18814 3 18814 18815 19102 3 19103 19102 18815 3 18815 18816 19103 3 19104 19103 18816 3 18816 18817 19104 3 19105 19104 18817 3 18817 18818 19105 3 19106 19105 18818 3 18818 18819 19106 3 19107 19106 18819 3 18819 18820 19107 3 19108 19107 18820 3 18820 18821 19108 3 19109 19108 18821 3 18821 18822 19109 3 19110 19109 18822 3 18822 18823 19110 3 19111 19110 18823 3 18823 18824 19111 3 19112 19111 18824 3 18824 18825 19112 3 19113 19112 18825 3 18825 18826 19113 3 19114 19113 18826 3 18826 18827 19114 3 19115 19114 18827 3 18827 18828 19115 3 19116 19115 18828 3 18829 19117 18830 3 19118 18830 19117 3 18829 18899 19117 3 19187 19117 18899 3 18830 19118 18831 3 19119 18831 19118 3 18831 19119 18832 3 19120 18832 19119 3 18832 19120 18833 3 19121 18833 19120 3 18833 19121 18834 3 19122 18834 19121 3 18834 19122 18835 3 19123 18835 19122 3 18835 19123 18836 3 19124 18836 19123 3 18836 19124 18837 3 19125 18837 19124 3 18837 19125 18838 3 19126 18838 19125 3 18838 19126 18839 3 19127 18839 19126 3 18839 19127 18840 3 19128 18840 19127 3 18840 19128 18841 3 19129 18841 19128 3 18841 19129 18842 3 19130 18842 19129 3 18842 19130 18843 3 19131 18843 19130 3 18843 19131 18844 3 19132 18844 19131 3 18844 19132 18845 3 19133 18845 19132 3 18845 19133 18846 3 19134 18846 19133 3 18846 19134 18847 3 19135 18847 19134 3 18847 
19135 18848 3 19136 18848 19135 3 18848 19136 18849 3 19137 18849 19136 3 18849 19137 18850 3 19138 18850 19137 3 18850 19138 18851 3 19139 18851 19138 3 18851 19139 18852 3 19140 18852 19139 3 18852 19140 18853 3 19141 18853 19140 3 18853 19141 18854 3 19142 18854 19141 3 18854 19142 18855 3 19143 18855 19142 3 18855 19143 18856 3 19144 18856 19143 3 18856 19144 18857 3 19145 18857 19144 3 18857 19145 18858 3 19146 18858 19145 3 18858 19146 18859 3 19147 18859 19146 3 18859 19147 18860 3 19148 18860 19147 3 18860 19148 18861 3 19149 18861 19148 3 18861 19149 18862 3 19150 18862 19149 3 18862 19150 18863 3 19151 18863 19150 3 18863 19151 18864 3 19152 18864 19151 3 18864 19152 18865 3 19153 18865 19152 3 18865 19153 18866 3 19154 18866 19153 3 18866 19154 18867 3 19155 18867 19154 3 18867 19155 18868 3 19156 18868 19155 3 18868 19156 18869 3 19157 18869 19156 3 18869 19157 18870 3 19158 18870 19157 3 18870 19158 18871 3 19159 18871 19158 3 18871 19159 18872 3 19160 18872 19159 3 18872 19160 18873 3 19161 18873 19160 3 18873 19161 18874 3 19162 18874 19161 3 18874 19162 18875 3 19163 18875 19162 3 18875 19163 18876 3 19164 18876 19163 3 18876 19164 18877 3 19165 18877 19164 3 18877 19165 18878 3 19166 18878 19165 3 18878 19166 18879 3 19167 18879 19166 3 18879 19167 18880 3 19168 18880 19167 3 18880 19168 18881 3 19169 18881 19168 3 18881 19169 18882 3 19170 18882 19169 3 18882 19170 18883 3 19171 18883 19170 3 18883 19171 18884 3 19172 18884 19171 3 18884 19172 18885 3 19173 18885 19172 3 18885 19173 18886 3 19174 18886 19173 3 18886 19174 18887 3 19175 18887 19174 3 18887 19175 18888 3 19176 18888 19175 3 18888 19176 19177 3 18888 19177 18889 3 18889 19177 19178 3 18889 19178 18890 3 18890 19178 19179 3 18890 19179 18891 3 18891 19179 19180 3 18891 19180 18892 3 18892 19180 19181 3 18892 19181 18893 3 18893 19181 19182 3 18893 19182 18894 3 18894 19182 19183 3 18894 19183 18895 3 18895 19183 19184 3 18895 19184 18896 3 18896 19184 19185 3 18896 19185 18897 3 18897 
19185 19188 3 18897 19188 18900 3 18898 19186 19187 3 18898 19187 18899 3 18898 18903 19191 3 18898 19191 19186 3 18900 19188 19189 3 18900 19189 18901 3 18901 19189 19192 3 18901 19192 18904 3 18902 19190 19191 3 18902 19191 18903 3 18902 18906 19194 3 18902 19194 19190 3 18904 19192 19193 3 18904 19193 18905 3 18905 19193 19195 3 18905 19195 18907 3 18906 18908 19196 3 18906 19196 19194 3 18907 19195 19197 3 18907 19197 18909 3 18908 18910 19198 3 18908 19198 19196 3 18909 19197 19199 3 18909 19199 18911 3 18910 18912 19200 3 18910 19200 19198 3 18911 19199 19201 3 18911 19201 18913 3 18912 18914 19202 3 18912 19202 19200 3 18913 19201 19203 3 18913 19203 18915 3 18914 18916 19204 3 18914 19204 19202 3 18915 19203 19205 3 18915 19205 18917 3 18916 18918 19206 3 18916 19206 19204 3 18917 19205 19207 3 18917 19207 18919 3 18918 18920 19208 3 18918 19208 19206 3 18919 19207 19209 3 18919 19209 18921 3 18920 18922 19210 3 18920 19210 19208 3 18921 19209 19211 3 18921 19211 18923 3 18922 18924 19212 3 18922 19212 19210 3 18923 19211 19213 3 18923 19213 18925 3 18924 18926 19214 3 18924 19214 19212 3 18925 19213 19215 3 18925 19215 18927 3 18926 18928 19216 3 18926 19216 19214 3 18927 19215 19217 3 18927 19217 18929 3 18928 18930 19218 3 18928 19218 19216 3 18929 19217 19219 3 18929 19219 18931 3 18930 18932 19220 3 18930 19220 19218 3 18931 19219 19221 3 18931 19221 18933 3 18932 18934 19222 3 18932 19222 19220 3 18933 19221 19223 3 18933 19223 18935 3 18934 18936 19224 3 18934 19224 19222 3 18935 19223 19225 3 18935 19225 18937 3 18936 18938 19226 3 18936 19226 19224 3 18937 19225 19227 3 18937 19227 18939 3 18938 18940 19228 3 18938 19228 19226 3 18939 19227 19229 3 18939 19229 18941 3 18940 18942 19230 3 18940 19230 19228 3 18941 19229 19231 3 18941 19231 18943 3 18942 18944 19232 3 18942 19232 19230 3 18943 19231 19233 3 18943 19233 18945 3 18944 18946 19234 3 18944 19234 19232 3 18945 19233 19235 3 18945 19235 18947 3 18946 18948 19236 3 18946 19236 19234 3 18947 
19235 19237 3 18947 19237 18949 3 18948 18950 19238 3 18948 19238 19236 3 18949 19237 19239 3 18949 19239 18951 3 18950 18952 19240 3 18950 19240 19238 3 18951 19239 19241 3 18951 19241 18953 3 18952 18954 19242 3 18952 19242 19240 3 18953 19241 19243 3 18953 19243 18955 3 18954 18956 19244 3 18954 19244 19242 3 18955 19243 19245 3 18955 19245 18957 3 18956 18958 19246 3 18956 19246 19244 3 18957 19245 19247 3 18957 19247 18959 3 18958 18960 19248 3 18958 19248 19246 3 18959 19247 19249 3 18959 19249 18961 3 18960 18962 19250 3 18960 19250 19248 3 18961 19249 19251 3 18961 19251 18963 3 18962 18964 19252 3 18962 19252 19250 3 18963 19251 19253 3 18963 19253 18965 3 18964 18966 19254 3 18964 19254 19252 3 18965 19253 19255 3 18965 19255 18967 3 18966 18968 19256 3 18966 19256 19254 3 18967 19255 19257 3 18967 19257 18969 3 18968 18970 19256 3 19258 19256 18970 3 18969 19257 19259 3 18969 19259 18971 3 18970 18972 19258 3 19260 19258 18972 3 18971 19259 19261 3 18971 19261 18973 3 18972 18974 19260 3 19262 19260 18974 3 18973 19261 18975 3 19263 18975 19261 3 18974 18976 19262 3 19264 19262 18976 3 18975 19263 18977 3 19265 18977 19263 3 18976 18978 19264 3 19266 19264 18978 3 18977 19265 18979 3 19267 18979 19265 3 18978 18980 19266 3 19268 19266 18980 3 18979 19267 18981 3 19269 18981 19267 3 18980 18982 19268 3 19270 19268 18982 3 18981 19269 18983 3 19271 18983 19269 3 18982 18984 19270 3 19272 19270 18984 3 18983 19271 18985 3 19273 18985 19271 3 18984 18986 19272 3 19274 19272 18986 3 18985 19273 18987 3 19275 18987 19273 3 18986 18988 19274 3 19276 19274 18988 3 18987 19275 18989 3 19277 18989 19275 3 18988 18990 19276 3 19278 19276 18990 3 18989 19277 18991 3 19279 18991 19277 3 18990 18992 19278 3 19280 19278 18992 3 18991 19279 18993 3 19281 18993 19279 3 18992 18994 19280 3 19282 19280 18994 3 18993 19281 18995 3 19283 18995 19281 3 18994 18996 19282 3 19284 19282 18996 3 18995 19283 18997 3 19285 18997 19283 3 18996 18998 19284 3 19286 19284 18998 3 18997 
19285 18999 3 19287 18999 19285 3 18998 19000 19286 3 19288 19286 19000 3 18999 19287 19001 3 19289 19001 19287 3 19000 19002 19288 3 19290 19288 19002 3 19001 19289 19003 3 19291 19003 19289 3 19002 19004 19290 3 19292 19290 19004 3 19003 19291 19005 3 19293 19005 19291 3 19004 19006 19292 3 19294 19292 19006 3 19005 19293 19007 3 19295 19007 19293 3 19006 19008 19294 3 19296 19294 19008 3 19007 19295 19009 3 19297 19009 19295 3 19008 19010 19296 3 19298 19296 19010 3 19009 19297 19011 3 19299 19011 19297 3 19010 19012 19298 3 19300 19298 19012 3 19011 19299 19013 3 19301 19013 19299 3 19012 19014 19300 3 19302 19300 19014 3 19013 19301 19015 3 19303 19015 19301 3 19014 19016 19302 3 19304 19302 19016 3 19015 19303 19017 3 19305 19017 19303 3 19016 19018 19304 3 19306 19304 19018 3 19017 19305 19019 3 19307 19019 19305 3 19018 19020 19306 3 19308 19306 19020 3 19019 19307 19021 3 19309 19021 19307 3 19020 19022 19308 3 19310 19308 19022 3 19021 19309 19023 3 19311 19023 19309 3 19022 19024 19310 3 19312 19310 19024 3 19023 19311 19025 3 19313 19025 19311 3 19024 19026 19312 3 19314 19312 19026 3 19025 19313 19027 3 19315 19027 19313 3 19026 19028 19314 3 19316 19314 19028 3 19027 19315 19029 3 19317 19029 19315 3 19028 19030 19316 3 19318 19316 19030 3 19029 19317 19031 3 19319 19031 19317 3 19030 19032 19318 3 19320 19318 19032 3 19031 19319 19033 3 19321 19033 19319 3 19032 19034 19320 3 19322 19320 19034 3 19033 19321 19035 3 19323 19035 19321 3 19034 19036 19322 3 19324 19322 19036 3 19035 19323 19037 3 19325 19037 19323 3 19036 19038 19324 3 19326 19324 19038 3 19037 19325 19039 3 19327 19039 19325 3 19038 19040 19326 3 19328 19326 19040 3 19039 19327 19043 3 19331 19043 19327 3 19040 19041 19328 3 19329 19328 19041 3 19041 19044 19329 3 19332 19329 19044 3 19042 19043 19330 3 19331 19330 19043 3 19042 19330 19047 3 19335 19047 19330 3 19044 19045 19332 3 19333 19332 19045 3 19045 19048 19333 3 19336 19333 19048 3 19046 19047 19334 3 19335 19334 19047 3 19046 
19334 19116 3 19404 19116 19334 3 19048 19049 19336 3 19337 19336 19049 3 19049 19050 19337 3 19338 19337 19050 3 19050 19051 19338 3 19339 19338 19051 3 19051 19052 19339 3 19340 19339 19052 3 19052 19053 19340 3 19341 19340 19053 3 19053 19054 19342 3 19053 19342 19341 3 19054 19055 19343 3 19054 19343 19342 3 19055 19056 19344 3 19055 19344 19343 3 19056 19057 19345 3 19056 19345 19344 3 19057 19058 19346 3 19057 19346 19345 3 19058 19059 19347 3 19058 19347 19346 3 19059 19060 19348 3 19059 19348 19347 3 19060 19061 19349 3 19060 19349 19348 3 19061 19062 19350 3 19061 19350 19349 3 19062 19063 19351 3 19062 19351 19350 3 19063 19064 19352 3 19063 19352 19351 3 19064 19065 19353 3 19064 19353 19352 3 19065 19066 19354 3 19065 19354 19353 3 19066 19067 19355 3 19066 19355 19354 3 19067 19068 19356 3 19067 19356 19355 3 19068 19069 19357 3 19068 19357 19356 3 19069 19070 19358 3 19069 19358 19357 3 19070 19071 19359 3 19070 19359 19358 3 19071 19072 19360 3 19071 19360 19359 3 19072 19073 19361 3 19072 19361 19360 3 19073 19074 19362 3 19073 19362 19361 3 19074 19075 19363 3 19074 19363 19362 3 19075 19076 19364 3 19075 19364 19363 3 19076 19077 19365 3 19076 19365 19364 3 19077 19078 19366 3 19077 19366 19365 3 19078 19079 19367 3 19078 19367 19366 3 19079 19080 19368 3 19079 19368 19367 3 19080 19081 19369 3 19080 19369 19368 3 19081 19082 19370 3 19081 19370 19369 3 19082 19083 19371 3 19082 19371 19370 3 19083 19084 19372 3 19083 19372 19371 3 19084 19085 19373 3 19084 19373 19372 3 19085 19086 19374 3 19085 19374 19373 3 19086 19087 19375 3 19086 19375 19374 3 19087 19088 19376 3 19087 19376 19375 3 19088 19089 19377 3 19088 19377 19376 3 19089 19090 19378 3 19089 19378 19377 3 19090 19091 19379 3 19090 19379 19378 3 19091 19092 19380 3 19091 19380 19379 3 19092 19093 19381 3 19092 19381 19380 3 19093 19094 19382 3 19093 19382 19381 3 19094 19095 19383 3 19094 19383 19382 3 19095 19096 19384 3 19095 19384 19383 3 19096 19097 19385 3 19096 19385 19384 3 19097 
19098 19386 3 19097 19386 19385 3 19098 19099 19387 3 19098 19387 19386 3 19099 19100 19388 3 19099 19388 19387 3 19100 19101 19389 3 19100 19389 19388 3 19101 19102 19390 3 19101 19390 19389 3 19102 19103 19391 3 19102 19391 19390 3 19103 19104 19392 3 19103 19392 19391 3 19104 19105 19393 3 19104 19393 19392 3 19105 19106 19394 3 19105 19394 19393 3 19106 19107 19395 3 19106 19395 19394 3 19107 19108 19396 3 19107 19396 19395 3 19108 19109 19397 3 19108 19397 19396 3 19109 19110 19398 3 19109 19398 19397 3 19110 19111 19399 3 19110 19399 19398 3 19111 19112 19400 3 19111 19400 19399 3 19112 19113 19401 3 19112 19401 19400 3 19113 19114 19402 3 19113 19402 19401 3 19114 19115 19403 3 19114 19403 19402 3 19115 19116 19404 3 19115 19404 19403 3 19117 19405 19406 3 19117 19406 19118 3 19117 19187 19405 3 19475 19405 19187 3 19118 19406 19407 3 19118 19407 19119 3 19119 19407 19408 3 19119 19408 19120 3 19120 19408 19409 3 19120 19409 19121 3 19121 19409 19410 3 19121 19410 19122 3 19122 19410 19411 3 19122 19411 19123 3 19123 19411 19412 3 19123 19412 19124 3 19124 19412 19413 3 19124 19413 19125 3 19125 19413 19414 3 19125 19414 19126 3 19126 19414 19415 3 19126 19415 19127 3 19127 19415 19416 3 19127 19416 19128 3 19128 19416 19417 3 19128 19417 19129 3 19129 19417 19418 3 19129 19418 19130 3 19130 19418 19419 3 19130 19419 19131 3 19131 19419 19420 3 19131 19420 19132 3 19132 19420 19421 3 19132 19421 19133 3 19133 19421 19422 3 19133 19422 19134 3 19134 19422 19423 3 19134 19423 19135 3 19135 19423 19424 3 19135 19424 19136 3 19136 19424 19425 3 19136 19425 19137 3 19137 19425 19426 3 19137 19426 19138 3 19138 19426 19427 3 19138 19427 19139 3 19139 19427 19428 3 19139 19428 19140 3 19140 19428 19429 3 19140 19429 19141 3 19141 19429 19430 3 19141 19430 19142 3 19142 19430 19431 3 19142 19431 19143 3 19143 19431 19432 3 19143 19432 19144 3 19144 19432 19145 3 19433 19145 19432 3 19145 19433 19146 3 19434 19146 19433 3 19146 19434 19147 3 19435 19147 19434 3 19147 
19435 19148 3 19436 19148 19435 3 19148 19436 19149 3 19437 19149 19436 3 19149 19437 19150 3 19438 19150 19437 3 19150 19438 19151 3 19439 19151 19438 3 19151 19439 19152 3 19440 19152 19439 3 19152 19440 19153 3 19441 19153 19440 3 19153 19441 19154 3 19442 19154 19441 3 19154 19442 19155 3 19443 19155 19442 3 19155 19443 19156 3 19444 19156 19443 3 19156 19444 19157 3 19445 19157 19444 3 19157 19445 19158 3 19446 19158 19445 3 19158 19446 19159 3 19447 19159 19446 3 19159 19447 19160 3 19448 19160 19447 3 19160 19448 19161 3 19449 19161 19448 3 19161 19449 19162 3 19450 19162 19449 3 19162 19450 19163 3 19451 19163 19450 3 19163 19451 19164 3 19452 19164 19451 3 19164 19452 19165 3 19453 19165 19452 3 19165 19453 19166 3 19454 19166 19453 3 19166 19454 19167 3 19455 19167 19454 3 19167 19455 19168 3 19456 19168 19455 3 19168 19456 19169 3 19457 19169 19456 3 19169 19457 19170 3 19458 19170 19457 3 19170 19458 19171 3 19459 19171 19458 3 19171 19459 19172 3 19460 19172 19459 3 19172 19460 19173 3 19461 19173 19460 3 19173 19461 19174 3 19462 19174 19461 3 19174 19462 19175 3 19463 19175 19462 3 19175 19463 19176 3 19464 19176 19463 3 19176 19464 19177 3 19465 19177 19464 3 19177 19465 19178 3 19466 19178 19465 3 19178 19466 19179 3 19467 19179 19466 3 19179 19467 19180 3 19468 19180 19467 3 19180 19468 19181 3 19469 19181 19468 3 19181 19469 19182 3 19470 19182 19469 3 19182 19470 19183 3 19471 19183 19470 3 19183 19471 19184 3 19472 19184 19471 3 19184 19472 19185 3 19473 19185 19472 3 19185 19473 19188 3 19476 19188 19473 3 19186 19474 19187 3 19475 19187 19474 3 19186 19191 19474 3 19479 19474 19191 3 19188 19476 19189 3 19477 19189 19476 3 19189 19477 19192 3 19480 19192 19477 3 19190 19478 19191 3 19479 19191 19478 3 19190 19194 19478 3 19482 19478 19194 3 19192 19480 19193 3 19481 19193 19480 3 19193 19481 19195 3 19483 19195 19481 3 19194 19196 19482 3 19484 19482 19196 3 19195 19483 19197 3 19485 19197 19483 3 19196 19198 19484 3 19486 19484 19198 3 19197 
19485 19199 3 19487 19199 19485 3 19198 19200 19486 3 19488 19486 19200 3 19199 19487 19201 3 19489 19201 19487 3 19200 19202 19488 3 19490 19488 19202 3 19201 19489 19203 3 19491 19203 19489 3 19202 19204 19490 3 19492 19490 19204 3 19203 19491 19205 3 19493 19205 19491 3 19204 19206 19492 3 19494 19492 19206 3 19205 19493 19207 3 19495 19207 19493 3 19206 19208 19494 3 19496 19494 19208 3 19207 19495 19209 3 19497 19209 19495 3 19208 19210 19496 3 19498 19496 19210 3 19209 19497 19211 3 19499 19211 19497 3 19210 19212 19498 3 19500 19498 19212 3 19211 19499 19213 3 19501 19213 19499 3 19212 19214 19500 3 19502 19500 19214 3 19213 19501 19215 3 19503 19215 19501 3 19214 19216 19502 3 19504 19502 19216 3 19215 19503 19217 3 19505 19217 19503 3 19216 19218 19504 3 19506 19504 19218 3 19217 19505 19219 3 19507 19219 19505 3 19218 19220 19506 3 19508 19506 19220 3 19219 19507 19221 3 19509 19221 19507 3 19220 19222 19508 3 19510 19508 19222 3 19221 19509 19223 3 19511 19223 19509 3 19222 19224 19510 3 19512 19510 19224 3 19223 19511 19225 3 19513 19225 19511 3 19224 19226 19514 3 19224 19514 19512 3 19225 19513 19227 3 19515 19227 19513 3 19226 19228 19516 3 19226 19516 19514 3 19227 19515 19229 3 19517 19229 19515 3 19228 19230 19518 3 19228 19518 19516 3 19229 19517 19519 3 19229 19519 19231 3 19230 19232 19520 3 19230 19520 19518 3 19231 19519 19521 3 19231 19521 19233 3 19232 19234 19522 3 19232 19522 19520 3 19233 19521 19523 3 19233 19523 19235 3 19234 19236 19524 3 19234 19524 19522 3 19235 19523 19525 3 19235 19525 19237 3 19236 19238 19526 3 19236 19526 19524 3 19237 19525 19527 3 19237 19527 19239 3 19238 19240 19528 3 19238 19528 19526 3 19239 19527 19529 3 19239 19529 19241 3 19240 19242 19530 3 19240 19530 19528 3 19241 19529 19531 3 19241 19531 19243 3 19242 19244 19532 3 19242 19532 19530 3 19243 19531 19533 3 19243 19533 19245 3 19244 19246 19534 3 19244 19534 19532 3 19245 19533 19535 3 19245 19535 19247 3 19246 19248 19536 3 19246 19536 19534 3 19247 
19535 19537 3 19247 19537 19249 3 19248 19250 19538 3 19248 19538 19536 3 19249 19537 19539 3 19249 19539 19251 3 19250 19252 19540 3 19250 19540 19538 3 19251 19539 19541 3 19251 19541 19253 3 19252 19254 19542 3 19252 19542 19540 3 19253 19541 19543 3 19253 19543 19255 3 19254 19256 19544 3 19254 19544 19542 3 19255 19543 19545 3 19255 19545 19257 3 19256 19258 19546 3 19256 19546 19544 3 19257 19545 19547 3 19257 19547 19259 3 19258 19260 19548 3 19258 19548 19546 3 19259 19547 19549 3 19259 19549 19261 3 19260 19262 19550 3 19260 19550 19548 3 19261 19549 19551 3 19261 19551 19263 3 19262 19264 19552 3 19262 19552 19550 3 19263 19551 19553 3 19263 19553 19265 3 19264 19266 19554 3 19264 19554 19552 3 19265 19553 19555 3 19265 19555 19267 3 19266 19268 19556 3 19266 19556 19554 3 19267 19555 19557 3 19267 19557 19269 3 19268 19270 19558 3 19268 19558 19556 3 19269 19557 19559 3 19269 19559 19271 3 19270 19272 19560 3 19270 19560 19558 3 19271 19559 19561 3 19271 19561 19273 3 19272 19274 19562 3 19272 19562 19560 3 19273 19561 19563 3 19273 19563 19275 3 19274 19276 19564 3 19274 19564 19562 3 19275 19563 19565 3 19275 19565 19277 3 19276 19278 19566 3 19276 19566 19564 3 19277 19565 19567 3 19277 19567 19279 3 19278 19280 19568 3 19278 19568 19566 3 19279 19567 19569 3 19279 19569 19281 3 19280 19282 19570 3 19280 19570 19568 3 19281 19569 19571 3 19281 19571 19283 3 19282 19284 19572 3 19282 19572 19570 3 19283 19571 19573 3 19283 19573 19285 3 19284 19286 19574 3 19284 19574 19572 3 19285 19573 19575 3 19285 19575 19287 3 19286 19288 19576 3 19286 19576 19574 3 19287 19575 19577 3 19287 19577 19289 3 19288 19290 19578 3 19288 19578 19576 3 19289 19577 19579 3 19289 19579 19291 3 19290 19292 19580 3 19290 19580 19578 3 19291 19579 19581 3 19291 19581 19293 3 19292 19294 19582 3 19292 19582 19580 3 19293 19581 19583 3 19293 19583 19295 3 19294 19296 19584 3 19294 19584 19582 3 19295 19583 19585 3 19295 19585 19297 3 19296 19298 19586 3 19296 19586 19584 3 19297 
19585 19587 3 19297 19587 19299 3 19298 19300 19588 3 19298 19588 19586 3 19299 19587 19589 3 19299 19589 19301 3 19300 19302 19590 3 19300 19590 19588 3 19301 19589 19591 3 19301 19591 19303 3 19302 19304 19592 3 19302 19592 19590 3 19303 19591 19593 3 19303 19593 19305 3 19304 19306 19594 3 19304 19594 19592 3 19305 19593 19595 3 19305 19595 19307 3 19306 19308 19596 3 19306 19596 19594 3 19307 19595 19597 3 19307 19597 19309 3 19308 19310 19596 3 19598 19596 19310 3 19309 19597 19599 3 19309 19599 19311 3 19310 19312 19598 3 19600 19598 19312 3 19311 19599 19601 3 19311 19601 19313 3 19312 19314 19600 3 19602 19600 19314 3 19313 19601 19603 3 19313 19603 19315 3 19314 19316 19602 3 19604 19602 19316 3 19315 19603 19317 3 19605 19317 19603 3 19316 19318 19604 3 19606 19604 19318 3 19317 19605 19319 3 19607 19319 19605 3 19318 19320 19606 3 19608 19606 19320 3 19319 19607 19321 3 19609 19321 19607 3 19320 19322 19608 3 19610 19608 19322 3 19321 19609 19323 3 19611 19323 19609 3 19322 19324 19610 3 19612 19610 19324 3 19323 19611 19325 3 19613 19325 19611 3 19324 19326 19612 3 19614 19612 19326 3 19325 19613 19327 3 19615 19327 19613 3 19326 19328 19614 3 19616 19614 19328 3 19327 19615 19331 3 19619 19331 19615 3 19328 19329 19616 3 19617 19616 19329 3 19329 19332 19617 3 19620 19617 19332 3 19330 19331 19618 3 19619 19618 19331 3 19330 19618 19335 3 19623 19335 19618 3 19332 19333 19620 3 19621 19620 19333 3 19333 19336 19621 3 19624 19621 19336 3 19334 19335 19622 3 19623 19622 19335 3 19334 19622 19404 3 19692 19404 19622 3 19336 19337 19624 3 19625 19624 19337 3 19337 19338 19625 3 19626 19625 19338 3 19338 19339 19626 3 19627 19626 19339 3 19339 19340 19627 3 19628 19627 19340 3 19340 19341 19628 3 19629 19628 19341 3 19341 19342 19629 3 19630 19629 19342 3 19342 19343 19630 3 19631 19630 19343 3 19343 19344 19631 3 19632 19631 19344 3 19344 19345 19632 3 19633 19632 19345 3 19345 19346 19633 3 19634 19633 19346 3 19346 19347 19634 3 19635 19634 19347 3 19347 
19348 19635 3 19636 19635 19348 3 19348 19349 19636 3 19637 19636 19349 3 19349 19350 19637 3 19638 19637 19350 3 19350 19351 19638 3 19639 19638 19351 3 19351 19352 19639 3 19640 19639 19352 3 19352 19353 19640 3 19641 19640 19353 3 19353 19354 19641 3 19642 19641 19354 3 19354 19355 19642 3 19643 19642 19355 3 19355 19356 19643 3 19644 19643 19356 3 19356 19357 19644 3 19645 19644 19357 3 19357 19358 19645 3 19646 19645 19358 3 19358 19359 19646 3 19647 19646 19359 3 19359 19360 19647 3 19648 19647 19360 3 19360 19361 19648 3 19649 19648 19361 3 19361 19362 19649 3 19650 19649 19362 3 19362 19363 19650 3 19651 19650 19363 3 19363 19364 19651 3 19652 19651 19364 3 19364 19365 19652 3 19653 19652 19365 3 19365 19366 19653 3 19654 19653 19366 3 19366 19367 19654 3 19655 19654 19367 3 19367 19368 19655 3 19656 19655 19368 3 19368 19369 19656 3 19657 19656 19369 3 19369 19370 19657 3 19658 19657 19370 3 19370 19371 19658 3 19659 19658 19371 3 19371 19372 19659 3 19660 19659 19372 3 19372 19373 19660 3 19661 19660 19373 3 19373 19374 19661 3 19662 19661 19374 3 19374 19375 19662 3 19663 19662 19375 3 19375 19376 19663 3 19664 19663 19376 3 19376 19377 19664 3 19665 19664 19377 3 19377 19378 19665 3 19666 19665 19378 3 19378 19379 19666 3 19667 19666 19379 3 19379 19380 19667 3 19668 19667 19380 3 19380 19381 19668 3 19669 19668 19381 3 19381 19382 19669 3 19670 19669 19382 3 19382 19383 19670 3 19671 19670 19383 3 19383 19384 19671 3 19672 19671 19384 3 19384 19385 19672 3 19673 19672 19385 3 19385 19386 19673 3 19674 19673 19386 3 19386 19387 19674 3 19675 19674 19387 3 19387 19388 19675 3 19676 19675 19388 3 19388 19389 19676 3 19677 19676 19389 3 19389 19390 19677 3 19678 19677 19390 3 19390 19391 19678 3 19679 19678 19391 3 19391 19392 19679 3 19680 19679 19392 3 19392 19393 19680 3 19681 19680 19393 3 19393 19394 19681 3 19682 19681 19394 3 19394 19395 19683 3 19394 19683 19682 3 19395 19396 19684 3 19395 19684 19683 3 19396 19397 19685 3 19396 19685 19684 3 19397 
19398 19686 3 19397 19686 19685 3 19398 19399 19687 3 19398 19687 19686 3 19399 19400 19688 3 19399 19688 19687 3 19400 19401 19689 3 19400 19689 19688 3 19401 19402 19690 3 19401 19690 19689 3 19402 19403 19691 3 19402 19691 19690 3 19403 19404 19692 3 19403 19692 19691 3 19405 19693 19694 3 19405 19694 19406 3 19405 19475 19763 3 19405 19763 19693 3 19406 19694 19695 3 19406 19695 19407 3 19407 19695 19696 3 19407 19696 19408 3 19408 19696 19697 3 19408 19697 19409 3 19409 19697 19698 3 19409 19698 19410 3 19410 19698 19699 3 19410 19699 19411 3 19411 19699 19700 3 19411 19700 19412 3 19412 19700 19701 3 19412 19701 19413 3 19413 19701 19702 3 19413 19702 19414 3 19414 19702 19703 3 19414 19703 19415 3 19415 19703 19704 3 19415 19704 19416 3 19416 19704 19705 3 19416 19705 19417 3 19417 19705 19706 3 19417 19706 19418 3 19418 19706 19707 3 19418 19707 19419 3 19419 19707 19708 3 19419 19708 19420 3 19420 19708 19709 3 19420 19709 19421 3 19421 19709 19710 3 19421 19710 19422 3 19422 19710 19711 3 19422 19711 19423 3 19423 19711 19712 3 19423 19712 19424 3 19424 19712 19713 3 19424 19713 19425 3 19425 19713 19714 3 19425 19714 19426 3 19426 19714 19715 3 19426 19715 19427 3 19427 19715 19716 3 19427 19716 19428 3 19428 19716 19717 3 19428 19717 19429 3 19429 19717 19718 3 19429 19718 19430 3 19430 19718 19719 3 19430 19719 19431 3 19431 19719 19720 3 19431 19720 19432 3 19432 19720 19721 3 19432 19721 19433 3 19433 19721 19722 3 19433 19722 19434 3 19434 19722 19723 3 19434 19723 19435 3 19435 19723 19724 3 19435 19724 19436 3 19436 19724 19725 3 19436 19725 19437 3 19437 19725 19726 3 19437 19726 19438 3 19438 19726 19727 3 19438 19727 19439 3 19439 19727 19728 3 19439 19728 19440 3 19440 19728 19729 3 19440 19729 19441 3 19441 19729 19730 3 19441 19730 19442 3 19442 19730 19731 3 19442 19731 19443 3 19443 19731 19732 3 19443 19732 19444 3 19444 19732 19733 3 19444 19733 19445 3 19445 19733 19734 3 19445 19734 19446 3 19446 19734 19735 3 19446 19735 19447 3 19447 
19735 19736 3 19447 19736 19448 3 19448 19736 19737 3 19448 19737 19449 3 19449 19737 19738 3 19449 19738 19450 3 19450 19738 19739 3 19450 19739 19451 3 19451 19739 19740 3 19451 19740 19452 3 19452 19740 19741 3 19452 19741 19453 3 19453 19741 19742 3 19453 19742 19454 3 19454 19742 19743 3 19454 19743 19455 3 19455 19743 19744 3 19455 19744 19456 3 19456 19744 19745 3 19456 19745 19457 3 19457 19745 19746 3 19457 19746 19458 3 19458 19746 19747 3 19458 19747 19459 3 19459 19747 19748 3 19459 19748 19460 3 19460 19748 19749 3 19460 19749 19461 3 19461 19749 19750 3 19461 19750 19462 3 19462 19750 19751 3 19462 19751 19463 3 19463 19751 19752 3 19463 19752 19464 3 19464 19752 19753 3 19464 19753 19465 3 19465 19753 19754 3 19465 19754 19466 3 19466 19754 19755 3 19466 19755 19467 3 19467 19755 19756 3 19467 19756 19468 3 19468 19756 19757 3 19468 19757 19469 3 19469 19757 19758 3 19469 19758 19470 3 19470 19758 19759 3 19470 19759 19471 3 19471 19759 19760 3 19471 19760 19472 3 19472 19760 19761 3 19472 19761 19473 3 19473 19761 19764 3 19473 19764 19476 3 19474 19762 19763 3 19474 19763 19475 3 19474 19479 19767 3 19474 19767 19762 3 19476 19764 19765 3 19476 19765 19477 3 19477 19765 19768 3 19477 19768 19480 3 19478 19766 19767 3 19478 19767 19479 3 19478 19482 19766 3 19770 19766 19482 3 19480 19768 19769 3 19480 19769 19481 3 19481 19769 19771 3 19481 19771 19483 3 19482 19484 19770 3 19772 19770 19484 3 19483 19771 19773 3 19483 19773 19485 3 19484 19486 19772 3 19774 19772 19486 3 19485 19773 19775 3 19485 19775 19487 3 19486 19488 19774 3 19776 19774 19488 3 19487 19775 19489 3 19777 19489 19775 3 19488 19490 19776 3 19778 19776 19490 3 19489 19777 19491 3 19779 19491 19777 3 19490 19492 19778 3 19780 19778 19492 3 19491 19779 19493 3 19781 19493 19779 3 19492 19494 19780 3 19782 19780 19494 3 19493 19781 19495 3 19783 19495 19781 3 19494 19496 19782 3 19784 19782 19496 3 19495 19783 19497 3 19785 19497 19783 3 19496 19498 19784 3 19786 19784 19498 3 19497 
19785 19499 3 19787 19499 19785 3 19498 19500 19786 3 19788 19786 19500 3 19499 19787 19501 3 19789 19501 19787 3 19500 19502 19788 3 19790 19788 19502 3 19501 19789 19503 3 19791 19503 19789 3 19502 19504 19790 3 19792 19790 19504 3 19503 19791 19505 3 19793 19505 19791 3 19504 19506 19792 3 19794 19792 19506 3 19505 19793 19507 3 19795 19507 19793 3 19506 19508 19794 3 19796 19794 19508 3 19507 19795 19509 3 19797 19509 19795 3 19508 19510 19796 3 19798 19796 19510 3 19509 19797 19511 3 19799 19511 19797 3 19510 19512 19798 3 19800 19798 19512 3 19511 19799 19513 3 19801 19513 19799 3 19512 19514 19800 3 19802 19800 19514 3 19513 19801 19515 3 19803 19515 19801 3 19514 19516 19802 3 19804 19802 19516 3 19515 19803 19517 3 19805 19517 19803 3 19516 19518 19804 3 19806 19804 19518 3 19517 19805 19519 3 19807 19519 19805 3 19518 19520 19806 3 19808 19806 19520 3 19519 19807 19521 3 19809 19521 19807 3 19520 19522 19808 3 19810 19808 19522 3 19521 19809 19523 3 19811 19523 19809 3 19522 19524 19810 3 19812 19810 19524 3 19523 19811 19525 3 19813 19525 19811 3 19524 19526 19812 3 19814 19812 19526 3 19525 19813 19527 3 19815 19527 19813 3 19526 19528 19814 3 19816 19814 19528 3 19527 19815 19529 3 19817 19529 19815 3 19528 19530 19816 3 19818 19816 19530 3 19529 19817 19531 3 19819 19531 19817 3 19530 19532 19818 3 19820 19818 19532 3 19531 19819 19533 3 19821 19533 19819 3 19532 19534 19820 3 19822 19820 19534 3 19533 19821 19535 3 19823 19535 19821 3 19534 19536 19822 3 19824 19822 19536 3 19535 19823 19537 3 19825 19537 19823 3 19536 19538 19824 3 19826 19824 19538 3 19537 19825 19539 3 19827 19539 19825 3 19538 19540 19826 3 19828 19826 19540 3 19539 19827 19541 3 19829 19541 19827 3 19540 19542 19828 3 19830 19828 19542 3 19541 19829 19543 3 19831 19543 19829 3 19542 19544 19830 3 19832 19830 19544 3 19543 19831 19545 3 19833 19545 19831 3 19544 19546 19832 3 19834 19832 19546 3 19545 19833 19547 3 19835 19547 19833 3 19546 19548 19834 3 19836 19834 19548 3 19547 
19835 19549 3 19837 19549 19835 3 19548 19550 19836 3 19838 19836 19550 3 19549 19837 19551 3 19839 19551 19837 3 19550 19552 19838 3 19840 19838 19552 3 19551 19839 19553 3 19841 19553 19839 3 19552 19554 19840 3 19842 19840 19554 3 19553 19841 19555 3 19843 19555 19841 3 19554 19556 19842 3 19844 19842 19556 3 19555 19843 19557 3 19845 19557 19843 3 19556 19558 19844 3 19846 19844 19558 3 19557 19845 19559 3 19847 19559 19845 3 19558 19560 19846 3 19848 19846 19560 3 19559 19847 19561 3 19849 19561 19847 3 19560 19562 19848 3 19850 19848 19562 3 19561 19849 19563 3 19851 19563 19849 3 19562 19564 19850 3 19852 19850 19564 3 19563 19851 19565 3 19853 19565 19851 3 19564 19566 19852 3 19854 19852 19566 3 19565 19853 19567 3 19855 19567 19853 3 19566 19568 19856 3 19566 19856 19854 3 19567 19855 19569 3 19857 19569 19855 3 19568 19570 19858 3 19568 19858 19856 3 19569 19857 19571 3 19859 19571 19857 3 19570 19572 19860 3 19570 19860 19858 3 19571 19859 19573 3 19861 19573 19859 3 19572 19574 19862 3 19572 19862 19860 3 19573 19861 19575 3 19863 19575 19861 3 19574 19576 19864 3 19574 19864 19862 3 19575 19863 19865 3 19575 19865 19577 3 19576 19578 19866 3 19576 19866 19864 3 19577 19865 19867 3 19577 19867 19579 3 19578 19580 19868 3 19578 19868 19866 3 19579 19867 19869 3 19579 19869 19581 3 19580 19582 19870 3 19580 19870 19868 3 19581 19869 19871 3 19581 19871 19583 3 19582 19584 19872 3 19582 19872 19870 3 19583 19871 19873 3 19583 19873 19585 3 19584 19586 19874 3 19584 19874 19872 3 19585 19873 19875 3 19585 19875 19587 3 19586 19588 19876 3 19586 19876 19874 3 19587 19875 19877 3 19587 19877 19589 3 19588 19590 19878 3 19588 19878 19876 3 19589 19877 19879 3 19589 19879 19591 3 19590 19592 19880 3 19590 19880 19878 3 19591 19879 19881 3 19591 19881 19593 3 19592 19594 19882 3 19592 19882 19880 3 19593 19881 19883 3 19593 19883 19595 3 19594 19596 19884 3 19594 19884 19882 3 19595 19883 19885 3 19595 19885 19597 3 19596 19598 19886 3 19596 19886 19884 3 19597 
19885 19887 3 19597 19887 19599 3 19598 19600 19888 3 19598 19888 19886 3 19599 19887 19889 3 19599 19889 19601 3 19600 19602 19890 3 19600 19890 19888 3 19601 19889 19891 3 19601 19891 19603 3 19602 19604 19892 3 19602 19892 19890 3 19603 19891 19893 3 19603 19893 19605 3 19604 19606 19894 3 19604 19894 19892 3 19605 19893 19895 3 19605 19895 19607 3 19606 19608 19896 3 19606 19896 19894 3 19607 19895 19897 3 19607 19897 19609 3 19608 19610 19898 3 19608 19898 19896 3 19609 19897 19899 3 19609 19899 19611 3 19610 19612 19900 3 19610 19900 19898 3 19611 19899 19901 3 19611 19901 19613 3 19612 19614 19902 3 19612 19902 19900 3 19613 19901 19903 3 19613 19903 19615 3 19614 19616 19904 3 19614 19904 19902 3 19615 19903 19907 3 19615 19907 19619 3 19616 19617 19905 3 19616 19905 19904 3 19617 19620 19908 3 19617 19908 19905 3 19618 19619 19907 3 19618 19907 19906 3 19618 19906 19911 3 19618 19911 19623 3 19620 19621 19909 3 19620 19909 19908 3 19621 19624 19912 3 19621 19912 19909 3 19622 19623 19911 3 19622 19911 19910 3 19622 19910 19980 3 19622 19980 19692 3 19624 19625 19913 3 19624 19913 19912 3 19625 19626 19914 3 19625 19914 19913 3 19626 19627 19915 3 19626 19915 19914 3 19627 19628 19916 3 19627 19916 19915 3 19628 19629 19917 3 19628 19917 19916 3 19629 19630 19918 3 19629 19918 19917 3 19630 19631 19919 3 19630 19919 19918 3 19631 19632 19920 3 19631 19920 19919 3 19632 19633 19921 3 19632 19921 19920 3 19633 19634 19922 3 19633 19922 19921 3 19634 19635 19923 3 19634 19923 19922 3 19635 19636 19924 3 19635 19924 19923 3 19636 19637 19925 3 19636 19925 19924 3 19637 19638 19926 3 19637 19926 19925 3 19638 19639 19927 3 19638 19927 19926 3 19639 19640 19928 3 19639 19928 19927 3 19640 19641 19929 3 19640 19929 19928 3 19641 19642 19930 3 19641 19930 19929 3 19642 19643 19931 3 19642 19931 19930 3 19643 19644 19932 3 19643 19932 19931 3 19644 19645 19933 3 19644 19933 19932 3 19645 19646 19934 3 19645 19934 19933 3 19646 19647 19935 3 19646 19935 19934 3 19647 
19648 19936 3 19647 19936 19935 3 19648 19649 19937 3 19648 19937 19936 3 19649 19650 19938 3 19649 19938 19937 3 19650 19651 19939 3 19650 19939 19938 3 19651 19652 19940 3 19651 19940 19939 3 19652 19653 19940 3 19941 19940 19653 3 19653 19654 19941 3 19942 19941 19654 3 19654 19655 19942 3 19943 19942 19655 3 19655 19656 19943 3 19944 19943 19656 3 19656 19657 19944 3 19945 19944 19657 3 19657 19658 19945 3 19946 19945 19658 3 19658 19659 19946 3 19947 19946 19659 3 19659 19660 19947 3 19948 19947 19660 3 19660 19661 19948 3 19949 19948 19661 3 19661 19662 19949 3 19950 19949 19662 3 19662 19663 19950 3 19951 19950 19663 3 19663 19664 19951 3 19952 19951 19664 3 19664 19665 19952 3 19953 19952 19665 3 19665 19666 19953 3 19954 19953 19666 3 19666 19667 19954 3 19955 19954 19667 3 19667 19668 19955 3 19956 19955 19668 3 19668 19669 19956 3 19957 19956 19669 3 19669 19670 19957 3 19958 19957 19670 3 19670 19671 19958 3 19959 19958 19671 3 19671 19672 19959 3 19960 19959 19672 3 19672 19673 19960 3 19961 19960 19673 3 19673 19674 19961 3 19962 19961 19674 3 19674 19675 19962 3 19963 19962 19675 3 19675 19676 19963 3 19964 19963 19676 3 19676 19677 19964 3 19965 19964 19677 3 19677 19678 19965 3 19966 19965 19678 3 19678 19679 19966 3 19967 19966 19679 3 19679 19680 19967 3 19968 19967 19680 3 19680 19681 19968 3 19969 19968 19681 3 19681 19682 19969 3 19970 19969 19682 3 19682 19683 19970 3 19971 19970 19683 3 19683 19684 19971 3 19972 19971 19684 3 19684 19685 19972 3 19973 19972 19685 3 19685 19686 19973 3 19974 19973 19686 3 19686 19687 19974 3 19975 19974 19687 3 19687 19688 19975 3 19976 19975 19688 3 19688 19689 19976 3 19977 19976 19689 3 19689 19690 19977 3 19978 19977 19690 3 19690 19691 19978 3 19979 19978 19691 3 19691 19692 19979 3 19980 19979 19692 3 19693 19981 19694 3 19982 19694 19981 3 19693 19763 19981 3 20051 19981 19763 3 19694 19982 19695 3 19983 19695 19982 3 19695 19983 19696 3 19984 19696 19983 3 19696 19984 19697 3 19985 19697 19984 3 19697 
19985 19698 3 19986 19698 19985 3 19698 19986 19699 3 19987 19699 19986 3 19699 19987 19700 3 19988 19700 19987 3 19700 19988 19701 3 19989 19701 19988 3 19701 19989 19702 3 19990 19702 19989 3 19702 19990 19703 3 19991 19703 19990 3 19703 19991 19704 3 19992 19704 19991 3 19704 19992 19705 3 19993 19705 19992 3 19705 19993 19706 3 19994 19706 19993 3 19706 19994 19707 3 19995 19707 19994 3 19707 19995 19708 3 19996 19708 19995 3 19708 19996 19709 3 19997 19709 19996 3 19709 19997 19710 3 19998 19710 19997 3 19710 19998 19711 3 19999 19711 19998 3 19711 19999 19712 3 20000 19712 19999 3 19712 20000 19713 3 20001 19713 20000 3 19713 20001 19714 3 20002 19714 20001 3 19714 20002 19715 3 20003 19715 20002 3 19715 20003 19716 3 20004 19716 20003 3 19716 20004 19717 3 20005 19717 20004 3 19717 20005 19718 3 20006 19718 20005 3 19718 20006 19719 3 20007 19719 20006 3 19719 20007 19720 3 20008 19720 20007 3 19720 20008 19721 3 20009 19721 20008 3 19721 20009 19722 3 20010 19722 20009 3 19722 20010 19723 3 20011 19723 20010 3 19723 20011 19724 3 20012 19724 20011 3 19724 20012 19725 3 20013 19725 20012 3 19725 20013 19726 3 20014 19726 20013 3 19726 20014 19727 3 20015 19727 20014 3 19727 20015 19728 3 20016 19728 20015 3 19728 20016 19729 3 20017 19729 20016 3 19729 20017 19730 3 20018 19730 20017 3 19730 20018 19731 3 20019 19731 20018 3 19731 20019 19732 3 20020 19732 20019 3 19732 20020 19733 3 20021 19733 20020 3 19733 20021 19734 3 20022 19734 20021 3 19734 20022 19735 3 20023 19735 20022 3 19735 20023 19736 3 20024 19736 20023 3 19736 20024 19737 3 20025 19737 20024 3 19737 20025 19738 3 20026 19738 20025 3 19738 20026 19739 3 20027 19739 20026 3 19739 20027 19740 3 20028 19740 20027 3 19740 20028 19741 3 20029 19741 20028 3 19741 20029 19742 3 20030 19742 20029 3 19742 20030 19743 3 20031 19743 20030 3 19743 20031 19744 3 20032 19744 20031 3 19744 20032 19745 3 20033 19745 20032 3 19745 20033 19746 3 20034 19746 20033 3 19746 20034 19747 3 20035 19747 20034 3 19747 
20035 20036 3 19747 20036 19748 3 19748 20036 20037 3 19748 20037 19749 3 19749 20037 20038 3 19749 20038 19750 3 19750 20038 20039 3 19750 20039 19751 3 19751 20039 20040 3 19751 20040 19752 3 19752 20040 20041 3 19752 20041 19753 3 19753 20041 20042 3 19753 20042 19754 3 19754 20042 20043 3 19754 20043 19755 3 19755 20043 20044 3 19755 20044 19756 3 19756 20044 20045 3 19756 20045 19757 3 19757 20045 20046 3 19757 20046 19758 3 19758 20046 20047 3 19758 20047 19759 3 19759 20047 20048 3 19759 20048 19760 3 19760 20048 20049 3 19760 20049 19761 3 19761 20049 20052 3 19761 20052 19764 3 19762 20050 20051 3 19762 20051 19763 3 19762 19767 20055 3 19762 20055 20050 3 19764 20052 20053 3 19764 20053 19765 3 19765 20053 20056 3 19765 20056 19768 3 19766 20054 20055 3 19766 20055 19767 3 19766 19770 20058 3 19766 20058 20054 3 19768 20056 20057 3 19768 20057 19769 3 19769 20057 20059 3 19769 20059 19771 3 19770 19772 20060 3 19770 20060 20058 3 19771 20059 20061 3 19771 20061 19773 3 19772 19774 20062 3 19772 20062 20060 3 19773 20061 20063 3 19773 20063 19775 3 19774 19776 20064 3 19774 20064 20062 3 19775 20063 20065 3 19775 20065 19777 3 19776 19778 20066 3 19776 20066 20064 3 19777 20065 20067 3 19777 20067 19779 3 19778 19780 20068 3 19778 20068 20066 3 19779 20067 20069 3 19779 20069 19781 3 19780 19782 20070 3 19780 20070 20068 3 19781 20069 20071 3 19781 20071 19783 3 19782 19784 20072 3 19782 20072 20070 3 19783 20071 20073 3 19783 20073 19785 3 19784 19786 20074 3 19784 20074 20072 3 19785 20073 20075 3 19785 20075 19787 3 19786 19788 20076 3 19786 20076 20074 3 19787 20075 20077 3 19787 20077 19789 3 19788 19790 20078 3 19788 20078 20076 3 19789 20077 20079 3 19789 20079 19791 3 19790 19792 20080 3 19790 20080 20078 3 19791 20079 20081 3 19791 20081 19793 3 19792 19794 20082 3 19792 20082 20080 3 19793 20081 20083 3 19793 20083 19795 3 19794 19796 20084 3 19794 20084 20082 3 19795 20083 20085 3 19795 20085 19797 3 19796 19798 20086 3 19796 20086 20084 3 19797 
20085 20087 3 19797 20087 19799 3 19798 19800 20088 3 19798 20088 20086 3 19799 20087 20089 3 19799 20089 19801 3 19800 19802 20090 3 19800 20090 20088 3 19801 20089 20091 3 19801 20091 19803 3 19802 19804 20092 3 19802 20092 20090 3 19803 20091 20093 3 19803 20093 19805 3 19804 19806 20094 3 19804 20094 20092 3 19805 20093 20095 3 19805 20095 19807 3 19806 19808 20096 3 19806 20096 20094 3 19807 20095 20097 3 19807 20097 19809 3 19808 19810 20098 3 19808 20098 20096 3 19809 20097 20099 3 19809 20099 19811 3 19810 19812 20100 3 19810 20100 20098 3 19811 20099 20101 3 19811 20101 19813 3 19812 19814 20102 3 19812 20102 20100 3 19813 20101 20103 3 19813 20103 19815 3 19814 19816 20104 3 19814 20104 20102 3 19815 20103 20105 3 19815 20105 19817 3 19816 19818 20106 3 19816 20106 20104 3 19817 20105 20107 3 19817 20107 19819 3 19818 19820 20108 3 19818 20108 20106 3 19819 20107 20109 3 19819 20109 19821 3 19820 19822 20110 3 19820 20110 20108 3 19821 20109 20111 3 19821 20111 19823 3 19822 19824 20112 3 19822 20112 20110 3 19823 20111 20113 3 19823 20113 19825 3 19824 19826 20114 3 19824 20114 20112 3 19825 20113 20115 3 19825 20115 19827 3 19826 19828 20114 3 20116 20114 19828 3 19827 20115 20117 3 19827 20117 19829 3 19828 19830 20116 3 20118 20116 19830 3 19829 20117 20119 3 19829 20119 19831 3 19830 19832 20118 3 20120 20118 19832 3 19831 20119 20121 3 19831 20121 19833 3 19832 19834 20120 3 20122 20120 19834 3 19833 20121 20123 3 19833 20123 19835 3 19834 19836 20122 3 20124 20122 19836 3 19835 20123 19837 3 20125 19837 20123 3 19836 19838 20124 3 20126 20124 19838 3 19837 20125 19839 3 20127 19839 20125 3 19838 19840 20126 3 20128 20126 19840 3 19839 20127 19841 3 20129 19841 20127 3 19840 19842 20128 3 20130 20128 19842 3 19841 20129 19843 3 20131 19843 20129 3 19842 19844 20130 3 20132 20130 19844 3 19843 20131 19845 3 20133 19845 20131 3 19844 19846 20132 3 20134 20132 19846 3 19845 20133 19847 3 20135 19847 20133 3 19846 19848 20134 3 20136 20134 19848 3 19847 
20135 19849 3 20137 19849 20135 3 19848 19850 20136 3 20138 20136 19850 3 19849 20137 19851 3 20139 19851 20137 3 19850 19852 20138 3 20140 20138 19852 3 19851 20139 19853 3 20141 19853 20139 3 19852 19854 20140 3 20142 20140 19854 3 19853 20141 19855 3 20143 19855 20141 3 19854 19856 20142 3 20144 20142 19856 3 19855 20143 19857 3 20145 19857 20143 3 19856 19858 20144 3 20146 20144 19858 3 19857 20145 19859 3 20147 19859 20145 3 19858 19860 20146 3 20148 20146 19860 3 19859 20147 19861 3 20149 19861 20147 3 19860 19862 20148 3 20150 20148 19862 3 19861 20149 19863 3 20151 19863 20149 3 19862 19864 20150 3 20152 20150 19864 3 19863 20151 19865 3 20153 19865 20151 3 19864 19866 20152 3 20154 20152 19866 3 19865 20153 19867 3 20155 19867 20153 3 19866 19868 20154 3 20156 20154 19868 3 19867 20155 19869 3 20157 19869 20155 3 19868 19870 20156 3 20158 20156 19870 3 19869 20157 19871 3 20159 19871 20157 3 19870 19872 20158 3 20160 20158 19872 3 19871 20159 19873 3 20161 19873 20159 3 19872 19874 20160 3 20162 20160 19874 3 19873 20161 19875 3 20163 19875 20161 3 19874 19876 20162 3 20164 20162 19876 3 19875 20163 19877 3 20165 19877 20163 3 19876 19878 20164 3 20166 20164 19878 3 19877 20165 19879 3 20167 19879 20165 3 19878 19880 20166 3 20168 20166 19880 3 19879 20167 19881 3 20169 19881 20167 3 19880 19882 20168 3 20170 20168 19882 3 19881 20169 19883 3 20171 19883 20169 3 19882 19884 20170 3 20172 20170 19884 3 19883 20171 19885 3 20173 19885 20171 3 19884 19886 20172 3 20174 20172 19886 3 19885 20173 19887 3 20175 19887 20173 3 19886 19888 20174 3 20176 20174 19888 3 19887 20175 19889 3 20177 19889 20175 3 19888 19890 20176 3 20178 20176 19890 3 19889 20177 19891 3 20179 19891 20177 3 19890 19892 20178 3 20180 20178 19892 3 19891 20179 19893 3 20181 19893 20179 3 19892 19894 20180 3 20182 20180 19894 3 19893 20181 19895 3 20183 19895 20181 3 19894 19896 20182 3 20184 20182 19896 3 19895 20183 19897 3 20185 19897 20183 3 19896 19898 20184 3 20186 20184 19898 3 19897 
20185 19899 3 20187 19899 20185 3 19898 19900 20186 3 20188 20186 19900 3 19899 20187 19901 3 20189 19901 20187 3 19900 19902 20188 3 20190 20188 19902 3 19901 20189 19903 3 20191 19903 20189 3 19902 19904 20190 3 20192 20190 19904 3 19903 20191 19907 3 20195 19907 20191 3 19904 19905 20192 3 20193 20192 19905 3 19905 19908 20193 3 20196 20193 19908 3 19906 19907 20194 3 20195 20194 19907 3 19906 20194 19911 3 20199 19911 20194 3 19908 19909 20196 3 20197 20196 19909 3 19909 19912 20197 3 20200 20197 19912 3 19910 19911 20198 3 20199 20198 19911 3 19910 20198 19980 3 20268 19980 20198 3 19912 19913 20201 3 19912 20201 20200 3 19913 19914 20202 3 19913 20202 20201 3 19914 19915 20203 3 19914 20203 20202 3 19915 19916 20204 3 19915 20204 20203 3 19916 19917 20205 3 19916 20205 20204 3 19917 19918 20206 3 19917 20206 20205 3 19918 19919 20207 3 19918 20207 20206 3 19919 19920 20208 3 19919 20208 20207 3 19920 19921 20209 3 19920 20209 20208 3 19921 19922 20210 3 19921 20210 20209 3 19922 19923 20211 3 19922 20211 20210 3 19923 19924 20212 3 19923 20212 20211 3 19924 19925 20213 3 19924 20213 20212 3 19925 19926 20214 3 19925 20214 20213 3 19926 19927 20215 3 19926 20215 20214 3 19927 19928 20216 3 19927 20216 20215 3 19928 19929 20217 3 19928 20217 20216 3 19929 19930 20218 3 19929 20218 20217 3 19930 19931 20219 3 19930 20219 20218 3 19931 19932 20220 3 19931 20220 20219 3 19932 19933 20221 3 19932 20221 20220 3 19933 19934 20222 3 19933 20222 20221 3 19934 19935 20223 3 19934 20223 20222 3 19935 19936 20224 3 19935 20224 20223 3 19936 19937 20225 3 19936 20225 20224 3 19937 19938 20226 3 19937 20226 20225 3 19938 19939 20227 3 19938 20227 20226 3 19939 19940 20228 3 19939 20228 20227 3 19940 19941 20229 3 19940 20229 20228 3 19941 19942 20230 3 19941 20230 20229 3 19942 19943 20231 3 19942 20231 20230 3 19943 19944 20232 3 19943 20232 20231 3 19944 19945 20233 3 19944 20233 20232 3 19945 19946 20234 3 19945 20234 20233 3 19946 19947 20235 3 19946 20235 20234 3 19947 
19948 20236 3 19947 20236 20235 3 19948 19949 20237 3 19948 20237 20236 3 19949 19950 20238 3 19949 20238 20237 3 19950 19951 20239 3 19950 20239 20238 3 19951 19952 20240 3 19951 20240 20239 3 19952 19953 20241 3 19952 20241 20240 3 19953 19954 20242 3 19953 20242 20241 3 19954 19955 20243 3 19954 20243 20242 3 19955 19956 20244 3 19955 20244 20243 3 19956 19957 20245 3 19956 20245 20244 3 19957 19958 20246 3 19957 20246 20245 3 19958 19959 20247 3 19958 20247 20246 3 19959 19960 20248 3 19959 20248 20247 3 19960 19961 20249 3 19960 20249 20248 3 19961 19962 20250 3 19961 20250 20249 3 19962 19963 20251 3 19962 20251 20250 3 19963 19964 20252 3 19963 20252 20251 3 19964 19965 20253 3 19964 20253 20252 3 19965 19966 20254 3 19965 20254 20253 3 19966 19967 20255 3 19966 20255 20254 3 19967 19968 20256 3 19967 20256 20255 3 19968 19969 20257 3 19968 20257 20256 3 19969 19970 20258 3 19969 20258 20257 3 19970 19971 20259 3 19970 20259 20258 3 19971 19972 20260 3 19971 20260 20259 3 19972 19973 20261 3 19972 20261 20260 3 19973 19974 20262 3 19973 20262 20261 3 19974 19975 20263 3 19974 20263 20262 3 19975 19976 20264 3 19975 20264 20263 3 19976 19977 20265 3 19976 20265 20264 3 19977 19978 20266 3 19977 20266 20265 3 19978 19979 20267 3 19978 20267 20266 3 19979 19980 20268 3 19979 20268 20267 3 19981 20269 20270 3 19981 20270 19982 3 19981 20051 20269 3 20339 20269 20051 3 19982 20270 20271 3 19982 20271 19983 3 19983 20271 20272 3 19983 20272 19984 3 19984 20272 20273 3 19984 20273 19985 3 19985 20273 20274 3 19985 20274 19986 3 19986 20274 20275 3 19986 20275 19987 3 19987 20275 20276 3 19987 20276 19988 3 19988 20276 20277 3 19988 20277 19989 3 19989 20277 20278 3 19989 20278 19990 3 19990 20278 20279 3 19990 20279 19991 3 19991 20279 20280 3 19991 20280 19992 3 19992 20280 20281 3 19992 20281 19993 3 19993 20281 20282 3 19993 20282 19994 3 19994 20282 20283 3 19994 20283 19995 3 19995 20283 20284 3 19995 20284 19996 3 19996 20284 20285 3 19996 20285 19997 3 19997 
20285 20286 3 19997 20286 19998 3 19998 20286 20287 3 19998 20287 19999 3 19999 20287 20288 3 19999 20288 20000 3 20000 20288 20289 3 20000 20289 20001 3 20001 20289 20290 3 20001 20290 20002 3 20002 20290 20291 3 20002 20291 20003 3 20003 20291 20292 3 20003 20292 20004 3 20004 20292 20293 3 20004 20293 20005 3 20005 20293 20294 3 20005 20294 20006 3 20006 20294 20295 3 20006 20295 20007 3 20007 20295 20296 3 20007 20296 20008 3 20008 20296 20009 3 20297 20009 20296 3 20009 20297 20010 3 20298 20010 20297 3 20010 20298 20011 3 20299 20011 20298 3 20011 20299 20012 3 20300 20012 20299 3 20012 20300 20013 3 20301 20013 20300 3 20013 20301 20014 3 20302 20014 20301 3 20014 20302 20015 3 20303 20015 20302 3 20015 20303 20016 3 20304 20016 20303 3 20016 20304 20017 3 20305 20017 20304 3 20017 20305 20018 3 20306 20018 20305 3 20018 20306 20019 3 20307 20019 20306 3 20019 20307 20020 3 20308 20020 20307 3 20020 20308 20021 3 20309 20021 20308 3 20021 20309 20022 3 20310 20022 20309 3 20022 20310 20023 3 20311 20023 20310 3 20023 20311 20024 3 20312 20024 20311 3 20024 20312 20025 3 20313 20025 20312 3 20025 20313 20026 3 20314 20026 20313 3 20026 20314 20027 3 20315 20027 20314 3 20027 20315 20028 3 20316 20028 20315 3 20028 20316 20029 3 20317 20029 20316 3 20029 20317 20030 3 20318 20030 20317 3 20030 20318 20031 3 20319 20031 20318 3 20031 20319 20032 3 20320 20032 20319 3 20032 20320 20033 3 20321 20033 20320 3 20033 20321 20034 3 20322 20034 20321 3 20034 20322 20035 3 20323 20035 20322 3 20035 20323 20036 3 20324 20036 20323 3 20036 20324 20037 3 20325 20037 20324 3 20037 20325 20038 3 20326 20038 20325 3 20038 20326 20039 3 20327 20039 20326 3 20039 20327 20040 3 20328 20040 20327 3 20040 20328 20041 3 20329 20041 20328 3 20041 20329 20042 3 20330 20042 20329 3 20042 20330 20043 3 20331 20043 20330 3 20043 20331 20044 3 20332 20044 20331 3 20044 20332 20045 3 20333 20045 20332 3 20045 20333 20046 3 20334 20046 20333 3 20046 20334 20047 3 20335 20047 20334 3 20047 
20335 20048 3 20336 20048 20335 3 20048 20336 20049 3 20337 20049 20336 3 20049 20337 20052 3 20340 20052 20337 3 20050 20338 20051 3 20339 20051 20338 3 20050 20055 20338 3 20343 20338 20055 3 20052 20340 20053 3 20341 20053 20340 3 20053 20341 20056 3 20344 20056 20341 3 20054 20342 20055 3 20343 20055 20342 3 20054 20058 20342 3 20346 20342 20058 3 20056 20344 20057 3 20345 20057 20344 3 20057 20345 20059 3 20347 20059 20345 3 20058 20060 20346 3 20348 20346 20060 3 20059 20347 20061 3 20349 20061 20347 3 20060 20062 20348 3 20350 20348 20062 3 20061 20349 20063 3 20351 20063 20349 3 20062 20064 20350 3 20352 20350 20064 3 20063 20351 20065 3 20353 20065 20351 3 20064 20066 20352 3 20354 20352 20066 3 20065 20353 20067 3 20355 20067 20353 3 20066 20068 20354 3 20356 20354 20068 3 20067 20355 20069 3 20357 20069 20355 3 20068 20070 20356 3 20358 20356 20070 3 20069 20357 20071 3 20359 20071 20357 3 20070 20072 20358 3 20360 20358 20072 3 20071 20359 20073 3 20361 20073 20359 3 20072 20074 20360 3 20362 20360 20074 3 20073 20361 20075 3 20363 20075 20361 3 20074 20076 20362 3 20364 20362 20076 3 20075 20363 20077 3 20365 20077 20363 3 20076 20078 20364 3 20366 20364 20078 3 20077 20365 20079 3 20367 20079 20365 3 20078 20080 20366 3 20368 20366 20080 3 20079 20367 20081 3 20369 20081 20367 3 20080 20082 20368 3 20370 20368 20082 3 20081 20369 20083 3 20371 20083 20369 3 20082 20084 20370 3 20372 20370 20084 3 20083 20371 20085 3 20373 20085 20371 3 20084 20086 20372 3 20374 20372 20086 3 20085 20373 20087 3 20375 20087 20373 3 20086 20088 20376 3 20086 20376 20374 3 20087 20375 20089 3 20377 20089 20375 3 20088 20090 20378 3 20088 20378 20376 3 20089 20377 20091 3 20379 20091 20377 3 20090 20092 20380 3 20090 20380 20378 3 20091 20379 20093 3 20381 20093 20379 3 20092 20094 20382 3 20092 20382 20380 3 20093 20381 20095 3 20383 20095 20381 3 20094 20096 20384 3 20094 20384 20382 3 20095 20383 20097 3 20385 20097 20383 3 20096 20098 20386 3 20096 20386 20384 3 20097 
20385 20387 3 20097 20387 20099 3 20098 20100 20388 3 20098 20388 20386 3 20099 20387 20389 3 20099 20389 20101 3 20100 20102 20390 3 20100 20390 20388 3 20101 20389 20391 3 20101 20391 20103 3 20102 20104 20392 3 20102 20392 20390 3 20103 20391 20393 3 20103 20393 20105 3 20104 20106 20394 3 20104 20394 20392 3 20105 20393 20395 3 20105 20395 20107 3 20106 20108 20396 3 20106 20396 20394 3 20107 20395 20397 3 20107 20397 20109 3 20108 20110 20398 3 20108 20398 20396 3 20109 20397 20399 3 20109 20399 20111 3 20110 20112 20400 3 20110 20400 20398 3 20111 20399 20401 3 20111 20401 20113 3 20112 20114 20402 3 20112 20402 20400 3 20113 20401 20403 3 20113 20403 20115 3 20114 20116 20404 3 20114 20404 20402 3 20115 20403 20405 3 20115 20405 20117 3 20116 20118 20406 3 20116 20406 20404 3 20117 20405 20407 3 20117 20407 20119 3 20118 20120 20408 3 20118 20408 20406 3 20119 20407 20409 3 20119 20409 20121 3 20120 20122 20410 3 20120 20410 20408 3 20121 20409 20411 3 20121 20411 20123 3 20122 20124 20412 3 20122 20412 20410 3 20123 20411 20413 3 20123 20413 20125 3 20124 20126 20414 3 20124 20414 20412 3 20125 20413 20415 3 20125 20415 20127 3 20126 20128 20416 3 20126 20416 20414 3 20127 20415 20417 3 20127 20417 20129 3 20128 20130 20418 3 20128 20418 20416 3 20129 20417 20419 3 20129 20419 20131 3 20130 20132 20420 3 20130 20420 20418 3 20131 20419 20421 3 20131 20421 20133 3 20132 20134 20422 3 20132 20422 20420 3 20133 20421 20423 3 20133 20423 20135 3 20134 20136 20424 3 20134 20424 20422 3 20135 20423 20425 3 20135 20425 20137 3 20136 20138 20426 3 20136 20426 20424 3 20137 20425 20427 3 20137 20427 20139 3 20138 20140 20428 3 20138 20428 20426 3 20139 20427 20429 3 20139 20429 20141 3 20140 20142 20430 3 20140 20430 20428 3 20141 20429 20431 3 20141 20431 20143 3 20142 20144 20432 3 20142 20432 20430 3 20143 20431 20433 3 20143 20433 20145 3 20144 20146 20434 3 20144 20434 20432 3 20145 20433 20435 3 20145 20435 20147 3 20146 20148 20436 3 20146 20436 20434 3 20147 
20435 20437 3 20147 20437 20149 3 20148 20150 20438 3 20148 20438 20436 3 20149 20437 20439 3 20149 20439 20151 3 20150 20152 20440 3 20150 20440 20438 3 20151 20439 20441 3 20151 20441 20153 3 20152 20154 20442 3 20152 20442 20440 3 20153 20441 20443 3 20153 20443 20155 3 20154 20156 20444 3 20154 20444 20442 3 20155 20443 20445 3 20155 20445 20157 3 20156 20158 20446 3 20156 20446 20444 3 20157 20445 20447 3 20157 20447 20159 3 20158 20160 20448 3 20158 20448 20446 3 20159 20447 20449 3 20159 20449 20161 3 20160 20162 20450 3 20160 20450 20448 3 20161 20449 20451 3 20161 20451 20163 3 20162 20164 20452 3 20162 20452 20450 3 20163 20451 20453 3 20163 20453 20165 3 20164 20166 20454 3 20164 20454 20452 3 20165 20453 20455 3 20165 20455 20167 3 20166 20168 20456 3 20166 20456 20454 3 20167 20455 20457 3 20167 20457 20169 3 20168 20170 20458 3 20168 20458 20456 3 20169 20457 20459 3 20169 20459 20171 3 20170 20172 20460 3 20170 20460 20458 3 20171 20459 20461 3 20171 20461 20173 3 20172 20174 20462 3 20172 20462 20460 3 20173 20461 20463 3 20173 20463 20175 3 20174 20176 20462 3 20464 20462 20176 3 20175 20463 20465 3 20175 20465 20177 3 20176 20178 20464 3 20466 20464 20178 3 20177 20465 20467 3 20177 20467 20179 3 20178 20180 20466 3 20468 20466 20180 3 20179 20467 20469 3 20179 20469 20181 3 20180 20182 20468 3 20470 20468 20182 3 20181 20469 20471 3 20181 20471 20183 3 20182 20184 20470 3 20472 20470 20184 3 20183 20471 20473 3 20183 20473 20185 3 20184 20186 20472 3 20474 20472 20186 3 20185 20473 20187 3 20475 20187 20473 3 20186 20188 20474 3 20476 20474 20188 3 20187 20475 20189 3 20477 20189 20475 3 20188 20190 20476 3 20478 20476 20190 3 20189 20477 20191 3 20479 20191 20477 3 20190 20192 20478 3 20480 20478 20192 3 20191 20479 20195 3 20483 20195 20479 3 20192 20193 20480 3 20481 20480 20193 3 20193 20196 20481 3 20484 20481 20196 3 20194 20195 20482 3 20483 20482 20195 3 20194 20482 20199 3 20487 20199 20482 3 20196 20197 20484 3 20485 20484 20197 3 20197 
20200 20485 3 20488 20485 20200 3 20198 20199 20486 3 20487 20486 20199 3 20198 20486 20268 3 20556 20268 20486 3 20200 20201 20488 3 20489 20488 20201 3 20201 20202 20489 3 20490 20489 20202 3 20202 20203 20490 3 20491 20490 20203 3 20203 20204 20491 3 20492 20491 20204 3 20204 20205 20492 3 20493 20492 20205 3 20205 20206 20493 3 20494 20493 20206 3 20206 20207 20494 3 20495 20494 20207 3 20207 20208 20495 3 20496 20495 20208 3 20208 20209 20496 3 20497 20496 20209 3 20209 20210 20497 3 20498 20497 20210 3 20210 20211 20498 3 20499 20498 20211 3 20211 20212 20499 3 20500 20499 20212 3 20212 20213 20500 3 20501 20500 20213 3 20213 20214 20501 3 20502 20501 20214 3 20214 20215 20502 3 20503 20502 20215 3 20215 20216 20503 3 20504 20503 20216 3 20216 20217 20504 3 20505 20504 20217 3 20217 20218 20505 3 20506 20505 20218 3 20218 20219 20506 3 20507 20506 20219 3 20219 20220 20507 3 20508 20507 20220 3 20220 20221 20508 3 20509 20508 20221 3 20221 20222 20509 3 20510 20509 20222 3 20222 20223 20510 3 20511 20510 20223 3 20223 20224 20511 3 20512 20511 20224 3 20224 20225 20512 3 20513 20512 20225 3 20225 20226 20513 3 20514 20513 20226 3 20226 20227 20514 3 20515 20514 20227 3 20227 20228 20515 3 20516 20515 20228 3 20228 20229 20516 3 20517 20516 20229 3 20229 20230 20517 3 20518 20517 20230 3 20230 20231 20518 3 20519 20518 20231 3 20231 20232 20519 3 20520 20519 20232 3 20232 20233 20520 3 20521 20520 20233 3 20233 20234 20521 3 20522 20521 20234 3 20234 20235 20522 3 20523 20522 20235 3 20235 20236 20523 3 20524 20523 20236 3 20236 20237 20524 3 20525 20524 20237 3 20237 20238 20525 3 20526 20525 20238 3 20238 20239 20526 3 20527 20526 20239 3 20239 20240 20527 3 20528 20527 20240 3 20240 20241 20528 3 20529 20528 20241 3 20241 20242 20529 3 20530 20529 20242 3 20242 20243 20530 3 20531 20530 20243 3 20243 20244 20531 3 20532 20531 20244 3 20244 20245 20532 3 20533 20532 20245 3 20245 20246 20533 3 20534 20533 20246 3 20246 20247 20534 3 20535 20534 20247 3 20247 
20248 20535 3 20536 20535 20248 3 20248 20249 20536 3 20537 20536 20249 3 20249 20250 20537 3 20538 20537 20250 3 20250 20251 20538 3 20539 20538 20251 3 20251 20252 20539 3 20540 20539 20252 3 20252 20253 20540 3 20541 20540 20253 3 20253 20254 20541 3 20542 20541 20254 3 20254 20255 20542 3 20543 20542 20255 3 20255 20256 20543 3 20544 20543 20256 3 20256 20257 20544 3 20545 20544 20257 3 20257 20258 20545 3 20546 20545 20258 3 20258 20259 20546 3 20547 20546 20259 3 20259 20260 20547 3 20548 20547 20260 3 20260 20261 20548 3 20549 20548 20261 3 20261 20262 20550 3 20261 20550 20549 3 20262 20263 20551 3 20262 20551 20550 3 20263 20264 20552 3 20263 20552 20551 3 20264 20265 20553 3 20264 20553 20552 3 20265 20266 20554 3 20265 20554 20553 3 20266 20267 20555 3 20266 20555 20554 3 20267 20268 20556 3 20267 20556 20555 3 20269 20557 20270 3 20558 20270 20557 3 20269 20339 20627 3 20269 20627 20557 3 20270 20558 20271 3 20559 20271 20558 3 20271 20559 20272 3 20560 20272 20559 3 20272 20560 20561 3 20272 20561 20273 3 20273 20561 20562 3 20273 20562 20274 3 20274 20562 20563 3 20274 20563 20275 3 20275 20563 20564 3 20275 20564 20276 3 20276 20564 20565 3 20276 20565 20277 3 20277 20565 20566 3 20277 20566 20278 3 20278 20566 20567 3 20278 20567 20279 3 20279 20567 20568 3 20279 20568 20280 3 20280 20568 20569 3 20280 20569 20281 3 20281 20569 20570 3 20281 20570 20282 3 20282 20570 20571 3 20282 20571 20283 3 20283 20571 20572 3 20283 20572 20284 3 20284 20572 20573 3 20284 20573 20285 3 20285 20573 20574 3 20285 20574 20286 3 20286 20574 20575 3 20286 20575 20287 3 20287 20575 20576 3 20287 20576 20288 3 20288 20576 20577 3 20288 20577 20289 3 20289 20577 20578 3 20289 20578 20290 3 20290 20578 20579 3 20290 20579 20291 3 20291 20579 20580 3 20291 20580 20292 3 20292 20580 20581 3 20292 20581 20293 3 20293 20581 20582 3 20293 20582 20294 3 20294 20582 20583 3 20294 20583 20295 3 20295 20583 20584 3 20295 20584 20296 3 20296 20584 20585 3 20296 20585 20297 3 20297 
20585 20586 3 20297 20586 20298 3 20298 20586 20587 3 20298 20587 20299 3 20299 20587 20588 3 20299 20588 20300 3 20300 20588 20589 3 20300 20589 20301 3 20301 20589 20590 3 20301 20590 20302 3 20302 20590 20591 3 20302 20591 20303 3 20303 20591 20592 3 20303 20592 20304 3 20304 20592 20593 3 20304 20593 20305 3 20305 20593 20594 3 20305 20594 20306 3 20306 20594 20595 3 20306 20595 20307 3 20307 20595 20596 3 20307 20596 20308 3 20308 20596 20597 3 20308 20597 20309 3 20309 20597 20598 3 20309 20598 20310 3 20310 20598 20599 3 20310 20599 20311 3 20311 20599 20600 3 20311 20600 20312 3 20312 20600 20601 3 20312 20601 20313 3 20313 20601 20602 3 20313 20602 20314 3 20314 20602 20603 3 20314 20603 20315 3 20315 20603 20604 3 20315 20604 20316 3 20316 20604 20605 3 20316 20605 20317 3 20317 20605 20606 3 20317 20606 20318 3 20318 20606 20607 3 20318 20607 20319 3 20319 20607 20608 3 20319 20608 20320 3 20320 20608 20609 3 20320 20609 20321 3 20321 20609 20610 3 20321 20610 20322 3 20322 20610 20611 3 20322 20611 20323 3 20323 20611 20612 3 20323 20612 20324 3 20324 20612 20613 3 20324 20613 20325 3 20325 20613 20614 3 20325 20614 20326 3 20326 20614 20615 3 20326 20615 20327 3 20327 20615 20616 3 20327 20616 20328 3 20328 20616 20617 3 20328 20617 20329 3 20329 20617 20618 3 20329 20618 20330 3 20330 20618 20619 3 20330 20619 20331 3 20331 20619 20620 3 20331 20620 20332 3 20332 20620 20621 3 20332 20621 20333 3 20333 20621 20622 3 20333 20622 20334 3 20334 20622 20623 3 20334 20623 20335 3 20335 20623 20624 3 20335 20624 20336 3 20336 20624 20625 3 20336 20625 20337 3 20337 20625 20628 3 20337 20628 20340 3 20338 20626 20627 3 20338 20627 20339 3 20338 20343 20631 3 20338 20631 20626 3 20340 20628 20629 3 20340 20629 20341 3 20341 20629 20632 3 20341 20632 20344 3 20342 20630 20631 3 20342 20631 20343 3 20342 20346 20634 3 20342 20634 20630 3 20344 20632 20633 3 20344 20633 20345 3 20345 20633 20635 3 20345 20635 20347 3 20346 20348 20636 3 20346 20636 20634 3 20347 
20635 20637 3 20347 20637 20349 3 20348 20350 20636 3 20638 20636 20350 3 20349 20637 20639 3 20349 20639 20351 3 20350 20352 20638 3 20640 20638 20352 3 20351 20639 20641 3 20351 20641 20353 3 20352 20354 20640 3 20642 20640 20354 3 20353 20641 20643 3 20353 20643 20355 3 20354 20356 20642 3 20644 20642 20356 3 20355 20643 20645 3 20355 20645 20357 3 20356 20358 20644 3 20646 20644 20358 3 20357 20645 20647 3 20357 20647 20359 3 20358 20360 20646 3 20648 20646 20360 3 20359 20647 20649 3 20359 20649 20361 3 20360 20362 20648 3 20650 20648 20362 3 20361 20649 20363 3 20651 20363 20649 3 20362 20364 20650 3 20652 20650 20364 3 20363 20651 20365 3 20653 20365 20651 3 20364 20366 20652 3 20654 20652 20366 3 20365 20653 20367 3 20655 20367 20653 3 20366 20368 20654 3 20656 20654 20368 3 20367 20655 20369 3 20657 20369 20655 3 20368 20370 20656 3 20658 20656 20370 3 20369 20657 20371 3 20659 20371 20657 3 20370 20372 20658 3 20660 20658 20372 3 20371 20659 20373 3 20661 20373 20659 3 20372 20374 20660 3 20662 20660 20374 3 20373 20661 20375 3 20663 20375 20661 3 20374 20376 20662 3 20664 20662 20376 3 20375 20663 20377 3 20665 20377 20663 3 20376 20378 20664 3 20666 20664 20378 3 20377 20665 20379 3 20667 20379 20665 3 20378 20380 20666 3 20668 20666 20380 3 20379 20667 20381 3 20669 20381 20667 3 20380 20382 20668 3 20670 20668 20382 3 20381 20669 20383 3 20671 20383 20669 3 20382 20384 20670 3 20672 20670 20384 3 20383 20671 20385 3 20673 20385 20671 3 20384 20386 20672 3 20674 20672 20386 3 20385 20673 20387 3 20675 20387 20673 3 20386 20388 20674 3 20676 20674 20388 3 20387 20675 20389 3 20677 20389 20675 3 20388 20390 20676 3 20678 20676 20390 3 20389 20677 20391 3 20679 20391 20677 3 20390 20392 20678 3 20680 20678 20392 3 20391 20679 20393 3 20681 20393 20679 3 20392 20394 20680 3 20682 20680 20394 3 20393 20681 20395 3 20683 20395 20681 3 20394 20396 20682 3 20684 20682 20396 3 20395 20683 20397 3 20685 20397 20683 3 20396 20398 20684 3 20686 20684 20398 3 20397 
20685 20399 3 20687 20399 20685 3 20398 20400 20686 3 20688 20686 20400 3 20399 20687 20401 3 20689 20401 20687 3 20400 20402 20688 3 20690 20688 20402 3 20401 20689 20403 3 20691 20403 20689 3 20402 20404 20690 3 20692 20690 20404 3 20403 20691 20405 3 20693 20405 20691 3 20404 20406 20692 3 20694 20692 20406 3 20405 20693 20407 3 20695 20407 20693 3 20406 20408 20694 3 20696 20694 20408 3 20407 20695 20409 3 20697 20409 20695 3 20408 20410 20696 3 20698 20696 20410 3 20409 20697 20411 3 20699 20411 20697 3 20410 20412 20698 3 20700 20698 20412 3 20411 20699 20413 3 20701 20413 20699 3 20412 20414 20700 3 20702 20700 20414 3 20413 20701 20415 3 20703 20415 20701 3 20414 20416 20702 3 20704 20702 20416 3 20415 20703 20417 3 20705 20417 20703 3 20416 20418 20704 3 20706 20704 20418 3 20417 20705 20419 3 20707 20419 20705 3 20418 20420 20706 3 20708 20706 20420 3 20419 20707 20421 3 20709 20421 20707 3 20420 20422 20708 3 20710 20708 20422 3 20421 20709 20423 3 20711 20423 20709 3 20422 20424 20710 3 20712 20710 20424 3 20423 20711 20425 3 20713 20425 20711 3 20424 20426 20712 3 20714 20712 20426 3 20425 20713 20427 3 20715 20427 20713 3 20426 20428 20714 3 20716 20714 20428 3 20427 20715 20429 3 20717 20429 20715 3 20428 20430 20716 3 20718 20716 20430 3 20429 20717 20431 3 20719 20431 20717 3 20430 20432 20718 3 20720 20718 20432 3 20431 20719 20433 3 20721 20433 20719 3 20432 20434 20720 3 20722 20720 20434 3 20433 20721 20435 3 20723 20435 20721 3 20434 20436 20722 3 20724 20722 20436 3 20435 20723 20437 3 20725 20437 20723 3 20436 20438 20726 3 20436 20726 20724 3 20437 20725 20439 3 20727 20439 20725 3 20438 20440 20728 3 20438 20728 20726 3 20439 20727 20441 3 20729 20441 20727 3 20440 20442 20730 3 20440 20730 20728 3 20441 20729 20443 3 20731 20443 20729 3 20442 20444 20732 3 20442 20732 20730 3 20443 20731 20445 3 20733 20445 20731 3 20444 20446 20734 3 20444 20734 20732 3 20445 20733 20447 3 20735 20447 20733 3 20446 20448 20736 3 20446 20736 20734 3 20447 
20735 20449 3 20737 20449 20735 3 20448 20450 20738 3 20448 20738 20736 3 20449 20737 20739 3 20449 20739 20451 3 20450 20452 20740 3 20450 20740 20738 3 20451 20739 20741 3 20451 20741 20453 3 20452 20454 20742 3 20452 20742 20740 3 20453 20741 20743 3 20453 20743 20455 3 20454 20456 20744 3 20454 20744 20742 3 20455 20743 20745 3 20455 20745 20457 3 20456 20458 20746 3 20456 20746 20744 3 20457 20745 20747 3 20457 20747 20459 3 20458 20460 20748 3 20458 20748 20746 3 20459 20747 20749 3 20459 20749 20461 3 20460 20462 20750 3 20460 20750 20748 3 20461 20749 20751 3 20461 20751 20463 3 20462 20464 20752 3 20462 20752 20750 3 20463 20751 20753 3 20463 20753 20465 3 20464 20466 20754 3 20464 20754 20752 3 20465 20753 20755 3 20465 20755 20467 3 20466 20468 20756 3 20466 20756 20754 3 20467 20755 20757 3 20467 20757 20469 3 20468 20470 20758 3 20468 20758 20756 3 20469 20757 20759 3 20469 20759 20471 3 20470 20472 20760 3 20470 20760 20758 3 20471 20759 20761 3 20471 20761 20473 3 20472 20474 20762 3 20472 20762 20760 3 20473 20761 20763 3 20473 20763 20475 3 20474 20476 20764 3 20474 20764 20762 3 20475 20763 20765 3 20475 20765 20477 3 20476 20478 20766 3 20476 20766 20764 3 20477 20765 20767 3 20477 20767 20479 3 20478 20480 20768 3 20478 20768 20766 3 20479 20767 20771 3 20479 20771 20483 3 20480 20481 20769 3 20480 20769 20768 3 20481 20484 20772 3 20481 20772 20769 3 20482 20483 20771 3 20482 20771 20770 3 20482 20770 20775 3 20482 20775 20487 3 20484 20485 20773 3 20484 20773 20772 3 20485 20488 20776 3 20485 20776 20773 3 20486 20487 20775 3 20486 20775 20774 3 20486 20774 20844 3 20486 20844 20556 3 20488 20489 20777 3 20488 20777 20776 3 20489 20490 20778 3 20489 20778 20777 3 20490 20491 20779 3 20490 20779 20778 3 20491 20492 20780 3 20491 20780 20779 3 20492 20493 20781 3 20492 20781 20780 3 20493 20494 20782 3 20493 20782 20781 3 20494 20495 20783 3 20494 20783 20782 3 20495 20496 20784 3 20495 20784 20783 3 20496 20497 20785 3 20496 20785 20784 3 20497 
20498 20786 3 20497 20786 20785 3 20498 20499 20787 3 20498 20787 20786 3 20499 20500 20788 3 20499 20788 20787 3 20500 20501 20789 3 20500 20789 20788 3 20501 20502 20790 3 20501 20790 20789 3 20502 20503 20791 3 20502 20791 20790 3 20503 20504 20792 3 20503 20792 20791 3 20504 20505 20793 3 20504 20793 20792 3 20505 20506 20794 3 20505 20794 20793 3 20506 20507 20795 3 20506 20795 20794 3 20507 20508 20796 3 20507 20796 20795 3 20508 20509 20797 3 20508 20797 20796 3 20509 20510 20798 3 20509 20798 20797 3 20510 20511 20799 3 20510 20799 20798 3 20511 20512 20800 3 20511 20800 20799 3 20512 20513 20801 3 20512 20801 20800 3 20513 20514 20802 3 20513 20802 20801 3 20514 20515 20803 3 20514 20803 20802 3 20515 20516 20804 3 20515 20804 20803 3 20516 20517 20805 3 20516 20805 20804 3 20517 20518 20806 3 20517 20806 20805 3 20518 20519 20807 3 20518 20807 20806 3 20519 20520 20808 3 20519 20808 20807 3 20520 20521 20809 3 20520 20809 20808 3 20521 20522 20810 3 20521 20810 20809 3 20522 20523 20811 3 20522 20811 20810 3 20523 20524 20812 3 20523 20812 20811 3 20524 20525 20813 3 20524 20813 20812 3 20525 20526 20813 3 20814 20813 20526 3 20526 20527 20814 3 20815 20814 20527 3 20527 20528 20815 3 20816 20815 20528 3 20528 20529 20816 3 20817 20816 20529 3 20529 20530 20817 3 20818 20817 20530 3 20530 20531 20818 3 20819 20818 20531 3 20531 20532 20819 3 20820 20819 20532 3 20532 20533 20820 3 20821 20820 20533 3 20533 20534 20821 3 20822 20821 20534 3 20534 20535 20822 3 20823 20822 20535 3 20535 20536 20823 3 20824 20823 20536 3 20536 20537 20824 3 20825 20824 20537 3 20537 20538 20825 3 20826 20825 20538 3 20538 20539 20826 3 20827 20826 20539 3 20539 20540 20827 3 20828 20827 20540 3 20540 20541 20828 3 20829 20828 20541 3 20541 20542 20829 3 20830 20829 20542 3 20542 20543 20830 3 20831 20830 20543 3 20543 20544 20831 3 20832 20831 20544 3 20544 20545 20832 3 20833 20832 20545 3 20545 20546 20833 3 20834 20833 20546 3 20546 20547 20834 3 20835 20834 20547 3 20547 
20548 20835 3 20836 20835 20548 3 20548 20549 20836 3 20837 20836 20549 3 20549 20550 20837 3 20838 20837 20550 3 20550 20551 20838 3 20839 20838 20551 3 20551 20552 20839 3 20840 20839 20552 3 20552 20553 20840 3 20841 20840 20553 3 20553 20554 20841 3 20842 20841 20554 3 20554 20555 20842 3 20843 20842 20555 3 20555 20556 20843 3 20844 20843 20556 3 20557 20845 20558 3 20846 20558 20845 3 20557 20627 20845 3 20915 20845 20627 3 20558 20846 20559 3 20847 20559 20846 3 20559 20847 20560 3 20848 20560 20847 3 20560 20848 20561 3 20849 20561 20848 3 20561 20849 20562 3 20850 20562 20849 3 20562 20850 20563 3 20851 20563 20850 3 20563 20851 20564 3 20852 20564 20851 3 20564 20852 20565 3 20853 20565 20852 3 20565 20853 20566 3 20854 20566 20853 3 20566 20854 20567 3 20855 20567 20854 3 20567 20855 20568 3 20856 20568 20855 3 20568 20856 20569 3 20857 20569 20856 3 20569 20857 20570 3 20858 20570 20857 3 20570 20858 20571 3 20859 20571 20858 3 20571 20859 20572 3 20860 20572 20859 3 20572 20860 20573 3 20861 20573 20860 3 20573 20861 20574 3 20862 20574 20861 3 20574 20862 20575 3 20863 20575 20862 3 20575 20863 20576 3 20864 20576 20863 3 20576 20864 20577 3 20865 20577 20864 3 20577 20865 20578 3 20866 20578 20865 3 20578 20866 20579 3 20867 20579 20866 3 20579 20867 20580 3 20868 20580 20867 3 20580 20868 20581 3 20869 20581 20868 3 20581 20869 20582 3 20870 20582 20869 3 20582 20870 20583 3 20871 20583 20870 3 20583 20871 20584 3 20872 20584 20871 3 20584 20872 20585 3 20873 20585 20872 3 20585 20873 20586 3 20874 20586 20873 3 20586 20874 20587 3 20875 20587 20874 3 20587 20875 20588 3 20876 20588 20875 3 20588 20876 20589 3 20877 20589 20876 3 20589 20877 20590 3 20878 20590 20877 3 20590 20878 20591 3 20879 20591 20878 3 20591 20879 20592 3 20880 20592 20879 3 20592 20880 20593 3 20881 20593 20880 3 20593 20881 20594 3 20882 20594 20881 3 20594 20882 20595 3 20883 20595 20882 3 20595 20883 20596 3 20884 20596 20883 3 20596 20884 20597 3 20885 20597 20884 3 20597 
20885 20598 3 20886 20598 20885 3 20598 20886 20599 3 20887 20599 20886 3 20599 20887 20600 3 20888 20600 20887 3 20600 20888 20601 3 20889 20601 20888 3 20601 20889 20602 3 20890 20602 20889 3 20602 20890 20603 3 20891 20603 20890 3 20603 20891 20604 3 20892 20604 20891 3 20604 20892 20605 3 20893 20605 20892 3 20605 20893 20606 3 20894 20606 20893 3 20606 20894 20607 3 20895 20607 20894 3 20607 20895 20608 3 20896 20608 20895 3 20608 20896 20609 3 20897 20609 20896 3 20609 20897 20610 3 20898 20610 20897 3 20610 20898 20611 3 20899 20611 20898 3 20611 20899 20612 3 20900 20612 20899 3 20612 20900 20613 3 20901 20613 20900 3 20613 20901 20614 3 20902 20614 20901 3 20614 20902 20615 3 20903 20615 20902 3 20615 20903 20616 3 20904 20616 20903 3 20616 20904 20617 3 20905 20617 20904 3 20617 20905 20618 3 20906 20618 20905 3 20618 20906 20619 3 20907 20619 20906 3 20619 20907 20620 3 20908 20620 20907 3 20620 20908 20621 3 20909 20621 20908 3 20621 20909 20622 3 20910 20622 20909 3 20622 20910 20623 3 20911 20623 20910 3 20623 20911 20624 3 20912 20624 20911 3 20624 20912 20625 3 20913 20625 20912 3 20625 20913 20916 3 20625 20916 20628 3 20626 20914 20915 3 20626 20915 20627 3 20626 20631 20919 3 20626 20919 20914 3 20628 20916 20917 3 20628 20917 20629 3 20629 20917 20920 3 20629 20920 20632 3 20630 20918 20919 3 20630 20919 20631 3 20630 20634 20922 3 20630 20922 20918 3 20632 20920 20921 3 20632 20921 20633 3 20633 20921 20923 3 20633 20923 20635 3 20634 20636 20924 3 20634 20924 20922 3 20635 20923 20925 3 20635 20925 20637 3 20636 20638 20926 3 20636 20926 20924 3 20637 20925 20927 3 20637 20927 20639 3 20638 20640 20928 3 20638 20928 20926 3 20639 20927 20929 3 20639 20929 20641 3 20640 20642 20930 3 20640 20930 20928 3 20641 20929 20931 3 20641 20931 20643 3 20642 20644 20932 3 20642 20932 20930 3 20643 20931 20933 3 20643 20933 20645 3 20644 20646 20934 3 20644 20934 20932 3 20645 20933 20935 3 20645 20935 20647 3 20646 20648 20936 3 20646 20936 20934 3 20647 
20935 20937 3 20647 20937 20649 3 20648 20650 20938 3 20648 20938 20936 3 20649 20937 20939 3 20649 20939 20651 3 20650 20652 20940 3 20650 20940 20938 3 20651 20939 20941 3 20651 20941 20653 3 20652 20654 20942 3 20652 20942 20940 3 20653 20941 20943 3 20653 20943 20655 3 20654 20656 20944 3 20654 20944 20942 3 20655 20943 20945 3 20655 20945 20657 3 20656 20658 20946 3 20656 20946 20944 3 20657 20945 20947 3 20657 20947 20659 3 20658 20660 20948 3 20658 20948 20946 3 20659 20947 20949 3 20659 20949 20661 3 20660 20662 20950 3 20660 20950 20948 3 20661 20949 20951 3 20661 20951 20663 3 20662 20664 20952 3 20662 20952 20950 3 20663 20951 20953 3 20663 20953 20665 3 20664 20666 20954 3 20664 20954 20952 3 20665 20953 20955 3 20665 20955 20667 3 20666 20668 20956 3 20666 20956 20954 3 20667 20955 20957 3 20667 20957 20669 3 20668 20670 20958 3 20668 20958 20956 3 20669 20957 20959 3 20669 20959 20671 3 20670 20672 20960 3 20670 20960 20958 3 20671 20959 20961 3 20671 20961 20673 3 20672 20674 20962 3 20672 20962 20960 3 20673 20961 20963 3 20673 20963 20675 3 20674 20676 20964 3 20674 20964 20962 3 20675 20963 20965 3 20675 20965 20677 3 20676 20678 20966 3 20676 20966 20964 3 20677 20965 20967 3 20677 20967 20679 3 20678 20680 20968 3 20678 20968 20966 3 20679 20967 20969 3 20679 20969 20681 3 20680 20682 20970 3 20680 20970 20968 3 20681 20969 20971 3 20681 20971 20683 3 20682 20684 20972 3 20682 20972 20970 3 20683 20971 20973 3 20683 20973 20685 3 20684 20686 20974 3 20684 20974 20972 3 20685 20973 20975 3 20685 20975 20687 3 20686 20688 20976 3 20686 20976 20974 3 20687 20975 20977 3 20687 20977 20689 3 20688 20690 20978 3 20688 20978 20976 3 20689 20977 20979 3 20689 20979 20691 3 20690 20692 20980 3 20690 20980 20978 3 20691 20979 20981 3 20691 20981 20693 3 20692 20694 20982 3 20692 20982 20980 3 20693 20981 20983 3 20693 20983 20695 3 20694 20696 20984 3 20694 20984 20982 3 20695 20983 20985 3 20695 20985 20697 3 20696 20698 20986 3 20696 20986 20984 3 20697 
20985 20987 3 20697 20987 20699 3 20698 20700 20988 3 20698 20988 20986 3 20699 20987 20989 3 20699 20989 20701 3 20700 20702 20990 3 20700 20990 20988 3 20701 20989 20991 3 20701 20991 20703 3 20702 20704 20990 3 20992 20990 20704 3 20703 20991 20993 3 20703 20993 20705 3 20704 20706 20992 3 20994 20992 20706 3 20705 20993 20995 3 20705 20995 20707 3 20706 20708 20994 3 20996 20994 20708 3 20707 20995 20997 3 20707 20997 20709 3 20708 20710 20996 3 20998 20996 20710 3 20709 20997 20999 3 20709 20999 20711 3 20710 20712 20998 3 21000 20998 20712 3 20711 20999 21001 3 20711 21001 20713 3 20712 20714 21000 3 21002 21000 20714 3 20713 21001 21003 3 20713 21003 20715 3 20714 20716 21002 3 21004 21002 20716 3 20715 21003 20717 3 21005 20717 21003 3 20716 20718 21004 3 21006 21004 20718 3 20717 21005 20719 3 21007 20719 21005 3 20718 20720 21006 3 21008 21006 20720 3 20719 21007 20721 3 21009 20721 21007 3 20720 20722 21008 3 21010 21008 20722 3 20721 21009 20723 3 21011 20723 21009 3 20722 20724 21010 3 21012 21010 20724 3 20723 21011 20725 3 21013 20725 21011 3 20724 20726 21012 3 21014 21012 20726 3 20725 21013 20727 3 21015 20727 21013 3 20726 20728 21014 3 21016 21014 20728 3 20727 21015 20729 3 21017 20729 21015 3 20728 20730 21016 3 21018 21016 20730 3 20729 21017 20731 3 21019 20731 21017 3 20730 20732 21018 3 21020 21018 20732 3 20731 21019 20733 3 21021 20733 21019 3 20732 20734 21020 3 21022 21020 20734 3 20733 21021 20735 3 21023 20735 21021 3 20734 20736 21022 3 21024 21022 20736 3 20735 21023 20737 3 21025 20737 21023 3 20736 20738 21024 3 21026 21024 20738 3 20737 21025 20739 3 21027 20739 21025 3 20738 20740 21026 3 21028 21026 20740 3 20739 21027 20741 3 21029 20741 21027 3 20740 20742 21028 3 21030 21028 20742 3 20741 21029 20743 3 21031 20743 21029 3 20742 20744 21030 3 21032 21030 20744 3 20743 21031 20745 3 21033 20745 21031 3 20744 20746 21032 3 21034 21032 20746 3 20745 21033 20747 3 21035 20747 21033 3 20746 20748 21034 3 21036 21034 20748 3 20747 
21035 20749 3 21037 20749 21035 3 20748 20750 21036 3 21038 21036 20750 3 20749 21037 20751 3 21039 20751 21037 3 20750 20752 21038 3 21040 21038 20752 3 20751 21039 20753 3 21041 20753 21039 3 20752 20754 21040 3 21042 21040 20754 3 20753 21041 20755 3 21043 20755 21041 3 20754 20756 21042 3 21044 21042 20756 3 20755 21043 20757 3 21045 20757 21043 3 20756 20758 21044 3 21046 21044 20758 3 20757 21045 20759 3 21047 20759 21045 3 20758 20760 21046 3 21048 21046 20760 3 20759 21047 20761 3 21049 20761 21047 3 20760 20762 21048 3 21050 21048 20762 3 20761 21049 20763 3 21051 20763 21049 3 20762 20764 21050 3 21052 21050 20764 3 20763 21051 20765 3 21053 20765 21051 3 20764 20766 21052 3 21054 21052 20766 3 20765 21053 20767 3 21055 20767 21053 3 20766 20768 21054 3 21056 21054 20768 3 20767 21055 20771 3 21059 20771 21055 3 20768 20769 21056 3 21057 21056 20769 3 20769 20772 21057 3 21060 21057 20772 3 20770 20771 21058 3 21059 21058 20771 3 20770 21058 20775 3 21063 20775 21058 3 20772 20773 21060 3 21061 21060 20773 3 20773 20776 21061 3 21064 21061 20776 3 20774 20775 21062 3 21063 21062 20775 3 20774 21062 20844 3 21132 20844 21062 3 20776 20777 21064 3 21065 21064 20777 3 20777 20778 21065 3 21066 21065 20778 3 20778 20779 21066 3 21067 21066 20779 3 20779 20780 21067 3 21068 21067 20780 3 20780 20781 21068 3 21069 21068 20781 3 20781 20782 21069 3 21070 21069 20782 3 20782 20783 21070 3 21071 21070 20783 3 20783 20784 21071 3 21072 21071 20784 3 20784 20785 21072 3 21073 21072 20785 3 20785 20786 21073 3 21074 21073 20786 3 20786 20787 21074 3 21075 21074 20787 3 20787 20788 21075 3 21076 21075 20788 3 20788 20789 21076 3 21077 21076 20789 3 20789 20790 21077 3 21078 21077 20790 3 20790 20791 21079 3 20790 21079 21078 3 20791 20792 21080 3 20791 21080 21079 3 20792 20793 21081 3 20792 21081 21080 3 20793 20794 21082 3 20793 21082 21081 3 20794 20795 21083 3 20794 21083 21082 3 20795 20796 21084 3 20795 21084 21083 3 20796 20797 21085 3 20796 21085 21084 3 20797 
20798 21086 3 20797 21086 21085 3 20798 20799 21087 3 20798 21087 21086 3 20799 20800 21088 3 20799 21088 21087 3 20800 20801 21089 3 20800 21089 21088 3 20801 20802 21090 3 20801 21090 21089 3 20802 20803 21091 3 20802 21091 21090 3 20803 20804 21092 3 20803 21092 21091 3 20804 20805 21093 3 20804 21093 21092 3 20805 20806 21094 3 20805 21094 21093 3 20806 20807 21095 3 20806 21095 21094 3 20807 20808 21096 3 20807 21096 21095 3 20808 20809 21097 3 20808 21097 21096 3 20809 20810 21098 3 20809 21098 21097 3 20810 20811 21099 3 20810 21099 21098 3 20811 20812 21100 3 20811 21100 21099 3 20812 20813 21101 3 20812 21101 21100 3 20813 20814 21102 3 20813 21102 21101 3 20814 20815 21103 3 20814 21103 21102 3 20815 20816 21104 3 20815 21104 21103 3 20816 20817 21105 3 20816 21105 21104 3 20817 20818 21106 3 20817 21106 21105 3 20818 20819 21107 3 20818 21107 21106 3 20819 20820 21108 3 20819 21108 21107 3 20820 20821 21109 3 20820 21109 21108 3 20821 20822 21110 3 20821 21110 21109 3 20822 20823 21111 3 20822 21111 21110 3 20823 20824 21112 3 20823 21112 21111 3 20824 20825 21113 3 20824 21113 21112 3 20825 20826 21114 3 20825 21114 21113 3 20826 20827 21115 3 20826 21115 21114 3 20827 20828 21116 3 20827 21116 21115 3 20828 20829 21117 3 20828 21117 21116 3 20829 20830 21118 3 20829 21118 21117 3 20830 20831 21119 3 20830 21119 21118 3 20831 20832 21120 3 20831 21120 21119 3 20832 20833 21121 3 20832 21121 21120 3 20833 20834 21122 3 20833 21122 21121 3 20834 20835 21123 3 20834 21123 21122 3 20835 20836 21124 3 20835 21124 21123 3 20836 20837 21125 3 20836 21125 21124 3 20837 20838 21126 3 20837 21126 21125 3 20838 20839 21127 3 20838 21127 21126 3 20839 20840 21128 3 20839 21128 21127 3 20840 20841 21129 3 20840 21129 21128 3 20841 20842 21130 3 20841 21130 21129 3 20842 20843 21131 3 20842 21131 21130 3 20843 20844 21132 3 20843 21132 21131 3 20845 21133 21134 3 20845 21134 20846 3 20845 20915 21133 3 21203 21133 20915 3 20846 21134 21135 3 20846 21135 20847 3 20847 
21135 21136 3 20847 21136 20848 3 20848 21136 21137 3 20848 21137 20849 3 20849 21137 21138 3 20849 21138 20850 3 20850 21138 21139 3 20850 21139 20851 3 20851 21139 21140 3 20851 21140 20852 3 20852 21140 21141 3 20852 21141 20853 3 20853 21141 21142 3 20853 21142 20854 3 20854 21142 21143 3 20854 21143 20855 3 20855 21143 21144 3 20855 21144 20856 3 20856 21144 21145 3 20856 21145 20857 3 20857 21145 21146 3 20857 21146 20858 3 20858 21146 21147 3 20858 21147 20859 3 20859 21147 21148 3 20859 21148 20860 3 20860 21148 21149 3 20860 21149 20861 3 20861 21149 21150 3 20861 21150 20862 3 20862 21150 21151 3 20862 21151 20863 3 20863 21151 21152 3 20863 21152 20864 3 20864 21152 21153 3 20864 21153 20865 3 20865 21153 21154 3 20865 21154 20866 3 20866 21154 21155 3 20866 21155 20867 3 20867 21155 21156 3 20867 21156 20868 3 20868 21156 21157 3 20868 21157 20869 3 20869 21157 21158 3 20869 21158 20870 3 20870 21158 21159 3 20870 21159 20871 3 20871 21159 21160 3 20871 21160 20872 3 20872 21160 21161 3 20872 21161 20873 3 20873 21161 21162 3 20873 21162 20874 3 20874 21162 21163 3 20874 21163 20875 3 20875 21163 21164 3 20875 21164 20876 3 20876 21164 21165 3 20876 21165 20877 3 20877 21165 21166 3 20877 21166 20878 3 20878 21166 21167 3 20878 21167 20879 3 20879 21167 21168 3 20879 21168 20880 3 20880 21168 21169 3 20880 21169 20881 3 20881 21169 21170 3 20881 21170 20882 3 20882 21170 21171 3 20882 21171 20883 3 20883 21171 21172 3 20883 21172 20884 3 20884 21172 21173 3 20884 21173 20885 3 20885 21173 21174 3 20885 21174 20886 3 20886 21174 21175 3 20886 21175 20887 3 20887 21175 21176 3 20887 21176 20888 3 20888 21176 21177 3 20888 21177 20889 3 20889 21177 21178 3 20889 21178 20890 3 20890 21178 21179 3 20890 21179 20891 3 20891 21179 21180 3 20891 21180 20892 3 20892 21180 20893 3 21181 20893 21180 3 20893 21181 20894 3 21182 20894 21181 3 20894 21182 20895 3 21183 20895 21182 3 20895 21183 20896 3 21184 20896 21183 3 20896 21184 20897 3 21185 20897 21184 3 20897 
21185 20898 3 21186 20898 21185 3 20898 21186 20899 3 21187 20899 21186 3 20899 21187 20900 3 21188 20900 21187 3 20900 21188 20901 3 21189 20901 21188 3 20901 21189 20902 3 21190 20902 21189 3 20902 21190 20903 3 21191 20903 21190 3 20903 21191 20904 3 21192 20904 21191 3 20904 21192 20905 3 21193 20905 21192 3 20905 21193 20906 3 21194 20906 21193 3 20906 21194 20907 3 21195 20907 21194 3 20907 21195 20908 3 21196 20908 21195 3 20908 21196 20909 3 21197 20909 21196 3 20909 21197 20910 3 21198 20910 21197 3 20910 21198 20911 3 21199 20911 21198 3 20911 21199 20912 3 21200 20912 21199 3 20912 21200 20913 3 21201 20913 21200 3 20913 21201 20916 3 21204 20916 21201 3 20914 21202 20915 3 21203 20915 21202 3 20914 20919 21202 3 21207 21202 20919 3 20916 21204 20917 3 21205 20917 21204 3 20917 21205 20920 3 21208 20920 21205 3 20918 21206 20919 3 21207 20919 21206 3 20918 20922 21206 3 21210 21206 20922 3 20920 21208 20921 3 21209 20921 21208 3 20921 21209 20923 3 21211 20923 21209 3 20922 20924 21210 3 21212 21210 20924 3 20923 21211 20925 3 21213 20925 21211 3 20924 20926 21212 3 21214 21212 20926 3 20925 21213 20927 3 21215 20927 21213 3 20926 20928 21214 3 21216 21214 20928 3 20927 21215 20929 3 21217 20929 21215 3 20928 20930 21216 3 21218 21216 20930 3 20929 21217 20931 3 21219 20931 21217 3 20930 20932 21218 3 21220 21218 20932 3 20931 21219 20933 3 21221 20933 21219 3 20932 20934 21220 3 21222 21220 20934 3 20933 21221 20935 3 21223 20935 21221 3 20934 20936 21222 3 21224 21222 20936 3 20935 21223 20937 3 21225 20937 21223 3 20936 20938 21224 3 21226 21224 20938 3 20937 21225 20939 3 21227 20939 21225 3 20938 20940 21226 3 21228 21226 20940 3 20939 21227 20941 3 21229 20941 21227 3 20940 20942 21228 3 21230 21228 20942 3 20941 21229 20943 3 21231 20943 21229 3 20942 20944 21230 3 21232 21230 20944 3 20943 21231 20945 3 21233 20945 21231 3 20944 20946 21232 3 21234 21232 20946 3 20945 21233 20947 3 21235 20947 21233 3 20946 20948 21234 3 21236 21234 20948 3 20947 
21235 20949 3 21237 20949 21235 3 20948 20950 21236 3 21238 21236 20950 3 20949 21237 20951 3 21239 20951 21237 3 20950 20952 21238 3 21240 21238 20952 3 20951 21239 20953 3 21241 20953 21239 3 20952 20954 21240 3 21242 21240 20954 3 20953 21241 20955 3 21243 20955 21241 3 20954 20956 21242 3 21244 21242 20956 3 20955 21243 20957 3 21245 20957 21243 3 20956 20958 21244 3 21246 21244 20958 3 20957 21245 20959 3 21247 20959 21245 3 20958 20960 21246 3 21248 21246 20960 3 20959 21247 20961 3 21249 20961 21247 3 20960 20962 21248 3 21250 21248 20962 3 20961 21249 20963 3 21251 20963 21249 3 20962 20964 21250 3 21252 21250 20964 3 20963 21251 20965 3 21253 20965 21251 3 20964 20966 21252 3 21254 21252 20966 3 20965 21253 20967 3 21255 20967 21253 3 20966 20968 21254 3 21256 21254 20968 3 20967 21255 20969 3 21257 20969 21255 3 20968 20970 21258 3 20968 21258 21256 3 20969 21257 20971 3 21259 20971 21257 3 20970 20972 21260 3 20970 21260 21258 3 20971 21259 20973 3 21261 20973 21259 3 20972 20974 21262 3 20972 21262 21260 3 20973 21261 20975 3 21263 20975 21261 3 20974 20976 21264 3 20974 21264 21262 3 20975 21263 20977 3 21265 20977 21263 3 20976 20978 21266 3 20976 21266 21264 3 20977 21265 20979 3 21267 20979 21265 3 20978 20980 21268 3 20978 21268 21266 3 20979 21267 20981 3 21269 20981 21267 3 20980 20982 21270 3 20980 21270 21268 3 20981 21269 20983 3 21271 20983 21269 3 20982 20984 21272 3 20982 21272 21270 3 20983 21271 21273 3 20983 21273 20985 3 20984 20986 21274 3 20984 21274 21272 3 20985 21273 21275 3 20985 21275 20987 3 20986 20988 21276 3 20986 21276 21274 3 20987 21275 21277 3 20987 21277 20989 3 20988 20990 21278 3 20988 21278 21276 3 20989 21277 21279 3 20989 21279 20991 3 20990 20992 21280 3 20990 21280 21278 3 20991 21279 21281 3 20991 21281 20993 3 20992 20994 21282 3 20992 21282 21280 3 20993 21281 21283 3 20993 21283 20995 3 20994 20996 21284 3 20994 21284 21282 3 20995 21283 21285 3 20995 21285 20997 3 20996 20998 21286 3 20996 21286 21284 3 20997 
21285 21287 3 20997 21287 20999 3 20998 21000 21288 3 20998 21288 21286 3 20999 21287 21289 3 20999 21289 21001 3 21000 21002 21290 3 21000 21290 21288 3 21001 21289 21291 3 21001 21291 21003 3 21002 21004 21292 3 21002 21292 21290 3 21003 21291 21293 3 21003 21293 21005 3 21004 21006 21294 3 21004 21294 21292 3 21005 21293 21295 3 21005 21295 21007 3 21006 21008 21296 3 21006 21296 21294 3 21007 21295 21297 3 21007 21297 21009 3 21008 21010 21298 3 21008 21298 21296 3 21009 21297 21299 3 21009 21299 21011 3 21010 21012 21300 3 21010 21300 21298 3 21011 21299 21301 3 21011 21301 21013 3 21012 21014 21302 3 21012 21302 21300 3 21013 21301 21303 3 21013 21303 21015 3 21014 21016 21304 3 21014 21304 21302 3 21015 21303 21305 3 21015 21305 21017 3 21016 21018 21306 3 21016 21306 21304 3 21017 21305 21307 3 21017 21307 21019 3 21018 21020 21308 3 21018 21308 21306 3 21019 21307 21309 3 21019 21309 21021 3 21020 21022 21310 3 21020 21310 21308 3 21021 21309 21311 3 21021 21311 21023 3 21022 21024 21312 3 21022 21312 21310 3 21023 21311 21313 3 21023 21313 21025 3 21024 21026 21314 3 21024 21314 21312 3 21025 21313 21315 3 21025 21315 21027 3 21026 21028 21316 3 21026 21316 21314 3 21027 21315 21317 3 21027 21317 21029 3 21028 21030 21318 3 21028 21318 21316 3 21029 21317 21319 3 21029 21319 21031 3 21030 21032 21320 3 21030 21320 21318 3 21031 21319 21321 3 21031 21321 21033 3 21032 21034 21322 3 21032 21322 21320 3 21033 21321 21323 3 21033 21323 21035 3 21034 21036 21324 3 21034 21324 21322 3 21035 21323 21325 3 21035 21325 21037 3 21036 21038 21326 3 21036 21326 21324 3 21037 21325 21327 3 21037 21327 21039 3 21038 21040 21328 3 21038 21328 21326 3 21039 21327 21329 3 21039 21329 21041 3 21040 21042 21330 3 21040 21330 21328 3 21041 21329 21331 3 21041 21331 21043 3 21042 21044 21332 3 21042 21332 21330 3 21043 21331 21333 3 21043 21333 21045 3 21044 21046 21334 3 21044 21334 21332 3 21045 21333 21335 3 21045 21335 21047 3 21046 21048 21336 3 21046 21336 21334 3 21047 
21335 21337 3 21047 21337 21049 3 21048 21050 21338 3 21048 21338 21336 3 21049 21337 21339 3 21049 21339 21051 3 21050 21052 21340 3 21050 21340 21338 3 21051 21339 21341 3 21051 21341 21053 3 21052 21054 21342 3 21052 21342 21340 3 21053 21341 21343 3 21053 21343 21055 3 21054 21056 21344 3 21054 21344 21342 3 21055 21343 21347 3 21055 21347 21059 3 21056 21057 21345 3 21056 21345 21344 3 21057 21060 21345 3 21348 21345 21060 3 21058 21059 21346 3 21347 21346 21059 3 21058 21346 21351 3 21058 21351 21063 3 21060 21061 21348 3 21349 21348 21061 3 21061 21064 21349 3 21352 21349 21064 3 21062 21063 21350 3 21351 21350 21063 3 21062 21350 21420 3 21062 21420 21132 3 21064 21065 21352 3 21353 21352 21065 3 21065 21066 21353 3 21354 21353 21066 3 21066 21067 21354 3 21355 21354 21067 3 21067 21068 21355 3 21356 21355 21068 3 21068 21069 21356 3 21357 21356 21069 3 21069 21070 21357 3 21358 21357 21070 3 21070 21071 21358 3 21359 21358 21071 3 21071 21072 21359 3 21360 21359 21072 3 21072 21073 21360 3 21361 21360 21073 3 21073 21074 21361 3 21362 21361 21074 3 21074 21075 21362 3 21363 21362 21075 3 21075 21076 21363 3 21364 21363 21076 3 21076 21077 21364 3 21365 21364 21077 3 21077 21078 21365 3 21366 21365 21078 3 21078 21079 21366 3 21367 21366 21079 3 21079 21080 21367 3 21368 21367 21080 3 21080 21081 21368 3 21369 21368 21081 3 21081 21082 21369 3 21370 21369 21082 3 21082 21083 21370 3 21371 21370 21083 3 21083 21084 21371 3 21372 21371 21084 3 21084 21085 21372 3 21373 21372 21085 3 21085 21086 21373 3 21374 21373 21086 3 21086 21087 21374 3 21375 21374 21087 3 21087 21088 21375 3 21376 21375 21088 3 21088 21089 21376 3 21377 21376 21089 3 21089 21090 21377 3 21378 21377 21090 3 21090 21091 21378 3 21379 21378 21091 3 21091 21092 21379 3 21380 21379 21092 3 21092 21093 21380 3 21381 21380 21093 3 21093 21094 21381 3 21382 21381 21094 3 21094 21095 21382 3 21383 21382 21095 3 21095 21096 21383 3 21384 21383 21096 3 21096 21097 21384 3 21385 21384 21097 3 21097 
21098 21385 3 21386 21385 21098 3 21098 21099 21386 3 21387 21386 21099 3 21099 21100 21387 3 21388 21387 21100 3 21100 21101 21388 3 21389 21388 21101 3 21101 21102 21389 3 21390 21389 21102 3 21102 21103 21390 3 21391 21390 21103 3 21103 21104 21391 3 21392 21391 21104 3 21104 21105 21392 3 21393 21392 21105 3 21105 21106 21393 3 21394 21393 21106 3 21106 21107 21394 3 21395 21394 21107 3 21107 21108 21395 3 21396 21395 21108 3 21108 21109 21396 3 21397 21396 21109 3 21109 21110 21397 3 21398 21397 21110 3 21110 21111 21398 3 21399 21398 21111 3 21111 21112 21399 3 21400 21399 21112 3 21112 21113 21400 3 21401 21400 21113 3 21113 21114 21401 3 21402 21401 21114 3 21114 21115 21402 3 21403 21402 21115 3 21115 21116 21403 3 21404 21403 21116 3 21116 21117 21404 3 21405 21404 21117 3 21117 21118 21405 3 21406 21405 21118 3 21118 21119 21406 3 21407 21406 21119 3 21119 21120 21407 3 21408 21407 21120 3 21120 21121 21408 3 21409 21408 21121 3 21121 21122 21409 3 21410 21409 21122 3 21122 21123 21410 3 21411 21410 21123 3 21123 21124 21411 3 21412 21411 21124 3 21124 21125 21412 3 21413 21412 21125 3 21125 21126 21413 3 21414 21413 21126 3 21126 21127 21414 3 21415 21414 21127 3 21127 21128 21415 3 21416 21415 21128 3 21128 21129 21416 3 21417 21416 21129 3 21129 21130 21417 3 21418 21417 21130 3 21130 21131 21418 3 21419 21418 21131 3 21131 21132 21419 3 21420 21419 21132 3 21133 21421 21134 3 21422 21134 21421 3 21133 21203 21491 3 21133 21491 21421 3 21134 21422 21135 3 21423 21135 21422 3 21135 21423 21136 3 21424 21136 21423 3 21136 21424 21137 3 21425 21137 21424 3 21137 21425 21138 3 21426 21138 21425 3 21138 21426 21139 3 21427 21139 21426 3 21139 21427 21140 3 21428 21140 21427 3 21140 21428 21141 3 21429 21141 21428 3 21141 21429 21142 3 21430 21142 21429 3 21142 21430 21143 3 21431 21143 21430 3 21143 21431 21144 3 21432 21144 21431 3 21144 21432 21145 3 21433 21145 21432 3 21145 21433 21146 3 21434 21146 21433 3 21146 21434 21147 3 21435 21147 21434 3 21147 
21435 21148 3 21436 21148 21435 3 21148 21436 21149 3 21437 21149 21436 3 21149 21437 21150 3 21438 21150 21437 3 21150 21438 21151 3 21439 21151 21438 3 21151 21439 21152 3 21440 21152 21439 3 21152 21440 21153 3 21441 21153 21440 3 21153 21441 21154 3 21442 21154 21441 3 21154 21442 21155 3 21443 21155 21442 3 21155 21443 21156 3 21444 21156 21443 3 21156 21444 21157 3 21445 21157 21444 3 21157 21445 21158 3 21446 21158 21445 3 21158 21446 21159 3 21447 21159 21446 3 21159 21447 21160 3 21448 21160 21447 3 21160 21448 21161 3 21449 21161 21448 3 21161 21449 21450 3 21161 21450 21162 3 21162 21450 21451 3 21162 21451 21163 3 21163 21451 21452 3 21163 21452 21164 3 21164 21452 21453 3 21164 21453 21165 3 21165 21453 21454 3 21165 21454 21166 3 21166 21454 21455 3 21166 21455 21167 3 21167 21455 21456 3 21167 21456 21168 3 21168 21456 21457 3 21168 21457 21169 3 21169 21457 21458 3 21169 21458 21170 3 21170 21458 21459 3 21170 21459 21171 3 21171 21459 21460 3 21171 21460 21172 3 21172 21460 21461 3 21172 21461 21173 3 21173 21461 21462 3 21173 21462 21174 3 21174 21462 21463 3 21174 21463 21175 3 21175 21463 21464 3 21175 21464 21176 3 21176 21464 21465 3 21176 21465 21177 3 21177 21465 21466 3 21177 21466 21178 3 21178 21466 21467 3 21178 21467 21179 3 21179 21467 21468 3 21179 21468 21180 3 21180 21468 21469 3 21180 21469 21181 3 21181 21469 21470 3 21181 21470 21182 3 21182 21470 21471 3 21182 21471 21183 3 21183 21471 21472 3 21183 21472 21184 3 21184 21472 21473 3 21184 21473 21185 3 21185 21473 21474 3 21185 21474 21186 3 21186 21474 21475 3 21186 21475 21187 3 21187 21475 21476 3 21187 21476 21188 3 21188 21476 21477 3 21188 21477 21189 3 21189 21477 21478 3 21189 21478 21190 3 21190 21478 21479 3 21190 21479 21191 3 21191 21479 21480 3 21191 21480 21192 3 21192 21480 21481 3 21192 21481 21193 3 21193 21481 21482 3 21193 21482 21194 3 21194 21482 21483 3 21194 21483 21195 3 21195 21483 21484 3 21195 21484 21196 3 21196 21484 21485 3 21196 21485 21197 3 21197 
21485 21486 3 21197 21486 21198 3 21198 21486 21487 3 21198 21487 21199 3 21199 21487 21488 3 21199 21488 21200 3 21200 21488 21489 3 21200 21489 21201 3 21201 21489 21492 3 21201 21492 21204 3 21202 21490 21491 3 21202 21491 21203 3 21202 21207 21495 3 21202 21495 21490 3 21204 21492 21493 3 21204 21493 21205 3 21205 21493 21496 3 21205 21496 21208 3 21206 21494 21495 3 21206 21495 21207 3 21206 21210 21498 3 21206 21498 21494 3 21208 21496 21497 3 21208 21497 21209 3 21209 21497 21499 3 21209 21499 21211 3 21210 21212 21500 3 21210 21500 21498 3 21211 21499 21501 3 21211 21501 21213 3 21212 21214 21502 3 21212 21502 21500 3 21213 21501 21503 3 21213 21503 21215 3 21214 21216 21504 3 21214 21504 21502 3 21215 21503 21505 3 21215 21505 21217 3 21216 21218 21506 3 21216 21506 21504 3 21217 21505 21507 3 21217 21507 21219 3 21218 21220 21508 3 21218 21508 21506 3 21219 21507 21509 3 21219 21509 21221 3 21220 21222 21510 3 21220 21510 21508 3 21221 21509 21511 3 21221 21511 21223 3 21222 21224 21512 3 21222 21512 21510 3 21223 21511 21513 3 21223 21513 21225 3 21224 21226 21514 3 21224 21514 21512 3 21225 21513 21515 3 21225 21515 21227 3 21226 21228 21516 3 21226 21516 21514 3 21227 21515 21517 3 21227 21517 21229 3 21228 21230 21518 3 21228 21518 21516 3 21229 21517 21519 3 21229 21519 21231 3 21230 21232 21520 3 21230 21520 21518 3 21231 21519 21521 3 21231 21521 21233 3 21232 21234 21522 3 21232 21522 21520 3 21233 21521 21523 3 21233 21523 21235 3 21234 21236 21524 3 21234 21524 21522 3 21235 21523 21525 3 21235 21525 21237 3 21236 21238 21524 3 21526 21524 21238 3 21237 21525 21527 3 21237 21527 21239 3 21238 21240 21526 3 21528 21526 21240 3 21239 21527 21529 3 21239 21529 21241 3 21240 21242 21528 3 21530 21528 21242 3 21241 21529 21531 3 21241 21531 21243 3 21242 21244 21530 3 21532 21530 21244 3 21243 21531 21533 3 21243 21533 21245 3 21244 21246 21532 3 21534 21532 21246 3 21245 21533 21535 3 21245 21535 21247 3 21246 21248 21534 3 21536 21534 21248 3 21247 
21535 21537 3 21247 21537 21249 3 21248 21250 21536 3 21538 21536 21250 3 21249 21537 21539 3 21249 21539 21251 3 21250 21252 21538 3 21540 21538 21252 3 21251 21539 21253 3 21541 21253 21539 3 21252 21254 21540 3 21542 21540 21254 3 21253 21541 21255 3 21543 21255 21541 3 21254 21256 21542 3 21544 21542 21256 3 21255 21543 21257 3 21545 21257 21543 3 21256 21258 21544 3 21546 21544 21258 3 21257 21545 21259 3 21547 21259 21545 3 21258 21260 21546 3 21548 21546 21260 3 21259 21547 21261 3 21549 21261 21547 3 21260 21262 21548 3 21550 21548 21262 3 21261 21549 21263 3 21551 21263 21549 3 21262 21264 21550 3 21552 21550 21264 3 21263 21551 21265 3 21553 21265 21551 3 21264 21266 21552 3 21554 21552 21266 3 21265 21553 21267 3 21555 21267 21553 3 21266 21268 21554 3 21556 21554 21268 3 21267 21555 21269 3 21557 21269 21555 3 21268 21270 21556 3 21558 21556 21270 3 21269 21557 21271 3 21559 21271 21557 3 21270 21272 21558 3 21560 21558 21272 3 21271 21559 21273 3 21561 21273 21559 3 21272 21274 21560 3 21562 21560 21274 3 21273 21561 21275 3 21563 21275 21561 3 21274 21276 21562 3 21564 21562 21276 3 21275 21563 21277 3 21565 21277 21563 3 21276 21278 21564 3 21566 21564 21278 3 21277 21565 21279 3 21567 21279 21565 3 21278 21280 21566 3 21568 21566 21280 3 21279 21567 21281 3 21569 21281 21567 3 21280 21282 21568 3 21570 21568 21282 3 21281 21569 21283 3 21571 21283 21569 3 21282 21284 21570 3 21572 21570 21284 3 21283 21571 21285 3 21573 21285 21571 3 21284 21286 21572 3 21574 21572 21286 3 21285 21573 21287 3 21575 21287 21573 3 21286 21288 21574 3 21576 21574 21288 3 21287 21575 21289 3 21577 21289 21575 3 21288 21290 21576 3 21578 21576 21290 3 21289 21577 21291 3 21579 21291 21577 3 21290 21292 21578 3 21580 21578 21292 3 21291 21579 21293 3 21581 21293 21579 3 21292 21294 21580 3 21582 21580 21294 3 21293 21581 21295 3 21583 21295 21581 3 21294 21296 21582 3 21584 21582 21296 3 21295 21583 21297 3 21585 21297 21583 3 21296 21298 21584 3 21586 21584 21298 3 21297 
21585 21299 3 21587 21299 21585 3 21298 21300 21586 3 21588 21586 21300 3 21299 21587 21301 3 21589 21301 21587 3 21300 21302 21588 3 21590 21588 21302 3 21301 21589 21303 3 21591 21303 21589 3 21302 21304 21590 3 21592 21590 21304 3 21303 21591 21305 3 21593 21305 21591 3 21304 21306 21592 3 21594 21592 21306 3 21305 21593 21307 3 21595 21307 21593 3 21306 21308 21594 3 21596 21594 21308 3 21307 21595 21309 3 21597 21309 21595 3 21308 21310 21596 3 21598 21596 21310 3 21309 21597 21311 3 21599 21311 21597 3 21310 21312 21598 3 21600 21598 21312 3 21311 21599 21313 3 21601 21313 21599 3 21312 21314 21600 3 21602 21600 21314 3 21313 21601 21315 3 21603 21315 21601 3 21314 21316 21602 3 21604 21602 21316 3 21315 21603 21317 3 21605 21317 21603 3 21316 21318 21604 3 21606 21604 21318 3 21317 21605 21319 3 21607 21319 21605 3 21318 21320 21606 3 21608 21606 21320 3 21319 21607 21321 3 21609 21321 21607 3 21320 21322 21608 3 21610 21608 21322 3 21321 21609 21323 3 21611 21323 21609 3 21322 21324 21610 3 21612 21610 21324 3 21323 21611 21325 3 21613 21325 21611 3 21324 21326 21612 3 21614 21612 21326 3 21325 21613 21327 3 21615 21327 21613 3 21326 21328 21616 3 21326 21616 21614 3 21327 21615 21329 3 21617 21329 21615 3 21328 21330 21618 3 21328 21618 21616 3 21329 21617 21331 3 21619 21331 21617 3 21330 21332 21620 3 21330 21620 21618 3 21331 21619 21333 3 21621 21333 21619 3 21332 21334 21622 3 21332 21622 21620 3 21333 21621 21335 3 21623 21335 21621 3 21334 21336 21624 3 21334 21624 21622 3 21335 21623 21337 3 21625 21337 21623 3 21336 21338 21626 3 21336 21626 21624 3 21337 21625 21339 3 21627 21339 21625 3 21338 21340 21628 3 21338 21628 21626 3 21339 21627 21341 3 21629 21341 21627 3 21340 21342 21630 3 21340 21630 21628 3 21341 21629 21631 3 21341 21631 21343 3 21342 21344 21632 3 21342 21632 21630 3 21343 21631 21635 3 21343 21635 21347 3 21344 21345 21633 3 21344 21633 21632 3 21345 21348 21636 3 21345 21636 21633 3 21346 21347 21635 3 21346 21635 21634 3 21346 
21634 21639 3 21346 21639 21351 3 21348 21349 21637 3 21348 21637 21636 3 21349 21352 21640 3 21349 21640 21637 3 21350 21351 21639 3 21350 21639 21638 3 21350 21638 21708 3 21350 21708 21420 3 21352 21353 21641 3 21352 21641 21640 3 21353 21354 21642 3 21353 21642 21641 3 21354 21355 21643 3 21354 21643 21642 3 21355 21356 21644 3 21355 21644 21643 3 21356 21357 21645 3 21356 21645 21644 3 21357 21358 21646 3 21357 21646 21645 3 21358 21359 21647 3 21358 21647 21646 3 21359 21360 21648 3 21359 21648 21647 3 21360 21361 21649 3 21360 21649 21648 3 21361 21362 21650 3 21361 21650 21649 3 21362 21363 21651 3 21362 21651 21650 3 21363 21364 21652 3 21363 21652 21651 3 21364 21365 21653 3 21364 21653 21652 3 21365 21366 21654 3 21365 21654 21653 3 21366 21367 21655 3 21366 21655 21654 3 21367 21368 21656 3 21367 21656 21655 3 21368 21369 21657 3 21368 21657 21656 3 21369 21370 21658 3 21369 21658 21657 3 21370 21371 21659 3 21370 21659 21658 3 21371 21372 21660 3 21371 21660 21659 3 21372 21373 21661 3 21372 21661 21660 3 21373 21374 21662 3 21373 21662 21661 3 21374 21375 21663 3 21374 21663 21662 3 21375 21376 21664 3 21375 21664 21663 3 21376 21377 21665 3 21376 21665 21664 3 21377 21378 21666 3 21377 21666 21665 3 21378 21379 21667 3 21378 21667 21666 3 21379 21380 21668 3 21379 21668 21667 3 21380 21381 21669 3 21380 21669 21668 3 21381 21382 21670 3 21381 21670 21669 3 21382 21383 21671 3 21382 21671 21670 3 21383 21384 21672 3 21383 21672 21671 3 21384 21385 21673 3 21384 21673 21672 3 21385 21386 21674 3 21385 21674 21673 3 21386 21387 21675 3 21386 21675 21674 3 21387 21388 21676 3 21387 21676 21675 3 21388 21389 21677 3 21388 21677 21676 3 21389 21390 21678 3 21389 21678 21677 3 21390 21391 21679 3 21390 21679 21678 3 21391 21392 21680 3 21391 21680 21679 3 21392 21393 21681 3 21392 21681 21680 3 21393 21394 21682 3 21393 21682 21681 3 21394 21395 21683 3 21394 21683 21682 3 21395 21396 21684 3 21395 21684 21683 3 21396 21397 21685 3 21396 21685 21684 3 21397 
21398 21686 3 21397 21686 21685 3 21398 21399 21687 3 21398 21687 21686 3 21399 21400 21688 3 21399 21688 21687 3 21400 21401 21689 3 21400 21689 21688 3 21401 21402 21690 3 21401 21690 21689 3 21402 21403 21691 3 21402 21691 21690 3 21403 21404 21692 3 21403 21692 21691 3 21404 21405 21693 3 21404 21693 21692 3 21405 21406 21694 3 21405 21694 21693 3 21406 21407 21695 3 21406 21695 21694 3 21407 21408 21696 3 21407 21696 21695 3 21408 21409 21697 3 21408 21697 21696 3 21409 21410 21698 3 21409 21698 21697 3 21410 21411 21699 3 21410 21699 21698 3 21411 21412 21700 3 21411 21700 21699 3 21412 21413 21701 3 21412 21701 21700 3 21413 21414 21702 3 21413 21702 21701 3 21414 21415 21703 3 21414 21703 21702 3 21415 21416 21704 3 21415 21704 21703 3 21416 21417 21704 3 21705 21704 21417 3 21417 21418 21705 3 21706 21705 21418 3 21418 21419 21706 3 21707 21706 21419 3 21419 21420 21707 3 21708 21707 21420 3 21421 21709 21710 3 21421 21710 21422 3 21421 21491 21709 3 21779 21709 21491 3 21422 21710 21711 3 21422 21711 21423 3 21423 21711 21712 3 21423 21712 21424 3 21424 21712 21713 3 21424 21713 21425 3 21425 21713 21714 3 21425 21714 21426 3 21426 21714 21715 3 21426 21715 21427 3 21427 21715 21716 3 21427 21716 21428 3 21428 21716 21717 3 21428 21717 21429 3 21429 21717 21718 3 21429 21718 21430 3 21430 21718 21719 3 21430 21719 21431 3 21431 21719 21432 3 21720 21432 21719 3 21432 21720 21433 3 21721 21433 21720 3 21433 21721 21434 3 21722 21434 21721 3 21434 21722 21435 3 21723 21435 21722 3 21435 21723 21436 3 21724 21436 21723 3 21436 21724 21437 3 21725 21437 21724 3 21437 21725 21438 3 21726 21438 21725 3 21438 21726 21439 3 21727 21439 21726 3 21439 21727 21440 3 21728 21440 21727 3 21440 21728 21441 3 21729 21441 21728 3 21441 21729 21442 3 21730 21442 21729 3 21442 21730 21443 3 21731 21443 21730 3 21443 21731 21444 3 21732 21444 21731 3 21444 21732 21445 3 21733 21445 21732 3 21445 21733 21446 3 21734 21446 21733 3 21446 21734 21447 3 21735 21447 21734 3 21447 
21735 21448 3 21736 21448 21735 3 21448 21736 21449 3 21737 21449 21736 3 21449 21737 21450 3 21738 21450 21737 3 21450 21738 21451 3 21739 21451 21738 3 21451 21739 21452 3 21740 21452 21739 3 21452 21740 21453 3 21741 21453 21740 3 21453 21741 21454 3 21742 21454 21741 3 21454 21742 21455 3 21743 21455 21742 3 21455 21743 21456 3 21744 21456 21743 3 21456 21744 21457 3 21745 21457 21744 3 21457 21745 21458 3 21746 21458 21745 3 21458 21746 21459 3 21747 21459 21746 3 21459 21747 21460 3 21748 21460 21747 3 21460 21748 21461 3 21749 21461 21748 3 21461 21749 21462 3 21750 21462 21749 3 21462 21750 21463 3 21751 21463 21750 3 21463 21751 21464 3 21752 21464 21751 3 21464 21752 21465 3 21753 21465 21752 3 21465 21753 21466 3 21754 21466 21753 3 21466 21754 21467 3 21755 21467 21754 3 21467 21755 21468 3 21756 21468 21755 3 21468 21756 21469 3 21757 21469 21756 3 21469 21757 21470 3 21758 21470 21757 3 21470 21758 21471 3 21759 21471 21758 3 21471 21759 21472 3 21760 21472 21759 3 21472 21760 21473 3 21761 21473 21760 3 21473 21761 21474 3 21762 21474 21761 3 21474 21762 21475 3 21763 21475 21762 3 21475 21763 21476 3 21764 21476 21763 3 21476 21764 21477 3 21765 21477 21764 3 21477 21765 21478 3 21766 21478 21765 3 21478 21766 21479 3 21767 21479 21766 3 21479 21767 21480 3 21768 21480 21767 3 21480 21768 21481 3 21769 21481 21768 3 21481 21769 21482 3 21770 21482 21769 3 21482 21770 21483 3 21771 21483 21770 3 21483 21771 21484 3 21772 21484 21771 3 21484 21772 21485 3 21773 21485 21772 3 21485 21773 21486 3 21774 21486 21773 3 21486 21774 21487 3 21775 21487 21774 3 21487 21775 21488 3 21776 21488 21775 3 21488 21776 21489 3 21777 21489 21776 3 21489 21777 21492 3 21780 21492 21777 3 21490 21778 21491 3 21779 21491 21778 3 21490 21495 21778 3 21783 21778 21495 3 21492 21780 21493 3 21781 21493 21780 3 21493 21781 21496 3 21784 21496 21781 3 21494 21782 21495 3 21783 21495 21782 3 21494 21498 21782 3 21786 21782 21498 3 21496 21784 21497 3 21785 21497 21784 3 21497 
21785 21499 3 21787 21499 21785 3 21498 21500 21786 3 21788 21786 21500 3 21499 21787 21501 3 21789 21501 21787 3 21500 21502 21788 3 21790 21788 21502 3 21501 21789 21503 3 21791 21503 21789 3 21502 21504 21790 3 21792 21790 21504 3 21503 21791 21505 3 21793 21505 21791 3 21504 21506 21792 3 21794 21792 21506 3 21505 21793 21507 3 21795 21507 21793 3 21506 21508 21796 3 21506 21796 21794 3 21507 21795 21509 3 21797 21509 21795 3 21508 21510 21798 3 21508 21798 21796 3 21509 21797 21511 3 21799 21511 21797 3 21510 21512 21800 3 21510 21800 21798 3 21511 21799 21513 3 21801 21513 21799 3 21512 21514 21802 3 21512 21802 21800 3 21513 21801 21515 3 21803 21515 21801 3 21514 21516 21804 3 21514 21804 21802 3 21515 21803 21517 3 21805 21517 21803 3 21516 21518 21806 3 21516 21806 21804 3 21517 21805 21519 3 21807 21519 21805 3 21518 21520 21808 3 21518 21808 21806 3 21519 21807 21521 3 21809 21521 21807 3 21520 21522 21810 3 21520 21810 21808 3 21521 21809 21523 3 21811 21523 21809 3 21522 21524 21812 3 21522 21812 21810 3 21523 21811 21813 3 21523 21813 21525 3 21524 21526 21814 3 21524 21814 21812 3 21525 21813 21815 3 21525 21815 21527 3 21526 21528 21816 3 21526 21816 21814 3 21527 21815 21817 3 21527 21817 21529 3 21528 21530 21818 3 21528 21818 21816 3 21529 21817 21819 3 21529 21819 21531 3 21530 21532 21820 3 21530 21820 21818 3 21531 21819 21821 3 21531 21821 21533 3 21532 21534 21822 3 21532 21822 21820 3 21533 21821 21823 3 21533 21823 21535 3 21534 21536 21824 3 21534 21824 21822 3 21535 21823 21825 3 21535 21825 21537 3 21536 21538 21826 3 21536 21826 21824 3 21537 21825 21827 3 21537 21827 21539 3 21538 21540 21828 3 21538 21828 21826 3 21539 21827 21829 3 21539 21829 21541 3 21540 21542 21830 3 21540 21830 21828 3 21541 21829 21831 3 21541 21831 21543 3 21542 21544 21832 3 21542 21832 21830 3 21543 21831 21833 3 21543 21833 21545 3 21544 21546 21834 3 21544 21834 21832 3 21545 21833 21835 3 21545 21835 21547 3 21546 21548 21836 3 21546 21836 21834 3 21547 
21835 21837 3 21547 21837 21549 3 21548 21550 21838 3 21548 21838 21836 3 21549 21837 21839 3 21549 21839 21551 3 21550 21552 21840 3 21550 21840 21838 3 21551 21839 21841 3 21551 21841 21553 3 21552 21554 21842 3 21552 21842 21840 3 21553 21841 21843 3 21553 21843 21555 3 21554 21556 21844 3 21554 21844 21842 3 21555 21843 21845 3 21555 21845 21557 3 21556 21558 21846 3 21556 21846 21844 3 21557 21845 21847 3 21557 21847 21559 3 21558 21560 21848 3 21558 21848 21846 3 21559 21847 21849 3 21559 21849 21561 3 21560 21562 21850 3 21560 21850 21848 3 21561 21849 21851 3 21561 21851 21563 3 21562 21564 21852 3 21562 21852 21850 3 21563 21851 21853 3 21563 21853 21565 3 21564 21566 21854 3 21564 21854 21852 3 21565 21853 21855 3 21565 21855 21567 3 21566 21568 21856 3 21566 21856 21854 3 21567 21855 21857 3 21567 21857 21569 3 21568 21570 21858 3 21568 21858 21856 3 21569 21857 21859 3 21569 21859 21571 3 21570 21572 21860 3 21570 21860 21858 3 21571 21859 21861 3 21571 21861 21573 3 21572 21574 21862 3 21572 21862 21860 3 21573 21861 21863 3 21573 21863 21575 3 21574 21576 21864 3 21574 21864 21862 3 21575 21863 21865 3 21575 21865 21577 3 21576 21578 21866 3 21576 21866 21864 3 21577 21865 21867 3 21577 21867 21579 3 21578 21580 21868 3 21578 21868 21866 3 21579 21867 21869 3 21579 21869 21581 3 21580 21582 21870 3 21580 21870 21868 3 21581 21869 21871 3 21581 21871 21583 3 21582 21584 21872 3 21582 21872 21870 3 21583 21871 21873 3 21583 21873 21585 3 21584 21586 21874 3 21584 21874 21872 3 21585 21873 21875 3 21585 21875 21587 3 21586 21588 21876 3 21586 21876 21874 3 21587 21875 21877 3 21587 21877 21589 3 21588 21590 21878 3 21588 21878 21876 3 21589 21877 21879 3 21589 21879 21591 3 21590 21592 21880 3 21590 21880 21878 3 21591 21879 21881 3 21591 21881 21593 3 21592 21594 21882 3 21592 21882 21880 3 21593 21881 21883 3 21593 21883 21595 3 21594 21596 21884 3 21594 21884 21882 3 21595 21883 21885 3 21595 21885 21597 3 21596 21598 21884 3 21886 21884 21598 3 21597 
21885 21887 3 21597 21887 21599 3 21598 21600 21886 3 21888 21886 21600 3 21599 21887 21889 3 21599 21889 21601 3 21600 21602 21888 3 21890 21888 21602 3 21601 21889 21891 3 21601 21891 21603 3 21602 21604 21890 3 21892 21890 21604 3 21603 21891 21893 3 21603 21893 21605 3 21604 21606 21892 3 21894 21892 21606 3 21605 21893 21895 3 21605 21895 21607 3 21606 21608 21894 3 21896 21894 21608 3 21607 21895 21897 3 21607 21897 21609 3 21608 21610 21896 3 21898 21896 21610 3 21609 21897 21899 3 21609 21899 21611 3 21610 21612 21898 3 21900 21898 21612 3 21611 21899 21901 3 21611 21901 21613 3 21612 21614 21900 3 21902 21900 21614 3 21613 21901 21615 3 21903 21615 21901 3 21614 21616 21902 3 21904 21902 21616 3 21615 21903 21617 3 21905 21617 21903 3 21616 21618 21904 3 21906 21904 21618 3 21617 21905 21619 3 21907 21619 21905 3 21618 21620 21906 3 21908 21906 21620 3 21619 21907 21621 3 21909 21621 21907 3 21620 21622 21908 3 21910 21908 21622 3 21621 21909 21623 3 21911 21623 21909 3 21622 21624 21910 3 21912 21910 21624 3 21623 21911 21625 3 21913 21625 21911 3 21624 21626 21912 3 21914 21912 21626 3 21625 21913 21627 3 21915 21627 21913 3 21626 21628 21914 3 21916 21914 21628 3 21627 21915 21629 3 21917 21629 21915 3 21628 21630 21916 3 21918 21916 21630 3 21629 21917 21631 3 21919 21631 21917 3 21630 21632 21918 3 21920 21918 21632 3 21631 21919 21635 3 21923 21635 21919 3 21632 21633 21920 3 21921 21920 21633 3 21633 21636 21921 3 21924 21921 21636 3 21634 21635 21922 3 21923 21922 21635 3 21634 21922 21639 3 21927 21639 21922 3 21636 21637 21924 3 21925 21924 21637 3 21637 21640 21925 3 21928 21925 21640 3 21638 21639 21926 3 21927 21926 21639 3 21638 21926 21708 3 21996 21708 21926 3 21640 21641 21928 3 21929 21928 21641 3 21641 21642 21929 3 21930 21929 21642 3 21642 21643 21930 3 21931 21930 21643 3 21643 21644 21931 3 21932 21931 21644 3 21644 21645 21932 3 21933 21932 21645 3 21645 21646 21933 3 21934 21933 21646 3 21646 21647 21934 3 21935 21934 21647 3 21647 
21648 21935 3 21936 21935 21648 3 21648 21649 21936 3 21937 21936 21649 3 21649 21650 21937 3 21938 21937 21650 3 21650 21651 21938 3 21939 21938 21651 3 21651 21652 21939 3 21940 21939 21652 3 21652 21653 21940 3 21941 21940 21653 3 21653 21654 21941 3 21942 21941 21654 3 21654 21655 21942 3 21943 21942 21655 3 21655 21656 21943 3 21944 21943 21656 3 21656 21657 21944 3 21945 21944 21657 3 21657 21658 21945 3 21946 21945 21658 3 21658 21659 21946 3 21947 21946 21659 3 21659 21660 21947 3 21948 21947 21660 3 21660 21661 21948 3 21949 21948 21661 3 21661 21662 21949 3 21950 21949 21662 3 21662 21663 21950 3 21951 21950 21663 3 21663 21664 21951 3 21952 21951 21664 3 21664 21665 21952 3 21953 21952 21665 3 21665 21666 21953 3 21954 21953 21666 3 21666 21667 21954 3 21955 21954 21667 3 21667 21668 21955 3 21956 21955 21668 3 21668 21669 21956 3 21957 21956 21669 3 21669 21670 21957 3 21958 21957 21670 3 21670 21671 21958 3 21959 21958 21671 3 21671 21672 21959 3 21960 21959 21672 3 21672 21673 21960 3 21961 21960 21673 3 21673 21674 21961 3 21962 21961 21674 3 21674 21675 21962 3 21963 21962 21675 3 21675 21676 21963 3 21964 21963 21676 3 21676 21677 21964 3 21965 21964 21677 3 21677 21678 21965 3 21966 21965 21678 3 21678 21679 21966 3 21967 21966 21679 3 21679 21680 21967 3 21968 21967 21680 3 21680 21681 21968 3 21969 21968 21681 3 21681 21682 21969 3 21970 21969 21682 3 21682 21683 21970 3 21971 21970 21683 3 21683 21684 21971 3 21972 21971 21684 3 21684 21685 21972 3 21973 21972 21685 3 21685 21686 21973 3 21974 21973 21686 3 21686 21687 21974 3 21975 21974 21687 3 21687 21688 21976 3 21687 21976 21975 3 21688 21689 21977 3 21688 21977 21976 3 21689 21690 21978 3 21689 21978 21977 3 21690 21691 21979 3 21690 21979 21978 3 21691 21692 21980 3 21691 21980 21979 3 21692 21693 21981 3 21692 21981 21980 3 21693 21694 21982 3 21693 21982 21981 3 21694 21695 21983 3 21694 21983 21982 3 21695 21696 21984 3 21695 21984 21983 3 21696 21697 21985 3 21696 21985 21984 3 21697 
21698 21986 3 21697 21986 21985 3 21698 21699 21987 3 21698 21987 21986 3 21699 21700 21988 3 21699 21988 21987 3 21700 21701 21989 3 21700 21989 21988 3 21701 21702 21990 3 21701 21990 21989 3 21702 21703 21991 3 21702 21991 21990 3 21703 21704 21992 3 21703 21992 21991 3 21704 21705 21993 3 21704 21993 21992 3 21705 21706 21994 3 21705 21994 21993 3 21706 21707 21995 3 21706 21995 21994 3 21707 21708 21996 3 21707 21996 21995 3 21709 21997 21998 3 21709 21998 21710 3 21709 21779 22067 3 21709 22067 21997 3 21710 21998 21999 3 21710 21999 21711 3 21711 21999 22000 3 21711 22000 21712 3 21712 22000 22001 3 21712 22001 21713 3 21713 22001 22002 3 21713 22002 21714 3 21714 22002 22003 3 21714 22003 21715 3 21715 22003 22004 3 21715 22004 21716 3 21716 22004 22005 3 21716 22005 21717 3 21717 22005 22006 3 21717 22006 21718 3 21718 22006 22007 3 21718 22007 21719 3 21719 22007 22008 3 21719 22008 21720 3 21720 22008 22009 3 21720 22009 21721 3 21721 22009 22010 3 21721 22010 21722 3 21722 22010 22011 3 21722 22011 21723 3 21723 22011 22012 3 21723 22012 21724 3 21724 22012 22013 3 21724 22013 21725 3 21725 22013 22014 3 21725 22014 21726 3 21726 22014 22015 3 21726 22015 21727 3 21727 22015 22016 3 21727 22016 21728 3 21728 22016 22017 3 21728 22017 21729 3 21729 22017 22018 3 21729 22018 21730 3 21730 22018 22019 3 21730 22019 21731 3 21731 22019 22020 3 21731 22020 21732 3 21732 22020 22021 3 21732 22021 21733 3 21733 22021 22022 3 21733 22022 21734 3 21734 22022 22023 3 21734 22023 21735 3 21735 22023 22024 3 21735 22024 21736 3 21736 22024 22025 3 21736 22025 21737 3 21737 22025 22026 3 21737 22026 21738 3 21738 22026 22027 3 21738 22027 21739 3 21739 22027 22028 3 21739 22028 21740 3 21740 22028 22029 3 21740 22029 21741 3 21741 22029 22030 3 21741 22030 21742 3 21742 22030 22031 3 21742 22031 21743 3 21743 22031 22032 3 21743 22032 21744 3 21744 22032 22033 3 21744 22033 21745 3 21745 22033 22034 3 21745 22034 21746 3 21746 22034 22035 3 21746 22035 21747 3 21747 
22035 22036 3 21747 22036 21748 3 21748 22036 22037 3 21748 22037 21749 3 21749 22037 22038 3 21749 22038 21750 3 21750 22038 22039 3 21750 22039 21751 3 21751 22039 22040 3 21751 22040 21752 3 21752 22040 22041 3 21752 22041 21753 3 21753 22041 22042 3 21753 22042 21754 3 21754 22042 22043 3 21754 22043 21755 3 21755 22043 22044 3 21755 22044 21756 3 21756 22044 22045 3 21756 22045 21757 3 21757 22045 22046 3 21757 22046 21758 3 21758 22046 22047 3 21758 22047 21759 3 21759 22047 22048 3 21759 22048 21760 3 21760 22048 22049 3 21760 22049 21761 3 21761 22049 22050 3 21761 22050 21762 3 21762 22050 22051 3 21762 22051 21763 3 21763 22051 22052 3 21763 22052 21764 3 21764 22052 22053 3 21764 22053 21765 3 21765 22053 22054 3 21765 22054 21766 3 21766 22054 22055 3 21766 22055 21767 3 21767 22055 22056 3 21767 22056 21768 3 21768 22056 22057 3 21768 22057 21769 3 21769 22057 22058 3 21769 22058 21770 3 21770 22058 22059 3 21770 22059 21771 3 21771 22059 22060 3 21771 22060 21772 3 21772 22060 22061 3 21772 22061 21773 3 21773 22061 22062 3 21773 22062 21774 3 21774 22062 22063 3 21774 22063 21775 3 21775 22063 22064 3 21775 22064 21776 3 21776 22064 22065 3 21776 22065 21777 3 21777 22065 22068 3 21777 22068 21780 3 21778 22066 22067 3 21778 22067 21779 3 21778 21783 22066 3 22071 22066 21783 3 21780 22068 22069 3 21780 22069 21781 3 21781 22069 22072 3 21781 22072 21784 3 21782 22070 22071 3 21782 22071 21783 3 21782 21786 22070 3 22074 22070 21786 3 21784 22072 22073 3 21784 22073 21785 3 21785 22073 22075 3 21785 22075 21787 3 21786 21788 22074 3 22076 22074 21788 3 21787 22075 22077 3 21787 22077 21789 3 21788 21790 22076 3 22078 22076 21790 3 21789 22077 22079 3 21789 22079 21791 3 21790 21792 22078 3 22080 22078 21792 3 21791 22079 22081 3 21791 22081 21793 3 21792 21794 22080 3 22082 22080 21794 3 21793 22081 22083 3 21793 22083 21795 3 21794 21796 22082 3 22084 22082 21796 3 21795 22083 21797 3 22085 21797 22083 3 21796 21798 22084 3 22086 22084 21798 3 21797 
22085 21799 3 22087 21799 22085 3 21798 21800 22086 3 22088 22086 21800 3 21799 22087 21801 3 22089 21801 22087 3 21800 21802 22088 3 22090 22088 21802 3 21801 22089 21803 3 22091 21803 22089 3 21802 21804 22090 3 22092 22090 21804 3 21803 22091 21805 3 22093 21805 22091 3 21804 21806 22092 3 22094 22092 21806 3 21805 22093 21807 3 22095 21807 22093 3 21806 21808 22094 3 22096 22094 21808 3 21807 22095 21809 3 22097 21809 22095 3 21808 21810 22096 3 22098 22096 21810 3 21809 22097 21811 3 22099 21811 22097 3 21810 21812 22098 3 22100 22098 21812 3 21811 22099 21813 3 22101 21813 22099 3 21812 21814 22100 3 22102 22100 21814 3 21813 22101 21815 3 22103 21815 22101 3 21814 21816 22102 3 22104 22102 21816 3 21815 22103 21817 3 22105 21817 22103 3 21816 21818 22104 3 22106 22104 21818 3 21817 22105 21819 3 22107 21819 22105 3 21818 21820 22106 3 22108 22106 21820 3 21819 22107 21821 3 22109 21821 22107 3 21820 21822 22108 3 22110 22108 21822 3 21821 22109 21823 3 22111 21823 22109 3 21822 21824 22110 3 22112 22110 21824 3 21823 22111 21825 3 22113 21825 22111 3 21824 21826 22112 3 22114 22112 21826 3 21825 22113 21827 3 22115 21827 22113 3 21826 21828 22114 3 22116 22114 21828 3 21827 22115 21829 3 22117 21829 22115 3 21828 21830 22116 3 22118 22116 21830 3 21829 22117 21831 3 22119 21831 22117 3 21830 21832 22118 3 22120 22118 21832 3 21831 22119 21833 3 22121 21833 22119 3 21832 21834 22120 3 22122 22120 21834 3 21833 22121 21835 3 22123 21835 22121 3 21834 21836 22122 3 22124 22122 21836 3 21835 22123 21837 3 22125 21837 22123 3 21836 21838 22124 3 22126 22124 21838 3 21837 22125 21839 3 22127 21839 22125 3 21838 21840 22126 3 22128 22126 21840 3 21839 22127 21841 3 22129 21841 22127 3 21840 21842 22128 3 22130 22128 21842 3 21841 22129 21843 3 22131 21843 22129 3 21842 21844 22130 3 22132 22130 21844 3 21843 22131 21845 3 22133 21845 22131 3 21844 21846 22132 3 22134 22132 21846 3 21845 22133 21847 3 22135 21847 22133 3 21846 21848 22134 3 22136 22134 21848 3 21847 
22135 21849 3 22137 21849 22135 3 21848 21850 22136 3 22138 22136 21850 3 21849 22137 21851 3 22139 21851 22137 3 21850 21852 22138 3 22140 22138 21852 3 21851 22139 21853 3 22141 21853 22139 3 21852 21854 22140 3 22142 22140 21854 3 21853 22141 21855 3 22143 21855 22141 3 21854 21856 22142 3 22144 22142 21856 3 21855 22143 21857 3 22145 21857 22143 3 21856 21858 22144 3 22146 22144 21858 3 21857 22145 21859 3 22147 21859 22145 3 21858 21860 22146 3 22148 22146 21860 3 21859 22147 21861 3 22149 21861 22147 3 21860 21862 22148 3 22150 22148 21862 3 21861 22149 21863 3 22151 21863 22149 3 21862 21864 22150 3 22152 22150 21864 3 21863 22151 21865 3 22153 21865 22151 3 21864 21866 22152 3 22154 22152 21866 3 21865 22153 21867 3 22155 21867 22153 3 21866 21868 22154 3 22156 22154 21868 3 21867 22155 21869 3 22157 21869 22155 3 21868 21870 22158 3 21868 22158 22156 3 21869 22157 21871 3 22159 21871 22157 3 21870 21872 22160 3 21870 22160 22158 3 21871 22159 21873 3 22161 21873 22159 3 21872 21874 22162 3 21872 22162 22160 3 21873 22161 21875 3 22163 21875 22161 3 21874 21876 22164 3 21874 22164 22162 3 21875 22163 21877 3 22165 21877 22163 3 21876 21878 22166 3 21876 22166 22164 3 21877 22165 21879 3 22167 21879 22165 3 21878 21880 22168 3 21878 22168 22166 3 21879 22167 21881 3 22169 21881 22167 3 21880 21882 22170 3 21880 22170 22168 3 21881 22169 21883 3 22171 21883 22169 3 21882 21884 22172 3 21882 22172 22170 3 21883 22171 21885 3 22173 21885 22171 3 21884 21886 22174 3 21884 22174 22172 3 21885 22173 21887 3 22175 21887 22173 3 21886 21888 22176 3 21886 22176 22174 3 21887 22175 22177 3 21887 22177 21889 3 21888 21890 22178 3 21888 22178 22176 3 21889 22177 22179 3 21889 22179 21891 3 21890 21892 22180 3 21890 22180 22178 3 21891 22179 22181 3 21891 22181 21893 3 21892 21894 22182 3 21892 22182 22180 3 21893 22181 22183 3 21893 22183 21895 3 21894 21896 22184 3 21894 22184 22182 3 21895 22183 22185 3 21895 22185 21897 3 21896 21898 22186 3 21896 22186 22184 3 21897 
22185 22187 3 21897 22187 21899 3 21898 21900 22188 3 21898 22188 22186 3 21899 22187 22189 3 21899 22189 21901 3 21900 21902 22190 3 21900 22190 22188 3 21901 22189 22191 3 21901 22191 21903 3 21902 21904 22192 3 21902 22192 22190 3 21903 22191 22193 3 21903 22193 21905 3 21904 21906 22194 3 21904 22194 22192 3 21905 22193 22195 3 21905 22195 21907 3 21906 21908 22196 3 21906 22196 22194 3 21907 22195 22197 3 21907 22197 21909 3 21908 21910 22198 3 21908 22198 22196 3 21909 22197 22199 3 21909 22199 21911 3 21910 21912 22200 3 21910 22200 22198 3 21911 22199 22201 3 21911 22201 21913 3 21912 21914 22202 3 21912 22202 22200 3 21913 22201 22203 3 21913 22203 21915 3 21914 21916 22204 3 21914 22204 22202 3 21915 22203 22205 3 21915 22205 21917 3 21916 21918 22206 3 21916 22206 22204 3 21917 22205 22207 3 21917 22207 21919 3 21918 21920 22208 3 21918 22208 22206 3 21919 22207 22211 3 21919 22211 21923 3 21920 21921 22209 3 21920 22209 22208 3 21921 21924 22212 3 21921 22212 22209 3 21922 21923 22211 3 21922 22211 22210 3 21922 22210 22215 3 21922 22215 21927 3 21924 21925 22213 3 21924 22213 22212 3 21925 21928 22216 3 21925 22216 22213 3 21926 21927 22215 3 21926 22215 22214 3 21926 22214 22284 3 21926 22284 21996 3 21928 21929 22217 3 21928 22217 22216 3 21929 21930 22218 3 21929 22218 22217 3 21930 21931 22219 3 21930 22219 22218 3 21931 21932 22220 3 21931 22220 22219 3 21932 21933 22221 3 21932 22221 22220 3 21933 21934 22222 3 21933 22222 22221 3 21934 21935 22223 3 21934 22223 22222 3 21935 21936 22224 3 21935 22224 22223 3 21936 21937 22225 3 21936 22225 22224 3 21937 21938 22226 3 21937 22226 22225 3 21938 21939 22227 3 21938 22227 22226 3 21939 21940 22228 3 21939 22228 22227 3 21940 21941 22229 3 21940 22229 22228 3 21941 21942 22230 3 21941 22230 22229 3 21942 21943 22231 3 21942 22231 22230 3 21943 21944 22232 3 21943 22232 22231 3 21944 21945 22233 3 21944 22233 22232 3 21945 21946 22234 3 21945 22234 22233 3 21946 21947 22235 3 21946 22235 22234 3 21947 
21948 22236 3 21947 22236 22235 3 21948 21949 22237 3 21948 22237 22236 3 21949 21950 22238 3 21949 22238 22237 3 21950 21951 22239 3 21950 22239 22238 3 21951 21952 22240 3 21951 22240 22239 3 21952 21953 22241 3 21952 22241 22240 3 21953 21954 22242 3 21953 22242 22241 3 21954 21955 22243 3 21954 22243 22242 3 21955 21956 22244 3 21955 22244 22243 3 21956 21957 22245 3 21956 22245 22244 3 21957 21958 22246 3 21957 22246 22245 3 21958 21959 22247 3 21958 22247 22246 3 21959 21960 22248 3 21959 22248 22247 3 21960 21961 22248 3 22249 22248 21961 3 21961 21962 22249 3 22250 22249 21962 3 21962 21963 22250 3 22251 22250 21963 3 21963 21964 22251 3 22252 22251 21964 3 21964 21965 22252 3 22253 22252 21965 3 21965 21966 22253 3 22254 22253 21966 3 21966 21967 22254 3 22255 22254 21967 3 21967 21968 22255 3 22256 22255 21968 3 21968 21969 22256 3 22257 22256 21969 3 21969 21970 22257 3 22258 22257 21970 3 21970 21971 22258 3 22259 22258 21971 3 21971 21972 22259 3 22260 22259 21972 3 21972 21973 22260 3 22261 22260 21973 3 21973 21974 22261 3 22262 22261 21974 3 21974 21975 22262 3 22263 22262 21975 3 21975 21976 22263 3 22264 22263 21976 3 21976 21977 22264 3 22265 22264 21977 3 21977 21978 22265 3 22266 22265 21978 3 21978 21979 22266 3 22267 22266 21979 3 21979 21980 22267 3 22268 22267 21980 3 21980 21981 22268 3 22269 22268 21981 3 21981 21982 22269 3 22270 22269 21982 3 21982 21983 22270 3 22271 22270 21983 3 21983 21984 22271 3 22272 22271 21984 3 21984 21985 22272 3 22273 22272 21985 3 21985 21986 22273 3 22274 22273 21986 3 21986 21987 22274 3 22275 22274 21987 3 21987 21988 22275 3 22276 22275 21988 3 21988 21989 22276 3 22277 22276 21989 3 21989 21990 22277 3 22278 22277 21990 3 21990 21991 22278 3 22279 22278 21991 3 21991 21992 22279 3 22280 22279 21992 3 21992 21993 22280 3 22281 22280 21993 3 21993 21994 22281 3 22282 22281 21994 3 21994 21995 22282 3 22283 22282 21995 3 21995 21996 22283 3 22284 22283 21996 3 21997 22285 21998 3 22286 21998 22285 3 21997 
22067 22285 3 22355 22285 22067 3 21998 22286 21999 3 22287 21999 22286 3 21999 22287 22000 3 22288 22000 22287 3 22000 22288 22001 3 22289 22001 22288 3 22001 22289 22002 3 22290 22002 22289 3 22002 22290 22003 3 22291 22003 22290 3 22003 22291 22004 3 22292 22004 22291 3 22004 22292 22005 3 22293 22005 22292 3 22005 22293 22006 3 22294 22006 22293 3 22006 22294 22007 3 22295 22007 22294 3 22007 22295 22008 3 22296 22008 22295 3 22008 22296 22009 3 22297 22009 22296 3 22009 22297 22010 3 22298 22010 22297 3 22010 22298 22011 3 22299 22011 22298 3 22011 22299 22012 3 22300 22012 22299 3 22012 22300 22013 3 22301 22013 22300 3 22013 22301 22014 3 22302 22014 22301 3 22014 22302 22015 3 22303 22015 22302 3 22015 22303 22016 3 22304 22016 22303 3 22016 22304 22017 3 22305 22017 22304 3 22017 22305 22018 3 22306 22018 22305 3 22018 22306 22019 3 22307 22019 22306 3 22019 22307 22020 3 22308 22020 22307 3 22020 22308 22021 3 22309 22021 22308 3 22021 22309 22022 3 22310 22022 22309 3 22022 22310 22023 3 22311 22023 22310 3 22023 22311 22024 3 22312 22024 22311 3 22024 22312 22025 3 22313 22025 22312 3 22025 22313 22026 3 22314 22026 22313 3 22026 22314 22027 3 22315 22027 22314 3 22027 22315 22028 3 22316 22028 22315 3 22028 22316 22029 3 22317 22029 22316 3 22029 22317 22030 3 22318 22030 22317 3 22030 22318 22031 3 22319 22031 22318 3 22031 22319 22032 3 22320 22032 22319 3 22032 22320 22033 3 22321 22033 22320 3 22033 22321 22034 3 22322 22034 22321 3 22034 22322 22035 3 22323 22035 22322 3 22035 22323 22036 3 22324 22036 22323 3 22036 22324 22037 3 22325 22037 22324 3 22037 22325 22038 3 22326 22038 22325 3 22038 22326 22039 3 22327 22039 22326 3 22039 22327 22040 3 22328 22040 22327 3 22040 22328 22041 3 22329 22041 22328 3 22041 22329 22042 3 22330 22042 22329 3 22042 22330 22043 3 22331 22043 22330 3 22043 22331 22044 3 22332 22044 22331 3 22044 22332 22045 3 22333 22045 22332 3 22045 22333 22046 3 22334 22046 22333 3 22046 22334 22047 3 22335 22047 22334 3 22047 
22335 22048 3 22336 22048 22335 3 22048 22336 22049 3 22337 22049 22336 3 22049 22337 22050 3 22338 22050 22337 3 22050 22338 22051 3 22339 22051 22338 3 22051 22339 22052 3 22340 22052 22339 3 22052 22340 22053 3 22341 22053 22340 3 22053 22341 22054 3 22342 22054 22341 3 22054 22342 22055 3 22343 22055 22342 3 22055 22343 22056 3 22344 22056 22343 3 22056 22344 22057 3 22345 22057 22344 3 22057 22345 22058 3 22346 22058 22345 3 22058 22346 22059 3 22347 22059 22346 3 22059 22347 22060 3 22348 22060 22347 3 22060 22348 22061 3 22349 22061 22348 3 22061 22349 22062 3 22350 22062 22349 3 22062 22350 22063 3 22351 22063 22350 3 22063 22351 22064 3 22352 22064 22351 3 22064 22352 22065 3 22353 22065 22352 3 22065 22353 22068 3 22356 22068 22353 3 22066 22354 22067 3 22355 22067 22354 3 22066 22071 22359 3 22066 22359 22354 3 22068 22356 22069 3 22357 22069 22356 3 22069 22357 22360 3 22069 22360 22072 3 22070 22358 22359 3 22070 22359 22071 3 22070 22074 22362 3 22070 22362 22358 3 22072 22360 22361 3 22072 22361 22073 3 22073 22361 22363 3 22073 22363 22075 3 22074 22076 22364 3 22074 22364 22362 3 22075 22363 22365 3 22075 22365 22077 3 22076 22078 22366 3 22076 22366 22364 3 22077 22365 22367 3 22077 22367 22079 3 22078 22080 22368 3 22078 22368 22366 3 22079 22367 22369 3 22079 22369 22081 3 22080 22082 22370 3 22080 22370 22368 3 22081 22369 22371 3 22081 22371 22083 3 22082 22084 22372 3 22082 22372 22370 3 22083 22371 22373 3 22083 22373 22085 3 22084 22086 22374 3 22084 22374 22372 3 22085 22373 22375 3 22085 22375 22087 3 22086 22088 22376 3 22086 22376 22374 3 22087 22375 22377 3 22087 22377 22089 3 22088 22090 22378 3 22088 22378 22376 3 22089 22377 22379 3 22089 22379 22091 3 22090 22092 22380 3 22090 22380 22378 3 22091 22379 22381 3 22091 22381 22093 3 22092 22094 22382 3 22092 22382 22380 3 22093 22381 22383 3 22093 22383 22095 3 22094 22096 22384 3 22094 22384 22382 3 22095 22383 22385 3 22095 22385 22097 3 22096 22098 22386 3 22096 22386 22384 3 22097 
22385 22387 3 22097 22387 22099 3 22098 22100 22388 3 22098 22388 22386 3 22099 22387 22389 3 22099 22389 22101 3 22100 22102 22390 3 22100 22390 22388 3 22101 22389 22391 3 22101 22391 22103 3 22102 22104 22392 3 22102 22392 22390 3 22103 22391 22393 3 22103 22393 22105 3 22104 22106 22394 3 22104 22394 22392 3 22105 22393 22395 3 22105 22395 22107 3 22106 22108 22396 3 22106 22396 22394 3 22107 22395 22397 3 22107 22397 22109 3 22108 22110 22398 3 22108 22398 22396 3 22109 22397 22399 3 22109 22399 22111 3 22110 22112 22400 3 22110 22400 22398 3 22111 22399 22401 3 22111 22401 22113 3 22112 22114 22402 3 22112 22402 22400 3 22113 22401 22403 3 22113 22403 22115 3 22114 22116 22404 3 22114 22404 22402 3 22115 22403 22405 3 22115 22405 22117 3 22116 22118 22406 3 22116 22406 22404 3 22117 22405 22407 3 22117 22407 22119 3 22118 22120 22408 3 22118 22408 22406 3 22119 22407 22409 3 22119 22409 22121 3 22120 22122 22410 3 22120 22410 22408 3 22121 22409 22411 3 22121 22411 22123 3 22122 22124 22412 3 22122 22412 22410 3 22123 22411 22413 3 22123 22413 22125 3 22124 22126 22414 3 22124 22414 22412 3 22125 22413 22415 3 22125 22415 22127 3 22126 22128 22416 3 22126 22416 22414 3 22127 22415 22417 3 22127 22417 22129 3 22128 22130 22418 3 22128 22418 22416 3 22129 22417 22419 3 22129 22419 22131 3 22130 22132 22420 3 22130 22420 22418 3 22131 22419 22421 3 22131 22421 22133 3 22132 22134 22422 3 22132 22422 22420 3 22133 22421 22423 3 22133 22423 22135 3 22134 22136 22424 3 22134 22424 22422 3 22135 22423 22425 3 22135 22425 22137 3 22136 22138 22426 3 22136 22426 22424 3 22137 22425 22427 3 22137 22427 22139 3 22138 22140 22428 3 22138 22428 22426 3 22139 22427 22429 3 22139 22429 22141 3 22140 22142 22430 3 22140 22430 22428 3 22141 22429 22431 3 22141 22431 22143 3 22142 22144 22430 3 22432 22430 22144 3 22143 22431 22433 3 22143 22433 22145 3 22144 22146 22432 3 22434 22432 22146 3 22145 22433 22435 3 22145 22435 22147 3 22146 22148 22434 3 22436 22434 22148 3 22147 
22435 22437 3 22147 22437 22149 3 22148 22150 22436 3 22438 22436 22150 3 22149 22437 22439 3 22149 22439 22151 3 22150 22152 22438 3 22440 22438 22152 3 22151 22439 22441 3 22151 22441 22153 3 22152 22154 22440 3 22442 22440 22154 3 22153 22441 22443 3 22153 22443 22155 3 22154 22156 22442 3 22444 22442 22156 3 22155 22443 22445 3 22155 22445 22157 3 22156 22158 22444 3 22446 22444 22158 3 22157 22445 22447 3 22157 22447 22159 3 22158 22160 22446 3 22448 22446 22160 3 22159 22447 22449 3 22159 22449 22161 3 22160 22162 22448 3 22450 22448 22162 3 22161 22449 22163 3 22451 22163 22449 3 22162 22164 22450 3 22452 22450 22164 3 22163 22451 22165 3 22453 22165 22451 3 22164 22166 22452 3 22454 22452 22166 3 22165 22453 22167 3 22455 22167 22453 3 22166 22168 22454 3 22456 22454 22168 3 22167 22455 22169 3 22457 22169 22455 3 22168 22170 22456 3 22458 22456 22170 3 22169 22457 22171 3 22459 22171 22457 3 22170 22172 22458 3 22460 22458 22172 3 22171 22459 22173 3 22461 22173 22459 3 22172 22174 22460 3 22462 22460 22174 3 22173 22461 22175 3 22463 22175 22461 3 22174 22176 22462 3 22464 22462 22176 3 22175 22463 22177 3 22465 22177 22463 3 22176 22178 22464 3 22466 22464 22178 3 22177 22465 22179 3 22467 22179 22465 3 22178 22180 22466 3 22468 22466 22180 3 22179 22467 22181 3 22469 22181 22467 3 22180 22182 22468 3 22470 22468 22182 3 22181 22469 22183 3 22471 22183 22469 3 22182 22184 22470 3 22472 22470 22184 3 22183 22471 22185 3 22473 22185 22471 3 22184 22186 22472 3 22474 22472 22186 3 22185 22473 22187 3 22475 22187 22473 3 22186 22188 22474 3 22476 22474 22188 3 22187 22475 22189 3 22477 22189 22475 3 22188 22190 22476 3 22478 22476 22190 3 22189 22477 22191 3 22479 22191 22477 3 22190 22192 22478 3 22480 22478 22192 3 22191 22479 22193 3 22481 22193 22479 3 22192 22194 22480 3 22482 22480 22194 3 22193 22481 22195 3 22483 22195 22481 3 22194 22196 22482 3 22484 22482 22196 3 22195 22483 22197 3 22485 22197 22483 3 22196 22198 22484 3 22486 22484 22198 3 22197 
22485 22199 3 22487 22199 22485 3 22198 22200 22486 3 22488 22486 22200 3 22199 22487 22201 3 22489 22201 22487 3 22200 22202 22488 3 22490 22488 22202 3 22201 22489 22203 3 22491 22203 22489 3 22202 22204 22490 3 22492 22490 22204 3 22203 22491 22205 3 22493 22205 22491 3 22204 22206 22492 3 22494 22492 22206 3 22205 22493 22207 3 22495 22207 22493 3 22206 22208 22494 3 22496 22494 22208 3 22207 22495 22211 3 22499 22211 22495 3 22208 22209 22496 3 22497 22496 22209 3 22209 22212 22497 3 22500 22497 22212 3 22210 22211 22498 3 22499 22498 22211 3 22210 22498 22215 3 22503 22215 22498 3 22212 22213 22500 3 22501 22500 22213 3 22213 22216 22501 3 22504 22501 22216 3 22214 22215 22502 3 22503 22502 22215 3 22214 22502 22284 3 22572 22284 22502 3 22216 22217 22504 3 22505 22504 22217 3 22217 22218 22505 3 22506 22505 22218 3 22218 22219 22506 3 22507 22506 22219 3 22219 22220 22507 3 22508 22507 22220 3 22220 22221 22508 3 22509 22508 22221 3 22221 22222 22509 3 22510 22509 22222 3 22222 22223 22510 3 22511 22510 22223 3 22223 22224 22511 3 22512 22511 22224 3 22224 22225 22512 3 22513 22512 22225 3 22225 22226 22513 3 22514 22513 22226 3 22226 22227 22514 3 22515 22514 22227 3 22227 22228 22515 3 22516 22515 22228 3 22228 22229 22516 3 22517 22516 22229 3 22229 22230 22517 3 22518 22517 22230 3 22230 22231 22518 3 22519 22518 22231 3 22231 22232 22519 3 22520 22519 22232 3 22232 22233 22520 3 22521 22520 22233 3 22233 22234 22521 3 22522 22521 22234 3 22234 22235 22523 3 22234 22523 22522 3 22235 22236 22524 3 22235 22524 22523 3 22236 22237 22525 3 22236 22525 22524 3 22237 22238 22526 3 22237 22526 22525 3 22238 22239 22527 3 22238 22527 22526 3 22239 22240 22528 3 22239 22528 22527 3 22240 22241 22529 3 22240 22529 22528 3 22241 22242 22530 3 22241 22530 22529 3 22242 22243 22531 3 22242 22531 22530 3 22243 22244 22532 3 22243 22532 22531 3 22244 22245 22533 3 22244 22533 22532 3 22245 22246 22534 3 22245 22534 22533 3 22246 22247 22535 3 22246 22535 22534 3 22247 
22248 22536 3 22247 22536 22535 3 22248 22249 22537 3 22248 22537 22536 3 22249 22250 22538 3 22249 22538 22537 3 22250 22251 22539 3 22250 22539 22538 3 22251 22252 22540 3 22251 22540 22539 3 22252 22253 22541 3 22252 22541 22540 3 22253 22254 22542 3 22253 22542 22541 3 22254 22255 22543 3 22254 22543 22542 3 22255 22256 22544 3 22255 22544 22543 3 22256 22257 22545 3 22256 22545 22544 3 22257 22258 22546 3 22257 22546 22545 3 22258 22259 22547 3 22258 22547 22546 3 22259 22260 22548 3 22259 22548 22547 3 22260 22261 22549 3 22260 22549 22548 3 22261 22262 22550 3 22261 22550 22549 3 22262 22263 22551 3 22262 22551 22550 3 22263 22264 22552 3 22263 22552 22551 3 22264 22265 22553 3 22264 22553 22552 3 22265 22266 22554 3 22265 22554 22553 3 22266 22267 22555 3 22266 22555 22554 3 22267 22268 22556 3 22267 22556 22555 3 22268 22269 22557 3 22268 22557 22556 3 22269 22270 22558 3 22269 22558 22557 3 22270 22271 22559 3 22270 22559 22558 3 22271 22272 22560 3 22271 22560 22559 3 22272 22273 22561 3 22272 22561 22560 3 22273 22274 22562 3 22273 22562 22561 3 22274 22275 22563 3 22274 22563 22562 3 22275 22276 22564 3 22275 22564 22563 3 22276 22277 22565 3 22276 22565 22564 3 22277 22278 22566 3 22277 22566 22565 3 22278 22279 22567 3 22278 22567 22566 3 22279 22280 22568 3 22279 22568 22567 3 22280 22281 22569 3 22280 22569 22568 3 22281 22282 22570 3 22281 22570 22569 3 22282 22283 22571 3 22282 22571 22570 3 22283 22284 22572 3 22283 22572 22571 3 22285 22573 22574 3 22285 22574 22286 3 22285 22355 22643 3 22285 22643 22573 3 22286 22574 22575 3 22286 22575 22287 3 22287 22575 22576 3 22287 22576 22288 3 22288 22576 22577 3 22288 22577 22289 3 22289 22577 22578 3 22289 22578 22290 3 22290 22578 22579 3 22290 22579 22291 3 22291 22579 22580 3 22291 22580 22292 3 22292 22580 22581 3 22292 22581 22293 3 22293 22581 22582 3 22293 22582 22294 3 22294 22582 22583 3 22294 22583 22295 3 22295 22583 22584 3 22295 22584 22296 3 22296 22584 22585 3 22296 22585 22297 3 22297 
22585 22586 3 22297 22586 22298 3 22298 22586 22587 3 22298 22587 22299 3 22299 22587 22588 3 22299 22588 22300 3 22300 22588 22589 3 22300 22589 22301 3 22301 22589 22590 3 22301 22590 22302 3 22302 22590 22591 3 22302 22591 22303 3 22303 22591 22592 3 22303 22592 22304 3 22304 22592 22593 3 22304 22593 22305 3 22305 22593 22594 3 22305 22594 22306 3 22306 22594 22595 3 22306 22595 22307 3 22307 22595 22596 3 22307 22596 22308 3 22308 22596 22597 3 22308 22597 22309 3 22309 22597 22598 3 22309 22598 22310 3 22310 22598 22599 3 22310 22599 22311 3 22311 22599 22600 3 22311 22600 22312 3 22312 22600 22601 3 22312 22601 22313 3 22313 22601 22602 3 22313 22602 22314 3 22314 22602 22603 3 22314 22603 22315 3 22315 22603 22604 3 22315 22604 22316 3 22316 22604 22605 3 22316 22605 22317 3 22317 22605 22606 3 22317 22606 22318 3 22318 22606 22607 3 22318 22607 22319 3 22319 22607 22608 3 22319 22608 22320 3 22320 22608 22609 3 22320 22609 22321 3 22321 22609 22610 3 22321 22610 22322 3 22322 22610 22611 3 22322 22611 22323 3 22323 22611 22612 3 22323 22612 22324 3 22324 22612 22613 3 22324 22613 22325 3 22325 22613 22614 3 22325 22614 22326 3 22326 22614 22615 3 22326 22615 22327 3 22327 22615 22616 3 22327 22616 22328 3 22328 22616 22617 3 22328 22617 22329 3 22329 22617 22618 3 22329 22618 22330 3 22330 22618 22619 3 22330 22619 22331 3 22331 22619 22620 3 22331 22620 22332 3 22332 22620 22621 3 22332 22621 22333 3 22333 22621 22622 3 22333 22622 22334 3 22334 22622 22623 3 22334 22623 22335 3 22335 22623 22624 3 22335 22624 22336 3 22336 22624 22625 3 22336 22625 22337 3 22337 22625 22626 3 22337 22626 22338 3 22338 22626 22627 3 22338 22627 22339 3 22339 22627 22628 3 22339 22628 22340 3 22340 22628 22629 3 22340 22629 22341 3 22341 22629 22630 3 22341 22630 22342 3 22342 22630 22631 3 22342 22631 22343 3 22343 22631 22632 3 22343 22632 22344 3 22344 22632 22633 3 22344 22633 22345 3 22345 22633 22634 3 22345 22634 22346 3 22346 22634 22347 3 22635 22347 22634 3 22347 
22635 22348 3 22636 22348 22635 3 22348 22636 22349 3 22637 22349 22636 3 22349 22637 22350 3 22638 22350 22637 3 22350 22638 22351 3 22639 22351 22638 3 22351 22639 22352 3 22640 22352 22639 3 22352 22640 22353 3 22641 22353 22640 3 22353 22641 22356 3 22644 22356 22641 3 22354 22642 22355 3 22643 22355 22642 3 22354 22359 22642 3 22647 22642 22359 3 22356 22644 22357 3 22645 22357 22644 3 22357 22645 22360 3 22648 22360 22645 3 22358 22646 22359 3 22647 22359 22646 3 22358 22362 22646 3 22650 22646 22362 3 22360 22648 22361 3 22649 22361 22648 3 22361 22649 22363 3 22651 22363 22649 3 22362 22364 22650 3 22652 22650 22364 3 22363 22651 22365 3 22653 22365 22651 3 22364 22366 22652 3 22654 22652 22366 3 22365 22653 22367 3 22655 22367 22653 3 22366 22368 22654 3 22656 22654 22368 3 22367 22655 22369 3 22657 22369 22655 3 22368 22370 22656 3 22658 22656 22370 3 22369 22657 22371 3 22659 22371 22657 3 22370 22372 22658 3 22660 22658 22372 3 22371 22659 22373 3 22661 22373 22659 3 22372 22374 22660 3 22662 22660 22374 3 22373 22661 22375 3 22663 22375 22661 3 22374 22376 22662 3 22664 22662 22376 3 22375 22663 22377 3 22665 22377 22663 3 22376 22378 22664 3 22666 22664 22378 3 22377 22665 22379 3 22667 22379 22665 3 22378 22380 22666 3 22668 22666 22380 3 22379 22667 22381 3 22669 22381 22667 3 22380 22382 22668 3 22670 22668 22382 3 22381 22669 22383 3 22671 22383 22669 3 22382 22384 22670 3 22672 22670 22384 3 22383 22671 22385 3 22673 22385 22671 3 22384 22386 22672 3 22674 22672 22386 3 22385 22673 22387 3 22675 22387 22673 3 22386 22388 22674 3 22676 22674 22388 3 22387 22675 22389 3 22677 22389 22675 3 22388 22390 22676 3 22678 22676 22390 3 22389 22677 22391 3 22679 22391 22677 3 22390 22392 22678 3 22680 22678 22392 3 22391 22679 22393 3 22681 22393 22679 3 22392 22394 22680 3 22682 22680 22394 3 22393 22681 22395 3 22683 22395 22681 3 22394 22396 22682 3 22684 22682 22396 3 22395 22683 22397 3 22685 22397 22683 3 22396 22398 22684 3 22686 22684 22398 3 22397 
22685 22399 3 22687 22399 22685 3 22398 22400 22686 3 22688 22686 22400 3 22399 22687 22401 3 22689 22401 22687 3 22400 22402 22688 3 22690 22688 22402 3 22401 22689 22403 3 22691 22403 22689 3 22402 22404 22690 3 22692 22690 22404 3 22403 22691 22405 3 22693 22405 22691 3 22404 22406 22692 3 22694 22692 22406 3 22405 22693 22407 3 22695 22407 22693 3 22406 22408 22694 3 22696 22694 22408 3 22407 22695 22409 3 22697 22409 22695 3 22408 22410 22696 3 22698 22696 22410 3 22409 22697 22411 3 22699 22411 22697 3 22410 22412 22698 3 22700 22698 22412 3 22411 22699 22413 3 22701 22413 22699 3 22412 22414 22700 3 22702 22700 22414 3 22413 22701 22415 3 22703 22415 22701 3 22414 22416 22702 3 22704 22702 22416 3 22415 22703 22417 3 22705 22417 22703 3 22416 22418 22704 3 22706 22704 22418 3 22417 22705 22419 3 22707 22419 22705 3 22418 22420 22708 3 22418 22708 22706 3 22419 22707 22421 3 22709 22421 22707 3 22420 22422 22710 3 22420 22710 22708 3 22421 22709 22423 3 22711 22423 22709 3 22422 22424 22712 3 22422 22712 22710 3 22423 22711 22425 3 22713 22425 22711 3 22424 22426 22714 3 22424 22714 22712 3 22425 22713 22427 3 22715 22427 22713 3 22426 22428 22716 3 22426 22716 22714 3 22427 22715 22429 3 22717 22429 22715 3 22428 22430 22718 3 22428 22718 22716 3 22429 22717 22431 3 22719 22431 22717 3 22430 22432 22720 3 22430 22720 22718 3 22431 22719 22433 3 22721 22433 22719 3 22432 22434 22722 3 22432 22722 22720 3 22433 22721 22435 3 22723 22435 22721 3 22434 22436 22724 3 22434 22724 22722 3 22435 22723 22437 3 22725 22437 22723 3 22436 22438 22726 3 22436 22726 22724 3 22437 22725 22439 3 22727 22439 22725 3 22438 22440 22728 3 22438 22728 22726 3 22439 22727 22729 3 22439 22729 22441 3 22440 22442 22730 3 22440 22730 22728 3 22441 22729 22731 3 22441 22731 22443 3 22442 22444 22732 3 22442 22732 22730 3 22443 22731 22733 3 22443 22733 22445 3 22444 22446 22734 3 22444 22734 22732 3 22445 22733 22735 3 22445 22735 22447 3 22446 22448 22736 3 22446 22736 22734 3 22447 
22735 22737 3 22447 22737 22449 3 22448 22450 22738 3 22448 22738 22736 3 22449 22737 22739 3 22449 22739 22451 3 22450 22452 22740 3 22450 22740 22738 3 22451 22739 22741 3 22451 22741 22453 3 22452 22454 22742 3 22452 22742 22740 3 22453 22741 22743 3 22453 22743 22455 3 22454 22456 22744 3 22454 22744 22742 3 22455 22743 22745 3 22455 22745 22457 3 22456 22458 22746 3 22456 22746 22744 3 22457 22745 22747 3 22457 22747 22459 3 22458 22460 22748 3 22458 22748 22746 3 22459 22747 22749 3 22459 22749 22461 3 22460 22462 22750 3 22460 22750 22748 3 22461 22749 22751 3 22461 22751 22463 3 22462 22464 22752 3 22462 22752 22750 3 22463 22751 22753 3 22463 22753 22465 3 22464 22466 22754 3 22464 22754 22752 3 22465 22753 22755 3 22465 22755 22467 3 22466 22468 22756 3 22466 22756 22754 3 22467 22755 22757 3 22467 22757 22469 3 22468 22470 22758 3 22468 22758 22756 3 22469 22757 22759 3 22469 22759 22471 3 22470 22472 22760 3 22470 22760 22758 3 22471 22759 22761 3 22471 22761 22473 3 22472 22474 22762 3 22472 22762 22760 3 22473 22761 22763 3 22473 22763 22475 3 22474 22476 22764 3 22474 22764 22762 3 22475 22763 22765 3 22475 22765 22477 3 22476 22478 22766 3 22476 22766 22764 3 22477 22765 22767 3 22477 22767 22479 3 22478 22480 22768 3 22478 22768 22766 3 22479 22767 22769 3 22479 22769 22481 3 22480 22482 22770 3 22480 22770 22768 3 22481 22769 22771 3 22481 22771 22483 3 22482 22484 22772 3 22482 22772 22770 3 22483 22771 22773 3 22483 22773 22485 3 22484 22486 22774 3 22484 22774 22772 3 22485 22773 22775 3 22485 22775 22487 3 22486 22488 22776 3 22486 22776 22774 3 22487 22775 22777 3 22487 22777 22489 3 22488 22490 22778 3 22488 22778 22776 3 22489 22777 22779 3 22489 22779 22491 3 22490 22492 22780 3 22490 22780 22778 3 22491 22779 22781 3 22491 22781 22493 3 22492 22494 22782 3 22492 22782 22780 3 22493 22781 22783 3 22493 22783 22495 3 22494 22496 22784 3 22494 22784 22782 3 22495 22783 22787 3 22495 22787 22499 3 22496 22497 22785 3 22496 22785 22784 3 22497 
22500 22788 3 22497 22788 22785 3 22498 22499 22787 3 22498 22787 22786 3 22498 22786 22791 3 22498 22791 22503 3 22500 22501 22789 3 22500 22789 22788 3 22501 22504 22792 3 22501 22792 22789 3 22502 22503 22791 3 22502 22791 22790 3 22502 22790 22860 3 22502 22860 22572 3 22504 22505 22793 3 22504 22793 22792 3 22505 22506 22794 3 22505 22794 22793 3 22506 22507 22795 3 22506 22795 22794 3 22507 22508 22796 3 22507 22796 22795 3 22508 22509 22797 3 22508 22797 22796 3 22509 22510 22798 3 22509 22798 22797 3 22510 22511 22799 3 22510 22799 22798 3 22511 22512 22799 3 22800 22799 22512 3 22512 22513 22800 3 22801 22800 22513 3 22513 22514 22801 3 22802 22801 22514 3 22514 22515 22802 3 22803 22802 22515 3 22515 22516 22803 3 22804 22803 22516 3 22516 22517 22804 3 22805 22804 22517 3 22517 22518 22805 3 22806 22805 22518 3 22518 22519 22806 3 22807 22806 22519 3 22519 22520 22807 3 22808 22807 22520 3 22520 22521 22808 3 22809 22808 22521 3 22521 22522 22809 3 22810 22809 22522 3 22522 22523 22810 3 22811 22810 22523 3 22523 22524 22811 3 22812 22811 22524 3 22524 22525 22812 3 22813 22812 22525 3 22525 22526 22813 3 22814 22813 22526 3 22526 22527 22814 3 22815 22814 22527 3 22527 22528 22815 3 22816 22815 22528 3 22528 22529 22816 3 22817 22816 22529 3 22529 22530 22817 3 22818 22817 22530 3 22530 22531 22818 3 22819 22818 22531 3 22531 22532 22819 3 22820 22819 22532 3 22532 22533 22820 3 22821 22820 22533 3 22533 22534 22821 3 22822 22821 22534 3 22534 22535 22822 3 22823 22822 22535 3 22535 22536 22823 3 22824 22823 22536 3 22536 22537 22824 3 22825 22824 22537 3 22537 22538 22825 3 22826 22825 22538 3 22538 22539 22826 3 22827 22826 22539 3 22539 22540 22827 3 22828 22827 22540 3 22540 22541 22828 3 22829 22828 22541 3 22541 22542 22829 3 22830 22829 22542 3 22542 22543 22830 3 22831 22830 22543 3 22543 22544 22831 3 22832 22831 22544 3 22544 22545 22832 3 22833 22832 22545 3 22545 22546 22833 3 22834 22833 22546 3 22546 22547 22834 3 22835 22834 22547 3 22547 
22548 22835 3 22836 22835 22548 3 22548 22549 22836 3 22837 22836 22549 3 22549 22550 22837 3 22838 22837 22550 3 22550 22551 22838 3 22839 22838 22551 3 22551 22552 22839 3 22840 22839 22552 3 22552 22553 22840 3 22841 22840 22553 3 22553 22554 22841 3 22842 22841 22554 3 22554 22555 22842 3 22843 22842 22555 3 22555 22556 22843 3 22844 22843 22556 3 22556 22557 22844 3 22845 22844 22557 3 22557 22558 22845 3 22846 22845 22558 3 22558 22559 22846 3 22847 22846 22559 3 22559 22560 22847 3 22848 22847 22560 3 22560 22561 22848 3 22849 22848 22561 3 22561 22562 22849 3 22850 22849 22562 3 22562 22563 22850 3 22851 22850 22563 3 22563 22564 22851 3 22852 22851 22564 3 22564 22565 22852 3 22853 22852 22565 3 22565 22566 22853 3 22854 22853 22566 3 22566 22567 22854 3 22855 22854 22567 3 22567 22568 22855 3 22856 22855 22568 3 22568 22569 22856 3 22857 22856 22569 3 22569 22570 22857 3 22858 22857 22570 3 22570 22571 22858 3 22859 22858 22571 3 22571 22572 22859 3 22860 22859 22572 3 22573 22861 22574 3 22862 22574 22861 3 22573 22643 22931 3 22573 22931 22861 3 22574 22862 22575 3 22863 22575 22862 3 22575 22863 22576 3 22864 22576 22863 3 22576 22864 22577 3 22865 22577 22864 3 22577 22865 22578 3 22866 22578 22865 3 22578 22866 22579 3 22867 22579 22866 3 22579 22867 22580 3 22868 22580 22867 3 22580 22868 22581 3 22869 22581 22868 3 22581 22869 22582 3 22870 22582 22869 3 22582 22870 22583 3 22871 22583 22870 3 22583 22871 22584 3 22872 22584 22871 3 22584 22872 22585 3 22873 22585 22872 3 22585 22873 22586 3 22874 22586 22873 3 22586 22874 22587 3 22875 22587 22874 3 22587 22875 22588 3 22876 22588 22875 3 22588 22876 22589 3 22877 22589 22876 3 22589 22877 22590 3 22878 22590 22877 3 22590 22878 22591 3 22879 22591 22878 3 22591 22879 22592 3 22880 22592 22879 3 22592 22880 22593 3 22881 22593 22880 3 22593 22881 22594 3 22882 22594 22881 3 22594 22882 22595 3 22883 22595 22882 3 22595 22883 22596 3 22884 22596 22883 3 22596 22884 22597 3 22885 22597 22884 3 22597 
22885 22598 3 22886 22598 22885 3 22598 22886 22599 3 22887 22599 22886 3 22599 22887 22600 3 22888 22600 22887 3 22600 22888 22601 3 22889 22601 22888 3 22601 22889 22602 3 22890 22602 22889 3 22602 22890 22603 3 22891 22603 22890 3 22603 22891 22604 3 22892 22604 22891 3 22604 22892 22605 3 22893 22605 22892 3 22605 22893 22606 3 22894 22606 22893 3 22606 22894 22607 3 22895 22607 22894 3 22607 22895 22608 3 22896 22608 22895 3 22608 22896 22609 3 22897 22609 22896 3 22609 22897 22610 3 22898 22610 22897 3 22610 22898 22611 3 22899 22611 22898 3 22611 22899 22612 3 22900 22612 22899 3 22612 22900 22613 3 22901 22613 22900 3 22613 22901 22614 3 22902 22614 22901 3 22614 22902 22615 3 22903 22615 22902 3 22615 22903 22616 3 22904 22616 22903 3 22616 22904 22617 3 22905 22617 22904 3 22617 22905 22618 3 22906 22618 22905 3 22618 22906 22619 3 22907 22619 22906 3 22619 22907 22620 3 22908 22620 22907 3 22620 22908 22621 3 22909 22621 22908 3 22621 22909 22622 3 22910 22622 22909 3 22622 22910 22623 3 22911 22623 22910 3 22623 22911 22912 3 22623 22912 22624 3 22624 22912 22913 3 22624 22913 22625 3 22625 22913 22914 3 22625 22914 22626 3 22626 22914 22915 3 22626 22915 22627 3 22627 22915 22916 3 22627 22916 22628 3 22628 22916 22917 3 22628 22917 22629 3 22629 22917 22918 3 22629 22918 22630 3 22630 22918 22919 3 22630 22919 22631 3 22631 22919 22920 3 22631 22920 22632 3 22632 22920 22921 3 22632 22921 22633 3 22633 22921 22922 3 22633 22922 22634 3 22634 22922 22923 3 22634 22923 22635 3 22635 22923 22924 3 22635 22924 22636 3 22636 22924 22925 3 22636 22925 22637 3 22637 22925 22926 3 22637 22926 22638 3 22638 22926 22927 3 22638 22927 22639 3 22639 22927 22928 3 22639 22928 22640 3 22640 22928 22929 3 22640 22929 22641 3 22641 22929 22932 3 22641 22932 22644 3 22642 22930 22931 3 22642 22931 22643 3 22642 22647 22935 3 22642 22935 22930 3 22644 22932 22933 3 22644 22933 22645 3 22645 22933 22936 3 22645 22936 22648 3 22646 22934 22935 3 22646 22935 22647 3 22646 
22650 22938 3 22646 22938 22934 3 22648 22936 22937 3 22648 22937 22649 3 22649 22937 22939 3 22649 22939 22651 3 22650 22652 22940 3 22650 22940 22938 3 22651 22939 22941 3 22651 22941 22653 3 22652 22654 22942 3 22652 22942 22940 3 22653 22941 22943 3 22653 22943 22655 3 22654 22656 22944 3 22654 22944 22942 3 22655 22943 22945 3 22655 22945 22657 3 22656 22658 22946 3 22656 22946 22944 3 22657 22945 22947 3 22657 22947 22659 3 22658 22660 22948 3 22658 22948 22946 3 22659 22947 22949 3 22659 22949 22661 3 22660 22662 22950 3 22660 22950 22948 3 22661 22949 22951 3 22661 22951 22663 3 22662 22664 22952 3 22662 22952 22950 3 22663 22951 22953 3 22663 22953 22665 3 22664 22666 22954 3 22664 22954 22952 3 22665 22953 22955 3 22665 22955 22667 3 22666 22668 22956 3 22666 22956 22954 3 22667 22955 22957 3 22667 22957 22669 3 22668 22670 22958 3 22668 22958 22956 3 22669 22957 22959 3 22669 22959 22671 3 22670 22672 22960 3 22670 22960 22958 3 22671 22959 22961 3 22671 22961 22673 3 22672 22674 22962 3 22672 22962 22960 3 22673 22961 22963 3 22673 22963 22675 3 22674 22676 22964 3 22674 22964 22962 3 22675 22963 22965 3 22675 22965 22677 3 22676 22678 22966 3 22676 22966 22964 3 22677 22965 22967 3 22677 22967 22679 3 22678 22680 22968 3 22678 22968 22966 3 22679 22967 22969 3 22679 22969 22681 3 22680 22682 22970 3 22680 22970 22968 3 22681 22969 22971 3 22681 22971 22683 3 22682 22684 22972 3 22682 22972 22970 3 22683 22971 22973 3 22683 22973 22685 3 22684 22686 22974 3 22684 22974 22972 3 22685 22973 22975 3 22685 22975 22687 3 22686 22688 22976 3 22686 22976 22974 3 22687 22975 22977 3 22687 22977 22689 3 22688 22690 22978 3 22688 22978 22976 3 22689 22977 22979 3 22689 22979 22691 3 22690 22692 22980 3 22690 22980 22978 3 22691 22979 22981 3 22691 22981 22693 3 22692 22694 22982 3 22692 22982 22980 3 22693 22981 22983 3 22693 22983 22695 3 22694 22696 22984 3 22694 22984 22982 3 22695 22983 22985 3 22695 22985 22697 3 22696 22698 22984 3 22986 22984 22698 3 22697 
22985 22987 3 22697 22987 22699 3 22698 22700 22986 3 22988 22986 22700 3 22699 22987 22989 3 22699 22989 22701 3 22700 22702 22988 3 22990 22988 22702 3 22701 22989 22991 3 22701 22991 22703 3 22702 22704 22990 3 22992 22990 22704 3 22703 22991 22993 3 22703 22993 22705 3 22704 22706 22992 3 22994 22992 22706 3 22705 22993 22995 3 22705 22995 22707 3 22706 22708 22994 3 22996 22994 22708 3 22707 22995 22997 3 22707 22997 22709 3 22708 22710 22996 3 22998 22996 22710 3 22709 22997 22999 3 22709 22999 22711 3 22710 22712 22998 3 23000 22998 22712 3 22711 22999 23001 3 22711 23001 22713 3 22712 22714 23000 3 23002 23000 22714 3 22713 23001 23003 3 22713 23003 22715 3 22714 22716 23002 3 23004 23002 22716 3 22715 23003 23005 3 22715 23005 22717 3 22716 22718 23004 3 23006 23004 22718 3 22717 23005 22719 3 23007 22719 23005 3 22718 22720 23006 3 23008 23006 22720 3 22719 23007 22721 3 23009 22721 23007 3 22720 22722 23008 3 23010 23008 22722 3 22721 23009 22723 3 23011 22723 23009 3 22722 22724 23010 3 23012 23010 22724 3 22723 23011 22725 3 23013 22725 23011 3 22724 22726 23012 3 23014 23012 22726 3 22725 23013 22727 3 23015 22727 23013 3 22726 22728 23014 3 23016 23014 22728 3 22727 23015 22729 3 23017 22729 23015 3 22728 22730 23016 3 23018 23016 22730 3 22729 23017 22731 3 23019 22731 23017 3 22730 22732 23018 3 23020 23018 22732 3 22731 23019 22733 3 23021 22733 23019 3 22732 22734 23020 3 23022 23020 22734 3 22733 23021 22735 3 23023 22735 23021 3 22734 22736 23022 3 23024 23022 22736 3 22735 23023 22737 3 23025 22737 23023 3 22736 22738 23024 3 23026 23024 22738 3 22737 23025 22739 3 23027 22739 23025 3 22738 22740 23026 3 23028 23026 22740 3 22739 23027 22741 3 23029 22741 23027 3 22740 22742 23028 3 23030 23028 22742 3 22741 23029 22743 3 23031 22743 23029 3 22742 22744 23030 3 23032 23030 22744 3 22743 23031 22745 3 23033 22745 23031 3 22744 22746 23032 3 23034 23032 22746 3 22745 23033 22747 3 23035 22747 23033 3 22746 22748 23034 3 23036 23034 22748 3 22747 
23035 22749 3 23037 22749 23035 3 22748 22750 23036 3 23038 23036 22750 3 22749 23037 22751 3 23039 22751 23037 3 22750 22752 23038 3 23040 23038 22752 3 22751 23039 22753 3 23041 22753 23039 3 22752 22754 23040 3 23042 23040 22754 3 22753 23041 22755 3 23043 22755 23041 3 22754 22756 23042 3 23044 23042 22756 3 22755 23043 22757 3 23045 22757 23043 3 22756 22758 23044 3 23046 23044 22758 3 22757 23045 22759 3 23047 22759 23045 3 22758 22760 23046 3 23048 23046 22760 3 22759 23047 22761 3 23049 22761 23047 3 22760 22762 23048 3 23050 23048 22762 3 22761 23049 22763 3 23051 22763 23049 3 22762 22764 23050 3 23052 23050 22764 3 22763 23051 22765 3 23053 22765 23051 3 22764 22766 23052 3 23054 23052 22766 3 22765 23053 22767 3 23055 22767 23053 3 22766 22768 23054 3 23056 23054 22768 3 22767 23055 22769 3 23057 22769 23055 3 22768 22770 23056 3 23058 23056 22770 3 22769 23057 22771 3 23059 22771 23057 3 22770 22772 23058 3 23060 23058 22772 3 22771 23059 22773 3 23061 22773 23059 3 22772 22774 23060 3 23062 23060 22774 3 22773 23061 22775 3 23063 22775 23061 3 22774 22776 23062 3 23064 23062 22776 3 22775 23063 22777 3 23065 22777 23063 3 22776 22778 23064 3 23066 23064 22778 3 22777 23065 22779 3 23067 22779 23065 3 22778 22780 23066 3 23068 23066 22780 3 22779 23067 22781 3 23069 22781 23067 3 22780 22782 23068 3 23070 23068 22782 3 22781 23069 22783 3 23071 22783 23069 3 22782 22784 23070 3 23072 23070 22784 3 22783 23071 22787 3 23075 22787 23071 3 22784 22785 23072 3 23073 23072 22785 3 22785 22788 23073 3 23076 23073 22788 3 22786 22787 23074 3 23075 23074 22787 3 22786 23074 22791 3 23079 22791 23074 3 22788 22789 23077 3 22788 23077 23076 3 22789 22792 23080 3 22789 23080 23077 3 22790 22791 23079 3 22790 23079 23078 3 22790 23078 22860 3 23148 22860 23078 3 22792 22793 23081 3 22792 23081 23080 3 22793 22794 23082 3 22793 23082 23081 3 22794 22795 23083 3 22794 23083 23082 3 22795 22796 23084 3 22795 23084 23083 3 22796 22797 23085 3 22796 23085 23084 3 22797 
22798 23086 3 22797 23086 23085 3 22798 22799 23087 3 22798 23087 23086 3 22799 22800 23088 3 22799 23088 23087 3 22800 22801 23089 3 22800 23089 23088 3 22801 22802 23090 3 22801 23090 23089 3 22802 22803 23091 3 22802 23091 23090 3 22803 22804 23092 3 22803 23092 23091 3 22804 22805 23093 3 22804 23093 23092 3 22805 22806 23094 3 22805 23094 23093 3 22806 22807 23095 3 22806 23095 23094 3 22807 22808 23096 3 22807 23096 23095 3 22808 22809 23097 3 22808 23097 23096 3 22809 22810 23098 3 22809 23098 23097 3 22810 22811 23099 3 22810 23099 23098 3 22811 22812 23100 3 22811 23100 23099 3 22812 22813 23101 3 22812 23101 23100 3 22813 22814 23102 3 22813 23102 23101 3 22814 22815 23103 3 22814 23103 23102 3 22815 22816 23104 3 22815 23104 23103 3 22816 22817 23105 3 22816 23105 23104 3 22817 22818 23106 3 22817 23106 23105 3 22818 22819 23107 3 22818 23107 23106 3 22819 22820 23108 3 22819 23108 23107 3 22820 22821 23109 3 22820 23109 23108 3 22821 22822 23110 3 22821 23110 23109 3 22822 22823 23111 3 22822 23111 23110 3 22823 22824 23112 3 22823 23112 23111 3 22824 22825 23113 3 22824 23113 23112 3 22825 22826 23114 3 22825 23114 23113 3 22826 22827 23115 3 22826 23115 23114 3 22827 22828 23116 3 22827 23116 23115 3 22828 22829 23117 3 22828 23117 23116 3 22829 22830 23118 3 22829 23118 23117 3 22830 22831 23119 3 22830 23119 23118 3 22831 22832 23120 3 22831 23120 23119 3 22832 22833 23121 3 22832 23121 23120 3 22833 22834 23122 3 22833 23122 23121 3 22834 22835 23123 3 22834 23123 23122 3 22835 22836 23124 3 22835 23124 23123 3 22836 22837 23125 3 22836 23125 23124 3 22837 22838 23126 3 22837 23126 23125 3 22838 22839 23127 3 22838 23127 23126 3 22839 22840 23128 3 22839 23128 23127 3 22840 22841 23129 3 22840 23129 23128 3 22841 22842 23130 3 22841 23130 23129 3 22842 22843 23131 3 22842 23131 23130 3 22843 22844 23132 3 22843 23132 23131 3 22844 22845 23133 3 22844 23133 23132 3 22845 22846 23134 3 22845 23134 23133 3 22846 22847 23135 3 22846 23135 23134 3 22847 
22848 23136 3 22847 23136 23135 3 22848 22849 23137 3 22848 23137 23136 3 22849 22850 23138 3 22849 23138 23137 3 22850 22851 23139 3 22850 23139 23138 3 22851 22852 23140 3 22851 23140 23139 3 22852 22853 23141 3 22852 23141 23140 3 22853 22854 23142 3 22853 23142 23141 3 22854 22855 23143 3 22854 23143 23142 3 22855 22856 23144 3 22855 23144 23143 3 22856 22857 23145 3 22856 23145 23144 3 22857 22858 23146 3 22857 23146 23145 3 22858 22859 23147 3 22858 23147 23146 3 22859 22860 23148 3 22859 23148 23147 3 22861 23149 23150 3 22861 23150 22862 3 22861 22931 23149 3 23219 23149 22931 3 22862 23150 23151 3 22862 23151 22863 3 22863 23151 23152 3 22863 23152 22864 3 22864 23152 23153 3 22864 23153 22865 3 22865 23153 23154 3 22865 23154 22866 3 22866 23154 23155 3 22866 23155 22867 3 22867 23155 23156 3 22867 23156 22868 3 22868 23156 23157 3 22868 23157 22869 3 22869 23157 23158 3 22869 23158 22870 3 22870 23158 23159 3 22870 23159 22871 3 22871 23159 23160 3 22871 23160 22872 3 22872 23160 23161 3 22872 23161 22873 3 22873 23161 23162 3 22873 23162 22874 3 22874 23162 23163 3 22874 23163 22875 3 22875 23163 23164 3 22875 23164 22876 3 22876 23164 23165 3 22876 23165 22877 3 22877 23165 23166 3 22877 23166 22878 3 22878 23166 23167 3 22878 23167 22879 3 22879 23167 23168 3 22879 23168 22880 3 22880 23168 23169 3 22880 23169 22881 3 22881 23169 23170 3 22881 23170 22882 3 22882 23170 23171 3 22882 23171 22883 3 22883 23171 23172 3 22883 23172 22884 3 22884 23172 23173 3 22884 23173 22885 3 22885 23173 23174 3 22885 23174 22886 3 22886 23174 23175 3 22886 23175 22887 3 22887 23175 23176 3 22887 23176 22888 3 22888 23176 23177 3 22888 23177 22889 3 22889 23177 23178 3 22889 23178 22890 3 22890 23178 23179 3 22890 23179 22891 3 22891 23179 23180 3 22891 23180 22892 3 22892 23180 23181 3 22892 23181 22893 3 22893 23181 23182 3 22893 23182 22894 3 22894 23182 23183 3 22894 23183 22895 3 22895 23183 23184 3 22895 23184 22896 3 22896 23184 23185 3 22896 23185 22897 3 22897 
23185 23186 3 22897 23186 22898 3 22898 23186 23187 3 22898 23187 22899 3 22899 23187 23188 3 22899 23188 22900 3 22900 23188 23189 3 22900 23189 22901 3 22901 23189 23190 3 22901 23190 22902 3 22902 23190 23191 3 22902 23191 22903 3 22903 23191 22904 3 23192 22904 23191 3 22904 23192 22905 3 23193 22905 23192 3 22905 23193 22906 3 23194 22906 23193 3 22906 23194 22907 3 23195 22907 23194 3 22907 23195 22908 3 23196 22908 23195 3 22908 23196 22909 3 23197 22909 23196 3 22909 23197 22910 3 23198 22910 23197 3 22910 23198 22911 3 23199 22911 23198 3 22911 23199 22912 3 23200 22912 23199 3 22912 23200 22913 3 23201 22913 23200 3 22913 23201 22914 3 23202 22914 23201 3 22914 23202 22915 3 23203 22915 23202 3 22915 23203 22916 3 23204 22916 23203 3 22916 23204 22917 3 23205 22917 23204 3 22917 23205 22918 3 23206 22918 23205 3 22918 23206 22919 3 23207 22919 23206 3 22919 23207 22920 3 23208 22920 23207 3 22920 23208 22921 3 23209 22921 23208 3 22921 23209 22922 3 23210 22922 23209 3 22922 23210 22923 3 23211 22923 23210 3 22923 23211 22924 3 23212 22924 23211 3 22924 23212 22925 3 23213 22925 23212 3 22925 23213 22926 3 23214 22926 23213 3 22926 23214 22927 3 23215 22927 23214 3 22927 23215 22928 3 23216 22928 23215 3 22928 23216 22929 3 23217 22929 23216 3 22929 23217 22932 3 23220 22932 23217 3 22930 23218 22931 3 23219 22931 23218 3 22930 22935 23218 3 23223 23218 22935 3 22932 23220 22933 3 23221 22933 23220 3 22933 23221 22936 3 23224 22936 23221 3 22934 23222 22935 3 23223 22935 23222 3 22934 22938 23222 3 23226 23222 22938 3 22936 23224 22937 3 23225 22937 23224 3 22937 23225 22939 3 23227 22939 23225 3 22938 22940 23226 3 23228 23226 22940 3 22939 23227 22941 3 23229 22941 23227 3 22940 22942 23228 3 23230 23228 22942 3 22941 23229 22943 3 23231 22943 23229 3 22942 22944 23230 3 23232 23230 22944 3 22943 23231 22945 3 23233 22945 23231 3 22944 22946 23232 3 23234 23232 22946 3 22945 23233 22947 3 23235 22947 23233 3 22946 22948 23234 3 23236 23234 22948 3 22947 
23235 22949 3 23237 22949 23235 3 22948 22950 23236 3 23238 23236 22950 3 22949 23237 22951 3 23239 22951 23237 3 22950 22952 23238 3 23240 23238 22952 3 22951 23239 22953 3 23241 22953 23239 3 22952 22954 23240 3 23242 23240 22954 3 22953 23241 22955 3 23243 22955 23241 3 22954 22956 23242 3 23244 23242 22956 3 22955 23243 22957 3 23245 22957 23243 3 22956 22958 23244 3 23246 23244 22958 3 22957 23245 22959 3 23247 22959 23245 3 22958 22960 23246 3 23248 23246 22960 3 22959 23247 22961 3 23249 22961 23247 3 22960 22962 23248 3 23250 23248 22962 3 22961 23249 22963 3 23251 22963 23249 3 22962 22964 23250 3 23252 23250 22964 3 22963 23251 22965 3 23253 22965 23251 3 22964 22966 23252 3 23254 23252 22966 3 22965 23253 22967 3 23255 22967 23253 3 22966 22968 23254 3 23256 23254 22968 3 22967 23255 22969 3 23257 22969 23255 3 22968 22970 23256 3 23258 23256 22970 3 22969 23257 22971 3 23259 22971 23257 3 22970 22972 23258 3 23260 23258 22972 3 22971 23259 22973 3 23261 22973 23259 3 22972 22974 23260 3 23262 23260 22974 3 22973 23261 22975 3 23263 22975 23261 3 22974 22976 23264 3 22974 23264 23262 3 22975 23263 22977 3 23265 22977 23263 3 22976 22978 23266 3 22976 23266 23264 3 22977 23265 22979 3 23267 22979 23265 3 22978 22980 23268 3 22978 23268 23266 3 22979 23267 22981 3 23269 22981 23267 3 22980 22982 23270 3 22980 23270 23268 3 22981 23269 22983 3 23271 22983 23269 3 22982 22984 23272 3 22982 23272 23270 3 22983 23271 22985 3 23273 22985 23271 3 22984 22986 23274 3 22984 23274 23272 3 22985 23273 22987 3 23275 22987 23273 3 22986 22988 23276 3 22986 23276 23274 3 22987 23275 22989 3 23277 22989 23275 3 22988 22990 23278 3 22988 23278 23276 3 22989 23277 22991 3 23279 22991 23277 3 22990 22992 23280 3 22990 23280 23278 3 22991 23279 22993 3 23281 22993 23279 3 22992 22994 23282 3 22992 23282 23280 3 22993 23281 22995 3 23283 22995 23281 3 22994 22996 23284 3 22994 23284 23282 3 22995 23283 22997 3 23285 22997 23283 3 22996 22998 23286 3 22996 23286 23284 3 22997 
23285 23287 3 22997 23287 22999 3 22998 23000 23288 3 22998 23288 23286 3 22999 23287 23289 3 22999 23289 23001 3 23000 23002 23290 3 23000 23290 23288 3 23001 23289 23291 3 23001 23291 23003 3 23002 23004 23292 3 23002 23292 23290 3 23003 23291 23293 3 23003 23293 23005 3 23004 23006 23294 3 23004 23294 23292 3 23005 23293 23295 3 23005 23295 23007 3 23006 23008 23296 3 23006 23296 23294 3 23007 23295 23297 3 23007 23297 23009 3 23008 23010 23298 3 23008 23298 23296 3 23009 23297 23299 3 23009 23299 23011 3 23010 23012 23300 3 23010 23300 23298 3 23011 23299 23301 3 23011 23301 23013 3 23012 23014 23302 3 23012 23302 23300 3 23013 23301 23303 3 23013 23303 23015 3 23014 23016 23304 3 23014 23304 23302 3 23015 23303 23305 3 23015 23305 23017 3 23016 23018 23306 3 23016 23306 23304 3 23017 23305 23307 3 23017 23307 23019 3 23018 23020 23308 3 23018 23308 23306 3 23019 23307 23309 3 23019 23309 23021 3 23020 23022 23310 3 23020 23310 23308 3 23021 23309 23311 3 23021 23311 23023 3 23022 23024 23312 3 23022 23312 23310 3 23023 23311 23313 3 23023 23313 23025 3 23024 23026 23314 3 23024 23314 23312 3 23025 23313 23315 3 23025 23315 23027 3 23026 23028 23316 3 23026 23316 23314 3 23027 23315 23317 3 23027 23317 23029 3 23028 23030 23318 3 23028 23318 23316 3 23029 23317 23319 3 23029 23319 23031 3 23030 23032 23320 3 23030 23320 23318 3 23031 23319 23321 3 23031 23321 23033 3 23032 23034 23322 3 23032 23322 23320 3 23033 23321 23323 3 23033 23323 23035 3 23034 23036 23324 3 23034 23324 23322 3 23035 23323 23325 3 23035 23325 23037 3 23036 23038 23326 3 23036 23326 23324 3 23037 23325 23327 3 23037 23327 23039 3 23038 23040 23328 3 23038 23328 23326 3 23039 23327 23329 3 23039 23329 23041 3 23040 23042 23330 3 23040 23330 23328 3 23041 23329 23331 3 23041 23331 23043 3 23042 23044 23332 3 23042 23332 23330 3 23043 23331 23333 3 23043 23333 23045 3 23044 23046 23334 3 23044 23334 23332 3 23045 23333 23335 3 23045 23335 23047 3 23046 23048 23336 3 23046 23336 23334 3 23047 
23335 23337 3 23047 23337 23049 3 23048 23050 23338 3 23048 23338 23336 3 23049 23337 23339 3 23049 23339 23051 3 23050 23052 23340 3 23050 23340 23338 3 23051 23339 23341 3 23051 23341 23053 3 23052 23054 23342 3 23052 23342 23340 3 23053 23341 23343 3 23053 23343 23055 3 23054 23056 23344 3 23054 23344 23342 3 23055 23343 23345 3 23055 23345 23057 3 23056 23058 23346 3 23056 23346 23344 3 23057 23345 23347 3 23057 23347 23059 3 23058 23060 23348 3 23058 23348 23346 3 23059 23347 23349 3 23059 23349 23061 3 23060 23062 23350 3 23060 23350 23348 3 23061 23349 23351 3 23061 23351 23063 3 23062 23064 23352 3 23062 23352 23350 3 23063 23351 23353 3 23063 23353 23065 3 23064 23066 23354 3 23064 23354 23352 3 23065 23353 23355 3 23065 23355 23067 3 23066 23068 23356 3 23066 23356 23354 3 23067 23355 23357 3 23067 23357 23069 3 23068 23070 23356 3 23358 23356 23070 3 23069 23357 23359 3 23069 23359 23071 3 23070 23072 23358 3 23360 23358 23072 3 23071 23359 23363 3 23071 23363 23075 3 23072 23073 23360 3 23361 23360 23073 3 23073 23076 23361 3 23364 23361 23076 3 23074 23075 23362 3 23363 23362 23075 3 23074 23362 23367 3 23074 23367 23079 3 23076 23077 23364 3 23365 23364 23077 3 23077 23080 23365 3 23368 23365 23080 3 23078 23079 23366 3 23367 23366 23079 3 23078 23366 23436 3 23078 23436 23148 3 23080 23081 23368 3 23369 23368 23081 3 23081 23082 23369 3 23370 23369 23082 3 23082 23083 23370 3 23371 23370 23083 3 23083 23084 23371 3 23372 23371 23084 3 23084 23085 23372 3 23373 23372 23085 3 23085 23086 23373 3 23374 23373 23086 3 23086 23087 23374 3 23375 23374 23087 3 23087 23088 23375 3 23376 23375 23088 3 23088 23089 23376 3 23377 23376 23089 3 23089 23090 23377 3 23378 23377 23090 3 23090 23091 23378 3 23379 23378 23091 3 23091 23092 23379 3 23380 23379 23092 3 23092 23093 23380 3 23381 23380 23093 3 23093 23094 23381 3 23382 23381 23094 3 23094 23095 23382 3 23383 23382 23095 3 23095 23096 23383 3 23384 23383 23096 3 23096 23097 23384 3 23385 23384 23097 3 23097 
23098 23385 3 23386 23385 23098 3 23098 23099 23386 3 23387 23386 23099 3 23099 23100 23387 3 23388 23387 23100 3 23100 23101 23388 3 23389 23388 23101 3 23101 23102 23389 3 23390 23389 23102 3 23102 23103 23390 3 23391 23390 23103 3 23103 23104 23391 3 23392 23391 23104 3 23104 23105 23392 3 23393 23392 23105 3 23105 23106 23393 3 23394 23393 23106 3 23106 23107 23394 3 23395 23394 23107 3 23107 23108 23395 3 23396 23395 23108 3 23108 23109 23396 3 23397 23396 23109 3 23109 23110 23397 3 23398 23397 23110 3 23110 23111 23398 3 23399 23398 23111 3 23111 23112 23399 3 23400 23399 23112 3 23112 23113 23400 3 23401 23400 23113 3 23113 23114 23401 3 23402 23401 23114 3 23114 23115 23402 3 23403 23402 23115 3 23115 23116 23403 3 23404 23403 23116 3 23116 23117 23404 3 23405 23404 23117 3 23117 23118 23405 3 23406 23405 23118 3 23118 23119 23406 3 23407 23406 23119 3 23119 23120 23407 3 23408 23407 23120 3 23120 23121 23408 3 23409 23408 23121 3 23121 23122 23409 3 23410 23409 23122 3 23122 23123 23410 3 23411 23410 23123 3 23123 23124 23411 3 23412 23411 23124 3 23124 23125 23412 3 23413 23412 23125 3 23125 23126 23413 3 23414 23413 23126 3 23126 23127 23414 3 23415 23414 23127 3 23127 23128 23415 3 23416 23415 23128 3 23128 23129 23416 3 23417 23416 23129 3 23129 23130 23417 3 23418 23417 23130 3 23130 23131 23418 3 23419 23418 23131 3 23131 23132 23419 3 23420 23419 23132 3 23132 23133 23420 3 23421 23420 23133 3 23133 23134 23421 3 23422 23421 23134 3 23134 23135 23422 3 23423 23422 23135 3 23135 23136 23423 3 23424 23423 23136 3 23136 23137 23424 3 23425 23424 23137 3 23137 23138 23425 3 23426 23425 23138 3 23138 23139 23426 3 23427 23426 23139 3 23139 23140 23427 3 23428 23427 23140 3 23140 23141 23428 3 23429 23428 23141 3 23141 23142 23429 3 23430 23429 23142 3 23142 23143 23430 3 23431 23430 23143 3 23143 23144 23431 3 23432 23431 23144 3 23144 23145 23432 3 23433 23432 23145 3 23145 23146 23433 3 23434 23433 23146 3 23146 23147 23434 3 23435 23434 23147 3 23147 
23148 23435 3 23436 23435 23148 3 23149 23437 23150 3 23438 23150 23437 3 23149 23219 23507 3 23149 23507 23437 3 23150 23438 23151 3 23439 23151 23438 3 23151 23439 23152 3 23440 23152 23439 3 23152 23440 23153 3 23441 23153 23440 3 23153 23441 23154 3 23442 23154 23441 3 23154 23442 23155 3 23443 23155 23442 3 23155 23443 23156 3 23444 23156 23443 3 23156 23444 23157 3 23445 23157 23444 3 23157 23445 23158 3 23446 23158 23445 3 23158 23446 23159 3 23447 23159 23446 3 23159 23447 23160 3 23448 23160 23447 3 23160 23448 23161 3 23449 23161 23448 3 23161 23449 23162 3 23450 23162 23449 3 23162 23450 23163 3 23451 23163 23450 3 23163 23451 23164 3 23452 23164 23451 3 23164 23452 23165 3 23453 23165 23452 3 23165 23453 23166 3 23454 23166 23453 3 23166 23454 23167 3 23455 23167 23454 3 23167 23455 23168 3 23456 23168 23455 3 23168 23456 23169 3 23457 23169 23456 3 23169 23457 23170 3 23458 23170 23457 3 23170 23458 23171 3 23459 23171 23458 3 23171 23459 23172 3 23460 23172 23459 3 23172 23460 23173 3 23461 23173 23460 3 23173 23461 23174 3 23462 23174 23461 3 23174 23462 23175 3 23463 23175 23462 3 23175 23463 23176 3 23464 23176 23463 3 23176 23464 23177 3 23465 23177 23464 3 23177 23465 23178 3 23466 23178 23465 3 23178 23466 23179 3 23467 23179 23466 3 23179 23467 23180 3 23468 23180 23467 3 23180 23468 23181 3 23469 23181 23468 3 23181 23469 23182 3 23470 23182 23469 3 23182 23470 23183 3 23471 23183 23470 3 23183 23471 23184 3 23472 23184 23471 3 23184 23472 23473 3 23184 23473 23185 3 23185 23473 23474 3 23185 23474 23186 3 23186 23474 23475 3 23186 23475 23187 3 23187 23475 23476 3 23187 23476 23188 3 23188 23476 23477 3 23188 23477 23189 3 23189 23477 23478 3 23189 23478 23190 3 23190 23478 23479 3 23190 23479 23191 3 23191 23479 23480 3 23191 23480 23192 3 23192 23480 23481 3 23192 23481 23193 3 23193 23481 23482 3 23193 23482 23194 3 23194 23482 23483 3 23194 23483 23195 3 23195 23483 23484 3 23195 23484 23196 3 23196 23484 23485 3 23196 23485 23197 3 23197 
23485 23486 3 23197 23486 23198 3 23198 23486 23487 3 23198 23487 23199 3 23199 23487 23488 3 23199 23488 23200 3 23200 23488 23489 3 23200 23489 23201 3 23201 23489 23490 3 23201 23490 23202 3 23202 23490 23491 3 23202 23491 23203 3 23203 23491 23492 3 23203 23492 23204 3 23204 23492 23493 3 23204 23493 23205 3 23205 23493 23494 3 23205 23494 23206 3 23206 23494 23495 3 23206 23495 23207 3 23207 23495 23496 3 23207 23496 23208 3 23208 23496 23497 3 23208 23497 23209 3 23209 23497 23498 3 23209 23498 23210 3 23210 23498 23499 3 23210 23499 23211 3 23211 23499 23500 3 23211 23500 23212 3 23212 23500 23501 3 23212 23501 23213 3 23213 23501 23502 3 23213 23502 23214 3 23214 23502 23503 3 23214 23503 23215 3 23215 23503 23504 3 23215 23504 23216 3 23216 23504 23505 3 23216 23505 23217 3 23217 23505 23508 3 23217 23508 23220 3 23218 23506 23507 3 23218 23507 23219 3 23218 23223 23511 3 23218 23511 23506 3 23220 23508 23509 3 23220 23509 23221 3 23221 23509 23512 3 23221 23512 23224 3 23222 23510 23511 3 23222 23511 23223 3 23222 23226 23514 3 23222 23514 23510 3 23224 23512 23513 3 23224 23513 23225 3 23225 23513 23515 3 23225 23515 23227 3 23226 23228 23516 3 23226 23516 23514 3 23227 23515 23517 3 23227 23517 23229 3 23228 23230 23518 3 23228 23518 23516 3 23229 23517 23519 3 23229 23519 23231 3 23230 23232 23520 3 23230 23520 23518 3 23231 23519 23521 3 23231 23521 23233 3 23232 23234 23522 3 23232 23522 23520 3 23233 23521 23523 3 23233 23523 23235 3 23234 23236 23524 3 23234 23524 23522 3 23235 23523 23525 3 23235 23525 23237 3 23236 23238 23526 3 23236 23526 23524 3 23237 23525 23527 3 23237 23527 23239 3 23238 23240 23528 3 23238 23528 23526 3 23239 23527 23529 3 23239 23529 23241 3 23240 23242 23530 3 23240 23530 23528 3 23241 23529 23531 3 23241 23531 23243 3 23242 23244 23532 3 23242 23532 23530 3 23243 23531 23533 3 23243 23533 23245 3 23244 23246 23534 3 23244 23534 23532 3 23245 23533 23535 3 23245 23535 23247 3 23246 23248 23536 3 23246 23536 23534 3 23247 
23535 23537 3 23247 23537 23249 3 23248 23250 23538 3 23248 23538 23536 3 23249 23537 23539 3 23249 23539 23251 3 23250 23252 23540 3 23250 23540 23538 3 23251 23539 23541 3 23251 23541 23253 3 23252 23254 23542 3 23252 23542 23540 3 23253 23541 23543 3 23253 23543 23255 3 23254 23256 23544 3 23254 23544 23542 3 23255 23543 23545 3 23255 23545 23257 3 23256 23258 23544 3 23546 23544 23258 3 23257 23545 23547 3 23257 23547 23259 3 23258 23260 23546 3 23548 23546 23260 3 23259 23547 23549 3 23259 23549 23261 3 23260 23262 23548 3 23550 23548 23262 3 23261 23549 23551 3 23261 23551 23263 3 23262 23264 23550 3 23552 23550 23264 3 23263 23551 23553 3 23263 23553 23265 3 23264 23266 23552 3 23554 23552 23266 3 23265 23553 23555 3 23265 23555 23267 3 23266 23268 23554 3 23556 23554 23268 3 23267 23555 23557 3 23267 23557 23269 3 23268 23270 23556 3 23558 23556 23270 3 23269 23557 23559 3 23269 23559 23271 3 23270 23272 23558 3 23560 23558 23272 3 23271 23559 23561 3 23271 23561 23273 3 23272 23274 23560 3 23562 23560 23274 3 23273 23561 23563 3 23273 23563 23275 3 23274 23276 23562 3 23564 23562 23276 3 23275 23563 23565 3 23275 23565 23277 3 23276 23278 23564 3 23566 23564 23278 3 23277 23565 23567 3 23277 23567 23279 3 23278 23280 23566 3 23568 23566 23280 3 23279 23567 23281 3 23569 23281 23567 3 23280 23282 23568 3 23570 23568 23282 3 23281 23569 23283 3 23571 23283 23569 3 23282 23284 23570 3 23572 23570 23284 3 23283 23571 23285 3 23573 23285 23571 3 23284 23286 23572 3 23574 23572 23286 3 23285 23573 23287 3 23575 23287 23573 3 23286 23288 23574 3 23576 23574 23288 3 23287 23575 23289 3 23577 23289 23575 3 23288 23290 23576 3 23578 23576 23290 3 23289 23577 23291 3 23579 23291 23577 3 23290 23292 23578 3 23580 23578 23292 3 23291 23579 23293 3 23581 23293 23579 3 23292 23294 23580 3 23582 23580 23294 3 23293 23581 23295 3 23583 23295 23581 3 23294 23296 23582 3 23584 23582 23296 3 23295 23583 23297 3 23585 23297 23583 3 23296 23298 23584 3 23586 23584 23298 3 23297 
23585 23299 3 23587 23299 23585 3 23298 23300 23586 3 23588 23586 23300 3 23299 23587 23301 3 23589 23301 23587 3 23300 23302 23588 3 23590 23588 23302 3 23301 23589 23303 3 23591 23303 23589 3 23302 23304 23590 3 23592 23590 23304 3 23303 23591 23305 3 23593 23305 23591 3 23304 23306 23592 3 23594 23592 23306 3 23305 23593 23307 3 23595 23307 23593 3 23306 23308 23594 3 23596 23594 23308 3 23307 23595 23309 3 23597 23309 23595 3 23308 23310 23596 3 23598 23596 23310 3 23309 23597 23311 3 23599 23311 23597 3 23310 23312 23598 3 23600 23598 23312 3 23311 23599 23313 3 23601 23313 23599 3 23312 23314 23600 3 23602 23600 23314 3 23313 23601 23315 3 23603 23315 23601 3 23314 23316 23602 3 23604 23602 23316 3 23315 23603 23317 3 23605 23317 23603 3 23316 23318 23604 3 23606 23604 23318 3 23317 23605 23319 3 23607 23319 23605 3 23318 23320 23606 3 23608 23606 23320 3 23319 23607 23321 3 23609 23321 23607 3 23320 23322 23608 3 23610 23608 23322 3 23321 23609 23323 3 23611 23323 23609 3 23322 23324 23610 3 23612 23610 23324 3 23323 23611 23325 3 23613 23325 23611 3 23324 23326 23612 3 23614 23612 23326 3 23325 23613 23327 3 23615 23327 23613 3 23326 23328 23614 3 23616 23614 23328 3 23327 23615 23329 3 23617 23329 23615 3 23328 23330 23616 3 23618 23616 23330 3 23329 23617 23331 3 23619 23331 23617 3 23330 23332 23618 3 23620 23618 23332 3 23331 23619 23333 3 23621 23333 23619 3 23332 23334 23620 3 23622 23620 23334 3 23333 23621 23335 3 23623 23335 23621 3 23334 23336 23622 3 23624 23622 23336 3 23335 23623 23337 3 23625 23337 23623 3 23336 23338 23624 3 23626 23624 23338 3 23337 23625 23339 3 23627 23339 23625 3 23338 23340 23626 3 23628 23626 23340 3 23339 23627 23341 3 23629 23341 23627 3 23340 23342 23628 3 23630 23628 23342 3 23341 23629 23343 3 23631 23343 23629 3 23342 23344 23630 3 23632 23630 23344 3 23343 23631 23345 3 23633 23345 23631 3 23344 23346 23632 3 23634 23632 23346 3 23345 23633 23347 3 23635 23347 23633 3 23346 23348 23634 3 23636 23634 23348 3 23347 
23635 23349 3 23637 23349 23635 3 23348 23350 23636 3 23638 23636 23350 3 23349 23637 23351 3 23639 23351 23637 3 23350 23352 23640 3 23350 23640 23638 3 23351 23639 23353 3 23641 23353 23639 3 23352 23354 23642 3 23352 23642 23640 3 23353 23641 23355 3 23643 23355 23641 3 23354 23356 23644 3 23354 23644 23642 3 23355 23643 23357 3 23645 23357 23643 3 23356 23358 23646 3 23356 23646 23644 3 23357 23645 23359 3 23647 23359 23645 3 23358 23360 23648 3 23358 23648 23646 3 23359 23647 23363 3 23651 23363 23647 3 23360 23361 23649 3 23360 23649 23648 3 23361 23364 23652 3 23361 23652 23649 3 23362 23363 23651 3 23362 23651 23650 3 23362 23650 23367 3 23655 23367 23650 3 23364 23365 23653 3 23364 23653 23652 3 23365 23368 23656 3 23365 23656 23653 3 23366 23367 23655 3 23366 23655 23654 3 23366 23654 23436 3 23724 23436 23654 3 23368 23369 23657 3 23368 23657 23656 3 23369 23370 23658 3 23369 23658 23657 3 23370 23371 23659 3 23370 23659 23658 3 23371 23372 23660 3 23371 23660 23659 3 23372 23373 23661 3 23372 23661 23660 3 23373 23374 23662 3 23373 23662 23661 3 23374 23375 23663 3 23374 23663 23662 3 23375 23376 23664 3 23375 23664 23663 3 23376 23377 23665 3 23376 23665 23664 3 23377 23378 23666 3 23377 23666 23665 3 23378 23379 23667 3 23378 23667 23666 3 23379 23380 23668 3 23379 23668 23667 3 23380 23381 23669 3 23380 23669 23668 3 23381 23382 23670 3 23381 23670 23669 3 23382 23383 23671 3 23382 23671 23670 3 23383 23384 23672 3 23383 23672 23671 3 23384 23385 23673 3 23384 23673 23672 3 23385 23386 23674 3 23385 23674 23673 3 23386 23387 23675 3 23386 23675 23674 3 23387 23388 23676 3 23387 23676 23675 3 23388 23389 23677 3 23388 23677 23676 3 23389 23390 23678 3 23389 23678 23677 3 23390 23391 23679 3 23390 23679 23678 3 23391 23392 23680 3 23391 23680 23679 3 23392 23393 23681 3 23392 23681 23680 3 23393 23394 23682 3 23393 23682 23681 3 23394 23395 23683 3 23394 23683 23682 3 23395 23396 23684 3 23395 23684 23683 3 23396 23397 23685 3 23396 23685 23684 3 23397 
23398 23686 3 23397 23686 23685 3 23398 23399 23687 3 23398 23687 23686 3 23399 23400 23688 3 23399 23688 23687 3 23400 23401 23689 3 23400 23689 23688 3 23401 23402 23690 3 23401 23690 23689 3 23402 23403 23691 3 23402 23691 23690 3 23403 23404 23692 3 23403 23692 23691 3 23404 23405 23693 3 23404 23693 23692 3 23405 23406 23694 3 23405 23694 23693 3 23406 23407 23695 3 23406 23695 23694 3 23407 23408 23696 3 23407 23696 23695 3 23408 23409 23697 3 23408 23697 23696 3 23409 23410 23698 3 23409 23698 23697 3 23410 23411 23699 3 23410 23699 23698 3 23411 23412 23700 3 23411 23700 23699 3 23412 23413 23701 3 23412 23701 23700 3 23413 23414 23702 3 23413 23702 23701 3 23414 23415 23703 3 23414 23703 23702 3 23415 23416 23704 3 23415 23704 23703 3 23416 23417 23705 3 23416 23705 23704 3 23417 23418 23706 3 23417 23706 23705 3 23418 23419 23707 3 23418 23707 23706 3 23419 23420 23708 3 23419 23708 23707 3 23420 23421 23709 3 23420 23709 23708 3 23421 23422 23710 3 23421 23710 23709 3 23422 23423 23711 3 23422 23711 23710 3 23423 23424 23712 3 23423 23712 23711 3 23424 23425 23713 3 23424 23713 23712 3 23425 23426 23714 3 23425 23714 23713 3 23426 23427 23715 3 23426 23715 23714 3 23427 23428 23716 3 23427 23716 23715 3 23428 23429 23717 3 23428 23717 23716 3 23429 23430 23718 3 23429 23718 23717 3 23430 23431 23719 3 23430 23719 23718 3 23431 23432 23720 3 23431 23720 23719 3 23432 23433 23721 3 23432 23721 23720 3 23433 23434 23722 3 23433 23722 23721 3 23434 23435 23723 3 23434 23723 23722 3 23435 23436 23724 3 23435 23724 23723 3 23437 23725 23726 3 23437 23726 23438 3 23437 23507 23725 3 23795 23725 23507 3 23438 23726 23727 3 23438 23727 23439 3 23439 23727 23728 3 23439 23728 23440 3 23440 23728 23729 3 23440 23729 23441 3 23441 23729 23730 3 23441 23730 23442 3 23442 23730 23731 3 23442 23731 23443 3 23443 23731 23732 3 23443 23732 23444 3 23444 23732 23733 3 23444 23733 23445 3 23445 23733 23734 3 23445 23734 23446 3 23446 23734 23735 3 23446 23735 23447 3 23447 
23735 23736 3 23447 23736 23448 3 23448 23736 23737 3 23448 23737 23449 3 23449 23737 23738 3 23449 23738 23450 3 23450 23738 23739 3 23450 23739 23451 3 23451 23739 23740 3 23451 23740 23452 3 23452 23740 23741 3 23452 23741 23453 3 23453 23741 23742 3 23453 23742 23454 3 23454 23742 23743 3 23454 23743 23455 3 23455 23743 23744 3 23455 23744 23456 3 23456 23744 23745 3 23456 23745 23457 3 23457 23745 23746 3 23457 23746 23458 3 23458 23746 23747 3 23458 23747 23459 3 23459 23747 23748 3 23459 23748 23460 3 23460 23748 23749 3 23460 23749 23461 3 23461 23749 23750 3 23461 23750 23462 3 23462 23750 23751 3 23462 23751 23463 3 23463 23751 23752 3 23463 23752 23464 3 23464 23752 23753 3 23464 23753 23465 3 23465 23753 23754 3 23465 23754 23466 3 23466 23754 23755 3 23466 23755 23467 3 23467 23755 23468 3 23756 23468 23755 3 23468 23756 23469 3 23757 23469 23756 3 23469 23757 23470 3 23758 23470 23757 3 23470 23758 23471 3 23759 23471 23758 3 23471 23759 23472 3 23760 23472 23759 3 23472 23760 23473 3 23761 23473 23760 3 23473 23761 23474 3 23762 23474 23761 3 23474 23762 23475 3 23763 23475 23762 3 23475 23763 23476 3 23764 23476 23763 3 23476 23764 23477 3 23765 23477 23764 3 23477 23765 23478 3 23766 23478 23765 3 23478 23766 23479 3 23767 23479 23766 3 23479 23767 23480 3 23768 23480 23767 3 23480 23768 23481 3 23769 23481 23768 3 23481 23769 23482 3 23770 23482 23769 3 23482 23770 23483 3 23771 23483 23770 3 23483 23771 23484 3 23772 23484 23771 3 23484 23772 23485 3 23773 23485 23772 3 23485 23773 23486 3 23774 23486 23773 3 23486 23774 23487 3 23775 23487 23774 3 23487 23775 23488 3 23776 23488 23775 3 23488 23776 23489 3 23777 23489 23776 3 23489 23777 23490 3 23778 23490 23777 3 23490 23778 23491 3 23779 23491 23778 3 23491 23779 23492 3 23780 23492 23779 3 23492 23780 23493 3 23781 23493 23780 3 23493 23781 23494 3 23782 23494 23781 3 23494 23782 23495 3 23783 23495 23782 3 23495 23783 23496 3 23784 23496 23783 3 23496 23784 23497 3 23785 23497 23784 3 23497 
23785 23498 3 23786 23498 23785 3 23498 23786 23499 3 23787 23499 23786 3 23499 23787 23500 3 23788 23500 23787 3 23500 23788 23501 3 23789 23501 23788 3 23501 23789 23502 3 23790 23502 23789 3 23502 23790 23503 3 23791 23503 23790 3 23503 23791 23504 3 23792 23504 23791 3 23504 23792 23505 3 23793 23505 23792 3 23505 23793 23508 3 23796 23508 23793 3 23506 23794 23507 3 23795 23507 23794 3 23506 23511 23794 3 23799 23794 23511 3 23508 23796 23509 3 23797 23509 23796 3 23509 23797 23512 3 23800 23512 23797 3 23510 23798 23511 3 23799 23511 23798 3 23510 23514 23798 3 23802 23798 23514 3 23512 23800 23513 3 23801 23513 23800 3 23513 23801 23515 3 23803 23515 23801 3 23514 23516 23802 3 23804 23802 23516 3 23515 23803 23517 3 23805 23517 23803 3 23516 23518 23804 3 23806 23804 23518 3 23517 23805 23519 3 23807 23519 23805 3 23518 23520 23806 3 23808 23806 23520 3 23519 23807 23521 3 23809 23521 23807 3 23520 23522 23808 3 23810 23808 23522 3 23521 23809 23523 3 23811 23523 23809 3 23522 23524 23810 3 23812 23810 23524 3 23523 23811 23525 3 23813 23525 23811 3 23524 23526 23812 3 23814 23812 23526 3 23525 23813 23527 3 23815 23527 23813 3 23526 23528 23814 3 23816 23814 23528 3 23527 23815 23529 3 23817 23529 23815 3 23528 23530 23816 3 23818 23816 23530 3 23529 23817 23531 3 23819 23531 23817 3 23530 23532 23818 3 23820 23818 23532 3 23531 23819 23533 3 23821 23533 23819 3 23532 23534 23820 3 23822 23820 23534 3 23533 23821 23535 3 23823 23535 23821 3 23534 23536 23822 3 23824 23822 23536 3 23535 23823 23537 3 23825 23537 23823 3 23536 23538 23824 3 23826 23824 23538 3 23537 23825 23539 3 23827 23539 23825 3 23538 23540 23828 3 23538 23828 23826 3 23539 23827 23541 3 23829 23541 23827 3 23540 23542 23830 3 23540 23830 23828 3 23541 23829 23543 3 23831 23543 23829 3 23542 23544 23832 3 23542 23832 23830 3 23543 23831 23545 3 23833 23545 23831 3 23544 23546 23834 3 23544 23834 23832 3 23545 23833 23547 3 23835 23547 23833 3 23546 23548 23836 3 23546 23836 23834 3 23547 
23835 23549 3 23837 23549 23835 3 23548 23550 23838 3 23548 23838 23836 3 23549 23837 23551 3 23839 23551 23837 3 23550 23552 23840 3 23550 23840 23838 3 23551 23839 23553 3 23841 23553 23839 3 23552 23554 23842 3 23552 23842 23840 3 23553 23841 23555 3 23843 23555 23841 3 23554 23556 23844 3 23554 23844 23842 3 23555 23843 23557 3 23845 23557 23843 3 23556 23558 23846 3 23556 23846 23844 3 23557 23845 23559 3 23847 23559 23845 3 23558 23560 23848 3 23558 23848 23846 3 23559 23847 23561 3 23849 23561 23847 3 23560 23562 23850 3 23560 23850 23848 3 23561 23849 23563 3 23851 23563 23849 3 23562 23564 23852 3 23562 23852 23850 3 23563 23851 23853 3 23563 23853 23565 3 23564 23566 23854 3 23564 23854 23852 3 23565 23853 23855 3 23565 23855 23567 3 23566 23568 23856 3 23566 23856 23854 3 23567 23855 23857 3 23567 23857 23569 3 23568 23570 23858 3 23568 23858 23856 3 23569 23857 23859 3 23569 23859 23571 3 23570 23572 23860 3 23570 23860 23858 3 23571 23859 23861 3 23571 23861 23573 3 23572 23574 23862 3 23572 23862 23860 3 23573 23861 23863 3 23573 23863 23575 3 23574 23576 23864 3 23574 23864 23862 3 23575 23863 23865 3 23575 23865 23577 3 23576 23578 23866 3 23576 23866 23864 3 23577 23865 23867 3 23577 23867 23579 3 23578 23580 23868 3 23578 23868 23866 3 23579 23867 23869 3 23579 23869 23581 3 23580 23582 23870 3 23580 23870 23868 3 23581 23869 23871 3 23581 23871 23583 3 23582 23584 23872 3 23582 23872 23870 3 23583 23871 23873 3 23583 23873 23585 3 23584 23586 23874 3 23584 23874 23872 3 23585 23873 23875 3 23585 23875 23587 3 23586 23588 23876 3 23586 23876 23874 3 23587 23875 23877 3 23587 23877 23589 3 23588 23590 23878 3 23588 23878 23876 3 23589 23877 23879 3 23589 23879 23591 3 23590 23592 23880 3 23590 23880 23878 3 23591 23879 23881 3 23591 23881 23593 3 23592 23594 23882 3 23592 23882 23880 3 23593 23881 23883 3 23593 23883 23595 3 23594 23596 23884 3 23594 23884 23882 3 23595 23883 23885 3 23595 23885 23597 3 23596 23598 23886 3 23596 23886 23884 3 23597 
23885 23887 3 23597 23887 23599 3 23598 23600 23888 3 23598 23888 23886 3 23599 23887 23889 3 23599 23889 23601 3 23600 23602 23890 3 23600 23890 23888 3 23601 23889 23891 3 23601 23891 23603 3 23602 23604 23892 3 23602 23892 23890 3 23603 23891 23893 3 23603 23893 23605 3 23604 23606 23894 3 23604 23894 23892 3 23605 23893 23895 3 23605 23895 23607 3 23606 23608 23896 3 23606 23896 23894 3 23607 23895 23897 3 23607 23897 23609 3 23608 23610 23898 3 23608 23898 23896 3 23609 23897 23899 3 23609 23899 23611 3 23610 23612 23900 3 23610 23900 23898 3 23611 23899 23901 3 23611 23901 23613 3 23612 23614 23902 3 23612 23902 23900 3 23613 23901 23903 3 23613 23903 23615 3 23614 23616 23904 3 23614 23904 23902 3 23615 23903 23905 3 23615 23905 23617 3 23616 23618 23906 3 23616 23906 23904 3 23617 23905 23907 3 23617 23907 23619 3 23618 23620 23908 3 23618 23908 23906 3 23619 23907 23909 3 23619 23909 23621 3 23620 23622 23910 3 23620 23910 23908 3 23621 23909 23911 3 23621 23911 23623 3 23622 23624 23912 3 23622 23912 23910 3 23623 23911 23913 3 23623 23913 23625 3 23624 23626 23914 3 23624 23914 23912 3 23625 23913 23915 3 23625 23915 23627 3 23626 23628 23916 3 23626 23916 23914 3 23627 23915 23917 3 23627 23917 23629 3 23628 23630 23918 3 23628 23918 23916 3 23629 23917 23919 3 23629 23919 23631 3 23630 23632 23920 3 23630 23920 23918 3 23631 23919 23921 3 23631 23921 23633 3 23632 23634 23920 3 23922 23920 23634 3 23633 23921 23923 3 23633 23923 23635 3 23634 23636 23922 3 23924 23922 23636 3 23635 23923 23925 3 23635 23925 23637 3 23636 23638 23924 3 23926 23924 23638 3 23637 23925 23927 3 23637 23927 23639 3 23638 23640 23926 3 23928 23926 23640 3 23639 23927 23929 3 23639 23929 23641 3 23640 23642 23928 3 23930 23928 23642 3 23641 23929 23931 3 23641 23931 23643 3 23642 23644 23930 3 23932 23930 23644 3 23643 23931 23933 3 23643 23933 23645 3 23644 23646 23932 3 23934 23932 23646 3 23645 23933 23935 3 23645 23935 23647 3 23646 23648 23934 3 23936 23934 23648 3 23647 
23935 23939 3 23647 23939 23651 3 23648 23649 23936 3 23937 23936 23649 3 23649 23652 23937 3 23940 23937 23652 3 23650 23651 23938 3 23939 23938 23651 3 23650 23938 23943 3 23650 23943 23655 3 23652 23653 23940 3 23941 23940 23653 3 23653 23656 23941 3 23944 23941 23656 3 23654 23655 23942 3 23943 23942 23655 3 23654 23942 24012 3 23654 24012 23724 3 23656 23657 23944 3 23945 23944 23657 3 23657 23658 23945 3 23946 23945 23658 3 23658 23659 23946 3 23947 23946 23659 3 23659 23660 23947 3 23948 23947 23660 3 23660 23661 23948 3 23949 23948 23661 3 23661 23662 23949 3 23950 23949 23662 3 23662 23663 23950 3 23951 23950 23663 3 23663 23664 23951 3 23952 23951 23664 3 23664 23665 23952 3 23953 23952 23665 3 23665 23666 23953 3 23954 23953 23666 3 23666 23667 23954 3 23955 23954 23667 3 23667 23668 23955 3 23956 23955 23668 3 23668 23669 23956 3 23957 23956 23669 3 23669 23670 23957 3 23958 23957 23670 3 23670 23671 23958 3 23959 23958 23671 3 23671 23672 23959 3 23960 23959 23672 3 23672 23673 23960 3 23961 23960 23673 3 23673 23674 23961 3 23962 23961 23674 3 23674 23675 23962 3 23963 23962 23675 3 23675 23676 23963 3 23964 23963 23676 3 23676 23677 23964 3 23965 23964 23677 3 23677 23678 23965 3 23966 23965 23678 3 23678 23679 23966 3 23967 23966 23679 3 23679 23680 23967 3 23968 23967 23680 3 23680 23681 23968 3 23969 23968 23681 3 23681 23682 23969 3 23970 23969 23682 3 23682 23683 23970 3 23971 23970 23683 3 23683 23684 23971 3 23972 23971 23684 3 23684 23685 23972 3 23973 23972 23685 3 23685 23686 23973 3 23974 23973 23686 3 23686 23687 23974 3 23975 23974 23687 3 23687 23688 23975 3 23976 23975 23688 3 23688 23689 23976 3 23977 23976 23689 3 23689 23690 23977 3 23978 23977 23690 3 23690 23691 23978 3 23979 23978 23691 3 23691 23692 23979 3 23980 23979 23692 3 23692 23693 23980 3 23981 23980 23693 3 23693 23694 23981 3 23982 23981 23694 3 23694 23695 23982 3 23983 23982 23695 3 23695 23696 23983 3 23984 23983 23696 3 23696 23697 23984 3 23985 23984 23697 3 23697 
23698 23985 3 23986 23985 23698 3 23698 23699 23986 3 23987 23986 23699 3 23699 23700 23987 3 23988 23987 23700 3 23700 23701 23988 3 23989 23988 23701 3 23701 23702 23989 3 23990 23989 23702 3 23702 23703 23990 3 23991 23990 23703 3 23703 23704 23991 3 23992 23991 23704 3 23704 23705 23992 3 23993 23992 23705 3 23705 23706 23993 3 23994 23993 23706 3 23706 23707 23994 3 23995 23994 23707 3 23707 23708 23995 3 23996 23995 23708 3 23708 23709 23996 3 23997 23996 23709 3 23709 23710 23997 3 23998 23997 23710 3 23710 23711 23998 3 23999 23998 23711 3 23711 23712 23999 3 24000 23999 23712 3 23712 23713 24000 3 24001 24000 23713 3 23713 23714 24001 3 24002 24001 23714 3 23714 23715 24002 3 24003 24002 23715 3 23715 23716 24003 3 24004 24003 23716 3 23716 23717 24004 3 24005 24004 23717 3 23717 23718 24005 3 24006 24005 23718 3 23718 23719 24006 3 24007 24006 23719 3 23719 23720 24007 3 24008 24007 23720 3 23720 23721 24008 3 24009 24008 23721 3 23721 23722 24009 3 24010 24009 23722 3 23722 23723 24010 3 24011 24010 23723 3 23723 23724 24011 3 24012 24011 23724 3 23725 24013 23726 3 24014 23726 24013 3 23725 23795 24083 3 23725 24083 24013 3 23726 24014 23727 3 24015 23727 24014 3 23727 24015 23728 3 24016 23728 24015 3 23728 24016 23729 3 24017 23729 24016 3 23729 24017 23730 3 24018 23730 24017 3 23730 24018 23731 3 24019 23731 24018 3 23731 24019 23732 3 24020 23732 24019 3 23732 24020 23733 3 24021 23733 24020 3 23733 24021 23734 3 24022 23734 24021 3 23734 24022 23735 3 24023 23735 24022 3 23735 24023 23736 3 24024 23736 24023 3 23736 24024 23737 3 24025 23737 24024 3 23737 24025 23738 3 24026 23738 24025 3 23738 24026 23739 3 24027 23739 24026 3 23739 24027 23740 3 24028 23740 24027 3 23740 24028 23741 3 24029 23741 24028 3 23741 24029 23742 3 24030 23742 24029 3 23742 24030 23743 3 24031 23743 24030 3 23743 24031 23744 3 24032 23744 24031 3 23744 24032 23745 3 24033 23745 24032 3 23745 24033 23746 3 24034 23746 24033 3 23746 24034 23747 3 24035 23747 24034 3 23747 
24035 23748 3 24036 23748 24035 3 23748 24036 23749 3 24037 23749 24036 3 23749 24037 23750 3 24038 23750 24037 3 23750 24038 23751 3 24039 23751 24038 3 23751 24039 23752 3 24040 23752 24039 3 23752 24040 24041 3 23752 24041 23753 3 23753 24041 24042 3 23753 24042 23754 3 23754 24042 24043 3 23754 24043 23755 3 23755 24043 24044 3 23755 24044 23756 3 23756 24044 24045 3 23756 24045 23757 3 23757 24045 24046 3 23757 24046 23758 3 23758 24046 24047 3 23758 24047 23759 3 23759 24047 24048 3 23759 24048 23760 3 23760 24048 24049 3 23760 24049 23761 3 23761 24049 24050 3 23761 24050 23762 3 23762 24050 24051 3 23762 24051 23763 3 23763 24051 24052 3 23763 24052 23764 3 23764 24052 24053 3 23764 24053 23765 3 23765 24053 24054 3 23765 24054 23766 3 23766 24054 24055 3 23766 24055 23767 3 23767 24055 24056 3 23767 24056 23768 3 23768 24056 24057 3 23768 24057 23769 3 23769 24057 24058 3 23769 24058 23770 3 23770 24058 24059 3 23770 24059 23771 3 23771 24059 24060 3 23771 24060 23772 3 23772 24060 24061 3 23772 24061 23773 3 23773 24061 24062 3 23773 24062 23774 3 23774 24062 24063 3 23774 24063 23775 3 23775 24063 24064 3 23775 24064 23776 3 23776 24064 24065 3 23776 24065 23777 3 23777 24065 24066 3 23777 24066 23778 3 23778 24066 24067 3 23778 24067 23779 3 23779 24067 24068 3 23779 24068 23780 3 23780 24068 24069 3 23780 24069 23781 3 23781 24069 24070 3 23781 24070 23782 3 23782 24070 24071 3 23782 24071 23783 3 23783 24071 24072 3 23783 24072 23784 3 23784 24072 24073 3 23784 24073 23785 3 23785 24073 24074 3 23785 24074 23786 3 23786 24074 24075 3 23786 24075 23787 3 23787 24075 24076 3 23787 24076 23788 3 23788 24076 24077 3 23788 24077 23789 3 23789 24077 24078 3 23789 24078 23790 3 23790 24078 24079 3 23790 24079 23791 3 23791 24079 24080 3 23791 24080 23792 3 23792 24080 24081 3 23792 24081 23793 3 23793 24081 24084 3 23793 24084 23796 3 23794 24082 24083 3 23794 24083 23795 3 23794 23799 24087 3 23794 24087 24082 3 23796 24084 24085 3 23796 24085 23797 3 23797 
24085 24088 3 23797 24088 23800 3 23798 24086 24087 3 23798 24087 23799 3 23798 23802 24090 3 23798 24090 24086 3 23800 24088 24089 3 23800 24089 23801 3 23801 24089 24091 3 23801 24091 23803 3 23802 23804 24092 3 23802 24092 24090 3 23803 24091 24093 3 23803 24093 23805 3 23804 23806 24094 3 23804 24094 24092 3 23805 24093 24095 3 23805 24095 23807 3 23806 23808 24096 3 23806 24096 24094 3 23807 24095 24097 3 23807 24097 23809 3 23808 23810 24098 3 23808 24098 24096 3 23809 24097 24099 3 23809 24099 23811 3 23810 23812 24100 3 23810 24100 24098 3 23811 24099 24101 3 23811 24101 23813 3 23812 23814 24102 3 23812 24102 24100 3 23813 24101 24103 3 23813 24103 23815 3 23814 23816 24104 3 23814 24104 24102 3 23815 24103 24105 3 23815 24105 23817 3 23816 23818 24106 3 23816 24106 24104 3 23817 24105 24107 3 23817 24107 23819 3 23818 23820 24108 3 23818 24108 24106 3 23819 24107 24109 3 23819 24109 23821 3 23820 23822 24110 3 23820 24110 24108 3 23821 24109 24111 3 23821 24111 23823 3 23822 23824 24110 3 24112 24110 23824 3 23823 24111 24113 3 23823 24113 23825 3 23824 23826 24112 3 24114 24112 23826 3 23825 24113 24115 3 23825 24115 23827 3 23826 23828 24114 3 24116 24114 23828 3 23827 24115 24117 3 23827 24117 23829 3 23828 23830 24116 3 24118 24116 23830 3 23829 24117 24119 3 23829 24119 23831 3 23830 23832 24118 3 24120 24118 23832 3 23831 24119 24121 3 23831 24121 23833 3 23832 23834 24120 3 24122 24120 23834 3 23833 24121 24123 3 23833 24123 23835 3 23834 23836 24122 3 24124 24122 23836 3 23835 24123 24125 3 23835 24125 23837 3 23836 23838 24124 3 24126 24124 23838 3 23837 24125 24127 3 23837 24127 23839 3 23838 23840 24126 3 24128 24126 23840 3 23839 24127 24129 3 23839 24129 23841 3 23840 23842 24128 3 24130 24128 23842 3 23841 24129 24131 3 23841 24131 23843 3 23842 23844 24130 3 24132 24130 23844 3 23843 24131 24133 3 23843 24133 23845 3 23844 23846 24132 3 24134 24132 23846 3 23845 24133 24135 3 23845 24135 23847 3 23846 23848 24134 3 24136 24134 23848 3 23847 
24135 23849 3 24137 23849 24135 3 23848 23850 24136 3 24138 24136 23850 3 23849 24137 23851 3 24139 23851 24137 3 23850 23852 24138 3 24140 24138 23852 3 23851 24139 23853 3 24141 23853 24139 3 23852 23854 24140 3 24142 24140 23854 3 23853 24141 23855 3 24143 23855 24141 3 23854 23856 24142 3 24144 24142 23856 3 23855 24143 23857 3 24145 23857 24143 3 23856 23858 24144 3 24146 24144 23858 3 23857 24145 23859 3 24147 23859 24145 3 23858 23860 24146 3 24148 24146 23860 3 23859 24147 23861 3 24149 23861 24147 3 23860 23862 24148 3 24150 24148 23862 3 23861 24149 23863 3 24151 23863 24149 3 23862 23864 24150 3 24152 24150 23864 3 23863 24151 23865 3 24153 23865 24151 3 23864 23866 24152 3 24154 24152 23866 3 23865 24153 23867 3 24155 23867 24153 3 23866 23868 24154 3 24156 24154 23868 3 23867 24155 23869 3 24157 23869 24155 3 23868 23870 24156 3 24158 24156 23870 3 23869 24157 23871 3 24159 23871 24157 3 23870 23872 24158 3 24160 24158 23872 3 23871 24159 23873 3 24161 23873 24159 3 23872 23874 24160 3 24162 24160 23874 3 23873 24161 23875 3 24163 23875 24161 3 23874 23876 24162 3 24164 24162 23876 3 23875 24163 23877 3 24165 23877 24163 3 23876 23878 24164 3 24166 24164 23878 3 23877 24165 23879 3 24167 23879 24165 3 23878 23880 24166 3 24168 24166 23880 3 23879 24167 23881 3 24169 23881 24167 3 23880 23882 24168 3 24170 24168 23882 3 23881 24169 23883 3 24171 23883 24169 3 23882 23884 24170 3 24172 24170 23884 3 23883 24171 23885 3 24173 23885 24171 3 23884 23886 24172 3 24174 24172 23886 3 23885 24173 23887 3 24175 23887 24173 3 23886 23888 24174 3 24176 24174 23888 3 23887 24175 23889 3 24177 23889 24175 3 23888 23890 24176 3 24178 24176 23890 3 23889 24177 23891 3 24179 23891 24177 3 23890 23892 24178 3 24180 24178 23892 3 23891 24179 23893 3 24181 23893 24179 3 23892 23894 24180 3 24182 24180 23894 3 23893 24181 23895 3 24183 23895 24181 3 23894 23896 24182 3 24184 24182 23896 3 23895 24183 23897 3 24185 23897 24183 3 23896 23898 24184 3 24186 24184 23898 3 23897 
24185 23899 3 24187 23899 24185 3 23898 23900 24186 3 24188 24186 23900 3 23899 24187 23901 3 24189 23901 24187 3 23900 23902 24188 3 24190 24188 23902 3 23901 24189 23903 3 24191 23903 24189 3 23902 23904 24190 3 24192 24190 23904 3 23903 24191 23905 3 24193 23905 24191 3 23904 23906 24192 3 24194 24192 23906 3 23905 24193 23907 3 24195 23907 24193 3 23906 23908 24194 3 24196 24194 23908 3 23907 24195 23909 3 24197 23909 24195 3 23908 23910 24196 3 24198 24196 23910 3 23909 24197 23911 3 24199 23911 24197 3 23910 23912 24198 3 24200 24198 23912 3 23911 24199 23913 3 24201 23913 24199 3 23912 23914 24200 3 24202 24200 23914 3 23913 24201 23915 3 24203 23915 24201 3 23914 23916 24202 3 24204 24202 23916 3 23915 24203 23917 3 24205 23917 24203 3 23916 23918 24204 3 24206 24204 23918 3 23917 24205 23919 3 24207 23919 24205 3 23918 23920 24208 3 23918 24208 24206 3 23919 24207 23921 3 24209 23921 24207 3 23920 23922 24210 3 23920 24210 24208 3 23921 24209 23923 3 24211 23923 24209 3 23922 23924 24212 3 23922 24212 24210 3 23923 24211 23925 3 24213 23925 24211 3 23924 23926 24214 3 23924 24214 24212 3 23925 24213 23927 3 24215 23927 24213 3 23926 23928 24216 3 23926 24216 24214 3 23927 24215 23929 3 24217 23929 24215 3 23928 23930 24218 3 23928 24218 24216 3 23929 24217 23931 3 24219 23931 24217 3 23930 23932 24220 3 23930 24220 24218 3 23931 24219 23933 3 24221 23933 24219 3 23932 23934 24222 3 23932 24222 24220 3 23933 24221 23935 3 24223 23935 24221 3 23934 23936 24224 3 23934 24224 24222 3 23935 24223 23939 3 24227 23939 24223 3 23936 23937 24225 3 23936 24225 24224 3 23937 23940 24228 3 23937 24228 24225 3 23938 23939 24227 3 23938 24227 24226 3 23938 24226 23943 3 24231 23943 24226 3 23940 23941 24229 3 23940 24229 24228 3 23941 23944 24232 3 23941 24232 24229 3 23942 23943 24231 3 23942 24231 24230 3 23942 24230 24012 3 24300 24012 24230 3 23944 23945 24233 3 23944 24233 24232 3 23945 23946 24234 3 23945 24234 24233 3 23946 23947 24235 3 23946 24235 24234 3 23947 
23948 24236 3 23947 24236 24235 3 23948 23949 24237 3 23948 24237 24236 3 23949 23950 24238 3 23949 24238 24237 3 23950 23951 24239 3 23950 24239 24238 3 23951 23952 24240 3 23951 24240 24239 3 23952 23953 24241 3 23952 24241 24240 3 23953 23954 24242 3 23953 24242 24241 3 23954 23955 24243 3 23954 24243 24242 3 23955 23956 24244 3 23955 24244 24243 3 23956 23957 24245 3 23956 24245 24244 3 23957 23958 24246 3 23957 24246 24245 3 23958 23959 24247 3 23958 24247 24246 3 23959 23960 24248 3 23959 24248 24247 3 23960 23961 24249 3 23960 24249 24248 3 23961 23962 24250 3 23961 24250 24249 3 23962 23963 24251 3 23962 24251 24250 3 23963 23964 24252 3 23963 24252 24251 3 23964 23965 24253 3 23964 24253 24252 3 23965 23966 24254 3 23965 24254 24253 3 23966 23967 24255 3 23966 24255 24254 3 23967 23968 24256 3 23967 24256 24255 3 23968 23969 24257 3 23968 24257 24256 3 23969 23970 24258 3 23969 24258 24257 3 23970 23971 24259 3 23970 24259 24258 3 23971 23972 24260 3 23971 24260 24259 3 23972 23973 24261 3 23972 24261 24260 3 23973 23974 24262 3 23973 24262 24261 3 23974 23975 24263 3 23974 24263 24262 3 23975 23976 24264 3 23975 24264 24263 3 23976 23977 24265 3 23976 24265 24264 3 23977 23978 24266 3 23977 24266 24265 3 23978 23979 24267 3 23978 24267 24266 3 23979 23980 24268 3 23979 24268 24267 3 23980 23981 24269 3 23980 24269 24268 3 23981 23982 24270 3 23981 24270 24269 3 23982 23983 24271 3 23982 24271 24270 3 23983 23984 24272 3 23983 24272 24271 3 23984 23985 24273 3 23984 24273 24272 3 23985 23986 24274 3 23985 24274 24273 3 23986 23987 24275 3 23986 24275 24274 3 23987 23988 24276 3 23987 24276 24275 3 23988 23989 24277 3 23988 24277 24276 3 23989 23990 24278 3 23989 24278 24277 3 23990 23991 24279 3 23990 24279 24278 3 23991 23992 24280 3 23991 24280 24279 3 23992 23993 24281 3 23992 24281 24280 3 23993 23994 24282 3 23993 24282 24281 3 23994 23995 24283 3 23994 24283 24282 3 23995 23996 24284 3 23995 24284 24283 3 23996 23997 24285 3 23996 24285 24284 3 23997 
23998 24286 3 23997 24286 24285 3 23998 23999 24287 3 23998 24287 24286 3 23999 24000 24288 3 23999 24288 24287 3 24000 24001 24289 3 24000 24289 24288 3 24001 24002 24290 3 24001 24290 24289 3 24002 24003 24291 3 24002 24291 24290 3 24003 24004 24292 3 24003 24292 24291 3 24004 24005 24293 3 24004 24293 24292 3 24005 24006 24294 3 24005 24294 24293 3 24006 24007 24295 3 24006 24295 24294 3 24007 24008 24296 3 24007 24296 24295 3 24008 24009 24297 3 24008 24297 24296 3 24009 24010 24298 3 24009 24298 24297 3 24010 24011 24299 3 24010 24299 24298 3 24011 24012 24300 3 24011 24300 24299 3 24013 24301 24302 3 24013 24302 24014 3 24013 24083 24301 3 24371 24301 24083 3 24014 24302 24303 3 24014 24303 24015 3 24015 24303 24304 3 24015 24304 24016 3 24016 24304 24305 3 24016 24305 24017 3 24017 24305 24306 3 24017 24306 24018 3 24018 24306 24307 3 24018 24307 24019 3 24019 24307 24308 3 24019 24308 24020 3 24020 24308 24309 3 24020 24309 24021 3 24021 24309 24310 3 24021 24310 24022 3 24022 24310 24311 3 24022 24311 24023 3 24023 24311 24312 3 24023 24312 24024 3 24024 24312 24313 3 24024 24313 24025 3 24025 24313 24314 3 24025 24314 24026 3 24026 24314 24315 3 24026 24315 24027 3 24027 24315 24316 3 24027 24316 24028 3 24028 24316 24317 3 24028 24317 24029 3 24029 24317 24318 3 24029 24318 24030 3 24030 24318 24319 3 24030 24319 24031 3 24031 24319 24320 3 24031 24320 24032 3 24032 24320 24321 3 24032 24321 24033 3 24033 24321 24322 3 24033 24322 24034 3 24034 24322 24323 3 24034 24323 24035 3 24035 24323 24324 3 24035 24324 24036 3 24036 24324 24325 3 24036 24325 24037 3 24037 24325 24326 3 24037 24326 24038 3 24038 24326 24327 3 24038 24327 24039 3 24039 24327 24040 3 24328 24040 24327 3 24040 24328 24041 3 24329 24041 24328 3 24041 24329 24042 3 24330 24042 24329 3 24042 24330 24043 3 24331 24043 24330 3 24043 24331 24044 3 24332 24044 24331 3 24044 24332 24045 3 24333 24045 24332 3 24045 24333 24046 3 24334 24046 24333 3 24046 24334 24047 3 24335 24047 24334 3 24047 
24335 24048 3 24336 24048 24335 3 24048 24336 24049 3 24337 24049 24336 3 24049 24337 24050 3 24338 24050 24337 3 24050 24338 24051 3 24339 24051 24338 3 24051 24339 24052 3 24340 24052 24339 3 24052 24340 24053 3 24341 24053 24340 3 24053 24341 24054 3 24342 24054 24341 3 24054 24342 24055 3 24343 24055 24342 3 24055 24343 24056 3 24344 24056 24343 3 24056 24344 24057 3 24345 24057 24344 3 24057 24345 24058 3 24346 24058 24345 3 24058 24346 24059 3 24347 24059 24346 3 24059 24347 24060 3 24348 24060 24347 3 24060 24348 24061 3 24349 24061 24348 3 24061 24349 24062 3 24350 24062 24349 3 24062 24350 24063 3 24351 24063 24350 3 24063 24351 24064 3 24352 24064 24351 3 24064 24352 24065 3 24353 24065 24352 3 24065 24353 24066 3 24354 24066 24353 3 24066 24354 24067 3 24355 24067 24354 3 24067 24355 24068 3 24356 24068 24355 3 24068 24356 24069 3 24357 24069 24356 3 24069 24357 24070 3 24358 24070 24357 3 24070 24358 24071 3 24359 24071 24358 3 24071 24359 24072 3 24360 24072 24359 3 24072 24360 24073 3 24361 24073 24360 3 24073 24361 24074 3 24362 24074 24361 3 24074 24362 24075 3 24363 24075 24362 3 24075 24363 24076 3 24364 24076 24363 3 24076 24364 24077 3 24365 24077 24364 3 24077 24365 24078 3 24366 24078 24365 3 24078 24366 24079 3 24367 24079 24366 3 24079 24367 24080 3 24368 24080 24367 3 24080 24368 24081 3 24369 24081 24368 3 24081 24369 24084 3 24372 24084 24369 3 24082 24370 24083 3 24371 24083 24370 3 24082 24087 24370 3 24375 24370 24087 3 24084 24372 24085 3 24373 24085 24372 3 24085 24373 24088 3 24376 24088 24373 3 24086 24374 24087 3 24375 24087 24374 3 24086 24090 24374 3 24378 24374 24090 3 24088 24376 24089 3 24377 24089 24376 3 24089 24377 24091 3 24379 24091 24377 3 24090 24092 24378 3 24380 24378 24092 3 24091 24379 24093 3 24381 24093 24379 3 24092 24094 24380 3 24382 24380 24094 3 24093 24381 24095 3 24383 24095 24381 3 24094 24096 24382 3 24384 24382 24096 3 24095 24383 24097 3 24385 24097 24383 3 24096 24098 24384 3 24386 24384 24098 3 24097 
24385 24099 3 24387 24099 24385 3 24098 24100 24386 3 24388 24386 24100 3 24099 24387 24101 3 24389 24101 24387 3 24100 24102 24388 3 24390 24388 24102 3 24101 24389 24103 3 24391 24103 24389 3 24102 24104 24390 3 24392 24390 24104 3 24103 24391 24105 3 24393 24105 24391 3 24104 24106 24392 3 24394 24392 24106 3 24105 24393 24107 3 24395 24107 24393 3 24106 24108 24394 3 24396 24394 24108 3 24107 24395 24109 3 24397 24109 24395 3 24108 24110 24398 3 24108 24398 24396 3 24109 24397 24111 3 24399 24111 24397 3 24110 24112 24400 3 24110 24400 24398 3 24111 24399 24113 3 24401 24113 24399 3 24112 24114 24402 3 24112 24402 24400 3 24113 24401 24115 3 24403 24115 24401 3 24114 24116 24404 3 24114 24404 24402 3 24115 24403 24117 3 24405 24117 24403 3 24116 24118 24406 3 24116 24406 24404 3 24117 24405 24119 3 24407 24119 24405 3 24118 24120 24408 3 24118 24408 24406 3 24119 24407 24121 3 24409 24121 24407 3 24120 24122 24410 3 24120 24410 24408 3 24121 24409 24123 3 24411 24123 24409 3 24122 24124 24412 3 24122 24412 24410 3 24123 24411 24125 3 24413 24125 24411 3 24124 24126 24414 3 24124 24414 24412 3 24125 24413 24127 3 24415 24127 24413 3 24126 24128 24416 3 24126 24416 24414 3 24127 24415 24129 3 24417 24129 24415 3 24128 24130 24418 3 24128 24418 24416 3 24129 24417 24131 3 24419 24131 24417 3 24130 24132 24420 3 24130 24420 24418 3 24131 24419 24133 3 24421 24133 24419 3 24132 24134 24422 3 24132 24422 24420 3 24133 24421 24135 3 24423 24135 24421 3 24134 24136 24424 3 24134 24424 24422 3 24135 24423 24425 3 24135 24425 24137 3 24136 24138 24426 3 24136 24426 24424 3 24137 24425 24427 3 24137 24427 24139 3 24138 24140 24428 3 24138 24428 24426 3 24139 24427 24429 3 24139 24429 24141 3 24140 24142 24430 3 24140 24430 24428 3 24141 24429 24431 3 24141 24431 24143 3 24142 24144 24432 3 24142 24432 24430 3 24143 24431 24433 3 24143 24433 24145 3 24144 24146 24434 3 24144 24434 24432 3 24145 24433 24435 3 24145 24435 24147 3 24146 24148 24436 3 24146 24436 24434 3 24147 
24435 24437 3 24147 24437 24149 3 24148 24150 24438 3 24148 24438 24436 3 24149 24437 24439 3 24149 24439 24151 3 24150 24152 24440 3 24150 24440 24438 3 24151 24439 24441 3 24151 24441 24153 3 24152 24154 24442 3 24152 24442 24440 3 24153 24441 24443 3 24153 24443 24155 3 24154 24156 24444 3 24154 24444 24442 3 24155 24443 24445 3 24155 24445 24157 3 24156 24158 24446 3 24156 24446 24444 3 24157 24445 24447 3 24157 24447 24159 3 24158 24160 24448 3 24158 24448 24446 3 24159 24447 24449 3 24159 24449 24161 3 24160 24162 24450 3 24160 24450 24448 3 24161 24449 24451 3 24161 24451 24163 3 24162 24164 24452 3 24162 24452 24450 3 24163 24451 24453 3 24163 24453 24165 3 24164 24166 24454 3 24164 24454 24452 3 24165 24453 24455 3 24165 24455 24167 3 24166 24168 24456 3 24166 24456 24454 3 24167 24455 24457 3 24167 24457 24169 3 24168 24170 24458 3 24168 24458 24456 3 24169 24457 24459 3 24169 24459 24171 3 24170 24172 24460 3 24170 24460 24458 3 24171 24459 24461 3 24171 24461 24173 3 24172 24174 24462 3 24172 24462 24460 3 24173 24461 24463 3 24173 24463 24175 3 24174 24176 24464 3 24174 24464 24462 3 24175 24463 24465 3 24175 24465 24177 3 24176 24178 24466 3 24176 24466 24464 3 24177 24465 24467 3 24177 24467 24179 3 24178 24180 24468 3 24178 24468 24466 3 24179 24467 24469 3 24179 24469 24181 3 24180 24182 24470 3 24180 24470 24468 3 24181 24469 24471 3 24181 24471 24183 3 24182 24184 24472 3 24182 24472 24470 3 24183 24471 24473 3 24183 24473 24185 3 24184 24186 24474 3 24184 24474 24472 3 24185 24473 24475 3 24185 24475 24187 3 24186 24188 24476 3 24186 24476 24474 3 24187 24475 24477 3 24187 24477 24189 3 24188 24190 24478 3 24188 24478 24476 3 24189 24477 24479 3 24189 24479 24191 3 24190 24192 24480 3 24190 24480 24478 3 24191 24479 24481 3 24191 24481 24193 3 24192 24194 24482 3 24192 24482 24480 3 24193 24481 24483 3 24193 24483 24195 3 24194 24196 24484 3 24194 24484 24482 3 24195 24483 24485 3 24195 24485 24197 3 24196 24198 24486 3 24196 24486 24484 3 24197 
24485 24487 3 24197 24487 24199 3 24198 24200 24488 3 24198 24488 24486 3 24199 24487 24489 3 24199 24489 24201 3 24200 24202 24490 3 24200 24490 24488 3 24201 24489 24491 3 24201 24491 24203 3 24202 24204 24492 3 24202 24492 24490 3 24203 24491 24493 3 24203 24493 24205 3 24204 24206 24492 3 24494 24492 24206 3 24205 24493 24495 3 24205 24495 24207 3 24206 24208 24494 3 24496 24494 24208 3 24207 24495 24497 3 24207 24497 24209 3 24208 24210 24496 3 24498 24496 24210 3 24209 24497 24499 3 24209 24499 24211 3 24210 24212 24498 3 24500 24498 24212 3 24211 24499 24501 3 24211 24501 24213 3 24212 24214 24500 3 24502 24500 24214 3 24213 24501 24503 3 24213 24503 24215 3 24214 24216 24502 3 24504 24502 24216 3 24215 24503 24505 3 24215 24505 24217 3 24216 24218 24504 3 24506 24504 24218 3 24217 24505 24507 3 24217 24507 24219 3 24218 24220 24506 3 24508 24506 24220 3 24219 24507 24509 3 24219 24509 24221 3 24220 24222 24508 3 24510 24508 24222 3 24221 24509 24511 3 24221 24511 24223 3 24222 24224 24510 3 24512 24510 24224 3 24223 24511 24515 3 24223 24515 24227 3 24224 24225 24512 3 24513 24512 24225 3 24225 24228 24513 3 24516 24513 24228 3 24226 24227 24514 3 24515 24514 24227 3 24226 24514 24519 3 24226 24519 24231 3 24228 24229 24516 3 24517 24516 24229 3 24229 24232 24517 3 24520 24517 24232 3 24230 24231 24518 3 24519 24518 24231 3 24230 24518 24300 3 24588 24300 24518 3 24232 24233 24520 3 24521 24520 24233 3 24233 24234 24521 3 24522 24521 24234 3 24234 24235 24522 3 24523 24522 24235 3 24235 24236 24523 3 24524 24523 24236 3 24236 24237 24524 3 24525 24524 24237 3 24237 24238 24525 3 24526 24525 24238 3 24238 24239 24526 3 24527 24526 24239 3 24239 24240 24527 3 24528 24527 24240 3 24240 24241 24528 3 24529 24528 24241 3 24241 24242 24529 3 24530 24529 24242 3 24242 24243 24530 3 24531 24530 24243 3 24243 24244 24531 3 24532 24531 24244 3 24244 24245 24532 3 24533 24532 24245 3 24245 24246 24533 3 24534 24533 24246 3 24246 24247 24534 3 24535 24534 24247 3 24247 
24248 24535 3 24536 24535 24248 3 24248 24249 24536 3 24537 24536 24249 3 24249 24250 24537 3 24538 24537 24250 3 24250 24251 24538 3 24539 24538 24251 3 24251 24252 24539 3 24540 24539 24252 3 24252 24253 24540 3 24541 24540 24253 3 24253 24254 24541 3 24542 24541 24254 3 24254 24255 24542 3 24543 24542 24255 3 24255 24256 24543 3 24544 24543 24256 3 24256 24257 24544 3 24545 24544 24257 3 24257 24258 24545 3 24546 24545 24258 3 24258 24259 24546 3 24547 24546 24259 3 24259 24260 24547 3 24548 24547 24260 3 24260 24261 24548 3 24549 24548 24261 3 24261 24262 24549 3 24550 24549 24262 3 24262 24263 24550 3 24551 24550 24263 3 24263 24264 24551 3 24552 24551 24264 3 24264 24265 24552 3 24553 24552 24265 3 24265 24266 24553 3 24554 24553 24266 3 24266 24267 24554 3 24555 24554 24267 3 24267 24268 24555 3 24556 24555 24268 3 24268 24269 24556 3 24557 24556 24269 3 24269 24270 24557 3 24558 24557 24270 3 24270 24271 24558 3 24559 24558 24271 3 24271 24272 24559 3 24560 24559 24272 3 24272 24273 24560 3 24561 24560 24273 3 24273 24274 24561 3 24562 24561 24274 3 24274 24275 24562 3 24563 24562 24275 3 24275 24276 24563 3 24564 24563 24276 3 24276 24277 24564 3 24565 24564 24277 3 24277 24278 24565 3 24566 24565 24278 3 24278 24279 24566 3 24567 24566 24279 3 24279 24280 24567 3 24568 24567 24280 3 24280 24281 24568 3 24569 24568 24281 3 24281 24282 24569 3 24570 24569 24282 3 24282 24283 24570 3 24571 24570 24283 3 24283 24284 24571 3 24572 24571 24284 3 24284 24285 24572 3 24573 24572 24285 3 24285 24286 24573 3 24574 24573 24286 3 24286 24287 24574 3 24575 24574 24287 3 24287 24288 24575 3 24576 24575 24288 3 24288 24289 24576 3 24577 24576 24289 3 24289 24290 24577 3 24578 24577 24290 3 24290 24291 24578 3 24579 24578 24291 3 24291 24292 24579 3 24580 24579 24292 3 24292 24293 24580 3 24581 24580 24293 3 24293 24294 24581 3 24582 24581 24294 3 24294 24295 24582 3 24583 24582 24295 3 24295 24296 24583 3 24584 24583 24296 3 24296 24297 24584 3 24585 24584 24297 3 24297 
24298 24585 3 24586 24585 24298 3 24298 24299 24586 3 24587 24586 24299 3 24299 24300 24588 3 24299 24588 24587 3 24301 24589 24302 3 24590 24302 24589 3 24301 24371 24659 3 24301 24659 24589 3 24302 24590 24303 3 24591 24303 24590 3 24303 24591 24304 3 24592 24304 24591 3 24304 24592 24305 3 24593 24305 24592 3 24305 24593 24306 3 24594 24306 24593 3 24306 24594 24307 3 24595 24307 24594 3 24307 24595 24308 3 24596 24308 24595 3 24308 24596 24309 3 24597 24309 24596 3 24309 24597 24310 3 24598 24310 24597 3 24310 24598 24311 3 24599 24311 24598 3 24311 24599 24312 3 24600 24312 24599 3 24312 24600 24313 3 24601 24313 24600 3 24313 24601 24314 3 24602 24314 24601 3 24314 24602 24315 3 24603 24315 24602 3 24315 24603 24316 3 24604 24316 24603 3 24316 24604 24317 3 24605 24317 24604 3 24317 24605 24318 3 24606 24318 24605 3 24318 24606 24319 3 24607 24319 24606 3 24319 24607 24320 3 24608 24320 24607 3 24320 24608 24321 3 24609 24321 24608 3 24321 24609 24322 3 24610 24322 24609 3 24322 24610 24323 3 24611 24323 24610 3 24323 24611 24324 3 24612 24324 24611 3 24324 24612 24325 3 24613 24325 24612 3 24325 24613 24326 3 24614 24326 24613 3 24326 24614 24327 3 24615 24327 24614 3 24327 24615 24616 3 24327 24616 24328 3 24328 24616 24617 3 24328 24617 24329 3 24329 24617 24618 3 24329 24618 24330 3 24330 24618 24619 3 24330 24619 24331 3 24331 24619 24620 3 24331 24620 24332 3 24332 24620 24621 3 24332 24621 24333 3 24333 24621 24622 3 24333 24622 24334 3 24334 24622 24623 3 24334 24623 24335 3 24335 24623 24624 3 24335 24624 24336 3 24336 24624 24625 3 24336 24625 24337 3 24337 24625 24626 3 24337 24626 24338 3 24338 24626 24627 3 24338 24627 24339 3 24339 24627 24628 3 24339 24628 24340 3 24340 24628 24629 3 24340 24629 24341 3 24341 24629 24630 3 24341 24630 24342 3 24342 24630 24631 3 24342 24631 24343 3 24343 24631 24632 3 24343 24632 24344 3 24344 24632 24633 3 24344 24633 24345 3 24345 24633 24634 3 24345 24634 24346 3 24346 24634 24635 3 24346 24635 24347 3 24347 
24635 24636 3 24347 24636 24348 3 24348 24636 24637 3 24348 24637 24349 3 24349 24637 24638 3 24349 24638 24350 3 24350 24638 24639 3 24350 24639 24351 3 24351 24639 24640 3 24351 24640 24352 3 24352 24640 24641 3 24352 24641 24353 3 24353 24641 24642 3 24353 24642 24354 3 24354 24642 24643 3 24354 24643 24355 3 24355 24643 24644 3 24355 24644 24356 3 24356 24644 24645 3 24356 24645 24357 3 24357 24645 24646 3 24357 24646 24358 3 24358 24646 24647 3 24358 24647 24359 3 24359 24647 24648 3 24359 24648 24360 3 24360 24648 24649 3 24360 24649 24361 3 24361 24649 24650 3 24361 24650 24362 3 24362 24650 24651 3 24362 24651 24363 3 24363 24651 24652 3 24363 24652 24364 3 24364 24652 24653 3 24364 24653 24365 3 24365 24653 24654 3 24365 24654 24366 3 24366 24654 24655 3 24366 24655 24367 3 24367 24655 24656 3 24367 24656 24368 3 24368 24656 24657 3 24368 24657 24369 3 24369 24657 24662 3 24369 24662 24372 3 24370 24658 24659 3 24370 24659 24371 3 24370 24375 24665 3 24370 24665 24658 3 24372 24662 24663 3 24372 24663 24373 3 24373 24663 24666 3 24373 24666 24376 3 24374 24664 24665 3 24374 24665 24375 3 24374 24378 24668 3 24374 24668 24664 3 24376 24666 24667 3 24376 24667 24377 3 24377 24667 24671 3 24377 24671 24379 3 24378 24380 24672 3 24378 24672 24668 3 24379 24671 24673 3 24379 24673 24381 3 24380 24382 24674 3 24380 24674 24672 3 24381 24673 24675 3 24381 24675 24383 3 24382 24384 24676 3 24382 24676 24674 3 24383 24675 24677 3 24383 24677 24385 3 24384 24386 24678 3 24384 24678 24676 3 24385 24677 24679 3 24385 24679 24387 3 24386 24388 24680 3 24386 24680 24678 3 24387 24679 24681 3 24387 24681 24389 3 24388 24390 24682 3 24388 24682 24680 3 24389 24681 24683 3 24389 24683 24391 3 24390 24392 24684 3 24390 24684 24682 3 24391 24683 24685 3 24391 24685 24393 3 24392 24394 24686 3 24392 24686 24684 3 24393 24685 24687 3 24393 24687 24395 3 24394 24396 24688 3 24394 24688 24686 3 24395 24687 24689 3 24395 24689 24397 3 24396 24398 24688 3 24690 24688 24398 3 24397 
24689 24691 3 24397 24691 24399 3 24398 24400 24690 3 24692 24690 24400 3 24399 24691 24693 3 24399 24693 24401 3 24400 24402 24692 3 24694 24692 24402 3 24401 24693 24695 3 24401 24695 24403 3 24402 24404 24694 3 24696 24694 24404 3 24403 24695 24697 3 24403 24697 24405 3 24404 24406 24696 3 24698 24696 24406 3 24405 24697 24699 3 24405 24699 24407 3 24406 24408 24698 3 24700 24698 24408 3 24407 24699 24701 3 24407 24701 24409 3 24408 24410 24700 3 24702 24700 24410 3 24409 24701 24703 3 24409 24703 24411 3 24410 24412 24702 3 24704 24702 24412 3 24411 24703 24705 3 24411 24705 24413 3 24412 24414 24704 3 24706 24704 24414 3 24413 24705 24707 3 24413 24707 24415 3 24414 24416 24706 3 24708 24706 24416 3 24415 24707 24709 3 24415 24709 24417 3 24416 24418 24708 3 24710 24708 24418 3 24417 24709 24711 3 24417 24711 24419 3 24418 24420 24710 3 24712 24710 24420 3 24419 24711 24713 3 24419 24713 24421 3 24420 24422 24712 3 24714 24712 24422 3 24421 24713 24423 3 24715 24423 24713 3 24422 24424 24714 3 24716 24714 24424 3 24423 24715 24425 3 24717 24425 24715 3 24424 24426 24716 3 24718 24716 24426 3 24425 24717 24427 3 24719 24427 24717 3 24426 24428 24718 3 24720 24718 24428 3 24427 24719 24429 3 24721 24429 24719 3 24428 24430 24720 3 24722 24720 24430 3 24429 24721 24431 3 24723 24431 24721 3 24430 24432 24722 3 24724 24722 24432 3 24431 24723 24433 3 24725 24433 24723 3 24432 24434 24724 3 24726 24724 24434 3 24433 24725 24435 3 24727 24435 24725 3 24434 24436 24726 3 24728 24726 24436 3 24435 24727 24437 3 24729 24437 24727 3 24436 24438 24728 3 24730 24728 24438 3 24437 24729 24439 3 24731 24439 24729 3 24438 24440 24730 3 24732 24730 24440 3 24439 24731 24441 3 24733 24441 24731 3 24440 24442 24732 3 24734 24732 24442 3 24441 24733 24443 3 24735 24443 24733 3 24442 24444 24734 3 24736 24734 24444 3 24443 24735 24445 3 24737 24445 24735 3 24444 24446 24736 3 24738 24736 24446 3 24445 24737 24447 3 24739 24447 24737 3 24446 24448 24738 3 24740 24738 24448 3 24447 
24739 24449 3 24741 24449 24739 3 24448 24450 24740 3 24742 24740 24450 3 24449 24741 24451 3 24743 24451 24741 3 24450 24452 24742 3 24744 24742 24452 3 24451 24743 24453 3 24745 24453 24743 3 24452 24454 24744 3 24746 24744 24454 3 24453 24745 24455 3 24747 24455 24745 3 24454 24456 24746 3 24748 24746 24456 3 24455 24747 24457 3 24749 24457 24747 3 24456 24458 24748 3 24750 24748 24458 3 24457 24749 24459 3 24751 24459 24749 3 24458 24460 24750 3 24752 24750 24460 3 24459 24751 24461 3 24753 24461 24751 3 24460 24462 24752 3 24754 24752 24462 3 24461 24753 24463 3 24755 24463 24753 3 24462 24464 24754 3 24756 24754 24464 3 24463 24755 24465 3 24757 24465 24755 3 24464 24466 24756 3 24758 24756 24466 3 24465 24757 24467 3 24759 24467 24757 3 24466 24468 24758 3 24760 24758 24468 3 24467 24759 24469 3 24761 24469 24759 3 24468 24470 24760 3 24762 24760 24470 3 24469 24761 24471 3 24763 24471 24761 3 24470 24472 24762 3 24764 24762 24472 3 24471 24763 24473 3 24765 24473 24763 3 24472 24474 24764 3 24766 24764 24474 3 24473 24765 24475 3 24767 24475 24765 3 24474 24476 24766 3 24768 24766 24476 3 24475 24767 24477 3 24769 24477 24767 3 24476 24478 24768 3 24770 24768 24478 3 24477 24769 24479 3 24771 24479 24769 3 24478 24480 24770 3 24772 24770 24480 3 24479 24771 24481 3 24773 24481 24771 3 24480 24482 24772 3 24774 24772 24482 3 24481 24773 24483 3 24775 24483 24773 3 24482 24484 24774 3 24776 24774 24484 3 24483 24775 24485 3 24777 24485 24775 3 24484 24486 24776 3 24778 24776 24486 3 24485 24777 24487 3 24779 24487 24777 3 24486 24488 24778 3 24780 24778 24488 3 24487 24779 24489 3 24781 24489 24779 3 24488 24490 24780 3 24782 24780 24490 3 24489 24781 24491 3 24783 24491 24781 3 24490 24492 24782 3 24784 24782 24492 3 24491 24783 24493 3 24785 24493 24783 3 24492 24494 24786 3 24492 24786 24784 3 24493 24785 24495 3 24787 24495 24785 3 24494 24496 24788 3 24494 24788 24786 3 24495 24787 24497 3 24789 24497 24787 3 24496 24498 24790 3 24496 24790 24788 3 24497 
24789 24499 3 24791 24499 24789 3 24498 24500 24792 3 24498 24792 24790 3 24499 24791 24501 3 24793 24501 24791 3 24500 24502 24794 3 24500 24794 24792 3 24501 24793 24503 3 24795 24503 24793 3 24502 24504 24796 3 24502 24796 24794 3 24503 24795 24505 3 24797 24505 24795 3 24504 24506 24798 3 24504 24798 24796 3 24505 24797 24507 3 24799 24507 24797 3 24506 24508 24800 3 24506 24800 24798 3 24507 24799 24509 3 24801 24509 24799 3 24508 24510 24802 3 24508 24802 24800 3 24509 24801 24511 3 24805 24511 24801 3 24510 24512 24806 3 24510 24806 24802 3 24511 24805 24515 3 24809 24515 24805 3 24512 24513 24807 3 24512 24807 24806 3 24513 24516 24810 3 24513 24810 24807 3 24514 24515 24809 3 24514 24809 24808 3 24514 24808 24519 3 24815 24519 24808 3 24516 24517 24811 3 24516 24811 24810 3 24517 24520 24816 3 24517 24816 24811 3 24518 24519 24815 3 24518 24815 24814 3 24518 24814 24884 3 24518 24884 24588 3 24520 24521 24817 3 24520 24817 24816 3 24521 24522 24818 3 24521 24818 24817 3 24522 24523 24819 3 24522 24819 24818 3 24523 24524 24820 3 24523 24820 24819 3 24524 24525 24821 3 24524 24821 24820 3 24525 24526 24822 3 24525 24822 24821 3 24526 24527 24823 3 24526 24823 24822 3 24527 24528 24824 3 24527 24824 24823 3 24528 24529 24825 3 24528 24825 24824 3 24529 24530 24826 3 24529 24826 24825 3 24530 24531 24827 3 24530 24827 24826 3 24531 24532 24828 3 24531 24828 24827 3 24532 24533 24829 3 24532 24829 24828 3 24533 24534 24830 3 24533 24830 24829 3 24534 24535 24831 3 24534 24831 24830 3 24535 24536 24832 3 24535 24832 24831 3 24536 24537 24833 3 24536 24833 24832 3 24537 24538 24834 3 24537 24834 24833 3 24538 24539 24835 3 24538 24835 24834 3 24539 24540 24836 3 24539 24836 24835 3 24540 24541 24837 3 24540 24837 24836 3 24541 24542 24838 3 24541 24838 24837 3 24542 24543 24839 3 24542 24839 24838 3 24543 24544 24840 3 24543 24840 24839 3 24544 24545 24841 3 24544 24841 24840 3 24545 24546 24842 3 24545 24842 24841 3 24546 24547 24843 3 24546 24843 24842 3 24547 
24548 24844 3 24547 24844 24843 3 24548 24549 24845 3 24548 24845 24844 3 24549 24550 24846 3 24549 24846 24845 3 24550 24551 24847 3 24550 24847 24846 3 24551 24552 24848 3 24551 24848 24847 3 24552 24553 24849 3 24552 24849 24848 3 24553 24554 24850 3 24553 24850 24849 3 24554 24555 24851 3 24554 24851 24850 3 24555 24556 24852 3 24555 24852 24851 3 24556 24557 24853 3 24556 24853 24852 3 24557 24558 24854 3 24557 24854 24853 3 24558 24559 24855 3 24558 24855 24854 3 24559 24560 24856 3 24559 24856 24855 3 24560 24561 24857 3 24560 24857 24856 3 24561 24562 24858 3 24561 24858 24857 3 24562 24563 24859 3 24562 24859 24858 3 24563 24564 24860 3 24563 24860 24859 3 24564 24565 24861 3 24564 24861 24860 3 24565 24566 24862 3 24565 24862 24861 3 24566 24567 24863 3 24566 24863 24862 3 24567 24568 24864 3 24567 24864 24863 3 24568 24569 24865 3 24568 24865 24864 3 24569 24570 24866 3 24569 24866 24865 3 24570 24571 24867 3 24570 24867 24866 3 24571 24572 24868 3 24571 24868 24867 3 24572 24573 24869 3 24572 24869 24868 3 24573 24574 24870 3 24573 24870 24869 3 24574 24575 24871 3 24574 24871 24870 3 24575 24576 24872 3 24575 24872 24871 3 24576 24577 24873 3 24576 24873 24872 3 24577 24578 24874 3 24577 24874 24873 3 24578 24579 24875 3 24578 24875 24874 3 24579 24580 24876 3 24579 24876 24875 3 24580 24581 24877 3 24580 24877 24876 3 24581 24582 24878 3 24581 24878 24877 3 24582 24583 24879 3 24582 24879 24878 3 24583 24584 24880 3 24583 24880 24879 3 24584 24585 24881 3 24584 24881 24880 3 24585 24586 24882 3 24585 24882 24881 3 24586 24587 24883 3 24586 24883 24882 3 24587 24588 24884 3 24587 24884 24883 3 24589 24659 24590 3 24660 24590 24659 3 24590 24885 24886 3 24590 24886 24591 3 24590 24660 24885 3 24954 24885 24660 3 24591 24886 24887 3 24591 24887 24592 3 24592 24887 24888 3 24592 24888 24593 3 24593 24888 24889 3 24593 24889 24594 3 24594 24889 24890 3 24594 24890 24595 3 24595 24890 24891 3 24595 24891 24596 3 24596 24891 24892 3 24596 24892 24597 3 24597 
24892 24893 3 24597 24893 24598 3 24598 24893 24894 3 24598 24894 24599 3 24599 24894 24895 3 24599 24895 24600 3 24600 24895 24896 3 24600 24896 24601 3 24601 24896 24897 3 24601 24897 24602 3 24602 24897 24898 3 24602 24898 24603 3 24603 24898 24899 3 24603 24899 24604 3 24604 24899 24900 3 24604 24900 24605 3 24605 24900 24901 3 24605 24901 24606 3 24606 24901 24902 3 24606 24902 24607 3 24607 24902 24903 3 24607 24903 24608 3 24608 24903 24904 3 24608 24904 24609 3 24609 24904 24905 3 24609 24905 24610 3 24610 24905 24906 3 24610 24906 24611 3 24611 24906 24907 3 24611 24907 24612 3 24612 24907 24613 3 24908 24613 24907 3 24613 24908 24614 3 24909 24614 24908 3 24614 24909 24615 3 24910 24615 24909 3 24615 24910 24616 3 24911 24616 24910 3 24616 24911 24617 3 24912 24617 24911 3 24617 24912 24618 3 24913 24618 24912 3 24618 24913 24619 3 24914 24619 24913 3 24619 24914 24620 3 24915 24620 24914 3 24620 24915 24621 3 24916 24621 24915 3 24621 24916 24622 3 24917 24622 24916 3 24622 24917 24623 3 24918 24623 24917 3 24623 24918 24624 3 24919 24624 24918 3 24624 24919 24625 3 24920 24625 24919 3 24625 24920 24626 3 24921 24626 24920 3 24626 24921 24627 3 24922 24627 24921 3 24627 24922 24628 3 24923 24628 24922 3 24628 24923 24629 3 24924 24629 24923 3 24629 24924 24630 3 24925 24630 24924 3 24630 24925 24631 3 24926 24631 24925 3 24631 24926 24632 3 24927 24632 24926 3 24632 24927 24633 3 24928 24633 24927 3 24633 24928 24634 3 24929 24634 24928 3 24634 24929 24635 3 24930 24635 24929 3 24635 24930 24636 3 24931 24636 24930 3 24636 24931 24637 3 24932 24637 24931 3 24637 24932 24638 3 24933 24638 24932 3 24638 24933 24639 3 24934 24639 24933 3 24639 24934 24640 3 24935 24640 24934 3 24640 24935 24641 3 24936 24641 24935 3 24641 24936 24642 3 24937 24642 24936 3 24642 24937 24643 3 24938 24643 24937 3 24643 24938 24644 3 24939 24644 24938 3 24644 24939 24645 3 24940 24645 24939 3 24645 24940 24646 3 24941 24646 24940 3 24646 24941 24647 3 24942 24647 24941 3 24647 
24942 24648 3 24943 24648 24942 3 24648 24943 24649 3 24944 24649 24943 3 24649 24944 24650 3 24945 24650 24944 3 24650 24945 24651 3 24946 24651 24945 3 24651 24946 24652 3 24947 24652 24946 3 24652 24947 24653 3 24948 24653 24947 3 24653 24948 24654 3 24949 24654 24948 3 24654 24949 24655 3 24950 24655 24949 3 24655 24950 24656 3 24951 24656 24950 3 24656 24661 24657 3 24662 24657 24661 3 24656 24951 24661 3 24957 24661 24951 3 24658 24952 24659 3 24953 24659 24952 3 24658 24665 24952 3 24960 24952 24665 3 24659 24953 24660 3 24954 24660 24953 3 24661 24957 24662 3 24958 24662 24957 3 24662 24958 24663 3 24959 24663 24958 3 24663 24959 24666 3 24963 24666 24959 3 24664 24668 24665 3 24669 24665 24668 3 24665 24669 24960 3 24965 24960 24669 3 24666 24670 24667 3 24671 24667 24670 3 24666 24963 24670 3 24966 24670 24963 3 24668 24964 24669 3 24965 24669 24964 3 24668 24672 24964 3 24968 24964 24672 3 24670 24966 24671 3 24967 24671 24966 3 24671 24967 24673 3 24971 24673 24967 3 24672 24674 24968 3 24972 24968 24674 3 24673 24971 24675 3 24973 24675 24971 3 24674 24676 24972 3 24974 24972 24676 3 24675 24973 24677 3 24975 24677 24973 3 24676 24678 24974 3 24976 24974 24678 3 24677 24975 24679 3 24977 24679 24975 3 24678 24680 24976 3 24978 24976 24680 3 24679 24977 24681 3 24979 24681 24977 3 24680 24682 24978 3 24980 24978 24682 3 24681 24979 24683 3 24981 24683 24979 3 24682 24684 24980 3 24982 24980 24684 3 24683 24981 24685 3 24983 24685 24981 3 24684 24686 24984 3 24684 24984 24982 3 24685 24983 24687 3 24985 24687 24983 3 24686 24688 24986 3 24686 24986 24984 3 24687 24985 24689 3 24987 24689 24985 3 24688 24690 24988 3 24688 24988 24986 3 24689 24987 24691 3 24989 24691 24987 3 24690 24692 24990 3 24690 24990 24988 3 24691 24989 24693 3 24991 24693 24989 3 24692 24694 24992 3 24692 24992 24990 3 24693 24991 24695 3 24993 24695 24991 3 24694 24696 24994 3 24694 24994 24992 3 24695 24993 24697 3 24995 24697 24993 3 24696 24698 24996 3 24696 24996 24994 3 24697 
24995 24699 3 24997 24699 24995 3 24698 24700 24998 3 24698 24998 24996 3 24699 24997 24701 3 24999 24701 24997 3 24700 24702 25000 3 24700 25000 24998 3 24701 24999 24703 3 25001 24703 24999 3 24702 24704 25002 3 24702 25002 25000 3 24703 25001 24705 3 25003 24705 25001 3 24704 24706 25004 3 24704 25004 25002 3 24705 25003 24707 3 25005 24707 25003 3 24706 24708 25006 3 24706 25006 25004 3 24707 25005 24709 3 25007 24709 25005 3 24708 24710 25008 3 24708 25008 25006 3 24709 25007 25009 3 24709 25009 24711 3 24710 24712 25010 3 24710 25010 25008 3 24711 25009 25011 3 24711 25011 24713 3 24712 24714 25012 3 24712 25012 25010 3 24713 25011 25013 3 24713 25013 24715 3 24714 24716 25014 3 24714 25014 25012 3 24715 25013 25015 3 24715 25015 24717 3 24716 24718 25016 3 24716 25016 25014 3 24717 25015 25017 3 24717 25017 24719 3 24718 24720 25018 3 24718 25018 25016 3 24719 25017 25019 3 24719 25019 24721 3 24720 24722 25020 3 24720 25020 25018 3 24721 25019 25021 3 24721 25021 24723 3 24722 24724 25022 3 24722 25022 25020 3 24723 25021 25023 3 24723 25023 24725 3 24724 24726 25024 3 24724 25024 25022 3 24725 25023 25025 3 24725 25025 24727 3 24726 24728 25026 3 24726 25026 25024 3 24727 25025 25027 3 24727 25027 24729 3 24728 24730 25028 3 24728 25028 25026 3 24729 25027 25029 3 24729 25029 24731 3 24730 24732 25030 3 24730 25030 25028 3 24731 25029 25031 3 24731 25031 24733 3 24732 24734 25032 3 24732 25032 25030 3 24733 25031 25033 3 24733 25033 24735 3 24734 24736 25034 3 24734 25034 25032 3 24735 25033 25035 3 24735 25035 24737 3 24736 24738 25036 3 24736 25036 25034 3 24737 25035 25037 3 24737 25037 24739 3 24738 24740 25038 3 24738 25038 25036 3 24739 25037 25039 3 24739 25039 24741 3 24740 24742 25040 3 24740 25040 25038 3 24741 25039 25041 3 24741 25041 24743 3 24742 24744 25042 3 24742 25042 25040 3 24743 25041 25043 3 24743 25043 24745 3 24744 24746 25044 3 24744 25044 25042 3 24745 25043 25045 3 24745 25045 24747 3 24746 24748 25046 3 24746 25046 25044 3 24747 
25045 25047 3 24747 25047 24749 3 24748 24750 25048 3 24748 25048 25046 3 24749 25047 25049 3 24749 25049 24751 3 24750 24752 25050 3 24750 25050 25048 3 24751 25049 25051 3 24751 25051 24753 3 24752 24754 25052 3 24752 25052 25050 3 24753 25051 25053 3 24753 25053 24755 3 24754 24756 25054 3 24754 25054 25052 3 24755 25053 25055 3 24755 25055 24757 3 24756 24758 25056 3 24756 25056 25054 3 24757 25055 25057 3 24757 25057 24759 3 24758 24760 25058 3 24758 25058 25056 3 24759 25057 25059 3 24759 25059 24761 3 24760 24762 25060 3 24760 25060 25058 3 24761 25059 25061 3 24761 25061 24763 3 24762 24764 25062 3 24762 25062 25060 3 24763 25061 25063 3 24763 25063 24765 3 24764 24766 25064 3 24764 25064 25062 3 24765 25063 25065 3 24765 25065 24767 3 24766 24768 25066 3 24766 25066 25064 3 24767 25065 25067 3 24767 25067 24769 3 24768 24770 25068 3 24768 25068 25066 3 24769 25067 25069 3 24769 25069 24771 3 24770 24772 25070 3 24770 25070 25068 3 24771 25069 25071 3 24771 25071 24773 3 24772 24774 25072 3 24772 25072 25070 3 24773 25071 25073 3 24773 25073 24775 3 24774 24776 25074 3 24774 25074 25072 3 24775 25073 25075 3 24775 25075 24777 3 24776 24778 25076 3 24776 25076 25074 3 24777 25075 25077 3 24777 25077 24779 3 24778 24780 25078 3 24778 25078 25076 3 24779 25077 25079 3 24779 25079 24781 3 24780 24782 25080 3 24780 25080 25078 3 24781 25079 25081 3 24781 25081 24783 3 24782 24784 25080 3 25082 25080 24784 3 24783 25081 25083 3 24783 25083 24785 3 24784 24786 25082 3 25084 25082 24786 3 24785 25083 25085 3 24785 25085 24787 3 24786 24788 25084 3 25086 25084 24788 3 24787 25085 25087 3 24787 25087 24789 3 24788 24790 25086 3 25088 25086 24790 3 24789 25087 25089 3 24789 25089 24791 3 24790 24792 25088 3 25090 25088 24792 3 24791 25089 25091 3 24791 25091 24793 3 24792 24794 25090 3 25092 25090 24794 3 24793 25091 25093 3 24793 25093 24795 3 24794 24796 25092 3 25094 25092 24796 3 24795 25093 25095 3 24795 25095 24797 3 24796 24798 25094 3 25096 25094 24798 3 24797 
25095 25097 3 24797 25097 24799 3 24798 24800 25096 3 25098 25096 24800 3 24799 25097 25101 3 24799 25101 24801 3 24800 24802 25098 3 25102 25098 24802 3 24801 25101 25105 3 24801 25105 24805 3 24802 24806 24803 3 24807 24803 24806 3 24802 24803 25102 3 25103 25102 24803 3 24803 24807 25103 3 25106 25103 24807 3 24804 24808 24805 3 24809 24805 24808 3 24804 24805 25104 3 25105 25104 24805 3 24804 25104 24808 3 25109 24808 25104 3 24807 24810 25106 3 25110 25106 24810 3 24808 25109 24815 3 25117 24815 25109 3 24810 24811 25110 3 25111 25110 24811 3 24811 24816 24812 3 24817 24812 24816 3 24811 24812 25111 3 25112 25111 24812 3 24812 24817 25112 3 25118 25112 24817 3 24813 24883 24814 3 24884 24814 24883 3 24813 24814 25115 3 25116 25115 24814 3 24813 25115 24883 3 25184 24883 25115 3 24814 24815 25116 3 25117 25116 24815 3 24817 24818 25118 3 25119 25118 24818 3 24818 24819 25119 3 25120 25119 24819 3 24819 24820 25120 3 25121 25120 24820 3 24820 24821 25121 3 25122 25121 24821 3 24821 24822 25122 3 25123 25122 24822 3 24822 24823 25123 3 25124 25123 24823 3 24823 24824 25124 3 25125 25124 24824 3 24824 24825 25125 3 25126 25125 24825 3 24825 24826 25126 3 25127 25126 24826 3 24826 24827 25127 3 25128 25127 24827 3 24827 24828 25128 3 25129 25128 24828 3 24828 24829 25129 3 25130 25129 24829 3 24829 24830 25130 3 25131 25130 24830 3 24830 24831 25131 3 25132 25131 24831 3 24831 24832 25132 3 25133 25132 24832 3 24832 24833 25133 3 25134 25133 24833 3 24833 24834 25134 3 25135 25134 24834 3 24834 24835 25135 3 25136 25135 24835 3 24835 24836 25136 3 25137 25136 24836 3 24836 24837 25137 3 25138 25137 24837 3 24837 24838 25138 3 25139 25138 24838 3 24838 24839 25139 3 25140 25139 24839 3 24839 24840 25140 3 25141 25140 24840 3 24840 24841 25141 3 25142 25141 24841 3 24841 24842 25142 3 25143 25142 24842 3 24842 24843 25143 3 25144 25143 24843 3 24843 24844 25144 3 25145 25144 24844 3 24844 24845 25145 3 25146 25145 24845 3 24845 24846 25146 3 25147 25146 24846 3 24846 
24847 25147 3 25148 25147 24847 3 24847 24848 25148 3 25149 25148 24848 3 24848 24849 25149 3 25150 25149 24849 3 24849 24850 25150 3 25151 25150 24850 3 24850 24851 25151 3 25152 25151 24851 3 24851 24852 25152 3 25153 25152 24852 3 24852 24853 25153 3 25154 25153 24853 3 24853 24854 25154 3 25155 25154 24854 3 24854 24855 25155 3 25156 25155 24855 3 24855 24856 25156 3 25157 25156 24856 3 24856 24857 25157 3 25158 25157 24857 3 24857 24858 25158 3 25159 25158 24858 3 24858 24859 25159 3 25160 25159 24859 3 24859 24860 25160 3 25161 25160 24860 3 24860 24861 25161 3 25162 25161 24861 3 24861 24862 25162 3 25163 25162 24862 3 24862 24863 25163 3 25164 25163 24863 3 24863 24864 25164 3 25165 25164 24864 3 24864 24865 25165 3 25166 25165 24865 3 24865 24866 25166 3 25167 25166 24866 3 24866 24867 25167 3 25168 25167 24867 3 24867 24868 25168 3 25169 25168 24868 3 24868 24869 25169 3 25170 25169 24869 3 24869 24870 25170 3 25171 25170 24870 3 24870 24871 25171 3 25172 25171 24871 3 24871 24872 25172 3 25173 25172 24872 3 24872 24873 25173 3 25174 25173 24873 3 24873 24874 25174 3 25175 25174 24874 3 24874 24875 25175 3 25176 25175 24875 3 24875 24876 25176 3 25177 25176 24876 3 24876 24877 25177 3 25178 25177 24877 3 24877 24878 25178 3 25179 25178 24878 3 24878 24879 25179 3 25180 25179 24879 3 24879 24880 25181 3 24879 25181 25180 3 24880 24881 25182 3 24880 25182 25181 3 24881 24882 25183 3 24881 25183 25182 3 24882 24883 25184 3 24882 25184 25183 3 24885 24954 24955 3 24885 24955 24886 3 24886 25185 24887 3 25186 24887 25185 3 24886 24955 25252 3 24886 25252 25185 3 24887 25186 24888 3 25187 24888 25186 3 24888 25187 24889 3 25188 24889 25187 3 24889 25188 24890 3 25189 24890 25188 3 24890 25189 24891 3 25190 24891 25189 3 24891 25190 24892 3 25191 24892 25190 3 24892 25191 24893 3 25192 24893 25191 3 24893 25192 24894 3 25193 24894 25192 3 24894 25193 24895 3 25194 24895 25193 3 24895 25194 24896 3 25195 24896 25194 3 24896 25195 24897 3 25196 24897 25195 3 24897 
25196 24898 3 25197 24898 25196 3 24898 25197 24899 3 25198 24899 25197 3 24899 25198 24900 3 25199 24900 25198 3 24900 25199 24901 3 25200 24901 25199 3 24901 25200 24902 3 25201 24902 25200 3 24902 25201 25202 3 24902 25202 24903 3 24903 25202 25203 3 24903 25203 24904 3 24904 25203 25204 3 24904 25204 24905 3 24905 25204 25205 3 24905 25205 24906 3 24906 25205 25206 3 24906 25206 24907 3 24907 25206 25207 3 24907 25207 24908 3 24908 25207 25208 3 24908 25208 24909 3 24909 25208 25209 3 24909 25209 24910 3 24910 25209 25210 3 24910 25210 24911 3 24911 25210 25211 3 24911 25211 24912 3 24912 25211 25212 3 24912 25212 24913 3 24913 25212 25213 3 24913 25213 24914 3 24914 25213 25214 3 24914 25214 24915 3 24915 25214 25215 3 24915 25215 24916 3 24916 25215 25216 3 24916 25216 24917 3 24917 25216 25217 3 24917 25217 24918 3 24918 25217 25218 3 24918 25218 24919 3 24919 25218 25219 3 24919 25219 24920 3 24920 25219 25220 3 24920 25220 24921 3 24921 25220 25221 3 24921 25221 24922 3 24922 25221 25222 3 24922 25222 24923 3 24923 25222 25223 3 24923 25223 24924 3 24924 25223 25224 3 24924 25224 24925 3 24925 25224 25225 3 24925 25225 24926 3 24926 25225 25226 3 24926 25226 24927 3 24927 25226 25227 3 24927 25227 24928 3 24928 25227 25228 3 24928 25228 24929 3 24929 25228 25229 3 24929 25229 24930 3 24930 25229 25230 3 24930 25230 24931 3 24931 25230 25231 3 24931 25231 24932 3 24932 25231 25232 3 24932 25232 24933 3 24933 25232 25233 3 24933 25233 24934 3 24934 25233 25234 3 24934 25234 24935 3 24935 25234 25235 3 24935 25235 24936 3 24936 25235 25236 3 24936 25236 24937 3 24937 25236 25237 3 24937 25237 24938 3 24938 25237 25238 3 24938 25238 24939 3 24939 25238 25239 3 24939 25239 24940 3 24940 25239 25240 3 24940 25240 24941 3 24941 25240 25241 3 24941 25241 24942 3 24942 25241 25242 3 24942 25242 24943 3 24943 25242 25243 3 24943 25243 24944 3 24944 25243 25244 3 24944 25244 24945 3 24945 25244 25245 3 24945 25245 24946 3 24946 25245 25246 3 24946 25246 24947 3 24947 
25246 25247 3 24947 25247 24948 3 24948 25247 25248 3 24948 25248 24949 3 24949 25248 25249 3 24949 25249 24950 3 24950 24956 24957 3 24950 24957 24951 3 24950 25249 25316 3 24950 25316 24956 3 24952 24960 24961 3 24952 24961 24953 3 24953 25250 25251 3 24953 25251 24954 3 24953 24961 25320 3 24953 25320 25250 3 24954 25251 25252 3 24954 25252 24955 3 24956 25316 25317 3 24956 25317 24957 3 24957 25317 25318 3 24957 25318 24958 3 24958 24962 24963 3 24958 24963 24959 3 24958 25318 25323 3 24958 25323 24962 3 24960 25319 25320 3 24960 25320 24961 3 24960 24965 25325 3 24960 25325 25319 3 24962 25323 25324 3 24962 25324 24963 3 24963 25324 25328 3 24963 25328 24966 3 24964 24968 24969 3 24964 24969 24965 3 24965 24969 25330 3 24965 25330 25325 3 24966 24970 24971 3 24966 24971 24967 3 24966 25328 24970 3 25331 24970 25328 3 24968 25329 24969 3 25330 24969 25329 3 24968 24972 25333 3 24968 25333 25329 3 24970 25331 24971 3 25332 24971 25331 3 24971 25332 24973 3 25336 24973 25332 3 24972 24974 25337 3 24972 25337 25333 3 24973 25336 24975 3 25340 24975 25336 3 24974 24976 25341 3 24974 25341 25337 3 24975 25340 24977 3 25344 24977 25340 3 24976 24978 25341 3 25345 25341 24978 3 24977 25344 24979 3 25348 24979 25344 3 24978 24980 25345 3 25349 25345 24980 3 24979 25348 24981 3 25352 24981 25348 3 24980 24982 25349 3 25353 25349 24982 3 24981 25352 24983 3 25356 24983 25352 3 24982 24984 25353 3 25357 25353 24984 3 24983 25356 24985 3 25360 24985 25356 3 24984 24986 25357 3 25361 25357 24986 3 24985 25360 24987 3 25364 24987 25360 3 24986 24988 25361 3 25365 25361 24988 3 24987 25364 24989 3 25368 24989 25364 3 24988 24990 25365 3 25369 25365 24990 3 24989 25368 24991 3 25372 24991 25368 3 24990 24992 25369 3 25373 25369 24992 3 24991 25372 24993 3 25376 24993 25372 3 24992 24994 25373 3 25377 25373 24994 3 24993 25376 24995 3 25380 24995 25376 3 24994 24996 25377 3 25381 25377 24996 3 24995 25380 24997 3 25384 24997 25380 3 24996 24998 25381 3 25385 25381 24998 3 24997 
25384 24999 3 25388 24999 25384 3 24998 25000 25385 3 25389 25385 25000 3 24999 25388 25001 3 25392 25001 25388 3 25000 25002 25389 3 25393 25389 25002 3 25001 25392 25003 3 25396 25003 25392 3 25002 25004 25393 3 25397 25393 25004 3 25003 25396 25005 3 25400 25005 25396 3 25004 25006 25397 3 25401 25397 25006 3 25005 25400 25007 3 25404 25007 25400 3 25006 25008 25401 3 25405 25401 25008 3 25007 25404 25009 3 25408 25009 25404 3 25008 25010 25405 3 25409 25405 25010 3 25009 25408 25011 3 25412 25011 25408 3 25010 25012 25409 3 25413 25409 25012 3 25011 25412 25013 3 25416 25013 25412 3 25012 25014 25413 3 25417 25413 25014 3 25013 25416 25015 3 25420 25015 25416 3 25014 25016 25417 3 25421 25417 25016 3 25015 25420 25017 3 25424 25017 25420 3 25016 25018 25421 3 25425 25421 25018 3 25017 25424 25019 3 25428 25019 25424 3 25018 25020 25425 3 25429 25425 25020 3 25019 25428 25021 3 25432 25021 25428 3 25020 25022 25429 3 25433 25429 25022 3 25021 25432 25023 3 25436 25023 25432 3 25022 25024 25433 3 25437 25433 25024 3 25023 25436 25025 3 25440 25025 25436 3 25024 25026 25437 3 25441 25437 25026 3 25025 25440 25027 3 25444 25027 25440 3 25026 25028 25441 3 25445 25441 25028 3 25027 25444 25029 3 25448 25029 25444 3 25028 25030 25445 3 25449 25445 25030 3 25029 25448 25452 3 25029 25452 25031 3 25030 25032 25449 3 25453 25449 25032 3 25031 25452 25456 3 25031 25456 25033 3 25032 25034 25453 3 25457 25453 25034 3 25033 25456 25460 3 25033 25460 25035 3 25034 25036 25457 3 25461 25457 25036 3 25035 25460 25464 3 25035 25464 25037 3 25036 25038 25461 3 25465 25461 25038 3 25037 25464 25468 3 25037 25468 25039 3 25038 25040 25465 3 25469 25465 25040 3 25039 25468 25472 3 25039 25472 25041 3 25040 25042 25469 3 25473 25469 25042 3 25041 25472 25476 3 25041 25476 25043 3 25042 25044 25473 3 25477 25473 25044 3 25043 25476 25480 3 25043 25480 25045 3 25044 25046 25477 3 25481 25477 25046 3 25045 25480 25484 3 25045 25484 25047 3 25046 25048 25481 3 25485 25481 25048 3 25047 
25484 25488 3 25047 25488 25049 3 25048 25050 25485 3 25489 25485 25050 3 25049 25488 25492 3 25049 25492 25051 3 25050 25052 25489 3 25493 25489 25052 3 25051 25492 25496 3 25051 25496 25053 3 25052 25054 25493 3 25497 25493 25054 3 25053 25496 25500 3 25053 25500 25055 3 25054 25056 25497 3 25501 25497 25056 3 25055 25500 25504 3 25055 25504 25057 3 25056 25058 25501 3 25505 25501 25058 3 25057 25504 25508 3 25057 25508 25059 3 25058 25060 25505 3 25509 25505 25060 3 25059 25508 25512 3 25059 25512 25061 3 25060 25062 25509 3 25513 25509 25062 3 25061 25512 25516 3 25061 25516 25063 3 25062 25064 25513 3 25517 25513 25064 3 25063 25516 25520 3 25063 25520 25065 3 25064 25066 25517 3 25521 25517 25066 3 25065 25520 25524 3 25065 25524 25067 3 25066 25068 25521 3 25525 25521 25068 3 25067 25524 25528 3 25067 25528 25069 3 25068 25070 25525 3 25529 25525 25070 3 25069 25528 25532 3 25069 25532 25071 3 25070 25072 25529 3 25533 25529 25072 3 25071 25532 25536 3 25071 25536 25073 3 25072 25074 25537 3 25072 25537 25533 3 25073 25536 25540 3 25073 25540 25075 3 25074 25076 25541 3 25074 25541 25537 3 25075 25540 25544 3 25075 25544 25077 3 25076 25078 25545 3 25076 25545 25541 3 25077 25544 25548 3 25077 25548 25079 3 25078 25080 25549 3 25078 25549 25545 3 25079 25548 25552 3 25079 25552 25081 3 25080 25082 25553 3 25080 25553 25549 3 25081 25552 25556 3 25081 25556 25083 3 25082 25084 25557 3 25082 25557 25553 3 25083 25556 25560 3 25083 25560 25085 3 25084 25086 25561 3 25084 25561 25557 3 25085 25560 25564 3 25085 25564 25087 3 25086 25088 25565 3 25086 25565 25561 3 25087 25564 25568 3 25087 25568 25089 3 25088 25090 25569 3 25088 25569 25565 3 25089 25568 25572 3 25089 25572 25091 3 25090 25092 25573 3 25090 25573 25569 3 25091 25572 25093 3 25576 25093 25572 3 25092 25094 25577 3 25092 25577 25573 3 25093 25576 25095 3 25580 25095 25576 3 25094 25096 25581 3 25094 25581 25577 3 25095 25580 25097 3 25584 25097 25580 3 25096 25098 25585 3 25096 25585 25581 3 25097 
25584 25101 3 25588 25101 25584 3 25098 25102 25103 3 25098 25103 25099 3 25098 25099 25586 3 25098 25586 25585 3 25099 25103 25589 3 25099 25589 25586 3 25100 25104 25105 3 25100 25105 25101 3 25100 25101 25588 3 25100 25588 25587 3 25100 25587 25104 3 25592 25104 25587 3 25103 25106 25593 3 25103 25593 25589 3 25104 25592 25109 3 25598 25109 25592 3 25106 25110 25111 3 25106 25111 25107 3 25106 25107 25594 3 25106 25594 25593 3 25107 25111 25599 3 25107 25599 25594 3 25108 25116 25117 3 25108 25117 25109 3 25108 25109 25598 3 25108 25598 25597 3 25108 25597 25116 3 25667 25116 25597 3 25111 25112 25600 3 25111 25600 25599 3 25112 25118 25119 3 25112 25119 25113 3 25112 25113 25601 3 25112 25601 25600 3 25113 25119 25668 3 25113 25668 25601 3 25114 25183 25184 3 25114 25184 25115 3 25114 25115 25666 3 25114 25666 25665 3 25114 25665 25183 3 25732 25183 25665 3 25115 25116 25667 3 25115 25667 25666 3 25119 25120 25669 3 25119 25669 25668 3 25120 25121 25670 3 25120 25670 25669 3 25121 25122 25671 3 25121 25671 25670 3 25122 25123 25672 3 25122 25672 25671 3 25123 25124 25673 3 25123 25673 25672 3 25124 25125 25674 3 25124 25674 25673 3 25125 25126 25675 3 25125 25675 25674 3 25126 25127 25676 3 25126 25676 25675 3 25127 25128 25677 3 25127 25677 25676 3 25128 25129 25678 3 25128 25678 25677 3 25129 25130 25679 3 25129 25679 25678 3 25130 25131 25680 3 25130 25680 25679 3 25131 25132 25681 3 25131 25681 25680 3 25132 25133 25682 3 25132 25682 25681 3 25133 25134 25683 3 25133 25683 25682 3 25134 25135 25684 3 25134 25684 25683 3 25135 25136 25685 3 25135 25685 25684 3 25136 25137 25686 3 25136 25686 25685 3 25137 25138 25687 3 25137 25687 25686 3 25138 25139 25688 3 25138 25688 25687 3 25139 25140 25689 3 25139 25689 25688 3 25140 25141 25690 3 25140 25690 25689 3 25141 25142 25691 3 25141 25691 25690 3 25142 25143 25692 3 25142 25692 25691 3 25143 25144 25693 3 25143 25693 25692 3 25144 25145 25694 3 25144 25694 25693 3 25145 25146 25695 3 25145 25695 25694 3 25146 
25147 25696 3 25146 25696 25695 3 25147 25148 25697 3 25147 25697 25696 3 25148 25149 25698 3 25148 25698 25697 3 25149 25150 25699 3 25149 25699 25698 3 25150 25151 25700 3 25150 25700 25699 3 25151 25152 25701 3 25151 25701 25700 3 25152 25153 25702 3 25152 25702 25701 3 25153 25154 25703 3 25153 25703 25702 3 25154 25155 25704 3 25154 25704 25703 3 25155 25156 25705 3 25155 25705 25704 3 25156 25157 25706 3 25156 25706 25705 3 25157 25158 25707 3 25157 25707 25706 3 25158 25159 25708 3 25158 25708 25707 3 25159 25160 25709 3 25159 25709 25708 3 25160 25161 25710 3 25160 25710 25709 3 25161 25162 25711 3 25161 25711 25710 3 25162 25163 25712 3 25162 25712 25711 3 25163 25164 25713 3 25163 25713 25712 3 25164 25165 25714 3 25164 25714 25713 3 25165 25166 25715 3 25165 25715 25714 3 25166 25167 25716 3 25166 25716 25715 3 25167 25168 25717 3 25167 25717 25716 3 25168 25169 25718 3 25168 25718 25717 3 25169 25170 25719 3 25169 25719 25718 3 25170 25171 25720 3 25170 25720 25719 3 25171 25172 25720 3 25721 25720 25172 3 25172 25173 25721 3 25722 25721 25173 3 25173 25174 25722 3 25723 25722 25174 3 25174 25175 25723 3 25724 25723 25175 3 25175 25176 25724 3 25725 25724 25176 3 25176 25177 25725 3 25726 25725 25177 3 25177 25178 25726 3 25727 25726 25178 3 25178 25179 25727 3 25728 25727 25179 3 25179 25180 25728 3 25729 25728 25180 3 25180 25181 25729 3 25730 25729 25181 3 25181 25182 25730 3 25731 25730 25182 3 25182 25183 25731 3 25732 25731 25183 3 25185 25252 25186 3 25253 25186 25252 3 25186 25253 25187 3 25254 25187 25253 3 25187 25254 25188 3 25255 25188 25254 3 25188 25255 25189 3 25256 25189 25255 3 25189 25256 25190 3 25257 25190 25256 3 25190 25257 25191 3 25258 25191 25257 3 25191 25258 25192 3 25259 25192 25258 3 25192 25259 25193 3 25260 25193 25259 3 25193 25260 25194 3 25261 25194 25260 3 25194 25261 25195 3 25262 25195 25261 3 25195 25262 25196 3 25263 25196 25262 3 25196 25263 25197 3 25264 25197 25263 3 25197 25264 25198 3 25265 25198 25264 3 25198 
25265 25199 3 25266 25199 25265 3 25199 25266 25200 3 25267 25200 25266 3 25200 25267 25201 3 25268 25201 25267 3 25201 25268 25202 3 25269 25202 25268 3 25202 25269 25203 3 25270 25203 25269 3 25203 25270 25204 3 25271 25204 25270 3 25204 25271 25205 3 25272 25205 25271 3 25205 25272 25206 3 25273 25206 25272 3 25206 25273 25207 3 25274 25207 25273 3 25207 25274 25208 3 25275 25208 25274 3 25208 25275 25209 3 25276 25209 25275 3 25209 25276 25210 3 25277 25210 25276 3 25210 25277 25211 3 25278 25211 25277 3 25211 25278 25212 3 25279 25212 25278 3 25212 25279 25213 3 25280 25213 25279 3 25213 25280 25214 3 25281 25214 25280 3 25214 25281 25215 3 25282 25215 25281 3 25215 25282 25216 3 25283 25216 25282 3 25216 25283 25217 3 25284 25217 25283 3 25217 25284 25218 3 25285 25218 25284 3 25218 25285 25219 3 25286 25219 25285 3 25219 25286 25220 3 25287 25220 25286 3 25220 25287 25221 3 25288 25221 25287 3 25221 25288 25222 3 25289 25222 25288 3 25222 25289 25223 3 25290 25223 25289 3 25223 25290 25224 3 25291 25224 25290 3 25224 25291 25225 3 25292 25225 25291 3 25225 25292 25226 3 25293 25226 25292 3 25226 25293 25227 3 25294 25227 25293 3 25227 25294 25228 3 25295 25228 25294 3 25228 25295 25229 3 25296 25229 25295 3 25229 25296 25230 3 25297 25230 25296 3 25230 25297 25298 3 25230 25298 25231 3 25231 25298 25299 3 25231 25299 25232 3 25232 25299 25300 3 25232 25300 25233 3 25233 25300 25301 3 25233 25301 25234 3 25234 25301 25302 3 25234 25302 25235 3 25235 25302 25303 3 25235 25303 25236 3 25236 25303 25304 3 25236 25304 25237 3 25237 25304 25305 3 25237 25305 25238 3 25238 25305 25306 3 25238 25306 25239 3 25239 25306 25307 3 25239 25307 25240 3 25240 25307 25308 3 25240 25308 25241 3 25241 25308 25309 3 25241 25309 25242 3 25242 25309 25310 3 25242 25310 25243 3 25243 25310 25311 3 25243 25311 25244 3 25244 25311 25312 3 25244 25312 25245 3 25245 25312 25313 3 25245 25313 25246 3 25246 25313 25314 3 25246 25314 25247 3 25247 25314 25315 3 25247 25315 25248 3 25248 
25315 25316 3 25248 25316 25249 3 25250 25320 25321 3 25250 25321 25251 3 25251 25733 25734 3 25251 25734 25252 3 25251 25321 25801 3 25251 25801 25733 3 25252 25734 25735 3 25252 25735 25253 3 25253 25735 25736 3 25253 25736 25254 3 25254 25736 25737 3 25254 25737 25255 3 25255 25737 25738 3 25255 25738 25256 3 25256 25738 25739 3 25256 25739 25257 3 25257 25739 25740 3 25257 25740 25258 3 25258 25740 25741 3 25258 25741 25259 3 25259 25741 25742 3 25259 25742 25260 3 25260 25742 25743 3 25260 25743 25261 3 25261 25743 25744 3 25261 25744 25262 3 25262 25744 25745 3 25262 25745 25263 3 25263 25745 25746 3 25263 25746 25264 3 25264 25746 25747 3 25264 25747 25265 3 25265 25747 25748 3 25265 25748 25266 3 25266 25748 25749 3 25266 25749 25267 3 25267 25749 25750 3 25267 25750 25268 3 25268 25750 25751 3 25268 25751 25269 3 25269 25751 25752 3 25269 25752 25270 3 25270 25752 25753 3 25270 25753 25271 3 25271 25753 25754 3 25271 25754 25272 3 25272 25754 25755 3 25272 25755 25273 3 25273 25755 25756 3 25273 25756 25274 3 25274 25756 25757 3 25274 25757 25275 3 25275 25757 25758 3 25275 25758 25276 3 25276 25758 25759 3 25276 25759 25277 3 25277 25759 25760 3 25277 25760 25278 3 25278 25760 25761 3 25278 25761 25279 3 25279 25761 25762 3 25279 25762 25280 3 25280 25762 25763 3 25280 25763 25281 3 25281 25763 25764 3 25281 25764 25282 3 25282 25764 25765 3 25282 25765 25283 3 25283 25765 25766 3 25283 25766 25284 3 25284 25766 25767 3 25284 25767 25285 3 25285 25767 25768 3 25285 25768 25286 3 25286 25768 25287 3 25769 25287 25768 3 25287 25769 25288 3 25770 25288 25769 3 25288 25770 25289 3 25771 25289 25770 3 25289 25771 25290 3 25772 25290 25771 3 25290 25772 25291 3 25773 25291 25772 3 25291 25773 25292 3 25774 25292 25773 3 25292 25774 25293 3 25775 25293 25774 3 25293 25775 25294 3 25776 25294 25775 3 25294 25776 25295 3 25777 25295 25776 3 25295 25777 25296 3 25778 25296 25777 3 25296 25778 25297 3 25779 25297 25778 3 25297 25779 25298 3 25780 25298 25779 3 25298 
25780 25299 3 25781 25299 25780 3 25299 25781 25300 3 25782 25300 25781 3 25300 25782 25301 3 25783 25301 25782 3 25301 25783 25302 3 25784 25302 25783 3 25302 25784 25303 3 25785 25303 25784 3 25303 25785 25304 3 25786 25304 25785 3 25304 25786 25305 3 25787 25305 25786 3 25305 25787 25306 3 25788 25306 25787 3 25306 25788 25307 3 25789 25307 25788 3 25307 25789 25308 3 25790 25308 25789 3 25308 25790 25309 3 25791 25309 25790 3 25309 25791 25310 3 25792 25310 25791 3 25310 25792 25311 3 25793 25311 25792 3 25311 25793 25312 3 25794 25312 25793 3 25312 25794 25313 3 25795 25313 25794 3 25313 25795 25314 3 25796 25314 25795 3 25314 25796 25315 3 25797 25315 25796 3 25315 25797 25316 3 25798 25316 25797 3 25316 25798 25317 3 25799 25317 25798 3 25317 25322 25323 3 25317 25323 25318 3 25317 25799 25322 3 25867 25322 25799 3 25319 25325 25326 3 25319 25326 25320 3 25320 25800 25321 3 25801 25321 25800 3 25320 25326 25870 3 25320 25870 25800 3 25322 25867 25323 3 25868 25323 25867 3 25323 25327 25328 3 25323 25328 25324 3 25323 25868 25327 3 25875 25327 25868 3 25325 25869 25326 3 25870 25326 25869 3 25325 25330 25877 3 25325 25877 25869 3 25327 25875 25328 3 25876 25328 25875 3 25328 25876 25331 3 25882 25331 25876 3 25329 25333 25334 3 25329 25334 25330 3 25330 25334 25883 3 25330 25883 25877 3 25331 25335 25336 3 25331 25336 25332 3 25331 25882 25335 3 25886 25335 25882 3 25333 25337 25338 3 25333 25338 25334 3 25334 25338 25887 3 25334 25887 25883 3 25335 25339 25340 3 25335 25340 25336 3 25335 25886 25339 3 25890 25339 25886 3 25337 25341 25342 3 25337 25342 25338 3 25338 25342 25891 3 25338 25891 25887 3 25339 25343 25344 3 25339 25344 25340 3 25339 25890 25343 3 25894 25343 25890 3 25341 25345 25346 3 25341 25346 25342 3 25342 25346 25895 3 25342 25895 25891 3 25343 25347 25348 3 25343 25348 25344 3 25343 25894 25347 3 25898 25347 25894 3 25345 25349 25350 3 25345 25350 25346 3 25346 25350 25899 3 25346 25899 25895 3 25347 25351 25352 3 25347 25352 25348 3 25347 
25898 25902 3 25347 25902 25351 3 25349 25353 25354 3 25349 25354 25350 3 25350 25354 25903 3 25350 25903 25899 3 25351 25355 25356 3 25351 25356 25352 3 25351 25902 25906 3 25351 25906 25355 3 25353 25357 25358 3 25353 25358 25354 3 25354 25358 25907 3 25354 25907 25903 3 25355 25359 25360 3 25355 25360 25356 3 25355 25906 25910 3 25355 25910 25359 3 25357 25361 25362 3 25357 25362 25358 3 25358 25362 25911 3 25358 25911 25907 3 25359 25363 25364 3 25359 25364 25360 3 25359 25910 25914 3 25359 25914 25363 3 25361 25365 25366 3 25361 25366 25362 3 25362 25366 25915 3 25362 25915 25911 3 25363 25367 25368 3 25363 25368 25364 3 25363 25914 25918 3 25363 25918 25367 3 25365 25369 25366 3 25370 25366 25369 3 25366 25370 25915 3 25919 25915 25370 3 25367 25371 25368 3 25372 25368 25371 3 25367 25918 25922 3 25367 25922 25371 3 25369 25373 25370 3 25374 25370 25373 3 25370 25374 25919 3 25923 25919 25374 3 25371 25375 25372 3 25376 25372 25375 3 25371 25922 25926 3 25371 25926 25375 3 25373 25377 25374 3 25378 25374 25377 3 25374 25378 25923 3 25927 25923 25378 3 25375 25379 25376 3 25380 25376 25379 3 25375 25926 25930 3 25375 25930 25379 3 25377 25381 25378 3 25382 25378 25381 3 25378 25382 25927 3 25931 25927 25382 3 25379 25383 25380 3 25384 25380 25383 3 25379 25930 25934 3 25379 25934 25383 3 25381 25385 25382 3 25386 25382 25385 3 25382 25386 25931 3 25935 25931 25386 3 25383 25387 25384 3 25388 25384 25387 3 25383 25934 25938 3 25383 25938 25387 3 25385 25389 25386 3 25390 25386 25389 3 25386 25390 25935 3 25939 25935 25390 3 25387 25391 25388 3 25392 25388 25391 3 25387 25938 25942 3 25387 25942 25391 3 25389 25393 25390 3 25394 25390 25393 3 25390 25394 25939 3 25943 25939 25394 3 25391 25395 25392 3 25396 25392 25395 3 25391 25942 25946 3 25391 25946 25395 3 25393 25397 25394 3 25398 25394 25397 3 25394 25398 25943 3 25947 25943 25398 3 25395 25399 25396 3 25400 25396 25399 3 25395 25946 25950 3 25395 25950 25399 3 25397 25401 25398 3 25402 25398 25401 3 25398 
25402 25947 3 25951 25947 25402 3 25399 25403 25400 3 25404 25400 25403 3 25399 25950 25954 3 25399 25954 25403 3 25401 25405 25402 3 25406 25402 25405 3 25402 25406 25951 3 25955 25951 25406 3 25403 25407 25404 3 25408 25404 25407 3 25403 25954 25958 3 25403 25958 25407 3 25405 25409 25406 3 25410 25406 25409 3 25406 25410 25955 3 25959 25955 25410 3 25407 25411 25408 3 25412 25408 25411 3 25407 25958 25962 3 25407 25962 25411 3 25409 25413 25410 3 25414 25410 25413 3 25410 25414 25959 3 25963 25959 25414 3 25411 25415 25412 3 25416 25412 25415 3 25411 25962 25966 3 25411 25966 25415 3 25413 25417 25414 3 25418 25414 25417 3 25414 25418 25963 3 25967 25963 25418 3 25415 25419 25416 3 25420 25416 25419 3 25415 25966 25970 3 25415 25970 25419 3 25417 25421 25418 3 25422 25418 25421 3 25418 25422 25967 3 25971 25967 25422 3 25419 25423 25420 3 25424 25420 25423 3 25419 25970 25974 3 25419 25974 25423 3 25421 25425 25422 3 25426 25422 25425 3 25422 25426 25971 3 25975 25971 25426 3 25423 25427 25424 3 25428 25424 25427 3 25423 25974 25978 3 25423 25978 25427 3 25425 25429 25426 3 25430 25426 25429 3 25426 25430 25975 3 25979 25975 25430 3 25427 25431 25428 3 25432 25428 25431 3 25427 25978 25982 3 25427 25982 25431 3 25429 25433 25430 3 25434 25430 25433 3 25430 25434 25979 3 25983 25979 25434 3 25431 25435 25432 3 25436 25432 25435 3 25431 25982 25986 3 25431 25986 25435 3 25433 25437 25434 3 25438 25434 25437 3 25434 25438 25983 3 25987 25983 25438 3 25435 25439 25436 3 25440 25436 25439 3 25435 25986 25990 3 25435 25990 25439 3 25437 25441 25438 3 25442 25438 25441 3 25438 25442 25987 3 25991 25987 25442 3 25439 25443 25440 3 25444 25440 25443 3 25439 25990 25994 3 25439 25994 25443 3 25441 25445 25442 3 25446 25442 25445 3 25442 25446 25991 3 25995 25991 25446 3 25443 25447 25444 3 25448 25444 25447 3 25443 25994 25998 3 25443 25998 25447 3 25445 25449 25446 3 25450 25446 25449 3 25446 25450 25995 3 25999 25995 25450 3 25447 25451 25448 3 25452 25448 25451 3 25447 
25998 25451 3 26002 25451 25998 3 25449 25453 25450 3 25454 25450 25453 3 25450 25454 25999 3 26003 25999 25454 3 25451 25455 25452 3 25456 25452 25455 3 25451 26002 25455 3 26006 25455 26002 3 25453 25457 25454 3 25458 25454 25457 3 25454 25458 26003 3 26007 26003 25458 3 25455 25459 25456 3 25460 25456 25459 3 25455 26006 25459 3 26010 25459 26006 3 25457 25461 25458 3 25462 25458 25461 3 25458 25462 26007 3 26011 26007 25462 3 25459 25463 25460 3 25464 25460 25463 3 25459 26010 25463 3 26014 25463 26010 3 25461 25465 25462 3 25466 25462 25465 3 25462 25466 26011 3 26015 26011 25466 3 25463 25467 25468 3 25463 25468 25464 3 25463 26014 25467 3 26018 25467 26014 3 25465 25469 25470 3 25465 25470 25466 3 25466 25470 26019 3 25466 26019 26015 3 25467 25471 25472 3 25467 25472 25468 3 25467 26018 25471 3 26022 25471 26018 3 25469 25473 25474 3 25469 25474 25470 3 25470 25474 26023 3 25470 26023 26019 3 25471 25475 25476 3 25471 25476 25472 3 25471 26022 25475 3 26026 25475 26022 3 25473 25477 25478 3 25473 25478 25474 3 25474 25478 26027 3 25474 26027 26023 3 25475 25479 25480 3 25475 25480 25476 3 25475 26026 25479 3 26030 25479 26026 3 25477 25481 25482 3 25477 25482 25478 3 25478 25482 26031 3 25478 26031 26027 3 25479 25483 25484 3 25479 25484 25480 3 25479 26030 25483 3 26034 25483 26030 3 25481 25485 25486 3 25481 25486 25482 3 25482 25486 26035 3 25482 26035 26031 3 25483 25487 25488 3 25483 25488 25484 3 25483 26034 25487 3 26038 25487 26034 3 25485 25489 25490 3 25485 25490 25486 3 25486 25490 26039 3 25486 26039 26035 3 25487 25491 25492 3 25487 25492 25488 3 25487 26038 25491 3 26042 25491 26038 3 25489 25493 25494 3 25489 25494 25490 3 25490 25494 26043 3 25490 26043 26039 3 25491 25495 25496 3 25491 25496 25492 3 25491 26042 25495 3 26046 25495 26042 3 25493 25497 25498 3 25493 25498 25494 3 25494 25498 26047 3 25494 26047 26043 3 25495 25499 25500 3 25495 25500 25496 3 25495 26046 25499 3 26050 25499 26046 3 25497 25501 25502 3 25497 25502 25498 3 25498 
25502 26051 3 25498 26051 26047 3 25499 25503 25504 3 25499 25504 25500 3 25499 26050 25503 3 26054 25503 26050 3 25501 25505 25506 3 25501 25506 25502 3 25502 25506 26055 3 25502 26055 26051 3 25503 25507 25508 3 25503 25508 25504 3 25503 26054 25507 3 26058 25507 26054 3 25505 25509 25510 3 25505 25510 25506 3 25506 25510 26059 3 25506 26059 26055 3 25507 25511 25512 3 25507 25512 25508 3 25507 26058 25511 3 26062 25511 26058 3 25509 25513 25514 3 25509 25514 25510 3 25510 25514 26063 3 25510 26063 26059 3 25511 25515 25516 3 25511 25516 25512 3 25511 26062 25515 3 26066 25515 26062 3 25513 25517 25518 3 25513 25518 25514 3 25514 25518 26067 3 25514 26067 26063 3 25515 25519 25520 3 25515 25520 25516 3 25515 26066 25519 3 26070 25519 26066 3 25517 25521 25522 3 25517 25522 25518 3 25518 25522 26071 3 25518 26071 26067 3 25519 25523 25524 3 25519 25524 25520 3 25519 26070 25523 3 26074 25523 26070 3 25521 25525 25526 3 25521 25526 25522 3 25522 25526 26075 3 25522 26075 26071 3 25523 25527 25528 3 25523 25528 25524 3 25523 26074 25527 3 26078 25527 26074 3 25525 25529 25530 3 25525 25530 25526 3 25526 25530 26079 3 25526 26079 26075 3 25527 25531 25532 3 25527 25532 25528 3 25527 26078 25531 3 26082 25531 26078 3 25529 25533 25534 3 25529 25534 25530 3 25530 25534 26083 3 25530 26083 26079 3 25531 25535 25536 3 25531 25536 25532 3 25531 26082 25535 3 26086 25535 26082 3 25533 25537 25538 3 25533 25538 25534 3 25534 25538 26087 3 25534 26087 26083 3 25535 25539 25540 3 25535 25540 25536 3 25535 26086 25539 3 26090 25539 26086 3 25537 25541 25542 3 25537 25542 25538 3 25538 25542 26091 3 25538 26091 26087 3 25539 25543 25544 3 25539 25544 25540 3 25539 26090 25543 3 26094 25543 26090 3 25541 25545 25546 3 25541 25546 25542 3 25542 25546 26095 3 25542 26095 26091 3 25543 25547 25548 3 25543 25548 25544 3 25543 26094 26098 3 25543 26098 25547 3 25545 25549 25550 3 25545 25550 25546 3 25546 25550 26099 3 25546 26099 26095 3 25547 25551 25552 3 25547 25552 25548 3 25547 
26098 26102 3 25547 26102 25551 3 25549 25553 25554 3 25549 25554 25550 3 25550 25554 26103 3 25550 26103 26099 3 25551 25555 25556 3 25551 25556 25552 3 25551 26102 26106 3 25551 26106 25555 3 25553 25557 25558 3 25553 25558 25554 3 25554 25558 26107 3 25554 26107 26103 3 25555 25559 25560 3 25555 25560 25556 3 25555 26106 26110 3 25555 26110 25559 3 25557 25561 25562 3 25557 25562 25558 3 25558 25562 26111 3 25558 26111 26107 3 25559 25563 25564 3 25559 25564 25560 3 25559 26110 26114 3 25559 26114 25563 3 25561 25565 25562 3 25566 25562 25565 3 25562 25566 26111 3 26115 26111 25566 3 25563 25567 25564 3 25568 25564 25567 3 25563 26114 26118 3 25563 26118 25567 3 25565 25569 25566 3 25570 25566 25569 3 25566 25570 26115 3 26119 26115 25570 3 25567 25571 25568 3 25572 25568 25571 3 25567 26118 26122 3 25567 26122 25571 3 25569 25573 25570 3 25574 25570 25573 3 25570 25574 26119 3 26123 26119 25574 3 25571 25575 25572 3 25576 25572 25575 3 25571 26122 26126 3 25571 26126 25575 3 25573 25577 25574 3 25578 25574 25577 3 25574 25578 26123 3 26127 26123 25578 3 25575 25579 25576 3 25580 25576 25579 3 25575 26126 26130 3 25575 26130 25579 3 25577 25581 25578 3 25582 25578 25581 3 25578 25582 26127 3 26131 26127 25582 3 25579 25583 25580 3 25584 25580 25583 3 25579 26130 26134 3 25579 26134 25583 3 25581 25585 25582 3 25586 25582 25585 3 25582 25586 26131 3 26135 26131 25586 3 25583 25587 25584 3 25588 25584 25587 3 25583 26134 26140 3 25583 26140 25587 3 25586 25589 26135 3 26141 26135 25589 3 25587 26140 26148 3 25587 26148 25592 3 25589 25593 25590 3 25594 25590 25593 3 25589 25590 26141 3 26142 26141 25590 3 25590 25594 26142 3 26149 26142 25594 3 25591 25597 25592 3 25598 25592 25597 3 25591 25592 26147 3 26148 26147 25592 3 25591 26147 26217 3 25591 26217 25597 3 25594 25599 25595 3 25600 25595 25599 3 25594 25595 26149 3 26150 26149 25595 3 25595 25600 26150 3 26218 26150 25600 3 25596 25666 25597 3 25667 25597 25666 3 25596 25597 26216 3 26217 26216 25597 3 25596 
26216 26284 3 25596 26284 25666 3 25600 25601 26218 3 26219 26218 25601 3 25601 25668 25602 3 25669 25602 25668 3 25601 25602 26219 3 26220 26219 25602 3 25602 25669 25603 3 25670 25603 25669 3 25602 25603 26220 3 26221 26220 25603 3 25603 25670 25604 3 25671 25604 25670 3 25603 25604 26221 3 26222 26221 25604 3 25604 25671 25605 3 25672 25605 25671 3 25604 25605 26222 3 26223 26222 25605 3 25605 25672 25606 3 25673 25606 25672 3 25605 25606 26223 3 26224 26223 25606 3 25606 25673 25607 3 25674 25607 25673 3 25606 25607 26224 3 26225 26224 25607 3 25607 25674 25608 3 25675 25608 25674 3 25607 25608 26225 3 26226 26225 25608 3 25608 25675 25609 3 25676 25609 25675 3 25608 25609 26226 3 26227 26226 25609 3 25609 25676 25610 3 25677 25610 25676 3 25609 25610 26227 3 26228 26227 25610 3 25610 25677 25611 3 25678 25611 25677 3 25610 25611 26228 3 26229 26228 25611 3 25611 25678 25612 3 25679 25612 25678 3 25611 25612 26229 3 26230 26229 25612 3 25612 25679 25613 3 25680 25613 25679 3 25612 25613 26230 3 26231 26230 25613 3 25613 25680 25614 3 25681 25614 25680 3 25613 25614 26231 3 26232 26231 25614 3 25614 25681 25615 3 25682 25615 25681 3 25614 25615 26232 3 26233 26232 25615 3 25615 25682 25616 3 25683 25616 25682 3 25615 25616 26233 3 26234 26233 25616 3 25616 25683 25617 3 25684 25617 25683 3 25616 25617 26234 3 26235 26234 25617 3 25617 25684 25618 3 25685 25618 25684 3 25617 25618 26235 3 26236 26235 25618 3 25618 25685 25619 3 25686 25619 25685 3 25618 25619 26236 3 26237 26236 25619 3 25619 25686 25620 3 25687 25620 25686 3 25619 25620 26237 3 26238 26237 25620 3 25620 25687 25621 3 25688 25621 25687 3 25620 25621 26238 3 26239 26238 25621 3 25621 25688 25622 3 25689 25622 25688 3 25621 25622 26239 3 26240 26239 25622 3 25622 25689 25623 3 25690 25623 25689 3 25622 25623 26240 3 26241 26240 25623 3 25623 25690 25691 3 25623 25691 25624 3 25623 25624 26241 3 26242 26241 25624 3 25624 25691 25692 3 25624 25692 25625 3 25624 25625 26242 3 26243 26242 25625 3 25625 
25692 25693 3 25625 25693 25626 3 25625 25626 26243 3 26244 26243 25626 3 25626 25693 25694 3 25626 25694 25627 3 25626 25627 26244 3 26245 26244 25627 3 25627 25694 25695 3 25627 25695 25628 3 25627 25628 26245 3 26246 26245 25628 3 25628 25695 25696 3 25628 25696 25629 3 25628 25629 26246 3 26247 26246 25629 3 25629 25696 25697 3 25629 25697 25630 3 25629 25630 26247 3 26248 26247 25630 3 25630 25697 25698 3 25630 25698 25631 3 25630 25631 26248 3 26249 26248 25631 3 25631 25698 25699 3 25631 25699 25632 3 25631 25632 26249 3 26250 26249 25632 3 25632 25699 25700 3 25632 25700 25633 3 25632 25633 26250 3 26251 26250 25633 3 25633 25700 25701 3 25633 25701 25634 3 25633 25634 26251 3 26252 26251 25634 3 25634 25701 25702 3 25634 25702 25635 3 25634 25635 26252 3 26253 26252 25635 3 25635 25702 25703 3 25635 25703 25636 3 25635 25636 26253 3 26254 26253 25636 3 25636 25703 25704 3 25636 25704 25637 3 25636 25637 26254 3 26255 26254 25637 3 25637 25704 25705 3 25637 25705 25638 3 25637 25638 26255 3 26256 26255 25638 3 25638 25705 25706 3 25638 25706 25639 3 25638 25639 26256 3 26257 26256 25639 3 25639 25706 25707 3 25639 25707 25640 3 25639 25640 26257 3 26258 26257 25640 3 25640 25707 25708 3 25640 25708 25641 3 25640 25641 26258 3 26259 26258 25641 3 25641 25708 25709 3 25641 25709 25642 3 25641 25642 26259 3 26260 26259 25642 3 25642 25709 25710 3 25642 25710 25643 3 25642 25643 26260 3 26261 26260 25643 3 25643 25710 25711 3 25643 25711 25644 3 25643 25644 26261 3 26262 26261 25644 3 25644 25711 25712 3 25644 25712 25645 3 25644 25645 26262 3 26263 26262 25645 3 25645 25712 25713 3 25645 25713 25646 3 25645 25646 26263 3 26264 26263 25646 3 25646 25713 25714 3 25646 25714 25647 3 25646 25647 26264 3 26265 26264 25647 3 25647 25714 25715 3 25647 25715 25648 3 25647 25648 26265 3 26266 26265 25648 3 25648 25715 25716 3 25648 25716 25649 3 25648 25649 26266 3 26267 26266 25649 3 25649 25716 25717 3 25649 25717 25650 3 25649 25650 26267 3 26268 26267 25650 3 25650 
25717 25718 3 25650 25718 25651 3 25650 25651 26268 3 26269 26268 25651 3 25651 25718 25719 3 25651 25719 25652 3 25651 25652 26269 3 26270 26269 25652 3 25652 25719 25720 3 25652 25720 25653 3 25652 25653 26270 3 26271 26270 25653 3 25653 25720 25721 3 25653 25721 25654 3 25653 25654 26271 3 26272 26271 25654 3 25654 25721 25722 3 25654 25722 25655 3 25654 25655 26272 3 26273 26272 25655 3 25655 25722 25723 3 25655 25723 25656 3 25655 25656 26273 3 26274 26273 25656 3 25656 25723 25724 3 25656 25724 25657 3 25656 25657 26274 3 26275 26274 25657 3 25657 25724 25725 3 25657 25725 25658 3 25657 25658 26275 3 26276 26275 25658 3 25658 25725 25726 3 25658 25726 25659 3 25658 25659 26276 3 26277 26276 25659 3 25659 25726 25727 3 25659 25727 25660 3 25659 25660 26277 3 26278 26277 25660 3 25660 25727 25728 3 25660 25728 25661 3 25660 25661 26278 3 26279 26278 25661 3 25661 25728 25729 3 25661 25729 25662 3 25661 25662 26280 3 25661 26280 26279 3 25662 25729 25730 3 25662 25730 25663 3 25662 25663 26281 3 25662 26281 26280 3 25663 25730 25731 3 25663 25731 25664 3 25663 25664 26282 3 25663 26282 26281 3 25664 25731 25732 3 25664 25732 25665 3 25664 25665 26283 3 25664 26283 26282 3 25665 25666 26284 3 25665 26284 26283 3 25733 25801 25734 3 25802 25734 25801 3 25734 25802 25735 3 25803 25735 25802 3 25735 25803 25736 3 25804 25736 25803 3 25736 25804 25737 3 25805 25737 25804 3 25737 25805 25738 3 25806 25738 25805 3 25738 25806 25739 3 25807 25739 25806 3 25739 25807 25740 3 25808 25740 25807 3 25740 25808 25741 3 25809 25741 25808 3 25741 25809 25742 3 25810 25742 25809 3 25742 25810 25743 3 25811 25743 25810 3 25743 25811 25744 3 25812 25744 25811 3 25744 25812 25745 3 25813 25745 25812 3 25745 25813 25746 3 25814 25746 25813 3 25746 25814 25747 3 25815 25747 25814 3 25747 25815 25748 3 25816 25748 25815 3 25748 25816 25749 3 25817 25749 25816 3 25749 25817 25750 3 25818 25750 25817 3 25750 25818 25751 3 25819 25751 25818 3 25751 25819 25752 3 25820 25752 25819 3 25752 
25820 25753 3 25821 25753 25820 3 25753 25821 25754 3 25822 25754 25821 3 25754 25822 25755 3 25823 25755 25822 3 25755 25823 25756 3 25824 25756 25823 3 25756 25824 25757 3 25825 25757 25824 3 25757 25825 25758 3 25826 25758 25825 3 25758 25826 25759 3 25827 25759 25826 3 25759 25827 25760 3 25828 25760 25827 3 25760 25828 25761 3 25829 25761 25828 3 25761 25829 25762 3 25830 25762 25829 3 25762 25830 25763 3 25831 25763 25830 3 25763 25831 25764 3 25832 25764 25831 3 25764 25832 25765 3 25833 25765 25832 3 25765 25833 25766 3 25834 25766 25833 3 25766 25834 25767 3 25835 25767 25834 3 25767 25835 25768 3 25836 25768 25835 3 25768 25836 25769 3 25837 25769 25836 3 25769 25837 25770 3 25838 25770 25837 3 25770 25838 25771 3 25839 25771 25838 3 25771 25839 25772 3 25840 25772 25839 3 25772 25840 25773 3 25841 25773 25840 3 25773 25841 25774 3 25842 25774 25841 3 25774 25842 25775 3 25843 25775 25842 3 25775 25843 25776 3 25844 25776 25843 3 25776 25844 25777 3 25845 25777 25844 3 25777 25845 25778 3 25846 25778 25845 3 25778 25846 25779 3 25847 25779 25846 3 25779 25847 25780 3 25848 25780 25847 3 25780 25848 25781 3 25849 25781 25848 3 25781 25849 25782 3 25850 25782 25849 3 25782 25850 25783 3 25851 25783 25850 3 25783 25851 25784 3 25852 25784 25851 3 25784 25852 25785 3 25853 25785 25852 3 25785 25853 25786 3 25854 25786 25853 3 25786 25854 25787 3 25855 25787 25854 3 25787 25855 25788 3 25856 25788 25855 3 25788 25856 25789 3 25857 25789 25856 3 25789 25857 25790 3 25858 25790 25857 3 25790 25858 25791 3 25859 25791 25858 3 25791 25859 25792 3 25860 25792 25859 3 25792 25860 25793 3 25861 25793 25860 3 25793 25861 25794 3 25862 25794 25861 3 25794 25862 25795 3 25863 25795 25862 3 25795 25863 25796 3 25864 25796 25863 3 25796 25864 25797 3 25865 25797 25864 3 25797 25865 25798 3 25866 25798 25865 3 25798 25866 25799 3 25867 25799 25866 3 25800 25870 25801 3 25871 25801 25870 3 25801 25871 25802 3 25872 25802 25871 3 25802 26285 26286 3 25802 26286 25803 3 25802 
25872 26285 3 26351 26285 25872 3 25803 26286 26287 3 25803 26287 25804 3 25804 26287 26288 3 25804 26288 25805 3 25805 26288 26289 3 25805 26289 25806 3 25806 26289 26290 3 25806 26290 25807 3 25807 26290 26291 3 25807 26291 25808 3 25808 26291 26292 3 25808 26292 25809 3 25809 26292 26293 3 25809 26293 25810 3 25810 26293 26294 3 25810 26294 25811 3 25811 26294 26295 3 25811 26295 25812 3 25812 26295 26296 3 25812 26296 25813 3 25813 26296 26297 3 25813 26297 25814 3 25814 26297 26298 3 25814 26298 25815 3 25815 26298 26299 3 25815 26299 25816 3 25816 26299 26300 3 25816 26300 25817 3 25817 26300 26301 3 25817 26301 25818 3 25818 26301 26302 3 25818 26302 25819 3 25819 26302 26303 3 25819 26303 25820 3 25820 26303 26304 3 25820 26304 25821 3 25821 26304 26305 3 25821 26305 25822 3 25822 26305 26306 3 25822 26306 25823 3 25823 26306 26307 3 25823 26307 25824 3 25824 26307 26308 3 25824 26308 25825 3 25825 26308 26309 3 25825 26309 25826 3 25826 26309 26310 3 25826 26310 25827 3 25827 26310 26311 3 25827 26311 25828 3 25828 26311 26312 3 25828 26312 25829 3 25829 26312 26313 3 25829 26313 25830 3 25830 26313 26314 3 25830 26314 25831 3 25831 26314 26315 3 25831 26315 25832 3 25832 26315 26316 3 25832 26316 25833 3 25833 26316 26317 3 25833 26317 25834 3 25834 26317 26318 3 25834 26318 25835 3 25835 26318 26319 3 25835 26319 25836 3 25836 26319 26320 3 25836 26320 25837 3 25837 26320 26321 3 25837 26321 25838 3 25838 26321 26322 3 25838 26322 25839 3 25839 26322 26323 3 25839 26323 25840 3 25840 26323 26324 3 25840 26324 25841 3 25841 26324 26325 3 25841 26325 25842 3 25842 26325 26326 3 25842 26326 25843 3 25843 26326 26327 3 25843 26327 25844 3 25844 26327 26328 3 25844 26328 25845 3 25845 26328 26329 3 25845 26329 25846 3 25846 26329 26330 3 25846 26330 25847 3 25847 26330 26331 3 25847 26331 25848 3 25848 26331 26332 3 25848 26332 25849 3 25849 26332 26333 3 25849 26333 25850 3 25850 26333 26334 3 25850 26334 25851 3 25851 26334 26335 3 25851 26335 25852 3 25852 
26335 26336 3 25852 26336 25853 3 25853 26336 26337 3 25853 26337 25854 3 25854 26337 26338 3 25854 26338 25855 3 25855 26338 26339 3 25855 26339 25856 3 25856 26339 26340 3 25856 26340 25857 3 25857 26340 26341 3 25857 26341 25858 3 25858 26341 26342 3 25858 26342 25859 3 25859 26342 26343 3 25859 26343 25860 3 25860 26343 26344 3 25860 26344 25861 3 25861 26344 26345 3 25861 26345 25862 3 25862 26345 26346 3 25862 26346 25863 3 25863 26346 26347 3 25863 26347 25864 3 25864 26347 26348 3 25864 26348 25865 3 25865 26348 26349 3 25865 26349 25866 3 25866 25873 25874 3 25866 25874 25867 3 25866 26349 26415 3 25866 26415 25873 3 25867 25874 25875 3 25867 25875 25868 3 25869 25877 25878 3 25869 25878 25870 3 25870 25878 25879 3 25870 25879 25871 3 25871 26350 26351 3 25871 26351 25872 3 25871 25879 26418 3 25871 26418 26350 3 25873 26415 25874 3 26416 25874 26415 3 25874 25880 25881 3 25874 25881 25875 3 25874 26416 25880 3 26484 25880 26416 3 25875 25881 25882 3 25875 25882 25876 3 25877 25883 25884 3 25877 25884 25878 3 25878 26417 25879 3 26418 25879 26417 3 25878 25884 26486 3 25878 26486 26417 3 25880 26484 25881 3 26485 25881 26484 3 25881 25885 25886 3 25881 25886 25882 3 25881 26485 25885 3 26554 25885 26485 3 25883 25887 25888 3 25883 25888 25884 3 25884 25888 26555 3 25884 26555 26486 3 25885 25889 25890 3 25885 25890 25886 3 25885 26554 26623 3 25885 26623 25889 3 25887 25891 25892 3 25887 25892 25888 3 25888 25892 26624 3 25888 26624 26555 3 25889 25893 25894 3 25889 25894 25890 3 25889 26623 26692 3 25889 26692 25893 3 25891 25895 25896 3 25891 25896 25892 3 25892 25896 26693 3 25892 26693 26624 3 25893 25897 25898 3 25893 25898 25894 3 25893 26692 26761 3 25893 26761 25897 3 25895 25899 25900 3 25895 25900 25896 3 25896 25900 26762 3 25896 26762 26693 3 25897 25901 25902 3 25897 25902 25898 3 25897 26761 25901 3 26830 25901 26761 3 25899 25903 25904 3 25899 25904 25900 3 25900 25904 26831 3 25900 26831 26762 3 25901 25905 25906 3 25901 25906 25902 3 25901 
26830 25905 3 26899 25905 26830 3 25903 25907 25908 3 25903 25908 25904 3 25904 25908 26900 3 25904 26900 26831 3 25905 25909 25910 3 25905 25910 25906 3 25905 26899 26968 3 25905 26968 25909 3 25907 25911 25912 3 25907 25912 25908 3 25908 25912 26969 3 25908 26969 26900 3 25909 25913 25914 3 25909 25914 25910 3 25909 26968 27037 3 25909 27037 25913 3 25911 25915 25916 3 25911 25916 25912 3 25912 25916 27038 3 25912 27038 26969 3 25913 25917 25918 3 25913 25918 25914 3 25913 27037 25917 3 27106 25917 27037 3 25915 25919 25920 3 25915 25920 25916 3 25916 25920 27107 3 25916 27107 27038 3 25917 25921 25922 3 25917 25922 25918 3 25917 27106 25921 3 27175 25921 27106 3 25919 25923 25924 3 25919 25924 25920 3 25920 25924 27176 3 25920 27176 27107 3 25921 25925 25926 3 25921 25926 25922 3 25921 27175 25925 3 27244 25925 27175 3 25923 25927 25928 3 25923 25928 25924 3 25924 25928 27245 3 25924 27245 27176 3 25925 25929 25930 3 25925 25930 25926 3 25925 27244 27313 3 25925 27313 25929 3 25927 25931 25932 3 25927 25932 25928 3 25928 25932 27314 3 25928 27314 27245 3 25929 25933 25934 3 25929 25934 25930 3 25929 27313 27382 3 25929 27382 25933 3 25931 25935 25936 3 25931 25936 25932 3 25932 25936 27383 3 25932 27383 27314 3 25933 25937 25938 3 25933 25938 25934 3 25933 27382 25937 3 27451 25937 27382 3 25935 25939 25940 3 25935 25940 25936 3 25936 25940 27452 3 25936 27452 27383 3 25937 25941 25942 3 25937 25942 25938 3 25937 27451 25941 3 27520 25941 27451 3 25939 25943 25944 3 25939 25944 25940 3 25940 25944 27521 3 25940 27521 27452 3 25941 25945 25946 3 25941 25946 25942 3 25941 27520 25945 3 27589 25945 27520 3 25943 25947 25948 3 25943 25948 25944 3 25944 25948 27590 3 25944 27590 27521 3 25945 25949 25950 3 25945 25950 25946 3 25945 27589 27658 3 25945 27658 25949 3 25947 25951 25952 3 25947 25952 25948 3 25948 25952 27659 3 25948 27659 27590 3 25949 25953 25954 3 25949 25954 25950 3 25949 27658 27727 3 25949 27727 25953 3 25951 25955 25956 3 25951 25956 25952 3 25952 
25956 27728 3 25952 27728 27659 3 25953 25957 25958 3 25953 25958 25954 3 25953 27727 25957 3 27796 25957 27727 3 25955 25959 25960 3 25955 25960 25956 3 25956 25960 27728 3 27797 27728 25960 3 25957 25961 25958 3 25962 25958 25961 3 25957 27796 25961 3 27865 25961 27796 3 25959 25963 25960 3 25964 25960 25963 3 25960 25964 27797 3 27866 27797 25964 3 25961 25965 25962 3 25966 25962 25965 3 25961 27865 25965 3 27934 25965 27865 3 25963 25967 25964 3 25968 25964 25967 3 25964 25968 27866 3 27935 27866 25968 3 25965 25969 25966 3 25970 25966 25969 3 25965 27934 28003 3 25965 28003 25969 3 25967 25971 25968 3 25972 25968 25971 3 25968 25972 27935 3 28004 27935 25972 3 25969 25973 25970 3 25974 25970 25973 3 25969 28003 28072 3 25969 28072 25973 3 25971 25975 25972 3 25976 25972 25975 3 25972 25976 28004 3 28073 28004 25976 3 25973 25977 25974 3 25978 25974 25977 3 25973 28072 25977 3 28141 25977 28072 3 25975 25979 25976 3 25980 25976 25979 3 25976 25980 28073 3 28142 28073 25980 3 25977 25981 25978 3 25982 25978 25981 3 25977 28141 25981 3 28210 25981 28141 3 25979 25983 25980 3 25984 25980 25983 3 25980 25984 28142 3 28211 28142 25984 3 25981 25985 25982 3 25986 25982 25985 3 25981 28210 25985 3 28279 25985 28210 3 25983 25987 25984 3 25988 25984 25987 3 25984 25988 28211 3 28280 28211 25988 3 25985 25989 25986 3 25990 25986 25989 3 25985 28279 28348 3 25985 28348 25989 3 25987 25991 25988 3 25992 25988 25991 3 25988 25992 28280 3 28349 28280 25992 3 25989 25993 25990 3 25994 25990 25993 3 25989 28348 28417 3 25989 28417 25993 3 25991 25995 25992 3 25996 25992 25995 3 25992 25996 28349 3 28418 28349 25996 3 25993 25997 25994 3 25998 25994 25997 3 25993 28417 25997 3 28486 25997 28417 3 25995 25999 25996 3 26000 25996 25999 3 25996 26000 28418 3 28487 28418 26000 3 25997 26001 25998 3 26002 25998 26001 3 25997 28486 26001 3 28555 26001 28486 3 25999 26003 26000 3 26004 26000 26003 3 26000 26004 28487 3 28556 28487 26004 3 26001 26005 26002 3 26006 26002 26005 3 26001 
28555 26005 3 28624 26005 28555 3 26003 26007 26004 3 26008 26004 26007 3 26004 26008 28556 3 28625 28556 26008 3 26005 26009 26006 3 26010 26006 26009 3 26005 28624 28693 3 26005 28693 26009 3 26007 26011 26008 3 26012 26008 26011 3 26008 26012 28625 3 28694 28625 26012 3 26009 26013 26010 3 26014 26010 26013 3 26009 28693 28762 3 26009 28762 26013 3 26011 26015 26012 3 26016 26012 26015 3 26012 26016 28694 3 28763 28694 26016 3 26013 26017 26014 3 26018 26014 26017 3 26013 28762 26017 3 28831 26017 28762 3 26015 26019 26016 3 26020 26016 26019 3 26016 26020 28763 3 28832 28763 26020 3 26017 26021 26018 3 26022 26018 26021 3 26017 28831 26021 3 28900 26021 28831 3 26019 26023 26020 3 26024 26020 26023 3 26020 26024 28832 3 28901 28832 26024 3 26021 26025 26022 3 26026 26022 26025 3 26021 28900 26025 3 28969 26025 28900 3 26023 26027 26024 3 26028 26024 26027 3 26024 26028 28901 3 28970 28901 26028 3 26025 26029 26026 3 26030 26026 26029 3 26025 28969 29038 3 26025 29038 26029 3 26027 26031 26028 3 26032 26028 26031 3 26028 26032 28970 3 29039 28970 26032 3 26029 26033 26030 3 26034 26030 26033 3 26029 29038 29107 3 26029 29107 26033 3 26031 26035 26032 3 26036 26032 26035 3 26032 26036 29039 3 29108 29039 26036 3 26033 26037 26034 3 26038 26034 26037 3 26033 29107 26037 3 29176 26037 29107 3 26035 26039 26036 3 26040 26036 26039 3 26036 26040 29108 3 29177 29108 26040 3 26037 26041 26038 3 26042 26038 26041 3 26037 29176 26041 3 29245 26041 29176 3 26039 26043 26040 3 26044 26040 26043 3 26040 26044 29177 3 29246 29177 26044 3 26041 26045 26042 3 26046 26042 26045 3 26041 29245 26045 3 29314 26045 29245 3 26043 26047 26044 3 26048 26044 26047 3 26044 26048 29246 3 29315 29246 26048 3 26045 26049 26046 3 26050 26046 26049 3 26045 29314 29383 3 26045 29383 26049 3 26047 26051 26048 3 26052 26048 26051 3 26048 26052 29315 3 29384 29315 26052 3 26049 26053 26050 3 26054 26050 26053 3 26049 29383 29452 3 26049 29452 26053 3 26051 26055 26052 3 26056 26052 26055 3 26052 
26056 29384 3 29453 29384 26056 3 26053 26057 26054 3 26058 26054 26057 3 26053 29452 26057 3 29521 26057 29452 3 26055 26059 26060 3 26055 26060 26056 3 26056 26060 29522 3 26056 29522 29453 3 26057 26061 26062 3 26057 26062 26058 3 26057 29521 26061 3 29590 26061 29521 3 26059 26063 26064 3 26059 26064 26060 3 26060 26064 29591 3 26060 29591 29522 3 26061 26065 26066 3 26061 26066 26062 3 26061 29590 26065 3 29659 26065 29590 3 26063 26067 26068 3 26063 26068 26064 3 26064 26068 29660 3 26064 29660 29591 3 26065 26069 26070 3 26065 26070 26066 3 26065 29659 29728 3 26065 29728 26069 3 26067 26071 26072 3 26067 26072 26068 3 26068 26072 29729 3 26068 29729 29660 3 26069 26073 26074 3 26069 26074 26070 3 26069 29728 29797 3 26069 29797 26073 3 26071 26075 26076 3 26071 26076 26072 3 26072 26076 29798 3 26072 29798 29729 3 26073 26077 26078 3 26073 26078 26074 3 26073 29797 26077 3 29866 26077 29797 3 26075 26079 26080 3 26075 26080 26076 3 26076 26080 29867 3 26076 29867 29798 3 26077 26081 26082 3 26077 26082 26078 3 26077 29866 26081 3 29935 26081 29866 3 26079 26083 26084 3 26079 26084 26080 3 26080 26084 29936 3 26080 29936 29867 3 26081 26085 26086 3 26081 26086 26082 3 26081 29935 26085 3 30004 26085 29935 3 26083 26087 26088 3 26083 26088 26084 3 26084 26088 30005 3 26084 30005 29936 3 26085 26089 26090 3 26085 26090 26086 3 26085 30004 30073 3 26085 30073 26089 3 26087 26091 26092 3 26087 26092 26088 3 26088 26092 30074 3 26088 30074 30005 3 26089 26093 26094 3 26089 26094 26090 3 26089 30073 30142 3 26089 30142 26093 3 26091 26095 26096 3 26091 26096 26092 3 26092 26096 30143 3 26092 30143 30074 3 26093 26097 26098 3 26093 26098 26094 3 26093 30142 30211 3 26093 30211 26097 3 26095 26099 26100 3 26095 26100 26096 3 26096 26100 30212 3 26096 30212 30143 3 26097 26101 26102 3 26097 26102 26098 3 26097 30211 26101 3 30280 26101 30211 3 26099 26103 26104 3 26099 26104 26100 3 26100 26104 30281 3 26100 30281 30212 3 26101 26105 26106 3 26101 26106 26102 3 26101 
30280 26105 3 30349 26105 30280 3 26103 26107 26108 3 26103 26108 26104 3 26104 26108 30350 3 26104 30350 30281 3 26105 26109 26110 3 26105 26110 26106 3 26105 30349 30418 3 26105 30418 26109 3 26107 26111 26112 3 26107 26112 26108 3 26108 26112 30419 3 26108 30419 30350 3 26109 26113 26114 3 26109 26114 26110 3 26109 30418 30487 3 26109 30487 26113 3 26111 26115 26116 3 26111 26116 26112 3 26112 26116 30488 3 26112 30488 30419 3 26113 26117 26118 3 26113 26118 26114 3 26113 30487 30556 3 26113 30556 26117 3 26115 26119 26120 3 26115 26120 26116 3 26116 26120 30557 3 26116 30557 30488 3 26117 26121 26122 3 26117 26122 26118 3 26117 30556 26121 3 30625 26121 30556 3 26119 26123 26124 3 26119 26124 26120 3 26120 26124 30626 3 26120 30626 30557 3 26121 26125 26126 3 26121 26126 26122 3 26121 30625 26125 3 30694 26125 30625 3 26123 26127 26128 3 26123 26128 26124 3 26124 26128 30695 3 26124 30695 30626 3 26125 26129 26130 3 26125 26130 26126 3 26125 30694 30763 3 26125 30763 26129 3 26127 26131 26132 3 26127 26132 26128 3 26128 26132 30764 3 26128 30764 30695 3 26129 26133 26134 3 26129 26134 26130 3 26129 30763 30832 3 26129 30832 26133 3 26131 26135 26136 3 26131 26136 26132 3 26132 26136 30833 3 26132 30833 30764 3 26133 26139 26140 3 26133 26140 26134 3 26133 30832 30901 3 26133 30901 26139 3 26135 26141 26142 3 26135 26142 26136 3 26136 26142 26143 3 26136 26143 26137 3 26136 26137 30834 3 26136 30834 30833 3 26137 26143 30902 3 26137 30902 30834 3 26138 26146 26147 3 26138 26147 26139 3 26138 26139 30901 3 26138 30901 30900 3 26138 30900 26146 3 30968 26146 30900 3 26139 26147 26148 3 26139 26148 26140 3 26142 26149 26150 3 26142 26150 26143 3 26143 26150 26151 3 26143 26151 26144 3 26143 26144 30903 3 26143 30903 30902 3 26144 26151 30969 3 26144 30969 30903 3 26145 26215 26146 3 26216 26146 26215 3 26145 26146 30968 3 26145 30968 30967 3 26145 30967 26215 3 31033 26215 30967 3 26146 26216 26147 3 26217 26147 26216 3 26150 26218 26151 3 26219 26151 26218 3 26151 
26219 26152 3 26220 26152 26219 3 26151 26152 30970 3 26151 30970 30969 3 26152 26220 26153 3 26221 26153 26220 3 26152 26153 30971 3 26152 30971 30970 3 26153 26221 26154 3 26222 26154 26221 3 26153 26154 30972 3 26153 30972 30971 3 26154 26222 26155 3 26223 26155 26222 3 26154 26155 30973 3 26154 30973 30972 3 26155 26223 26156 3 26224 26156 26223 3 26155 26156 30974 3 26155 30974 30973 3 26156 26224 26157 3 26225 26157 26224 3 26156 26157 30975 3 26156 30975 30974 3 26157 26225 26158 3 26226 26158 26225 3 26157 26158 30975 3 30976 30975 26158 3 26158 26226 26159 3 26227 26159 26226 3 26158 26159 30976 3 30977 30976 26159 3 26159 26227 26160 3 26228 26160 26227 3 26159 26160 30977 3 30978 30977 26160 3 26160 26228 26161 3 26229 26161 26228 3 26160 26161 30978 3 30979 30978 26161 3 26161 26229 26162 3 26230 26162 26229 3 26161 26162 30979 3 30980 30979 26162 3 26162 26230 26163 3 26231 26163 26230 3 26162 26163 30980 3 30981 30980 26163 3 26163 26231 26164 3 26232 26164 26231 3 26163 26164 30981 3 30982 30981 26164 3 26164 26232 26165 3 26233 26165 26232 3 26164 26165 30982 3 30983 30982 26165 3 26165 26233 26166 3 26234 26166 26233 3 26165 26166 30983 3 30984 30983 26166 3 26166 26234 26167 3 26235 26167 26234 3 26166 26167 30984 3 30985 30984 26167 3 26167 26235 26168 3 26236 26168 26235 3 26167 26168 30985 3 30986 30985 26168 3 26168 26236 26169 3 26237 26169 26236 3 26168 26169 30986 3 30987 30986 26169 3 26169 26237 26170 3 26238 26170 26237 3 26169 26170 30987 3 30988 30987 26170 3 26170 26238 26171 3 26239 26171 26238 3 26170 26171 30988 3 30989 30988 26171 3 26171 26239 26172 3 26240 26172 26239 3 26171 26172 30989 3 30990 30989 26172 3 26172 26240 26173 3 26241 26173 26240 3 26172 26173 30990 3 30991 30990 26173 3 26173 26241 26174 3 26242 26174 26241 3 26173 26174 30991 3 30992 30991 26174 3 26174 26242 26175 3 26243 26175 26242 3 26174 26175 30992 3 30993 30992 26175 3 26175 26243 26176 3 26244 26176 26243 3 26175 26176 30993 3 30994 30993 26176 3 26176 
26244 26177 3 26245 26177 26244 3 26176 26177 30994 3 30995 30994 26177 3 26177 26245 26178 3 26246 26178 26245 3 26177 26178 30995 3 30996 30995 26178 3 26178 26246 26179 3 26247 26179 26246 3 26178 26179 30996 3 30997 30996 26179 3 26179 26247 26180 3 26248 26180 26247 3 26179 26180 30997 3 30998 30997 26180 3 26180 26248 26181 3 26249 26181 26248 3 26180 26181 30998 3 30999 30998 26181 3 26181 26249 26182 3 26250 26182 26249 3 26181 26182 30999 3 31000 30999 26182 3 26182 26250 26183 3 26251 26183 26250 3 26182 26183 31000 3 31001 31000 26183 3 26183 26251 26184 3 26252 26184 26251 3 26183 26184 31001 3 31002 31001 26184 3 26184 26252 26185 3 26253 26185 26252 3 26184 26185 31002 3 31003 31002 26185 3 26185 26253 26186 3 26254 26186 26253 3 26185 26186 31003 3 31004 31003 26186 3 26186 26254 26187 3 26255 26187 26254 3 26186 26187 31004 3 31005 31004 26187 3 26187 26255 26188 3 26256 26188 26255 3 26187 26188 31005 3 31006 31005 26188 3 26188 26256 26189 3 26257 26189 26256 3 26188 26189 31006 3 31007 31006 26189 3 26189 26257 26190 3 26258 26190 26257 3 26189 26190 31007 3 31008 31007 26190 3 26190 26258 26191 3 26259 26191 26258 3 26190 26191 31008 3 31009 31008 26191 3 26191 26259 26192 3 26260 26192 26259 3 26191 26192 31009 3 31010 31009 26192 3 26192 26260 26193 3 26261 26193 26260 3 26192 26193 31010 3 31011 31010 26193 3 26193 26261 26194 3 26262 26194 26261 3 26193 26194 31011 3 31012 31011 26194 3 26194 26262 26195 3 26263 26195 26262 3 26194 26195 31012 3 31013 31012 26195 3 26195 26263 26196 3 26264 26196 26263 3 26195 26196 31013 3 31014 31013 26196 3 26196 26264 26197 3 26265 26197 26264 3 26196 26197 31014 3 31015 31014 26197 3 26197 26265 26198 3 26266 26198 26265 3 26197 26198 31015 3 31016 31015 26198 3 26198 26266 26199 3 26267 26199 26266 3 26198 26199 31016 3 31017 31016 26199 3 26199 26267 26200 3 26268 26200 26267 3 26199 26200 31017 3 31018 31017 26200 3 26200 26268 26201 3 26269 26201 26268 3 26200 26201 31018 3 31019 31018 26201 3 26201 
26269 26202 3 26270 26202 26269 3 26201 26202 31019 3 31020 31019 26202 3 26202 26270 26203 3 26271 26203 26270 3 26202 26203 31020 3 31021 31020 26203 3 26203 26271 26204 3 26272 26204 26271 3 26203 26204 31021 3 31022 31021 26204 3 26204 26272 26205 3 26273 26205 26272 3 26204 26205 31022 3 31023 31022 26205 3 26205 26273 26206 3 26274 26206 26273 3 26205 26206 31023 3 31024 31023 26206 3 26206 26274 26207 3 26275 26207 26274 3 26206 26207 31024 3 31025 31024 26207 3 26207 26275 26208 3 26276 26208 26275 3 26207 26208 31025 3 31026 31025 26208 3 26208 26276 26209 3 26277 26209 26276 3 26208 26209 31026 3 31027 31026 26209 3 26209 26277 26210 3 26278 26210 26277 3 26209 26210 31027 3 31028 31027 26210 3 26210 26278 26211 3 26279 26211 26278 3 26210 26211 31028 3 31029 31028 26211 3 26211 26279 26212 3 26280 26212 26279 3 26211 26212 31029 3 31030 31029 26212 3 26212 26280 26213 3 26281 26213 26280 3 26212 26213 31030 3 31031 31030 26213 3 26213 26281 26214 3 26282 26214 26281 3 26213 26214 31031 3 31032 31031 26214 3 26214 26282 26215 3 26283 26215 26282 3 26214 26215 31032 3 31033 31032 26215 3 26215 26283 26216 3 26284 26216 26283 3 26285 26351 26352 3 26285 26352 26286 3 26286 26352 26353 3 26286 26353 26287 3 26287 26353 26354 3 26287 26354 26288 3 26288 26354 26355 3 26288 26355 26289 3 26289 26355 26356 3 26289 26356 26290 3 26290 26356 26357 3 26290 26357 26291 3 26291 26357 26358 3 26291 26358 26292 3 26292 26358 26359 3 26292 26359 26293 3 26293 26359 26360 3 26293 26360 26294 3 26294 26360 26361 3 26294 26361 26295 3 26295 26361 26362 3 26295 26362 26296 3 26296 26362 26363 3 26296 26363 26297 3 26297 26363 26364 3 26297 26364 26298 3 26298 26364 26365 3 26298 26365 26299 3 26299 26365 26366 3 26299 26366 26300 3 26300 26366 26367 3 26300 26367 26301 3 26301 26367 26368 3 26301 26368 26302 3 26302 26368 26369 3 26302 26369 26303 3 26303 26369 26370 3 26303 26370 26304 3 26304 26370 26371 3 26304 26371 26305 3 26305 26371 26372 3 26305 26372 26306 3 26306 
26372 26373 3 26306 26373 26307 3 26307 26373 26374 3 26307 26374 26308 3 26308 26374 26375 3 26308 26375 26309 3 26309 26375 26376 3 26309 26376 26310 3 26310 26376 26377 3 26310 26377 26311 3 26311 26377 26378 3 26311 26378 26312 3 26312 26378 26379 3 26312 26379 26313 3 26313 26379 26380 3 26313 26380 26314 3 26314 26380 26381 3 26314 26381 26315 3 26315 26381 26382 3 26315 26382 26316 3 26316 26382 26383 3 26316 26383 26317 3 26317 26383 26384 3 26317 26384 26318 3 26318 26384 26385 3 26318 26385 26319 3 26319 26385 26320 3 26386 26320 26385 3 26320 26386 26321 3 26387 26321 26386 3 26321 26387 26322 3 26388 26322 26387 3 26322 26388 26323 3 26389 26323 26388 3 26323 26389 26324 3 26390 26324 26389 3 26324 26390 26325 3 26391 26325 26390 3 26325 26391 26326 3 26392 26326 26391 3 26326 26392 26327 3 26393 26327 26392 3 26327 26393 26328 3 26394 26328 26393 3 26328 26394 26329 3 26395 26329 26394 3 26329 26395 26330 3 26396 26330 26395 3 26330 26396 26331 3 26397 26331 26396 3 26331 26397 26332 3 26398 26332 26397 3 26332 26398 26333 3 26399 26333 26398 3 26333 26399 26334 3 26400 26334 26399 3 26334 26400 26335 3 26401 26335 26400 3 26335 26401 26336 3 26402 26336 26401 3 26336 26402 26337 3 26403 26337 26402 3 26337 26403 26338 3 26404 26338 26403 3 26338 26404 26339 3 26405 26339 26404 3 26339 26405 26340 3 26406 26340 26405 3 26340 26406 26341 3 26407 26341 26406 3 26341 26407 26342 3 26408 26342 26407 3 26342 26408 26343 3 26409 26343 26408 3 26343 26409 26344 3 26410 26344 26409 3 26344 26410 26345 3 26411 26345 26410 3 26345 26411 26346 3 26412 26346 26411 3 26346 26412 26347 3 26413 26347 26412 3 26347 26413 26348 3 26414 26348 26413 3 26348 26414 26349 3 26415 26349 26414 3 26350 26418 26351 3 26419 26351 26418 3 26351 26419 26352 3 26420 26352 26419 3 26352 26420 26353 3 26421 26353 26420 3 26353 26421 26354 3 26422 26354 26421 3 26354 26422 26355 3 26423 26355 26422 3 26355 26423 26356 3 26424 26356 26423 3 26356 26424 26357 3 26425 26357 26424 3 26357 
26425 26358 3 26426 26358 26425 3 26358 26426 26359 3 26427 26359 26426 3 26359 26427 26360 3 26428 26360 26427 3 26360 26428 26361 3 26429 26361 26428 3 26361 26429 26362 3 26430 26362 26429 3 26362 26430 26363 3 26431 26363 26430 3 26363 26431 26364 3 26432 26364 26431 3 26364 26432 26365 3 26433 26365 26432 3 26365 26433 26366 3 26434 26366 26433 3 26366 26434 26367 3 26435 26367 26434 3 26367 26435 26368 3 26436 26368 26435 3 26368 26436 26369 3 26437 26369 26436 3 26369 26437 26370 3 26438 26370 26437 3 26370 26438 26371 3 26439 26371 26438 3 26371 26439 26372 3 26440 26372 26439 3 26372 26440 26373 3 26441 26373 26440 3 26373 26441 26374 3 26442 26374 26441 3 26374 26442 26375 3 26443 26375 26442 3 26375 26443 26376 3 26444 26376 26443 3 26376 26444 26377 3 26445 26377 26444 3 26377 26445 26378 3 26446 26378 26445 3 26378 26446 26379 3 26447 26379 26446 3 26379 26447 26380 3 26448 26380 26447 3 26380 26448 26381 3 26449 26381 26448 3 26381 26449 26382 3 26450 26382 26449 3 26382 26450 26383 3 26451 26383 26450 3 26383 26451 26384 3 26452 26384 26451 3 26384 26452 26385 3 26453 26385 26452 3 26385 26453 26386 3 26454 26386 26453 3 26386 26454 26387 3 26455 26387 26454 3 26387 26455 26388 3 26456 26388 26455 3 26388 26456 26389 3 26457 26389 26456 3 26389 26457 26390 3 26458 26390 26457 3 26390 26458 26391 3 26459 26391 26458 3 26391 26459 26392 3 26460 26392 26459 3 26392 26460 26393 3 26461 26393 26460 3 26393 26461 26394 3 26462 26394 26461 3 26394 26462 26395 3 26463 26395 26462 3 26395 26463 26396 3 26464 26396 26463 3 26396 26464 26397 3 26465 26397 26464 3 26397 26465 26398 3 26466 26398 26465 3 26398 26466 26399 3 26467 26399 26466 3 26399 26467 26400 3 26468 26400 26467 3 26400 26468 26401 3 26469 26401 26468 3 26401 26469 26402 3 26470 26402 26469 3 26402 26470 26403 3 26471 26403 26470 3 26403 26471 26404 3 26472 26404 26471 3 26404 26472 26405 3 26473 26405 26472 3 26405 26473 26406 3 26474 26406 26473 3 26406 26474 26407 3 26475 26407 26474 3 26407 
26475 26408 3 26476 26408 26475 3 26408 26476 26409 3 26477 26409 26476 3 26409 26477 26410 3 26478 26410 26477 3 26410 26478 26411 3 26479 26411 26478 3 26411 26479 26412 3 26480 26412 26479 3 26412 26480 26413 3 26481 26413 26480 3 26413 26481 26414 3 26482 26414 26481 3 26414 26482 26415 3 26483 26415 26482 3 26415 26483 26416 3 26484 26416 26483 3 26417 26486 26487 3 26417 26487 26418 3 26418 26487 26488 3 26418 26488 26419 3 26419 26488 26489 3 26419 26489 26420 3 26420 26489 26490 3 26420 26490 26421 3 26421 26490 26491 3 26421 26491 26422 3 26422 26491 26492 3 26422 26492 26423 3 26423 26492 26493 3 26423 26493 26424 3 26424 26493 26494 3 26424 26494 26425 3 26425 26494 26495 3 26425 26495 26426 3 26426 26495 26496 3 26426 26496 26427 3 26427 26496 26497 3 26427 26497 26428 3 26428 26497 26498 3 26428 26498 26429 3 26429 26498 26499 3 26429 26499 26430 3 26430 26499 26500 3 26430 26500 26431 3 26431 26500 26501 3 26431 26501 26432 3 26432 26501 26502 3 26432 26502 26433 3 26433 26502 26503 3 26433 26503 26434 3 26434 26503 26504 3 26434 26504 26435 3 26435 26504 26505 3 26435 26505 26436 3 26436 26505 26506 3 26436 26506 26437 3 26437 26506 26507 3 26437 26507 26438 3 26438 26507 26508 3 26438 26508 26439 3 26439 26508 26509 3 26439 26509 26440 3 26440 26509 26510 3 26440 26510 26441 3 26441 26510 26511 3 26441 26511 26442 3 26442 26511 26512 3 26442 26512 26443 3 26443 26512 26513 3 26443 26513 26444 3 26444 26513 26514 3 26444 26514 26445 3 26445 26514 26515 3 26445 26515 26446 3 26446 26515 26516 3 26446 26516 26447 3 26447 26516 26517 3 26447 26517 26448 3 26448 26517 26518 3 26448 26518 26449 3 26449 26518 26519 3 26449 26519 26450 3 26450 26519 26520 3 26450 26520 26451 3 26451 26520 26521 3 26451 26521 26452 3 26452 26521 26522 3 26452 26522 26453 3 26453 26522 26523 3 26453 26523 26454 3 26454 26523 26524 3 26454 26524 26455 3 26455 26524 26525 3 26455 26525 26456 3 26456 26525 26526 3 26456 26526 26457 3 26457 26526 26527 3 26457 26527 26458 3 26458 
26527 26528 3 26458 26528 26459 3 26459 26528 26529 3 26459 26529 26460 3 26460 26529 26530 3 26460 26530 26461 3 26461 26530 26531 3 26461 26531 26462 3 26462 26531 26532 3 26462 26532 26463 3 26463 26532 26533 3 26463 26533 26464 3 26464 26533 26534 3 26464 26534 26465 3 26465 26534 26535 3 26465 26535 26466 3 26466 26535 26536 3 26466 26536 26467 3 26467 26536 26537 3 26467 26537 26468 3 26468 26537 26538 3 26468 26538 26469 3 26469 26538 26539 3 26469 26539 26470 3 26470 26539 26540 3 26470 26540 26471 3 26471 26540 26541 3 26471 26541 26472 3 26472 26541 26542 3 26472 26542 26473 3 26473 26542 26543 3 26473 26543 26474 3 26474 26543 26544 3 26474 26544 26475 3 26475 26544 26545 3 26475 26545 26476 3 26476 26545 26546 3 26476 26546 26477 3 26477 26546 26547 3 26477 26547 26478 3 26478 26547 26548 3 26478 26548 26479 3 26479 26548 26549 3 26479 26549 26480 3 26480 26549 26550 3 26480 26550 26481 3 26481 26550 26551 3 26481 26551 26482 3 26482 26551 26552 3 26482 26552 26483 3 26483 26552 26553 3 26483 26553 26484 3 26484 26553 26554 3 26484 26554 26485 3 26486 26555 26556 3 26486 26556 26487 3 26487 26556 26557 3 26487 26557 26488 3 26488 26557 26558 3 26488 26558 26489 3 26489 26558 26559 3 26489 26559 26490 3 26490 26559 26560 3 26490 26560 26491 3 26491 26560 26561 3 26491 26561 26492 3 26492 26561 26562 3 26492 26562 26493 3 26493 26562 26563 3 26493 26563 26494 3 26494 26563 26564 3 26494 26564 26495 3 26495 26564 26565 3 26495 26565 26496 3 26496 26565 26566 3 26496 26566 26497 3 26497 26566 26567 3 26497 26567 26498 3 26498 26567 26568 3 26498 26568 26499 3 26499 26568 26569 3 26499 26569 26500 3 26500 26569 26570 3 26500 26570 26501 3 26501 26570 26571 3 26501 26571 26502 3 26502 26571 26572 3 26502 26572 26503 3 26503 26572 26573 3 26503 26573 26504 3 26504 26573 26574 3 26504 26574 26505 3 26505 26574 26575 3 26505 26575 26506 3 26506 26575 26576 3 26506 26576 26507 3 26507 26576 26577 3 26507 26577 26508 3 26508 26577 26578 3 26508 26578 26509 3 26509 
26578 26579 3 26509 26579 26510 3 26510 26579 26580 3 26510 26580 26511 3 26511 26580 26581 3 26511 26581 26512 3 26512 26581 26582 3 26512 26582 26513 3 26513 26582 26583 3 26513 26583 26514 3 26514 26583 26584 3 26514 26584 26515 3 26515 26584 26585 3 26515 26585 26516 3 26516 26585 26586 3 26516 26586 26517 3 26517 26586 26518 3 26587 26518 26586 3 26518 26587 26519 3 26588 26519 26587 3 26519 26588 26520 3 26589 26520 26588 3 26520 26589 26521 3 26590 26521 26589 3 26521 26590 26522 3 26591 26522 26590 3 26522 26591 26523 3 26592 26523 26591 3 26523 26592 26524 3 26593 26524 26592 3 26524 26593 26525 3 26594 26525 26593 3 26525 26594 26526 3 26595 26526 26594 3 26526 26595 26527 3 26596 26527 26595 3 26527 26596 26528 3 26597 26528 26596 3 26528 26597 26529 3 26598 26529 26597 3 26529 26598 26530 3 26599 26530 26598 3 26530 26599 26531 3 26600 26531 26599 3 26531 26600 26532 3 26601 26532 26600 3 26532 26601 26533 3 26602 26533 26601 3 26533 26602 26534 3 26603 26534 26602 3 26534 26603 26535 3 26604 26535 26603 3 26535 26604 26536 3 26605 26536 26604 3 26536 26605 26537 3 26606 26537 26605 3 26537 26606 26538 3 26607 26538 26606 3 26538 26607 26539 3 26608 26539 26607 3 26539 26608 26540 3 26609 26540 26608 3 26540 26609 26541 3 26610 26541 26609 3 26541 26610 26542 3 26611 26542 26610 3 26542 26611 26543 3 26612 26543 26611 3 26543 26612 26544 3 26613 26544 26612 3 26544 26613 26545 3 26614 26545 26613 3 26545 26614 26546 3 26615 26546 26614 3 26546 26615 26547 3 26616 26547 26615 3 26547 26616 26548 3 26617 26548 26616 3 26548 26617 26549 3 26618 26549 26617 3 26549 26618 26550 3 26619 26550 26618 3 26550 26619 26551 3 26620 26551 26619 3 26551 26620 26552 3 26621 26552 26620 3 26552 26621 26553 3 26622 26553 26621 3 26553 26622 26554 3 26623 26554 26622 3 26555 26624 26556 3 26625 26556 26624 3 26556 26625 26557 3 26626 26557 26625 3 26557 26626 26558 3 26627 26558 26626 3 26558 26627 26559 3 26628 26559 26627 3 26559 26628 26560 3 26629 26560 26628 3 26560 
26629 26561 3 26630 26561 26629 3 26561 26630 26562 3 26631 26562 26630 3 26562 26631 26563 3 26632 26563 26631 3 26563 26632 26564 3 26633 26564 26632 3 26564 26633 26565 3 26634 26565 26633 3 26565 26634 26566 3 26635 26566 26634 3 26566 26635 26567 3 26636 26567 26635 3 26567 26636 26568 3 26637 26568 26636 3 26568 26637 26569 3 26638 26569 26637 3 26569 26638 26570 3 26639 26570 26638 3 26570 26639 26571 3 26640 26571 26639 3 26571 26640 26572 3 26641 26572 26640 3 26572 26641 26573 3 26642 26573 26641 3 26573 26642 26574 3 26643 26574 26642 3 26574 26643 26575 3 26644 26575 26643 3 26575 26644 26576 3 26645 26576 26644 3 26576 26645 26577 3 26646 26577 26645 3 26577 26646 26578 3 26647 26578 26646 3 26578 26647 26579 3 26648 26579 26647 3 26579 26648 26580 3 26649 26580 26648 3 26580 26649 26581 3 26650 26581 26649 3 26581 26650 26582 3 26651 26582 26650 3 26582 26651 26583 3 26652 26583 26651 3 26583 26652 26584 3 26653 26584 26652 3 26584 26653 26585 3 26654 26585 26653 3 26585 26654 26586 3 26655 26586 26654 3 26586 26655 26587 3 26656 26587 26655 3 26587 26656 26588 3 26657 26588 26656 3 26588 26657 26589 3 26658 26589 26657 3 26589 26658 26590 3 26659 26590 26658 3 26590 26659 26591 3 26660 26591 26659 3 26591 26660 26592 3 26661 26592 26660 3 26592 26661 26593 3 26662 26593 26661 3 26593 26662 26594 3 26663 26594 26662 3 26594 26663 26595 3 26664 26595 26663 3 26595 26664 26596 3 26665 26596 26664 3 26596 26665 26597 3 26666 26597 26665 3 26597 26666 26598 3 26667 26598 26666 3 26598 26667 26599 3 26668 26599 26667 3 26599 26668 26600 3 26669 26600 26668 3 26600 26669 26601 3 26670 26601 26669 3 26601 26670 26602 3 26671 26602 26670 3 26602 26671 26603 3 26672 26603 26671 3 26603 26672 26604 3 26673 26604 26672 3 26604 26673 26605 3 26674 26605 26673 3 26605 26674 26606 3 26675 26606 26674 3 26606 26675 26607 3 26676 26607 26675 3 26607 26676 26608 3 26677 26608 26676 3 26608 26677 26609 3 26678 26609 26677 3 26609 26678 26610 3 26679 26610 26678 3 26610 
26679 26611 3 26680 26611 26679 3 26611 26680 26612 3 26681 26612 26680 3 26612 26681 26613 3 26682 26613 26681 3 26613 26682 26614 3 26683 26614 26682 3 26614 26683 26615 3 26684 26615 26683 3 26615 26684 26616 3 26685 26616 26684 3 26616 26685 26617 3 26686 26617 26685 3 26617 26686 26618 3 26687 26618 26686 3 26618 26687 26688 3 26618 26688 26619 3 26619 26688 26689 3 26619 26689 26620 3 26620 26689 26690 3 26620 26690 26621 3 26621 26690 26691 3 26621 26691 26622 3 26622 26691 26692 3 26622 26692 26623 3 26624 26693 26694 3 26624 26694 26625 3 26625 26694 26695 3 26625 26695 26626 3 26626 26695 26696 3 26626 26696 26627 3 26627 26696 26697 3 26627 26697 26628 3 26628 26697 26698 3 26628 26698 26629 3 26629 26698 26699 3 26629 26699 26630 3 26630 26699 26700 3 26630 26700 26631 3 26631 26700 26701 3 26631 26701 26632 3 26632 26701 26702 3 26632 26702 26633 3 26633 26702 26703 3 26633 26703 26634 3 26634 26703 26704 3 26634 26704 26635 3 26635 26704 26705 3 26635 26705 26636 3 26636 26705 26706 3 26636 26706 26637 3 26637 26706 26707 3 26637 26707 26638 3 26638 26707 26708 3 26638 26708 26639 3 26639 26708 26709 3 26639 26709 26640 3 26640 26709 26710 3 26640 26710 26641 3 26641 26710 26711 3 26641 26711 26642 3 26642 26711 26712 3 26642 26712 26643 3 26643 26712 26713 3 26643 26713 26644 3 26644 26713 26714 3 26644 26714 26645 3 26645 26714 26715 3 26645 26715 26646 3 26646 26715 26716 3 26646 26716 26647 3 26647 26716 26717 3 26647 26717 26648 3 26648 26717 26718 3 26648 26718 26649 3 26649 26718 26719 3 26649 26719 26650 3 26650 26719 26720 3 26650 26720 26651 3 26651 26720 26721 3 26651 26721 26652 3 26652 26721 26722 3 26652 26722 26653 3 26653 26722 26723 3 26653 26723 26654 3 26654 26723 26724 3 26654 26724 26655 3 26655 26724 26725 3 26655 26725 26656 3 26656 26725 26726 3 26656 26726 26657 3 26657 26726 26727 3 26657 26727 26658 3 26658 26727 26728 3 26658 26728 26659 3 26659 26728 26729 3 26659 26729 26660 3 26660 26729 26730 3 26660 26730 26661 3 26661 
26730 26731 3 26661 26731 26662 3 26662 26731 26732 3 26662 26732 26663 3 26663 26732 26733 3 26663 26733 26664 3 26664 26733 26734 3 26664 26734 26665 3 26665 26734 26735 3 26665 26735 26666 3 26666 26735 26736 3 26666 26736 26667 3 26667 26736 26737 3 26667 26737 26668 3 26668 26737 26738 3 26668 26738 26669 3 26669 26738 26739 3 26669 26739 26670 3 26670 26739 26740 3 26670 26740 26671 3 26671 26740 26741 3 26671 26741 26672 3 26672 26741 26742 3 26672 26742 26673 3 26673 26742 26743 3 26673 26743 26674 3 26674 26743 26744 3 26674 26744 26675 3 26675 26744 26745 3 26675 26745 26676 3 26676 26745 26746 3 26676 26746 26677 3 26677 26746 26747 3 26677 26747 26678 3 26678 26747 26748 3 26678 26748 26679 3 26679 26748 26749 3 26679 26749 26680 3 26680 26749 26750 3 26680 26750 26681 3 26681 26750 26751 3 26681 26751 26682 3 26682 26751 26752 3 26682 26752 26683 3 26683 26752 26753 3 26683 26753 26684 3 26684 26753 26754 3 26684 26754 26685 3 26685 26754 26755 3 26685 26755 26686 3 26686 26755 26756 3 26686 26756 26687 3 26687 26756 26757 3 26687 26757 26688 3 26688 26757 26758 3 26688 26758 26689 3 26689 26758 26759 3 26689 26759 26690 3 26690 26759 26760 3 26690 26760 26691 3 26691 26760 26761 3 26691 26761 26692 3 26693 26762 26763 3 26693 26763 26694 3 26694 26763 26764 3 26694 26764 26695 3 26695 26764 26765 3 26695 26765 26696 3 26696 26765 26766 3 26696 26766 26697 3 26697 26766 26767 3 26697 26767 26698 3 26698 26767 26768 3 26698 26768 26699 3 26699 26768 26769 3 26699 26769 26700 3 26700 26769 26770 3 26700 26770 26701 3 26701 26770 26771 3 26701 26771 26702 3 26702 26771 26772 3 26702 26772 26703 3 26703 26772 26773 3 26703 26773 26704 3 26704 26773 26774 3 26704 26774 26705 3 26705 26774 26775 3 26705 26775 26706 3 26706 26775 26776 3 26706 26776 26707 3 26707 26776 26777 3 26707 26777 26708 3 26708 26777 26778 3 26708 26778 26709 3 26709 26778 26779 3 26709 26779 26710 3 26710 26779 26780 3 26710 26780 26711 3 26711 26780 26781 3 26711 26781 26712 3 26712 
26781 26782 3 26712 26782 26713 3 26713 26782 26783 3 26713 26783 26714 3 26714 26783 26784 3 26714 26784 26715 3 26715 26784 26785 3 26715 26785 26716 3 26716 26785 26786 3 26716 26786 26717 3 26717 26786 26787 3 26717 26787 26718 3 26718 26787 26719 3 26788 26719 26787 3 26719 26788 26720 3 26789 26720 26788 3 26720 26789 26721 3 26790 26721 26789 3 26721 26790 26722 3 26791 26722 26790 3 26722 26791 26723 3 26792 26723 26791 3 26723 26792 26724 3 26793 26724 26792 3 26724 26793 26725 3 26794 26725 26793 3 26725 26794 26726 3 26795 26726 26794 3 26726 26795 26727 3 26796 26727 26795 3 26727 26796 26728 3 26797 26728 26796 3 26728 26797 26729 3 26798 26729 26797 3 26729 26798 26730 3 26799 26730 26798 3 26730 26799 26731 3 26800 26731 26799 3 26731 26800 26732 3 26801 26732 26800 3 26732 26801 26733 3 26802 26733 26801 3 26733 26802 26734 3 26803 26734 26802 3 26734 26803 26735 3 26804 26735 26803 3 26735 26804 26736 3 26805 26736 26804 3 26736 26805 26737 3 26806 26737 26805 3 26737 26806 26738 3 26807 26738 26806 3 26738 26807 26739 3 26808 26739 26807 3 26739 26808 26740 3 26809 26740 26808 3 26740 26809 26741 3 26810 26741 26809 3 26741 26810 26742 3 26811 26742 26810 3 26742 26811 26743 3 26812 26743 26811 3 26743 26812 26744 3 26813 26744 26812 3 26744 26813 26745 3 26814 26745 26813 3 26745 26814 26746 3 26815 26746 26814 3 26746 26815 26747 3 26816 26747 26815 3 26747 26816 26748 3 26817 26748 26816 3 26748 26817 26749 3 26818 26749 26817 3 26749 26818 26750 3 26819 26750 26818 3 26750 26819 26751 3 26820 26751 26819 3 26751 26820 26752 3 26821 26752 26820 3 26752 26821 26753 3 26822 26753 26821 3 26753 26822 26754 3 26823 26754 26822 3 26754 26823 26755 3 26824 26755 26823 3 26755 26824 26756 3 26825 26756 26824 3 26756 26825 26757 3 26826 26757 26825 3 26757 26826 26758 3 26827 26758 26826 3 26758 26827 26759 3 26828 26759 26827 3 26759 26828 26760 3 26829 26760 26828 3 26760 26829 26761 3 26830 26761 26829 3 26762 26831 26763 3 26832 26763 26831 3 26763 
26832 26764 3 26833 26764 26832 3 26764 26833 26765 3 26834 26765 26833 3 26765 26834 26766 3 26835 26766 26834 3 26766 26835 26767 3 26836 26767 26835 3 26767 26836 26768 3 26837 26768 26836 3 26768 26837 26769 3 26838 26769 26837 3 26769 26838 26770 3 26839 26770 26838 3 26770 26839 26771 3 26840 26771 26839 3 26771 26840 26772 3 26841 26772 26840 3 26772 26841 26773 3 26842 26773 26841 3 26773 26842 26774 3 26843 26774 26842 3 26774 26843 26775 3 26844 26775 26843 3 26775 26844 26776 3 26845 26776 26844 3 26776 26845 26777 3 26846 26777 26845 3 26777 26846 26778 3 26847 26778 26846 3 26778 26847 26779 3 26848 26779 26847 3 26779 26848 26780 3 26849 26780 26848 3 26780 26849 26781 3 26850 26781 26849 3 26781 26850 26782 3 26851 26782 26850 3 26782 26851 26783 3 26852 26783 26851 3 26783 26852 26784 3 26853 26784 26852 3 26784 26853 26785 3 26854 26785 26853 3 26785 26854 26786 3 26855 26786 26854 3 26786 26855 26787 3 26856 26787 26855 3 26787 26856 26788 3 26857 26788 26856 3 26788 26857 26789 3 26858 26789 26857 3 26789 26858 26790 3 26859 26790 26858 3 26790 26859 26791 3 26860 26791 26859 3 26791 26860 26792 3 26861 26792 26860 3 26792 26861 26793 3 26862 26793 26861 3 26793 26862 26794 3 26863 26794 26862 3 26794 26863 26795 3 26864 26795 26863 3 26795 26864 26796 3 26865 26796 26864 3 26796 26865 26797 3 26866 26797 26865 3 26797 26866 26798 3 26867 26798 26866 3 26798 26867 26799 3 26868 26799 26867 3 26799 26868 26800 3 26869 26800 26868 3 26800 26869 26801 3 26870 26801 26869 3 26801 26870 26802 3 26871 26802 26870 3 26802 26871 26803 3 26872 26803 26871 3 26803 26872 26804 3 26873 26804 26872 3 26804 26873 26805 3 26874 26805 26873 3 26805 26874 26806 3 26875 26806 26874 3 26806 26875 26807 3 26876 26807 26875 3 26807 26876 26808 3 26877 26808 26876 3 26808 26877 26809 3 26878 26809 26877 3 26809 26878 26810 3 26879 26810 26878 3 26810 26879 26811 3 26880 26811 26879 3 26811 26880 26812 3 26881 26812 26880 3 26812 26881 26813 3 26882 26813 26881 3 26813 
26882 26814 3 26883 26814 26882 3 26814 26883 26815 3 26884 26815 26883 3 26815 26884 26816 3 26885 26816 26884 3 26816 26885 26817 3 26886 26817 26885 3 26817 26886 26818 3 26887 26818 26886 3 26818 26887 26819 3 26888 26819 26887 3 26819 26888 26889 3 26819 26889 26820 3 26820 26889 26890 3 26820 26890 26821 3 26821 26890 26891 3 26821 26891 26822 3 26822 26891 26892 3 26822 26892 26823 3 26823 26892 26893 3 26823 26893 26824 3 26824 26893 26894 3 26824 26894 26825 3 26825 26894 26895 3 26825 26895 26826 3 26826 26895 26896 3 26826 26896 26827 3 26827 26896 26897 3 26827 26897 26828 3 26828 26897 26898 3 26828 26898 26829 3 26829 26898 26899 3 26829 26899 26830 3 26831 26900 26901 3 26831 26901 26832 3 26832 26901 26902 3 26832 26902 26833 3 26833 26902 26903 3 26833 26903 26834 3 26834 26903 26904 3 26834 26904 26835 3 26835 26904 26905 3 26835 26905 26836 3 26836 26905 26906 3 26836 26906 26837 3 26837 26906 26907 3 26837 26907 26838 3 26838 26907 26908 3 26838 26908 26839 3 26839 26908 26909 3 26839 26909 26840 3 26840 26909 26910 3 26840 26910 26841 3 26841 26910 26911 3 26841 26911 26842 3 26842 26911 26912 3 26842 26912 26843 3 26843 26912 26913 3 26843 26913 26844 3 26844 26913 26914 3 26844 26914 26845 3 26845 26914 26915 3 26845 26915 26846 3 26846 26915 26916 3 26846 26916 26847 3 26847 26916 26917 3 26847 26917 26848 3 26848 26917 26918 3 26848 26918 26849 3 26849 26918 26919 3 26849 26919 26850 3 26850 26919 26920 3 26850 26920 26851 3 26851 26920 26921 3 26851 26921 26852 3 26852 26921 26922 3 26852 26922 26853 3 26853 26922 26923 3 26853 26923 26854 3 26854 26923 26924 3 26854 26924 26855 3 26855 26924 26925 3 26855 26925 26856 3 26856 26925 26926 3 26856 26926 26857 3 26857 26926 26927 3 26857 26927 26858 3 26858 26927 26928 3 26858 26928 26859 3 26859 26928 26929 3 26859 26929 26860 3 26860 26929 26930 3 26860 26930 26861 3 26861 26930 26931 3 26861 26931 26862 3 26862 26931 26932 3 26862 26932 26863 3 26863 26932 26933 3 26863 26933 26864 3 26864 
26933 26934 3 26864 26934 26865 3 26865 26934 26935 3 26865 26935 26866 3 26866 26935 26936 3 26866 26936 26867 3 26867 26936 26937 3 26867 26937 26868 3 26868 26937 26938 3 26868 26938 26869 3 26869 26938 26939 3 26869 26939 26870 3 26870 26939 26940 3 26870 26940 26871 3 26871 26940 26941 3 26871 26941 26872 3 26872 26941 26942 3 26872 26942 26873 3 26873 26942 26943 3 26873 26943 26874 3 26874 26943 26944 3 26874 26944 26875 3 26875 26944 26945 3 26875 26945 26876 3 26876 26945 26946 3 26876 26946 26877 3 26877 26946 26947 3 26877 26947 26878 3 26878 26947 26948 3 26878 26948 26879 3 26879 26948 26949 3 26879 26949 26880 3 26880 26949 26950 3 26880 26950 26881 3 26881 26950 26951 3 26881 26951 26882 3 26882 26951 26952 3 26882 26952 26883 3 26883 26952 26953 3 26883 26953 26884 3 26884 26953 26954 3 26884 26954 26885 3 26885 26954 26955 3 26885 26955 26886 3 26886 26955 26956 3 26886 26956 26887 3 26887 26956 26957 3 26887 26957 26888 3 26888 26957 26958 3 26888 26958 26889 3 26889 26958 26959 3 26889 26959 26890 3 26890 26959 26960 3 26890 26960 26891 3 26891 26960 26961 3 26891 26961 26892 3 26892 26961 26962 3 26892 26962 26893 3 26893 26962 26963 3 26893 26963 26894 3 26894 26963 26964 3 26894 26964 26895 3 26895 26964 26965 3 26895 26965 26896 3 26896 26965 26966 3 26896 26966 26897 3 26897 26966 26967 3 26897 26967 26898 3 26898 26967 26968 3 26898 26968 26899 3 26900 26969 26970 3 26900 26970 26901 3 26901 26970 26971 3 26901 26971 26902 3 26902 26971 26972 3 26902 26972 26903 3 26903 26972 26973 3 26903 26973 26904 3 26904 26973 26974 3 26904 26974 26905 3 26905 26974 26975 3 26905 26975 26906 3 26906 26975 26976 3 26906 26976 26907 3 26907 26976 26977 3 26907 26977 26908 3 26908 26977 26978 3 26908 26978 26909 3 26909 26978 26979 3 26909 26979 26910 3 26910 26979 26980 3 26910 26980 26911 3 26911 26980 26981 3 26911 26981 26912 3 26912 26981 26982 3 26912 26982 26913 3 26913 26982 26983 3 26913 26983 26914 3 26914 26983 26984 3 26914 26984 26915 3 26915 
26984 26985 3 26915 26985 26916 3 26916 26985 26986 3 26916 26986 26917 3 26917 26986 26987 3 26917 26987 26918 3 26918 26987 26988 3 26918 26988 26919 3 26919 26988 26989 3 26919 26989 26920 3 26920 26989 26921 3 26990 26921 26989 3 26921 26990 26922 3 26991 26922 26990 3 26922 26991 26923 3 26992 26923 26991 3 26923 26992 26924 3 26993 26924 26992 3 26924 26993 26925 3 26994 26925 26993 3 26925 26994 26926 3 26995 26926 26994 3 26926 26995 26927 3 26996 26927 26995 3 26927 26996 26928 3 26997 26928 26996 3 26928 26997 26929 3 26998 26929 26997 3 26929 26998 26930 3 26999 26930 26998 3 26930 26999 26931 3 27000 26931 26999 3 26931 27000 26932 3 27001 26932 27000 3 26932 27001 26933 3 27002 26933 27001 3 26933 27002 26934 3 27003 26934 27002 3 26934 27003 26935 3 27004 26935 27003 3 26935 27004 26936 3 27005 26936 27004 3 26936 27005 26937 3 27006 26937 27005 3 26937 27006 26938 3 27007 26938 27006 3 26938 27007 26939 3 27008 26939 27007 3 26939 27008 26940 3 27009 26940 27008 3 26940 27009 26941 3 27010 26941 27009 3 26941 27010 26942 3 27011 26942 27010 3 26942 27011 26943 3 27012 26943 27011 3 26943 27012 26944 3 27013 26944 27012 3 26944 27013 26945 3 27014 26945 27013 3 26945 27014 26946 3 27015 26946 27014 3 26946 27015 26947 3 27016 26947 27015 3 26947 27016 26948 3 27017 26948 27016 3 26948 27017 26949 3 27018 26949 27017 3 26949 27018 26950 3 27019 26950 27018 3 26950 27019 26951 3 27020 26951 27019 3 26951 27020 26952 3 27021 26952 27020 3 26952 27021 26953 3 27022 26953 27021 3 26953 27022 26954 3 27023 26954 27022 3 26954 27023 26955 3 27024 26955 27023 3 26955 27024 26956 3 27025 26956 27024 3 26956 27025 26957 3 27026 26957 27025 3 26957 27026 26958 3 27027 26958 27026 3 26958 27027 26959 3 27028 26959 27027 3 26959 27028 26960 3 27029 26960 27028 3 26960 27029 26961 3 27030 26961 27029 3 26961 27030 26962 3 27031 26962 27030 3 26962 27031 26963 3 27032 26963 27031 3 26963 27032 26964 3 27033 26964 27032 3 26964 27033 26965 3 27034 26965 27033 3 26965 
27034 26966 3 27035 26966 27034 3 26966 27035 26967 3 27036 26967 27035 3 26967 27036 26968 3 27037 26968 27036 3 26969 27038 26970 3 27039 26970 27038 3 26970 27039 26971 3 27040 26971 27039 3 26971 27040 26972 3 27041 26972 27040 3 26972 27041 26973 3 27042 26973 27041 3 26973 27042 26974 3 27043 26974 27042 3 26974 27043 26975 3 27044 26975 27043 3 26975 27044 26976 3 27045 26976 27044 3 26976 27045 26977 3 27046 26977 27045 3 26977 27046 26978 3 27047 26978 27046 3 26978 27047 26979 3 27048 26979 27047 3 26979 27048 26980 3 27049 26980 27048 3 26980 27049 26981 3 27050 26981 27049 3 26981 27050 26982 3 27051 26982 27050 3 26982 27051 26983 3 27052 26983 27051 3 26983 27052 26984 3 27053 26984 27052 3 26984 27053 26985 3 27054 26985 27053 3 26985 27054 26986 3 27055 26986 27054 3 26986 27055 26987 3 27056 26987 27055 3 26987 27056 26988 3 27057 26988 27056 3 26988 27057 26989 3 27058 26989 27057 3 26989 27058 26990 3 27059 26990 27058 3 26990 27059 26991 3 27060 26991 27059 3 26991 27060 26992 3 27061 26992 27060 3 26992 27061 26993 3 27062 26993 27061 3 26993 27062 26994 3 27063 26994 27062 3 26994 27063 26995 3 27064 26995 27063 3 26995 27064 26996 3 27065 26996 27064 3 26996 27065 26997 3 27066 26997 27065 3 26997 27066 26998 3 27067 26998 27066 3 26998 27067 26999 3 27068 26999 27067 3 26999 27068 27000 3 27069 27000 27068 3 27000 27069 27001 3 27070 27001 27069 3 27001 27070 27002 3 27071 27002 27070 3 27002 27071 27003 3 27072 27003 27071 3 27003 27072 27004 3 27073 27004 27072 3 27004 27073 27005 3 27074 27005 27073 3 27005 27074 27006 3 27075 27006 27074 3 27006 27075 27007 3 27076 27007 27075 3 27007 27076 27008 3 27077 27008 27076 3 27008 27077 27009 3 27078 27009 27077 3 27009 27078 27010 3 27079 27010 27078 3 27010 27079 27011 3 27080 27011 27079 3 27011 27080 27012 3 27081 27012 27080 3 27012 27081 27013 3 27082 27013 27081 3 27013 27082 27014 3 27083 27014 27082 3 27014 27083 27015 3 27084 27015 27083 3 27015 27084 27016 3 27085 27016 27084 3 27016 
27085 27017 3 27086 27017 27085 3 27017 27086 27018 3 27087 27018 27086 3 27018 27087 27019 3 27088 27019 27087 3 27019 27088 27020 3 27089 27020 27088 3 27020 27089 27021 3 27090 27021 27089 3 27021 27090 27091 3 27021 27091 27022 3 27022 27091 27092 3 27022 27092 27023 3 27023 27092 27093 3 27023 27093 27024 3 27024 27093 27094 3 27024 27094 27025 3 27025 27094 27095 3 27025 27095 27026 3 27026 27095 27096 3 27026 27096 27027 3 27027 27096 27097 3 27027 27097 27028 3 27028 27097 27098 3 27028 27098 27029 3 27029 27098 27099 3 27029 27099 27030 3 27030 27099 27100 3 27030 27100 27031 3 27031 27100 27101 3 27031 27101 27032 3 27032 27101 27102 3 27032 27102 27033 3 27033 27102 27103 3 27033 27103 27034 3 27034 27103 27104 3 27034 27104 27035 3 27035 27104 27105 3 27035 27105 27036 3 27036 27105 27106 3 27036 27106 27037 3 27038 27107 27108 3 27038 27108 27039 3 27039 27108 27109 3 27039 27109 27040 3 27040 27109 27110 3 27040 27110 27041 3 27041 27110 27111 3 27041 27111 27042 3 27042 27111 27112 3 27042 27112 27043 3 27043 27112 27113 3 27043 27113 27044 3 27044 27113 27114 3 27044 27114 27045 3 27045 27114 27115 3 27045 27115 27046 3 27046 27115 27116 3 27046 27116 27047 3 27047 27116 27117 3 27047 27117 27048 3 27048 27117 27118 3 27048 27118 27049 3 27049 27118 27119 3 27049 27119 27050 3 27050 27119 27120 3 27050 27120 27051 3 27051 27120 27121 3 27051 27121 27052 3 27052 27121 27122 3 27052 27122 27053 3 27053 27122 27123 3 27053 27123 27054 3 27054 27123 27124 3 27054 27124 27055 3 27055 27124 27125 3 27055 27125 27056 3 27056 27125 27126 3 27056 27126 27057 3 27057 27126 27127 3 27057 27127 27058 3 27058 27127 27128 3 27058 27128 27059 3 27059 27128 27129 3 27059 27129 27060 3 27060 27129 27130 3 27060 27130 27061 3 27061 27130 27131 3 27061 27131 27062 3 27062 27131 27132 3 27062 27132 27063 3 27063 27132 27133 3 27063 27133 27064 3 27064 27133 27134 3 27064 27134 27065 3 27065 27134 27135 3 27065 27135 27066 3 27066 27135 27136 3 27066 27136 27067 3 27067 
27136 27137 3 27067 27137 27068 3 27068 27137 27138 3 27068 27138 27069 3 27069 27138 27139 3 27069 27139 27070 3 27070 27139 27140 3 27070 27140 27071 3 27071 27140 27141 3 27071 27141 27072 3 27072 27141 27142 3 27072 27142 27073 3 27073 27142 27143 3 27073 27143 27074 3 27074 27143 27144 3 27074 27144 27075 3 27075 27144 27145 3 27075 27145 27076 3 27076 27145 27146 3 27076 27146 27077 3 27077 27146 27147 3 27077 27147 27078 3 27078 27147 27148 3 27078 27148 27079 3 27079 27148 27149 3 27079 27149 27080 3 27080 27149 27150 3 27080 27150 27081 3 27081 27150 27151 3 27081 27151 27082 3 27082 27151 27152 3 27082 27152 27083 3 27083 27152 27153 3 27083 27153 27084 3 27084 27153 27154 3 27084 27154 27085 3 27085 27154 27155 3 27085 27155 27086 3 27086 27155 27156 3 27086 27156 27087 3 27087 27156 27157 3 27087 27157 27088 3 27088 27157 27158 3 27088 27158 27089 3 27089 27158 27159 3 27089 27159 27090 3 27090 27159 27160 3 27090 27160 27091 3 27091 27160 27161 3 27091 27161 27092 3 27092 27161 27162 3 27092 27162 27093 3 27093 27162 27163 3 27093 27163 27094 3 27094 27163 27164 3 27094 27164 27095 3 27095 27164 27165 3 27095 27165 27096 3 27096 27165 27166 3 27096 27166 27097 3 27097 27166 27167 3 27097 27167 27098 3 27098 27167 27168 3 27098 27168 27099 3 27099 27168 27169 3 27099 27169 27100 3 27100 27169 27170 3 27100 27170 27101 3 27101 27170 27171 3 27101 27171 27102 3 27102 27171 27172 3 27102 27172 27103 3 27103 27172 27173 3 27103 27173 27104 3 27104 27173 27174 3 27104 27174 27105 3 27105 27174 27175 3 27105 27175 27106 3 27107 27176 27177 3 27107 27177 27108 3 27108 27177 27178 3 27108 27178 27109 3 27109 27178 27179 3 27109 27179 27110 3 27110 27179 27180 3 27110 27180 27111 3 27111 27180 27181 3 27111 27181 27112 3 27112 27181 27182 3 27112 27182 27113 3 27113 27182 27183 3 27113 27183 27114 3 27114 27183 27184 3 27114 27184 27115 3 27115 27184 27185 3 27115 27185 27116 3 27116 27185 27186 3 27116 27186 27117 3 27117 27186 27187 3 27117 27187 27118 3 27118 
27187 27188 3 27118 27188 27119 3 27119 27188 27189 3 27119 27189 27120 3 27120 27189 27190 3 27120 27190 27121 3 27121 27190 27191 3 27121 27191 27122 3 27122 27191 27123 3 27192 27123 27191 3 27123 27192 27124 3 27193 27124 27192 3 27124 27193 27125 3 27194 27125 27193 3 27125 27194 27126 3 27195 27126 27194 3 27126 27195 27127 3 27196 27127 27195 3 27127 27196 27128 3 27197 27128 27196 3 27128 27197 27129 3 27198 27129 27197 3 27129 27198 27130 3 27199 27130 27198 3 27130 27199 27131 3 27200 27131 27199 3 27131 27200 27132 3 27201 27132 27200 3 27132 27201 27133 3 27202 27133 27201 3 27133 27202 27134 3 27203 27134 27202 3 27134 27203 27135 3 27204 27135 27203 3 27135 27204 27136 3 27205 27136 27204 3 27136 27205 27137 3 27206 27137 27205 3 27137 27206 27138 3 27207 27138 27206 3 27138 27207 27139 3 27208 27139 27207 3 27139 27208 27140 3 27209 27140 27208 3 27140 27209 27141 3 27210 27141 27209 3 27141 27210 27142 3 27211 27142 27210 3 27142 27211 27143 3 27212 27143 27211 3 27143 27212 27144 3 27213 27144 27212 3 27144 27213 27145 3 27214 27145 27213 3 27145 27214 27146 3 27215 27146 27214 3 27146 27215 27147 3 27216 27147 27215 3 27147 27216 27148 3 27217 27148 27216 3 27148 27217 27149 3 27218 27149 27217 3 27149 27218 27150 3 27219 27150 27218 3 27150 27219 27151 3 27220 27151 27219 3 27151 27220 27152 3 27221 27152 27220 3 27152 27221 27153 3 27222 27153 27221 3 27153 27222 27154 3 27223 27154 27222 3 27154 27223 27155 3 27224 27155 27223 3 27155 27224 27156 3 27225 27156 27224 3 27156 27225 27157 3 27226 27157 27225 3 27157 27226 27158 3 27227 27158 27226 3 27158 27227 27159 3 27228 27159 27227 3 27159 27228 27160 3 27229 27160 27228 3 27160 27229 27161 3 27230 27161 27229 3 27161 27230 27162 3 27231 27162 27230 3 27162 27231 27163 3 27232 27163 27231 3 27163 27232 27164 3 27233 27164 27232 3 27164 27233 27165 3 27234 27165 27233 3 27165 27234 27166 3 27235 27166 27234 3 27166 27235 27167 3 27236 27167 27235 3 27167 27236 27168 3 27237 27168 27236 3 27168 
27237 27169 3 27238 27169 27237 3 27169 27238 27170 3 27239 27170 27238 3 27170 27239 27171 3 27240 27171 27239 3 27171 27240 27172 3 27241 27172 27240 3 27172 27241 27173 3 27242 27173 27241 3 27173 27242 27174 3 27243 27174 27242 3 27174 27243 27175 3 27244 27175 27243 3 27176 27245 27177 3 27246 27177 27245 3 27177 27246 27178 3 27247 27178 27246 3 27178 27247 27179 3 27248 27179 27247 3 27179 27248 27180 3 27249 27180 27248 3 27180 27249 27181 3 27250 27181 27249 3 27181 27250 27182 3 27251 27182 27250 3 27182 27251 27183 3 27252 27183 27251 3 27183 27252 27184 3 27253 27184 27252 3 27184 27253 27185 3 27254 27185 27253 3 27185 27254 27186 3 27255 27186 27254 3 27186 27255 27187 3 27256 27187 27255 3 27187 27256 27188 3 27257 27188 27256 3 27188 27257 27189 3 27258 27189 27257 3 27189 27258 27190 3 27259 27190 27258 3 27190 27259 27191 3 27260 27191 27259 3 27191 27260 27192 3 27261 27192 27260 3 27192 27261 27193 3 27262 27193 27261 3 27193 27262 27194 3 27263 27194 27262 3 27194 27263 27195 3 27264 27195 27263 3 27195 27264 27196 3 27265 27196 27264 3 27196 27265 27197 3 27266 27197 27265 3 27197 27266 27198 3 27267 27198 27266 3 27198 27267 27199 3 27268 27199 27267 3 27199 27268 27200 3 27269 27200 27268 3 27200 27269 27201 3 27270 27201 27269 3 27201 27270 27202 3 27271 27202 27270 3 27202 27271 27203 3 27272 27203 27271 3 27203 27272 27204 3 27273 27204 27272 3 27204 27273 27205 3 27274 27205 27273 3 27205 27274 27206 3 27275 27206 27274 3 27206 27275 27207 3 27276 27207 27275 3 27207 27276 27208 3 27277 27208 27276 3 27208 27277 27209 3 27278 27209 27277 3 27209 27278 27210 3 27279 27210 27278 3 27210 27279 27211 3 27280 27211 27279 3 27211 27280 27212 3 27281 27212 27280 3 27212 27281 27213 3 27282 27213 27281 3 27213 27282 27214 3 27283 27214 27282 3 27214 27283 27215 3 27284 27215 27283 3 27215 27284 27216 3 27285 27216 27284 3 27216 27285 27217 3 27286 27217 27285 3 27217 27286 27218 3 27287 27218 27286 3 27218 27287 27219 3 27288 27219 27287 3 27219 
27288 27220 3 27289 27220 27288 3 27220 27289 27221 3 27290 27221 27289 3 27221 27290 27222 3 27291 27222 27290 3 27222 27291 27223 3 27292 27223 27291 3 27223 27292 27224 3 27293 27224 27292 3 27224 27293 27294 3 27224 27294 27225 3 27225 27294 27295 3 27225 27295 27226 3 27226 27295 27296 3 27226 27296 27227 3 27227 27296 27297 3 27227 27297 27228 3 27228 27297 27298 3 27228 27298 27229 3 27229 27298 27299 3 27229 27299 27230 3 27230 27299 27300 3 27230 27300 27231 3 27231 27300 27301 3 27231 27301 27232 3 27232 27301 27302 3 27232 27302 27233 3 27233 27302 27303 3 27233 27303 27234 3 27234 27303 27304 3 27234 27304 27235 3 27235 27304 27305 3 27235 27305 27236 3 27236 27305 27306 3 27236 27306 27237 3 27237 27306 27307 3 27237 27307 27238 3 27238 27307 27308 3 27238 27308 27239 3 27239 27308 27309 3 27239 27309 27240 3 27240 27309 27310 3 27240 27310 27241 3 27241 27310 27311 3 27241 27311 27242 3 27242 27311 27312 3 27242 27312 27243 3 27243 27312 27313 3 27243 27313 27244 3 27245 27314 27315 3 27245 27315 27246 3 27246 27315 27316 3 27246 27316 27247 3 27247 27316 27317 3 27247 27317 27248 3 27248 27317 27318 3 27248 27318 27249 3 27249 27318 27319 3 27249 27319 27250 3 27250 27319 27320 3 27250 27320 27251 3 27251 27320 27321 3 27251 27321 27252 3 27252 27321 27322 3 27252 27322 27253 3 27253 27322 27323 3 27253 27323 27254 3 27254 27323 27324 3 27254 27324 27255 3 27255 27324 27325 3 27255 27325 27256 3 27256 27325 27326 3 27256 27326 27257 3 27257 27326 27327 3 27257 27327 27258 3 27258 27327 27328 3 27258 27328 27259 3 27259 27328 27329 3 27259 27329 27260 3 27260 27329 27330 3 27260 27330 27261 3 27261 27330 27331 3 27261 27331 27262 3 27262 27331 27332 3 27262 27332 27263 3 27263 27332 27333 3 27263 27333 27264 3 27264 27333 27334 3 27264 27334 27265 3 27265 27334 27335 3 27265 27335 27266 3 27266 27335 27336 3 27266 27336 27267 3 27267 27336 27337 3 27267 27337 27268 3 27268 27337 27338 3 27268 27338 27269 3 27269 27338 27339 3 27269 27339 27270 3 27270 
27339 27340 3 27270 27340 27271 3 27271 27340 27341 3 27271 27341 27272 3 27272 27341 27342 3 27272 27342 27273 3 27273 27342 27343 3 27273 27343 27274 3 27274 27343 27344 3 27274 27344 27275 3 27275 27344 27345 3 27275 27345 27276 3 27276 27345 27346 3 27276 27346 27277 3 27277 27346 27347 3 27277 27347 27278 3 27278 27347 27348 3 27278 27348 27279 3 27279 27348 27349 3 27279 27349 27280 3 27280 27349 27350 3 27280 27350 27281 3 27281 27350 27351 3 27281 27351 27282 3 27282 27351 27352 3 27282 27352 27283 3 27283 27352 27353 3 27283 27353 27284 3 27284 27353 27354 3 27284 27354 27285 3 27285 27354 27355 3 27285 27355 27286 3 27286 27355 27356 3 27286 27356 27287 3 27287 27356 27357 3 27287 27357 27288 3 27288 27357 27358 3 27288 27358 27289 3 27289 27358 27359 3 27289 27359 27290 3 27290 27359 27360 3 27290 27360 27291 3 27291 27360 27361 3 27291 27361 27292 3 27292 27361 27362 3 27292 27362 27293 3 27293 27362 27363 3 27293 27363 27294 3 27294 27363 27364 3 27294 27364 27295 3 27295 27364 27365 3 27295 27365 27296 3 27296 27365 27366 3 27296 27366 27297 3 27297 27366 27367 3 27297 27367 27298 3 27298 27367 27368 3 27298 27368 27299 3 27299 27368 27369 3 27299 27369 27300 3 27300 27369 27370 3 27300 27370 27301 3 27301 27370 27371 3 27301 27371 27302 3 27302 27371 27372 3 27302 27372 27303 3 27303 27372 27373 3 27303 27373 27304 3 27304 27373 27374 3 27304 27374 27305 3 27305 27374 27375 3 27305 27375 27306 3 27306 27375 27376 3 27306 27376 27307 3 27307 27376 27377 3 27307 27377 27308 3 27308 27377 27378 3 27308 27378 27309 3 27309 27378 27379 3 27309 27379 27310 3 27310 27379 27380 3 27310 27380 27311 3 27311 27380 27381 3 27311 27381 27312 3 27312 27381 27382 3 27312 27382 27313 3 27314 27383 27384 3 27314 27384 27315 3 27315 27384 27385 3 27315 27385 27316 3 27316 27385 27386 3 27316 27386 27317 3 27317 27386 27387 3 27317 27387 27318 3 27318 27387 27388 3 27318 27388 27319 3 27319 27388 27389 3 27319 27389 27320 3 27320 27389 27390 3 27320 27390 27321 3 27321 
27390 27391 3 27321 27391 27322 3 27322 27391 27392 3 27322 27392 27323 3 27323 27392 27393 3 27323 27393 27324 3 27324 27393 27394 3 27324 27394 27325 3 27325 27394 27395 3 27325 27395 27326 3 27326 27395 27327 3 27396 27327 27395 3 27327 27396 27328 3 27397 27328 27396 3 27328 27397 27329 3 27398 27329 27397 3 27329 27398 27330 3 27399 27330 27398 3 27330 27399 27331 3 27400 27331 27399 3 27331 27400 27332 3 27401 27332 27400 3 27332 27401 27333 3 27402 27333 27401 3 27333 27402 27334 3 27403 27334 27402 3 27334 27403 27335 3 27404 27335 27403 3 27335 27404 27336 3 27405 27336 27404 3 27336 27405 27337 3 27406 27337 27405 3 27337 27406 27338 3 27407 27338 27406 3 27338 27407 27339 3 27408 27339 27407 3 27339 27408 27340 3 27409 27340 27408 3 27340 27409 27341 3 27410 27341 27409 3 27341 27410 27342 3 27411 27342 27410 3 27342 27411 27343 3 27412 27343 27411 3 27343 27412 27344 3 27413 27344 27412 3 27344 27413 27345 3 27414 27345 27413 3 27345 27414 27346 3 27415 27346 27414 3 27346 27415 27347 3 27416 27347 27415 3 27347 27416 27348 3 27417 27348 27416 3 27348 27417 27349 3 27418 27349 27417 3 27349 27418 27350 3 27419 27350 27418 3 27350 27419 27351 3 27420 27351 27419 3 27351 27420 27352 3 27421 27352 27420 3 27352 27421 27353 3 27422 27353 27421 3 27353 27422 27354 3 27423 27354 27422 3 27354 27423 27355 3 27424 27355 27423 3 27355 27424 27356 3 27425 27356 27424 3 27356 27425 27357 3 27426 27357 27425 3 27357 27426 27358 3 27427 27358 27426 3 27358 27427 27359 3 27428 27359 27427 3 27359 27428 27360 3 27429 27360 27428 3 27360 27429 27361 3 27430 27361 27429 3 27361 27430 27362 3 27431 27362 27430 3 27362 27431 27363 3 27432 27363 27431 3 27363 27432 27364 3 27433 27364 27432 3 27364 27433 27365 3 27434 27365 27433 3 27365 27434 27366 3 27435 27366 27434 3 27366 27435 27367 3 27436 27367 27435 3 27367 27436 27368 3 27437 27368 27436 3 27368 27437 27369 3 27438 27369 27437 3 27369 27438 27370 3 27439 27370 27438 3 27370 27439 27371 3 27440 27371 27439 3 27371 
27440 27372 3 27441 27372 27440 3 27372 27441 27373 3 27442 27373 27441 3 27373 27442 27374 3 27443 27374 27442 3 27374 27443 27375 3 27444 27375 27443 3 27375 27444 27376 3 27445 27376 27444 3 27376 27445 27377 3 27446 27377 27445 3 27377 27446 27378 3 27447 27378 27446 3 27378 27447 27379 3 27448 27379 27447 3 27379 27448 27380 3 27449 27380 27448 3 27380 27449 27381 3 27450 27381 27449 3 27381 27450 27382 3 27451 27382 27450 3 27383 27452 27384 3 27453 27384 27452 3 27384 27453 27385 3 27454 27385 27453 3 27385 27454 27386 3 27455 27386 27454 3 27386 27455 27387 3 27456 27387 27455 3 27387 27456 27388 3 27457 27388 27456 3 27388 27457 27389 3 27458 27389 27457 3 27389 27458 27390 3 27459 27390 27458 3 27390 27459 27391 3 27460 27391 27459 3 27391 27460 27392 3 27461 27392 27460 3 27392 27461 27393 3 27462 27393 27461 3 27393 27462 27394 3 27463 27394 27462 3 27394 27463 27395 3 27464 27395 27463 3 27395 27464 27396 3 27465 27396 27464 3 27396 27465 27397 3 27466 27397 27465 3 27397 27466 27398 3 27467 27398 27466 3 27398 27467 27399 3 27468 27399 27467 3 27399 27468 27400 3 27469 27400 27468 3 27400 27469 27401 3 27470 27401 27469 3 27401 27470 27402 3 27471 27402 27470 3 27402 27471 27403 3 27472 27403 27471 3 27403 27472 27404 3 27473 27404 27472 3 27404 27473 27405 3 27474 27405 27473 3 27405 27474 27406 3 27475 27406 27474 3 27406 27475 27407 3 27476 27407 27475 3 27407 27476 27408 3 27477 27408 27476 3 27408 27477 27409 3 27478 27409 27477 3 27409 27478 27410 3 27479 27410 27478 3 27410 27479 27411 3 27480 27411 27479 3 27411 27480 27412 3 27481 27412 27480 3 27412 27481 27413 3 27482 27413 27481 3 27413 27482 27414 3 27483 27414 27482 3 27414 27483 27415 3 27484 27415 27483 3 27415 27484 27416 3 27485 27416 27484 3 27416 27485 27417 3 27486 27417 27485 3 27417 27486 27418 3 27487 27418 27486 3 27418 27487 27419 3 27488 27419 27487 3 27419 27488 27420 3 27489 27420 27488 3 27420 27489 27421 3 27490 27421 27489 3 27421 27490 27422 3 27491 27422 27490 3 27422 
27491 27423 3 27492 27423 27491 3 27423 27492 27424 3 27493 27424 27492 3 27424 27493 27425 3 27494 27425 27493 3 27425 27494 27426 3 27495 27426 27494 3 27426 27495 27427 3 27496 27427 27495 3 27427 27496 27428 3 27497 27428 27496 3 27428 27497 27498 3 27428 27498 27429 3 27429 27498 27499 3 27429 27499 27430 3 27430 27499 27500 3 27430 27500 27431 3 27431 27500 27501 3 27431 27501 27432 3 27432 27501 27502 3 27432 27502 27433 3 27433 27502 27503 3 27433 27503 27434 3 27434 27503 27504 3 27434 27504 27435 3 27435 27504 27505 3 27435 27505 27436 3 27436 27505 27506 3 27436 27506 27437 3 27437 27506 27507 3 27437 27507 27438 3 27438 27507 27508 3 27438 27508 27439 3 27439 27508 27509 3 27439 27509 27440 3 27440 27509 27510 3 27440 27510 27441 3 27441 27510 27511 3 27441 27511 27442 3 27442 27511 27512 3 27442 27512 27443 3 27443 27512 27513 3 27443 27513 27444 3 27444 27513 27514 3 27444 27514 27445 3 27445 27514 27515 3 27445 27515 27446 3 27446 27515 27516 3 27446 27516 27447 3 27447 27516 27517 3 27447 27517 27448 3 27448 27517 27518 3 27448 27518 27449 3 27449 27518 27519 3 27449 27519 27450 3 27450 27519 27520 3 27450 27520 27451 3 27452 27521 27522 3 27452 27522 27453 3 27453 27522 27523 3 27453 27523 27454 3 27454 27523 27524 3 27454 27524 27455 3 27455 27524 27525 3 27455 27525 27456 3 27456 27525 27526 3 27456 27526 27457 3 27457 27526 27527 3 27457 27527 27458 3 27458 27527 27528 3 27458 27528 27459 3 27459 27528 27529 3 27459 27529 27460 3 27460 27529 27530 3 27460 27530 27461 3 27461 27530 27531 3 27461 27531 27462 3 27462 27531 27532 3 27462 27532 27463 3 27463 27532 27533 3 27463 27533 27464 3 27464 27533 27534 3 27464 27534 27465 3 27465 27534 27535 3 27465 27535 27466 3 27466 27535 27536 3 27466 27536 27467 3 27467 27536 27537 3 27467 27537 27468 3 27468 27537 27538 3 27468 27538 27469 3 27469 27538 27539 3 27469 27539 27470 3 27470 27539 27540 3 27470 27540 27471 3 27471 27540 27541 3 27471 27541 27472 3 27472 27541 27542 3 27472 27542 27473 3 27473 
27542 27543 3 27473 27543 27474 3 27474 27543 27544 3 27474 27544 27475 3 27475 27544 27545 3 27475 27545 27476 3 27476 27545 27546 3 27476 27546 27477 3 27477 27546 27547 3 27477 27547 27478 3 27478 27547 27548 3 27478 27548 27479 3 27479 27548 27549 3 27479 27549 27480 3 27480 27549 27550 3 27480 27550 27481 3 27481 27550 27551 3 27481 27551 27482 3 27482 27551 27552 3 27482 27552 27483 3 27483 27552 27553 3 27483 27553 27484 3 27484 27553 27554 3 27484 27554 27485 3 27485 27554 27555 3 27485 27555 27486 3 27486 27555 27556 3 27486 27556 27487 3 27487 27556 27557 3 27487 27557 27488 3 27488 27557 27558 3 27488 27558 27489 3 27489 27558 27559 3 27489 27559 27490 3 27490 27559 27560 3 27490 27560 27491 3 27491 27560 27561 3 27491 27561 27492 3 27492 27561 27562 3 27492 27562 27493 3 27493 27562 27563 3 27493 27563 27494 3 27494 27563 27564 3 27494 27564 27495 3 27495 27564 27565 3 27495 27565 27496 3 27496 27565 27566 3 27496 27566 27497 3 27497 27566 27567 3 27497 27567 27498 3 27498 27567 27568 3 27498 27568 27499 3 27499 27568 27569 3 27499 27569 27500 3 27500 27569 27570 3 27500 27570 27501 3 27501 27570 27571 3 27501 27571 27502 3 27502 27571 27572 3 27502 27572 27503 3 27503 27572 27573 3 27503 27573 27504 3 27504 27573 27574 3 27504 27574 27505 3 27505 27574 27575 3 27505 27575 27506 3 27506 27575 27576 3 27506 27576 27507 3 27507 27576 27577 3 27507 27577 27508 3 27508 27577 27578 3 27508 27578 27509 3 27509 27578 27579 3 27509 27579 27510 3 27510 27579 27580 3 27510 27580 27511 3 27511 27580 27581 3 27511 27581 27512 3 27512 27581 27582 3 27512 27582 27513 3 27513 27582 27583 3 27513 27583 27514 3 27514 27583 27584 3 27514 27584 27515 3 27515 27584 27585 3 27515 27585 27516 3 27516 27585 27586 3 27516 27586 27517 3 27517 27586 27587 3 27517 27587 27518 3 27518 27587 27588 3 27518 27588 27519 3 27519 27588 27589 3 27519 27589 27520 3 27521 27590 27591 3 27521 27591 27522 3 27522 27591 27592 3 27522 27592 27523 3 27523 27592 27593 3 27523 27593 27524 3 27524 
27593 27594 3 27524 27594 27525 3 27525 27594 27595 3 27525 27595 27526 3 27526 27595 27596 3 27526 27596 27527 3 27527 27596 27597 3 27527 27597 27528 3 27528 27597 27598 3 27528 27598 27529 3 27529 27598 27599 3 27529 27599 27530 3 27530 27599 27531 3 27600 27531 27599 3 27531 27600 27532 3 27601 27532 27600 3 27532 27601 27533 3 27602 27533 27601 3 27533 27602 27534 3 27603 27534 27602 3 27534 27603 27535 3 27604 27535 27603 3 27535 27604 27536 3 27605 27536 27604 3 27536 27605 27537 3 27606 27537 27605 3 27537 27606 27538 3 27607 27538 27606 3 27538 27607 27539 3 27608 27539 27607 3 27539 27608 27540 3 27609 27540 27608 3 27540 27609 27541 3 27610 27541 27609 3 27541 27610 27542 3 27611 27542 27610 3 27542 27611 27543 3 27612 27543 27611 3 27543 27612 27544 3 27613 27544 27612 3 27544 27613 27545 3 27614 27545 27613 3 27545 27614 27546 3 27615 27546 27614 3 27546 27615 27547 3 27616 27547 27615 3 27547 27616 27548 3 27617 27548 27616 3 27548 27617 27549 3 27618 27549 27617 3 27549 27618 27550 3 27619 27550 27618 3 27550 27619 27551 3 27620 27551 27619 3 27551 27620 27552 3 27621 27552 27620 3 27552 27621 27553 3 27622 27553 27621 3 27553 27622 27554 3 27623 27554 27622 3 27554 27623 27555 3 27624 27555 27623 3 27555 27624 27556 3 27625 27556 27624 3 27556 27625 27557 3 27626 27557 27625 3 27557 27626 27558 3 27627 27558 27626 3 27558 27627 27559 3 27628 27559 27627 3 27559 27628 27560 3 27629 27560 27628 3 27560 27629 27561 3 27630 27561 27629 3 27561 27630 27562 3 27631 27562 27630 3 27562 27631 27563 3 27632 27563 27631 3 27563 27632 27564 3 27633 27564 27632 3 27564 27633 27565 3 27634 27565 27633 3 27565 27634 27566 3 27635 27566 27634 3 27566 27635 27567 3 27636 27567 27635 3 27567 27636 27568 3 27637 27568 27636 3 27568 27637 27569 3 27638 27569 27637 3 27569 27638 27570 3 27639 27570 27638 3 27570 27639 27571 3 27640 27571 27639 3 27571 27640 27572 3 27641 27572 27640 3 27572 27641 27573 3 27642 27573 27641 3 27573 27642 27574 3 27643 27574 27642 3 27574 
27643 27575 3 27644 27575 27643 3 27575 27644 27576 3 27645 27576 27644 3 27576 27645 27577 3 27646 27577 27645 3 27577 27646 27578 3 27647 27578 27646 3 27578 27647 27579 3 27648 27579 27647 3 27579 27648 27580 3 27649 27580 27648 3 27580 27649 27581 3 27650 27581 27649 3 27581 27650 27582 3 27651 27582 27650 3 27582 27651 27583 3 27652 27583 27651 3 27583 27652 27584 3 27653 27584 27652 3 27584 27653 27585 3 27654 27585 27653 3 27585 27654 27586 3 27655 27586 27654 3 27586 27655 27587 3 27656 27587 27655 3 27587 27656 27588 3 27657 27588 27656 3 27588 27657 27589 3 27658 27589 27657 3 27590 27659 27591 3 27660 27591 27659 3 27591 27660 27592 3 27661 27592 27660 3 27592 27661 27593 3 27662 27593 27661 3 27593 27662 27594 3 27663 27594 27662 3 27594 27663 27595 3 27664 27595 27663 3 27595 27664 27596 3 27665 27596 27664 3 27596 27665 27597 3 27666 27597 27665 3 27597 27666 27598 3 27667 27598 27666 3 27598 27667 27599 3 27668 27599 27667 3 27599 27668 27600 3 27669 27600 27668 3 27600 27669 27601 3 27670 27601 27669 3 27601 27670 27602 3 27671 27602 27670 3 27602 27671 27603 3 27672 27603 27671 3 27603 27672 27604 3 27673 27604 27672 3 27604 27673 27605 3 27674 27605 27673 3 27605 27674 27606 3 27675 27606 27674 3 27606 27675 27607 3 27676 27607 27675 3 27607 27676 27608 3 27677 27608 27676 3 27608 27677 27609 3 27678 27609 27677 3 27609 27678 27610 3 27679 27610 27678 3 27610 27679 27611 3 27680 27611 27679 3 27611 27680 27612 3 27681 27612 27680 3 27612 27681 27613 3 27682 27613 27681 3 27613 27682 27614 3 27683 27614 27682 3 27614 27683 27615 3 27684 27615 27683 3 27615 27684 27616 3 27685 27616 27684 3 27616 27685 27617 3 27686 27617 27685 3 27617 27686 27618 3 27687 27618 27686 3 27618 27687 27619 3 27688 27619 27687 3 27619 27688 27620 3 27689 27620 27688 3 27620 27689 27621 3 27690 27621 27689 3 27621 27690 27622 3 27691 27622 27690 3 27622 27691 27623 3 27692 27623 27691 3 27623 27692 27624 3 27693 27624 27692 3 27624 27693 27625 3 27694 27625 27693 3 27625 
27694 27626 3 27695 27626 27694 3 27626 27695 27627 3 27696 27627 27695 3 27627 27696 27628 3 27697 27628 27696 3 27628 27697 27629 3 27698 27629 27697 3 27629 27698 27630 3 27699 27630 27698 3 27630 27699 27631 3 27700 27631 27699 3 27631 27700 27632 3 27701 27632 27700 3 27632 27701 27702 3 27632 27702 27633 3 27633 27702 27703 3 27633 27703 27634 3 27634 27703 27704 3 27634 27704 27635 3 27635 27704 27705 3 27635 27705 27636 3 27636 27705 27706 3 27636 27706 27637 3 27637 27706 27707 3 27637 27707 27638 3 27638 27707 27708 3 27638 27708 27639 3 27639 27708 27709 3 27639 27709 27640 3 27640 27709 27710 3 27640 27710 27641 3 27641 27710 27711 3 27641 27711 27642 3 27642 27711 27712 3 27642 27712 27643 3 27643 27712 27713 3 27643 27713 27644 3 27644 27713 27714 3 27644 27714 27645 3 27645 27714 27715 3 27645 27715 27646 3 27646 27715 27716 3 27646 27716 27647 3 27647 27716 27717 3 27647 27717 27648 3 27648 27717 27718 3 27648 27718 27649 3 27649 27718 27719 3 27649 27719 27650 3 27650 27719 27720 3 27650 27720 27651 3 27651 27720 27721 3 27651 27721 27652 3 27652 27721 27722 3 27652 27722 27653 3 27653 27722 27723 3 27653 27723 27654 3 27654 27723 27724 3 27654 27724 27655 3 27655 27724 27725 3 27655 27725 27656 3 27656 27725 27726 3 27656 27726 27657 3 27657 27726 27727 3 27657 27727 27658 3 27659 27728 27729 3 27659 27729 27660 3 27660 27729 27730 3 27660 27730 27661 3 27661 27730 27731 3 27661 27731 27662 3 27662 27731 27732 3 27662 27732 27663 3 27663 27732 27733 3 27663 27733 27664 3 27664 27733 27734 3 27664 27734 27665 3 27665 27734 27735 3 27665 27735 27666 3 27666 27735 27736 3 27666 27736 27667 3 27667 27736 27737 3 27667 27737 27668 3 27668 27737 27738 3 27668 27738 27669 3 27669 27738 27739 3 27669 27739 27670 3 27670 27739 27740 3 27670 27740 27671 3 27671 27740 27741 3 27671 27741 27672 3 27672 27741 27742 3 27672 27742 27673 3 27673 27742 27743 3 27673 27743 27674 3 27674 27743 27744 3 27674 27744 27675 3 27675 27744 27745 3 27675 27745 27676 3 27676 
27745 27746 3 27676 27746 27677 3 27677 27746 27747 3 27677 27747 27678 3 27678 27747 27748 3 27678 27748 27679 3 27679 27748 27749 3 27679 27749 27680 3 27680 27749 27750 3 27680 27750 27681 3 27681 27750 27751 3 27681 27751 27682 3 27682 27751 27752 3 27682 27752 27683 3 27683 27752 27753 3 27683 27753 27684 3 27684 27753 27754 3 27684 27754 27685 3 27685 27754 27755 3 27685 27755 27686 3 27686 27755 27756 3 27686 27756 27687 3 27687 27756 27757 3 27687 27757 27688 3 27688 27757 27758 3 27688 27758 27689 3 27689 27758 27759 3 27689 27759 27690 3 27690 27759 27760 3 27690 27760 27691 3 27691 27760 27761 3 27691 27761 27692 3 27692 27761 27762 3 27692 27762 27693 3 27693 27762 27763 3 27693 27763 27694 3 27694 27763 27764 3 27694 27764 27695 3 27695 27764 27765 3 27695 27765 27696 3 27696 27765 27766 3 27696 27766 27697 3 27697 27766 27767 3 27697 27767 27698 3 27698 27767 27768 3 27698 27768 27699 3 27699 27768 27769 3 27699 27769 27700 3 27700 27769 27770 3 27700 27770 27701 3 27701 27770 27771 3 27701 27771 27702 3 27702 27771 27772 3 27702 27772 27703 3 27703 27772 27773 3 27703 27773 27704 3 27704 27773 27774 3 27704 27774 27705 3 27705 27774 27775 3 27705 27775 27706 3 27706 27775 27776 3 27706 27776 27707 3 27707 27776 27777 3 27707 27777 27708 3 27708 27777 27778 3 27708 27778 27709 3 27709 27778 27779 3 27709 27779 27710 3 27710 27779 27780 3 27710 27780 27711 3 27711 27780 27781 3 27711 27781 27712 3 27712 27781 27782 3 27712 27782 27713 3 27713 27782 27783 3 27713 27783 27714 3 27714 27783 27784 3 27714 27784 27715 3 27715 27784 27785 3 27715 27785 27716 3 27716 27785 27786 3 27716 27786 27717 3 27717 27786 27787 3 27717 27787 27718 3 27718 27787 27788 3 27718 27788 27719 3 27719 27788 27789 3 27719 27789 27720 3 27720 27789 27790 3 27720 27790 27721 3 27721 27790 27791 3 27721 27791 27722 3 27722 27791 27792 3 27722 27792 27723 3 27723 27792 27793 3 27723 27793 27724 3 27724 27793 27794 3 27724 27794 27725 3 27725 27794 27795 3 27725 27795 27726 3 27726 
27795 27796 3 27726 27796 27727 3 27728 27797 27798 3 27728 27798 27729 3 27729 27798 27799 3 27729 27799 27730 3 27730 27799 27800 3 27730 27800 27731 3 27731 27800 27801 3 27731 27801 27732 3 27732 27801 27802 3 27732 27802 27733 3 27733 27802 27803 3 27733 27803 27734 3 27734 27803 27804 3 27734 27804 27735 3 27735 27804 27736 3 27805 27736 27804 3 27736 27805 27737 3 27806 27737 27805 3 27737 27806 27738 3 27807 27738 27806 3 27738 27807 27739 3 27808 27739 27807 3 27739 27808 27740 3 27809 27740 27808 3 27740 27809 27741 3 27810 27741 27809 3 27741 27810 27742 3 27811 27742 27810 3 27742 27811 27743 3 27812 27743 27811 3 27743 27812 27744 3 27813 27744 27812 3 27744 27813 27745 3 27814 27745 27813 3 27745 27814 27746 3 27815 27746 27814 3 27746 27815 27747 3 27816 27747 27815 3 27747 27816 27748 3 27817 27748 27816 3 27748 27817 27749 3 27818 27749 27817 3 27749 27818 27750 3 27819 27750 27818 3 27750 27819 27751 3 27820 27751 27819 3 27751 27820 27752 3 27821 27752 27820 3 27752 27821 27753 3 27822 27753 27821 3 27753 27822 27754 3 27823 27754 27822 3 27754 27823 27755 3 27824 27755 27823 3 27755 27824 27756 3 27825 27756 27824 3 27756 27825 27757 3 27826 27757 27825 3 27757 27826 27758 3 27827 27758 27826 3 27758 27827 27759 3 27828 27759 27827 3 27759 27828 27760 3 27829 27760 27828 3 27760 27829 27761 3 27830 27761 27829 3 27761 27830 27762 3 27831 27762 27830 3 27762 27831 27763 3 27832 27763 27831 3 27763 27832 27764 3 27833 27764 27832 3 27764 27833 27765 3 27834 27765 27833 3 27765 27834 27766 3 27835 27766 27834 3 27766 27835 27767 3 27836 27767 27835 3 27767 27836 27768 3 27837 27768 27836 3 27768 27837 27769 3 27838 27769 27837 3 27769 27838 27770 3 27839 27770 27838 3 27770 27839 27771 3 27840 27771 27839 3 27771 27840 27772 3 27841 27772 27840 3 27772 27841 27773 3 27842 27773 27841 3 27773 27842 27774 3 27843 27774 27842 3 27774 27843 27775 3 27844 27775 27843 3 27775 27844 27776 3 27845 27776 27844 3 27776 27845 27777 3 27846 27777 27845 3 27777 
27846 27778 3 27847 27778 27846 3 27778 27847 27779 3 27848 27779 27847 3 27779 27848 27780 3 27849 27780 27848 3 27780 27849 27781 3 27850 27781 27849 3 27781 27850 27782 3 27851 27782 27850 3 27782 27851 27783 3 27852 27783 27851 3 27783 27852 27784 3 27853 27784 27852 3 27784 27853 27785 3 27854 27785 27853 3 27785 27854 27786 3 27855 27786 27854 3 27786 27855 27787 3 27856 27787 27855 3 27787 27856 27788 3 27857 27788 27856 3 27788 27857 27789 3 27858 27789 27857 3 27789 27858 27790 3 27859 27790 27858 3 27790 27859 27791 3 27860 27791 27859 3 27791 27860 27792 3 27861 27792 27860 3 27792 27861 27793 3 27862 27793 27861 3 27793 27862 27794 3 27863 27794 27862 3 27794 27863 27795 3 27864 27795 27863 3 27795 27864 27796 3 27865 27796 27864 3 27797 27866 27798 3 27867 27798 27866 3 27798 27867 27799 3 27868 27799 27867 3 27799 27868 27800 3 27869 27800 27868 3 27800 27869 27801 3 27870 27801 27869 3 27801 27870 27802 3 27871 27802 27870 3 27802 27871 27803 3 27872 27803 27871 3 27803 27872 27804 3 27873 27804 27872 3 27804 27873 27805 3 27874 27805 27873 3 27805 27874 27806 3 27875 27806 27874 3 27806 27875 27807 3 27876 27807 27875 3 27807 27876 27808 3 27877 27808 27876 3 27808 27877 27809 3 27878 27809 27877 3 27809 27878 27810 3 27879 27810 27878 3 27810 27879 27811 3 27880 27811 27879 3 27811 27880 27812 3 27881 27812 27880 3 27812 27881 27813 3 27882 27813 27881 3 27813 27882 27814 3 27883 27814 27882 3 27814 27883 27815 3 27884 27815 27883 3 27815 27884 27816 3 27885 27816 27884 3 27816 27885 27817 3 27886 27817 27885 3 27817 27886 27818 3 27887 27818 27886 3 27818 27887 27819 3 27888 27819 27887 3 27819 27888 27820 3 27889 27820 27888 3 27820 27889 27821 3 27890 27821 27889 3 27821 27890 27822 3 27891 27822 27890 3 27822 27891 27823 3 27892 27823 27891 3 27823 27892 27824 3 27893 27824 27892 3 27824 27893 27825 3 27894 27825 27893 3 27825 27894 27826 3 27895 27826 27894 3 27826 27895 27827 3 27896 27827 27895 3 27827 27896 27828 3 27897 27828 27896 3 27828 
27897 27829 3 27898 27829 27897 3 27829 27898 27830 3 27899 27830 27898 3 27830 27899 27831 3 27900 27831 27899 3 27831 27900 27832 3 27901 27832 27900 3 27832 27901 27833 3 27902 27833 27901 3 27833 27902 27834 3 27903 27834 27902 3 27834 27903 27835 3 27904 27835 27903 3 27835 27904 27836 3 27905 27836 27904 3 27836 27905 27837 3 27906 27837 27905 3 27837 27906 27907 3 27837 27907 27838 3 27838 27907 27908 3 27838 27908 27839 3 27839 27908 27909 3 27839 27909 27840 3 27840 27909 27910 3 27840 27910 27841 3 27841 27910 27911 3 27841 27911 27842 3 27842 27911 27912 3 27842 27912 27843 3 27843 27912 27913 3 27843 27913 27844 3 27844 27913 27914 3 27844 27914 27845 3 27845 27914 27915 3 27845 27915 27846 3 27846 27915 27916 3 27846 27916 27847 3 27847 27916 27917 3 27847 27917 27848 3 27848 27917 27918 3 27848 27918 27849 3 27849 27918 27919 3 27849 27919 27850 3 27850 27919 27920 3 27850 27920 27851 3 27851 27920 27921 3 27851 27921 27852 3 27852 27921 27922 3 27852 27922 27853 3 27853 27922 27923 3 27853 27923 27854 3 27854 27923 27924 3 27854 27924 27855 3 27855 27924 27925 3 27855 27925 27856 3 27856 27925 27926 3 27856 27926 27857 3 27857 27926 27927 3 27857 27927 27858 3 27858 27927 27928 3 27858 27928 27859 3 27859 27928 27929 3 27859 27929 27860 3 27860 27929 27930 3 27860 27930 27861 3 27861 27930 27931 3 27861 27931 27862 3 27862 27931 27932 3 27862 27932 27863 3 27863 27932 27933 3 27863 27933 27864 3 27864 27933 27934 3 27864 27934 27865 3 27866 27935 27936 3 27866 27936 27867 3 27867 27936 27937 3 27867 27937 27868 3 27868 27937 27938 3 27868 27938 27869 3 27869 27938 27939 3 27869 27939 27870 3 27870 27939 27940 3 27870 27940 27871 3 27871 27940 27941 3 27871 27941 27872 3 27872 27941 27942 3 27872 27942 27873 3 27873 27942 27943 3 27873 27943 27874 3 27874 27943 27944 3 27874 27944 27875 3 27875 27944 27945 3 27875 27945 27876 3 27876 27945 27946 3 27876 27946 27877 3 27877 27946 27947 3 27877 27947 27878 3 27878 27947 27948 3 27878 27948 27879 3 27879 
27948 27949 3 27879 27949 27880 3 27880 27949 27950 3 27880 27950 27881 3 27881 27950 27951 3 27881 27951 27882 3 27882 27951 27952 3 27882 27952 27883 3 27883 27952 27953 3 27883 27953 27884 3 27884 27953 27954 3 27884 27954 27885 3 27885 27954 27955 3 27885 27955 27886 3 27886 27955 27956 3 27886 27956 27887 3 27887 27956 27957 3 27887 27957 27888 3 27888 27957 27958 3 27888 27958 27889 3 27889 27958 27959 3 27889 27959 27890 3 27890 27959 27960 3 27890 27960 27891 3 27891 27960 27961 3 27891 27961 27892 3 27892 27961 27962 3 27892 27962 27893 3 27893 27962 27963 3 27893 27963 27894 3 27894 27963 27964 3 27894 27964 27895 3 27895 27964 27965 3 27895 27965 27896 3 27896 27965 27966 3 27896 27966 27897 3 27897 27966 27967 3 27897 27967 27898 3 27898 27967 27968 3 27898 27968 27899 3 27899 27968 27969 3 27899 27969 27900 3 27900 27969 27970 3 27900 27970 27901 3 27901 27970 27971 3 27901 27971 27902 3 27902 27971 27972 3 27902 27972 27903 3 27903 27972 27973 3 27903 27973 27904 3 27904 27973 27974 3 27904 27974 27905 3 27905 27974 27975 3 27905 27975 27906 3 27906 27975 27976 3 27906 27976 27907 3 27907 27976 27977 3 27907 27977 27908 3 27908 27977 27978 3 27908 27978 27909 3 27909 27978 27979 3 27909 27979 27910 3 27910 27979 27980 3 27910 27980 27911 3 27911 27980 27981 3 27911 27981 27912 3 27912 27981 27982 3 27912 27982 27913 3 27913 27982 27983 3 27913 27983 27914 3 27914 27983 27984 3 27914 27984 27915 3 27915 27984 27985 3 27915 27985 27916 3 27916 27985 27986 3 27916 27986 27917 3 27917 27986 27987 3 27917 27987 27918 3 27918 27987 27988 3 27918 27988 27919 3 27919 27988 27989 3 27919 27989 27920 3 27920 27989 27990 3 27920 27990 27921 3 27921 27990 27991 3 27921 27991 27922 3 27922 27991 27992 3 27922 27992 27923 3 27923 27992 27993 3 27923 27993 27924 3 27924 27993 27994 3 27924 27994 27925 3 27925 27994 27995 3 27925 27995 27926 3 27926 27995 27996 3 27926 27996 27927 3 27927 27996 27997 3 27927 27997 27928 3 27928 27997 27998 3 27928 27998 27929 3 27929 
27998 27999 3 27929 27999 27930 3 27930 27999 28000 3 27930 28000 27931 3 27931 28000 28001 3 27931 28001 27932 3 27932 28001 28002 3 27932 28002 27933 3 27933 28002 28003 3 27933 28003 27934 3 27935 28004 28005 3 27935 28005 27936 3 27936 28005 28006 3 27936 28006 27937 3 27937 28006 28007 3 27937 28007 27938 3 27938 28007 28008 3 27938 28008 27939 3 27939 28008 28009 3 27939 28009 27940 3 27940 28009 27941 3 28010 27941 28009 3 27941 28010 27942 3 28011 27942 28010 3 27942 28011 27943 3 28012 27943 28011 3 27943 28012 27944 3 28013 27944 28012 3 27944 28013 27945 3 28014 27945 28013 3 27945 28014 27946 3 28015 27946 28014 3 27946 28015 27947 3 28016 27947 28015 3 27947 28016 27948 3 28017 27948 28016 3 27948 28017 27949 3 28018 27949 28017 3 27949 28018 27950 3 28019 27950 28018 3 27950 28019 27951 3 28020 27951 28019 3 27951 28020 27952 3 28021 27952 28020 3 27952 28021 27953 3 28022 27953 28021 3 27953 28022 27954 3 28023 27954 28022 3 27954 28023 27955 3 28024 27955 28023 3 27955 28024 27956 3 28025 27956 28024 3 27956 28025 27957 3 28026 27957 28025 3 27957 28026 27958 3 28027 27958 28026 3 27958 28027 27959 3 28028 27959 28027 3 27959 28028 27960 3 28029 27960 28028 3 27960 28029 27961 3 28030 27961 28029 3 27961 28030 27962 3 28031 27962 28030 3 27962 28031 27963 3 28032 27963 28031 3 27963 28032 27964 3 28033 27964 28032 3 27964 28033 27965 3 28034 27965 28033 3 27965 28034 27966 3 28035 27966 28034 3 27966 28035 27967 3 28036 27967 28035 3 27967 28036 27968 3 28037 27968 28036 3 27968 28037 27969 3 28038 27969 28037 3 27969 28038 27970 3 28039 27970 28038 3 27970 28039 27971 3 28040 27971 28039 3 27971 28040 27972 3 28041 27972 28040 3 27972 28041 27973 3 28042 27973 28041 3 27973 28042 27974 3 28043 27974 28042 3 27974 28043 27975 3 28044 27975 28043 3 27975 28044 27976 3 28045 27976 28044 3 27976 28045 27977 3 28046 27977 28045 3 27977 28046 27978 3 28047 27978 28046 3 27978 28047 27979 3 28048 27979 28047 3 27979 28048 27980 3 28049 27980 28048 3 27980 
28049 27981 3 28050 27981 28049 3 27981 28050 27982 3 28051 27982 28050 3 27982 28051 27983 3 28052 27983 28051 3 27983 28052 27984 3 28053 27984 28052 3 27984 28053 27985 3 28054 27985 28053 3 27985 28054 27986 3 28055 27986 28054 3 27986 28055 27987 3 28056 27987 28055 3 27987 28056 27988 3 28057 27988 28056 3 27988 28057 27989 3 28058 27989 28057 3 27989 28058 27990 3 28059 27990 28058 3 27990 28059 27991 3 28060 27991 28059 3 27991 28060 27992 3 28061 27992 28060 3 27992 28061 27993 3 28062 27993 28061 3 27993 28062 27994 3 28063 27994 28062 3 27994 28063 27995 3 28064 27995 28063 3 27995 28064 27996 3 28065 27996 28064 3 27996 28065 27997 3 28066 27997 28065 3 27997 28066 27998 3 28067 27998 28066 3 27998 28067 27999 3 28068 27999 28067 3 27999 28068 28000 3 28069 28000 28068 3 28000 28069 28001 3 28070 28001 28069 3 28001 28070 28002 3 28071 28002 28070 3 28002 28071 28003 3 28072 28003 28071 3 28004 28073 28005 3 28074 28005 28073 3 28005 28074 28006 3 28075 28006 28074 3 28006 28075 28007 3 28076 28007 28075 3 28007 28076 28008 3 28077 28008 28076 3 28008 28077 28009 3 28078 28009 28077 3 28009 28078 28010 3 28079 28010 28078 3 28010 28079 28011 3 28080 28011 28079 3 28011 28080 28012 3 28081 28012 28080 3 28012 28081 28013 3 28082 28013 28081 3 28013 28082 28014 3 28083 28014 28082 3 28014 28083 28015 3 28084 28015 28083 3 28015 28084 28016 3 28085 28016 28084 3 28016 28085 28017 3 28086 28017 28085 3 28017 28086 28018 3 28087 28018 28086 3 28018 28087 28019 3 28088 28019 28087 3 28019 28088 28020 3 28089 28020 28088 3 28020 28089 28021 3 28090 28021 28089 3 28021 28090 28022 3 28091 28022 28090 3 28022 28091 28023 3 28092 28023 28091 3 28023 28092 28024 3 28093 28024 28092 3 28024 28093 28025 3 28094 28025 28093 3 28025 28094 28026 3 28095 28026 28094 3 28026 28095 28027 3 28096 28027 28095 3 28027 28096 28028 3 28097 28028 28096 3 28028 28097 28029 3 28098 28029 28097 3 28029 28098 28030 3 28099 28030 28098 3 28030 28099 28031 3 28100 28031 28099 3 28031 
28100 28032 3 28101 28032 28100 3 28032 28101 28033 3 28102 28033 28101 3 28033 28102 28034 3 28103 28034 28102 3 28034 28103 28035 3 28104 28035 28103 3 28035 28104 28036 3 28105 28036 28104 3 28036 28105 28037 3 28106 28037 28105 3 28037 28106 28038 3 28107 28038 28106 3 28038 28107 28039 3 28108 28039 28107 3 28039 28108 28040 3 28109 28040 28108 3 28040 28109 28041 3 28110 28041 28109 3 28041 28110 28042 3 28111 28042 28110 3 28042 28111 28043 3 28112 28043 28111 3 28043 28112 28113 3 28043 28113 28044 3 28044 28113 28114 3 28044 28114 28045 3 28045 28114 28115 3 28045 28115 28046 3 28046 28115 28116 3 28046 28116 28047 3 28047 28116 28117 3 28047 28117 28048 3 28048 28117 28118 3 28048 28118 28049 3 28049 28118 28119 3 28049 28119 28050 3 28050 28119 28120 3 28050 28120 28051 3 28051 28120 28121 3 28051 28121 28052 3 28052 28121 28122 3 28052 28122 28053 3 28053 28122 28123 3 28053 28123 28054 3 28054 28123 28124 3 28054 28124 28055 3 28055 28124 28125 3 28055 28125 28056 3 28056 28125 28126 3 28056 28126 28057 3 28057 28126 28127 3 28057 28127 28058 3 28058 28127 28128 3 28058 28128 28059 3 28059 28128 28129 3 28059 28129 28060 3 28060 28129 28130 3 28060 28130 28061 3 28061 28130 28131 3 28061 28131 28062 3 28062 28131 28132 3 28062 28132 28063 3 28063 28132 28133 3 28063 28133 28064 3 28064 28133 28134 3 28064 28134 28065 3 28065 28134 28135 3 28065 28135 28066 3 28066 28135 28136 3 28066 28136 28067 3 28067 28136 28137 3 28067 28137 28068 3 28068 28137 28138 3 28068 28138 28069 3 28069 28138 28139 3 28069 28139 28070 3 28070 28139 28140 3 28070 28140 28071 3 28071 28140 28141 3 28071 28141 28072 3 28073 28142 28143 3 28073 28143 28074 3 28074 28143 28144 3 28074 28144 28075 3 28075 28144 28145 3 28075 28145 28076 3 28076 28145 28146 3 28076 28146 28077 3 28077 28146 28147 3 28077 28147 28078 3 28078 28147 28148 3 28078 28148 28079 3 28079 28148 28149 3 28079 28149 28080 3 28080 28149 28150 3 28080 28150 28081 3 28081 28150 28151 3 28081 28151 28082 3 28082 
28151 28152 3 28082 28152 28083 3 28083 28152 28153 3 28083 28153 28084 3 28084 28153 28154 3 28084 28154 28085 3 28085 28154 28155 3 28085 28155 28086 3 28086 28155 28156 3 28086 28156 28087 3 28087 28156 28157 3 28087 28157 28088 3 28088 28157 28158 3 28088 28158 28089 3 28089 28158 28159 3 28089 28159 28090 3 28090 28159 28160 3 28090 28160 28091 3 28091 28160 28161 3 28091 28161 28092 3 28092 28161 28162 3 28092 28162 28093 3 28093 28162 28163 3 28093 28163 28094 3 28094 28163 28164 3 28094 28164 28095 3 28095 28164 28165 3 28095 28165 28096 3 28096 28165 28166 3 28096 28166 28097 3 28097 28166 28167 3 28097 28167 28098 3 28098 28167 28168 3 28098 28168 28099 3 28099 28168 28169 3 28099 28169 28100 3 28100 28169 28170 3 28100 28170 28101 3 28101 28170 28171 3 28101 28171 28102 3 28102 28171 28172 3 28102 28172 28103 3 28103 28172 28173 3 28103 28173 28104 3 28104 28173 28174 3 28104 28174 28105 3 28105 28174 28175 3 28105 28175 28106 3 28106 28175 28176 3 28106 28176 28107 3 28107 28176 28177 3 28107 28177 28108 3 28108 28177 28178 3 28108 28178 28109 3 28109 28178 28179 3 28109 28179 28110 3 28110 28179 28180 3 28110 28180 28111 3 28111 28180 28181 3 28111 28181 28112 3 28112 28181 28182 3 28112 28182 28113 3 28113 28182 28183 3 28113 28183 28114 3 28114 28183 28184 3 28114 28184 28115 3 28115 28184 28185 3 28115 28185 28116 3 28116 28185 28186 3 28116 28186 28117 3 28117 28186 28187 3 28117 28187 28118 3 28118 28187 28188 3 28118 28188 28119 3 28119 28188 28189 3 28119 28189 28120 3 28120 28189 28190 3 28120 28190 28121 3 28121 28190 28191 3 28121 28191 28122 3 28122 28191 28192 3 28122 28192 28123 3 28123 28192 28193 3 28123 28193 28124 3 28124 28193 28194 3 28124 28194 28125 3 28125 28194 28195 3 28125 28195 28126 3 28126 28195 28196 3 28126 28196 28127 3 28127 28196 28197 3 28127 28197 28128 3 28128 28197 28198 3 28128 28198 28129 3 28129 28198 28199 3 28129 28199 28130 3 28130 28199 28200 3 28130 28200 28131 3 28131 28200 28201 3 28131 28201 28132 3 28132 
28201 28202 3 28132 28202 28133 3 28133 28202 28203 3 28133 28203 28134 3 28134 28203 28204 3 28134 28204 28135 3 28135 28204 28205 3 28135 28205 28136 3 28136 28205 28206 3 28136 28206 28137 3 28137 28206 28207 3 28137 28207 28138 3 28138 28207 28208 3 28138 28208 28139 3 28139 28208 28209 3 28139 28209 28140 3 28140 28209 28210 3 28140 28210 28141 3 28142 28211 28212 3 28142 28212 28143 3 28143 28212 28213 3 28143 28213 28144 3 28144 28213 28214 3 28144 28214 28145 3 28145 28214 28215 3 28145 28215 28146 3 28146 28215 28147 3 28216 28147 28215 3 28147 28216 28148 3 28217 28148 28216 3 28148 28217 28149 3 28218 28149 28217 3 28149 28218 28150 3 28219 28150 28218 3 28150 28219 28151 3 28220 28151 28219 3 28151 28220 28152 3 28221 28152 28220 3 28152 28221 28153 3 28222 28153 28221 3 28153 28222 28154 3 28223 28154 28222 3 28154 28223 28155 3 28224 28155 28223 3 28155 28224 28156 3 28225 28156 28224 3 28156 28225 28157 3 28226 28157 28225 3 28157 28226 28158 3 28227 28158 28226 3 28158 28227 28159 3 28228 28159 28227 3 28159 28228 28160 3 28229 28160 28228 3 28160 28229 28161 3 28230 28161 28229 3 28161 28230 28162 3 28231 28162 28230 3 28162 28231 28163 3 28232 28163 28231 3 28163 28232 28164 3 28233 28164 28232 3 28164 28233 28165 3 28234 28165 28233 3 28165 28234 28166 3 28235 28166 28234 3 28166 28235 28167 3 28236 28167 28235 3 28167 28236 28168 3 28237 28168 28236 3 28168 28237 28169 3 28238 28169 28237 3 28169 28238 28170 3 28239 28170 28238 3 28170 28239 28171 3 28240 28171 28239 3 28171 28240 28172 3 28241 28172 28240 3 28172 28241 28173 3 28242 28173 28241 3 28173 28242 28174 3 28243 28174 28242 3 28174 28243 28175 3 28244 28175 28243 3 28175 28244 28176 3 28245 28176 28244 3 28176 28245 28177 3 28246 28177 28245 3 28177 28246 28178 3 28247 28178 28246 3 28178 28247 28179 3 28248 28179 28247 3 28179 28248 28180 3 28249 28180 28248 3 28180 28249 28181 3 28250 28181 28249 3 28181 28250 28182 3 28251 28182 28250 3 28182 28251 28183 3 28252 28183 28251 3 28183 
28252 28184 3 28253 28184 28252 3 28184 28253 28185 3 28254 28185 28253 3 28185 28254 28186 3 28255 28186 28254 3 28186 28255 28187 3 28256 28187 28255 3 28187 28256 28188 3 28257 28188 28256 3 28188 28257 28189 3 28258 28189 28257 3 28189 28258 28190 3 28259 28190 28258 3 28190 28259 28191 3 28260 28191 28259 3 28191 28260 28192 3 28261 28192 28260 3 28192 28261 28193 3 28262 28193 28261 3 28193 28262 28194 3 28263 28194 28262 3 28194 28263 28195 3 28264 28195 28263 3 28195 28264 28196 3 28265 28196 28264 3 28196 28265 28197 3 28266 28197 28265 3 28197 28266 28198 3 28267 28198 28266 3 28198 28267 28199 3 28268 28199 28267 3 28199 28268 28200 3 28269 28200 28268 3 28200 28269 28201 3 28270 28201 28269 3 28201 28270 28202 3 28271 28202 28270 3 28202 28271 28203 3 28272 28203 28271 3 28203 28272 28204 3 28273 28204 28272 3 28204 28273 28205 3 28274 28205 28273 3 28205 28274 28206 3 28275 28206 28274 3 28206 28275 28207 3 28276 28207 28275 3 28207 28276 28208 3 28277 28208 28276 3 28208 28277 28209 3 28278 28209 28277 3 28209 28278 28210 3 28279 28210 28278 3 28211 28280 28212 3 28281 28212 28280 3 28212 28281 28213 3 28282 28213 28281 3 28213 28282 28214 3 28283 28214 28282 3 28214 28283 28215 3 28284 28215 28283 3 28215 28284 28216 3 28285 28216 28284 3 28216 28285 28217 3 28286 28217 28285 3 28217 28286 28218 3 28287 28218 28286 3 28218 28287 28219 3 28288 28219 28287 3 28219 28288 28220 3 28289 28220 28288 3 28220 28289 28221 3 28290 28221 28289 3 28221 28290 28222 3 28291 28222 28290 3 28222 28291 28223 3 28292 28223 28291 3 28223 28292 28224 3 28293 28224 28292 3 28224 28293 28225 3 28294 28225 28293 3 28225 28294 28226 3 28295 28226 28294 3 28226 28295 28227 3 28296 28227 28295 3 28227 28296 28228 3 28297 28228 28296 3 28228 28297 28229 3 28298 28229 28297 3 28229 28298 28230 3 28299 28230 28298 3 28230 28299 28231 3 28300 28231 28299 3 28231 28300 28232 3 28301 28232 28300 3 28232 28301 28233 3 28302 28233 28301 3 28233 28302 28234 3 28303 28234 28302 3 28234 
28303 28235 3 28304 28235 28303 3 28235 28304 28236 3 28305 28236 28304 3 28236 28305 28237 3 28306 28237 28305 3 28237 28306 28238 3 28307 28238 28306 3 28238 28307 28239 3 28308 28239 28307 3 28239 28308 28240 3 28309 28240 28308 3 28240 28309 28241 3 28310 28241 28309 3 28241 28310 28242 3 28311 28242 28310 3 28242 28311 28243 3 28312 28243 28311 3 28243 28312 28244 3 28313 28244 28312 3 28244 28313 28245 3 28314 28245 28313 3 28245 28314 28246 3 28315 28246 28314 3 28246 28315 28247 3 28316 28247 28315 3 28247 28316 28248 3 28317 28248 28316 3 28248 28317 28249 3 28318 28249 28317 3 28249 28318 28250 3 28319 28250 28318 3 28250 28319 28320 3 28250 28320 28251 3 28251 28320 28321 3 28251 28321 28252 3 28252 28321 28322 3 28252 28322 28253 3 28253 28322 28323 3 28253 28323 28254 3 28254 28323 28324 3 28254 28324 28255 3 28255 28324 28325 3 28255 28325 28256 3 28256 28325 28326 3 28256 28326 28257 3 28257 28326 28327 3 28257 28327 28258 3 28258 28327 28328 3 28258 28328 28259 3 28259 28328 28329 3 28259 28329 28260 3 28260 28329 28330 3 28260 28330 28261 3 28261 28330 28331 3 28261 28331 28262 3 28262 28331 28332 3 28262 28332 28263 3 28263 28332 28333 3 28263 28333 28264 3 28264 28333 28334 3 28264 28334 28265 3 28265 28334 28335 3 28265 28335 28266 3 28266 28335 28336 3 28266 28336 28267 3 28267 28336 28337 3 28267 28337 28268 3 28268 28337 28338 3 28268 28338 28269 3 28269 28338 28339 3 28269 28339 28270 3 28270 28339 28340 3 28270 28340 28271 3 28271 28340 28341 3 28271 28341 28272 3 28272 28341 28342 3 28272 28342 28273 3 28273 28342 28343 3 28273 28343 28274 3 28274 28343 28344 3 28274 28344 28275 3 28275 28344 28345 3 28275 28345 28276 3 28276 28345 28346 3 28276 28346 28277 3 28277 28346 28347 3 28277 28347 28278 3 28278 28347 28348 3 28278 28348 28279 3 28280 28349 28350 3 28280 28350 28281 3 28281 28350 28351 3 28281 28351 28282 3 28282 28351 28352 3 28282 28352 28283 3 28283 28352 28353 3 28283 28353 28284 3 28284 28353 28354 3 28284 28354 28285 3 28285 
28354 28355 3 28285 28355 28286 3 28286 28355 28356 3 28286 28356 28287 3 28287 28356 28357 3 28287 28357 28288 3 28288 28357 28358 3 28288 28358 28289 3 28289 28358 28359 3 28289 28359 28290 3 28290 28359 28360 3 28290 28360 28291 3 28291 28360 28361 3 28291 28361 28292 3 28292 28361 28362 3 28292 28362 28293 3 28293 28362 28363 3 28293 28363 28294 3 28294 28363 28364 3 28294 28364 28295 3 28295 28364 28365 3 28295 28365 28296 3 28296 28365 28366 3 28296 28366 28297 3 28297 28366 28367 3 28297 28367 28298 3 28298 28367 28368 3 28298 28368 28299 3 28299 28368 28369 3 28299 28369 28300 3 28300 28369 28370 3 28300 28370 28301 3 28301 28370 28371 3 28301 28371 28302 3 28302 28371 28372 3 28302 28372 28303 3 28303 28372 28373 3 28303 28373 28304 3 28304 28373 28374 3 28304 28374 28305 3 28305 28374 28375 3 28305 28375 28306 3 28306 28375 28376 3 28306 28376 28307 3 28307 28376 28377 3 28307 28377 28308 3 28308 28377 28378 3 28308 28378 28309 3 28309 28378 28379 3 28309 28379 28310 3 28310 28379 28380 3 28310 28380 28311 3 28311 28380 28381 3 28311 28381 28312 3 28312 28381 28382 3 28312 28382 28313 3 28313 28382 28383 3 28313 28383 28314 3 28314 28383 28384 3 28314 28384 28315 3 28315 28384 28385 3 28315 28385 28316 3 28316 28385 28386 3 28316 28386 28317 3 28317 28386 28387 3 28317 28387 28318 3 28318 28387 28388 3 28318 28388 28319 3 28319 28388 28389 3 28319 28389 28320 3 28320 28389 28390 3 28320 28390 28321 3 28321 28390 28391 3 28321 28391 28322 3 28322 28391 28392 3 28322 28392 28323 3 28323 28392 28393 3 28323 28393 28324 3 28324 28393 28394 3 28324 28394 28325 3 28325 28394 28395 3 28325 28395 28326 3 28326 28395 28396 3 28326 28396 28327 3 28327 28396 28397 3 28327 28397 28328 3 28328 28397 28398 3 28328 28398 28329 3 28329 28398 28399 3 28329 28399 28330 3 28330 28399 28400 3 28330 28400 28331 3 28331 28400 28401 3 28331 28401 28332 3 28332 28401 28402 3 28332 28402 28333 3 28333 28402 28403 3 28333 28403 28334 3 28334 28403 28404 3 28334 28404 28335 3 28335 
28404 28405 3 28335 28405 28336 3 28336 28405 28406 3 28336 28406 28337 3 28337 28406 28407 3 28337 28407 28338 3 28338 28407 28408 3 28338 28408 28339 3 28339 28408 28409 3 28339 28409 28340 3 28340 28409 28410 3 28340 28410 28341 3 28341 28410 28411 3 28341 28411 28342 3 28342 28411 28412 3 28342 28412 28343 3 28343 28412 28413 3 28343 28413 28344 3 28344 28413 28414 3 28344 28414 28345 3 28345 28414 28415 3 28345 28415 28346 3 28346 28415 28416 3 28346 28416 28347 3 28347 28416 28417 3 28347 28417 28348 3 28349 28418 28419 3 28349 28419 28350 3 28350 28419 28420 3 28350 28420 28351 3 28351 28420 28421 3 28351 28421 28352 3 28352 28421 28422 3 28352 28422 28353 3 28353 28422 28354 3 28423 28354 28422 3 28354 28423 28355 3 28424 28355 28423 3 28355 28424 28356 3 28425 28356 28424 3 28356 28425 28357 3 28426 28357 28425 3 28357 28426 28358 3 28427 28358 28426 3 28358 28427 28359 3 28428 28359 28427 3 28359 28428 28360 3 28429 28360 28428 3 28360 28429 28361 3 28430 28361 28429 3 28361 28430 28362 3 28431 28362 28430 3 28362 28431 28363 3 28432 28363 28431 3 28363 28432 28364 3 28433 28364 28432 3 28364 28433 28365 3 28434 28365 28433 3 28365 28434 28366 3 28435 28366 28434 3 28366 28435 28367 3 28436 28367 28435 3 28367 28436 28368 3 28437 28368 28436 3 28368 28437 28369 3 28438 28369 28437 3 28369 28438 28370 3 28439 28370 28438 3 28370 28439 28371 3 28440 28371 28439 3 28371 28440 28372 3 28441 28372 28440 3 28372 28441 28373 3 28442 28373 28441 3 28373 28442 28374 3 28443 28374 28442 3 28374 28443 28375 3 28444 28375 28443 3 28375 28444 28376 3 28445 28376 28444 3 28376 28445 28377 3 28446 28377 28445 3 28377 28446 28378 3 28447 28378 28446 3 28378 28447 28379 3 28448 28379 28447 3 28379 28448 28380 3 28449 28380 28448 3 28380 28449 28381 3 28450 28381 28449 3 28381 28450 28382 3 28451 28382 28450 3 28382 28451 28383 3 28452 28383 28451 3 28383 28452 28384 3 28453 28384 28452 3 28384 28453 28385 3 28454 28385 28453 3 28385 28454 28386 3 28455 28386 28454 3 28386 
28455 28387 3 28456 28387 28455 3 28387 28456 28388 3 28457 28388 28456 3 28388 28457 28389 3 28458 28389 28457 3 28389 28458 28390 3 28459 28390 28458 3 28390 28459 28391 3 28460 28391 28459 3 28391 28460 28392 3 28461 28392 28460 3 28392 28461 28393 3 28462 28393 28461 3 28393 28462 28394 3 28463 28394 28462 3 28394 28463 28395 3 28464 28395 28463 3 28395 28464 28396 3 28465 28396 28464 3 28396 28465 28397 3 28466 28397 28465 3 28397 28466 28398 3 28467 28398 28466 3 28398 28467 28399 3 28468 28399 28467 3 28399 28468 28400 3 28469 28400 28468 3 28400 28469 28401 3 28470 28401 28469 3 28401 28470 28402 3 28471 28402 28470 3 28402 28471 28403 3 28472 28403 28471 3 28403 28472 28404 3 28473 28404 28472 3 28404 28473 28405 3 28474 28405 28473 3 28405 28474 28406 3 28475 28406 28474 3 28406 28475 28407 3 28476 28407 28475 3 28407 28476 28408 3 28477 28408 28476 3 28408 28477 28409 3 28478 28409 28477 3 28409 28478 28410 3 28479 28410 28478 3 28410 28479 28411 3 28480 28411 28479 3 28411 28480 28412 3 28481 28412 28480 3 28412 28481 28413 3 28482 28413 28481 3 28413 28482 28414 3 28483 28414 28482 3 28414 28483 28415 3 28484 28415 28483 3 28415 28484 28416 3 28485 28416 28484 3 28416 28485 28417 3 28486 28417 28485 3 28418 28487 28419 3 28488 28419 28487 3 28419 28488 28420 3 28489 28420 28488 3 28420 28489 28421 3 28490 28421 28489 3 28421 28490 28422 3 28491 28422 28490 3 28422 28491 28423 3 28492 28423 28491 3 28423 28492 28424 3 28493 28424 28492 3 28424 28493 28425 3 28494 28425 28493 3 28425 28494 28426 3 28495 28426 28494 3 28426 28495 28427 3 28496 28427 28495 3 28427 28496 28428 3 28497 28428 28496 3 28428 28497 28429 3 28498 28429 28497 3 28429 28498 28430 3 28499 28430 28498 3 28430 28499 28431 3 28500 28431 28499 3 28431 28500 28432 3 28501 28432 28500 3 28432 28501 28433 3 28502 28433 28501 3 28433 28502 28434 3 28503 28434 28502 3 28434 28503 28435 3 28504 28435 28503 3 28435 28504 28436 3 28505 28436 28504 3 28436 28505 28437 3 28506 28437 28505 3 28437 
28506 28438 3 28507 28438 28506 3 28438 28507 28439 3 28508 28439 28507 3 28439 28508 28440 3 28509 28440 28508 3 28440 28509 28441 3 28510 28441 28509 3 28441 28510 28442 3 28511 28442 28510 3 28442 28511 28443 3 28512 28443 28511 3 28443 28512 28444 3 28513 28444 28512 3 28444 28513 28445 3 28514 28445 28513 3 28445 28514 28446 3 28515 28446 28514 3 28446 28515 28447 3 28516 28447 28515 3 28447 28516 28448 3 28517 28448 28516 3 28448 28517 28449 3 28518 28449 28517 3 28449 28518 28450 3 28519 28450 28518 3 28450 28519 28451 3 28520 28451 28519 3 28451 28520 28452 3 28521 28452 28520 3 28452 28521 28453 3 28522 28453 28521 3 28453 28522 28454 3 28523 28454 28522 3 28454 28523 28455 3 28524 28455 28523 3 28455 28524 28456 3 28525 28456 28524 3 28456 28525 28457 3 28526 28457 28525 3 28457 28526 28527 3 28457 28527 28458 3 28458 28527 28528 3 28458 28528 28459 3 28459 28528 28529 3 28459 28529 28460 3 28460 28529 28530 3 28460 28530 28461 3 28461 28530 28531 3 28461 28531 28462 3 28462 28531 28532 3 28462 28532 28463 3 28463 28532 28533 3 28463 28533 28464 3 28464 28533 28534 3 28464 28534 28465 3 28465 28534 28535 3 28465 28535 28466 3 28466 28535 28536 3 28466 28536 28467 3 28467 28536 28537 3 28467 28537 28468 3 28468 28537 28538 3 28468 28538 28469 3 28469 28538 28539 3 28469 28539 28470 3 28470 28539 28540 3 28470 28540 28471 3 28471 28540 28541 3 28471 28541 28472 3 28472 28541 28542 3 28472 28542 28473 3 28473 28542 28543 3 28473 28543 28474 3 28474 28543 28544 3 28474 28544 28475 3 28475 28544 28545 3 28475 28545 28476 3 28476 28545 28546 3 28476 28546 28477 3 28477 28546 28547 3 28477 28547 28478 3 28478 28547 28548 3 28478 28548 28479 3 28479 28548 28549 3 28479 28549 28480 3 28480 28549 28550 3 28480 28550 28481 3 28481 28550 28551 3 28481 28551 28482 3 28482 28551 28552 3 28482 28552 28483 3 28483 28552 28553 3 28483 28553 28484 3 28484 28553 28554 3 28484 28554 28485 3 28485 28554 28555 3 28485 28555 28486 3 28487 28556 28557 3 28487 28557 28488 3 28488 
28557 28558 3 28488 28558 28489 3 28489 28558 28559 3 28489 28559 28490 3 28490 28559 28560 3 28490 28560 28491 3 28491 28560 28561 3 28491 28561 28492 3 28492 28561 28562 3 28492 28562 28493 3 28493 28562 28563 3 28493 28563 28494 3 28494 28563 28564 3 28494 28564 28495 3 28495 28564 28565 3 28495 28565 28496 3 28496 28565 28566 3 28496 28566 28497 3 28497 28566 28567 3 28497 28567 28498 3 28498 28567 28568 3 28498 28568 28499 3 28499 28568 28569 3 28499 28569 28500 3 28500 28569 28570 3 28500 28570 28501 3 28501 28570 28571 3 28501 28571 28502 3 28502 28571 28572 3 28502 28572 28503 3 28503 28572 28573 3 28503 28573 28504 3 28504 28573 28574 3 28504 28574 28505 3 28505 28574 28575 3 28505 28575 28506 3 28506 28575 28576 3 28506 28576 28507 3 28507 28576 28577 3 28507 28577 28508 3 28508 28577 28578 3 28508 28578 28509 3 28509 28578 28579 3 28509 28579 28510 3 28510 28579 28580 3 28510 28580 28511 3 28511 28580 28581 3 28511 28581 28512 3 28512 28581 28582 3 28512 28582 28513 3 28513 28582 28583 3 28513 28583 28514 3 28514 28583 28584 3 28514 28584 28515 3 28515 28584 28585 3 28515 28585 28516 3 28516 28585 28586 3 28516 28586 28517 3 28517 28586 28587 3 28517 28587 28518 3 28518 28587 28588 3 28518 28588 28519 3 28519 28588 28589 3 28519 28589 28520 3 28520 28589 28590 3 28520 28590 28521 3 28521 28590 28591 3 28521 28591 28522 3 28522 28591 28592 3 28522 28592 28523 3 28523 28592 28593 3 28523 28593 28524 3 28524 28593 28594 3 28524 28594 28525 3 28525 28594 28595 3 28525 28595 28526 3 28526 28595 28596 3 28526 28596 28527 3 28527 28596 28597 3 28527 28597 28528 3 28528 28597 28598 3 28528 28598 28529 3 28529 28598 28599 3 28529 28599 28530 3 28530 28599 28600 3 28530 28600 28531 3 28531 28600 28601 3 28531 28601 28532 3 28532 28601 28602 3 28532 28602 28533 3 28533 28602 28603 3 28533 28603 28534 3 28534 28603 28604 3 28534 28604 28535 3 28535 28604 28605 3 28535 28605 28536 3 28536 28605 28606 3 28536 28606 28537 3 28537 28606 28607 3 28537 28607 28538 3 28538 
28607 28608 3 28538 28608 28539 3 28539 28608 28609 3 28539 28609 28540 3 28540 28609 28610 3 28540 28610 28541 3 28541 28610 28611 3 28541 28611 28542 3 28542 28611 28612 3 28542 28612 28543 3 28543 28612 28613 3 28543 28613 28544 3 28544 28613 28614 3 28544 28614 28545 3 28545 28614 28615 3 28545 28615 28546 3 28546 28615 28616 3 28546 28616 28547 3 28547 28616 28617 3 28547 28617 28548 3 28548 28617 28618 3 28548 28618 28549 3 28549 28618 28619 3 28549 28619 28550 3 28550 28619 28620 3 28550 28620 28551 3 28551 28620 28621 3 28551 28621 28552 3 28552 28621 28622 3 28552 28622 28553 3 28553 28622 28623 3 28553 28623 28554 3 28554 28623 28624 3 28554 28624 28555 3 28556 28625 28626 3 28556 28626 28557 3 28557 28626 28627 3 28557 28627 28558 3 28558 28627 28628 3 28558 28628 28559 3 28559 28628 28629 3 28559 28629 28560 3 28560 28629 28630 3 28560 28630 28561 3 28561 28630 28562 3 28631 28562 28630 3 28562 28631 28563 3 28632 28563 28631 3 28563 28632 28564 3 28633 28564 28632 3 28564 28633 28565 3 28634 28565 28633 3 28565 28634 28566 3 28635 28566 28634 3 28566 28635 28567 3 28636 28567 28635 3 28567 28636 28568 3 28637 28568 28636 3 28568 28637 28569 3 28638 28569 28637 3 28569 28638 28570 3 28639 28570 28638 3 28570 28639 28571 3 28640 28571 28639 3 28571 28640 28572 3 28641 28572 28640 3 28572 28641 28573 3 28642 28573 28641 3 28573 28642 28574 3 28643 28574 28642 3 28574 28643 28575 3 28644 28575 28643 3 28575 28644 28576 3 28645 28576 28644 3 28576 28645 28577 3 28646 28577 28645 3 28577 28646 28578 3 28647 28578 28646 3 28578 28647 28579 3 28648 28579 28647 3 28579 28648 28580 3 28649 28580 28648 3 28580 28649 28581 3 28650 28581 28649 3 28581 28650 28582 3 28651 28582 28650 3 28582 28651 28583 3 28652 28583 28651 3 28583 28652 28584 3 28653 28584 28652 3 28584 28653 28585 3 28654 28585 28653 3 28585 28654 28586 3 28655 28586 28654 3 28586 28655 28587 3 28656 28587 28655 3 28587 28656 28588 3 28657 28588 28656 3 28588 28657 28589 3 28658 28589 28657 3 28589 
28658 28590 3 28659 28590 28658 3 28590 28659 28591 3 28660 28591 28659 3 28591 28660 28592 3 28661 28592 28660 3 28592 28661 28593 3 28662 28593 28661 3 28593 28662 28594 3 28663 28594 28662 3 28594 28663 28595 3 28664 28595 28663 3 28595 28664 28596 3 28665 28596 28664 3 28596 28665 28597 3 28666 28597 28665 3 28597 28666 28598 3 28667 28598 28666 3 28598 28667 28599 3 28668 28599 28667 3 28599 28668 28600 3 28669 28600 28668 3 28600 28669 28601 3 28670 28601 28669 3 28601 28670 28602 3 28671 28602 28670 3 28602 28671 28603 3 28672 28603 28671 3 28603 28672 28604 3 28673 28604 28672 3 28604 28673 28605 3 28674 28605 28673 3 28605 28674 28606 3 28675 28606 28674 3 28606 28675 28607 3 28676 28607 28675 3 28607 28676 28608 3 28677 28608 28676 3 28608 28677 28609 3 28678 28609 28677 3 28609 28678 28610 3 28679 28610 28678 3 28610 28679 28611 3 28680 28611 28679 3 28611 28680 28612 3 28681 28612 28680 3 28612 28681 28613 3 28682 28613 28681 3 28613 28682 28614 3 28683 28614 28682 3 28614 28683 28615 3 28684 28615 28683 3 28615 28684 28616 3 28685 28616 28684 3 28616 28685 28617 3 28686 28617 28685 3 28617 28686 28618 3 28687 28618 28686 3 28618 28687 28619 3 28688 28619 28687 3 28619 28688 28620 3 28689 28620 28688 3 28620 28689 28621 3 28690 28621 28689 3 28621 28690 28622 3 28691 28622 28690 3 28622 28691 28623 3 28692 28623 28691 3 28623 28692 28624 3 28693 28624 28692 3 28625 28694 28626 3 28695 28626 28694 3 28626 28695 28627 3 28696 28627 28695 3 28627 28696 28628 3 28697 28628 28696 3 28628 28697 28629 3 28698 28629 28697 3 28629 28698 28630 3 28699 28630 28698 3 28630 28699 28631 3 28700 28631 28699 3 28631 28700 28632 3 28701 28632 28700 3 28632 28701 28633 3 28702 28633 28701 3 28633 28702 28634 3 28703 28634 28702 3 28634 28703 28635 3 28704 28635 28703 3 28635 28704 28636 3 28705 28636 28704 3 28636 28705 28637 3 28706 28637 28705 3 28637 28706 28638 3 28707 28638 28706 3 28638 28707 28639 3 28708 28639 28707 3 28639 28708 28640 3 28709 28640 28708 3 28640 
28709 28641 3 28710 28641 28709 3 28641 28710 28642 3 28711 28642 28710 3 28642 28711 28643 3 28712 28643 28711 3 28643 28712 28644 3 28713 28644 28712 3 28644 28713 28645 3 28714 28645 28713 3 28645 28714 28646 3 28715 28646 28714 3 28646 28715 28647 3 28716 28647 28715 3 28647 28716 28648 3 28717 28648 28716 3 28648 28717 28649 3 28718 28649 28717 3 28649 28718 28650 3 28719 28650 28718 3 28650 28719 28651 3 28720 28651 28719 3 28651 28720 28652 3 28721 28652 28720 3 28652 28721 28653 3 28722 28653 28721 3 28653 28722 28654 3 28723 28654 28722 3 28654 28723 28655 3 28724 28655 28723 3 28655 28724 28656 3 28725 28656 28724 3 28656 28725 28657 3 28726 28657 28725 3 28657 28726 28658 3 28727 28658 28726 3 28658 28727 28659 3 28728 28659 28727 3 28659 28728 28660 3 28729 28660 28728 3 28660 28729 28661 3 28730 28661 28729 3 28661 28730 28662 3 28731 28662 28730 3 28662 28731 28663 3 28732 28663 28731 3 28663 28732 28664 3 28733 28664 28732 3 28664 28733 28665 3 28734 28665 28733 3 28665 28734 28735 3 28665 28735 28666 3 28666 28735 28736 3 28666 28736 28667 3 28667 28736 28737 3 28667 28737 28668 3 28668 28737 28738 3 28668 28738 28669 3 28669 28738 28739 3 28669 28739 28670 3 28670 28739 28740 3 28670 28740 28671 3 28671 28740 28741 3 28671 28741 28672 3 28672 28741 28742 3 28672 28742 28673 3 28673 28742 28743 3 28673 28743 28674 3 28674 28743 28744 3 28674 28744 28675 3 28675 28744 28745 3 28675 28745 28676 3 28676 28745 28746 3 28676 28746 28677 3 28677 28746 28747 3 28677 28747 28678 3 28678 28747 28748 3 28678 28748 28679 3 28679 28748 28749 3 28679 28749 28680 3 28680 28749 28750 3 28680 28750 28681 3 28681 28750 28751 3 28681 28751 28682 3 28682 28751 28752 3 28682 28752 28683 3 28683 28752 28753 3 28683 28753 28684 3 28684 28753 28754 3 28684 28754 28685 3 28685 28754 28755 3 28685 28755 28686 3 28686 28755 28756 3 28686 28756 28687 3 28687 28756 28757 3 28687 28757 28688 3 28688 28757 28758 3 28688 28758 28689 3 28689 28758 28759 3 28689 28759 28690 3 28690 
28759 28760 3 28690 28760 28691 3 28691 28760 28761 3 28691 28761 28692 3 28692 28761 28762 3 28692 28762 28693 3 28694 28763 28764 3 28694 28764 28695 3 28695 28764 28765 3 28695 28765 28696 3 28696 28765 28766 3 28696 28766 28697 3 28697 28766 28767 3 28697 28767 28698 3 28698 28767 28768 3 28698 28768 28699 3 28699 28768 28769 3 28699 28769 28700 3 28700 28769 28770 3 28700 28770 28701 3 28701 28770 28771 3 28701 28771 28702 3 28702 28771 28772 3 28702 28772 28703 3 28703 28772 28773 3 28703 28773 28704 3 28704 28773 28774 3 28704 28774 28705 3 28705 28774 28775 3 28705 28775 28706 3 28706 28775 28776 3 28706 28776 28707 3 28707 28776 28777 3 28707 28777 28708 3 28708 28777 28778 3 28708 28778 28709 3 28709 28778 28779 3 28709 28779 28710 3 28710 28779 28780 3 28710 28780 28711 3 28711 28780 28781 3 28711 28781 28712 3 28712 28781 28782 3 28712 28782 28713 3 28713 28782 28783 3 28713 28783 28714 3 28714 28783 28784 3 28714 28784 28715 3 28715 28784 28785 3 28715 28785 28716 3 28716 28785 28786 3 28716 28786 28717 3 28717 28786 28787 3 28717 28787 28718 3 28718 28787 28788 3 28718 28788 28719 3 28719 28788 28789 3 28719 28789 28720 3 28720 28789 28790 3 28720 28790 28721 3 28721 28790 28791 3 28721 28791 28722 3 28722 28791 28792 3 28722 28792 28723 3 28723 28792 28793 3 28723 28793 28724 3 28724 28793 28794 3 28724 28794 28725 3 28725 28794 28795 3 28725 28795 28726 3 28726 28795 28796 3 28726 28796 28727 3 28727 28796 28797 3 28727 28797 28728 3 28728 28797 28798 3 28728 28798 28729 3 28729 28798 28799 3 28729 28799 28730 3 28730 28799 28800 3 28730 28800 28731 3 28731 28800 28801 3 28731 28801 28732 3 28732 28801 28802 3 28732 28802 28733 3 28733 28802 28803 3 28733 28803 28734 3 28734 28803 28804 3 28734 28804 28735 3 28735 28804 28805 3 28735 28805 28736 3 28736 28805 28806 3 28736 28806 28737 3 28737 28806 28807 3 28737 28807 28738 3 28738 28807 28808 3 28738 28808 28739 3 28739 28808 28809 3 28739 28809 28740 3 28740 28809 28810 3 28740 28810 28741 3 28741 
28810 28811 3 28741 28811 28742 3 28742 28811 28812 3 28742 28812 28743 3 28743 28812 28813 3 28743 28813 28744 3 28744 28813 28814 3 28744 28814 28745 3 28745 28814 28815 3 28745 28815 28746 3 28746 28815 28816 3 28746 28816 28747 3 28747 28816 28817 3 28747 28817 28748 3 28748 28817 28818 3 28748 28818 28749 3 28749 28818 28819 3 28749 28819 28750 3 28750 28819 28820 3 28750 28820 28751 3 28751 28820 28821 3 28751 28821 28752 3 28752 28821 28822 3 28752 28822 28753 3 28753 28822 28823 3 28753 28823 28754 3 28754 28823 28824 3 28754 28824 28755 3 28755 28824 28825 3 28755 28825 28756 3 28756 28825 28826 3 28756 28826 28757 3 28757 28826 28827 3 28757 28827 28758 3 28758 28827 28828 3 28758 28828 28759 3 28759 28828 28829 3 28759 28829 28760 3 28760 28829 28830 3 28760 28830 28761 3 28761 28830 28831 3 28761 28831 28762 3 28763 28832 28833 3 28763 28833 28764 3 28764 28833 28834 3 28764 28834 28765 3 28765 28834 28835 3 28765 28835 28766 3 28766 28835 28836 3 28766 28836 28767 3 28767 28836 28837 3 28767 28837 28768 3 28768 28837 28838 3 28768 28838 28769 3 28769 28838 28839 3 28769 28839 28770 3 28770 28839 28771 3 28840 28771 28839 3 28771 28840 28772 3 28841 28772 28840 3 28772 28841 28773 3 28842 28773 28841 3 28773 28842 28774 3 28843 28774 28842 3 28774 28843 28775 3 28844 28775 28843 3 28775 28844 28776 3 28845 28776 28844 3 28776 28845 28777 3 28846 28777 28845 3 28777 28846 28778 3 28847 28778 28846 3 28778 28847 28779 3 28848 28779 28847 3 28779 28848 28780 3 28849 28780 28848 3 28780 28849 28781 3 28850 28781 28849 3 28781 28850 28782 3 28851 28782 28850 3 28782 28851 28783 3 28852 28783 28851 3 28783 28852 28784 3 28853 28784 28852 3 28784 28853 28785 3 28854 28785 28853 3 28785 28854 28786 3 28855 28786 28854 3 28786 28855 28787 3 28856 28787 28855 3 28787 28856 28788 3 28857 28788 28856 3 28788 28857 28789 3 28858 28789 28857 3 28789 28858 28790 3 28859 28790 28858 3 28790 28859 28791 3 28860 28791 28859 3 28791 28860 28792 3 28861 28792 28860 3 28792 
28861 28793 3 28862 28793 28861 3 28793 28862 28794 3 28863 28794 28862 3 28794 28863 28795 3 28864 28795 28863 3 28795 28864 28796 3 28865 28796 28864 3 28796 28865 28797 3 28866 28797 28865 3 28797 28866 28798 3 28867 28798 28866 3 28798 28867 28799 3 28868 28799 28867 3 28799 28868 28800 3 28869 28800 28868 3 28800 28869 28801 3 28870 28801 28869 3 28801 28870 28802 3 28871 28802 28870 3 28802 28871 28803 3 28872 28803 28871 3 28803 28872 28804 3 28873 28804 28872 3 28804 28873 28805 3 28874 28805 28873 3 28805 28874 28806 3 28875 28806 28874 3 28806 28875 28807 3 28876 28807 28875 3 28807 28876 28808 3 28877 28808 28876 3 28808 28877 28809 3 28878 28809 28877 3 28809 28878 28810 3 28879 28810 28878 3 28810 28879 28811 3 28880 28811 28879 3 28811 28880 28812 3 28881 28812 28880 3 28812 28881 28813 3 28882 28813 28881 3 28813 28882 28814 3 28883 28814 28882 3 28814 28883 28815 3 28884 28815 28883 3 28815 28884 28816 3 28885 28816 28884 3 28816 28885 28817 3 28886 28817 28885 3 28817 28886 28818 3 28887 28818 28886 3 28818 28887 28819 3 28888 28819 28887 3 28819 28888 28820 3 28889 28820 28888 3 28820 28889 28821 3 28890 28821 28889 3 28821 28890 28822 3 28891 28822 28890 3 28822 28891 28823 3 28892 28823 28891 3 28823 28892 28824 3 28893 28824 28892 3 28824 28893 28825 3 28894 28825 28893 3 28825 28894 28826 3 28895 28826 28894 3 28826 28895 28827 3 28896 28827 28895 3 28827 28896 28828 3 28897 28828 28896 3 28828 28897 28829 3 28898 28829 28897 3 28829 28898 28830 3 28899 28830 28898 3 28830 28899 28831 3 28900 28831 28899 3 28832 28901 28833 3 28902 28833 28901 3 28833 28902 28834 3 28903 28834 28902 3 28834 28903 28835 3 28904 28835 28903 3 28835 28904 28836 3 28905 28836 28904 3 28836 28905 28837 3 28906 28837 28905 3 28837 28906 28838 3 28907 28838 28906 3 28838 28907 28839 3 28908 28839 28907 3 28839 28908 28840 3 28909 28840 28908 3 28840 28909 28841 3 28910 28841 28909 3 28841 28910 28842 3 28911 28842 28910 3 28842 28911 28843 3 28912 28843 28911 3 28843 
28912 28844 3 28913 28844 28912 3 28844 28913 28845 3 28914 28845 28913 3 28845 28914 28846 3 28915 28846 28914 3 28846 28915 28847 3 28916 28847 28915 3 28847 28916 28848 3 28917 28848 28916 3 28848 28917 28849 3 28918 28849 28917 3 28849 28918 28850 3 28919 28850 28918 3 28850 28919 28851 3 28920 28851 28919 3 28851 28920 28852 3 28921 28852 28920 3 28852 28921 28853 3 28922 28853 28921 3 28853 28922 28854 3 28923 28854 28922 3 28854 28923 28855 3 28924 28855 28923 3 28855 28924 28856 3 28925 28856 28924 3 28856 28925 28857 3 28926 28857 28925 3 28857 28926 28858 3 28927 28858 28926 3 28858 28927 28859 3 28928 28859 28927 3 28859 28928 28860 3 28929 28860 28928 3 28860 28929 28861 3 28930 28861 28929 3 28861 28930 28862 3 28931 28862 28930 3 28862 28931 28863 3 28932 28863 28931 3 28863 28932 28864 3 28933 28864 28932 3 28864 28933 28865 3 28934 28865 28933 3 28865 28934 28866 3 28935 28866 28934 3 28866 28935 28867 3 28936 28867 28935 3 28867 28936 28868 3 28937 28868 28936 3 28868 28937 28869 3 28938 28869 28937 3 28869 28938 28870 3 28939 28870 28938 3 28870 28939 28871 3 28940 28871 28939 3 28871 28940 28872 3 28941 28872 28940 3 28872 28941 28873 3 28942 28873 28941 3 28873 28942 28874 3 28943 28874 28942 3 28874 28943 28944 3 28874 28944 28875 3 28875 28944 28945 3 28875 28945 28876 3 28876 28945 28946 3 28876 28946 28877 3 28877 28946 28947 3 28877 28947 28878 3 28878 28947 28948 3 28878 28948 28879 3 28879 28948 28949 3 28879 28949 28880 3 28880 28949 28950 3 28880 28950 28881 3 28881 28950 28951 3 28881 28951 28882 3 28882 28951 28952 3 28882 28952 28883 3 28883 28952 28953 3 28883 28953 28884 3 28884 28953 28954 3 28884 28954 28885 3 28885 28954 28955 3 28885 28955 28886 3 28886 28955 28956 3 28886 28956 28887 3 28887 28956 28957 3 28887 28957 28888 3 28888 28957 28958 3 28888 28958 28889 3 28889 28958 28959 3 28889 28959 28890 3 28890 28959 28960 3 28890 28960 28891 3 28891 28960 28961 3 28891 28961 28892 3 28892 28961 28962 3 28892 28962 28893 3 28893 
28962 28963 3 28893 28963 28894 3 28894 28963 28964 3 28894 28964 28895 3 28895 28964 28965 3 28895 28965 28896 3 28896 28965 28966 3 28896 28966 28897 3 28897 28966 28967 3 28897 28967 28898 3 28898 28967 28968 3 28898 28968 28899 3 28899 28968 28969 3 28899 28969 28900 3 28901 28970 28971 3 28901 28971 28902 3 28902 28971 28972 3 28902 28972 28903 3 28903 28972 28973 3 28903 28973 28904 3 28904 28973 28974 3 28904 28974 28905 3 28905 28974 28975 3 28905 28975 28906 3 28906 28975 28976 3 28906 28976 28907 3 28907 28976 28977 3 28907 28977 28908 3 28908 28977 28978 3 28908 28978 28909 3 28909 28978 28979 3 28909 28979 28910 3 28910 28979 28980 3 28910 28980 28911 3 28911 28980 28981 3 28911 28981 28912 3 28912 28981 28982 3 28912 28982 28913 3 28913 28982 28983 3 28913 28983 28914 3 28914 28983 28984 3 28914 28984 28915 3 28915 28984 28985 3 28915 28985 28916 3 28916 28985 28986 3 28916 28986 28917 3 28917 28986 28987 3 28917 28987 28918 3 28918 28987 28988 3 28918 28988 28919 3 28919 28988 28989 3 28919 28989 28920 3 28920 28989 28990 3 28920 28990 28921 3 28921 28990 28991 3 28921 28991 28922 3 28922 28991 28992 3 28922 28992 28923 3 28923 28992 28993 3 28923 28993 28924 3 28924 28993 28994 3 28924 28994 28925 3 28925 28994 28995 3 28925 28995 28926 3 28926 28995 28996 3 28926 28996 28927 3 28927 28996 28997 3 28927 28997 28928 3 28928 28997 28998 3 28928 28998 28929 3 28929 28998 28999 3 28929 28999 28930 3 28930 28999 29000 3 28930 29000 28931 3 28931 29000 29001 3 28931 29001 28932 3 28932 29001 29002 3 28932 29002 28933 3 28933 29002 29003 3 28933 29003 28934 3 28934 29003 29004 3 28934 29004 28935 3 28935 29004 29005 3 28935 29005 28936 3 28936 29005 29006 3 28936 29006 28937 3 28937 29006 29007 3 28937 29007 28938 3 28938 29007 29008 3 28938 29008 28939 3 28939 29008 29009 3 28939 29009 28940 3 28940 29009 29010 3 28940 29010 28941 3 28941 29010 29011 3 28941 29011 28942 3 28942 29011 29012 3 28942 29012 28943 3 28943 29012 29013 3 28943 29013 28944 3 28944 
29013 29014 3 28944 29014 28945 3 28945 29014 29015 3 28945 29015 28946 3 28946 29015 29016 3 28946 29016 28947 3 28947 29016 29017 3 28947 29017 28948 3 28948 29017 29018 3 28948 29018 28949 3 28949 29018 29019 3 28949 29019 28950 3 28950 29019 29020 3 28950 29020 28951 3 28951 29020 29021 3 28951 29021 28952 3 28952 29021 29022 3 28952 29022 28953 3 28953 29022 29023 3 28953 29023 28954 3 28954 29023 29024 3 28954 29024 28955 3 28955 29024 29025 3 28955 29025 28956 3 28956 29025 29026 3 28956 29026 28957 3 28957 29026 29027 3 28957 29027 28958 3 28958 29027 29028 3 28958 29028 28959 3 28959 29028 29029 3 28959 29029 28960 3 28960 29029 29030 3 28960 29030 28961 3 28961 29030 29031 3 28961 29031 28962 3 28962 29031 29032 3 28962 29032 28963 3 28963 29032 29033 3 28963 29033 28964 3 28964 29033 29034 3 28964 29034 28965 3 28965 29034 29035 3 28965 29035 28966 3 28966 29035 29036 3 28966 29036 28967 3 28967 29036 29037 3 28967 29037 28968 3 28968 29037 29038 3 28968 29038 28969 3 28970 29039 29040 3 28970 29040 28971 3 28971 29040 29041 3 28971 29041 28972 3 28972 29041 29042 3 28972 29042 28973 3 28973 29042 29043 3 28973 29043 28974 3 28974 29043 29044 3 28974 29044 28975 3 28975 29044 29045 3 28975 29045 28976 3 28976 29045 29046 3 28976 29046 28977 3 28977 29046 29047 3 28977 29047 28978 3 28978 29047 29048 3 28978 29048 28979 3 28979 29048 28980 3 29049 28980 29048 3 28980 29049 28981 3 29050 28981 29049 3 28981 29050 28982 3 29051 28982 29050 3 28982 29051 28983 3 29052 28983 29051 3 28983 29052 28984 3 29053 28984 29052 3 28984 29053 28985 3 29054 28985 29053 3 28985 29054 28986 3 29055 28986 29054 3 28986 29055 28987 3 29056 28987 29055 3 28987 29056 28988 3 29057 28988 29056 3 28988 29057 28989 3 29058 28989 29057 3 28989 29058 28990 3 29059 28990 29058 3 28990 29059 28991 3 29060 28991 29059 3 28991 29060 28992 3 29061 28992 29060 3 28992 29061 28993 3 29062 28993 29061 3 28993 29062 28994 3 29063 28994 29062 3 28994 29063 28995 3 29064 28995 29063 3 28995 
29064 28996 3 29065 28996 29064 3 28996 29065 28997 3 29066 28997 29065 3 28997 29066 28998 3 29067 28998 29066 3 28998 29067 28999 3 29068 28999 29067 3 28999 29068 29000 3 29069 29000 29068 3 29000 29069 29001 3 29070 29001 29069 3 29001 29070 29002 3 29071 29002 29070 3 29002 29071 29003 3 29072 29003 29071 3 29003 29072 29004 3 29073 29004 29072 3 29004 29073 29005 3 29074 29005 29073 3 29005 29074 29006 3 29075 29006 29074 3 29006 29075 29007 3 29076 29007 29075 3 29007 29076 29008 3 29077 29008 29076 3 29008 29077 29009 3 29078 29009 29077 3 29009 29078 29010 3 29079 29010 29078 3 29010 29079 29011 3 29080 29011 29079 3 29011 29080 29012 3 29081 29012 29080 3 29012 29081 29013 3 29082 29013 29081 3 29013 29082 29014 3 29083 29014 29082 3 29014 29083 29015 3 29084 29015 29083 3 29015 29084 29016 3 29085 29016 29084 3 29016 29085 29017 3 29086 29017 29085 3 29017 29086 29018 3 29087 29018 29086 3 29018 29087 29019 3 29088 29019 29087 3 29019 29088 29020 3 29089 29020 29088 3 29020 29089 29021 3 29090 29021 29089 3 29021 29090 29022 3 29091 29022 29090 3 29022 29091 29023 3 29092 29023 29091 3 29023 29092 29024 3 29093 29024 29092 3 29024 29093 29025 3 29094 29025 29093 3 29025 29094 29026 3 29095 29026 29094 3 29026 29095 29027 3 29096 29027 29095 3 29027 29096 29028 3 29097 29028 29096 3 29028 29097 29029 3 29098 29029 29097 3 29029 29098 29030 3 29099 29030 29098 3 29030 29099 29031 3 29100 29031 29099 3 29031 29100 29032 3 29101 29032 29100 3 29032 29101 29033 3 29102 29033 29101 3 29033 29102 29034 3 29103 29034 29102 3 29034 29103 29035 3 29104 29035 29103 3 29035 29104 29036 3 29105 29036 29104 3 29036 29105 29037 3 29106 29037 29105 3 29037 29106 29038 3 29107 29038 29106 3 29039 29108 29040 3 29109 29040 29108 3 29040 29109 29041 3 29110 29041 29109 3 29041 29110 29042 3 29111 29042 29110 3 29042 29111 29043 3 29112 29043 29111 3 29043 29112 29044 3 29113 29044 29112 3 29044 29113 29045 3 29114 29045 29113 3 29045 29114 29046 3 29115 29046 29114 3 29046 
29115 29047 3 29116 29047 29115 3 29047 29116 29048 3 29117 29048 29116 3 29048 29117 29049 3 29118 29049 29117 3 29049 29118 29050 3 29119 29050 29118 3 29050 29119 29051 3 29120 29051 29119 3 29051 29120 29052 3 29121 29052 29120 3 29052 29121 29053 3 29122 29053 29121 3 29053 29122 29054 3 29123 29054 29122 3 29054 29123 29055 3 29124 29055 29123 3 29055 29124 29056 3 29125 29056 29124 3 29056 29125 29057 3 29126 29057 29125 3 29057 29126 29058 3 29127 29058 29126 3 29058 29127 29059 3 29128 29059 29127 3 29059 29128 29060 3 29129 29060 29128 3 29060 29129 29061 3 29130 29061 29129 3 29061 29130 29062 3 29131 29062 29130 3 29062 29131 29063 3 29132 29063 29131 3 29063 29132 29064 3 29133 29064 29132 3 29064 29133 29065 3 29134 29065 29133 3 29065 29134 29066 3 29135 29066 29134 3 29066 29135 29067 3 29136 29067 29135 3 29067 29136 29068 3 29137 29068 29136 3 29068 29137 29069 3 29138 29069 29137 3 29069 29138 29070 3 29139 29070 29138 3 29070 29139 29071 3 29140 29071 29139 3 29071 29140 29072 3 29141 29072 29140 3 29072 29141 29073 3 29142 29073 29141 3 29073 29142 29074 3 29143 29074 29142 3 29074 29143 29075 3 29144 29075 29143 3 29075 29144 29076 3 29145 29076 29144 3 29076 29145 29077 3 29146 29077 29145 3 29077 29146 29078 3 29147 29078 29146 3 29078 29147 29079 3 29148 29079 29147 3 29079 29148 29080 3 29149 29080 29148 3 29080 29149 29081 3 29150 29081 29149 3 29081 29150 29082 3 29151 29082 29150 3 29082 29151 29083 3 29152 29083 29151 3 29083 29152 29084 3 29153 29084 29152 3 29084 29153 29154 3 29084 29154 29085 3 29085 29154 29155 3 29085 29155 29086 3 29086 29155 29156 3 29086 29156 29087 3 29087 29156 29157 3 29087 29157 29088 3 29088 29157 29158 3 29088 29158 29089 3 29089 29158 29159 3 29089 29159 29090 3 29090 29159 29160 3 29090 29160 29091 3 29091 29160 29161 3 29091 29161 29092 3 29092 29161 29162 3 29092 29162 29093 3 29093 29162 29163 3 29093 29163 29094 3 29094 29163 29164 3 29094 29164 29095 3 29095 29164 29165 3 29095 29165 29096 3 29096 
29165 29166 3 29096 29166 29097 3 29097 29166 29167 3 29097 29167 29098 3 29098 29167 29168 3 29098 29168 29099 3 29099 29168 29169 3 29099 29169 29100 3 29100 29169 29170 3 29100 29170 29101 3 29101 29170 29171 3 29101 29171 29102 3 29102 29171 29172 3 29102 29172 29103 3 29103 29172 29173 3 29103 29173 29104 3 29104 29173 29174 3 29104 29174 29105 3 29105 29174 29175 3 29105 29175 29106 3 29106 29175 29176 3 29106 29176 29107 3 29108 29177 29178 3 29108 29178 29109 3 29109 29178 29179 3 29109 29179 29110 3 29110 29179 29180 3 29110 29180 29111 3 29111 29180 29181 3 29111 29181 29112 3 29112 29181 29182 3 29112 29182 29113 3 29113 29182 29183 3 29113 29183 29114 3 29114 29183 29184 3 29114 29184 29115 3 29115 29184 29185 3 29115 29185 29116 3 29116 29185 29186 3 29116 29186 29117 3 29117 29186 29187 3 29117 29187 29118 3 29118 29187 29188 3 29118 29188 29119 3 29119 29188 29189 3 29119 29189 29120 3 29120 29189 29190 3 29120 29190 29121 3 29121 29190 29191 3 29121 29191 29122 3 29122 29191 29192 3 29122 29192 29123 3 29123 29192 29193 3 29123 29193 29124 3 29124 29193 29194 3 29124 29194 29125 3 29125 29194 29195 3 29125 29195 29126 3 29126 29195 29196 3 29126 29196 29127 3 29127 29196 29197 3 29127 29197 29128 3 29128 29197 29198 3 29128 29198 29129 3 29129 29198 29199 3 29129 29199 29130 3 29130 29199 29200 3 29130 29200 29131 3 29131 29200 29201 3 29131 29201 29132 3 29132 29201 29202 3 29132 29202 29133 3 29133 29202 29203 3 29133 29203 29134 3 29134 29203 29204 3 29134 29204 29135 3 29135 29204 29205 3 29135 29205 29136 3 29136 29205 29206 3 29136 29206 29137 3 29137 29206 29207 3 29137 29207 29138 3 29138 29207 29208 3 29138 29208 29139 3 29139 29208 29209 3 29139 29209 29140 3 29140 29209 29210 3 29140 29210 29141 3 29141 29210 29211 3 29141 29211 29142 3 29142 29211 29212 3 29142 29212 29143 3 29143 29212 29213 3 29143 29213 29144 3 29144 29213 29214 3 29144 29214 29145 3 29145 29214 29215 3 29145 29215 29146 3 29146 29215 29216 3 29146 29216 29147 3 29147 
29216 29217 3 29147 29217 29148 3 29148 29217 29218 3 29148 29218 29149 3 29149 29218 29219 3 29149 29219 29150 3 29150 29219 29220 3 29150 29220 29151 3 29151 29220 29221 3 29151 29221 29152 3 29152 29221 29222 3 29152 29222 29153 3 29153 29222 29223 3 29153 29223 29154 3 29154 29223 29224 3 29154 29224 29155 3 29155 29224 29225 3 29155 29225 29156 3 29156 29225 29226 3 29156 29226 29157 3 29157 29226 29227 3 29157 29227 29158 3 29158 29227 29228 3 29158 29228 29159 3 29159 29228 29229 3 29159 29229 29160 3 29160 29229 29230 3 29160 29230 29161 3 29161 29230 29231 3 29161 29231 29162 3 29162 29231 29232 3 29162 29232 29163 3 29163 29232 29233 3 29163 29233 29164 3 29164 29233 29234 3 29164 29234 29165 3 29165 29234 29235 3 29165 29235 29166 3 29166 29235 29236 3 29166 29236 29167 3 29167 29236 29237 3 29167 29237 29168 3 29168 29237 29238 3 29168 29238 29169 3 29169 29238 29239 3 29169 29239 29170 3 29170 29239 29240 3 29170 29240 29171 3 29171 29240 29241 3 29171 29241 29172 3 29172 29241 29242 3 29172 29242 29173 3 29173 29242 29243 3 29173 29243 29174 3 29174 29243 29244 3 29174 29244 29175 3 29175 29244 29245 3 29175 29245 29176 3 29177 29246 29247 3 29177 29247 29178 3 29178 29247 29248 3 29178 29248 29179 3 29179 29248 29249 3 29179 29249 29180 3 29180 29249 29250 3 29180 29250 29181 3 29181 29250 29251 3 29181 29251 29182 3 29182 29251 29252 3 29182 29252 29183 3 29183 29252 29253 3 29183 29253 29184 3 29184 29253 29254 3 29184 29254 29185 3 29185 29254 29255 3 29185 29255 29186 3 29186 29255 29256 3 29186 29256 29187 3 29187 29256 29257 3 29187 29257 29188 3 29188 29257 29258 3 29188 29258 29189 3 29189 29258 29190 3 29259 29190 29258 3 29190 29259 29191 3 29260 29191 29259 3 29191 29260 29192 3 29261 29192 29260 3 29192 29261 29193 3 29262 29193 29261 3 29193 29262 29194 3 29263 29194 29262 3 29194 29263 29195 3 29264 29195 29263 3 29195 29264 29196 3 29265 29196 29264 3 29196 29265 29197 3 29266 29197 29265 3 29197 29266 29198 3 29267 29198 29266 3 29198 
29267 29199 3 29268 29199 29267 3 29199 29268 29200 3 29269 29200 29268 3 29200 29269 29201 3 29270 29201 29269 3 29201 29270 29202 3 29271 29202 29270 3 29202 29271 29203 3 29272 29203 29271 3 29203 29272 29204 3 29273 29204 29272 3 29204 29273 29205 3 29274 29205 29273 3 29205 29274 29206 3 29275 29206 29274 3 29206 29275 29207 3 29276 29207 29275 3 29207 29276 29208 3 29277 29208 29276 3 29208 29277 29209 3 29278 29209 29277 3 29209 29278 29210 3 29279 29210 29278 3 29210 29279 29211 3 29280 29211 29279 3 29211 29280 29212 3 29281 29212 29280 3 29212 29281 29213 3 29282 29213 29281 3 29213 29282 29214 3 29283 29214 29282 3 29214 29283 29215 3 29284 29215 29283 3 29215 29284 29216 3 29285 29216 29284 3 29216 29285 29217 3 29286 29217 29285 3 29217 29286 29218 3 29287 29218 29286 3 29218 29287 29219 3 29288 29219 29287 3 29219 29288 29220 3 29289 29220 29288 3 29220 29289 29221 3 29290 29221 29289 3 29221 29290 29222 3 29291 29222 29290 3 29222 29291 29223 3 29292 29223 29291 3 29223 29292 29224 3 29293 29224 29292 3 29224 29293 29225 3 29294 29225 29293 3 29225 29294 29226 3 29295 29226 29294 3 29226 29295 29227 3 29296 29227 29295 3 29227 29296 29228 3 29297 29228 29296 3 29228 29297 29229 3 29298 29229 29297 3 29229 29298 29230 3 29299 29230 29298 3 29230 29299 29231 3 29300 29231 29299 3 29231 29300 29232 3 29301 29232 29300 3 29232 29301 29233 3 29302 29233 29301 3 29233 29302 29234 3 29303 29234 29302 3 29234 29303 29235 3 29304 29235 29303 3 29235 29304 29236 3 29305 29236 29304 3 29236 29305 29237 3 29306 29237 29305 3 29237 29306 29238 3 29307 29238 29306 3 29238 29307 29239 3 29308 29239 29307 3 29239 29308 29240 3 29309 29240 29308 3 29240 29309 29241 3 29310 29241 29309 3 29241 29310 29242 3 29311 29242 29310 3 29242 29311 29243 3 29312 29243 29311 3 29243 29312 29244 3 29313 29244 29312 3 29244 29313 29245 3 29314 29245 29313 3 29246 29315 29247 3 29316 29247 29315 3 29247 29316 29248 3 29317 29248 29316 3 29248 29317 29249 3 29318 29249 29317 3 29249 
29318 29250 3 29319 29250 29318 3 29250 29319 29251 3 29320 29251 29319 3 29251 29320 29252 3 29321 29252 29320 3 29252 29321 29253 3 29322 29253 29321 3 29253 29322 29254 3 29323 29254 29322 3 29254 29323 29255 3 29324 29255 29323 3 29255 29324 29256 3 29325 29256 29324 3 29256 29325 29257 3 29326 29257 29325 3 29257 29326 29258 3 29327 29258 29326 3 29258 29327 29259 3 29328 29259 29327 3 29259 29328 29260 3 29329 29260 29328 3 29260 29329 29261 3 29330 29261 29329 3 29261 29330 29262 3 29331 29262 29330 3 29262 29331 29263 3 29332 29263 29331 3 29263 29332 29264 3 29333 29264 29332 3 29264 29333 29265 3 29334 29265 29333 3 29265 29334 29266 3 29335 29266 29334 3 29266 29335 29267 3 29336 29267 29335 3 29267 29336 29268 3 29337 29268 29336 3 29268 29337 29269 3 29338 29269 29337 3 29269 29338 29270 3 29339 29270 29338 3 29270 29339 29271 3 29340 29271 29339 3 29271 29340 29272 3 29341 29272 29340 3 29272 29341 29273 3 29342 29273 29341 3 29273 29342 29274 3 29343 29274 29342 3 29274 29343 29275 3 29344 29275 29343 3 29275 29344 29276 3 29345 29276 29344 3 29276 29345 29277 3 29346 29277 29345 3 29277 29346 29278 3 29347 29278 29346 3 29278 29347 29279 3 29348 29279 29347 3 29279 29348 29280 3 29349 29280 29348 3 29280 29349 29281 3 29350 29281 29349 3 29281 29350 29282 3 29351 29282 29350 3 29282 29351 29283 3 29352 29283 29351 3 29283 29352 29284 3 29353 29284 29352 3 29284 29353 29285 3 29354 29285 29353 3 29285 29354 29286 3 29355 29286 29354 3 29286 29355 29287 3 29356 29287 29355 3 29287 29356 29288 3 29357 29288 29356 3 29288 29357 29289 3 29358 29289 29357 3 29289 29358 29290 3 29359 29290 29358 3 29290 29359 29291 3 29360 29291 29359 3 29291 29360 29292 3 29361 29292 29360 3 29292 29361 29293 3 29362 29293 29361 3 29293 29362 29294 3 29363 29294 29362 3 29294 29363 29295 3 29364 29295 29363 3 29295 29364 29365 3 29295 29365 29296 3 29296 29365 29366 3 29296 29366 29297 3 29297 29366 29367 3 29297 29367 29298 3 29298 29367 29368 3 29298 29368 29299 3 29299 
29368 29369 3 29299 29369 29300 3 29300 29369 29370 3 29300 29370 29301 3 29301 29370 29371 3 29301 29371 29302 3 29302 29371 29372 3 29302 29372 29303 3 29303 29372 29373 3 29303 29373 29304 3 29304 29373 29374 3 29304 29374 29305 3 29305 29374 29375 3 29305 29375 29306 3 29306 29375 29376 3 29306 29376 29307 3 29307 29376 29377 3 29307 29377 29308 3 29308 29377 29378 3 29308 29378 29309 3 29309 29378 29379 3 29309 29379 29310 3 29310 29379 29380 3 29310 29380 29311 3 29311 29380 29381 3 29311 29381 29312 3 29312 29381 29382 3 29312 29382 29313 3 29313 29382 29383 3 29313 29383 29314 3 29315 29384 29385 3 29315 29385 29316 3 29316 29385 29386 3 29316 29386 29317 3 29317 29386 29387 3 29317 29387 29318 3 29318 29387 29388 3 29318 29388 29319 3 29319 29388 29389 3 29319 29389 29320 3 29320 29389 29390 3 29320 29390 29321 3 29321 29390 29391 3 29321 29391 29322 3 29322 29391 29392 3 29322 29392 29323 3 29323 29392 29393 3 29323 29393 29324 3 29324 29393 29394 3 29324 29394 29325 3 29325 29394 29395 3 29325 29395 29326 3 29326 29395 29396 3 29326 29396 29327 3 29327 29396 29397 3 29327 29397 29328 3 29328 29397 29398 3 29328 29398 29329 3 29329 29398 29399 3 29329 29399 29330 3 29330 29399 29400 3 29330 29400 29331 3 29331 29400 29401 3 29331 29401 29332 3 29332 29401 29402 3 29332 29402 29333 3 29333 29402 29403 3 29333 29403 29334 3 29334 29403 29404 3 29334 29404 29335 3 29335 29404 29405 3 29335 29405 29336 3 29336 29405 29406 3 29336 29406 29337 3 29337 29406 29407 3 29337 29407 29338 3 29338 29407 29408 3 29338 29408 29339 3 29339 29408 29409 3 29339 29409 29340 3 29340 29409 29410 3 29340 29410 29341 3 29341 29410 29411 3 29341 29411 29342 3 29342 29411 29412 3 29342 29412 29343 3 29343 29412 29413 3 29343 29413 29344 3 29344 29413 29414 3 29344 29414 29345 3 29345 29414 29415 3 29345 29415 29346 3 29346 29415 29416 3 29346 29416 29347 3 29347 29416 29417 3 29347 29417 29348 3 29348 29417 29418 3 29348 29418 29349 3 29349 29418 29419 3 29349 29419 29350 3 29350 
29419 29420 3 29350 29420 29351 3 29351 29420 29421 3 29351 29421 29352 3 29352 29421 29422 3 29352 29422 29353 3 29353 29422 29423 3 29353 29423 29354 3 29354 29423 29424 3 29354 29424 29355 3 29355 29424 29425 3 29355 29425 29356 3 29356 29425 29426 3 29356 29426 29357 3 29357 29426 29427 3 29357 29427 29358 3 29358 29427 29428 3 29358 29428 29359 3 29359 29428 29429 3 29359 29429 29360 3 29360 29429 29430 3 29360 29430 29361 3 29361 29430 29431 3 29361 29431 29362 3 29362 29431 29432 3 29362 29432 29363 3 29363 29432 29433 3 29363 29433 29364 3 29364 29433 29434 3 29364 29434 29365 3 29365 29434 29435 3 29365 29435 29366 3 29366 29435 29436 3 29366 29436 29367 3 29367 29436 29437 3 29367 29437 29368 3 29368 29437 29438 3 29368 29438 29369 3 29369 29438 29439 3 29369 29439 29370 3 29370 29439 29440 3 29370 29440 29371 3 29371 29440 29441 3 29371 29441 29372 3 29372 29441 29442 3 29372 29442 29373 3 29373 29442 29443 3 29373 29443 29374 3 29374 29443 29444 3 29374 29444 29375 3 29375 29444 29445 3 29375 29445 29376 3 29376 29445 29446 3 29376 29446 29377 3 29377 29446 29447 3 29377 29447 29378 3 29378 29447 29448 3 29378 29448 29379 3 29379 29448 29449 3 29379 29449 29380 3 29380 29449 29450 3 29380 29450 29381 3 29381 29450 29451 3 29381 29451 29382 3 29382 29451 29452 3 29382 29452 29383 3 29384 29453 29454 3 29384 29454 29385 3 29385 29454 29455 3 29385 29455 29386 3 29386 29455 29456 3 29386 29456 29387 3 29387 29456 29457 3 29387 29457 29388 3 29388 29457 29458 3 29388 29458 29389 3 29389 29458 29459 3 29389 29459 29390 3 29390 29459 29460 3 29390 29460 29391 3 29391 29460 29461 3 29391 29461 29392 3 29392 29461 29462 3 29392 29462 29393 3 29393 29462 29463 3 29393 29463 29394 3 29394 29463 29464 3 29394 29464 29395 3 29395 29464 29465 3 29395 29465 29396 3 29396 29465 29466 3 29396 29466 29397 3 29397 29466 29467 3 29397 29467 29398 3 29398 29467 29468 3 29398 29468 29399 3 29399 29468 29469 3 29399 29469 29400 3 29400 29469 29401 3 29470 29401 29469 3 29401 
29470 29402 3 29471 29402 29470 3 29402 29471 29403 3 29472 29403 29471 3 29403 29472 29404 3 29473 29404 29472 3 29404 29473 29405 3 29474 29405 29473 3 29405 29474 29406 3 29475 29406 29474 3 29406 29475 29407 3 29476 29407 29475 3 29407 29476 29408 3 29477 29408 29476 3 29408 29477 29409 3 29478 29409 29477 3 29409 29478 29410 3 29479 29410 29478 3 29410 29479 29411 3 29480 29411 29479 3 29411 29480 29412 3 29481 29412 29480 3 29412 29481 29413 3 29482 29413 29481 3 29413 29482 29414 3 29483 29414 29482 3 29414 29483 29415 3 29484 29415 29483 3 29415 29484 29416 3 29485 29416 29484 3 29416 29485 29417 3 29486 29417 29485 3 29417 29486 29418 3 29487 29418 29486 3 29418 29487 29419 3 29488 29419 29487 3 29419 29488 29420 3 29489 29420 29488 3 29420 29489 29421 3 29490 29421 29489 3 29421 29490 29422 3 29491 29422 29490 3 29422 29491 29423 3 29492 29423 29491 3 29423 29492 29424 3 29493 29424 29492 3 29424 29493 29425 3 29494 29425 29493 3 29425 29494 29426 3 29495 29426 29494 3 29426 29495 29427 3 29496 29427 29495 3 29427 29496 29428 3 29497 29428 29496 3 29428 29497 29429 3 29498 29429 29497 3 29429 29498 29430 3 29499 29430 29498 3 29430 29499 29431 3 29500 29431 29499 3 29431 29500 29432 3 29501 29432 29500 3 29432 29501 29433 3 29502 29433 29501 3 29433 29502 29434 3 29503 29434 29502 3 29434 29503 29435 3 29504 29435 29503 3 29435 29504 29436 3 29505 29436 29504 3 29436 29505 29437 3 29506 29437 29505 3 29437 29506 29438 3 29507 29438 29506 3 29438 29507 29439 3 29508 29439 29507 3 29439 29508 29440 3 29509 29440 29508 3 29440 29509 29441 3 29510 29441 29509 3 29441 29510 29442 3 29511 29442 29510 3 29442 29511 29443 3 29512 29443 29511 3 29443 29512 29444 3 29513 29444 29512 3 29444 29513 29445 3 29514 29445 29513 3 29445 29514 29446 3 29515 29446 29514 3 29446 29515 29447 3 29516 29447 29515 3 29447 29516 29448 3 29517 29448 29516 3 29448 29517 29449 3 29518 29449 29517 3 29449 29518 29450 3 29519 29450 29518 3 29450 29519 29451 3 29520 29451 29519 3 29451 
29520 29452 3 29521 29452 29520 3 29453 29522 29454 3 29523 29454 29522 3 29454 29523 29455 3 29524 29455 29523 3 29455 29524 29456 3 29525 29456 29524 3 29456 29525 29457 3 29526 29457 29525 3 29457 29526 29458 3 29527 29458 29526 3 29458 29527 29459 3 29528 29459 29527 3 29459 29528 29460 3 29529 29460 29528 3 29460 29529 29461 3 29530 29461 29529 3 29461 29530 29462 3 29531 29462 29530 3 29462 29531 29463 3 29532 29463 29531 3 29463 29532 29464 3 29533 29464 29532 3 29464 29533 29465 3 29534 29465 29533 3 29465 29534 29466 3 29535 29466 29534 3 29466 29535 29467 3 29536 29467 29535 3 29467 29536 29468 3 29537 29468 29536 3 29468 29537 29469 3 29538 29469 29537 3 29469 29538 29470 3 29539 29470 29538 3 29470 29539 29471 3 29540 29471 29539 3 29471 29540 29472 3 29541 29472 29540 3 29472 29541 29473 3 29542 29473 29541 3 29473 29542 29474 3 29543 29474 29542 3 29474 29543 29475 3 29544 29475 29543 3 29475 29544 29476 3 29545 29476 29544 3 29476 29545 29477 3 29546 29477 29545 3 29477 29546 29478 3 29547 29478 29546 3 29478 29547 29479 3 29548 29479 29547 3 29479 29548 29480 3 29549 29480 29548 3 29480 29549 29481 3 29550 29481 29549 3 29481 29550 29482 3 29551 29482 29550 3 29482 29551 29483 3 29552 29483 29551 3 29483 29552 29484 3 29553 29484 29552 3 29484 29553 29485 3 29554 29485 29553 3 29485 29554 29486 3 29555 29486 29554 3 29486 29555 29487 3 29556 29487 29555 3 29487 29556 29488 3 29557 29488 29556 3 29488 29557 29489 3 29558 29489 29557 3 29489 29558 29490 3 29559 29490 29558 3 29490 29559 29491 3 29560 29491 29559 3 29491 29560 29492 3 29561 29492 29560 3 29492 29561 29493 3 29562 29493 29561 3 29493 29562 29494 3 29563 29494 29562 3 29494 29563 29495 3 29564 29495 29563 3 29495 29564 29496 3 29565 29496 29564 3 29496 29565 29497 3 29566 29497 29565 3 29497 29566 29498 3 29567 29498 29566 3 29498 29567 29499 3 29568 29499 29567 3 29499 29568 29500 3 29569 29500 29568 3 29500 29569 29501 3 29570 29501 29569 3 29501 29570 29502 3 29571 29502 29570 3 29502 
29571 29503 3 29572 29503 29571 3 29503 29572 29504 3 29573 29504 29572 3 29504 29573 29505 3 29574 29505 29573 3 29505 29574 29506 3 29575 29506 29574 3 29506 29575 29576 3 29506 29576 29507 3 29507 29576 29577 3 29507 29577 29508 3 29508 29577 29578 3 29508 29578 29509 3 29509 29578 29579 3 29509 29579 29510 3 29510 29579 29580 3 29510 29580 29511 3 29511 29580 29581 3 29511 29581 29512 3 29512 29581 29582 3 29512 29582 29513 3 29513 29582 29583 3 29513 29583 29514 3 29514 29583 29584 3 29514 29584 29515 3 29515 29584 29585 3 29515 29585 29516 3 29516 29585 29586 3 29516 29586 29517 3 29517 29586 29587 3 29517 29587 29518 3 29518 29587 29588 3 29518 29588 29519 3 29519 29588 29589 3 29519 29589 29520 3 29520 29589 29590 3 29520 29590 29521 3 29522 29591 29592 3 29522 29592 29523 3 29523 29592 29593 3 29523 29593 29524 3 29524 29593 29594 3 29524 29594 29525 3 29525 29594 29595 3 29525 29595 29526 3 29526 29595 29596 3 29526 29596 29527 3 29527 29596 29597 3 29527 29597 29528 3 29528 29597 29598 3 29528 29598 29529 3 29529 29598 29599 3 29529 29599 29530 3 29530 29599 29600 3 29530 29600 29531 3 29531 29600 29601 3 29531 29601 29532 3 29532 29601 29602 3 29532 29602 29533 3 29533 29602 29603 3 29533 29603 29534 3 29534 29603 29604 3 29534 29604 29535 3 29535 29604 29605 3 29535 29605 29536 3 29536 29605 29606 3 29536 29606 29537 3 29537 29606 29607 3 29537 29607 29538 3 29538 29607 29608 3 29538 29608 29539 3 29539 29608 29609 3 29539 29609 29540 3 29540 29609 29610 3 29540 29610 29541 3 29541 29610 29611 3 29541 29611 29542 3 29542 29611 29612 3 29542 29612 29543 3 29543 29612 29613 3 29543 29613 29544 3 29544 29613 29614 3 29544 29614 29545 3 29545 29614 29615 3 29545 29615 29546 3 29546 29615 29616 3 29546 29616 29547 3 29547 29616 29617 3 29547 29617 29548 3 29548 29617 29618 3 29548 29618 29549 3 29549 29618 29619 3 29549 29619 29550 3 29550 29619 29620 3 29550 29620 29551 3 29551 29620 29621 3 29551 29621 29552 3 29552 29621 29622 3 29552 29622 29553 3 29553 
29622 29623 3 29553 29623 29554 3 29554 29623 29624 3 29554 29624 29555 3 29555 29624 29625 3 29555 29625 29556 3 29556 29625 29626 3 29556 29626 29557 3 29557 29626 29627 3 29557 29627 29558 3 29558 29627 29628 3 29558 29628 29559 3 29559 29628 29629 3 29559 29629 29560 3 29560 29629 29630 3 29560 29630 29561 3 29561 29630 29631 3 29561 29631 29562 3 29562 29631 29632 3 29562 29632 29563 3 29563 29632 29633 3 29563 29633 29564 3 29564 29633 29634 3 29564 29634 29565 3 29565 29634 29635 3 29565 29635 29566 3 29566 29635 29636 3 29566 29636 29567 3 29567 29636 29637 3 29567 29637 29568 3 29568 29637 29638 3 29568 29638 29569 3 29569 29638 29639 3 29569 29639 29570 3 29570 29639 29640 3 29570 29640 29571 3 29571 29640 29641 3 29571 29641 29572 3 29572 29641 29642 3 29572 29642 29573 3 29573 29642 29643 3 29573 29643 29574 3 29574 29643 29644 3 29574 29644 29575 3 29575 29644 29645 3 29575 29645 29576 3 29576 29645 29646 3 29576 29646 29577 3 29577 29646 29647 3 29577 29647 29578 3 29578 29647 29648 3 29578 29648 29579 3 29579 29648 29649 3 29579 29649 29580 3 29580 29649 29650 3 29580 29650 29581 3 29581 29650 29651 3 29581 29651 29582 3 29582 29651 29652 3 29582 29652 29583 3 29583 29652 29653 3 29583 29653 29584 3 29584 29653 29654 3 29584 29654 29585 3 29585 29654 29655 3 29585 29655 29586 3 29586 29655 29656 3 29586 29656 29587 3 29587 29656 29657 3 29587 29657 29588 3 29588 29657 29658 3 29588 29658 29589 3 29589 29658 29659 3 29589 29659 29590 3 29591 29660 29661 3 29591 29661 29592 3 29592 29661 29662 3 29592 29662 29593 3 29593 29662 29663 3 29593 29663 29594 3 29594 29663 29664 3 29594 29664 29595 3 29595 29664 29665 3 29595 29665 29596 3 29596 29665 29666 3 29596 29666 29597 3 29597 29666 29667 3 29597 29667 29598 3 29598 29667 29668 3 29598 29668 29599 3 29599 29668 29669 3 29599 29669 29600 3 29600 29669 29670 3 29600 29670 29601 3 29601 29670 29671 3 29601 29671 29602 3 29602 29671 29672 3 29602 29672 29603 3 29603 29672 29673 3 29603 29673 29604 3 29604 
29673 29674 3 29604 29674 29605 3 29605 29674 29675 3 29605 29675 29606 3 29606 29675 29676 3 29606 29676 29607 3 29607 29676 29677 3 29607 29677 29608 3 29608 29677 29678 3 29608 29678 29609 3 29609 29678 29679 3 29609 29679 29610 3 29610 29679 29680 3 29610 29680 29611 3 29611 29680 29681 3 29611 29681 29612 3 29612 29681 29613 3 29682 29613 29681 3 29613 29682 29614 3 29683 29614 29682 3 29614 29683 29615 3 29684 29615 29683 3 29615 29684 29616 3 29685 29616 29684 3 29616 29685 29617 3 29686 29617 29685 3 29617 29686 29618 3 29687 29618 29686 3 29618 29687 29619 3 29688 29619 29687 3 29619 29688 29620 3 29689 29620 29688 3 29620 29689 29621 3 29690 29621 29689 3 29621 29690 29622 3 29691 29622 29690 3 29622 29691 29623 3 29692 29623 29691 3 29623 29692 29624 3 29693 29624 29692 3 29624 29693 29625 3 29694 29625 29693 3 29625 29694 29626 3 29695 29626 29694 3 29626 29695 29627 3 29696 29627 29695 3 29627 29696 29628 3 29697 29628 29696 3 29628 29697 29629 3 29698 29629 29697 3 29629 29698 29630 3 29699 29630 29698 3 29630 29699 29631 3 29700 29631 29699 3 29631 29700 29632 3 29701 29632 29700 3 29632 29701 29633 3 29702 29633 29701 3 29633 29702 29634 3 29703 29634 29702 3 29634 29703 29635 3 29704 29635 29703 3 29635 29704 29636 3 29705 29636 29704 3 29636 29705 29637 3 29706 29637 29705 3 29637 29706 29638 3 29707 29638 29706 3 29638 29707 29639 3 29708 29639 29707 3 29639 29708 29640 3 29709 29640 29708 3 29640 29709 29641 3 29710 29641 29709 3 29641 29710 29642 3 29711 29642 29710 3 29642 29711 29643 3 29712 29643 29711 3 29643 29712 29644 3 29713 29644 29712 3 29644 29713 29645 3 29714 29645 29713 3 29645 29714 29646 3 29715 29646 29714 3 29646 29715 29647 3 29716 29647 29715 3 29647 29716 29648 3 29717 29648 29716 3 29648 29717 29649 3 29718 29649 29717 3 29649 29718 29650 3 29719 29650 29718 3 29650 29719 29651 3 29720 29651 29719 3 29651 29720 29652 3 29721 29652 29720 3 29652 29721 29653 3 29722 29653 29721 3 29653 29722 29654 3 29723 29654 29722 3 29654 
29723 29655 3 29724 29655 29723 3 29655 29724 29656 3 29725 29656 29724 3 29656 29725 29657 3 29726 29657 29725 3 29657 29726 29658 3 29727 29658 29726 3 29658 29727 29659 3 29728 29659 29727 3 29660 29729 29661 3 29730 29661 29729 3 29661 29730 29662 3 29731 29662 29730 3 29662 29731 29663 3 29732 29663 29731 3 29663 29732 29664 3 29733 29664 29732 3 29664 29733 29665 3 29734 29665 29733 3 29665 29734 29666 3 29735 29666 29734 3 29666 29735 29667 3 29736 29667 29735 3 29667 29736 29668 3 29737 29668 29736 3 29668 29737 29669 3 29738 29669 29737 3 29669 29738 29670 3 29739 29670 29738 3 29670 29739 29671 3 29740 29671 29739 3 29671 29740 29672 3 29741 29672 29740 3 29672 29741 29673 3 29742 29673 29741 3 29673 29742 29674 3 29743 29674 29742 3 29674 29743 29675 3 29744 29675 29743 3 29675 29744 29676 3 29745 29676 29744 3 29676 29745 29677 3 29746 29677 29745 3 29677 29746 29678 3 29747 29678 29746 3 29678 29747 29679 3 29748 29679 29747 3 29679 29748 29680 3 29749 29680 29748 3 29680 29749 29681 3 29750 29681 29749 3 29681 29750 29682 3 29751 29682 29750 3 29682 29751 29683 3 29752 29683 29751 3 29683 29752 29684 3 29753 29684 29752 3 29684 29753 29685 3 29754 29685 29753 3 29685 29754 29686 3 29755 29686 29754 3 29686 29755 29687 3 29756 29687 29755 3 29687 29756 29688 3 29757 29688 29756 3 29688 29757 29689 3 29758 29689 29757 3 29689 29758 29690 3 29759 29690 29758 3 29690 29759 29691 3 29760 29691 29759 3 29691 29760 29692 3 29761 29692 29760 3 29692 29761 29693 3 29762 29693 29761 3 29693 29762 29694 3 29763 29694 29762 3 29694 29763 29695 3 29764 29695 29763 3 29695 29764 29696 3 29765 29696 29764 3 29696 29765 29697 3 29766 29697 29765 3 29697 29766 29698 3 29767 29698 29766 3 29698 29767 29699 3 29768 29699 29767 3 29699 29768 29700 3 29769 29700 29768 3 29700 29769 29701 3 29770 29701 29769 3 29701 29770 29702 3 29771 29702 29770 3 29702 29771 29703 3 29772 29703 29771 3 29703 29772 29704 3 29773 29704 29772 3 29704 29773 29705 3 29774 29705 29773 3 29705 
29774 29706 3 29775 29706 29774 3 29706 29775 29707 3 29776 29707 29775 3 29707 29776 29708 3 29777 29708 29776 3 29708 29777 29709 3 29778 29709 29777 3 29709 29778 29710 3 29779 29710 29778 3 29710 29779 29711 3 29780 29711 29779 3 29711 29780 29712 3 29781 29712 29780 3 29712 29781 29713 3 29782 29713 29781 3 29713 29782 29714 3 29783 29714 29782 3 29714 29783 29715 3 29784 29715 29783 3 29715 29784 29716 3 29785 29716 29784 3 29716 29785 29717 3 29786 29717 29785 3 29717 29786 29718 3 29787 29718 29786 3 29718 29787 29788 3 29718 29788 29719 3 29719 29788 29789 3 29719 29789 29720 3 29720 29789 29790 3 29720 29790 29721 3 29721 29790 29791 3 29721 29791 29722 3 29722 29791 29792 3 29722 29792 29723 3 29723 29792 29793 3 29723 29793 29724 3 29724 29793 29794 3 29724 29794 29725 3 29725 29794 29795 3 29725 29795 29726 3 29726 29795 29796 3 29726 29796 29727 3 29727 29796 29797 3 29727 29797 29728 3 29729 29798 29799 3 29729 29799 29730 3 29730 29799 29800 3 29730 29800 29731 3 29731 29800 29801 3 29731 29801 29732 3 29732 29801 29802 3 29732 29802 29733 3 29733 29802 29803 3 29733 29803 29734 3 29734 29803 29804 3 29734 29804 29735 3 29735 29804 29805 3 29735 29805 29736 3 29736 29805 29806 3 29736 29806 29737 3 29737 29806 29807 3 29737 29807 29738 3 29738 29807 29808 3 29738 29808 29739 3 29739 29808 29809 3 29739 29809 29740 3 29740 29809 29810 3 29740 29810 29741 3 29741 29810 29811 3 29741 29811 29742 3 29742 29811 29812 3 29742 29812 29743 3 29743 29812 29813 3 29743 29813 29744 3 29744 29813 29814 3 29744 29814 29745 3 29745 29814 29815 3 29745 29815 29746 3 29746 29815 29816 3 29746 29816 29747 3 29747 29816 29817 3 29747 29817 29748 3 29748 29817 29818 3 29748 29818 29749 3 29749 29818 29819 3 29749 29819 29750 3 29750 29819 29820 3 29750 29820 29751 3 29751 29820 29821 3 29751 29821 29752 3 29752 29821 29822 3 29752 29822 29753 3 29753 29822 29823 3 29753 29823 29754 3 29754 29823 29824 3 29754 29824 29755 3 29755 29824 29825 3 29755 29825 29756 3 29756 
29825 29826 3 29756 29826 29757 3 29757 29826 29827 3 29757 29827 29758 3 29758 29827 29828 3 29758 29828 29759 3 29759 29828 29829 3 29759 29829 29760 3 29760 29829 29830 3 29760 29830 29761 3 29761 29830 29831 3 29761 29831 29762 3 29762 29831 29832 3 29762 29832 29763 3 29763 29832 29833 3 29763 29833 29764 3 29764 29833 29834 3 29764 29834 29765 3 29765 29834 29835 3 29765 29835 29766 3 29766 29835 29836 3 29766 29836 29767 3 29767 29836 29837 3 29767 29837 29768 3 29768 29837 29838 3 29768 29838 29769 3 29769 29838 29839 3 29769 29839 29770 3 29770 29839 29840 3 29770 29840 29771 3 29771 29840 29841 3 29771 29841 29772 3 29772 29841 29842 3 29772 29842 29773 3 29773 29842 29843 3 29773 29843 29774 3 29774 29843 29844 3 29774 29844 29775 3 29775 29844 29845 3 29775 29845 29776 3 29776 29845 29846 3 29776 29846 29777 3 29777 29846 29847 3 29777 29847 29778 3 29778 29847 29848 3 29778 29848 29779 3 29779 29848 29849 3 29779 29849 29780 3 29780 29849 29850 3 29780 29850 29781 3 29781 29850 29851 3 29781 29851 29782 3 29782 29851 29852 3 29782 29852 29783 3 29783 29852 29853 3 29783 29853 29784 3 29784 29853 29854 3 29784 29854 29785 3 29785 29854 29855 3 29785 29855 29786 3 29786 29855 29856 3 29786 29856 29787 3 29787 29856 29857 3 29787 29857 29788 3 29788 29857 29858 3 29788 29858 29789 3 29789 29858 29859 3 29789 29859 29790 3 29790 29859 29860 3 29790 29860 29791 3 29791 29860 29861 3 29791 29861 29792 3 29792 29861 29862 3 29792 29862 29793 3 29793 29862 29863 3 29793 29863 29794 3 29794 29863 29864 3 29794 29864 29795 3 29795 29864 29865 3 29795 29865 29796 3 29796 29865 29866 3 29796 29866 29797 3 29798 29867 29868 3 29798 29868 29799 3 29799 29868 29869 3 29799 29869 29800 3 29800 29869 29870 3 29800 29870 29801 3 29801 29870 29871 3 29801 29871 29802 3 29802 29871 29872 3 29802 29872 29803 3 29803 29872 29873 3 29803 29873 29804 3 29804 29873 29874 3 29804 29874 29805 3 29805 29874 29875 3 29805 29875 29806 3 29806 29875 29876 3 29806 29876 29807 3 29807 
29876 29877 3 29807 29877 29808 3 29808 29877 29878 3 29808 29878 29809 3 29809 29878 29879 3 29809 29879 29810 3 29810 29879 29880 3 29810 29880 29811 3 29811 29880 29881 3 29811 29881 29812 3 29812 29881 29882 3 29812 29882 29813 3 29813 29882 29883 3 29813 29883 29814 3 29814 29883 29884 3 29814 29884 29815 3 29815 29884 29885 3 29815 29885 29816 3 29816 29885 29886 3 29816 29886 29817 3 29817 29886 29887 3 29817 29887 29818 3 29818 29887 29888 3 29818 29888 29819 3 29819 29888 29889 3 29819 29889 29820 3 29820 29889 29890 3 29820 29890 29821 3 29821 29890 29891 3 29821 29891 29822 3 29822 29891 29892 3 29822 29892 29823 3 29823 29892 29893 3 29823 29893 29824 3 29824 29893 29825 3 29894 29825 29893 3 29825 29894 29826 3 29895 29826 29894 3 29826 29895 29827 3 29896 29827 29895 3 29827 29896 29828 3 29897 29828 29896 3 29828 29897 29829 3 29898 29829 29897 3 29829 29898 29830 3 29899 29830 29898 3 29830 29899 29831 3 29900 29831 29899 3 29831 29900 29832 3 29901 29832 29900 3 29832 29901 29833 3 29902 29833 29901 3 29833 29902 29834 3 29903 29834 29902 3 29834 29903 29835 3 29904 29835 29903 3 29835 29904 29836 3 29905 29836 29904 3 29836 29905 29837 3 29906 29837 29905 3 29837 29906 29838 3 29907 29838 29906 3 29838 29907 29839 3 29908 29839 29907 3 29839 29908 29840 3 29909 29840 29908 3 29840 29909 29841 3 29910 29841 29909 3 29841 29910 29842 3 29911 29842 29910 3 29842 29911 29843 3 29912 29843 29911 3 29843 29912 29844 3 29913 29844 29912 3 29844 29913 29845 3 29914 29845 29913 3 29845 29914 29846 3 29915 29846 29914 3 29846 29915 29847 3 29916 29847 29915 3 29847 29916 29848 3 29917 29848 29916 3 29848 29917 29849 3 29918 29849 29917 3 29849 29918 29850 3 29919 29850 29918 3 29850 29919 29851 3 29920 29851 29919 3 29851 29920 29852 3 29921 29852 29920 3 29852 29921 29853 3 29922 29853 29921 3 29853 29922 29854 3 29923 29854 29922 3 29854 29923 29855 3 29924 29855 29923 3 29855 29924 29856 3 29925 29856 29924 3 29856 29925 29857 3 29926 29857 29925 3 29857 
29926 29858 3 29927 29858 29926 3 29858 29927 29859 3 29928 29859 29927 3 29859 29928 29860 3 29929 29860 29928 3 29860 29929 29861 3 29930 29861 29929 3 29861 29930 29862 3 29931 29862 29930 3 29862 29931 29863 3 29932 29863 29931 3 29863 29932 29864 3 29933 29864 29932 3 29864 29933 29865 3 29934 29865 29933 3 29865 29934 29866 3 29935 29866 29934 3 29867 29936 29868 3 29937 29868 29936 3 29868 29937 29869 3 29938 29869 29937 3 29869 29938 29870 3 29939 29870 29938 3 29870 29939 29871 3 29940 29871 29939 3 29871 29940 29872 3 29941 29872 29940 3 29872 29941 29873 3 29942 29873 29941 3 29873 29942 29874 3 29943 29874 29942 3 29874 29943 29875 3 29944 29875 29943 3 29875 29944 29876 3 29945 29876 29944 3 29876 29945 29877 3 29946 29877 29945 3 29877 29946 29878 3 29947 29878 29946 3 29878 29947 29879 3 29948 29879 29947 3 29879 29948 29880 3 29949 29880 29948 3 29880 29949 29881 3 29950 29881 29949 3 29881 29950 29882 3 29951 29882 29950 3 29882 29951 29883 3 29952 29883 29951 3 29883 29952 29884 3 29953 29884 29952 3 29884 29953 29885 3 29954 29885 29953 3 29885 29954 29886 3 29955 29886 29954 3 29886 29955 29887 3 29956 29887 29955 3 29887 29956 29888 3 29957 29888 29956 3 29888 29957 29889 3 29958 29889 29957 3 29889 29958 29890 3 29959 29890 29958 3 29890 29959 29891 3 29960 29891 29959 3 29891 29960 29892 3 29961 29892 29960 3 29892 29961 29893 3 29962 29893 29961 3 29893 29962 29894 3 29963 29894 29962 3 29894 29963 29895 3 29964 29895 29963 3 29895 29964 29896 3 29965 29896 29964 3 29896 29965 29897 3 29966 29897 29965 3 29897 29966 29898 3 29967 29898 29966 3 29898 29967 29899 3 29968 29899 29967 3 29899 29968 29900 3 29969 29900 29968 3 29900 29969 29901 3 29970 29901 29969 3 29901 29970 29902 3 29971 29902 29970 3 29902 29971 29903 3 29972 29903 29971 3 29903 29972 29904 3 29973 29904 29972 3 29904 29973 29905 3 29974 29905 29973 3 29905 29974 29906 3 29975 29906 29974 3 29906 29975 29907 3 29976 29907 29975 3 29907 29976 29908 3 29977 29908 29976 3 29908 
29977 29909 3 29978 29909 29977 3 29909 29978 29910 3 29979 29910 29978 3 29910 29979 29911 3 29980 29911 29979 3 29911 29980 29912 3 29981 29912 29980 3 29912 29981 29913 3 29982 29913 29981 3 29913 29982 29914 3 29983 29914 29982 3 29914 29983 29915 3 29984 29915 29983 3 29915 29984 29916 3 29985 29916 29984 3 29916 29985 29917 3 29986 29917 29985 3 29917 29986 29918 3 29987 29918 29986 3 29918 29987 29919 3 29988 29919 29987 3 29919 29988 29920 3 29989 29920 29988 3 29920 29989 29921 3 29990 29921 29989 3 29921 29990 29922 3 29991 29922 29990 3 29922 29991 29923 3 29992 29923 29991 3 29923 29992 29924 3 29993 29924 29992 3 29924 29993 29925 3 29994 29925 29993 3 29925 29994 29926 3 29995 29926 29994 3 29926 29995 29927 3 29996 29927 29995 3 29927 29996 29928 3 29997 29928 29996 3 29928 29997 29929 3 29998 29929 29997 3 29929 29998 29930 3 29999 29930 29998 3 29930 29999 29931 3 30000 29931 29999 3 29931 30000 30001 3 29931 30001 29932 3 29932 30001 30002 3 29932 30002 29933 3 29933 30002 30003 3 29933 30003 29934 3 29934 30003 30004 3 29934 30004 29935 3 29936 30005 30006 3 29936 30006 29937 3 29937 30006 30007 3 29937 30007 29938 3 29938 30007 30008 3 29938 30008 29939 3 29939 30008 30009 3 29939 30009 29940 3 29940 30009 30010 3 29940 30010 29941 3 29941 30010 30011 3 29941 30011 29942 3 29942 30011 30012 3 29942 30012 29943 3 29943 30012 30013 3 29943 30013 29944 3 29944 30013 30014 3 29944 30014 29945 3 29945 30014 30015 3 29945 30015 29946 3 29946 30015 30016 3 29946 30016 29947 3 29947 30016 30017 3 29947 30017 29948 3 29948 30017 30018 3 29948 30018 29949 3 29949 30018 30019 3 29949 30019 29950 3 29950 30019 30020 3 29950 30020 29951 3 29951 30020 30021 3 29951 30021 29952 3 29952 30021 30022 3 29952 30022 29953 3 29953 30022 30023 3 29953 30023 29954 3 29954 30023 30024 3 29954 30024 29955 3 29955 30024 30025 3 29955 30025 29956 3 29956 30025 30026 3 29956 30026 29957 3 29957 30026 30027 3 29957 30027 29958 3 29958 30027 30028 3 29958 30028 29959 3 29959 
30028 30029 3 29959 30029 29960 3 29960 30029 30030 3 29960 30030 29961 3 29961 30030 30031 3 29961 30031 29962 3 29962 30031 30032 3 29962 30032 29963 3 29963 30032 30033 3 29963 30033 29964 3 29964 30033 30034 3 29964 30034 29965 3 29965 30034 30035 3 29965 30035 29966 3 29966 30035 30036 3 29966 30036 29967 3 29967 30036 30037 3 29967 30037 29968 3 29968 30037 30038 3 29968 30038 29969 3 29969 30038 30039 3 29969 30039 29970 3 29970 30039 30040 3 29970 30040 29971 3 29971 30040 30041 3 29971 30041 29972 3 29972 30041 30042 3 29972 30042 29973 3 29973 30042 30043 3 29973 30043 29974 3 29974 30043 30044 3 29974 30044 29975 3 29975 30044 30045 3 29975 30045 29976 3 29976 30045 30046 3 29976 30046 29977 3 29977 30046 30047 3 29977 30047 29978 3 29978 30047 30048 3 29978 30048 29979 3 29979 30048 30049 3 29979 30049 29980 3 29980 30049 30050 3 29980 30050 29981 3 29981 30050 30051 3 29981 30051 29982 3 29982 30051 30052 3 29982 30052 29983 3 29983 30052 30053 3 29983 30053 29984 3 29984 30053 30054 3 29984 30054 29985 3 29985 30054 30055 3 29985 30055 29986 3 29986 30055 30056 3 29986 30056 29987 3 29987 30056 30057 3 29987 30057 29988 3 29988 30057 30058 3 29988 30058 29989 3 29989 30058 30059 3 29989 30059 29990 3 29990 30059 30060 3 29990 30060 29991 3 29991 30060 30061 3 29991 30061 29992 3 29992 30061 30062 3 29992 30062 29993 3 29993 30062 30063 3 29993 30063 29994 3 29994 30063 30064 3 29994 30064 29995 3 29995 30064 30065 3 29995 30065 29996 3 29996 30065 30066 3 29996 30066 29997 3 29997 30066 30067 3 29997 30067 29998 3 29998 30067 30068 3 29998 30068 29999 3 29999 30068 30069 3 29999 30069 30000 3 30000 30069 30070 3 30000 30070 30001 3 30001 30070 30071 3 30001 30071 30002 3 30002 30071 30072 3 30002 30072 30003 3 30003 30072 30073 3 30003 30073 30004 3 30005 30074 30075 3 30005 30075 30006 3 30006 30075 30076 3 30006 30076 30007 3 30007 30076 30077 3 30007 30077 30008 3 30008 30077 30078 3 30008 30078 30009 3 30009 30078 30079 3 30009 30079 30010 3 30010 
30079 30080 3 30010 30080 30011 3 30011 30080 30081 3 30011 30081 30012 3 30012 30081 30082 3 30012 30082 30013 3 30013 30082 30083 3 30013 30083 30014 3 30014 30083 30084 3 30014 30084 30015 3 30015 30084 30085 3 30015 30085 30016 3 30016 30085 30086 3 30016 30086 30017 3 30017 30086 30087 3 30017 30087 30018 3 30018 30087 30088 3 30018 30088 30019 3 30019 30088 30089 3 30019 30089 30020 3 30020 30089 30090 3 30020 30090 30021 3 30021 30090 30091 3 30021 30091 30022 3 30022 30091 30092 3 30022 30092 30023 3 30023 30092 30093 3 30023 30093 30024 3 30024 30093 30094 3 30024 30094 30025 3 30025 30094 30095 3 30025 30095 30026 3 30026 30095 30096 3 30026 30096 30027 3 30027 30096 30097 3 30027 30097 30028 3 30028 30097 30098 3 30028 30098 30029 3 30029 30098 30099 3 30029 30099 30030 3 30030 30099 30100 3 30030 30100 30031 3 30031 30100 30101 3 30031 30101 30032 3 30032 30101 30102 3 30032 30102 30033 3 30033 30102 30103 3 30033 30103 30034 3 30034 30103 30104 3 30034 30104 30035 3 30035 30104 30105 3 30035 30105 30036 3 30036 30105 30106 3 30036 30106 30037 3 30037 30106 30038 3 30107 30038 30106 3 30038 30107 30039 3 30108 30039 30107 3 30039 30108 30040 3 30109 30040 30108 3 30040 30109 30041 3 30110 30041 30109 3 30041 30110 30042 3 30111 30042 30110 3 30042 30111 30043 3 30112 30043 30111 3 30043 30112 30044 3 30113 30044 30112 3 30044 30113 30045 3 30114 30045 30113 3 30045 30114 30046 3 30115 30046 30114 3 30046 30115 30047 3 30116 30047 30115 3 30047 30116 30048 3 30117 30048 30116 3 30048 30117 30049 3 30118 30049 30117 3 30049 30118 30050 3 30119 30050 30118 3 30050 30119 30051 3 30120 30051 30119 3 30051 30120 30052 3 30121 30052 30120 3 30052 30121 30053 3 30122 30053 30121 3 30053 30122 30054 3 30123 30054 30122 3 30054 30123 30055 3 30124 30055 30123 3 30055 30124 30056 3 30125 30056 30124 3 30056 30125 30057 3 30126 30057 30125 3 30057 30126 30058 3 30127 30058 30126 3 30058 30127 30059 3 30128 30059 30127 3 30059 30128 30060 3 30129 30060 30128 3 30060 
30129 30061 3 30130 30061 30129 3 30061 30130 30062 3 30131 30062 30130 3 30062 30131 30063 3 30132 30063 30131 3 30063 30132 30064 3 30133 30064 30132 3 30064 30133 30065 3 30134 30065 30133 3 30065 30134 30066 3 30135 30066 30134 3 30066 30135 30067 3 30136 30067 30135 3 30067 30136 30068 3 30137 30068 30136 3 30068 30137 30069 3 30138 30069 30137 3 30069 30138 30070 3 30139 30070 30138 3 30070 30139 30071 3 30140 30071 30139 3 30071 30140 30072 3 30141 30072 30140 3 30072 30141 30073 3 30142 30073 30141 3 30074 30143 30075 3 30144 30075 30143 3 30075 30144 30076 3 30145 30076 30144 3 30076 30145 30077 3 30146 30077 30145 3 30077 30146 30078 3 30147 30078 30146 3 30078 30147 30079 3 30148 30079 30147 3 30079 30148 30080 3 30149 30080 30148 3 30080 30149 30081 3 30150 30081 30149 3 30081 30150 30082 3 30151 30082 30150 3 30082 30151 30083 3 30152 30083 30151 3 30083 30152 30084 3 30153 30084 30152 3 30084 30153 30085 3 30154 30085 30153 3 30085 30154 30086 3 30155 30086 30154 3 30086 30155 30087 3 30156 30087 30155 3 30087 30156 30088 3 30157 30088 30156 3 30088 30157 30089 3 30158 30089 30157 3 30089 30158 30090 3 30159 30090 30158 3 30090 30159 30091 3 30160 30091 30159 3 30091 30160 30092 3 30161 30092 30160 3 30092 30161 30093 3 30162 30093 30161 3 30093 30162 30094 3 30163 30094 30162 3 30094 30163 30095 3 30164 30095 30163 3 30095 30164 30096 3 30165 30096 30164 3 30096 30165 30097 3 30166 30097 30165 3 30097 30166 30098 3 30167 30098 30166 3 30098 30167 30099 3 30168 30099 30167 3 30099 30168 30100 3 30169 30100 30168 3 30100 30169 30101 3 30170 30101 30169 3 30101 30170 30102 3 30171 30102 30170 3 30102 30171 30103 3 30172 30103 30171 3 30103 30172 30104 3 30173 30104 30172 3 30104 30173 30105 3 30174 30105 30173 3 30105 30174 30106 3 30175 30106 30174 3 30106 30175 30107 3 30176 30107 30175 3 30107 30176 30108 3 30177 30108 30176 3 30108 30177 30109 3 30178 30109 30177 3 30109 30178 30110 3 30179 30110 30178 3 30110 30179 30111 3 30180 30111 30179 3 30111 
30180 30112 3 30181 30112 30180 3 30112 30181 30113 3 30182 30113 30181 3 30113 30182 30114 3 30183 30114 30182 3 30114 30183 30115 3 30184 30115 30183 3 30115 30184 30116 3 30185 30116 30184 3 30116 30185 30117 3 30186 30117 30185 3 30117 30186 30118 3 30187 30118 30186 3 30118 30187 30119 3 30188 30119 30187 3 30119 30188 30120 3 30189 30120 30188 3 30120 30189 30121 3 30190 30121 30189 3 30121 30190 30122 3 30191 30122 30190 3 30122 30191 30123 3 30192 30123 30191 3 30123 30192 30124 3 30193 30124 30192 3 30124 30193 30125 3 30194 30125 30193 3 30125 30194 30126 3 30195 30126 30194 3 30126 30195 30127 3 30196 30127 30195 3 30127 30196 30128 3 30197 30128 30196 3 30128 30197 30129 3 30198 30129 30197 3 30129 30198 30130 3 30199 30130 30198 3 30130 30199 30131 3 30200 30131 30199 3 30131 30200 30132 3 30201 30132 30200 3 30132 30201 30133 3 30202 30133 30201 3 30133 30202 30134 3 30203 30134 30202 3 30134 30203 30135 3 30204 30135 30203 3 30135 30204 30136 3 30205 30136 30204 3 30136 30205 30137 3 30206 30137 30205 3 30137 30206 30138 3 30207 30138 30206 3 30138 30207 30139 3 30208 30139 30207 3 30139 30208 30140 3 30209 30140 30208 3 30140 30209 30141 3 30210 30141 30209 3 30141 30210 30142 3 30211 30142 30210 3 30143 30212 30144 3 30213 30144 30212 3 30144 30213 30214 3 30144 30214 30145 3 30145 30214 30215 3 30145 30215 30146 3 30146 30215 30216 3 30146 30216 30147 3 30147 30216 30217 3 30147 30217 30148 3 30148 30217 30218 3 30148 30218 30149 3 30149 30218 30219 3 30149 30219 30150 3 30150 30219 30220 3 30150 30220 30151 3 30151 30220 30221 3 30151 30221 30152 3 30152 30221 30222 3 30152 30222 30153 3 30153 30222 30223 3 30153 30223 30154 3 30154 30223 30224 3 30154 30224 30155 3 30155 30224 30225 3 30155 30225 30156 3 30156 30225 30226 3 30156 30226 30157 3 30157 30226 30227 3 30157 30227 30158 3 30158 30227 30228 3 30158 30228 30159 3 30159 30228 30229 3 30159 30229 30160 3 30160 30229 30230 3 30160 30230 30161 3 30161 30230 30231 3 30161 30231 30162 3 30162 
30231 30232 3 30162 30232 30163 3 30163 30232 30233 3 30163 30233 30164 3 30164 30233 30234 3 30164 30234 30165 3 30165 30234 30235 3 30165 30235 30166 3 30166 30235 30236 3 30166 30236 30167 3 30167 30236 30237 3 30167 30237 30168 3 30168 30237 30238 3 30168 30238 30169 3 30169 30238 30239 3 30169 30239 30170 3 30170 30239 30240 3 30170 30240 30171 3 30171 30240 30241 3 30171 30241 30172 3 30172 30241 30242 3 30172 30242 30173 3 30173 30242 30243 3 30173 30243 30174 3 30174 30243 30244 3 30174 30244 30175 3 30175 30244 30245 3 30175 30245 30176 3 30176 30245 30246 3 30176 30246 30177 3 30177 30246 30247 3 30177 30247 30178 3 30178 30247 30248 3 30178 30248 30179 3 30179 30248 30249 3 30179 30249 30180 3 30180 30249 30250 3 30180 30250 30181 3 30181 30250 30251 3 30181 30251 30182 3 30182 30251 30252 3 30182 30252 30183 3 30183 30252 30253 3 30183 30253 30184 3 30184 30253 30254 3 30184 30254 30185 3 30185 30254 30255 3 30185 30255 30186 3 30186 30255 30256 3 30186 30256 30187 3 30187 30256 30257 3 30187 30257 30188 3 30188 30257 30258 3 30188 30258 30189 3 30189 30258 30259 3 30189 30259 30190 3 30190 30259 30260 3 30190 30260 30191 3 30191 30260 30261 3 30191 30261 30192 3 30192 30261 30262 3 30192 30262 30193 3 30193 30262 30263 3 30193 30263 30194 3 30194 30263 30264 3 30194 30264 30195 3 30195 30264 30265 3 30195 30265 30196 3 30196 30265 30266 3 30196 30266 30197 3 30197 30266 30267 3 30197 30267 30198 3 30198 30267 30268 3 30198 30268 30199 3 30199 30268 30269 3 30199 30269 30200 3 30200 30269 30270 3 30200 30270 30201 3 30201 30270 30271 3 30201 30271 30202 3 30202 30271 30272 3 30202 30272 30203 3 30203 30272 30273 3 30203 30273 30204 3 30204 30273 30274 3 30204 30274 30205 3 30205 30274 30275 3 30205 30275 30206 3 30206 30275 30276 3 30206 30276 30207 3 30207 30276 30277 3 30207 30277 30208 3 30208 30277 30278 3 30208 30278 30209 3 30209 30278 30279 3 30209 30279 30210 3 30210 30279 30280 3 30210 30280 30211 3 30212 30281 30282 3 30212 30282 30213 3 30213 
30282 30283 3 30213 30283 30214 3 30214 30283 30284 3 30214 30284 30215 3 30215 30284 30285 3 30215 30285 30216 3 30216 30285 30286 3 30216 30286 30217 3 30217 30286 30287 3 30217 30287 30218 3 30218 30287 30288 3 30218 30288 30219 3 30219 30288 30289 3 30219 30289 30220 3 30220 30289 30290 3 30220 30290 30221 3 30221 30290 30291 3 30221 30291 30222 3 30222 30291 30292 3 30222 30292 30223 3 30223 30292 30293 3 30223 30293 30224 3 30224 30293 30294 3 30224 30294 30225 3 30225 30294 30295 3 30225 30295 30226 3 30226 30295 30296 3 30226 30296 30227 3 30227 30296 30297 3 30227 30297 30228 3 30228 30297 30298 3 30228 30298 30229 3 30229 30298 30299 3 30229 30299 30230 3 30230 30299 30300 3 30230 30300 30231 3 30231 30300 30301 3 30231 30301 30232 3 30232 30301 30302 3 30232 30302 30233 3 30233 30302 30303 3 30233 30303 30234 3 30234 30303 30304 3 30234 30304 30235 3 30235 30304 30305 3 30235 30305 30236 3 30236 30305 30306 3 30236 30306 30237 3 30237 30306 30307 3 30237 30307 30238 3 30238 30307 30308 3 30238 30308 30239 3 30239 30308 30309 3 30239 30309 30240 3 30240 30309 30310 3 30240 30310 30241 3 30241 30310 30311 3 30241 30311 30242 3 30242 30311 30312 3 30242 30312 30243 3 30243 30312 30313 3 30243 30313 30244 3 30244 30313 30314 3 30244 30314 30245 3 30245 30314 30315 3 30245 30315 30246 3 30246 30315 30316 3 30246 30316 30247 3 30247 30316 30317 3 30247 30317 30248 3 30248 30317 30318 3 30248 30318 30249 3 30249 30318 30319 3 30249 30319 30250 3 30250 30319 30320 3 30250 30320 30251 3 30251 30320 30252 3 30321 30252 30320 3 30252 30321 30253 3 30322 30253 30321 3 30253 30322 30254 3 30323 30254 30322 3 30254 30323 30255 3 30324 30255 30323 3 30255 30324 30256 3 30325 30256 30324 3 30256 30325 30257 3 30326 30257 30325 3 30257 30326 30258 3 30327 30258 30326 3 30258 30327 30259 3 30328 30259 30327 3 30259 30328 30260 3 30329 30260 30328 3 30260 30329 30261 3 30330 30261 30329 3 30261 30330 30262 3 30331 30262 30330 3 30262 30331 30263 3 30332 30263 30331 3 30263 
30332 30264 3 30333 30264 30332 3 30264 30333 30265 3 30334 30265 30333 3 30265 30334 30266 3 30335 30266 30334 3 30266 30335 30267 3 30336 30267 30335 3 30267 30336 30268 3 30337 30268 30336 3 30268 30337 30269 3 30338 30269 30337 3 30269 30338 30270 3 30339 30270 30338 3 30270 30339 30271 3 30340 30271 30339 3 30271 30340 30272 3 30341 30272 30340 3 30272 30341 30273 3 30342 30273 30341 3 30273 30342 30274 3 30343 30274 30342 3 30274 30343 30275 3 30344 30275 30343 3 30275 30344 30276 3 30345 30276 30344 3 30276 30345 30277 3 30346 30277 30345 3 30277 30346 30278 3 30347 30278 30346 3 30278 30347 30279 3 30348 30279 30347 3 30279 30348 30280 3 30349 30280 30348 3 30281 30350 30282 3 30351 30282 30350 3 30282 30351 30283 3 30352 30283 30351 3 30283 30352 30284 3 30353 30284 30352 3 30284 30353 30285 3 30354 30285 30353 3 30285 30354 30286 3 30355 30286 30354 3 30286 30355 30287 3 30356 30287 30355 3 30287 30356 30288 3 30357 30288 30356 3 30288 30357 30289 3 30358 30289 30357 3 30289 30358 30290 3 30359 30290 30358 3 30290 30359 30291 3 30360 30291 30359 3 30291 30360 30292 3 30361 30292 30360 3 30292 30361 30293 3 30362 30293 30361 3 30293 30362 30294 3 30363 30294 30362 3 30294 30363 30295 3 30364 30295 30363 3 30295 30364 30296 3 30365 30296 30364 3 30296 30365 30297 3 30366 30297 30365 3 30297 30366 30298 3 30367 30298 30366 3 30298 30367 30299 3 30368 30299 30367 3 30299 30368 30300 3 30369 30300 30368 3 30300 30369 30301 3 30370 30301 30369 3 30301 30370 30302 3 30371 30302 30370 3 30302 30371 30303 3 30372 30303 30371 3 30303 30372 30304 3 30373 30304 30372 3 30304 30373 30305 3 30374 30305 30373 3 30305 30374 30306 3 30375 30306 30374 3 30306 30375 30307 3 30376 30307 30375 3 30307 30376 30308 3 30377 30308 30376 3 30308 30377 30309 3 30378 30309 30377 3 30309 30378 30310 3 30379 30310 30378 3 30310 30379 30311 3 30380 30311 30379 3 30311 30380 30312 3 30381 30312 30380 3 30312 30381 30313 3 30382 30313 30381 3 30313 30382 30314 3 30383 30314 30382 3 30314 
30383 30315 3 30384 30315 30383 3 30315 30384 30316 3 30385 30316 30384 3 30316 30385 30317 3 30386 30317 30385 3 30317 30386 30318 3 30387 30318 30386 3 30318 30387 30319 3 30388 30319 30387 3 30319 30388 30320 3 30389 30320 30388 3 30320 30389 30321 3 30390 30321 30389 3 30321 30390 30322 3 30391 30322 30390 3 30322 30391 30323 3 30392 30323 30391 3 30323 30392 30324 3 30393 30324 30392 3 30324 30393 30325 3 30394 30325 30393 3 30325 30394 30326 3 30395 30326 30394 3 30326 30395 30327 3 30396 30327 30395 3 30327 30396 30328 3 30397 30328 30396 3 30328 30397 30329 3 30398 30329 30397 3 30329 30398 30330 3 30399 30330 30398 3 30330 30399 30331 3 30400 30331 30399 3 30331 30400 30332 3 30401 30332 30400 3 30332 30401 30333 3 30402 30333 30401 3 30333 30402 30334 3 30403 30334 30402 3 30334 30403 30335 3 30404 30335 30403 3 30335 30404 30336 3 30405 30336 30404 3 30336 30405 30337 3 30406 30337 30405 3 30337 30406 30338 3 30407 30338 30406 3 30338 30407 30339 3 30408 30339 30407 3 30339 30408 30340 3 30409 30340 30408 3 30340 30409 30341 3 30410 30341 30409 3 30341 30410 30342 3 30411 30342 30410 3 30342 30411 30343 3 30412 30343 30411 3 30343 30412 30344 3 30413 30344 30412 3 30344 30413 30345 3 30414 30345 30413 3 30345 30414 30346 3 30415 30346 30414 3 30346 30415 30347 3 30416 30347 30415 3 30347 30416 30348 3 30417 30348 30416 3 30348 30417 30349 3 30418 30349 30417 3 30350 30419 30351 3 30420 30351 30419 3 30351 30420 30352 3 30421 30352 30420 3 30352 30421 30353 3 30422 30353 30421 3 30353 30422 30354 3 30423 30354 30422 3 30354 30423 30355 3 30424 30355 30423 3 30355 30424 30356 3 30425 30356 30424 3 30356 30425 30357 3 30426 30357 30425 3 30357 30426 30358 3 30427 30358 30426 3 30358 30427 30428 3 30358 30428 30359 3 30359 30428 30429 3 30359 30429 30360 3 30360 30429 30430 3 30360 30430 30361 3 30361 30430 30431 3 30361 30431 30362 3 30362 30431 30432 3 30362 30432 30363 3 30363 30432 30433 3 30363 30433 30364 3 30364 30433 30434 3 30364 30434 30365 3 30365 
30434 30435 3 30365 30435 30366 3 30366 30435 30436 3 30366 30436 30367 3 30367 30436 30437 3 30367 30437 30368 3 30368 30437 30438 3 30368 30438 30369 3 30369 30438 30439 3 30369 30439 30370 3 30370 30439 30440 3 30370 30440 30371 3 30371 30440 30441 3 30371 30441 30372 3 30372 30441 30442 3 30372 30442 30373 3 30373 30442 30443 3 30373 30443 30374 3 30374 30443 30444 3 30374 30444 30375 3 30375 30444 30445 3 30375 30445 30376 3 30376 30445 30446 3 30376 30446 30377 3 30377 30446 30447 3 30377 30447 30378 3 30378 30447 30448 3 30378 30448 30379 3 30379 30448 30449 3 30379 30449 30380 3 30380 30449 30450 3 30380 30450 30381 3 30381 30450 30451 3 30381 30451 30382 3 30382 30451 30452 3 30382 30452 30383 3 30383 30452 30453 3 30383 30453 30384 3 30384 30453 30454 3 30384 30454 30385 3 30385 30454 30455 3 30385 30455 30386 3 30386 30455 30456 3 30386 30456 30387 3 30387 30456 30457 3 30387 30457 30388 3 30388 30457 30458 3 30388 30458 30389 3 30389 30458 30459 3 30389 30459 30390 3 30390 30459 30460 3 30390 30460 30391 3 30391 30460 30461 3 30391 30461 30392 3 30392 30461 30462 3 30392 30462 30393 3 30393 30462 30463 3 30393 30463 30394 3 30394 30463 30464 3 30394 30464 30395 3 30395 30464 30465 3 30395 30465 30396 3 30396 30465 30466 3 30396 30466 30397 3 30397 30466 30467 3 30397 30467 30398 3 30398 30467 30468 3 30398 30468 30399 3 30399 30468 30469 3 30399 30469 30400 3 30400 30469 30470 3 30400 30470 30401 3 30401 30470 30471 3 30401 30471 30402 3 30402 30471 30472 3 30402 30472 30403 3 30403 30472 30473 3 30403 30473 30404 3 30404 30473 30474 3 30404 30474 30405 3 30405 30474 30475 3 30405 30475 30406 3 30406 30475 30476 3 30406 30476 30407 3 30407 30476 30477 3 30407 30477 30408 3 30408 30477 30478 3 30408 30478 30409 3 30409 30478 30479 3 30409 30479 30410 3 30410 30479 30480 3 30410 30480 30411 3 30411 30480 30481 3 30411 30481 30412 3 30412 30481 30482 3 30412 30482 30413 3 30413 30482 30483 3 30413 30483 30414 3 30414 30483 30484 3 30414 30484 30415 3 30415 
30484 30485 3 30415 30485 30416 3 30416 30485 30486 3 30416 30486 30417 3 30417 30486 30487 3 30417 30487 30418 3 30419 30488 30489 3 30419 30489 30420 3 30420 30489 30490 3 30420 30490 30421 3 30421 30490 30491 3 30421 30491 30422 3 30422 30491 30492 3 30422 30492 30423 3 30423 30492 30493 3 30423 30493 30424 3 30424 30493 30494 3 30424 30494 30425 3 30425 30494 30495 3 30425 30495 30426 3 30426 30495 30496 3 30426 30496 30427 3 30427 30496 30497 3 30427 30497 30428 3 30428 30497 30498 3 30428 30498 30429 3 30429 30498 30499 3 30429 30499 30430 3 30430 30499 30500 3 30430 30500 30431 3 30431 30500 30501 3 30431 30501 30432 3 30432 30501 30502 3 30432 30502 30433 3 30433 30502 30503 3 30433 30503 30434 3 30434 30503 30504 3 30434 30504 30435 3 30435 30504 30505 3 30435 30505 30436 3 30436 30505 30506 3 30436 30506 30437 3 30437 30506 30507 3 30437 30507 30438 3 30438 30507 30508 3 30438 30508 30439 3 30439 30508 30509 3 30439 30509 30440 3 30440 30509 30510 3 30440 30510 30441 3 30441 30510 30511 3 30441 30511 30442 3 30442 30511 30512 3 30442 30512 30443 3 30443 30512 30513 3 30443 30513 30444 3 30444 30513 30514 3 30444 30514 30445 3 30445 30514 30515 3 30445 30515 30446 3 30446 30515 30516 3 30446 30516 30447 3 30447 30516 30517 3 30447 30517 30448 3 30448 30517 30518 3 30448 30518 30449 3 30449 30518 30519 3 30449 30519 30450 3 30450 30519 30520 3 30450 30520 30451 3 30451 30520 30521 3 30451 30521 30452 3 30452 30521 30522 3 30452 30522 30453 3 30453 30522 30523 3 30453 30523 30454 3 30454 30523 30524 3 30454 30524 30455 3 30455 30524 30525 3 30455 30525 30456 3 30456 30525 30526 3 30456 30526 30457 3 30457 30526 30527 3 30457 30527 30458 3 30458 30527 30528 3 30458 30528 30459 3 30459 30528 30529 3 30459 30529 30460 3 30460 30529 30530 3 30460 30530 30461 3 30461 30530 30531 3 30461 30531 30462 3 30462 30531 30532 3 30462 30532 30463 3 30463 30532 30533 3 30463 30533 30464 3 30464 30533 30534 3 30464 30534 30465 3 30465 30534 30535 3 30465 30535 30466 3 30466 
30535 30467 3 30536 30467 30535 3 30467 30536 30468 3 30537 30468 30536 3 30468 30537 30469 3 30538 30469 30537 3 30469 30538 30470 3 30539 30470 30538 3 30470 30539 30471 3 30540 30471 30539 3 30471 30540 30472 3 30541 30472 30540 3 30472 30541 30473 3 30542 30473 30541 3 30473 30542 30474 3 30543 30474 30542 3 30474 30543 30475 3 30544 30475 30543 3 30475 30544 30476 3 30545 30476 30544 3 30476 30545 30477 3 30546 30477 30545 3 30477 30546 30478 3 30547 30478 30546 3 30478 30547 30479 3 30548 30479 30547 3 30479 30548 30480 3 30549 30480 30548 3 30480 30549 30481 3 30550 30481 30549 3 30481 30550 30482 3 30551 30482 30550 3 30482 30551 30483 3 30552 30483 30551 3 30483 30552 30484 3 30553 30484 30552 3 30484 30553 30485 3 30554 30485 30553 3 30485 30554 30486 3 30555 30486 30554 3 30486 30555 30487 3 30556 30487 30555 3 30488 30557 30489 3 30558 30489 30557 3 30489 30558 30490 3 30559 30490 30558 3 30490 30559 30491 3 30560 30491 30559 3 30491 30560 30492 3 30561 30492 30560 3 30492 30561 30493 3 30562 30493 30561 3 30493 30562 30494 3 30563 30494 30562 3 30494 30563 30495 3 30564 30495 30563 3 30495 30564 30496 3 30565 30496 30564 3 30496 30565 30497 3 30566 30497 30565 3 30497 30566 30498 3 30567 30498 30566 3 30498 30567 30499 3 30568 30499 30567 3 30499 30568 30500 3 30569 30500 30568 3 30500 30569 30501 3 30570 30501 30569 3 30501 30570 30502 3 30571 30502 30570 3 30502 30571 30503 3 30572 30503 30571 3 30503 30572 30504 3 30573 30504 30572 3 30504 30573 30505 3 30574 30505 30573 3 30505 30574 30506 3 30575 30506 30574 3 30506 30575 30507 3 30576 30507 30575 3 30507 30576 30508 3 30577 30508 30576 3 30508 30577 30509 3 30578 30509 30577 3 30509 30578 30510 3 30579 30510 30578 3 30510 30579 30511 3 30580 30511 30579 3 30511 30580 30512 3 30581 30512 30580 3 30512 30581 30513 3 30582 30513 30581 3 30513 30582 30514 3 30583 30514 30582 3 30514 30583 30515 3 30584 30515 30583 3 30515 30584 30516 3 30585 30516 30584 3 30516 30585 30517 3 30586 30517 30585 3 30517 
30586 30518 3 30587 30518 30586 3 30518 30587 30519 3 30588 30519 30587 3 30519 30588 30520 3 30589 30520 30588 3 30520 30589 30521 3 30590 30521 30589 3 30521 30590 30522 3 30591 30522 30590 3 30522 30591 30523 3 30592 30523 30591 3 30523 30592 30524 3 30593 30524 30592 3 30524 30593 30525 3 30594 30525 30593 3 30525 30594 30526 3 30595 30526 30594 3 30526 30595 30527 3 30596 30527 30595 3 30527 30596 30528 3 30597 30528 30596 3 30528 30597 30529 3 30598 30529 30597 3 30529 30598 30530 3 30599 30530 30598 3 30530 30599 30531 3 30600 30531 30599 3 30531 30600 30532 3 30601 30532 30600 3 30532 30601 30533 3 30602 30533 30601 3 30533 30602 30534 3 30603 30534 30602 3 30534 30603 30535 3 30604 30535 30603 3 30535 30604 30536 3 30605 30536 30604 3 30536 30605 30537 3 30606 30537 30605 3 30537 30606 30538 3 30607 30538 30606 3 30538 30607 30539 3 30608 30539 30607 3 30539 30608 30540 3 30609 30540 30608 3 30540 30609 30541 3 30610 30541 30609 3 30541 30610 30542 3 30611 30542 30610 3 30542 30611 30543 3 30612 30543 30611 3 30543 30612 30544 3 30613 30544 30612 3 30544 30613 30545 3 30614 30545 30613 3 30545 30614 30546 3 30615 30546 30614 3 30546 30615 30547 3 30616 30547 30615 3 30547 30616 30548 3 30617 30548 30616 3 30548 30617 30549 3 30618 30549 30617 3 30549 30618 30550 3 30619 30550 30618 3 30550 30619 30551 3 30620 30551 30619 3 30551 30620 30552 3 30621 30552 30620 3 30552 30621 30553 3 30622 30553 30621 3 30553 30622 30554 3 30623 30554 30622 3 30554 30623 30555 3 30624 30555 30623 3 30555 30624 30556 3 30625 30556 30624 3 30557 30626 30558 3 30627 30558 30626 3 30558 30627 30559 3 30628 30559 30627 3 30559 30628 30560 3 30629 30560 30628 3 30560 30629 30561 3 30630 30561 30629 3 30561 30630 30562 3 30631 30562 30630 3 30562 30631 30563 3 30632 30563 30631 3 30563 30632 30564 3 30633 30564 30632 3 30564 30633 30565 3 30634 30565 30633 3 30565 30634 30566 3 30635 30566 30634 3 30566 30635 30567 3 30636 30567 30635 3 30567 30636 30568 3 30637 30568 30636 3 30568 
30637 30569 3 30638 30569 30637 3 30569 30638 30570 3 30639 30570 30638 3 30570 30639 30571 3 30640 30571 30639 3 30571 30640 30572 3 30641 30572 30640 3 30572 30641 30573 3 30642 30573 30641 3 30573 30642 30643 3 30573 30643 30574 3 30574 30643 30644 3 30574 30644 30575 3 30575 30644 30645 3 30575 30645 30576 3 30576 30645 30646 3 30576 30646 30577 3 30577 30646 30647 3 30577 30647 30578 3 30578 30647 30648 3 30578 30648 30579 3 30579 30648 30649 3 30579 30649 30580 3 30580 30649 30650 3 30580 30650 30581 3 30581 30650 30651 3 30581 30651 30582 3 30582 30651 30652 3 30582 30652 30583 3 30583 30652 30653 3 30583 30653 30584 3 30584 30653 30654 3 30584 30654 30585 3 30585 30654 30655 3 30585 30655 30586 3 30586 30655 30656 3 30586 30656 30587 3 30587 30656 30657 3 30587 30657 30588 3 30588 30657 30658 3 30588 30658 30589 3 30589 30658 30659 3 30589 30659 30590 3 30590 30659 30660 3 30590 30660 30591 3 30591 30660 30661 3 30591 30661 30592 3 30592 30661 30662 3 30592 30662 30593 3 30593 30662 30663 3 30593 30663 30594 3 30594 30663 30664 3 30594 30664 30595 3 30595 30664 30665 3 30595 30665 30596 3 30596 30665 30666 3 30596 30666 30597 3 30597 30666 30667 3 30597 30667 30598 3 30598 30667 30668 3 30598 30668 30599 3 30599 30668 30669 3 30599 30669 30600 3 30600 30669 30670 3 30600 30670 30601 3 30601 30670 30671 3 30601 30671 30602 3 30602 30671 30672 3 30602 30672 30603 3 30603 30672 30673 3 30603 30673 30604 3 30604 30673 30674 3 30604 30674 30605 3 30605 30674 30675 3 30605 30675 30606 3 30606 30675 30676 3 30606 30676 30607 3 30607 30676 30677 3 30607 30677 30608 3 30608 30677 30678 3 30608 30678 30609 3 30609 30678 30679 3 30609 30679 30610 3 30610 30679 30680 3 30610 30680 30611 3 30611 30680 30681 3 30611 30681 30612 3 30612 30681 30682 3 30612 30682 30613 3 30613 30682 30683 3 30613 30683 30614 3 30614 30683 30684 3 30614 30684 30615 3 30615 30684 30685 3 30615 30685 30616 3 30616 30685 30686 3 30616 30686 30617 3 30617 30686 30687 3 30617 30687 30618 3 30618 
30687 30688 3 30618 30688 30619 3 30619 30688 30689 3 30619 30689 30620 3 30620 30689 30690 3 30620 30690 30621 3 30621 30690 30691 3 30621 30691 30622 3 30622 30691 30692 3 30622 30692 30623 3 30623 30692 30693 3 30623 30693 30624 3 30624 30693 30694 3 30624 30694 30625 3 30626 30695 30696 3 30626 30696 30627 3 30627 30696 30697 3 30627 30697 30628 3 30628 30697 30698 3 30628 30698 30629 3 30629 30698 30699 3 30629 30699 30630 3 30630 30699 30700 3 30630 30700 30631 3 30631 30700 30701 3 30631 30701 30632 3 30632 30701 30702 3 30632 30702 30633 3 30633 30702 30703 3 30633 30703 30634 3 30634 30703 30704 3 30634 30704 30635 3 30635 30704 30705 3 30635 30705 30636 3 30636 30705 30706 3 30636 30706 30637 3 30637 30706 30707 3 30637 30707 30638 3 30638 30707 30708 3 30638 30708 30639 3 30639 30708 30709 3 30639 30709 30640 3 30640 30709 30710 3 30640 30710 30641 3 30641 30710 30711 3 30641 30711 30642 3 30642 30711 30712 3 30642 30712 30643 3 30643 30712 30713 3 30643 30713 30644 3 30644 30713 30714 3 30644 30714 30645 3 30645 30714 30715 3 30645 30715 30646 3 30646 30715 30716 3 30646 30716 30647 3 30647 30716 30717 3 30647 30717 30648 3 30648 30717 30718 3 30648 30718 30649 3 30649 30718 30719 3 30649 30719 30650 3 30650 30719 30720 3 30650 30720 30651 3 30651 30720 30721 3 30651 30721 30652 3 30652 30721 30722 3 30652 30722 30653 3 30653 30722 30723 3 30653 30723 30654 3 30654 30723 30724 3 30654 30724 30655 3 30655 30724 30725 3 30655 30725 30656 3 30656 30725 30726 3 30656 30726 30657 3 30657 30726 30727 3 30657 30727 30658 3 30658 30727 30728 3 30658 30728 30659 3 30659 30728 30729 3 30659 30729 30660 3 30660 30729 30730 3 30660 30730 30661 3 30661 30730 30731 3 30661 30731 30662 3 30662 30731 30732 3 30662 30732 30663 3 30663 30732 30733 3 30663 30733 30664 3 30664 30733 30734 3 30664 30734 30665 3 30665 30734 30735 3 30665 30735 30666 3 30666 30735 30736 3 30666 30736 30667 3 30667 30736 30737 3 30667 30737 30668 3 30668 30737 30738 3 30668 30738 30669 3 30669 
30738 30739 3 30669 30739 30670 3 30670 30739 30740 3 30670 30740 30671 3 30671 30740 30741 3 30671 30741 30672 3 30672 30741 30742 3 30672 30742 30673 3 30673 30742 30743 3 30673 30743 30674 3 30674 30743 30744 3 30674 30744 30675 3 30675 30744 30745 3 30675 30745 30676 3 30676 30745 30746 3 30676 30746 30677 3 30677 30746 30747 3 30677 30747 30678 3 30678 30747 30748 3 30678 30748 30679 3 30679 30748 30749 3 30679 30749 30680 3 30680 30749 30750 3 30680 30750 30681 3 30681 30750 30682 3 30751 30682 30750 3 30682 30751 30683 3 30752 30683 30751 3 30683 30752 30684 3 30753 30684 30752 3 30684 30753 30685 3 30754 30685 30753 3 30685 30754 30686 3 30755 30686 30754 3 30686 30755 30687 3 30756 30687 30755 3 30687 30756 30688 3 30757 30688 30756 3 30688 30757 30689 3 30758 30689 30757 3 30689 30758 30690 3 30759 30690 30758 3 30690 30759 30691 3 30760 30691 30759 3 30691 30760 30692 3 30761 30692 30760 3 30692 30761 30693 3 30762 30693 30761 3 30693 30762 30694 3 30763 30694 30762 3 30695 30764 30696 3 30765 30696 30764 3 30696 30765 30697 3 30766 30697 30765 3 30697 30766 30698 3 30767 30698 30766 3 30698 30767 30699 3 30768 30699 30767 3 30699 30768 30700 3 30769 30700 30768 3 30700 30769 30701 3 30770 30701 30769 3 30701 30770 30702 3 30771 30702 30770 3 30702 30771 30703 3 30772 30703 30771 3 30703 30772 30704 3 30773 30704 30772 3 30704 30773 30705 3 30774 30705 30773 3 30705 30774 30706 3 30775 30706 30774 3 30706 30775 30707 3 30776 30707 30775 3 30707 30776 30708 3 30777 30708 30776 3 30708 30777 30709 3 30778 30709 30777 3 30709 30778 30710 3 30779 30710 30778 3 30710 30779 30711 3 30780 30711 30779 3 30711 30780 30712 3 30781 30712 30780 3 30712 30781 30713 3 30782 30713 30781 3 30713 30782 30714 3 30783 30714 30782 3 30714 30783 30715 3 30784 30715 30783 3 30715 30784 30716 3 30785 30716 30784 3 30716 30785 30717 3 30786 30717 30785 3 30717 30786 30718 3 30787 30718 30786 3 30718 30787 30719 3 30788 30719 30787 3 30719 30788 30720 3 30789 30720 30788 3 30720 
30789 30721 3 30790 30721 30789 3 30721 30790 30722 3 30791 30722 30790 3 30722 30791 30723 3 30792 30723 30791 3 30723 30792 30724 3 30793 30724 30792 3 30724 30793 30725 3 30794 30725 30793 3 30725 30794 30726 3 30795 30726 30794 3 30726 30795 30727 3 30796 30727 30795 3 30727 30796 30728 3 30797 30728 30796 3 30728 30797 30729 3 30798 30729 30797 3 30729 30798 30730 3 30799 30730 30798 3 30730 30799 30731 3 30800 30731 30799 3 30731 30800 30732 3 30801 30732 30800 3 30732 30801 30733 3 30802 30733 30801 3 30733 30802 30734 3 30803 30734 30802 3 30734 30803 30735 3 30804 30735 30803 3 30735 30804 30736 3 30805 30736 30804 3 30736 30805 30737 3 30806 30737 30805 3 30737 30806 30738 3 30807 30738 30806 3 30738 30807 30739 3 30808 30739 30807 3 30739 30808 30740 3 30809 30740 30808 3 30740 30809 30741 3 30810 30741 30809 3 30741 30810 30742 3 30811 30742 30810 3 30742 30811 30743 3 30812 30743 30811 3 30743 30812 30744 3 30813 30744 30812 3 30744 30813 30745 3 30814 30745 30813 3 30745 30814 30746 3 30815 30746 30814 3 30746 30815 30747 3 30816 30747 30815 3 30747 30816 30748 3 30817 30748 30816 3 30748 30817 30749 3 30818 30749 30817 3 30749 30818 30750 3 30819 30750 30818 3 30750 30819 30751 3 30820 30751 30819 3 30751 30820 30752 3 30821 30752 30820 3 30752 30821 30753 3 30822 30753 30821 3 30753 30822 30754 3 30823 30754 30822 3 30754 30823 30755 3 30824 30755 30823 3 30755 30824 30756 3 30825 30756 30824 3 30756 30825 30757 3 30826 30757 30825 3 30757 30826 30758 3 30827 30758 30826 3 30758 30827 30759 3 30828 30759 30827 3 30759 30828 30760 3 30829 30760 30828 3 30760 30829 30761 3 30830 30761 30829 3 30761 30830 30762 3 30831 30762 30830 3 30762 30831 30763 3 30832 30763 30831 3 30764 30833 30765 3 30834 30765 30833 3 30765 30834 30766 3 30835 30766 30834 3 30766 30835 30767 3 30836 30767 30835 3 30767 30836 30768 3 30837 30768 30836 3 30768 30837 30769 3 30838 30769 30837 3 30769 30838 30770 3 30839 30770 30838 3 30770 30839 30771 3 30840 30771 30839 3 30771 
30840 30772 3 30841 30772 30840 3 30772 30841 30773 3 30842 30773 30841 3 30773 30842 30774 3 30843 30774 30842 3 30774 30843 30775 3 30844 30775 30843 3 30775 30844 30776 3 30845 30776 30844 3 30776 30845 30777 3 30846 30777 30845 3 30777 30846 30778 3 30847 30778 30846 3 30778 30847 30779 3 30848 30779 30847 3 30779 30848 30780 3 30849 30780 30848 3 30780 30849 30781 3 30850 30781 30849 3 30781 30850 30782 3 30851 30782 30850 3 30782 30851 30783 3 30852 30783 30851 3 30783 30852 30784 3 30853 30784 30852 3 30784 30853 30785 3 30854 30785 30853 3 30785 30854 30786 3 30855 30786 30854 3 30786 30855 30787 3 30856 30787 30855 3 30787 30856 30788 3 30857 30788 30856 3 30788 30857 30789 3 30858 30789 30857 3 30789 30858 30859 3 30789 30859 30790 3 30790 30859 30860 3 30790 30860 30791 3 30791 30860 30861 3 30791 30861 30792 3 30792 30861 30862 3 30792 30862 30793 3 30793 30862 30863 3 30793 30863 30794 3 30794 30863 30864 3 30794 30864 30795 3 30795 30864 30865 3 30795 30865 30796 3 30796 30865 30866 3 30796 30866 30797 3 30797 30866 30867 3 30797 30867 30798 3 30798 30867 30868 3 30798 30868 30799 3 30799 30868 30869 3 30799 30869 30800 3 30800 30869 30870 3 30800 30870 30801 3 30801 30870 30871 3 30801 30871 30802 3 30802 30871 30872 3 30802 30872 30803 3 30803 30872 30873 3 30803 30873 30804 3 30804 30873 30874 3 30804 30874 30805 3 30805 30874 30875 3 30805 30875 30806 3 30806 30875 30876 3 30806 30876 30807 3 30807 30876 30877 3 30807 30877 30808 3 30808 30877 30878 3 30808 30878 30809 3 30809 30878 30879 3 30809 30879 30810 3 30810 30879 30880 3 30810 30880 30811 3 30811 30880 30881 3 30811 30881 30812 3 30812 30881 30882 3 30812 30882 30813 3 30813 30882 30883 3 30813 30883 30814 3 30814 30883 30884 3 30814 30884 30815 3 30815 30884 30885 3 30815 30885 30816 3 30816 30885 30886 3 30816 30886 30817 3 30817 30886 30887 3 30817 30887 30818 3 30818 30887 30888 3 30818 30888 30819 3 30819 30888 30889 3 30819 30889 30820 3 30820 30889 30890 3 30820 30890 30821 3 30821 
30890 30891 3 30821 30891 30822 3 30822 30891 30892 3 30822 30892 30823 3 30823 30892 30893 3 30823 30893 30824 3 30824 30893 30894 3 30824 30894 30825 3 30825 30894 30895 3 30825 30895 30826 3 30826 30895 30896 3 30826 30896 30827 3 30827 30896 30897 3 30827 30897 30828 3 30828 30897 30898 3 30828 30898 30829 3 30829 30898 30899 3 30829 30899 30830 3 30830 30899 30900 3 30830 30900 30831 3 30831 30900 30901 3 30831 30901 30832 3 30834 30902 30903 3 30834 30903 30835 3 30835 30903 30904 3 30835 30904 30836 3 30836 30904 30905 3 30836 30905 30837 3 30837 30905 30906 3 30837 30906 30838 3 30838 30906 30907 3 30838 30907 30839 3 30839 30907 30908 3 30839 30908 30840 3 30840 30908 30909 3 30840 30909 30841 3 30841 30909 30910 3 30841 30910 30842 3 30842 30910 30911 3 30842 30911 30843 3 30843 30911 30912 3 30843 30912 30844 3 30844 30912 30913 3 30844 30913 30845 3 30845 30913 30914 3 30845 30914 30846 3 30846 30914 30915 3 30846 30915 30847 3 30847 30915 30916 3 30847 30916 30848 3 30848 30916 30917 3 30848 30917 30849 3 30849 30917 30918 3 30849 30918 30850 3 30850 30918 30919 3 30850 30919 30851 3 30851 30919 30920 3 30851 30920 30852 3 30852 30920 30921 3 30852 30921 30853 3 30853 30921 30922 3 30853 30922 30854 3 30854 30922 30923 3 30854 30923 30855 3 30855 30923 30924 3 30855 30924 30856 3 30856 30924 30925 3 30856 30925 30857 3 30857 30925 30926 3 30857 30926 30858 3 30858 30926 30927 3 30858 30927 30859 3 30859 30927 30928 3 30859 30928 30860 3 30860 30928 30929 3 30860 30929 30861 3 30861 30929 30930 3 30861 30930 30862 3 30862 30930 30931 3 30862 30931 30863 3 30863 30931 30932 3 30863 30932 30864 3 30864 30932 30933 3 30864 30933 30865 3 30865 30933 30934 3 30865 30934 30866 3 30866 30934 30935 3 30866 30935 30867 3 30867 30935 30936 3 30867 30936 30868 3 30868 30936 30937 3 30868 30937 30869 3 30869 30937 30938 3 30869 30938 30870 3 30870 30938 30939 3 30870 30939 30871 3 30871 30939 30940 3 30871 30940 30872 3 30872 30940 30941 3 30872 30941 30873 3 30873 
30941 30942 3 30873 30942 30874 3 30874 30942 30943 3 30874 30943 30875 3 30875 30943 30944 3 30875 30944 30876 3 30876 30944 30945 3 30876 30945 30877 3 30877 30945 30946 3 30877 30946 30878 3 30878 30946 30947 3 30878 30947 30879 3 30879 30947 30948 3 30879 30948 30880 3 30880 30948 30949 3 30880 30949 30881 3 30881 30949 30950 3 30881 30950 30882 3 30882 30950 30951 3 30882 30951 30883 3 30883 30951 30952 3 30883 30952 30884 3 30884 30952 30953 3 30884 30953 30885 3 30885 30953 30954 3 30885 30954 30886 3 30886 30954 30955 3 30886 30955 30887 3 30887 30955 30956 3 30887 30956 30888 3 30888 30956 30957 3 30888 30957 30889 3 30889 30957 30958 3 30889 30958 30890 3 30890 30958 30959 3 30890 30959 30891 3 30891 30959 30960 3 30891 30960 30892 3 30892 30960 30961 3 30892 30961 30893 3 30893 30961 30962 3 30893 30962 30894 3 30894 30962 30963 3 30894 30963 30895 3 30895 30963 30964 3 30895 30964 30896 3 30896 30964 30965 3 30896 30965 30897 3 30897 30965 30966 3 30897 30966 30898 3 30898 30966 30899 3 30967 30899 30966 3 30899 30967 30900 3 30968 30900 30967 3 30903 30969 30904 3 30970 30904 30969 3 30904 30970 30905 3 30971 30905 30970 3 30905 30971 30906 3 30972 30906 30971 3 30906 30972 30907 3 30973 30907 30972 3 30907 30973 30908 3 30974 30908 30973 3 30908 30974 30909 3 30975 30909 30974 3 30909 30975 30910 3 30976 30910 30975 3 30910 30976 30911 3 30977 30911 30976 3 30911 30977 30912 3 30978 30912 30977 3 30912 30978 30913 3 30979 30913 30978 3 30913 30979 30914 3 30980 30914 30979 3 30914 30980 30915 3 30981 30915 30980 3 30915 30981 30916 3 30982 30916 30981 3 30916 30982 30917 3 30983 30917 30982 3 30917 30983 30918 3 30984 30918 30983 3 30918 30984 30919 3 30985 30919 30984 3 30919 30985 30920 3 30986 30920 30985 3 30920 30986 30921 3 30987 30921 30986 3 30921 30987 30922 3 30988 30922 30987 3 30922 30988 30923 3 30989 30923 30988 3 30923 30989 30924 3 30990 30924 30989 3 30924 30990 30925 3 30991 30925 30990 3 30925 30991 30926 3 30992 30926 30991 3 30926 
30992 30927 3 30993 30927 30992 3 30927 30993 30928 3 30994 30928 30993 3 30928 30994 30929 3 30995 30929 30994 3 30929 30995 30930 3 30996 30930 30995 3 30930 30996 30931 3 30997 30931 30996 3 30931 30997 30932 3 30998 30932 30997 3 30932 30998 30933 3 30999 30933 30998 3 30933 30999 30934 3 31000 30934 30999 3 30934 31000 30935 3 31001 30935 31000 3 30935 31001 30936 3 31002 30936 31001 3 30936 31002 30937 3 31003 30937 31002 3 30937 31003 30938 3 31004 30938 31003 3 30938 31004 30939 3 31005 30939 31004 3 30939 31005 30940 3 31006 30940 31005 3 30940 31006 30941 3 31007 30941 31006 3 30941 31007 30942 3 31008 30942 31007 3 30942 31008 30943 3 31009 30943 31008 3 30943 31009 30944 3 31010 30944 31009 3 30944 31010 30945 3 31011 30945 31010 3 30945 31011 30946 3 31012 30946 31011 3 30946 31012 30947 3 31013 30947 31012 3 30947 31013 30948 3 31014 30948 31013 3 30948 31014 30949 3 31015 30949 31014 3 30949 31015 30950 3 31016 30950 31015 3 30950 31016 30951 3 31017 30951 31016 3 30951 31017 30952 3 31018 30952 31017 3 30952 31018 30953 3 31019 30953 31018 3 30953 31019 30954 3 31020 30954 31019 3 30954 31020 30955 3 31021 30955 31020 3 30955 31021 30956 3 31022 30956 31021 3 30956 31022 30957 3 31023 30957 31022 3 30957 31023 30958 3 31024 30958 31023 3 30958 31024 30959 3 31025 30959 31024 3 30959 31025 30960 3 31026 30960 31025 3 30960 31026 30961 3 31027 30961 31026 3 30961 31027 30962 3 31028 30962 31027 3 30962 31028 30963 3 31029 30963 31028 3 30963 31029 30964 3 31030 30964 31029 3 30964 31030 30965 3 31031 30965 31030 3 30965 31031 30966 3 31032 30966 31031 3 30966 31032 30967 3 31033 30967 31032 
nipype-1.7.0/nipype/testing/data/surf1.vtk000066400000000000000000000000001413403311400204630ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/surf2.vtk000066400000000000000000000000001413403311400204640ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/targets_MASK1.nii000066400000000000000000000000001413403311400217430ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/targets_MASK2.nii000066400000000000000000000000001413403311400217440ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/tbss_dir/000077500000000000000000000000001413403311400205205ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/tbss_dir/do_not_delete.txt000066400000000000000000000000741413403311400240660ustar00rootroot00000000000000This file has to be here because git ignores empty folders. nipype-1.7.0/nipype/testing/data/tdi.mif000066400000000000000000000000001413403311400201520ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/tensor_fitted_data.Bdouble000066400000000000000000000000001413403311400240350ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/timeDesign.con000066400000000000000000000000001413403311400214660ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/timeDesign.mat000066400000000000000000000000001413403311400214700ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/timeseries.txt000066400000000000000000000000001413403311400216070ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/tissue+air_map.nii000066400000000000000000000000001413403311400223160ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/tissues.nii.gz000066400000000000000000000000001413403311400215140ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/topup_encoding.txt000066400000000000000000000000001413403311400224530ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/topup_fieldcoef.nii.gz000066400000000000000000000000001413403311400231640ustar00rootroot00000000000000nipype-1.7.
0/nipype/testing/data/topup_movpar.txt000066400000000000000000000000001413403311400221710ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/tpm_00.nii.gz000066400000000000000000004726171413403311400211470ustar00rootroot00000000000000|Snormtpm_00.nii] |U0 mMAI[0SDG;Y@ᒂf!)0LHdfv@Ɨ7ݽx! WHAD2!E\NI}uyyss9ymlli;ă6QGxg 5nS=_sW{ZtԦ#6>.!e:wF7N᫏sZԝB@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P( B@!P(;AMmw“Rvyޔ2L<؛HϜ_M}1}ߙlZFۂ͋h3|+1oM[NdN.-?\<+ٶ^!D?VLS rۂm_צ\?ϰիuvҰ V j'>}`X\m\;g[yg Yuz(lPmAV:@;9-p-λz}Qo 7ۻ`ؼ=Ν̽3ee.LlJ7HPtbҫ_\^İ}/"t7;^j,EhO ]/O-Oݱt6/d>u=ZWӞ٘w-Mo eSFxrK\Z1Yl#eH*җlƃ(x6v~rv G $E1gg[dMH#ww/x>Bqq&~*O9=X .z]ʎ}i1;7,?yP%g4+}5}qx780Dہ>tG{0m}{3<:P~L4v2l/C߃O kʷ+WfseS%ᭈeAJ}7෹{)?$|Bl㡮܎æ$>GI"̦b,7t|o߼F;gQMOG6%3S|&1}j4P]p!u٪y>vl{y,x.Ơc}?۔ۻn,7~{~7צd3Z7zP( B@!P( B@!P( B@!P( B@!P( B@!P(  ס㼥-c*UMd0?48*zYׯ=Fx>?0qϼbg ~O)  ag6x,X؟@KϜE[OU< ]¿gU[eb!/wE|ZmD >)=J8/Xck-GMJ7C,W:#+|RKϜ"ßb{ -]2I 䠀ͫ6w1uͦUC_ă<oN? ;|sg8cȦ܃ 7G `8|$GgDzSh*(z68΄Z8p~Aةy.d4Y7<:`|חOR]r^yߡ\7gn?Tc઀|X2 u}PgjKš#5pUmik|ߞ<8+aLW8l#v?-=IJbvL,ψ6jUF\_&N'1z#[(v??4χL{2ϰS_{PWHC;#$biFFTsGrPP ٞ#z.IN-[UE~?Y%k~1~o8tV3هJ:\N\? 
J.\FQ'b*e CQ~5vZ6)"xӈ+:sBs+eao6{7ZB|K;Á;O-h܊Jd>BgQ>{|\;󿴱&:ұ<o;Y16I>_no[鎯dcZOjc #mgk^W~u{ϟJ~B@!P( B@!P( B@!P( B@!P( B@!P( uʵY^׆ýOޓ 3`jgVzxotⷍm쟣{-mCf|S+‡wMXq 'x8|Jd[zc!ƾsE#MN~\}C]RնnW<|íO /$H_ɸ6 >'b?~<@[9X1ki 5fyWxg$H W3gbC }9Z,eYQ|5 <'ЫW;2;R桑 ?ymF3|+'o^z1Oϯ~Aҳ5>d!yD[\yŝ=m*5L;%U;>2L}ƐҶZEcV-=+'8w:#ҭ\ ݡ ?xG{'g`LUi˰8:uK"gܟ1Xgi^bȦ3 ,ufl$G ,kAXt{v0q/ǥ"6eɋrXvgw&Ctı5d_#СwFrYe*ǚԧ5 pY ^W{.nZD~Zs 3=mGF_\,k_~%#"ڵ:ÝUܟC9L\qM.`6`S\/M]\gZ8|5~~_m=j3[*\kq.v' 9샔v']US{7 ҫ_ò} g EGhwcmt?ƓfEWK=#&}1gjQnse?qmہ댼+Asrǃ&Ud6[_?ۯKl-ԆzY/Ag>cK_j'm)y:ā3 /M}8{+xH Rk`i7V:s?aM}+)Op؝xYv89 <|;,'kgS{+dKzy顥lpx t&Wz_r *麳gQ%ک>Ϣ}3Vj{ =d?8FGtѷ}}}j3ޱpAb}B$gH\y)C4\yeqgEKp u119$Ա̣Zn!gԮ-[$"w6רeK`6|6ia +Ons)NcПk%d;bh5$γX*ÿt1Ժ2lZ.o,ϔsyn{;c>SMTs;V/ҘC$ӴܗaYʴ,M_>џDQ9ksfб7{V/YM i ;yo}<: 8%Wj;T`#\^1>d.Fє|?A_͙}1{PۂfVV] sWvڷSy/tGȈX3SDZu?wJBO-㵞9~ ^W)&xEV,֧Qzj a_{dg|O!m,ǚjE8I,H֡ G+ze뼾"^8'ٝ8h"WThx&ikۓ^U dѼAB{'6#5yOD{8{X7wKl)l(0ۥ|7K(ts'"gqs Fƭ|iw6M< ,;}QF2,[ N9iAͅO{Fgi#<|kRy,=3?ܓr3ʰUpɱu.7>31ې4[YGgnBgX6o/I<(TcdZ@bU?5p[^͘J^k&}#ks[W?b-tNn:t2׏r+)`[Y߃@:lqt786ld|8TҥӨӬs ,q=%&>%ʝ}5f_q}A\A{?2 4cQr6NnJEy{Zꟿ3-zI[ewH|n̯B ٵ(O:=(v$^pǜթs'!Pܡ-%Wt#_,#t5ιJ'I[rIM-_Ԟ\L궗{ Y o 痡CK)TJ=~B[Zm~3q:TpKkC< ᫫xdUgYv\8D;_:'Wq}/;xHgFo&_(rmп|C =KQ |'!{Y"wݯ^2th8, uYM1"^Y͙#`/-c=O "yz~Xp0Kqܯ|8Ov_k\-b=[>&6k <`Kw?7ߗ &qgp4NUYOuq 8a$_6sm^"Fc/*u'Y:/83EX hqcѩ2<{Sp ¶Rf7bHtqiO{i(ZvLeYuA?; ] :'" 6tF 4vڛtޥ-}˧oתE\QF}1!k?s.z>aOz^ZcsvWkcqmWN}&DscomÍLoB6َb$!V=XKD:nrcɁ;Be6!Ff̃ =LlyJ=q{їaq/F&I"ߣ?|oK/y,Z RN9,Kx&u[umlp/74~I^EI}^1kfMrK0_ŋO}ix4^Sa4mD;!f;x;nCdǃ/xC11"cR]o}6m<~ ĥ筓Č7f_&lS%|6筭K݆6e1ăx?ח<ܷVDXe,yLdJ"?ei E=UgJCxskFOvi=|3YO\:1&+:U2) B@!P( B@!P( B@!P( B@!P(/̵=+~Q1x&5?V kfXk`˰uW3ꍅg:bwve=(#|vWX}7JEƨ !5 Ҿ#֠bݿD_b/X;^|;LVDo{rWgt+N,.?;;a)_tfC] //M]);v`}{'ycC45mylXǥx[! ڧѷkjiyq.f+8cbH-:lK:XO\m )ק8ZUيm|<>yuб^Hմ3t~+g&XjZff+9CM-ZN,G{7;!1Gۦ5z^^-w2w{YVok-]c0q,ެGߗe=.77x{ͰvyWgkF=q }'~ՀGFygi4f/WHOn芴3K5+xp9[#䄅J}XsECb`w_> ,Jߵ}w7˞0m;/&. 
P#wd2WeO_u;1' n-i4oNNgH.dϜMٸN\D`2̓ߙgt>gl OGG>zU4G@C{r%u1^ܸOcS/S6S\'w6\P og䛕6 _[7S>WR(ϦaCܕE}?ZO^>_wmUq$|8,]`ۛyFȱ#ӨX n2nqNļEuz1ǂ"ɻ"^5:f:.2%*ϩߋ9s>6yO-kA̍gglcAYÁrEr=M뽘uƪc4'=gC]QCsOjVMV ݖ_@ms_C ``o2T=S9?"l,!\ԡyhk F]GN͗%k|O7jwd2:q*櫭$iMkf$M̻o{Wzy9R ]e+ygu9)l=gx#;#^?ǸYD/?ߞ<}d:遛kuoy<gRWD}OLH֯3v-jCGo17/u~֔ݙLa?\rzm"ׂ#;DyY߾5ɘv=^W;8D(CA HkG>W B@!P( B@!P( B@!P( B@!P( /X5Z۽cl]=yD`)t/3keumBg?ăڲ®1G?NڠP#!jOZ$)Ӷi_&. ߼~ &̽ȱkC\5V{G4M I>ޡ6^,#և%>&ǹv/BsyR9 4pam7ʔˎtg.ߓ&}j _b?_z>p%mQOFuV.m,xdS+p֭%f[|~i(= װG'bmؑ" zn6Y<79#[цg2Iwfg9zPl$f= *͖wE܍/d}e#33 @vNu.2x?6Q~[O;)iO ĖWWfm{BPw62'?vһ(/3^'s-Pqdןţ\TVX s4-eoZNK`1bXvy>]9fDHK,e 5#`ɺ'NYcr ݨړ8F vQknO nPFΦzOl 9J5\{qÀ:`R:^agAW$v 3}Sti;-hfSǶ殾Ne{}^z'k<g诊u79˶漄X/5/=!~F /ؙ@C\ч~7q?6}Dg/ F{sI(lQɵV/v_7eUJl,m1۾P[>Еs|_1~2;ǃCaj9/0K@#N8§:_Px'}PT_<"cg5ȅ0dSSF]/ >6Ew<6@^xn2nlÿS՟Y7ޤqb7ڥ9T}S?rE}⡒mV<Ͷ}$/oJ{Soo8*83#\1[k!gG.xxq[>40޹G_Eܗ:˼D2mO:1lh:xߞcGPm&~'ͣd- S?LckŔTW4Gz6BiE\%M~6w}0qHŖmEуe #qdŀ:Ng[I}̚oo5"F1[I<;Rrc/KWWqUDZQwhF68㫡ub2AzF?~:WCx;̵9sg/LnG809=7i1. By#c8AƲ_T"LcI?\?>h̾(N Q_#H6Ή71룯J<{gd(EzoJq*̷PLp`{D}2s8C6^,d]sj vIۘHPxƦ+UWֳ:u;$vn;.Y׹!ilrU9݇8qY77 9?1,uLM%u] xho/4@<O_ə7 .oV&gJnv{_j7=nؑ;S]5Vr%4"ݵǺod|2縌ɲH3.lyڕnzM< ᜏny~6t϶豜s$3#]dn1gh/&U4`TB B@!P( B@!P( B@!P( B@!P(hIZÕ^N H g:>b5{?~1f|`遙zDk}ϛ>yƚ û[*C[_qu|D>G`-bX:L>X  -a>j|Г±ͱ>Vk _r`'~%J`\(Cz6T{kX-%<;c5Ä#Nz}v6zcLy\fk(B~7יִ|.<;(ߜOn Lko &.&Kuk ;_~M댬Z ^ZxQf_mkRyOeWzHc/m}byio cs(.1NWq\רҵʣuGl@~- \\ф.}B+J~^?b4*W굜Wf=#h] c#w59.gw/Yld\w/m_o-bXYg{+D=#6g{WDŮ=]˯j\rE }ߐK >Y-c,q/ůmY6֒Di8:/x8޻j`GW]Ӟ7v:s d.:̶Fv}VZCm8F8rVsK($qgkRkIx>8=?) ڃ%'țF="ct=; ;wDz Wێϡ]u̺u@X͸\/;j-Lg"L;P8G'{}{.jJ$\H;0նⴋ}9\Nu;K.*W0OGD{$BF D~mj$=9P ;ɸ쥝_|6"ک`kog#. itԓ||F;D8AGF\!#O\O m6U踯;Lee80#߬fk[e#Y|wݝ]HoJ{ 3oO9Ч|C]_փݬi%ϱ?,uֵ+>W.کӽǙ2P,:gvg ֝v(c>\eopArOKq僉jG5t;1):c$? &t+q5vn {xc xѯɦ{y~(:y}O}?qyIFiFA|n[aW u]Yu~=PVQYu<(bkZ_\6/tDm'RY(kÁ <=ȏ)ڪ#K~gZk!QX:zxEsq;y֝Z4&g4⴦;ڟ`F%drg2K#ݩ{Ϩm#ElljO~X_jTgPNJJwX˪hs_2zŨ6z&{KY[nq̦V:zsK$Sa 7i( Ş+c:_ k T6N+;n!SS[E_Wz:+e3e=>w-}loꏳJ4ۘk1,™idK&HVI`}'J0]+eΪ~gVن4O"sӣߛ!' 
pRt-1K>怋۴xs}彨jߌX'͗Uōd6AljKHgDGy%_t\:6氄g߻JlQsMC%ºxcq᫉uR0H= ߣ?]]6Mjl >^px7?7pݶk'y9o3)8#=xMDy]eބ3gğ[ᗷB40@ YkQn0%Co%qJ|~ٕ%FYyo=yYrӞ-qWCcUsukOj`6?CہB9qgο! uCΧuXs3y8|j/U竵r_ץ,C:R&мY̷n6ebgR7\}~o7r>/?9O ^[1C587āmSǶK;.'\·o7r iC9@c_[eos>sI0f-mL'3kR3-KDKlΐz^ jZɯqݡS(~5|A] +Cr;џxEzvm['m//Ȳb/k'{E/4T>ᆁ//cydMאoKi Nȁgg9ғ,~s<7Y"Z~q»o<iڈzzË9;p!N Ok<zNGGUmXa3揼z6͐q们 ʧ@F2];Z*M3lϘ(w7ΛDW( B@!P( B@!P( 氭tǓJ3ms2E^kړad-CP`\=RS<\Y^ \ ݡ뉋y&xa]ݯ=J.ԭK\akuu |+Vkɿ_?8WBMW $fO-x0/}F?5A&o0Az6;G2Æ+q/y@<O48}q15pƺ0 ' u}-; Mtn&ol2Lfc~rM [u͗ދϵ"y+::oN3 Bzړ?! 15y_2K~TGSX ?!789ˀgSzްYzrƻ4~a'⌺Y\'ʦ>Zzy >Ev |5w OSuo'qoU;qPiZ_~QȰyzwd^4KaOkGmy!?_N ~՗?X؟j,@*xZ|97p->F>,ڍ3b.*=d#vgqPoXJkٯlI.:Z5|5he::k#kE,ܙ[Kst̜[̣7*a_  JC6i8pM鴗;?U?=o5詫 $cˎGE>mji3lGUau~@-[x&O+oW~0.@nQ>/P7iNaJx7kR{gڏ8ژѭ|j}z;+*}PgPl9=q:QQnҮ]Di//-WuFmX$)Ϧ~Ka6ΛZ .69Z o:FMK_=ܭ^ǃ(WV;Ǩg=K\gSTϽA,3]OMS$|Ɲ@\8l6~#GɶgSy|؏j3 eKٟ9y_+cMehAUimOu=4- ˎ+*NVgmcrgˎ[JdӋ:i,\~œYnEǵKǒjΜ͍'vXOu3~2y8U8N|)xhړGp&^GqWaݡ;#y|t='\ #sHSpfB6{I#M 'ɎljOߢVgw=aT:{ &tsIf=}?͙2Pߌ4`" 㧏Uqٚ;/o[sT?qį˱|JCpmeb`uHi,:S8din{OWq*MGO&^inG%S1G<`zr|Kklf\7e8.$.G֞S*zf"aoF~F"$/xZ_;#2+fO4냈Kff|>~_i ^ۏm|zagd//Od_*iOgˎ$ز]"Sz#<(IYڞv vQdCn5x~8z7]F=&jCkc{PfaQU>R5d4v:~7{3[H7u2ot]e~w ɲ/ ug7u\~61G[-3~K]7w1S^t!bxfIgC޲7{UMØCe1Mdx0.[tq%Mdl|X8~ kOl**6B@!vxp7]v~?ɪ{R( B@!P( B@!P( b.UszڶO_\~~ [.ĺF{^u0u~X3}&-mb~:c!2'i F'M}9l*րk^5XsBxrht2#l" V g>>N b+ԷAz_[ 6۬F{YsxMو I rKPc'8g'u\Z_A\VSXhSY}#0C#X_{F2/Q6&yt֝:./0}ew8xֲ}u}ԧಘM_B>Cl;_ȷ&>Y8';'x='?9HɫG>W:|7LՌ)~лu8H^}Ҿy""_f=M"w1B]3B~e;̣shxW K~eG!%סmħW/2>^0坘+ T/_^#^Kc/߹8kD(ٔ/Μ]z@i-ߢ. 
51Hm#E  pS\Vkz6uQw߅m<Dz,]OMWEBQO 4Mmo|zEwD?;8Bžĉ  )=!{×|8?>c#bNrɷopve׮{uCE(DZ)|hny쏅8yT,%Gj?)څR\6Ywa,zarBٌ'"?~ul6:u1o,쥯n| (=hY#e)qs>z`3 <~}ԅĿx8${qmWP،禊3F%}CGAֱyGOܑ9=m5Q7S+I{!m\S\)Nc +SmюbXW/y#Z,+m錘ad\8s4F/pwN[s| k}&5 ׌z&߽(Wxtw\#I/2$^q4אOqi/= 8q鹨U1nqg(o]3u4o͕Fy݇ylNo(͛sVcr:ad1'0GP23$޸s)ɥZ?3Ẋ8νC0% u]Kф{~B\ċ+qzRqш̷6qΰ;{+)%LcNDy+ AZ?Gcgm3J]Qީcq#C_AsA^$\Q_'oJ׹/?wŮ>P4n{BY }P.g_ݏ~[D[NQ((nk/7 -q8/7E&~KHӻҞ 1eš[ 쭝YgRW=oE}Κ\ڂ^\F=ȫ;?~ȸќR+o&nk/>.8Rd⏹&M]Asvƻ):r4"), -88렋(u8ʵ EEmWYguSHM^;Fzʔ; O/JѴC VOoBʜhܱZ(QW81&S~6X]V&U7CzBFs^|O?5]e}.L6-Z3D>&۳-Y? _?g5WwVn;D/c@­;+r?7}vmw\c%v}$i^{@ѾnlWp.ch.] &^gS7l(Q^r4-Jץ.s> z\Ӟ߻xJ~QʾH8|5M\e+ZiGֵٴf m",MKuܾM_^zT4/O{hAZ+mi%O6%|EAlg K>&Wh-(r'J?ZWϺFe3Z5b#nLH+iM.qߞi1|Qa׆NEm{;_.c/G~^+O~E?5MZcy(gZMnBe^sf*ۆδQpE>;~W/Ey:X_me[Q_,/X5[QSe"g;U@^C&mvفuqAC=Jws~3zm<835"+7Ȇ6ܹ|o?K_7{zK_v~'e/]b2vvoﺈyl71nbbv58#צP:87ZǑlG<UXDwsq~ >ț~m*G{>B|d)'E,4Gi`n{I2{=)xm¯d[!ANs\f]iW>j@_S_<<Xeڶ8kEE\?8/1ʩx/ˠb?xn.$yîS+;<&|OQO|~JEwd.5*g's:;!zyzGx0yNy,yÀQr!C3sd$3yWq{?A_ŗ>T'DbƆ`y H#/ף#3IRF>4n+qĹ#_%&|vBg Y݉f-cݡ/r ] !p<ekط;t8?t<l,VSL9lW<v]e18t5S'^[إR|[/LToI#p4)mx\gV}-+8v<- qw+=MG-q&^.PN2 :$0tg6w}^ƿ/Yʘ&~Ӑv㶄xxvK?i%n '7['}.ڗwsl3cjS`*<[/҂ΰscAgC[ԧwŷ Q5l՜Sx(CJ^sEO6W<PѸ7Rn-mx((f! /o-1/0*tcs|udU=4feS'./pM8?qS^_9ssCgYK0߂: u:#-"ˈ/kؙs<n^f/D/3 P* Do XÿIuUiC4&r&2ڷ+%ᙽ$qݗgE1+D?Q``b ʢ&Wʳ< u]1p6+ ^"˸C8ЗvNbvQRd_|S_'yԷ1I8c ,ǂԝiI|;AO&sljJ#޲hzuHEUj"Fר7d?fYJ{ ;XBYm6u[Jm%\gWOM>7R7R9NsC酎*|5ުߎ炛fȎ7qT6t=`>Ip\]4^%nc>tiy֝]G\,;s1kUJ=:öryt/Qec@ ]ym=Dk1pw0!8+}* [VK1ޭR^9{ b8oPrQy7g5R]8XØ:n"g"8\O%Qf3H_\V18y1.nZ:[;<+QЦJ{~mJ Lۢ ¸o .\׆!% s?68.ۯȿ\]m#1۪UZͼm;W;vonkku6K:U޿ŃW矣~t?V_t~?UP( B@!P( B@!Pks2S6c{P8;SK鍍X۠'~_Z'^.Zeq9}0 :U߭t;`Ud1km7Tѐ[^=am|XvN{``Lеʲ&Nk?(ُf\tnۈzz(^O?8.2G '":X"}Z2?h8ۗiuDr6bG z*ue6F*g{wֱ_%Kl3 ?1[7x PgPlC~_RvhEY m }aW6P9 F蟎g隃mx?L:_._UF⺸s3/4SI^X}g!Ff]i=/fb?Ľ{K=% `s)o̷RN[xt@٧| o@'ROk߫GG4LNVA"m/p3Es;U}| \;Gń8⧸ײs(3WztMZ>3JK;Wlԗ|<**r]_prA$/D,㈋{fօcOgkvokM ͊=rhuMk4h4H3.]ڐ ~gۺ%H򧯖z^|繉CLck\3?rϖv=L߀汹oJ9Cus66g@_q8[f{nnZbAv'ރӡhmqta" oq*92os=X `A4ŭO;mOp'e\zvfc|[Wbi `kSVemVhY\wJ`<n :$Cf9mۛb7tz؅~ڝh 9<.E`>j}ymGw#@}6? 
L л|vߠI{(vBy%1x `R/ll-|^ }ԓDVaΦ%l=c~.&_Ffb>v?exgnp9N8G,0b3Z ؘ60b{NGb3SuasܵK±WKc.&Z9r|OSe٥fcL,$ yxۦ޿?|;-in7 HZov:}ۢY裙6荣nD>"k/e`Nans,~OuTa+b_]qsǫw|Tj#QG#W)%Owv0'K{> UE= ٷ˝Y#Jllcp[^i-ZnՏkQR,#[ 8*eй M7|4[%/ Q\%Nk6u ^Y>6z9ǖh͟銎&x/ uQivD(r| M]cc28KefaI݇c_̳±ڼHwF{z.?C{c^?n?G>=~ƾв9x`bhz-Q&yXc4C8&]窢R >CӇ몔FV{AnWѭ|y^1<XZByhVV/5ki`Oo8rE^ptWW*3EHdi"W$F{ =v㏦l9zlo[=Z?:БsOgK`?;P-|8 ,}Ik{`)c{i󣳉ks6RX5·xg5bxBZyn{su{fo8}Ew1Qnro--z00zf eXpٟqtNq皣f]CTiInK]Q.i ء{Y5>(;?8'9c@gi0̙ MemGZ: c8԰Ѱ.FyƵDDG`t|7ݙֽ9 [dwHYЎ="ıA25AݖFX㪪y2>^ѴnF (A{us;#WT\yee|v2؇lE_B}qz_Wĵas F'NbGՊہF˨[\(> 0G8jvԿWClP'}j;yY̡*gĴʞRMjJ;em ϊ}]KY7[xcYWby٦SȬoum3+%u]f{ď4ۺ8}`W_̉gw.3bz+!<Y1cj쟂57~t7`Kox/ iϫmY}5gPLg9#vSCW1?Ա `5B̉-"jձ֙8hw̒\MƟs-&]i^ޟx6 Lp`&80 Lp`&80:ߥ ۼcyu^Zy3׀;õ9P {?R >0M|.w;~S oy*B@H;o<;K08H)vEGp=>6N cܤ>z2sYE̠Cw6&= PN DFZ H =&~[;?TU̓^ېnSfY?xa2WZ3哲J>PnYhhT..)!b/7D0GviND cCq&2˶I}FvKy76/6j=FUh}3k|(,ͅեl yψ.M&_ ̾ydob?LK&Ip33N<_72zcr!nLy0 Qñv)>)p!ĽWrzSL},^gI#OiE {?i?lჽs-1;T7qMNb?Ǻ~7vGSƮ(Ic\z9y2rXT'~[t5W,+[_ecx?7W ]u?B7pDF+3+dlK2GXkĞ?zȖs zHe@ C>1n3ybSXژ|ҝKhA xGu(ͿwXDqF FR_.l|oX$\#'a`'9RdlqW!4:5MUf*O}Nc{ҎHxF-ƎfN^R~$o0r,l i8%UR|'zX~]]d˂n)#/X$Je qBc/Qg ^^Ez ~ޔ&.q7z;2v bMvII"vt{ѾwN_1x$Qcin .rO546:ZzQ}uTu< vHt"װ'*y^H$\msxcgv!Ɏƥl*mJ`6j'=aS{k={r$sɋi!Ve,}K=#JoɛF5q鿅A5b/j*kE -Աw1̍k1dg57թf雼mxU1%0-'6^O6{sQ5gGٵ[X(^'qC !)mc թkl-=[IX]1v.@׷l|'5,N6-ϝ)uq/x'En')z|ԯrM;j̺%_39ۉ,fLy7=˄y{l}4CKZ /᧘+vd̃U}aW`z%-Zl`w=.]RW6qB>Q? 
X};m2Hy-kik l?Ӧd[VaI?ZϽYo_ ֶgڿ#ؘĸ̿(y~kP[9I~>$/|Eԟ9hƣF_RN[np~GHa.%H fytDWK#=7.xj(^bD+r.D}#5؍ʨtbRᩓztO ?+7ķE ѣҞYxγK`eVvXjۗX~XK/; ѭ& 7Цw|M],W,۱ě?4qc;:5}B +l9q^57UnrM^0}w̡%_69;^coy}4a?{pɡǤLOR,qyrO9 eu-pj;w`OIu٤3MXdWHkK1zc mJ;9fb3X(dc7L;7 y{h:|/<^%vϙ9J(i.AZǥo(W}Bv ^VyW9k?\r~\i51v*:ePecKK }|2G#7|:\'lUs7µZF8'pgT̫tNg<{D'CC`H&642pyIM[*ҭZZE6< ;8a{.`!ӡMGJS~]b\:lmeq< c!n(wgBw/K]w̚ }((X[cûw̩oa[h e76TJ&ީ9lZ4ϲŮ7`PwҖPc~aF|oi:;7^m3qYq-+щ1aNv&L]t,GB/.gP>Ys'qlDG sv eĮuyOS]1Ժk1mǽf0._`χw :dn]ro%}NW^:n)u2=:ƘA?Stώ>#>~U:E.| :VXIae)* Kɨ}a:L˯k]>_tBGa_mQV,plSof{Eh~Y/by{hI/8IӖ؇ .XR|@7c?^V`~LY6 Xͼ paI5/P5Snzk _Ro Rt[uyf]"NV#!K b{^n-^0w_Yq]3Hy4㜭 hoy`֖Y>c mOP}c:2L)̳|X|We> g!uLZ?" ILvF~AAy!4m,֫kӝ~q:Bs_IĊ,i vGXV$_4Ӽ7z?x3%_|L~Xmc\թC 'MޘIvF$stkB@מ7e>Dx tٷӞ!uDbaΓcW>6YuHޤzZ.)>_\7B䍖IDl{n(UGRQæP4|']GhhͶ}pl51<I=P&Y/SW\( !n$qwUկ䝘/WGe>:0HaaKSSWtm|͝H]O2w;Ln?uxf713dgVy#!?َY?DpN3EIQΩÞu}˶W%?~Vc:v!v#L3ybv\3jUi=vnZd_I_}̬?A҉τ.Xzݟ ZOsQYtg,,oJ>+>þҞ`G]s})#*ܝ\?8"'3/֙v]g⪨ñ!)qLgSNbPϠk[xjzx<綎C.mX˱e-P8FNm b[ Ό-u?T:ObAlKw?hu uL3Y7O3m6C~,,h|O|cx=@w!.:VҜ"Ы2^_ +'|F^R~ iSZk$n29icK[:qYtgŌ^z 0+ݵ{8jbC.da~חl>Z/y6p*zϐ?[f 0MA0ec%n¿%șދ VIݛg͎حkXz W6\ |:"إ,um8vsl ʷI'kg:\,nI⁂Oc[W|Zt>iaWG\;.U81 f V&P*Iۂ;;A~@9. H9vOVUjrUF)I~o}r%}/>!ڿ<06նE66,˴nEF\ӖX(lE嘺S%g}alWj+DGxys(XA|\tNǬJd~dwpG}2'Aj{~ a.#uLٶ ֥AMu-v@6;I3GLG . 
V{qJ]c]u epJ:.Ns Kwbi@n1^upGV#p+OCzԧ?0-/ r@}`zZ:݂]ӗ|ެ־Ɵ:;*|h;xs@1Js~ŹΦ)kCo\Φ̶OeM @}┞tNp7Y`d4[]أ}P_SZNl䵏F;lmW -}-bImR열MꝈV(+:vôzΩ X4 V4`긙q@/Β27?d*;όByn7χZo .ϥ6sskvQK EzH}(]]A%Q/#M -K>%|F|T16Ϯ]6|e"1q憿ka7]8g1l`ǎJk h+aGu;n@ɑ >1W6ueƁmh!VV}?1KcLr[ s}S_FsMotg`>eO8V1:iֵ;{|*Hh?{ZSTk˺y:-:`̈ހe\YϴQSl>%6dNmk4qxڥ0Bcth^7.x\慘%p wVll OhѶ?/I=̾zg|zy]3~Qz ̃tG2^O¼{DcʼH'l_M=k_ M>CϫSfq\x"~}4V;GcvR}w,rI|K~ίG" wq6#͝212']u煵Ji9Y֣_}?a~?㳀0#/+Zz R{D'_W90X]"B< ȔcC1ӚMVfvOi+Ze^~ 7rUfv^/ ujZh[ž75zisߕ]oZX,&C;}ʼQe[(9{ Mrx:C!݂^~ϣӮij6j~jW^x;s Oލ_>R{5C_-Ѵԕq_^hV]=f RoGw~Uœ\>?-uaY/aRL=~-6u\%zz(24?<V3uZaS3-' 29DuhCo׹L_f) Mc8_{lK5s~t x/_>Uu >v޳FYY72ʟ54-ĔXvsI,>G,rU74!xzߛ],1xJz'|P_Fҗs˓">3{?+9+h]٤^b*3W.cߴ5Q3Q Lp`&80 Lp`67ج}/|]v s)TfVGmbd3%vV9uӟ1| 9K?)W+]ú:>1myNs5Yjz-xd&µ.ϹnI< ?̇r["e.kieZyfK}oHEi}W]{TSߣ<>ۦ }+(׬[c)"3]s1z20:9tWkJY5x[Sn˺dH}Ǻ'O,79|M؃@֡{㖖b<:mWZ̒.O6>5Ez|=(uBdϯԩ^߱I}S'ƛ7U' OGmYu}R>_W\E< xK0vajz~`,wb?ɋ9FoZ5_^ܓ{A!^\ _cAiOcGY1\?==S^+۰ +Hz=53;raq>kl)a.Ck^WRjBN U7b@:wuL'\4`P5ᴮ~?#߮3l&ߢ|iG?6tQA|"fLY٪;]o˦멋K(:eIsĿ"FO` Y'?@8[Rƻz)+cB $/`-W6< L)k*â&/G$,IU}n?He`'D=;AmhB!-S2O.>[ ϥ;L|gdO',wG|K^ϴ*z>coƟE#bl="Td1V)Ա̍~s:Cm9lĆN}L &6:[o4S\ig{lϾ%rFZߐO74:9D޳>K<2! 
27 ."3&Tǵ|2|)z7}Tm!'{!WI3-%xH$9 C[}zS\1y>I|^hftggп '#⾵F9H6bK[ccnďt,ߤ^3c!h!![ vE[B|g-jyyF;E)4h/Ky4͑ǁ.0ߍEuQFo ΧʹGCf3v]wuqU^{vƛ;='VlCP|_[9?7#]d[ɴ7"|ʅM[XSϘmGL);֢!D-״x" 1R_> \ߏ2Bmw{}>4n%:d:L5_aO]7C9  oERmї3IGe𰄦Esn6?z vV6,Ǹ@us"N@yg :AH&xivh5W]k։c|y})=Ѯ#>e> <ҞߡC/4bKEl7lYw{Ks%Sl_nhkql}1K_M;u_b1(pnp`;|{}Eb07{,0" -m^-?1zP?$ ޭNfѼ);9˩I_Gkg% 5~s[;\xԖs4CKd8W9v +Kl.{5H~m_8QR_۶`|ۨBe//jKY$_緈y-|c8bCk;u%V9 2ޤMC|1p_,9ޖmAj[bO5/ނN;m{sQO}Porm2fwF!U}>'(m`sKzfL3,{a~1 {]1Mw*ƫ|kJte5#^X}0 H#/()Zj0nWĠXmugH<ԟI0j=|)1mINz7e&hWqsOZQΊeMsnm`u3{ChYg=Rt\i?_![]7])+8qlbĊX7IH09b0>1n6hBs2v~/yMRycc|M!HxYv⦍e򠌳s=fI$ddJ@]õ0ʶ)I[9Wڼi:/惸P3S\wﺕ{di,xlkĆ,W:iD:%@.OZm%1+oYOu}!e''6QtPgE~9:eW۵#Yߏ/POcXz uMy6ZfBāMw4ρ9^'uC!?eVӗ:X?o=s,ohtaXige?xgΈG\`Y2eSu\aۮI3u5Rw(,џ1^0~݋:_4htZ ]e|<}0±G)=IdP.|b8v.cY3D*Mg!k9RFJ1`nX:V+^uG5]ѽ3s:Քg?,0GGm6<_~agD~ȿ ;Qd鯝)Nܺ<ۣPwp e'+/N ?_J1똯Muj mVnX)^/kKb:##s|mu* w5?0MoG>1}J^x|u VU};SszgǝrSqPξqoerbQ8wm|g''s2v^2 uN>g8ş?cuI>v< \OCNk_|a[TKv-ͬ0o:;Qֈ9ɷBe3&̹wiZ|/9SXAO.\mguq1Mc@`v驽0~Dxnۻ{-&u3ANicUf~)uC@YmږᳺV)]fֽ ;نy4wrG\n7^Mj"m%*܏í|ʯӶ\̙;o:>Γ½a8GDJ`eYL[~l&)Cdp7ZG?t+pO9A u,N*:Ϲ,}3ۯkchűy2$o+QZ~ h8F9oJ;o'}?B esZD'Y?=9;lYGNY=˪縀IeybR9t95vz~968ulKA`=GK:KKcꊙRFrq.닺PFbK͖lxǴ&MX\l囥:ځс0"dn\}dlt'w6q-ٗe/ ͤ[jO O)o=Ͻugd% z(&80 Lp`&80 %NJ꣆y7~`}~_E_u~f??k-|:yϐu̇ٹ.E'`P;[]gvD\'=Zo׾7F푱'O'h2{>_E>r=擸!k9;9>ҏ> Ǣ?=C!c$iY|x7]biG" Z&'l#~y\kLn/~nF'Wxw~MvzTu.l:_Ӷ^4oi%j4b6Eb[(Q蕢b>qf>oZyG `_ ^x>q~c֟#X\Nqc.aY52cW'4A?-p-^c1"3GI)~Mbq0oUd/\h?>` [.|E y!S{>C1"W/#;1am wa~PeL=vuZeM}sS[ec<< |$"7~c {^I>N☺rm qR[4:W/?n-6RjEj UDŽ:uƺf l}m骐{[?LL=O)ikF`L#_#fS| ێ-/V+Psc{?GNc ivUo}kcО+o籗!B+{\g({:ףKl O4/sd&EK3|gJ826T}2sHls~(W?8_ (N-O WbuU7&٣yS饿^,ϨyO7mgaĉGs):OX3BgkA|')CeV9"N}bWFy] .iQöח0x$~WKrWG`{bJǥlow9˒W][$/|VbcV`c5aۧ:i*&S6o"gגeSTGs3DMZĩZRd,c6R]GQ燈a?5kPʐLQOahC Ҟy(`}~Qګ%!^j}bIiG_G]?quH/. 
jhV;i&vn_C :RλO1&{bl=k#Z]Vcᣦ2zǡ0q ujhf:kdpfhIy/xcjc| 뺍ܝ g[qmHV`yXz?uo5GCxsGeiJ״u0H`3~dha`͚KE{F,AVK,?, oV5zZgm:wwؤDxacnwgXΗ# Ǹ-u&/BOΗ$ki+ߑ?B|Ņ>P'_o+ucXwH`}1F8}W{%Oۺ sյnb6`uX.b@g`yGq A9+HhѲ^<9d,]N&O4c\bOvq+|wyL-K+ִ1sŪTwHNJtaC,DIo+8޴V}n;]u/͐4p{H,/u0 ]9b ,0Y9(1w6مnYg$ H{:/yBρϋ TO6G=םӽgo@YbǰM7WOk9iNW~%F?^1W̉!zo쏦vى.Au;ht-/CO^H/}ĂA~K_Jd-\m_pU,u6U 7]pIkd5Tyf80~iXܗ Nm/ϔmIYXp_X}x'bpN5A}{w%_t&wv'}U"*wU: vrn9~)@H0*sQGblsdk\~yM˄^)iȯ";2o/f }Zo8@CؿRoc9'̀5/^La۟:Lւ֗lJ&ˡ џ؜Yeh]PcIwqDSv\}7wPh/I@ƜC6uf48T/qcl#[{HRKbE^[g5ȅt3y-":# 毨8"y2v*ij Lp`&80 Lp`9iKne2q^\y߄ߛ9S >ρܽQBo87?Y9]2S5V`\l7-\XpkȺ|1gi6f^<̣ų_v8'3I}:_4Z) /Q׉v>[^, C3F^xPɻګg o;睲*i2D[K_&sSu԰+?.2fAެcimwKXÑy5Oxu|yP]ksmrv^dz7g"M8 ̵o68|qsI|B^ >G}KqSNW$i'|χlr/7<=WOBYorNHFef3P$I嘜u`A!4p@O#.kc[4 J9D̺3ɕ8`yeI/g䦛 >6 E"{aIu mKɽx!OM^Qy>E9ۇ/w&䡈v~%|{6+>y3}P EyjpY#y~ 7"#> sK|hfBRo[1} cKпms]?ƺ$fCmzYg䥠o}_Ce)Il+"QuFms/~zbI34\ ^S ?$ ue9'}*5a}P95}^YK?3gf}g .kXR|{φZ+ Ҥ Uë Ƌ|9Y!l:Η۸#Jﳞ|ӰOtĠ"Q\-5еx2=:uM˘o׍Vd`eBg=!d_j}{<}PZew$-YYY _]77XA1p>a>ju>>3U?byNp.MݡJ/̾fƟL#es"֦ qk;_Ӗ4 R|ʟM9F"i0t3='w99uv ^fg V2 ^y>2b1~uvZExp|DrO -!kX>_q,Uoҥ)C'bsizf|g 1Kv^V8+.oZ Ɩ7Yomae ~r|~>I'0:VǍ]bпa?&bxvW_28nx{g0ˡ{g罃?ο'vh*ekIzPdr\L06 enUثv+oU{7a̔8׶m- y~B7xgݨrU~AǛ kg%bM)K%q'% nEcf0+c{sFoK1+Y _ QPo\P>%:?>&)gZL9| }Hl9ZAo=:T W|V-W-%<ĜGђI'ӃIl1ؘKo Ғ< c'c m$ ~|v#_ω<'uo`A 3E1(?as`ϸ l} \=ie|],tLM? 
+!տuB<{*FlQ/~Sh:#op=D?V;+=0U]bR\:Bt)Aoהb?z#;<khbHN^LG;½&i>+Sʶe:[{nOu},]g 'VUaS5DjoYOFw +>* ^/6iG6ͯ$εu]z?7uQ8Cn%-hCɒzN5TU`>8n^ҘLH=Pؿ#%3m}mÛΡw~𾗫:6,MNRBsCw]c\KUUr`L $08xtהcuH2d㎘s (<%_ l=L#ߓigr%+6q< o_ ipI§q'Q{RW8)61m_ˬsX~90jxi)0ht}iN4 `~،+_`tѦoO mP6hzWz[s71k*&{j{Xxv*UZWo ~+s}YϤ6cbnYQS2ס?.оKxj`U)b LkSwٯ2I_l;./e=s"_[%eclAY\7N|-wњtD)s JKj]TҦa;4s&ũvgi~`5̀5K>sĿpę2M]8Azw6z?/-Zm~}j}ujYgKXC`<XejgqI"uZ1gԓ)P^x(cI*1d#kYG7%C#mh< 8?z봰Y~IDDGEjƧ˭* ݱ:F7݊>8yԺL,/KS"&ތ/R+Ν=I_ݑuur'֘l[^_؟Q_XÚԩ3eOM"2D|Z >m68.Ct(f>Ul*i0&Xht7Q`4nR׍幓0_?b8qg{<^㿦i1(|`B4uptA\;^hC^6Tߘm}(wo6q\9~Mw,]+KJ@y^,]>b#m%ƝPVK{"ቾU]ys;.i+9&G__)Pnn\c!`sB^_5Otq[@g7'U[S?fMǐֻ{$1Ғs,+NߔVIضiIȬϘWG:eH;Gen& bOA{c_Iφs6yxB]P[3ݭ &80 Lp`&80 >:P<..dw|9((W&& 2߅8X}jn45Pw~y:M̎~V ( s|cKbٰ}L&=fnq^H>6{;o>޻yK?$rmĮ_|`!Z^Wz41mV?H٧q_OIuόoЦqN9W)px<;O鰾fIӮW~UxkW~_4.&xw0?:;2J3WSi]٠?v<(=%|gp[ޙ j`L>{b{w~{B=ϾLq"sRͼ>R^-I.muj<*5sVqK+ tAvQ+ĩux?Hrvq^$Fr/w/oIhj ѯCo /ʚnӸNF~ob84ȳ[{թBq-f]ܟmkp}'YgseB%A,,guݢMyz'||=yĵeûXM&>jl+xqgY=ucb|ѿ 3~8XmuCͱ1^a=Ycl}L:4\6d-%q~'۞{mC%rmmUM.^CHlad^@nK?#>_Pa6keb*3X7Ճi0yc{,F\K2G^LKmY[)^^zQ!iQ~6otOb!mkljz?Y+aou8_B%/ʘ# >z lڀ)'(?b=w[١05]f8Gަs[~G[su2k?rpW~z'x9Kᛧyxg<&%z63."qݮ# B}\SpSb$~Лæc_o3_[O޵3 (ya8T[$Co!WAOpÕgx'o/ח,N VKk>@V6Zݣ~9&W ue!\a*6RY^ V 4u-h51Zy+3#inP|cӮc.kL7] :}uŋ5ϩŞŐl|QK\l ,[ ߭Gb K?B^f?ZlR=__OҊ_D* eS?f ,$FmosnP4x3Ί:WWs?w67~t—@m{a?'v-VgeyrDYbS6=;'L%tx _a2 {;o:?0:rrk^~>0~풯o=;Ic6AwFsm׀>O~W=`FԵ{'--|:pRz1r}Vl=od~rNC\8>`ˡtyo_+sS?%u?_Ng9/Fz_ ,􃙫n|,/Kguݩcwْ$&HUl-b1ˆQ(Юo~;1Ư?#Qd `9_<gK{`=N]U^bO6@!/.mb.}m;k(c%,h8|{ fn29C?8݇o}EYmf5o#sl Vyvt6`~^ʄ6,i [y7TCm69=WvD_$zDc`727gzxV+3ޛUS(˴Ii)3sg–*KR ߤT, T 2-L;O "PLlSQ$) EG|##cϱ+←v1C vTO x9٫Z3?1/O~BmCƵJU M7[ y +ZP}<|g?yz ߛ[/KV3 ijJ.+=Xj.Nk Wjo 26ㅥ`x7@FUާN>W0|xqy"~L[ xr)t<]j'=,_-5'FA xF5á | V'rLS5|τ[,z XFȗa~jM:G{U3\:33|M[#7X)s]i_k| L,p4t}MB['+ca^Dm{y8~BR|ghMCe}ޓmaO0Ƚ;ʭ1uU*m}uo5sq.iӅ/c3p4~w4SNXQam+ :G]mT[b_٧uy1igVb,TLHs;fӈK?^eilX?W(a``}C\$}m~̹-So܎x2L~uWưaa6 f6WXI 8ꭹH$vNd dϱ-"`"#sOXҼ olPa Kc5>~1RZ|xg[=py1>642:Xƨڳ˴(;Pg)X2߬jc^~J|ƽn<vc˰gt%2F6sIQAǶHZgxJ2=Khx~,ά d *4}?,ȝ z!vRnפUqN;GݞNKZ 画# 
|0vд賻nN@^m\_>t_u >`d^~BPchTb?l^ApU_<֥DY;yj`j`j`j`j`_;"A<mfFZPfع_b/砯(YZ_dSVO{ BЏX?ZfżҶdarأN~>/3z͎J5-; u')D!W2. GbRbvc2 EoG=|e]Cݘ+[R\XŸo>ub|g5F(2W.N܇6xG6n}"n?_Ïi/-t}x~,y2묇GHڙk]+".5}}FL󸆟뼧v4{Ev&m&jRGǣccef/^۞~Jm9yG-/? )?x Cu%6^п1F*a>QT>wꉝynkesh% G?PK_Y?oڊNȕH]Mδ]ecQo^-Jvۇ׼č8ع{7mXF[ R\4O5> }?-Nlݾjy {]ÏYǂo78cNcc\}b7H.˼+a-liG=}آTH;Ok3U~nPh]տ]lrY#߃}M٘CenKm戸Ĩ(i㒪 jf߱w"D^ +-Oy^J} P\32TܛOPg ?_Hw+f9k*vz Ǿ:WZ黑ȡ7}bG[gLغJ$~l'n?P؂(0KYocoշc?A![*yb0#~R e+8p _%iTeO_UՌ>P{lqTjgYRrkoK]/0&'2$ow U¹7$Ŭ 7O}X OwY=b<+CO sѣiϼǸXk$~-T"A%)Lu+M>tЋ1?O j%tgZ;)EKi<#57ְgKg'79)#;| -׶tiBŪ֩\ñ6Ai0;woO29nc=S/>ڗ;[>tw-x /Crų\c |ShkK&| )Pk}:sWl9>LzF ?|Cg<;߶λ_Ok` 5F}-ր8AKѩ_*Jgr ]ajHv|:gg\'LM 7Bݷ`?ͱ~3=2i |=2v(ge&"׋fyVїAj'΋|'~T#SplJ7UgWɷCo֋dz&L614?^_E|Sl R+ 䝷!z} ^t\p5|!qZYg?;q]hFkG2鿲O쟙2o.b\?t+%[;4& ZU;)dmo"7/9W䓟J;ܒ48`o޷1gH.ێ_4)Q<ߏqͻ\ >+M eδ:>rËKa/1gk,cM"0۰9.C ,[a;ka>~ܹpԡZYHs&7f7y N| %U]SCX ?oV4__/ӌiT 됣"wHE=:K%S}WL>z9٤4"g@ǭ:zg5~t5FvcW2T=n]- }~](2 n𽜇q~Po=0W]wWѭվL-M$4763δy˯M[0J{B$:dL/=_J$ߪ0_˜ˣtj|o~y8WܾY)tTהWgo+TӃhAbx^_S%:70P?zSeQgۣumMZ)wrL%ޅVk7]ߪ6S[q⺏uo"@f+yn9֥*Lt2CQ1q³6F U`V .*?[譋8(+ Ny1kXC7_ЯpkYJYJ7i.9o0|rMth21okEoV&<.0T5X͚o4 N2e3v=ǎN<)zޠ^xHhʹl/4̈yCxwUY6;^i~V}ްޯ]tċo̼3ۘ=`\Iy=rL.>JBfb޿C.f"p O co .L]blWɬq .]+_F\jo~!ΔX-fbhNsig;/,‡Slgs"mgRWݤgЦvpeҺ=.fq`<ۮ; n` K;8tgTĐw#gF. 
u 7m[O-lq,`ѹ5ߗ>Um)6RKK^SywOW]_O>3}sK=@^a[gѷD覾LcBs;蹀'2ê9->~|~_b5Mžd˺F6ֹ%QGNU̓>O[++ Le`1ii}/EfƧ"W ]d$p6nW!.ہĉ=U Q`ÛOe;PvڝDu ]Vjbg~:ig$ ofTU١~{/Ǵ=âOPe#|C!O+DaL6Q҇9fmOpҩGt]~ulퟳ_t?3JܰFN6/WE~V~+>ROГ0y~ORt]ϥ Wys 4:T7b<6w|3?1st-"^quwgtv@/Ouv7_B,}(CGT6Ҟa:8I9aCKu3/u_g0~ MͪM%+[}JX~7ob[]L l #eUڏ= y?W<$퍽/iH/?l0J;!b7aIyO){jLJ`>,|`lϚ{&4~~lKi~c{0 }W)*on{ ~B7|uRzO1 ]}gCA'Y]|/Կ7ܹMb,>~q 6ۅ>I߉5|"OO>e#Oixw_ftUls^ؘMFչhҮ1£OR70T"kzH{c)L$弬%ThZ߄_84\_[3a`-u _*͸Pa^d$ 恘/pL00Ukpz=wo2_G{`i=^m ن}{U#uK­TO:>B@bs:]IJ1c.MS22Yg>8fgد-Myk[B+b.H:_iU7\{s`JLgw&dM5(}8@!c2g6^.{j>S2i13b$ZA]S&0%~+{h]M޴=&K_xv.~cZr~5(Z0td$obb+Zb=FgŒK⫫>*ss o): eSV6j%fT1.&gwWZu"vl1rq#=S5ݒ>$]_g"p3S2bz^aEmV{Zlwm7]0c*ϻQI3m\I;/UN?j!m (YsԲ6ڧy,N9eqz1h8Gbc356P(i}\ubCW,'Тdz?p+:95u^仈e~VK@~yZ-۲G=˹P*9eŴP4"Ӣbk4;gib ,7A!In<S;Hy8riOxؕW:i>X3%蛫 dK%vڗm}oQe:{C҅|ʏy*C ͼm㽵9dzK+Yɝ޿D)'P\iIo7#me󚮁Oo ;ٞk>ˤoazs Dgcֆ5Bҧ8΀ lVKk|CDT3=I ݚ۴3-%&"IGVZ>9!e;X3v;]p?\ ,bSicˬ` ] 'c9>OZ}];k&x!iަW#}eEmX>Ӣ]_B]O17a:\!cgʤƼ˒飤=x1:+'_(@}E[Kah\m&&2v0V;-Щ|{ڟF>{1|sUJSW@XCV`y5 .^W6}ߧn XƗCޒo; b-{Ix9ߐ`?:O^ ̸d98dZᕶ!Qk%>Pbv)ΘWzz 9:Z}E|*54/E,ʁ.iE"[K~yt:<_>|u/,<@7ry+BE'ȃE5oi^wדenWAo%4ĘW9 =+c󾳬&-w{JwDt\Sv߯ߡNpR=_P˵ӫ;dlSYYw׎8?mm$f?̀d}1Q>E.OˮI\~?F}xxt_tz}VU~MOM_颴I)q3˜J,6p$ bļu2'=UTW@´[W>u+<7M>s5.^t2-2`#N^˼X}֩^pKpMډՙ fQbM=xḦ=Zñ sH[%(@.7 mjݥ, LCNE zsYu99[c_꛸B3On'U褱_ٻV6.6|0<+}{Jr1K]kWaa\̹ z37#KQ .ZgB?HǛrCPEe_35 ~Y[J2$kTJ&7?ty$'d?T.J_$#ca}13Ws\fǑ xYձZ̕X_ʺ+o|̳4Byêx5n2זَ~xBU+W7ulo/RgǢp>e6\\ Lrϵ OZ)xq?ťpltgn7|IMtT9՛F@ie )& Γ[p[6y O1 ]Fkt4Ze ن65ejӯuAjL njtA։T/q/Uu=ZY~,[TySJ2vN^yY}B9&"\ZגC3%];#`['?e^joCLXG7W&CSJ:vmL=N! 
m3^(ҏo%u9P0z~\^~۷#m(g)I| }9z!\%y/WXV~Έ$t˿L2:"kŞ`cݬj]=_2/]>>MYc4jsd,13{{ؕAS|*d4`MoP-*1)7 cXI)҇gx^W>7J\ƨSV)e~q z+5m j9Ǽf~ yж>2!z_`3?*r"c ܬtGn[kR6h˟P ޜo3`'kܺ`<-wz:(Gp_vb^@OP4~-WYssHd3הPK:@ 5(pz oC̔ lyhÍe&u~}Žx%q?E8>``ap,+S*ΙN(+Xg\'1x%~cʷܐ΅}u/8ΠʕtaWbN ž[`(M}=+} &/3mt^?δ lcif\_=1\JTLOcrу=,*e0y l#g(bOJzBҷ1j=%q- q8!xLXGC}25^k6!ژnL`Ï9pQa7~4n=T9}Y[B)tB^Zה)(:u3ˬQ {_TF\Nǎho^kԚ)kƃִPqzϢ9cޗ9:@kn֋_R|-='Pv<<\do p#Ϻ:o뒭' cl)b_Nޓ<;;\Ϥo}Wme'!xo5?;Sna@:o|Edd膩}8\c&F1DgS*=m!NjtQAv9p?7+jy̓=en |Tag㰻;V睅;8h{e#9]j/ND: tmSZʷ-Ե r{F?׻?Vϼg|AQ܊x7oUc5@KSZ:Y]_}98<msh|!}ˀi ͗|7Pqߡ__n}O{{ygU/N_*¶e׋8"޾imKbf1fr5O6+5bg;{= 9YPM5aЁ={_x[OxI6:ƝlkX3%04}T"*F]i^w&8ڳcO'm9MA^Gy8Ӯ('V|t1l!~l|`o!muxqYFv( /RDҮ߇9aO]Uڈ=eoXGhc/W c?^"pʚzO|ُΌ =ʯQf̌TOb|K6ipI?0(5> `5F:6ʤ˼?3 z϶u L>8|dvph\wG{NuzcT.\ɫ^OFن.t쥫8Zvv>8=ךr*cӎ:bM#]h{t;|WȨ[WĕxN?i+H[SbL/@^W &Rf_oێ&2:s r-dOȴw= ]grmn<{<b}=xhW_֭߮dz N,z`&Əw͊[ 5 36m&8{EWxn*Z- bl$ʺi/A`J X|+s[oni p$d 35;换-9h>i^qvmZvymX {׫RBw( MП~~%/`_;nO$%zoFZ ?R}i uک*NUյ6iw}zUf|횘7Fi'?+}ǭCNI@7m#ṿ,s$lC{Xy RǴ/ыN/=zNLcMȵ)mez@X? ]D)?#.ٙm3DyXlY:%0Xt1ąîsHwW׺}p7Nn91ߡk6]9rR*x4agόc:=ὌE?itw8Ph|^86}Xx`CA*[ZWSxbWKiY)0WvfF)L9}e$6`sM{ZSWY.E_ QK~ cv{%?~W#n #vShzgLA`\q}t8@z2Gԋ`6siow ̺ա|#&bҎ1rV߅%C P!y&_ccZ.mcGF3q56ӆ ؉\[?4u띺x]歂_se?iEfjOT_[-c4{b@ޢNV#9}'CYߴ5:yHc UсჃȧV"1t7/#` i,yNT{*njLGtĵ_z|1CVa,Cj%|Z% -0=wCO/M$u*S:F ^44#(|3LcKFf[bYO9vf.j+}\--l%/^͸?< Ubl^+~w _ }fl9% ehz8#זk3k'ps%[&fe9S=mc3/-iw+TuI]g;V$^dFRn RiS`ږϬD[INrygc1?1Nw-43ib$G1+00~(|4uT]lMŴNm1|PӮdTb/#?+_?;{]3ICkڳ=> zڏ,wMy = pdiQW>G >_çYn|YH$a\d<ې CYrήn~J/ou__Ӯ{[feLkн &.jcoj=gzk$z,cO{xGG~H,Q9ěKy*6efbPvW}|ѽB3t+τc9W/K}]ElmԪ523G*G[1˔᫒2O/ּ9W?jK!ipiG{ˉ0K. (Rw̵P^GdIa+E[=s_\mr(c>[ɺjUEYC͏6NOo89!շ[Jc> ̓G*Č!o|¢֡%ۯ^?"NmQQw5IcV9p~1=ʁZ :ho?pQK2:g7"|̛?Yq$ۯ9g\m%znX. \6},3yckɋ)aO1VOU5X?@+eO};vVQ!yՐ; >b/ IdC;Ajς57l˄w>bb]$k#~I:%.ƠIsP S 7OJ{C˕XՐ͈U`#\YB3o? 
i7]kxry{-H^G?p9S9ҧRnϕ\)um| t<_ȍg(^ O9(tU&H3)σEJSڐ~t=}TSp5b̞#@(`_66!XšQԚ}jiS+4 #oc3]w3%~܇v`36/ƃE |[]2=#C}JDG^R {eTs`>B)G<%S \Ԏ8R xVe:(j);eo.k{m=Mζ1zE7tw/68U+?a聿腃gswTSW87>wg)јWn@|D[}eۼ<*+ ;W}Ioq6!&&6/숥Oz̕4i؃N>Mt{wEw0Z,_$c>ՇE.G ОDÍE3LO`'l 5+*`#SXֶ\CC'|ĶOӁXҖ}0W6$uنPwe y"My>-/C~`6veN~K$18oٽec\{ܣ>i}/4$Б`Fia&;Q g3XJOb1&ψ=NIb ^ ϒ1ms[/=CÑzݚa+yYy(6{.ޯj9-|Άu"H94s6`?WDd9{q̗Y"VƆEG?'Uݼ3}-~\ڿ>"oD.l&t8i  /;`hu[^&6 ?Iy Qbĩ99|hUJ{Ö"Y]ߓgEV=|1^v>a;JLgYjgU9sSS}DC6~,14[ ;ó;l{Obi>(s֟a/I5u!B?9?Qf{NK4c^%WXXk,ڣEqx+|ƽZϥ.L33CŻ`tR"{,M\㫖ci:41Ɔ=Y~ws; ˪N+I}g9IjoBژcQ\Jޑ6F}?y~b;ۿ*k)o߳~ӎ_QT]rukG7 \R7}3+3fi*2uoN;]۝<|;*n¿`|,twjUzf-eZ}~gdz-J;ɕg2܇oRt33mXyM/ "IkqLC }׮Ï WY€*b\˙"Qoxs[HۚzVQ.ntbBE'd(E3 ψIr gnԺp*V;DZX,Vx eMmݟyRFfŻmzi?7Ԟ1y;{<?MW)]E戝eȲc?#Wd}Pm'mݵg|gu6OaܦY][&'{z$guĸv1z|+V.diKa^1f5ԡz$U} |w[V_=h^ҕ-_l6o&uҭ˪%qEA]ϥz+};ac.Ns]ΰ1@2pd~~C%dU% 2%[Ĉt__GiE$՛Ű=2|ck{A悿¶}AN>%k+'Qk8̘'kV.{^TE*qX~qK;WJc7މ5=OG6xÀ/?쯁po%lgEQTXIj^+m _qLKJȼgc`ҟYvmw;*gbئ}[W70kj]6Vz?:g`n.D*= (pigi?s wn {*e51:W{sB[2&dXT6=,u 䙲'-;h)˓~%"ư;P:?ZY0vC9AŠ%]Ӥz t`BW6I{I'yJ_B0]uF?4t-ZLkmS1Pʊ=+WB_C)ֶŰox,ϧ=Pomb:}n.8p?U4D|]snI% wt]{:u!/>`^~Iy.,t_ *F/Oz,N0skg3_8ivs ZɷT  r3뢯I~ǘq͹`(>/ӇBWi냢s5KՑCc9|Ƽsį]s%M*s)!|䷋_/8@>tNXL=G>>LCB R,'үMCvV`]^C@(_0 rή=_ݭ]|sԥl\w>ybh[_twB>EnsakE [į؄;|ЙO:.-XmbSHcw+|'+iƨ@7b c;I=O=+47g:aޙlmcWjw w;e6~óBۭ.3漜@oU}Fm1WdmA2-5joC]קoO-?uQ\XZ3կ~JI`@3Vr4{ҿT:߱E:_'mQI4q 7%Po51hDV$IX#ض%q9SfkW]P˪fk+9Ay<{h7!|sT>/B_ yeig1ila5Xi&nvWGa4cZO]6 Xd$>"Wo sAI^nk#W˴ Α;2%QTJ*c65GRwβo"ϾDkW?&h3Fu_s%m.uۑ2}MŗL$ 1A_az,ڂ|B?1!y41Z5?mf~O3G"_m e `nӪ70kJ:KjG 4I3nWR.#?yey k-"o_ZNE^7^ bK )»DZvһp7M>VHȓ9/Q~f/bLlytxL_,>E؁8{\x.3-_2ao J?&_(\Iq̞N}e)Iʣm<-`=qEu--PNr)+߮/n/L#jFSoEM*?--y{ˁLŔqi`;-@|q~I33-w¶&^.}8;:~N 8^:>o/C&PgzAsk:_1rݶܳc;fo˹a?087A Ɨ'K ٧uS}2E 4آ;,;-Id^F9oj~=0}|3y<[^ɛ:qf?6-]QΛveSߪq3돺NG;Or:qVOe eWzoBLդc&sGCŖD 1UgLڱei[bm,cX 'n g<~)[1ĥO.koGTmNiym$r]=]5N=%GB:<.6}vHN{\F[}jka q[Bv~:_`_1jEVDj ,mVə|&8C s?U}nL*=/S/!nˇ|# w &tfJotqeP$E(_MMy@+\oO^Q4Vc50Vc50Vc50Vc50VO uL)>l\op:?0 ^@Y^/7l G3ua7cTqa豠чE5Ƙ2} 7hkätӿ i}5`VxؔW6Nm+21//#gâM zF*(;hʕj2 c^G95QfH8 6bK1Ϗy2#-t 
,2lK֪"B73+I[c1Kg_ ٮ7A=nO=~5bC"#QΗ? {BOg-(u.pL ,Y䦰GyqMW}C)/q _3srW%͗PBNd[^+H:3bOϱU+m> uO3.Ӣ-Nl{W\_Km+|+uǽg)ɗ`- !#'M1CĂ-p{8!> Ǥǚ~"6 b~Ji=[VJԅV5\~7AtžGka~VTd4F߱aM ̇!n}dxk;)(6{]q-X= J6$湉1  zn>wwؓd7^e_SE=?ɳ1zRyJ?ϽE>Lvӑ]!% Bf oe6ua0O[]nq{@j_=.674_O"V.>F.~E[9Y/wT|m*ݣ T {i "w?AbuVY2n:!F\ ʢ˴~g5f#?X4ObzǼpu^VƼui[Lo8bnçl&ho].l.9X r X9Nɔ= 5K ,YN>;wk4~ɋ4zsĖûnp "7C[,4!-[vXvRFWOX]{ i܋u"7`?$yt>9 waLc$k{Ѣh짾&忟JΣ ͜U路^˰_[i7apĎw,C03Lzž:Ip_tK92mKMT}Ҧw̥rkWe"62L @t3b!Ƞ>'6^ 8f:]W:6-bg/9bi^+ֆE\ 5Bra9JV趒qQ)-N99Т ?T=^!Op9}xvOVόS>P{o;D g;xJaL kR6.yHAցwוVOAʌ |_/x G!])c E5o){BS_v6iF3@faXFPr+3Oάu?&=x=[ăa;s/pk`#6m\r%C~wp*肆Ov;:|y90OAl]ѦZ!WF+%/% ؞#Ŗ7e6/sey= N{)d[J<Ռ&- S̶|3~dz`liIƺo_s!=X3V`U\{X?4#^2Wiųgf85KUi%g3>߬ b`;o3۝\zs{X#nuUtC7z_c-h ݚng"kkc޴|mb:'eL 6x EڑF`QQ`F&?苮um)u|x`~Kd@ezͲ>*əPygGh/mZghwn 1Gْ'Bl+}|U&KuvvԳ/nʑiY pcޗv{LEHڞ~HH64oN)]XR2oޣ(σE(6;clm~uAplb. '8m v|{7~g)(h鬄KC e@ >ym֏ߣZ(CzO | Y,#ı_ );#=߆,wwv}^mwQ+onW`];nBn3}YxYځSاD_ 2 & ݚGiJbg1-)wTlQh@] x˃+s>.8!wsu{.ѽOw5fZgqƷ 7|Ljim?̿k*Gۙ>,Yxfg<|[#_|IJF<܉r4#OK{#Fcqwa#6ɥ}Mhw|0/g-?1z2pTI:^Epi=և^-1cr%K[; :97>cޝYW tZZ;P5ia5}")+ZKSmJ}}@ZaS8LSӮ|&uu!/^⩏(?Q3(~Ҟfh v_suqF/}I+=_ ?a!Μfggœc +SΗ4Y~A0b f߱9[Xmk!7wv sD~\u7bAldG9S~oTốH=3.9BWxdR멋$O *F> >ȁ0J޴@3 3B}M?eMEKY`2ok`]~qTB\wZ譩V>2^g_%=[ 51ZNoë^^}Uݢle?ex:W$K߰+ڈ5Ju6(sv0}^ߋDfJBcd V8]}>b”~h#s2ҡC3]L a]XK ] ˤ/zs!>LB.iROTj\qo^`Y}: nC^23O;eҮE3ޜ/h4F]|0`,`i69 =;tsu}{1:99cɘg6iLKxn? 
8 #y$^s8`{ :QzZzR6|s@,}>c_r Y&fѬ[};tN{J/H[V{Chs RC/>iCvKS.pvx_lw`1%^/FZ}pOCl"z %հu>?LA,IZ3YgLdI I]Щ 2)2EQA %)kI e$E@e 6E(w{MZ'yor={=ݎUn9ZwOzrfZٝ&b~װg6u)nI~e7sN:pqڧ55|/'٦abOR|7Nn~S/[܇S:zD}#S,[7v] 1t>neˬҍߥx'q%IW>cH|#gO[F|fam哦>X4l3~Ny*:Sr2_dx>n݆e[_<, l=m5/~<6{7XgJZ;^x]hiOS1*i^+; e4}]ߴ!lc5 o(u!b"ee(e|r擳 1{sKÞc[9N\?RwW?ڪ|Bh.y&,p@\o,a_mῚXƤC?& ?!w@"?~#D=oOg1SPڗmjO e{UCRF8|[I¯nRd<u֤9w߂̧-J^EEy∧W8=pPb1b--3x?nu`xI>=X L >Aźb=&ضU-<-d~gOk<ue9`u{>+g57]1uBZwlUc3omas8tFڙ!7\LMiW2RUK?ceg~Ӄ-ݶMk:΁R[Cɲؿͤٛ1 1><:[^0AiKtO&'tޠkVyyBx#>v֌#eèhT4symyw7<".Lu㡪6k+o!J|'|AX6r0lTmv Ń2p=M-2s»pX8mA_qʟkJJb鿏VvT"^< cV6'#uṀv뺯 , ᣤ`s֫S-{e󐮛/Ja2vr#) Ŷ&~=gqFXM)g\"ؐKA CC5CM' \U8}3~3ɛ5+Nlτ}P OȤ'3^Rڙkh@L`[Ys.[ |}ϝS_CZ(:!OfsˆM8G^嫦ܾ46~GjYsT6j`X\aDj cb]l70LO:uZ <-˖d)z!sH[{NV}3,%7CyT7>Wh{v3 ;8 )kԘJ58'y?prnuzZ<[ s)f*]]poȋmaqmCï4V>S]{y>ue )EWN9Zf[ bb|4')9{{q C_Kڌ bIro`d)Emu>0o q.VN' l g@^ 0CqVvی릫h%zMSD$Zխ^b\zk xlȴ=Ee{+=uu<ÙgR?s ^:oez|t=(}i[b-O,^ 4]ԶX咱fuTgIs1|c;_ͯzכߵq(z3]>No ezeoe~z'ꡱYKMȵ 98)40TIxVTSrBҘgo<.߂H/=o){48^?oq Ms"'U(>9ӻE9?J:HV2:v+Aj:=kss QJvg~ yWݰjg=HaZ+SsC?ElYM3qUgBi{!eot J <{J+&da::w6pxiu.E}sO1?= YOھk$I99c3h+섽CDz?JY>G{D͑rcrH/$ 9G3u+5 X<;PyV'ֵC]xs7Z澱chE>!$'uwV5|9|}E6.ɾ-JJo'oo,[cOS!FNM`1 W×дockM6Ɛ"~MoZ~+۔c]Зmu 0>&r.߆ -:i~u=\m#.uM^` r`JZ3'y)A \~VeA q 7m-m{04_iZ"g:;~L+z-8PkHrJh`t_%Xvο^ЬT4/yX#'^,-J-GZf89a9A7#Ń ~]ßYj%75%euk wJ,:ڢC1y>N%`I'.=BOV&+[k`t^h$WOvיzo`P{?ng5'ςlw|Jg|=t2&\6ۥE{{áR߈!O|w%53O|q)c^n)D@_ZI `*`v4mWCh̏ c=)F^7l v&<`^: BzQ;e]-\6=["W\p۱o7iY˻7<=Rw%]ʛA;)6(5s7_vz>bYvSڻV]*Ow{4:u,]uAI6Qϻە먳!HYބzoe0h4Hm񎲴F{wR٦1X&q4l~j!{|=l ;1^ni?i߰7:;FaxȲ9ӻ}fqc?n`[G˻9,(XWbЍ! s/,\moHP5σ({ukV$YCRx@"{.\hy4zc l7lݎ«N>yngbH'S!kw}oΔ3߶^'~)fM޲le:; ko7i|TSA)7dN_zmx81b=2qqcgjЂgҟm%$ ܹ n9G7iىπW -uqk; ] ~ PϣKiSziOyzjҀv2'kἎr9PEҖ*oL[u'žF%1_Z0'yvS_=w~QHOxÜj =dmGS|kM{mP.:mSNjֹiF\J[G(`/zLq=1>+Re,/}JDŰxx%5hq m랐c)JzLs$Ҵ~˒88NSnb녱]`spsZwSjщ-^]NuoAfϣ$B6:(U5輠z .6^Ve}]'Ko3+v7fŧ\k' _4s? 
yJY9uYՙcPO*#6ůئz*יm4[>[ wwKoD LD LD LD LD LD Lv ց:kDG;Zl7{J3 |G>c Lnk`n4c'̂jf1U7rJ 8؜|oxy6j[Rx"iR\4} 1Tw]wqV{ wR4']%'~DG*fAAbUmVw9>?uxs[~bG Tpb ζF[ޮ\s{6So C>S-4tԗN0܇u >%ړ[1 3K;6y77ř$jOm|&8עL s/c4x?1 '^ 0am W]sqZ?mD+YXPjI7 _L2mW%gw(*`W)[ro(\w`l#O:e^?bn˷n-#}vqd}Z*zؽ#wfAqd;_Q'+t t^3O6> ?Z苇xB0f}xZ(iZu+z+`q&2>֚)~~o<[rWWZ̓žmJ<}A>õ͠ӔQ)̏e]b9v 7BU}C%d9h򷒷7)ӥm_?ebڀFmĹ̑$Ч1EgnI=ū`|d}zw0Fs1lGqiXį +[&jS_^CdŘo0?vۡ91U/NiZw_jy>RK |o 6tjӌ*jzx`K *u̫]-Zaz% f1fpf%Gҷ8DځBE*6q U_nZG^O?yu[E4KQ]'Q;$&׊ކo}2YP8 }vıiCN{0}H^s2[VVҶ2'QO.G-12{c|8?[Fi=~+ѨvRm8V}|S:*{JtQ3o#c~,?)4CQ^~O:;w܉sڗSR0u6n I' de}jWu\1d3]X?mQhs=~#-JOA]NuCW 0bgI}㗲F aGYk n[x>_ߞXǾ,'<|hTFѝ>O/w`0pӓ‡W/0H{hY(=ꇢQ姘,(U^qM.eRtxS7pvcAڞj{M)p(mlЍ#\CScWy5ab/C q+Uj%XF f?KF&Zfkwpk8*mi4'^QAξΒׄ~w SÆs&csf΋ʹf+zw\:s:bsC +֡G?$Sg.ZӀ ۾\ZbУ|^{φM癪O fC/ 9մKf#Jދ[`|TTPDh'#gMw6_ҫwW6x(A URF7Ʒ_ϔEt{麀^R' ` =9YA0@ͷ~Rvl4lϵ?q›ׁ%syM{$!ޡTkm} #nD>g>xaޛ3ۖ75`˝퇾 gd-xQ* dNq'O:+oZw{`_O9\wAJ%e)^WT\d) &4tU`i k+GM_ .4p:~VR|wM"O|~p|yqCSK'5үuvA?\ lb>֟q?lhcʁJ0/c}xkӰe;x`pS=b5oEjU%i5N_1ֳkn[ }pDn͌פW|˾az|Ga`q߃yn B$u ݔiQCz[t:S_99cߥʝ| r.gDŃߜԋ K?O/u&N^߬1PQ`1CyDA_+žj{E-j8-4iCYӜ|HhjEΛgj95}=!&<~zZc)YC]%eg8M9mqK*0/72n}]^yuɭWf+oG5 6~&{g d|{<跸2'0Gfߜ ]+5}25p Ju4k暒ruz}P1TOgI/!s,,wS(Ѓ*m?kuѸwlA.seC`3>Ǿ'z/0UޞQt" F{W*n%VTXkq`` qGK_:d}E` }ۗ,`s`JᕺuQZ~4"&Z/VWf IZ!}ʛln{1 2盰 @SIߛ}ZzU_T<|[sXTz}S_qԸmVv7aީ }2%jx6 +'M~0xv=_ZX# EsW)8\Mza} -w_1nsAy{qUT ִF!Ʀ3o%mr3g;Nq zmAL`4QQ%]v[OAet}Xgg=޻屹?4*WDGtB/eV.\HBOnezw4ދEesϊ َiՋnzDc-u^:.ۏHbx\ꪹoml>P_ fŁbޗ ؟_tz?yKm4]0x*-*WZn8V̙vfϴ/w#ârOPlOmoppB 5\`/P#6Y3p}E7C{m%a( ncTTLF|椲ס7ۼME3w0Hy:,=)< ߢZ2"E' .9R«S;a7<͐3C,[}y6SS&Wu\a ^IIoeɓdAaM)]R]bReޔQ|y[f3]:sB3ctw7ZGl,__hgyP.cugWk7ŷ=2#|WsHv*e2~;s7%r¦ge^d-[I R_B'ќT~BmS4o!ְŠpJ ~vv!?M_,BJ"NUtJ]a){kvٿufj3].xc3.=g| \0ح0 S@*V.0)Էj/t_ H`ѾA=h[!udISdQ'nj.;{M۴9YIPs(CjݖOك噖, ,dP6uX7'w n_kgiɿ;WLS)u̯OB {:[UgVvy?/Y>(Q}o}ϞbOhE]RۃiGh(f2LeOw!s"&)]1/`pAv;2[:{%?MHs1s(uUYR;ՀZN "o{sIjS; L+|]Nk?v,kr}YrN~*ce?кlG֤ #`(+!]3<*{^F_g|y/;lw{DT|+cp \86/Uz&`6[5tc&PjKsͼ7h[t/🃜s0+9#xvvDY{ޔ~f CSTsK׉ 8 u93KW2KX:u [: !8% 3}x:8Cv`^/yI^(ZBVRa ]ӟ#Pt ׵wd M%&T|/ 
^£na/D:Z_L˷Scﶸ/W7=?g ~o#|P%,taq߬OmEy- YǾM4σ##lA.Ќ1^vkC aIJP*{=O oH` e+O ZO;E즹&Ӵ-栰Z'(Q+M}g-UVV#ƙ5N$m` ~v#VM5Ӿ;njUHY^X2ac򭟒2+]ǿB,ka9ll{ ,-t>3},>e)p\>a-s+?C-iS3߄ʖ?w5Q-߹yܝſ%̩0ާ\M7Ὅs9mSr" B_s͵1yo6![ `jQuT5 NuꍍsLk;90z\CT*.=Nۍ^x~-:: "'+Y:2T 6a<7gˠ[X5xؑ = PQ3*x o5[LZ3V:ty#/I݋uBS~gIޟږ$G7`1=// 206sfd>ݔ9Oۢ==MԺؕOk?˶R/4x;auk2S6u<'<!F-u0p&Svw^gn9b_XI#q-1 vRwG`CE_x1yp_߉P9岾,.݃?w0-L6zr۲bGlS~u-+vn $|X+-?@U)YϨ]5Jc/ a6.0Iz9q$rc_T;5}i(?\2E0wvs -_n{7m8ǟ$ee-9Rt+;6z:2qOZ"rjt|*D,{sjENa;k|i|9uPN;&OD LD LD LD LD LD LR 1#ar ;Ӻ㾇\}V9Be9ct菸ƣ7Qu4Ohq>BZK} uGn|)>+:3}ރ.؇<3t=Fm*EĎ7go Qwrgwm֊ ^tfJq/Đ˧&;hΞ͘B{{j/]՜R''qPIx>DnAؙ~Oi.tJfqJK4'z˩۶+'4,:ޭI'~ug>jPm+o;US@C=KŸZpkq+vcmZ93,8qz/6D{ _!o\qئ~>9Uz~MdػM_$Ga_`3 *Zω69f){>6Ch?X |b#E`CX9|o{o6o'q~af82W_iڎi(|ҍӷ>Zb ֗ o0AMF=y|L]b$>fhm!w2A*+A6TG4|s%ўFk|@\ ̴'Mr3Nm;^qą!>Dކ\s1 b\jO{c]pme50/Wb{)Ĥ>_gEWykEPL!W pĖ_C.[\zƴ7^O{[w!00SzoL}hf,RÁKr2s &DU q#v>N?iQP3-?xg]쪟j_.~mXtz)WsRj'۰C cMaZƚ UYx) _uN *壶*Cyܦn['=ulNiChC7{6UU8;V*e'5q_k+ؖ]w[kU\*#޾Y֛#_ =*CkUxCh#]{g⮮n}0n=/:A:*iϡtlBc$9S)UONkeMPy4w]<;Z%[ U:K r6;L]6OMtښt, ~jSLyÏt'Ugٴ/s%bVM>0oUxMy{<]++k97딝{)η)WQN六MJoglcI精geM?fmjŶ0F5<_q{bwYK}ϡM)yc0Ϥ+OR:s={KJ{c9^7[kxUS{ij}^`tH[ezAY}53%:y+`'I ۷ ycaєO滾q` ~*LMo_uͤ'ULǭ}3Xh"1PӦ> N[%RnػR.g\v|Q՗;YS;n/u-HuL,їf|IW~)AH?WTĭ5XV79f1=4{/n71hMuH n=C) KjN {V -ݮ^;nHk4 vo=7F؞汸U UY'*X?B]8[-05zlW|ޗ[Ҙ͌{jzl=Uey;gNԱ[xGl(#g+u؏KC >ζvj+Xfniei0R[a9$1@L{Sñ -ϏRkӔkmJTNX^C:Ut\bm@UigH| Ҽ8^3O]/vyn=møNsl/@y_v%MovF );ʦSV'ȄMNI>Rx>UĞX7ZU>(^0+vez9 ms~~ߋw} c _7鞳.s.7B4}zBSQaWnxNjiOM| ]9^o6oes\f_ so['>V&M`‹8B|3 p ;(| ~CflSmm#.Q{:[,?S9joqy?}xX&~ ;}WK5vb'7&۸o kJ$퍽Km{~cobTGDLa7 de0#̘IqBkZFǚq߿0 0Cxʱn6l;Uڊ0³3H<ĽJHwlҎFBs4Y)X u}~1,>Mc4~y|ـtbQTAоAaAhoeh㮎L[wqu3ypeq,%\bm\[vukmW5)}t)|Eƌ t-B黿OUFTNX(5L"靃'}caCԽi*~}#$(S1RVe )),Pvihx7Αm7[r mȵkuv"c,0֙rM)^[۟b/I˞ yC?5::W%>˷*o!N<_D҈G{9)mU0윤3n;U`fl<1I[ʤ޽Wʫ&ŏ џ>pw'} |y=l/ka'Ym!k85~^|,(@[ M}ĭ>3~{D?pӮi>̦__>I ,$E 0>lᕸ5  鱒>h3ˣE|yr-k,6{ =:SnN!mx'tَIa;$۠Swzk53&hマO6vzَ Ox<9QyP~zlz8Gz_Wlї[rIM :k%6!By.Kjiz&dqmހ~M[LV%4;V&igz\/SII~L<̗.C^-u?Э˞C >X]NK.>wX\a70ʵmO|-0rd3|%iOJn+ȦJ7$ޏkRy޹ 
I)ð5|Sq]/)DZo,[ne^sl0qksPHM˯\[ȴvScۘgR/_~6FxD#;SyMYvzL#4\墑ؔ{§M3vd6;r+Og僪m>q^M`| .U[j_zۧJy <:mrChRmC3l 3XQ̓XV$H]Ť~ )#w͙sEﭵݿޟ:v?tм&έn;9FQ_TWoz3"rSNb]qfO?Z&ѩ'̝AWiRW}FK9%ZFs#obTG[LKn鈛H-#VWrMsB7|L2tŭ_O3nx>BpSĴo{dӻ~A1f\?qʨ |.?85y}1g*'bHSM>|`xC;>=,Dldy7{JX384cmQӧпY'N^iNBr7{}xw? jiwJ(fYc$o φPY82y2ꇞ{V5)<ؘ̬#kAϿ\:[ے¶o!P(՘+;)P=Yo53MypfJ4kh;^gwI7kzZ*Cy30t7 9xأGP;UoTt~Iz e%FTu;ېևs \?wZc/b5ladžFNja7?~SC/t.{\/gӔ<ޟy{+ w}7rNʟn糱CF&a2SŲc]}%zOk_`UL?Ro# ,>qղy y!sx[59JXtlo QatzmCs@S֡X/߀~_5#{jHw[({2m<ؽCp^|;'Q-{btï @vK4/F {gXSU/2CQn?pwU)=fF,{S79Lݑ?xBdzR{q9Ux:y”o;)%+g]ڱstI_ *n"^x=i Ӈ׏Q( !Y \VfKVo6 6Yc2߇.6(ŠY=rW2jx)˭o"4qV~󝙽 :A?*p?~Qp|η6 oϭrܽJ>{]qs_x94ˤ2i\_hQ+Iˢx\'J >|7e^^§H{^B;B}%p y'̘lNg=a?%DU~<vji3l طl8% S2~9=Owqg}izrw ~V 1 %~/`=%/'5^^L>-mVSAOL΅X2M.Az/d%Oiu8k&Q^UV_>`WL= Rn>l*wT!֑g^\.[wHakOQ`lFʔJftEϝU1Gz+y+|bL)(vK .B\)4w . ,bB[8 k&}gRO_g1 qI3>НO b߲, ZI=P6wڝϦ;M#OajO3v n$OD89:M{2ND:#CpM.._akU(58pы1ljNJp|ݗ[,6/fk|zPuoiWYfnjX.+6VlLsբg!Е#nz=S\[(4Hm>ge d IKxJs hCm9 ؝g#6<'\iu׺w&m cBvoVTmuNpIݜx)W? r{ jS|{36WV{MV?&yvhi/ooe&j`&j`&j`&j`&j`&j`6] `Ax?sZXӥ#H RlgNߙ〻-f%UX5uޓ%ؿIc3JϣfY\0lŌ1$)?BNm $:~G,SWK[wj-`ÿ,_#K\k`{1Z8/FwEt= - NaZ#>i/6_l'coe-b '`1z2Bi{:h{4}F'gƽE~Q,. `_gaEb)p*N])c[8aDE^fOL?tgu֯Z*d3궗fö"&m<>=\#ף}]chx,R:MsVb/-(vVBNW>.W`7f }5N_}[{W \#<#/7ƕӾ[i?Wtɑf؞H=oW=|)0{\=o6*)`[r\3]]NYF9^mk|)PAE`Jgl8C?yxBۋu@iQr~S= 5V2ƽf`<#i/c}otoDL|!~l"4V͸>ٿx%녱n?*v8\:9G<#yHl2Φs=F<՗; C|Bwu6SVv_ =oؖlG7Ż6Wr7󆿉}R4cjfNKc9m l;m`"~}2h}Jh{'arסֻ [om6ZyI%[lR+=oioK ]=*$ɇ=&iŶ-|]MZ#icԷ.㘼{a38%\ Qv^YN}&Ŏ25bM?bU}{Aeȱ?B7e|},/Z`#.A7ş<=?~>֡iC>i#J4ӀX{j`_h]/uS[b~k[τs[y ۷A5.g ǽ ǭFYcx>le^)6IM Gg,ݜ8հg[{%"orpp6^ϥb>QubE3xД81A4akːjUH?Zu+`:~ [ן ]K"ph>'͇})7: =C:By(Ӽmb×l.F,`a7Mh971&~O) =7*'Ӆ/=Ĥ-;K?1X+|X]zU1ȓo1}?;긮{?a>^iuVN<ʽ}ǘoGXh&l&c,yŖygHlW~ƺ_|e튗CV/MmsQ8Za7s'FPUTo 2NWL ,}}WdސcЯ Raʱ]yn}s/o~9Cjmߞ䥯 V~BCvxO"~4q@mp0 WOay_ 2Կ%DZ3TI-,-6?fF46ngBͶK lE~9|L^*>7Wbr5<*%ھ? v>c)a~uSwW|Q{}Yt,tXF,CPLŸu4Z۸x7>4U miQ/~~J񻵩SVJ}6w)rkʋ\r]T8VuOi+&x@EXa :wwƸK뱘NC;>YPXC#: .rG Z˶ 98~oz赏?b3rMᆸuO&Œ[u~կFkW ccLMZ BFЙS5ݪs? 
cFj ̹#t bCn5eט^Cr@W_um(SLMzJiRDkZOo 2qƲ+q=f|k疴1~3?Y:6b=>:O2t!fļBi-ov84ϏBwۘrk|ۛK4: ~VcݡAt:ob=͹c~Y{T9=;Ќq7VS|ւN'$KCے!թIXӚ:`/\Z_v)>瘾2-/bMT=ؽ/(gJƵnG$5Z#cj'})[|emmwBtV÷c+r#9֟C(p \Ҕ`{dݞB|H[IjvՆWLͅL̹ b)ߎ Dg>!zRguzϷ>Ulв6nl2S΃Խ(]±Yf5^!OΡmrQ_H^_.)cw4i,MAiw՟޶&M#2QnJ?֓b x{ HyX۪AqZ>+*3e\}w.W47]?)y2v:$-`yJ6'/4cP=bMۏjqZ_6% A q̸՟Oں=r=BWuSzl@? 0e#bPOǯ8OjA3WhV_zVcO 2ε:;q}O$碽!o! [Qrղ֖u_qGSb1ɽp-ƼrtXzJՔ*[_'IXn\jM?"O^씯_{u"щr+Q߲~{S?bq {cUGn,r# 7]\ԣN9XgZS<ԛ& Iߙ.R3;9;Hb;mF7<0/ jOwO<,e\i$3&\O" yr87 o8~ӷV3p/FH}BSq uz'!7Aaـۃ5oc[۬;HrIKɛ v*Ң(d(`:@$C(Z $ qɐ[: ")( ᢐa) )}Nɛnұy?}^9s|k$@lu\_!y@>zs8+Zq$붪)k?+j~ϙkk+3}mc;yyRkbgbBT1%XG*Jwtk2[nJ=?y+1Gڱ{{W52NF=fP=4b^yNނnnӠP`K}wx&\(Q 5Dz'mVv' #>*FM~@.o1S* rIny^`7cm*51ޜ\TyE9g?ضjfGb=7iS}DÖ 4ݮ};)DpyJ91׮&̅;ʖ2>moc6? I;&~wF>₪9rx'ݻ`d8XC-%gue+ϯHv w09*^8694+mvWUuik iou7v17k`h 8G^Vő`e,O̸9sCGQ)YRh|y2V/mmiT8P@T8P@T8P@T8=mMMI==lkaZ[~iNj9fG(G-\_56um^"l]$qO6N\捜Z?bΣL~L@;ᷮ֨gh:=%smWu677@Σ1;p͐1S6Pm/is_T bbY oVb_er0_pI'S9Kʩ~AH{˷~Ag*7 ŷRM{񣪪~:lĶ#Y#הK-s"Z^b0NVǖ{?y@LO FZa_wb+B[DE·'29p%!= Kqx z?N=AܷMJjG <ԋB+K{(?>ڌ El yTXv_X=ٓ桉Wu GuĹ=m1g|' Y{`|i jkS>{hIYsCEyb/Ià;ėXxG@+cKnv[8>t(wevMF\㨾~HMxl ߢR^[kB/9 Yg 㗯`pyN1J޹\d?NS6^`Iݾlk]Mkǖ;xcczKݽ׏Ho93ƖW|Z6o08'\^$,/ϙe[}'7qiW;|x-#FUBcZ za| wQp*:BB}GR+Y2IfJþ~Nu2G:^Sj 3 ɕڈ8u ]k RɃEֲ->\v=Tg)ܗ_$rא<͕J":m-ʄy߽f)9=y u=}\_ZRe.ߗ.}*|%R/IF״)h7mݲ} ܙ9i{ͽ77w?Ќwoi 'C/I?lmz_g[D DΐGZn>'C7J]t''04:uŶU?,;gIϕzFO5W .0;_R6a+y͈w6Lydc|e[ ]sMSĴe` FW= - c޼vI }i $6:)]#?aŞ/is$__7ƅ\g]0Sh}30J8|g:K"3oCcu?GbwOѮ7Zr?Zȅ3`+e< #˼7'4` CgI~xn ,*{Y/uO4SU-^tK=߉80i,OH{ԩE|or!|Ē lw`O ?c9Ǧ3}C{o7u ӊoS@bTkG_ įo8~m/El x6BT%?0w/ 8SEkiUe\zWԿ/ XK %0b*Р|*|9ixc7V_wGoIM(|$`ݡ>@2^-u|S҈ǝwR9kG12ّ-6r.!s%¯QƋNj.b7]b~ X56_C4}ٗt|90F]0 [}k4Yt_>.Q*A1T 2~mhkiXkW; r\.xX[,D<Z=΄n~ts&&4b}F :y:O+gv_ˮw$=KM9 _€edX?ǣVkl| K܈5ދ&"cg?~} vd 1P8ՉT1)\)zw7QY.\JgZr5hޤ' ^i%!%6/:<꼰lkVZU>_Nn aȻ8Fl> ]!\z[g1RCa=C̱d~uLaK^|j?2}713zpcvF}Z^[,pxXD WUU= PT-ź>as.ˊ iOu̘MA`K_->߇q^+s[uĦ̅Y5'N?U쇠1a:3~V![ɇyVX+Nw{ڋ'l3Wދ mWt5C~<6k]9QJmO )zz2[a9iόk܌/rm jmsRŸC,}o }2⺁^*pmwsWZSy>WM-἗6UU}zsRcm 
MWKtu`Wn(]wy{$u)^z]7/to.TV8P@T8P@T8P@T8P@8zW+}W؁st-]9rEQ\Wv\*>hZ/}C3i~H˟;2׺i7rղ<0,%qo!^cf;(sR w.Q-} ݭﭪ⚯3ϹoU-=jaD_%]&V^T0QSNW1N`\APB*݆njY#SR GyvfEn7~p^>XI6RT@l ib?)VBy #2gL мks4;˺vIimrtVq ss.1dRA-=SF|z{ogi=Rvq:Ms&MO6̢?^O}uG܂sO[@Uy z϶Vχv?T N=t6 __ٴ7s; [YnlnS ^GuLX2Vx_zozuK^U9vLҍ%-'G;L{>qMy?ߌL[i F)۟% `]q䂨ŒҘ:ٖ ?G WJ:w+sƃ4Kem)t|X|ҍRt[ެ c#›/Lf;; /ؿB9)v~Ў#ϙw=FޓO݁K`o|>D^ثê$]B%O쟑?SR))O0N؝0;ؾ/y M$e)7О 1_16jvAL,phvNC|{c'>jl1 p6Sꋍޟy ۢƺcvh:W חkHy7'8KE;)eͪ|bhF}P&zD敎~t;A|Y)+: .>ke>bLӰÆ..0+x뼦 `bGt86Rn?rᑖVZ=a ic'zc,12}?hӑ-~=O[ _fL;Lvͦ?k4|¥MlQ2V ". i O+tF2td~"˘nv+>Ҩ/Q볰K?Wl[ _ހ_ u{~RVƜ{?5:vFfĒkGmX-gB7q0LJ1:V~wcn ~܉|ۤgwW=DvkιM2ό^'ؖd: x =<}_6lbi鲻R+I7=Mc7L[RgOYFSSd#| z~ׂ/ng6#l}Et/~ؼL֦u<Dhϳ.l44(p*CNxo>yСUsg/7K }?YB?I=@8X4 TF7dAoTES [?_IWg>O8y~--L\}˅q1;Y u፽Yޚ̾ǙNu]/O kUUY8_[5'>GfP8v=hSnUc7'.ycǩ7ĶoW X'%|LRh39cY惝F/Q_s~E9iw~:rE0~.Y>!/<k'DvZ y[M3mkr #G,7ӮxegKO%chUU\iD+uXeߊ˒?M_ogYr4ǔ﬋xd#XӃlZGD+MWס oڒ9qFc4y:JROƜW߽?µ#lkk~V`:{kwLj#fWo,iDM~=XmGA Q1.["oytǵ}.XkKwbɇd=} 8_?xָ5c[hfw: -݃\-yqCxakkY`HX29{>+O_Ų!Z}do zM#S0kIPUhbOI9YWK$Vϲl!쾯A,ɹ]Rǔ晀asqK~#+A@ZaR^ uN?Xnť; OX_c;e3ײ8◈ Qqmal0 guwcYZ_Uuwwǽ9]:_d5~sJ4(Ljb$Z LO"u:o ^\%lֶ) ϖa[b9 EMf_q`QEVǜ4}FC4oMIFN(+\ϡ (}~ ^)*A]"Ҕ-Hʗ-/8*5W@4w>1=|Cso)to?{z3J.Tek]B@zO|1v1^.^+עs\¼?%ޗEK?"8sYFB#k25SR.G.UU5̱L`;!qiQNR&Y=';+ѿ#-uxV hǑE 6E/u|SgӶ_]uKTK{g~p}1ߣCK ޴cSΕN[|UM{ɈOSеg˵# ֕^qHIemB,>|tkq\닭dٴFѭaC:{m9|gKKܿͯ~%B7k cz0νG3䳼Qh2e|H >)wz/'+$䛔,GY]_sӪiGuT8P@T8P@T8P@T8P@[*zvwKQavzw랶g#$oC]//͕^f66_{/7+9M(lH^EXK_3g5Ϳ4>>MV-mmNu&̊}_z8zFdA:7_0 ҟ#':̽Fx/שr!Ƣ><|ᾊ}ԭ5B}Mݔ{}Y3pԹr/fe ؗQg 1l~dnx}Bv%׷= @J))WZ_.CT炛 vn]N [}l>61!o=mPh?ӺJ_E|9o};.~iZW}vlWd21e W5XB4X \٨VUX0j2UHt_nxmui?Pn K[ q'noi/N& "ݮk|\ IPZs=جZ Uj2{ WEY9y kaTƷ8ldtވ*Ήg>>=Spb`(Ga1yo9tN/^-mx~dx+WCah& K_}rx2t71Z{Sɨ>iG;;]}w}.1=qח+Х~SB:>[έ,sĴ·`؈Wৈðџ"2P8׺\s1Jm!en2tEEr~IGu5q?uE[+}0 Zc\ 5mlUyZa `[~ܿr #]Z[,膁tsǖJ;3r8IU޾=6ziXBM 0bkJ9×@`jS,7OO8@Hƥ62 sO :"8}ev;u-bMAhA̸TFWU_@}:W^m?gfwGum,Y%+oO>XE?蟠3.Č69s8pĥ2kYEo׽*!ΫP뗾ʾm^3=ˈ3=OY/B|k)+#oM.Wӡ dC'6=9X}8PCC4FT=+XcE4X.rf9&F0[")%4hvʊ)_Y&my8*x|3< wRǃ<&hJ=[3o`k_f^8Q[+y+\@dJq7yjIec̏GmxP9m<1nU3 $ mpcS m 
\;5w؎'0*{<4 lԹ7$͎ЉĄ-6q#܉_/xxdZM/vpE܀[PRa|6I#1Ɩkq??2η" ˓#ˤ716|'%gB3p$}`am@ڥ-f+Y|^U5{@l} 8@:,Um,Fy=}=T؍v5zHa`-Pع؁+^S> 89ok)_L{iϾ70~,ӷ fO`;0 >1M}{GV|/xg%u<|"ӡpi*ҿt?c9fNiyS.#ϗ'gA(ea>=$7:amˢ{A# vD̸/y1;(ă}[Y4Ɓw`u0}16B> ~ʏ1/X'[SK[b@Lj]O=/bBk%w kK95->ЁR7?Xv[zo)Qws;<:xo%XX?Mon5A`CU1z un"=όL2W4j/Rof^XS$5 kJYksэ9c X¦kT<m:U'%͔=B lV>D^Q?r ,kPuY>7O}138osy/ugͧ^8Ud/]2<|{ Sg0M7Կ~&׌H0j]Xj2cŒ 8,Xy/GZe{6See-YݸwkT UC4=5<*m({G{G:0NGRmQB>=H:)[]g:}OH~Wi8t㱆74NIn1!@- |⍈zsEW$}Wk5`HsmW@&\{/ζxCG{{0GIL87ƀU֢srvSktQL@O(,C:ݮKc\v$b \S] N>[)8:ް+eԝI}$o'82rUܗb]i3=-k{;.i3^MZҵRs_0UءL?2u~ 3駜&F2th+#w6ezr3r0t ~:)jS!W@?Dןs@Z= }R܍X t2m& XwsW%>8l-.i:4^fL`y1Y >+b>N|MhfzWj:]x=ko2Pb\&D,=1Ŵ׺>`6m{FV1+vPG{'@`쵫55ꨜ'^1My88_^ӿK%R;[ u9Ы)iν):=-ֹo~s~)Aչ37~d@ɶk?.zn_M>+LMې'?[=_dԮ:T(ULSf\[yUrΝsj#΁H?g佧yt޳*U,zQX7=Y:{ڰ96IcW.s%;eѧ*_:|˫⺢H׺~W[sj??\W8P@T8P@T8P@T8P@T8%r@meel kfI3VPi* quAT ti{ n b@`Z<0aK5֞pmCTTˈ={v%fܴꭝ ^tZ׉li*>lT2gCK)R-{ 9l;M%j.Ojut /ju|_l3烟myHֈφHn酭 {*}z|}图9Ӳr~ƩlG/]XnK1>Q>3=Jl:>i'kF߳?6B?oC4 _bIki9+ӽܲ3ڗƒ'hWMc}'=g𑭛-t@S6icX>1>}2?j2*~s);h7M;[/0xC=6Œ/4S?hu?w&jKȿN݀)|cIiu忁l2ɴ#eZ_ڌJQOun m]d7Ŀ={Qӡ`Q1a/kh}u>wjH;K=kUqsɾ"IԷo</#El=j^ sѷ7C'4t5W4W _-:/_ 9r/D)1[Gn@s_|3:ľn]#o]GX7d>' "k͏vbAX>cϵXJmznȶ 8L:Sxي_mUaMG~l?PyNp'ٛ72QPq vgw541 }\v0|JL^m-)¿Vh%^`}Fotv.m#6cysGUoRaKaK1`r:rKP~WH=_C#>WsCgAI9-}V6,˹@WUm{GA`T;`K7i쫨 6 yc7xmyF0 ygĺT`?߷d,^)®k)Q)*7՛~7~ɢޕ^s]Okk8Z ޗs%lg%!fM4wn,ѱj/\%|yO;_~ pE쑺fgU'?cBddi`@91^mA.7)~o4-+AYdS=b(R~%jgTQO]zub "tQynƻY;o*/͚W} G>}SSR_44sM?fg_l>> 8 `,)5d1ȋ^WRӬb֔fNW+,CK4kdTmftiZC%kiK~_Q6+hPR{twq#wȌA{ٖ_ƘWemc1GE89;"`&,ĺrk'bU%XNTI ̅U [7LY1.0J>k1i4>^{%0c~g3{'Ҕc㢆@K~Hw湒>웨˨φC`_^ .P7#|;ƘDp<^ךqnGkPPNiչEV'ry7눴όfMko?D{IL}5I`P:ur[ʶ]` p)SyƙM|RKVX mc،쎾v#?:_bXowEdw\z }vmOu݄2zRXH͹8ꋵ:Ϲo:c3̡@MTzsXK,q>SmHؚ XH],3GJ_uΖae] TymkGbae@]%\#JmhG,0<+1 dz]\Ut kF'\eN_Sj[Z i^ObR63d}nQ8ϱ0X;z [gBg4{"Jv5;vsWSYTsҧ+eƺxZ$6ڔԙuV^ޖtV6鏃F?ioJOq>{Ճ%ɶ6NzUc:ֱд3ۯǾ_]@T8P@T8P@T8P@T8P@lnPk\Uvu=w=mث b-x^}{?+#sp_LoN|Vͣ__kqŧUO;w?'ɘз"f*ؗGb.Zw7O=7@57CᲰ4zO[4>t k2'~Z({p4{^ E;qs|yN@7Yu|o!XUU =]؈m?{VYsVn>?f {Rp䮗=ZW(MO)s$ ? 
FW<eA^t\]e WUm];ّLF,+٧Jz ||nYW#ɗ;/+a)XYo &I,mOglC~.;mfX~fN_H-EYdBk/wiKmS]ٯK,@CoCʸhPl _ 9>}gz]5u|%_\Kw+?W}ǁ9S i͉MðM>N_b7bJZ%Wmv+e[[.)E}Q,v}Qys#nS-~ЮU~n,.pZMDzcPna8Vc]b5;iv{vc=zY*f&ݯUZێ5̳v6}20!߷ae<:Q%?wiZq~.$/P%i~a>wz ><(kŶAۦ:k%?X5GKo0[Ѓna}4?QF͌+c}5d`wۮuN5zvI1Hn\#1I:``|m ㍵]z}Wd15/~6|&0uXw6P:֑'EtHY2ap.Nէou_0 `d8Mnn>W3cv qꦱNޢ~籡g`FsMkV)3t?HVBC= w%kk:}9"cMd9rS >ө>Nl~ӗxRCns brμx`| X >/O4|ׯ~c%uzN'tgGT;wlcT$tBG#e 0MLsI٘`m$ j^:H%LTxVA46Ӏm38ݵL)ޟa?j PzlvO[rn[{n4#=+bo`>]|j[w\zMf{g4:yizx$i2q:ɻ"~9Q)p0W]=6hVp} k &5 N @$x1+,w\y%}^k= \7+B{{Ykw&e~axi ~(pᯓ>ZYLY jwU~f+.ݭ0.6ͻ!9(GgtA8N2&QXg9c,2ryōc8Ų2XAϫurebRu]H ͙vx:f|yP ,chpӴmW*%l\2|xܫ1Ӯƥ̃x^̹f^j ?靔,z쓉~#w|keFDE<<> DVJ,Ƙۥ#_*Y1I>C#W?V5˫lGX:f]`-Tݑ/eiٕۗi'MSu=2hvo.nu=5jҔ% >T仙e8渌Cڇ늸Z7?G5=l2eGYȶ݆X?u7ur >"2 U.ey$p`O^VӔבeE_MkztSɴ[kľ4sˎb/Q E$Hsĸum]V-03}Ɯ.FwS߱^{ߏϡd<=J֦ {l''ʚE0`׼eR" ,g>zZ=SM*̊ Ŧ5Vn:ao+Hq\uJCgNw=f#VfdQ'h_N.զxmou;s_)eXMw1:2Q/:{glS7iPG79ֱGUyX^?r}{ ڮ_M93C6'zuIWf~K<0>Ś?#/3`?ľ*a߻`>0Ic_0)#i9-}ǼB̃o`Kx3꼷0HEJM_➈,õnsI3G 8K{[uvOw6bRKa nR6piUy=~9WRp *p *p *p *p *ppmлǧoҽgfkCؠM xw6kNϲü`r|-`ytW޹zg:H$=KwAzrf>%t- S ktv$s-}b\?hƇU#6Kf7LdžTg!oPmWy ^&h97{ށ-o[)1EoSR7YWv?깖ogO,)7 3mCgW:Cv$93_{oOw,v}+b]t2b?X5Z?^ͺl|uJ{v8SF$; ~4 z d`OCyv#㣨#ѿNWĒ.kiz^mb*)7?v~/].߇ώwy; ;Y>H7u}|im3lD&iCi?0|$ ;G䥰}g%6C5<: Ÿ@Я42N-mJ=ʯ; 6i8F ߷ew4{슶m]9$MzGv4ex<O^q;KG۵Trs ۿ)<kĒ?>,yw0{hZmoSty|@>+s{־t/>R7yUxܗTB^/(#qiab 9#Ǔ{ԑL\~/xd\>ML.#k6IA}8-8'#.ځjT YrsG279RlgEVW3_쫆'/l3Ҝ^,^[Q? f|C_2rqI:wGXn0?  
#ػ&q/` C#3 ZV.C;̷ r1&e9Zk2gT$mUØ +d~v%oћԱ}97M;t#Q_cy賒&gfkS"syohxso{zvlΈ]]sp1N}K*|&w/;N!=mGnmѴ!?6ڇ^} 0;<ٺ)<~;^7DzJ\+JyKuy tvfs~}i+0Foy ӐT׮Jt+0)^Εz3c G9 }p{ }& t]U5:^p&f^0x6|{spN L I}xAYVUMK= G窅`0}^G|l{A } ~'8cޚ1:?1知n(TtF62x]a1]L4ut-uo9{FZ’o<`9GCkc OqE-最ác,_zSujob2yvY[85zq0:‡5ncC6iG7y!{]F̎9c'NI}N5zKmm|]"/̝CLB#^wn-8XT?2Ow@?j,rRfWjgwc&$M(Ǧg.3Bc9rWAw߂vQ؜w[9b"W9!fK4UvAF̘S-ӸsY_mt=j#_wUSӑCJOyL?\oH/,{="k1sQfHi;SJ" GkX_+gN*0Uޣ来{k.؈yIgHpK=GrlʿZҷ^oIcs7<ڪ XWugan`/tuFw8,?Z Yuc9s1,,G=8/ys caٚq1$?{NF0%ܶ>Ś1z1[<iN\#QSػ]EhHbVoB֗(ձ+=Bx.ruY~2tE> mXVg{PG%ߏ㒇~_LKy7v&`w=0ĺد8Xxc q!֎~g=).k#T_ cNȟb> G&g%%ziǟ؋K=[4ZEk5M=l E[ٺMڳ^4S.V:T\%oj.߉f?ʷ>˶Wtp4Wkæ;cMlhdyj2˛bɉ2F{K~U8@:w{{0oD>ush3޷E=ߜ[͓j qP'B "VlQ)ci~,^g՚Aa&ʞ7ٴ'e|=$4}>"k=&QzǕʛP~ڷП+]26MXƪ벅r{auPF.m]f#<w r[sΚnuf{]cSw?8zޱڣJ[Su K3-upʳ.s^/aжU:'?T E@35w^?yvE\/og"~g3'a;X# w}|&[-ۆVv6x^\Ol9.QOSN9$O6]7JYnM!u ;]H;@U=m*~ޱӵ-]r;X׽T~W8P@T8P@T8P@T8P@T8P@T8)9m^ǚ z~oK43koG e?UU6|wK8Q!Ml?w-Aޞ6t^bp_vMB+wFl=|!ϴ?L d%6BQ)jX'QcL5#N9bOB:+DM;enW`gGiʌّW`PT@`X aN<隢ïNdo6S^@lBl,;^T6O16"+O:=v""\Wkvf{_?0gah߻Z*ؼc/_խ rCּӞMYyeyfĖB@޺g=|>aPeN26/%W_င/|`-;&ebhȹ >3^x>IQ}-~S3 /߯|\b>YXh ^2ȝf0tsߊ}Kڔ<(;u^w 1OX3WC1DB>'$?BQFnpW0(qY蘭8c*p(ۿ bDkCsOۄdL-tW`_Ю3fQAY!ݮzc|?o NjfR`~됲uv@r5<:FSb[C >sn} |$U~Ѓ$(df誎 Уf@pm|']Wm;#4HCGP(4BQVmEY sVUg2NS_RU>Mzۧio\ux3=bEv&cqmuR6^cN0xɅ55miإ?b51)pd -|8߈m%'ucgo-GNbQO$QxeԳNGi6euN\$^lH)jq{0-667Q~k4Cq/ )<%mֶW#qz߁Xp, ="e3.㼩s5W_f8Y8wD$cw D7|~l0Dk}'mY螺&?m;'ގȃQ c*p{Ϡp˶Jrz7m<#lǏvgEb5hk$uS*li#M?:gsT<2eu1bgo8P8\پVmmۦ /^2dMP~Ox6 %oXy ehX,3)r zo}v=Ӯ/ΎȟcS}@>ЎB;8yz_][ه"rz(qaԗE[o=rGm{+3m=bĎGO{FMwṣYXu> G(+׹G|HD@`;$~=ɉq6NZ\9Giˉk9R OSGH^܂߃s\62ۚn %c.g7#`voI&3\6c:,w5}`Q.âOsض"<Eg;ԕ sks} pΐ7Hqmͩ&L1Ϧ[ xh-0Kpo]yóq`{cbwcvL(ıӁ`bqL%{PUwYS,[] m)C!lvϿ>?G=8c6oJ&60>9#Lh߫xZtuF#ʺOi 9k=k77'Ap ,G'rĞ*_Sq uQrp 4D\\;s5|wozά3VAρM0{|?|{{?Wfps$G&;n<泜[kܬ5 W׺6.ܦ_=V*-͢;ms {?V^R\Mۢ)m+<&1F5k6'gEڶÒɷx|k`P>Z_5Otvjct<#'Kgc},|[>5֓}@'\X6Vw#m|N='vq}ioFJ_t;xx />j:F'.nTh"^:xptbݛ1/9CAƤOu'p:ζ}o/pmdwr;tu!:{} lkcUKnV;`{5}|{eiGP_U&s|γ2 >H9O u$ύig1XY_|=Fs|6/1u`f[)ȗ<ĥ`?.|f<ڬ9(VW_*[PtR77ct ]1N=VE@PE@PE@PE@PE@PE`6x 
w(/<~ʼnekNAtgrtd(Jf_6g9hQ'n}L;V7{]nR;Oofidż}y٩pˤ?9&-ڬyC sц?{hw5[(<#C\117$B]3ͼ5slbx1G.b@eԧjs yޜܟNZd[7zMrkaZ\? _q.xfi-ֺp9(s 595Ʋeb˳y[|.Yf{VПHq>g˼٦c7J a \ev~3X';=ցݵ1,Ƒ󠓘?gPU6~7ɧfLv5<7V/lo}w:\f3C_&+7FW)*Ǘ/o;6ϴŐƑ#tҝ;S| kH!4;%uچ[@ 6n] STmo_{kW#-f:ȐF|gs CRXޚ;`}ԷxQMi׌)1ynbmT8n'^|la pDⶮa=Q?/OצF,om8BI'1}F1ݽVlsLh){^F1'H#?\]I%+86a$:c")}Ipˬ ={޺)&.=M|m'S_Xt/-\M(p?9gNk6/mM_ Uo7Ed"y|E1`K1}&f(dmM"uF%!{^Gl( A]upgq=[gKvN ǻM]1Rsi\zyj|nDl{bb'$_f}Ɛ^mX~&be{f?5)N>A{~M3w\Ӟ+9ljm/ ]^ }t"0rKn|Iւ}gh_&,SI@_h|(>\=Ζ9_5;֓i$Na5OՍmU-g@6LXDipt^^yK]>Hs~~&>g;*yZpN!*^ L^[xR2A]G7y}7LU ~䏠/D'g0fȻAe/H˳ͶDZ\~eio)ݒܣ|OkQeXo^?ilq'm[cuNa.)Ov] jY}ݼsm3 Rn^=g] 8oێJ&Ɔ?e^Ypo;W췎?A}[[tmԙvceX,Z9@1e{q1٩P{/Jey>ݦ{dm!mھ}1>/^~#P:ʭ؃{H(g#rL݅頄 gV..E̞v~s_^y-ze|ϡȅOS'ЯPy=xc;Pv߻džȎc0J:ItkMbBt=%xik-~kf#|.W I?ƘqW|-_4jSg~l}wt˸0O$Rao)'=i5\*Mݚ`m̾g}9kcqM2F*'|y\,/xYt2k9X"wugm>io_dWT͇{C;0m|ݹ+ք687HALbGq9w|lUo~9Ob]`<\]8h"$&kU sb<ߌyg"-O>n^d177\nr/yq\~{NqÕˁ`Ӟ7u31^l{\҂Eqg{`n0M{Y-uāw^&2ckԵْ.\i1tu1\r4q 2g~ {0_>pNp>ϔ[v=%(-vҤ}Ѳ=3 Y{dS1#T\if|5_{GX->Oop]=|,%eAN)8uBWyx{s.wVVLoO{1o\  s=6وy/$Ӷv]>pup/sCP-$Vbx \Jԓ<_xkaأ[p6;D饈U1֑r0[N;wxN+v&燫 0ɖ̧}ۺژ‹''w8x{.|~Itf;|/Gx|?@3[a,SEG+[1IY^˾1<ϗX=QxD]Pu,^kՑy=&x?sC'b._>״&8功}2'y_oCCؖfi=r]}ڲkc/ E"_CD$m'ԍ#nZ+b1ÝSi72r{ k"W;\4_A"괯*|OPO#knyd"w(tF ~ObIl~~te^S7e #=wl;{Νsf|nb!׎g.RbKq%ll;:+ێNxste! 1?;2\xXs3YLmܵ%oP"w]e:hǰͲZ* ޳zc3^-=-WbL>_>r8~8Dz+NK?"7 ے\pu`a`Կ r|j4IpT7|n#x^?OG]Ynu%Xm2\y}uyޓ}<_r,|& WVIHi Ý?O=>6l'<ާN1uUZc٩}!VγgUVv;W-7Aߺ۸l,H,D7>'-||!'l҇l.n*,?Ai+ ᛟ?JʰK'1vވ|UU?e;J([Wc|xhjv[ny_%> mtc5Uzōbgy{W)Ă\5E=yaϗ][ }.'n{'.C:eZ^y?[u~f]}L< ˢiφӨ vAu=O __|‡{y N +]\0pmeU:~D?Ϭ^Q|?p[i(WWWNC_{,x>oor \kc®O8_}k·Gps>m+P<}|gr=P=wU~1<N_#g}/arN1=yhn?ExIb- |؀j|]pc}f!h{Eo.v%^"hn)Jc=ux|Zf^'^q7,P`;?qz _\})?jゥ Wzsy,JK&u;໽yt"+<,״',/bO$XR%<%rXG_#9-cSM dWyo{G+w ~EѴ_x?>L7|S݄GzWru:XuQˮ_;24 6\xk_:uo=/:/TE@PE@PE@PE@PE@PE@PE@X(r<༪6ڗ7ep znzgpEWƪ$nwrnlaj-BX4^KchG#rmgd. 
eX3u}*#׌y?T~ѴMD~'+Pw5s̼t| OdQQLutOOemDQs2)3IqT:ɼ6]RM fa]Qgn/Ξ&yneFlcGYs~ik 5d29ԏqLù\sxqt ~y)SG.ȣPGw 1̃L&|k gڄx⃜'[V`^YZ־;s$ G03rsr;"^]u`c ~w\mޮ% EFO&{ Wp)-b #n.oR~T[/>ΈQզ~{DlyGN\?%)-m ٗc[7c5zŭm~8އ9WN%D;܅ˤL`Ms аp>˵9Ȍ]8ŔO}MxZ=+TVy80y@1 r 6f. 7ꦉ_#9/36܌8|O1s#f]g^c|b}6 b-E+-H~"\oȽ"gOCqxJ27kFؔ+.u>r_hssm٩?6s0 kz/?!yWh{8rdt9)}G*0FϢdg#ޱq=5,J^kbLibmpA;FJُNi{e9\Y >mjW_i2=уӦ{=kنZ}dde^Xj:>s:<|T-#t66*'R`܈ {0 6\ rs%k[3='Ľ;x?=JOI-c G?4mʜ\d~udt.ֳstg;GB^b-zljO&'%Z X=o9Fim-(鎋'n1#Z8p+ Y2aX쇤0mFg>߶7 -Sx1y'o=+gG}Mv3Myv5W4-izp}AM*W ^Sfֵ6mkWO} |3[Uw0IWz.^+Ĥ}cY^ԭ߬Qw-rz6"PDʯzd]Wl$W|׬xEB{mtϖ <测lٯ˸;}yexP5=бxvS|%bfplkp u4]m(mCћ]S?/pѰo5sA';-z ; 9OT)⫣^Ie5}3 #b9a! Sc\&ocf7|*}ٞCymK&pr~}ީ mrnױ̓ʱyo=hj}+l!&/R}Gz =K;{i&d-Rzc( 'Xgt?';9x'[ɞ+pz~'\XA+-W۳nr ር6mw=e9=[i^g?:Qrd+^fs[ͷ'ďޖo嘿}:~~#u8UJQ|m2yk|z:~kfW{&7V{sw^>+9 ÍÒx=:ϟf[~rZ wb_LwrOxΛ+|H:Wfvgk+k!~+a 㙮XsڭH"m+S'=`'djp HLp~c bشՄ{s H.9|߷cGs&:y+C |AWwXD+6쐼=9g*|D_:?E<8] ]7Y3e]ˎ2mN5b,W/8mӳ7Hֹ΍xC%kGORwٶ=y);A۲c(2oyBCoOJ𙦄H-;د΍ߖ0~\]J ^Hckا;gk|P ~Q?'//c×_D~xpش|, ,% ~95󇂿_Zf~/tyE@PE@PE@PE@PE@PE@PE@PzNxlݹg?8t|0GOOyKliSo.J:Klt,oU\@YحO)~bNrӘ]+ ?B;cvnD>5;ށ/X ܇='aÕUաy1IY޳,'qg#/bF< 9w ~[M&~\93cnO/[%çOw ΃4W#ו$0J󓯳[n 48ku*BuS?/elS;#+;WczODi#k}&qc)eOVյ"8gD b.yu@ /iwB<\Gl廞| \~ʰ{Asv?NttpPm>iso?}m8ΦUB~H^٘NN[ujx>e3>bCw?IsySP%xf|F23?55Kϲ}pL рYOܒ9Dkĉc>}ק;zaq?IlVZgSV/W{\`u9ݝ+=13M@QNզ}eIN S׻W95`$Lb-7l'Чs?9opfa:NɾĎgv c7sEG{X&Bk{v]НW+-=Yăed`ī>:~oEWiS,~[.n|B=|`{l<"<4RmsÀ3 Wt@͵|yó2~1P(_)-=ُ |;ꤶp僝UӖY ֧&>/?Viy&$JHnz>p'#ι}dc ۰۵l\o* >Mnމ>wWo]l}}I6ݝ%{uKdG;cq'7,t0^U͍3Pp7Z.< }[l-6뼡+>n8 z@N_/szl;dMO̷5~ߑxas~f)h~ +Wmnѝu}sOcC1M̓ZZ˜'G"8s>; }i`lv^6>wXsPϣ_.#aM`רoY_o ƵapF&SѡCzQg7˜Jč~/[vJ>8Zr4a=Wgcu_Em-ߊ쭌ԗ:id_'+-h*cc+K;} >gF_3PQ˱֤?o{/Bi ;x #g)k)m~ΗGJawʣCc8?sfot p@Ke9Z)\ytb\W5G8fxp ~m}dtcc|czd0fo(ttG=}թc,8ߗ1;c(qEMŌjSSb&4y4o puc<5m2+j;( ^XݱvjĭlSYv.A'uRg=qǻwG't4-w(k5~tbw'r76uk? 
,<|=et)-•m7u(d*x-X/aĿ^.協mt_{6,Z ?f"kO۴aZh=Q^;yrwZQ;|vq񧉼;$Vk;m(.%'sw/q)=%Xqqj'AMrWFSbߖ3|1/sfnFO+mwbmKc*#|cPOxɾ'V `Kmo{G'?Ʌg|wc|.mkmzG|kv(b); 1xG~ DͩFB 7q!C3(uLZ'ƢJ~3މ\qo?yzQ_P E(%a#_sLZȺg""M̩ SsKd?>0i;F++f.BGd>^ UV_^>2< |l1-wEp#tsĀsq4\r̴}b핯숔rh\߮?X|Du\6|,}Çm&uv|79 5_alp,lBQgי ߃6 '<9

40dzqOkܭRV{DsXioMثS$s}.c7] oZںp# וvn}wˡG*d{_7k_V6r棛opm!p UV>8s _N4h?AG (dwˍ茔Ru=<{%/t&)ŚwܥzݵoٸxF?cj? #u9!{EۭWPw̧8E-m=`=? Ha{[QF/9jk<=tuT6#uy=dj8Xʅk{v> N3lܪڽ4/rs,HldKre:`_TOLb}jܓdVCIxWNQPͰc-bvYp{s-ˇZ>v}lR_icOqjs#ΓwRjk c٧6GmZ{|79W+ݮ<wudyeym;xo`e39W&hRI&hRI&hRI&hRI&hRI=xjq۱W/lRd9qK\s}z?_Tnkm8O=<4򸡥==OΞlq- Wױmߒ[m{UplOh6zKi]}q8'.^gjvXh$>&~?iٳM?<ݟ?+Nۂ''y̞x~Uxe1%~1:G9pK|Toiuxs$pqf]>bv*`vt.xdz{~~.W6׈Z5Y8#/`$VIq%|'6\&&lCuF]ktΣ6^݆68xd~o8+>FaϥvkXSOӌic fCi|}Y/Y<~~Ty 8#x O_ڹ['2ri{Hi1.^l4[IY)v]c 8C|^XKguQE߳1Qg^į9@/j@Ul[v>!Wޑw#4Hk}cji ].*6*˫rW[2-ߙUe|)y u/)Q-g,kۙߎnzWtk3>3nPu{yNb=/L10/&k3so%sȣ̞gVVyΟgxsB=#{WQL]dryp3^=uifϜ!aA^+6K>>$99y~}3Kx&-otv`,D\j6M' %&k};tl]vML:KsoL,)Z޿}k4٦ڷd=/ 0&V̌vjOϿ{xNVHwZgyfN 1>WyM~8`#hHTkZ{0}U}_8o(.cv3LV/r_0@  y uRIGRwBc ;gFKmCT2fnaw}r?3{H˳г'X@'\}ԃ*193h6^kޕ̑VeL~sa#RfH-#KdM᫙S>whhѳmwd_Ԋ~ m)>OX2fQ+zcAj< -Oo"Yj>Yjh? uMjuj vUl_xFa['N.|Ht /DOT޷,mW ~?[ǤԤ|ӱ3WUQwS (xyOecNsxߔ wӱvW=d (Y{tIsgν9S?~ojی֨g0->l~xKC)5l?> i!sWDlFfRZ/ƛׯFi RQ߂z/|!胵ӝSωoi^fC01.+Mxasυ(2{Fίo\GxϽ{e? >-}}[1^)^3?[oiٲ;Vov/H-;{dz9HuWƖ71_Z;h=[7G规N]`ꏵ(vMޏ`R{>ΎOǼɧfo9pCXCD*2fӺo|^cluS_jl$6xR:@\OM _\w2& z<`a$)z/,n}Mj_xiA1r>qr]0X-uY;}oςfͿX~-7Gw`fS=R{Q\]isTNsVtOdVJMB:hxdsr $_=mVZ?߷!ŻԼ }1O}_IZQ)}y]0cN\m{CC'3}e{~la8dT--_닥jnh{%ehʯ?>Y8o5~"'@lE#6oz59'{oŊ=+qĻ\q^{8'9->TfNyle9\cq2wt?%F:q0kXs0[*G}zrRo o|f >Z.|I?I5&iu8zpOVK~Ged]xXۚM曈T.C_,_cJֵn-6ʚe ?Kl63XJcaʕjE 6f9XTkrR96?uq~89 vPbLHl_ߴ9U. M\<23χg7݀bUG|I}q myzCb%"ǺX6G6X=ӱr6l^f~(K> cj:g#O\<>tQ&;"G㠓CPsKJx'O0^ȟt@d]~]ؓe,<>[HΛylS~aL(3HÃLJht,K)ͩWhϟɜk>{2SdNmJGtG78WMG.|4ù[)程g3sv(ܗg W)Qz'yv洖?*O!3xh#C6M9=Dtq$>0l̋׻3<5 f_b{n&˵2"`k;,寄m雏lt?|ih4]dLОє3s_:3b[\k~O`8 Oy3oޚ+hZދe<.ȯIk=ɛ8T>ԫ`TjMy'~07>y;/qb. 
{V.~ڵöIdc<4$092>K^ʱSہ_쬫 f FnsAfn46IMlc jX`㶫^Z_Ociw.ao6SFwhC-F[sZڷa] E%|H%g#eKg8@9i6|- =-+^ȋ7斵tHcGYctQ< \r}z^G[rT?h[#'JQknM}/ #qk;TJsb3AI [[Zflة ԋڢK2*-5 |k5!AYJQ͉1=^Hmlo14ĺz?htWcr=a|6!xdJwe4ozmmI/Iw]lC؉EΚiJJp6E< |nGٶ~aG^egרrKr9xDASeb񲃥6gOQZ=_b1.Ȟ X;t?\$;G."9(qUk%<γ5Xxn'S:DSD1-9O;:ooe{??9H9,k]_%ok&O'1{Z3*siMڙ?z~=CXcx+zc6ßbz쫏Q:gf6̗$Jn_ձ4~WP{"Y~[2{bp_[I<|]ɳr2^\[{쉫_,kަ| _A1Hq@ؿr♉ڱbdOcc|;[k⫅Z;XTV| Pw9_&G̏ف_)S-K?YUR_ K!oiS+OZBovbԜ;;_O.t|wR`f~Hj0bq+̓N2&_ta :,9)x_~!f]WDŽ~Y6*uUֳEmlkџ'ǖyFAR?l"osٽ]ƈ~rRڦN3yԾە/uksbU!s}7 >rS,sӯw{7=G}8Fc?㞦ХMLMH8րlpSn}5}V>CMy*SsZ,ƕ3 d[,-vcz8cL67J}a#RX;fi]eL_bIb'ctl o`O(ŏX1Պᵢk9>`{;vG]#gF ׋-,|XmOR8T>!56@_Fn^۸{MwNujETt0clo3E.f qoy鴕l5? ΩtJJK>{'Zcw[LGZؙxx^g=iK\[: @!oyǟ99 ={Nkf6c+>3*ƺK_ ?X鈜 YvsDYߍފbJLN֏Qcƕ =]VykW N),z~Qk*YjޞqV?5WFh~hMzə c4nzmͳa㱉-S^[}qKy{⮉]=]:G4ͦP VR9F6[3v{S7 9_^h=me:SMo<@ӦZ{}'Vt\)T8cAn#ţVkqܖ89`+q[ɡE郥 =Xx3-)% :~\ܬ>({)?]En^j|5w○'Я1w\Kl }ws9ᯅ<ņя5 ǂ^nc<{)T -ߌ'_і<فs}1Yg53n[Q03S3xqc2oهLMGm2(& ?Y[ ?|Mdմ[<& i}" INC'q\ly{F[nq޿13H'KpBt&OJ?V5~?~;OΕٮd$|K꟣cv瑇&&:[Y|Esq,gc sCEra+&?ln fxdfU;w=k(I2DpGr&-닞^މA}7HgkpΩcYX%EVC3 =Y^"zÆ5fsgKA7Y`hvlxGO*sY cS<˞-91ԧm_t˜DUb =/ 6$}Gut1U(ϴ>JK:3˵X6)SxsDlVj6JWeu"G'__1͝)jmYoWt|kˁqe*o^1|߿'(_\Cs9'41ؙswY-&xXכݩ%&pu}%>uNەT0_ge2Bƺ.jC{ ^֧NƨR5 6>2n3Dz+iv˦.I/I˦'Z4᜵ɮdO:wFKggnfs 7^F?e 42B~~ŲYkUWARomL=y}Zn:o|3˽ƃ/굝9u{Syx6vm-x{#_kϽ{oZWoLǺn})ݾ+}3DVÉ̔ʝS6}89s>~# R<./:Գb~1 6}/~GW־z]ߦw|_cƇ~8+Ⱥ= ܡ"R\kQWEsGV6zE]ϛyf׬|ߓZާk͸ al=^<=BkuX*H zsޥ'['އa&{9Xƕ0 L>|vמh䄍ޟ]F>lbcWZ[f6cp--{wA+"סCNpgV=Alme㎡ W??ՃO_o-~7d1{G6Nf\2'|1s(8emy\"We>%!cp9o ߥ8:Ի_i|oOksY:o/w\#k.1Yo/ٸ#4|gQ$ВM2z-t6ҟ 6jᄅ_W:Z۱Jw}Kᝈ}m^3M:iy/ YΛ#8#8#8#8#8#8#t?[[ikFAQt=8?h|+͵4j 5>ҘVohXL(kGfݎC g1~IVXZ z`[ՉfM~sbYX b,b.T&gո1biKQRilm/t&.k;uE-1č*@3V&+f!n5![b;dmSB,89kZF\]{.r-Y#s|;t-Wb !ÓKx3>7}֊mo+qP(-{Vj N^~C/KC ɰߪ]KHUuCCOn_M$$_ZiP}$/ ~}kuMuQ!Z[*8ʚx֧~uZNX,*:69گ_\ ta&q1'k2 F?\jv$KOt%ګebٮWV,["Ʌپp㪼zP$07bP$\5=5bt#Lwɇ~yX3ç NV]1ͳ ႙r+C>>O윝-7sP [~Ih1RPG ~7;. 
9CG9N(R\#yt8PWsP`X}_魌c0-y 7+ע|F jּ21"lC.aDiyzz>_t^7X.ySB.ޢxqA?R<$2`ksㄣ$&%|{[s͸x?2errݓJ}o)%ZHDTj ߼6=<58azMq0:Kc>>3x :nkZcK2^M򬗻{xzb!| l|߮۵*c/o%^Q~M\Ӿk_ymOgCaC11kk[* YIml\ű67kA7s2rM+f.k[?t]>T?Z/~,}!a7u.>4å.Rk\hhWNˏNΜ۷kUxCT"#dbG7qMɗy7eN1&kևc+G+|o1NZ*9ňӐ<,PUj;xmpCDvٶrk].vה9r3EA 596cl?mAtqTlְ䝘ŴRܨȷ@#;RIiN,xϵ6Dϸ5WVK/nߴrT{pxw{ħWrs}m >A(wk%1޳37NT7 h:?1_B?n6w [Dέ2}&v 㤶I4+gߞ,t̶('&A~Ӹۼlɽ+y;gxqM{^ O.{{RFcw˻K'Z{%x[)~xґ;Aeaȁ<Y)֜3pN*jg[N_dH|#2".l5YsXCU'b}=ҍGYwKsQLLedFRyD!W>4m9e2;Y^k(ZKe.bulElO4mTcҡ6[*JwgQ=!öqVb-(k_'5wd\-gÌke|f8rOm|-=N}Ϭo<gl vzg[=j@nQ߭_~rs_o 6144KͅR|v~Kχ'VsiBc2] 零6!B,9YKiAOV|EѱyjMw\Xk<[_,CfBQdvr =| *n ;9of[ڷOؖs 5yx&dpΞR[:UOb_q`n|d¸Z; >=4ۚSS`̝w[?5c]<#ԧb~][~<>j,Ϳ}IRm*p@I*s F4j{Jr'u=;NϮ66˹:_e?Nz:IO7;\87?R<: 1Dy8RJ)~.6:]w+$V9r+s΅Ӟu+b:`, ,1×]IJ{;ߩwrpi<:θ?&MukWɮ]^?n74oƔaHpپpN|wKԮ]#ҿ ?ܓyë>eZ}Ff=?CCۥ9 1|ÎJTfziosIdohtj{˹(|h'Fu jgwMm@}#>?W+㕸<lxOn?r|ï AImiD VcNiMGY; }H~16ygٻS݋|<NIy|ch/mڽ\_ЛV "f'_6~['εG,&-{>\xMA(*=*Oj7 xq 5ہ_h |3r?|^M_xU^qtf#syHUOc ?:~gˮ FT?ߎ˹K ;fƮ+9pc\k5Z/\3?}_0f-&_/{͞/:" ._ڛ}yڷ:-~~QݚVyo?!.1[|hRwgף@)_[=;eb9'{uW!p[v̯.EQ$|wٵ75w]d-Ԧ>buy3 WF[kn6KX~rnt9],*qS;߆RVAl!ԀXMn%2O4,7LsUR#4t-S'^^G8 P>$j(|2@ܛ⢈`$xX/H΅7xIlwZ|^pƴ>-IsBM^C>ٜΣ$U /ƔO :\,i11!>[B;XV_޹>ޡWN/+1?S_/IGwN õK5 3Ǻ8i㕸xckFB h}cR/nfrfvH!rl $Ryv/*ɸKT="-6VzrJ]^)vf̓+v~x>+PqhЇ|(؂]Z[l @3܋-;6Ǒzj24G>}zzLujx^X <;d'6yIhCjA0:6(52Wfyӆ{K5}va5?׌* }S2F[*qZSǢf:n|aq^rU*wjT=fp|>̑a |PKs(x~d(oZ{| Gxh˶Z>̟!9}BwsL ׃sI}bw^-s(|n CsjLձ*a' >]u!shNIgW6~ڍ:: 2ކnWYx E3ڮgK$~oAqʻ+f]yCV>6`S菹g쵅bp{jά%QKCa!q|L}Z`禸Y?81|kOqVw6)0U4u??lރvrj|ifo lTWoo~^ewKv/j]L- "ϭKxޟ}ڀk+:'bbGop8?4{iNak'=m,E,3q ?y3#mC;+Xh'ץV#uŭO͞1HL$N6`~my)hC3"C1K㐰~)4+`\O,,$FN9Ǻ|"#W4hhhߐMN1o~R;B8u;N9o*ۏ`ZRzjf<)x$߅Z.y7Yſr;5GL^!|ooϩN8 -8)i~L[_|V @#~h7_P>\?`_uD(UK=pYu|o&|G>X׃Szm63;1!W57tgOdžgֶ뉡t@+ys߫QdNw|<7IsOGy^jp9םZ<Ɛbc_LhT?vM6E)EiyKRrPяMX=p\wԆ5Jo/T{r(j80:oQmMjfxW57^G?Iy+/R3o/tϩN_ؔf+Va: ~lw̧zV|='}'[cONk:ǀbo\|a9V^I@WYlܤmT#Ɠ+'NJV#n,ņk>-[M{:t耍)w<+UZ{7w /xOAفz?uaS_5;[\mwڄ>HXs䥟n~3FFqS_#'\8E F_ć6uޡ^&^Ҏ- iKttqxX=p߶Ro+sR{'{sj{''c4,<4~ 
W=K߻>se\&ΜR}TJWƞ-oW=z;DƜy!<Bxdta=zJ{:.nx3j3u)5SEWLx3n#V˰~iRw*s}R{ؿE}lr3sIˢ_~_jmrEԩ"[Bl65*sdR//;θk=]\昍5O~- SpGpGpGpGpGpGpGpGhG~m U͓\Jߖ\=x=ԫYz];~bV MgI_$`yܣ%w}|C,Ų9Cm'[ss4]j4TT/R롽j||*ݶ;h.BԌi>SQwI:k.[tUrAQvXf]^jT6͜/+1!RcukwAJ{Q3{K4mN_xfX,4YC7TN5MX|UkJN0 rFG1͹kާnk_j*[*݅]7E : C3^ _z| r4Ej:P?\ub}Pkza}[×u7}J^yלpR|tkODyفچo)I KqK6˱RYi̾yۦOyy`;:)ʹۤ"\IvWTcf\iZvnW0nBotU}y uuTJٹ#RO,9nr#1;[ r >LM,h;58PJ[}|''OH!Weǃlix7N!:oVNoNyn\\cG̿GV↓IF勡{Bs?M U Ιo3|ҏ?f=-64tKL9g|UxM8/hA?/qxU)F.Z/&?]K-COthq[ y4hCc{ؗ>rl½'e٪e~]M >§]~t~3) l* 5=TQ=z0v[Dd6U웥k5/u|:qY 5Ywq?0vʵT!S}Y>$}tlrKPvM|Zޱ>mU,Oc*bN<#M ?]#kDfZ>mɊϸyPܷ)3li}Z/t|%Z=jZ@5eб)[5j[׭%<{S-<-~ ?j2On8L͆o_8wk4:W?l-ɬ<𜔟S4NزOg=w^e.[k>̮@74_=M:Ժ3=60̍zv߾+󀗅1,i ԭ+1e>6 aaR]x7:=@FQkԾh]1-C;0R<3~s{/t;Cj gMTC7'>0u\껐9ܘ8W}_ŕbr@sLj cvzZtWPC_x {=t_iX/1Ԑ=264.m򾏔m#8#8#8#8#8#8#8#8#8#? WOPGwIx֜Ys{KVY;y},fu?֡.4rtI03;{O~q}rG!V=IW搏Bҕڧ>&ur,w\MsJ #b.t*59ɡLvUq#JmtX)398aqfĸf9zG.9MZGYZk߱(5u {0a)>ʱԢ8ڶT,h= e#Sȹ/2sЉMCڨbR,Ӗv=BS_!'#R끼؅{^hba,v խ.sVݿ{hEcG%#Fkw\cPr<_MJ|9]'%oJ'EF_He߾VTpغX>9eϢO.@4LkV(6\;!jO u6/-T.?쇆>BwpܣB=>灎+QT,Z09 Y߄Be7'J/R3(&tPν&S[;No Y]<##[O- WK]O-~X}=u}{iZ] |o1*r/,A[î[874`#¿7}>oBvPY.~IJ-@p$:);ԝ.]B Hvh1U|q3CL=Wi=#Wq4vGg믃4[k ^;G?okXy+sWj==5ε*2hs9@'j۴5<1sIR,or>I rh4ӟi8fC㟂dž~,=,O:> Zm Zc O[j'tE/0֢im`O&}u|׼|/&;Ox ]AĜ^hlC )6̿o0ݧ^eSl[XTl2a1x c8[ׯ~~Z&rV]>ɯzz8o4|J*g|n|.~=>[eU?pQA{̙~=uWK+s Gl|EvwwϜgNl׿r_c#8#8#8#8#8#8#8#8#8#8#*Zn^,/kC~=o3Y2޿2 (?*d'I?ǺB΁J~Չ5*OG kҚ˴vI{J5˥o]\/|q&97n13O=鉅;ڐC^xyܣׅK7MWF΍s[=(OYWs oɽ-$Ǘ'<]tXQplO BOFj͚K?ץ\41T}֘P[~66$9EIuZώ YެӅn%BT9F|C/k^KH;ri#S}"NүJT魏FxQͷҼYby|T|-hw~9J({o[nyc79O=iT֞0S#Ƙ|}FQTk)VjOInlqN;STnQD@7uJ]7UI]"}͝>m<Zn=ݶΩ X6k~i6B5?@7~8}`m8P蜽m卜wOGo;5~Il:iyԘD[Qypۨ#nL!qeLjC}*>[ _qnW׶r'so6!_<4tEluHtyң/\Nw#(HjQo/n^j3[۵>f~^ᗄI|߅i3p̟ot:}rW_9Yuubak2MМ+JǦY?OG#]&O;^tDvR`mHR&q ~=2k5<~-NvOgt_7:]]3K 3% qێm>RjڕML:fڟN؞s.ڍŖʟ:Neۿ2m/\ltOꏺkR.hKwGpGpGpGpGpGpGpGpGpGpGpGw$gK/E 7o<69=? 
|v<͵X_4/|j'%V&gǼ|3g{m[cQ!yrͺkzu쵦uX/~d{|8rc }딤UךWWyGȳ$x7ԩ>&>&{!UE&KI-uߕ$+nk;SR಩P>zag/s/I0t%g4Z7<9ߒ.y۷-2OaW GdNn7^ }ƟYo\}y6 25>O ,߷Y=FYsM79߷ֿXf@:7s>uR]9産*rjiP>;P }MeQj6ͣVWv~c@}6vϧsvg %_4p2`SM^Pj릟E2rauzC/C6rwMpc_r[Za\jt E &(v#F*>w}rT{grGѸU2|:mmk}ۿU<%tLNuGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpGpG` v̤nipype-1.7.0/nipype/testing/data/tpms_msk.nii.gz000066400000000000000000000500171413403311400216670ustar00rootroot00000000000000+Stpms_msk.nii훽Jv]̑s'`GN8C8# GsnX$7(a5לu=oM揿yǟ_?_ۿBіe?n۵=[ko @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @G?ڿ>_ 7՜O=זђݶXsN9I7vڲlm+}iM[wmﻼo}2ƼwYo̼ejߚ\ @ @ @ @ @ x7[o{Nۯn>ߣ^IߌC橯}ZT?yݝ}j3OFwgF[}eLog7oYϳ2}T߼eW9Q}1:t^T^G:{f]׻C3z̜]}SƼeΧ9O|6~[Kޭߒ=O{(o򌲶V-_{ooK@ @ @ @ @ @'>}y-՘Q2䩯[?Wy|:?{|=ߝ{g?}jg?ZgϖF5ޣ:U6{ֻ*g3+g?yzg9rsw?ƺ2k5#gۺG5~{Zs~mws>kv{=wgiwu;O9˝sN'-KȚiylݹD3tswGyu{)Ob~ @ @ @ @ &uw8ߝk&}}αgt~wӱƯwgd|{{F~c]"t1sշ>:k7fe>Q&R'35J+;󊹿-sE6#Gy8i]LCZB'ҺYIHV%!V'J[*yF> ykz+|ϛ}_Xs|Y}{c۟G['Vڪ9zޑ稳s(]t @ @ @ @ ?yڷiO5ծnͿ[%wIfn3rϭϝ<3hvEܙgٙgf7d%0{͏MyT92~7;_??>ݽb?w]͌Uj*sפMnsRι}RtMog+2魚|֪7gr2Y~ڜvn=nw쌳CROu:]z{s#9IjsGogRmOݛѽtިWr&շI<59}fan 6wꎴά3}#?s,>G>'[˪x @ @ @ @`M#y=_i~}jQZQrGTݣW/7;s24K EhƦ]vXs=m U'ڻsYGsY[6kTjߣg}~ΟQךE29{zܟ5#w5O>zO;N:Ѯ3,uZ/[kghZ'}gK}uO}tj+7o/o3QH?d:zf=@ @ @ @ C>~nW[F72DkvvǬٳHu3K}eUΡέSU*vc hWǦFZwg;ܢ4w__5N;(oK6|A=Go9JWx @ @ 3_q״b]}}Jcz̙gfTq>#wE߼TWe#[꣺.3wu<2rMyT"wtΟQ4#tiN꡺.6Q:teN꣢.>ݳbEnkLi3y!c_3+2Ge/}*u/ꣲ~R:˞ꥪ޹&$RӞ3_jnZoݝ<:kxjZ>E-^Zϝ}Kw/y[s8;y~{o}oʼ  @ @ C,&m=[y~{oj2׫dx=3+r=ͨ"{uyٙϸgw$s Y5q)JH$g%=*64n6w}\ʣLoNcwogǦ<62o#j3ھVr=ͩo}[>7nߙ7 @ @ O"pYyr?+^#+l^"=iyO3-oc83w0=#癕ϳg2w7"癹gzi wW!:Ӭ"s }خvKӽZ7?dx-޵b7O{M{{}ҽcLo՛{Ǫ| 7UIU3Woګ;Vu޶^ܖصj;_Gݝy::ftKx^)۝kݬ +dy;͜}n4;yey}|2buny㔣9Qq;3oc׫is;7*Jru?3oܣ}<5i ڞ7 @ 
@>@yW{?ei5T]^)cqf<93ۓO21sƓU~*15vY-^*{2rW̊lP7ƑO7̋?63w-Ow~2N޹jY,޹{w=|5۳w<;wV1n7˳w.;{V1n7˳w.;F5dgL]kw~u5ڷg-㡲wR:˗pmR7v?nO`_O$SʪD-^,2:ӛ^^׈{lFkߔo禮{˺=?*7f|Go}T̝6gv}-''X@ @ L`-o(}yV~=GWMgo>g#՛[/nF,RR_uUshskTckSͬ322[TE㡺3o#o?ߓ{g\2 xY엧&sg;f'ZWؒwU̻wox챥z̠VQUoK#!#wɮugg۫5k)t94:jZ[ ]WE {{Sv=3;T__-YOWw->ν])#םe|x~|M^y׼]N̡ܲ @ l?CV"?̒ޙuTjV}?yޭٛIҿRnߨrGḙ];#wt^ufY--2{13^^^Oѹ+ݸkČ{il {qgsi, _,^{17^1U={z=FǤzV{vsY%ڳWm̭ctLgh^gq^qޕ"}[]VX}F&EjWi}F')FVK=[rԭ}>yܝ+ @ |+3s~{VֻZ7+d#՚[1nVZ+3G{Ux9B/;w ^#5sGz˨'d~co-*4߻[ 2 d}*]yҽ[֜=|JWt5gO"_eyҕ'+eٓWY^tI*oYs$U*]yҽ[֜=nBWt1cO`[exִ%ݻE}h_z{whݵo+Kϗڝ3Rw-K˗ڝ3J8$_NL{h^{q7^qJc1^oY1*Yc1^oY1U|{txe$Wmսw{c]-ڿEelcdOlZde|kD}| bDYMĻqi.Z-Zp@ @` L]smn;|=3Y{gO+f*5idJd)U>*5idJd)U>*5iJ >*It'^f^|T\SN*F%VGҹ&TcKxRquo;cKwSvS;Ö)ٵ6˳Wח-ٵ6˳Wח-ٵ6˳Wח-ٵ6˳Wח/ݵ"óGӗF+wmu[=FTGhŤЫXFvDdj:QzŸW:;̝p|y @ @Lѧz{jUw޽+f[7fv,5>#kge`ѪmS;ûU3o̙:[EhGOQ+F;ſRz'r>OGjo̼1<&zeWR=F;=TR=F;=TR=F;&"}TjR+}F:"TiS;|F9&"TiS;|F9TS;+F8:d;F菝O#Kݪ43/6\-ʿVG0RS¦ksT_z_8n]6at~@ h~ۺek+{W$_%oTQ94:ߘy3|3X*s[eTomݗ;۷G3o7];3o"/~oj1D0񍙭z7kQgYF`+Ky=KF5j,*j,cnרת3tid'3F̔PqFvS7b,}Uj4jd"{OE{}Eo }"YSv,*۷G_F+soѶwye*|[um)]V}zhV={[gT͵+ʿVܯUdyu]{.W)6>\>\'L @V'>{w{o宮ݯ=_17Ru+dȡ՘[7~fV^#*sGjU7J/+w o̼q~2MfdhozQ3=fhG͜-K3o,=Oֵַfoɝ)[2![_X)[F/KyL#e{֗9~swsjvO&Ij%#jVE1;kTQ,:3r[|FTѫћ;sfVoYߘycd}Ho#EV9Û9K7fzo=rIj$Uj=yҽc=|ZOt=oO"_E{֓'=VVwL"Ez֒{Ygw̵-ǟDߢ)wSi¦Ces%QܯN.Oγכy爙ghYN`)Oy?KF5j,*j,cnרǪٳtiY~#S+#f8&f+Oy=CB7b.:C5o<]kujk I߈yjϞytqio<}qCeؽһu=ݸꣲo~Z:˞niQ7v;nKwmSwwS:×'qM~Lggq9qdJR_Yu2UYytEUGK`W=;f\LMߟ1T鳮W @?W֧3hr>]{tޟ56{FZ-rGg-U3#wVnenڪ9ٹ~3sWeʬQ]:v^tfYge΍ʬ;>" y+c捻YY ;ͬUj)8&4Vէov 9r樌#s|f=#ۓOf_O}U<9ߘyHϨ=#ffFnd,3I,g|c捵ݸcwq۩Ce-}Ww{>[JY}7/U=dUshdǤzVΠvwU])+Vil_t}lZtaNgT]N jT''޻| |~oqs6 @ |۾ڻu#1/.Zљ3#un"eiْweԽwoeKޕ5Bݹ"_=3óGS_O"Wђ;xISm9zŦЫyks:-9}nIiM)\wuqN)ٚv!@ @JV}ѺmgݓIڻRv稺٣rhuf'yr{ޑ)[wO_e{';]clu>]clu>]clu>]kl= 'ڙ 3h_O3g^;fgο&hά%v̠}uo?ΞQoO7A;s~_]oOvYA;޹F룪ޗ깻*vγkOv?L!;T_6J).&N%3D[6Z-&.M)#ӓe|דȻx>l#-WY8eoƾ?UR]q?{3 @ '~ҿϮO|w-];k{V~/|v\orʾΝG_[ꧢ3o\og⥪3ol3o㣲7*sgo̼1xl#Pl)]^3 3)Y{jg\}ҽ;{V@;WtΞտ'Яfy'=vx?&fxԧƮUs^TΉI"]4";D?.LI)F4*;ӓ~lړ;Ül#ݜvՑ3ȌVkzջUu3M?gZ @ @ 
Gg`}ڻf]y}+ez[!wDƬ=չ#[*3[=fU7YOw-S'2wHm2 D~ -{ҽgݹ}B˞t9cwn_Eв';+|FV~*lI*e @7ԥVWxqM;K7?N;K7?N;Kw_3R>&S.suh$h_zTϕ#So#eiS=wd}NO$)soaK#є;Wzeړ:3[4u=zHZ-۸׈8vR NgO]k'[]›}Xums}Zs}iwy'3 @ @$?j5~.vB3źWq֯Kw YڮEewx̨wgdh:,2{l)]ޭrJ>[l^ ~}lQ)tQutF?Fґ'5>&K<}eר9I7Q^t+FιO#R%Kv_U3r}M -Y 3n#ThR=WUFvBK깪g4HOZD>#ߓ+"}ejWf~O"%O$-K"#O#-K𖥡O#ՕWyEۓ;={}^Q(VckD߽]!¿E8گɠ;UxƺUyuAs0OYsTg[:c @ @ O$~5[ѻogoF糕FWMov7YrgVʮʝC7>ُTf̡YaFmF9431'YKfN3ۗr\vwn_Ct[vwn_Ct[vwn_Ct[vwn_Ct[vw[EtΛ];fg'3gYIwucjv|ңt̺cnflʣt۸ݬ?g߲O΅w@ @ @{is5o-}jW}^#+J'wsgdh-YS;3F2WvmE ZZ?UY[|cSca_yӫ`2Ff|/L{ڽ롪w_UyOyݳ"@ֶǮO{!1!wG2+|UY!ih gq J+d=]ƞfcݫϿwϝO8+6w3#̼mvu6w*ٹVgd_)ߝw+{KM]I5}yIY @ @ @@O|kfurl+?{rXzggx處;»GcFn߈#4*sGҨ5J';sh^#2rG'DkFݴeE;[nDM#[fTO?QOhݨN,=2d}h]_cw,k.g?+KkwtW򖥳;[eyҍK+EyٝƮzucSռ޲.wY=)oGw9 V͜cU辱hZdZrosHrSK}FԽ#ѨK$$쩑Su;'nw˹njKg{>d훶Vo\ @ @ PI,i{?y[-'wZٹr[FݣS3"w523G{1Z+#s 5#=|)Yu}hoYz,ѺG׾],=_cwhݣk.[/;gյ$[/ݸ;gص4_/ݸ;gص4c?X!gر4c?ٽBH{HZd ^o|F+DyywW׫F;ǦЩY={t.㫽5ߞZÜNOIoUmM\m\w9:wLO\s[mz[GL @ @ @ >w }]}nV^-st;ry<;3כo*WdzQK}E*23WʭQYҿe7f8E>3z#0YfR^-*{'- 3z43 3j'3rhf^4g^ǜ""_]wuwRE:/.UI꡺4:t^\±GUeiUtc5Ӫ׹o%o:8sҲ7Ŷ< @ @ @$>\~}tV)Σٯ][BnoK=3Gh}cSc.Y={ts{eff--sFM3;y3r{=e9@wǤf\Iέ7>}ot~ӻ-oG@ @ @ @ !D>w\>Z7Y]}{ݍ?=(l5+#X!kTQ94:ߘy3|Z2n$ȞI*һU&>3ownOOzԬ"[jXFku^'iF_ԝDeԥ)2כֻ7Q5idS2TyIks1zs6'n'߹W7ynٻe?7m%Mߐ𖽯M>w[~?[>y*+۝;oԹlwsy>?Nv77uԹ'&Oq>84"Hc~{#޳ix3uɜ4ߞlZUּk˾l˶e95{@ @ @ @ @ yi{}>߯{|kW_{;{z{[W|7Lo}+fm޼[ok&Iߊy7O2C %}<:x'DZsΚGҷNʣwkq:;kI:)N$5)khկn-~\)}_)y2V e8g}p>[wY?kQ_\=n=[=kg7kGޭggmF}MsȻl͟5ߨiyg75W_[V;z>g?%gSu>m-Ggs5 @ @ @ @ @ @ { MCߵh9ڻ,۾Fu߿{y}c辝unu?['G9l纾g(9]͹ngoL}辝unu3u￝}ͧ[YnnY|,o9w[mgr>Nrw).g @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ 
@GJnipype-1.7.0/nipype/testing/data/track1.trk000066400000000000000000000000001413403311400206040ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/track2.trk000066400000000000000000000000001413403311400206050ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/tracks.tck000066400000000000000000000000001413403311400206670ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/tracks.trk000066400000000000000000000000001413403311400207060ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/tract_data.Bfloat000066400000000000000000000000001413403311400221340ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/tracts.Bdouble000066400000000000000000000000001413403311400214730ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/trans.mat000066400000000000000000000000001413403311400205270ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/tst_class0.nii000066400000000000000000000000001413403311400214550ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/tst_class1.nii000066400000000000000000000000001413403311400214560ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/u_rc1s1_Template.nii000066400000000000000000000000001413403311400225060ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/u_rc1s2_Template.nii000066400000000000000000000000001413403311400225070ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/u_rc1s3_Template.nii000066400000000000000000000000001413403311400225100ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/varcope.nii.gz000066400000000000000000000000001413403311400214540ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/varcope1run1.nii.gz000066400000000000000000000000001413403311400223430ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/varcope1run2.nii.gz000066400000000000000000000000001413403311400223440ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/varcope2run1.nii.gz000066400000000000000000000000
001413403311400223440ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/varcope2run2.nii.gz000066400000000000000000000000001413403311400223450ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/voxel-order_data.Bfloat000066400000000000000000000000001413403311400232650ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/voxeldisplacemap.vdm000066400000000000000000000000001413403311400227450ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/vsm.nii000066400000000000000000000000001413403311400202030ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/warpfield.nii000066400000000000000000000000001413403311400213530ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/weights.txt000066400000000000000000000000001413403311400211100ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/wm.mgz000066400000000000000000000000001413403311400200370ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/wm_mask.mif000066400000000000000000000000001413403311400210300ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/wm_undersampled.nii000066400000000000000000000000001413403311400225640ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/data/zstat1.nii.gz000066400000000000000000000000001413403311400212430ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/decorators.py000066400000000000000000000044001413403311400205130ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Extend numpy's decorators to use nipype's gui and data labels. """ from numpy.testing import dec from nibabel.data import DataError def make_label_dec(label, ds=None): """Factory function to create a decorator that applies one or more labels. Parameters ---------- label : str or sequence One or more labels that will be applied by the decorator to the functions it decorates. 
Labels are attributes of the decorated function with their value set to True. ds : str An optional docstring for the resulting decorator. If not given, a default docstring is auto-generated. Returns ------- ldec : function A decorator. Examples -------- >>> slow = make_label_dec('slow') >>> slow.__doc__ "Labels a test as 'slow'" >>> rare = make_label_dec(['slow','hard'], ... "Mix labels 'slow' and 'hard' for rare tests") >>> @rare ... def f(): pass ... >>> >>> f.slow True >>> f.hard True """ if isinstance(label, str): labels = [label] else: labels = label # Validate that the given label(s) are OK for use in setattr() by doing a # dry run on a dummy function. tmp = lambda: None for label in labels: setattr(tmp, label, True) # This is the actual decorator we'll return def decor(f): for label in labels: setattr(f, label, True) return f # Apply the user's docstring if ds is None: ds = "Labels a test as %r" % label decor.__doc__ = ds return decor # For tests that need further review def needs_review(msg): """Skip a test that needs further review. Parameters ---------- msg : string msg regarding the review that needs to be done """ def skip_func(func): return dec.skipif(True, msg)(func) return skip_func # Easier version of the numpy knownfailure def knownfailure(f): return dec.knownfailureif(True)(f) def if_datasource(ds, msg): try: ds.get_filename() except DataError: return dec.skipif(True, msg) return lambda f: f nipype-1.7.0/nipype/testing/fixtures.py000066400000000000000000000061421413403311400202240ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Pytest fixtures used in tests. 
""" import os import pytest import numpy as np import nibabel as nb from nipype.utils.filemanip import ensure_list from nipype.interfaces.fsl import Info from nipype.interfaces.fsl.base import FSLCommand def analyze_pair_image_files(outdir, filelist, shape): for f in ensure_list(filelist): hdr = nb.Nifti1Header() hdr.set_data_shape(shape) img = np.random.random(shape) analyze = nb.AnalyzeImage(img, np.eye(4), hdr) analyze.to_filename(os.path.join(outdir, f)) def nifti_image_files(outdir, filelist, shape): for f in ensure_list(filelist): img = np.random.random(shape) nb.Nifti1Image(img, np.eye(4), None).to_filename(os.path.join(outdir, f)) @pytest.fixture() def create_files_in_directory(request, tmpdir): cwd = tmpdir.chdir() filelist = ["a.nii", "b.nii"] nifti_image_files(tmpdir.strpath, filelist, shape=(3, 3, 3, 4)) def change_directory(): cwd.chdir() request.addfinalizer(change_directory) return (filelist, tmpdir.strpath) @pytest.fixture() def create_analyze_pair_file_in_directory(request, tmpdir): cwd = tmpdir.chdir() filelist = ["a.hdr"] analyze_pair_image_files(tmpdir.strpath, filelist, shape=(3, 3, 3, 4)) def change_directory(): cwd.chdir() request.addfinalizer(change_directory) return (filelist, tmpdir.strpath) @pytest.fixture() def create_files_in_directory_plus_dummy_file(request, tmpdir): cwd = tmpdir.chdir() filelist = ["a.nii", "b.nii"] nifti_image_files(tmpdir.strpath, filelist, shape=(3, 3, 3, 4)) tmpdir.join("reg.dat").write("dummy file") filelist.append("reg.dat") def change_directory(): cwd.chdir() request.addfinalizer(change_directory) return (filelist, tmpdir.strpath) @pytest.fixture() def create_surf_file_in_directory(request, tmpdir): cwd = tmpdir.chdir() surf = "lh.a.nii" nifti_image_files(tmpdir.strpath, filelist=surf, shape=(1, 100, 1)) def change_directory(): cwd.chdir() request.addfinalizer(change_directory) return (surf, tmpdir.strpath) def set_output_type(fsl_output_type): prev_output_type = os.environ.get("FSLOUTPUTTYPE", None) if 
fsl_output_type is not None: os.environ["FSLOUTPUTTYPE"] = fsl_output_type elif "FSLOUTPUTTYPE" in os.environ: del os.environ["FSLOUTPUTTYPE"] FSLCommand.set_default_output_type(Info.output_type()) return prev_output_type @pytest.fixture(params=[None] + sorted(Info.ftypes)) def create_files_in_directory_plus_output_type(request, tmpdir): func_prev_type = set_output_type(request.param) origdir = tmpdir.chdir() filelist = ["a.nii", "b.nii"] nifti_image_files(tmpdir.strpath, filelist, shape=(3, 3, 3, 4)) out_ext = Info.output_type_to_ext(Info.output_type()) def fin(): set_output_type(func_prev_type) origdir.chdir() request.addfinalizer(fin) return (filelist, tmpdir.strpath, out_ext) nipype-1.7.0/nipype/testing/tests/000077500000000000000000000000001413403311400171405ustar00rootroot00000000000000nipype-1.7.0/nipype/testing/tests/test_utils.py000066400000000000000000000022621413403311400217130ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Test testing utilities """ import os import warnings import subprocess from unittest.mock import patch, MagicMock from unittest import SkipTest from nipype.testing.utils import TempFATFS def test_tempfatfs(): try: fatfs = TempFATFS() except (IOError, OSError): raise SkipTest("Cannot mount FAT filesystems with FUSE") with fatfs as tmp_dir: assert os.path.exists(tmp_dir) @patch( "subprocess.check_call", MagicMock(side_effect=subprocess.CalledProcessError("", "")), ) def test_tempfatfs_calledprocesserror(): try: TempFATFS() except IOError as e: assert isinstance(e, IOError) assert isinstance(e.__cause__, subprocess.CalledProcessError) else: assert False @patch("subprocess.check_call", MagicMock()) @patch("subprocess.Popen", MagicMock(side_effect=OSError())) def test_tempfatfs_oserror(): try: TempFATFS() except IOError as e: assert isinstance(e, IOError) assert isinstance(e.__cause__, OSError) else: assert False 
nipype-1.7.0/nipype/testing/utils.py000066400000000000000000000056171413403311400175210ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Additional handy utilities for testing """ import os import time import shutil import signal import subprocess from subprocess import CalledProcessError from tempfile import mkdtemp from ..utils.misc import package_check __docformat__ = "restructuredtext" import numpy as np import nibabel as nb class TempFATFS(object): def __init__(self, size_in_mbytes=8, delay=0.5): """Temporary filesystem for testing non-POSIX filesystems on a POSIX system. with TempFATFS() as fatdir: target = os.path.join(fatdir, 'target') copyfile(file1, target, copy=False) assert not os.path.islink(target) Arguments --------- size_in_mbytes : int Size (in MiB) of filesystem to create delay : float Time (in seconds) to wait for fusefat to start, stop """ self.delay = delay self.tmpdir = mkdtemp() self.dev_null = open(os.devnull, "wb") vfatfile = os.path.join(self.tmpdir, "vfatblock") self.vfatmount = os.path.join(self.tmpdir, "vfatmount") self.canary = os.path.join(self.vfatmount, ".canary") with open(vfatfile, "wb") as fobj: fobj.write(b"\x00" * (int(size_in_mbytes) << 20)) os.mkdir(self.vfatmount) mkfs_args = ["mkfs.vfat", vfatfile] mount_args = ["fusefat", "-o", "rw+", "-f", vfatfile, self.vfatmount] try: subprocess.check_call( args=mkfs_args, stdout=self.dev_null, stderr=self.dev_null ) except CalledProcessError as e: raise IOError("mkfs.vfat failed") from e try: self.fusefat = subprocess.Popen( args=mount_args, stdout=self.dev_null, stderr=self.dev_null ) except OSError as e: raise IOError("fusefat is not installed") from e time.sleep(self.delay) if self.fusefat.poll() is not None: raise IOError("fusefat terminated too soon") open(self.canary, "wb").close() def __enter__(self): return self.vfatmount def __exit__(self, exc_type, exc_val, 
exc_tb): if self.fusefat is not None: self.fusefat.send_signal(signal.SIGINT) # Allow 1s to return without sending terminate for count in range(10): time.sleep(0.1) if self.fusefat.poll() is not None: break else: self.fusefat.terminate() time.sleep(self.delay) assert not os.path.exists(self.canary) self.dev_null.close() shutil.rmtree(self.tmpdir) def save_toy_nii(ndarray, filename): toy = nb.Nifti1Image(ndarray, np.eye(4)) nb.nifti1.save(toy, filename) return filename nipype-1.7.0/nipype/tests/000077500000000000000000000000001413403311400154635ustar00rootroot00000000000000nipype-1.7.0/nipype/tests/__init__.py000066400000000000000000000000001413403311400175620ustar00rootroot00000000000000nipype-1.7.0/nipype/tests/test_nipype.py000066400000000000000000000061321413403311400204020ustar00rootroot00000000000000import os from .. import get_info from ..info import get_nipype_gitversion import pytest def test_nipype_info(): exception_not_raised = True try: get_info() except Exception: exception_not_raised = False assert exception_not_raised @pytest.mark.skipif( not get_nipype_gitversion(), reason="not able to get version from get_nipype_gitversion", ) def test_git_hash(): # removing the first "g" from gitversion get_nipype_gitversion()[1:] == get_info()["commit_hash"] def _check_no_et(): import os from unittest.mock import patch et = os.getenv("NIPYPE_NO_ET") is None with patch.dict("os.environ", {"NIPYPE_NO_ET": "1"}): from nipype.interfaces.base import BaseInterface ver_data = BaseInterface._etelemetry_version_data if et and ver_data is None: raise ValueError( "etelemetry enabled and version data missing - double hits likely" ) return et def test_no_et_bare(tmp_path): from unittest.mock import patch from nipype.pipeline import engine as pe from nipype.interfaces import utility as niu from nipype.interfaces.base import BaseInterface et = os.getenv("NIPYPE_NO_ET") is None # Pytest doesn't trigger this, so let's pretend it's there with patch.object(BaseInterface, 
"_etelemetry_version_data", {}): # Direct function call - environment not set f = niu.Function(function=_check_no_et) res = f.run() assert res.outputs.out == et # Basic node - environment not set n = pe.Node( niu.Function(function=_check_no_et), name="n", base_dir=str(tmp_path) ) res = n.run() assert res.outputs.out == et # Linear run - environment not set wf1 = pe.Workflow(name="wf1", base_dir=str(tmp_path)) wf1.add_nodes([pe.Node(niu.Function(function=_check_no_et), name="n")]) res = wf1.run() assert next(iter(res.nodes)).result.outputs.out == et @pytest.mark.parametrize("plugin", ("MultiProc", "LegacyMultiProc")) @pytest.mark.parametrize("run_without_submitting", (True, False)) def test_no_et_multiproc(tmp_path, plugin, run_without_submitting): from unittest.mock import patch from nipype.pipeline import engine as pe from nipype.interfaces import utility as niu from nipype.interfaces.base import BaseInterface et = os.getenv("NIPYPE_NO_ET") is None # Multiprocessing runs initialize new processes with NIPYPE_NO_ET # This does not apply to unsubmitted jobs, run by the main thread expectation = et if run_without_submitting else False # Pytest doesn't trigger this, so let's pretend it's there with patch.object(BaseInterface, "_etelemetry_version_data", {}): wf = pe.Workflow(name="wf2", base_dir=str(tmp_path)) n = pe.Node( niu.Function(function=_check_no_et), run_without_submitting=run_without_submitting, name="n", ) wf.add_nodes([n]) res = wf.run(plugin=plugin, plugin_args={"n_procs": 1}) assert next(iter(res.nodes)).result.outputs.out is expectation nipype-1.7.0/nipype/utils/000077500000000000000000000000001413403311400154615ustar00rootroot00000000000000nipype-1.7.0/nipype/utils/README.txt000066400000000000000000000003701413403311400171570ustar00rootroot00000000000000================== Nipype Utilities ================== This directory contains various utilities used in nipype. Some of them have been copied from nipy. Any changes to these should be done upstream. 
* From nipy: * onetime.py * tmpdirs.py nipype-1.7.0/nipype/utils/__init__.py000066400000000000000000000002151413403311400175700ustar00rootroot00000000000000# -*- coding: utf-8 -*- from .onetime import OneTimeProperty, setattr_on_read from .tmpdirs import TemporaryDirectory, InTemporaryDirectory nipype-1.7.0/nipype/utils/config.py000066400000000000000000000302141413403311400173000ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Created on 20 Apr 2010 logging options : INFO, DEBUG hash_method : content, timestamp @author: Chris Filo Gorgolewski """ import os import sys import errno import atexit from warnings import warn from distutils.version import LooseVersion import configparser import numpy as np from simplejson import load, dump from .misc import str2bool from filelock import SoftFileLock CONFIG_DEPRECATIONS = { "profile_runtime": ("monitoring.enabled", "1.0"), "filemanip_level": ("logging.utils_level", "1.0"), } DEFAULT_CONFIG_TPL = """\ [logging] workflow_level = INFO utils_level = INFO interface_level = INFO log_to_file = false log_directory = {log_dir} log_size = 16384000 log_rotate = 4 [execution] create_report = true crashdump_dir = {crashdump_dir} hash_method = timestamp job_finished_timeout = 5 keep_inputs = false local_hash_check = true matplotlib_backend = Agg plugin = Linear remove_node_directories = false remove_unnecessary_outputs = true try_hard_link_datasink = true single_thread_matlab = true crashfile_format = pklz stop_on_first_crash = false stop_on_first_rerun = false use_relative_paths = false stop_on_unknown_version = false write_provenance = false parameterize_dirs = true poll_sleep_duration = 2 xvfb_max_wait = 10 check_version = true [monitoring] enabled = false sample_frequency = 1 summary_append = true [check] interval = 1209600 """ def mkdir_p(path): try: os.makedirs(path) except OSError as exc: if exc.errno == 
errno.EEXIST and os.path.isdir(path): pass else: raise class NipypeConfig(object): """Base nipype config class""" def __init__(self, *args, **kwargs): self._config = configparser.ConfigParser() self._cwd = None config_dir = os.path.expanduser( os.getenv("NIPYPE_CONFIG_DIR", default="~/.nipype") ) self.data_file = os.path.join(config_dir, "nipype.json") self.set_default_config() self._display = None self._resource_monitor = None self._config.read([os.path.join(config_dir, "nipype.cfg"), "nipype.cfg"]) for option in CONFIG_DEPRECATIONS: for section in ["execution", "logging", "monitoring"]: if self.has_option(section, option): new_section, new_option = CONFIG_DEPRECATIONS[option][0].split(".") if not self.has_option(new_section, new_option): # Warn implicit in get self.set(new_section, new_option, self.get(section, option)) @property def cwd(self): """Cache current working directory ASAP""" # Run getcwd only once, preventing multiproc to finish # with error having changed to the wrong path if self._cwd is None: try: self._cwd = os.getcwd() except OSError: warn( 'Trying to run Nipype from a nonexistent directory "{}".'.format( os.getenv("PWD", "unknown") ), RuntimeWarning, ) raise return self._cwd def set_default_config(self): """Read default settings template and set into config object""" default_cfg = DEFAULT_CONFIG_TPL.format( log_dir=os.path.expanduser("~"), # Get $HOME in a platform-agnostic way crashdump_dir=self.cwd, # Read cached cwd ) try: self._config.read_string(default_cfg) # Python >= 3.2 except AttributeError: from io import StringIO self._config.readfp(StringIO(default_cfg)) def enable_debug_mode(self): """Enables debug configuration""" from .. 
import logging self._config.set("execution", "stop_on_first_crash", "true") self._config.set("execution", "remove_unnecessary_outputs", "false") self._config.set("execution", "keep_inputs", "true") self._config.set("logging", "workflow_level", "DEBUG") self._config.set("logging", "interface_level", "DEBUG") self._config.set("logging", "utils_level", "DEBUG") logging.update_logging(self._config) def set_log_dir(self, log_dir): """Sets logging directory This should be the first thing that is done before any nipype class with logging is imported. """ self._config.set("logging", "log_directory", log_dir) def get(self, section, option, default=None): """Get an option""" if option in CONFIG_DEPRECATIONS: msg = ( 'Config option "%s" has been deprecated as of nipype %s. ' 'Please use "%s" instead.' ) % (option, CONFIG_DEPRECATIONS[option][1], CONFIG_DEPRECATIONS[option][0]) warn(msg) section, option = CONFIG_DEPRECATIONS[option][0].split(".") if self._config.has_option(section, option): return self._config.get(section, option) return default def set(self, section, option, value): """Set new value on option""" if isinstance(value, bool): value = str(value) if option in CONFIG_DEPRECATIONS: msg = ( 'Config option "%s" has been deprecated as of nipype %s. ' 'Please use "%s" instead.' 
) % (option, CONFIG_DEPRECATIONS[option][1], CONFIG_DEPRECATIONS[option][0]) warn(msg) section, option = CONFIG_DEPRECATIONS[option][0].split(".") return self._config.set(section, option, value) def getboolean(self, section, option): """Get a boolean option from section""" return self._config.getboolean(section, option) def has_option(self, section, option): """Check if option exists in section""" return self._config.has_option(section, option) @property def _sections(self): return self._config._sections def get_data(self, key): """Read options file""" if not os.path.exists(self.data_file): return None with SoftFileLock("%s.lock" % self.data_file): with open(self.data_file, "rt") as file: datadict = load(file) if key in datadict: return datadict[key] return None def save_data(self, key, value): """Store config flie""" datadict = {} if os.path.exists(self.data_file): with SoftFileLock("%s.lock" % self.data_file): with open(self.data_file, "rt") as file: datadict = load(file) else: dirname = os.path.dirname(self.data_file) if not os.path.exists(dirname): mkdir_p(dirname) with SoftFileLock("%s.lock" % self.data_file): with open(self.data_file, "wt") as file: datadict[key] = value dump(datadict, file) def update_config(self, config_dict): """Extend internal dictionary with config_dict""" for section in ["execution", "logging", "check"]: if section in config_dict: for key, val in list(config_dict[section].items()): if not key.startswith("__"): self._config.set(section, key, str(val)) def update_matplotlib(self): """Set backend on matplotlib from options""" import matplotlib matplotlib.use(self.get("execution", "matplotlib_backend")) def enable_provenance(self): """Sets provenance storing on""" self._config.set("execution", "write_provenance", "true") self._config.set("execution", "hash_method", "content") @property def resource_monitor(self): """Check if resource_monitor is available""" if self._resource_monitor is not None: return self._resource_monitor # Cache config 
from nipype config self.resource_monitor = ( str2bool(self._config.get("monitoring", "enabled")) or False ) return self._resource_monitor @resource_monitor.setter def resource_monitor(self, value): # Accept string true/false values if isinstance(value, (str, bytes)): value = str2bool(value.lower()) if value is False: self._resource_monitor = False elif value is True: if not self._resource_monitor: # Before setting self._resource_monitor check psutil # availability self._resource_monitor = False try: import psutil self._resource_monitor = LooseVersion( psutil.__version__ ) >= LooseVersion("5.0") except ImportError: pass finally: if not self._resource_monitor: warn( "Could not enable the resource monitor: " "psutil>=5.0 could not be imported." ) self._config.set( "monitoring", "enabled", ("%s" % self._resource_monitor).lower() ) def enable_resource_monitor(self): """Sets the resource monitor on""" self.resource_monitor = True def disable_resource_monitor(self): """Sets the resource monitor off""" self.resource_monitor = False def get_display(self): """Returns the first display available""" # Check if an Xorg server is listening # import subprocess as sp # if not hasattr(sp, 'DEVNULL'): # setattr(sp, 'DEVNULL', os.devnull) # x_listening = bool(sp.call('ps au | grep -v grep | grep -i xorg', # shell=True, stdout=sp.DEVNULL)) if self._display is not None: return ":%d" % self._display.new_display sysdisplay = None if self._config.has_option("execution", "display_variable"): sysdisplay = self._config.get("execution", "display_variable") sysdisplay = sysdisplay or os.getenv("DISPLAY") if sysdisplay: from collections import namedtuple def _mock(): pass # Store a fake Xvfb object. Format - :[.] ndisp = sysdisplay.split(":")[-1].split(".")[0] Xvfb = namedtuple("Xvfb", ["new_display", "stop"]) self._display = Xvfb(int(ndisp), _mock) return self.get_display() else: if "darwin" in sys.platform: raise RuntimeError( "Xvfb requires root permissions to run in OSX. 
Please " "make sure that an X server is listening and set the " "appropriate config on either $DISPLAY or nipype's " '"display_variable" config. Valid X servers include ' "VNC, XQuartz, or manually started Xvfb." ) # If $DISPLAY is empty, it confuses Xvfb so unset if sysdisplay == "": del os.environ["DISPLAY"] try: from xvfbwrapper import Xvfb except ImportError: raise RuntimeError( "A display server was required, but $DISPLAY is not " "defined and Xvfb could not be imported." ) self._display = Xvfb(nolisten="tcp") self._display.start() # Older versions of xvfbwrapper used vdisplay_num if not hasattr(self._display, "new_display"): setattr(self._display, "new_display", self._display.vdisplay_num) return self.get_display() def stop_display(self): """Closes the display if started""" if self._display is not None: from .. import logging self._display.stop() logging.getLogger("nipype.interface").debug("Closing display (if virtual)") @atexit.register def free_display(): """Stop virtual display (if it is up)""" from .. import config config.stop_display() nipype-1.7.0/nipype/utils/docparse.py000066400000000000000000000254421413403311400176420ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Utilities to pull in documentation from command-line tools. Examples -------- # Instantiate bet object from nipype.interfaces import fsl from nipype.utils import docparse better = fsl.Bet() docstring = docparse.get_doc(better.cmd, better.opt_map) """ import subprocess from ..interfaces.base import CommandLine from .misc import is_container def grab_doc(cmd, trap_error=True): """Run cmd without args and grab documentation. 
Parameters ---------- cmd : string Command line string trap_error : boolean Ensure that returncode is 0 Returns ------- doc : string The command line documentation """ proc = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True ) stdout, stderr = proc.communicate() if trap_error and proc.returncode: msg = "Attempting to run %s. Returned Error: %s" % (cmd, stderr) raise IOError(msg) if stderr: # A few programs, like fast and fnirt, send their help to # stderr instead of stdout. # XXX: Test for error vs. doc in stderr return stderr return stdout def reverse_opt_map(opt_map): """Reverse the key/value pairs of the option map in the interface classes. Parameters ---------- opt_map : dict Dictionary mapping the attribute name to a command line flag. Each interface class defines these for the command it wraps. Returns ------- rev_opt_map : dict Dictionary mapping the flags to the attribute name. """ # For docs, we only care about the mapping from our attribute # names to the command-line flags. The 'v.split()[0]' below # strips off the string format characters. # if (k != 'flags' and v) , key must not be flags as it is generic, # v must not be None or it cannot be parsed by this line revdict = {} for key, value in list(opt_map.items()): if is_container(value): # The value is a tuple where the first element is the # format string and the second element is a docstring. value = value[0] if key != "flags" and value is not None: revdict[value.split()[0]] = key return revdict def format_params(paramlist, otherlist=None): """Format the parameters according to the nipy style conventions. Since the external programs do not conform to any conventions, the resulting docstrings are not ideal. But at a minimum the Parameters section is reasonably close. Parameters ---------- paramlist : list List of strings where each list item matches exactly one parameter and it's description. These items will go into the 'Parameters' section of the docstring. 
otherlist : list List of strings, similar to paramlist above. These items will go into the 'Other Parameters' section of the docstring. Returns ------- doc : string The formatted docstring. """ hdr = "Parameters" delim = "----------" paramlist.insert(0, delim) paramlist.insert(0, hdr) params = "\n".join(paramlist) otherparams = [] doc = "".join(params) if otherlist: hdr = "Others Parameters" delim = "-----------------" otherlist.insert(0, delim) otherlist.insert(0, hdr) otherlist.insert(0, "\n") otherparams = "\n".join(otherlist) doc = "".join([doc, otherparams]) return doc def insert_doc(doc, new_items): """Insert ``new_items`` into the beginning of the ``doc`` Docstrings in ``new_items`` will be inserted right after the *Parameters* header but before the existing docs. Parameters ---------- doc : str The existing docstring we're inserting docmentation into. new_items : list List of strings to be inserted in the ``doc``. Examples -------- >>> from nipype.utils.docparse import insert_doc >>> doc = '''Parameters ... ---------- ... outline : ... something about an outline''' >>> new_items = ['infile : str', ' The name of the input file'] >>> new_items.extend(['outfile : str', ' The name of the output file']) >>> newdoc = insert_doc(doc, new_items) >>> print(newdoc) Parameters ---------- infile : str The name of the input file outfile : str The name of the output file outline : something about an outline """ # Insert new_items after the Parameters header doclist = doc.split("\n") tmpdoc = doclist[:2] # Add new_items tmpdoc.extend(new_items) # Add rest of documents tmpdoc.extend(doclist[2:]) # Insert newlines newdoc = [] for line in tmpdoc: newdoc.append(line) newdoc.append("\n") # We add one too many newlines, remove it. newdoc.pop(-1) return "".join(newdoc) def build_doc(doc, opts): """Build docstring from doc and options Parameters ---------- rep_doc : string Documentation string opts : dict Dictionary of option attributes and keys. 
Use reverse_opt_map to reverse flags and attrs from opt_map class attribute. Returns ------- newdoc : string The docstring with flags replaced with attribute names and formated to match nipy standards (as best we can). """ # Split doc into line elements. Generally, each line is an # individual flag/option. doclist = doc.split("\n") newdoc = [] flags_doc = [] for line in doclist: linelist = line.split() if not linelist: # Probably an empty line continue # For lines we care about, the first item is the flag if "," in linelist[0]: # sometimes flags are only seperated by comma flag = linelist[0].split(",")[0] else: flag = linelist[0] attr = opts.get(flag) if attr is not None: # newline = line.replace(flag, attr) # Replace the flag with our attribute name linelist[0] = "%s :\n " % str(attr) # Add some line formatting newline = " ".join(linelist) newdoc.append(newline) else: if line[0].isspace(): # For all the docs I've looked at, the flags all have # indentation (spaces) at the start of the line. # Other parts of the docs, like 'usage' statements # start with alpha-numeric characters. We only care # about the flags. flags_doc.append(line) return format_params(newdoc, flags_doc) def get_doc(cmd, opt_map, help_flag=None, trap_error=True): """Get the docstring from our command and options map. Parameters ---------- cmd : string The command whose documentation we are fetching opt_map : dict Dictionary of flags and option attributes. help_flag : string Provide additional help flag. 
e.g., -h trap_error : boolean Override if underlying command returns a non-zero returncode Returns ------- doc : string The formated docstring """ res = CommandLine( "which %s" % cmd.split(" ")[0], resource_monitor=False, terminal_output="allatonce", ).run() cmd_path = res.runtime.stdout.strip() if cmd_path == "": raise Exception("Command %s not found" % cmd.split(" ")[0]) if help_flag: cmd = " ".join((cmd, help_flag)) doc = grab_doc(cmd, trap_error) opts = reverse_opt_map(opt_map) return build_doc(doc, opts) def _parse_doc(doc, style=["--"]): """Parses a help doc for inputs Parameters ---------- doc : string Documentation string style : string default ['--'] The help command style (--, -) Returns ------- optmap : dict of input parameters """ # Split doc into line elements. Generally, each line is an # individual flag/option. doclist = doc.split("\n") optmap = {} if isinstance(style, (str, bytes)): style = [style] for line in doclist: linelist = line.split() flag = [ item for i, item in enumerate(linelist) if i < 2 and any([item.startswith(s) for s in style]) and len(item) > 1 ] if flag: if len(flag) == 1: style_idx = [flag[0].startswith(s) for s in style].index(True) flag = flag[0] else: style_idx = [] for f in flag: for i, s in enumerate(style): if f.startswith(s): style_idx.append(i) break flag = flag[style_idx.index(min(style_idx))] style_idx = min(style_idx) optmap[flag.split(style[style_idx])[1]] = "%s %%s" % flag return optmap def get_params_from_doc(cmd, style="--", help_flag=None, trap_error=True): """Auto-generate option map from command line help Parameters ---------- cmd : string The command whose documentation we are fetching style : string default ['--'] The help command style (--, -). Multiple styles can be provided in a list e.g. ['--','-']. help_flag : string Provide additional help flag. 
e.g., -h trap_error : boolean Override if underlying command returns a non-zero returncode Returns ------- optmap : dict Contains a mapping from input to command line variables """ res = CommandLine( "which %s" % cmd.split(" ")[0], resource_monitor=False, terminal_output="allatonce", ).run() cmd_path = res.runtime.stdout.strip() if cmd_path == "": raise Exception("Command %s not found" % cmd.split(" ")[0]) if help_flag: cmd = " ".join((cmd, help_flag)) doc = grab_doc(cmd, trap_error) return _parse_doc(doc, style) def replace_opts(rep_doc, opts): """Replace flags with parameter names. This is a simple operation where we replace the command line flags with the attribute names. Parameters ---------- rep_doc : string Documentation string opts : dict Dictionary of option attributes and keys. Use reverse_opt_map to reverse flags and attrs from opt_map class attribute. Returns ------- rep_doc : string New docstring with flags replaces with attribute names. Examples -------- doc = grab_doc('bet') opts = reverse_opt_map(fsl.Bet.opt_map) rep_doc = replace_opts(doc, opts) """ # Replace flags with attribute names for key, val in list(opts.items()): rep_doc = rep_doc.replace(key, val) return rep_doc nipype-1.7.0/nipype/utils/draw_gantt_chart.py000066400000000000000000000440271413403311400213550ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Module to draw an html gantt chart from logfile produced by ``nipype.utils.profiler.log_nodes_cb()`` """ # Import packages import sys import random import datetime import simplejson as json from collections import OrderedDict # Pandas try: import pandas as pd except ImportError: print( "Pandas not found; in order for full functionality of this module " "install the pandas package" ) pass def create_event_dict(start_time, nodes_list): """ Function to generate a dictionary of event (start/finish) nodes from the nodes list 
Parameters ---------- start_time : datetime.datetime a datetime object of the pipeline start time nodes_list : list a list of the node dictionaries that were run in the pipeline Returns ------- events : dictionary a dictionary where the key is the timedelta from the start of the pipeline execution to the value node it accompanies """ # Import packages import copy events = {} for node in nodes_list: # Format node fields estimated_threads = node.get("num_threads", 1) estimated_memory_gb = node.get("estimated_memory_gb", 1.0) runtime_threads = node.get("runtime_threads", 0) runtime_memory_gb = node.get("runtime_memory_gb", 0.0) # Init and format event-based nodes node["estimated_threads"] = estimated_threads node["estimated_memory_gb"] = estimated_memory_gb node["runtime_threads"] = runtime_threads node["runtime_memory_gb"] = runtime_memory_gb start_node = node finish_node = copy.deepcopy(node) start_node["event"] = "start" finish_node["event"] = "finish" # Get dictionary key start_delta = (node["start"] - start_time).total_seconds() finish_delta = (node["finish"] - start_time).total_seconds() # Populate dictionary if events.get(start_delta) or events.get(finish_delta): err_msg = "Event logged twice or events started at exact same time!" 
raise KeyError(err_msg) events[start_delta] = start_node events[finish_delta] = finish_node # Return events dictionary return events def log_to_dict(logfile): """ Function to extract log node dictionaries into a list of python dictionaries and return the list as well as the final node Parameters ---------- logfile : string path to the json-formatted log file generated from a nipype workflow execution Returns ------- nodes_list : list a list of python dictionaries containing the runtime info for each nipype node """ # Init variables with open(logfile, "r") as content: # read file separating each line lines = content.readlines() nodes_list = [json.loads(l) for l in lines] # Return list of nodes return nodes_list def calculate_resource_timeseries(events, resource): """ Given as event dictionary, calculate the resources used as a timeseries Parameters ---------- events : dictionary a dictionary of event-based node dictionaries of the workflow execution statistics resource : string the resource of interest to return the time-series of; e.g. 
'runtime_memory_gb', 'estimated_threads', etc Returns ------- time_series : pandas Series a pandas Series object that contains timestamps as the indices and the resource amount as values """ # Import packages import pandas as pd # Init variables res = OrderedDict() all_res = 0.0 # Iterate through the events for _, event in sorted(events.items()): if event["event"] == "start": if resource in event and event[resource] != "Unknown": all_res += float(event[resource]) current_time = event["start"] elif event["event"] == "finish": if resource in event and event[resource] != "Unknown": all_res -= float(event[resource]) current_time = event["finish"] res[current_time] = all_res # Formulate the pandas timeseries time_series = pd.Series(data=list(res.values()), index=list(res.keys())) # Downsample where there is only value-diff ts_diff = time_series.diff() time_series = time_series[ts_diff != 0] # Return the new time series return time_series def draw_lines(start, total_duration, minute_scale, scale): """ Function to draw the minute line markers and timestamps Parameters ---------- start : datetime.datetime obj start time for first minute line marker total_duration : float total duration of the workflow execution (in seconds) minute_scale : integer the scale, in minutes, at which to plot line markers for the gantt chart; for example, minute_scale=10 means there are lines drawn at every 10 minute interval from start to finish scale : integer scale factor in pixel spacing between minute line markers Returns ------- result : string the html-formatted string for producing the minutes-based time line markers """ # Init variables result = "" next_line = 220 next_time = start num_lines = int(((total_duration // 60) // minute_scale) + 2) # Iterate through the lines and create html line markers string for line in range(num_lines): # Line object new_line = "


" % next_line result += new_line # Time digits time = "

%02d:%02d

" % ( next_line - 20, next_time.hour, next_time.minute, ) result += time # Increment line spacing and digits next_line += minute_scale * scale next_time += datetime.timedelta(minutes=minute_scale) # Return html string for time line markers return result def draw_nodes(start, nodes_list, cores, minute_scale, space_between_minutes, colors): """ Function to return the html-string of the node drawings for the gantt chart Parameters ---------- start : datetime.datetime obj start time for first node nodes_list : list a list of the node dictionaries cores : integer the number of cores given to the workflow via the 'n_procs' plugin arg total_duration : float total duration of the workflow execution (in seconds) minute_scale : integer the scale, in minutes, at which to plot line markers for the gantt chart; for example, minute_scale=10 means there are lines drawn at every 10 minute interval from start to finish space_between_minutes : integer scale factor in pixel spacing between minute line markers colors : list a list of colors to choose from when coloring the nodes in the gantt chart Returns ------- result : string the html-formatted string for producing the minutes-based time line markers """ # Init variables result = "" scale = space_between_minutes / minute_scale space_between_minutes = space_between_minutes / scale end_times = [ datetime.datetime( start.year, start.month, start.day, start.hour, start.minute, start.second ) for core in range(cores) ] # For each node in the pipeline for node in nodes_list: # Get start and finish times node_start = node["start"] node_finish = node["finish"] # Calculate an offset and scale duration offset = ( (node_start - start).total_seconds() / 60 ) * scale * space_between_minutes + 220 # Scale duration scale_duration = (node["duration"] / 60) * scale * space_between_minutes if scale_duration < 5: scale_duration = 5 scale_duration -= 2 # Left left = 60 for core in range(len(end_times)): if end_times[core] < node_start: left += core * 
30 end_times[core] = datetime.datetime( node_finish.year, node_finish.month, node_finish.day, node_finish.hour, node_finish.minute, node_finish.second, ) break # Get color for node object color = random.choice(colors) if "error" in node: color = "red" # Setup dictionary for node html string insertion node_dict = { "left": left, "offset": offset, "scale_duration": scale_duration, "color": color, "node_name": node["name"], "node_dur": node["duration"] / 60.0, "node_start": node_start.strftime("%Y-%m-%d %H:%M:%S"), "node_finish": node_finish.strftime("%Y-%m-%d %H:%M:%S"), } # Create new node string new_node = ( "
" % node_dict ) # Append to output result result += new_node # Return html string for nodes return result def draw_resource_bar( start_time, finish_time, time_series, space_between_minutes, minute_scale, color, left, resource, ): """ """ # Memory header result = "

%s

" % (left, resource) # Image scaling factors scale = space_between_minutes / minute_scale space_between_minutes = space_between_minutes / scale # Iterate through time series ts_items = time_series.items() ts_len = len(time_series) for idx, (ts_start, amount) in enumerate(ts_items): if idx < ts_len - 1: ts_end = time_series.index[idx + 1] else: ts_end = finish_time # Calculate offset from start at top offset = ( (ts_start - start_time).total_seconds() / 60.0 ) * scale * space_between_minutes + 220 # Scale duration duration_mins = (ts_end - ts_start).total_seconds() / 60.0 height = duration_mins * scale * space_between_minutes if height < 5: height = 5 height -= 2 # Bar width is proportional to resource amount width = amount * 20 if resource.lower() == "memory": label = "%.3f GB" % amount else: label = "%d threads" % amount # Setup dictionary for bar html string insertion bar_dict = { "color": color, "height": height, "width": width, "offset": offset, "left": left, "label": label, "duration": duration_mins, "start": ts_start.strftime("%Y-%m-%d %H:%M:%S"), "finish": ts_end.strftime("%Y-%m-%d %H:%M:%S"), } bar_html = ( "
" ) # Add another bar to html line result += bar_html % bar_dict # Return bar-formatted html string return result def generate_gantt_chart( logfile, cores, minute_scale=10, space_between_minutes=50, colors=["#7070FF", "#4E4EB2", "#2D2D66", "#9B9BFF"], ): """ Generates a gantt chart in html showing the workflow execution based on a callback log file. This script was intended to be used with the MultiprocPlugin. The following code shows how to set up the workflow in order to generate the log file: Parameters ---------- logfile : string filepath to the callback log file to plot the gantt chart of cores : integer the number of cores given to the workflow via the 'n_procs' plugin arg minute_scale : integer (optional); default=10 the scale, in minutes, at which to plot line markers for the gantt chart; for example, minute_scale=10 means there are lines drawn at every 10 minute interval from start to finish space_between_minutes : integer (optional); default=50 scale factor in pixel spacing between minute line markers colors : list (optional) a list of colors to choose from when coloring the nodes in the gantt chart Returns ------- None the function does not return any value but writes out an html file in the same directory as the callback log path passed in Usage ----- # import logging # import logging.handlers # from nipype.utils.profiler import log_nodes_cb # log_filename = 'callback.log' # logger = logging.getLogger('callback') # logger.setLevel(logging.DEBUG) # handler = logging.FileHandler(log_filename) # logger.addHandler(handler) # #create workflow # workflow = ... # workflow.run(plugin='MultiProc', # plugin_args={'n_procs':8, 'memory':12, 'status_callback': log_nodes_cb}) # generate_gantt_chart('callback.log', 8) """ # add the html header html_string = """
""" close_header = """

Estimated Resource

Actual Resource

Failed Node

""" # Read in json-log to get list of node dicts nodes_list = log_to_dict(logfile) # Create the header of the report with useful information start_node = nodes_list[0] last_node = nodes_list[-1] duration = (last_node["finish"] - start_node["start"]).total_seconds() # Get events based dictionary of node run stats events = create_event_dict(start_node["start"], nodes_list) # Summary strings of workflow at top html_string += ( "

Start: " + start_node["start"].strftime("%Y-%m-%d %H:%M:%S") + "

" ) html_string += ( "

Finish: " + last_node["finish"].strftime("%Y-%m-%d %H:%M:%S") + "

" ) html_string += "

Duration: " + "{0:.2f}".format(duration / 60) + " minutes

" html_string += "

Nodes: " + str(len(nodes_list)) + "

" html_string += "

Cores: " + str(cores) + "

" html_string += close_header # Draw nipype nodes Gantt chart and runtimes html_string += draw_lines( start_node["start"], duration, minute_scale, space_between_minutes ) html_string += draw_nodes( start_node["start"], nodes_list, cores, minute_scale, space_between_minutes, colors, ) # Get memory timeseries estimated_mem_ts = calculate_resource_timeseries(events, "estimated_memory_gb") runtime_mem_ts = calculate_resource_timeseries(events, "runtime_memory_gb") # Plot gantt chart resource_offset = 120 + 30 * cores html_string += draw_resource_bar( start_node["start"], last_node["finish"], estimated_mem_ts, space_between_minutes, minute_scale, "#90BBD7", resource_offset * 2 + 120, "Memory", ) html_string += draw_resource_bar( start_node["start"], last_node["finish"], runtime_mem_ts, space_between_minutes, minute_scale, "#03969D", resource_offset * 2 + 120, "Memory", ) # Get threads timeseries estimated_threads_ts = calculate_resource_timeseries(events, "estimated_threads") runtime_threads_ts = calculate_resource_timeseries(events, "runtime_threads") # Plot gantt chart html_string += draw_resource_bar( start_node["start"], last_node["finish"], estimated_threads_ts, space_between_minutes, minute_scale, "#90BBD7", resource_offset, "Threads", ) html_string += draw_resource_bar( start_node["start"], last_node["finish"], runtime_threads_ts, space_between_minutes, minute_scale, "#03969D", resource_offset, "Threads", ) # finish html html_string += """
""" # save file with open(logfile + ".html", "w") as html_file: html_file.write(html_string) nipype-1.7.0/nipype/utils/filemanip.py000066400000000000000000000654011413403311400200050ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Miscellaneous file manipulation functions """ import sys import pickle import errno import subprocess as sp import gzip import hashlib import locale from hashlib import md5 import os import os.path as op import re import shutil import contextlib import posixpath from pathlib import Path import simplejson as json from time import sleep, time from .. import logging, config, __version__ as version from .misc import is_container fmlogger = logging.getLogger("nipype.utils") related_filetype_sets = [(".hdr", ".img", ".mat"), (".nii", ".mat"), (".BRIK", ".HEAD")] def _resolve_with_filenotfound(path, **kwargs): """Raise FileNotFoundError instead of OSError""" try: return path.resolve(**kwargs) except OSError as e: if isinstance(e, FileNotFoundError): raise raise FileNotFoundError(str(path)) def path_resolve(path, strict=False): try: return _resolve_with_filenotfound(path, strict=strict) except TypeError: # PY35 pass path = path.absolute() if strict or path.exists(): return _resolve_with_filenotfound(path) # This is a hacky shortcut, using path.absolute() unmodified # In cases where the existing part of the path contains a # symlink, different results will be produced return path def split_filename(fname): """Split a filename into parts: path, base filename and extension. 
Parameters ---------- fname : str file or path name Returns ------- pth : str base path from fname fname : str filename from fname, without extension ext : str file extension from fname Examples -------- >>> from nipype.utils.filemanip import split_filename >>> pth, fname, ext = split_filename('/home/data/subject.nii.gz') >>> pth '/home/data' >>> fname 'subject' >>> ext '.nii.gz' """ special_extensions = [".nii.gz", ".tar.gz", ".niml.dset"] pth = op.dirname(fname) fname = op.basename(fname) ext = None for special_ext in special_extensions: ext_len = len(special_ext) if (len(fname) > ext_len) and (fname[-ext_len:].lower() == special_ext.lower()): ext = fname[-ext_len:] fname = fname[:-ext_len] break if not ext: fname, ext = op.splitext(fname) return pth, fname, ext def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True): """Manipulates path and name of input filename Parameters ---------- fname : string A filename (may or may not include path) prefix : string Characters to prepend to the filename suffix : string Characters to append to the filename newpath : string Path to replace the path of the input fname use_ext : boolean If True (default), appends the extension of the original file to the output name. 
Returns ------- Absolute path of the modified filename >>> from nipype.utils.filemanip import fname_presuffix >>> fname = 'foo.nii.gz' >>> fname_presuffix(fname,'pre','post','/tmp') '/tmp/prefoopost.nii.gz' >>> from nipype.interfaces.base import Undefined >>> fname_presuffix(fname, 'pre', 'post', Undefined) == \ fname_presuffix(fname, 'pre', 'post') True """ pth, fname, ext = split_filename(fname) if not use_ext: ext = "" # No need for isdefined: bool(Undefined) evaluates to False if newpath: pth = op.abspath(newpath) return op.join(pth, prefix + fname + suffix + ext) def fnames_presuffix(fnames, prefix="", suffix="", newpath=None, use_ext=True): """Calls fname_presuffix for a list of files.""" f2 = [] for fname in fnames: f2.append(fname_presuffix(fname, prefix, suffix, newpath, use_ext)) return f2 def hash_rename(filename, hashvalue): """renames a file given original filename and hash and sets path to output_directory """ path, name, ext = split_filename(filename) newfilename = "".join((name, "_0x", hashvalue, ext)) return op.join(path, newfilename) def check_forhash(filename): """checks if file has a hash in its filename""" if isinstance(filename, list): filename = filename[0] path, name = op.split(filename) if re.search("(_0x[a-z0-9]{32})", name): hashvalue = re.findall("(_0x[a-z0-9]{32})", name) return True, hashvalue else: return False, None def hash_infile(afile, chunk_len=8192, crypto=hashlib.md5, raise_notfound=False): """ Computes hash of a file using 'crypto' module >>> hash_infile('smri_ants_registration_settings.json') 'f225785dfb0db9032aa5a0e4f2c730ad' >>> hash_infile('surf01.vtk') 'fdf1cf359b4e346034372cdeb58f9a88' >>> hash_infile('spminfo') '0dc55e3888c98a182dab179b976dfffc' >>> hash_infile('fsl_motion_outliers_fd.txt') 'defd1812c22405b1ee4431aac5bbdd73' """ if not op.isfile(afile): if raise_notfound: raise RuntimeError('File "%s" not found.' 
% afile) return None crypto_obj = crypto() with open(afile, "rb") as fp: while True: data = fp.read(chunk_len) if not data: break crypto_obj.update(data) return crypto_obj.hexdigest() def hash_timestamp(afile): """Computes md5 hash of the timestamp of a file""" md5hex = None if op.isfile(afile): md5obj = md5() stat = os.stat(afile) md5obj.update(str(stat.st_size).encode()) md5obj.update(str(stat.st_mtime).encode()) md5hex = md5obj.hexdigest() return md5hex def _parse_mount_table(exit_code, output): """Parses the output of ``mount`` to produce (path, fs_type) pairs Separated from _generate_cifs_table to enable testing logic with real outputs """ # Not POSIX if exit_code != 0: return [] # Linux mount example: sysfs on /sys type sysfs (rw,nosuid,nodev,noexec) # ^^^^ ^^^^^ # OSX mount example: /dev/disk2 on / (hfs, local, journaled) # ^ ^^^ pattern = re.compile(r".*? on (/.*?) (?:type |\()([^\s,\)]+)") # Keep line and match for error reporting (match == None on failure) # Ignore empty lines matches = [(l, pattern.match(l)) for l in output.strip().splitlines() if l] # (path, fstype) tuples, sorted by path length (longest first) mount_info = sorted( (match.groups() for _, match in matches if match is not None), key=lambda x: len(x[0]), reverse=True, ) cifs_paths = [path for path, fstype in mount_info if fstype.lower() == "cifs"] # Report failures as warnings for line, match in matches: if match is None: fmlogger.debug("Cannot parse mount line: '%s'", line) return [ mount for mount in mount_info if any(mount[0].startswith(path) for path in cifs_paths) ] def _generate_cifs_table(): """Construct a reverse-length-ordered list of mount points that fall under a CIFS mount. This precomputation allows efficient checking for whether a given path would be on a CIFS filesystem. On systems without a ``mount`` command, or with no CIFS mounts, returns an empty list. 
""" exit_code, output = sp.getstatusoutput("mount") return _parse_mount_table(exit_code, output) _cifs_table = _generate_cifs_table() def on_cifs(fname): """ Checks whether a file path is on a CIFS filesystem mounted in a POSIX host (i.e., has the ``mount`` command). On Windows, Docker mounts host directories into containers through CIFS shares, which has support for Minshall+French symlinks, or text files that the CIFS driver exposes to the OS as symlinks. We have found that under concurrent access to the filesystem, this feature can result in failures to create or read recently-created symlinks, leading to inconsistent behavior and ``FileNotFoundError``. This check is written to support disabling symlinks on CIFS shares. """ # Only the first match (most recent parent) counts for fspath, fstype in _cifs_table: if fname.startswith(fspath): return fstype == "cifs" return False def copyfile( originalfile, newfile, copy=False, create_new=False, hashmethod=None, use_hardlink=False, copy_related_files=True, ): """Copy or link ``originalfile`` to ``newfile``. If ``use_hardlink`` is True, and the file can be hard-linked, then a link is created, instead of copying the file. If a hard link is not created and ``copy`` is False, then a symbolic link is created. 
Parameters ---------- originalfile : str full path to original file newfile : str full path to new file copy : Bool specifies whether to copy or symlink files (default=False) but only for POSIX systems use_hardlink : Bool specifies whether to hard-link files, when able (Default=False), taking precedence over copy copy_related_files : Bool specifies whether to also operate on related files, as defined in ``related_filetype_sets`` Returns ------- None """ newhash = None orighash = None fmlogger.debug(newfile) if create_new: while op.exists(newfile): base, fname, ext = split_filename(newfile) s = re.search("_c[0-9]{4,4}$", fname) i = 0 if s: i = int(s.group()[2:]) + 1 fname = fname[:-6] + "_c%04d" % i else: fname += "_c%04d" % i newfile = base + os.sep + fname + ext if hashmethod is None: hashmethod = config.get("execution", "hash_method").lower() # Don't try creating symlinks on CIFS if copy is False and on_cifs(newfile): copy = True # Existing file # ------------- # Options: # symlink # to regular file originalfile (keep if symlinking) # to same dest as symlink originalfile (keep if symlinking) # to other file (unlink) # regular file # hard link to originalfile (keep) # copy of file (same hash) (keep) # different file (diff hash) (unlink) keep = False if op.lexists(newfile): if op.islink(newfile): if all( ( os.readlink(newfile) == op.realpath(originalfile), not use_hardlink, not copy, ) ): keep = True elif posixpath.samefile(newfile, originalfile): keep = True else: if hashmethod == "timestamp": hashfn = hash_timestamp elif hashmethod == "content": hashfn = hash_infile else: raise AttributeError("Unknown hash method found:", hashmethod) newhash = hashfn(newfile) fmlogger.debug( "File: %s already exists,%s, copy:%d", newfile, newhash, copy ) orighash = hashfn(originalfile) keep = newhash == orighash if keep: fmlogger.debug( "File: %s already exists, not overwriting, copy:%d", newfile, copy ) else: os.unlink(newfile) # New file # -------- # use_hardlink & can_hardlink 
=> hardlink # ~hardlink & ~copy & can_symlink => symlink # ~hardlink & ~symlink => copy if not keep and use_hardlink: try: fmlogger.debug("Linking File: %s->%s", newfile, originalfile) # Use realpath to avoid hardlinking symlinks os.link(op.realpath(originalfile), newfile) except OSError: use_hardlink = False # Disable hardlink for associated files else: keep = True if not keep and not copy and os.name == "posix": try: fmlogger.debug("Symlinking File: %s->%s", newfile, originalfile) os.symlink(originalfile, newfile) except OSError: copy = True # Disable symlink for associated files else: keep = True if not keep: try: fmlogger.debug("Copying File: %s->%s", newfile, originalfile) shutil.copyfile(originalfile, newfile) except shutil.Error as e: fmlogger.warning(str(e)) # Associated files if copy_related_files: related_file_pairs = ( get_related_files(f, include_this_file=False) for f in (originalfile, newfile) ) for alt_ofile, alt_nfile in zip(*related_file_pairs): if op.exists(alt_ofile): copyfile( alt_ofile, alt_nfile, copy, hashmethod=hashmethod, use_hardlink=use_hardlink, copy_related_files=False, ) return newfile def get_related_files(filename, include_this_file=True): """Returns a list of related files, as defined in ``related_filetype_sets``, for a filename. (e.g., Nifti-Pair, Analyze (SPM) and AFNI files). Parameters ---------- filename : str File name to find related filetypes of. include_this_file : bool If true, output includes the input filename. """ related_files = [] path, name, this_type = split_filename(filename) for type_set in related_filetype_sets: if this_type in type_set: for related_type in type_set: if include_this_file or related_type != this_type: related_files.append(op.join(path, name + related_type)) if not len(related_files): related_files = [filename] return related_files def copyfiles(filelist, dest, copy=False, create_new=False): """Copy or symlink files in ``filelist`` to ``dest`` directory. 
Parameters ---------- filelist : list List of files to copy. dest : path/files full path to destination. If it is a list of length greater than 1, then it assumes that these are the names of the new files. copy : Bool specifies whether to copy or symlink files (default=False) but only for posix systems Returns ------- None """ outfiles = ensure_list(dest) newfiles = [] for i, f in enumerate(ensure_list(filelist)): if isinstance(f, list): newfiles.insert(i, copyfiles(f, dest, copy=copy, create_new=create_new)) else: if len(outfiles) > 1: destfile = outfiles[i] else: destfile = fname_presuffix(f, newpath=outfiles[0]) destfile = copyfile(f, destfile, copy, create_new=create_new) newfiles.insert(i, destfile) return newfiles def ensure_list(filename): """Returns a list given either a string or a list""" if isinstance(filename, (str, bytes)): return [filename] elif isinstance(filename, list): return filename elif is_container(filename): return [x for x in filename] else: return None def simplify_list(filelist): """Returns a list if filelist is a list of length greater than 1, otherwise returns the first element """ if len(filelist) > 1: return filelist else: return filelist[0] filename_to_list = ensure_list list_to_filename = simplify_list def check_depends(targets, dependencies): """Return true if all targets exist and are newer than all dependencies. An OSError will be raised if there are missing dependencies. """ tgts = ensure_list(targets) deps = ensure_list(dependencies) return all(map(op.exists, tgts)) and min(map(op.getmtime, tgts)) > max( list(map(op.getmtime, deps)) + [0] ) def save_json(filename, data): """Save data to a json file Parameters ---------- filename : str Filename to save data in. data : dict Dictionary to save in json file. """ mode = "w" with open(filename, mode) as fp: json.dump(data, fp, sort_keys=True, indent=4) def load_json(filename): """Load data from a json file Parameters ---------- filename : str Filename to load data from. 
Returns ------- data : dict """ with open(filename, "r") as fp: data = json.load(fp) return data def loadcrash(infile, *args): if infile.endswith("pkl") or infile.endswith("pklz"): return loadpkl(infile) else: raise ValueError("Only pickled crashfiles are supported") def loadpkl(infile): """Load a zipped or plain cPickled file.""" infile = Path(infile) fmlogger.debug("Loading pkl: %s", infile) pklopen = gzip.open if infile.suffix == ".pklz" else open t = time() timeout = float(config.get("execution", "job_finished_timeout")) timed_out = True while (time() - t) < timeout: if infile.exists(): timed_out = False break fmlogger.debug("'{}' missing; waiting 2s".format(infile)) sleep(2) if timed_out: error_message = ( "Result file {0} expected, but " "does not exist after ({1}) " "seconds.".format(infile, timeout) ) raise IOError(error_message) with pklopen(str(infile), "rb") as pkl_file: pkl_contents = pkl_file.read() pkl_metadata = None # Look if pkl file contains version metadata idx = pkl_contents.find(b"\n") if idx >= 0: try: pkl_metadata = json.loads(pkl_contents[:idx]) except (UnicodeDecodeError, json.JSONDecodeError): # Could not get version info pass else: # On success, skip JSON metadata pkl_contents = pkl_contents[idx + 1 :] # Pickle files may contain relative paths that must be resolved relative # to the working directory, so use indirectory while attempting to load unpkl = None try: with indirectory(infile.parent): unpkl = pickle.loads(pkl_contents) except UnicodeDecodeError: # Was this pickle created with Python 2.x? 
with indirectory(infile.parent): unpkl = pickle.loads(pkl_contents, fix_imports=True, encoding="utf-8") fmlogger.info("Successfully loaded pkl in compatibility mode.") # Unpickling problems except Exception as e: if pkl_metadata and "version" in pkl_metadata: if pkl_metadata["version"] != version: fmlogger.error( """\ Attempted to open a results file generated by Nipype version %s, \ with an incompatible Nipype version (%s)""", pkl_metadata["version"], version, ) raise e fmlogger.warning( """\ No metadata was found in the pkl file. Make sure you are currently using \ the same Nipype version from the generated pkl.""" ) raise e if unpkl is None: raise ValueError("Loading %s resulted in None." % infile) return unpkl def crash2txt(filename, record): """Write out plain text crash file""" with open(filename, "w") as fp: if "node" in record: node = record["node"] fp.write("Node: {}\n".format(node.fullname)) fp.write("Working directory: {}\n".format(node.output_dir())) fp.write("\n") fp.write("Node inputs:\n{}\n".format(node.inputs)) fp.write("".join(record["traceback"])) def read_stream(stream, logger=None, encoding=None): """ Robustly reads a stream, sending a warning to a logger if some decoding error was raised. 
>>> read_stream(bytearray([65, 0xc7, 65, 10, 66])) # doctest: +ELLIPSIS ['A...A', 'B'] """ default_encoding = encoding or locale.getdefaultlocale()[1] or "UTF-8" logger = logger or fmlogger try: out = stream.decode(default_encoding) except UnicodeDecodeError as err: out = stream.decode(default_encoding, errors="replace") logger.warning("Error decoding string: %s", err) return out.splitlines() def savepkl(filename, record, versioning=False): from io import BytesIO with BytesIO() as f: if versioning: metadata = json.dumps({"version": version}) f.write(metadata.encode("utf-8")) f.write("\n".encode("utf-8")) pickle.dump(record, f) content = f.getvalue() pkl_open = gzip.open if filename.endswith(".pklz") else open tmpfile = filename + ".tmp" with pkl_open(tmpfile, "wb") as pkl_file: pkl_file.write(content) os.rename(tmpfile, filename) rst_levels = ["=", "-", "~", "+"] def write_rst_header(header, level=0): return "\n".join((header, "".join([rst_levels[level] for _ in header]))) + "\n\n" def write_rst_list(items, prefix=""): out = [] for item in ensure_list(items): out.append("{} {}".format(prefix, str(item))) return "\n".join(out) + "\n\n" def write_rst_dict(info, prefix=""): out = [] for key, value in sorted(info.items()): out.append("{}* {} : {}".format(prefix, key, str(value))) return "\n".join(out) + "\n\n" def dist_is_editable(dist): """Is distribution an editable install? Parameters ---------- dist : string Package name # Borrowed from `pip`'s' API """ for path_item in sys.path: egg_link = op.join(path_item, dist + ".egg-link") if op.isfile(egg_link): return True return False def emptydirs(path, noexist_ok=False): """ Empty an existing directory, without deleting it. Do not raise error if the path does not exist and noexist_ok is True. 
Parameters ---------- path : directory that should be empty """ fmlogger.debug("Removing contents of %s", path) if noexist_ok and not op.exists(path): return True if op.isfile(path): raise OSError('path "%s" should be a directory' % path) try: shutil.rmtree(path) except OSError as ex: elcont = [ Path(root) / file for root, _, files in os.walk(path) for file in files if not file.startswith(".nfs") ] if ex.errno in [errno.ENOTEMPTY, errno.EBUSY] and not elcont: fmlogger.warning( "An exception was raised trying to remove old %s, but the path" " seems empty. Is it an NFS mount?. Passing the exception.", path, ) elif ex.errno == errno.ENOTEMPTY and elcont: fmlogger.debug("Folder %s contents (%d items).", path, len(elcont)) raise ex else: raise ex os.makedirs(path, exist_ok=True) def silentrm(filename): """ Equivalent to ``rm -f``, returns ``False`` if the file did not exist. Parameters ---------- filename : str file to be deleted """ try: os.remove(filename) except OSError as e: if e.errno != errno.ENOENT: raise return False return True def which(cmd, env=None, pathext=None): """ Return the path to an executable which would be run if the given cmd was called. If no cmd would be called, return ``None``. Code for Python < 3.3 is based on a code snippet from http://orip.org/2009/08/python-checking-if-executable-exists-in.html """ if pathext is None: pathext = os.getenv("PATHEXT", "").split(os.pathsep) pathext.insert(0, "") path = os.getenv("PATH", os.defpath) if env and "PATH" in env: path = env.get("PATH") for ext in pathext: filename = shutil.which(cmd + ext, path=path) if filename: return filename return None def get_dependencies(name, environ): """Return library dependencies of a dynamically linked executable Uses otool on darwin, ldd on linux. Currently doesn't support windows. 
""" command = None if sys.platform == "darwin": command = "otool -L `which %s`" % name elif "linux" in sys.platform: command = "ldd `which %s`" % name else: return "Platform %s not supported" % sys.platform deps = None try: proc = sp.Popen( command, stdout=sp.PIPE, stderr=sp.PIPE, shell=True, env=environ ) o, e = proc.communicate() deps = o.rstrip() except Exception as ex: deps = f"{command!r} failed" fmlogger.warning(f"Could not get dependencies of {name}s. Error:\n{ex}") return deps def canonicalize_env(env): """Windows requires that environment be dicts with str as keys and values This function converts any unicode entries for Windows only, returning the dictionary untouched in other environments. Parameters ---------- env : dict environment dictionary with unicode or bytes keys and values Returns ------- env : dict Windows: environment dictionary with str keys and values Other: untouched input ``env`` """ if os.name != "nt": return env out_env = {} for key, val in env.items(): if not isinstance(key, str): key = key.decode("utf-8") if not isinstance(val, str): val = val.decode("utf-8") out_env[key] = val return out_env def relpath(path, start=None): """Return a relative version of a path""" try: return op.relpath(path, start) except AttributeError: pass if start is None: start = os.curdir if not path: raise ValueError("no path specified") start_list = op.abspath(start).split(op.sep) path_list = op.abspath(path).split(op.sep) if start_list[0].lower() != path_list[0].lower(): unc_path, rest = op.splitunc(path) unc_start, rest = op.splitunc(start) if bool(unc_path) ^ bool(unc_start): raise ValueError( ("Cannot mix UNC and non-UNC paths " "(%s and %s)") % (path, start) ) else: raise ValueError( "path is on drive %s, start on drive %s" % (path_list[0], start_list[0]) ) # Work out how much of the filepath is shared by start and path. 
for i in range(min(len(start_list), len(path_list))): if start_list[i].lower() != path_list[i].lower(): break else: i += 1 rel_list = [op.pardir] * (len(start_list) - i) + path_list[i:] if not rel_list: return os.curdir return op.join(*rel_list) @contextlib.contextmanager def indirectory(path): cwd = os.getcwd() os.chdir(str(path)) try: yield finally: os.chdir(cwd) nipype-1.7.0/nipype/utils/functions.py000066400000000000000000000030161413403311400200430ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ Handles custom functions used in Function interface. Future imports are avoided to keep namespace as clear as possible. """ import inspect from textwrap import dedent def getsource(function): """Returns the source code of a function""" return dedent(inspect.getsource(function)) def create_function_from_source(function_source, imports=None): """Return a function object from a function source Parameters ---------- function_source : unicode string unicode string defining a function imports : list of strings list of import statements in string form that allow the function to be executed in an otherwise empty namespace """ ns = {} import_keys = [] try: if imports is not None: for statement in imports: exec(statement, ns) import_keys = list(ns.keys()) exec(function_source, ns) except Exception as e: msg = "Error executing function\n{}\n".format(function_source) msg += ( "Functions in connection strings have to be standalone. " "They cannot be declared either interactively or inside " "another function or inline in the connect string. Any " "imports should be done inside the function." 
) raise RuntimeError(msg) from e ns_funcs = list(set(ns) - set(import_keys + ["__builtins__"])) assert len(ns_funcs) == 1, "Function or inputs are ill-defined" func = ns[ns_funcs[0]] return func nipype-1.7.0/nipype/utils/imagemanip.py000066400000000000000000000011141413403311400201370ustar00rootroot00000000000000"""Image manipulation utilities (mostly, NiBabel manipulations).""" import nibabel as nb def copy_header(header_file, in_file, keep_dtype=True): """Copy header from a reference image onto another image.""" hdr_img = nb.load(header_file) out_img = nb.load(in_file, mmap=False) hdr = hdr_img.header.copy() if keep_dtype: hdr.set_data_dtype(out_img.get_data_dtype()) new_img = out_img.__class__(out_img.dataobj, None, hdr) if not keep_dtype: new_img.set_data_dtype(hdr_img.get_data_dtype()) new_img.to_filename(in_file) return in_file nipype-1.7.0/nipype/utils/logger.py000066400000000000000000000100001413403311400173010ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import logging from warnings import warn import os import sys from .misc import str2bool try: from ..external.cloghandler import ConcurrentRotatingFileHandler as RFHandler except ImportError: # Next 2 lines are optional: issue a warning to the user warn("ConcurrentLogHandler not installed. 
Using builtin log handler") from logging.handlers import RotatingFileHandler as RFHandler class Logging(object): """Nipype logging class""" fmt = "%(asctime)s,%(msecs)d %(name)-2s " "%(levelname)-2s:\n\t %(message)s" datefmt = "%y%m%d-%H:%M:%S" def __init__(self, config): self._config = config # scope our logger to not interfere with user _nipype_logger = logging.getLogger("nipype") _nipype_hdlr = logging.StreamHandler(stream=sys.stdout) _nipype_hdlr.setFormatter(logging.Formatter(fmt=self.fmt, datefmt=self.datefmt)) # if StreamHandler was added, do not stack if not len(_nipype_logger.handlers): _nipype_logger.addHandler(_nipype_hdlr) self._logger = logging.getLogger("nipype.workflow") self._utlogger = logging.getLogger("nipype.utils") self._fmlogger = logging.getLogger("nipype.filemanip") self._iflogger = logging.getLogger("nipype.interface") self.loggers = { "nipype.workflow": self._logger, "nipype.utils": self._utlogger, "nipype.filemanip": self._fmlogger, "nipype.interface": self._iflogger, } self._hdlr = None self.update_logging(self._config) def enable_file_logging(self): config = self._config LOG_FILENAME = os.path.join( config.get("logging", "log_directory"), "pypeline.log" ) hdlr = RFHandler( LOG_FILENAME, maxBytes=int(config.get("logging", "log_size")), backupCount=int(config.get("logging", "log_rotate")), ) formatter = logging.Formatter(fmt=self.fmt, datefmt=self.datefmt) hdlr.setFormatter(formatter) self._logger.addHandler(hdlr) self._utlogger.addHandler(hdlr) self._iflogger.addHandler(hdlr) self._fmlogger.addHandler(hdlr) self._hdlr = hdlr def disable_file_logging(self): if self._hdlr: self._logger.removeHandler(self._hdlr) self._utlogger.removeHandler(self._hdlr) self._iflogger.removeHandler(self._hdlr) self._fmlogger.removeHandler(self._hdlr) self._hdlr = None def update_logging(self, config): self._config = config self.disable_file_logging() self._logger.setLevel( logging.getLevelName(config.get("logging", "workflow_level")) ) 
self._utlogger.setLevel( logging.getLevelName(config.get("logging", "utils_level")) ) self._iflogger.setLevel( logging.getLevelName(config.get("logging", "interface_level")) ) if str2bool(config.get("logging", "log_to_file")): self.enable_file_logging() def getLogger(self, name): if name == "filemanip": warn( 'The "filemanip" logger has been deprecated and replaced by ' 'the "utils" logger as of nipype 1.0' ) if name in self.loggers: return self.loggers[name] return None def getLevelName(self, name): return logging.getLevelName(name) def logdebug_dict_differences(self, dold, dnew, prefix=""): """Helper to log what actually changed from old to new values of dictionaries. typical use -- log difference for hashed_inputs """ from .misc import dict_diff self._logger.warning( "logdebug_dict_differences has been deprecated, please use " "nipype.utils.misc.dict_diff." ) self._logger.debug(dict_diff(dold, dnew)) nipype-1.7.0/nipype/utils/matlabtools.py000066400000000000000000000042131413403311400203540ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Useful Functions for working with matlab""" # Stdlib imports import os import re import tempfile import numpy as np # Functions, classes and other top-level code def fltcols(vals): """Trivial little function to make 1xN float vector""" return np.atleast_2d(np.array(vals, dtype=float)) def mlab_tempfile(dir=None): """Returns a temporary file-like object with valid matlab name. The file name is accessible as the .name attribute of the returned object. The caller is responsible for closing the returned object, at which time the underlying file gets deleted from the filesystem. Parameters ---------- dir : str A path to use as the starting directory. Note that this directory must already exist, it is NOT created if it doesn't (in that case, OSError is raised instead). Returns ------- f : A file-like object. 
Examples -------- >>> fn = mlab_tempfile() >>> import os >>> filename = os.path.basename(fn.name) >>> '-' not in filename True >>> fn.close() """ valid_name = re.compile(r"^\w+$") # Make temp files until we get one whose name is a valid matlab identifier, # since matlab imposes that constraint. Since the temp file routines may # return names that aren't valid matlab names, but we can't control that # directly, we just keep trying until we get a valid name. To avoid an # infinite loop for some strange reason, we only try 100 times. for n in range(100): f = tempfile.NamedTemporaryFile(suffix=".m", prefix="tmp_matlab_", dir=dir) # Check the file name for matlab compilance fname = os.path.splitext(os.path.basename(f.name))[0] if valid_name.match(fname): break # Close the temp file we just made if its name is not valid; the # tempfile module then takes care of deleting the actual file on disk. f.close() else: raise ValueError("Could not make temp file after 100 tries") return f nipype-1.7.0/nipype/utils/misc.py000066400000000000000000000252731413403311400167770ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Miscellaneous utility functions """ import os import sys import re from collections.abc import Iterator from warnings import warn from distutils.version import LooseVersion import numpy as np import textwrap def human_order_sorted(l): """Sorts string in human order (i.e. 
'stat10' will go after 'stat2')""" def atoi(text): return int(text) if text.isdigit() else text def natural_keys(text): if isinstance(text, tuple): text = text[0] return [atoi(c) for c in re.split(r"(\d+)", text)] return sorted(l, key=natural_keys) def trim(docstring, marker=None): if isinstance(docstring, bytes): docstring = str(docstring, "utf-8") if not docstring: return "" # Convert tabs to spaces (following the normal Python rules) # and split into a list of lines: lines = docstring.expandtabs().splitlines() # Determine minimum indentation (first line doesn't count): indent = sys.maxsize for line in lines[1:]: stripped = line.lstrip() if stripped: indent = min(indent, len(line) - len(stripped)) # Remove indentation (first line is special): trimmed = [lines[0].strip()] if indent < sys.maxsize: for line in lines[1:]: # replace existing REST marker with doc level marker stripped = line.lstrip().strip().rstrip() if ( marker is not None and stripped and all([s == stripped[0] for s in stripped]) and stripped[0] not in [":"] ): line = line.replace(stripped[0], marker) trimmed.append(line[indent:].rstrip()) # Strip off trailing and leading blank lines: while trimmed and not trimmed[-1]: trimmed.pop() while trimmed and not trimmed[0]: trimmed.pop(0) # Return a single string: return "\n".join(trimmed) def find_indices(condition): "Return the indices where ravel(condition) is true" (res,) = np.nonzero(np.ravel(condition)) return res def is_container(item): """Checks if item is a container (list, tuple, dict, set) Parameters ---------- item : object object to check for .__iter__ Returns ------- output : Boolean True if container False if not (eg string) """ if isinstance(item, str): return False elif hasattr(item, "__iter__"): return True else: return False def container_to_string(cont): """Convert a container to a command line string. Elements of the container are joined with a space between them, suitable for a command line parameter. 
If the container `cont` is only a sequence, like a string and not a container, it is returned unmodified. Parameters ---------- cont : container A container object like a list, tuple, dict, or a set. Returns ------- cont_str : string Container elements joined into a string. """ if hasattr(cont, "__iter__") and not isinstance(cont, str): cont = " ".join(cont) return str(cont) # Dependency checks. Copied this from Nipy, with some modificiations # (added app as a parameter). def package_check( pkg_name, version=None, app=None, checker=LooseVersion, exc_failed_import=ImportError, exc_failed_check=RuntimeError, ): """Check that the minimal version of the required package is installed. Parameters ---------- pkg_name : string Name of the required package. version : string, optional Minimal version number for required package. app : string, optional Application that is performing the check. For instance, the name of the tutorial being executed that depends on specific packages. Default is *Nipype*. checker : object, optional The class that will perform the version checking. Default is distutils.version.LooseVersion. exc_failed_import : Exception, optional Class of the exception to be thrown if import failed. exc_failed_check : Exception, optional Class of the exception to be thrown if version check failed. 
Examples -------- package_check('numpy', '1.3') package_check('scipy', '0.7', 'tutorial1') """ if app: msg = "%s requires %s" % (app, pkg_name) else: msg = "Nipype requires %s" % pkg_name if version: msg += " with version >= %s" % (version,) try: mod = __import__(pkg_name) except ImportError as e: raise exc_failed_import(msg) from e if not version: return try: have_version = mod.__version__ except AttributeError as e: raise exc_failed_check("Cannot find version for %s" % pkg_name) from e if checker(have_version) < checker(version): raise exc_failed_check(msg) def str2bool(v): """ Convert strings (and bytearrays) to boolean values >>> all([str2bool(v) for v in (True, "yes", "true", ... "y", "t", "Yes", "True", "1", "on", "On")]) True >>> all([str2bool(v.encode('utf-8')) ... for v in ("yes", "true", "y", "t", "1", "Yes", "on", "On")]) True >>> any([str2bool(v) for v in (False, "no", "false", "n", "f", ... "False", "0", "off", "Off")]) False >>> any([str2bool(v.encode('utf-8')) ... for v in ("no", "false", "n", "f", "0", "off", "Off")]) False >>> str2bool(None) # doctest: +ELLIPSIS Traceback (most recent call last): ... ValueError: ... >>> str2bool('/some/path') # doctest: +ELLIPSIS Traceback (most recent call last): ... ValueError: ... >>> str2bool('Agg') # doctest: +ELLIPSIS Traceback (most recent call last): ... ValueError: ... >>> str2bool('INFO') # doctest: +ELLIPSIS Traceback (most recent call last): ... ValueError: ... >>> str2bool('/some/bytes/path'.encode('utf-8')) # doctest: +ELLIPSIS Traceback (most recent call last): ... ValueError: ... 
""" if isinstance(v, bool): return v if isinstance(v, bytes): v = v.decode("utf-8") if isinstance(v, str): lower = v.lower() if lower in ("yes", "true", "y", "t", "1", "on"): return True elif lower in ("no", "false", "n", "f", "0", "off"): return False raise ValueError("%r cannot be converted to bool" % v) def flatten(S): if S == []: return S if isinstance(S[0], list): return flatten(S[0]) + flatten(S[1:]) return S[:1] + flatten(S[1:]) def unflatten(in_list, prev_structure): if not isinstance(in_list, Iterator): in_list = iter(in_list) if not isinstance(prev_structure, list): return next(in_list) out = [] for item in prev_structure: out.append(unflatten(in_list, item)) return out def normalize_mc_params(params, source): """ Normalize a single row of motion parameters to the SPM format. SPM saves motion parameters as: x Right-Left (mm) y Anterior-Posterior (mm) z Superior-Inferior (mm) rx Pitch (rad) ry Roll (rad) rz Yaw (rad) """ if source.upper() == "FSL": params = params[[3, 4, 5, 0, 1, 2]] elif source.upper() in ("AFNI", "FSFAST"): params = params[np.asarray([4, 5, 3, 1, 2, 0]) + (len(params) > 6)] params[3:] = params[3:] * np.pi / 180.0 elif source.upper() == "NIPY": from nipy.algorithms.registration import to_matrix44, aff2euler matrix = to_matrix44(params) params = np.zeros(6) params[:3] = matrix[:3, 3] params[-1:2:-1] = aff2euler(matrix) return params def dict_diff(dold, dnew, indent=0): """Helper to log what actually changed from old to new values of dictionaries. 
typical use -- log difference for hashed_inputs """ try: dnew, dold = dict(dnew), dict(dold) except Exception: return textwrap.indent( f"""\ Diff between nipype inputs failed: * Cached inputs: {dold} * New inputs: {dnew}""", " " * indent, ) # Compare against hashed_inputs # Keys: should rarely differ new_keys = set(dnew.keys()) old_keys = set(dold.keys()) diff = [] if new_keys - old_keys: diff += [" * keys not previously seen: %s" % (new_keys - old_keys)] if old_keys - new_keys: diff += [" * keys not presently seen: %s" % (old_keys - new_keys)] # Add topical message if diff: diff.insert(0, "Dictionaries had differing keys:") diffkeys = len(diff) def _shorten(value): if isinstance(value, str) and len(value) > 50: return f"{value[:10]}...{value[-10:]}" if isinstance(value, (tuple, list)) and len(value) > 10: return tuple(list(value[:2]) + ["..."] + list(value[-2:])) return value def _uniformize(val): if isinstance(val, dict): return {k: _uniformize(v) for k, v in val.items()} if isinstance(val, (list, tuple)): return tuple(_uniformize(el) for el in val) return val # Values in common keys would differ quite often, # so we need to join the messages together for k in new_keys.intersection(old_keys): # Reading from JSON produces lists, but internally we typically # use tuples. At this point these dictionary values can be # immutable (and therefore the preference for tuple). new = _uniformize(dnew[k]) old = _uniformize(dold[k]) if new != old: diff += [" * %s: %r != %r" % (k, _shorten(new), _shorten(old))] if len(diff) > diffkeys: diff.insert(diffkeys, "Some dictionary entries had differing values:") return textwrap.indent("\n".join(diff), " " * indent) def rgetcwd(error=True): """ Robust replacement for getcwd when folders get removed If error==True, this is just an alias for os.getcwd() """ if error: return os.getcwd() try: cwd = os.getcwd() except OSError as exc: # Changing back to cwd is probably not necessary # but this makes sure there's somewhere to change to. 
cwd = os.getenv("PWD") if cwd is None: raise OSError( ( exc.errno, "Current directory does not exist anymore, " "and nipype was not able to guess it from the environment", ) ) warn('Current folder does not exist, replacing with "%s" instead.' % cwd) return cwd nipype-1.7.0/nipype/utils/nipype2boutiques.py000066400000000000000000000546261413403311400213770ustar00rootroot00000000000000# -*- coding: utf-8 -*- # This tool exports a Nipype interface in the Boutiques # (https://github.com/boutiques) JSON format. Boutiques tools # can be imported in CBRAIN (https://github.com/aces/cbrain) # among other platforms. # # Limitations: # * Optional outputs, i.e. outputs that not always produced, may not be # detected. They will, however, still be listed with a placeholder for # the path template (either a value key or the output ID) that should # be verified and corrected. # * Still need to add some fields to the descriptor manually, e.g. url, # descriptor-url, path-template-stripped-extensions, etc. import os import sys import simplejson as json from ..scripts.instance import import_module def generate_boutiques_descriptor( module, interface_name, container_image, container_type, container_index=None, verbose=False, save=False, save_path=None, author=None, ignore_inputs=None, tags=None, ): """ Generate a JSON Boutiques description of a Nipype interface. Arguments --------- module : module where the Nipype interface is declared. interface_name : name of Nipype interface. 
container_image : name of the container image where the tool is installed container_type : type of container image (Docker or Singularity) container_index : optional index where the image is available verbose : print information messages save : True if you want to save descriptor to a file save_path : file path for the saved descriptor (defaults to name of the interface in current directory) author : author of the tool (required for publishing) ignore_inputs : list of interface inputs to not include in the descriptor tags : JSON object containing tags to include in the descriptor, e.g. ``{"key1": "value1"}`` (note: the tags 'domain:neuroinformatics' and 'interface-type:nipype' are included by default) Returns ------- boutiques : str string containing a Boutiques' JSON object """ if not module: raise Exception("Undefined module.") # Retrieves Nipype interface if isinstance(module, (str, bytes)): import_module(module) module_name = str(module) module = sys.modules[module] else: module_name = str(module.__name__) interface = getattr(module, interface_name)() inputs = interface.input_spec() outputs = interface.output_spec() # Tool description tool_desc = {} tool_desc["name"] = interface_name tool_desc["command-line"] = interface_name + " " tool_desc["author"] = "Nipype (interface)" if author is not None: tool_desc["author"] = tool_desc["author"] + ", " + author + " (tool)" tool_desc["description"] = ( interface_name + ", as implemented in Nipype (module: " + module_name + ", interface: " + interface_name + ")." 
) tool_desc["inputs"] = [] tool_desc["output-files"] = [] tool_desc["groups"] = [] tool_desc["tool-version"] = ( interface.version if interface.version is not None else "1.0.0" ) tool_desc["schema-version"] = "0.5" if container_image: tool_desc["container-image"] = {} tool_desc["container-image"]["image"] = container_image tool_desc["container-image"]["type"] = container_type if container_index: tool_desc["container-image"]["index"] = container_index # Generates tool inputs for name, spec in sorted(interface.inputs.traits(transient=None).items()): # Skip ignored inputs if ignore_inputs is not None and name in ignore_inputs: continue # If spec has a name source, this means it actually represents an # output, so create a Boutiques output from it elif spec.name_source and spec.name_template: tool_desc["output-files"].append( get_boutiques_output_from_inp(inputs, spec, name) ) else: inp = get_boutiques_input(inputs, interface, name, spec, verbose) # Handle compound inputs (inputs that can be of multiple types # and are mutually exclusive) if isinstance(inp, list): mutex_group_members = [] tool_desc["command-line"] += inp[0]["value-key"] + " " for i in inp: tool_desc["inputs"].append(i) mutex_group_members.append(i["id"]) if verbose: print("-> Adding input " + i["name"]) # Put inputs into a mutually exclusive group tool_desc["groups"].append( { "id": inp[0]["id"] + "_group", "name": inp[0]["name"] + " group", "members": mutex_group_members, "mutually-exclusive": True, } ) else: tool_desc["inputs"].append(inp) tool_desc["command-line"] += inp["value-key"] + " " if verbose: print("-> Adding input " + inp["name"]) # Generates input groups tool_desc["groups"] += get_boutiques_groups( interface.inputs.traits(transient=None).items() ) if len(tool_desc["groups"]) == 0: del tool_desc["groups"] # Generates tool outputs generate_tool_outputs(outputs, interface, tool_desc, verbose, True) # Generate outputs with various different inputs to try to generate # as many output values as 
possible custom_inputs = generate_custom_inputs(tool_desc["inputs"]) for input_dict in custom_inputs: interface = getattr(module, interface_name)(**input_dict) outputs = interface.output_spec() generate_tool_outputs(outputs, interface, tool_desc, verbose, False) # Fill in all missing output paths for output in tool_desc["output-files"]: if output["path-template"] == "": fill_in_missing_output_path(output, output["name"], tool_desc["inputs"]) # Add tags desc_tags = {"domain": "neuroinformatics", "source": "nipype-interface"} if tags is not None: tags_dict = json.loads(tags) for k, v in tags_dict.items(): if k in desc_tags: if not isinstance(desc_tags[k], list): desc_tags[k] = [desc_tags[k]] desc_tags[k].append(v) else: desc_tags[k] = v tool_desc["tags"] = desc_tags # Check for positional arguments and reorder command line args if necessary tool_desc["command-line"] = reorder_cmd_line_args( tool_desc["command-line"], interface, ignore_inputs ) # Remove the extra space at the end of the command line tool_desc["command-line"] = tool_desc["command-line"].strip() # Save descriptor to a file if save: path = save_path or os.path.join(os.getcwd(), interface_name + ".json") with open(path, "w") as outfile: json.dump(tool_desc, outfile, indent=4, separators=(",", ": ")) if verbose: print("-> Descriptor saved to file " + outfile.name) print( "NOTE: Descriptors produced by this script may not entirely conform " "to the Nipype interface specs. Please check that the descriptor is " "correct before using it." ) return json.dumps(tool_desc, indent=4, separators=(",", ": ")) def generate_tool_outputs(outputs, interface, tool_desc, verbose, first_run): for name, spec in sorted(outputs.traits(transient=None).items()): output = get_boutiques_output( outputs, name, spec, interface, tool_desc["inputs"] ) # If this is the first time we are generating outputs, add the full # output to the descriptor. 
Otherwise, find the existing output and # update its path template if it's still undefined. if first_run: tool_desc["output-files"].append(output) if output.get("value-key"): tool_desc["command-line"] += output["value-key"] + " " if verbose: print("-> Adding output " + output["name"]) else: for existing_output in tool_desc["output-files"]: if ( output["id"] == existing_output["id"] and existing_output["path-template"] == "" ): existing_output["path-template"] = output["path-template"] break if ( output.get("value-key") and output["value-key"] not in tool_desc["command-line"] ): tool_desc["command-line"] += output["value-key"] + " " if len(tool_desc["output-files"]) == 0: raise Exception("Tool has no output.") def get_boutiques_input( inputs, interface, input_name, spec, verbose, handler=None, input_number=None ): """ Returns a dictionary containing the Boutiques input corresponding to a Nipype input. Args: * inputs: inputs of the Nipype interface. * interface: Nipype interface. * input_name: name of the Nipype input. * spec: Nipype input spec. * verbose: print information messages. * handler: used when handling compound inputs, which don't have their own input spec * input_number: used when handling compound inputs to assign each a unique ID Assumes that: * Input names are unique. 
""" inp = {} # No need to append a number to the first of a list of compound inputs if input_number: inp["id"] = input_name + "_" + str(input_number + 1) else: inp["id"] = input_name inp["name"] = input_name.replace("_", " ").capitalize() if handler is None: trait_handler = spec.handler else: trait_handler = handler # Figure out the input type from its handler type handler_type = type(trait_handler).__name__ # Deal with compound traits if handler_type == "TraitCompound": input_list = [] # Recursively create an input for each trait for i in range(0, len(trait_handler.handlers)): inp = get_boutiques_input( inputs, interface, input_name, spec, verbose, trait_handler.handlers[i], i, ) inp["optional"] = True input_list.append(inp) return input_list if handler_type == "File" or handler_type == "Directory": inp["type"] = "File" elif handler_type == "Int": inp["type"] = "Number" inp["integer"] = True elif handler_type == "Float": inp["type"] = "Number" elif handler_type == "Bool": inp["type"] = "Flag" else: inp["type"] = "String" # Deal with range inputs if handler_type == "Range": inp["type"] = "Number" if trait_handler._low is not None: inp["minimum"] = trait_handler._low if trait_handler._high is not None: inp["maximum"] = trait_handler._high if trait_handler._exclude_low: inp["exclusive-minimum"] = True if trait_handler._exclude_high: inp["exclusive-maximum"] = True # Deal with list inputs # TODO handle lists of lists (e.g. 
FSL ProbTrackX seed input) if handler_type == "List": inp["list"] = True item_type = trait_handler.item_trait.trait_type item_type_name = type(item_type).__name__ if item_type_name == "Int": inp["integer"] = True inp["type"] = "Number" elif item_type_name == "Float": inp["type"] = "Number" elif item_type_name == "File": inp["type"] = "File" elif item_type_name == "Enum": value_choices = item_type.values if value_choices is not None: if all(isinstance(n, int) for n in value_choices): inp["type"] = "Number" inp["integer"] = True elif all(isinstance(n, float) for n in value_choices): inp["type"] = "Number" inp["value-choices"] = value_choices else: inp["type"] = "String" if trait_handler.minlen != 0: inp["min-list-entries"] = trait_handler.minlen if trait_handler.maxlen != sys.maxsize: inp["max-list-entries"] = trait_handler.maxlen if spec.sep: inp["list-separator"] = spec.sep if handler_type == "Tuple": inp["list"] = True inp["min-list-entries"] = len(spec.default) inp["max-list-entries"] = len(spec.default) input_type = type(spec.default[0]).__name__ if input_type == "int": inp["type"] = "Number" inp["integer"] = True elif input_type == "float": inp["type"] = "Number" else: inp["type"] = "String" # Deal with multi-input if handler_type == "InputMultiObject": inp["type"] = "File" inp["list"] = True if spec.sep: inp["list-separator"] = spec.sep inp["value-key"] = ( "[" + input_name.upper() + "]" ) # assumes that input names are unique flag, flag_sep = get_command_line_flag(spec, inp["type"] == "Flag", input_name) if flag is not None: inp["command-line-flag"] = flag if flag_sep is not None: inp["command-line-flag-separator"] = flag_sep inp["description"] = get_description_from_spec(inputs, input_name, spec) if not (hasattr(spec, "mandatory") and spec.mandatory): inp["optional"] = True else: inp["optional"] = False if spec.usedefault: inp["default-value"] = spec.default_value()[1] if spec.requires is not None: inp["requires-inputs"] = spec.requires try: value_choices = 
trait_handler.values except AttributeError: pass else: if value_choices is not None: if all(isinstance(n, int) for n in value_choices): inp["type"] = "Number" inp["integer"] = True elif all(isinstance(n, float) for n in value_choices): inp["type"] = "Number" inp["value-choices"] = value_choices return inp def get_boutiques_output(outputs, name, spec, interface, tool_inputs): """ Returns a dictionary containing the Boutiques output corresponding to a Nipype output. Args: * outputs: outputs of the Nipype interface. * name: name of the Nipype output. * spec: Nipype output spec. * interface: Nipype interface. * tool_inputs: list of tool inputs (as produced by method get_boutiques_input). Assumes that: * Output names are unique. * Input values involved in the path template are defined. * Output files are written in the current directory. * There is a single output value (output lists are not supported). """ output = {} output["name"] = name.replace("_", " ").capitalize() # Check if the output name was already used as an input name # If so, append '_outfile' to the end of the ID unique_id = True for inp in tool_inputs: if inp["id"] == name: unique_id = False break output["id"] = name if unique_id else name + "_outfile" output["path-template"] = "" # No real way to determine if an output is always # produced, regardless of the input values. output["optional"] = True output["description"] = get_description_from_spec(outputs, name, spec) # Path template creation. 
try: output_value = interface._list_outputs()[name] except TypeError: output_value = None except AttributeError: output_value = None except KeyError: output_value = None # Handle multi-outputs if ( isinstance(output_value, list) or type(spec.handler).__name__ == "OutputMultiObject" or type(spec.handler).__name__ == "List" ): output["list"] = True if output_value: # Check if all extensions are the same extensions = [] for val in output_value: extensions.append(os.path.splitext(val)[1]) # If extensions all the same, set path template as # wildcard + extension. Otherwise just use a wildcard if len(set(extensions)) == 1: output["path-template"] = "*" + extensions[0] else: output["path-template"] = "*" return output # If an output value is defined, use its relative path, if one exists. # Otherwise, put blank string as placeholder and try to fill it on # another iteration. if output_value: output["path-template"] = os.path.relpath(output_value) else: output["path-template"] = "" return output def get_boutiques_groups(input_traits): """ Returns a list of dictionaries containing Boutiques groups for the mutually exclusive Nipype inputs. """ desc_groups = [] mutex_input_sets = [] # Get all the groups for name, spec in input_traits: if spec.xor is not None: group_members = set([name] + list(spec.xor)) if group_members not in mutex_input_sets: mutex_input_sets.append(group_members) # Create a dictionary for each one for i, inp_set in enumerate(mutex_input_sets, 1): desc_groups.append( { "id": "mutex_group" + ("_" + str(i) if i != 1 else ""), "name": "Mutex group" + (" " + str(i) if i != 1 else ""), "members": list(inp_set), "mutually-exclusive": True, } ) return desc_groups def get_description_from_spec(obj, name, spec): """ Generates a description based on the input or output spec. """ if not spec.desc: spec.desc = "No description provided." spec_info = spec.full_info(obj, name, None) boutiques_description = ( spec_info.capitalize() + ". 
" + spec.desc.capitalize() ).replace("\n", "") if not boutiques_description.endswith("."): boutiques_description += "." return boutiques_description def fill_in_missing_output_path(output, output_name, tool_inputs): """ Creates a path template for outputs that are missing one This is needed for the descriptor to be valid (path template is required) """ # Look for an input with the same name as the output and use its value key found = False for input in tool_inputs: if input["name"] == output_name: output["path-template"] = input["value-key"] found = True break # If no input with the same name was found, use the output ID if not found: output["path-template"] = output["id"] return output def generate_custom_inputs(desc_inputs): """ Generates a bunch of custom input dictionaries in order to generate as many outputs as possible (to get their path templates). Currently only works with flag inputs and inputs with defined value choices. """ custom_input_dicts = [] for desc_input in desc_inputs: if desc_input["type"] == "Flag": custom_input_dicts.append({desc_input["id"]: True}) elif desc_input.get("value-choices") and not desc_input.get("list"): for value in desc_input["value-choices"]: custom_input_dicts.append({desc_input["id"]: value}) return custom_input_dicts def reorder_cmd_line_args(cmd_line, interface, ignore_inputs=None): """ Generates a new command line with the positional arguments in the correct order """ interface_name = cmd_line.split()[0] positional_arg_dict = {} positional_args = [] non_positional_args = [] for name, spec in sorted(interface.inputs.traits(transient=None).items()): if ignore_inputs is not None and name in ignore_inputs: continue value_key = "[" + name.upper() + "]" if spec.position is not None: positional_arg_dict[spec.position] = value_key else: non_positional_args.append(value_key) last_arg = None for item in sorted(positional_arg_dict.items()): if item[0] == -1: last_arg = item[1] continue positional_args.append(item[1]) return ( 
interface_name + " " + ((" ".join(positional_args) + " ") if len(positional_args) > 0 else "") + ((last_arg + " ") if last_arg else "") + " ".join(non_positional_args) ) def get_command_line_flag(input_spec, is_flag_type=False, input_name=None): """ Generates the command line flag for a given input """ flag, flag_sep = None, None if input_spec.argstr: if "=" in input_spec.argstr: if ( input_spec.argstr.split("=")[1] == "0" or input_spec.argstr.split("=")[1] == "1" ): flag = input_spec.argstr else: flag = input_spec.argstr.split("=")[0].strip() flag_sep = "=" elif input_spec.argstr.split("%")[0]: flag = input_spec.argstr.split("%")[0].strip() elif is_flag_type: flag = ("--%s" % input_name + " ").strip() return flag, flag_sep def get_boutiques_output_from_inp(inputs, inp_spec, inp_name): """ Takes a Nipype input representing an output file and generates a Boutiques output for it """ output = {} output["name"] = inp_name.replace("_", " ").capitalize() output["id"] = inp_name output["optional"] = True output["description"] = get_description_from_spec(inputs, inp_name, inp_spec) if not (hasattr(inp_spec, "mandatory") and inp_spec.mandatory): output["optional"] = True else: output["optional"] = False if inp_spec.usedefault: output["default-value"] = inp_spec.default_value()[1] if isinstance(inp_spec.name_source, list): source = inp_spec.name_source[0] else: source = inp_spec.name_source output["path-template"] = inp_spec.name_template.replace( "%s", "[" + source.upper() + "]" ) output["value-key"] = "[" + inp_name.upper() + "]" flag, flag_sep = get_command_line_flag(inp_spec) if flag is not None: output["command-line-flag"] = flag if flag_sep is not None: output["command-line-flag-separator"] = flag_sep return output nipype-1.7.0/nipype/utils/nipype_cmd.py000066400000000000000000000053541413403311400201710ustar00rootroot00000000000000# -*- coding: utf-8 -*- import os import argparse import inspect import sys from ..interfaces.base import Interface, InputMultiPath, traits 
from ..interfaces.base.support import get_trait_desc from .misc import str2bool def listClasses(module=None): if module: __import__(module) pkg = sys.modules[module] print("Available Interfaces:") for k, v in sorted(list(pkg.__dict__.items())): if inspect.isclass(v) and issubclass(v, Interface): print("\t%s" % k) def add_options(parser=None, module=None, function=None): interface = None if parser and module and function: __import__(module) interface = getattr(sys.modules[module], function)() inputs = interface.input_spec() for name, spec in sorted(interface.inputs.traits(transient=None).items()): desc = "\n".join(get_trait_desc(inputs, name, spec))[len(name) + 2 :] args = {} if spec.is_trait_type(traits.Bool): args["action"] = "store_true" if hasattr(spec, "mandatory") and spec.mandatory: if spec.is_trait_type(InputMultiPath): args["nargs"] = "+" parser.add_argument(name, help=desc, **args) else: if spec.is_trait_type(InputMultiPath): args["nargs"] = "*" parser.add_argument("--%s" % name, dest=name, help=desc, **args) return parser, interface def run_instance(interface, options): print("setting function inputs") for input_name, _ in list(interface.inputs.items()): if getattr(options, input_name) is not None: value = getattr(options, input_name) try: setattr(interface.inputs, input_name, value) except ValueError as e: print("Error when setting the value of %s: '%s'" % (input_name, str(e))) print(interface.inputs) res = interface.run() print(res.outputs) def main(argv): if len(argv) == 2 and not argv[1].startswith("-"): listClasses(argv[1]) sys.exit(0) parser = argparse.ArgumentParser( description="Nipype interface runner", prog=argv[0] ) parser.add_argument("module", type=str, help="Module name") parser.add_argument("interface", type=str, help="Interface name") parsed = parser.parse_args(args=argv[1:3]) _, prog = os.path.split(argv[0]) interface_parser = argparse.ArgumentParser( description="Run %s" % parsed.interface, prog=" ".join([prog] + argv[1:3]) ) 
interface_parser, interface = add_options( interface_parser, parsed.module, parsed.interface ) args = interface_parser.parse_args(args=argv[3:]) run_instance(interface, args) nipype-1.7.0/nipype/utils/onetime.py000066400000000000000000000050531413403311400174760ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Descriptor support for NIPY. Utilities to support special Python descriptors [1,2], in particular the use of a useful pattern for properties we call 'one time properties'. These are object attributes which are declared as properties, but become regular attributes once they've been read the first time. They can thus be evaluated later in the object's life cycle, but once evaluated they become normal, static attributes with no function call overhead on access or any other constraints. References ---------- [1] How-To Guide for Descriptors, Raymond Hettinger. http://users.rcn.com/python/download/Descriptor.htm [2] Python data model, http://docs.python.org/reference/datamodel.html """ class OneTimeProperty(object): """A descriptor to make special properties that become normal attributes.""" def __init__(self, func): """Create a OneTimeProperty instance. Parameters ---------- func : method The method that will be called the first time to compute a value. Afterwards, the method's name will be a standard attribute holding the value of this computation. """ self.getter = func self.name = func.__name__ def __get__(self, obj, type=None): """Called on attribute access on the class or instance.""" if obj is None: # Being called on the class, return the original function. # This way, introspection works on the class. return self.getter val = self.getter(obj) # print "** setattr_on_read - loading '%s'" % self.name # dbg setattr(obj, self.name, val) return val def setattr_on_read(func): # XXX - beetter names for this? 
# - cor_property (copy on read property) # - sor_property (set on read property) # - prop2attr_on_read # ... ? """Decorator to create OneTimeProperty attributes. Parameters ---------- func : method The method that will be called the first time to compute a value. Afterwards, the method's name will be a standard attribute holding the value of this computation. Examples -------- >>> class MagicProp(object): ... @setattr_on_read ... def a(self): ... return 99 ... >>> x = MagicProp() >>> 'a' in x.__dict__ False >>> x.a 99 >>> 'a' in x.__dict__ True """ return OneTimeProperty(func) nipype-1.7.0/nipype/utils/profiler.py000066400000000000000000000275401413403311400176650ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Utilities to keep track of performance """ import os import numpy as np import threading from time import time try: import psutil except ImportError as exc: psutil = None from .. 
import config, logging proflogger = logging.getLogger("nipype.utils") resource_monitor = config.resource_monitor # Init variables _MB = 1024.0 ** 2 class ResourceMonitorMock: """A mock class to use when the monitor is disabled.""" @property def fname(self): """Get/set the internal filename""" return None def __init__(self, pid, freq=5, fname=None, python=True): pass def start(self): pass def stop(self): return {} class ResourceMonitor(threading.Thread): """ A ``Thread`` to monitor a specific PID with a certain frequence to a file """ def __init__(self, pid, freq=5, fname=None, python=True): # Make sure psutil is imported import psutil if freq < 0.2: raise RuntimeError("Frequency (%0.2fs) cannot be lower than 0.2s" % freq) if fname is None: fname = ".proc-%d_time-%s_freq-%0.2f" % (pid, time(), freq) self._fname = os.path.abspath(fname) self._logfile = open(self._fname, "w") self._freq = freq self._python = python # Leave process initialized and make first sample self._process = psutil.Process(pid) self._sample(cpu_interval=0.2) # Start thread threading.Thread.__init__(self) self._event = threading.Event() @property def fname(self): """Get/set the internal filename""" return self._fname def stop(self): """Stop monitoring.""" if not self._event.is_set(): self._event.set() self.join() self._sample() self._logfile.flush() self._logfile.close() retval = { "mem_peak_gb": None, "cpu_percent": None, } # Read .prof file in and set runtime values vals = np.loadtxt(self._fname, delimiter=",") if vals.size: vals = np.atleast_2d(vals) retval["mem_peak_gb"] = vals[:, 2].max() / 1024 retval["cpu_percent"] = vals[:, 1].max() retval["prof_dict"] = { "time": vals[:, 0].tolist(), "cpus": vals[:, 1].tolist(), "rss_GiB": (vals[:, 2] / 1024).tolist(), "vms_GiB": (vals[:, 3] / 1024).tolist(), } return retval def _sample(self, cpu_interval=None): cpu = 0.0 rss = 0.0 vms = 0.0 try: with self._process.oneshot(): cpu += self._process.cpu_percent(interval=cpu_interval) mem_info = 
self._process.memory_info() rss += mem_info.rss vms += mem_info.vms except psutil.NoSuchProcess: pass # Iterate through child processes and get number of their threads try: children = self._process.children(recursive=True) except psutil.NoSuchProcess: children = [] for child in children: try: with child.oneshot(): cpu += child.cpu_percent() mem_info = child.memory_info() rss += mem_info.rss vms += mem_info.vms except psutil.NoSuchProcess: pass print("%f,%f,%f,%f" % (time(), cpu, rss / _MB, vms / _MB), file=self._logfile) self._logfile.flush() def run(self): """Core monitoring function, called by start()""" start_time = time() wait_til = start_time while not self._event.is_set(): self._sample() wait_til += self._freq self._event.wait(max(0, wait_til - time())) # Log node stats function def log_nodes_cb(node, status): """Function to record node run statistics to a log file as json dictionaries Parameters ---------- node : nipype.pipeline.engine.Node the node being logged status : string acceptable values are 'start', 'end'; otherwise it is considered and error Returns ------- None this function does not return any values, it logs the node status info to the callback logger """ if status != "end": return # Import packages import logging import json status_dict = { "name": node.name, "id": node._id, "start": getattr(node.result.runtime, "startTime"), "finish": getattr(node.result.runtime, "endTime"), "duration": getattr(node.result.runtime, "duration"), "runtime_threads": getattr(node.result.runtime, "cpu_percent", "N/A"), "runtime_memory_gb": getattr(node.result.runtime, "mem_peak_gb", "N/A"), "estimated_memory_gb": node.mem_gb, "num_threads": node.n_procs, } if status_dict["start"] is None or status_dict["finish"] is None: status_dict["error"] = True # Dump string to log logging.getLogger("callback").debug(json.dumps(status_dict)) # Get total system RAM def get_system_total_memory_gb(): """ Function to get the total RAM of the running system in GB """ # Import 
packages import os import sys # Get memory if "linux" in sys.platform: with open("/proc/meminfo", "r") as f_in: meminfo_lines = f_in.readlines() mem_total_line = [line for line in meminfo_lines if "MemTotal" in line][0] mem_total = float(mem_total_line.split()[1]) memory_gb = mem_total / (1024.0 ** 2) elif "darwin" in sys.platform: mem_str = os.popen("sysctl hw.memsize").read().strip().split(" ")[-1] memory_gb = float(mem_str) / (1024.0 ** 3) else: err_msg = "System platform: %s is not supported" raise Exception(err_msg) # Return memory return memory_gb # Get max resources used for process def get_max_resources_used(pid, mem_mb, num_threads, pyfunc=False): """ Function to get the RAM and threads utilized by a given process Parameters ---------- pid : integer the process ID of process to profile mem_mb : float the high memory watermark so far during process execution (in MB) num_threads: int the high thread watermark so far during process execution Returns ------- mem_mb : float the new high memory watermark of process (MB) num_threads : float the new high thread watermark of process """ if not resource_monitor: raise RuntimeError( "Attempted to measure resources with option " '"monitoring.enabled" set off.' 
) try: mem_mb = max(mem_mb, _get_ram_mb(pid, pyfunc=pyfunc)) num_threads = max(num_threads, _get_num_threads(pid)) except Exception as exc: proflogger.info("Could not get resources used by process.\n%s", exc) return mem_mb, num_threads # Get number of threads for process def _get_num_threads(pid): """ Function to get the number of threads a process is using Parameters ---------- pid : integer the process ID of process to profile Returns ------- num_threads : int the number of threads that the process is using """ try: proc = psutil.Process(pid) # If process is running if proc.status() == psutil.STATUS_RUNNING: num_threads = proc.num_threads() elif proc.num_threads() > 1: tprocs = [psutil.Process(thr.id) for thr in proc.threads()] alive_tprocs = [ tproc for tproc in tprocs if tproc.status() == psutil.STATUS_RUNNING ] num_threads = len(alive_tprocs) else: num_threads = 1 child_threads = 0 # Iterate through child processes and get number of their threads for child in proc.children(recursive=True): # Leaf process if len(child.children()) == 0: # If process is running, get its number of threads if child.status() == psutil.STATUS_RUNNING: child_thr = child.num_threads() # If its not necessarily running, but still multi-threaded elif child.num_threads() > 1: # Cast each thread as a process and check for only running tprocs = [psutil.Process(thr.id) for thr in child.threads()] alive_tprocs = [ tproc for tproc in tprocs if tproc.status() == psutil.STATUS_RUNNING ] child_thr = len(alive_tprocs) # Otherwise, no threads are running else: child_thr = 0 # Increment child threads child_threads += child_thr except psutil.NoSuchProcess: return None # Number of threads is max between found active children and parent num_threads = max(child_threads, num_threads) # Return number of threads found return num_threads # Get ram usage of process def _get_ram_mb(pid, pyfunc=False): """ Function to get the RAM usage of a process and its children Reference: 
http://ftp.dev411.com/t/python/python-list/095thexx8g/\ multiprocessing-forking-memory-usage Parameters ---------- pid : integer the PID of the process to get RAM usage of pyfunc : boolean (optional); default=False a flag to indicate if the process is a python function; when Pythons are multithreaded via multiprocess or threading, children functions include their own memory + parents. if this is set, the parent memory will removed from children memories Returns ------- mem_mb : float the memory RAM in MB utilized by the process PID """ try: # Init parent parent = psutil.Process(pid) # Get memory of parent parent_mem = parent.memory_info().rss mem_mb = parent_mem / _MB # Iterate through child processes for child in parent.children(recursive=True): child_mem = child.memory_info().rss if pyfunc: child_mem -= parent_mem mem_mb += child_mem / _MB except psutil.NoSuchProcess: return None # Return memory return mem_mb def _use_cpu(x): ctr = 0 while ctr < 1e7: ctr += 1 x * x # Spin multiple threads def _use_resources(n_procs, mem_gb): """ Function to execute multiple use_gb_ram functions in parallel """ import os import sys import psutil from multiprocessing import Pool from nipype import logging from nipype.utils.profiler import _use_cpu iflogger = logging.getLogger("nipype.interface") # Getsize of one character string BSIZE = sys.getsizeof(" ") - sys.getsizeof(" ") BOFFSET = sys.getsizeof("") _GB = 1024.0 ** 3 def _use_gb_ram(mem_gb): """A test function to consume mem_gb GB of RAM""" num_bytes = int(mem_gb * _GB) # Eat mem_gb GB of memory for 1 second gb_str = " " * ((num_bytes - BOFFSET) // BSIZE) assert sys.getsizeof(gb_str) == num_bytes return gb_str # Measure the amount of memory this process already holds p = psutil.Process(os.getpid()) mem_offset = p.memory_info().rss / _GB big_str = _use_gb_ram(mem_gb - mem_offset) _use_cpu(5) mem_total = p.memory_info().rss / _GB del big_str iflogger.info( "[%d] Memory offset %0.2fGB, total %0.2fGB", os.getpid(), mem_offset, 
mem_total ) if n_procs > 1: pool = Pool(n_procs) pool.map(_use_cpu, range(n_procs)) return True nipype-1.7.0/nipype/utils/provenance.py000066400000000000000000000373711413403311400202060ustar00rootroot00000000000000# -*- coding: utf-8 -*- from collections import OrderedDict from copy import deepcopy from pickle import dumps import os import getpass import platform from uuid import uuid1 import simplejson as json import numpy as np import prov.model as pm from .. import get_info, logging, __version__ from .filemanip import md5, hashlib, hash_infile logger = logging.getLogger("nipype.utils") foaf = pm.Namespace("foaf", "http://xmlns.com/foaf/0.1/") dcterms = pm.Namespace("dcterms", "http://purl.org/dc/terms/") nipype_ns = pm.Namespace("nipype", "http://nipy.org/nipype/terms/") niiri = pm.Namespace("niiri", "http://iri.nidash.org/") crypto = pm.Namespace( "crypto", ("http://id.loc.gov/vocabulary/preservation/" "cryptographicHashFunctions/"), ) get_id = lambda: niiri[uuid1().hex] PROV_ENVVARS = [ "PATH", "FSLDIR", "FREESURFER_HOME", "ANTSPATH", "CAMINOPATH", "CLASSPATH", "LD_LIBRARY_PATH", "DYLD_LIBRARY_PATH", "FIX_VERTEX_AREA", "FSF_OUTPUT_FORMAT", "FSLCONFDIR", "FSLOUTPUTTYPE", "LOGNAME", "USER", "MKL_NUM_THREADS", "OMP_NUM_THREADS", ] def get_attr_id(attr, skip=None): dictwithhash, hashval = get_hashval(attr, skip=skip) return niiri[hashval] max_text_len = 1024000 def get_hashval(inputdict, skip=None): """Return a dictionary of our items with hashes for each file. Searches through dictionary items and if an item is a file, it calculates the md5 hash of the file contents and stores the file name and hash value as the new key value. However, the overall bunch hash is calculated only on the hash value of a file. The path and name of the file are not used in the overall hash calculation. Returns ------- dict_withhash : dict Copy of our dictionary with the new file hashes included with each file. 
hashvalue : str The md5 hash value of the traited spec """ dict_withhash = {} dict_nofilename = OrderedDict() keys = {} for key in inputdict: if skip is not None and key in skip: continue keys[key.uri] = key for key in sorted(keys): val = inputdict[keys[key]] outname = key try: if isinstance(val, pm.URIRef): val = val.decode() except AttributeError: pass if isinstance(val, pm.QualifiedName): val = val.uri if isinstance(val, pm.Literal): val = val.value dict_nofilename[outname] = _get_sorteddict(val) dict_withhash[outname] = _get_sorteddict(val, True) sorted_dict = str(sorted(dict_nofilename.items())) return (dict_withhash, md5(sorted_dict.encode()).hexdigest()) def _get_sorteddict(object, dictwithhash=False): if isinstance(object, dict): out = OrderedDict() for key, val in sorted(object.items()): if val: out[key] = _get_sorteddict(val, dictwithhash) elif isinstance(object, (list, tuple)): out = [] for val in object: if val: out.append(_get_sorteddict(val, dictwithhash)) if isinstance(object, tuple): out = tuple(out) else: if isinstance(object, str) and os.path.isfile(object): hash = hash_infile(object) if dictwithhash: out = (object, hash) else: out = hash elif isinstance(object, float): out = "%.10f".format(object) else: out = object return out def safe_encode(x, as_literal=True): """ Encodes a python value for prov """ if x is None: value = "Unknown" if as_literal: return pm.Literal(value, pm.XSD["string"]) else: return value if isinstance(x, (str, bytes)): if isinstance(x, bytes): x = str(x, "utf-8") if os.path.exists(x): if x[0] != os.pathsep: x = os.path.abspath(x) value = "file://{}{}".format(platform.node().lower(), x) if not as_literal: return value try: return pm.URIRef(value) except AttributeError: return pm.Literal(value, pm.XSD["anyURI"]) else: value = x if len(x) > max_text_len: cliptxt = "...Clipped..." 
value = x[: max_text_len - len(cliptxt)] + cliptxt if not as_literal: return value return pm.Literal(value, pm.XSD["string"]) if isinstance(x, int): if not as_literal: return x return pm.Literal(int(x), pm.XSD["integer"]) if isinstance(x, float): if not as_literal: return x return pm.Literal(x, pm.XSD["float"]) if isinstance(x, dict): outdict = {} for key, value in list(x.items()): encoded_value = safe_encode(value, as_literal=False) if isinstance(encoded_value, pm.Literal): outdict[key] = encoded_value.json_representation() else: outdict[key] = encoded_value try: jsonstr = json.dumps(outdict) except UnicodeDecodeError as excp: jsonstr = "Could not encode dictionary. {}".format(excp) logger.warning("Prov: %s", jsonstr) if not as_literal: return jsonstr return pm.Literal(jsonstr, pm.XSD["string"]) if isinstance(x, (list, tuple)): x = list(x) is_object = False try: nptype = np.array(x).dtype is_object = nptype == np.dtype(object) except ValueError: is_object = True # If the array contains an heterogeneous mixture of data types # they should be encoded sequentially if is_object: outlist = [] for value in x: encoded_value = safe_encode(value, as_literal=False) if isinstance(encoded_value, pm.Literal): outlist.append(encoded_value.json_representation()) else: outlist.append(encoded_value) x = outlist try: jsonstr = json.dumps(x) except UnicodeDecodeError as excp: jsonstr = "Could not encode list/tuple. {}".format(excp) logger.warning("Prov: %s", jsonstr) if not as_literal: return jsonstr return pm.Literal(jsonstr, pm.XSD["string"]) # If is a literal, and as_literal do nothing. # else bring back to json. if isinstance(x, pm.Literal): if as_literal: return x return dumps(x.json_representation()) jsonstr = None ltype = pm.XSD["string"] try: jsonstr = json.dumps(x.__dict__) except AttributeError: pass if jsonstr is None: try: jsonstr = dumps(x) ltype = nipype_ns["pickle"] except TypeError as excp: jsonstr = "Could not encode object. 
{}".format(excp) if not as_literal: return jsonstr return pm.Literal(jsonstr, ltype) def prov_encode(graph, value, create_container=True): if isinstance(value, (list, tuple)) and create_container: value = list(value) if len(value) == 0: encoded_literal = safe_encode(value) attr = {pm.PROV["value"]: encoded_literal} eid = get_attr_id(attr) return graph.entity(eid, attr) if len(value) == 1: return prov_encode(graph, value[0]) entities = [] for item in value: item_entity = prov_encode(graph, item) entities.append(item_entity) if isinstance(item, (list, tuple)): continue item_entity_val = list(item_entity.value)[0] is_str = isinstance(item_entity_val, str) if not is_str or (is_str and "file://" not in item_entity_val): return prov_encode(graph, value, create_container=False) eid = get_id() entity = graph.collection(identifier=eid) for item_entity in entities: graph.hadMember(eid, item_entity) return entity else: encoded_literal = safe_encode(value) attr = {pm.PROV["value"]: encoded_literal} if isinstance(value, str) and os.path.exists(value): attr.update({pm.PROV["location"]: encoded_literal}) if not os.path.isdir(value): sha512 = hash_infile(value, crypto=hashlib.sha512) attr.update({crypto["sha512"]: pm.Literal(sha512, pm.XSD["string"])}) eid = get_attr_id(attr, skip=[pm.PROV["location"], pm.PROV["value"]]) else: eid = get_attr_id(attr, skip=[pm.PROV["location"]]) else: eid = get_attr_id(attr) entity = graph.entity(eid, attr) return entity def write_provenance(results, filename="provenance", format="all"): prov = None try: ps = ProvStore() ps.add_results(results) prov = ps.write_provenance(filename=filename, format=format) except Exception as e: import traceback err_msg = traceback.format_exc() if getattr(e, "args"): err_msg += "\n\nException arguments:\n" + ", ".join( ['"%s"' % arg for arg in e.args] ) logger.warning("Writing provenance failed - Exception details:\n%s", err_msg) return prov class ProvStore(object): def __init__(self): self.g = pm.ProvDocument() 
self.g.add_namespace(foaf) self.g.add_namespace(dcterms) self.g.add_namespace(nipype_ns) self.g.add_namespace(niiri) def add_results(self, results, keep_provenance=False): if keep_provenance and results.provenance: self.g = deepcopy(results.provenance) return self.g runtime = results.runtime interface = results.interface inputs = results.inputs outputs = results.outputs classname = interface.__name__ modulepath = "{0}.{1}".format(interface.__module__, interface.__name__) activitytype = "".join([i.capitalize() for i in modulepath.split(".")]) a0_attrs = { nipype_ns["module"]: interface.__module__, nipype_ns["interface"]: classname, pm.PROV["type"]: nipype_ns[activitytype], pm.PROV["label"]: classname, nipype_ns["duration"]: safe_encode(runtime.duration), nipype_ns["workingDirectory"]: safe_encode(runtime.cwd), nipype_ns["returnCode"]: safe_encode(runtime.returncode), nipype_ns["platform"]: safe_encode(runtime.platform), nipype_ns["version"]: safe_encode(runtime.version), } a0_attrs[foaf["host"]] = pm.Literal(runtime.hostname, pm.XSD["anyURI"]) try: a0_attrs.update({nipype_ns["command"]: safe_encode(runtime.cmdline)}) a0_attrs.update( {nipype_ns["commandPath"]: safe_encode(runtime.command_path)} ) a0_attrs.update( {nipype_ns["dependencies"]: safe_encode(runtime.dependencies)} ) except AttributeError: pass a0 = self.g.activity(get_id(), runtime.startTime, runtime.endTime, a0_attrs) # environment id = get_id() env_collection = self.g.collection(id) env_collection.add_attributes( {pm.PROV["type"]: nipype_ns["Environment"], pm.PROV["label"]: "Environment"} ) self.g.used(a0, id) # write environment entities for idx, (key, val) in enumerate(sorted(runtime.environ.items())): if key not in PROV_ENVVARS: continue in_attr = { pm.PROV["label"]: key, nipype_ns["environmentVariable"]: key, pm.PROV["value"]: safe_encode(val), } id = get_attr_id(in_attr) self.g.entity(id, in_attr) self.g.hadMember(env_collection, id) # write input entities if inputs: id = get_id() input_collection 
= self.g.collection(id) input_collection.add_attributes( {pm.PROV["type"]: nipype_ns["Inputs"], pm.PROV["label"]: "Inputs"} ) # write input entities for idx, (key, val) in enumerate(sorted(inputs.items())): in_entity = prov_encode(self.g, val).identifier self.g.hadMember(input_collection, in_entity) used_attr = {pm.PROV["label"]: key, nipype_ns["inPort"]: key} self.g.used(activity=a0, entity=in_entity, other_attributes=used_attr) # write output entities if outputs: id = get_id() output_collection = self.g.collection(id) if not isinstance(outputs, dict): outputs = outputs.get_traitsfree() output_collection.add_attributes( {pm.PROV["type"]: nipype_ns["Outputs"], pm.PROV["label"]: "Outputs"} ) self.g.wasGeneratedBy(output_collection, a0) # write output entities for idx, (key, val) in enumerate(sorted(outputs.items())): out_entity = prov_encode(self.g, val).identifier self.g.hadMember(output_collection, out_entity) gen_attr = {pm.PROV["label"]: key, nipype_ns["outPort"]: key} self.g.generation(out_entity, activity=a0, other_attributes=gen_attr) # write runtime entities id = get_id() runtime_collection = self.g.collection(id) runtime_collection.add_attributes( {pm.PROV["type"]: nipype_ns["Runtime"], pm.PROV["label"]: "RuntimeInfo"} ) self.g.wasGeneratedBy(runtime_collection, a0) for key, value in sorted(runtime.items()): if not value: continue if key not in ["stdout", "stderr", "merged"]: continue attr = {pm.PROV["label"]: key, nipype_ns[key]: safe_encode(value)} id = get_id() self.g.entity(get_id(), attr) self.g.hadMember(runtime_collection, id) # create agents user_attr = { pm.PROV["type"]: pm.PROV["Person"], pm.PROV["label"]: getpass.getuser(), foaf["name"]: safe_encode(getpass.getuser()), } user_agent = self.g.agent(get_attr_id(user_attr), user_attr) agent_attr = { pm.PROV["type"]: pm.PROV["SoftwareAgent"], pm.PROV["label"]: "Nipype", foaf["name"]: safe_encode("Nipype"), nipype_ns["version"]: __version__, } for key, value in list(get_info().items()): 
agent_attr.update({nipype_ns[key]: safe_encode(value)}) software_agent = self.g.agent(get_attr_id(agent_attr), agent_attr) self.g.wasAssociatedWith( a0, user_agent, None, None, {pm.PROV["hadRole"]: nipype_ns["LoggedInUser"]} ) self.g.wasAssociatedWith(a0, software_agent) return self.g def write_provenance(self, filename="provenance", format="all"): if format in ["provn", "all"]: with open(filename + ".provn", "wt") as fp: fp.writelines(self.g.get_provn()) try: if format in ["rdf", "all"]: if len(self.g.bundles) == 0: rdf_format = "turtle" ext = ".ttl" else: rdf_format = "trig" ext = ".trig" self.g.serialize(filename + ext, format="rdf", rdf_format=rdf_format) if format in ["jsonld"]: self.g.serialize( filename + ".jsonld", format="rdf", rdf_format="json-ld", indent=4 ) except pm.serializers.DoNotExist: pass return self.g nipype-1.7.0/nipype/utils/spm_docs.py000066400000000000000000000032761413403311400176520ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Grab documentation from spm.""" import os from ..interfaces import matlab def grab_doc(task_name): """Grab the SPM documentation for the given SPM task named `task_name` Parameters ---------- task_name : string Task name for which we are grabbing documentation. Example task names are ``Realign: Estimate & Reslice``, ``Normalise: Estimate & Write``. See Also -------- spm_flat_config.m : This function can print out all the possible task names. """ cmd = matlab.MatlabCommand(resource_monitor=False) # We need to tell Matlab where to find our spm_get_doc.m file. cwd = os.path.dirname(__file__) # Build matlab command mcmd = "addpath('%s');spm_get_doc('%s')" % (cwd, task_name) cmd.inputs.script_lines = mcmd # Run the command and get the documentation out of the result. out = cmd.run() return _strip_header(out.runtime.stdout) def _strip_header(doc): """Strip Matlab header and splash info off doc. 
Searches for the tag 'NIPYPE' in the doc and returns everyting after that. """ hdr = "NIPYPE" # There's some weird cruft at the end of the docstring, almost looks like # the hex for the escape character 0x1b. cruft = "\x1b" try: index = doc.index(hdr) except ValueError as e: raise IOError("This docstring was not generated by Nipype!\n") from e index += len(hdr) index += 1 doc = doc[index:] try: index = doc.index(cruft) except ValueError: index = len(doc) return doc[:index] nipype-1.7.0/nipype/utils/spm_flat_config.m000066400000000000000000000021021413403311400207640ustar00rootroot00000000000000function cfgstruct = spm_flat_config(print_names) % Get a flat spm_config structure, with option to print out names % % This calls spm_config() to get the the nested configuration % structure from spm. We use this to fetch documentation, the % flattened structure is much easier to search through. If % print_names is true (value of 1) it will print out the configuration % names. If print_names is false (value of 0), it will only return % the flattened structure. if strcmp(spm('ver'),'SPM5') cfg = spm_config(); else cfgstruct = []; return; end cfgstruct = spm_cfg_list(cfg, {}); if print_names [rows, cols] = size(cfgstruct); for i = 1:cols fprintf(1, '%d : %s\n', i, cfgstruct{i}.name) end end end function objlist = spm_cfg_list(astruct, objlist) % Flatten the nested structure in 'astruct'. % Returns a cell array. % Usage: objlist = spm_cfg_list(astruct, {}) if isfield(astruct, 'values') [rows, cols] = size(astruct.values); for i = 1:cols objlist = spm_cfg_list(astruct.values{i}, objlist); end else objlist = {objlist{:} astruct}; end end nipype-1.7.0/nipype/utils/spm_get_doc.m000066400000000000000000000011641413403311400201240ustar00rootroot00000000000000function doc = spm_get_doc(docname) % Get the documentation from SPM for the functionality named % docname. % % This will search through the spm_config() object and grab the % documentation whose name matches docname. 
cfgstruct = spm_flat_config(0); [rows, cols] = size(cfgstruct); docstruct.help={'None'}; % Loop over cell array and search for the docname for i = 1:cols if strcmp(cfgstruct{i}.name, docname) docstruct = cfgstruct{i}; break end end % Add a tag so we can strip off the Matlab header information and % only print out the SPM documentation. tag = 'NIPYPE\n'; doc = strcat(tag, docstruct.help{:}); end nipype-1.7.0/nipype/utils/subprocess.py000066400000000000000000000132261413403311400202270ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Miscellaneous utility functions """ import os import sys import gc import errno import select import locale import datetime from subprocess import Popen, STDOUT, PIPE from .filemanip import canonicalize_env, read_stream from .. import logging iflogger = logging.getLogger("nipype.interface") class Stream(object): """Function to capture stdout and stderr streams with timestamps stackoverflow.com/questions/4984549/merge-and-sync-stdout-and-stderr/5188359 """ def __init__(self, name, impl): self._name = name self._impl = impl self._buf = "" self._rows = [] self._lastidx = 0 self.default_encoding = locale.getdefaultlocale()[1] or "UTF-8" def fileno(self): "Pass-through for file descriptor." return self._impl.fileno() def read(self, drain=0): "Read from the file descriptor. If 'drain' set, read until EOF." 
while self._read(drain) is not None: if not drain: break def _read(self, drain): "Read from the file descriptor" fd = self.fileno() buf = os.read(fd, 4096).decode(self.default_encoding) if not buf and not self._buf: return None if "\n" not in buf: if not drain: self._buf += buf return [] # prepend any data previously read, then split into lines and format buf = self._buf + buf if "\n" in buf: tmp, rest = buf.rsplit("\n", 1) else: tmp = buf rest = None self._buf = rest now = datetime.datetime.now().isoformat() rows = tmp.split("\n") self._rows += [(now, "%s %s:%s" % (self._name, now, r), r) for r in rows] for idx in range(self._lastidx, len(self._rows)): iflogger.info(self._rows[idx][1]) self._lastidx = len(self._rows) def run_command(runtime, output=None, timeout=0.01): """Run a command, read stdout and stderr, prefix with timestamp. The returned runtime contains a merged stdout+stderr log with timestamps """ # Init variables cmdline = runtime.cmdline env = canonicalize_env(runtime.environ) errfile = None outfile = None stdout = PIPE stderr = PIPE if output == "file": outfile = os.path.join(runtime.cwd, "output.nipype") stdout = open(outfile, "wb") # t=='text'===default stderr = STDOUT elif output == "file_split": outfile = os.path.join(runtime.cwd, "stdout.nipype") stdout = open(outfile, "wb") errfile = os.path.join(runtime.cwd, "stderr.nipype") stderr = open(errfile, "wb") elif output == "file_stdout": outfile = os.path.join(runtime.cwd, "stdout.nipype") stdout = open(outfile, "wb") elif output == "file_stderr": errfile = os.path.join(runtime.cwd, "stderr.nipype") stderr = open(errfile, "wb") proc = Popen( cmdline, stdout=stdout, stderr=stderr, shell=True, cwd=runtime.cwd, env=env, close_fds=(not sys.platform.startswith("win")), ) result = { "stdout": [], "stderr": [], "merged": [], } if output == "stream": streams = [Stream("stdout", proc.stdout), Stream("stderr", proc.stderr)] def _process(drain=0): try: res = select.select(streams, [], [], timeout) except 
select.error as e: iflogger.info(e) if e.errno == errno.EINTR: return else: raise else: for stream in res[0]: stream.read(drain) while proc.returncode is None: proc.poll() _process() _process(drain=1) # collect results, merge and return result = {} temp = [] for stream in streams: rows = stream._rows temp += rows result[stream._name] = [r[2] for r in rows] temp.sort() result["merged"] = [r[1] for r in temp] if output.startswith("file"): proc.wait() if outfile is not None: stdout.flush() stdout.close() with open(outfile, "rb") as ofh: stdoutstr = ofh.read() result["stdout"] = read_stream(stdoutstr, logger=iflogger) del stdoutstr if errfile is not None: stderr.flush() stderr.close() with open(errfile, "rb") as efh: stderrstr = efh.read() result["stderr"] = read_stream(stderrstr, logger=iflogger) del stderrstr if output == "file": result["merged"] = result["stdout"] result["stdout"] = [] else: stdout, stderr = proc.communicate() if output == "allatonce": # Discard stdout and stderr otherwise result["stdout"] = read_stream(stdout, logger=iflogger) result["stderr"] = read_stream(stderr, logger=iflogger) runtime.returncode = proc.returncode try: proc.terminate() # Ensure we are done except OSError as error: # Python 2 raises when the process is already gone if error.errno != errno.ESRCH: raise # Dereference & force GC for a cleanup del proc del stdout del stderr gc.collect() runtime.stderr = "\n".join(result["stderr"]) runtime.stdout = "\n".join(result["stdout"]) runtime.merged = "\n".join(result["merged"]) return runtime nipype-1.7.0/nipype/utils/tests/000077500000000000000000000000001413403311400166235ustar00rootroot00000000000000nipype-1.7.0/nipype/utils/tests/__init__.py000066400000000000000000000023011413403311400207300ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Please write tests for all code submitted to the repository. 
The code will be used by many people, and will in due course be used in live analyses, so we need to make sure that we have the best possible defenses against bugs. It also helps us think about code interfaces, and gives examples of code use that can be useful for others using the code. Python's unit testing framework (the U{unittest} module) is used to implement project tests. We use the convention that each package contains a subpackage called tests which contains modules defining test cases (subclasses of U{unittest.TestCase}) for that package. The nipy.utils.tests package contains an example test case called L{test_template.TemplateTest} to get you started writing your tests. Please try to include working test cases for all functions and classes that you contribute. Often, writing tests for your code before the code is written helps to frame your thoughts about what the code should look like. """ nipype-1.7.0/nipype/utils/tests/test_cmd.py000066400000000000000000000044661413403311400210110ustar00rootroot00000000000000#!/usr/bin/env python import pytest import sys from contextlib import contextmanager from io import StringIO from ...utils import nipype_cmd @contextmanager def capture_sys_output(): caputure_out, capture_err = StringIO(), StringIO() current_out, current_err = sys.stdout, sys.stderr try: sys.stdout, sys.stderr = caputure_out, capture_err yield caputure_out, capture_err finally: sys.stdout, sys.stderr = current_out, current_err class TestNipypeCMD: maxDiff = None def test_main_returns_2_on_empty(self): with pytest.raises(SystemExit) as cm: with capture_sys_output() as (stdout, stderr): nipype_cmd.main(["nipype_cmd"]) exit_exception = cm.value assert exit_exception.code == 2 msg = """usage: nipype_cmd [-h] module interface nipype_cmd: error: the following arguments are required: module, interface """ assert stderr.getvalue() == msg assert stdout.getvalue() == "" def test_main_returns_0_on_help(self): with pytest.raises(SystemExit) as cm: with 
capture_sys_output() as (stdout, stderr): nipype_cmd.main(["nipype_cmd", "-h"]) exit_exception = cm.value assert exit_exception.code == 0 assert stderr.getvalue() == "" assert ( stdout.getvalue() == """usage: nipype_cmd [-h] module interface Nipype interface runner positional arguments: module Module name interface Interface name optional arguments: -h, --help show this help message and exit """ ) def test_list_nipy_interfacesp(self): with pytest.raises(SystemExit) as cm: with capture_sys_output() as (stdout, stderr): nipype_cmd.main(["nipype_cmd", "nipype.interfaces.nipy"]) # repeat twice in case nipy raises warnings with pytest.raises(SystemExit) as cm: with capture_sys_output() as (stdout, stderr): nipype_cmd.main(["nipype_cmd", "nipype.interfaces.nipy"]) exit_exception = cm.value assert exit_exception.code == 0 assert stderr.getvalue() == "" assert ( stdout.getvalue() == """Available Interfaces: \tComputeMask \tEstimateContrast \tFitGLM \tSimilarity \tSpaceTimeRealigner """ ) nipype-1.7.0/nipype/utils/tests/test_config.py000066400000000000000000000262361413403311400215120ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import sys import pytest from nipype import config from unittest.mock import MagicMock try: import xvfbwrapper has_Xvfb = True except ImportError: has_Xvfb = False # Define mocks for xvfbwrapper. Do not forget the spec to ensure that # hasattr() checks return False with missing attributes. 
xvfbpatch = MagicMock(spec=["Xvfb"]) xvfbpatch.Xvfb.return_value = MagicMock( spec=["new_display", "start", "stop"], new_display=2010 ) # Mock the legacy xvfbwrapper.Xvfb class (changed display attribute name) xvfbpatch_old = MagicMock(spec=["Xvfb"]) xvfbpatch_old.Xvfb.return_value = MagicMock( spec=["vdisplay_num", "start", "stop"], vdisplay_num=2010 ) @pytest.mark.parametrize("dispvar", [":12", "localhost:12", "localhost:12.1"]) def test_display_parse(monkeypatch, dispvar): """Check that when $DISPLAY is defined, the display is correctly parsed""" config._display = None config._config.remove_option("execution", "display_variable") monkeypatch.setenv("DISPLAY", dispvar) assert config.get_display() == ":12" # Test that it was correctly cached assert config.get_display() == ":12" @pytest.mark.parametrize("dispnum", range(5)) def test_display_config(monkeypatch, dispnum): """Check that the display_variable option is used ($DISPLAY not set)""" config._display = None dispstr = ":%d" % dispnum config.set("execution", "display_variable", dispstr) monkeypatch.delitem(os.environ, "DISPLAY", raising=False) assert config.get_display() == config.get("execution", "display_variable") # Test that it was correctly cached assert config.get_display() == config.get("execution", "display_variable") @pytest.mark.parametrize("dispnum", range(5)) def test_display_system(monkeypatch, dispnum): """Check that when only a $DISPLAY is defined, it is used""" config._display = None config._config.remove_option("execution", "display_variable") dispstr = ":%d" % dispnum monkeypatch.setenv("DISPLAY", dispstr) assert config.get_display() == dispstr # Test that it was correctly cached assert config.get_display() == dispstr def test_display_config_and_system(monkeypatch): """Check that when only both config and $DISPLAY are defined, the config takes precedence""" config._display = None dispstr = ":10" config.set("execution", "display_variable", dispstr) monkeypatch.setenv("DISPLAY", ":0") assert 
config.get_display() == dispstr # Test that it was correctly cached assert config.get_display() == dispstr def test_display_noconfig_nosystem_patched(monkeypatch): """Check that when no $DISPLAY nor option are specified, a virtual Xvfb is used""" config._display = None if config.has_option("execution", "display_variable"): config._config.remove_option("execution", "display_variable") monkeypatch.delitem(os.environ, "DISPLAY", raising=False) monkeypatch.setitem(sys.modules, "xvfbwrapper", xvfbpatch) monkeypatch.setattr(sys, "platform", value="linux") assert config.get_display() == ":2010" # Test that it was correctly cached assert config.get_display() == ":2010" # Check that raises in Mac config._display = None monkeypatch.setattr(sys, "platform", value="darwin") with pytest.raises(RuntimeError): config.get_display() def test_display_empty_patched(monkeypatch): """ Check that when $DISPLAY is empty string and no option is specified, a virtual Xvfb is used """ config._display = None if config.has_option("execution", "display_variable"): config._config.remove_option("execution", "display_variable") monkeypatch.setenv("DISPLAY", "") monkeypatch.setitem(sys.modules, "xvfbwrapper", xvfbpatch) monkeypatch.setattr(sys, "platform", value="linux") assert config.get_display() == ":2010" # Test that it was correctly cached assert config.get_display() == ":2010" # Check that raises in Mac config._display = None monkeypatch.setattr(sys, "platform", value="darwin") with pytest.raises(RuntimeError): config.get_display() def test_display_noconfig_nosystem_patched_oldxvfbwrapper(monkeypatch): """ Check that when no $DISPLAY nor option are specified, a virtual Xvfb is used (with a legacy version of xvfbwrapper). 
""" config._display = None if config.has_option("execution", "display_variable"): config._config.remove_option("execution", "display_variable") monkeypatch.delitem(os.environ, "DISPLAY", raising=False) monkeypatch.setitem(sys.modules, "xvfbwrapper", xvfbpatch_old) monkeypatch.setattr(sys, "platform", value="linux") assert config.get_display() == ":2010" # Test that it was correctly cached assert config.get_display() == ":2010" # Check that raises in Mac config._display = None monkeypatch.setattr(sys, "platform", value="darwin") with pytest.raises(RuntimeError): config.get_display() def test_display_empty_patched_oldxvfbwrapper(monkeypatch): """ Check that when $DISPLAY is empty string and no option is specified, a virtual Xvfb is used (with a legacy version of xvfbwrapper). """ config._display = None if config.has_option("execution", "display_variable"): config._config.remove_option("execution", "display_variable") monkeypatch.setenv("DISPLAY", "") monkeypatch.setitem(sys.modules, "xvfbwrapper", xvfbpatch_old) monkeypatch.setattr(sys, "platform", value="linux") assert config.get_display() == ":2010" # Test that it was correctly cached assert config.get_display() == ":2010" # Check that raises in Mac config._display = None monkeypatch.setattr(sys, "platform", value="darwin") with pytest.raises(RuntimeError): config.get_display() def test_display_noconfig_nosystem_notinstalled(monkeypatch): """ Check that an exception is raised if xvfbwrapper is not installed but necessary (no config and $DISPLAY unset) """ config._display = None if config.has_option("execution", "display_variable"): config._config.remove_option("execution", "display_variable") monkeypatch.delenv("DISPLAY", raising=False) monkeypatch.setitem(sys.modules, "xvfbwrapper", None) with pytest.raises(RuntimeError): config.get_display() def test_display_empty_notinstalled(monkeypatch): """ Check that an exception is raised if xvfbwrapper is not installed but necessary (no config and $DISPLAY empty) """ 
config._display = None if config.has_option("execution", "display_variable"): config._config.remove_option("execution", "display_variable") monkeypatch.setenv("DISPLAY", "") monkeypatch.setitem(sys.modules, "xvfbwrapper", None) with pytest.raises(RuntimeError): config.get_display() @pytest.mark.skipif(not has_Xvfb, reason="xvfbwrapper not installed") @pytest.mark.skipif("darwin" in sys.platform, reason="macosx requires root for Xvfb") def test_display_noconfig_nosystem_installed(monkeypatch): """ Check that actually uses xvfbwrapper when installed (not mocked) and necessary (no config and $DISPLAY unset) """ config._display = None if config.has_option("execution", "display_variable"): config._config.remove_option("execution", "display_variable") monkeypatch.delenv("DISPLAY", raising=False) newdisp = config.get_display() assert int(newdisp.split(":")[-1]) > 1000 # Test that it was correctly cached assert config.get_display() == newdisp @pytest.mark.skipif(not has_Xvfb, reason="xvfbwrapper not installed") @pytest.mark.skipif("darwin" in sys.platform, reason="macosx requires root for Xvfb") def test_display_empty_installed(monkeypatch): """ Check that actually uses xvfbwrapper when installed (not mocked) and necessary (no config and $DISPLAY empty) """ config._display = None if config.has_option("execution", "display_variable"): config._config.remove_option("execution", "display_variable") monkeypatch.setenv("DISPLAY", "") newdisp = config.get_display() assert int(newdisp.split(":")[-1]) > 1000 # Test that it was correctly cached assert config.get_display() == newdisp def test_display_empty_macosx(monkeypatch): """ Check that an exception is raised if xvfbwrapper is necessary (no config and $DISPLAY unset) but platform is OSX. 
See https://github.com/nipy/nipype/issues/1400 """ config._display = None if config.has_option("execution", "display_variable"): config._config.remove_option("execution", "display_variable") monkeypatch.delenv("DISPLAY", "") monkeypatch.setattr(sys, "platform", "darwin") with pytest.raises(RuntimeError): config.get_display() def test_cwd_cached(tmpdir): """Check that changing dirs does not change nipype's cwd""" oldcwd = config.cwd tmpdir.chdir() assert config.cwd == oldcwd def test_debug_mode(): from ... import logging sofc_config = config.get("execution", "stop_on_first_crash") ruo_config = config.get("execution", "remove_unnecessary_outputs") ki_config = config.get("execution", "keep_inputs") wf_config = config.get("logging", "workflow_level") if_config = config.get("logging", "interface_level") ut_config = config.get("logging", "utils_level") wf_level = logging.getLogger("nipype.workflow").level if_level = logging.getLogger("nipype.interface").level ut_level = logging.getLogger("nipype.utils").level config.enable_debug_mode() # Check config is updated and logging levels, too assert config.get("execution", "stop_on_first_crash") == "true" assert config.get("execution", "remove_unnecessary_outputs") == "false" assert config.get("execution", "keep_inputs") == "true" assert config.get("logging", "workflow_level") == "DEBUG" assert config.get("logging", "interface_level") == "DEBUG" assert config.get("logging", "utils_level") == "DEBUG" assert logging.getLogger("nipype.workflow").level == 10 assert logging.getLogger("nipype.interface").level == 10 assert logging.getLogger("nipype.utils").level == 10 # Restore config and levels config.set("execution", "stop_on_first_crash", sofc_config) config.set("execution", "remove_unnecessary_outputs", ruo_config) config.set("execution", "keep_inputs", ki_config) config.set("logging", "workflow_level", wf_config) config.set("logging", "interface_level", if_config) config.set("logging", "utils_level", ut_config) 
logging.update_logging(config) assert config.get("execution", "stop_on_first_crash") == sofc_config assert config.get("execution", "remove_unnecessary_outputs") == ruo_config assert config.get("execution", "keep_inputs") == ki_config assert config.get("logging", "workflow_level") == wf_config assert config.get("logging", "interface_level") == if_config assert config.get("logging", "utils_level") == ut_config assert logging.getLogger("nipype.workflow").level == wf_level assert logging.getLogger("nipype.interface").level == if_level assert logging.getLogger("nipype.utils").level == ut_level nipype-1.7.0/nipype/utils/tests/test_docparse.py000066400000000000000000000026121413403311400220350ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from nipype.utils.docparse import reverse_opt_map, build_doc, insert_doc foo_opt_map = {"outline": "-o", "fun": "-f %.2f", "flags": "%s"} foo_doc = """Usage: foo infile outfile [opts] Bunch of options: -o something about an outline -f intensity of fun factor Other stuff: -v verbose """ fmtd_doc = """Parameters ---------- outline : something about an outline fun : intensity of fun factor Others Parameters ----------------- -v verbose""" def test_rev_opt_map(): map = {"-f": "fun", "-o": "outline"} rev_map = reverse_opt_map(foo_opt_map) assert rev_map == map def test_build_doc(): opts = reverse_opt_map(foo_opt_map) doc = build_doc(foo_doc, opts) assert doc == fmtd_doc inserted_doc = """Parameters ---------- infile : str The name of the input file outfile : str The name of the output file outline : something about an outline fun : intensity of fun factor Others Parameters ----------------- -v verbose""" def test_insert_doc(): new_items = ["infile : str", " The name of the input file"] new_items.extend(["outfile : str", " The name of the output file"]) newdoc = insert_doc(fmtd_doc, new_items) assert newdoc == inserted_doc 
nipype-1.7.0/nipype/utils/tests/test_filemanip.py000066400000000000000000000630161413403311400222060ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import time from pathlib import Path from unittest import mock, SkipTest import pytest from ...testing import TempFATFS from ...utils.filemanip import ( save_json, load_json, fname_presuffix, fnames_presuffix, hash_rename, check_forhash, _parse_mount_table, _cifs_table, on_cifs, copyfile, copyfiles, ensure_list, simplify_list, check_depends, split_filename, get_related_files, indirectory, loadpkl, loadcrash, savepkl, path_resolve, write_rst_list, emptydirs, ) def _ignore_atime(stat): return stat[:7] + stat[8:] @pytest.mark.parametrize( "filename, split", [ ("foo.nii", ("", "foo", ".nii")), ("foo.nii.gz", ("", "foo", ".nii.gz")), ("foo.niml.dset", ("", "foo", ".niml.dset")), ("/usr/local/foo.nii.gz", ("/usr/local", "foo", ".nii.gz")), ("../usr/local/foo.nii", ("../usr/local", "foo", ".nii")), ("/usr/local/foo.a.b.c.d", ("/usr/local", "foo.a.b.c", ".d")), ("/usr/local/", ("/usr/local", "", "")), ], ) def test_split_filename(filename, split): res = split_filename(filename) assert res == split def test_fname_presuffix(): fname = "foo.nii" pth = fname_presuffix(fname, "pre_", "_post", "/tmp") assert pth == "/tmp/pre_foo_post.nii" fname += ".gz" pth = fname_presuffix(fname, "pre_", "_post", "/tmp") assert pth == "/tmp/pre_foo_post.nii.gz" pth = fname_presuffix(fname, "pre_", "_post", "/tmp", use_ext=False) assert pth == "/tmp/pre_foo_post" def test_fnames_presuffix(): fnames = ["foo.nii", "bar.nii"] pths = fnames_presuffix(fnames, "pre_", "_post", "/tmp") assert pths == ["/tmp/pre_foo_post.nii", "/tmp/pre_bar_post.nii"] @pytest.mark.parametrize( "filename, newname", [ ("foobar.nii", "foobar_0xabc123.nii"), ("foobar.nii.gz", "foobar_0xabc123.nii.gz"), ], ) def test_hash_rename(filename, newname): 
new_name = hash_rename(filename, "abc123") assert new_name == newname def test_check_forhash(): fname = "foobar" orig_hash = "_0x4323dbcefdc51906decd8edcb3327943" hashed_name = "".join((fname, orig_hash, ".nii")) result, hash = check_forhash(hashed_name) assert result assert hash == [orig_hash] result, hash = check_forhash("foobar.nii") assert not result assert hash is None @pytest.fixture() def _temp_analyze_files(tmpdir): """Generate temporary analyze file pair.""" orig_img = tmpdir.join("orig.img") orig_hdr = tmpdir.join("orig.hdr") orig_img.open("w+").close() orig_hdr.open("w+").close() return str(orig_img), str(orig_hdr) @pytest.fixture() def _temp_analyze_files_prime(tmpdir): """Generate temporary analyze file pair.""" orig_img = tmpdir.join("orig_prime.img") orig_hdr = tmpdir.join("orig_prime.hdr") orig_img.open("w+").close() orig_hdr.open("w+").close() return orig_img.strpath, orig_hdr.strpath def test_copyfile(_temp_analyze_files): orig_img, orig_hdr = _temp_analyze_files pth, fname = os.path.split(orig_img) new_img = os.path.join(pth, "newfile.img") new_hdr = os.path.join(pth, "newfile.hdr") copyfile(orig_img, new_img) assert os.path.exists(new_img) assert os.path.exists(new_hdr) def test_copyfile_true(_temp_analyze_files): orig_img, orig_hdr = _temp_analyze_files pth, fname = os.path.split(orig_img) new_img = os.path.join(pth, "newfile.img") new_hdr = os.path.join(pth, "newfile.hdr") # Test with copy=True copyfile(orig_img, new_img, copy=True) assert os.path.exists(new_img) assert os.path.exists(new_hdr) def test_copyfiles(_temp_analyze_files, _temp_analyze_files_prime): orig_img1, orig_hdr1 = _temp_analyze_files orig_img2, orig_hdr2 = _temp_analyze_files_prime pth, fname = os.path.split(orig_img1) new_img1 = os.path.join(pth, "newfile.img") new_hdr1 = os.path.join(pth, "newfile.hdr") pth, fname = os.path.split(orig_img2) new_img2 = os.path.join(pth, "secondfile.img") new_hdr2 = os.path.join(pth, "secondfile.hdr") copyfiles([orig_img1, orig_img2], 
[new_img1, new_img2]) assert os.path.exists(new_img1) assert os.path.exists(new_hdr1) assert os.path.exists(new_img2) assert os.path.exists(new_hdr2) def test_linkchain(_temp_analyze_files): if os.name != "posix": return orig_img, orig_hdr = _temp_analyze_files pth, fname = os.path.split(orig_img) new_img1 = os.path.join(pth, "newfile1.img") new_hdr1 = os.path.join(pth, "newfile1.hdr") new_img2 = os.path.join(pth, "newfile2.img") new_hdr2 = os.path.join(pth, "newfile2.hdr") new_img3 = os.path.join(pth, "newfile3.img") new_hdr3 = os.path.join(pth, "newfile3.hdr") copyfile(orig_img, new_img1) assert os.path.islink(new_img1) assert os.path.islink(new_hdr1) copyfile(new_img1, new_img2, copy=True) assert not os.path.islink(new_img2) assert not os.path.islink(new_hdr2) assert not os.path.samefile(orig_img, new_img2) assert not os.path.samefile(orig_hdr, new_hdr2) copyfile(new_img1, new_img3, copy=True, use_hardlink=True) assert not os.path.islink(new_img3) assert not os.path.islink(new_hdr3) assert os.path.samefile(orig_img, new_img3) assert os.path.samefile(orig_hdr, new_hdr3) def test_recopy(_temp_analyze_files): # Re-copying with the same parameters on an unchanged file should be # idempotent # # Test for copying from regular files and symlinks orig_img, orig_hdr = _temp_analyze_files pth, fname = os.path.split(orig_img) img_link = os.path.join(pth, "imglink.img") new_img = os.path.join(pth, "newfile.img") new_hdr = os.path.join(pth, "newfile.hdr") copyfile(orig_img, img_link) for copy in (True, False): for use_hardlink in (True, False): for hashmethod in ("timestamp", "content"): kwargs = { "copy": copy, "use_hardlink": use_hardlink, "hashmethod": hashmethod, } # Copying does not preserve the original file's timestamp, so # we may delete and re-copy, if the test is slower than a clock # tick if copy and not use_hardlink and hashmethod == "timestamp": continue copyfile(orig_img, new_img, **kwargs) img_stat = _ignore_atime(os.stat(new_img)) hdr_stat = 
_ignore_atime(os.stat(new_hdr)) copyfile(orig_img, new_img, **kwargs) err_msg = "Regular - OS: {}; Copy: {}; Hardlink: {}".format( os.name, copy, use_hardlink ) assert img_stat == _ignore_atime(os.stat(new_img)), err_msg assert hdr_stat == _ignore_atime(os.stat(new_hdr)), err_msg os.unlink(new_img) os.unlink(new_hdr) copyfile(img_link, new_img, **kwargs) img_stat = _ignore_atime(os.stat(new_img)) hdr_stat = _ignore_atime(os.stat(new_hdr)) copyfile(img_link, new_img, **kwargs) err_msg = "Symlink - OS: {}; Copy: {}; Hardlink: {}".format( os.name, copy, use_hardlink ) assert img_stat == _ignore_atime(os.stat(new_img)), err_msg assert hdr_stat == _ignore_atime(os.stat(new_hdr)), err_msg os.unlink(new_img) os.unlink(new_hdr) def test_copyfallback(_temp_analyze_files): if os.name != "posix": return orig_img, orig_hdr = _temp_analyze_files pth, imgname = os.path.split(orig_img) pth, hdrname = os.path.split(orig_hdr) try: fatfs = TempFATFS() except (IOError, OSError): raise SkipTest("Fuse mount failed. 
copyfile fallback tests skipped.") with fatfs as fatdir: tgt_img = os.path.join(fatdir, imgname) tgt_hdr = os.path.join(fatdir, hdrname) for copy in (True, False): for use_hardlink in (True, False): copyfile(orig_img, tgt_img, copy=copy, use_hardlink=use_hardlink) assert os.path.exists(tgt_img) assert os.path.exists(tgt_hdr) assert not os.path.islink(tgt_img) assert not os.path.islink(tgt_hdr) assert not os.path.samefile(orig_img, tgt_img) assert not os.path.samefile(orig_hdr, tgt_hdr) os.unlink(tgt_img) os.unlink(tgt_hdr) def test_get_related_files(_temp_analyze_files): orig_img, orig_hdr = _temp_analyze_files related_files = get_related_files(orig_img) assert orig_img in related_files assert orig_hdr in related_files related_files = get_related_files(orig_hdr) assert orig_img in related_files assert orig_hdr in related_files def test_get_related_files_noninclusive(_temp_analyze_files): orig_img, orig_hdr = _temp_analyze_files related_files = get_related_files(orig_img, include_this_file=False) assert orig_img not in related_files assert orig_hdr in related_files related_files = get_related_files(orig_hdr, include_this_file=False) assert orig_img in related_files assert orig_hdr not in related_files @pytest.mark.parametrize( "filename, expected", [ ("foo.nii", ["foo.nii"]), (["foo.nii"], ["foo.nii"]), (("foo", "bar"), ["foo", "bar"]), (12.34, None), ], ) def test_ensure_list(filename, expected): x = ensure_list(filename) assert x == expected @pytest.mark.parametrize( "list, expected", [(["foo.nii"], "foo.nii"), (["foo", "bar"], ["foo", "bar"])] ) def test_simplify_list(list, expected): x = simplify_list(list) assert x == expected def test_check_depends(tmpdir): def touch(fname): with open(fname, "a"): os.utime(fname, None) dependencies = [tmpdir.join(str(i)).strpath for i in range(3)] targets = [tmpdir.join(str(i)).strpath for i in range(3, 6)] # Targets newer than dependencies for dep in dependencies: touch(dep) time.sleep(1) for tgt in targets: touch(tgt) assert 
check_depends(targets, dependencies) # Targets older than newest dependency time.sleep(1) touch(dependencies[0]) assert not check_depends(targets, dependencies) # Missing dependency os.unlink(dependencies[0]) try: check_depends(targets, dependencies) except OSError: pass else: assert False, "Should raise OSError on missing dependency" def test_json(tmpdir): # Simple roundtrip test of json files, just a sanity check. adict = dict(a="one", c="three", b="two") name = tmpdir.join("test.json").strpath save_json(name, adict) # save_json closes the file new_dict = load_json(name) os.unlink(name) assert sorted(adict.items()) == sorted(new_dict.items()) @pytest.mark.parametrize( "file, length, expected_files", [ ("/path/test.img", 3, ["/path/test.hdr", "/path/test.img", "/path/test.mat"]), ("/path/test.hdr", 3, ["/path/test.hdr", "/path/test.img", "/path/test.mat"]), ("/path/test.BRIK", 2, ["/path/test.BRIK", "/path/test.HEAD"]), ("/path/test.HEAD", 2, ["/path/test.BRIK", "/path/test.HEAD"]), ("/path/foo.nii", 2, ["/path/foo.nii", "/path/foo.mat"]), ], ) def test_related_files(file, length, expected_files): related_files = get_related_files(file) assert len(related_files) == length for ef in expected_files: assert ef in related_files MOUNT_OUTPUTS = ( # Linux, no CIFS ( r"""sysfs on /sys type sysfs (rw,nosuid,nodev,noexec,relatime) proc on /proc type proc (rw,nosuid,nodev,noexec,relatime) udev on /dev type devtmpfs (rw,nosuid,relatime,size=8121732k,nr_inodes=2030433,mode=755) devpts on /dev/pts type devpts (rw,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000) tmpfs on /run type tmpfs (rw,nosuid,noexec,relatime,size=1628440k,mode=755) /dev/nvme0n1p2 on / type ext4 (rw,relatime,errors=remount-ro,data=ordered) securityfs on /sys/kernel/security type securityfs (rw,nosuid,nodev,noexec,relatime) tmpfs on /dev/shm type tmpfs (rw,nosuid,nodev) tmpfs on /sys/fs/cgroup type tmpfs (ro,nosuid,nodev,noexec,mode=755) cgroup on /sys/fs/cgroup/systemd type cgroup 
(rw,nosuid,nodev,noexec,relatime,xattr,release_agent=/lib/systemd/systemd-cgroups-agent,name=systemd) pstore on /sys/fs/pstore type pstore (rw,nosuid,nodev,noexec,relatime) efivarfs on /sys/firmware/efi/efivars type efivarfs (rw,nosuid,nodev,noexec,relatime) cgroup on /sys/fs/cgroup/cpu,cpuacct type cgroup (rw,nosuid,nodev,noexec,relatime,cpu,cpuacct) cgroup on /sys/fs/cgroup/freezer type cgroup (rw,nosuid,nodev,noexec,relatime,freezer) cgroup on /sys/fs/cgroup/pids type cgroup (rw,nosuid,nodev,noexec,relatime,pids) cgroup on /sys/fs/cgroup/cpuset type cgroup (rw,nosuid,nodev,noexec,relatime,cpuset) systemd-1 on /proc/sys/fs/binfmt_misc type autofs (rw,relatime,fd=26,pgrp=1,timeout=0,minproto=5,maxproto=5,direct) hugetlbfs on /dev/hugepages type hugetlbfs (rw,relatime) debugfs on /sys/kernel/debug type debugfs (rw,relatime) mqueue on /dev/mqueue type mqueue (rw,relatime) fusectl on /sys/fs/fuse/connections type fusectl (rw,relatime) /dev/nvme0n1p1 on /boot/efi type vfat (rw,relatime,fmask=0077,dmask=0077,codepage=437,iocharset=iso8859-1,shortname=mixed,errors=remount-ro) /dev/nvme0n1p2 on /var/lib/docker/aufs type ext4 (rw,relatime,errors=remount-ro,data=ordered) gvfsd-fuse on /run/user/1002/gvfs type fuse.gvfsd-fuse (rw,nosuid,nodev,relatime,user_id=1002,group_id=1002) """, 0, [], ), # OS X, no CIFS ( r"""/dev/disk2 on / (hfs, local, journaled) devfs on /dev (devfs, local, nobrowse) map -hosts on /net (autofs, nosuid, automounted, nobrowse) map auto_home on /home (autofs, automounted, nobrowse) map -fstab on /Network/Servers (autofs, automounted, nobrowse) /dev/disk3s2 on /Volumes/MyBookData (hfs, local, nodev, nosuid, journaled) afni:/elrond0 on /Volumes/afni (nfs) afni:/var/www/INCOMING on /Volumes/INCOMING (nfs) afni:/fraid on /Volumes/afni (nfs, asynchronous) boromir:/raid.bot on /Volumes/raid.bot (nfs) elros:/volume2/AFNI_SHARE on /Volumes/AFNI_SHARE (nfs) map -static on /Volumes/safni (autofs, automounted, nobrowse) map -static on /Volumes/raid.top (autofs, 
automounted, nobrowse) /dev/disk1s3 on /Volumes/Boot OS X (hfs, local, journaled, nobrowse) """, 0, [], ), # Non-zero exit code ("", 1, []), # Variant of Linux example with CIFS added manually ( r"""sysfs on /sys type sysfs (rw,nosuid,nodev,noexec,relatime) proc on /proc type proc (rw,nosuid,nodev,noexec,relatime) udev on /dev type devtmpfs (rw,nosuid,relatime,size=8121732k,nr_inodes=2030433,mode=755) devpts on /dev/pts type devpts (rw,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000) tmpfs on /run type tmpfs (rw,nosuid,noexec,relatime,size=1628440k,mode=755) /dev/nvme0n1p2 on / type ext4 (rw,relatime,errors=remount-ro,data=ordered) securityfs on /sys/kernel/security type securityfs (rw,nosuid,nodev,noexec,relatime) tmpfs on /dev/shm type tmpfs (rw,nosuid,nodev) tmpfs on /sys/fs/cgroup type tmpfs (ro,nosuid,nodev,noexec,mode=755) cgroup on /sys/fs/cgroup/systemd type cgroup (rw,nosuid,nodev,noexec,relatime,xattr,release_agent=/lib/systemd/systemd-cgroups-agent,name=systemd) pstore on /sys/fs/pstore type pstore (rw,nosuid,nodev,noexec,relatime) efivarfs on /sys/firmware/efi/efivars type efivarfs (rw,nosuid,nodev,noexec,relatime) cgroup on /sys/fs/cgroup/cpu,cpuacct type cgroup (rw,nosuid,nodev,noexec,relatime,cpu,cpuacct) cgroup on /sys/fs/cgroup/freezer type cgroup (rw,nosuid,nodev,noexec,relatime,freezer) cgroup on /sys/fs/cgroup/pids type cgroup (rw,nosuid,nodev,noexec,relatime,pids) cgroup on /sys/fs/cgroup/cpuset type cgroup (rw,nosuid,nodev,noexec,relatime,cpuset) systemd-1 on /proc/sys/fs/binfmt_misc type autofs (rw,relatime,fd=26,pgrp=1,timeout=0,minproto=5,maxproto=5,direct) hugetlbfs on /dev/hugepages type hugetlbfs (rw,relatime) debugfs on /sys/kernel/debug type debugfs (rw,relatime) mqueue on /dev/mqueue type mqueue (rw,relatime) fusectl on /sys/fs/fuse/connections type fusectl (rw,relatime) /dev/nvme0n1p1 on /boot/efi type vfat (rw,relatime,fmask=0077,dmask=0077,codepage=437,iocharset=iso8859-1,shortname=mixed,errors=remount-ro) /dev/nvme0n1p2 on 
/var/lib/docker/aufs type ext4 (rw,relatime,errors=remount-ro,data=ordered) gvfsd-fuse on /run/user/1002/gvfs type fuse.gvfsd-fuse (rw,nosuid,nodev,relatime,user_id=1002,group_id=1002) """, 0, [], ), # Variant of OS X example with CIFS added manually ( r"""/dev/disk2 on / (hfs, local, journaled) devfs on /dev (devfs, local, nobrowse) afni:/elrond0 on /Volumes/afni (cifs) afni:/var/www/INCOMING on /Volumes/INCOMING (nfs) afni:/fraid on /Volumes/afni/fraid (nfs, asynchronous) boromir:/raid.bot on /Volumes/raid.bot (nfs) elros:/volume2/AFNI_SHARE on /Volumes/AFNI_SHARE (nfs) """, 0, [("/Volumes/afni/fraid", "nfs"), ("/Volumes/afni", "cifs")], ), # From Windows: docker run --rm -it -v C:\:/data busybox mount ( r"""overlay on / type overlay (rw,relatime,lowerdir=/var/lib/docker/overlay2/l/26UTYITLF24YE7KEGTMHUNHPPG:/var/lib/docker/overlay2/l/SWGNP3T2EEB4CNBJFN3SDZLXHP,upperdir=/var/lib/docker/overlay2/a4c54ab1aa031bb5a14a424abd655510521e183ee4fa4158672e8376c89df394/diff,workdir=/var/lib/docker/overlay2/a4c54ab1aa031bb5a14a424abd655510521e183ee4fa4158672e8376c89df394/work) proc on /proc type proc (rw,nosuid,nodev,noexec,relatime) tmpfs on /dev type tmpfs (rw,nosuid,size=65536k,mode=755) devpts on /dev/pts type devpts (rw,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=666) sysfs on /sys type sysfs (ro,nosuid,nodev,noexec,relatime) tmpfs on /sys/fs/cgroup type tmpfs (ro,nosuid,nodev,noexec,relatime,mode=755) cpuset on /sys/fs/cgroup/cpuset type cgroup (ro,nosuid,nodev,noexec,relatime,cpuset) cpu on /sys/fs/cgroup/cpu type cgroup (ro,nosuid,nodev,noexec,relatime,cpu) cpuacct on /sys/fs/cgroup/cpuacct type cgroup (ro,nosuid,nodev,noexec,relatime,cpuacct) blkio on /sys/fs/cgroup/blkio type cgroup (ro,nosuid,nodev,noexec,relatime,blkio) memory on /sys/fs/cgroup/memory type cgroup (ro,nosuid,nodev,noexec,relatime,memory) devices on /sys/fs/cgroup/devices type cgroup (ro,nosuid,nodev,noexec,relatime,devices) freezer on /sys/fs/cgroup/freezer type cgroup 
(ro,nosuid,nodev,noexec,relatime,freezer) net_cls on /sys/fs/cgroup/net_cls type cgroup (ro,nosuid,nodev,noexec,relatime,net_cls) perf_event on /sys/fs/cgroup/perf_event type cgroup (ro,nosuid,nodev,noexec,relatime,perf_event) net_prio on /sys/fs/cgroup/net_prio type cgroup (ro,nosuid,nodev,noexec,relatime,net_prio) hugetlb on /sys/fs/cgroup/hugetlb type cgroup (ro,nosuid,nodev,noexec,relatime,hugetlb) pids on /sys/fs/cgroup/pids type cgroup (ro,nosuid,nodev,noexec,relatime,pids) cgroup on /sys/fs/cgroup/systemd type cgroup (ro,nosuid,nodev,noexec,relatime,name=systemd) mqueue on /dev/mqueue type mqueue (rw,nosuid,nodev,noexec,relatime) //10.0.75.1/C on /data type cifs (rw,relatime,vers=3.02,sec=ntlmsspi,cache=strict,username=filo,domain=MSI,uid=0,noforceuid,gid=0,noforcegid,addr=10.0.75.1,file_mode=0755,dir_mode=0755,iocharset=utf8,nounix,serverino,mapposix,nobrl,mfsymlinks,noperm,rsize=1048576,wsize=1048576,echo_interval=60,actimeo=1) /dev/sda1 on /etc/resolv.conf type ext4 (rw,relatime,data=ordered) /dev/sda1 on /etc/hostname type ext4 (rw,relatime,data=ordered) /dev/sda1 on /etc/hosts type ext4 (rw,relatime,data=ordered) shm on /dev/shm type tmpfs (rw,nosuid,nodev,noexec,relatime,size=65536k) devpts on /dev/console type devpts (rw,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=666) proc on /proc/bus type proc (ro,relatime) proc on /proc/fs type proc (ro,relatime) proc on /proc/irq type proc (ro,relatime) proc on /proc/sys type proc (ro,relatime) proc on /proc/sysrq-trigger type proc (ro,relatime) tmpfs on /proc/kcore type tmpfs (rw,nosuid,size=65536k,mode=755) tmpfs on /proc/timer_list type tmpfs (rw,nosuid,size=65536k,mode=755) tmpfs on /proc/sched_debug type tmpfs (rw,nosuid,size=65536k,mode=755) tmpfs on /proc/scsi type tmpfs (ro,relatime) tmpfs on /sys/firmware type tmpfs (ro,relatime) """, 0, [("/data", "cifs")], ), # From @yarikoptic - added blank lines to test for resilience ( r"""/proc on /proc type proc (rw,relatime) sysfs on /sys type sysfs 
(rw,nosuid,nodev,noexec,relatime) tmpfs on /dev/shm type tmpfs (rw,relatime) devpts on /dev/pts type devpts (rw,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=666) devpts on /dev/ptmx type devpts (rw,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=666) """, 0, [], ), ) @pytest.mark.parametrize("output, exit_code, expected", MOUNT_OUTPUTS) def test_parse_mount_table(output, exit_code, expected): assert _parse_mount_table(exit_code, output) == expected def test_cifs_check(): assert isinstance(_cifs_table, list) assert isinstance(on_cifs("/"), bool) fake_table = [("/scratch/tmp", "ext4"), ("/scratch", "cifs")] cifs_targets = [ ("/scratch/tmp/x/y", False), ("/scratch/tmp/x", False), ("/scratch/x/y", True), ("/scratch/x", True), ("/x/y", False), ("/x", False), ("/", False), ] orig_table = _cifs_table[:] _cifs_table[:] = [] for target, _ in cifs_targets: assert on_cifs(target) is False _cifs_table.extend(fake_table) for target, expected in cifs_targets: assert on_cifs(target) is expected _cifs_table[:] = [] _cifs_table.extend(orig_table) def test_indirectory(tmpdir): tmpdir.chdir() os.makedirs("subdir1/subdir2") sd1 = os.path.abspath("subdir1") sd2 = os.path.abspath("subdir1/subdir2") assert os.getcwd() == tmpdir.strpath with indirectory("/"): assert os.getcwd() == "/" assert os.getcwd() == tmpdir.strpath with indirectory("subdir1"): assert os.getcwd() == sd1 with indirectory("subdir2"): assert os.getcwd() == sd2 with indirectory(".."): assert os.getcwd() == sd1 with indirectory("/"): assert os.getcwd() == "/" assert os.getcwd() == sd1 assert os.getcwd() == sd2 assert os.getcwd() == sd1 assert os.getcwd() == tmpdir.strpath try: with indirectory("subdir1"): raise ValueError("Erroring out of context") except ValueError: pass assert os.getcwd() == tmpdir.strpath def test_pklization(tmpdir): tmpdir.chdir() exc = Exception("There is something wrong here") savepkl("./except.pkz", exc) newexc = loadpkl("./except.pkz") assert exc.args == newexc.args assert os.getcwd() == 
tmpdir.strpath class Pickled: def __getstate__(self): return self.__dict__ class PickledBreaker: def __setstate__(self, d): raise Exception() def test_versioned_pklization(tmpdir): tmpdir.chdir() obj = Pickled() savepkl("./pickled.pkz", obj, versioning=True) with pytest.raises(Exception): with mock.patch( "nipype.utils.tests.test_filemanip.Pickled", PickledBreaker ), mock.patch("nipype.__version__", "0.0.0"): loadpkl("./pickled.pkz") def test_unversioned_pklization(tmpdir): tmpdir.chdir() obj = Pickled() savepkl("./pickled.pkz", obj) with pytest.raises(Exception): with mock.patch("nipype.utils.tests.test_filemanip.Pickled", PickledBreaker): loadpkl("./pickled.pkz") def test_path_strict_resolve(tmpdir): """Check the monkeypatch to test strict resolution of Path.""" tmpdir.chdir() # Default strict=False should work out out of the box testfile = Path("somefile.txt") resolved = "%s/somefile.txt" % tmpdir assert str(path_resolve(testfile)) == resolved # Strict keyword is always allowed assert str(path_resolve(testfile, strict=False)) == resolved # Switching to strict=True must raise FileNotFoundError (also in Python2) with pytest.raises(FileNotFoundError): path_resolve(testfile, strict=True) # If the file is created, it should not raise open("somefile.txt", "w").close() assert str(path_resolve(testfile, strict=True)) == resolved @pytest.mark.parametrize("save_versioning", [True, False]) def test_pickle(tmp_path, save_versioning): testobj = "iamateststr" pickle_fname = str(tmp_path / "testpickle.pklz") savepkl(pickle_fname, testobj, versioning=save_versioning) outobj = loadpkl(pickle_fname) assert outobj == testobj @pytest.mark.parametrize( "items,expected", [ ("", " \n\n"), ("A string", " A string\n\n"), (["A list", "Of strings"], " A list\n Of strings\n\n"), (None, TypeError), ], ) def test_write_rst_list(tmp_path, items, expected): if items is not None: assert write_rst_list(items) == expected else: with pytest.raises(expected): write_rst_list(items) def 
nfs_unlink(pathlike, *, dir_fd=None): if dir_fd is None: path = Path(pathlike) deleted = path.with_name(".nfs00000000") path.rename(deleted) else: os.rename(pathlike, ".nfs1111111111", src_dir_fd=dir_fd, dst_dir_fd=dir_fd) def test_emptydirs_dangling_nfs(tmp_path): busyfile = tmp_path / "base" / "subdir" / "busyfile" busyfile.parent.mkdir(parents=True) busyfile.touch() with mock.patch("os.unlink") as mocked: mocked.side_effect = nfs_unlink emptydirs(tmp_path / "base") assert Path.exists(tmp_path / "base") assert not busyfile.exists() assert busyfile.parent.exists() # Couldn't remove nipype-1.7.0/nipype/utils/tests/test_functions.py000066400000000000000000000017631413403311400222530ustar00rootroot00000000000000# -*- coding: utf-8 -*- import pytest from nipype.utils.functions import getsource, create_function_from_source def _func1(x): return x ** 3 def test_func_to_str(): def func1(x): return x ** 2 # Should be ok with both functions! for f in _func1, func1: f_src = getsource(f) f_recreated = create_function_from_source(f_src) assert f(2.3) == f_recreated(2.3) def test_func_to_str_err(): bad_src = "obbledygobbledygook" with pytest.raises(RuntimeError): create_function_from_source(bad_src) def _print_statement(): try: exec('print("")') return True except SyntaxError: return False def test_func_string(): def is_string(): return isinstance("string", str) wrapped_func = create_function_from_source(getsource(is_string)) assert is_string() == wrapped_func() def test_func_print(): wrapped_func = create_function_from_source(getsource(_print_statement)) assert wrapped_func() nipype-1.7.0/nipype/utils/tests/test_imagemanip.py000066400000000000000000000023401413403311400223420ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import numpy as np import nibabel as nb import pytest from ..imagemanip import copy_header @pytest.mark.parametrize("keep_dtype", (True, False)) def 
test_copy_header(tmp_path, keep_dtype): """Cover copy_header.""" fname1 = tmp_path / "reference.nii.gz" fname2 = tmp_path / "target.nii.gz" nii = nb.Nifti1Image(np.zeros((10, 10, 10), dtype="uint8"), None, None) nii.set_qform(np.diag((1.0, 2.0, 3.0, 1.0)), code=2) nii.set_sform(np.diag((1.0, 2.0, 3.0, 1.0)), code=1) nii.to_filename(str(fname1)) nii.set_data_dtype("float32") nii.set_qform(np.eye(4), code=1) nii.to_filename(str(fname2)) copied = nb.load(copy_header(fname1, fname2, keep_dtype=keep_dtype)) ref = nb.load(str(fname1)) assert np.all(copied.get_qform(coded=False) == ref.get_qform(coded=False)) assert np.all(copied.get_sform(coded=False) == ref.get_sform(coded=False)) assert copied.get_qform(coded=True)[1] == ref.get_qform(coded=True)[1] assert copied.get_sform(coded=True)[1] == ref.get_sform(coded=True)[1] assert (copied.header.get_data_dtype() == ref.header.get_data_dtype()) != keep_dtype nipype-1.7.0/nipype/utils/tests/test_misc.py000066400000000000000000000072201413403311400211700ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os from shutil import rmtree import pytest from nipype.utils.misc import ( container_to_string, str2bool, flatten, unflatten, dict_diff, ) def test_cont_to_str(): # list x = ["a", "b"] assert container_to_string(x) == "a b" # tuple x = tuple(x) assert container_to_string(x) == "a b" # set x = set(x) y = container_to_string(x) assert (y == "a b") or (y == "b a") # dict x = dict(a="a", b="b") y = container_to_string(x) assert (y == "a b") or (y == "b a") # string assert container_to_string("foobar") == "foobar" # int. Integers are not the main intent of this function, but see # no reason why they shouldn't work. 
assert container_to_string(123) == "123" @pytest.mark.parametrize( "string, expected", [ ("yes", True), ("true", True), ("t", True), ("1", True), ("no", False), ("false", False), ("n", False), ("f", False), ("0", False), ], ) def test_str2bool(string, expected): assert str2bool(string) == expected def test_flatten(): in_list = [[1, 2, 3], [4], [[5, 6], 7], 8] flat = flatten(in_list) assert flat == [1, 2, 3, 4, 5, 6, 7, 8] back = unflatten(flat, in_list) assert in_list == back new_list = [2, 3, 4, 5, 6, 7, 8, 9] back = unflatten(new_list, in_list) assert back == [[2, 3, 4], [5], [[6, 7], 8], 9] flat = flatten([]) assert flat == [] back = unflatten([], []) assert back == [] def test_rgetcwd(monkeypatch, tmpdir): from ..misc import rgetcwd oldpath = tmpdir.strpath tmpdir.mkdir("sub").chdir() newpath = os.getcwd() # Path still there assert rgetcwd() == newpath # Remove path rmtree(newpath, ignore_errors=True) with pytest.raises(OSError): os.getcwd() monkeypatch.setenv("PWD", oldpath) assert rgetcwd(error=False) == oldpath # Test when error should be raised with pytest.raises(OSError): rgetcwd() # Deleted env variable monkeypatch.delenv("PWD") with pytest.raises(OSError): rgetcwd(error=False) def test_dict_diff(): abtuple = [("a", "b")] abdict = dict(abtuple) # Unchanged assert dict_diff(abdict, abdict) == "" assert dict_diff(abdict, abtuple) == "" assert dict_diff(abtuple, abdict) == "" assert dict_diff(abtuple, abtuple) == "" # Changed keys diff = dict_diff({"a": "b"}, {"b": "a"}) assert "Dictionaries had differing keys" in diff assert "keys not previously seen: {'b'}" in diff assert "keys not presently seen: {'a'}" in diff # Trigger recursive uniformization complicated_val1 = [{"a": ["b"], "c": ("d", "e")}] complicated_val2 = [{"a": ["x"], "c": ("d", "e")}] uniformized_val1 = ({"a": ("b",), "c": ("d", "e")},) uniformized_val2 = ({"a": ("x",), "c": ("d", "e")},) diff = dict_diff({"a": complicated_val1}, {"a": complicated_val2}) assert "Some dictionary entries had 
differing values:" in diff assert "a: {!r} != {!r}".format(uniformized_val2, uniformized_val1) in diff # Trigger shortening diff = dict_diff({"a": "b" * 60}, {"a": "c" * 70}) assert "Some dictionary entries had differing values:" in diff assert "a: 'cccccccccc...cccccccccc' != 'bbbbbbbbbb...bbbbbbbbbb'" in diff # Fail the dict conversion diff = dict_diff({}, "not a dict") assert diff == ( "Diff between nipype inputs failed:\n" "* Cached inputs: {}\n" "* New inputs: not a dict" ) nipype-1.7.0/nipype/utils/tests/test_nipype2boutiques.py000066400000000000000000000031261413403311400235650ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from ..nipype2boutiques import generate_boutiques_descriptor from nipype.testing import example_data import json def test_generate(): ignored_inputs = ["args", "environ", "output_type"] desc = generate_boutiques_descriptor( module="nipype.interfaces.fsl", interface_name="FLIRT", container_image=("mcin/" "docker-fsl:latest"), container_index="index.docker.io", container_type="docker", verbose=False, save=False, ignore_inputs=ignored_inputs, author=("Oxford Centre for Functional" " MRI of the Brain (FMRIB)"), ) with open(example_data("nipype2boutiques_example.json"), "r") as desc_file: # Make sure that output descriptor matches the expected descriptor. 
output_desc = json.loads(desc) expected_desc = json.load(desc_file) assert output_desc.get("name") == expected_desc.get("name") assert output_desc.get("author") == expected_desc.get("author") assert output_desc.get("command-line") == expected_desc.get("command-line") assert output_desc.get("description") == expected_desc.get("description") assert len(output_desc.get("inputs")) == len(expected_desc.get("inputs")) assert len(output_desc.get("output-files")) == len( expected_desc.get("output-files") ) assert output_desc.get("container-image").get("image") == expected_desc.get( "container-image" ).get("image") nipype-1.7.0/nipype/utils/tests/test_provenance.py000066400000000000000000000025451413403311400224020ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os from nibabel.optpkg import optional_package import pytest _, have_rdflib5, _ = optional_package("rdflib", min_version="5.0.0") from nipype.utils.provenance import ProvStore, safe_encode needs_rdflib5 = pytest.mark.skipif( not have_rdflib5, reason="Test requires rdflib 5.0.0 or higher" ) @needs_rdflib5 @pytest.mark.timeout(60) def test_provenance(tmpdir): from nipype.interfaces.base import CommandLine tmpdir.chdir() ps = ProvStore() results = CommandLine("echo hello").run() ps.add_results(results) provn = ps.g.get_provn() assert "echo hello" in provn @needs_rdflib5 @pytest.mark.timeout(60) def test_provenance_exists(tmpdir): tmpdir.chdir() from nipype import config from nipype.interfaces.base import CommandLine provenance_state = config.get("execution", "write_provenance") hash_state = config.get("execution", "hash_method") config.enable_provenance() CommandLine("echo hello").run() config.set("execution", "write_provenance", provenance_state) config.set("execution", "hash_method", hash_state) assert tmpdir.join("provenance.provn").check() def test_safe_encode(): a = "\xc3\xa9lg" out = 
safe_encode(a) assert out.value == a nipype-1.7.0/nipype/utils/tests/use_resources000077500000000000000000000015161413403311400214420ustar00rootroot00000000000000#!/usr/bin/env python # # use_resources ''' Python script to use a certain amount of RAM on disk and number of threads Usage: use_resources -g -p ''' # Make main executable if __name__ == '__main__': # Import packages import argparse from nipype.utils.profiler import _use_resources # Init argparser parser = argparse.ArgumentParser(description=__doc__) # Add arguments parser.add_argument( '-g', '--num_gb', required=True, type=float, help='Number of GB RAM to use, can be float or int') parser.add_argument( '-p', '--num_threads', required=True, type=int, help='Number of threads to run in parallel') # Parse args args = parser.parse_args() _use_resources(args.num_threads, args.num_gb) nipype-1.7.0/nipype/utils/tmpdirs.py000066400000000000000000000024061413403311400175170ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os import shutil from tempfile import template, mkdtemp class TemporaryDirectory(object): """Create and return a temporary directory. This has the same behavior as mkdtemp but can be used as a context manager. For example: with TemporaryDirectory() as tmpdir: ... Upon exiting the context, the directory and everthing contained in it are removed. 
""" def __init__(self, suffix="", prefix=template, dir=None): self.name = mkdtemp(suffix, prefix, dir) self._closed = False def __enter__(self): return self.name def cleanup(self): if not self._closed: shutil.rmtree(self.name) self._closed = True def __exit__(self, exc, value, tb): self.cleanup() return False class InTemporaryDirectory(TemporaryDirectory): def __enter__(self): self._pwd = os.getcwd() os.chdir(self.name) return super(InTemporaryDirectory, self).__enter__() def __exit__(self, exc, value, tb): os.chdir(self._pwd) return super(InTemporaryDirectory, self).__exit__(exc, value, tb) nipype-1.7.0/nipype/workflows/000077500000000000000000000000001413403311400163565ustar00rootroot00000000000000nipype-1.7.0/nipype/workflows/__init__.py000066400000000000000000000020131413403311400204630ustar00rootroot00000000000000# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: _msg = ["Nipype 1 workflows have been moved to the niflow-nipype1-workflows package."] try: from niflow.nipype1.workflows import data, dmri, fmri, misc, rsfmri, smri except ImportError: _msg.append("pip install niflow-nipype1-workflows to continue using them.") else: import sys # Hack to make `from nipype.workflows.X import Y` work sys.modules["nipype.workflows.data"] = data sys.modules["nipype.workflows.dmri"] = dmri sys.modules["nipype.workflows.fmri"] = fmri sys.modules["nipype.workflows.misc"] = misc sys.modules["nipype.workflows.rsfmri"] = rsfmri sys.modules["nipype.workflows.smri"] = smri _msg.append( "nipype.workflows.* provides a reference for backwards compatibility. " "Please use niflow.nipype1.workflows.* to avoid this warning." 
) del sys import warnings warnings.warn(" ".join(_msg)) del warnings, _msg nipype-1.7.0/requirements.txt000066400000000000000000000005001413403311400162740ustar00rootroot00000000000000# Auto-generated by tools/update_requirements.py click>=6.6.0 networkx>=2.0 nibabel>=2.1.0 numpy>=1.13 ; python_version < "3.7" numpy>=1.15.3 ; python_version >= "3.7" packaging prov>=1.5.2 pydot>=1.2.3 python-dateutil>=2.2 rdflib>=5.0.0 scipy>=0.14 simplejson>=3.8.0 traits>=4.6,!=5.0 filelock>=3.0.0 etelemetry>=0.2.0 nipype-1.7.0/setup.cfg000066400000000000000000000000351413403311400146340ustar00rootroot00000000000000[bdist_wheel] python-tag=py3 nipype-1.7.0/setup.py000077500000000000000000000126441413403311400145410ustar00rootroot00000000000000#!/usr/bin/env python # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Nipype : Neuroimaging in Python pipelines and interfaces package. Nipype intends to create python interfaces to other neuroimaging packages and create an API for specifying a full analysis pipeline in python. Much of the machinery at the beginning of this file has been copied over from nibabel denoted by ## START - COPIED FROM NIBABEL and a corresponding ## END """ # Build helper import os from os.path import join as pjoin # Commit hash writing, and dependency checking from setuptools.command.build_py import build_py class BuildWithCommitInfoCommand(build_py): """Return extended build command class for recording commit The extended command tries to run git to find the current commit, getting the empty string if it fails. It then writes the commit hash into a file in the `pkg_dir` path, named ``COMMIT_INFO.txt``. In due course this information can be used by the package after it is installed, to tell you what commit it was installed from if known. To make use of this system, you need a package with a COMMIT_INFO.txt file e.g. 
``myproject/COMMIT_INFO.txt`` - that might well look like this:: # This is an ini file that may contain information about the code state [commit hash] # The line below may contain a valid hash if it has been substituted # during 'git archive' archive_subst_hash=$Format:%h$ # This line may be modified by the install process install_hash= The COMMIT_INFO file above is also designed to be used with git substitution - so you probably also want a ``.gitattributes`` file in the root directory of your working tree that contains something like this:: myproject/COMMIT_INFO.txt export-subst That will cause the ``COMMIT_INFO.txt`` file to get filled in by ``git archive`` - useful in case someone makes such an archive - for example with via the github 'download source' button. Although all the above will work as is, you might consider having something like a ``get_info()`` function in your package to display the commit information at the terminal. See the ``pkg_info.py`` module in the nipy package for an example. 
""" def run(self): import subprocess import configparser build_py.run(self) proc = subprocess.Popen( "git rev-parse --short HEAD", stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, ) repo_commit = proc.communicate()[0].decode() # We write the installation commit even if it's empty cfg_parser = configparser.RawConfigParser() cfg_parser.read(pjoin("nipype", "COMMIT_INFO.txt")) cfg_parser.set("commit hash", "install_hash", repo_commit.strip()) out_pth = pjoin(self.build_lib, "nipype", "COMMIT_INFO.txt") cfg_parser.write(open(out_pth, "wt")) def main(): from setuptools import setup, find_packages thispath, _ = os.path.split(__file__) testdatafiles = [ pjoin("testing", "data", val) for val in os.listdir(pjoin(thispath, "nipype", "testing", "data")) if not os.path.isdir(pjoin(thispath, "nipype", "testing", "data", val)) ] testdatafiles += [ pjoin("testing", "data", "dicomdir", "*"), pjoin("testing", "data", "bedpostxout", "*"), pjoin("testing", "data", "tbss_dir", "*"), pjoin("testing", "data", "brukerdir", "fid"), pjoin("testing", "data", "brukerdir", "pdata", "1", "*"), pjoin("testing", "data", "ds005", "*"), pjoin("testing", "data", "realign_json.json"), pjoin("workflows", "data", "*"), pjoin("pipeline", "engine", "report_template.html"), pjoin("external", "d3.js"), pjoin("interfaces", "fsl", "model_templates", "*"), pjoin("interfaces", "tests", "use_resources"), "pytest.ini", "conftest.py", ] # Python 3: use a locals dictionary # http://stackoverflow.com/a/1463370/6820620 ldict = locals() # Get version and release info, which is all stored in nipype/info.py ver_file = os.path.join(thispath, "nipype", "info.py") with open(ver_file) as infofile: exec(infofile.read(), globals(), ldict) setup( name=ldict["NAME"], maintainer=ldict["MAINTAINER"], maintainer_email=ldict["MAINTAINER_EMAIL"], description=ldict["DESCRIPTION"], long_description=ldict["LONG_DESCRIPTION"], long_description_content_type="text/x-rst", url=ldict["URL"], download_url=ldict["DOWNLOAD_URL"], 
license=ldict["LICENSE"], classifiers=ldict["CLASSIFIERS"], author=ldict["AUTHOR"], author_email=ldict["AUTHOR_EMAIL"], platforms=ldict["PLATFORMS"], version=ldict["VERSION"], python_requires=ldict["PYTHON_REQUIRES"], install_requires=ldict["REQUIRES"], provides=ldict["PROVIDES"], packages=find_packages(), package_data={"nipype": testdatafiles}, cmdclass={"build_py": BuildWithCommitInfoCommand}, tests_require=ldict["TESTS_REQUIRES"], zip_safe=False, extras_require=ldict["EXTRA_REQUIRES"], entry_points=""" [console_scripts] nipypecli=nipype.scripts.cli:cli """, ) if __name__ == "__main__": main() nipype-1.7.0/tools/000077500000000000000000000000001413403311400141555ustar00rootroot00000000000000nipype-1.7.0/tools/checkspecs.py000066400000000000000000000460651413403311400166550ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Attempt to check each interface in nipype """ # Stdlib imports import os import re import sys import warnings import black # Functions and classes class InterfaceChecker(object): """Class for checking all interface specifications """ def __init__( self, package_name, package_skip_patterns=None, module_skip_patterns=None, class_skip_patterns=None, ): r""" Initialize package for parsing Parameters ---------- package_name : string Name of the top-level package. *package_name* must be the name of an importable package package_skip_patterns : None or sequence of {strings, regexps} Sequence of strings giving URIs of packages to be excluded Operates on the package path, starting at (including) the first dot in the package path, after *package_name* - so, if *package_name* is ``sphinx``, then ``sphinx.util`` will result in ``.util`` being passed for earching by these regexps. If is None, gives default. 
Default is: ['\.tests$'] module_skip_patterns : None or sequence Sequence of strings giving URIs of modules to be excluded Operates on the module name including preceding URI path, back to the first dot after *package_name*. For example ``sphinx.util.console`` results in the string to search of ``.util.console`` If is None, gives default. Default is: ['\.setup$', '\._'] class_skip_patterns : None or sequence Sequence of strings giving classes to be excluded Default is: None """ if package_skip_patterns is None: package_skip_patterns = ["\\.tests$"] if module_skip_patterns is None: module_skip_patterns = ["\\.setup$", "\\._"] if class_skip_patterns: self.class_skip_patterns = class_skip_patterns else: self.class_skip_patterns = [] self.package_name = package_name self.package_skip_patterns = package_skip_patterns self.module_skip_patterns = module_skip_patterns def get_package_name(self): return self._package_name def set_package_name(self, package_name): """Set package_name""" # It's also possible to imagine caching the module parsing here self._package_name = package_name self.root_module = __import__(package_name) self.root_path = self.root_module.__path__[0] package_name = property( get_package_name, set_package_name, None, "get/set package_name" ) def _get_object_name(self, line): name = line.split()[1].split("(")[0].strip() # in case we have classes which are not derived from object # ie. old style classes return name.rstrip(":") def _uri2path(self, uri): """Convert uri to absolute filepath Parameters ---------- uri : string URI of python module to return path for Returns ------- path : None or string Returns None if there is no valid path for this URI Otherwise returns absolute file system path for URI """ if uri == self.package_name: return os.path.join(self.root_path, "__init__.py") path = uri.replace(".", os.path.sep) path = path.replace(self.package_name + os.path.sep, "") path = os.path.join(self.root_path, path) # XXX maybe check for extensions as well? 
if os.path.exists(path + ".py"): # file path += ".py" elif os.path.exists(os.path.join(path, "__init__.py")): path = os.path.join(path, "__init__.py") else: return None return path def _path2uri(self, dirpath): """ Convert directory path to uri """ relpath = dirpath.replace(self.root_path, self.package_name) if relpath.startswith(os.path.sep): relpath = relpath[1:] return relpath.replace(os.path.sep, ".") def _parse_module(self, uri): """ Parse module defined in *uri* """ filename = self._uri2path(uri) if filename is None: # nothing that we could handle here. return ([], []) f = open(filename, "rt") functions, classes = self._parse_lines(f, uri) f.close() return functions, classes def _parse_lines(self, linesource, module): """ Parse lines of text for functions and classes """ functions = [] classes = [] for line in linesource: if line.startswith("def ") and line.count("("): # exclude private stuff name = self._get_object_name(line) if not name.startswith("_"): functions.append(name) elif line.startswith("class "): # exclude private stuff name = self._get_object_name(line) if not name.startswith("_") and self._survives_exclude( ".".join((module, name)), "class" ): classes.append(name) else: pass functions.sort() classes.sort() return functions, classes @classmethod def _normalize_repr(cls, value): if isinstance(value, list): return "[{}]".format(", ".join(map(cls._normalize_repr, value))) if isinstance(value, tuple): if len(value) == 1: return "({},)".format(cls._normalize_repr(value[0])) return "({})".format(", ".join(map(cls._normalize_repr, value))) if isinstance(value, (str, bytes)): value = repr(value) if value[0] not in ('"', "'"): value = value[1:] else: value = repr(value) return value def test_specs(self, uri): """Check input and output specs in an uri Parameters ---------- uri : string python location of module - e.g 'sphinx.builder' Returns ------- """ from nipype.interfaces.base import BaseInterface # get the names of all classes and functions _, 
classes = self._parse_module(uri) if not classes: # print 'WARNING: Empty -',uri # dbg return None # Make a shorter version of the uri that omits the package name for # titles allowed_keys = [ "desc", "genfile", "xor", "requires", "desc", "nohash", "argstr", "position", "mandatory", "copyfile", "usedefault", "sep", "hash_files", "deprecated", "new_name", "min_ver", "max_ver", "name_source", "name_template", "keep_extension", "units", "output_name", "extensions", ] in_built = [ "type", "copy", "parent", "instance_handler", "comparison_mode", "array", "default", "editor", ] bad_specs = [] for c in classes: __import__(uri) try: with warnings.catch_warnings(): warnings.simplefilter("ignore") classinst = sys.modules[uri].__dict__[c] except Exception: continue if not issubclass(classinst, BaseInterface): continue testdir = os.path.join(*(uri.split(".")[:-1] + ["tests"])) if not os.path.exists(testdir): os.makedirs(testdir) nonautotest = os.path.join(testdir, "test_%s.py" % c) testfile = os.path.join(testdir, "test_auto_%s.py" % c) if os.path.exists(testfile): os.unlink(testfile) if not os.path.exists(nonautotest): cmd = [ "# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT", "from ..%s import %s" % (uri.split(".")[-1], c), "", ] cmd.append("\ndef test_%s_inputs():" % c) input_fields = "" for traitname, trait in sorted( classinst.input_spec().traits(transient=None).items() ): input_fields += "%s=dict(" % traitname for key, value in sorted(trait.__dict__.items()): if key in in_built or key == "desc": continue input_fields += "%s=%s,\n " % ( key, self._normalize_repr(value), ) input_fields += "),\n " cmd += [" input_map = dict(%s)" % input_fields] cmd += [" inputs = %s.input_spec()" % c] cmd += [ """ for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): assert getattr(inputs.traits()[key], metakey) == value""" ] fmt_cmd = black.format_str("\n".join(cmd), mode=black.FileMode()) with open(testfile, "wt") as fp: fp.writelines(fmt_cmd) 
else: print("%s has nonautotest" % c) for traitname, trait in sorted( classinst.input_spec().traits(transient=None).items() ): for key in sorted(trait.__dict__): if key in in_built: continue parent_metadata = [] if "parent" in trait.__dict__: parent_metadata = list(getattr(trait, "parent").__dict__.keys()) if ( key not in allowed_keys + classinst._additional_metadata + parent_metadata ): bad_specs.append([uri, c, "Inputs", traitname, key]) if ( key == "mandatory" and trait.mandatory is not None and not trait.mandatory ): bad_specs.append( [uri, c, "Inputs", traitname, "mandatory=False"] ) if key == "usedefault" and trait.__dict__[key] == False: bad_specs.append( [uri, c, "Inputs", traitname, "usedefault=False"] ) # checking if traits that have default_value different that the trits default one # also have `usedefault` specified; # excluding TraitCompound # excluding Enum: always has default value (the first value) # excluding Tuple: takes tuple of inner traits default values as default, but doesn't use it # for Range assuming that if default == low, it's likely that usedefault should be False # (for Range traits takes low as a default default if ( trait.trait_type.__class__.__name__ not in ["TraitCompound", "Tuple", "Enum"] and trait.default and "usedefault" not in trait.__dict__ and "requires" not in trait.__dict__ and "xor" not in trait.__dict__ ): if ( trait.trait_type.__class__.__name__ == "Range" and trait.default == trait.trait_type._low ): continue bad_specs.append( [ uri, c, "Inputs", traitname, "default value is set, no value for usedefault", ] ) if not classinst.output_spec: continue if not os.path.exists(nonautotest): cmd = ["\ndef test_%s_outputs():" % c] input_fields = "" for traitname, trait in sorted( classinst.output_spec().traits(transient=None).items() ): input_fields += "%s=dict(" % traitname for key, value in sorted(trait.__dict__.items()): if key in in_built or key == "desc": continue input_fields += "%s=%s,\n " % ( key, 
self._normalize_repr(value), ) input_fields += "),\n " cmd += [" output_map = dict(%s)" % input_fields] cmd += [" outputs = %s.output_spec()" % c] cmd += [ """ for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): assert getattr(outputs.traits()[key], metakey) == value""" ] fmt_cmd = black.format_str("\n".join(cmd), mode=black.FileMode()) with open(testfile, "at") as fp: fp.writelines("\n\n" + fmt_cmd) for traitname, trait in sorted( classinst.output_spec().traits(transient=None).items() ): for key in sorted(trait.__dict__): if key in in_built: continue parent_metadata = [] if "parent" in trait.__dict__: parent_metadata = list(getattr(trait, "parent").__dict__.keys()) if ( key not in allowed_keys + classinst._additional_metadata + parent_metadata ): bad_specs.append([uri, c, "Outputs", traitname, key]) return bad_specs def _survives_exclude(self, matchstr, match_type): """ Returns True if *matchstr* does not match patterns ``self.package_name`` removed from front of string if present Examples -------- >>> dw = ApiDocWriter('sphinx') >>> dw._survives_exclude('sphinx.okpkg', 'package') True >>> dw.package_skip_patterns.append('^\\.badpkg$') >>> dw._survives_exclude('sphinx.badpkg', 'package') False >>> dw._survives_exclude('sphinx.badpkg', 'module') True >>> dw._survives_exclude('sphinx.badmod', 'module') True >>> dw.module_skip_patterns.append('^\\.badmod$') >>> dw._survives_exclude('sphinx.badmod', 'module') False """ if match_type == "module": patterns = self.module_skip_patterns elif match_type == "package": patterns = self.package_skip_patterns elif match_type == "class": patterns = self.class_skip_patterns else: raise ValueError('Cannot interpret match type "%s"' % match_type) # Match to URI without package name L = len(self.package_name) if matchstr[:L] == self.package_name: matchstr = matchstr[L:] for pat in patterns: try: pat.search except AttributeError: pat = re.compile(pat) if pat.search(matchstr): return False 
return True def discover_modules(self): """ Return module sequence discovered from ``self.package_name`` Parameters ---------- None Returns ------- mods : sequence Sequence of module names within ``self.package_name`` Examples -------- """ modules = [self.package_name] # raw directory parsing for dirpath, dirnames, filenames in os.walk(self.root_path): # Check directory names for packages root_uri = self._path2uri(os.path.join(self.root_path, dirpath)) for dirname in dirnames[:]: # copy list - we modify inplace package_uri = ".".join((root_uri, dirname)) if self._uri2path(package_uri) and self._survives_exclude( package_uri, "package" ): modules.append(package_uri) else: dirnames.remove(dirname) # Check filenames for modules for filename in filenames: module_name = filename[:-3] module_uri = ".".join((root_uri, module_name)) if self._uri2path(module_uri) and self._survives_exclude( module_uri, "module" ): modules.append(module_uri) return sorted(modules) def check_modules(self): # write the list modules = self.discover_modules() checked_modules = [] for m in modules: bad_specs = self.test_specs(m) if bad_specs: checked_modules.extend(bad_specs) for bad_spec in checked_modules: print(":".join(bad_spec)) if __name__ == "__main__": os.environ["NIPYPE_NO_ET"] = "1" package = "nipype" ic = InterfaceChecker(package) # Packages that should not be included in generated API docs. ic.package_skip_patterns += [ "\.external$", "\.fixes$", "\.utils$", "\.pipeline", "\.testing", "\.caching", "\.workflows", ] """ # Modules that should not be included in generated API docs. 
ic.module_skip_patterns += ['\.version$', '\.interfaces\.base$', '\.interfaces\.matlab$', '\.interfaces\.rest$', '\.interfaces\.pymvpa$', '\.interfaces\.slicer\.generate_classes$', '\.interfaces\.spm\.base$', '\.interfaces\.traits', '\.pipeline\.alloy$', '\.pipeline\.s3_node_wrapper$', '.\testing', ] ic.class_skip_patterns += ['AFNI', 'ANTS', 'FSL', 'FS', 'Info', '^SPM', 'Tester', 'Spec$', 'Numpy', 'NipypeTester', ] """ ic.check_modules() nipype-1.7.0/tools/ci/000077500000000000000000000000001413403311400145505ustar00rootroot00000000000000nipype-1.7.0/tools/ci/activate.sh000066400000000000000000000003451413403311400167060ustar00rootroot00000000000000if [ -e virtenv/bin/activate ]; then source virtenv/bin/activate elif [ -e virtenv/Scripts/activate ]; then source virtenv/Scripts/activate else echo Cannot activate virtual environment ls -R virtenv false fi nipype-1.7.0/tools/ci/build_archive.sh000077500000000000000000000011511413403311400177050ustar00rootroot00000000000000#!/bin/bash echo "Building archive" source tools/ci/activate.sh set -eu # Required dependencies echo "INSTALL_TYPE = $INSTALL_TYPE" set -x if [ "$INSTALL_TYPE" == "sdist" ]; then python setup.py egg_info # check egg_info while we're here python setup.py sdist export ARCHIVE=$( ls dist/*.tar.gz ) elif [ "$INSTALL_TYPE" == "wheel" ]; then python setup.py bdist_wheel export ARCHIVE=$( ls dist/*.whl ) elif [ "$INSTALL_TYPE" == "archive" ]; then export ARCHIVE="package.tar.gz" git archive -o $ARCHIVE HEAD elif [ "$INSTALL_TYPE" == "pip" ]; then export ARCHIVE="." 
fi set +eux nipype-1.7.0/tools/ci/check.sh000077500000000000000000000011351413403311400161640ustar00rootroot00000000000000#!/bin/bash echo Running tests source tools/ci/activate.sh source tools/ci/env.sh set -eu # Required variables echo CHECK_TYPE = $CHECK_TYPE set -x if [ "${CHECK_TYPE}" == "test" ]; then pytest --capture=no --verbose --doctest-modules -c nipype/pytest.ini \ --cov-config .coveragerc --cov nipype --cov-report xml \ --junitxml=test-results.xml nipype elif [ "$CHECK_TYPE" = "specs" ]; then make specs git status -s test -z "$(git status -s)" elif [ "$CHECK_TYPE" = "style" ]; then black --check nipype setup.py else false fi set +eux echo Done running tests nipype-1.7.0/tools/ci/create_venv.sh000077500000000000000000000006301413403311400174070ustar00rootroot00000000000000#!/bin/bash echo Creating isolated virtual environment source tools/ci/env.sh set -eu # Required variables echo SETUP_REQUIRES = $SETUP_REQUIRES set -x python -m pip install --upgrade pip virtualenv virtualenv --python=python virtenv source tools/ci/activate.sh python --version python -m pip install -U $SETUP_REQUIRES which python which pip set +eux echo Done creating isolated virtual environment nipype-1.7.0/tools/ci/env.sh000066400000000000000000000011441413403311400156740ustar00rootroot00000000000000SETUP_REQUIRES="pip setuptools>=30.3.0 wheel" # Minimum requirements REQUIREMENTS="-r requirements.txt" # Minimum versions of minimum requirements MIN_REQUIREMENTS="-r min-requirements.txt" # Numpy and scipy upload nightly/weekly/intermittent wheels NIGHTLY_WHEELS="https://pypi.anaconda.org/scipy-wheels-nightly/simple" STAGING_WHEELS="https://pypi.anaconda.org/multibuild-wheels-staging/simple" PRE_PIP_FLAGS="--pre --extra-index-url $NIGHTLY_WHEELS --extra-index-url $STAGING_WHEELS" for CONF in /etc/fsl/fsl.sh /etc/afni/afni.sh; do if [ -r $CONF ]; then source $CONF; fi done FSLOUTPUTTYPE=NIFTI_GZ 
nipype-1.7.0/tools/ci/install.sh000077500000000000000000000012631413403311400165570ustar00rootroot00000000000000#!/bin/bash echo Installing nipype source tools/ci/activate.sh source tools/ci/env.sh set -eu # Required variables echo INSTALL_TYPE = $INSTALL_TYPE echo CHECK_TYPE = $CHECK_TYPE echo NIPYPE_EXTRAS = $NIPYPE_EXTRAS echo EXTRA_PIP_FLAGS = $EXTRA_PIP_FLAGS set -x if [ -n "$EXTRA_PIP_FLAGS" ]; then EXTRA_PIP_FLAGS=${!EXTRA_PIP_FLAGS} fi if [ "$INSTALL_TYPE" == "setup" ]; then python setup.py install else pip install $EXTRA_PIP_FLAGS $ARCHIVE fi # Basic import check python -c 'import nipype; print(nipype.__version__)' if [ "$CHECK_TYPE" == "skiptests" ]; then exit 0 fi pip install $EXTRA_PIP_FLAGS "nipype[$NIPYPE_EXTRAS]" set +eux echo Done installing nipype nipype-1.7.0/tools/ci/install_deb_dependencies.sh000077500000000000000000000005251413403311400220770ustar00rootroot00000000000000#!/bin/bash echo "Installing NeuroDebian dependencies" set -eu echo "INSTALL_DEB_DEPENDENCIES = $INSTALL_DEB_DEPENDENCIES" if $INSTALL_DEB_DEPENDENCIES; then bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) sudo apt update sudo apt install -y -qq fsl afni elastix fsl-atlases xvfb fusefat graphviz fi nipype-1.7.0/tools/ci/install_dependencies.sh000077500000000000000000000006371413403311400212710ustar00rootroot00000000000000#!/bin/bash echo Installing dependencies source tools/ci/activate.sh source tools/ci/env.sh set -eu # Required variables echo EXTRA_PIP_FLAGS = $EXTRA_PIP_FLAGS echo DEPENDS = $DEPENDS set -x if [ -n "$EXTRA_PIP_FLAGS" ]; then EXTRA_PIP_FLAGS=${!EXTRA_PIP_FLAGS} fi if [ -n "$DEPENDS" ]; then pip install ${EXTRA_PIP_FLAGS} --prefer-binary ${!DEPENDS} fi set +eux echo Done installing dependencies nipype-1.7.0/tools/ex2rst000077500000000000000000000227111413403311400153350ustar00rootroot00000000000000#!/usr/bin/env python # # Note: this file is copied (possibly with minor modifications) from the # sources of the PyMVPA project - 
http://pymvpa.org. It remains licensed as # the rest of PyMVPA (MIT license as of October 2010). # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## # # See COPYING file distributed along with the PyMVPA package for the # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Helper to automagically generate ReST versions of examples""" import os import sys import re import glob from optparse import OptionParser __docformat__ = 'restructuredtext' def auto_image(line): """Automatically replace generic image markers with ones that have full size (width/height) info, plus a :target: link to the original png, to be used in the html docs. """ img_re = re.compile(r'(\s*)\.\. image::\s*(.*)$') m = img_re.match(line) if m is None: # Not an image declaration, leave the line alone and return unmodified return line # Match means it's an image spec, we rewrite it with extra tags ini_space = m.group(1) lines = [ line, ini_space + ' :width: 500\n', #ini_space + ' :height: 350\n' ] fspec = m.group(2) if fspec.endswith('.*'): fspec = fspec.replace('.*', '.png') fspec = fspec.replace('fig/', '../_images/') lines.append(ini_space + (' :target: %s\n' % fspec)) lines.append('\n') return ''.join(lines) def exfile2rst(filename): """Open a Python script and convert it into an ReST string. """ # output string s = '' # open source file xfile = open(filename) # parser status vars inheader = True indocs = False doc2code = False code2doc = False # an empty line found in the example enables the check for a potentially # indented docstring starting on the next line (as an attempt to exclude # function or class docstrings) last_line_empty = False # indentation of indented docstring, which is removed from the RsT output # since we typically do not want an indentation there. 
indent_level = 0 for line in xfile: # skip header if inheader and \ not (line.startswith('"""') or line.startswith("'''")): continue # determine end of header if inheader and (line.startswith('"""') or line.startswith("'''")): inheader = False # strip comments and remove trailing whitespace if not indocs and last_line_empty: # first remove leading whitespace and store indent level cleanline = line[:line.find('#')].lstrip() indent_level = len(line) - len(cleanline) - 1 cleanline = cleanline.rstrip() else: cleanline = line[:line.find('#')].rstrip() if not indocs and line == '\n': last_line_empty = True else: last_line_empty = False # if we have something that should go into the text if indocs \ or (cleanline.startswith('"""') or cleanline.startswith("'''")): proc_line = None # handle doc start if not indocs: # guarenteed to start with """ if len(cleanline) > 3 \ and (cleanline.endswith('"""') \ or cleanline.endswith("'''")): # single line doc code2doc = True doc2code = True proc_line = cleanline[3:-3] else: # must be start of multiline block indocs = True code2doc = True # rescue what is left on the line proc_line = cleanline[3:] # strip """ else: # we are already in the docs # handle doc end if cleanline.endswith('"""') or cleanline.endswith("'''"): indocs = False doc2code = True # rescue what is left on the line proc_line = cleanline[:-3] # reset the indentation indent_level = 0 else: # has to be documentation # if the indentation is whitespace remove it, other wise # keep it (accounts for some variation in docstring # styles real_indent = \ indent_level - len(line[:indent_level].lstrip()) proc_line = line[real_indent:] if code2doc: code2doc = False s += '\n' proc_line = auto_image(proc_line) if proc_line: s += proc_line.rstrip() + '\n' else: if doc2code: doc2code = False s += '\n\n.. 
code-block :: python\n' # has to be code s += ' %s' % line xfile.close() return s def exfile2rstfile(filename, opts): """ """ # doc filename dfilename = os.path.basename(filename[:-3]) + '.rst' dfilepath = os.path.join(opts.outdir, os.path.basename(dfilename)) print("Creating file %s." % os.path.abspath(dfilepath)) # open dest file dfile = open(dfilepath, 'w') # place header dfile.write('.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n') # place cross-ref target dfile.write('.. _example_' + dfilename[:-4] + ':\n\n') # write converted ReST dfile.write(exfile2rst(filename)) links = """ .. include:: ../../links_names.txt """ dfile.write(links) if opts.sourceref: # write post example see also box msg = """ .. admonition:: Example source code You can download :download:`the full source code of this example <%s>`. This same script is also included in `%s <%s>`__ under the :file:`%s` directory. """ % (os.path.relpath(filename, opts.outdir), "Nipype1 Examples Niflow", "https://github.com/niflows/nipype1-examples", "package/niflow/nipype1/examples") dfile.write(msg) dfile.close() def main(): parser = OptionParser( usage="%prog [options] [...]", version="%prog 0.1", description="""\ %prog converts Python scripts into restructered text (ReST) format suitable for integration into the Sphinx documentation framework. Its key feature is that it extracts stand-alone (unassigned) single, or multiline triple-quote docstrings and moves them out of the code listing so that they are rendered as regular ReST, while at the same time maintaining their position relative to the listing. The detection of such docstrings is exclusively done by parsing the raw code so it is never actually imported into a running Python session. Docstrings have to be written using triple quotes (both forms " and ' are possible). It is recommend that such docstrings are preceded and followed by an empty line. Intended docstring can make use of the full linewidth from the second docstring line on. 
If the indentation of multiline docstring is maintained for all lines, the respective indentation is removed in the ReST output. The parser algorithm automatically excludes file headers and starts with the first (module-level) docstring instead. """) # define options parser.add_option( '--verbose', action='store_true', dest='verbose', default=False, help='print status messages') parser.add_option( '-x', '--exclude', action='append', dest='excluded', help="""\ Use this option to exclude single files from the to be parsed files. This is especially useful to exclude files when parsing complete directories. This option can be specified multiple times. """) parser.add_option( '-o', '--outdir', action='store', dest='outdir', type='string', default=None, help="""\ Target directory to write the ReST output to. This is a required option. """) parser.add_option( '--no-sourceref', action='store_false', default=True, dest='sourceref', help="""\ If specified, the source reference section will be suppressed. """) parser.add_option( '--project', type='string', action='store', default='', dest='project', help="""\ Name of the project that contains the examples. This name is used in the 'seealso' source references. 
Default: '' """) # parse options (opts, args) = parser.parse_args() # read sys.argv[1:] by default # check for required options if opts.outdir is None: print('Required option -o, --outdir not specified.') sys.exit(1) # build up list of things to parse toparse = [] for t in args: # expand dirs if os.path.isdir(t): # add all python files in that dir toparse += glob.glob(os.path.join(t, '*.py')) else: toparse.append(t) # filter parse list if opts.excluded is not None: toparse = [t for t in toparse if t not in opts.excluded] toparse_list = toparse toparse = set(toparse) if len(toparse) != len(toparse_list): print('Ignoring duplicate parse targets.') os.makedirs(opts.outdir, exist_ok=True) # finally process all examples for t in toparse: exfile2rstfile(t, opts) if __name__ == '__main__': main() nipype-1.7.0/tools/feedstock.sh000077500000000000000000000061501413403311400164650ustar00rootroot00000000000000#!/bin/bash # # Script to submit and update feedstock PRs from CircleCI # # Requires the following environment variables # # GITHUB_USER: The name of your user or bot # CIRCLE_PROJECT_USERNAME: User under which repository is found # CIRCLE_PROJECT_REPONAME: Name of repository # # One of: # GITHUB_PASSWORD: Password for user or bot # GITHUB_TOKEN: Pre-established token for user or bot # # One of: # CIRCLE_BRANCH: Name of release branch (rel/) # CIRCLE_TAG: Name of release tag () # # Depends: # # bash https://www.gnu.org/software/bash/ # git https://git-scm.com/ # hub https://hub.github.com/ # sha256sum https://www.gnu.org/software/coreutils/coreutils.html # # 2018 Chris Markiewicz set -x REPO=${1:-$CIRCLE_PROJECT_REPONAME} FEEDSTOCK=${2:-$REPO-feedstock} SRCREPO=$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME # Release branches should be of the form 'rel/' # The corresponding tag should be the bare '' strings if [[ -n "$CIRCLE_TAG" ]]; then RELEASE=true REF="$CIRCLE_TAG" BRANCH="rel/$REF" VERSION="$REF" COMMIT_MSG="REL: $VERSION" PR_TITLE="REL: $VERSION" else 
RELEASE=false REF="$CIRCLE_BRANCH" BRANCH=$REF VERSION="${REF#rel/}" COMMIT_MSG="TEST: $BRANCH" PR_TITLE="[WIP] REL: $VERSION" fi # Clean working copy TMP=`mktemp -d` hub clone conda-forge/$FEEDSTOCK $TMP/$FEEDSTOCK pushd $TMP/$FEEDSTOCK # Get user fork, move to a candidate release branch, detecting if new branch hub fork git fetch --all if git checkout -t $GITHUB_USER/$BRANCH; then NEW_PR=false else NEW_PR=true git checkout -b $BRANCH origin/master fi # Calculate hash SHA256=`curl -sSL https://github.com/$SRCREPO/archive/$REF.tar.gz | sha256sum | cut -d\ -f 1` URL_BASE="https://github.com/$CIRCLE_PROJECT_USERNAME/{{ name }}/archive" if $RELEASE; then URL_FMT="$URL_BASE/{{ version }}.tar.gz" else URL_FMT="$URL_BASE/rel/{{ version }}.tar.gz" fi # Set version, hash, and reset build number # Use ~ for separator in URL, to avoid slash issues sed -i '' \ -e 's/^\({% set version = "\).*\(" %}\)$/'"\1$VERSION\2/" \ -e 's/^\({% set sha256 = "\).*\(" %}\)$/'"\1$SHA256\2/" \ -e 's~^\( *url:\) .*$~\1 '"$URL_FMT~" \ -e 's/^\( *number:\) .*$/\1 0/' \ recipe/meta.yaml # Bump branch git add recipe/meta.yaml git commit -m "$COMMIT_MSG" git push -u $GITHUB_USER $BRANCH if $NEW_PR; then hub pull-request -b conda-forge:master -F - < If not set with options, the repository name is the same as the If not set with options, the main github user is the same as the repository name.""" GITWASH_CENTRAL = "git://github.com/matthew-brett/gitwash.git" GITWASH_BRANCH = "master" def main(): parser = OptionParser() parser.set_usage(parser.get_usage().strip() + USAGE) parser.add_option( "--repo-name", dest="repo_name", help="repository name - e.g. 
nitime", metavar="REPO_NAME", ) parser.add_option( "--github-user", dest="main_gh_user", help="github username for main repo - e.g fperez", metavar="MAIN_GH_USER", ) parser.add_option( "--gitwash-url", dest="gitwash_url", help="URL to gitwash repository - default %s" % GITWASH_CENTRAL, default=GITWASH_CENTRAL, metavar="GITWASH_URL", ) parser.add_option( "--gitwash-branch", dest="gitwash_branch", help="branch in gitwash repository - default %s" % GITWASH_BRANCH, default=GITWASH_BRANCH, metavar="GITWASH_BRANCH", ) parser.add_option( "--source-suffix", dest="source_suffix", help="suffix of ReST source files - default '.rst'", default=".rst", metavar="SOURCE_SUFFIX", ) parser.add_option( "--project-url", dest="project_url", help="URL for project web pages", default=None, metavar="PROJECT_URL", ) parser.add_option( "--project-ml-url", dest="project_ml_url", help="URL for project mailing list", default=None, metavar="PROJECT_ML_URL", ) (options, args) = parser.parse_args() if len(args) < 2: parser.print_help() sys.exit() out_path, project_name = args if options.repo_name is None: options.repo_name = project_name if options.main_gh_user is None: options.main_gh_user = options.repo_name repo_path = clone_repo(options.gitwash_url, options.gitwash_branch) try: copy_replace( ( ("PROJECTNAME", project_name), ("REPONAME", options.repo_name), ("MAIN_GH_USER", options.main_gh_user), ), repo_path, out_path, cp_globs=(pjoin("gitwash", "*"),), rep_globs=("*.rst",), renames=(("\.rst$", options.source_suffix),), ) make_link_targets( project_name, options.main_gh_user, options.repo_name, pjoin(out_path, "gitwash", "known_projects.inc"), pjoin(out_path, "gitwash", "this_project.inc"), options.project_url, options.project_ml_url, ) finally: shutil.rmtree(repo_path) if __name__ == "__main__": main() nipype-1.7.0/tools/install_spm_mcr.sh000066400000000000000000000015541413403311400177040ustar00rootroot00000000000000if [ ! 
-d $HOME/mcr ] then echo "destinationFolder=$HOME/mcr" > $HOME/mcr_options.txt echo "agreeToLicense=yes" >> $HOME/mcr_options.txt echo "outputFile=/tmp/matlabinstall_log" >> $HOME/mcr_options.txt echo "mode=silent" >> $HOME/mcr_options.txt mkdir -p $HOME/matlab_installer wget -nc http://www.mathworks.com/supportfiles/downloads/R2015a/deployment_files/R2015a/installers/glnxa64/MCR_R2015a_glnxa64_installer.zip -O $HOME/matlab_installer/installer.zip unzip $HOME/matlab_installer/installer.zip -d $HOME/matlab_installer/ $HOME/matlab_installer/install -inputFile $HOME/mcr_options.txt rm -rf $HOME/matlab_installer $HOME/mcr_options.txt fi if [ ! -d $HOME/spm12 ] then wget http://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/dev/spm12_r6472_Linux_R2015a.zip -O $HOME/spm12.zip unzip $HOME/spm12.zip -d $HOME rm -rf $HOME/spm12.zip fi nipype-1.7.0/tools/retry_cmd.sh000077500000000000000000000012101413403311400164760ustar00rootroot00000000000000#!/bin/sh # # retry_cmd.sh [-n NLOOPS] [-s SLEEP] CMD # # Retry command until success or pre-specified number of failures # # 2018 Chris Markiewicz # Released into public domain NLOOPS=3 TOSLEEP=5 while true; do case "$1" in -n ) NLOOPS="$2"; shift 2 ;; -s ) TOSLEEP="$2"; shift 2 ;; -- ) shift; break ;; * ) break ;; esac done # Normalize whitespace in command, preserving quotes CMD="" for ARG; do CMD="$CMD \"$ARG\""; done RET=0 for i in `seq $NLOOPS`; do sh -c "$CMD" RET="$?" if [ "$RET" -eq 0 ]; then break; fi if [ "$i" -ne "$NLOOPS" ]; then sleep $TOSLEEP; fi done exit $RET nipype-1.7.0/tools/run_examples.py000066400000000000000000000005431413403311400172330ustar00rootroot00000000000000# -*- coding: utf-8 -*- import sys from textwrap import dedent if __name__ == "__main__": print(dedent("""Nipype examples have been moved to niflow-nipype1-examples. 
Install with: pip install niflow-nipype1-examples""")) if sys.argv[1:]: print("Run this command with: niflow-nipype1-examples " + " ".join(sys.argv[1:])) sys.exit(1) nipype-1.7.0/tools/toollib.py000066400000000000000000000020511413403311400161710ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Various utilities common to IPython release and maintenance tools. """ from builtins import map # Library imports import os import sys from subprocess import Popen, PIPE, CalledProcessError, check_call from distutils.dir_util import remove_tree # Useful shorthands pjoin = os.path.join cd = os.chdir # Utility functions # ----------------------------------------------------------------------------- # Functions # ----------------------------------------------------------------------------- def sh(cmd): """Execute command in a subshell, return status code.""" return check_call(cmd, shell=True) def compile_tree(): """Compile all Python files below current directory.""" vstr = ".".join(map(str, sys.version_info[:2])) stat = os.system("%s %s/lib/python%s/compileall.py ." % (sys.executable, sys.prefix, vstr)) if stat: msg = "*** ERROR: Some Python files in tree do NOT compile! ***\n" msg += "See messages above for the actual file that produced it.\n" raise SystemExit(msg) nipype-1.7.0/tools/update_changes.sh000077500000000000000000000035621413403311400174740ustar00rootroot00000000000000#!/bin/bash # # Collects the pull-requests since the latest release and # aranges them in the CHANGES.txt file. # # This is a script to be run before releasing a new version. # # Usage /bin/bash update_changes.sh 1.0.1 # # Setting # $ help set set -u # Treat unset variables as an error when substituting. set -x # Print command traces before executing command. ROOT=$( git rev-parse --show-toplevel ) CHANGES=$ROOT/doc/changelog/1.X.X-changelog.rst # Check whether the Upcoming release header is present head -1 $CHANGES | grep -q Upcoming UPCOMING=$? 
# Elaborate today's release header HEADER="$1 ($(date '+%B %d, %Y'))" echo $HEADER >> newchanges echo $( printf "%${#HEADER}s" | tr " " "=" ) >> newchanges echo >> newchanges if [[ "x$2" != "x" ]]; then echo "(\`Full changelog \`__)" >> newchanges echo >> newchanges fi # Search for PRs since previous release MERGE_COMMITS=$( git log --grep="Merge pull request\|(#.*)$" `git describe --tags --abbrev=0`..HEAD --pretty='format:%h' ) for COMMIT in ${MERGE_COMMITS//\n}; do SUB=$( git log -n 1 --pretty="format:%s" $COMMIT ) if ( echo $SUB | grep "^Merge pull request" ); then # Merge commit PR=$( echo $SUB | sed -e "s/Merge pull request \#\([0-9]*\).*/\1/" ) TITLE=$( git log -n 1 --pretty="format:%b" $COMMIT ) else # Squashed merge PR=$( echo $SUB | sed -e "s/.*(\#\([0-9]*\))$/\1/" ) TITLE=$( echo $SUB | sed -e "s/\(.*\)(\#[0-9]*)$/\1/" ) fi echo " * $TITLE (https://github.com/nipy/nipype/pull/$PR)" >> newchanges done echo >> newchanges echo >> newchanges # Append old CHANGES if [[ "$UPCOMING" == "0" ]]; then # Drop the Upcoming title if present tail -n+4 $CHANGES >> newchanges else cat $CHANGES >> newchanges fi # Replace old CHANGES with new file mv newchanges $CHANGES nipype-1.7.0/tools/update_requirements.py000077500000000000000000000007001413403311400206140ustar00rootroot00000000000000#!/usr/bin/env python3 from runpy import run_path from pathlib import Path repo_root = Path(__file__).parent.parent info_file = repo_root / "nipype" / "info.py" reqs = repo_root / "requirements.txt" info = run_path(info_file) requirements = info["REQUIRES"] script_name = Path(__file__).relative_to(repo_root) lines = [f"# Auto-generated by {script_name}", ""] # Write requirements lines[1:-1] = requirements reqs.write_text("\n".join(lines)) nipype-1.7.0/tools/update_zenodo.py000077500000000000000000000043101413403311400173700ustar00rootroot00000000000000#!/usr/bin/env python3 """Update and sort the creators list of the zenodo record.""" import git import json from subprocess import run, 
PIPE from pathlib import Path from fuzzywuzzy import fuzz, process def decommify(name): return " ".join(name.split(", ")[::-1]) # These names should go last CREATORS_LAST = ["Krzysztof J. Gorgolewski", "Satrajit Ghosh"] # Contributors that have requested not to be cited (or bothered) BLACKLIST = {"Jonathan R. Williford"} if __name__ == "__main__": git_root = Path(git.Repo(".", search_parent_directories=True).working_dir) zenodo_file = git_root / ".zenodo.json" zenodo = json.loads(zenodo_file.read_text()) if zenodo_file.exists() else {} creator_map = { decommify(creator["name"]): creator for creator in zenodo.get("creators", []) } shortlog = run(["git", "shortlog", "-ns"], stdout=PIPE) commit_counts = dict( line.split("\t", 1)[::-1] for line in shortlog.stdout.decode().split("\n") if line ) existing_creators = set(creator_map.keys()) committers = [] # Stable sort: # Number of commits in descending order # Ties broken by alphabetical order of first name for committer, _ in sorted(commit_counts.items(), key=lambda x: (-int(x[1]), x[0])): matches = process.extract( committer, creator_map.keys(), scorer=fuzz.token_sort_ratio, limit=2 ) match, score = matches[0] if score <= 80: if committer not in BLACKLIST: print("No entry to sort:", committer) continue existing_creators.discard(match) committers.append(match) for unmatched in sorted(existing_creators): print("No matching commits:", unmatched) # Keep the entries to avoid removing people for bad matching committers.append(unmatched) for last_author in CREATORS_LAST: if committers[-1] != last_author: committers.remove(last_author) committers.append(last_author) creators = [ creator_map.get(committer, {"name": committer}) for committer in committers ] zenodo["creators"] = creators zenodo_file.write_text("%s\n" % json.dumps(zenodo, indent=2))

pN?1cBGN{6 ,<զLn6 lׅD]JIXd1Y>*`"%eA(+qEH7 _\| 2 IQpP6d4v {>W9om8_ty+}p-5$Ǽ\#m}d<ܬ7ƶ54K8C|~g<_^'y>WiRp~'cIVҗ.>^KCǹ:}[oiz;Wu^zus^J{i7{E@PE@PE@PE@PE@PE@PE@PE@PA ڛ-;*N3g^wo.(sOuw^*|¯t|=q^\7do#5s߉\5wcpy3G?N9vN,S>\˵8onq (_eLi"pv\ etj]P{cfOC"PDpC]Ӊ6ʸm%}O.PA琛bs/DWvux6Snby6ZҷNk-HB+6:+Oٸd Lәd:'<߀bԘ2!?_mIG 8䂲Ogq-|)06*-ɳضklp >{~ͺxrNwe'uA_޵y`]A 3}Ykn>rքw>{  Q 6DbxZܹ ~ 'R{م}>|b6{owx^E7uLLNnuo>K&PhmkRy?xp795^PgUmc|?_*\ϋ#E'GKX&-1v:Ml\[2ڳ' XSo?_Lw_>H>cw[0r>_Kr3.ϚM~15l#OS,L-Yݦ}ȿxeҍ%R|~rhGsIy>E9 C.Gxgplqr6vvkg>YmfXpebxn./ki ^_wU#~fkw_Dǥ__ G[ܲL%xf-To?<*ym|6KAǹeyn}˹ rsŸ5@rn-g}Uюoh;>mߘmRuf۞޸w[="("("("("("("("("Hv]d9Bf# _RD Is8w smOe8QtA\16޻dP3\n߄]h|=b͝fj_AB٩bYooyF'.AhRSd=ceV m-M*xyR.5([e drkZ]GeO'qI}̯e^X~?2?c5bߍR 7ww#|ے]{ҧ{ .&n <6l$8WW2wG&D x歵+=ٿWޱ/]''qgo<nuM:{ b,>rlWyqFq]g[)1C#%$v kK,uGYč}(d>/\_|) ˱L'nmw */> 2(c֧^2LIn^2c<.y(0r/l->\'{A_}}<.6F,f?q^ rW~X[Xy\k"?e>%XL!F6/gXq74mWoEP7AOO\q:f}_0Fͧ3``}Ն6b}iw>ONwf̴ĉ&u˗gM .bZ#_g98D/WWx)$i:b%!VX7ub9VoLocpԯ{xk'W|{bwד}]]|X^[,s3 qX^61Ƣ0\OOs:fwקb [e䴺!WF 1q~DW:Fo<ɝ+:3>׾F'Nqאkk&&t׎g=j1(V󍴟-c:^ܩ]=6SXAa8H̳LSgQ6+ІEFo}C6W|ƿgYYu+gB_\LL%lQ5rcchdD󨇎m,Fڊ.l<'XwL(j qO2=6O 5]TVp^WKzH6AȺXHal{[FY1&cŽ^aJ2lkH-"wsL(ovJn˽XxEaxyi&^/ޖ 23tE݆Zd1 •/{:c+zk(c7 K6NŹof{g|s+*'=]-kWW +y{Ky2m/^l̹J˯ݻ{ /8H4cd5CؑL\ev;|[%7P"hggk:/Dž;7v2r#6,ֶ>5:[ih|w4/V Oljrhv.g[1}itlFmyc8 퐄<zO8x~NG61eL<3Y.廑}̅?ژI;kX,϶)kZxzisKb}ftphN`ۼ^_bՈj{Y>YXM;}l5nSY{{{Frբ XY:sSS'~ZIJhF3('¡D8aW?mOg#g:{oxx(6ۧةWhi\Ŧ2')+-w:4O&+)Azn o:/t4 q\!Z3oyĥs:84HiuܻU樂g*"("("("("("("("("("(/5sݹG[t|E#17ppNt- r̺]nlb'[1#ПϾa.I {떔Ϸ b؋q&^gwX%]'ֳaLב&ڛ[;xqd2ftr/Ώ%Jc﻽|m-0>1!~Ƌmʾ΅_/B rV+\CvSx1t[zaOM>nN)~?12G>B79+WYKct5t8@@-`8rN'[h#L_QVprwoROIo:٩+·NuHLLS2P_;kв}6F4'BgZ]EhijZ/%)9)"("("("("("("("("("("({?${>.6bw륲n}+g:>}}XNU);{fkw4RNw7OAq tnXJ)ٻ>EۣSw{dtrs R٬im(ÕX#1LY:5\^3c}1zKh&gոZKf9ΝZ-6À1ǯ8/ŽƇ"ؘ%5\Dt4\A}FJ`^3M.c=w[jF'~-MwkƗD=_v[?em<^onzmWu33ku98Rr2Չ{#J- mk_E<O:}ou2Mg֮Կx&71&99Tg69ֵ_]ce"N(t;\=%iāRS^+gǾp35n{gsr 5z߶텑 *<}*._*Kv/MwzX1=ٿ3lX 76}Zl6(mg:Y|^k3>|{WouK-8[g9y~)o^6o2Z;u-{P5{>xNE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PEॊy:k ً%5qXKUcXN^Aˎ榺wc^ cÀYjd 
{7_^qffp̼WPh6J@Ef)M埸~xhba:Fpv,U6\4F_|I&pxtb3u],5yOft[ltKiou sF=`m KGFg)-u JM;6ϷO-rNkc7 fl>Zme^k9%PzĂyo8B[1d[\䳯jTࡺuקS/#wPڼ6n\ZtmwK:v4\ZcIY=:<~lV ^{)L]ϖυSE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@PE@?nipype-1.7.0/nipype/testing/data/tpm_01.nii.gz000066400000000000000000004220731413403311400211370ustar00rootroot00000000000000|Snormtpm_01.nii} [U~;2*MI#SA+EId\4EȔp ):)")"**yLW3\o=̴0/='k/OGmDN*`t>1f?8&-p ?Ƿq)mͺe#dFH9PF|WWPŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@12`5bNҦilpF~Ŵmx>,r]W'?EfQ3o{#s',lgә=%ڶ;-FnwX0dCɝzEFkw&=6O>sAvgXh1NcBnwzJ]ko҆z}\Q&–䕲C,Q؟x8q`*m6ÚIPCWzz_\D<gv垪Ix6jN9|=sT}.066c?_Sro&!ݝú-};s̹Z]kpohv&ʹv>Rw+BtJ"HȑN.ə-jƸv?cv~|=}cGNT3;`cjP (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P ( $8 *Qg#nvU,q03w4FICv,ݺy]a{·Hh1T; MΘP۝wpPMq _0RLtI[\ cՒ\-5 2(kW >YN3s\C7 #Wv Cp&lk&`pZ,d)^[q03m||:m1n=5,lrs-F}Wjo0EٓrPGHRt;tؕJg>%iXkY4i-K(\$Vз*~ǩpxc^'GyJ8w|kf7QjD3`?+^e?>w>[#]tl,QP>fpYSxw8sN[1ٳ}y[N2FXsygXk'wF5P>Ο0:0ܾQ"Ճυz/x 'nQ+w7|mo$DcN6?,10X Bbn%睂eەs/G@~^2/^mb'{B:L.r|p9[1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P ( $~/cF+K's3( 9r 1]7\=}볱gԬ| Usmhu.НS ,ɒ*f^?=Dv]%g9&m]O̹)f nE X3tliQ,sO$MST3RKH9Θ23|-ƧOݣ=ʱu|7^IE>V$' /8M֒YŶޝFyyRDO>[/̝m\w*5H:j2hl~LοB]gj*A>͟9^qn<(R]RѕvQ?j ڂixq0S~6IN^D+Ql{f~E'ߌ-ӏ6=%'N=%dM|;*Gۣ.j .o1#?wΝ|1?( 2Pċ3ۂ_%΃<+N94fzW ⟛YJ}}r;5=}԰)<ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P 
(b`mƓԥq-Uh[>y%HvÏk8"1n3sܐx]G7+p;2^4-E"OE_K_j|0]|Hq8r68KmmrgpVYn,)MYֿ7X!z{}:ְ)̅?*c)YWMqxhm ̕!PF8O:.?랚]CulmAu/=2VX۝u+Gn7 g4~mk,[!3egd;33yJiX&blǃK=+[7;R'_:D΍.%Xyȷ7$'opyov̺|}w3wm՚fZ9p]F!(+Js8m n{~Q^RMMFg;%8v(c?2瞬wVGȈz*e-&їMshRu3U la[mBjq9+صFϢ&rY֨Gr)pԥ֎s}}+xq~f {G6O'C{yGtCÛ?'QI]:7;'=c({rې}ٯ!<49b43gݩUuTvqjv)۬Q؍ˠlFg8ˍoHnYNh\!t?9cE>OhXI_wҝBV"T;> =ךEng=1hfohw$m>)mCkPz6G><9[KCNT}eV{ zPc'('Qx3{~֕U/W61Qq'ٱY53^ʿk7U7QzXs,|eى cTQx!g:wcڝ _9)_>CQ>JN3ozUq2Ɇ.Trz>{[7ŒIL+Gvo.sR4_q,l_`q]]lS3J,=*r=$'qռߝJyFNgڠF/9mi ;j(_c';e͌^$3 $ Sy_$Α"иmWbS,Vr\23oaLmݝ-x7sݕ[k;ޅ~27xSV{uLSOoS23r< S܋AWnϙݵY܍kF.N+ˍ~65pa@m86A3os=w}o~HI]۝/y2XQgU>ag,΍ŧ)wbrJp[I{ ۝m%=:_Wn sve6/ϣC\'J9]JtNfYz`6slWJsI9K> 4` /R /rs:̤Kr؋7&}9Ϧ3KrZq>Ǟ|hՍ ,C~y:!U}>7劸SwHO\YMq=&:8Y4 ~E%n\/CvE6?|f$?fϱ ( ''Ȝ*ff?6[SA\Yjn6a{GGel'b_=W"쉘ǜY?񬾪<΢{8n?VZm~룉9j.,6S9U5Zχ M 8b|z3/>^U5 7cdnv :~}:8l%k3&6I:qn+ڼf&Z63H|{g]yxg_'AixkR +j/蔣 2Wd;||# ~'Ȯ;b=kok2}۬{S}ߘ [|F^V`_1t]?t<%3%h pj<- W*w/U=3Q;K3`F6-`0~).cyU o:ÆkMtfke"¦7bTWu=Dkd~9+1϶%*|ԓxY0cf ά>n=[9.7gfc:_NJҪ k;KO}ظ}|6u=QmvFc΃Ph:Gl,5uj\hϝ>kQ!!=Z)Ϛٓptf8DהiL#qmH.mi麞׹;Ku߮eGRgT>'3Gh줜\:*ˍ>IMxyt[nғQxZU8yʇvO>_jW͆gVBt{~ǩUL}v^5Vm6z^,3=4)/WzG{Eԇu'~kSkye9a;싨z;[8PIGN{]|6FwfPۭյvxyuo+'y_+0m_Tsj ܶ͢kr'Pe[Lx^k'vwE/ڋk6E6ش-|:G]od-A7҈:3>Tj{GT<|o;rU`=gv=> <=9KS)<&>%b@1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1h1VYy,(~jm/nmjvHfWU|џVHlOX^kzӑ?P.Q'a0+J|1_&qu.?hj1kEݓTOpNY~s -Sy<ܯr,ė:gy5W_m3H! 
0S.z8|4r~ͼBBT; ì |NKmۂFl3n!fqU؅L=8])lܚID~od2cC\ȲUXg=4&E>7u =Ўmբ'E }^d˺/=ѕWK[,mF`?V6:{#tL@e]#skaJԈ2t 9_5c9)F1C M ԞMn:Z_/ >cnѕb?#8f1'~[̾{K3_?pfnao:|,#Fl﷾IjW]&0OsIN[AWSk :-#ŘC+繃Ӛ63ۖ\0pqc0| 3{|vŊo:w/+pLwWkӮnf-tgy}}f9IˇI2?ez WR wo][\sL9Io4/p]Q-Vl|GI=VU"/qe_TYn\7qߊߣlwj]Z%1Ǻr_kXqSs5mwr܇` ]o*Mluw!X(6cM~+xހQUң ݩ<ћ?p/J~D~%ʁXr'{(ޛ35;LIaQ^!3;;uU0uOk{"IDׁO1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀb@1-tz'ni5rz7Nm\MHv5C5Q i +,;Q٩*p-v{ݳy9M_9UkGQ6w_y ikvB>#hYw\kP]Zɑ+jjGZT;2>Fbp1#ױȝ=R5z/o^MGzD " ۢk?BָFfko!+t#<Be=oFOx~V"-3VOXNjx;lkz ]Ѵ.=Ghn)&ᄌЍXwv)S+gũКv7똕N=*}$nb13bKqm6'dVĉA'8n=w)nƶ}:n7Du![?G5kZ;ﯴoޓ'Wyz!p6/+\\Q%KƜw?I>: 6 :E䯘tcU}u;m|m,|bGDͲ)rGPJd=hDxʵ'ok3GM,[X+ {iL63f:obK9Vώbٞ}4XFz;y!|d=d~(ACfnm$~%bWbEޓ*ۃaGP M) +l8dR;bI(O8S%Q!ެͬ%[3;e{GG7M?-qKɧV:o`; >k\;_oW g@pCg{q|ruζw^>Nv5w/sO-rr:;]wo|؇ѯ߯UA^ʝD>?e;|Mn.<%WHfxݐX1_*9ޢwkxgfn9N_P/nޠ$'s/kFN_m'_ <\ywq!}qsjK>+cTs |U6v8k~bX)S1/]Aq9Տc ̣()6k3_fjg0[=2e]y;7 s~ȿo|_/(ߕ;1c(/e@s CiQ'*mɵ0z3}MOXe<< }gxJ9?;kA|=\I6+]t]^Oyݓl_loZ;5맜 z#rPs><}0v[La]+Y7S__p}0Ǫ^|{]`K+ˈ}{q3[tt?yĸ9fyDKNQQS\ڝcqe=ڸPF3;uJsc$w]yGً= yT{=C֥ڮ|lc±ݸ)'m ^|luw$?ZYx/-NQ-?ŕ[ԼaT3H'mħr ʍԩh5:%W1P (ŀb@1P (ŀb@1P (ŀb@1P (ŀbf{6&Jr[_g3^Ye~1ד5'*u$>!c6\*+9R2amf͓~6|N7QDٮYkԝJWɬ-c-k]Xם;f zVmepۣuXW^k__x|:1 =OĎzzG\0wpOX:—b镞}77v=ryhfz7ݴubg%O|ѾQn&G$rg>owһPeE8`1ňw;_k KR>(F/(9vWcqOvng,"K5*[?8᰽,9_0#Ȋ#JL[>ZgI?W('Ę뼦)CYVB~HMBv 0c1awZ,1_7/~|LYZTS֒AT/BM b(#\܏Szs'+XNyhߡ91W!73pB֥ew!O_R./;Rb+uxp4\aـWݩKq>Uc@,}jmm!pzWDž/W.d]MM-ƃ::( Ƅ]iz&=zsI;r.T[~ R9KPX)?fꓵz%_ȕ l*;B|u F Qq72rvC}zBj1 .5[}[R,"d~*apR>#1 5c) *}V.^f?G}%=Vis_`~zl(W\65;I_Mnvsg1Vv مq^˩79h8Is.w3eV `S vkARn'3ۆ:Kh>roJ,&<\W|&M~-J'圫giL i~!hwl.v =ߧ|+-XquX O%[CLbEx)d&VG ~-}(# <u..y׾ۻNƞquLCI:tǀO3~-w/p#׃f7V"!^yƪ囒~ mC> _{L`pr6[1aqBOOAo l¦=8/wٴqH]۶;ojր x̰sffyjin1^4G 'нUtq67c履\Ԟ|H4;)w)NF9KC\d6be?S9O (;pXL'Voht_ⲂHES9`|OHcE}}z m%ߙ d06rOozh^&uҳd59][=,0 4p(G[lgI+J}Mhݩ| ϝs8;Uk;Z-^G[Y2x_zMS#wM;vir 3%"{zL{V7B3-{#2 .1mdB^q9b`~,n[кm`+ܼ*_XYf$Sߦ{߰gҏ8_)oHRYSFa\Jn6߈ZEC9!,Yy w7Ƃ>+=EӑwhA|N o\bhrk^x.D>{i\'#49+Pŀb@1P (ŀb@1P (ŀb@1P (ŀb@1P (;@['b,zݼ^6ˇ zֵFvX~5u;&+Mf9>v"Zn/n5M-EU/w+>II 
zAH@#(;fky:yjC.򭪵XxSf7Z/6 ;(_i,}uMfkE{Ϥ5r=R:H-cޏYfSgWLcqmcpZZkgv#[Uu7cmY]˘x |w `m˕T{}=(|5#蝀j{oӰkaq]9ֱ=OٯT^7~ NbVY`E鿥EٱڮONg[5e{ΘluU5%,Q8_Cfģ1FA^ە[A0$uј/.ؒw)fschwB17PS<ϵ3X`).,b>SY:mqIm"]biFE,o?[zw8ϒm(tO^zxZ 9+lڕ[_LBZz+_\+Yop;9һ#Y!=fa_ X ?08!7(ˋ qN-`88q}4Vֹ*eT>AbX(ɱ]p {bК-_glZmSn/YAvV` A»׵fd(Bzxx>v688m[تrCWGҸ<-˸-f0/c#>Q䀠9x9fN  <ŏM 1 X>Bγ4w6s40 >|g1V!){;K}C|h`ur co1~H0oUqa`ٽu%؍F4l8+_Œ7pA1ǼfJ$͞W˱}CvQ׌5%K>~Ak /`i<rDVVqcWXXg+f9c9Ns8\Ɵ D>@$?lt r zş}xnYVs |0-_'1#6H9e?O[kxוb}Un{9ǝ<4g؏O>*#ӛ<)#>8QdwrV`c–瑋ӪlkJ.:n+%ɇxaufDiI0.翵5z'TC~ב;kq.o lg RVeGQXַ{zB9hvmx.A9EɿDeEtKCe iG~=#ACKW+3\l+Evxv[oJhX5Fx4v9UDU(KuF9rdXύ}E{#V%pDdSq+ Ϡg{ѳ&`rz{rGoVifޔx73a^6ak(N F38`y,'33_~gQ2uVM&2/G7l]`kKpl$?44MXѯ+e{xokyU푅E>[U8@uu^b3HloHg-J;6;lf9űݖFukxNہn56uŘ)1k5Ek=_*+9~e9D_Wk1jGՇvOy97SdnǽVq=|| lL1C̕aoNʹ@aFNWPϚ܋ۄ9nl,}Վ@}ʄz:kQmו2\ӭC6[m5T9Qtr{k'|Jŀb@11`f?z,tmGlᣇl눦e]bXŀb@1P (ŀb@1P (ŀb@1`q;x}MSy-=[][b,(W3vh'qsj?&EzEO&3Ӈ`:y-W\)ar!P͛(}q3}ݍK+7~%5}9/⬳oMnڨmxO yΗf]Pzz.5_/RnīSֶ{C1Z#)ҝJiϵ:Ly8F[s~jHI\:KiQmf>3/qPvyVz?z3bcM/5[p8`Wrc͠KE]X/c4ŻS nnOgY 5r7E*?u]S3 vY4K1:NE9QһY?u)%A†SGcM/ .SE mŚ:^ț]饵 \8.dj1ZnQFqݬ5{2f\GZCw#oD|ڝuG@]ԝq:zKԅIt?†dq|s4El3kra-lBFN!7h͍Gs~E~YwS:ݜ]ʕU 8MМKL9w㺠o`xKWD_nqQhIi$XY7=VngZȳP/E?Q!tm_P8U^O9-^i'۬睥X&?vHAw>ً]CO‡Aיd5k(:raFY|+G|Wo{J̣ί~4~(Y o+m;Qz1-ccD8)\Y8gq\;sw53^OYeZ÷Rވ!e'Xa!?Ƹ/eOsD}i幂c,ƞ._t(leuЯFi4}R7=U6*.rO|KX_cvp x\ZQs֝٥ҏsm1~MP~+W={}[UpVy`*.x|vX{eE^Qx81O~lv`P\w:9kZ1}o-}@e{ VQNe`9/>KKeߥ<Uqs+_ XXsW&!Թ&w|cqK#giV?OqKYfcۄ*u ¦;~3ݩyuϚ}WBOS^ n+ghw&^To_*߽AU'K Lآ &M)g)U?Oo\x[|QXWu dz⼽ؿu?SK)YlЗ5ý)cċE??Ňc8$\_E7r ٣*/gS>Pgaui,8I']|,b[%*'j;"(? 
GN:u\'0 ?Ȃ:`S-VTQn{#۵ۃcݳC~EFD'OyzI'_KXQ'`1wڲ 9oݠjp)q8xkҽms5o3x >h $C"9y[m~ 'mll˔sL!}ͅџJY}+ 6-=I C!D}4O1Lݎ6Fc_x1cd|&] YŸ33l>4]G/YߕrOwcC_Jt?gF9?K}D >} (6spKw'~|e͑f5,V.K[pOO[U6=^(_;h~#Շl6 Ͷnc#:=Lm/8_S<)J5P 760=KG*%u_,Sg^ӟJUY5XyCH1 m3>.ܔO/o4t>\òGc%\6q_+="߂ڄEK;H,te{xj빝73'G/%:t*魘K޷Ȟ1w|R]/ vwWao~O<ѻrͻhvAOYs:7*N1P (JqkyJ㟫g,Gb%a@7bsRxM#9Έi[2Cr[9ŀb@1P (ŀb@1P (ŀb@1?=ZGۻ||ϲxM9Q]6l,㻯j] }i}8׉d}{Ӛѐu$3֭!FWh}Ÿ/sE O1'bZJCNxݰfEW|A"PhWJ}kD1tI͌%bp.*t,mfe/M5p|EQ&O[hn{#X /VHʇ> T#cTzC+SkX7Fʋ5`>p3OÇ2٘4x\k8@oeH=c,Q$viL|5آb]ka86G3o׶;&nʽ{·sQ]<6Ö+p04{,xn`l Y1ybl sGr1{k1<Pl.䇝3o6@,5;Fgb΅k0/\WS䱡M'yyȧH~dK1[ҍڌy+}5F3&S| },QҲ3xgG?A\r8뱥9X>0r !+k v-$_yRW{5{YB/ͅwzDRʆ`{&f3v;xnѕK8c[ye\;J{s}ӑ ij5ǀ{8sB؂ ዛy뚞ClUNMhvku()ޅaY~㋲eB1ڂt7zkgS.`dW)ڗ,e3 Z*<8lq峱hsmT5hie檾<{+(o}rl8l>Yfp#Uvc( 3|UK3|q}V=1''V9r>t}MØ+5}#~7vA1江h14qz(]gv͈3cas,l6Mqi% /(#[e=t_C mT 1רC\7۟%D'}E5G|X\`@y&bчk}ȉ-t+cK9=w&6N:wjۭTA3 }mYZz/%}7PXW(зz2 t](e ;ı|#sT~ʭA/xGQnr'yWNf 9ZQ'ErW 355{UY !r|1EwI{aFb8p,[w sf=!|hBכ[r;|iM{K5lEZʑm?- ruVs(3tr WG:QOskI#CrFZ-gQ==V.&Soqtt<{˜Z~i{}+;fξ}ұ%rO;dE׬n8Yߥ;dE֙mUa#?Ӕ o/ť#֗U|D(,7~-r>M@9| Y5"3:Fe!Ilh6Ҁmv$߻>r<1Z`,Yac$Ư'bcStr]؀~oB›"r0x4z l}s ( ń䕂b`q#2g`ܘkcb\;[9aĺ_+U3FۦIx93Mܾrq#5~}9xF09&tIIޕe ћ|(W} y658}rh`|/^?\)OڂpOEya{#2CK"[=򴾢u44- ?,<@5~Gչ?lS/綊Ge{8̞61X0?dɫQ (h ~SsGs}]f]kNy~߱cm0J'WO>~+vf)dgơdW (ŀb@1P (ŀb@1P (?00/շ9(zAӻVN![myZ+~#ޓ ?goG߁Yz(m7B;xOyCByD~EC6v'ֲ#Z>>Wb鍬xGkYq/S,|ֱNcmd+ʭ2X#5_zJZjkm؏jo #ņ+ٱYmmu2-2yKIu6.(8IʋpV)$E1B8_Eּ2߁]ߗY_?bmH̑Vɱ2COű/f0~P#uab4s!e?$ijrĄ5oC'-=QN$ωKBFsʁ uh{l&q;9Xt9`dGŽD}]#Jp cX{W"Eֵxgno-+/bBE/O{H=EּJhQ_H1Uk}雴C5џW4p$b(J=gKt'"q3۠1t'M5{jQo)r,D#x9F9;KqkOfim.Q[gsRX 5F\>~W',4VQ4N/doQ ldcx؅ ZS"c2> seQۓB}oCdeFbbzq)JF1&mGj> xE[0?>Ϊj?E@&@ ”=)2LL2( ?ImPQ)`R@dʞT٦BTAH@\\Ip*;l-g~=sy.?9˗2;&juムtʏ:>wAyؓ:}bs+t@_}Iɋ_M)kM^.zY5 'и0sMhv^~_'پ1:M>F_\k`C%N~]3iU^C#}{JM0M*4<kOm?ft)nl|NվBcG#'H}`y }2QQUfj:RZ3Gy[-lL6>~i͸PzKOL.̝DJ[)lIl٪l^Lv"3Zeߝ އ=MK10Gh'%~i/6'oa,l2#]<^|<|z 2|&x4=T16KV_x=e#+u~bR@_iBw/^ 7'ړX;+E?c^>*:SxXzhb[3G ƃ{^!] 
^IG+,E:Clx#Mx8~hFxf񠙻Z1>ηoX5>y$S[ek'9 ]qw-Db5=]1#|ӦvĥRJRqrPțQ\ɫ\h{N盜ׄ3b=xV  cΡ ͌eL5LWaC`PgI:WGBպ@3y&V7v-Ifh\\ y5߅ZgCkӞb0 ޣR̟{:GN^*/fFC1zw_(s5?e:ʧ`f151 o&blW fx;[XǕs]яrg=g׹/ W=C_跛L/YMg@sQ[ m8yZ{Ck ZZ߲ܲ616wSorHZw/h]ڴrq^#ilSnrt׶m?Aq8'selĮ7ᰔ5Ǣx}} B?b0gvtne4f^VʇeJS vPCs(Հou:9.{٘G7Bpl51ٸJJC':gi~ܚ8Ga"iۊ^F}7Sok|/~Qڷ\7Mqβ*Л_ʂv(y—^} 3k1:c{oB^Msۓ_I|lL= ml導_~Ǎ{vX))}z[ `n|Inv嚛VyFar+Yw^#hk:%[j}jpe.wQپ-;_rx8nY,PIy>zpGEr}vmGKya~") ѯ<%r͢?~MʪT=!?,S=Xd=_~ @pZ"*H"*H"*H"*H"*xmH#mb4Հ_Ֆ. [GM(F9Oo~8ObnNþj*wIIk[)}mLw}X,wwP>}iOi%u3jlkgr *i.o?N~/4!{^k-W%n mTo{2ނtft1bNBz޷7G5 }?XߡtYӖ\s9k[<5]MCw}!tυ][>ClBoF|X9nY#7nKd˷5ȫ-oوM%3CyAm8mPNEu'OGA /Gوqb#85lbx3q=O!c-N@6 "`~0(f)k-ˎXĖXˡʹ[+'wCmg<=_O)&1-%օ|9lqfV/R֎VƜtiSшk&Qːw/4X.O8I#Zx)}"͢CCs%+yrEѾ(}rsOI \t(ʀx&ou~ٰϭԧR']aO9N`着컈.ֈCs~7-{2 Hf󙶟ggrLBJ~ r^hۙącJ1W$noņJ&k;Oq+tZL< -xC {r;vҳp4N4yGه8%QEĔV,; {-u<i⫮`os`#:g.|ړNɕr4 Mg:AyGh鈥bc}}f]&O<'%_s;sfV^$+Ӥ_1ng]R[37cL $+:JhӴU#rOD֋ ^:])3.9SG`5T?Ԁxv+خy{_˸li#_oWhedyIgm~%$_e`W1NYkA\/Z~'Hu4'g}gcrXSRl/XfC'Wz&W:]É {<=4Ч.$%q"Jka貼cOɴݼ~3q#I~W9U /$Cp& [2$4+MOJ' {ȯoKZ3{h'X+žtyoc}æI9!d0+dlǍ#)xg<&kFH 0$=mLvv?\g^PD6JS@¼[3ܞJَԉѨi;?7%徶QmN;Omiw8RI쏳3TUn>濱`ս/(6ɷv 늷}w! ˔佧!e'̛dEf;1-ؚg:jtC~1?UIeI:2=mOsKc$k|}qDc93 ~CW񘌫l| X_{ѿyϾzِR=QsMj%. k::KgqW`f;Nr! 
ӧ[ncryd} n yV~+Saq,G(zU ;\̉ 'Ξ2#/}u0*Hڛ|݋?Sxߐ_1ϩ+P팉6zkl1mvRcI1?0bņkovHeJt |+]u.ww>yMs昘v̐vSFcr9N3.T 3k" }G7d"lXWr9ٽ'n3\@2kؗ}`=GcKEꮃ}#A;wK}&ɧlþ,OH3zlRs5BsW%&s>{/vrn.t܀?,1H<1$߶fSl}痍Αtbw3m?ic7+o{|8/ lmÚ\+Mkۺ\]5?OK[;qte+:üZxg̥;{WUk׶W}cd~5I~{F+'ʒm답9Lʎ!/b\*uxLw9{J XiW#mE/3uK]] b[_ܜ.<W]9xv[dumV9$OQeBYn3{v~i)kcdߌO(uti~ftnEGтυuW\Vy xwgry[լL0]@.f`R³_О;fjCM;ܸBvk6b[뜕u{*tyw6^^Z:M%K 3ŧ7bu <^s'wjYvkUU\h&}f xow\ھ,v,P|Ֆ/XvZxoYURcc9%Ρޅ1wx߅i:{hc)Sӧbell1_gxV25%hؔPa{M;}|ٖzĺl@E~ϋЯcպur/.\-W $9eNwvzK9gxEVUѾfިはeV.K!^5xþ{:^|cĵ//0|00g({@wŏzJ䈾Gҿ?K]o?A]Ş$$~t$|%/5XK1J?-٢_s$5[yntW{ّa5eXD;.ʽ*H"*H"*H"*H"^m@z^XzrpbNWKPoKC]c._h#m/ʻxK>\/ti m-@?;LKgЖvYᠮЉ6?{9]ל g:M~b\ ` _^8bhl=>1w})ϹcVY\)<?InIcGثΔ5*k_9qzXMɫ՞4GxmosSzq{>E#g{gacO) ,o߶=1{'ڬ<^ߖrȌ|KNk\e'FwlҤ{w6-{}K['WSh}Iќ*6{} ƈam^}-|/k6?~5/cH@q{S/~#M½QzǑS Qyf./5NORiRTN| Lޙ3\|5q V3v$y;b&qк>_ɻ#v;7ĩ}O)s> `{+FuĮi^t_igRr?ǖbXUo7§eMڝX1MxHt&i 2[{0k;֬60ܦ۶-<^o:>e}FsEY9˷㴏 c-JL2>&7|p48G|VS83蔴WWr_rg9+> xLcL ptgO*}C #.Qb-ZW{eލ;` bol=, S~Sr:Q|wuֆ.`g,J6-/"a]47+b*tZ%Z¬crҥ}7 zu=HnE5ɪmf?bM{wgoLt0otvrcڰGbSocoIa{@|{qs8Jh $Z]ѩw3`a1n#n3?=*#,K# e@wK$I<04=4 '{'{{l4禊5 9~çc_3qLhw>'eUhBw /k);me<҇ `Z>S Sel\uMhCw8dF?֞< GJԸ}4?<zoCVt/˺O@Q]r,j30wy766/Th{KM^D XhS_?1 .~ioWScն'ذ$~} !e 0X2 i:M{j6lO׶Џ%!pVb k )iʌ^;PuKW3a;H#7}M=y7˱ 08ׁb }mD{J3cc-#O `La`0AzNfv<ۂ[*cۙaN1a:f~S KX0ku# !E Fҹs_~cyuQ۪K3GZd?ǁчzbBwP*.բw7]KvڛŶ O] +3r1>XX\1_xL)uYeԞ{o]mN8tsc)3jɻouQ0lr5##KGk 1=WcҸY$0%ɿ!ZF[M*3o.5jKxs"sqv=yJgk:1lW[o!(eyCI3]gUǞq mcn:W'<䑐ڹ@{Ll>iba]lf';)5ǝRp$ ٘Ya̙;o`oD~1wNp}w`&trأ* <1_чz]~>1`=jܼNYW `mLWH~W=' 'E2vfy̛MI|uKxVwYoUwo [w${B"U+u{nx^{W< m[ڏ Sg:5h{췠6oa'>{Ɍ\Z3) X[hJ>Q\Y'{3 v~"g]}^-fOX˶mrbmؚe˷Wn9¡'[:(o5p^*tA~ִ=>0wJhD_vʧ֏Μk~g,^5W_@E T$P@E T$P@E T$P@E T$J 8 {Xxb}݇Ȝ캖k\νp`>[X[pi5-fK|'^n}99)Oߔ)(GgyC8 fav^'uq{/@ #69s{[;3{kl yf^cToizJW"r2]g.}ku[b}*-nw[{YIe_)?rqm"'|fP:73z/郍6XOI]^VXt>o!ݟ1^楗.Ni fh3ydGz\~KXxgѥս'}w}dC75\\ȳk#=;0' =]{] |c}ܫ=^Y!.5{"Գ?~u\wLL3 v/ŒK#S'K,x-Q/Ta|͜ -\Hwa 'b0`K^'߲u9qLm~<9KK ^SǍXgxSw(˜хP~78d} јg>5s?Bf&v1Sm]+7XƐ_7:;s)9]l&uK&}7a=(xP/LٱWk| [ v% c# =^ d]o4!B QiI)]Bmؽ ;> ˑiF&/H.yr~=x>QX 
o|ʙi+5!~qOdwe䇼\;YA0G~݋O΃bdz҇Z]ִˬϜ>V #Q2G#w y:pTO\uy&e ս(Ie{bO8I]ge2#b=3Li3ePjog$ydsa+ ED*_xfsno,{w~yv^-"̞6_1yrT''ti2b8/o5 ڼ49_x.Ids27ӝs?#<}tdF/ 6͕|ٛ{nzcyW~%3z$\C=|a8oTC#틷Z悿 5~j[t+U̷mpc(p0&t{T-.Hƶgb ľts@'H:sD>,ϰMf5ʹjKϋ^+Vc/ã[Cl{˔?#D.0`?jox w*_oh~ȡ.zg ]j(Oi[Zޙ;v{+zx?J,4bRƵ/ą/H: _kY6_x?vgn<;Gc 1̴[^YMɿcE8?r8aymMt>ޙk:vǾ )x^1-_GXOlhS:mQ7GYaC\Pt"[ ?Z= =̳gPO]9t/,;:~3 spdž e\XO|#tVl GsO*]?0j9U}%y-~вbȮ?I?ɥW4rsÛy^{1d(ϗxZtvSK"y S\\jMo \sν]`:^/A ; >w(?R}݂9g9NzeRs57M]LE[ yӁU2Qyܫk`a=ŒL{vii?Gj#uzWWh/*j|1L׿0F íb&&(}S74t 5DŽ=m8k0mqm܇'Nګm?כ}L[--[ҧ5m샛._,\]k}*lgXbaz˭R&Is`pgj~c:~j8i)/SkG+ -ޮs}B;ZYNtq_erxQk}æ]~cE T$P@E T$P@E T$P@E T$Z@*W<׫V?#fj[5ݱzw>6e's<)rN?zWm>g[3fr9AOPF\mF\ݻX ]7kCG@؆y#~p~%/ӻʹ]T>xpoe ?,>||)SunpH܉Q5-:=ƤD<3oD|.7`l9}qO''E77{X}ڬeSmBd|k !a}^(%N=7(i>!C }J' W'HߚDx)!)~vĪL̴{Nՙm5TJfF_ggN}T"Fx>zDONft(D_}SCo_a&GŖ>/=X뽯˔~`ϻ޳9xw)ri;Ou 7ؙћg?<(SUմ ),}flϞȚu*ٸDHbهP?Fo~ګ!~f0-_`iVynHwkɚrhScdj6dmHEOI?}״~|/v^%^$;2pՌ7ڧlO_K[ fڻόsч%>;=K_#Y'~gArcy"r 4lriO mRЦlA{-开,vrX]Ӎ5o2gy23e-m@iq ~Nx21 mϿ`\_Ƌ؟OJ$sƜ12aYę3߮盇&mC,+og!'s3^B؛g|}^>/͙Q% f#7}+%mH:ގ!{gq&~kSho>7>6hKB˼.bnjyK(2O9RĪYKG'f~̏L;ş3b}/r.L upy7Wֈ mw&#h|t{iaCizM{\&-zXso]]4*ygxf_XV=`._w>ɾnmw=2}V>:'i5 WIo&vՖ2-+Q6fg^^s*j`{8C[Bfl-s6C*藮/o/|DT,S "vb9T9/ɤ}KC bܾ}|$Z9(6=wc-_9Q e턦>'߉Pv*"aQ#ڑЛE7jˊ2@}];M}HO}mS|˫9 ecϒuWhUIyzi@ܺ` >u1tEYY1b>ٟmP[:GKK{L;384m#*&ʕdeGYc =#O.̀;b`7g7O>Nեw+/6|$mCv[XˡѡH̋xg۴^H9+}#-,wJ},y?M eVUZ̛)nӛw`M[-xc~!?EUG7EϛvwI;} 92k";HYQ3nӶ̛t{3MN|/|C,r|6%?oDGqvM]XjNs6x[}c=[k.$TgR~NȎ1@}ӾHwaA+tb4\_M 7U iع'sRebZ3+VhĜŷ9О-7!+J)4LХἥkxęu/abpRDZMW寈l:;|I!UFxMy_U}׶jsӚ 6 (~!0N6 #vT7cKHrD .imKae _|B5pzDvl2&|o"u;-x&~3׺7@{sL]>>sA^،% 7=ϵon? 
],}C6?kls]3X2?`4˿jSqPʯ_ ߤ޶3ˏ5$#ʺvwGNV)}[h =(-H#umY1:lǃJHA׭xT{cS4}:3}f]g(M+oqL7dJ OFn`Y.|'P 5b:t|wjlaP:Iʗyw?83.Qs5{q?M[fcQ-u##y|WeDgzcJi.}<3|/^Lscf4e͋1f-䑸c«|5&s}}U5˺OcjOW :̺;̡(N`*O6?x #DZ1Ŏdսh`gtne]>;gԽ<洺7"t8v'ϩھ/C=Sܾxթї9;y_bs0[xb'6kS[3gCGk@~rl(\Ȅ:Luq{y/oE}<9Jx?p7RɆ'0a/D z >`?>]=I+jg|'X9G{IS(?S;H.NrL[2bX~5>ucߪ=q^JccXÚV~qJMI5˶g<_zL?Y,38i|Oϰ'xCtg;Ji[gwFSaì485b(Sս7 oI`i&i݃}̗v221Rh4?䟬+M9&T9vX0nvqRh~Pxž5f䗰',=g)̣ʁmNJx"PnUSȵ{ug@W9uۖ~Y @{g,_ԙۚ~_)Xm09mP|]^n^_ )|[Bl60[:P2GՕ]4{=bw{w">Y8jZlh0㷾sR{95 /%sqЦؾ/O&FTۓnlx/˗:*35Ϥz>&* 9 7/+wL)k}aݯ)܏xi kmF&j)Hb9_ͱ-g&8^i/ה7霘?h]{χ߄q3)<kiuv겦o/^' 7~#va =1nIRyg{~n>O$ŶӦ6WUq/qTi̱,~E^&:c# OjĄu^Ɓp7!Ȳ)Q;8lXGSr1˜KNe[#~7_ޣdn6իdtRgy瑼މgN/DGLn))d+{oycU0}Yi{Qf|5>)8lg,ӱtM{Lcfq)=W7>fb|w;^[3ɑt1<x]GʁijM,`^Z}a^_-݂rꀴk(/:SG\G<0/5üK^hkoW"'&,cʇwރx>{n^ `Ön 0J KzteHA[E$Xb9ﱖO?%2=ӏ0\.:vG uy6RX9>Dzlj1eǡM걱3xAсS>~?cGl KRdO9$Bn S ^l2Sˏ)gkzl]c:\K~'[Y&ZM:wۼeIL[`Xn${|.9QvXi:3u숭A]t1Fj+@ݏueږim?m=7H!n|O5ce:#f)s G`9tiNIu۴g|k?Ǻ_ZE?]k5Ne'l+NMtch[@igc-W 1mBv ok4r#j yj1>um{ERl}it/k5;bEL5bH{tXg:2/΄~:tswTez$s`nB7)wH}װ~l֤ )_== lUU+ڢ;ony#˺ap{1Lc*՞)~ŰCs;:.ӁbsG;;e\/1l˼c}WSe.>%$O5Mb ;C)겎9gnsXk`Ì nOpz#^§ߺXlԔ]6Stlş.ؼbݞ}\^T"E-g6*' :_>I+G3G@og`t 6'G[OMA+32ty稴o';c33O|%+b]>ZYf*m! *c{aN->JҨ疓YvG=498%]BWy6)XJS5ݽnFraKۦ7wmݏ#{<|.6?l?6/Ǭ =3Cs3"1YXW}s'5 d/ax|2X[=7WɦUf`h"m?Q 2~B[c E?6EG T\Ga϶Hmc'=? 
Qp"biF`Tͻ\<;}y\?T*kػ+ڂ=uKuSƏ8j]⼴)W+a9jمψna.Ll-˘Z$|Ȅ5nsk=|H1P9O$d#|`oz˕QĊYC?kLcSNH.sĞ,V'Ij+gH#'K未%uߪXYo>9:e |eƔKUf`R$9#}N#ﺏyr5G\`Mںkk)NMCHXEҩk|vk9&Ϧ0oZ;%GIt'ܧlõ2^N۷QI3\( g9O1M-ZhB[3djx/d_դ AI+obPohXV]?Ҕ3y"ÞMbC{ړ5.Qi[qDr%Oߛֶe|:nJlA]nA!r^O }3u)ɞnE$7OAAR_{ z(eݽ8qSj]W(_93mK m?umԉjUU_MzFq4ݙ:;~E_s+=ۺsbY`Jqfɟ`$#v.޴js"X _ferv ;XJ>{c <cyBlf8?حr~Se8՚}{u1;ʳ6?"sy<9:Eս-=^Ϲ҈q|hqoC,L,KG:y:<՗`4ZP>HnbMŜ&V濟#7|WYfyռiÀ&2FUpWX|,3TUu|_ӓR';4il9|6̑QEk I`ұ~qy"#˘sJ2Vۼ.mw$~m_3N2GC6H#֠xMI;y'zk-{ZKeAeL֌PN]mؑN3k>'?ԔWwC=G.>\TnTD;VuJ0#Ddzouʘ{ʫ)eDZIj>Y/|L6gfYzgm7u`7Ne\Ih4n ,券c`b%8n\ R?17xe>ˇGM˝%}HY*23ʆQ( sq uUlFέ2~=3}{zGk[ k(y/`]ܳ s~O'bSWgv̓'WK186mR`G9ecY볙> cuPQu}wSO)̊ՈMxV)Ck3+p <_"'֗\} =Ưy pGUK?}[86"Cɳrж z9Kj뿥5NLJ\ ?"I:NaL9s(Iuc(=eb3OTy";R҉(Vv?B1ꈦ5o,O:[{ Y+UONtp)Z 7oa;7:y]褟gr~D=..n_3fz-?s-f?"1Km5ٮx˄vݥq_v儵Xr3 wAl)9oXƝSOGm?P|Ksg?;۾IY[sK}|ۗ>{:`lOм6Q k(_},|gD'^seM0k$k y[FJiK~k_ ޞ_vkxNyp5-x~XUj`KoOiE^_;L5\f:eCsr_,ա?}~ _|KsDxE>g>Vf46۞] w2a#H7kgM hˏ,SbzK-hjKM9Eë{_>8mi{uRF/om#r=R΀GgXчy64 $~ÎcEhl!9g>aVidy*t} |UA")IY杉iQ{Ze*TLQD4bRYd&QBRqL@n8(8sܧyΙLe.w9=sw;ޕ`x`ln9cK%4[?}s^G|m<)0Oy S`<fK/.{ѥ~݇6ݒOza #T;-gx2 ܏uW6ViuȾVEUr_~8m֖ xOT>tu_LtbtlAq%a$i=l\a_|w;aeĶ>5c?+{FݓοJlض).|>ڸL.cBc|:D\< "m/[XV9s-(NL>zŕA;{{.F̀+2 +xfx5k4M{RbKްх~)}K|M(նnyڑ3\-TzCzgOCNJ ?gRP9 +W&Ǿ]4\52R._pmgJlüNkdڍs;ӋRו7̳$2ߝ7 Oeı-_-͈Ui{qNcSmLhM⮴7(VZ_7{ϓFw[d$ p<|z gZRx?4}GL>+U[|h˸s_ztR"w L9|3CC#op~/}v[+_Ӄ*GGbȧU|4#ɰ=)tCޝTߕ5OOFCX7΋]\#[>r~6bM88 D~+p"I::ik͏ 1`-'# Q4ոD:T_=pcN7.?ζW?xo O*=o{|8UA74cb\,4̬]I`>52x7n}|o%dKүz%b{\ ?+9Z*a=Fq]:_e'tsDENm)m +/4G"S)_/<1Bo1Ł#dCٱrW]>yF+x>2߲mms[%kIzGW2:ڽ\;VQ[>*z gJÖc|{ɹ?奄Ie~PgNͼ|wmN5"F_[şCՎ!Ϣ-#355%ZgHOcٮ.ҵZsB&,mɷ N\^X'/ q2<,/a,-C]ol؆v.X%ے+խk"|U9w1\駣}qoL]nyM贷wWu:aÎuԏH[[5 ܵ?q6giG/ͪ<+-*-φ׉ؕ?oπgZoFʺ,kcZ9 ㇱ [)yg ]HWԿF㠛M0O%O>G4!oYV=t<W/bC몠Asz ;k/>[Cs?C}JkF/11Ʃa=e3sԈj׮T9$*nC~ֹyV|SNe\_,˜FWi9|: 7μ[ @ZT+4Ob> qP8,M+BI1|F«yzO[1Xv5 |_OyϬkiVgb]<7aݐϣJk ?΀)9<t|6?Q*f{!ΡթrG1 R >I4Fq\kww |Zyňˣ,?>9f_?x<ɧgp]K~l^f/y_Mik^enq7uTV^ ^3SLW_jkzΔ|? 
Tgj7Jo+]}&vwy 0Khl'Uf=1_3XZN #[53WD徯q|}#o6#:Q©>!mhz)-yV6@7Yn--X߱Nj\u廠[nfz{}YX6g`#|IlɈ'b}cY#̾[׼I_m:3cl1~\sp<:ٯlX'6_ia9{$j:^] Fvw/D6Xaty1 7bf;Ŧ[fu$򵡛V"~q Tǖu8WpJeQE/"OOۜ3]~eYqnA7:[ե;5BOG)NRpD8_ľ1m -qX+=Lwo_3[-c朌z Vϧ_.L7J>|J1]&FET'nZϓ;v'kfK柇b/mIڇN wEc&p:\/x'otZum(܎sL? ״~sIs AE!]>+dϾƊ8W:(cξA-"';ģ{U3]ʼK֭`&}mažXuҴĕk=\zIyu_]yr׷ONTlumP`‰m5S[>;l;k\'iuPeo/w~Յ⚚>-koI=ƭeO^1q 3B8xtkJ. ~\ ՗~?G92{jCQw{b{,!ptmkf `{ mYq^NO U?[v#!ish'ċus|U?aaEm֦+s?}H`sX[l&I/sZG$2\%M:9L2w3f~N"RK  ŏ#keSmYƝk*+gbeSi~6~19ۊ/H99=#MO]\[_*3ިx]dDE+@+;zBB֓3^q'~'wgRG^ Nf4rYtAgL=eMSUػJH.{9LYh2gÂ6{-k*Cڙ-]l>R?o+ʁ_䅈WƻrCڨJ(u؁blr\Y.- _M%V蕳ck:ް|M6?|.$$ c$3\L_E7KJ?Ocip\L|^fLUgnJ"ȩnէ8p왓8Bovh#,wC7M!@>q|`(7̄}hY!J[ړfvkn{spu8p9>8NHs4F,b<>ä́?}RR#%ӊLj<4&엫IBxI'LNmTyĒR1"">D6?gT+]>>\=F7keS'N'-u-ͪ-Ti3fm3X~LE)G$}Ed߆:/:sWu,P8ϷAf1 vpNs$?D?rM ~"9/; *\{p [ʌ3c&g:et.7+E1:t/LsZm|.e}8{{BY:_M7-}6la|dtכ_9Eהּhr܆ eF"}$ιzݒңspoo}y`|!Kdw6qù:}ۆ7'E@}@o[E+ðC{ַC+m{4}oHj}-|D~Q[ ݉y}No]+K`[1YV~[uؼ@ⵄ}sqޱk?G#rKZnM)ݴ5ǿM#X˾TIt`4giX_Y[?yx6}>?`@wOo3u/b(cccoOUn5v4a@p֛dMhZ_deqpP[.ci0owěnYxwq"<.\?cY6[~:}}kggRۍ.$#΂7p$gb:su{?tnPs|0!sƽCJL~l8p ?\Q޸vyE| e|%4V70ʨɪ᪱p);,G?5BN΄lglsb%D]'-0ow|h:xhxO>?Oü;'g4b"lo緔_L?ֳkwW[ĜG^qHWmkҟŇhbq1)qW7@ϒV\3?|϶'W+~H=%U^xxSnv\m -Ut0Rfg#Mk~Fm}5{Vb;&HDhO^L{!e1-~}X\}[c" yE^9>oPVV] q$2>^X/"I;Êx][X}c30Z;GO913a"|"8]KG bLKߨY؞39 Tfi"UerlHXH)߿de5zT[noxƅ[8Te< ]!xO=z|B3Һ ]Wӧ?>;ujuaE\)Nb= ,2*xx]o} 8ߘs|}FAfws?Qxyߓϧ1dz]t\') :-*}LJ1i|>ιC,_x;6O'ǽjTI>^]8O1XȾݖ|KS,u0Y;si g F;k83pqi#qՈiI%M46F>Q Ē͟?|=I)skb-Ry^/LV[SbkN7^36Q cMer&(jki"V~,^9.{.잰}Aus@\u􂝤3Fu!}ršXᚎ_ ƻ)Z6Ȗmf;nc\S-WƱ1ęV3iu[ ssD<{{0 >Δ E7~o|UqG=  ?qkbw:3\ c=sc/[LS4_=q%_SFXk~Cg){QF箘Hyb^9cs S^?}ogتdk |K/V7wwҺ5Q>uN۟)-0{j{, r!? 
˻}#--(0cCo1s_ĖBω;R:<1\3'KRmX?nSoSη޸_'W>rZ=~[Mr]s+3U]:_y}bM=v߁=/f|쵡>|{wTj!#,o:IS?uSUkr6fc/s:cS0^a- ϥ;`;{EZ1srXnI~촮-kN,ݖ<ҳs 6*v^6&;:_Yr1V@*< |76m6q2uv^#h;'k Cu_ _ںWT/= q͌+}WۗzwpkFXǃ}Œ./7UbkIKqwm Ol=o\}H./F=mml#ɵ_7>&v>5O:1g|~.?1Y'W Kct}AmOXw7]qc?4|39UGwU[J rQn#|fa-ߧڳwU>Tg˒.=Fu`c2l E |vZrV|1'vikImFvi&z-_tѶ`LO #[Ά|Gm|8ݭŸsS9}X]WSDtҷov].kQe5ʼt4V_{e|>g{#u@[k 3 WKI=S޳]H _8cBG*Z%q4$V7K>5\lk,|fٶ-=_ lY`l x\7gʼpm=ىEo /NN~҃ot>g-ַKYko౶˜GOs8SV'a䒦ʸӟ}m׏ϱc;(ďIr 'A[HI.v׍LW-L0-E;wׄqKPP?Q|6Loaq9_1ޞT0 ik k@;s[~x"- ۇ_A*[и:Y߻o{߽﾿`q!zlP;{_?Kvj~+͠"P௙ge.j͖G+1a\Lܭ(nZ7l}U1{3xy_%˝ۧξ^nwVP^NJ%M,{ֱ[mRsj0.INpZ>>֒l?Yx||.w`Ofߥym>P^p+K^Q-;,]b)%+~!grI+<$sqg[ʛB+Y#M`*])6bEE+H|I+,? ia82,5޳WZ[K.1~H/9ΟcyVZ lXWc(~?11uO8v*leJk8ds$x =pJwܴrCDəܛO:TO O 5[YGtԟ7E{ z[W>^i?O磦[*s%=|8N] rp^:66J_(rǹLCC}4}FQa.@r~kO.w:~B~R1myv4zB2l̺~ o񚴢#Ơ!7PG|);C3M<ݵ%.Ֆo<2Wh~9f{#sH$od}3@]E\Ĺ8@;VwuŊQYO"HUF9᳅;*C ޕ; ^"բғ_,͖0wթlă- `u/<#ęCQi=OBg?Ë匧#>]7n{N}Ϭތ{W>;ڿ,?x;mu~lfGm%ϔȭWqò~K><&X|gxnosdf㭗 ^_G5 7U(9጗s2MHx2āxs>"6ڰPl0Lj7zᓏ~s8!&u`q'&8<6^QMb4| 'Ỳ-;J5ݯg p~OtW_ 廻mw>a|ę QÇϬto]ğeL0%Vp=[OY}l^_uf__1VO0&33=lN{ 3I|Qqg93vx332Qעx CO0^ }YbX3҃yX'"m6Ε輬թf6lu ƪdύAׄ0V%]X߷v: O##ӗJ;ަC_/4S8~=f\lN|$F&п`}5=}Z![K1YеqƳeNorC|5ѻed ~ׁ.PI.T_74# ߝzxJ=M`oڗ}Ç{r}w:?+xd|Br?lZ*y™}2у:QX[+?^wNbUp?X=Łv%5tzqG&+wj~KEةs؟ߺBE;y/z];eiݩB\cz3mʞm9!Dzxiw=Xm_gkẂAl![x뵥 uj0^F.pq^,~&&:E"S}c7 >FK_x\fJHd'KSBфX #mCkZ!fuXW13xv߽g@ǝq7`4a.rگCCQ8M]LߌoogŁgc spS#Ώ|F"oĜa-jyK؜1.f_|i}+\wspF4\QnOŇs,BҖ ҮzOLjZvЭWW!vbB_qtއqQ:Տq]!죐V+OiBܶ*/{^ޟa~9iƵDo6u3|o0TloG+G Tߕ>-;Ӎ}`b$W{VF&'|y "޲ۻ|dx#"%ՖykP9w&2ѕ8o_BC=M=-}E|q鎱2ڢke~s9]orM_~|Qʵ"?Eg|1`q:[F^ǘjWսR"|_+flk4+~}?! z2=+a#%X:Dߣsq1K]xd_wR全'[oS]9me$39%`y!~qz6[rH_{B=zsoHa Wc&N5]~}u[a3t,'Qdߧv{?lx~b/c9NtMpw<)0Oy S`[ <"~C{6VٱτE7ye ض_]'zlv[{P9߇[q]̵c`C\t׺#TԆ&8qM=/2X#\)ƻUՆ@~x|c[C={7C6EaRf^d?՛e 3/ٛ;:+m)q42 d}p?I 3؈-. 
?S!+ҭahRK3uZRClfǍLڵ>p}1ą02Liz\NSkyOo#>{ ^'ftSZF)m ?,>scLo5%ėω/N9 qfag{߽cua^;+c8K7/k)[> s}#/'yk2J疱bƏG9iG{T]t l6ZNs*S' KS3%:?Q轙*pҵ+tl6άYvQ矛TOj_WL,όcP7eebMmD[/s&˲WɞPE1YljFl;̲Q&GXEqQėr`ylOoMWCo |* &!:hl,"ě)%# ro͎ѭgskz1DUnf>:OgɲĜIn?/F'[D' I;{ |}ô`>2/Dw7ڲױ cRwnt8Ql !0?ر׷c-q p~a׊[XZ,C)aXvk#gG|[>؟oj<&L\჉e;6ʧ"eKF'Syf,|_KLH%th./êOvq1RǢxW6tlxm$pa#n`M/b 5h86ZǛx>vub/ 6t˾Kw%8Qh}9[`܇qqZѯgf|;142yܢ+_^_|?c(d_ hv VΧgL}VF 8X8iuY˗ QI>)P&VYZg$ϋn+ᶶk37y7?كaP_ msݖn:tqթG~v CMn[\Jz_ݭ}@f>dL|>_G}ֻk BIZ&vܝgYyh1?rtr3tNH8'gHbW>ߟ֎=t~b rw7mĚ>׏+֯mRYo~ɦIGI1eO~1}L^Xajz!?a>l0rꯄ8u5; 7yn)iCX|A(tšw5yrė[n~||S}L(]8˲ONF+L?a(wr~3ڛ2Dx_Q׼ 3yY?湠gm)ߙ Go곶Wn3&N'sߓd1'Ӛkb:@|g؆҉;WĹ/jcuJXi5&;8ei{rriM#/7g#n+a{q5ifr|Wm i1'%soE_0#}b-89~RE%| `~󱯐Ƥq&=T87_㴥'4#歍}fmJ_e>N\o'%?×Yk|z0':Wh_鶲Ѐm1=׻[?օk8;%N 뗲E<(|nrʽ7^Vċ}]-`_]p~iy;f^..y,4-_vǭL&J o|om7ԧijGS[z~tugw}G;=km}8Z\αmb-[ a#ŁəwZsk-v(H7 SBzC. ,bnm9 ~?uK~ql~51]ՈEp{JO!om|DqgFsҧ9(UקeP צCiA)ׄɞ{ƶZŹ|A oʻjq&e+qr'1yqml4"47MCcXp=&Kħ9=m- ǞYM$΋.&NN$l/p)xx~rx*37gGS'~!n*hy3[zJcPdReB˓vcz1ltE.k]mM>] a]ChQg[(/OyXO:9~c-}~/;1}CS7iGZkb<"\ƈcvc>~G]3'p|uDq ڕ޴f pqڙko2?Ԡh~g/"> E+98+:56(b.5)蓟>1Y / ϖj̿ vh31^|#%ipEٻF)ܥof|YZadH<8;,*_d: OCܜ'+ ~ c"b3\ĸ G&Ď\!{CgW ѥ٧$n#֎75}وRr~_X}Z_<1t:oKx?L }%?o2VS87*mK#!-uxKko$+1wN>$g ! 
_O}@KQئ85+ ]^$ N&&+բ?}1\=kmRt|6Ѱ?Paܳvvknk(L >q˭\Ѕ?{̕ZsomMac8ck vo=K^zCf?™xP~?(?ɏKC)<+g1l;PHVlOV&0s.Ɨ"}:ξ7D<}|t.)l~¾/܎[bw]1Z[-.98並!x]Fq`ߚf?6`7$<6zk\vmrqڱN㬓 fWbX쇖TZ%A]vΒ>kwJeD>UmYo l%x,SlVC{؇2g} +غׂ7C}5S`<)0Oy S`)--')Kr[?qG5Yyp.(kKSw2_WW-6eWHßk"se P,]*7p&#Vޕ>{jKI80k}/9|ى'g˝~#:z8SoG`o˜YJ &K u@yLX٣㚑`2\C+&C+do 7KO{ŗ#8EV| KFk?|Q垇mD"%@9؞~u#kP3-% G2zF5yy6{ğ2M~=y^sg}."v8g ԞI'.I`ob:o>α~vKXLup@O(@Y İE>-ƺ>2ʂh]Z;<7>#N,R A{>7aΦ\3R߁1YwH N/!4" K3/ =ٗJ..~AxͲl~|6g3}`GnkilI:gc__}?D~֫͵Z [AdsHl)-&_lG/}oݟr}\B8wtVzJ 9{J]}G'AB~ L߉%e4[l쭨s^ۙmĵ~vO|cO:ɻ\M Ґڙˆ|um",&[z%jߋq.bLS*ԣN?3`GrϺpelؓU:dn3$mXIcŃdPjI _3x <8Qp" S޾rGpu%+?'1;H+\H7ɶ*oϬc1'7N{!>DcO]<'!}y{0r\}6,6q&@v=nEw~ҭI峍IVi@Ww#nO8')A%Ie6ھodC'HݰM01V\XGc|GaIuar\:$Ùz;J7tN]Bp|i5dzv#kq40w%;vH8Z5;QQZg@Ε箾?Noϛyr >V 6ѓ}W -87|2,?4kpnR٧LXYĕ:0&u9T'}џ:L'X}Vd]j^}%$_&-ߎ^>6<Wǭ0]/4g{`mTah ao}S3c6i넡&;yy(_2'q F9>v;巽 2؁}Gܽv;9#quSHɾ#gzJ\`7\v6$ʼnX嚁=˪ZM6ܺ"If]Ͽ0ٻ.3M>ַ}urF #Mfr-kgΖrSsF/_2,Ir@얏)ǻD|_loMW| mϪ=}@Yь0ƍ<>,c2y#|OZ3Y5Ao5{Ɣ1uO^-{gMOV!8U2 )Ӝ^!>Vmkf =zY,4&q&_I{{℗p0 ;g\|?;;a|gc95`JZ'm '4=Ƿ;q]_0t~ ѹ\3X g /F>ěgG8eψL.Yթ ?>8v%˟/zKu*g<lo<{"'\ɜYyouV,om3ŷWԵ wӰ05 J5s; gF'yFY3 eGt#· -Y`G Wb-+fՒ7w#5m|;#tYQ҆C9$("Y5yV)N.t' ^o:;~i]wZ]nʵ2Txmq%3^gªmּce]0?i:׈:W#!݃&Ʈ`ZpMgs׺X<mշ1|5^o؆O%ӋzqFAYaʇD8F҄F?Z韛g=IeAOZ= FQ|f b:(V`dϗOOz`e)knO<"4ko'lhAp^zVIy+w̄~֖ymu1:#ďtf35w-*p>02/KNOieb~}~{-36̚{y|{C"};"`PX^<ҟu Gegw5Jc4mLވ#8rg:{%ź̙^+к_ZJy:ZZ$SK%dfW᷻~GA:>8yhg%sB c㙋/`rŲnk{~iշCٲ̸A{]` u{T=KBis&w@noK8a=ԃxL#"nM ֘ ;6 8t`=_S}`_!k}&⌣~8T+sk0/Y̖OmXCuߘ:![@?G1=@O+⹈S]3 Ե~!-wmBJw/ewnn;|վNRm=k)G{ ˲vb*ڴ䧭M]axI`OmBkYA 7(~eE=y5Ƹpٞת"-=W Vs8K׫Yr^ޯOʸV_Sl )5Fَ[U<R<̹V޳՗o!Kd^bԧ:٘p೥?EߺËӳdz[ס N ZMwOr~ ӜCc18_!u3S %|BFs #c˜x;Kuy!e7|af/sOp6Ш]{/]=鯒`w6+ՋC|f2ždn_/6j{`)~l0X~'~rI5֠/{ t`\{7ߕspW)n7,PHta{ s1`{ė9uՙݥouC`Yv'_ц1+Qۃp&5y⻸k!,]koX\6c7d܇sKBVƚv'sjyʰ,ïgqqNyRƮH{Ү@a ëpIHg~gFqX~?߸rEwnz+"k*+@oD/ .?9Z&c4'|B?3w8jaݹ·M$NwT_]?}O#BrATCmbG0V_~_ +X=M_)}xoV@흈1k8Xg[%z(hGa%$ax-2:"̨hE"?q8p=.Y;k 'g^ =㕈};F=|rs.C8R^|Y[-ZQC%d\_֖&{EHwUN~_ش}Oܚ)E Zlgx7(f Kl"'@DžmO WѱXVO%tLDT_;m,?~[ޱt"?;a^+Ti![K6ERoǟ Z_PU>1E$}=k<9钤ŗy 6>#\x7_&3:~--׌inʘ/vbN"q 
zGld߹۵}/k't/Mjߑ WĊh AuR!=1tԹ*~qsk|~'z");>M疞oz A!\|Y~xa㏰Ya2N_ќ;E+TGy'q>h\;fgv.˪>#MIF<,y6f:­{E|7ٶV!$K.Z7|b$iqƌL_Ro慞YzF7e1-e=sq";b~yыd~З1|BF:ke:oCA}#Dc|7s"KW!+sFoM|ą8̄<3eOBlR)O|+h>/1\?b.]zVTtoLN OgğGo&~'&S҉?)+$.:x֖{ABs\ޑ7n-Xϔ*c8?ݒA Õ) } g!spk[ή?OC)sMLulLSVDt}~5B@Wi~ʄiGu )|sdzW:!{^(86<q#[/[wuB~`LIʱ}ps}TZ,XkuϮ-ҵ8a(K_«|qe~{@gzԉoEƃ%=MzA[Gx\Cm5责~ER'mWm@_mOV,K)Û8֯\qNumwUbol Y_&RΥ}`Mf/0{ <Z؛xw ﰾ'Y7_w"FgCYB:7a|l3>~|C+]R$ϦZ^AҨ# ټs}kt245z 0 70OM`=:Z}Gl ?j?Q~>^ g1uc[1Yc[2ԼGizoN͍~OECC7E\QqV^kS J㰸PWV ͳĔ|I":nV[q{1U)$rn`ʇ̙?V?5˜&8/ jOпƀv1Wvx`p^X}#m-MޏLv__?DTgب0{hZ}n5p:ZrV7xgb[>Qs|jg✄巶#\knq$>RqlK";qizts=esQ@ۖ{'+ƘL6泵l[ڰ;^oX*;Bz:~2i:t ڽ@|i|z}ypȂ_~B5WUl5SF+_g7FXF$ތA=d9xm#u]}}xG`He4uP&|&5m[NVܧoYq\"oki {Ν;i]8:@qYrbȩ- `ϓ3?C=x*-*=q^ڜ+y-W߸, 9/%mFOY%z8ncep6|seCXvBg(̵Wb!հuܧXl(8Ok˿P2{#δ.Gy0t-~`ư~K+]Gqv' C] qˮf7Y]i= Z]6JtiVϭjԛdmp0Qc׿M?ZZg;>%.mq@7$[G{_B șn/5/uVXOp߆.<tٙ-shrxa%gk,.lq{oiö~鲯[EиSYNSQʆia=}Sb ng|Q/^gis ҇YʉbDP&U;/h.8`|َb߿,q;8 al}5idd0|;®5n }\_E1.jPg)vܯF&W5mŹ+Ӄ_;e|7:63}=3&`"~-8Om|p2tY ,!V`M7=|*?" ~KsYwp"xDf}]ٺ tS{ 秊|/ .A<^Gi~ş!eom%⹨]=ەhEkUr 灺m'^2kM(I :#{: gy>8eYvbpXW³}2vbNс-^i׳y-ɸڧRZ̺r>dYY nַHxWԣ#Jogk_垃Qȅ7LJ_Ё}WZoؑ!䧥,'QBt5h8"b.ue^ &=V |ZfMMLs6t,b;R_`.g?߁uG1]]ʼݏoug2)CUŔ EKm\a݈iTUG:V>%}g7*?fۥJ~$b0 о|Yk˿3%QЃ}r=*P_cFZ8 ]p"\dM3LoLK+ַe= Op^jc[ i[ܖ|FβJe7a˹Wk.k+~ڰiDQƂXqk{~X'okVS=8~YbFwJ'0'>W0,n2\2B;8uc2HQv) h#r6qBp\#ø3={!1 ~۱IښC^<_̱930y!ϭ+oA"ޒ +vzKAߎ< |fl^̟'y+K78N+c4Z}]7ߙ\uϴT4uB>>k}LYݳMsʄ1ﷶM/vǰ6r.C&;9=MHwK>g^[Ga]qJy#0vl硇-u   &KjD,syKߠ,>`9^3 q?-V÷8-G}gc_.ߜҾ%oKD8рm#t9&<`W=es |-qpܴ=f8W P,+gA2G8^oJ́ɛ: ՖwxD^L3引?Wo|LETnsݣ$z K&SwR"ZYy 1|[k1"Z_8<0YWyV_Y5_L/*=BRkg.R£ugc*ez;^+xVZSgHL@W+8~m~rv N* $6l&ęlxƎ*iX; & 9\yѕp5 Tx:tߤϓ9XaZp#>oW/~ҫVw&j:kҼNeV>o=K*v+AvȩɃ+|U㻸K,+4}mmT*kx/!YX[Z˖>V~w#-?>,9X,+'/i|WflzXLm3s~{TiJ\6X/09ۭ`hfNok`a|[v?['[KItɨ+û}2ӄΕ7miw,ܧT'ǩCl4u6JAgz^!6H 3Kg.%-Zi1ݽ&/Ⱦ.4Xn8/j:k ]-TZ|n[Nܹ_C۟f֑+|kyOqɶ^sL[溉V%da^k ;?Oվs; Έ!f%ˋU/-=Zkgd{8 tp%{_ |*Xx++ ihPAhPAhPA݁}"7=QKU~u%3Tݽ.5{ۡgQTnT"'[fMoO.l إqfðm}uvX{]cȍϴP~/6Gur ddqc)${WGG9g~ ߩ/eeOqVq ~ԓ-W8{xutӪh$r/SדyK,WmRH;p;=1Nt.R}$%tMƧuۿis 
#%_+Iv`icoŸskyx݃E}Q[ʿʈ֩{RrM "!={ 7PƏl_4`htOOd\V*eq&K$s'eh(۔ ҊW Nq[6(c9xsb{|.3o20V&0 ]F/7f߄ߩ@@sk} 쇟Kz6lJqivY7#Oee2hG e#w[iɵ/_*܃ӮZh٣wpSQE·cw 3ʾ0NOK_[NA[^h)Lze?UL[09|tmE8Ie|6ZWXf`퐳Z˶}v#&se% {:al@}\*YY|!E6-/iI1F.ބc` JXr~wea&?+]| u̪p}D\5sGEL_]ɠZ"ɢ.8CE"uo`Q]f}ۅ5:gsK +ݲT W? Ou}όѓy/MU'i c3jbjGҞM-~?؝[ 񇕯9={N~fhG;1~YUn&R{\mW|9ݜ SctzUI,DM{7^e30 yCKI~=q<@"^C{r2_hCK{-\)~b+ҧaߧ&_A/‡.,*lد=zKN/Qy;7gK^l.40ed/\}Dr◟ F68vFKp?L-oܢ&n{;a;0?vo4rVyEPwYSi9} e[*䞴E..PkF霊l9ַ155Ÿ5ۙS= ^,+{}V> \ف}z. 5R r?'"(ݛðC|Z䞽FozGS3c]~R>{v1Z̴Úlg_jn:+KaSŇs䕌 ֑ï7|om*iQs_އ)᳏>gM8W,=x {~3 Ff^bbל+i<1AbɗX?? d1?z~ePS}q oV®ױMuɯ-aE k_#y2sS ~6!N#/5b[>PZ伝5s3tC9.ʹ0b*{ֻ({S>!-`{ ;5y8k?s#?᫓# 7N`ύ^W~8mME 4F/uq,)q:ol^b4ǜ 7{&W _ڸgA6z lyƍ/^+h |Mj`& ClӁK?Y?rޒ~iCe=8F7bYS} ggIZY |rWc#cȍe0`;N.gč= 5x_8^fk.Kş0]= U9!|W/K}xך>q9ZQc!W4.kc|Fu)e]߫+O_%Dtǫzi} SR.HxG2=2.[)fO>^Ǒ?IQ颿C!腳hf߱7™׶]_2u}j*^_fzd!;%>ZKoyn .WX*Z޾N_g+VG <*ymwަ;bc{W~"8bSާ(O7^Mws^:S#o!Ʋ;GyބtZ ^\+<],ɹ7nq|0nAX`ӯݎ}?$kqb29 شcVb>`/ј:1aJg@*~,< nh-3HQ3ٮ%KރX{Y:.yJeԷ5O٧$&:Eq&co#ڝft-WEF\3C{t3nzHq-gz73ܪ}I;JzɵТ1L  3yF|+fW&[1wl>kX #z]dXG-< s2>.ĝ^Ƿִ_;̵uUYSOj1`vW癱_?{4 -+zPv/+^+Bc R3 oSS;=<?~ب|Cl5ϕ8{v987+cȐ#N۹x@\O{5Lj;=O6:SgI|OrZ&0fV;0@%sS66BPʦZ|õ?;^q,ȬvG\ ^ʹ;r<GOMebNQfMr}t,V19v=qcB˕;{;1^Rќyr{,2W`sUo~OG/Ҏ/]ly y ~sC=IdnA̱G2SS%ʃnawݩ&?!|6mwgF_ kLMMZJ -׺9)s}{ݘ{ hPAhPAhPȦ`+woe9X&n\vϣv |'lOտi&GqRB˜GHlʖ?v+l*{؃>1||A*1ϤVVN^LN)j1ynS O;_!j#(6m"w4ShOQe5\_d.&`ȋӷxLz3_k[ΝO wu_J7tқ_?IH_Y4C7>ڜOn8,c'ΖO8w=Ԍ g Wvڄ\m~R] }^3R|8J}8}.-勅_x~˟VW'A¹7e^]8ol>%r"{i՝ZYv[s(a8;p;.kzx0zRlO̚} ?7߄lzY 0u6s[g/ys|~{[ 1//6Lt>g+ 'w,mDl;Ϡ/Jzm}0fu8<ϣ)N78GVgWD7=;rv!q5}D{v TCj˾xN8r{T0]Axq(k5ʲRS5[.c!JwamDY^mgڌ?a6眂r,OMsfx2y[ĩ~}tWJ}vKFd߻6@^P>dӡ#5J tqp'O(/9:1*%lc_v~'|,u1 :&Wv`;^ ]U &-__p:[w sF-p|n=GN^[[_0_Z4|MZ=Ga19|oNbe߫^Y L;{1;vb۟t-cR~Oo##cxLxKGLcҁoz';q#u~3gڦk9~k;wՈdP2߆X8!i /gFG3.{~,`܋.؍uo,Q޻j]9{i-b'b$w"G=ݼzr5A|̥OgF2r,=Gfz_t&)O>kML8Uo,rqZus|y'Q=cڸ^l:O25ΉS =qlǙgk8sMuexl._z\kq?rowZR^M'W.2.cSߙ6@TYeDqQǨ6.Z^}¹ 񋏍ӂ}w=m*+iA1r^ToUގpIxo3mQ}=q{u T٦^c\M֌^>5]v| X_ܤi0#^o=;;{pWv78u.r&ir_?GJW?ϩ3ڱ}1IQAޞpىV¿" [c_ 
ުoT^ye¾J@H%{q!NVX]˪x837}G1xeF_ae؇6}g>ߝ%,76Ni%r}.h>SM)韏yכYexN7G>51~x̰X|b7%37c*kr4o}ڔOMO]^+ ӎ>Y7oG!kq9HMEںSmL80xcg\Xm4(Р@ 4(Р@ 4(Р@ 4(QY_5[ ;f֛^-Вga7~짰]1&[gVo_ߎ1}_١{b7<'y%8G6ELcX%'~)JcS yzH|fїQS]?2!?)o~ex5#0E$jy>B?C8#p̨EL=mʜ~' C  a md -s,mi]rC ۴X^q;#gv]d:XIN}=euL13c|U{چãA,EcW>=Wu+*Kar0Wkw9699c#c5-@>F>c_0W1WĎ^z\gMG6on|O"0z >upIRk nNJ8{vv4V MkϿ<+[y:=\d~wU~k *(rտ;%aO7QgaqE c|%:agjҔg ''ZA/Ҙc'j+ҜL惬aY*vkS#+ wG3cC7 mb?~y"p7BK1:RKՒuvç}r%O68co{mKZ}f緘֒U0=qd>O}žO> ¾?n9EC/+̾ilAp>͌= agyR.8Q_{WZzks!raJYȿ(\CO#fΐE>fү lu7_.s]섩E0K([8F!Ɓҝ}⏘o 6"֍5]+|MYW !nzs?uqތ#==wϔM㯋72.wO 6tP:^et.Y6e?;:=Vm>W cd+RMIOv9!WU_Rb-k0Q^e_܌[>X8dXEhc[:+9!vE]3JخBLaCi0.m1tCGwƽ|2}q0m+ZlW,瓛{[=%yUW$c m.| #_+!Ok_ec4~S ks/>f6mx>^^Uﻹlw͌=υԳǗ-rb;t}\3j]#߀^De7MX\ÈfÇJb~\ط+6׍ydF?RڟXP}v'u͌1pǾ+ RN9u"igֆpsbqD|JX#`n2zv>xWÃ].վjb)bvp!gcT|&Τ/I;Prm̏WϪSk _lM WCG YOk9ֽ+jz3ߨZ>Pϥ rߋ؜^=8n/v>gܡ`݈R&IT~XͿat҂p }^7_ ~ _t%٧MuU./K%^Íd l{qVG|N=bfBޣRS}lYlľ}paw&=;+'=a fW쮼?.-?=S˻jI`* v nr;?ߕxj;|9L]CSocðr]uf,ifNwnsrIL?G;K{|CQж-Z7ebfηp@w/j 93u(b~uQCD: 4(Р@ 4(Р@ 4(Р@ S`[ߟ̞Hvzwqpݳvd6n >X z5@|Ͼ5)_ա2;kK>#={?~dwT+L%fFDVg5o}ƜB Oz?=3QۇveRuz(xz[q>3z+reey ^*6]{|f.7pxvqªWqz='R~k:#Rʡ>;t፣uvtl[o>?ecI8ᓓi>SxWskr{^gƖ9iWO-u0$90+!9sNuMmaw'傅 D;ǜyz/ž&RP^ Q=vyg;u/˻ž_0[Niy3%wkIߙY<~WdeJW')\z=zL^#;L ;^ƤbS}(xs*|9H_r)pk/G/|Lc t)xm.o()rg^Q[^/y;#疁]G0~1Wwx<gTx8zĿXO/:W&`}l\79|b;+pwM o,|R'ˌg6l7X8єѸv}Lݎ.ENRs?am!oFcf#µ: y탾t^{N:OMr,K|^Xz Ά-|)6a| mZk,@ Ob;rRN8QSu%J6 }sys ]EwA6k最~sҏ@"7y!~Z{ٺ7':/~`%u9޴*s!5[ v@`M$h43ں0M~E=]zm_۳G]S j˿'\KyVZK2"r;j9KXL{1\</+Kz//^ϔuV9֯rݹ)rC\G.N^lJ{YOw?gwJ}O<9FS~8I4Źnz99`֏~CY>> }wٷlc,ߨ_Z<~K29%2:,_Pv;&sjmlkR9c߰^Ew;$mK\5lcqߖ4D"OƜmyq]ᷯMWEWᾋ *!4P=/A1o3Eolm~_Ijʯ3:5?y }p*(cVP6Hpr`}|Md1Pqkq^8Z|זl?cw+ kj5W;aw_7ۢ}c9.|[FsmIٺq;"kīW &[; _<]&DL g:M3޳ wDpшs1įŅv%O; )+K>NA9ɳSF`\Xh_-\͎;qum[,EV?zsM<:o*`!π}kz眧>cd\6(G`-wf>kg/Fowզ_fӸw类`í>xbI|59`jESulnr3Uw6B3 f1lгXawוb: DwuP_0yt]~zX({ mr 畮nL8A\a7>?6K0q1[^q?L? 
ż)z>v ~xmR/۵w?n"q'5uWzYOp/L՞6ΌǸq.X`ۥ3:TWiԟ+'Kw= jxy,3C2܆|E_\rpaFȵ\%;ݹrbƶ./6ǐß荙3up<>leA|DqwŦ&߻* gl;3eiEM^ė0&xou7 'AS;30i1;zÞ5.度'~זHBlPvo1W NMFC'^rDM?` SEǼg ?W.-/؏{oWo_Q`KMgJ 9D\}+MJ>YBVY9tʷ7{@cN=i)Xܙ&hGj[~+B.da_tc|beCuc9i>ݼ*vԮgmt+Zrȍ ߛd%1<9yug R\j|f/2WtqRwo$-@O-2NvȏPPhPAhPAhPAhPA݋>Pjˍϲ}{;&gf~.]N*rO'^i7hO=m}?r"_ -՝㖳Ŵe6-C&؅HI={iϰNI^ay畱OwT9> >nugq6b9%޷ypO5s7lGgOñ+~s>=l=6R;^jݛ.ol {}Vȫ? W{Uq./>ndݟ) |Bp/7ڞm?> bK=R% -_RǷ^?0$8wl.="|5ukRJ:]k{XG,ƖV0s|Fv\wr}T_Ԋ3;xVwjCu:Iqc#KP'T=YjgDͦM]YoKCecNT73F?2ߦ~bCBf_lޚ5gL ڃ_D=X*uUņ=(W2y(}ke Go-Z}t_m}Mm}{"/t".Y/vt\m|߱p-zf{a%(z; L{ĢuKv3Cf_S]ǭ%0z{{y ۠|,;YOF#7붾>')9sW6c]q6Of*B{'bTt~̋%_$}+uqQ3wo > pF]`ͦ&=7ŜP?Z_b=g]1i 'Lg[5s֕M{>W -;ƃ҈E){(^pLpE }0'5,ZSpJzLڧ? }n^-^eψpP) ΈTL8G5eu'n7v;\s<}%Ph5~6 p}Ϸw]=}&.IM2nü{lLja]K>0PrV7T̿+ioǓ#{n87ʌ}o1F?n)p,b&Zuxsty:?vE]&=йvREya?'ѹW439q}Ⱥ|F}(szbqēk1 #nA{W3c=kOM=e_Wm!xt=i,:sǢu:Fs{E'|k ]ΊH$axx}@:Ш5sgz>BϾ؟1L_Tʫtم{; דG%[f[Wǡ썠^FoQe}~r#暆R|.W̩"TǗבp/*ɕgx:! zS;9rç\WaԦ{9MWe){m]`BwKڏm ]y1?~E0o=osKkRiz {>W^5a<^ 321|Asc w>x[ }S/uW?4T,wraUqq̝nz||tQk-5d$+yM|r=*6\@.: *w;5 Z9CfDL1Ml3[ƉOuNE]ޚ:c5p¨Y熲otv<swIWϫNud9|K:c\=5>>(~^|80]|_#|˙A+1o:Õi[/dA|_ mM.%g83j2*ђ,{[+JL~77i:Klu%N;@ ߦq?Ʊ8yyw}'LYw<S?u{iͻ雲G!va!p&׵wN1:)kq굧ʽVQ~d`U=+<B0:^p9XdTPo҉yo]iͰ0ek{VEgsZ\#rۯ(QId^!^O_'3 ;q_vgF\rTekSS_˙B׭; YwΖإcގ9܊}s(ÈC9s.;[2f M׎xf\c'ϳ{-:s ''xt͵+ 4O_9=yK!׏.6iΖOÕ b9ܳJ }}|uR\7/,xҠ@ 4(Р@ 4(Р@ 4(Р@ P`SzcϚۅRğ>'\؊L,5rmNfvwyW{ Ib'}PԶsJ֗.x.ݿ/?!я.8|j/sIwt}\_;Im̼ybS<$l+,C{#3ӑ#pif׻tەe_XЧ6ăx |d?@y0rZt)wE//[:7}]w,seMm۴ZY 8%W>as39o.=ߚ/Dj}>:[ v)8he6<$MF<Cmn'b_71}̄=lM,&L ZlCjE_Ϋt~#ů:ȘWY/mKjYK)Q!~}ټҹ  f' [3]ر1̮Cq_9#|޵|'W O5J{$vYMOs>RqOyWu+L۱dpYtZ<7ϖоGN;WߒܽNJđy+j%&9W"}7;)6{ vفvs|gT~GF9>c]o`H8ygz:3Ȭ}RSr6f{Eۯbl jr~ig}cf^sI?率wa ~vB=yŞ qf1}䍮52etNuhftȍa:"{'s"oÜr0mujz+3qKdm[ٮu>qd1e '<>Zjk)ǠWhTLϼz f cϭn}s#*] 6]GR.J?yFrFճO+-gñ=K 3yeݝ=b<6lGf=MM]Wk΅_-ʵ'MѽT<مaQOtWhH#}(kz%ȇhyMgRSO~u0̾o-ג9d^bmcȖ/8!cÞ[w9KP=dݾfFأ8BK"o7u.cL H.:O?mxIF̗^u֊a? 
\XPNfbN-qDS"ƸuOd\Ĵ[>Yjjk vj2+q?Ҭ| Cg~5kjB']c8%\]E(ğu:[F3:Z?Cx2 w b#݈%=9&$ǜqkdFeڕz{ݯ~ww y:ishs_wk_gqKy۵Ϟe\_8[܋%3fI D[n:>#QCbK8_+zfI!|u߷GKs{>S[y7[|$欛L\i0\\w#U嵭ǞhYe@orǘ>ӝNR4vZb(68XXlSk+Tq\_f9uvt=Զ:K+ߩ>Q>zR'|.[;rgյSWG.üVnݯ-?A*㓴Z.A0=Sv㯃]دP;3NuݷeOi+3mۜ%̦o4jtl3beف#bƕiz -ll l^h$)dJccßt㤦^FbaI]K2<~Swoy>#<͟h9C? o{7#"ޯp<7nl#udrx3ĕK2xvO{ps/ﴎUÏY /{n.q/z-7ޚQެ߲$㰶;xW̏]ҡCn.K oY7rݜZ3k /2rMp >{/e .=U:p~~XW)=wu,k2#LH0N\m~ӯqOWB٣3bU[~Y֞.Ӗvk<~ _bܑqHL8[BĻ\$W#= s6/&w@lH#?)6}yTduf.NGiχxǡZܦW=y$Eo.^҇ﯙwo>[sw`u3I87%W6|[붗SƳhPAhPAhPAhPA]K_[g79Sg1Orə {wFbs߶U9{?;O8v씡V$.2M 3%dž{sǃLJ=zGe*?&|™cWkI]JEܝz~? G#SOjևW:5ńZ~Q\yO3Q33؟`P{w6Q,dF')nȓ k _ȿ|ӈq'l'?g wĭe r6gy6ӁȿzR~*)srq*nߛ׹:[rsIOm ) ǧNvu>[Ǟ>_$ᤍM2RWj?Ħ9>yla%َؔux36M\C'm(ȟCQo> Wp;Ƚ[?8U @o 5R!3m';B<yt=O)-i}mVciKrދRǑ .5u`'G{^S,?z7ꋭԦrIZ-r^L7tDV΄?fꯚ-5lōP~==eo[h6c}ڷ՞0oAߑ8S yiT*߽kQ'}p'}cɫ9<~ҡSuY jXUK'HO [K1ݦ8޾VxO1\|Sgsu:75v}Fw(f~Dze6~^3]n{`+赺ZmxgŦIJOOzq ͜5%#kk^_;vvf~\76ݴu̸gXs|Os$-dFٵ k}nuQ30F1v߶hܱ/NO͝.'Yr pЀ\Y10e>8[3'=Wz\:\3q.Ug} lfl.uT:&}r/P7&̗7 1<2&nw1׺[<~CtӟFpcYs4zҭcߴ'~nWEbvt`]kOYU}g2KaG|BzT+SXXMt?+vԍ\쥶$i5ʟp4a|>`O\RW(\sO7]ddX$ߵ; (y|hc?x^K\b|1x˼yvAYԍz=p赴;Y}4 زmC19˔^nο?O4\ /<@.sڢA6cu]r_#+ a~/:=_o̤4..^b8'-Vȶ2zR2gUyF^QǖPƓW*]m-q熯]'f~](-zO`1=q>ҕm[*p~Yhy>y*E\)ޅhKOjٰT{d&s%lf77E02i"_|nxr(mxqߜQʾ5¦}Ճ2ofݚq Nc F~ nuftQ<в /:'`܅9Hϓfǻ(gCFPM_2pt>edr\?NXAp00oo`WI~$oѺSOĘrp-gƏUçO7*TLu>OM_?-z7AviY|̇9F>kF︧*o>Ό~َK1Lgo;p&Hl{}FS eE='~➈2>$~ m;G=}|g;82al>ē_yL>ǫέON~σgchG7 qĨZ>L!g#:'w2<FCd~s%VeįzЕ>#m/;p9n;3 {tU'^O79_ |Hn#/Edꠎ?R|s8UX[ gd-[]uN,LƃwȫA~ݚ9B‰)qtT~U*DMk旰Vov+4yu wOs]$>nrvHdX9B9aJx2<{S~wG2]c6vwk!pe3s('wq^H}ϖ2MTs se?>[ӀAhPAhPAhPAhPI؞,q%\Ⱦ$"[X=Ѳ`obvFB(h{{|oz|| J99v|[#Ll 7[3w/ >I ޾䈦Q[2|tㆯ;[הۛYk̝`l CwMm L~_}CFfE밍c Yow2q.?@^cC;(;Fy&c}ܦٻn닙zh7>A#l,v5Ee3+MNÅ\xm|Ϯ.`N#Ґ|Γ|6"?txA?:PɎB9 Xn+Y*W|0tmqy} .WlhFΙwX| Tw1y,Z6*6>Nm?w{/줿 ¡j_<Ԯ ^~'RKIVy#~|Ow\íQus({xZ jpf|29c|d94D+9OΌV}k gCܚ?O? fn4J -[/61QX~I\/lNd?ցM/6c3Y<__! 
\vL_t0.Ǘx &(<)}X - gpflNo6kAUҚyk>eײ6啅;\8=K0߱jbXΒs:tUxE`ȷO4}@<:(|MWRmxyx,n^ l=LCYOþ(~ݓ֬,w˼pZwK|{ywo,EsUNnW퇮 4b"|Gs5+r2ơ!,O[( .Mg[!~; +{xS,@}ZF[u6γ)yך c-<goGaʷ7]136 3cM_Ə7G1 7l']<]~~4As9륿K=;;#תg>~,qCٻ|?Ø3/W|=6ݩXc0@q2|cPc02zp}|XyAn{Vs ,OE :Ϩ;]&}D?:?~xom__Ò;1Zcu  ss*)0&&MD"gnڭ:vNعc%.HM }[xu#WO&7]ﯮqǐϯpF{R?{8̻ؕߑûAВ0xo+z:s۝ßk=V`脊yҖ" :Xb*ص;3*Ε#X QO̵gwl͡=/vp6(Р@ 4(Р@ 4(Р@ 4(Р@ 4(0`7Y)cCٽ̙1ޗ߭kgF#Lo}цZ8.GnȘ`SYaTİ>5slyo?ྲps>n=5W97T;•z ^ 0hgf>5 /ykN-ɥ@ N6~ee-9TvvNᯭS'`L2vk"u쿔S۶4we?<™}F9C3M5LM#%7~Wh<t~Nz/cYTd+c^у]z3P >7lO6Wp$_= ;eu ^x]|Gm2u.暹|y:f+Rx˯فDea)t9ς?o]_lC]QzV[WV{FPv}G ^_HKL%|ܼFϳZZt}#%r\ f6yo ف \bxuUMbEyeճ8\1zN7乶ᠮ uE-U)169IEHHDiҢ\?ZE *- H@Q/ ^o:$I;~'_{s{Z{}Ykm #G~׭㵗Gٴko ܝ<{:^Fpy0sXBþ\++?ђ@O1lK8p7igsz(G1>F{ B۩h`uJdU^mquZK|?VuD觉+#uI#&L;Em4[7-%shY~]WtaQO(u+X;"ȇ%z zu[S|1vnUrv۾慚u>tm()[?6atS)74vzuЊ@@liul~?3i˥_SNKgĆFks~3,sN:KD~7}Y;?>5G^6y.c7m8ޒX_۶q_.c;W\ŞWǢo~)~@KIM 2S g؎$[?z%oe~ɗfLO/lܙ~Z r#{ggBSO 5v?Hyw[ kU!Z'scҔszfUgܵmv^?xB*蘹ee- K{d [P6[Od߽0$o}l"s\I]_j4;inߚ=gںo2Fmc.=uژ;dJ#uE)4r=d[s)WXy#.6^^v8rҽcs-AzOEe_#cz笾DhHoCɵ3U :]p9r.\p9r.\p9=ڴ׻wuJ1$s-}n%ξ)ⷛ/:B2&ќ@#I{ޖ-6׼gM9$C'Fj`gWwgϖZbYůl(_xU;؟;GX-YtS]@{ƺWaq̼4UOޫ|ŒW} S#'ԩ;D^Ɉw"Ϻ\c+a''H>C/8ؠ,7= WHL'BrN`?g{KGyo=h$oܬ[ 0N%\S/~@$s{Ⳡz M;=-t8_BӘ}~K2.Jò>H9Oһ{Obr~' fzs(-@:ڜO#F1FJ,ڦËIso.#%Qe,w֭|v o1q#_A7sUWhcJIdL?XSzIm-ڧh3'w|%r#IimbOyH?"v^)w.J_qJč#'(<J8 ԏ3RCԝd~x;?Xr\8߁QS-{$Zְ虐uQlb ;#δ+O=t12rF< $g\NDHU;DW6MJSG[7tgHU~XG6\q`{fȴaTYGmv3c%x9y#hGFZ(#` xUpZ 7d$rSz6~Rׇ{ q;pPִrJ}VעOt32:Ӈ* #1mvF+Y:uL =rRzFu[9=Jvau ;e_A/hܜ; _n:mEuo]eK] jǼQϿJ?{&(A3t 禗5s/53¹eq+ʃh[Dx_3)4=,n`]moSl1 y~(Ƚ}Tu$F[Jߨbhwz1+Vb|!;MY뱲1qKu$_Qn'u>2དj:9x_>Ƽ M1>;F9>>'[45ς̗yAk1>>Ҟ6v,c\3TsU聁_;>[HK:+M@b7sYK37/s ;pocCE潅S%佰=q|SSc7~Tꂝ~lt52V}17r/ sP'z4j&8_EJ{YrZmto@1kNOrO( 2Ř УXl}#>Y(UR:Rq71K}e(g5~C> { g#yW#| TК)Cl8T%ЏGWM%Mu#gp\GM+rئ%eOӅ&A΢>=xŌ{z\CŅb>'u.ܱ|QG-uGCn+4 bkt]3ߪX'>kgBv7={o:T'M֑[8Oew8\h4Ǽ-һ)CtϺp9r.\p9r.\p9rˁq`t/Whكf%wOq`ߢ_.w6|~tіh0Ɣn(=3|~l_}VѾ= zvSAg#tlb~2~ƻF}M>K;au {`nӗéc{(#O=\WNSGPf>%nB|ȗi*p~G^>WxVES>Z[Wq{JǍi k:N$ ?{ڿW kzhijI?pVU:hT/_+qQf̸y%|R)UjZmbe菱ϑ4.iػJ[>2 yY_*ep1DMGIc>U}# |7K9 
rޯ-=r}GY1=z#_#mF?m>ōi˯oo'9n6%c1/tI␣YD>ݻn5/ڮXT}ʱx3?3Y57qV:|?0oY䈱l`bqf[Wq_BӟJ1bLgnCYYqmj>1nȞ|vY{<f´Ŕs.ȽM7G \6m%W0r?uEy#cg[ܘsr-A6cn o9<}ьZ;軴ȡ c?/I1*4B?A7l3?sR+9PH ]?d\@ x(}\Jm3孹T]F̱,3H{׷|x3.X*y4tJΨ=]h޷/m4)m lNR^3 κBѼu[q8.wK3ާe!m_/[X||xp9:QV>v=)#~J옽N.4ϴ7~\t׈sSþ>',}Zbf>灖p )=rP]uOQkʷdcP!ƐEC`ӔǒZѾ|*,AZ; -z6A/9t]j]U,hqAgaqOce/eg<=;s>lX)ĩ#_y΢w0`$y1g4ʭ.Ȝo> -3ϰ̩4kkzzIxd]\d8'{+좶4G8{5XN>:4djV}|}#ԛX^9k󐳌ɘWZyA/9ԠӄFoIFv>"HY祯kJ)dRFJ拽o1 cmYy_!7zGEiwCКKִb R\sPuXŖzԣca5,X6ˢn_%ٞ;e)EVSow$Oy0q }ϭ/M]2/UC !oj~"Ivʑö)|/kgzZCMGXFE6ӇyPڤ:^j]g>?ӳvIGDemmz&}-=s*.t @H1O󱔿f-X0 P'_AZuK, $պG(^ciG&7ve6i9 _xͧsk6Ѷ5RbQS_Ӎc' XfsHCccuϲ#15q|d}f`-z_o;&m}~?EqG؀AԲ } ȍ1O8n&ߌkߜ%}2/C ȁ$U%(vyv|;5|+ 0+Oԫ|'ϯ(oќ떑cº7?ezzx0=y*Ϋ9P>{,ixێޫ8?/lrیuʹθFGYk3Z)ۭmA\8dV֑-p?׍[=|ch-sW61ܖvgʘˀ?K~CuB@zh0DWT>V~)=cύ>>DgW3r1*ݗ>L1HnLBt@Gf[h%S\NIaAo3"til(H<VC/pI#q4akU&H_AϜc&ݍq}~.yhNB?:FMw/ba.7M{Ӕ}d-:,̍˘/56hp8Ovw(=~ϨߦY甗+lC񏟗`Vy7:>*C xܫVvg v`'(:Gݱ>9OO`CثrIM!>*cA9=f#Za(c5Rߧ\d|tu#'ZŖS%?=K}WXlA7tAS{[`2"1w_,Ӭ:G}3sbHb3 P*`JB>Jx>p;6R|3u~Ow;vwg\7ڀ\ѯ{N=$y=<0PPاs %h@ۓ뼸" Dȯmc_nf^Mw(ޞ舌ksgvȯё}~}He)tΡ6Q藖|I&ZbT3Ưs0}d5sXs-1sDŽn7+Լ.y]>:PJA42h/_;·0n-xW56x|>tNTV=˱k]7Z~=xs'J}{~rf18=1ovր#')o{B>tgZů:kU2CKȹ߇xr?~̏^ÆG+c{##8=[CyIO&3p_9[1*"cʼVxQ/h"xm_rjW3<~) \?`^^O;u:{+/ Q쐩2/.Ss,TehWqύrm:H].I͸0u2z>P>(q_u)/^[hnѼxs*<4nmKR% .YuF<|j12vʙezpߵޓC&֙:6XrBxA/Ks?n ̰6GxZZQ^,Jӎí@ZǍӘϣ#;pnCKƑ+IZəǔd%]Vnr,X+7s"?J}4bɲ<y!k^AsXv 20fI JOh_NTm58[_ۅ _Xu*_W~/rpzg~#isKz:1FՏUuvktd7'Q. ,-@,IZJoLz?q9 ͼ& {tGbO5_u?j<>iՏf=u<` Sɼ3#ZrV}wQ{MybΎ#d$7`t ܑ\D!R;rZxKk8kK>a*z{h۔`YaкOszQtWdBSsމgd>?l?f|Ok»BSǷFC XrA)p\j>Scv͉><=?nwG#8Ҿ&hƑ}9w:tM:R 3mέ+-+~|n|3@yX=ѣM-D['z@`qPv :gOz~?u`˵oW MRLVwv`@u*ㆷu26yE\ۥoĪN,)wg9.\p9r.\p9r.\p9rx5LŒpnKŷ9|xͫyecmҿ`:"/r8{5@J33'Mt?ژ{eY`%d~ŜPva͠8;݂Ѹ nrt?@~,VW[ 2GXcO]֜iu>F^bL'Er߽S|myIo^2?uggԷa*z4dKz ?~ѽ7:(m0K yxX|.d#uH{%?"sY^@+ ]ߪ4wq?: qSU? 
<}[ABx~ Q.1oAr{ИL}=4 *)_KXp?Eq=#chq )p"F Y OlGd`甧$x%wQN-\m-"x}=rSYXMʭNO">N/BYmzu)gES|şY(_">(}"MtP + y^BU7Orn>'F^^ %]>9~v"Gm)+^&\yd:՞K~KjqnEE=5nёkXa[ȎqV[X-_nܻ\ۺ˼7bSp2bMYs:úCJ6AyQm7OC Y߁<8qRhT8=uNt{\ he鵙:6׆'qJkc[)#?(OS9T֢ߥw CEyZ&׹ԫl&}:ʸqPzVoa3;]MsuO}M=⟯8٧:fk-3ylNO{%~=`n+ka@9qWLlPݲniOLE]=щ`CUs9I6 tmyVw0FG8ǟr=$V7&L8$l4<9x$\ǟUl3J{_s7-Ὁs9ixо-׬)y*}p6Le_yu#;?ϳԮwA }tG2ͰoiWC0Yю7!Yk ϴ>:$S(]Y8)_)?jnOQ>r-+fzP!7ٜ)J~,c9Z03Lj?p7=SߎFq$>;~<ሟ9Pyl5x= C/7Դ5JL)֑I45-yJxeMGq̝4mj{ fƑ+xg1:Q~l_E Zw4 MLyShk\KBZ[CVqȡe'P~õO~HZ;k2KXw=u eiu~/䍾>Mlu.4>pٯXZB,t=:ߝrG*te=ք)p+ޟ;xc\TG[[wܯn|#qɌ3B+r)~c|( }gkYbwzΉU[@v}&uup9r.\p9r.\p9r.\9&~rf$ô7ks.=lpSKclpػ$e~Tilu|e#޶_{U;)>1E% ~e3 ^ {+<Shet}-ͅOuel2A\@Cڊ_>pO4aeߺuنvCrLL}G"2.{/,Q?fzZ(|X8PHCͺlwpJ|ѹ/Gהdl:=r]xJ;kXN0 yށ\DwAC?xѣUo,7[͑IvDZ5DG(VAIМ t~iyũ{ʋbo]< cݮAf'cҸ*t~@mi+k>t(.]e{ĞAuV(^_t7j a>ٳVh0*]I ut{|g M)G#DۺFq%d4X}#I1O$ca顖͔; 2E97ڗKY澑g}HezW!ĴXq^+_}&o*mai6_R\s{B):'xYQjsq,Q}b@2}Mp? ݳWX8|_.0k_,@LcGRϣciiBqKeZ~|pv!Ϋ fiI]ŒG(K,^E s/j4!9^[zthz|%\Ƌʋp9'fwgs)F1d>þBtc9L;p1_ڈ+vEl!:N>ǎMJ#Q u%C(>;<>{:_azʺWd7q"o6ʥxLi\Iy96]QN3~뜹0ϔX{ѧ9`vtC>͔GuU<.OK84AVVހwG*f=aڗ)ƣfKneb[t$4,&KynNb6؛1iGqk]8b8:}ڟ94Zo(4q~G;{쿁Rf ͹RZ`5 Ym?J;hq㏔7ͦN눖ףEt,PR9gܙ#bBF]/𪜖Mםqg>1VBal;S_J;hPD%'^i=hLg4:_.Dj0Rт?󸩏J]״d^)0v_#(=xƤjX8D{nMMr3Mm e7?;64׹#-tdrmrs/v1yXi/6֏<+W0c2ŇQ^jcק1-z33i8л_>r8^7B`h?igV3-n|DS#?inʅ0rAy@-Vnisv-dT'v;?`]lcmfVFVW i>[9n|}y|i0Kq '[GGc]WnE7rN1.ܜ~Jj{Fڭ6)|Yg8S(Y^ꢜ#m7ͼ|Y9 j- w%ݜ穴~EkXb{2,1AߥVi\mrbM,|5/lsmƳxI^Wu]_XXUF_}\?xגAp{zJ>574> 2N:|qT~|hSqIZݶٝ1)/9!ӆ+d1ooIqC"op/.ODžU1줪rL]3/w3[4;3tmεFjͣ.I& c-W9gYc.cYoGF~CEߵ؏FjO~.S.Qb]GG~xw_ceܴvi5$p9r.\p9r.\p9r.\p9}s5}~YL oNg؋t dH}tjlV??7 }iJEhnEK'kёSLu'2>U;=]˃g1%I ~E>fQf~Y6R0sR|B;'~NO0o0}ufה=DA^Ǎy,#M}]}9x[K/9o+J:fMQݩ. 
bb7@_})ߑa>ƦX(^cpp_ YG/g3&Fy/.FcQ&;s[%QKJ5Ot$M5^"odOQ¦Y [E{Rsr{E&K;(L[(*̭`?k9gCB7FrͺpSnCp#6~XFcfsCO(jy7) X)FMZ?Xp?'0sH6&94oN䵁7>/lR)?j\ՔAq`GrvkN?m)WF|FG2Ͼz y{O(VmR-]Xgo$ޖӺVtc hd\QddJdz0ׄoZƱQ_ȴUߢua|X@yoV)rm0nbCEKF__d{տqG=>nXL릵)ިzGG֘_-ZZtҼ'ٓ6f+F`CjsiI ([MRS(uV|^ v\86 olҡgߓQ>D3x'(N\L*g)''ߏgQ;Px b]-d23_jXuDς/0!zsHCUl-jh-0m-klL<ǚcusxƒӺOgЁe^Lk/3x#h~)ߋ/̄;KNeLdu8{wh4緵ܔ/IrkqW`/xӭv[yGrY?=3s@kW43?e_iԝ[ִ:kOk'7dSݲʥ.\p9r.\p9r.\p9r.,P )K746K"N{W=Mޔ)L嵎:zhe~ۄ2ZS<0rvLt?~#A/ӻo|'_ykMFxoDˍWWU93W/r+~\?R&pyDrT?mGc / ߖKq w}Wf`G]C9j*d}۬Q-ϒ/-l5E.sDh[#-_}bjq'1?2g|JHhR8Ǵ߇$Om#t@8i~o݊}E'4mvylUtſ^5iS ɞ{7/ʼn]WY7HC{nn,m-?viB7ke)^Ɂ3 iOoёfk:vo(b/Fk$]qkPQoݿMtkHI鈀?gwI37+nԽ7{~ >\,. z (9?t[du.Ş -ȼb D|}S|Yo2GR8sz 'x%=C|pޫK4`].S Ŋ#9wB =ܰM3oB3ge\Vz.$ҋ]_bzִ2}ytdOΓi/3cs^HURQcW\au$,.ګ)Vގ軔xr9$GP6:1Xb7O۹ Y@y?}=V}t2)&d D>S(O&V۞ z>IcǬ4S>o糠]Ֆ̟cb(ZI$״\gOa>rR$/ NtIYX|1j{ ?0par/XrLa1zJ4ms8{Y+{;XDQlB'(%&c_G9֥"49gd .'E87%l;|1< 6PK9Ԁ?yVBŖ7w#O| OKz_Rz,^Fm=f1],^Z#凸ߝmB> %?7 Z糯pL`^ [LY4q{ dKM`ʹ_nm1+_{3ܨ.M]K{u-9Jg1?(A8sÔ6O~M^ujtĹ;l]0<쑭Sdv[3ڿdΩSg[Z-r.\p9r.\p9r.\p9r._ZO~3v>čdd|^\ޜcےF# lqTjlUVb_6}Q9|z/R`$_$+o}JC5~?ia߱`(6C)$?\ƟZo qI_˾)1CR|~s~8X:+4_U[^h:^##Bn#5?8Pd=%؀Q{3f_{<};QO?Fꊶe[t:6D/J,@vd2{&%\@ۉ6*'4yt#J+JFPP~;^#^jҦGNh0C'%:nާG{CWEad zhy!v8 aU4K冹ƑmpG MXJǪnW::w )Af/!;ȸi{U7W+K,cūrA1ZYP^cGlk;> )ZC!פX}@+R0ё3X:or:,gx MWu~a(ئ5[qWBZ7x71ǰzaE;+cTo&Q->Oȫ|L!xPs"{sB3uĬ˵u]y&r:B{'#7L8X_lI:ϋO{i&"Qhbi2IkN2mlGF]Zܫ7oS{yNi@X^HB,Li ykQuL۫>Llg̣gP~^+SAZa,Hg֢zk 8?a2xADܜOG o9d(y19 Rb5z'җ:#EʛR0m]CY*;bÏ>Oy9n*f7FFu$o ^_FٿpPf;ܠ-LpkkOx?K>![abK>n`mer=J}=,:Xen5L}Ŝ?2QmC-}{>w=\7H>6J mS&$Z9GG>c%0nǗ{(Wo-4b}\-=FXM M1ގ5^;ᱧf-ӑ|_27 Q_w'_pkWs5>ʕ}ήbr ~ٽMQf~2 ńҾ6iO'@>o\A>23aH>nSo.>ޛ8w_)s'~ssCh(r)P}5v0WMߏ~xM1+d4b4n&F ?ɨ1ye\{rXԿߛ͡87/%5s I{LF`90y<^|LEw%U|yF Q\֬c&vlk5! sH 2 z )05fw-ۑDj IMү-ر[q F,|ma5zjyH- NF6ǃNwc﫜et&L,@zV(Yku7{I3pLDqF>zatl$a?FJisHOtocLo]׺kqL}mЦЌ<3™[Gf^r:庥~ X_^G=hh\A?6r8,K|^*&Џ MF}( Ƒm@s<72U`Qr$-āYY6Hz,q ӌr;jgq4OCjo#Q 볃h YyA1tA`aHƃ¨ Eb1R}]vvM|MW+cO:Yeaɳ&goU~8,`!|F8Lk#הC-J@q G3aeoL)?q8ly0dNY#|Om6H:zϗPh |_cuX9c? z/AsTC-gNk mUixtcП>? 
6 瀹+GߛC%=A{GӺ!:Q9ʠ0ѽAe#HkFg>a."/<ϱ]x>;HVދt: l0S.T Ufhqt6#6g0X%]=)tN{2)<,1osGƵr}nPuʣ~٨QNhm @˜辒1kl'>?}aN-YVaS/,Ϳ 4Ryr\v[꟫e↗d|b;|h&#IVE?]~O}8L;ur?͙Zʵp*`$w: 顳}ۺdRL-4\C90wGx!sչO0݈uT_u h#s_7DInND{`9=gh=Ӵ6oϙ"7_so#~_ eվK\-[(Yp?8^{KD9YH{U'}bX/tBѦ|߶϶G9'bN%~pi%-C2#@qwmq|Blo$}mNF.0"県lMxob]hG ɛ%1D%}–Ao3C2d$Nm[8_2E{ O&qe} cH@pZ/];5*X)>}#/眎KYb,<h⭌$~*#Apq%71}2j+o^rc_DKUO4qR[y0x)gmr|q b-fXr;<#u֬a <`vXhz=:B1'f+E:*ԭ-Ν#dfdž56sH~Kl<;^BNdشss^<@b,]U;`@s'. `{d*gUƷc9:-Ty<~gx~kafࡘЂq/ 37R^rH Q`q~mX͜î%Mu gc#ňkYȖjÔ'@aN$ Za{su 3XqUԱw0^ΩJ g OXva] SA|C91=]_6yTs6Ք7S&Cw#C9Ϻk977lbQ/mJg47j?Р 9rSe,WRhJ:\9 7Ǎ{R.Tg;s헷ú1 }KL[9=$ұ-y{I,ϥ~yrvs!5jsc͎~GZ kn()SnRGI3]Q>$:*a𝩧A. L4K]reC5$:pd ]I#C+@ a%qۈT {ҝb2w=1{]Iޏ]zUյ>u֙[ޟhG8 Թլ[,[ƳYϿqsg!zpث\X9|I.-)XSOc*7Z xv.n V/>Yd?B՗«}H҈Q6;V9Tq#&?@@@@@@@@@ 8Iu]k6給'w8óUW+>;|:uOtCjOȳ*?:NW~crj7+$nz)M%?Rr`h# ۭ $/E(UvY,݃u~a*XX&=8ӕmn{i ng9楘M5So#m'&9f\O\uMU>_lpt_j{)2O9@׶NuT ݬ󶙯M~'>;W}ձ)Wd/~g?s,}_irpyٜuH?DZ79Y{cWsdki=ʼ}GÌ˩9zqXw[lAmst?٭7vTβ?6%vK}o'SͭN?W9]}LrcoؤusYgn+}~{MzZ)>xqh_,LYC%iۻjy]8o柗$FFSurf^m-ؗy:W'㱋7ki'ퟝ\rckcugZ]1p|ZhR,yo^ki⯹WVI+~Gi \ەPBt;T60XZ~-TըJiկ1$mnvwמPV~r݋ywRƀ_[*@_e)ה^5G$1S{)T T㻮6ONr}^}+9^՘:cR \t%Q!Wx4rUw63e%u*)_s䱪>s̬~\䵂m:2hU09vqLtd唎. jM_,s3j]մ<ɕY-wex߷jܮl֣R٨\xG15_u4Uu/OZ$rCWY'cW[NBm7Y:ެy`>_>Qg*DR]~Za]]̏U[Չ}շ̚ΕוNzFf.DYV-ճ Sڱc}(ۇoq           +.,qx=_D'L@FVc[_EwMW&|QKט):6y6V޷];7`Qr S5ð|LwM8gs׸*T]:`#C+N E??sL2)Url΍weMnr+T)L2ZyY` w\÷=Ы}]<|?|`S+s.wuw̘427I^c.Dw1O 9_~;13xgr'uL~KUӦ&'ڸ;sֿfBjEZTw2C1;%O~!rJef;]{>ߧ^bjtMgt.y6ϐdاwz0*դLrlvӥs[b^~WϯHf><5:Kni |T&cZYr ގnTu6Ow,o*mC췮ups䡞ȵ8Q>Cˇ{1)Ֆң?fsm1-c\Iv RL시:+_g's~G~tV1v$O]m0lSO]wqKnyy7[6h֎wH[ޯ[ j$l䆪-oљ'VU-@|Lc.?~޶$Xsd sǰ}}u;` %̗8S":f;L5һtgPK=NՎg/d}w$[JC&绯c3g3whڗeV磛VkosKZeΟ) KMM_q[Cz :[Kj8l:T1yz͞rz=~%f27ur &1?̹\|ЎnvU[Z տtnhG/#s$>uc{~ڟ?'UK1qީ|]C1{5[*>.}]Ưcm?j?U!!^w~ޯ쵹/sGbuUW7tɋ%nM,D}D u$[2SǾ-oj^nqS:wknG7=\-_?Swnt*mPܵy<^ogo&GvJs? 
m{K~;޿@XoD@@@@@@@@@@@@O`|rM/+9Z=rJ}%wϝlUY9d>ҹxWw/?fmɥmr19҆BtcsڗkkuF-uКܞܶj]&ͷ0 GvLY?ygsxe8]Bs 3Gkt+shcvK@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@.P"ˤnipype-1.7.0/nipype/testing/data/tpm_02.nii.gz000066400000000000000000004207611413403311400211420ustar00rootroot00000000000000|Snormtpm_02.nii |\UQy5T%@̞ᒂi )^1דQ<4x)iKBBUB ^!7pT0xIQwy^3&I|tk?gg=ϼi H<ӃUW_}mAp 'm[yXַ55zֻN5ܜLCy,h ۋϱ'Ǩ#3]*>{ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 ("c#&oi`gĽeiK mim CE&:jWzÆU {=kw:f2Sg?v*hZ{2u_nY2mbbiK/ݧ\k{X}*\M7޸J$g6fHM6d#kú|_;qu WóM>mz_#_mK DA[:j YuKmoYՐ=.)xs}uҶ cE7\SH찘Gn}_/x3ڰ؇K?vw{o2fۇ\8>S^wъ[csݫp[RBO{%7 1\t`{`k;>>:;Fwa#[/-$|O&Tu%3F{WxiƮfJ/4 ˳¯E8y~p/Lsxv2R0h|so͸4NSdߏxyDaǭ,p|}ڜ l+NA~cG;r|ېRH|.6IuiJHтc3{*K _l:;> M"i{뵶ަ r|1u]>Kͽ/X vtzrv޲d!p[ϓفwD5OģXE6}-汮t<&-23;~# CH)盃bdpetM񙅽gXϐt7/aoͩί?躯|֪ܧ\J[{-F/u&^n?4k+05ڶe9}is|Sk;a~[7޿";pk랊o;\pmT- ٹ}/;'gX^Gz?|M%68Xʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (@gnYX\pmEqFc]ˢGwYR/nN۩~bϧSĊ'a@4wRjxxx?tWp:5U_H}Zvߗ;~b(<˧6[Z|定cٶpkSḃllƋdF]Pr]\c}FTGe09xIw~ Jiu˙-|b5 EI(zyr&ޤrOqfr,3,H `< 񱱎%//Y8[vuQ QS=sM7qVjf}_ۃ9iӿx?PcXnᡵH?Ѿ+_>Slq,kT#q򍀥1PrDgΗZyKVTO{Ha&<%744ciߕkDײ_\JTR=cgHd\eWuaRGO^"ɾ쳹9KKkt$?~9缥Y #xn)vѯLMb3K1sY k)aF0a-W^v>n5~r?znA_v4odeags"yIJ0uxd]x{w27]ظ3d${8jsToki)hgoɱX\yGEј[wdY8._g瘏y5E}z7AM/֏Da[LMFMҰs5Xp44_/rB.(_5WX -k꯶}XƤ.:̂._?8#'4|)-O2v?`L ϶X6;GdO9h>4)ge߆ Q.'+Ve@Pe@Pe@Pe@Pe@Pe@Pe@Pe@Pe@Pe@Pe`O10ֵ$ZdqFQ\L 麅p-s.v瓹VܽUgޓqn$ź,9D8~ ҎCyZ0^ﲲKc/8܍ؼ|W'k*|1=8;xyg<:փOlyƹ,8}>&A23>h)Ww1R;v;/#C#/Kl1} f;W,{]sɽ& EZ oijHڼSmoŷcl;u/1 $ꭹ79]OA3e)ƹ[M?4,mݵS7Z&ZޥeS+}qѮoFUE5<;9Glmu!<׳rИbUϙ&7ڦp=_d+sTrsyo͗ur㟗zn_f|{]ߖeNDgYSŢVGl~G7+Ŵ:}{Md~֍_j;"19|k6Ǣف.,k+3'}/mjp?N}$A5uK,6 gMn|W=ds]? 
o)"0;L@Ot߉[kvvsխϜ$Y]Tɽ\;}m/9bRe@Pe@Pe@Pe@Pe@Pe@Pe@Pe@Pe@Pe,k/Y|EkG3Me^Y4klF}Gkzs*ґ>.o,ibo~]z|d i!+37%Yߔ2I֭ `eY!'$ʏ5Y<@}Iܓ [mXKU*HérNe[S66mr:"בws=ǧE҉l;nC$ve6獴3_e8D'du<8LnBό/K|aq:Fc/Vi3[4=m~ʾ6⩫ѿCL3?rO3ޚI0G8 V99}M+RΞ}]HXN3 W k?cܵ=*[=-Ϭ+x^_Jr g[^Q3F ϾQ0Ӧ_R֘so-B|_'Fb;s~. /K5|-X/tYfJ|>ہԭ.bۺie9)?Ƴc _~*~.uh뼥Y^?C>ߏ?Enw,R >=n>=_m׃]J+Z4||4^sO17g__\tOɼB/헾7Ɇ$vB?!\xhG\{[D>N$. I#ᱮ2si<r w ;q{B3gUg㧞mq?.a7zNc]&{Lޚq,ts*9 G7|by]wЈQC#&Ozl9wLX0#[--3X<*'ц,}w@vm]C8 O~6j񙼿ƺa6wrXKEc a>Aف)V7$9Eiޙ1ukUԗ[[fXe}̫k;㿲m̬ G/Dm?uvd/˯xֶ۫{I3ێt49c0N5 ;{v;$ (lcmE6sqW|l6[\pmm?0ϢOkl83 ~Zdw!3fWemsK\m̤?]:.{ɡ9nWa{R4t~|m۝q5a7n;s߿{6y'v|?|ﮣy[Xgá[})wqjIVe@Pe@Pe@Pe@Pe@Pe@Pe@Pe@P'j74ߙZ )Z_|bAe=>^&*>@kgvkygZZd;]1۬kEFAx$opV>Q%Kx>|[ֶ5cy=?7fzմq._3q_Ng)T:ҸRlgrhla-a%DKf,q/mV3mloo5UXAV|;9cӾuӏs}L˅`qk.:kbN7+ۢb69"`~wCַgnrH81;spc/F/os/ 6`ϭcrܵ\Kd8p&9e>b]g9AdcksVw[ DƎM;Vυ\fnj3؂/W`.=c4:AOX۽b(۔_J8eK+%΋l]Ep)sXg}nWXN?  yFM)εkOIcze{h=[d>2,[$8Op_/ [{4X]YhHC wDnN%>&;?=~B}z xgcv`y{W3r19]fe갳 8[ְ.q0Njr8\Ͽ_̌`6WwG_ru&EˏpڍQ-/#=C$2^^ N8}vv`Xm^*_XXF@,3~7̟|ؼ+Ó^s ov_sS+Llsǎ ny?W<0s z-oS mM/Lx}xDK&n_OMd+2gL ]gTѷHטc;ٰii4<\ˎQeL>:~CJ;GǺu׾!~;^YK0K Eiۯ 37?RkZwƱ_-}JZoWyO=.Lkʳ}+jANҭ]. aM|U]l3[dE73맰=3śu~u`-muo3;Xuݱ).yh"unkcϳ-gIeψM281dEFgn-7}d`KI[ 'N)WN V-0! 
K\~9=SS_JˁN<%:GR/_Jn v"'?l1~l8sRr~CW ONS<Ǝp}3/URrtAF?%I]7m\h S R}g '~Ȍ~:ȊfJ0pA=q 7/K&c1{Yo(!t5x;ߧ5o\q" lW1MC,l$| xC[ʱ_'uɳÉ}3u}7ѫصE)}weE]|t4Y~\3msleh䆨O>.Ou>7#o'p1f\_ )~om9~g{ؑj^|7t%s<⫝̸bԨb1 ~m脸/^GqFy־wxȌlwlI ̢SpL8삙#yQe߼slOvV]˝85.>LRށqN$7M{kF } *lmiU)[c5FkR&G@r/q\fjrN?a8rmW{sw!l<`0"&Jؘ}iZ[^khٿUݑ~~.Hr]exn|˽^цɡ$$;;/Iw@?96GlpUǵ62<_<$ŹjLoϙ|i _fhy|{$X&Z=F|y[ޝ!)|VsC!ywdke@Pe@Pe@Pe@Pe@Pe@Pe@Pe@P ǖ>k~n{t}ə헧X ͚S/ln<_rlֺKQVx[3> ڲrױV;o-r1 gp4:UYM]L:8>[2y.-\+6͗mvX9&qfu}t6U / XΥ\앳}XʺGz/#.ՓKd}]Ymoze=tfeud.tMȉXllQ}v] )m QSke>7MLJ'6*k=E# %e[JgTQbȝs^'/+ivMvs; &r1k_чb-ǸmB7u""}[-~w_۾U[!:Dnhc.y]{y_bpW+3f]p̷l,a6ן=*q{ߧ7y[@L$lΌhtSoXe>{ɘ v^;uI\٥{bXۑX9G8 vlۼS8PC#6|/9?'M?Q𲍽RGK lu{6FF4bwl Yſ-9$)g-_WX`;C}so&7݃;~(fn8-K>)w} G oX1Zu޽VIV;n)k5 vt /bH~´\'YMg(],l= v9ƶ},桑h.V=w[=cEl|-?*/dϱwޜLJ4O6dCr6t,z/4gR{к?kuVק1{ʼ;z)m7%.~eY_ah:;2-vlK);1ʫF_3~ydӳl g\sV#8M>5kKsb9V #ر竑[_Z7ʵ=cu\#~0DZک%M>h1CR 啑y g w7s;p_dzrV!^}ѽ|1{pő>:?+C)Ⴙ?Yە9Ƽ{#ӕQ^]]#3Bwワ~ݘpR׆os}ѵ{<._x>;}}7̮4=m؁L»$?'3y|stWyR|1qo9=gi5Ƈ_ :WNg/K|́w^7>~+nm-]zc3]Ub:ߗ;|2r9jBӶe@Pe@Pe@Pe@Pe@Pe@Pe@Pe@P*T})ֶ~ߛb/>׳]W#)]2]*뎄Y-Yƹj swR]nQdlS[_.oc},kdM3X?+y 5F3q e=E֖NXIsιjw8 J'kVPn0'^fιzn}HН(urgU[NЉC>Y9d~ۧEuٕGT[6cl&\ 7^{ @>L㬜;7t"r`F>a:/lMI1"T=a, >5$3]õbюF]4YR%ڲ r_[kAnܘ=*kcۆA23)z>v6h뢵Fs?`5к6}GtJ : kwx ѧd>ylObV.b/S/1 W3㱛 n$;(W`61IG{k/N3m|rKrHl9Bltg&b?arȄC{Gz+: Zb)$Ǻ.L,$~u[ޟi A zpج*yt+lÎ,#Tp#:.99\*·`Z^gqgN \:ϥ?8V*:dC09Ⱦ>0wrdFm 037]O|E3 /z']jm!6Ck;oivA[E*<+"|Љ1&=+ix<ڱ  .3nM]7H}J'].q[7 NxǾ/_o]ܧ}K3c͕1l Ǩ8#cl3oC0JͳC2Ccv|:4+K?@f>S90PJaiί97ɘ.S%v_ygVzo-)ЧRx0 ծ[}6b&y* 1p28s{϶cmp~`>m/5Mܗ^<>uf&[V3sedeYg$y&&&<} -֧";8ѳۿӘ}fk"=U&E9^tY7^幇KOjiE>=߱>DCb\-imy?^?()-z$N7n<'cSG&m-0^|K`/rX0s뇌M[s6۠i-Vȉ,m:;<زf+͑qf qƶ\O}RڼE8gsH#uϔ؋NT+ΕeԤFvJ)X[F򾑼 }9_@UKY F֯)E6g>O8rI}1H_=7B 1_r/Z:]qK|gש_ n^UZ0w)FgɸhޕE=f'{gK[رĘ䔣M|ך3˽|w:3Vca]Ynޯȸ0b0|7^U2F/]_/wyaOcۮ޻sy2r) w?laHR(ʀ2 Ks-%vQe@Pe@Pe@Pe@Pe@X tf:N7{!!gB5ֵ&|듮={<^""X:QV3]߳ͫzjde=uƋ kaX/tzlAXWsk{uyf,m^1~s[9\+-])z|gk:e=6+Yg2lR^갦RۖVR[5w7T̥'k' x5IÚ8p;')+ NDž)72^B:|Ix<ΚwX;i]Z%ِ 9Y/?7%%Jam$Z־߉p||xĎ9V%6.;nM1>\lG Jc|ZwM 
ԑ2^[jk2{DYNFtJ/x'uҒ4͌gba3ꔖwY+q1D,J_.9&1Ur4pCrM)Ic6y-[ rYC=/pC5:u3R\B~Y }}q[ĖcwrOvwy@q?[e9wG/#1 -_H#x}}?3W`GWUn%E-2K|ݢk`.ߒh6s n(6x*8sʡ,גs<Ƿ &blӆq,ₚIyݓx wBZޟuy^X(ljh$Zf4lj[bkIs}y.8C}4y+cc+ i1}c>u}!9#y>ʧ$-7^cZaKl/|78ĢL3W)N]mZ@ȋeBL);{(oǎv- ΧҾp\}+m ̏=/>RR;k%O۞_|wˍ}ڦ?{s"?+Rl]8`bӔp R{Hg3#)k;)\r !Nny<29j+3-(o:lM,E˾!o+|zo~+2_\2?~Hv ,`#XY<|Ǒ-5Oƽx&.>͙uuni8>}0SϤpmן1'1uSm|TGwgyob|x:<7YN0Y ^t N>GI̛Z{>![ޯpޚϤ7\kBڑ92~oӲ2 GD˵ҏ3Ϙ⟃n22_? w]~,=&3W6<`?}{؜á֤sѩ3rHӡ7gkT;a`?3L寲+up=t='192s}Mc݃{乎@‡ȄF'zO$njb\O~5Z{KگnbKehk;⇱Q11ʻsw-I"d~K&*5{1ڪÏnUdžȷiv %s?euh>gy)ky dP_pTkk;5tM28o .4>ri3>qª| pҳ~}իx*lĹ}s:|վC&OiS&o}:K1~`pNy}s82Gs?Ϟ2NV=nՌ/7>q'19W<{;Q$)>a3LMɾy*<-!WsO#_#D<\ZS'˝[ɳqk?vei]1eU.iIoeջZWvƭnWk=e@Pe` L66f.̶Vpyb.m& M_υrMfs{W6Xtr*ʀ2 (ʀ2 (ʀ2 (ʀ2 (ʀ2 5)X8ѲOLSn}fm[1rkͅXo:Mֿ:gւȭ7w+5g#d%cDf֍?ݺI^QN ^0>%kHY p0k\Jc Z->ٞZ2VuƲ^QRytͺQ};`k kmއx_֮oˀN yeR.:%Wփ YKu:gs/K>鿬Y# uk]Rk"VַKðW_earO69#.חM=83 raH}#vAפo~5o05"H_uL,Vp"/+/`8 p E ks!H;b[~6[juV82)_.q6>>@v?<ѮCb6~gN)z@lnj3xE~ߤ>mvŗvŞ(8Hu0 ߏ[}iflXo9I025ؖZ˵Le}ۢg^̡ܘsH]5o]_Frz4cd%~.`#3 ҟhhصB~1``nI26GD\%z+*+_l XSz8pNY ̹ ̸u/]`_[!Gfww4fᚹr)> f䆣W{}OvXG`A#Pt ~d TM\8(l<xR"MC]'E2g$]UXv51l3fl}4)y6k.HvL/0; ]ӟ[9ΓK=c`n|f:<)٘txV*3D<.'>7IfرEOSaDAQ+O{G/HlؤuZqd2W|B#cs7=;[>u)坈&oZ{kNM}±s.kD7ٜi*6qy|w3HpWs5v|Ґ:O]i}">fpt5XWcpsD˾g9 7AUyO82%c2}3:aw&۶B]b3/kF2&kz{iy|M/\%E1v*i|2`c5!?5p'v}3ȼ^>&F6د 83gQy%">}m|X[0N78mINfsx-;$?flEƋJaU-6ˍr L ;|pOwj6ԡ~϶>i^Ji2ȁ &Q"spJrlsk$a|l7>{>BlmC8Fd1ږDԓ2h]+3$svNG}m_8OF_kwFskj\t2CY;+%|O~ORk:xn~~OnN2+U3frgѼMZSokLTc'Wp 9YDUi=vÀ\?g-;I[Џ2Ys2OM<509<=Qg3f +c)oasPr>q\02瓑̽~|F!F\ԕg[r~3b'[j 6 V6/|`Z`emcO،G}uh!PyWw!/H/6~!{ɣA LG㠟y۝+3A)y0dom~!JZ|y=JOw=#B;vobaBr+uqlBV7X9}4P;6s+گG&_ZoD*j̇r)h̔ '/9T#V`K,r\3_?'E|],}?ų~j&!ӟ2^lK-ÝJTG31&҄ɇ/.ǘşc}O>c%:AG2;lKN>>ˏe( \iKփ$gK2vȽ);$\>:؉e+qCzg;ܳp>2bgly-x^)b.k%)\5#|MO#V`-vM1ڷkc?F}#fl˷A7tmd͂}(ܜgpB̘{OHA~wkpc%l`A#`W}ĕP_,W<$1=糊rp NI]ĶH̎\/y|MSNcX^e?,5Zė2ɇo+5${L;ecQ=})}'qE۲񊏕c3 ^l}&~垦&)Y/;^Bk m h mlC .;? Vu?%Pnᴔo=SOl[LZodH ^!$\"2SبkOKsb^. 
%|I}?o f_Jl}yCĶ/.MOLC#?p 3Wfxro?]K.O[9= rsQb33%(m'I}^l㣧s뢥uF~~|&ѷϾ~ky9Ɨ F6 vؿ̶I2_9l<Ng0X[k.( j:SOd+Cl9>G'n`1X͜mC)(S qyήF: !I]q"8`2qsN13~B;|Է3}sܟJq?b^݊ߖ~}>(,m+X1$3Ѳ%X^aO˽j8]ᓶC͟fdD7fg [t̼Kpү9 0<;J?v:~}w,&+u/rNghApV՘=7NV^,:{O R:[帱kӴ)oءp1R 6)ߛj̾r voSft+6~-Nl4mg,lJִ&n̐^Sn!m:&[3s?5|,[sG&~(S yWlf 6o\>L>'Uʋj+2V{]duN|_',ĕ8xwrٜ5D8|<'g$p3`tz CgҊioKkfYc/iQO}NMKgcg-i^o mo0sޚIk_sXuv_d^]\=㏙|$-鋌.[qGOPdB-[R`hi Mfx˦zBr:yDk-/R[TUtI.M< Wp {|,8+`uz }1{J3H2jáwi}Ε_8#_)ҎcaރiI4C[{^;uN_§tZxEܧMoeSٿ+FGG[sbc*8񽖓Ԑ13>K&M}#s0Ύό\72F16J];?ƌ%Z Y\{> FpޑS XƔ+3bg>G>ےq ͝ev.t`}G0 |$OwϝY׎Koo!'p}R2>W/i7L?6y'_=Y[,o~{+y/q;;;-a~~k1+'hhϙc1;O7q9Y@kk1K5v/< XYŀgWd4|hwD|4$Fׅ}W\4uLh[ػk/8,ܗ|/rag'R;ϩ&usd!H}~}1cS&_hW>=ˮ\w>E3]E9նOwv.6ݕ62 (ʀ2 (ʀ2 (ʀ2 (ʀ2 (@;9^ۖL\v-u;wYOAlL}L54VYGwGdRk$j&OM羑15n^03k|sס0=c&ĭ5w߷ߦ?Mpwtb#&]od}K?6Dg/~\+kW\uɪSѝO䕲8:d }frw3-k8׀fZY)mGϱxJ~sh8n:Z&m`=бbD6_#g.׎`r:8JnMbl;G`g k鿬[f[ 9&@psƅ_o+Md}3rC` }ԓl!|V7>^ = ƶ[ǜ|~p+W=-gA$xg9_('Z[~e_p_Oo9{fzKu D6)Y+R2ޠ_Y9.%N/-rǑw"̈́c6KrwǑ mGsb~RSm)x~>m٠ٕ[_Ra={?N_f5rN|5-uƖ :ڈyWsϲ}C8VN߬|#%miι7ԍfkeN5wsRyE||,*D. Aԏ?LK{;ݱX'6K]aup!mr7D6 n41L@E&ty/-1s({M!( pr5˱>$ Wrks1y?>$=l!/_~`0?6To۾&z]cb>hVjm0wq&V46n)_b dv:smƮǺ~QGԊr4Ж9E6EEߝ'_K\/rQǽrz. 
l•h[ڕ璱kà*?pn淼׍&#Ft,ZF g|DfCc>|(lG%Ѵ)c;ps>yRSNSCw]oA5!v>gӽL\C +({_WzVƹ`k>5ȶ# ؙSpvz6J1o>K>ۏ7sPvunon3F/s 8;to+FԓR&}gflY3فA˩ .+?\ g67\G'8%qތ{(z[%>|*~yM;:.iZ^|\\Cv2X7~YV c~G&(^w_pg/Noɩ騯 :_;5:8<,؉qA`r |8]+.7mn2xe;Vo-i;3>u 5UԖ~";v hcS^ r?if߄~IO$GL?썺,vwXޜXrdf誤4puqsߒ>(zzϰ];NnOkuir0F˥ c}5n^"h/^"}A95vgOۛh}XW{Һa10ֵ>vƦɆ$N}U9`;v:?ysS:rBNA8Z7dl9(bb]Iͯ|i]ɉgY\c٤$<= ]$nZtnϵ;]~U!r_FF䫵J1O|I)?&rMhb>09ʖ gaZfc>-K]R鷺Nv`}7[sH*ߑ6yMSy%_6#?)R©ѯiB\Wi{W⏹`heh7 9n+Eک5&EFʽufܫ\/({?R3^3>IL֎o N=\ڶ"ם wn67sP?λ"g}|o;.K`}m䎦sd/- b]^K1 ̮ɰrpg[ |]ky1rL??e>3ʽس`2N;Nln%FD`}I߳W)\d)[sYgJ{Omږ殰JJ{K./s^9VJ^|Ŏ}?"轘{*]ޟsVw3mְ>I /T 1D?";n}ޮ?߻X‡tXGu~|ByYސ\.?FWYgr/ ={Ug^,|(ld㷦 ,ocwQhW{2/Z]wSTaOf x}0"# 5CAڼQ|k$fxMr}6};v/\L|  ϙIf9--P5ە5lW8<HyrU?663Ķ EtdbM uru |]\Gxfc|j&-#OJ]&?W]m4zb]Kl͇)|Wu:}=o =H4vj{n X{5mE/9&>}8AnpE])rgkȔsv}KtL-' m5rKKMȘWz]uiq  |\((6+rS^Q#'^ҿmɾb봸q ϼK[ n_ v_~|R|9>~یUNbwgbmWhSXZ=|%AL nmJ7e^c:2R96;^>_2(GOjcf./rONr_>FsUNt#6-v% \E*2 >áCm~ؐɆ9L0xp28NmqFl 9٧# 8^ȑ 9moҦmb%m̧ES/9'I qApQe.KMbM#1Ë9__x˾)o[c839vy [h4qs m:}<C¹*n$vkɱd k؈/͸zyq v_1'l'ǥᴶJ[ɡO?MJ]J}yvm{] kۓK>ϾGd.(ή?-wγr; .(%-ف1wޚ]Vo#L+wzcLվVfl^xbe5y3i ⺮/>|ǁ6?CrhV(Y]75MߴqOumgdǑRo2~SWw#N{;7kh3S=޺*+s=k堡#Yζ%h˅gM뺪{"Sء[Ȅ$ҹW@A)W;}(r(PRyB20ș( S"3@ C0VN PJ rڴu~g++dϽs{xۉ`{U1>ҶY5 ;+6]mW:|m{ic%D"ː62sIFY[3M>CE 3sw>W_ڿ,\S+;kmڧ >qXMyvDOK9wk[Ԓϒ W=Omwj>ڨ{!* LED+lric0wCƼp6[R޹m#5Rzv&+;ouاƠ;U {ڐSorM}?y' {y6 ޙllMp^[WR'F\$Gj';~64$U;e@y7یjxG|A#/vs\G6c:g~!v .3sj$k[Iv1HuJ"8ƾY?_IGfcx7ʾYu`]cO׎Ϛ&ѷkcCr_r9hSM6hSM6hSM6hSM6`>M3CWt[j}=\ k2h|/"Nh!kx_l4`2oxzV w>@<8]-{b|NwUt s4vLT3hdj^W>ǂwO:NKy.@_}gF]LV_ 9[+ez`oZ3h\^/Ն7/2{yuĆWdgwnGi`_z6w&JL?]x:/Kdew]*=:q͚hsJ{ďs< ODkEѰZAB'2 3\}! 
E_Uxǚ70kP nbA6<{28sc]itٮk-9z/|}ʒw?;/<NWJQZ,;NuȳHyW ^ s!w͸<,,OMAŐJTvT#Z6nAStu'P;U;xh^R &*ڿ1HnxYI3Wd9ߠ]|MxO2H}\/R]X)սT}XeZJo;xX_ڿ>qj}m' cvȇa'^Q=~)m IΨGO.+Zܞ:?m >$Ym'}G[=~?Y3<|:Y=__^|o/?.N&2>Yq =>{:ʓ '(ή=\Oଶw{*G~RC!b[Q߬9]F"G{N~v~golt`$Wiwgʀ~|MWtQx,ˈ%l>xCѮ(΀|_apG=0&w'_QG P4{|Ooo'6̽SQoL٦g>;%^ ^#~Ћ{M/9SYurߪp'|* x/8gSwliY1%q[G|y:ܫ6qZ}D}/|oak{m^6=~"q>6i{b3jڟnf eV!7pqUz5VozjSy>3}>ֆ=Y4ϊK1ʩ^Cn:h sfsU-hV6̽t`Oϲ7|gЃƳR<|*z]w֗ecO䷹ll326q¬7Z0_y^x_y:iuCռ+\ErTM7@#<;K#KUrIn7vvu;yMpY {ZL巗VR;h{^Ot+w4OܕxhRYV5?I8/QƧQ[zYx|zY /\ת?~>Ҫ#&!" MMchlw.+8Q_Fl>mBϙvZb++ޔ|@u| }X y\/D?%˲Lhڇا_1sSRPض>wPmX_(i|)W"+V4(?N1FC,7p !=FiGl: xV—6>'Gp}M'x#2_LGWdGߢef8̫-?5^'UƁ\6+_68|ixFۀpP2oh/e}em]ݟ ŀݺ#mjЈ|I_Ww` ޘ˲X?;lk2'üU3buIiwGGNu wn eio0OGV|cn'b o$豀i)>ß|:k>yR[}῞aXi}WX9|9 W؍w]Y;[;x/z^uhry>w4Xœyd-tPgk={&{5G;&'_`MM7NF6Gȍ;h%(Z]&/x|ߔGb}*D}|e(`V _ |A38o\Py.O }Gz[,:VyG{ΏtC)S㧭ꚃy_x2}vp<+'}ϵyM2!%V}\/hnh;ԧC"6>_o4Z>h\>"h/~Y\3:'sU)L~F2{u_DVMIҞު,Ē>ufU hsR'|᥶2>8o="^,o'Z.ZCmQ3_¾8Ʈjϻۺw׫v 8/y6KRV1p<WYuVSm|}<ʋTݍDU8(a=F7 zJZM[]"_F좗ڴPx[e?(_w!yd#Bvy40ƣCy\c2~2͖9 j|;7]rs8|kh&Gc.q,qSonW| KX}R^O~]7d!DM=^TؕL{'?yA~ x _ 2t}-#=~gp Xk}o$ksfvl|'yCIys=FP>e:oӦOQlƚ*K&ۡƁU?'xlu=9kNO{#ߺv'g5v97 8hv|y78 yI-P?&՝Fп2gڪ bSTP'4^w8}zexAynۏ'Lv]n>Jޕt{҉Dl- zabzVΧr.JݫSkK$ ^'+zR&hFﲭeσY-=g=c|\~`Z5DnE]IWv~5(*Oߖ8R7آgJC } n*xJ]p})ϷF?}*ʻa nW TclOУm;m?/LѷVۑǦzmu8>p~Eǂ)gc XoxhllWh>NLe1|ʇ*Gj/q7ܱmKJ&_ߵx7Wr۪o%ڢh(Smp V_%Ũ4\R1Yx>_Qjz>;|~ynj4rqozӲSti{S~s@.׀=s]IOQb^+0DdZJL{e55n‡kwYg\/icL V_lk߯j6ٔ͡[]ʲ]}mA([4Is,ϼ#]l.|KUcLLbq<}C&e[~;A/kV\C3thna)`*1y #cdxXЯ]WeܴQic ,?Sr\G[F}ce_1f1cwv~>jʉXwf_N04a>cB;@./իkWq9kP/6#xN|3;i $Q7ƘR8&#[;g:})MJ`.;;/w-iNCW\g"pnNejS+}˕Vt7j/l.%M6>Nsλ φ!\ծ O&>۳aM+ksqYfұmbm.,(Y7Yo#xyёrvUYi/3#|Ken G9s}|5gS6a,+1Hml}0M*[Rw3/cpnqO<e/hy)pGwL~"yCN}+ O5vpl}H'P_?>4@b VbRQ릩?)u{x OLS3#[75rh&zVpjvEΙL>m:m;M~&16KmrkJcYu;o0gwM}f[P~|/#OcP~l辢SS'9Ꮇ:ʎlwX\g5^o)Ц@m )Ц@m )Ц@m )/ h>-?b.逿vk~|v`Cy Z6|>;c핞}H_\6OZ Jc_|*xv6nyEwc pM9~iI{v>U/(Yg#_(Jg_Ol֜Fx"s3G3`28chD>1:5kW.+ m\XGDxk8aMlA ɲ|eL45SڇC#x?[k_d@2 }M}'3=֣7|<LU>\q9?|s5 9YQБykA=)pnű@GI30g5SoŸ^fu ;ά/tuCt 43ȷ6-Xʼ-1&s>k=D:?\=Zc;*wsذh%Lکu 
|oy8 ,6F_2`GWB \yb8!t :7֟J~3QeI&UQ\u,<6(llz,]`nK'~}fS=ZP~3;3le}uZ{@xT1pq5yU8>՝vMuޟpiN<{T]Fܪ,)VgkߐmUk! iky=f]Ƣd1IǠx`A7]~Mʡ{5|xevlo$o/7yEQvGռ{wֺu:xߞŞ%K?_DJ~t:֤l,I^m {5N3bacq]h)Ɓz9gŕo@1t~g.J5NڋmoX7-;|Mc gb{؝nl ~Iwqz+L>[:CFK}-U5 =}/hvZ{7M}ܲmzXP3ΩMP?Ųlc \g"5*z^ل+D3ſe4⠔>Eoi*=:=R&lU4=E)8?1HOUZO^琕6_LxD=UFY{NR_Ez߰f 3k׎پ{0?~u`3fSg t` QKu} CVr?6;O^{ȋLHHkM|ls y#G<]t% cc׬/|)Z}B*an܇>pze"ƴ)svಲr8xV8֮q+w . >ݧp<ϔ& ꋩ]<氷Cy wkQbEp6z~5o'}b`m,PՄa=BSJanw<,o{i_8/˞j{C?ycgr~t uU+?}q#[/X^ }6-_}$z{/|']t(ߵfqB~n+xBL`QIb䃧}^J'U:⻖ݮ+ -A,Dlov$^+ûm>cܧs-n)gmjN,ڣ8'%[}YɟΙ}o?,Ϟɫ*!|Xx`6EeƵh'2mx/oO/x8Uo6F~>/Kp`[S4Zx՞6hdZ=vQsi.#7]\13_nIlI+^\/37+1ZVj U>tSi7FbX R{Gj{KuzNlTʐ ջoziOA ƽs]}@+JOwdkwfN.}:=bؽ8/Y;V9W%}(~-|8C/?]v)jgLo>36C|8{T"&y5͑fnE;2.mU2ǙsGZj͵/Gê/$:.6=i{-kCOkK-EU[}/Xj9[lj| xD OxrozkiN,s톡f{ЃS:cVqs10?Cٯ s+ett³}0fGm=% l[K>,.܇7eٶt|t5t4Fu 矕= =󫲌xҏ<]9,^9_p>'/;3s?UqZ~G,6Ǡp觅+M~e~QO #\ybͷ~ygGs?`6!:dĦ#|C _׫ON~a+(CVL&0ݩV}ĥ9z9}*C27ϥ*JcsN~SN?+3UKl8[Qo1 {v[$Ƌ[_})x*G}щx׼zX}{nFO;d6> g6hSM6hSM6hSMP@~tZrujc57MuDkZc٣K\]h~quhU|͟k!5&N^M 7͟E|nA<ԍ|Ϟ#Ab1h*48UȊ8w]ky9PG~ǖ3/tC==늹(OYVuQw>ٚ(?D68ɖsO[w4wOZh̃Lkj_m88C^}|3SȲΙ'g9~qeޱh8d5[=#ǂVΆyVuJ8sf=s0YfV OXf;36EVmM;se,4AzIv&řNEj+|[vQkWFDcK1d .6ˊcFD8OFʅ\Q?dUmCȓ̗vu75~/7c+Dy[{$/FY.z#AIʂ[vv.S=O}Se<6⑩-βǞw3!x ࿾vggEGs|koZ?)O*{,'eLO1:BMW>+feǥDx /w ?X}^n ᴱ2?υm WqKxriI,\l%'_Eb}ͽ 9G䁖5x;ZT/@OG !Ey33ފ\KƏ.d!6~UjxO\Y?{qqmK2I[N6QWƟIn ~سNjX7~|BtuV~o~ {n[|ý:̞kzmcY7gDA{K;/7'84M1:#]>_d@u<.Ud6˸;*CwXv7WgQ֯ J0-f1ΎtwZMEle`'mrgy'ZT@6lgkx,sNr;&pU;ݪY))Q~o>ltX{J{O?~uwd;8:ulz7nCvsM˿f/Hh_?wg>`f&˥},ptpYԹQO-)gf}S.<j{hugOK(d6ΕohRZpf__-79I@2toR]o=Ӑ)tHڟc:B?g?u3%#GR"m:J\`,NQ6nfxF;i]{t&-/xp˿'}\q})ޝ0^ok?]qm70 b\ /}9dܭDi':WtTq|@JpF9ϡ|t%F9QzS:=6$9/7*7g.4bpo@ޡɕDwU]'[ X43^\ ث:=xNy&[[oX`:w#QF;̫crbէΝi`>d}=` }vYf{\sSC{çQȓڲ==u86N~X Hy6-C.Xc%fpT J>pCnMls { W%߼'hU;GP]qwKzt?9/( -ꎽRd>!⧞;]t :??h`˔O>rLl2:|ES7"|]c2**3O0:w>E[/`IF3'oю$~Wټg8.?0yA;9XM2PDۗe\}Wǒkzmq 5v-:}|_Csj&L3t‡ctQۊB6{۽ 9 :Yƙ'v'80lM 0aӸ89|xA0&a"6=zVm^{ c,KyQsēW=Ǟv(+=3yYx 4' }ѧ Vc|-y&2f0_byɂOb eE9tp#/1=ʜ?gg25v/}7S ]>ЮEagТM!+1i%c sP;ҫkV?zni?wIC^elzcE_R`Ps o]Ί 
9|Zndxg}-7ԯf c -_ʞ6j?/ ٦mjTy#ݬگZ_&ls}῾}А\s]{}u{5|9}}/xk!Cyu0#ΟY X}懁 wJ^66'Fl19ۀ5o K^&;/KF+U:Mߵ\c{9x h Yz1ުG?/^gMvڀXxR t'O9^mް__vzl܂'3jno4=ӓNn;ʉ<ѹK0ؓ ;KDw/m/ƺ"(/{Jz|=Y8ubDŽ~,ҶSm )Ц@m )Ц@m lP_q}~gg}gIXϗsygPc>$+Y}+$V~bq从aOPqܪo$| S>oG?P?t?qkwo=uȃiJ3fNͯgPio+VtwaTlw"8I?aDB> kؖEG{ "4?K}go| JmZ)ratuT(dS6/J5WmMp\\%U:,EpH5mFvdַ:s y .bM)#7|+_HkTT/WgdRq?+m}E!-ckh zHzulG:}x41W6~F=e2yVjUR:k(+6_nQ~mŗ\'TIo Px{#a45v>c[HJ]&{]=*4s)Z_ b?Ԉﳓӻb|sslM{JkyEˮ#Ɇq5>pL)|5BGAYXXw“5:|:/]:경ԹpX܌`rMq1fi OQ_Szŝ._"`k4K{z}eYc"񒺔y ۿFCc bq*F+q?ҺԽd|\EFV tCڗ}(a2"mn*)}$=4* ߀Mwagžs`_L_V*b2%G2E\ h*ڨSÿLCg>4x{V= ]GCFΩxL3rY*Ȇy}߲2 9c{?3E[ʴN{ FpM&-[> -i8uVZG ҮXR,,(XOc'|~|;:"}8綹.w}WmBar|bˬl(Ct/fV0~$HViܕ_ouĺ{J꜉}}Ex726fk7ZlΙ-VkjrM|~ sYοyܳ#4t|pאIj\:g&\l/`z5vu{@`xo5_D?:|= soNwδȎqRp 6p8vvڶS83C'.CsˏSX1d[c=,qVmler2p!{׽n-{*g9i.ge:9;,f A'1Xoo=RwÃ$e!SG\lS-c,˺qu -0%/נ0y/#\褭,2k){bkㅮ+zE\uWsا+3>ypƶ</x;+qJǗ}tVls}S! ,v@_3>֏m%c^KkkcSwMneՀ\e<p'הlD3&'~3p]o ,F:GA>9q{i[h/Q_bq^;^?Ol>ƕu||,'o1a.om~\yBImx/xG?bWlw\wр>yR>ϑuA^ʥ s{LcP<Įyd$Y̫M]g# B&pI^t@6^W j>J#sih_?6'ޓ-1+׊r~|UmF|h}2d㞱Fc8\cPh@j,ƶwDǦy$ۡZؠ /(bkMwPz k^<ΐ.bO5LOslOUj\ CW8?:=);gq؛k95 \5^ z=Lu_ye ZI?5[/pƚ,4J<_is:)}Ƨ%>X2'loō27=4ζ7:XGpiIhss|T ߈7䰞hsK緾|f~ _&28u|~y¯,iîY}[ icwv,|X:Ľdq ^́ԹmWvxm$QIjU7Q[gF@lg/ݲ26w|gSt{?\.3Hipں-^#6RuAsfsadgS>?r=~)yX(q->!olv| 3?,dӥ%H|rL[\;mxU6|ꓩ=6ģBWJ_;h Rmpr4}ϩsiOigkm ʹW_Nk9/ABv p˲);gM7gŸ5F9Ӄ]Ϭ:㨉14Z lH]/(T7͝۹.S@61yuGK7yl"ޅ_תf3ʵܪfg[|\lWM?t1&Bf\_4e5ʃa;&> ox-?9 d[/s>o_^ 3QߡʷتW؛xg1h|99jw83Tg^Rs̿ksrط~>]8|lYm չ*=dܶ,cmpc} #27ɗZwsQh\x^ ^%UBs7Ya۰,]Ϝ'h}Xc{-L\k|b|ndkZlUZLk ڗm_jnXSOr\3|ob}.[v! 
7϶.kiݘӼ&Ep5W[?t[1}d@"ou4%_A'`!m`3S6]Y5r#|5.9R%=ABNYf1Ϸ} Y??GFT_8vsy탹V}W9gjURky {Gou8]exSWvfXW/2Me+cAWl{,nY{]o.2EǭƩMc^.=P7j ?l#T yJsl{Qnlq;NVLy y8cKYAigFZlwj1-\CJ;ss#kS:'x`q{G%?K$wVsWɹ~`JOk#fYmAw9 Qg)_.cw+/)%v"Q-ڐx^- P+6lKlE踝{\\,}8ݜ \S;;z&#͡}F=&ە s1%{n[:L|'q4jl-> n11ca׭Nr'Mg]^OL<+P8󡂿Q]%qv1QU O^7NSlՅ Ǭ#3blB/Qߡ>>S\*Ϟɇ-pn̻a*94ڶj~Ltv9v/b`>kZ,˕<Ƣ_hwn: Fėt$g t+_9bWʵ`qhc󯳽Sk`9?]} #:j:O;Wʝ[2v`X)US8=;t1+a"nŸ?r^[]=y,LO NL.(N/\ y10lGM>VI8kٌ3kO?WRXʚ!s*;~h0یg_3W!6_ſtO9ޔO{(/^Fr~={wUqN}qrcv%}ޏwqpkcCGFyeAlΣf{&G_ ݝ>&9H5MEE{_#|('b<7oWl˒kՉ^xx>&J?ACR)jz ~_sK4x_"}>EƁGW ˠ 8W/4{z|3LxpπN5:E7;t3S ;ѵBH{kG ω{0ٔ R@>M||y97 ]6; 6|anտ7ko#h 1i$<fhB|G>F&zYI_G"; Nv?ors'Q߂!8)be9zI #U{>5~^ΜxJN?8i -h6ży:}V杄י!rCot}C㊳mHibW{'[ϑ̘|·S|/O32,~E-|TFg83C|}Ϧ ]+WY R[S">m>SC~FcQ|Zߚ8ʟ%/[fJ[|Mޅ,ErjU9:X˂y[ke_0©3J?ُs., ljr۞eayߒ1m~*g?hҽw4ѠBW?+sp9[*/ {I>NN4co^6*H}BG*a~κŵ~: WtZR-z?/lAs؇j㟨ornn[GǧO8+=wZWjoЏeelm'\Om4n<{bͷ|}o:= m|IB΁>Q;ccީ<2[|V~*8|>{e`g1k ,oQ_q`=)ѯy鵕O|MM΁:]ܫu'Q9A;wMF3]C'9<+Kq" c<[xJn>Xe']n>%rm}u/|!x&Y:X߰ڸ.-_¦v{GX?rz /h:8|gi|bkVǗKaY{&3>9*,gӺYTKķPSm },iw-~nm(cw, yAc%cW[ڗ6hb|uB[+@ O>fT{M{{I*sK)lt8vvh{%KLT$f?[ >6;p^slƏT?{lg6Fm )Ц@m )Ц@m )p$Kcdbxfh>ý}G6x$Y׸qqS9 ȏrXz@{NbWo֦!N W\7}i$Zm^Vumb>Jk~J\̓ |X' ='j\{ yrXrM:BjΚ&ኼgro)żP{C/¯1[k8:-ήj, 몐 .sc F߁minѻغ l?]39ٽ>_=~֟Gu3ʵFc]!hƚSS[;?zVH΁K|촱7ޠ{|7r*vӀ>9lZ<}c,nW'&l}}. |EMO¼6s;曟o>Ye'>YCgN1shsԸϻk^Wk[>: ܣ㽳Ort${ĹCҞQg~d ;7UcH+f~?x{ؾZN?<>kkt_k%_!] 
RDw:6>R-H~k׽wi4yP}4ag~<;'ξ:iT=QȱpgS_gtI3|Gd}>G#xC>LxSF^JqCeHR;jM_drsxJ0`9U];~t;År.>Pg;=Zsy」o+¢9}}w쓈>?I&B}[O:oIyˍ38?'Iֶ>C9^ڹe?ۅVm6όcñU7ښMS3sCn]*o^^rb{PjtЃu]PKjeewb{d mw#fzCm.szӋ?9k&cԋY uum7xY|깃Oٻ>UC9bo=rA5Z|0sۭ[˺Xٓb`-d/(H>cѕ~7WGQe%?Է{ܰ"a/5o]FԠ9K cD𵢽glvhL4{2ۙxv e N2w^F~&Te}g_Rgݳ;Y3˳'+1"_ˏ2=?Osf{ qxa;.vv_)8X=fhny9NKp`W96Na}BnǷ g=_]3t/ʓ͕͢L(,6X}xv] Sg\Y N )pU5ڤC5<Xuׯp|Ao3iT N>3mV>qS;n֍u:9eamfv ?wl-փjv\%IPv٫_!q{NݬJ% &Gohb|X*MREì2H{aӞ Hy.gYJ[voϫr>|ATߺ7:OcᾶO6䪰qEW\2g++|=8BϪeP);mx]^fY1lmU3=v{K{a![q`{ۚB K3|(|8w&kv&(^V 4汽tX;cKygW 7b$}/eLL I Gۿ4~igs OxGSjU֝_c-{Bc6mQQ|!/o^Wau 7ASa9QWh`Vs[VkW9õS|Us0XZ)ˊ53鐏3 WmE8?Y| .}jԭ3RQ.|W8s{ AXl̃} K|BǍG9ʋ~ʎl_מ8W;/UỸ>ғ۹6x^ϱ=l_]+aYFنh E6LwK5z רђ 15d4^Dxr9m]Ȓ:hm)zGu_gF{w7υ2mbh!p4 ?:uRxb#I7M=n{V SK}! 0 v~|cALpv:򞴡7/5bKkcgx{LN\cuk_x'㌏xnqC'o{yK,YS#ס?ʆW=/) h,t~'p-pCLw$8gZYIOz1019/ke^)z\vQzV ?'d^pfC?Zɳ=*?_SYٺn+'d<%7N w߽b5ӕH>~q6%E`%{zG*?rr<.5o}/zCzB7.Ct,Ũle ,>kfn:|2I/ߴTgw~czH#\,ƁOUY 5_O9|X/:uTkhm}#Up}]eOsWM֐*Tx Ԗ~yH`'K|R\_t՚'#fy$Wsx~GoF{m?|bHؓk|.ZK2 +6GM?%}bTNTN ~uz톡7sݻ=xIc3Z-*l ,-1]N-tXhT<3S8\odC>1'b؜< ]8yا=)\KV2NVBni;3>15>a%U<X ~Vc|춳?zcmD͡d{knTy%>.nΛ}_oh4N={UΤ&03 =A|kM϶8ݳrV pǛGʍqpy'6i܅:f~C?c #k9lI=kSb|^*}(wlco|-;%MZC'e A33\?!j<&݌sGCc Kz(/xc)m;wf7res+|b(j,+IGcc*W~𻨤D V@7u?55:dg:bP0NiPS ^ڝ:}n9xlr̼eMu3svUɶ'8#چ71 _]dkS[:&_8RV[1&ްa*|pk%?WQcKnhG~L{=sv~ |"|DMe~&}yi(%-4lM~"]3_ϭ7' ~[lt"JeuM<qۦmyTۣbqOw/ny6d^n{kf5֯9:z|eYؤXω $?@v˜!P߄ULL5;+]tkwz v-~j~Fݔ_z汿w_ſE`K@o>j{ l~ 9/Y[_;~1'}%S [_ƤXg 2k˹tXo<֜aK{CQX?wEUWK&xlY x#p>2#z6 灭'qgI~4SzTfyJ˔\#>"*]YA(s)="WJwnZ\ox7:\p6zKj['r@W1>3#x>;_i~˙|rFZyK ,17BoDl|?{\?;yAd3 B{~#sDl2讫-;)3}Op1ܰlك{J4abuPtYȵ28%mc6f}\c`l8b7GeHI+5Z1Ktę]]syv%]> %exAVϼV Ε#w@; >.ϽKߓP*߻>}s ql' F􏍹~m?HN #rreyO9xm3-]W]i4f^}CqLYH>x 'D?}"7{51ݎ5?WZ2X7$6Iݽ -ߴ,me`X~#S[G9ar[~%8gjPc! 
mo3usV}>, 8; tC/5yзW){q>.i|opBBlq٬|kY6^m*,M``1=3szdx8U֟5^@媵z>2prJ ߏea۱ iv$Z峜>s#E>.29k3KriEC/aJӣە^g{gC%L4&zpV"z:m n}x⽭{c p8Nd~tsh>NK|ַ_wm[S1foRY=Gˎ~W/9AJ*֓c-?7 lWǍ\ɲ>5q_Zg/x.ۉ=86G8Nor|6_v_e9 7}%|#8bǗ\Ǟz^tƘ>R2.n3TW730G,571ЙMoAʨ݌u֦/WM p7+fbW;[%({b4ǿ'Nȸ{$x7rWsh:?m!^{L9dd0݀ig'P3xޝ-&^eپ+[A<3,(ǟ˶141B{}܇Fc{ < n7҆1!מeZﻝ=p"'p 9~=kh^Q)pOn}s_Ck`c{F~f/prO{أ-VŽֹ?`zM s!+ |3Ǜܓ_̣rB;DLW14təMnb!29)ޮ3/к9-W^p %SqdTA^~`?9C{ˀ3\3r0G,mvݳ/jm?89|. , Ӻ;>9{x廚wCG} ȟcgmO X10Y(GrBD[Qh ߽PkY֮ߚ_%~O|\r[|}w ~9d?JmR87?0nSbb*Dx qxm rv{'-Wiok1_bl,oeҶ3q죤l:F'IQO- yh+/~oc`3'caN ~ 2|Z?=;7֞KLį:Ԑ|o~Rc3q>YNK$wnߜ3;%6V,w#'Gu\'FgJ(ORw>/]z~~>-y*e s_XVԇU729ߤ3 ϴJnf>ReWEy{nh0K5Tgh^ut wTf{S<<\X#BrsN[8ֲLׯΖ?$9E֤-:#ϕ5@|:q:mnk[[K5:uV.ODxW<6+%lem{ݚB;]W. g`~=oKqjzޅ!gZ.lڰ7jPaW$}zP\䶵鹄|;[+ޛR3NߥH֙}_|Ctu gC9rjNE+,[x[x*i?ӟ>n'nй5;ؠ5qp<:ohex^xNqi3٥;pYܓSq974v_G:+#Ć <'54D;gSJ0k|;cy/e)D1f*gƸ[,$>ɡ}ᾭ (My1~2{^A|FUF =.\YeݗD,T|ӲoyY85ec$u"Iw?g5 ~ߘ >1|I.'`~5ZJIRS@ gc3TWg,ύI߈Sdiwڛ~rQ,D1ļgOhwòŷd_ѷ &eĝ#<UE.K3fzWsw$GƜ_ k)-dxf|^Z.r+cBvszyky\u&̎rQ$ybsO .{-ށ|-d̂F@A0:.[|!uF3LC3-SUrJ=.cʽyxF{ia͹.'J!eV`6Y{>8dO9'(U\lXa,9еH[Kkz d>^oƃ SDŽn"GpWFߧv\Q;qo]);]==:}18Ԥ>}TF7vn1ZQcن[tBM8t2x#p$㊰P8eXi:5Szm}_\/cZp8>e;"=T_r-GT~3UWDᵜO yک󜖌7;m-6ij|/vh޾_7eF΁|x{IwBm,'+3+Vۼyi@=e`$m?.P>6>#Za&-%X~ΝL73M](pm86֍Ώ@kQ\>o7$UҧS:,H&v{Mד4G75V}^jڼڱW e&sՕ5"GdC>-Ȇ{^Z+ivm')މI ?/i|v|D'$; Eb[HH[ #c:>K|d{>}ؘGUf2Or#ftB85l:]aJVi7Y6l ½ 54,G~5+z]\wP'jcƲL\-x-k{+,tu Ș )k!:6qq~da6@Qoc[0BGj_^J,KrnV +耾F+kt[O6C͒,6cmϼGr0]){e,q6K0f%]{Ydl \`XlB[7?|p n#z9?uC91~6W_~H 6kz-}_|;77˼u}F9>m]ѓ/Rri|@s:4Khd=xNL[#SJHOz_/]'r3/s~nޥ]q po92ϥG䍺^*}mc̓M6y8J2In-!P]lpF6tiJ}8ڙG/o+C^ yFf!Onp.+qɳ/k.hEy׮ U=C?t/;}wMdRZ#yG:QMx K.o]ۢm8| @a5y|NvdJǰV? eq;:߁C Z(ܩd?Wq3Y=٫ucߘ%|w IBMI?{pu}=8oxE1^Ow6~Jm ylSB ~;x|EΑs]K^(9[Su|Nv﬷m3`kFJ~Y yY{)ҶleL/w}-{ G:rX3kfoU,7jNk%_E<"sdXNZW)lzz ~#4B(yX<-?=c!_RC~)=>Vӹ𖾯M<ӂ>kěcNVQcc")՜;J7:CϹ+c?Snok]}aLdǽ-]?>gNrW>}>#IǕTa5#"&v8a8ﭒv߼W+Wʸuh^(וscxe2%e." 
滜ظnU9bl i^QZ9sN]g~y/U}3{;7~$W>m=<9tP2{SoI{!Ip|왢2E|Ps>pHDa.2g5:k':֖ώ.1_r)Ή>i&g)˜#ݿS_SIא_׌5؟Շb/g9fCqm-p6?6ayS%I9?[[7镶VЇzyO/uK$GSV>e͆<]s#QiޖrR~C/{knUbo](wώM G<0pbӰ>g. nC p۹0^yJ2R񜳳D,Zmk 5Q./~ ]~Ɵ ~C/3RBӂ%dt>}Z}zw𮬜K.X%Z-N>z8J;oFW'|z6Ij3ĴH<*ə;Mi4Uu8~j1>5_^6 7 M<8-z3~~=StS:\TևwZတs;|loO|[m5r/k 쑸\FOqdxf3yq̋2d:B^'ps\#-ﻭ=Oˢgߗ\aygm}i y|06-PV%Mf9?ٮh /xb.l"_݆6M>Mmq%+kߺb) O\?:"$8-ˋig'WۋǚSG|ޙ.2!v: .S`X2)Le ,S`rj $߆y]!_1q;gA6S具+@#+_;˙A8nj\>Z4?lBmNP<.}೥߲Bq qw? INE!FfSGLr#%`2)_H˙g0g~2+7]Z'w)~9k[}\= p|lE > Ρ1Lw9aϙ?[ÙSv,k5 (_Ze!w_J߆n+k_QAx{=n_tbq')%5Tv=Mv_[VtǓ$λ:mq=W)`ۓ[wNr0 [`mw*,sd~kӯGpDtkڱ /H\\q[G>u|<>o^\;flqy0p%MbN٧L?*xf$xuQg[7lgݸXWŒa!?+fc0;}M kY;x# F':w}g0b_z<1[z/i9am>e[Y?S[tzı/)ϚmGV+&>4g0ɫe9Y )Kt=wlNc֕4, 3:s{QǘQ9Gs!}w]z6W{ryAR"&'1aw.; fZ顸/+Ne1߫׍5 E%FsFn_ ߀ptuZÒ{r@Y3^\ WrfS:U\m?#>!\̾-j0=d(i^'{d߾q"~lH~9muS\K%q#1o0F{~SnES$‡wL>\nS/g 6#v+_[)kC)1[|ZBỏkoۤ c *ǟ=|TO~:d,M2DN޽8Kq}mIhnCrmq3sUpNeږۦaۑ3\38{oyOxOCM2uo1p&ϋѥCt˓?% wLG~/ b2 K?'o}vw}X<>jvɻe 8((WZO֓#kȆHg̘ &;duvy2^z-|1ȷBG+/H m:x}Dz? {4VaJ?]!'|溌d;`pU5+=7M"#3|vNrun9&DOz{:F~'0tv z9oǁk'.>PJe~伢 eorAeiwBo8 HδBn]ֽ-| g,j\ ^*9"}q>^0h@>7,dM0Th|-Ǒ+{g25eF~_P,udWb3?-7&_ g?g}/21yN$߈wٵ}C',6u݇4[rffJCѯ? ikxq\K<Ի{;i| }yʇmweec9aW7kr' nS.96;i͕]SmkYcvBl fx|3ιd7`g+.*W ~C!2>Ȼ /,  g9tQ#c:s=,k+] N'9@*y9?.8 Ju߷]sh(<[]*OTrOUۖ:+P{m/H=;:K%Q]Z"#q?hk~߄~BۼC[{bϬWVdSsx +u`sm>q+|H*~g>yc<{O+:pFUks2ZʖK[ͤ+lm<`G3h.nDXW~zN_~w|f?7ZiE,ύ. 
=6虭GDz؈`gL4z5c֜&{up_y^0R}mדOfx͹~h ч]"j?0Nōs%et:X3rng+o9z-g&pJl^}F]//?8qY琹<2*Qο8>k^Tߘ3N 8K{"?\?^"Gr+WZ\zZPfH37^\m l߭mJLW[ͺm\U~RyjiؾńV)sósLZ:풧bKdYy{GsU|Fs}AȘ>8^ǕOcɺ=k&W&ϴgXVZDW dx.zr4>Y}}P$Gg6)>Cb%Ƿտ;:٬.QفoYl6gp=:kObNk7W Hq@[0 r3N_s3nEw_&vdG1|~IHޓ|zedmBr"2dm[-y+dt~$I/&O{%'M:9f_>'u +&^rO~ .Y?{Vos7F۠r D>Nd;G;de6Ɨ.?7LKpNz[t)^o^,'gdl7?KaUSb9&3{Ա5&QD^ 7ia.^(!ұ`[KM7l_3m!w_.VVؙw%է'pre< zmR9^o0 ]S694>b 9sV22vK>̍cK Wèxu 8pw?2*~pg,,v9]Zس][p_|wX}/Oo βL>_4W?.CJ~N-ιoM#}[~Z:1YC r.G!#ǰ|[#{ȷnlq?^ز'ѽ+g^1*^ |0N_owr*5ԋzp=wJzm->}~uD'|f =x`;>뉩u:0M69>ȦWŬGYދ(M( oeb]9iR^(wGu:xm-mAVXhU%[5q`794:/!J4N| bϬПGfA]s?Y +#'Hmux>kn|w?c{%,{qr>Cp8q1\Z9?8q癯e}le\F%V'1+ȄM-}l-ӱe\fLq3`X|;=Rε~Lѩ>􄞡kv Y9c)> C+-w+Q{2sM{ksNG}Su,I=FE~Ban⡡S>WFR( {I݋ǔ|OtϾɡg[@Bʜ拏:$}wWn%gܘnYv3(Dn`S͂x ho4,惜zvڬU^A-s̸.cLy@WU'yXB0'v~89+ZFxxFւd5({CWuN- We~ʹf+ȉ`s2;K$w>>jsqwWp4q>[$BU98)kbEUUpCb"3/;'9F̶ek7d\8\Q󋸘,nUg {nDKTXݮ@}^hdgE9I[v09hrORHiO3{ibBEwfZMRNbʅU3l79;09eI]f$0`Ea:Dl쯟=&'13yp.Ʌ,z%,a% 1W7Q{ثav{n+h9L3e_%ﹾcw1=/u޸q<[ b㿹lɠmwniގrv尺B=7_d^8z^̼f+rfCR͹*ߓci ف^3Qne3FrȈgwG|use|py`E>Fw.K`ǥ2A}K;-FФOmF/R?0M޶wWvu e vbs'Nl'G$Yw7=z]fcoo >;~ܰ_d:OWޠ x]3<'{ƶW B)1&> PU1тq=W OSxL3QO;9_iNWLכsʻ=V}WV{{h3-ۿ~4ُ=;.iםzQb>׋܆c(t^+t}t://2͠Jt,uP-鼓]gZޙ\2_<2ncb0Svh`2x87s!WY{d>>c=5yA^Wi9y_#VYs?ϼ8 }f{́q_gS2gBޜ6jl w_oCIr^:S;d5,j]$ϐATFq6aI2»o"Oϫ#9aF_4ֿoR: C+7 Qlp~۴9?Mf]3氰eZ6ؓg{H.0,Ue⺉:M.ȍȿ.ocmuG`2$>l{c5vܐKΓmqL!yκ])5WzXuٌ*=ֹ+ow\Z0zP֞?=eϋwݖ'?Gə&pbgK/iaRS?.+6>ORZSs7_-}wd8B<J'ʞh[)^O$͡LuZ=-`P m\ܾn~ԺWVi/߱iѓCM pɕ Zg koOj]=͟h6Gܿw:*kےq$ηA D482%(ıFV6,&l%ϼ hT6{ovf|7|>as qZd |mJ^Kƶc7{RKJ߹o, e]OIʤ` _`G{eDPwӶِʺUׅgwT6Izs\.M-l߉oj [!k3oևF>7ɉڑ22!sָ? 
'9͕#O+tv|cC9?oh_v)8 ̇e@:>孷E[gsv}CDMtQ%8[FsXrS6-\3ڭ=.ON3!$d_>(#-kTf÷->w3ٶ[`2G;&ct cys <~`-:nWȭC{ye%yzX*I7pCɉM4'+d.>*]R`e ,S`X2)Le ,S`l;[|e:gȰcn馦?vK8XxGğ1 \)wOOWۻEշ8şvXtrg,x[b%.Ļo5'|)kU"$glJrIc y\>@  =(I(v l9korX.xGe-_5=u#xP_%O'w9C}zЂu|rxzЀ5JNLcx+|RNW\IA癃?u}{_*>r~bʴV{sڔC`خ.65o|Opc:ou3{1<[L9:]AgGXYEfY7i#_W=Ik,6F[ֹvymD۬9]dGQoGeɡb[kcsrV|1PyaԟB?g|lӞ6GnZQ'Ǜ_z'mYlt-/{5J4+0H4kiX͎]P)1;e]mC3'Eۉ8Ʊ\bڑqm^]om6e(39?!gE LMv )l|f ys .rZl%ڱ1򟝣S#g~x6cf'V/<.bJդ?mJ.߾lE_M=7P$E'+~{tLZWYAEFcM qgNqk㷞#z??/4@VWK:bn1y/g ^z\Q|9X޷IQ{4q^xSdv)C}jվ}})IJK#>]:Oξ->&x~#{۰~ȼCĒ{\ֽ'!k0T;uDzm7t2g|OwTFX,i*r^:ܕ312CŃwdqJϖOm;^ƲGخD1: WHuO E!VW"11gKЋ;S_&{nU|y/?[ҧ#ZWG:F Ͱ{&whd">GɫxNo9r^.KpedS;3xj[ۧuZz& :[\{揿w//?ϱ52S=JltQ~.S6Ӷ}\SX_1mC4y2WGO~X+3Ds =FTv>,xgsEd]~ヷЫzLbNV{<k~7{9=cϹƛ} \+h8[;<:-}+djޘx'6sڮ,gLwГKӭc撃wB(tyvxWܻ`ύ^n$V;Եg' yq=g g "2{EsghF_9•zʁگ17k=Kt&OMɾN%tlVėeڗsLH^Ɏ)H+ |8*McxB[O>m_>X*Vc;z Cwh8s)mɦ=+ bI4cv>kl rW2`NA3.'\ }IC^z/׾L{K-o$sd|o៳WY^lY2 />`\JΝa9Kmyg.}ٮDې],3 ڟ;kWvBb/[C zO[+LO1eḶX .W%b$>fdG ;8($^ww~8J&B%~6[G.O߈Nj]6 (>e(8#%n2lXY]yʜ"{%}1M8S?9à0@n-_Jϔs:6Weֽ6;6| >;4<5Ϲꚍ|K1޸}=]^`U]}>uZĦ]yU+euo;ӱ>NW^փ\=%ܧ|JϔsT]^ܾ8#,A[")o}u:D>%zndz6֕ދU5x[W p;Kc@M@jz{ɡ+}d31&JBG7^XL!Í綎uYvFL:^%ͮ3[y҆_ĆG ;ckf>yqL%'@sZ*17)V@tИ;; kS;^rp+TFu^w~z_ٷ{Ec;EˍE?̷l㦬Ce0& }(7Zl$c/ygg=[Ql8Cs~x!Q/Tgr29Q@mtK.=v|qcѓ*ߓ\SS;mr8'3k>Lx>@ og&?V )_o7 G3km9/<޸WU_;M]'˿Ǣfc./`'?:npgX2)Le ,S`X2)@I{WZ{^1bJ)-_/:WcB.y4~!GG lwW~Xk׺o~\#<n4&mW5);ĒZ{yKMJyk}E\ktw1L; gN#w,lǾPg[ Yk|]_:jrob_}тƛk^&hCh|d=QMdɾy\8h;}q^[9Xg59Z$a!WȾ"dk\%C[+&l8gU8OqOޡ} r?u#+k x9O=9J=sMJyQth|hlqi _T-V>-J|E'gNM>WK#gqni*o|:o]WU߷> M#K5_W֠fܴ۬vD'xOxw=S:@?r*'1qo*#v9n8c'7>˲]f3R IYx >qlK]Q=%Q_5%&c|s!g`C14Yf dʢ_EL}2'PK|>ݴ/p.d?׫xcnCKe,C;49߻B Ž:7h<>K:rxS[O2vz㹪k%@ڈK+|Ay5+~=g*o37DcQޓ:$6?1 3\geacc*)kc1\VrC1pC>&Ʌc=>irb8=󄱔vy.gNcXWg;5[>}" '+ZTs΁)K `O4yn^ e6$^ GgtFyP|bZu23ğvδk`{q1Z#ԋuQ}h?Qc~&EQܼeW`_WzA>H2ok{j~O}عFW{ݦ闶wj{걟gW N!ggJج Lc >YvZ'P1ԃe΍Kj_]x_ xvn2ZU,㶝+̸ZoՍup]YUz%O`ue8vG_KGxX\9 'xbxIAA1YΖ{|.8t)mN 艘c\>->Cg}k8=sLvM><LH>۲|2WLlU^؛<=A3AKKS/nƓZUڗn 5޺JoJsGǽf2`b,? 
wlQo_iUk %,J r7=/b|F=gnLM=CVld߷CfkORm%J%sk[Ca_ CqzP "9 tp8\7l5lދv|="Í 2lVȮ\\d.2z}D <[~wBjfc镄36l+k7?v|!eU%fϻlOkL}bB/$g$nq`('-[vO}iqv8>{V Ns u =>^"YpեE!st{[e 8CrG M] A9C'/:ȧ|C ,O|]rN]vGO0n7vJ>5;z]#fboZF9|ܒB czAdC}L/~G"͠[[j\50~П78g& |p.}ulV_d1iā"X=li$_+=t9!樂NV,ߔWf+\u!h))ܿv]e3?/62cӍ9 ;Vێ`;Ny] yq¡[>Xcc "[x?Obhy]:O'v'%GsOcEa9NBlO 2}qc~<`sT1F\[g 3m}qF6-9]6ݘ_ŲNY|:^®ƃ`T-}v+5,P`;s//yNߠ})Sy$?1UxK&{Qo_ԵOM~m-wBMQg7oxtr^%WS' 7(ZVxMۢsNzQd3ܳ>ցn?ɡg+:^y[ SRᝬ LƽiΪ{oCyX~sʕ09t|wJ湭m(j=x45~}`c4}Ƴf}~*tA*5ƳtnS%bgY&Ҿ=ώ=[ϺdՕ+c(Oy 7-85wE߅/9,>3f])=[vނ?tq-%\Sk_֮F>czh4[ͿZ~ ߧld67uxYp|Ws3N{̗u1$=v?|6E~9kU֕?=go=*9geȆ jf']l%yڛ?鑖c2X3>V4ݞ9t)&w]Ųm>DhFiT ;I0^-ヷo|ǫVa6vVT#&z\zY϶^d`m26J=Ǩ7 Gm #kg/P-g|R;>2lU_w:N>>./xtc'2Kd nFb!}AӀ@oh^} iYzLPv>0ued}366x]&|Όu|MklC1n9Az|ynbbK}ɛɜ z OtBq7ÒvLḏpdESX([cDVg| o[6!z Ū_?!+o9G+ǪO9zhem,X\bYr~v |SmG`ͱWrboe ,S`X2)Le ,S`X pA|[kXsvc?߯?[gUz }I /toD|ձ4Xc~_kPO3R}¹|gn>]w?c<#_<>Ygl]dMSuʿ99 3:;Bfnl8#u/Ϟ2~^ ɳc}#]U-}^]=;zRbCSHYdjwXgz]mPm\Ͻ֜D/ȩ߸5+>Ɲgi"4F-95dEn22f] 㑖wwd9GNGdg\?cJJv}9O+̚3[ߜX`95wkPd L/V{K}5zڿ-~8B{v|^x8}-5Vԯ o̦qRKX|u|nj9ZS+#Oqf:ޙg]YkF V;Z6tO eYjΗL|?>6k/!e?>|bV1>vJ7OG#_ }oLbhGbRiN#πkxځN`Խ:fxHbKC&uLp}9O(pO*SbD֝ts1,ztߩa䠈u7:ow}H BGpm!ZlƒD2q{|Y31tWW}K}kǃd-o/Iz/}=&SsלZ{9\|w`4q:ag"'6w'gl链wz0MJ~4$Uw){+k.9S]ci&)Eo>'S?P}x]iڟl{쭺68zg0rM' Gk}%."{~xCx7eUsϟ9o]-Qܻre~ŗ ͛>]ߎ;F|ܡTbB$NJ`r ԟ!p?xl24e|g+)p=([?cd(9Ten(RYн]bLxW/ =qrnizr.^(_+&{ ? kIwXr|ؘQӪ7>M\KrNCdU>#yH W]^J~ +6>,gJ/-rp7:zS%*icqke+.*ueH=yu*SZf4&,LF_&t:7unoƛ_h>B͊z]~PɁu6x8TFL!{AOdⳒAЩ$O?2{]Cj&<>8[{Iݮ'اM[ 2" =?%gs+_'d דڝy[l`zKncEw>.9y|>|IqoYP -×mn;X:~gTR=[Ŀ7 އɣS}F.s?NrRoóׄf-rqEWQK":PxWr"߹4g7GnJZ]bCȌl%}ټ&m|vhB7i5}\gh3,Lߞ*/U}|KdvYcȚW1 ס xƻ{JƧs]rv'"Wp$Wq㷾*v؞-!]@cGvك^m?F,^>*B:N"ܖ=51ԿE>] -{X^h`{mJ*l/SLH]sd>&u]Fts0##;|L蒟+kcuzlR'z@J蝕<sUܵ}w.wCUoz_ٵ g*ٓvߔGe5G^}S89U:8ªTH/3ˋ]ٽNcfD:ו._PYnޱ}n܂<˳z̅=? 
Tސyh<Ӳ{'{_ku+K/)r.\/g\m]>JG{f8-ɟҊDqrZGʗG\ֲ*91U|;O6Ƚ{Mem"]$FL-ǹoYg=[ޛi'/u crIge'خ#6/ ƹV־&~=:6?%_!c927}j /cs6<C)4+^α K_?h>s PWly[{E'MoT_PX3{P=* }>G{ygf Zc ls$~G~OZCk^fCrd+ !fױ;k;-wcfmU|L4{|x7-<ѩXi X%Sj((#OG%+Jz[b)11;4{Px$ 謷vm!oO];=&⯃2g;NI}+=ϗ:sku5(A \}kct.[htN&{2{=k=V `|Gn["=d%gׄt&MqS#܋-c֞lP^$fD4x{ssm矛O&F?(KI/k_0ȋy7KA/onyfImgL ~_;2{owemNJ*5W$a t{R~sYt-_#<4XwlPUl1]9UތTvg*ɍ/z'1"LNgg~]\BhNܐ49sk|ZfyC Q]۲u>ewߛ ؿuMsd9? ^\@L kh_ۯr9g;TT l5i %g+J !t:CW2q<i*̜}m#ΫbMا0x^x]fo5i$K&~h*Xr10|FNƽƵw#^3ip_Xh6=1nX5r{yb*rՋP\==6} 68/R3 ݠУUglLv ?x٧ oڏWuVkl[zq\x{08L{ccyޖUsK<`BF{.=w+諈55oyn2w^׍-Zṽvyγlxp\9>}Bx?;y-5śqfI{3ղΣn%;k6. _sf/;:ʃ|mzg WRT cՑgLʑYb'v%->|l81i賵Ƨ[u3kގK&3k9h7%NP:ק&ձs.U9v^za9)O{cF+^S}Jgu ]dZޫSV,v^֋0GL¶'E3_Kca`8[ۚ7IKKuș]),G xb33N_qw;Ky;W5V%bTq:*ÁX:/7YmGCB"հ94jt|h_VیVndeek˅%u\Ŗ>5ȞT>$^-o;ӓskqVu&8.*$S/4^}q߭WG|0_ qN3RC\O{w _A<4ʇM_*.'V1ՑNNSĠ@֡㮑ק6F5Zv=57Zg>4W}5մb?{Z5'v#5>ɖ={s;o/n少J~Ws^V2ߜW<5=:U?,̫͗>W9e >l:_{w1Ozҽa5Ser& gW'oKe" `لP/sKg gͧ8k҅:mNW~>r676`9 :kszZBkn9/.ỎhW:k{rn{ܧu6.wu>f=":5#mVrww]f|y̏['&߿Z'0b}<k g i |702gONnސ.m[ v+渌<2VGk7Vk6) wr'7/\ToD jxH7[}۔;UE96rk'GQ.MutMC~t0ߡ\J?S,ۯROu]72sg=;9bk#rWx3[sd}Ba]ʽI}wmvx1ɹ/tTg>BY 1[uպoV8owp4l55{ߎo5/jY{ q;l\ Ý#kC.PmzjNY-9v??3& ^'}gcD>MKߟy}b_}=5PK[=i=, O`B[E?e=/9P,MZz(Xo xyAvTV𞾠-'gXnw2@fb%u~rW+%N|nyNGkf^k,ڮc>TKRWy9a,:П?㻕=LKlU)x|ߎŖqWfqPLsɧ&-=܏ݏ -bLHq]wf/ͱj9L|~`n+dc.V^y>k^wo%̷66Mq +|^j./#kv?Z6 jX _1)MVj}/Qwu~}\ c^wy69LހR{>;mLZ tSk֗ظ,Ӹ:k%韦c;unwK/? &&\nmU_y; ""GGv:Nh/@7~.^Zזn^U;^^NխU˿S;:==Ä~ cN8 \;Un9wN Ukª[—Ǟ# {~~8_QM΋| _lg-G =4^0Z>FjcFgç_:>^Pg̖6Hb[+S1 3O҃wr 5|Gt^"xHA R몊}xh| }ٜ1M|OϸP8:`gȣglv:ţNgI&hRI&hRI&hRI&X"GZߊ} .k=(֑X'^egVg-w ?~/ks/_؟0EX Vk{ׂCv\ˑu8WGIW,{>9Z)}5-}1^|9V8aOG7s}oP8x;{ >Gzg쥱wv]x3m/6{p?Im`/wב{q-<GgZۤ"G{R?j{֞ɥ{u/1'ٱ=rxs1y=KCw_Ν=M=.OgX'zq{G^"`왂#;& >G' ';䃶:_vsZ_qj:xoSB!|Z -xjTSgƑd1YW~Qީ߱z Wm-g|#ǘvVWp`sgK5j:G{|ݧͤM9׀ePP+Նap~9=9=kX{] +;Ζk;h|nl<Ŗ7{XS{e)^v1?|bMfUS#swyϴ|\Wy׺H}(Mu,}珪=^Wxm)Mt9~Cc!6{\gwcRZ'S&6\-x[*o}}z6y1Lo3 VG. 
v\cSdM#go IqԶŝ[sC?rߕz}VLjьc5s/NB_u4FW6=_9XO5 9ǽf5ҥMCA_뇓 j?Q`IJMv'[~ي뾥옾wфzS KqS6S5$7F9|-88) l0zafWgtV#?Jש=]kL LvPS'_/`r9R]ntX+c{w@5t+u2'w|^1<6;hWUC})r7lm}k5Xڹr11-s;<-~~Z`a^~*|C$3[/uz3u{ֲTJu^XJ׀kءZ/~qGk W~j5/^k>GwEsvyņ3{}Ǣ&t9( = 198g&;Ǣƛ}Hul-jl|]|3'6{p۵c{O ov8 ꬵ=ZwZ[x) k/+-`q!`ݧߺ:>*Y+gc]d 75ɮ_oճFŖ>Me}Fk`]IO`_@f=2/N:=KnHehU+'o7ʗ:FY_17iiquk=|ݕ'}m~ vyVk"9}TtZ{[m6RطAl~iI=i,I-n 6!a7 'UWk/H_G5?B42sǎ0Nj_M_;Vu>N]/}u}} Nsv}Kex3}>9޲6~ V6E<||sR}u3kYd{N:ftϕ=Ś3w=ںfbj ĆmOÕCGEآM~-PK-=}&7t0mQ ^!;6għ;z_٫j$1Û~C/HX@gV9=%-}_}C-.Yf/s,']-'`@3}gh_W+^uǞ"xs@΃7l4_؝vݎ~FK\Ց_~Kdr9ky|/%n /t<9Qs5lJ̧MFJğoI~r35N@0nKVoEkB;So>#{Bq)ac?;=jb.%le2 >Qua4:hNsVƟ9\< [ڙe3(k=ٻ-z#ZRd[O?v;d. ߩbV۞g<1=~rCum7chv;d['Z#gK d~c_gxҦ|{ckm Ozeɯq?y>$cB*O_?B2 K_ ?]bqbX`dOO{{qt\U;}|B9)>^1Zc>kW1Mه| >X*>bLj<{̇1bXF|`:6um[|d'=| Zߐ|iٻ2lq slgJ;UJS(my]±;ڮ[D&M 4)Ф@M 4)Ф@M 4)Ф@M 4)pPĻ`kFtҐE,{/Ǟ9& /;{u}>_gsb%|=طU | m\/+Z.{9Ry5ʁ-[,Fu؛`_]ָ=gNGY{&G#^ H?W ~zw5ObG C6ءh%-{;_MF^9i^6]~,スo%M1h7>Ƹ/ i(Czl~G3c%,P32|':h]{ÉtxVg{n|, 0fmFnc{|+:HkU?Kog;7(NY{/׹7<;/)ώ-ve\&/<ӹeK[AxrǺ}i;~PxCȩT_))HgK50~jjl@N>gVyK=yx&=r 9̪݋K131Y!haF|t>8Zggma|[ ь/'k{zw(G3i޿YmWu7v'`u|]o6^-;ڎUD΍sEv^o~r\im''&WoTo[ZS n=6cm(:t6:mo1_e24ž.kr2/|b }B+j4ɏsO_[lSl\LUvYcmu3=ЙLݚ=')ƾ-f_<Me;NۘDgŽe=VG>~?ߏ>20gxwR.[;_z?ie09h;.pT/h Wnx&UnK.U]+֠w`9v;[;ye|G|-WGYW|c'JmmlzAPW.s^k^%U|Nu\w|TzCg]?B>Ѝ<^)kQN\jU9Kq~؃X=o7ZlN c\a_=ߧf9|+VK9݈ }dT?tvjRZlH:cyW͡Ȯ3W5c slrgN|ScYͰw{zWsV~>4<=Za?tYZ79E*M֣GS4~v镈 _"\kj.% vF7'3{8W\s'~U/En8cxΨdھrה9k/T~FʃWǕIN骃0Ufu&8kfU'/?ojH)?v4y {ݻ<0|wwaۤɚfNÏsj8|p1р<# &bZWG6uny3bŋU=ȟH]A[I>rW-ޢuVrFpY=O} }cF:<Қ㘯#GTGH9Əvgm={ts{Vg}˗vQ7\+Z8?#=GyםcǛunrTǷwlOd#EgO{s{GsM3=ݴ65jIż%:)߽q>G< X>QxxMXlyzLSSg{<~,.ZQa8k͊oNπ/~Qgt1-m>8 9rikGRe~Sg> 5om^*/Ϯgi>~O34k˹"Ǽ^@c|FUnm_9;fs畭Pfgx[&w?AudG]^ޞFܪ;}clOBm~jGO;k/zJ{ـ333K :s,ƻk^#L9n/ݞpb-YN` kM=By f?؀3}c't:a\G =umfs־>O81ɕܥc._ڜ{ i-/:Z zYlּ'cjؘ _i+/)ŋ~:t)`٣=|Y3]L66_̉TP1տ6|qUo]>'[lEmN7ʎuho9W|M[^#^7gCA[4O3r=K n5_ł\#_(Xo[8OYG~_]? 
lRmÆ=5U74W=R8j`~]˶r λ~fx9$"#0o}+q5AҞe7liw<9[\)erq#2ӈ7y^{g)<fSj}`{5T4[Wxyh <1][K59=on|t<-ޔg>v_|}=c"v~weJ4Yqok_~3kI .|u/V@sӔg1w^{q|VKؼ~3::5~s<N1l%8=e}|r ^OH&53dex4>!k]f/f+yM>@ : I'Biero~-?|dd =z|C+h@H;Q52CU؅󗛬/#/(k(>"8VyF cm?X=Z/N+l0G=qeĜ̕/cN#a\>qdZkٷq(d\?Mh|_l^oVv?z kOl;'S9Lo5bv֨>~w1~2Wdsfg"ƾ~EFǯG$3LvM>CN;8.W͞uQW8n:z֙>@-}UAq9g뙏)xNh}tϽVs0I7_qޖ :徻g`t9}z X6)Ф@M 4)Ф@M 4)Ф@M 4)Ф@7(G>,X/zkgC̩l=6vd)/097)YYSq\eH"~U>٧_;T8 us<~Y485p8}{_?T|#"D8|ZU=اJ#\^3g7CrO*GiG_ FMr79=eovbχM[Νu`%???ߋdOr. >sϖ>@M{ܗpq8LV^c<"Q+ĹT!V;,Cn-`? Gb?hqޅ,z|'w=ov{濅&=<'}&EE{wmcyxc6xRVZ=R|X5CB&Cd6u~fpY-r-ǼȿCϸ6ѡ"v`OC|y5l2M1o'ecr-5.TK";臊͋SW*_R pد=|X<<y䟺zt.1?90=n|3:W.(uS.ʊax s?% #ӋοNx9iWyU]>>`9 -rӸ+z}htc)4'irƦMOLc^.~o69ƙz9;n]?nEԠ.=5AiiNxb%qΔ{؊ 4/:rl|u[[O4tQ&#:xɈź[RJ2]!ġaGVr>27gr>!v*Rl;TamڰJ՚:s'`|;uַVdz`Me\ӝ6"6d{lxs8|-K0=>%=To}0gdy~rv2xKpK~`ch>'-F <)q\o<&sPOI`軬y~.' kgKuD뒩Ji3E5RA;'x6T]+x-?I)?~]}grXK/λRԡzCU(1ں+\ q '{*HIy7>0T?0{lL6%l6ǜx:n;7cN*lkRWc/t1i8Tg!>?Q=Z{6α~>Kcgm[1>s\510n>7vBH-c|TNWySϪdk/ZZ'_sd߽vpp|]k][n3&}9v\|?ۧ?ZV~jf8k?*g#;6~;J 3g j/M$ۧ/6):18&۬Wl=pq̭P--ƚ\ '4꫔S;"Xݖ(u/m:6ǩ\ qG?3h^9 +!uٱ10QQ#ӻ`{ɹ%g67vZ:ầ6kGW\<¿591>oِOl,MkOLF-)́ _ =I]vrs[{OـWqjV޶!zGIg~)7k!lB4^ W:Zx13KL ٮvV) nO|x&R^ܝtg GrW/3]N`Yzu.|9ٷ\1'GJ|խ~q'u' ;mc19J\MޖN^[w|6[N{?ztB6&6H~ kbjxtE徼4tE@`ikmQjV˳#NR\mr*<=Y.߳X5: ?:mX:#pLwb6^ m_ɸu^v[5Aթ[P3ިm|2:<+iYxM}j-O߃V3k8wxOje7/u<ϼb+(ŧg\<-o5ָKw_Y Z+I*7j/pC'g|g`̏sHdW\}jзCe9N,XkUb8pWj&;rgE_+}?-SmF| g<:9c'GbA!6'ְ\]NU5=7Q]W2|_I^5-k] 'pFk_mLe-+r%CY"G,1VoAEShSc݊R*W?u꿗gN7:ʺ%='w}Tu}964;5(U~jd=0eأ2m?- 'ОOd%b ;4^\(`C} ]k1.Z+^S:i~h%aj?g(#=t!?:{gBo]yk㨍u)k2q_гߌĜ6𾓯ڠ%S`&_d)r~̩ܷ6_Yl#=tFꃸNyOcO"MiKH/Lusa#7jnPCm dO}'ذA{P |μY%̌7gC?lS[RT@w_f3wNtٓJ xč[eZrzÏay09'bpo; g<эq%gg/RǣJw3~wo/=_Z#U ;'!I#vqMr/t^G BﻬOI7;s4pcp :\/6 =;[(-l*|OmY]?MnSd61_G  \|≠]VMȺұ k9 ]a>U/pӨCq҅Jy{ذwZ sY؋y7@6,NZ*澈59627TG>]dxCN-zlӡP ^;sCm}}ӥ ΁N_ cNOeג礧xuGkziLoXm}qPܑ4}%ι޷GOŧk /4y|C&gAgqah\O-kg/ ;-ܶ Z۷:#5xM+)}-h%eO;i^a7iYgyo[[_Z<P ^c>Is3RZ4;g<+(^Guު޹},yh-+i>.x2ձ sWޖ 1V͎x h*aU9/G>9-L? 
ew|0?{o:So6`U^=}CҿNy =9wSD~ϡNheyWd'owB/+/O0_1W(hL-}1n#ߡSsuh#~KTcg{֓ dj?:]z_)>`syiveᖾeS5-;2]_kTFnpQΝao=Lnd4}zs`eXch9#t,~rJD٠o ]BaMfOY}D֫fD+o͞_Jΰ>*?I6 \n/rbo7mϵ>tػt+͏mlBmNOd=/߉괕ցrv7Cv7VLN=km/ILxg8*wnq{ݏ:=C-.)WОOFwYkZ_z.sW(mXb~}ÉNQҮgW{D> V݊Uc~[xJ}5/y߄~xچc OW*N9v7c=hy^;RXUfw1+LQoC1&x+O9vK~ʬx!+zRM1EڵNd VՖ1^zz.ij=w]V執TdKyOT>@Ŗw hcw6._l #K{on3]mW]cuwZ-v7}RUGZbFne2NGʗwe\} olȓ3wija]ڶcu|N|-Y:>@[*.kǼV^/!Xu-.+veھߵI 2|ڃߞ៖uۥ߰|UGk6j米'O6v߮Ug9VcW uҾ쏤Nt\;6Xmux'Km¦r28lM1wMKnɗ9%yo˻&352ڠsj,0AaԮ647:PP-Y{LqI_?v6<_{z7ΨWE9~9j\뗰p}`uc?ѽ+qUT._wtڂꮽ$}gvL^`r {^\?xs{'r37cj.#[ᗮ 6;1 7d9d%239t;y5;禎U{fk6/Gbe]5{j}C׫EN?}s~|gJSmC ane]ښ)fU]ʡ/3Nr wiJQY;+s:!zU9}xc{\W*)|>Z|=͟N~nh_umS_i-Le*[anݖguf'5ês:]g O:/o"]o7~_?ga1^,/yy5B!uY'ЦWcկTݒ@˂<\J?E3Yҙ&yt&1qC8G\uRU7+Z̏cS?f?KoY҅'``I|}Ϛ cC/~ZX+A>p}rZ>UkoÜ# ~E/{j-lMSζ[N.7z?Qr:{dm앺b'~~n\̉V" 1>Blޯ hZ㫺Sn5}zt8xUc,$quYǷ,qgm3W48 ͮ qZ{F`߶[A/ԣi'|>_]1=%u>`kDVg}l<=>6qi@MJV>>s'lc)=_1w ^ポwlԦ~CW9`Ps[kX߅]&hRI&hRI&hRI&hRI&PX )笠W;]|ww\ya-ه~^yװ_/3|_y"PUn{L|ߠ1Hߙsar/s=495{f{;Fv1q-؉8Ǟ,~s9O#ו9vyS_?Dn[~~sqLG~ w=mp#qcnmߣvX1c{<` Γcu\.t!/®y:F1 b1*sCʞy 4\ /3bppF2Lo Gr\/M~yrWD6N̝P&.U5* oAVAWOw~@|:WW=YpFiPO)_xؿ䃮yh__IIfO'rvךF+{\?v[ 3Q˱<=䙹9"6fMuw7^Ѕx`=EL!D +b/1WW(K-*8{p.Uvn2Zp |p<'} YgZ:E_%I2#|ſfǺ+Չ\ (|Ǻ/~|ԦtzONyboBOb)ĞE|Dką8%3W0}yWW̿BOe@nk }ʩ,9u.ͿWm+x˾]Ia8#+fw%|sGC4Eq׈g ^c__^~x:~Z skl&Skx58t!vfd::?r4yɠr%i]#{[.R|ޢ3S=/RW8)Ӂ3!h霻 mڶ赆K$pfFؗ_O"C&Wϟowإz*)[6F96xفv(Oi$,v\ice^oϛ|qEzO =ww|DS9gu&cCnڤ23q~:rmr :˶׎|xΰX -E1sG}ixR^ `$UlYJs)}7yQcn9W) ~SՏJܙ;4ǿ/ř+㚉  f|yfZ=Yyƿ] rR>pF9FsٷcNsO(塾#?'QGկIe5ƿ#bHw~H򮎫0OW)Z /L|p}r&Ŗ7?f&zk c5|g e5ʇyt3ܱaɉPt:֟vb88Ccm{ro|# Un8f_ ~tt%}?[)g oJWЏ?|KzՆpy#uz' 63W7sZw#/Hq25pjj5[qW{FkVV!/ ?}|cStF0tQ͠;[~i8róR?ڞ3ImCywh\|x>W5YnR͹ ckq|-JBigOǕ-}JZtWMdž-}OnPNպP~X. 
s>t|xz: /} ̆!Z7(]%ټuiXm517ԧO:CNq֭4/}}f*svwZGBs߭ VSW, |ܮPApu箑'vƐlQ{+X3od$L0h!U>79xe8ǐ7_5@w+gg=~ios`Sm ؁n=ߦ:^s#q2Y |hչ./tƟk%+?~j}}Eu=0=_06&6B ׍-Ly:.Ӱs/S33E i.[{P?FNc^}j`bMO&>/x ^k:=DY\Jp"~52 T6>͉l,ݼWku]F7d5Z`.xsk`/,'\7Cw-w\ p^^zL N|cblL˞\2Ϩ67u {||.8ͷx7+[_v0k7ZkkS= 'lz=?c2T(AՑK҅⏾6hV٨ug`tzٶs5נFyr4Ob|wd}\ah)wAw4}nQsss/=y' %2i>6očy'5v}+Jipć:cơMsȞ~ΙkW|&hRI&hRI&hRI&hRI&hRHRM&7W^q6ڏ8(h|tߖ꒰ ثtjpxkr^~Ft2t²X#pO^ޛ;.Zy:!KdboArg'ufy6+qG9KوFk]:7yFYFzQĆ?CӞ>rbӚ \/M^x\Z`dCrO =~7wL+.o'c`P,}#-Ale=|,2{P _V1GO_g-LV)n87Mgz?Lq?Q?x63dbۓQ=gt<3q;b,mq{B&*7ͿP.=W?vjxIı: T`X +9j_sݸ"S1wܽd~eNıV]+ =1F3~-pX=ҕ; K6?K1uC[mް]O,MεZcZy w. EUH=~ɮlOe6_yXFxx2pqRի~ǕxpOij˽^R!$sJm%V^Ǝ55TlO 1E]X{c~wkֆu<ü-|ŵ}uKS5Ճ"[l^^gyț Y6cVԘjxo:sjS]J[^nK[M&sWoc7_XXlTvzg]'Sj{'6L͞j: t5x|zh'9U>+mg3OŝK}px/?{!o(z=5ҹX#x1B?Av2_z8=19ݞ쯝yNZ 63qUk9{#otq&tX{G.P8peY/ݰvy}9q\bڼh|aR|eץxfߚ=Ӻ vG]J Q}](~^qr#͞k G\{ 8MZp ~{vO{Řw^q=\۲a|U-}_M);4TvY?duFԫmu}tCƙmAȀ<w1c|v⴦ms?$-u%Z8+16dkXmcϘ<*1U6yA~-ƗKoK>|o\)ڜ B%|Q`trƓ>gf2 o6; i{M76 zڿNᓺjmcdWn#cw'`|^ /NnA?4\>pw'vi^o~uc5Θ._&Z +;^UW2R,?M!>7ƫl5<ڞ6>E ݆̄M|]OXLwf⤆}b.7MkRZ9 י_wW+1s=ϸ. vy$>oU~zjS=/m,p0v|/d \6_3WR.TӣC^] :qȰϣ mV|VV~bQkOf&vTe_/UOMjO~zu|cS]ܷ+Or!wc0u<۪BT֝9%#c7mnmj}?oIBg+$Vcu U3)Xo_xf&vٳ {#? _ƾ7BEԖض8]zkƧڶꞙX 6=a5ŖVޖKg~g7uxosLg^f3oKej`xC<8Z[ɇA3z[ g7}̛oD;vxNrm^S 2ps;ҖtZSV: ['/,[낟?c іtn a#2`eº1˯{$>N{89TVVMc |E¿ZBFװk$d9VR}D |@Z1W)ek؀||Vc3X4Z'2Y9ת[?X|(j-sSD35sXS9)X{BXP |8;u L/踯WyG{k